spring框架对JDBC的封装
JdbcTemplate简化JDBC的使用,可以避免常见的异常,封装了JDBC的核心流程,应用只需要提供SQL,提取结果集就可以,线程安全。初始化可以设置数据源,因此资源管理问题也得到解决。
RowMapper对结果集处理,Spring JDBC 提供了一个RowMapper 对象,可以针对Employee创建一个EmployeeRowMapper对象,实现RowMapper接口,并重写mapRow()方法,在mapRow()方法中完成对结果集的处理。而其是作为JdbcTemplate的参数来使用。
示例代码 EmployeeRowMapper

package com.tiger.rowmapper;
import com.tiger.bean.Employee;
import org.springframework.jdbc.core.RowMapper;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
 * Maps one row of {@code tbl_emp} to an {@link Employee}.
 *
 * <p>Passed to {@code JdbcTemplate.query(...)}; the template invokes
 * {@link #mapRow(ResultSet, int)} once per row of the result set.
 */
public class EmployeeRowMapper implements RowMapper<Employee> {

    /**
     * Converts the current row of the result set into an Employee.
     *
     * @param resultSet result set already positioned on the row to map
     * @param rowNum    zero-based index of the current row (unused)
     * @return a fully populated Employee
     * @throws SQLException if a column read fails
     */
    @Override
    public Employee mapRow(ResultSet resultSet, int rowNum) throws SQLException {
        Employee employee = new Employee();
        employee.setEmpId(resultSet.getInt("emp_id"));
        employee.setEmpName(resultSet.getString("emp_name"));
        employee.setGender(resultSet.getString("gender"));
        employee.setEmail(resultSet.getString("email"));
        employee.setdId(resultSet.getInt("d_id"));
        return employee;
    }
}
baseRowMapper
package com.tiger.rowmapper;

import org.springframework.jdbc.core.RowMapper;

import java.lang.reflect.Field;
import java.math.BigDecimal;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.Date;
import java.util.HashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Generic reflection-based {@link RowMapper}: maps each column of a row onto a
 * field of {@code T} by converting the snake_case column name to a camelCase
 * field name and setting the value via reflection.
 *
 * @param <T> target bean type; must have a public no-arg constructor
 */
public class baseRowMapper<T> implements RowMapper<T> {

    // Compiled once: camel() runs for every column of every row.
    private static final Pattern SNAKE_SEGMENT = Pattern.compile("_(\\w)");

    private final Class<T> targetClazz;
    // Field name -> Field, built once in the constructor.
    private final HashMap<String, Field> fieldMap;

    /**
     * @param targetClazz bean class whose declared fields receive the column values
     */
    public baseRowMapper(Class<T> targetClazz) {
        this.targetClazz = targetClazz;
        fieldMap = new HashMap<>();
        for (Field field : targetClazz.getDeclaredFields()) {
            fieldMap.put(field.getName(), field);
        }
    }

    /**
     * Instantiates {@code T} and populates it from the current row.
     * Reflection failures are logged and yield a partially populated (or null) bean.
     *
     * @param rs     result set positioned on the row to map
     * @param rowNum zero-based row index (unused)
     * @return the populated bean, or {@code null} if instantiation failed
     */
    @Override
    public T mapRow(ResultSet rs, int rowNum) throws SQLException {
        T obj = null;
        try {
            obj = targetClazz.newInstance();
            ResultSetMetaData metaData = rs.getMetaData();
            for (int i = 1; i <= metaData.getColumnCount(); i++) {
                String columnName = metaData.getColumnName(i);
                Field field = fieldMap.get(camel(columnName));
                if (field == null) {
                    // Column with no matching bean property: skip instead of NPE.
                    continue;
                }
                Class<?> fieldClazz = field.getType();
                field.setAccessible(true);
                // Dispatch on the field type; char/Character intentionally unsupported.
                if (fieldClazz == int.class || fieldClazz == Integer.class) {
                    field.set(obj, rs.getInt(columnName));
                } else if (fieldClazz == boolean.class || fieldClazz == Boolean.class) {
                    field.set(obj, rs.getBoolean(columnName));
                } else if (fieldClazz == String.class) {
                    field.set(obj, rs.getString(columnName));
                } else if (fieldClazz == float.class || fieldClazz == Float.class) {
                    field.set(obj, rs.getFloat(columnName));
                } else if (fieldClazz == double.class || fieldClazz == Double.class) {
                    field.set(obj, rs.getDouble(columnName));
                } else if (fieldClazz == BigDecimal.class) {
                    field.set(obj, rs.getBigDecimal(columnName));
                } else if (fieldClazz == short.class || fieldClazz == Short.class) {
                    field.set(obj, rs.getShort(columnName));
                } else if (fieldClazz == Date.class) {
                    field.set(obj, rs.getDate(columnName));
                } else if (fieldClazz == Timestamp.class) {
                    field.set(obj, rs.getTimestamp(columnName));
                } else if (fieldClazz == long.class || fieldClazz == Long.class) {
                    field.set(obj, rs.getLong(columnName));
                }
                field.setAccessible(false);
            }
        } catch (Exception e) {
            // NOTE(review): swallowing keeps one bad row from aborting the whole
            // query, but a logger would be better than printStackTrace.
            e.printStackTrace();
        }
        return obj;
    }

    /**
     * Converts a snake_case name to camelCase, e.g. {@code emp_name -> empName}.
     * Recurses until no {@code _x} segment remains.
     */
    public static String camel(String str) {
        Matcher matcher = SNAKE_SEGMENT.matcher(str);
        if (!matcher.find()) {
            return str;
        }
        StringBuffer sb = new StringBuffer();
        matcher.appendReplacement(sb, matcher.group(1).toUpperCase());
        matcher.appendTail(sb);
        return camel(sb.toString());
    }
}
package com.tiger;
import com.tiger.bean.Employee;
import com.tiger.rowmapper.baseRowMapper;
import com.tiger.rowmapper.EmployeeRowMapper;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import java.util.List;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {"classpath:applicationContext.xml"})
public class EmployeeRowMapperTest {

    /** Template wired from applicationContext.xml; assumed to carry the Druid data source. */
    @Autowired
    JdbcTemplate jdbcTemplate;

    /**
     * Queries tbl_emp through the reflective mapper and prints the result.
     * The result is a local variable, not a shared field, so tests stay independent.
     */
    @Test
    public void EmployeeTest() {
        // Hand-written mapper equivalent:
        // List<Employee> list = jdbcTemplate.query("select * from tbl_emp", new EmployeeRowMapper());
        List<Employee> list =
                jdbcTemplate.query("select * from tbl_emp", new baseRowMapper(Employee.class));
        System.out.println(list);
    }
}
applicationContext.xml（注：XML 配置内容在提取时丢失，此处应包含 Druid 数据源与 JdbcTemplate 的 bean 定义 — 待补充）
jdbc.properties
driverClassName=com.mysql.jdbc.Driver
url=jdbc:mysql://159.75.79.151:3306/mybatis?useUnicode=true&characterEncoding=utf-8&rewriteBatchedStatements=true
username=root
password=root

druid.driverClassName=com.mysql.jdbc.Driver
druid.url=jdbc:mysql://159.75.79.151:3306/mybatis?useUnicode=true&characterEncoding=utf-8&rewriteBatchedStatements=true
druid.username=root
druid.password=root
# 初始化时建立物理连接的个数。初始化发生在显式调用init方法,或者第一次getConnection时
druid.initialSize=10
# 最小连接池数量
druid.minIdle=6
# 最大连接池数量
druid.maxActive=50
# 获取连接时最大等待时间,单位毫秒。
druid.maxWait=60000
# 1) Destroy线程会检测连接的间隔时间 2) testWhileIdle的判断依据,详细看testWhileIdle属性的说明
druid.timeBetweenEvictionRunsMillis=60000
# Destroy线程中如果检测到当前连接的最后活跃时间和当前时间的差值大于minEvictableIdleTimeMillis,则关闭当前连接。
druid.minEvictableIdleTimeMillis=300000
# 申请连接的时候检测,如果空闲时间大于timeBetweenEvictionRunsMillis,执行validationQuery检测连接是否有效
druid.testWhileIdle=true
# 申请连接时执行validationQuery检测连接是否有效,做了这个配置会降低性能。
druid.testonBorrow=false
# 归还连接时执行validationQuery检测连接是否有效,做了这个配置会降低性能
druid.testonReturn=false
# 是否缓存preparedStatement,也就是PSCache。
druid.poolPreparedStatements=false
# PSCache的数量
druid.maxOpenPreparedStatements=60

pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<!-- Reconstructed from a tag-stripped extract; property names inferred from the
     ${...} placeholders — verify against the original pom.xml. -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.tiger</groupId>
    <artifactId>spring-mybatis</artifactId>
    <version>1.0-SNAPSHOT</version>
    <packaging>war</packaging>

    <properties>
        <spring.version>5.1.3.RELEASE</spring.version>
        <slf4j.version>1.7.12</slf4j.version>
        <mysql.version>5.1.47</mysql.version>
        <druid.version>1.1.14</druid.version>
        <junit.version>4.12</junit.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-webmvc</artifactId>
            <version>${spring.version}</version>
        </dependency>
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-jdbc</artifactId>
            <version>${spring.version}</version>
        </dependency>
        <dependency>
            <groupId>org.hibernate</groupId>
            <artifactId>hibernate-validator</artifactId>
            <version>5.4.1.Final</version>
        </dependency>
        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>${mysql.version}</version>
        </dependency>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>druid</artifactId>
            <version>${druid.version}</version>
        </dependency>
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-test</artifactId>
            <version>${spring.version}</version>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>${junit.version}</version>
            <scope>compile</scope>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>7</source>
                    <target>7</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>



