阿里巴巴的开源数据库连接池工具Druid数据源,在实际使用的时候,通常会和Spring或SpringBoot整合一起使用。
但有些时候如果不使用Spring或SpringBoot,而直接使用Druid的原生API来操作数据库的话,该如何正确使用呢?
下面是一个使用Druid连接池简单操作数据库的工具类,重点关注Druid数据源的创建和复用:
/** * 数据库操作工具类 * * @author shiyanjun * @since 2020/01/05 */ @Slf4j public class DbUtil { private static final String DB_URL = "jdbc:MysqL://localhost:3306/authapi"; // 数据库连接URL private static final String DB_USERNAME = "root"; // 数据库用户名 private static final String DB_PASSWORD = "123456"; // 数据库密码 // Druid数据源,全局唯一(只创建一次) private static DruidDataSource druidDataSource; /** * 执行sql更新 * * @param updatesql * @throws sqlException */ public static void insert(String updatesql) throws sqlException { Connection connection = null; Statement statement = null; ResultSet resultSet = null; try { connection = getDruidConnection(); statement = connection.createStatement(); int count = statement.executeUpdate(updatesql); log.info(">>>>>>>>>>>>> 插入数据 {}", count); } finally { // 切记!!! 一定要释放资源 closeResource(connection, statement, resultSet); } } /** * 执行SQL查询 * * @param querysql * @return * @throws Exception */ public static List<Map<String, Object>> executeQuery(String querysql) throws Exception { List<Map<String, Object>> resultList = new ArrayList<>(); Connection connection = null; Statement statement = null; ResultSet resultSet = null; try { connection = getDruidConnection(); statement = connection.createStatement(); resultSet = statement.executeQuery(querysql); ResultSetMetaData MetaData = resultSet.getMetaData(); while (resultSet.next()) { int columnCount = MetaData.getColumnCount(); Map<String, Object> resultMap = new LinkedHashMap<>(); for (int i = 1; i <= columnCount; i++) { String columnName = MetaData.getColumnName(i);// 字段名称 Object columnValue = resultSet.getobject(columnName);// 字段值 resultMap.put(columnName, columnValue); } resultList.add(resultMap); } log.info(">>>>>>>>>>>> 查询数据:{}", resultList); } finally { // 切记!!! 
一定要释放资源 closeResource(connection, statement, resultSet); } return resultList; } /** * 获取Druid数据源 * * @return * @throws sqlException */ private static DruidDataSource getDruidDataSource() throws sqlException { // 保证Druid数据源在多线程下只创建一次 if (druidDataSource == null) { synchronized (DbUtil.class) { if (druidDataSource == null) { druidDataSource = createDruidDataSource(); return druidDataSource; } } } log.info(">>>>>>>>>>> 复用Druid数据源:url={}, username={}, password={}", druidDataSource.getUrl(), druidDataSource.getUsername(), druidDataSource.getpassword()); return druidDataSource; } /** * 创建Druid数据源 * * @return * @throws sqlException */ private static DruidDataSource createDruidDataSource() throws sqlException { DruidDataSource druidDataSource = new DruidDataSource(); druidDataSource.setUrl(DB_URL); druidDataSource.setUsername(DB_USERNAME); druidDataSource.setPassword(DB_PASSWORD); /*----下面的具体配置参数自己根据项目情况进行调整----*/ druidDataSource.setMaxActive(20); druidDataSource.setinitialSize(1); druidDataSource.setMinIdle(1); druidDataSource.setMaxWait(60000); druidDataSource.setValidationQuery("select 1 from dual"); druidDataSource.setTimeBetweenevictionRunsMillis(60000); druidDataSource.setMinevictableIdleTimeMillis(300000); druidDataSource.setTestWhileIdle(true); druidDataSource.setTestOnBorrow(false); druidDataSource.setTestOnReturn(false); druidDataSource.setPoolPreparedStatements(true); druidDataSource.setMaxPoolPreparedStatementPerConnectionSize(20); druidDataSource.init(); log.info(">>>>>>>>>>> 创建Druid数据源:url={}, username={}, password={}", druidDataSource.getUrl(), druidDataSource.getUsername(), druidDataSource.getpassword()); return druidDataSource; } /** * 获取Druid连接 * * @return * @throws sqlException */ private static DruidPooledConnection getDruidConnection() throws sqlException { DruidDataSource druidDataSource = getDruidDataSource(); DruidPooledConnection connection = druidDataSource.getConnection(); return connection; } /** * 释放资源 * * @param connection * @param statement * 
@param resultSet * @throws sqlException */ private static void closeResource(Connection connection, Statement statement, ResultSet resultSet) throws sqlException { // 注意资源释放顺序 if (resultSet != null) { resultSet.close(); } if (statement != null) { statement.close(); } if (connection != null) { connection.close(); } } }
import java.text.SimpleDateFormat; import java.util.Date; import java.util.UUID; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; /** * CREATE TABLE `auth_code` ( * `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '自增主键', * `auth_code` varchar(255) CHaraCTER SET utf8mb4 NOT NULL COMMENT '授权码', * `status` varchar(255) CHaraCTER SET utf8mb4 NOT NULL DEFAULT '1' COMMENT '状态', * `remark` varchar(255) CHaraCTER SET utf8mb4 DEFAULT NULL COMMENT '备注', * `create_date` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建日期', * `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '修改日期', * PRIMARY KEY (`id`) * ) ENGINE=InnoDB AUTO_INCREMENT=18 DEFAULT CHARSET=latin1; */ public class AppTest { private static SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); private static ExecutorService executorService = Executors.newCachedThreadPool();// 线程池 public static void main(String[] args) throws Exception { String INSERT_sql = "INSERT INTO `authapi`.`auth_code` " + "(`auth_code`, `status`, `remark`, `create_date`, `update_time`) VALUES ('%s',%s,'%s','%s','%s')"; // 模拟多线程向数据库插入1万条数据 for (int i = 0; i < 10000; i++) { Thread.sleep(5); String code = UUID.randomUUID().toString().replaceAll("-", ""); String dateStr = dateFormat.format(new Date()); String formatsql = String.format(INSERT_sql, code, i, code, dateStr, dateStr); executorService.execute(() -> { try { DbUtil.insert(formatsql); } catch (Exception e) { e.printstacktrace(); } }); } } }
版权声明:本文内容由互联网用户自发贡献,该文观点与技术仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请发送邮件至 [email protected] 举报,一经查实,本站将立刻删除。