Druid監控配置及擴展

一、基礎監控配置

依賴

				<!-- Druid -->
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>druid-spring-boot-starter</artifactId>
            <version>1.1.10</version>
        </dependency>

1、純配置文件方式

普通的單數據源項目中,通過在配置文件中配置的方式即可實現監控(但特殊的需求如SpringAOP還是需要通過部分編碼)

spring:
  datasource:
    type: com.alibaba.druid.pool.DruidDataSource
    druid:
      filters: stat,wall,slf4j # 配置監控統計攔截的filters,去掉後監控界面sql無法統計,'wall'用於防火牆
      max-active: 20 #最大連接池數量 maxIdle已經不再使用
      initial-size: 5 #初始化時建立物理連接的個數
      max-wait: 60000
      min-idle: 5 #最小連接池數量
      time-between-eviction-runs-millis: 60000 #既作爲檢測的間隔時間又作爲testWhileIdle執行的依據
      min-evictable-idle-time-millis: 300000 #銷燬線程時檢測當前連接的最後活動時間和當前時間差大於該值時,關閉當前連接
      validation-query: select 'x' #用來檢測連接是否有效的sql
      #申請連接的時候檢測,如果空閒時間大於timeBetweenEvictionRunsMillis,執行validationQuery檢測連接是否有效。
      test-while-idle: true
      test-on-borrow: false #申請連接時會執行validationQuery檢測連接是否有效,開啓會降低性能,默認爲true
      test-on-return: false #歸還連接時會執行validationQuery檢測連接是否有效,開啓會降低性能,默認爲true
      pool-prepared-statements: false # 是否緩存preparedStatement,也就是PSCache  官方建議MySQL下建議關閉
      max-open-prepared-statements: 20
      max-pool-prepared-statement-per-connection-size: 20 #當值大於0時poolPreparedStatements會自動修改爲true
      # 通過connectProperties屬性來打開mergeSql功能;慢SQL記錄(配置慢SQL的定義時間)
      connection-properties: druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000
      use-global-data-source-stat: true # 合併多個DruidDataSource的監控數據

      # 設置監控配置
      web-stat-filter:
        enabled: true
        url-pattern: /*
        exclusions: "*.js,*.gif,*.jpg,*.png,*.css,*.ico,/druid/*"
        session-stat-enable: true
        session-stat-max-count: 100
      #設置視圖攔截,訪問druid監控頁的賬號和密碼,默認沒有
      stat-view-servlet:
        enabled: true
        url-pattern: /druid/*
        reset-enable: true
        login-username: admin
        login-password: admin

2、通過配置類配置方式

在某些特殊情況下,如系統中有多數據源、動態數據源等情況,Spring無法直接讀取配置文件,我們會採用Config類的方式進行配置,這樣比較靈活。

1)配置文件,同上,供配置類讀取

spring:
  datasource:
    type: com.alibaba.druid.pool.DruidDataSource
    druid:
      filters: stat,wall,slf4j # 配置監控統計攔截的filters,去掉後監控界面sql無法統計,'wall'用於防火牆
      max-active: 20 #最大連接池數量 maxIdle已經不再使用
      initial-size: 5 #初始化時建立物理連接的個數
      max-wait: 60000
      min-idle: 5 #最小連接池數量
      time-between-eviction-runs-millis: 60000 #既作爲檢測的間隔時間又作爲testWhileIdle執行的依據
      min-evictable-idle-time-millis: 300000 #銷燬線程時檢測當前連接的最後活動時間和當前時間差大於該值時,關閉當前連接
      validation-query: select 'x' #用來檢測連接是否有效的sql
      #申請連接的時候檢測,如果空閒時間大於timeBetweenEvictionRunsMillis,執行validationQuery檢測連接是否有效。
      test-while-idle: true
      test-on-borrow: false #申請連接時會執行validationQuery檢測連接是否有效,開啓會降低性能,默認爲true
      test-on-return: false #歸還連接時會執行validationQuery檢測連接是否有效,開啓會降低性能,默認爲true
      pool-prepared-statements: false # 是否緩存preparedStatement,也就是PSCache  官方建議MySQL下建議關閉
      max-open-prepared-statements: 20
      max-pool-prepared-statement-per-connection-size: 20 #當值大於0時poolPreparedStatements會自動修改爲true
      # 通過connectProperties屬性來打開mergeSql功能;慢SQL記錄(配置慢SQL的定義時間)
      connection-properties: druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000
      use-global-data-source-stat: true # 合併多個DruidDataSource的監控數據

      # 設置監控配置
      web-stat-filter:
        enabled: true
        url-pattern: /*
        exclusions: "*.js,*.gif,*.jpg,*.png,*.css,*.ico,/druid/*"
        session-stat-enable: true
        session-stat-max-count: 100
      #設置視圖攔截,訪問druid監控頁的賬號和密碼,默認沒有
      stat-view-servlet:
        enabled: true
        url-pattern: /druid/*
        reset-enable: true
        login-username: admin
        login-password: admin

2)配置信息類DruidDataSourceProperties

負責讀入配置文件中的信息

import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;

/**
 * Configuration-properties holder bound to the {@code spring.datasource.druid} prefix.
 *
 * <p>Mirrors the Druid pool settings declared in application.yml so that a
 * {@code @Configuration} class can read them and build a {@code DruidDataSource}
 * programmatically (useful for multi data-source / dynamic data-source setups
 * where Spring cannot auto-bind the pool directly).</p>
 */
@ConfigurationProperties(prefix = "spring.datasource.druid")
public class DruidDataSourceProperties {

    // --- JDBC connection settings ---
    private String driverClassName;
    private String url;
    private String username;
    private String password;

    // --- Pool sizing ---
    private int initialSize;
    private int minIdle;
    // NOTE(review): this in-code default (100) differs from the sample YAML
    // above, which configures max-active: 20; an explicit YAML value overrides it.
    private int maxActive = 100;
    private long maxWait;

    // --- Eviction / validation ---
    private long timeBetweenEvictionRunsMillis;
    private long minEvictableIdleTimeMillis;
    private String validationQuery;
    private boolean testWhileIdle;
    private boolean testOnBorrow;
    private boolean testOnReturn;

    // --- PreparedStatement cache (PSCache) ---
    private boolean poolPreparedStatements;
    // Added: the sample YAML sets max-open-prepared-statements but this class
    // previously had no matching property, so the value was silently dropped.
    private int maxOpenPreparedStatements;
    private int maxPoolPreparedStatementPerConnectionSize;

    // Comma-separated filter names, e.g. "stat,wall,slf4j".
    private String filters;

    public int getInitialSize() {
        return initialSize;
    }

    public void setInitialSize(int initialSize) {
        this.initialSize = initialSize;
    }

    public int getMinIdle() {
        return minIdle;
    }

    public void setMinIdle(int minIdle) {
        this.minIdle = minIdle;
    }

    public int getMaxActive() {
        return maxActive;
    }

    public void setMaxActive(int maxActive) {
        this.maxActive = maxActive;
    }

    public long getMaxWait() {
        return maxWait;
    }

    public void setMaxWait(long maxWait) {
        this.maxWait = maxWait;
    }

    public long getTimeBetweenEvictionRunsMillis() {
        return timeBetweenEvictionRunsMillis;
    }

    public void setTimeBetweenEvictionRunsMillis(long timeBetweenEvictionRunsMillis) {
        this.timeBetweenEvictionRunsMillis = timeBetweenEvictionRunsMillis;
    }

    public long getMinEvictableIdleTimeMillis() {
        return minEvictableIdleTimeMillis;
    }

    public void setMinEvictableIdleTimeMillis(long minEvictableIdleTimeMillis) {
        this.minEvictableIdleTimeMillis = minEvictableIdleTimeMillis;
    }

    public String getValidationQuery() {
        return validationQuery;
    }

    public void setValidationQuery(String validationQuery) {
        this.validationQuery = validationQuery;
    }

    public boolean isTestWhileIdle() {
        return testWhileIdle;
    }

    public void setTestWhileIdle(boolean testWhileIdle) {
        this.testWhileIdle = testWhileIdle;
    }

    public boolean isTestOnBorrow() {
        return testOnBorrow;
    }

    public void setTestOnBorrow(boolean testOnBorrow) {
        this.testOnBorrow = testOnBorrow;
    }

    public boolean isTestOnReturn() {
        return testOnReturn;
    }

    public void setTestOnReturn(boolean testOnReturn) {
        this.testOnReturn = testOnReturn;
    }

    public boolean isPoolPreparedStatements() {
        return poolPreparedStatements;
    }

    public void setPoolPreparedStatements(boolean poolPreparedStatements) {
        this.poolPreparedStatements = poolPreparedStatements;
    }

    public int getMaxOpenPreparedStatements() {
        return maxOpenPreparedStatements;
    }

    public void setMaxOpenPreparedStatements(int maxOpenPreparedStatements) {
        this.maxOpenPreparedStatements = maxOpenPreparedStatements;
    }

    public int getMaxPoolPreparedStatementPerConnectionSize() {
        return maxPoolPreparedStatementPerConnectionSize;
    }

    public void setMaxPoolPreparedStatementPerConnectionSize(int maxPoolPreparedStatementPerConnectionSize) {
        this.maxPoolPreparedStatementPerConnectionSize = maxPoolPreparedStatementPerConnectionSize;
    }

    public String getFilters() {
        return filters;
    }

    public void setFilters(String filters) {
        this.filters = filters;
    }

    public String getDriverClassName() {
        return driverClassName;
    }

    public void setDriverClassName(String driverClassName) {
        this.driverClassName = driverClassName;
    }

    public String getUrl() {
        return url;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

}

3)配置DataSource

類中引入剛纔的配置信息類

// Enables binding of DruidDataSourceProperties (spring.datasource.druid.*)
// and injects it so @Bean methods in this class can configure the pool by hand.
@Configuration
@EnableConfigurationProperties({DruidDataSourceProperties.class})
public class DataSourceConfig{

    // Bound property holder; populated by Spring from the YAML shown above.
    @Autowired
    private DruidDataSourceProperties druidProperties;
}

手動配置相關信息

/**
 * Applies the values read from {@code DruidDataSourceProperties} to the given
 * pool and initialises it.
 *
 * @param druidDataSource the pool to configure; fully initialised on return
 * @throws IllegalStateException if setting the filters or initialising the pool fails
 */
private void setDruidDataSourceConfig(DruidDataSource druidDataSource) {
        // Fixed: the original referenced getDriverName / getUrl / getUsername /
        // getPassword without parentheses (and getDriverName does not exist on
        // the properties class — the accessor is getDriverClassName()).
        druidDataSource.setDriverClassName(druidProperties.getDriverClassName());
        druidDataSource.setUrl(druidProperties.getUrl());
        druidDataSource.setUsername(druidProperties.getUsername());
        druidDataSource.setPassword(druidProperties.getPassword());
        // NOTE(review): the original called setConnectionInitSqls with an
        // undefined variable "initSqls"; if connection-init SQL is needed,
        // add it here, e.g. druidDataSource.setConnectionInitSqls(List.of("set names utf8mb4"));
        // 初始化大小,最小,最大
        druidDataSource.setInitialSize(druidProperties.getInitialSize());
        druidDataSource.setMinIdle(druidProperties.getMinIdle());
        druidDataSource.setMaxActive(druidProperties.getMaxActive());
        // 配置獲取連接等待超時的時間
        druidDataSource.setMaxWait(druidProperties.getMaxWait());
        // 配置間隔多久才進行一次檢測,檢測需要關閉的空閒連接,單位是毫秒
        druidDataSource.setTimeBetweenEvictionRunsMillis(druidProperties.getTimeBetweenEvictionRunsMillis());
        // 配置一個連接在池中最小生存的時間,單位是毫秒
        druidDataSource.setMinEvictableIdleTimeMillis(druidProperties.getMinEvictableIdleTimeMillis());
        druidDataSource.setTestWhileIdle(druidProperties.isTestWhileIdle());
        druidDataSource.setTestOnBorrow(druidProperties.isTestOnBorrow());
        druidDataSource.setTestOnReturn(druidProperties.isTestOnReturn());
        // 打開PSCache,並且指定每個連接上PSCache的大小
        druidDataSource.setPoolPreparedStatements(druidProperties.isPoolPreparedStatements());
        druidDataSource.setMaxPoolPreparedStatementPerConnectionSize(druidProperties.getMaxPoolPreparedStatementPerConnectionSize());
        // Kept false on purpose: druid.timeBetweenLogStatsMillis (set below) is
        // mutually exclusive with useGlobalDataSourceStat.
        druidDataSource.setUseGlobalDataSourceStat(false);

        // 配置慢SQL信息,注意druid.timeBetweenLogStatsMillis爲多久記錄日誌並清空一次信息,與setUseGlobalDataSourceStat互斥
        Properties properties = new Properties();
        properties.setProperty("druid.stat.mergeSql", "true");
        properties.setProperty("druid.stat.slowSqlMillis", "5000");
        properties.setProperty("druid.timeBetweenLogStatsMillis", "100000");
        druidDataSource.setConnectProperties(properties);

        try {
            druidDataSource.setFilters(druidProperties.getFilters());
            druidDataSource.init();
        } catch (SQLException e) {
            // Fail fast instead of swallowing the error with printStackTrace():
            // a half-configured pool would only fail later and more obscurely.
            throw new IllegalStateException("Failed to initialise Druid data source", e);
        }
    }

一般情況下,我們採用純配置文件的方式使用即可。

配置完成後,啓動訪問http://localhost:8080/druid/index.html

如果配置了訪問賬號密碼,需要登錄後查看相關信息

(原文此處附有 Druid 監控頁面的截圖,圖片已遺失)

二、擴展

1、Spring類的AOP監控

在以上配置完成後,有一項還無法查看,就是Spring監控,是因爲Spring監控需要對方法做AOP攔截,需要額外配置。這個功能非常強大並且實用,以下是配置方法

新建druid-bean.xml,放入resource文件夾中

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:aop="http://www.springframework.org/schema/aop" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xsi:schemaLocation="
        http://www.springframework.org/schema/beans
        http://www.springframework.org/schema/beans/spring-beans.xsd
        http://www.springframework.org/schema/aop
        http://www.springframework.org/schema/aop/spring-aop.xsd">

    <!-- 配置_Druid和Spring關聯監控配置 -->
    <bean id="druid-stat-interceptor"
          class="com.alibaba.druid.support.spring.stat.DruidStatInterceptor"></bean>

    <!-- 方法名正則匹配攔截配置 -->
    <bean id="druid-stat-pointcut" class="org.springframework.aop.support.JdkRegexpMethodPointcut"
          scope="prototype">
        <property name="patterns">
            <list>
                <value>com.sogou.test.*.service.*</value>
            </list>
        </property>
    </bean>

    <aop:config proxy-target-class="true">
        <aop:advisor advice-ref="druid-stat-interceptor"
                     pointcut-ref="druid-stat-pointcut" />
    </aop:config>

</beans>

其中patterns處配置你需要切的位置,比如dao或service層,會對你配置方法進行監控

引入配置

@SpringBootApplication
@ImportResource(locations = { "classpath:druid-bean.xml" })
public class Application {

    public static void main(String[] args) {
        TimeZone.setDefault(TimeZone.getTimeZone("Asia/Shanghai"));
        SpringApplication.run(Application.class, args);
    }

}

配置完成後,即可在Spring監控中查看到對應方法的sql執行信息,可以方便地查看到哪個方法的sql執行有異常情況。

(示意圖:https://s1.ax1x.com/2020/06/17/NAhzjA.png)

2、日誌數據持久化

druid監控的數據,都是存儲在緩存中,當應用重啓或重新發布時數據會清空,頁面上也有兩個重置按鈕,其中記錄日誌並重置會將當前日誌打印。

當配置了druid.timeBetweenLogStatsMillis參數時,會每隔一段時間記錄日誌並重置統計信息,會將連接數、SQL信息都打印到日誌中,但這樣有個缺點是會將這段時間的SQL也打印出來,沒有必要,可以通過自定義StatLogger的方式來自定義輸出格式。

public class DruidStatLogger extends DruidDataSourceStatLoggerAdapter implements DruidDataSourceStatLogger {

    private static final Log LOG    = LogFactory.getLog(DruidDataSourceStatLoggerImpl.class);

    private Log logger = LOG;

    public DruidStatLogger() {
        this.configFromProperties(System.getProperties());
    }

    public boolean isLogEnable() {
        return logger.isInfoEnabled();
    }

    public void log(String value) {
        logger.info(value);
    }

    @Override
    public void log(DruidDataSourceStatValue druidDataSourceStatValue) {
        if (!isLogEnable()) {
            return;
        }
        Map<String, Object> map = new LinkedHashMap<>();

        map.put("dbType", druidDataSourceStatValue.getDbType());
        map.put("name", druidDataSourceStatValue.getName());
        map.put("activeCount", druidDataSourceStatValue.getActiveCount());

        if (druidDataSourceStatValue.getActivePeak() > 0) {
            map.put("activePeak", druidDataSourceStatValue.getActivePeak());
            map.put("activePeakTime", druidDataSourceStatValue.getActivePeakTime());
        }
        map.put("poolingCount", druidDataSourceStatValue.getPoolingCount());
        if (druidDataSourceStatValue.getPoolingPeak() > 0) {
            map.put("poolingPeak", druidDataSourceStatValue.getPoolingPeak());
            map.put("poolingPeakTime", druidDataSourceStatValue.getPoolingPeakTime());
        }
        map.put("connectCount", druidDataSourceStatValue.getConnectCount());
        map.put("closeCount", druidDataSourceStatValue.getCloseCount());

        if (druidDataSourceStatValue.getWaitThreadCount() > 0) {
            map.put("waitThreadCount", druidDataSourceStatValue.getWaitThreadCount());
        }

        if (druidDataSourceStatValue.getNotEmptyWaitCount() > 0) {
            map.put("notEmptyWaitCount", druidDataSourceStatValue.getNotEmptyWaitCount());
        }

        if (druidDataSourceStatValue.getNotEmptyWaitMillis() > 0) {
            map.put("notEmptyWaitMillis", druidDataSourceStatValue.getNotEmptyWaitMillis());
        }

        if (druidDataSourceStatValue.getLogicConnectErrorCount() > 0) {
            map.put("logicConnectErrorCount", druidDataSourceStatValue.getLogicConnectErrorCount());
        }

        if (druidDataSourceStatValue.getPhysicalConnectCount() > 0) {
            map.put("physicalConnectCount", druidDataSourceStatValue.getPhysicalConnectCount());
        }

        if (druidDataSourceStatValue.getPhysicalCloseCount() > 0) {
            map.put("physicalCloseCount", druidDataSourceStatValue.getPhysicalCloseCount());
        }

        if (druidDataSourceStatValue.getPhysicalConnectErrorCount() > 0) {
            map.put("physicalConnectErrorCount", druidDataSourceStatValue.getPhysicalConnectErrorCount());
        }

        if (druidDataSourceStatValue.getExecuteCount() > 0) {
            map.put("executeCount", druidDataSourceStatValue.getExecuteCount());
        }

        if (druidDataSourceStatValue.getErrorCount() > 0) {
            map.put("errorCount", druidDataSourceStatValue.getErrorCount());
        }

        if (druidDataSourceStatValue.getCommitCount() > 0) {
            map.put("commitCount", druidDataSourceStatValue.getCommitCount());
        }

        if (druidDataSourceStatValue.getRollbackCount() > 0) {
            map.put("rollbackCount", druidDataSourceStatValue.getRollbackCount());
        }

        if (druidDataSourceStatValue.getPstmtCacheHitCount() > 0) {
            map.put("pstmtCacheHitCount", druidDataSourceStatValue.getPstmtCacheHitCount());
        }

        if (druidDataSourceStatValue.getPstmtCacheMissCount() > 0) {
            map.put("pstmtCacheMissCount", druidDataSourceStatValue.getPstmtCacheMissCount());
        }

        if (druidDataSourceStatValue.getStartTransactionCount() > 0) {
            map.put("startTransactionCount", druidDataSourceStatValue.getStartTransactionCount());
            map.put("transactionHistogram", (druidDataSourceStatValue.getTransactionHistogram()));
        }

        if (druidDataSourceStatValue.getConnectCount() > 0) {
            map.put("connectionHoldTimeHistogram", (druidDataSourceStatValue.getConnectionHoldTimeHistogram()));
        }

        if (druidDataSourceStatValue.getClobOpenCount() > 0) {
            map.put("clobOpenCount", druidDataSourceStatValue.getClobOpenCount());
        }

        if (druidDataSourceStatValue.getBlobOpenCount() > 0) {
            map.put("blobOpenCount", druidDataSourceStatValue.getBlobOpenCount());
        }

        if (druidDataSourceStatValue.getSqlSkipCount() > 0) {
            map.put("sqlSkipCount", druidDataSourceStatValue.getSqlSkipCount());
        }
        if (!isLogEnable()) {
            return;
        }
        //Map<String, Object> map = new LinkedHashMap<String, Object>();
        myArrayList<Map<String, Object>> sqlList = new myArrayList<Map<String, Object>>();

        //有執行sql的話 只顯示sql語句
        if (druidDataSourceStatValue.getSqlList().size() > 0) {
            for (JdbcSqlStatValue sqlStat : druidDataSourceStatValue.getSqlList()) {
                Map<String, Object> sqlStatMap = new LinkedHashMap<String, Object>();
                sqlStatMap.put("執行了sql語句: ", sqlStat.getSql());
                sqlList.add(sqlStatMap);
                String text = sqlList.toString();
                //log(text);
            }
        }
        //沒有sql語句的話就顯示最上面那些
        else{
            String text = map.toString();
            log(text);
        }
    }

    @Override
    public void configFromProperties(Properties properties) {
        String property = properties.getProperty("druid.stat.loggerName");
        if (property != null && property.length() > 0) {
            setLoggerName(property);
        }
    }

    @Override
    public void setLogger(Log log) {
        if (log == null) {
            throw new IllegalArgumentException("logger can not be null");
        }
        this.logger = log;
    }

    @Override
    public void setLoggerName(String loggerName) {
        logger = LogFactory.getLog(loggerName);
    }

    class myArrayList<E> extends ArrayList<E> {
        @Override
        public String toString() {
            Iterator<E> it = iterator();
            if (!it.hasNext()) {
                return "";
            }

            StringBuilder sb = new StringBuilder();
            for (; ; ) {
                E e = it.next();
                sb.append(e == this ? "(this Collection)" : e);
                if (!it.hasNext()) {
                    return sb.toString();
                }
                sb.append(',').append(' ');
            }
        }
    }

DataSource配置

/**
 * Registers the Druid data source with the custom stat logger, plus the
 * monitoring servlet (/druid/*) and the web-stat filter.
 */
@Configuration
public class DruidConfig {

    /**
     * Pool bound from spring.datasource.* and wired to the custom stat logger.
     */
    @ConfigurationProperties(prefix="spring.datasource")
    @Bean
    public DataSource druidDataSource()
    {
        DruidDataSource dataSource = new DruidDataSource();
        // Fixed: the original instantiated "MyStatLogger", which does not match
        // the DruidStatLogger class defined above.
        dataSource.setStatLogger(new DruidStatLogger());
        return dataSource;
    }

    /**
     * Exposes the Druid monitoring console at /druid/*.
     */
    @Bean
    public ServletRegistrationBean<StatViewServlet> statViewServlet()
    {
        ServletRegistrationBean<StatViewServlet> bean=new ServletRegistrationBean<>(new StatViewServlet(),"/druid/*");

        //後臺需要有人登錄監控
        HashMap<String,String> initParameters=new HashMap<>();

        //增加配置
        initParameters.put("loginUsername","admin");
        initParameters.put("loginPassword","123456");

        //允許誰能訪問
        // NOTE(review): a single space is kept from the original; an empty string
        // is the usual "allow everyone" value — verify against the Druid docs.
        initParameters.put("allow"," ");

        bean.setInitParameters(initParameters);//設置初始化參數
        return bean;
    }

    /**
     * Registers the web-stat filter so URI/session statistics are collected,
     * excluding static resources and the console itself.
     */
    @Bean
    public FilterRegistrationBean<WebStatFilter> webStatFilter()
    {
        // Parameterized the registration bean (the original used the raw type).
        FilterRegistrationBean<WebStatFilter> bean=new FilterRegistrationBean<>();
        bean.setFilter(new WebStatFilter());

        HashMap<String,String> initParameters=new HashMap<>();

        initParameters.put("exclusions","*.js,*.css,/druid/*");

        bean.setInitParameters(initParameters);

        return bean;
    }

}

可以設置每隔24小時記錄日誌並清空當前數據。

發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章