1、导入pom依赖
(依赖坐标原文为被压扁的 XML,以下为整理后的列表 — reconstructed from garbled extraction:)

- `org.springframework.boot : spring-boot-starter-data-ldap`
- `org.springframework.boot : spring-boot-starter-jdbc`
- `com.alibaba : druid-spring-boot-starter : 1.2.8`(依赖 JDK 的 `com.sun:tools` 与 `com.sun:jconsole`)
- `org.mybatis.spring.boot : mybatis-spring-boot-starter : 2.1.1`
- `mysql : mysql-connector-java : 5.1.46`
- `org.apache.hive : hive-jdbc : 3.1.0`,exclusions:`javax.jdo:jdo2-api`、`org.apache.hive:hive-exec`、`org.eclipse.jetty:*`、`javax.servlet:servlet-api`
- `org.apache.httpcomponents : httpclient : 4.5.6`
- `com.arronlong : httpclientutil : 1.0.4`
2、添加Configuration类
(1)、DataSourceConfig
@Configuration
public class DataSourceConfig {

    /**
     * Primary Hive-metastore MySQL datasource; Druid pool properties are bound
     * from {@code spring.datasource.druid.hive-meta}.
     *
     * <p>The explicit bean name doubles as the qualifier, so a separate
     * {@code @Qualifier} on the {@code @Bean} method is unnecessary.
     */
    @Primary
    @Bean(name = "hiveMetaDataSource")
    @ConfigurationProperties(prefix = "spring.datasource.druid.hive-meta")
    public DataSource hiveMetaDataSource() {
        return new DruidDataSource();
    }

    /**
     * Second Hive-metastore datasource, bound from
     * {@code spring.datasource.druid.hive-meta-second}.
     *
     * <p>BUG FIX: this bean was also annotated {@code @Primary}. Two
     * {@code @Primary} beans of the same type make by-type injection ambiguous
     * and the context fails with {@code NoUniqueBeanDefinitionException};
     * only {@code hiveMetaDataSource} keeps the primary role.
     */
    @Bean(name = "hiveMetaSecondDataSource")
    @ConfigurationProperties(prefix = "spring.datasource.druid.hive-meta-second")
    public DataSource hiveMetaSecondDataSource() {
        return new DruidDataSource();
    }

    /** Hive JDBC (HiveServer2) datasource, bound from {@code spring.datasource.druid.hive}. */
    @Bean(name = "hiveDataSource")
    @ConfigurationProperties(prefix = "spring.datasource.druid.hive")
    public DataSource hiveDataSource() {
        return new DruidDataSource();
    }

    /** Table-management metadata datasource, bound from {@code spring.datasource.druid.tablex-meta}. */
    @Bean(name = "tablexMetaDataSource")
    @ConfigurationProperties(prefix = "spring.datasource.druid.tablex-meta")
    public DataSource tablexMetaDataSource() {
        return new DruidDataSource();
    }

    // FIXME: currently unused (original note: 未使用)
    @Bean
    public JdbcTemplate hiveMetaJdbcTemplate(@Qualifier("hiveMetaDataSource") DataSource hiveMetaDataSource) {
        return new JdbcTemplate(hiveMetaDataSource);
    }

    /** JdbcTemplate over the second Hive-metastore datasource. */
    @Bean
    public JdbcTemplate hiveMetaSecondJdbcTemplate(@Qualifier("hiveMetaSecondDataSource") DataSource hiveMetaSecondDataSource) {
        return new JdbcTemplate(hiveMetaSecondDataSource);
    }

    /** JdbcTemplate over the HiveServer2 datasource for ad-hoc HiveQL. */
    @Bean
    public JdbcTemplate hiveTemplate(@Qualifier("hiveDataSource") DataSource hiveDataSource) {
        return new JdbcTemplate(hiveDataSource);
    }

    /** JdbcTemplate over the table-management metadata datasource. */
    @Bean
    public JdbcTemplate tablexMetaTemplate(@Qualifier("tablexMetaDataSource") DataSource tablexMetaDataSource) {
        return new JdbcTemplate(tablexMetaDataSource);
    }
}
(2)、HIVE元数据配置类HiveTabMetaConfig
@Configuration
// Registers MyBatis mappers for the Hive-metastore tables against the dedicated
// SqlSessionFactory/SqlSessionTemplate declared in this class, keeping them
// isolated from the other datasources configured in DataSourceConfig.
@MapperScan(
basePackages = "com.xxxx.xxxx.mapper.hivemeta",
sqlSessionFactoryRef = "hiveTabMetaSqlSessionFactory",
sqlSessionTemplateRef = "hiveTabMetaSqlSessionTemplate"
)
public class HiveTabMetaConfig {
// Hive-metastore MySQL datasource, constructor-injected by qualifier so this
// config never picks up one of the other DataSource beans by accident.
private DataSource hiveMetaDataSource;
public HiveTabMetaConfig(@Qualifier("hiveMetaDataSource") DataSource hiveMetaDataSource) {
this.hiveMetaDataSource = hiveMetaDataSource;
}
// Transaction manager bound to the hive-meta datasource only.
@Bean(name= "hiveTabMetaTransactionManager")
public DataSourceTransactionManager hiveTabMetaTransactionManager() {
return new DataSourceTransactionManager(hiveMetaDataSource);
}
// SqlSessionFactory for the hive-meta mappers; loads XML mapper files from the
// classpath via the pattern resolver.
@Bean(name = "hiveTabMetaSqlSessionFactory")
public SqlSessionFactory hiveTabMetaSqlSessionFactory() throws Exception {
SqlSessionFactoryBean bean = new SqlSessionFactoryBean();
bean.setDataSource(hiveMetaDataSource);
// NOTE(review): the source text is truncated mid-statement below — the rest of
// the mapper-location string, this method's tail, the sqlSessionTemplate bean,
// and apparently whole sections (3)–(6) of the article were lost in extraction.
bean.setMapperLocations(new PathMatchingResourcePatternResolver().getResources("classpath*:mapper/hivemeta
/**
 * Builds a FastJSON-backed {@code HttpMessageConverter} for Spring MVC:
 * pretty-printed output, null-value normalisation, a fixed date format, and
 * UTF-8 JSON media type to avoid mojibake with Chinese text.
 *
 * NOTE(review): the lines immediately above this method were truncated in the
 * source; a {@code @Bean} annotation was probably lost — confirm before use.
 * Raw types in the original ({@code HttpMessageConverter}, {@code List}) are
 * restored to their parameterized forms.
 */
public HttpMessageConverter<?> fastJsonHttpMessageConverters() {
    FastJsonHttpMessageConverter fastConvert = new FastJsonHttpMessageConverter();
    FastJsonConfig fastJsonConfig = new FastJsonConfig();
    fastJsonConfig.setSerializerFeatures(SerializerFeature.PrettyFormat,
            SerializerFeature.WriteNullStringAsEmpty,
            SerializerFeature.WriteNullNumberAsZero,
            SerializerFeature.WriteNullListAsEmpty,
            SerializerFeature.WriteMapNullValue,
            SerializerFeature.DisableCheckSpecialChar);
    // Fixed 24-hour date format; without it, dates serialize as epoch timestamps.
    fastJsonConfig.setDateFormat("yyyy-MM-dd HH:mm:ss");
    // Advertise UTF-8 JSON so Chinese characters are not garbled in responses.
    List<MediaType> fastMediaTypes = Lists.newArrayList();
    fastMediaTypes.add(MediaType.APPLICATION_JSON_UTF8);
    fastConvert.setSupportedMediaTypes(fastMediaTypes);
    fastConvert.setFastJsonConfig(fastJsonConfig);
    return fastConvert;
}
}
(7)、Clickhouse工具类,封装http Post请求
@Component
@Data
@AllArgsConstructor
public class ClickHouseUtil {

    // NOTE(review): field injection plus @Data/@AllArgsConstructor plus a manual
    // no-arg constructor is an odd mix; constructor injection alone would be
    // cleaner, but the annotations are kept to preserve the existing interface.
    @Autowired
    @Qualifier("restTemplate")
    RestTemplate restTemplate;

    /** ClickHouse HTTP endpoint, from {@code clickhouse.url}. */
    @Value("${clickhouse.url}")
    private String chUrl;

    @Value("${clickhouse.username}")
    private String userName;

    @Value("${clickhouse.password}")
    private String password;

    public ClickHouseUtil() {
    }

    /**
     * Executes the given SQL on ClickHouse via HTTP POST.
     *
     * @param sql SQL statement to run
     * @return {@code true} when ClickHouse answers HTTP 200
     * @throws MyException wrapping any transport/HTTP failure
     */
    public boolean sendPostRequest(String sql) {
        return postSql(sql).getStatusCode().value() == 200;
    }

    /**
     * Executes the given SQL on ClickHouse via HTTP POST and returns the raw
     * response body (query result text).
     *
     * @param sql SQL statement to run
     * @return response body as returned by ClickHouse
     * @throws MyException wrapping any transport/HTTP failure
     */
    public String sendPostRequest1(String sql) {
        return postSql(sql).getBody();
    }

    /**
     * Shared helper (the two public methods were near-duplicates): builds the
     * Basic-auth request and performs the POST exchange.
     */
    private ResponseEntity<String> postSql(String sql) {
        HttpHeaders headers = new HttpHeaders();
        // Basic auth: base64("user:password"); credentials are ASCII per RFC 7617.
        String authorization = userName + ":" + password;
        String basicAuth =
                Base64.getEncoder().encodeToString(authorization.getBytes(Charset.forName("US-ASCII")));
        headers.set("Authorization", "Basic " + basicAuth);
        headers.setContentType(MediaType.APPLICATION_JSON);
        HttpEntity<String> httpEntity = new HttpEntity<>(sql, headers);
        try {
            return restTemplate.exchange(chUrl, HttpMethod.POST, httpEntity, String.class);
        } catch (Exception e) {
            // NOTE(review): MyException's constructors are not visible here; if it
            // accepts a Throwable cause, prefer new MyException(e) so the original
            // stack trace is preserved instead of only the message.
            throw new MyException(e.getMessage());
        }
    }
}
(8)、yml配置文件
application.yml
spring:
  ########## 配置WebStatFilter,用于采集web关联监控的数据 ##########
  # NOTE(review): Druid 的 web-stat-filter 通常应配置在
  # spring.datasource.druid.web-stat-filter 之下;此处直接挂在 spring: 下,请确认缩进层级。
  web-stat-filter:
enabled: true # 启动 StatFilter
url-pattern: /* # 过滤所有url
exclusions: "*.js,*.gif,*.jpg,*.png,*.css,*.ico,/druid/*" # 排除一些不必要的url
session-stat-enable: true # 开启session统计功能
session-stat-max-count: 1000 # session的最大个数,默认100
## druid
datasource:
druid:
stat-view-servlet:
enabled: true
url-pattern: /druid/*
reset-enable: true
login-username: admin
login-password: zhangx102
allow: # 设置允许指定 ip 可以访问 druid 后台监控界面
profiles:
active: dev
application-dev.yml
# CLICKHOUSE DATASOURCE CONFIG
clickhouse:
url: http://xx.xx.x.xx:9092
username: xxxx
password: xxxx
## 数据源配置
datasource:
druid:
### HIVE META CONFIG
hive-meta:
name: HIVE-META
url: jdbc:mysql://xx.xx.x.xx:3306/xxxx?characterEncoding=utf8&useSSL=false
username: xxxx
password: xxxx
driver-class-name: com.mysql.jdbc.Driver
initialSize: 3
maxActive: 10
maxWait: 60000
minIdle: 1
timeBetweenEvictionRunsMillis: 60000
minEvictableIdleTimeMillis: 300000
testWhileIdle: true
testOnBorrow: true
testOnReturn: false
poolPreparedStatements: true
maxOpenPreparedStatements: 20
validationQuery: SELECT 1
validation-query-timeout: 500
filters: stat,wall
updatetableparam.path: /hiveServing/addUpdateTableParams
updatecolumncomment.path: /hiveServing/updateColumnOfHiveColumn
hive-meta-second:
name: HIVE-META-ANOTHER
url: jdbc:mysql://xx.xx.x.xx:3306/xxxx?characterEncoding=utf8&useSSL=false
username: xxxx
password: xxxx
driver-class-name: com.mysql.jdbc.Driver
initialSize: 3
maxActive: 10
maxWait: 60000
minIdle: 1
timeBetweenEvictionRunsMillis: 60000
minEvictableIdleTimeMillis: 300000
testWhileIdle: true
testOnBorrow: true
testOnReturn: false
poolPreparedStatements: true
maxOpenPreparedStatements: 20
validationQuery: SELECT 1
validation-query-timeout: 500
filters: stat,wall
updatetableparam.path: /hiveServing/addUpdateTableParams
updatecolumncomment.path: /hiveServing/updateColumnOfHiveColumn
tablex-meta:
name: tablex-meta
url: jdbc:mysql://xx.xx.x.xx:3306/tablemanagement?characterEncoding=utf8&useSSL=false
username: xxxx
password: xxxx
driver-class-name: com.mysql.jdbc.Driver
initialSize: 5
maxActive: 10
maxWait: 60000
minIdle: 1
timeBetweenEvictionRunsMillis: 60000
minEvictableIdleTimeMillis: 300000
testWhileIdle: true
testOnBorrow: true
testOnReturn: false
poolPreparedStatements: true
maxOpenPreparedStatements: 20
validationQuery: SELECT 1
validation-query-timeout: 500
filters: stat,wall
hive: #hive数据源
url: jdbc:hive2://xx.xx.x.xx:2181,xx.xx.x.xx:2181,xx.xx.x.xx:2181/xxxx;serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=hiveserver2
username: xxxx
password: xxxx
driver-class-name: org.apache.hive.jdbc.HiveDriver
initialSize: 1
minIdle: 1
maxIdle: 5
maxActive: 50
maxWait: 10000
timeBetweenEvictionRunsMillis: 10000
minEvictableIdleTimeMillis: 300000
validationQuery: select 'x'
testWhileIdle: true
testOnBorrow: false
testOnReturn: false
poolPreparedStatements: true
maxOpenPreparedStatements: 20
# 打开PSCache,并且指定每个连接上PSCache的大小
maxPoolPreparedStatementPerConnectionSize: 20
connectionErrorRetryAttempts: 0
breakAfterAcquireFailure: false



