---------- Straight to the code ----------
package com.hx.test;
import com.hx.conf.HbaseConfig;
import com.hx.test.model.IdTimeTemperature;
import com.hx.utils.HbaseUtil;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.connector.hbase.sink.HbaseSinkFunction;
import org.apache.flink.connector.hbase.sink.LegacyMutationConverter;
import org.apache.flink.connector.hbase.util.HbaseTableSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.types.Row;
import org.apache.hadoop.conf.Configuration;
import java.util.Arrays;
import static com.hx.utils.ExecutionEnvUtil.createParameterTool;
public class HbaseSink {
public static void main(String[] args) throws Exception{
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.getConfig().setRestartStrategy(RestartStrategies.fixedDelayRestart(2, 1000));
System.setProperty("java.security.auth.login.config","D:\workspace\flink2doris\src\main\resources\kafka_client_jaas.conf");
System.setProperty("java.security.krb5.conf","D:\workspace\flink2doris\src\main\resources\krb5.ini");
System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");
DataStream
SingleOutputStreamOperator
String[] split = line.split(",");
return Tuple3.of(split[0], split[1], split[2]);
}).returns(Types.TUPLE(Types.STRING, Types.STRING, Types.STRING));
ParameterTool parameterTool = createParameterTool();
String zkQuorum = parameterTool.getRequired("hbase.zookeeper.quorum");
String zkPort = parameterTool.get("hbase.zookeeper.port", "2181");
HbaseConfig build = HbaseConfig.builder()
.zookeeper_quorum(zkQuorum)
.zookeeper_port(zkPort)
.zookeeper_znode("/hbase")
.isKerberos(true)
.build();
Configuration hConfig = HbaseUtil.initHbaseClientConfig(build);
HbaseTableSchema hbaseTableSchema = new HbaseTableSchema();
hbaseTableSchema.setRowKey("rowKey", String.class);
Arrays.stream(IdTimeTemperature.schemas).forEach(column ->{
hbaseTableSchema.addColumn("cf1",column,String.class);
});
LegacyMutationConverter legacyMutationConverter = new LegacyMutationConverter(hbaseTableSchema);
HbaseSinkFunction
"testzs:test",
hConfig,
legacyMutationConverter,
10 * 1024,
10 * 1024,
1000
);
DataStream
.name("format vehicle trip").uid("format vehicle trip").rebalance();
tuple2Result.addSink(tuple2HbaseSinkFunction).name("sink to hbase").uid("sink to hbase");
env.execute();
}
public static class DataToHbaseTypeFuncation extends RichMapFunction
@Override
public Tuple2
String id = value.f0;
String time = value.f1;
String temperature = value.f2;
String rowKey = id +"_"+ time;
Row resultRow = Row.of(rowKey, Row.of(
id,
time,
temperature
));
return Tuple2.of(true,resultRow);
}
}
}
---------------------------------------------------------------------
package com.hx.utils;
import com.hx.conf.HbaseConfig;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HbaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.security.UserGroupInformation;
public class HbaseUtil {

    /**
     * Builds a Kerberos-enabled HBase client {@link Configuration} and performs
     * the keytab login.
     *
     * FIX: the original read {@code hbaseConfig.getZookeeper_quorum()} into a
     * local variable but then hard-coded the quorum, port, and znode — the
     * passed-in {@link HbaseConfig} was silently ignored. The ZooKeeper
     * settings are now taken from the config object.
     *
     * @param hbaseConfig connection settings (quorum, port, znode)
     * @return a Configuration ready for ConnectionFactory, with the UGI logged in
     * @throws Exception if the keytab login fails
     */
    public static Configuration initHbaseClientConfig(HbaseConfig hbaseConfig) throws Exception {
        // Parameters required to connect to HBase.
        Configuration conf = HbaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", hbaseConfig.getZookeeper_quorum());
        // NOTE(review): getter names assumed to mirror the builder methods
        // (zookeeper_port / zookeeper_znode seen at the call site) — confirm
        // against the HbaseConfig class.
        conf.set(HConstants.ZOOKEEPER_CLIENT_PORT, hbaseConfig.getZookeeper_port());
        conf.set(HConstants.ZOOKEEPER_ZNODE_PARENT, hbaseConfig.getZookeeper_znode());

        // Authentication: HBase authentication is kerberos, RPC protection is privacy.
        conf.set("hadoop.security.authentication", "kerberos");
        conf.set("hbase.security.authentication", "kerberos");
        conf.set("hbase.rpc.protection", "privacy");

        // Kerberos principals and keytab.
        // TODO(review): keytab path and principals are hard-coded to a dev
        // environment; consider moving them into HbaseConfig as well.
        conf.set("keytab.file", "D:/soft/kerbros/hbase.keytab");
        conf.set("hbase.master.kerberos.principal", "hbase/_HOST@hx.TC");
        conf.set("hbase.regionserver.kerberos.principal", "hbase/_HOST@hx.TC");
        conf.set("kerberos.principal", "hbase/cdp-master01.hx.tc@hx.TC");

        // Hand the security settings to Hadoop's login machinery.
        UserGroupInformation.setConfiguration(conf);
        // Log in via keytab (JVM-wide side effect on the current UGI).
        System.out.println("--------------> 开始进行keytab认证 <--------------");
        UserGroupInformation.loginUserFromKeytab("hbase/cdp-master01.hx.tc@hx.TC", "D:/soft/kerbros/hbase.keytab");
        System.out.println("--------------> keytab认证结束 <--------------");
        return conf;
    }

    /**
     * Opens an unauthenticated HBase connection against the given ZooKeeper
     * quorum and client port. Caller is responsible for closing it.
     */
    public static Connection getConnection(String zkQuorum, int port) throws Exception {
        Configuration conf = HbaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", zkQuorum);
        conf.set("hbase.zookeeper.property.clientPort", String.valueOf(port));
        return ConnectionFactory.createConnection(conf);
    }

    /**
     * Derives the Kerberos realm from a comma-separated host list: everything
     * after the first dot of the first host, upper-cased
     * (e.g. "a.jqdev.shanghaigm.com,..." -> "JQDEV.SHANGHAIGM.COM").
     */
    public static String getRealm(String hosts) {
        // The original assigned a hard-coded default that was immediately
        // overwritten — removed as dead code.
        String firstHost = hosts.split(",")[0];
        return firstHost.substring(firstHost.indexOf(".") + 1).toUpperCase();
    }

    /** Ad-hoc smoke test for {@link #getRealm(String)}. */
    public static void main(String[] args) {
        String hosts = "jqdev-l-02479.jqdev.shanghaigm.com,jqdev-l-02481.jqdev.shanghaigm.com,jqdev-l-02480.jqdev.shanghaigm.com";
        String realm = getRealm(hosts);
        System.out.println(realm);
    }
}
---------- The pom file is the same as in the previous post ("Reading from HBase") ----------



