Flink CDC
连接 Oracle 之前,首先需要在 Windows 服务管理器中启动这两个 Oracle 服务(通常为数据库实例服务 OracleServiceORCL 与对应的监听服务)。
提前创建好对应的目录,然后在 cmd 命令行中运行以下命令:
-- Oracle LogMiner setup for Flink CDC.
-- Connect as DBA first:  sqlplus sys/oracleadmin AS SYSDBA

-- ---------------------------------------------------------------
-- 1. Enable archive logging (requires a database restart).
-- ---------------------------------------------------------------
ALTER SYSTEM SET db_recovery_file_dest_size = 10G;
-- NOTE(review): backslashes were stripped from the original blog text;
-- confirm the actual directory exists before running.
ALTER SYSTEM SET db_recovery_file_dest = 'F:\ORACLE\db_recovery_file_dest' SCOPE=spfile;
SHUTDOWN IMMEDIATE;
STARTUP MOUNT;
ALTER DATABASE ARCHIVELOG;
ALTER DATABASE OPEN;

-- Verify that archive logging is now enabled.
ARCHIVE LOG LIST;

-- ---------------------------------------------------------------
-- 2. Database-level supplemental logging, so change capture can
--    see the state of a row BEFORE each change.
-- ---------------------------------------------------------------
ALTER DATABASE ADD SUPPLEMENTAL LOG DATA;

-- ---------------------------------------------------------------
-- 3. Tablespace + connector user.
-- ---------------------------------------------------------------
CREATE TABLESPACE logminer_tbs
    DATAFILE 'F:\ORACLE\namespace\logminer_tbs.dbf'
    SIZE 25M REUSE AUTOEXTEND ON MAXSIZE UNLIMITED;

-- Create user "family" bound to tablespace LOGMINER_TBS.
CREATE USER family IDENTIFIED BY chickenkang
    DEFAULT TABLESPACE logminer_tbs
    QUOTA UNLIMITED ON logminer_tbs;

-- Grant DBA (broad; fine for a local tutorial, too wide for production).
GRANT CONNECT, RESOURCE, DBA TO family;

-- ---------------------------------------------------------------
-- 4. Privileges required by the Debezium/Flink CDC LogMiner reader.
--    (Duplicates from the original list have been removed.)
-- ---------------------------------------------------------------
GRANT CREATE SESSION TO family;
GRANT SELECT ON V_$DATABASE TO family;
GRANT FLASHBACK ANY TABLE TO family;
GRANT SELECT ANY TABLE TO family;
GRANT SELECT_CATALOG_ROLE TO family;
GRANT EXECUTE_CATALOG_ROLE TO family;
GRANT SELECT ANY TRANSACTION TO family;
GRANT EXECUTE ON SYS.DBMS_LOGMNR TO family;
GRANT EXECUTE ON DBMS_LOGMNR_D TO family;
GRANT CREATE TABLE TO family;
GRANT LOCK ANY TABLE TO family;
GRANT ALTER ANY TABLE TO family;
GRANT CREATE SEQUENCE TO family;
GRANT SELECT ON V_$LOG TO family;
GRANT SELECT ON V_$LOG_HISTORY TO family;
GRANT SELECT ON V_$LOGMNR_LOGS TO family;
GRANT SELECT ON V_$LOGMNR_CONTENTS TO family;
GRANT SELECT ON V_$LOGMNR_PARAMETERS TO family;
GRANT SELECT ON V_$LOGFILE TO family;
GRANT SELECT ON V_$ARCHIVED_LOG TO family;
GRANT SELECT ON V_$ARCHIVE_DEST_STATUS TO family;

-- ---------------------------------------------------------------
-- 5. Table-level supplemental logging for the captured table.
--    Create FAMILY."user" in Oracle first, then run this.
-- ---------------------------------------------------------------
ALTER TABLE FAMILY."user" ADD SUPPLEMENTAL LOG DATA (ALL) COLUMNS;
如果其中某条命令执行失败,可参考另一篇博客:https://blog.csdn.net/eyeofeagle/article/details/119180015
创建表user
<!-- pom.xml for OracleSourceExample.java.
     NOTE(review): the original page lost every XML tag during extraction;
     this fragment is reconstructed from the surviving artifact IDs and
     version numbers — verify groupId/artifactId pairings before use. -->
<properties>
    <java.version>1.8</java.version>
    <maven.compiler.source>${java.version}</maven.compiler.source>
    <maven.compiler.target>${java.version}</maven.compiler.target>
    <flink.version>1.13.0</flink.version>
    <scala.version>2.12</scala.version>
</properties>

<dependencies>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-table-planner-blink_${scala.version}</artifactId>
        <version>${flink.version}</version>
    </dependency>
    <!-- Flink CDC Oracle connector (LogMiner based). -->
    <dependency>
        <groupId>com.ververica</groupId>
        <artifactId>flink-connector-oracle-cdc</artifactId>
        <version>2.1.0</version>
    </dependency>
    <!-- Oracle JDBC driver. -->
    <dependency>
        <groupId>com.oracle.database.jdbc</groupId>
        <artifactId>ojdbc10</artifactId>
        <version>19.10.0.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-streaming-java_${scala.version}</artifactId>
        <version>${flink.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-clients_${scala.version}</artifactId>
        <version>${flink.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-cep_${scala.version}</artifactId>
        <version>${flink.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-java</artifactId>
        <version>${flink.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-json</artifactId>
        <version>${flink.version}</version>
    </dependency>
    <dependency>
        <groupId>com.alibaba</groupId>
        <artifactId>fastjson</artifactId>
        <version>1.2.68</version>
    </dependency>
    <dependency>
        <groupId>org.projectlombok</groupId>
        <artifactId>lombok</artifactId>
        <version>1.18.20</version>
    </dependency>
    <dependency>
        <groupId>org.slf4j</groupId>
        <artifactId>slf4j-api</artifactId>
        <version>1.7.25</version>
    </dependency>
    <dependency>
        <groupId>org.slf4j</groupId>
        <artifactId>slf4j-log4j12</artifactId>
        <version>1.7.25</version>
    </dependency>
    <dependency>
        <groupId>org.apache.logging.log4j</groupId>
        <artifactId>log4j-to-slf4j</artifactId>
        <version>2.14.0</version>
    </dependency>
</dependencies>

<build>
    <plugins>
        <!-- Bundle the job and all dependencies into one fat jar. -->
        <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-assembly-plugin</artifactId>
            <version>3.0.0</version>
            <configuration>
                <descriptorRefs>
                    <descriptorRef>jar-with-dependencies</descriptorRef>
                </descriptorRefs>
            </configuration>
            <executions>
                <execution>
                    <id>make-assembly</id>
                    <phase>package</phase>
                    <goals>
                        <goal>single</goal>
                    </goals>
                </execution>
            </executions>
        </plugin>
    </plugins>
</build>
import com.ververica.cdc.connectors.oracle.table.StartupOptions;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import com.ververica.cdc.connectors.oracle.OracleSource;
/**
 * Minimal Flink job that streams change events from the Oracle {@code family}
 * schema via Flink CDC (LogMiner) and prints each change as a JSON string.
 *
 * <p>Fix over the original: the raw types {@code SourceFunction} and
 * {@code DataStreamSource} are parameterized with {@code String}, matching
 * what {@link JsonDebeziumDeserializationSchema} actually produces.
 */
public class OracleSourceExample {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps the printed change log in event order.
        env.setParallelism(1);

        SourceFunction<String> sourceFunction = OracleSource.<String>builder()
                .hostname("localhost")
                .port(1521)
                .database("ORCL")      // SID/database name this instance runs as
                .schemaList("family")  // capture every table in the "family" schema
                //.tableList("inventory.products") // or restrict capture to specific tables
                .username("family")
                .password("chickenkang")
                .deserializer(new JsonDebeziumDeserializationSchema()) // SourceRecord -> JSON string
                .startupOptions(StartupOptions.initial()) // snapshot first, then stream the redo log
                .build();

        DataStreamSource<String> changeStream = env.addSource(sourceFunction);
        changeStream.print();

        env.execute("JOB");
    }
}
对表进行增加删除修改的效果
详细学习可看
https://debezium.io/documentation/reference/1.4/connectors/oracle.html#oracle-create-users-logminer



