
Oracle GoldenGate for Big Data

孔鸿哲
2023-12-01
-- Install the required packages:
# yum -y install rlwrap unzip
-- Unpack the software:
# unzip OGG_BigData_Linux_x64_12.3.2.1.1.zip 
Archive:  OGG_BigData_Linux_x64_12.3.2.1.1.zip
  inflating: OGGBD-12.3.2.1-README.txt  
  inflating: OGG_BigData_12.3.2.1.1_Release_Notes.pdf  
  inflating: OGG_BigData_Linux_x64_12.3.2.1.1.tar  

# mkdir -p /usr/local/ogg_kafka

# tar -xvf OGG_BigData_Linux_x64_12.3.2.1.1.tar  -C /usr/local/ogg_kafka/


-- Set the environment variables:
# cat /root/.bash_profile 
# .bash_profile

# Get the aliases and functions
if [ -f ~/.bashrc ]; then
        . ~/.bashrc
fi

# User specific environment and startup programs

PATH=$PATH:$HOME/bin

export PATH
alias ggsci="rlwrap /usr/local/ogg_kafka/ggsci"
-- Apply the changes:
# source  /root/.bash_profile 
-- Error message:
# ggsci 
/usr/local/ogg_kafka/ggsci: error while loading shared libraries: libjvm.so: cannot open shared object file: No such file or directory
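This error means ggsci is dynamically linked against the JVM but cannot find libjvm.so at run time. An optional check (not part of the original post) to list the shared libraries that are still unresolved:
# ldd /usr/local/ogg_kafka/ggsci | grep "not found"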

-- The Java environment variables need to be set:
# tar -xzvf jdk-8u192-linux-x64.tar.gz  -C /usr/local/

[root@node4 soft]# source /etc/profile.d/java.sh 
[root@node4 soft]# cat /etc/profile.d/java.sh 
export JAVA_HOME=/usr/local/jdk1.8.0_192/
export PATH=$JAVA_HOME/bin:$PATH
export LD_LIBRARY_PATH=$JAVA_HOME/jre/lib/amd64:$JAVA_HOME/jre/lib/amd64/server:$JAVA_HOME/jre/lib/amd64/libjsig.so:$JAVA_HOME/jre/lib/amd64/server/libjvm.so
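After sourcing the profile, it may be worth confirming that the JVM library now resolves before retrying ggsci (an optional sanity check, not in the original post):
# ls $JAVA_HOME/jre/lib/amd64/server/libjvm.so
# ldd /usr/local/ogg_kafka/ggsci | grep libjvm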


-- Log in to GGSCI and create the working subdirectories:
# ggsci 

Oracle GoldenGate for Big Data
Version 12.3.2.1.1 (Build 005)

Oracle GoldenGate Command Interpreter
Version 12.3.0.1.2 OGGCORE_OGGADP.12.3.0.1.2_PLATFORMS_180712.2305
Linux, x64, 64bit (optimized), Generic on Jul 13 2018 00:46:09
Operating system character set identified as UTF-8.

Copyright (C) 1995, 2018, Oracle and/or its affiliates. All rights reserved.



GGSCI (node4.example.com) 1> create subdirs

Creating subdirectories under current directory /usr/local/ogg_kafka

Parameter file                 /usr/local/ogg_kafka/dirprm: created.
Report file                    /usr/local/ogg_kafka/dirrpt: created.
Checkpoint file                /usr/local/ogg_kafka/dirchk: created.
Process status files           /usr/local/ogg_kafka/dirpcs: created.
SQL script files               /usr/local/ogg_kafka/dirsql: created.
Database definitions files     /usr/local/ogg_kafka/dirdef: created.
Extract data files             /usr/local/ogg_kafka/dirdat: created.
Temporary files                /usr/local/ogg_kafka/dirtmp: created.
Credential store files         /usr/local/ogg_kafka/dircrd: created.
Masterkey wallet files         /usr/local/ogg_kafka/dirwlt: created.
Dump files                     /usr/local/ogg_kafka/dirdmp: created.
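The post jumps from creating the subdirectories straight to the test DML and the Kafka output. For orientation, a minimal sketch of the target-side configuration that would sit in between is shown below. The replicat name rkafka, the trail ./dirdat/rt, the file names under dirprm, and the Kafka library path are assumptions for illustration, not taken from the original post; the property names follow the Kafka handler samples shipped with OGG for Big Data 12.3.

-- dirprm/rkafka.prm (replicat parameter file, sketch):
REPLICAT rkafka
-- hand all trail records to the Java adapter configured in kafka.props
TARGETDB LIBFILE libggjava.so SET property=dirprm/kafka.props
REPORTCOUNT EVERY 1 MINUTES, RATE
GROUPTRANSOPS 10000
MAP scott.*, TARGET scott.*;

-- dirprm/kafka.props (Kafka handler properties, sketch):
gg.handlerlist=kafkahandler
gg.handler.kafkahandler.type=kafka
gg.handler.kafkahandler.KafkaProducerConfigFile=custom_kafka_producer.properties
# one topic per source table, matching the per-table topics noted later in the post
gg.handler.kafkahandler.topicMappingTemplate=${tableName}
gg.handler.kafkahandler.format=json
gg.handler.kafkahandler.mode=op
# Kafka client jars must be on the adapter classpath (path is an assumption)
gg.classpath=dirprm/:/usr/local/kafka/libs/*
javawriter.bootoptions=-Xmx512m -Xms32m -Djava.class.path=ggjava/ggjava.jar

-- dirprm/custom_kafka_producer.properties (sketch):
bootstrap.servers=192.168.4.184:9092
acks=1
key.serializer=org.apache.kafka.common.serialization.ByteArraySerializer
value.serializer=org.apache.kafka.common.serialization.ByteArraySerializer

-- Register and start the replicat against the trail delivered from the MySQL source:
GGSCI> add replicat rkafka, exttrail ./dirdat/rt
GGSCI> start rkafka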

-- Run insert, update, and delete operations against the source MySQL database:
mysql> insert into scott.dept(deptno,dname,loc)values(10,'AA','shenzhen');     
Query OK, 1 row affected (0.02 sec)

mysql> update scott.dept set dname='BB' where deptno=10;
Query OK, 1 row affected (0.01 sec)
Rows matched: 1  Changed: 1  Warnings: 0

mysql> delete from scott.dept where deptno=10;
Query OK, 1 row affected (0.01 sec)

-- Kafka consumer output:
# ./kafka-console-consumer.sh --zookeeper 192.168.4.184:2181 --topic  dept
Using the ConsoleConsumer with old consumer is deprecated and will be removed in a future major release. Consider using the new consumer by passing [bootstrap-server] instead of [zookeeper].
Notes:
1. A separate topic is created for each table in the source database.
2. The --zookeeper option only works with Kafka 1.1.1 and earlier; it was removed in Kafka 2.0, so later versions must use --bootstrap-server instead (see the sketch below).
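On Kafka 2.0 and later, the same consumer can be started with --bootstrap-server; a sketch, assuming the broker listens on the default port 9092:
# ./kafka-console-consumer.sh --bootstrap-server 192.168.4.184:9092 --topic dept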


{"table":"scott.dept","op_type":"I","op_ts":"2019-02-20 09:24:26.578417","current_ts":"2019-02-20T17:24:32.472000","pos":"00000000000000003663","DEPTNO":10,"DNAME":"AA","LOC":"shenzhen"}
{"table":"scott.dept","op_type":"U","op_ts":"2019-02-20 09:25:12.578331","current_ts":"2019-02-20T17:25:19.566000","pos":"00000000000000004007","DEPTNO":10,"DNAME":"BB","LOC":"shenzhen"}
{"table":"scott.dept","op_type":"D","op_ts":"2019-02-20 09:25:50.578253","current_ts":"2019-02-20T17:25:58.089000","pos":"00000000000000004093","DEPTNO":10,"DNAME":"BB","LOC":"shenzhen"}

op_type: I -- insert, U -- update, D -- delete

Reference: https://blog.csdn.net/wuwenxiang91322/article/details/78807905
