1. Install the JDK
[root@Antiy47 ~]# mkdir /usr/local/java
[root@Antiy47 ~]# tar zxvf jdk-8u211-linux-x64.tar.gz -C /usr/local/java
[root@Antiy47 ~]# rpm -e --nodeps `rpm -qa | grep java `
[root@Antiy47 ~]# alternatives --install /usr/bin/java java /usr/local/java/jdk1.8.0_211/bin/java 300
[root@Antiy47 ~]# alternatives --config java
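If the toolchain also needs javac on the PATH, the same alternatives pattern can register it (an optional extra, not part of the original steps):
[root@Antiy47 ~]# alternatives --install /usr/bin/javac javac /usr/local/java/jdk1.8.0_211/bin/javac 300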
2. Install Maven
[root@Antiy47 ~]# tar zxvf apache-maven-3.6.1-bin.tar.gz -C /opt
[root@Antiy47 ~]# vim /etc/profile
export JAVA_HOME=/usr/local/java/jdk1.8.0_211
export JRE_HOME=/usr/local/java/jdk1.8.0_211/jre
export CLASSPATH=$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar:$JRE_HOME/lib
export MAVEN_HOME=/opt/apache-maven-3.6.1
export PATH=$PATH:$JAVA_HOME/bin:$JRE_HOME/bin:$MAVEN_HOME/bin
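Reload the profile so the new variables take effect in the current shell before running the version checks below:
[root@Antiy47 ~]# source /etc/profile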
[root@Antiy47 ~]# java -version
java version "1.8.0_171"
Java(TM) SE Runtime Environment (build 1.8.0_171-b11)
Java HotSpot(TM) 64-Bit Server VM (build 25.171-b11, mixed mode)
[root@Antiy47 ~]# mvn -version
Apache Maven 3.6.1 (d66c9c0b3152b2e69ee9bac180bb8fcc8e6af555; 2019-04-05T03:00:29+08:00)
Maven home: /opt/apache-maven-3.6.1
Java version: 1.8.0_211, vendor: Oracle Corporation, runtime: /usr/local/java/jdk1.8.0_211/jre
Default locale: en_US, platform encoding: UTF-8
OS name: "linux", version: "4.11.6-1.el6.elrepo.x86_64", arch: "amd64", family: "unix"
3. Install the JDBC drivers
[root@Antiy47 ~]# rpm -ivh mysql-connector-java-8.0.16-1.el7.noarch.rpm
[root@Antiy47 ~]# cp postgresql-42.2.5.jar /usr/local/java/jdk1.8.0_211/lib/
4. Install DataX
[root@Antiy47 ~]# tar zxvf datax.tar.gz -C /opt/
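DataX's datax.py targets Python 2, and the mysqlreader/postgresqlwriter plugins bundle their own JDBC driver jars, so a quick sanity check after unpacking (paths assume the default datax.tar.gz layout) is:
[root@Antiy47 ~]# python -V
[root@Antiy47 ~]# ls /opt/datax/plugin/reader/mysqlreader/libs/ /opt/datax/plugin/writer/postgresqlwriter/libs/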
5. Test DataX
[root@Antiy47 ~]# cd /opt/datax/bin
[root@Antiy47 bin]# python datax.py ../job/job.json
The run ends with the following summary:
2019-06-14 00:09:44.149 [job-0] INFO StandAloneJobContainerCommunicator - Total 100000 records, 2600000 bytes | Speed 253.91KB/s, 10000 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 0.022s | All Task WaitReaderTime 0.036s | Percentage 100.00%
2019-06-14 00:09:44.149 [job-0] INFO JobContainer -
任务启动时刻 : 2019-06-14 00:09:34
任务结束时刻 : 2019-06-14 00:09:44
任务总计耗时 : 10s
任务平均流量 : 253.91KB/s
记录写入速度 : 10000rec/s
读出记录总数 : 100000
读写失败总数 : 0
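The bundled job/job.json is a built-in self test (a streamreader-to-streamwriter job that generates its 100000 records in memory), so a clean run here only verifies the DataX engine itself, not database connectivity.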
6. Migrate data from MySQL to PostgreSQL
First, confirm that the source table in MySQL and the target table in PostgreSQL have matching structures; second, confirm that PostgreSQL's host authentication method (in pg_hba.conf) is md5.
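For reference, a typical pg_hba.conf entry that enables md5 password authentication for local TCP connections looks like the following (adjust the database, user, and address to your environment, then reload PostgreSQL):
host    all    all    127.0.0.1/32    md5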
[root@Antiy47 bin]# python datax.py -r mysqlreader -w postgresqlwriter > myjob.json
[root@Antiy47 bin]# vim myjob.json
Fill in the JDBC URLs, credentials, databases, and tables:
{
    "job": {
        "content": [
            {
                "reader": {
                    "name": "mysqlreader",
                    "parameter": {
                        "column": ["id","title","hosts"],
                        "connection": [
                            {
                                "jdbcUrl": ["jdbc:mysql://127.0.0.1:3306/test?characterEncoding=utf8"],
                                "table": ["dashboard_graph"]
                            }
                        ],
                        "password": "123456",
                        "username": "root",
                        "where": ""
                    }
                },
                "writer": {
                    "name": "postgresqlwriter",
                    "parameter": {
                        "column": ["id","title","hosts"],
                        "connection": [
                            {
                                "jdbcUrl": "jdbc:postgresql://127.0.0.1:5432/pgone",
                                "table": ["t1"]
                            }
                        ],
                        "password": "111111",
                        "postSql": [],
                        "preSql": [],
                        "username": "postgres"
                    }
                }
            }
        ],
        "setting": {
            "speed": {
                "channel": "3"
            }
        }
    }
}
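The reader and writer column lists are matched by position, so keep them in the same order on both sides. If each rerun should start from an empty target table, the writer's preSql hook can clear it first (an optional tweak, not part of the original job):
"preSql": ["TRUNCATE TABLE t1"]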
Run the job:
[root@Antiy47 bin]# python datax.py myjob.json
DataX (DATAX-OPENSOURCE-3.0), From Alibaba !
Copyright (C) 2010-2017, Alibaba Group. All Rights Reserved.
2019-06-13 23:51:32.463 [main] INFO VMInfo - VMInfo# operatingSystem class => sun.management.OperatingSystemImpl
2019-06-13 23:51:32.470 [main] INFO Engine - the machine info =>
osInfo: Oracle Corporation 1.8 25.171-b11
jvmInfo: Linux amd64 4.11.6-1.el6.elrepo.x86_64
cpu num: 64
totalPhysicalMemory: -0.00G
freePhysicalMemory: -0.00G
maxFileDescriptorCount: -1
currentOpenFileDescriptorCount: -1
GC Names [PS MarkSweep, PS Scavenge]
MEMORY_NAME | allocation_size | init_size
PS Eden Space | 256.00MB | 256.00MB
Code Cache | 240.00MB | 2.44MB
Compressed Class Space | 1,024.00MB | 0.00MB
PS Survivor Space | 42.50MB | 42.50MB
PS Old Gen | 683.00MB | 683.00MB
Metaspace | -0.00MB | 0.00MB
2019-06-13 23:51:32.488 [main] INFO Engine -
{
    "content":[
        {
            "reader":{
                "name":"mysqlreader",
                "parameter":{
                    "column":[
                        "id",
                        "title",
                        "hosts"
                    ],
                    "connection":[
                        {
                            "jdbcUrl":[
                                "jdbc:mysql://127.0.0.1:3306/test?characterEncoding=utf8"
                            ],
                            "table":[
                                "dashboard_graph"
                            ]
                        }
                    ],
                    "password":"******",
                    "username":"root",
                    "where":""
                }
            },
            "writer":{
                "name":"postgresqlwriter",
                "parameter":{
                    "column":[
                        "id",
                        "title",
                        "hosts"
                    ],
                    "connection":[
                        {
                            "jdbcUrl":"jdbc:postgresql://127.0.0.1:5432/pgone",
                            "table":[
                                "t1"
                            ]
                        }
                    ],
                    "password":"******",
                    "postSql":[],
                    "preSql":[],
                    "username":"postgres"
                }
            }
        }
    ],
    "setting":{
        "speed":{
            "channel":"3"
        }
    }
}
2019-06-13 23:51:32.503 [main] WARN Engine - prioriy set to 0, because NumberFormatException, the value is: null
2019-06-13 23:51:32.504 [main] INFO PerfTrace - PerfTrace traceId=job_-1, isEnable=false, priority=0
2019-06-13 23:51:32.504 [main] INFO JobContainer - DataX jobContainer starts job.
2019-06-13 23:51:32.506 [main] INFO JobContainer - Set jobId = 0
2019-06-13 23:51:32.811 [job-0] INFO OriginalConfPretreatmentUtil - Available jdbcUrl:jdbc:mysql://127.0.0.1:3306/test?characterEncoding=utf8&yearIsDateType=false&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false&rewriteBatchedStatements=true.
2019-06-13 23:51:32.825 [job-0] INFO OriginalConfPretreatmentUtil - table:[dashboard_graph] has columns:[id,title,hosts,counters,screen_id,timespan,graph_type,method,position,falcon_tags].
2019-06-13 23:51:32.906 [job-0] INFO OriginalConfPretreatmentUtil - table:[t1] all columns:[
id,title,hosts
].
2019-06-13 23:51:32.915 [job-0] INFO OriginalConfPretreatmentUtil - Write data [
INSERT INTO %s (id,title,hosts) VALUES(?,?,?)
], which jdbcUrl like:[jdbc:postgresql://127.0.0.1:5432/pgone]
2019-06-13 23:51:32.915 [job-0] INFO JobContainer - jobContainer starts to do prepare ...
2019-06-13 23:51:32.915 [job-0] INFO JobContainer - DataX Reader.Job [mysqlreader] do prepare work .
2019-06-13 23:51:32.916 [job-0] INFO JobContainer - DataX Writer.Job [postgresqlwriter] do prepare work .
2019-06-13 23:51:32.916 [job-0] INFO JobContainer - jobContainer starts to do split ...
2019-06-13 23:51:32.917 [job-0] INFO JobContainer - Job set Channel-Number to 3 channels.
2019-06-13 23:51:32.920 [job-0] INFO JobContainer - DataX Reader.Job [mysqlreader] splits to [1] tasks.
2019-06-13 23:51:32.921 [job-0] INFO JobContainer - DataX Writer.Job [postgresqlwriter] splits to [1] tasks.
2019-06-13 23:51:32.938 [job-0] INFO JobContainer - jobContainer starts to do schedule ...
2019-06-13 23:51:32.941 [job-0] INFO JobContainer - Scheduler starts [1] taskGroups.
2019-06-13 23:51:32.943 [job-0] INFO JobContainer - Running by standalone Mode.
2019-06-13 23:51:32.949 [taskGroup-0] INFO TaskGroupContainer - taskGroupId=[0] start [1] channels for [1] tasks.
2019-06-13 23:51:32.953 [taskGroup-0] INFO Channel - Channel set byte_speed_limit to -1, No bps activated.
2019-06-13 23:51:32.953 [taskGroup-0] INFO Channel - Channel set record_speed_limit to -1, No tps activated.
2019-06-13 23:51:32.962 [taskGroup-0] INFO TaskGroupContainer - taskGroup[0] taskId[0] attemptCount[1] is started
2019-06-13 23:51:32.964 [0-0-0-reader] INFO CommonRdbmsReader$Task - Begin to read record by Sql: [select id,title,hosts from dashboard_graph
] jdbcUrl:[jdbc:mysql://127.0.0.1:3306/test?characterEncoding=utf8&yearIsDateType=false&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false&rewriteBatchedStatements=true].
2019-06-13 23:51:32.976 [0-0-0-reader] INFO CommonRdbmsReader$Task - Finished read record by Sql: [select id,title,hosts from dashboard_graph
] jdbcUrl:[jdbc:mysql://127.0.0.1:3306/test?characterEncoding=utf8&yearIsDateType=false&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false&rewriteBatchedStatements=true].
2019-06-13 23:51:33.262 [taskGroup-0] INFO TaskGroupContainer - taskGroup[0] taskId[0] is successed, used[302]ms
2019-06-13 23:51:33.263 [taskGroup-0] INFO TaskGroupContainer - taskGroup[0] completed it's tasks.
2019-06-13 23:51:42.960 [job-0] INFO StandAloneJobContainerCommunicator - Total 1 records, 209 bytes | Speed 20B/s, 0 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 0.000s | All Task WaitReaderTime 0.000s | Percentage 100.00%
2019-06-13 23:51:42.960 [job-0] INFO AbstractScheduler - Scheduler accomplished all tasks.
2019-06-13 23:51:42.961 [job-0] INFO JobContainer - DataX Writer.Job [postgresqlwriter] do post work.
2019-06-13 23:51:42.961 [job-0] INFO JobContainer - DataX Reader.Job [mysqlreader] do post work.
2019-06-13 23:51:42.961 [job-0] INFO JobContainer - DataX jobId [0] completed successfully.
2019-06-13 23:51:42.962 [job-0] INFO HookInvoker - No hook invoked, because base dir not exists or is a file: /opt/datax/hook
2019-06-13 23:51:42.963 [job-0] INFO JobContainer -
[total cpu info] =>
averageCpu | maxDeltaCpu | minDeltaCpu
-1.00% | -1.00% | -1.00%
[total gc info] =>
NAME | totalGCCount | maxDeltaGCCount | minDeltaGCCount | totalGCTime | maxDeltaGCTime | minDeltaGCTime
PS MarkSweep | 0 | 0 | 0 | 0.000s | 0.000s | 0.000s
PS Scavenge | 0 | 0 | 0 | 0.000s | 0.000s | 0.000s
2019-06-13 23:51:42.963 [job-0] INFO JobContainer - PerfTrace not enable!
2019-06-13 23:51:42.963 [job-0] INFO StandAloneJobContainerCommunicator - Total 1 records, 209 bytes | Speed 20B/s, 0 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 0.000s | All Task WaitReaderTime 0.000s | Percentage 100.00%
2019-06-13 23:51:42.964 [job-0] INFO JobContainer -
任务启动时刻 : 2019-06-13 23:51:32
任务结束时刻 : 2019-06-13 23:51:42
任务总计耗时 : 10s
任务平均流量 : 20B/s
记录写入速度 : 0rec/s
读出记录总数 : 1
读写失败总数 : 0
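To confirm the row actually landed in PostgreSQL, a quick count on the target table (assuming psql is available on this host) should match the record count in the summary above:
[root@Antiy47 bin]# psql -U postgres -d pgone -c "SELECT count(*) FROM t1;"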