链接:https://pan.baidu.com/s/1WE4Cywsz4w3XHd0v1ObbzQ
提取码:5yh7
(1)上传docker安装包,本次上传至”/opt/”文件夹下
(2)cd /opt
(3)执行tar xzvf docker-20.10.16.tgz
(4)进入docker目录,将目录下的所有文件拷贝至”/usr/bin/”
cd docker
cp * /usr/bin/
(5)vi /etc/systemd/system/docker.service
粘贴以下内容:
[Unit]
Description=Docker Application Container Engine
Documentation=https://docs.docker.com
After=network-online.target firewalld.service
Wants=network-online.target
[Service]
Type=notify
ExecStart=/usr/bin/dockerd
ExecReload=/bin/kill -s HUP $MAINPID
LimitNOFILE=infinity
LimitNPROC=infinity
TimeoutStartSec=0
Delegate=yes
KillMode=process
Restart=on-failure
StartLimitBurst=3
StartLimitInterval=60s
[Install]
WantedBy=multi-user.target
保存并退出(按 Esc 后输入 :wq)
(6)设置文件权限(systemd 单元文件应为 644,不需要可执行权限,否则 systemd 会告警)
chmod 644 /etc/systemd/system/docker.service
systemctl daemon-reload
(7)开机启动
systemctl enable docker.service
(8)启动docker
systemctl start docker
(9)测试docker是否安装完成
docker -v
下载好离线包,
mv docker-compose-linux-x86_64 /usr/local/bin/
//修改文件名(文件已移动到 /usr/local/bin,需对该路径下的文件重命名)
mv /usr/local/bin/docker-compose-linux-x86_64 /usr/local/bin/docker-compose
//授权
sudo chmod +x /usr/local/bin/docker-compose
//查看安装是否成功
docker-compose -v
1、上传包并解压
[root@node1 ~]# tar -zxvf jdk-8u171-linux-x64.tar.gz
[root@node1 ~]# mv jdk1.8.0_171 /usr/local/openjdk-8
# 注意:jdk-8u171-linux-x64.tar.gz 解压后的目录名是 jdk1.8.0_171,
# 且需移动到 /usr/local 下,与后面 JAVA_HOME=/usr/local/openjdk-8 保持一致
2、配置环境变量
[root@node1 ~]# vim /etc/profile
#添加文件以下内容
export JAVA_HOME=/usr/local/openjdk-8
export HADOOP_CONF_DIR=/usr/local/soft/hadoop-2.7.6/etc/hadoop/
export HADOOP_HOME=/usr/local/soft/hadoop-2.7.6
export ZOOKEEPER_HOME=/usr/local/soft/zookeeper-3.4.6
export SPARK_HOME=/usr/local/soft/spark-2.4.5
export HIVE_HOME=/usr/local/soft/hive-1.2.1
export DATAX_HOME=/opt/soft/datax
export HBASE_HOME=/usr/local/soft/hbase-1.4.6
export HIVE_CONF=/usr/local/soft/hive-1.2.1/conf
export KYLIN_HOME=/usr/local/soft/kylin-2.5.0
export KAFKA_HOME=/usr/local/soft/kafka_2.11-1.0.0
export FLUME_HOME=/usr/local/soft/flume-1.6.0
export FLINK_HOME=/usr/local/soft/flink-1.11.2
export KE_HOME=/usr/local/soft/kafka-eagle-bin-2.0.3/kafka-eagle-web-2.0.3/
export PATH=.:$PATH:$JAVA_HOME/bin:$DATAX_HOME/bin:$FLINK_HOME/bin:$FLUME_HOME/bin:$KAFKA_HOME/bin:$KYLIN_HOME/bin:$HBASE_HOME/bin:$HADOOP_HOME/bin:$HIVE_HOME/bin:$SPARK_HOME/bin:$ZOOKEEPER_HOME/bin:$HADOOP_HOME/sbin
export PATH USER LOGNAME MAIL HOSTNAME HISTSIZE HISTCONTROL
3、测试java是否安装成功(先执行 source 使 /etc/profile 的修改生效)
[root@node1 ~]# source /etc/profile
[root@node1 ~]# java -version
1、创建/opt/soft文件夹并上传压缩包
[root@node1 ~]# mkdir -p /opt/soft
2、解压
[root@node1 ~]# tar -zxvf datax.tar.gz
3、测试datax是否安装成功
[root@node1 ~]# cd /opt/soft/datax
[root@node1 datax]# datax.py ./job/job.json
1、创建dolphinscheduler文件夹
[root@node1 ~]# mkdir -p /opt/dolphinscheduler
2、上传docker-compose.yml和config.env.sh文件到dolphinscheduler文件夹下
3、加载dolphinscheduler、zookeeper和postgresql镜像
[root@node1 ~]# docker load < /opt/dolphinscheduler.tar
[root@node1 ~]# docker load < /opt/zookeeper.tar
[root@node1 ~]# docker load < /opt/postgres.tar
4、启动海豚调度
[root@node1 dolphinscheduler]# docker-compose up -d
#关闭海豚调度
[root@node1 dolphinscheduler]# docker-compose down
5、访问海豚调度页面
输入网址 http://192.168.184.202:12345/dolphinscheduler
用户名: admin
密码 : dolphinscheduler123