1、準備需要的軟件:
jdk-8u161-linux-x64.tar
hadoop-3.1.0.tar
2、docker安裝centos7、啟動
docker pull centos:7
docker run -d -t centos:7
3、安裝ssh
docker container exec -it a8d4b8936b29 /bin/bash 進入容器
yum -y install openssh-server
yum -y install openssh-clients
/usr/sbin/sshd -D 啟動 sshd 時會報錯(缺少主機密鑰文件)
運行以下命令生成主機密鑰,解決該錯誤
ssh-keygen -t rsa -f /etc/ssh/ssh_host_rsa_key -N ""
ssh-keygen -t ecdsa -f /etc/ssh/ssh_host_ecdsa_key -N ""
ssh-keygen -t ed25519 -f /etc/ssh/ssh_host_ed25519_key -N ""
vi /etc/ssh/sshd_config 修改配置文件,把 UsePAM yes 改為 UsePAM no(或將該行註釋掉)
修改root的密碼
passwd root
退出容器,回到宿主主機,把剛才配置好 ssh 的容器提交為新鏡像,取名 test1
docker commit a8d4b8936b29 test1
運行鏡像test1
docker run -d -p 10022:22 test1 /usr/sbin/sshd -D
4、安裝jdk、hadoop
從宿主主機拷貝 jdk、hadoop 到容器的 /opt 目錄
docker cp jdk-8u161-linux-x64.tar 0d3dc5424d22:/opt
docker cp hadoop-3.1.0.tar 0d3dc5424d22:/opt
在容器的 /opt 目錄下解壓,並分別移動到 /usr/local/jdk8 和 /usr/local/hadoop
tar -xf hadoop-3.1.0.tar
tar -xf jdk-8u161-linux-x64.tar
mv hadoop-3.1.0 /usr/local/hadoop
mv jdk1.8.0_161 /usr/local/jdk8
配置jdk、hadoop環境變量
vi ~/.bashrc,在文件末尾加入以下內容,保存後執行 source ~/.bashrc 使配置生效
export JAVA_HOME=/usr/local/jdk8
export HADOOP_HOME=/usr/local/hadoop
export PATH=$PATH:$JAVA_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
修改hadoop配置文件
配置/usr/local/hadoop/etc/hadoop/hadoop-env.sh
vi hadoop-env.sh,在其中加入 export JAVA_HOME=/usr/local/jdk8
配置/usr/local/hadoop/etc/hadoop/core-site.xml
vi core-site.xml
配置/usr/local/hadoop/etc/hadoop/hdfs-site.xml
vi hdfs-site.xml
配置/usr/local/hadoop/etc/hadoop/mapred-site.xml
vi mapred-site.xml
配置/usr/local/hadoop/etc/hadoop/yarn-site.xml
vi yarn-site.xml
5、修改啟動關閉腳本
vi start-dfs.sh、vi stop-dfs.sh(位於 $HADOOP_HOME/sbin 目錄下),在腳本開始部分加入以下配置
HDFS_DATANODE_USER=root
HADOOP_SECURE_DN_USER=hdfs
HDFS_NAMENODE_USER=root
HDFS_SECONDARYNAMENODE_USER=root
vi start-yarn.sh、vi stop-yarn.sh(位於 $HADOOP_HOME/sbin 目錄下),在腳本開始部分加入以下配置
YARN_RESOURCEMANAGER_USER=root
HADOOP_SECURE_DN_USER=yarn
YARN_NODEMANAGER_USER=root
6、ssh免密登錄
ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
chmod 0600 ~/.ssh/authorized_keys
7、格式化namenode
hdfs namenode -format
8、啟動dfs、yarn
start-dfs.sh 啟動 dfs
start-yarn.sh 啟動 yarn
9、試用
hdfs dfs -mkdir /input
(以下兩條命令假設當前目錄為 $HADOOP_HOME/bin,../etc/hadoop 與 ../share 為相對於該目錄的路徑)
hdfs dfs -put ../etc/hadoop/*.xml /input
hadoop jar ../share/hadoop/mapreduce/hadoop-mapreduce-examples-3.1.0.jar grep /input /output 'dfs[a-z.]+'
閱讀更多 Lucif墮落天使 的文章