不睡觉

docker run -ti --name host01 -v /home/user/miku:/miku -p 30122:22 ubuntu

1、安装Java JDK

转外
wget https://mirrors.huaweicloud.com/java/jdk/8u202-b08/jdk-8u202-linux-x64.tar.gz
tar -zxvf jdk-8u202-linux-x64.tar.gz
mv jdk1.8.0_202/ jdk
转内
cp /etc/profile /miku
转外
sudo vi profile
尾部添加
# JDK,TOMCAT,ORACLE
export JAVA_HOME=/miku/jdk
export JRE_HOME=$JAVA_HOME/jre
export CLASSPATH=.:$JAVA_HOME/lib:$JRE_HOME/lib
export PATH=$PATH:$JAVA_HOME/bin
转内
cp /miku/profile /etc/profile
source /etc/profile
java -version

2、配置服务器hosts

apt update
apt install openssh-server (安装过程中 tzdata 会交互询问时区:先输 6 选 Asia,再输 70 选 Shanghai)
/etc/init.d/ssh start
外查看容器IP: docker inspect --format='{{.NetworkSettings.IPAddress}}' host01
分配固定IP: https://www.cnblogs.com/brock0624/p/9795208.html
cp /etc/hosts /miku
cp /etc/hostname /miku
外 sudo vi hosts
外 sudo vi hostname
cp /miku/hosts /etc/hosts
cp /miku/hostname /etc/hostname
注意:容器重新启动后(exit 退出,再 docker start、docker attach 进入),对 hosts/hostname 的修改都会复原,需要重新执行以下两步:
/etc/init.d/ssh start
source /etc/profile

3、配置免密登录

docker run -ti --name nn01 -h nn01 --add-host=nn02:172.17.0.4 -v /home/user/miku:/miku -p 30222:22 ubuntu
docker run -ti --name nn02 -h nn02 --add-host=nn01:172.17.0.3 -v /home/user/miku:/miku -p 30322:22 ubuntu
安装openssh-server并启动
useradd miku
passwd miku
apt-get install sudo
cp /etc/sudoers /miku
转外 sudo vi sudoers,添加一行 miku ALL=(ALL:ALL) ALL,保存时用 :wq!(sudoers 默认只读,需强制写入)
cp /miku/sudoers /etc/sudoers
su miku
sudo whoami
-- sudo: setrlimit(RLIMIT_CORE): Operation not permitted
sudo mkdir /home/miku
sudo chown miku:miku /home/miku
ssh-keygen -t rsa 全默认,生成/home/miku/.ssh/id_rsa.pub
scp ~/.ssh/id_rsa.pub miku@nn02:/home/miku
在nn02 cat id_rsa.pub >> ~/.ssh/authorized_keys
ssh nn02 已经免密
nn02做一遍
添加自己 cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys

4、安装配置 Hadoop(别睡觉)

wget http://mirrors.sonic.net/apache/hadoop/common/hadoop-2.10.0/hadoop-2.10.0.tar.gz (最新3.2.1)
tar xvzf hadoop-2.10.0.tar.gz
mv hadoop-2.10.0 hadoop
sudo vi .bashrc
#hadoop
export HADOOP_HOME=/miku/hadoop
export HADOOP_PREFIX=$HADOOP_HOME
export HADOOP_CONFIG_HOME=$HADOOP_HOME/etc/hadoop
export PATH=$PATH:$HADOOP_HOME/bin
export PATH=$PATH:$HADOOP_HOME/sbin
mkdir data
mkdir data/logs
mkdir data/logs/hadoop
mkdir data/hadoop
mkdir data/hadoop/hdfs
mkdir data/hadoop/hdfs/nn
mkdir data/hadoop/hdfs/dn
cd ~/miku/hadoop/etc/hadoop
vi hdfs-site.xml

<property>
    <name>dfs.replication</name>
    <value>1</value>
</property>
<property>
    <name>dfs.namenode.name.dir</name>
    <value>/miku/data/hadoop/hdfs/nn</value>
</property>
<property>
    <name>dfs.datanode.data.dir</name>
    <value>/miku/data/hadoop/hdfs/dn</value>
</property>

转内
cp /miku/.bashrc ~/.bashrc
source ~/.bashrc
格式化 hadoop namenode -format
start-dfs.sh
Error: JAVA_HOME is not set and could not be found.
vi ~/miku/hadoop/etc/hadoop/hadoop-env.sh # JAVA_HOME 写绝对路径(此文件中 JAVA_HOME 不能依赖环境变量)

标签: none

添加新评论