On node01
Extract the archive
tar -zxvf hadoop-3.1.2.tar.gz
Move the directory
mv hadoop-3.1.2 /opt/yjx/
Enter the configuration directory
cd /opt/yjx/hadoop-3.1.2/etc/hadoop/
Configure the cluster environment
vim hadoop-env.sh
export JAVA_HOME=/usr/java/jdk1.8.0_231-amd64
export HDFS_NAMENODE_USER=root
export HDFS_DATANODE_USER=root
export HDFS_SECONDARYNAMENODE_USER=root
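A quick sanity check that the JAVA_HOME set above points at a real JDK (path taken from this guide; adjust if your JDK lives elsewhere):
ls /usr/java/jdk1.8.0_231-amd64/bin/java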
Edit the configuration files
vim core-site.xml
<configuration>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://node01:9000</value>
  </property>
  <property>
    <name>hadoop.tmp.dir</name>
    <value>/var/yjx/hadoop/full</value>
  </property>
</configuration>
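fs.defaultFS is the address DataNodes and clients use to reach the NameNode, and hadoop.tmp.dir is the base directory under which HDFS keeps its metadata and block data by default. Once HADOOP_HOME is on the PATH (a few steps below), the values can be read back as a sanity check:
hdfs getconf -confKey fs.defaultFS
hdfs getconf -confKey hadoop.tmp.dir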
vim hdfs-site.xml
<configuration>
  <property>
    <name>dfs.namenode.secondary.http-address</name>
    <value>node02:50090</value>
  </property>
  <property>
    <name>dfs.namenode.secondary.https-address</name>
    <value>node02:50091</value>
  </property>
  <property>
    <name>dfs.replication</name>
    <value>2</value>
  </property>
</configuration>
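Optionally, if xmllint (from libxml2) is available, confirm both files are still well-formed XML before distributing them:
xmllint --noout core-site.xml hdfs-site.xml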
vim workers
node01
node02
node03
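The workers file and the addresses above refer to the nodes by hostname, so each name must resolve; assuming node01–node03 were already mapped in /etc/hosts during the base VM setup, a quick check:
for h in node01 node02 node03; do ping -c 1 -W 1 $h; done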
Copy to the other virtual machines
On node02
scp -r root@node01:/opt/yjx/hadoop-3.1.2 /opt/yjx/
On node03
scp -r root@node01:/opt/yjx/hadoop-3.1.2 /opt/yjx/
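Assuming passwordless SSH from node01 to the workers is already configured (start-dfs.sh relies on it later anyway), the copy can be verified from node01:
ssh node02 ls -d /opt/yjx/hadoop-3.1.2
ssh node03 ls -d /opt/yjx/hadoop-3.1.2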
Configure the environment variables
vim /etc/profile
export HADOOP_HOME=/opt/yjx/hadoop-3.1.2
export PATH=$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$PATH
Distribute the configured /etc/profile to node02 and node03
scp /etc/profile root@node02:/etc/profile
scp /etc/profile root@node03:/etc/profile
Reload the environment variables (run on node01, node02, and node03)
source /etc/profile
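A quick check that the variables took effect and the Hadoop binaries are on the PATH:
echo $HADOOP_HOME
hadoop version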
Format the NameNode
hdfs namenode -format
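If the format succeeds, the NameNode metadata directory appears under hadoop.tmp.dir (by default dfs.namenode.name.dir is ${hadoop.tmp.dir}/dfs/name), so with the settings above:
cat /var/yjx/hadoop/full/dfs/name/current/VERSION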
Test
Start HDFS
start-dfs.sh
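Check the daemons with jps on each node; with the configuration above, expect NameNode and DataNode on node01, SecondaryNameNode and DataNode on node02, and a DataNode on node03:
jps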
Open the NameNode web UI
http://192.168.88.101:9870/
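9870 is the NameNode web UI default port in Hadoop 3.x (the IP above is presumably node01's address); the SecondaryNameNode UI runs on the port set in hdfs-site.xml:
http://node02:50090/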
Work with HDFS
Create a directory
hdfs dfs -mkdir -p /yjx
Upload a file
hdfs dfs -put hadoop-3.1.2.tar.gz /yjx
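To confirm the upload (the -put above assumes the shell is in the directory that holds hadoop-3.1.2.tar.gz), list the target; the second column of the -ls output is the replication factor and should read 2, matching dfs.replication:
hdfs dfs -ls /
hdfs dfs -ls /yjx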
Shut down the cluster
stop-dfs.sh
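After shutdown, jps on each node should no longer list NameNode, DataNode, or SecondaryNameNode:
jps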