Installing single-node Hadoop 2.7.3 on a Huawei Cloud ECS and accessing HDFS from an external network

Disable the firewall

# Stop the firewall for the current session.
systemctl stop firewalld.service
# Prevent it from starting again at boot.
# NOTE(review): on a public cloud host, prefer opening only the required
# ports via security-group rules instead of disabling the firewall entirely.
systemctl disable firewalld.service

Note the server's IP addresses
Internal IP: 192.168.x.xxx
Public network IP: 115.61.xx.xx

vim /etc/hosts
# /etc/hosts — map the private IP to the Hadoop hostname "lkk2".
::1     localhost       localhost.localdomain   localhost6      localhost6.localdomain6
127.0.0.1       localhost       localhost.localdomain   localhost4      localhost4.localdomain4
127.0.0.1       localhost       localhost
127.0.0.1       ecs-1211        ecs-1211
# Hadoop daemons bind via this entry; external clients map the same
# hostname to the server's PUBLIC IP in their own hosts file.
192.168.x.xxx   lkk2

Use the hostname command to change the hostname

# Set the hostname for the current session.
# NOTE(review): `hostname` is not persistent across reboots — use
# `hostnamectl set-hostname lkk2` to make the change permanent.
hostname lkk2

Set up passwordless SSH login

# Generate an RSA key pair with an empty passphrase (non-interactive).
ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
# Authorize the new public key for logins to this same machine.
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
# sshd rejects authorized_keys files that are writable by group/others.
chmod 0600 ~/.ssh/authorized_keys

Verify that passwordless login works

ssh lkk2

Remove the OpenJDK that ships with the system

[root@ecs-1211 ~]# rpm -qa | grep jdk
java-1.8.0-openjdk-headless-1.8.0.232.b09-0.el7_7.aarch64
java-1.8.0-openjdk-devel-1.8.0.232.b09-0.el7_7.aarch64
java-1.8.0-openjdk-1.8.0.232.b09-0.el7_7.aarch64
copy-jdk-configs-3.3-10.el7_5.noarch
[root@ecs-1211 ~]# rpm -e --nodeps java-1.8.0-openjdk-headless-1.8.0.232.b09-0.el7_7.aarch64
[root@ecs-1211 ~]# rpm -e --nodeps java-1.8.0-openjdk-devel-1.8.0.232.b09-0.el7_7.aarch64
[root@ecs-1211 ~]# rpm -e --nodeps java-1.8.0-openjdk-1.8.0.232.b09-0.el7_7.aarch64

Install the JDK
# Create the JDK install directory and move into it.
# (The original had stray spaces in the path: `cd / usr / local / java`
# passes four separate arguments and does not change to /usr/local/java.)
mkdir -p /usr/local/java
cd /usr/local/java
Download jdk-8u281-linux-aarch64.tar.gz into the /usr/local/java directory

 tar -zxvf jdk-8u281-linux-aarch64.tar.gz 

Set JDK environment variables

vim /etc/profile
# Java environment variables (appended to /etc/profile).
JAVA_HOME=/usr/local/java/jdk1.8.0_281
# The JRE lives inside the JDK install — derive it from JAVA_HOME.
# (The original pointed at /usr/java/..., which does not match the
# install location /usr/local/java/... used above.)
JRE_HOME=$JAVA_HOME/jre
CLASS_PATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar:$JRE_HOME/lib
PATH=$PATH:$JAVA_HOME/bin:$JRE_HOME/bin
export JAVA_HOME JRE_HOME CLASS_PATH PATH

Apply the configuration file

source /etc/profile 
[root@ecs-1211 hadoop]# java -version
java version "1.8.0_281"
Java(TM) SE Runtime Environment (build 1.8.0_281-b09)
Java HotSpot(TM) 64-Bit Server VM (build 25.281-b09, mixed mode)

Install Hadoop
# Create the Hadoop install directory.
# (The original `mkdir -p / data / hadoop` had stray spaces and would
# create /data and /hadoop as separate top-level directories.)
mkdir -p /data/hadoop
Download hadoop-2.7.3.tar.gz into /data/hadoop

tar -zxvf  hadoop-2.7.3.tar.gz 

Setting Hadoop environment variables

vim /etc/profile
# Java + Hadoop environment variables (appended to /etc/profile).
JAVA_HOME=/usr/local/java/jdk1.8.0_281
# The JRE lives inside the JDK install — derive it from JAVA_HOME.
# (The original pointed at /usr/java/..., inconsistent with JAVA_HOME.)
JRE_HOME=$JAVA_HOME/jre
HADOOP_HOME=/data/hadoop/hadoop-2.7.3
CLASS_PATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar:$JRE_HOME/lib
# bin/ holds hadoop/hdfs client tools; sbin/ holds the start/stop scripts.
PATH=$PATH:$JAVA_HOME/bin:$JRE_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
export JAVA_HOME JRE_HOME HADOOP_HOME CLASS_PATH PATH

Apply the configuration file

source /etc/profile 

Hadoop configuration files

 cd /data/hadoop/hadoop-2.7.3/etc/hadoop/

hadoop-env.sh
yarn-env.sh
core-site.xml
hdfs-site.xml
mapred-site.xml
yarn-site.xml

hadoop-env.sh

# hadoop-env.sh — point Hadoop daemons at the installed JDK.
export JAVA_HOME=/usr/local/java/jdk1.8.0_281

vim yarn-env.sh

 
 # yarn-env.sh — JDK location and heap ceiling for the YARN daemons.
 export JAVA_HOME=/usr/local/java/jdk1.8.0_281
 # Max heap; must be a valid JVM flag — the original `Xmx3072m` was
 # missing the leading dash and would be rejected by the JVM.
 JAVA_HEAP_MAX=-Xmx3072m

vim core-site.xml

 <!-- Default filesystem URI. `fs.default.name` is deprecated since
      Hadoop 2.x; `fs.defaultFS` is the supported key. Using the
      hostname (not an IP) lets external clients remap it. -->
 <property>
		<name>fs.defaultFS</name>
		<value>hdfs://lkk2:9000</value>
 </property>

 <!-- Base directory for Hadoop's runtime/temporary files. -->
 <property>
   	<name>hadoop.tmp.dir</name>
   	<value>file:/data/hadoop/data/tmp</value>
 </property>

vim hdfs-site.xml

<!-- DataNode block storage directory. -->
<property>
  <name>dfs.datanode.data.dir</name>
  <value>/data/hadoop/data/hdf/data</value>
  <final>true</final>
</property>
<!-- NameNode metadata (fsimage/edit log) directory. -->
<property>
  <name>dfs.namenode.name.dir</name>
  <value>/data/hadoop/data/hdf/name</value>
  <final>true</final>
</property>
<!-- Single-node setup: keep only one replica of each block. -->
<property>
  <name>dfs.replication</name>
  <value>1</value>
</property>
<!-- WARNING: disabling HDFS permission checks is convenient for a demo
     machine but unsafe on a host reachable from the public internet. -->
<property>
  <name>dfs.permissions.enabled</name>
  <value>false</value>
</property>
<!-- Clients resolve DataNodes by hostname instead of the (private)
     IP the NameNode reports, so external clients can map "lkk2" to the
     public IP in their own hosts file. -->
<property>
  <name>dfs.client.use.datanode.hostname</name>
  <value>true</value>
  <description>only config in clients</description>
</property>

vim mapred-site.xml

<!-- Run MapReduce jobs on YARN. -->
<property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
  </property>
  <!-- JobHistory server RPC address. -->
  <property>
    <name>mapreduce.jobhistory.address</name>
    <value>lkk2:10020</value>
  </property>
  <!-- JobHistory server web UI address. -->
  <property>
    <name>mapreduce.jobhistory.webapp.address</name>
    <value>lkk2:19888</value>
  </property>

vim yarn-site.xml

<!-- Hostname of the ResourceManager. -->
<property>
	<name>yarn.resourcemanager.hostname</name>
	<value>lkk2</value>
</property>
<!-- Auxiliary shuffle service required by MapReduce on YARN. -->
<property>
	<name>yarn.nodemanager.aux-services</name>
	<value>mapreduce_shuffle</value>
</property>

vim slaves

lkk2

Format the NameNode
# Work from the Hadoop scripts directory (hdfs is on PATH via /etc/profile).
cd /data/hadoop/hadoop-2.7.3/sbin/

# Initialize the NameNode metadata directory.
# NOTE(review): run this only once — re-formatting wipes all HDFS metadata.
hdfs namenode -format

Start Hadoop

start-all.sh   

http://115.61.xx.xx:50070

Leave a Comment