1. hadoop
1.1 hadoop-env.sh
Set JAVA_HOME to the JDK installation path; Hadoop's scripts read it from this file.
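For example, using the JDK path from the .profile in section 3:

export JAVA_HOME=/home/peerslee/opt/jdk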
1.2 core-site.xml
Replace {} with an absolute local path for Hadoop's working files; fs.defaultFS points clients at the local HDFS NameNode.
<configuration>
  <property>
    <name>hadoop.tmp.dir</name>
    <value>file:{}</value>
    <description>A base for other temporary directories.</description>
  </property>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://localhost:9000</value>
  </property>
</configuration>
1.3 hdfs-site.xml
dfs.replication is 1 because this is a single-node setup; the NameNode and DataNode directories live under the same {} path used above.
<configuration>
  <property>
    <name>dfs.replication</name>
    <value>1</value>
  </property>
  <property>
    <name>dfs.namenode.name.dir</name>
    <value>file:{}tmp/dfs/name</value>
  </property>
  <property>
    <name>dfs.datanode.data.dir</name>
    <value>file:{}tmp/dfs/data</value>
  </property>
</configuration>
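With both files in place, a typical first run formats the NameNode, starts HDFS, and checks that the daemons are up (standard Hadoop commands; they resolve once $HADOOP_HOME/bin and sbin are on PATH, as set in section 3):

hdfs namenode -format   # one-time initialization of the NameNode metadata
start-dfs.sh            # starts NameNode, DataNode, SecondaryNameNode
jps                     # should list the three HDFS daemons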
2. spark
2.1 spark-env.sh
Set JAVA_HOME here as well so that Spark's launch scripts use the same JDK.
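A minimal sketch, reusing the JDK path from section 3; the HADOOP_CONF_DIR line is optional and only needed if Spark should pick up the Hadoop configuration:

export JAVA_HOME=/home/peerslee/opt/jdk
export HADOOP_CONF_DIR=/home/peerslee/opt/hadoop/etc/hadoop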
2.2 spark-defaults.conf
Set spark.local.dir to a directory with enough free space; Spark uses it for scratch space such as shuffle output and disk-spilled data.
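For example (the path here is illustrative; any writable local directory with enough space works):

spark.local.dir    /home/peerslee/tmp/spark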
3. .profile
JAVA_HOME=/home/peerslee/opt/jdk
SCALA_HOME=/home/peerslee/opt/scala
HADOOP_HOME=/home/peerslee/opt/hadoop
SPARK_HOME=/home/peerslee/opt/spark-hadoop
PATH=$SPARK_HOME/sbin:$SPARK_HOME/bin:$HADOOP_HOME/sbin:$HADOOP_HOME/bin:$SCALA_HOME/bin:$JAVA_HOME/bin:$PATH
export HADOOP_HOME SCALA_HOME JAVA_HOME SPARK_HOME PATH
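After reloading the profile, each tool should resolve from PATH (standard version checks; the printed versions depend on the installed releases):

source ~/.profile
java -version
hadoop version
scala -version
spark-submit --version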