
Common Spark environment settings (/etc/profile)

The full /etc/profile used on the cluster nodes is shown below; it covers Java, Scala, Spark, Node.js, Hadoop (HDFS/YARN), the PySpark notebook driver, and HBase.

# /etc/profile: system-wide .profile file for the Bourne shell (sh(1))
# and Bourne compatible shells (bash(1), ksh(1), ash(1), ...).

if [ "$PS1" ]; then
if [ "$BASH" ] && [ "$BASH" != "/bin/sh" ]; then
# The file bash.bashrc already sets the default PS1.
# PS1=‘\h:\w\$ ‘
if [ -f /etc/bash.bashrc ]; then


. /etc/bash.bashrc
fi
else
if [ "`id -u`" -eq 0 ]; then
PS1=‘# ‘
else
PS1=‘$ ‘
fi
fi
fi

if [ -d /etc/profile.d ]; then
  for i in /etc/profile.d/*.sh; do
    if [ -r $i ]; then
      . $i
    fi
  done
  unset i
fi

######java######
export JAVA_HOME=/opt/jdk1.8.0_171
export CLASSPATH=.:$JAVA_HOME/lib
export PATH=$PATH:$JAVA_HOME/bin
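
A quick sanity check, assuming the JDK really is unpacked at /opt/jdk1.8.0_171: after re-reading the profile (source /etc/profile, or a fresh login shell), java should resolve from the PATH.

source /etc/profile    # re-read the updated profile in the current shell
echo $JAVA_HOME        # should print /opt/jdk1.8.0_171
java -version          # should report java version "1.8.0_171"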

######scala#####
export SCALA_HOME=/opt/scala-2.11.12
export PATH=$PATH:$SCALA_HOME/bin
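
Scala can be checked the same way; the reported version should match the 2.11.12 tarball referenced above.

which scala       # should resolve to /opt/scala-2.11.12/bin/scala
scala -version    # should report Scala 2.11.12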

######spark######
export SPARK_HOME=/opt/spark-2.3.0-bin-hadoop2.7
export PATH=$PATH:$SPARK_HOME/bin
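
With $SPARK_HOME/bin on the PATH, spark-submit and spark-shell are available from any directory. A minimal local check, assuming the pre-built spark-2.3.0-bin-hadoop2.7 package above:

spark-submit --version           # prints the Spark 2.3.0 build information
spark-shell --master local[2]    # interactive Scala shell using 2 local cores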


#######nodejs#####
export NODEJS_HOME=/opt/node-v8.11.1-linux-x64
export PATH=$PATH:$NODEJS_HOME/bin
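
Node.js only needs its bin directory on the PATH; both node and npm should then resolve:

node -v    # should print v8.11.1
npm -v     # prints the npm version bundled with that Node.js release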


########Hadoop#######
export HADOOP_HOME=/opt/hadoop-2.7.6
export PATH=$PATH:$HADOOP_HOME/bin
export PATH=$PATH:$HADOOP_HOME/sbin
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export HADOOP_COMMON_HOME=$HADOOP_HOME
export HADOOP_HDFS_HOME=$HADOOP_HOME
export YARN_HOME=$HADOOP_HOME
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export HADOOP_OPTS="-Djava.library.path=$HADOOP_HOME/lib/native"
export JAVA_LIBRARY_PATH=$HADOOP_HOME/lib/native:$JAVA_LIBRARY_PATH
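
Since both $HADOOP_HOME/bin and $HADOOP_HOME/sbin are on the PATH, the hadoop, hdfs and yarn commands and the start/stop scripts can be run directly. checknative is a convenient way to confirm that the native-library settings above are picked up:

hadoop version           # prints the Hadoop 2.7.6 build
hadoop checknative -a    # checks that the libraries under lib/native are loaded
hdfs dfs -ls /           # lists the HDFS root once the daemons are running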

########spark ipython notebook############
export PYSPARK_DRIVER_PYTHON=ipython
export PYSPARK_DRIVER_PYTHON_OPTS="notebook"
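
With these two variables set, running pyspark starts the driver inside an IPython/Jupyter notebook rather than the plain Python REPL; this assumes ipython with notebook support is installed for the Python that PySpark uses. Unset both variables to get the console shell back.

pyspark --master local[2]    # opens a notebook in the browser; SparkContext is available as sc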

#######Hbase############
export HBASE_HOME=/opt/hbase-2.0.0
export PATH=$PATH:$HBASE_HOME/bin
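
And a final check that the HBase binaries resolve from the new PATH entry:

which hbase      # should resolve to /opt/hbase-2.0.0/bin/hbase
hbase version    # should report HBase 2.0.0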
