1 Batch start and stop
1.1 start-all.sh
# Start all hadoop daemons. Run this on master node.
bin=`dirname "$0"`
bin=`cd "$bin"; pwd`
# load the shared environment configuration
. "$bin"/hadoop-config.sh
# start dfs daemons
"$bin"/start-dfs.sh --config $HADOOP_CONF_DIR
# start mapred daemons
"$bin"/start-mapred.sh --config $HADOOP_CONF_DIR
1.2 stop-all.sh
"$bin"/stop-mapred.sh --config $HADOOP_CONF_DIR
"$bin"/stop-dfs.sh --config $HADOOP_CONF_DIR
2 Starting and stopping HDFS
2.1 start-dfs.sh
# start dfs daemons
# start namenode after datanodes, to minimize time namenode is up w/o data
# note: datanodes will log connection errors until namenode starts
"$bin"/ --config $HADOOP_CONF_DIR start namenode $nameStartOpt
"$bin"/ --config $HADOOP_CONF_DIR start datanode $dataStartOpt
"$bin"/ --config $HADOOP_CONF_DIR --hosts masters start secondarynamenode
2.2 stop-dfs.sh
"$bin"/hadoop-daemon.sh --config $HADOOP_CONF_DIR stop namenode
"$bin"/hadoop-daemons.sh --config $HADOOP_CONF_DIR stop datanode
"$bin"/hadoop-daemons.sh --config $HADOOP_CONF_DIR --hosts masters stop secondarynamenode
3 Starting and stopping MapReduce
3.1 start-mapred.sh
# start mapred daemons
# start jobtracker first to minimize connection errors at startup
"$bin"/ --config $HADOOP_CONF_DIR start jobtracker
"$bin"/ --config $HADOOP_CONF_DIR start tasktracker
3.2 stop-mapred.sh
"$bin"/hadoop-daemon.sh --config $HADOOP_CONF_DIR stop jobtracker
"$bin"/hadoop-daemons.sh --config $HADOOP_CONF_DIR stop tasktracker
4 Starting and stopping the Balancer
4.1 start-balancer.sh
"$bin"/hadoop-daemon.sh --config $HADOOP_CONF_DIR start balancer $@
4.2 stop-balancer.sh
"$bin"/hadoop-daemon.sh --config $HADOOP_CONF_DIR stop balancer
5 Per-host daemon script (hadoop-daemon.sh)
5.1 start
nohup nice -n $HADOOP_NICENESS "$HADOOP_HOME"/bin/hadoop --config $HADOOP_CONF_DIR $command "$@" > "$log" 2>&1 < /dev/null &
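This single line is what daemonizes the server: nohup detaches it from the terminal, nice -n $HADOOP_NICENESS lowers its scheduling priority, stdout/stderr are redirected to the per-daemon log file, and stdin is closed. The full script then records the background pid for the stop branch, roughly:
echo $! > $pid   # remember the daemon's process id so 'stop' can find it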
5.2 stop
echo stopping $command
kill `cat $pid`
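In the full script this is guarded so a missing or stale pid file does not trigger a spurious kill; a sketch of that pattern:
if [ -f $pid ]; then
  if kill -0 `cat $pid` > /dev/null 2>&1; then   # is the recorded process still alive?
    echo stopping $command
    kill `cat $pid`
  else
    echo no $command to stop
  fi
else
  echo no $command to stop
fi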
6 Slave-hosts script (hadoop-daemons.sh)
# Run a Hadoop command on all slave hosts.
exec "$bin/slaves.sh" --config $HADOOP_CONF_DIR cd "$HADOOP_HOME" \; "$bin/hadoop-daemon.sh" --config $HADOOP_CONF_DIR "$@"
7 The hadoop command dispatch script (bin/hadoop)
Each command name is mapped to the Java class that implements it.
7.1 The six server classes
7.1.1 The NameNode server
# figure out which class to run
if [ "$COMMAND" = "namenode" ] ; then
CLASS='org.apache.hadoop.hdfs.server.namenode.NameNode'
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_NAMENODE_OPTS"
7.1.2 The SecondaryNameNode server
elif [ "$COMMAND" = "secondarynamenode" ] ; then
CLASS='org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode'
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_SECONDARYNAMENODE_OPTS"
7.1.3 The DataNode server
elif [ "$COMMAND" = "datanode" ] ; then
CLASS='org.apache.hadoop.hdfs.server.datanode.DataNode'
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_DATANODE_OPTS"
7.1.4 The JobTracker server
elif [ "$COMMAND" = "jobtracker" ] ; then
CLASS=org.apache.hadoop.mapred.JobTracker
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_JOBTRACKER_OPTS"
7.1.5 The TaskTracker server
elif [ "$COMMAND" = "tasktracker" ] ; then
CLASS=org.apache.hadoop.mapred.TaskTracker
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_TASKTRACKER_OPTS"
7.1.6 The Balancer (load balancing) server
elif [ "$COMMAND" = "balancer" ] ; then
CLASS=org.apache.hadoop.hdfs.server.balancer.Balancer
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_BALANCER_OPTS"
7.2 Other client commands
7.2.1 fs - FsShell
elif [ "$COMMAND" = "fs" ] ; then
CLASS=org.apache.hadoop.fs.FsShell
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
7.2.2 dfs - FsShell
elif [ "$COMMAND" = "dfs" ] ; then
CLASS=org.apache.hadoop.fs.FsShell
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
7.2.3 dfsadmin - DFSAdmin
elif [ "$COMMAND" = "dfsadmin" ] ; then
CLASS=org.apache.hadoop.hdfs.tools.DFSAdmin
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
7.2.4 fsck - DFSck
elif [ "$COMMAND" = "fsck" ] ; then
CLASS=org.apache.hadoop.hdfs.tools.DFSck
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
7.2.5 job - JobClient
elif [ "$COMMAND" = "job" ] ; then
CLASS=org.apache.hadoop.mapred.JobClient
7.2.6 queue - JobQueueClient
elif [ "$COMMAND" = "queue" ] ; then
CLASS=org.apache.hadoop.mapred.JobQueueClient
7.2.7 pipes - Submitter
elif [ "$COMMAND" = "pipes" ] ; then
CLASS=org.apache.hadoop.mapred.pipes.Submitter
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
7.2.8 version - VersionInfo
elif [ "$COMMAND" = "version" ] ; then
CLASS=org.apache.hadoop.util.VersionInfo
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
7.2.9 jar - JobShell
elif [ "$COMMAND" = "jar" ] ; then
CLASS=org.apache.hadoop.mapred.JobShell
7.2.10 distcp - DistCp
elif [ "$COMMAND" = "distcp" ] ; then
CLASS=org.apache.hadoop.tools.DistCp
CLASSPATH=${CLASSPATH}:${TOOL_PATH}
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
7.2.11 daemonlog - LogLevel
elif [ "$COMMAND" = "daemonlog" ] ; then
CLASS=org.apache.hadoop.log.LogLevel
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
7.2.12 archive - HadoopArchives
elif [ "$COMMAND" = "archive" ] ; then
CLASS=org.apache.hadoop.tools.HadoopArchives
CLASSPATH=${CLASSPATH}:${TOOL_PATH}
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
7.2.13 sampler - InputSampler
elif [ "$COMMAND" = "sampler" ] ; then
CLASS=org.apache.hadoop.mapred.lib.InputSampler
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
else
CLASS=$COMMAND
fi
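Once $CLASS is resolved, the script finishes by exec-ing the JVM with the accumulated options and classpath; in this generation of Hadoop the launch line is essentially:
# run it
exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"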
8 Environment setup (hadoop-config.sh)
# the root of the Hadoop installation
export HADOOP_HOME=`dirname "$this"`/..
# Allow alternate conf dir location.
HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_HOME/conf}"
export HADOOP_SLAVES="${HADOOP_CONF_DIR}/$slavesfile"
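Because HADOOP_CONF_DIR only falls back to $HADOOP_HOME/conf when it is unset, an alternate configuration tree can be chosen per invocation, either by exporting the variable or with the --config flag the scripts accept, e.g.:
bin/start-dfs.sh --config /etc/hadoop-test   # /etc/hadoop-test is a hypothetical alternate conf dir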