start-all.sh脚本现在已经废弃,推荐使用start-dfs.sh和start-yarn.sh分别启动HDFS和YARN。
1 | --config /home/orange/hadoop-2.0.0-alpha/etc/hadoop --hostnames localhost --script /home/orange/hadoop-2.0.0-alpha/sbin/hdfs start namenode
1 | exec "$bin/slaves.sh" --config $HADOOP_CONF_DIR cd "$HADOOP_PREFIX" \; "$bin/hadoop-daemon.sh" --config $HADOOP_CONF_DIR "$@"
1 | --config /home/orange/hadoop-2.0.0-alpha/etc/hadoop cd /home/orange/hadoop-2.0.0-alpha ; /home/orange/hadoop-2.0.0-alpha/sbin/hadoop-daemon.sh --config /home/orange/hadoop-2.0.0-alpha/etc/hadoop --script /home/orange/hadoop-2.0.0-alpha/sbin/hdfs start namenode
1 | localhost: --config /home/orange/hadoop-2.0.0-alpha/etc/hadoop --script /home/orange/hadoop-2.0.0-alpha/sbin/hdfs start namenode
1 | nohup nice -n $HADOOP_NICENESS $hdfsScript --config $HADOOP_CONF_DIR $command "$@" > "$log" 2>&1 < /dev/null &
1 | if [ -f ~/.bashrc ]; then
2 |   . ~/.bashrc
3 | fi
1 | "$bin"/yarn-daemon.sh --config $YARN_CONF_DIR start resourcemanager
1 | nohup nice -n $YARN_NICENESS "$YARN_HOME"/bin/yarn --config $YARN_CONF_DIR $command "$@" > "$log" 2>&1 < /dev/null &
1 | "$bin"/yarn-daemons.sh --config $YARN_CONF_DIR start nodemanager
1 | exec "$bin/slaves.sh" --config $YARN_CONF_DIR cd "$YARN_HOME" \; "$bin/yarn-daemon.sh" --config $YARN_CONF_DIR "$@"