Start HDFS
cd /usr/local/hadoop
./sbin/start-dfs.sh
Create test1.txt and test2.txt, writing "hello how are you?" and "i like running" into them respectively
echo "hello how are you?" > test1.txt
echo "i like running" > test2.txt
Create an empty file test3.txt in HDFS
./bin/hdfs dfs -touchz test3.txt
Upload test1.txt and test2.txt from the local file system to HDFS
./bin/hdfs dfs -put ./test2.txt
./bin/hdfs dfs -put ./test1.txt
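When -put is given only a source path, the file is uploaded into the current HDFS working directory, i.e. the user's home directory (assumed here to be /user/hadoop). A plain listing should then show both files:
./bin/hdfs dfs -ls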
Append the contents of the local test1.txt to the end of test2.txt in HDFS
./bin/hdfs dfs -appendToFile test1.txt test2.txt
Display the contents of the test2.txt file in HDFS
./bin/hdfs dfs -cat test2.txt
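Based on the two echo commands above, and assuming the append succeeded, the -cat output should show the original line of test2.txt followed by the appended line of test1.txt:
i like running
hello how are you?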
Copy the local test1.txt into HDFS as test3.txt, overwriting the existing file (-f forces the overwrite)
./bin/hdfs dfs -copyFromLocal -f test1.txt test3.txt
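Without -f, copyFromLocal refuses to overwrite an existing destination. A quick way to see this (assuming test3.txt already exists in HDFS from the -touchz step above):
./bin/hdfs dfs -copyFromLocal test1.txt test3.txt    # fails because test3.txt already exists; -f is required to overwrite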
Download text.txt from HDFS; if a local file with the same name already exists in /usr/local/hadoop, save the download as text2.txt instead
if ./bin/hdfs dfs -test -e file:///usr/local/hadoop/text.txt;
then ./bin/hdfs dfs -copyToLocal text.txt ./text2.txt;
else ./bin/hdfs dfs -copyToLocal text.txt ./text.txt;
fi
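hdfs dfs -test prints nothing; it only sets the exit status, which is what the if statement checks. The status can also be inspected directly, for example:
./bin/hdfs dfs -test -e text.txt
echo $?    # 0 if text.txt exists in HDFS, non-zero otherwise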
The results are as follows
Contents of test1.txt
Contents of test4.txt
./bin/hdfs dfs -cat test2.txt
Display the details of test2.txt in HDFS (permissions, size, modification time), with a human-readable size
./bin/hdfs dfs -ls -h test2.txt
Recursively list all files and directories under /user/hadoop in HDFS
./bin/hdfs dfs -ls -R -h /user/hadoop
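The -h flag only affects how sizes are displayed (human-readable units such as K or M); dropping it prints exact byte counts, which is easier to parse in scripts:
./bin/hdfs dfs -ls -R /user/hadoop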
Create test1.txt inside the HDFS directory /usr/local/hadoop/test, creating the directory first if it does not exist
if ./bin/hdfs dfs -test -d /usr/local/hadoop/test;
then ./bin/hdfs dfs -touchz /usr/local/hadoop/test/test1.txt;
else ./bin/hdfs dfs -mkdir -p /usr/local/hadoop/test && ./bin/hdfs dfs -touchz /usr/local/hadoop/test/test1.txt;
fi
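A recursive listing of the new directory confirms that test/test1.txt was created (assuming the commands above succeeded):
./bin/hdfs dfs -ls -R /usr/local/hadoop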
Move test1.txt into the HDFS directory input, then list input to verify the move
./bin/hdfs dfs -mv test1.txt input
./bin/hdfs dfs -ls input