编程实现如下指定功能,并利用 Hadoop 提供的 Shell 命令完成相同任务(Linux/HDFS 文件操作):
# Move into the Hadoop installation directory so the relative
# ./bin and ./sbin paths below resolve.
cd /usr/local/hadoop

# Create an (empty) local test file and display its contents.
touch linux.txt
cat linux.txt

# Start HDFS, create the user home directory and an input directory,
# then upload the local file. (Originally all four commands were
# jammed onto a single line; each must run as its own command.)
./sbin/start-dfs.sh
./bin/hdfs dfs -mkdir -p /user/hadoop
./bin/hdfs dfs -mkdir input
./bin/hdfs dfs -put ./linux.txt input

# List the HDFS home directory and the input directory.
./bin/hdfs dfs -ls
./bin/hdfs dfs -ls input

# Show the uploaded file's contents.
./bin/hdfs dfs -cat input/linux.txt

# Download the file to the local home directory BEFORE deleting it.
# (Original fetched "input/test.txt", which was never uploaded — the
# file put above is linux.txt — and did so after the delete.)
./bin/hdfs dfs -get input/linux.txt ~/linux.txt

# Delete the file from HDFS. (Original used the invalid flag pair
# "-rm -ls"; "-rm" alone is the delete command.)
./bin/hdfs dfs -rm input/linux.txt
# If text.txt already exists in HDFS, append local.txt to it;
# otherwise upload local.txt as text.txt (-f overwrites).
# Fix: the original wrapped every command in $(...), which executes
# the command's *output* as a new command — put the commands in the
# `if` directly so the exit status of `hdfs dfs -test` is used.
if hdfs dfs -test -e text.txt; then
  hdfs dfs -appendToFile local.txt text.txt
else
  hdfs dfs -copyFromLocal -f local.txt text.txt
fi
# Download text.txt from HDFS without clobbering an existing local
# copy: if the local file (addressed via the file:// scheme, so the
# test runs against the local filesystem) already exists, save the
# download as text2.txt instead.
# Fix: removed the $(...) wrappers, which would execute each
# command's output rather than test its exit status.
if hdfs dfs -test -e file:///usr/hadoop/text.txt; then
  hdfs dfs -copyToLocal text.txt ./text2.txt
else
  hdfs dfs -copyToLocal text.txt ./text.txt
fi
# Print the contents of text.txt (relative paths resolve under the
# user's HDFS home directory).
hdfs dfs -cat text.txt
# Show the file's permissions, size (human-readable, -h) and
# modification time.
hdfs dfs -ls -h text.txt
# Recursively (-R) list every file and directory under /user/hadoop.
hdfs dfs -ls -R -h /user/hadoop
# Create the empty file dir1/dir2/filename in HDFS, creating the
# parent directories first when they do not yet exist.
# Fix: removed the $(...) wrappers (they execute command *output*),
# and moved the trailing "delete the file" note — which was fused
# onto the same line as the `fi` — to its own commented command.
if hdfs dfs -test -d dir1/dir2; then
  hdfs dfs -touchz dir1/dir2/filename
else
  hdfs dfs -mkdir -p dir1/dir2
  hdfs dfs -touchz dir1/dir2/filename
fi
# Delete the file:
hdfs dfs -rm dir1/dir2/filename
# Directory operations. (Original fused the Chinese labels and all
# three commands onto one line, which is not valid shell; labels are
# now comments, one command per line.)
# Create a directory, including missing parents:
hdfs dfs -mkdir -p dir1/dir2
# Remove the directory; if it is not empty this reports "not empty"
# and does NOT delete:
hdfs dfs -rmdir dir1/dir2
# Force-remove the directory and everything under it:
hdfs dfs -rm -R dir1/dir2
# Append local.txt to the END of an HDFS file:
hdfs dfs -appendToFile local.txt text.txt

# Append to the BEGINNING of an HDFS file. There is no direct
# command, so: download the file, append its contents after the
# local file's contents, then re-upload the combined file with -f
# to overwrite the original.
# Fixes: the three commands were run together on one line, and the
# final upload named text.txt as the source — the combined content
# is in local.txt.
hdfs dfs -get text.txt
cat text.txt >> local.txt
hdfs dfs -copyFromLocal -f local.txt text.txt
# Delete a file from HDFS:
hdfs dfs -rm text.txt
# Remove a directory; if it is not empty this reports "not empty"
# and does NOT delete (label was fused onto the command line in the
# original — now a comment):
hdfs dfs -rmdir dir1/dir2
# Force-remove a directory and all of its contents:
hdfs dfs -rm -R dir1/dir2
# Rename/move a file within HDFS:
hdfs dfs -mv text.txt text2.txt