Hadoop(三)Java操作HDFS

1. Java读取HDFS文件内容

    1.1 在 /root/Downloads/ 目录下创建hello文件,并写入内容

    1.2 把hello文件从Linux上传到HDFS中

    1.3 在Eclipse中创建Java项目并导入jar包(jar包来自Hadoop)

    1.4 输入以下代码,并执行

public class TestHDFSRead {

	/** Full HDFS URL of the file to read. */
	public static final String HDFS_PATH="hdfs://192.168.80.100:9000/hadooptest/hello";

	/**
	 * Reads a file from HDFS through the java.net.URL API and streams
	 * its bytes to standard output.
	 */
	public static void main(String[] args) throws Exception {
		// Register Hadoop's handler factory so java.net.URL can resolve
		// hdfs:// URLs. NOTE: setURLStreamHandlerFactory may only be
		// invoked once per JVM.
		URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
		final InputStream in = new URL(HDFS_PATH).openStream();
		// copyBytes(in, out, bufferSize, close): copies every byte and,
		// because close == true, closes both streams when finished.
		IOUtils.copyBytes(in, System.out, 1024, true);
	}
}

2. Java创建HDFS文件夹

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;

public class TestHDFS {

	/** Base URI of the HDFS NameNode. */
	public static final String HDFS_PATH="hdfs://192.168.80.100:9000";
	// A leading slash is an absolute path from the HDFS root; without it
	// the path is resolved against the user's home directory (/usr/root here).
	public static final String HadoopTestPath="/hadooptest2";

	/**
	 * Connects to HDFS and creates the {@code /hadooptest2} directory.
	 * {@code mkdirs} also creates missing parent directories.
	 */
	public static void main(String[] args) throws Exception {
		// try-with-resources: FileSystem is Closeable and the original
		// code leaked the handle.
		try (FileSystem fileSystem = FileSystem.get(new URI(HDFS_PATH), new Configuration())) {
			fileSystem.mkdirs(new Path(HadoopTestPath));
		}
	}
}

执行后,用指令查询是否有该文件夹:

3. Java上传文件到HDFS

import java.io.FileInputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class TestHDFS {

	/** Base URI of the HDFS NameNode. */
	public static final String HDFS_PATH="hdfs://192.168.80.100:9000";
	// A leading slash is an absolute path from the HDFS root; without it
	// the path is resolved against the user's home directory (/usr/root here).
	public static final String HadoopTestPath="/hadooptest2";
	/** Destination HDFS path of the uploaded file. */
	public static final String HadoopFilePath="/hadooptest2/hello123";

	/**
	 * Uploads the local file d:/hello2.txt to HDFS at /hadooptest2/hello123.
	 */
	public static void main(String[] args) throws Exception {
		// try-with-resources closes all three resources even when an
		// exception occurs mid-way; the original leaked the FileSystem,
		// and leaked `out` if opening the local file threw.
		try (FileSystem fileSystem = FileSystem.get(new URI(HDFS_PATH), new Configuration());
				FSDataOutputStream out = fileSystem.create(new Path(HadoopFilePath));
				FileInputStream in = new FileInputStream("d:/hello2.txt")) {
			// close == false: the try block owns the streams' lifecycle.
			IOUtils.copyBytes(in, out, 1024, false);
		}
	}
}

4. Java下载HDFS文件

public class TestHDFS {

	/** Base URI of the HDFS NameNode. */
	public static final String HDFS_PATH="hdfs://192.168.80.100:9000";
	// A leading slash is an absolute path from the HDFS root; without it
	// the path is resolved against the user's home directory (/usr/root here).
	public static final String HadoopTestPath="/hadooptest2";
	/** HDFS path of the file to download. */
	public static final String HadoopFilePath="/hadooptest2/hello123";

	/**
	 * Downloads /hadooptest2/hello123 from HDFS and writes its bytes
	 * to standard output.
	 */
	public static void main(String[] args) throws Exception {
		// try-with-resources closes FileSystem and the input stream even on
		// error (both were leaked on exception in the original).
		try (FileSystem fileSystem = FileSystem.get(new URI(HDFS_PATH), new Configuration());
				FSDataInputStream in = fileSystem.open(new Path(HadoopFilePath))) {
			// close == false so System.out is NOT closed (the original
			// passed true, which closed stdout).
			IOUtils.copyBytes(in, System.out, 1024, false);
		}
	}
}

5. Java删除HDFS文件

public class TestHDFS {

	/** Base URI of the HDFS NameNode. */
	public static final String HDFS_PATH="hdfs://192.168.80.100:9000";
	// A leading slash is an absolute path from the HDFS root; without it
	// the path is resolved against the user's home directory (/usr/root here).
	public static final String HadoopTestPath="/hadooptest2";
	/** HDFS path of the file (or directory) to delete. */
	public static final String HadoopFilePath="/hadooptest2/hello123";

	/**
	 * Deletes /hadooptest2/hello123 from HDFS.
	 */
	public static void main(String[] args) throws Exception {
		// try-with-resources closes the FileSystem handle (leaked in the
		// original); the dead commented-out code has been removed.
		try (FileSystem fileSystem = FileSystem.get(new URI(HDFS_PATH), new Configuration())) {
			// delete(path, recursive): recursive == true also removes
			// non-empty directories.
			fileSystem.delete(new Path(HadoopFilePath), true);
		}
	}
}

下图第一个红框为删除前,第二个红框为删除后:

相关文章
相关标签/搜索