【Hadoop】- Basic Usage of the HDFS API

HDFS is the distributed file system designed for Hadoop. This article walks through the basic usage of the HDFS Java API.

①: Common code: configure the HDFS access endpoint

// NameNode address of the target cluster; adjust the host and port to your setup
private static String hdfsPath = "hdfs://192.168.174.131:9090";
private static Configuration conf = new Configuration();
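The snippets below additionally rely on the following imports (from the JDK and the Hadoop client libraries):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.io.IOUtils;

As a minimal alternative sketch, the endpoint can also be set once on the Configuration via the fs.defaultFS key, after which the URI argument to FileSystem.get() may be dropped:

// Equivalent setup: make hdfsPath the default file system
conf.set("fs.defaultFS", hdfsPath);
FileSystem hdfs = FileSystem.get(conf); // same as FileSystem.get(URI.create(hdfsPath), conf)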

②: Get information about the DataNodes in the HDFS cluster

public static DatanodeInfo[] getDataNodeInfos() {
    FileSystem hdfs = null;
    DatanodeInfo[] datanodeInfos = null;
    try {
        hdfs = FileSystem.get(URI.create(hdfsPath), conf);
        // The cast succeeds for hdfs:// URIs; getDataNodeStats() returns one entry per DataNode
        DistributedFileSystem dbfs = (DistributedFileSystem) hdfs;
        datanodeInfos = dbfs.getDataNodeStats();
        hdfs.close();
    } catch (IOException e) {
        return null;
    }
    return datanodeInfos;
}
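A quick usage sketch (the printed fields are illustrative; getHostName() and getCapacity() are standard DatanodeInfo accessors):

DatanodeInfo[] nodes = getDataNodeInfos();
if (nodes != null) {
    for (DatanodeInfo node : nodes) {
        System.out.println(node.getHostName() + " capacity=" + node.getCapacity());
    }
}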

③: Check whether a target file exists in HDFS

public static boolean judgeFileExist(String hdfsFile) {
    FileSystem hdfs = null;
    boolean isSuccess = false;
    try {
        hdfs = FileSystem.get(URI.create(hdfsPath), conf);
        isSuccess = hdfs.exists(new Path(hdfsFile));
        hdfs.close();
    } catch (IOException e) {
        return false;
    }
    return isSuccess;
}
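Example call (the path is hypothetical):

if (judgeFileExist("/user/hadoop/test.txt")) {
    System.out.println("file already exists on HDFS");
}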

④: List the files under a specified HDFS directory

public static FileStatus[] getFilesByDir(String hdfsFileDir) {
    FileSystem hdfs = null;
    FileStatus[] fileStatus = null;
    try {
        hdfs = FileSystem.get(URI.create(hdfsPath), conf);
        fileStatus = hdfs.listStatus(new Path(hdfsFileDir));
        hdfs.close();
    } catch (IOException e) {
        return null;
    }
    return fileStatus;
}
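A usage sketch that distinguishes files from directories (the directory path is hypothetical):

FileStatus[] files = getFilesByDir("/user/hadoop");
if (files != null) {
    for (FileStatus status : files) {
        System.out.println((status.isDirectory() ? "dir  " : "file ") + status.getPath());
    }
}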

⑤: Create a directory in the HDFS file system

public static boolean mkdirFromHdfs(String hdfsFileDir) {
    FileSystem hdfs = null;
    boolean isSuccess = false;
    try {
        hdfs = FileSystem.get(URI.create(hdfsPath), conf);
        // mkdirs() also creates any missing parent directories, like mkdir -p
        isSuccess = hdfs.mkdirs(new Path(hdfsFileDir));
        hdfs.close();
    } catch (IOException e) {
        return false;
    }
    return isSuccess;
}
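Because of the mkdir -p behavior, a nested path works in one call (path hypothetical):

boolean created = mkdirFromHdfs("/user/hadoop/logs/2020");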

⑥: Write text content to a specified HDFS file

public static boolean writeInfoToHdfsFile(String dest, byte[] content) {
    FileSystem hdfs = null;
    FSDataOutputStream fsDataOutputStream = null;
    try {
        hdfs = FileSystem.get(URI.create(hdfsPath), conf);
        // create() overwrites the target file if it already exists
        fsDataOutputStream = hdfs.create(new Path(dest));
        fsDataOutputStream.write(content);
        fsDataOutputStream.flush();
        fsDataOutputStream.close();
        hdfs.close();
    } catch (IOException e) {
        return false;
    }
    return true;
}
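Example call (path and text are hypothetical; StandardCharsets, imported above, avoids a checked encoding exception):

writeInfoToHdfsFile("/user/hadoop/hello.txt",
        "hello hdfs".getBytes(StandardCharsets.UTF_8));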

⑦: Read the contents of a text file in HDFS

public static String readFile(String hdfsFilePath) throws IOException {
    FileSystem hdfs = null;
    InputStream is = null;
    // Collect raw bytes first and decode once at the end, so multi-byte
    // characters split across buffer boundaries are not corrupted
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    try {
        hdfs = FileSystem.get(URI.create(hdfsPath), conf);
        is = hdfs.open(new Path(hdfsFilePath));
        int length;
        byte[] buf = new byte[1024];
        while ((length = is.read(buf)) != -1) {
            bos.write(buf, 0, length);
        }
    } finally {
        IOUtils.closeStream(is);
        if (hdfs != null) { // guard against FileSystem.get() having failed
            hdfs.close();
        }
    }
    return bos.toString("UTF-8");
}
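Example call (hypothetical path); note that unlike the other helpers, this method propagates IOException to the caller:

String content = readFile("/user/hadoop/hello.txt");
System.out.println(content);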

⑧: Delete a specified HDFS file

public static boolean deleteHdfsFile(String hdfsFilePath) {
    FileSystem hdfs = null;
    boolean isSuccess = false;
    try {
        hdfs = FileSystem.get(URI.create(hdfsPath), conf);
        // delete(path, true) removes the target immediately and recursively;
        // deleteOnExit() would only schedule deletion for when the FileSystem closes
        isSuccess = hdfs.delete(new Path(hdfsFilePath), true);
        hdfs.close();
    } catch (IOException e) {
        return false;
    }
    return isSuccess;
}
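Example call (hypothetical path); because the delete is recursive, this also removes an entire directory tree:

boolean deleted = deleteHdfsFile("/user/hadoop/old-logs");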

⑨: Rename an HDFS file

public static boolean renameHdfsFile(String oldName, String newName) {
    FileSystem hdfs = null;
    boolean isSuccess = false;
    try {
        hdfs = FileSystem.get(URI.create(hdfsPath), conf);
        isSuccess = hdfs.rename(new Path(oldName), new Path(newName));
        hdfs.close();
    } catch (IOException e) {
        return false;
    }
    return isSuccess;
}
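rename() also works as a move when the two paths are in different directories (paths hypothetical; the target's parent directory must already exist):

renameHdfsFile("/user/hadoop/a.txt", "/user/hadoop/archive/a.txt");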

⑩: Upload a local file to the HDFS file system

public static boolean uploadLocalFileToHDFS(boolean delSrc, boolean override, String src, String dest) {
    FileSystem hdfs = null;
    try {
        hdfs = FileSystem.get(URI.create(hdfsPath), conf);
        // Note: dest may be a full HDFS path; if only a name is given,
        // the file lands in the current user's home directory on HDFS
        hdfs.copyFromLocalFile(delSrc, override, new Path(src), new Path(dest));
        hdfs.close();
    } catch (IOException e) {
        return false;
    }
    return true;
}
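Example call (local and HDFS paths are hypothetical): delSrc=false keeps the local copy, and override=true replaces an existing target:

uploadLocalFileToHDFS(false, true, "/tmp/report.txt", "/user/hadoop/report.txt");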

Note: you may run into "Permission denied" errors. Granting access on the HDFS path fixes this, for example: hdfs dfs -chmod -R 777 <hdfs path> (the older hadoop dfs form of the command is deprecated). Mode 777 is convenient for testing, but prefer tighter permissions in production.
