Creating a Directory in HDFS from IDEA

1. First download Maven and configure the Maven environment variables, then configure Maven in IDEA.
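For reference, a minimal sketch of the Maven environment variables on Linux (the install path /opt/apache-maven-3.6.3 is an example; adjust it to wherever you unpacked Maven):

# Add these lines to ~/.bashrc or ~/.profile (the path is an assumption)
export M2_HOME=/opt/apache-maven-3.6.3
export PATH=$PATH:$M2_HOME/bin

# Verify the installation
mvn -v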

2. Create a Maven project named hdfs-project.
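If you prefer the command line to the IDEA project wizard, the same skeleton can be generated with Maven's quickstart archetype (the groupId com.hadoop.demo is an assumption, chosen to match the package used below):

mvn archetype:generate -DgroupId=com.hadoop.demo -DartifactId=hdfs-project \
    -DarchetypeArtifactId=maven-archetype-quickstart -DinteractiveMode=false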

    Import the required dependencies in the project's pom.xml:

<dependencies>
	<dependency>
		<groupId>junit</groupId>
		<artifactId>junit</artifactId>
		<version>RELEASE</version>
	</dependency>
	<dependency>
		<groupId>org.apache.logging.log4j</groupId>
		<artifactId>log4j-core</artifactId>
		<version>2.8.2</version>
	</dependency>
	<dependency>
		<groupId>org.apache.hadoop</groupId>
		<artifactId>hadoop-common</artifactId>
		<version>2.7.2</version>
	</dependency>
	<dependency>
		<groupId>org.apache.hadoop</groupId>
		<artifactId>hadoop-client</artifactId>
		<version>2.7.2</version>
	</dependency>
	<dependency>
		<groupId>org.apache.hadoop</groupId>
		<artifactId>hadoop-hdfs</artifactId>
		<version>2.7.2</version>
	</dependency>
	<dependency>
		<groupId>jdk.tools</groupId>
		<artifactId>jdk.tools</artifactId>
		<version>1.8</version>
		<scope>system</scope>
		<systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
	</dependency>
</dependencies>

    Create a new file named log4j.properties under the project's src/main/resources directory:

log4j.rootLogger=INFO, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] - %m%n
log4j.appender.logfile=org.apache.log4j.FileAppender
log4j.appender.logfile.File=target/spring.log
log4j.appender.logfile.layout=org.apache.log4j.PatternLayout
log4j.appender.logfile.layout.ConversionPattern=%d %p [%c] - %m%n

    Create a java folder under the project's src/main/ directory, then create the package com.hadoop.demo and the class HDFSClientDemo in it:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HDFSClientDemo {
    public static void main(String[] args) throws IOException, InterruptedException {
        // Operate on the remote Hadoop cluster
        // Get the file system
        Configuration conf = new Configuration();
        // Set the cluster configuration as key-value pairs (the local machine has no hosts
        // entry for the cluster, so "hdfs://hadoop161:9000" cannot be resolved; use the
        // IP address of the target machine instead)
        conf.set("fs.defaultFS", "hdfs://192.168.12.161:9000");
        // Override the local user and act as the cluster's hadoop user
        System.setProperty("HADOOP_USER_NAME", "hadoop");
        // Get the HDFS client object
        FileSystem fs = FileSystem.get(conf);
        // Create a directory on HDFS
        fs.mkdirs(new Path("/0300/abc"));
        // Close (release) the resource
        fs.close();
        // Confirm that the program has finished
        System.out.println("over");
    }
}
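As an aside, instead of setting HADOOP_USER_NAME as a system property, the FileSystem.get(URI, Configuration, String) overload accepts the remote user name directly. A minimal sketch, assuming the same NameNode address and hadoop user as above:

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HDFSClientDemo2 {
    public static void main(String[] args) throws IOException, InterruptedException, URISyntaxException {
        Configuration conf = new Configuration();
        // Pass the NameNode URI and the remote user name directly;
        // no fs.defaultFS or HADOOP_USER_NAME settings are needed
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.12.161:9000"), conf, "hadoop");
        fs.mkdirs(new Path("/0300/abc"));
        fs.close();
        System.out.println("over");
    }
}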

3. Check the result on HDFS.
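For example, on any machine with a configured HDFS client (such as a cluster node), the new directory can be listed with:

hdfs dfs -ls -R /0300

This should show /0300/abc owned by the hadoop user.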
