HDFS Java API: creating and deleting directories, reading and writing files

1. In IntelliJ IDEA, create a Maven project from the org.apache.maven.archetypes:maven-archetype-quickstart archetype (set archetypeCatalog to internal) to get a plain Java project; an App class is generated by default.

2. Add the dependency

    <!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-client -->
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-client</artifactId>
      <version>2.6.0</version>
    </dependency>
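
For orientation, the dependency goes inside the <dependencies> element of the project's pom.xml. A minimal sketch of the full file is shown below; the groupId and artifactId are placeholders, not values from the original post — keep whatever the archetype generated.

    <project xmlns="http://maven.apache.org/POM/4.0.0"
             xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
             xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
                                 http://maven.apache.org/xsd/maven-4.0.0.xsd">
      <modelVersion>4.0.0</modelVersion>

      <!-- placeholder coordinates; keep whatever the archetype generated -->
      <groupId>com.example</groupId>
      <artifactId>hdfs-demo</artifactId>
      <version>1.0-SNAPSHOT</version>

      <dependencies>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-client</artifactId>
          <version>2.6.0</version>
        </dependency>
      </dependencies>
    </project>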

3. Add log4j.properties (on the classpath, e.g. under src/main/resources)

# Global configuration
log4j.rootLogger=ERROR,stdout
# Hadoop logging level
log4j.logger.org.apache.hadoop=DEBUG
# Console appender configuration
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%5p [%t] - %m%n

4. File and directory operations

package Hadoop;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

import java.io.IOException;

/**
 * HDFS file and directory operations via the FileSystem API.
 */
public class App {
    private static final Configuration configuration = new Configuration();

    static {
        // NameNode address of the target cluster
        configuration.set("fs.defaultFS", "hdfs://192.168.255.128:9000");
        // run as the HDFS user "root" to avoid permission errors
        System.setProperty("HADOOP_USER_NAME", "root");
        // Windows only: home of the local Hadoop binaries (bin\winutils.exe)
        System.setProperty("hadoop.home.dir", "E:/hadoop2.6");
    }

    public static void main( String[] args ) throws IOException {
        //createDir();
        //list();
        //uploadFile();
        //readContent();
        deleteDir();
    }

    /**
     * Create a directory
     * @throws IOException
     */
    private static void createDir() throws IOException {
        FileSystem fs = FileSystem.get(configuration);
        fs.mkdirs(new Path("/hdfs2"));
        fs.close();
        System.out.println("Directory created");
    }

    /**
     * Upload a file (copy a local file to HDFS)
     * @throws IOException
     */
    private static void uploadFile() throws IOException {
        FileSystem fs = FileSystem.get(configuration);
        Path srcPath = new Path("E:/1.txt");
        Path targetPath = new Path("/hdfs/");
        fs.copyFromLocalFile(srcPath, targetPath);
        fs.close();
        System.out.println("Upload complete");
    }

    /**
     * List the files and directories under an HDFS path
     * @throws IOException
     */
    private static void list() throws IOException {
        FileSystem fs = FileSystem.get(configuration);
        FileStatus[] fileStatuses = fs.listStatus(new Path("/"));
        for (FileStatus fileStatus : fileStatuses) {
            String type = fileStatus.isDirectory() ? "directory" : "file";
            String name = fileStatus.getPath().getName();
            System.out.println(type + "---" + name);
        }
        fs.close();
    }

    /**
     * Read the contents of a file and print them to stdout
     * @throws IOException
     */
    private static void readContent() throws IOException {
        FileSystem fs = FileSystem.get(configuration);
        FSDataInputStream stream = fs.open(new Path("/input/input2.txt"));
        // copy with close=false so System.out is not closed along with the
        // input, then close the HDFS stream explicitly
        IOUtils.copyBytes(stream, System.out, 1024, false);
        IOUtils.closeStream(stream);
        fs.close();
    }

    /**
     * Delete a directory
     * @throws IOException
     */
    private static void deleteDir() throws IOException {
        FileSystem fs = FileSystem.get(configuration);
        // second argument true = delete recursively
        fs.delete(new Path("/hdfs2"), true);
        fs.close();
        System.out.println("Directory deleted");
    }
}
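
The title also promises writing, but the code above only uploads an existing local file. Below is a minimal sketch of writing bytes directly to an HDFS file with FileSystem.create(); the method name writeFile and the path /hdfs/hello.txt are illustrative, not from the original post. To drop it into the App class, also add an import for org.apache.hadoop.fs.FSDataOutputStream.

    /**
     * Write content directly to an HDFS file (illustrative sketch;
     * the target path is an example)
     * @throws IOException
     */
    private static void writeFile() throws IOException {
        FileSystem fs = FileSystem.get(configuration);
        // create() returns an output stream, overwriting the file if it
        // already exists; check fs.exists() first if that matters
        FSDataOutputStream out = fs.create(new Path("/hdfs/hello.txt"));
        out.write("hello hdfs".getBytes("UTF-8"));
        out.close();
        fs.close();
        System.out.println("Write complete");
    }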

For reference, E:\hadoop2.6\bin contains the Hadoop Windows binaries (winutils.exe and its companion DLLs, shown in a screenshot in the original post); the hadoop.home.dir property above must point at their parent directory, E:/hadoop2.6.


Reposted from blog.csdn.net/Milan__Kundera/article/details/83752792