HDFS Upload and Download API
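
Both examples below assume the Hadoop client library is on the classpath (Maven coordinates org.apache.hadoop:hadoop-client; the version should match your cluster, which is not specified in the original post) and that the NameNode is reachable at the address set in fs.defaultFS.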

  HDFS upload example:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

import java.io.*;

public class HDFSUpload {

    public static void main(String[] args) throws IOException {

        //Run the client as the HDFS user "root" to avoid permission errors
        System.setProperty("HADOOP_USER_NAME", "root");

        //Step 1: load the configuration and build the client
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://192.168.37.129:9000");
        FileSystem client = FileSystem.get(conf);

        //Step 2: build the input stream from the local file
        File file = new File("D:\\test.txt");
        InputStream input = new FileInputStream(file);

        //Step 3: build the output stream to the HDFS path
        OutputStream output = client.create(new Path("/0114/helloworld.txt"));

        //Step 4: copy the bytes with IOUtils
        IOUtils.copyBytes(input, output, 1024);
        System.out.println("ok");

        //Step 5: close the streams and the client
        input.close();
        output.close();
        client.close();


    }

}
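
As a side note, the same upload can be done without manual streams by using FileSystem's copyFromLocalFile helper. The sketch below is an illustration rather than part of the original example; the class name HDFSUploadSimple is made up here, and the paths and cluster address are reused from above:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;

public class HDFSUploadSimple {

    public static void main(String[] args) throws IOException {
        System.setProperty("HADOOP_USER_NAME", "root");

        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://192.168.37.129:9000");
        FileSystem client = FileSystem.get(conf);

        //copyFromLocalFile uploads the local file to the given HDFS path in one call
        client.copyFromLocalFile(new Path("D:\\test.txt"), new Path("/0114/helloworld.txt"));

        client.close();
    }
}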

HDFS download example:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

import java.io.*;

public class HDFSDownload {

    public static void main(String[] args) throws IOException {
        //Run the client as the HDFS user "root" to avoid permission errors
        System.setProperty("HADOOP_USER_NAME", "root");

        //Step 1: load the configuration and build the client
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://192.168.37.129:9000");
        FileSystem client = FileSystem.get(conf);

        //Step 2: build the input stream from the HDFS path
        InputStream input = client.open(new Path("/0114/helloworld.txt"));

        //Step 3: build the output stream to the local file
        OutputStream output = new FileOutputStream("D:\\test_download.txt");

        //Step 4: copy the bytes with IOUtils
        IOUtils.copyBytes(input, output, 1024);
        System.out.println("ok");

        //Step 5: close the streams and the client
        input.close();
        output.close();
        client.close();
    }

}
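
Similarly, a download can use FileSystem's copyToLocalFile helper instead of copying streams by hand. Again a minimal sketch, with the class name HDFSDownloadSimple made up and the paths reused from above:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;

public class HDFSDownloadSimple {

    public static void main(String[] args) throws IOException {
        System.setProperty("HADOOP_USER_NAME", "root");

        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://192.168.37.129:9000");
        FileSystem client = FileSystem.get(conf);

        //copyToLocalFile downloads the HDFS file to the given local path in one call
        client.copyToLocalFile(new Path("/0114/helloworld.txt"), new Path("D:\\test_download.txt"));

        client.close();
    }
}

On a Windows client, copyToLocalFile writes through the local file system and may require the Hadoop native binaries (winutils); the stream-based version above avoids that dependency.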


Reposted from www.cnblogs.com/cmbk/p/10274203.html