学习笔记:从0开始学习大数据-5.hadoop hdfs文件读写api操作

学习测试:网上下载的示例代码,已测试通过,有助于理解 HDFS 文件读写程序的基本流程

package com.linbin.testmaven;

import java.net.URI;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Date;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DistributedFileSystem;


/**
 * Hello world!
 *
 */
/**
 * Minimal HDFS client demo: connects to a NameNode, creates a directory,
 * and recursively lists all files under the root path with their metadata.
 *
 * <p>Requires a reachable HDFS NameNode at {@link #nameNodeUri} and the
 * Hadoop client libraries on the classpath.
 */
public class App 
{
    // Shared HDFS client, initialized in initFileSystem().
    // (Fixed: original declaration ended with a stray double semicolon.)
    public static DistributedFileSystem dfs = new DistributedFileSystem();
    public static String nameNodeUri = "hdfs://centos7:8020";

    // Thread-safe, reusable timestamp formatter (replaces per-call SimpleDateFormat).
    private static final DateTimeFormatter TIMESTAMP_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS").withZone(ZoneId.systemDefault());

    public static void main( String[] args ) throws Exception 
    {
    	System.out.println( "Hello World! how are you?" );
    	App fot = new App();
        try {
            fot.initFileSystem();
            fot.testMkDir();
            fot.testFileList();
        } finally {
            // Release the client connection; the original leaked it.
            dfs.close();
        }
    }

    /**
     * Initializes the shared HDFS client and prints the working directory.
     *
     * @throws Exception if the connection to the NameNode fails
     */
    public void initFileSystem() throws Exception{
        System.out.println("初始化hadoop客户端");
        // Set the Hadoop login user name before connecting.
        System.setProperty("HADOOP_USER_NAME", "root");
        dfs.initialize(new URI(nameNodeUri), new Configuration());
        System.out.println("客户端连接成功");
        Path workingDirectory = dfs.getWorkingDirectory();
        System.out.println("工作目录:"+workingDirectory);
    }

    /**
     * Creates the directory /tmp/bbb (including missing parents) and prints the result.
     *
     * @throws Exception if the RPC to the NameNode fails
     */
    public void testMkDir() throws Exception{
        boolean res = dfs.mkdirs(new Path("/tmp/bbb"));
        System.out.println("目录创建结果:"+(res?"创建成功":"创建失败"));
    }

    /**
     * Recursively lists all files (directories are skipped by listFiles)
     * under the root path and prints permission, owner, group, length,
     * and modification time for each entry.
     *
     * @throws Exception if listing fails
     */
    public void testFileList() throws Exception{
        // listFiles(path, true) recurses and yields files only, never directories.
        RemoteIterator<LocatedFileStatus> listFiles = dfs.listFiles(new Path("/"), true);
        while (listFiles.hasNext()) {
            // next() already returns LocatedFileStatus; the original cast was redundant.
            LocatedFileStatus fileStatus = listFiles.next();
            FsPermission permission = fileStatus.getPermission();
            String owner = fileStatus.getOwner();
            String group = fileStatus.getGroup();
            long len = fileStatus.getLen();
            long modificationTime = fileStatus.getModificationTime();
            Path path = fileStatus.getPath();
            System.out.println("-------------------------------");
            System.out.println("path:"+path);
            System.out.println("permission:"+permission);
            System.out.println("owner:"+owner);
            System.out.println("group:"+group);
            System.out.println("len:"+len);
            System.out.println("modificationTime:"+TIMESTAMP_FORMAT.format(Instant.ofEpochMilli(modificationTime)));
        }
    }
}

测试结果:成功连接 HDFS,创建目录并递归读取了根目录下的文件元数据信息

猜你喜欢

转载自blog.csdn.net/oLinBSoft/article/details/84312900