封装输入输出流,封装对象池

装饰者模式封装对象流

import org.apache.hadoop.fs.FSDataOutputStream;

import java.io.IOException;

/**
 *装饰流
 */
public class MyFSDataOutputStream extends FSDataOutputStream{
    private String path ;
    private FSDataOutputStream out ;

    private HDFSOutputStreamPool pool ;

    public MyFSDataOutputStream(String path , FSDataOutputStream out, HDFSOutputStreamPool pool) throws IOException{
            super(null);
            this.out = out ;
            this.pool = pool ;
    }

    public void close(){
        try{
            out.close();
        }
        catch(Exception e){
            e.printStackTrace();
        }
    }

    public void hflush() throws IOException {
        out.hflush();
    }

    public void write(byte[] b) throws IOException {
        out.write(b);
    }

    public void hsync() throws IOException {
        out.hsync();
    }

    /**
     * 回收
     */
    public void release(){
        pool.putBack(path, this);
    }
}

对象池

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.util.HashMap;
import java.util.Map;

/**
 * Singleton pool of HDFS output streams, keyed by file path, so that an
 * append stream to the same file is reused instead of reopened. All pool
 * operations are synchronized on the singleton instance.
 */
public class HDFSOutputStreamPool {

    private FileSystem fs;

    // Idle output streams available for reuse, keyed by HDFS path.
    private Map<String, FSDataOutputStream> pool = new HashMap<String, FSDataOutputStream>();

    private static HDFSOutputStreamPool instance;

    private HDFSOutputStreamPool() {
        try {
            // Default configuration: picks up core-site.xml/hdfs-site.xml from the classpath.
            Configuration conf = new Configuration();
            fs = FileSystem.get(conf);
        } catch (Exception e) {
            // NOTE(review): a failure here leaves fs == null and every later call will NPE;
            // kept as best-effort logging to preserve the original behavior.
            e.printStackTrace();
        }
    }

    /**
     * Returns the lazily-created singleton.
     * BUGFIX: now synchronized — the unsynchronized check-then-create could
     * construct two pools under concurrent access (and leak the streams held
     * by the losing instance), while every other method here is synchronized.
     */
    public static synchronized HDFSOutputStreamPool getInstance() {
        if (instance == null) {
            instance = new HDFSOutputStreamPool();
        }
        return instance;
    }

    /**
     * Takes (removes) the pooled stream for {@code path}, opening a new append
     * stream if none is pooled; the file is created first when it does not
     * exist. Returns null if HDFS access fails.
     */
    public synchronized FSDataOutputStream takeOutputStream(String path) {
        try{
            FSDataOutputStream out = pool.remove(path);
            if(out == null){
                Path p = new Path(path);
                if(!fs.exists(p)){
                    fs.createNewFile(p);
                }
                out = fs.append(p);
            }
            // Wrap in our decorator so the caller can release() it back to this pool.
            return new MyFSDataOutputStream(path,out,this) ;
        }
        catch(Exception e){
            e.printStackTrace();
        }
        return null ;
    }

    /**
     * Recycles a stream: stores it under its path for the next takeOutputStream(path).
     */
    public synchronized void putBack(String path,FSDataOutputStream out){
        pool.put(path,out) ;
    }

    /**
     * Releases the pool: closes every idle stream and empties the map.
     * Streams currently checked out are not affected.
     */
    public synchronized void releasePool(){
        try{
            for(FSDataOutputStream o : pool.values()){
                o.close();
            }
            pool.clear();
            System.out.println("池子释放了!!!");
        }
        catch(Exception e){
            e.printStackTrace();
        }
    }
}

定时任务:关闭对象池中的输出流

import java.util.TimerTask;
import org.apache.spark.ml.feature.Tokenizer;

/**
 * TimerTask that periodically releases the HDFS output-stream pool,
 * closing every idle pooled stream. (The original comment said "close the
 * thread pool" — it is the stream pool that is released, not a thread pool.)
 */
public class CloseFSOuputStreamTask extends TimerTask{
    @Override
    public void run() {
        HDFSOutputStreamPool pool = HDFSOutputStreamPool.getInstance();
        pool.releasePool();
    }
}

定时关闭对象池

// Start the timer task: release the stream pool immediately, then every 30 seconds.
        new Timer().schedule(new CloseFSOuputStreamTask(), 0, 30000);

        // HDFS consumer: processes raw logs on a background thread.
        // NOTE(review): HDFSRawConsumer is defined elsewhere — presumably it writes
        // consumed records to HDFS via the pool; confirm against its source.
        new Thread(){
            public void run() {
                HDFSRawConsumer consumer = new HDFSRawConsumer();
                consumer.processLog();
            }
        }.start();

猜你喜欢

转载自blog.csdn.net/Lu_Xiao_Yue/article/details/85697586
今日推荐