获取本地文件工具类 (utility class: read a local file into a String)
import java.io.*;
public class GetLocalFile {
    /**
     * Reads the entire text file at {@code filePath}, decoding it as UTF-8,
     * and returns its contents with a single '\n' appended after every line.
     *
     * @param filePath path of the local file to read
     * @return the file contents, each line terminated by '\n'
     * @throws Exception if the file cannot be opened or read
     */
    public static String getLocalFile(String filePath) throws Exception {
        StringBuilder sb = new StringBuilder();
        // try-with-resources guarantees the reader (and underlying stream) is
        // closed even when readLine() throws — the original leaked it on failure.
        // Charset is pinned to UTF-8; the original used the platform default,
        // while the HDFS uploader encodes the result as UTF-8.
        try (BufferedReader br = new BufferedReader(
                new InputStreamReader(new FileInputStream(filePath), "UTF-8"))) {
            String tempLine;
            while ((tempLine = br.readLine()) != null) {
                sb.append(tempLine);
                // BUG FIX: original appended "\\n" — the two characters
                // backslash + 'n' — instead of an actual newline.
                sb.append('\n');
            }
        }
        return sb.toString();
    }
}
将文件上传至集群工具类 (utility class: upload a local file to the HDFS cluster)
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class FileToHdfs {
    // Shared Hadoop configuration; picks up cluster settings (core-site.xml
    // etc.) from the classpath.
    static Configuration hadoopConf = new Configuration();

    /**
     * Uploads the local text file at {@code filePath} to the cluster at
     * {@code hdfsPath}, encoding its contents as UTF-8.
     *
     * @param hdfsPath destination path on HDFS (overwritten if it exists)
     * @param filePath path of the local source file
     * @return {@code true} on success, {@code false} if any step failed
     *         (the exception is printed, not rethrown)
     */
    public static boolean fileToHdfs(String hdfsPath, String filePath) {
        try {
            FileSystem fs = FileSystem.get(hadoopConf);
            Path path = new Path(hdfsPath); // 将路径转换成hdfs路径 -> wrap as an HDFS path
            // BUG FIX: the original never closed the output stream, leaking the
            // handle and risking unflushed (lost) data. try-with-resources
            // flushes and closes it on every exit path.
            // NOTE: fs itself is intentionally NOT closed — FileSystem.get()
            // returns a shared, cached instance.
            try (FSDataOutputStream fos = fs.create(path)) {
                // Read the whole local file and write it as UTF-8 bytes.
                fos.write(GetLocalFile.getLocalFile(filePath).getBytes("utf-8"));
            }
        } catch (Exception e) {
            e.printStackTrace();
            return false;
        }
        return true;
    }
}
****************************************************************************************************
主函数 (main entry point)
public class Zhu {
    /**
     * Entry point: uploads the local file "index.html" to HDFS as
     * "/user/dan/index.html.bak" and prints whether the upload succeeded.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        String filePath = "index.html";               // local source file
        String hdfsPath = "/user/dan/index.html.bak"; // destination on HDFS
        // Primitive boolean instead of boxed Boolean — avoids pointless
        // autoboxing of the method's boolean return value.
        boolean isOk = FileToHdfs.fileToHdfs(hdfsPath, filePath);
        if (isOk) {
            System.out.println("成功");
        } else {
            System.out.println("失败");
        }
    }
}