# Data exchange between the local environment and Hadoop (HDFS via WebHDFS)


##  pip install hdfs
from hdfs.client import Client

# Write data to an HDFS file, replacing any existing content.
def write_to_hdfs(client, hdfs_path, data):
    """Overwrite the file at *hdfs_path* with *data* (UTF-8 text).

    client: an hdfs.client.Client instance.
    hdfs_path: target path on HDFS.
    data: string payload to store.
    """
    # overwrite=True / append=False: always replace, never extend.
    client.write(
        hdfs_path,
        data,
        encoding='utf-8',
        overwrite=True,
        append=False,
    )



# Download a file from HDFS to the local filesystem.
def get_from_hdfs(client, hdfs_path, srcPath):
    """Fetch *hdfs_path* from HDFS into local *srcPath*.

    Does NOT overwrite an existing local file (overwrite=False),
    mirroring the library default behavior explicitly.
    """
    client.download(hdfs_path, srcPath, overwrite=False)


# Remove a file from HDFS.
def delete_hdfs_file(client, hdfs_path):
    """Delete *hdfs_path* on HDFS via the given client.

    Non-recursive (library default); directories with contents
    are not removed by this call.
    """
    client.delete(hdfs_path)

# Upload a local file to HDFS.
def upload(srcPath, hdfs_path, hdfs_client=None):
    """Upload local file *srcPath* to *hdfs_path* on HDFS.

    srcPath: path of the local file to upload.
    hdfs_path: destination path (or directory) on HDFS.
    hdfs_client: optional hdfs.client.Client. When omitted, falls back
        to the module-level ``client`` global for backward compatibility
        with existing callers (e.g. the ``__main__`` block). Note the
        original code always used the global, which raises NameError if
        this module is imported and ``upload`` is called before a global
        ``client`` exists — passing ``hdfs_client`` avoids that.

    cleanup=True removes any partially uploaded data if the transfer fails.
    """
    target = hdfs_client if hdfs_client is not None else client
    target.upload(hdfs_path, srcPath, cleanup=True)
    
if __name__ == '__main__':
    # Connect to the WebHDFS endpoint on the NameNode (default web port 50070).
    # session=False disables requests-session reuse; timeout is in ms per the
    # call site's original value.
    client = Client(
        "http://slave01:50070/",
        root="/",
        timeout=10000,
        session=False,
    )
    # Demo: push a local JAR to the HDFS root directory.
    upload("D:/mysql-connector-java-5.1.46.jar", "/")
# Adapted from: blog.csdn.net/pingsha_luoyan/article/details/98218895