python 下载阿里云mysql的备份文件及binlog到本地

#废话不多说,直接上代码。您有好的意见,可以留言!共同探讨,一起进步!

宗旨:

       一起努力,一起进步!



# !/usr/bin/python
# coding=utf-8

import datetime
import json
import os
import urllib
import urllib.request

from aliyunsdkcore import client
from aliyunsdkrds.request.v20140815.DescribeBackupsRequest import DescribeBackupsRequest
from aliyunsdkrds.request.v20140815.DescribeBinlogFilesRequest import DescribeBinlogFilesRequest


class PullBackupfile():
    """Download Aliyun RDS (MySQL) data backup files and binlog files to local disk.

    Credentials and instance identification are supplied at construction time:
    accessid / key are the Aliyun API access key pair, region is the RDS region
    id, instanceid is the RDS instance id.
    """

    def __init__(self, accessid, key, region, instanceid):
        self.accessid = accessid
        self.key = key
        self.region = region
        self.instanceid = instanceid

    def mkdir(self):
        """Create yesterday's backup directory tree and return (data_path, binlog_path).

        Layout: D:\\databack\\<yesterday>\\{data,binlog}  (Windows paths — this
        script also uses the Windows `forfiles` command in remove_file()).
        """
        yesterday = datetime.date.today() - datetime.timedelta(days=1)
        path = r'D:\databack\%s' % yesterday
        data_path = os.path.join(path, 'data')
        bin_log_path = os.path.join(path, 'binlog')
        for directory in (path, data_path, bin_log_path):
            # makedirs is portable and avoids shelling out via os.popen;
            # exist_ok makes re-runs idempotent.
            os.makedirs(directory, exist_ok=True)
        return data_path, bin_log_path

    def login(self):
        """Return an authenticated Aliyun SDK client."""
        return client.AcsClient(self.accessid, self.key, self.region)

    def backup_time(self, name):
        """Return (starttime, endtime) UTC strings covering yesterday, Beijing time.

        The Aliyun API works in UTC while the backup window is defined in
        Beijing time (UTC+8): e.g. 2013-08-15T12:00:00Z is 2013-08-15 20:00:00
        Beijing time.  endtime is "today 00:00 Beijing" expressed in UTC and
        starttime is exactly one day earlier.

        name -- 'datafile' (minute-resolution format, DescribeBackups) or
                'binlog' (second-resolution format, DescribeBinlogFiles).
        Raises ValueError for any other name.
        """
        now = datetime.datetime.now()
        # Roll back to local midnight, then 8 more hours to convert UTC+8 -> UTC.
        end_time = now - datetime.timedelta(hours=now.hour + 8, minutes=now.minute,
                                            seconds=now.second, microseconds=now.microsecond)
        start_time = end_time - datetime.timedelta(days=1)
        # The two RDS APIs expect slightly different timestamp formats.
        if name == 'datafile':
            fmt = '%Y-%m-%dT%H:%MZ'
        elif name == 'binlog':
            fmt = '%Y-%m-%dT%H:%M:%SZ'
        else:
            raise ValueError("name must be 'datafile' or 'binlog', got %r" % (name,))
        return start_time.strftime(fmt), end_time.strftime(fmt)

    def download_rds_backfile(self):
        """Download yesterday's full data backup files into <path>/data."""
        data_path, _ = self.mkdir()
        starttime, endtime = self.backup_time('datafile')
        try:
            req_bakup = DescribeBackupsRequest()
            req_bakup.set_DBInstanceId(self.instanceid)
            req_bakup.set_accept_format('json')
            req_bakup.set_StartTime(starttime)
            req_bakup.set_EndTime(endtime)
            clt = self.login()
            jsload = json.loads(clt.do_action_with_exception(req_bakup))
            num = jsload["PageRecordCount"]
            print("backfiles:" + str(num))
            # join with '' guarantees a trailing separator for concatenation below.
            back_path = os.path.join(data_path, '')
            for backup in jsload["Items"]["Backup"][:num]:
                bak_url = backup["BackupDownloadURL"]
                bak_host = backup["HostInstanceID"]
                bak_id = backup["BackupId"]
                print("BackupId:" + str(bak_id), "HostInstanceID:" + str(bak_host),
                      "downloadurl:" + bak_url)
                # File name is the last path component of the URL (query string stripped).
                save_name = back_path + bak_url.split('?')[0].split('/')[-1]
                with urllib.request.urlopen(bak_url) as u:
                    bak_size = int(u.info()["Content-Length"])
                    print("backup file size: %s M , file name: %s"
                          % (bak_size / 1024 / 1024, save_name))
                    # Stream in 8 KiB chunks so huge backups don't sit in memory.
                    with open(save_name, "wb") as f:
                        file_size_dl = 0
                        while True:
                            buffer = u.read(8192)
                            if not buffer:
                                break
                            file_size_dl += len(buffer)
                            f.write(buffer)
                            print(r"%10d [%3.2f%%]" % (file_size_dl,
                                                       file_size_dl * 100. / bak_size))
            print("download complet!")
        except Exception as e:
            # Best-effort, as in the original script, but surface the actual
            # error instead of silently swallowing credential/network failures.
            print("无备份: %s" % e)

    def download_rds_binlog(self):
        """Download yesterday's binlog files into <path>/binlog."""
        _, bin_log_path = self.mkdir()
        print(bin_log_path)
        starttime, endtime = self.backup_time('binlog')
        try:
            request = DescribeBinlogFilesRequest()
            request.set_DBInstanceId(self.instanceid)
            request.set_accept_format('json')
            request.set_StartTime(starttime)
            request.set_EndTime(endtime)
            clt = self.login()
            jsload = json.loads(clt.do_action_with_exception(request))
            num = jsload["TotalRecordCount"]
            print("backfiles:" + str(num))
            back_path = os.path.join(bin_log_path, '')
            print(back_path)
            for binlog in jsload["Items"]["BinLogFile"][:num]:
                bak_url = binlog["DownloadLink"]
                bak_time = datetime.datetime.strptime(binlog['LogEndTime'],
                                                      '%Y-%m-%dT%H:%M:%SZ')
                # Suffix the log-end timestamp (digits only) so files from the
                # same log name on different days don't collide.
                stamp = str(bak_time).replace(' ', '').replace(':', '').replace('-', '')
                save_name = back_path + binlog["LogFileName"] + '_' + stamp
                # urlretrieve creates/overwrites the file itself; no need to
                # pre-open it as the original did.
                urllib.request.urlretrieve(bak_url, save_name)
            print("download complet!")
        except Exception as e:
            # Same best-effort policy as download_rds_backfile.
            print('无备份: %s' % e)

    def remove_file(self):
        """Delete backup folders older than 7 days (Windows `forfiles` command)."""
        os.popen(r'forfiles /p "D:\databack" /d -7 /c '
                 r'"cmd /c echo deleting @path ... && rd @path /s /q" ')

if __name__ == '__main__':
    # Replace the placeholders with real credentials, region and instance id
    # before running. Order: pull full backups, pull binlogs, prune old folders.
    pull_file = PullBackupfile('accessid', 'key', 'region', 'instanceid')
    pull_file.download_rds_backfile()
    pull_file.download_rds_binlog()
    pull_file.remove_file()

转载自 www.cnblogs.com/leeInvisible/p/11582608.html