m1/aliyun_api.py

"""Generic Aliyun (Alibaba Cloud) API helper class.

Dependencies: pip install oss2 aliyun-python-sdk-core aliyun-python-sdk-cdn
"""
import base64
import hashlib
import json
import os

import oss2
from oss2 import SizedFileAdapter, determine_part_size
from oss2.models import PartInfo
from aliyunsdkcore.client import AcsClient
from aliyunsdkcdn.request.v20180510 import RefreshObjectCachesRequest

from ops.mlog import log

class AliApi(object):
    def __init__(self, access_key_id=None, access_key_secret=None):
        self.access_key_id = access_key_id or os.environ.get('ALI_ACCESS_KEY_ID')
        self.access_key_secret = access_key_secret or os.environ.get('ALI_ACCESS_KEY_SECRET')

    def calculate_file_md5(self, file_name, block_size=64 * 1024):
        """Compute the MD5 of a file.

        :param file_name: path of the file to hash
        :param block_size: chunk size used when reading the file, 64KB by default
        :return: Base64-encoded MD5 digest of the file content
        """
        with open(file_name, 'rb') as f:
            md5 = hashlib.md5()
            while True:
                data = f.read(block_size)
                if not data:
                    break
                md5.update(data)
        return base64.b64encode(md5.digest()).decode()

    def oss_upload(self, endpoint, bucket_name, file, key):
        """Multipart-upload a local file to the given OSS bucket under `key`."""
        auth = oss2.Auth(self.access_key_id, self.access_key_secret)
        bucket = oss2.Bucket(auth, endpoint, bucket_name)
        total_size = os.path.getsize(file)
        # determine_part_size picks the part size from the total size and a preferred size.
        part_size = determine_part_size(total_size, preferred_size=100 * 1024)
        # Initialize the multipart upload.
        upload_id = bucket.init_multipart_upload(key).upload_id
        parts = []
        try:
            # Upload the parts one by one.
            with open(file, 'rb') as fileobj:
                part_number = 1
                offset = 0
                while offset < total_size:
                    num_to_upload = min(part_size, total_size - offset)
                    # Content-MD5 must be the MD5 of this part's data, not of the whole file.
                    part_md5 = base64.b64encode(
                        hashlib.md5(fileobj.read(num_to_upload)).digest()).decode()
                    fileobj.seek(offset)
                    # SizedFileAdapter(fileobj, size) wraps the file object so that at most
                    # `size` bytes are read starting from the current position.
                    result = bucket.upload_part(key, upload_id, part_number,
                                                SizedFileAdapter(fileobj, num_to_upload),
                                                headers={'Content-MD5': part_md5})
                    parts.append(PartInfo(part_number, result.etag))
                    offset += num_to_upload
                    part_number += 1
            # Complete the multipart upload.
            bucket.complete_multipart_upload(key, upload_id, parts)
            log.info("{0} upload complete".format(file))
        except Exception as e:
            log.error("{0} upload failed: {1}".format(file, e))

    def cdn_refresh_dir(self, refresh_url, region='cn-shanghai'):
        """Refresh the CDN cache for a URL; a trailing '/' refreshes a whole directory."""
        client = AcsClient(self.access_key_id, self.access_key_secret, region)
        request = RefreshObjectCachesRequest.RefreshObjectCachesRequest()
        request.set_ObjectPath(refresh_url)
        if refresh_url.endswith('/'):
            request.set_ObjectType('Directory')
        else:
            request.set_ObjectType('File')
        response = client.do_action_with_exception(request)
        return json.loads(response.decode('utf-8'))
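

# Usage sketch (illustrative only): the endpoint, bucket name, local path, object key
# and CDN URL below are hypothetical placeholders, not values taken from this repository.
if __name__ == '__main__':
    api = AliApi()  # falls back to the ALI_ACCESS_KEY_ID / ALI_ACCESS_KEY_SECRET env vars
    # Multipart-upload a local file to OSS.
    api.oss_upload('https://oss-cn-shanghai.aliyuncs.com', 'example-bucket',
                   '/tmp/example.tar.gz', 'releases/example.tar.gz')
    # Refresh the CDN cache for a directory (trailing '/' selects Directory mode).
    print(api.cdn_refresh_dir('https://cdn.example.com/releases/'))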