Merged
Commits
41 commits
849b699
add ut
l-iberty Mar 8, 2023
10473f5
fix ut/test.py
l-iberty Mar 8, 2023
808eff1
fix ut
l-iberty Mar 9, 2023
7a3b704
fix ut
l-iberty Mar 9, 2023
cfd576b
fix ut
l-iberty Mar 9, 2023
533e50c
fix ut
l-iberty Mar 9, 2023
7a9c766
fix ut
l-iberty Mar 9, 2023
c67412b
fix ut
l-iberty Mar 9, 2023
0ccac95
...
l-iberty Mar 9, 2023
1d2777c
ci ut: change bucket name
fly-zll Mar 10, 2023
346a911
ci ut
fly-zll Mar 13, 2023
9949ffd
fix domain cert ut
l-iberty Mar 14, 2023
8c28f06
support short-lived (non-keep-alive) connections
l-iberty Mar 15, 2023
bbbe6f1
fix ut
l-iberty Mar 15, 2023
0e809fc
fix ci ut
l-iberty Mar 15, 2023
02e7c22
fix ut
l-iberty Mar 16, 2023
e57f953
config appid
l-iberty Mar 16, 2023
56943d8
ci ut additions
fly-zll Mar 16, 2023
8ecd03a
config appid
l-iberty Mar 17, 2023
2bb18f3
ci ut additions
fly-zll Mar 18, 2023
9f88b5d
ci ut additions and optimizations
fly-zll Mar 19, 2023
ff4cb61
ci ut additions and optimizations
fly-zll Mar 19, 2023
d0d9260
fix cos ut
l-iberty Apr 4, 2023
5c7bc3a
upload_file
l-iberty Apr 12, 2023
57fe882
cos ut
l-iberty Apr 20, 2023
cff7bd5
cos ut
l-iberty Apr 20, 2023
c4bbb34
ut
l-iberty Apr 20, 2023
bce6b20
ut
l-iberty Apr 20, 2023
6b60478
ut
l-iberty Apr 20, 2023
b62961f
ut
l-iberty Apr 21, 2023
e3512e5
ci ut additions and optimizations
fly-zll Apr 21, 2023
c8733df
Merge remote-tracking branch 'origin/master'
fly-zll Apr 21, 2023
032f081
ut
l-iberty Apr 21, 2023
1973e90
ut
l-iberty Apr 21, 2023
99a3ab3
ut
l-iberty Apr 21, 2023
9bb064d
ut
l-iberty Apr 21, 2023
97c681c
ut
l-iberty Apr 21, 2023
c6bb2f9
ut
l-iberty Apr 21, 2023
527e511
create_bucket OFS
l-iberty Apr 23, 2023
f38047e
ut
l-iberty Apr 25, 2023
da82820
ut
l-iberty Apr 25, 2023
2 changes: 1 addition & 1 deletion demo/ci_media.py
@@ -929,7 +929,7 @@ def ci_list_media_pic_jobs():

def ci_get_media_pic_jobs():
# Image processing job details
response = client.ci_get_media_jobs(
response = client.ci_get_media_pic_jobs(
Bucket=bucket_name,
JobIDs='c01742xxxxxxxxxxxxxxxxxx7438e39',
ContentType='application/xml'
35 changes: 24 additions & 11 deletions qcloud_cos/cos_client.py
@@ -42,7 +42,7 @@ class CosConfig(object):

def __init__(self, Appid=None, Region=None, SecretId=None, SecretKey=None, Token=None, CredentialInstance=None, Scheme=None, Timeout=None,
Access_id=None, Access_key=None, Secret_id=None, Secret_key=None, Endpoint=None, IP=None, Port=None,
Anonymous=None, UA=None, Proxies=None, Domain=None, ServiceDomain=None, PoolConnections=10,
Anonymous=None, UA=None, Proxies=None, Domain=None, ServiceDomain=None, KeepAlive=True, PoolConnections=10,
PoolMaxSize=10, AllowRedirects=False, SignHost=True, EndpointCi=None, EndpointPic=None, EnableOldDomain=True, EnableInternalDomain=True):
"""初始化,保存用户的信息

@@ -65,6 +65,7 @@ def __init__(self, Appid=None, Region=None, SecretId=None, SecretKey=None, Token
:param Proxies(dict): proxies used to access COS
:param Domain(string): custom domain used to access COS
:param ServiceDomain(string): custom domain used to access the cos service
:param KeepAlive(bool): whether to use persistent (keep-alive) connections
:param PoolConnections(int): number of connection pools
:param PoolMaxSize(int): maximum number of connections in the pool
:param AllowRedirects(bool): whether to follow redirects
@@ -87,6 +88,7 @@ def __init__(self, Appid=None, Region=None, SecretId=None, SecretKey=None, Token
self._proxies = Proxies
self._domain = Domain
self._service_domain = ServiceDomain
self._keep_alive = KeepAlive
self._pool_connections = PoolConnections
self._pool_maxsize = PoolMaxSize
self._allow_redirects = AllowRedirects
@@ -332,6 +334,8 @@ def send_request(self, method, url, bucket, timeout=30, cos_request=True, **kwar
kwargs['headers']['Host'] = self._conf._domain
elif bucket is not None:
kwargs['headers']['Host'] = self._conf.get_host(bucket)
if self._conf._keep_alive == False:
kwargs['headers']['Connection'] = 'close'
kwargs['headers'] = format_values(kwargs['headers'])

file_position = None
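
A minimal usage sketch of the new KeepAlive switch (region, credentials and bucket below are placeholders, not taken from this PR):

from qcloud_cos import CosConfig, CosS3Client

# KeepAlive=False makes the client send 'Connection: close' on each request,
# i.e. the short-lived-connection mode introduced in this PR.
config = CosConfig(Region='ap-guangzhou',      # placeholder region
                   SecretId='SECRET_ID',        # placeholder credentials
                   SecretKey='SECRET_KEY',
                   KeepAlive=False)
client = CosS3Client(config)
resp = client.head_bucket(Bucket='examplebucket-1250000000')  # placeholder bucket
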
@@ -1273,7 +1277,7 @@ def select_object_content(self, Bucket, Key, Expression, ExpressionType, InputSe
return data

# s3 bucket interface begin
def create_bucket(self, Bucket, BucketAZConfig=None, **kwargs):
def create_bucket(self, Bucket, BucketAZConfig=None, BucketArchConfig=None, **kwargs):
"""创建一个bucket

:param Bucket(string): bucket name. Uppercase letters are not supported; the COS backend automatically converts any uppercase letters passed in to lowercase when creating the bucket.
@@ -1297,11 +1301,16 @@ def create_bucket(self, Bucket, BucketAZConfig=None, **kwargs):
"""
headers = mapped(kwargs)
xml_config = None
bucket_config = dict()
if BucketAZConfig == 'MAZ':
bucket_config = {'BucketAZConfig': 'MAZ'}
bucket_config.update({'BucketAZConfig': 'MAZ'})
if BucketArchConfig == 'OFS':
bucket_config.update({'BucketArchConfig': 'OFS'})
if len(bucket_config) != 0:
xml_config = format_xml(data=bucket_config, root='CreateBucketConfiguration')
headers['Content-MD5'] = get_md5(xml_config)
headers['Content-Type'] = 'application/xml'

url = self._conf.uri(bucket=Bucket)
logger.info("create bucket, url=:{url} ,headers=:{headers}".format(
url=url,
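
A hedged sketch of passing the new BucketArchConfig option (the bucket name is a placeholder and the client is assumed to be set up as above):

# 'OFS' is serialized as BucketArchConfig into the CreateBucketConfiguration
# XML; BucketAZConfig='MAZ' can be combined in the same call.
response = client.create_bucket(
    Bucket='examplebucket-1250000000',   # placeholder bucket name
    BucketArchConfig='OFS'
)
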
@@ -3531,8 +3540,8 @@ def download_file(self, Bucket, Key, DestFilePath, PartSize=20, MAXThread=5, Ena

def upload_file(self, Bucket, Key, LocalFilePath, PartSize=1, MAXThread=5, EnableMD5=False, progress_callback=None,
**kwargs):
"""小于等于20MB的文件简单上传,大于20MB的文件使用分块上传

"""
:param Bucket(string): bucket name.
:param key(string): object key (path) for the upload.
:param LocalFilePath(string): local file path.
@@ -3557,7 +3566,7 @@ def upload_file(self, Bucket, Key, LocalFilePath, PartSize=1, MAXThread=5, Enabl
)
"""
file_size = os.path.getsize(LocalFilePath)
if file_size <= 1024 * 1024 * 20:
if file_size <= 1024 * 1024 * PartSize:
with open(LocalFilePath, 'rb') as fp:
rt = self.put_object(Bucket=Bucket, Key=Key, Body=fp, EnableMD5=EnableMD5, **kwargs)
return rt
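
With this change the simple-upload threshold follows PartSize (in MB) rather than a fixed 20 MB. A sketch of a call (bucket, key and local path are placeholders):

# Files up to PartSize MB go through a single put_object; anything larger
# falls through to the multipart path with MAXThread concurrent workers.
response = client.upload_file(
    Bucket='examplebucket-1250000000',   # placeholder
    Key='exampleobject',                 # placeholder
    LocalFilePath='local.bin',           # placeholder
    PartSize=10,
    MAXThread=5
)
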
@@ -7012,9 +7021,9 @@ def ci_list_workflowexecution(self, Bucket, WorkflowId, Name='', StartCreationTi
NextToken=to_unicode('nextToken='+NextToken)
)
if StartCreationTime is not None:
url = u"{url}&{StartCreationTime}".format(StartCreationTime=to_unicode('startCreationTime='+StartCreationTime))
url = u"{url}&{StartCreationTime}".format(url=to_unicode(url), StartCreationTime=quote(to_bytes(to_unicode('startCreationTime='+StartCreationTime)), b'/-_.~='))
if EndCreationTime is not None:
url = u"{url}&{EndCreationTime}".format(EndCreationTime=to_unicode('endCreationTime='+EndCreationTime))
url = u"{url}&{EndCreationTime}".format(url=to_unicode(url), EndCreationTime=quote(to_bytes(to_unicode('endCreationTime='+EndCreationTime)), b'/-_.~='))
logger.info("ci_list_workflowexecution result, url=:{url} ,headers=:{headers}, params=:{params}".format(
url=url,
headers=headers,
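
The same quoting fix is repeated below for ci_list_doc_jobs and ci_list_asr_jobs. A small illustration of what the added quote() call does to a timestamp value (the standard-library quote is used as a stand-in for the SDK's helper, and the timestamp is made up):

from urllib.parse import quote

value = 'startCreationTime=' + '2023-04-01T00:00:00+0800'
print(quote(value, safe='/-_.~='))
# startCreationTime=2023-04-01T00%3A00%3A00%2B0800  ('=' is kept, ':' and '+' are escaped)
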
@@ -7518,9 +7527,11 @@ def ci_list_doc_jobs(self, Bucket, QueueId, StartCreationTime=None, EndCreationT
NextToken=to_unicode('nextToken='+NextToken)
)
if StartCreationTime is not None:
url = u"{url}&{StartCreationTime}".format(StartCreationTime=to_unicode('startCreationTime='+StartCreationTime))
url = u"{url}&{StartCreationTime}".format(url=to_unicode(url),
StartCreationTime=quote(to_bytes(to_unicode('startCreationTime='+StartCreationTime)), b'/-_.~='))
if EndCreationTime is not None:
url = u"{url}&{EndCreationTime}".format(EndCreationTime=to_unicode('endCreationTime='+EndCreationTime))
url = u"{url}&{EndCreationTime}".format(url=to_unicode(url),
EndCreationTime=quote(to_bytes(to_unicode('endCreationTime='+EndCreationTime)), b'/-_.~='))
logger.info("list_doc_jobs result, url=:{url} ,headers=:{headers}, params=:{params}".format(
url=url,
headers=headers,
@@ -8130,9 +8141,11 @@ def ci_list_asr_jobs(self, Bucket, QueueId, StartCreationTime=None, EndCreationT
NextToken=to_unicode('nextToken='+NextToken)
)
if StartCreationTime is not None:
url = u"{url}&{StartCreationTime}".format(StartCreationTime=to_unicode('startCreationTime='+StartCreationTime))
url = u"{url}&{StartCreationTime}".format(url=to_unicode(url),
StartCreationTime=quote(to_bytes(to_unicode('startCreationTime='+StartCreationTime)), b'/-_.~='))
if EndCreationTime is not None:
url = u"{url}&{EndCreationTime}".format(EndCreationTime=to_unicode('endCreationTime='+EndCreationTime))
url = u"{url}&{EndCreationTime}".format(url=to_unicode(url),
EndCreationTime=quote(to_bytes(to_unicode('endCreationTime='+EndCreationTime)), b'/-_.~='))
logger.info("list_asr_jobs result, url=:{url} ,headers=:{headers}, params=:{params}".format(
url=url,
headers=headers,
14 changes: 7 additions & 7 deletions qcloud_cos/cos_comm.py
@@ -178,13 +178,13 @@ def xml_to_dict(data, origin_str="", replace_str=""):
return xmldict


def get_id_from_xml(data, name):
"""解析xml中的特定字段"""
tree = xml.dom.minidom.parseString(data)
root = tree.documentElement
result = root.getElementsByTagName(name)
# use childNodes to get a list, if has no child get itself
return result[0].childNodes[0].nodeValue
# def get_id_from_xml(data, name):
# """解析xml中的特定字段"""
# tree = xml.dom.minidom.parseString(data)
# root = tree.documentElement
# result = root.getElementsByTagName(name)
# # use childNodes to get a list, if has no child get itself
# return result[0].childNodes[0].nodeValue


def mapped(headers):
29 changes: 15 additions & 14 deletions qcloud_cos/cos_threadpool.py
@@ -86,25 +86,26 @@ def get_result(self):


if __name__ == '__main__':
pass

pool = SimpleThreadPool(2)
# pool = SimpleThreadPool(2)

def task_sleep(x):
from time import sleep
sleep(x)
return 'hello, sleep %d seconds' % x
# def task_sleep(x):
# from time import sleep
# sleep(x)
# return 'hello, sleep %d seconds' % x

def raise_exception():
raise ValueError("Pa! Exception!")
# def raise_exception():
# raise ValueError("Pa! Exception!")

for i in range(1000):
pool.add_task(task_sleep, 0.001)
print(i)
pool.add_task(task_sleep, 0)
pool.add_task(task_sleep, 0)
# for i in range(1000):
# pool.add_task(task_sleep, 0.001)
# print(i)
# pool.add_task(task_sleep, 0)
# pool.add_task(task_sleep, 0)
# pool.add_task(raise_exception)
# pool.add_task(raise_exception)

pool.wait_completion()
print(pool.get_result())
# pool.wait_completion()
# print(pool.get_result())
# [(1, 0, ['hello, sleep 5 seconds']), (2, 1, ['hello, sleep 2 seconds', 'hello, sleep 3 seconds', ValueError('Pa! Exception!',)])]
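
With the inline demo commented out, a short sketch of how the pool is driven, reconstructed from the commented lines above (the sleep task and pool size are illustrative):

from time import sleep
from qcloud_cos.cos_threadpool import SimpleThreadPool

def task_sleep(x):
    sleep(x)
    return 'hello, sleep %d seconds' % x

pool = SimpleThreadPool(2)        # two worker threads
pool.add_task(task_sleep, 0)      # queue a couple of tasks
pool.add_task(task_sleep, 0)
pool.wait_completion()            # block until all queued tasks finish
print(pool.get_result())          # aggregated per-task results
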
21 changes: 11 additions & 10 deletions qcloud_cos/xml2dict.py
@@ -36,13 +36,14 @@ def updateDict(self, aDict):


if __name__ == "__main__":
s = """<?xml version="1.0" encoding="utf-8" ?>
<result xmlns= "wqa.bai.com">
<count n="1">10</count>
<data><id>1</id><name>test1</name></data>
<data><id>2</id><name>test2</name></data>
<data><id>3</id><name>test3</name></data>
</result>"""
root = xml.etree.ElementTree.fromstring(s)
xmldict = Xml2Dict(root)
print(xmldict)
pass
# s = """<?xml version="1.0" encoding="utf-8" ?>
# <result xmlns= "wqa.bai.com">
# <count n="1">10</count>
# <data><id>1</id><name>test1</name></data>
# <data><id>2</id><name>test2</name></data>
# <data><id>3</id><name>test3</name></data>
# </result>"""
# root = xml.etree.ElementTree.fromstring(s)
# xmldict = Xml2Dict(root)
# print(xmldict)
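
The Xml2Dict self-test is likewise commented out; a minimal sketch of the same exercise, reusing the sample XML from the commented lines:

import xml.etree.ElementTree
from qcloud_cos.xml2dict import Xml2Dict

s = """<?xml version="1.0" encoding="utf-8" ?>
<result xmlns="wqa.bai.com">
    <count n="1">10</count>
    <data><id>1</id><name>test1</name></data>
</result>"""
root = xml.etree.ElementTree.fromstring(s)
print(Xml2Dict(root))   # dict-like view of the parsed XML tree
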