Skip to content

Commit

Permalink
Combine the py2/py3 test cases together to fix the CI issue on different py…
Browse files Browse the repository at this point in the history
…3 versions
  • Loading branch information
wjo1212 committed Nov 22, 2017
1 parent 7305c22 commit d775904
Show file tree
Hide file tree
Showing 6 changed files with 116 additions and 331 deletions.
3 changes: 1 addition & 2 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,5 +15,4 @@ install:

script:
- cd tests
- if [[ $TRAVIS_PYTHON_VERSION == 2* || $TRAVIS_PYTHON_VERSION == 'pypy' ]]; then python ./test_cli.py ./cmd_list_py2.txt; fi
- if [[ $TRAVIS_PYTHON_VERSION == 3* || $TRAVIS_PYTHON_VERSION == 'pypy3' ]]; then python ./test_cli.py ./cmd_list_py3.txt; fi
- python ./test_cli.py ./cmd_list.txt
79 changes: 76 additions & 3 deletions aliyunlogcli/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,9 @@
from docopt import docopt
from config import load_config

import collections
from six import StringIO


def configure_confidential(secure_id, secure_key, endpoint, client_name=LOG_CONFIG_SECTION):
""" configure confidential
Expand Down Expand Up @@ -40,6 +43,36 @@ def configure_confidential(secure_id, secure_key, endpoint, client_name=LOG_CONF
config.write(configfile)


def _get_str(obj, enclosed=True):
if enclosed:
return repr(obj)
return str(obj)

def _sort_str_dict(obj, enclosed=False):
buf = StringIO()
if isinstance(obj, dict):
buf.write('{')
for i, x in enumerate(sorted(obj)):
if i == 0:
buf.write("{0}: {1}".format(_sort_str_dict(x, True), _sort_str_dict(obj[x], True)))
else:
buf.write(", {0}: {1}".format(_sort_str_dict(x, True), _sort_str_dict(obj[x], True)))

buf.write('}')
return buf.getvalue()
elif isinstance(obj, list):
buf.write('[')
for i, x in enumerate(obj):
if i == 0:
buf.write("{0}".format(_sort_str_dict(x, True)))
else:
buf.write(", {0}".format(_sort_str_dict(x, True)))
buf.write(']')
return buf.getvalue()
else:
return _get_str(obj, enclosed)


def main():
method_types, optdoc = parse_method_types_optdoc_from_class(LogClient, LOG_CLIENT_METHOD_BLACK_LIST)

Expand All @@ -65,13 +98,13 @@ def main():
print(jmespath.compile(jmes_filter).search(ret.get_body()))
except jmespath.exceptions.ParseError as ex:
print("**fail to parse with JMSE path, original data: ", ex)
print(ret.get_body())
print(_sort_str_dict(ret.get_body()))
exit(1)
elif ret is not None:
print(ret.get_body())
print(_sort_str_dict(ret.get_body()))

except LogException as ex:
print(ex.get_resp_body())
print(_sort_str_dict(ex.get_resp_body()))
exit(2)

# process configure command
Expand All @@ -83,3 +116,43 @@ def main():

if __name__ == '__main__':
main()


# def test_convert():
# d1 = {1:'\n'}
# print(_sort_str_dict(d1))
# assert r"{1: '\n'}" == _sort_str_dict(d1)
#
# d1 = {1:'\t'}
# print(_sort_str_dict(d1))
# assert r"{1: '\t'}" == _sort_str_dict(d1)
#
# d1 = "123"
# print(_sort_str_dict(d1))
# assert """123""" == _sort_str_dict(d1)
#
# d1 = ""
# print(_sort_str_dict(d1))
# assert """""" == _sort_str_dict(d1)
#
# d1 = 123
# print(_sort_str_dict(d1))
# assert """123""" == _sort_str_dict(d1)
#
# d1 = [1,'2', 3]
# print(_sort_str_dict(d1))
# assert """[1, '2', 3]""" == _sort_str_dict(d1)
#
# d1 = {1:1, '3':3, 2:'2'}
# print(_sort_str_dict(d1))
# assert """{1: 1, 2: '2', '3': 3}""" == _sort_str_dict(d1)
#
# d1 = [1,'2', {1:1, '3':3, 2:'2'}]
# print(_sort_str_dict(d1))
# assert """[1, '2', {1: 1, 2: '2', '3': 3}]""" == _sort_str_dict(d1)
#
# d1 = {1:{1:1, '3':3, 2:'2'}, '3':{1:1, '3':{1:1, '3':3, 2:'2'}, 2:'2'}, 2:'2'}
# print(_sort_str_dict(d1))
# assert """{1: {1: 1, 2: '2', '3': 3}, 2: '2', '3': {1: 1, 2: '2', '3': {1: 1, 2: '2', '3': 3}}}""" == _sort_str_dict(d1)
#
# exit(10)
5 changes: 2 additions & 3 deletions aliyunlogcli/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -169,9 +169,6 @@ def _request_maker(cls):
def maker(json_str):
args_list, option_arg_pos = _parse_method(cls.__init__)

# print("**** convert json:")
# print(json_str)
#
if json_str.startswith('file://'):
with open(json_str[7:], "r") as f:
json_str = f.read()
Expand Down Expand Up @@ -218,6 +215,8 @@ def maker(json_str):
try:
obj = _request_maker(cls)(json_str)
return obj
except IOError as ex:
print("*** IO error: ", ex, json_str)
except Exception as ex:
continue

Expand Down
73 changes: 35 additions & 38 deletions tests/cmd_list_py3.txt → tests/cmd_list.txt
Original file line number Diff line number Diff line change
Expand Up @@ -35,28 +35,30 @@ aliyun log create_shipper --project_name="dlq-test-cli-${RANDOM_NUMBER}" --logst
aliyun log apply_config_to_machine_group --project_name="dlq-test-cli-${RANDOM_NUMBER}" --config_name="config_name1" --group_name="group_name1"

aliyun log get_project --project_name="dlq-test-cli-${RANDOM_NUMBER}"
> 'status': 'Normal'
> 'description': 'testdata1'
> 'projectName': 'dlq-test-cli-${RANDOM_NUMBER}
> 'projectName': 'dlq-test-cli-${RANDOM_NUMBER}'
> 'status': 'Normal'

aliyun log get_logstore --project_name="dlq-test-cli-${RANDOM_NUMBER}" --logstore_name="logstore2"
> 'logstoreName': 'logstore2'
> 'shardCount': 2
> 'logstoreName': 'logstore2', 'shardCount': 2

aliyun log get_logtail_config --project_name="dlq-test-cli-${RANDOM_NUMBER}" --config_name="config_name1"
> 'configName': 'config_name1'
> 'inputDetail': {'adjustTimezone': False, 'delayAlarmBytes': 0, 'discardNonUtf8': False, 'discardUnmatch': True, 'enableRawLog': False, 'enableTag': False, 'fileEncoding': 'utf8', 'filePattern': 'file_pattern', 'filterKey': ['time', 'value'], 'filterRegex': ['time', 'value'], 'key': ['reg_key1', 'reg_key2'], 'localStorage': True, 'logBeginRegex': 'xxx.*', 'logPath': '/log_path', 'logTimezone': '', 'logType': 'common_reg_log', 'maxDepth': 1000, 'maxSendRate': -1, 'mergeType': 'topic', 'preserve': True, 'preserveDepth': 0, 'regex': 'xxx ([\\w\\-]+\\s[\\d\\:]+)\\s+(.*)', 'sendRateExpire': 0, 'sensitive_keys': [], 'tailExisted': False, 'timeFormat': '%Y-%m-%d %H:%M:%S', 'topicFormat': 'none'}, 'inputType': 'file'
> 'inputDetail': {'adjustTimezone': False, 'delayAlarmBytes': 0, 'discardNonUtf8': False, 'discardUnmatch': True, 'enableRawLog': False, 'enableTag': False, 'fileEncoding': 'utf8', 'filePattern': 'file_pattern', 'filterKey': ['time', 'value'], 'filterRegex': ['time', 'value'], 'key': ['reg_key1', 'reg_key2'], 'localStorage': True, 'logBeginRegex': 'xxx.*', 'logPath': '/log_path', 'logTimezone': '', 'logType': 'common_reg_log', 'maxDepth': 1000, 'maxSendRate': -1, 'mergeType': 'topic', 'preserve': True, 'preserveDepth': 0, 'regex': 'xxx ([\\w\\-]+\\s[\\d\\:]+)\\s+(.*)', 'sendRateExpire': 0, 'sensitive_keys': [], 'tailExisted': False, 'timeFormat': '%Y-%m-%d %H:%M:%S', 'topicFormat': 'none'}
> 'inputType': 'file'
> 'logSample': 'xxx 2017-11-11 11:11:11 hello alicloud.'


aliyun log get_config_applied_machine_groups --project_name="dlq-test-cli-${RANDOM_NUMBER}" --config_name="config_name1"
> {'machinegroups': ['group_name1'], 'count': 1}
> {'count': 1, 'machinegroups': ['group_name1']}

aliyun log get_index_config --project_name="dlq-test-cli-${RANDOM_NUMBER}" --logstore_name="logstore1"
> 'index_mode': 'v2', 'keys': {'key1': {'caseSensitive': False, 'doc_value': True, 'token': [',', ' ', '"', '"', ';', '=', '(', ')', '[', ']', '{', '}', '?', '@', '&', '<', '>', '/', ':', '\n', '\t'], 'type': 'text'}, 'key2': {'caseSensitive': False, 'doc_value': True, 'token': [',', ' ', '"', '"', ';', '=', '(', ')', '[', ']', '{', '}', '?', '@', '&', '<', '>', '/', ':', '\n', '\t'], 'type': 'text'}, 'key3': {'caseSensitive': False, 'doc_value': True, 'token': [',', ' ', '"', '"', ';', '=', '(', ')', '[', ']', '{', '}', '?', '@', '&', '<', '>', '/', ':', '\n', '\t'], 'type': 'text'}, 'key4': {'caseSensitive': False, 'doc_value': True, 'token': [',', ' ', '"', '"', ';', '=', '(', ')', '[', ']', '{', '}', '?', '@', '&', '<', '>', '/', ':', '\n', '\t'], 'type': 'text'}}, 'line': {'caseSensitive': False, 'token': [',', ' ', '"', '"', ';', '=', '(', ')', '[', ']', '{', '}', '?', '@', '&', '<', '>', '/', ':', '\n', '\t']}, 'storage': 'pg', 'ttl': 2

> 'index_mode': 'v2', 'keys': {'key1': {'caseSensitive': False, 'doc_value': True, 'token': [',', ' ', '"', '"', ';', '=', '(', ')', '[', ']', '{', '}', '?', '@', '&', '<', '>', '/', ':', '\n', '\t'], 'type': 'text'}, 'key2': {'caseSensitive': False, 'doc_value': True, 'token': [',', ' ', '"', '"', ';', '=', '(', ')', '[', ']', '{', '}', '?', '@', '&', '<', '>', '/', ':', '\n', '\t'], 'type': 'text'}, 'key3': {'caseSensitive': False, 'doc_value': True, 'token': [',', ' ', '"', '"', ';', '=', '(', ')', '[', ']', '{', '}', '?', '@', '&', '<', '>', '/', ':', '\n', '\t'], 'type': 'text'}, 'key4': {'caseSensitive': False, 'doc_value': True, 'token': [',', ' ', '"', '"', ';', '=', '(', ')', '[', ']', '{', '}', '?', '@', '&', '<', '>', '/', ':', '\n', '\t'], 'type': 'text'}}
> 'line': {'caseSensitive': False, 'token': [',', ' ', '"', '"', ';', '=', '(', ')', '[', ']', '{', '}', '?', '@', '&', '<', '>', '/', ':', '\n', '\t']}, 'storage': 'pg', 'ttl': 2

aliyun log get_machine_group --project_name="dlq-test-cli-${RANDOM_NUMBER}" --group_name="group_name1"
> {'groupName': 'group_name1', 'groupType': 'Armory', 'groupAttribute': {'externalName': 'ex name', 'groupTopic': 'topic x'}, 'machineIdentifyType': 'userdefined', 'machineList': ['machine1', 'machine2']
> 'groupAttribute': {'externalName': 'ex name', 'groupTopic': 'topic x'}, 'groupName': 'group_name1', 'groupType': 'Armory'
> 'machineIdentifyType': 'userdefined', 'machineList': ['machine1', 'machine2']


aliyun log get_machine_group_applied_configs --project_name="dlq-test-cli-${RANDOM_NUMBER}" --group_name="group_name1"
Expand Down Expand Up @@ -97,6 +99,7 @@ sleep 10
###########



##########
# put logs compress=True
# prepare request json
Expand All @@ -106,45 +109,33 @@ aliyun log put_logs --request="file://./put_logs_${RANDOM_NUMBER}.json"
###########




# get logs
aliyun log get_logs --request="{\"topic\": \"\", \"logstore\": \"logstore1\", \"project\": \"dlq-test-cli-${RANDOM_NUMBER}\", \"toTime\": \"${TIME-NOW}\", \"offset\": \"0\", \"query\": \"*\", \"line\": \"10\", \"fromTime\": \"${TIME-ONE-HOUR-AGO}\", \"reverse\":\"false\"}"
> {'__source__': 'source1', '__tag__:tag1': 'v1', '__tag__:tag2': 'v2', '__time__': '${TIME-ONE-HOUR-AGO}', '__topic__': 'topic1', 'key1': 'v1', 'key2': 'v2'}
> {'__source__': 'source1', '__tag__:tag1': 'v1', '__tag__:tag2': 'v2', '__time__': '${TIME-ONE-HOUR-AGO}', '__topic__': 'topic1', 'key3': 'v3', 'key4': 'v4'}
> [{'__source__': 'source1', '__tag__:tag1': 'v1', '__tag__:tag2': 'v2', '__time__': '${TIME-ONE-HOUR-AGO}', '__topic__': 'topic1', 'key1': 'v1', 'key2': 'v2'}, {'__source__': 'source1', '__tag__:tag1': 'v1', '__tag__:tag2': 'v2', '__time__': '${TIME-ONE-HOUR-AGO}', '__topic__': 'topic1', 'key3': 'v3', 'key4': 'v4'}]


rm -f ./get_logs_${RANDOM_NUMBER}.json
echo "{\"topic\": \"\",\"logstore\": \"logstore1\",\"project\": \"dlq-test-cli-${RANDOM_NUMBER}\",\"toTime\": \"${TIME-NOW}\",\"offset\": \"0\",\"query\": \"*\",\"line\": \"10\",\"fromTime\": \"${TIME-ONE-HOUR-AGO}\",\"reverse\": \"true\"}" >> ./get_logs_${RANDOM_NUMBER}.json
aliyun log get_logs --request="file://./get_logs_${RANDOM_NUMBER}.json"
> {'__source__': 'source1', '__tag__:tag1': 'v1', '__tag__:tag2': 'v2', '__time__': '${TIME-ONE-HOUR-AGO}', '__topic__': 'topic1', 'key1': 'v1', 'key2': 'v2'}
> {'__source__': 'source1', '__tag__:tag1': 'v1', '__tag__:tag2': 'v2', '__time__': '${TIME-ONE-HOUR-AGO}', '__topic__': 'topic1', 'key3': 'v3', 'key4': 'v4'}

> [{'__source__': 'source1', '__tag__:tag1': 'v1', '__tag__:tag2': 'v2', '__time__': '${TIME-ONE-HOUR-AGO}', '__topic__': 'topic1', 'key3': 'v3', 'key4': 'v4'}, {'__source__': 'source1', '__tag__:tag1': 'v1', '__tag__:tag2': 'v2', '__time__': '${TIME-ONE-HOUR-AGO}', '__topic__': 'topic1', 'key1': 'v1', 'key2': 'v2'}]

rm -f ./get_project_logs_${RANDOM_NUMBER}.json
echo "{\"project\": \"dlq-test-cli-${RANDOM_NUMBER}\",\"query\": \"select * from logstore1 where __time__ >= ${TIME-ONE-HOUR-AGO} and __time__ <= ${TIME-NOW}\"}" >> ./get_project_logs_${RANDOM_NUMBER}.json
aliyun log get_project_logs --request="file://./get_project_logs_${RANDOM_NUMBER}.json"
> '__source__': 'source1', '__time__': '${TIME-ONE-HOUR-AGO}', '__topic__': 'topic1', 'key1': 'v1', 'key2': 'v2', 'key3': 'null', 'key4': 'null'
> '__source__': 'source1', '__time__': '${TIME-ONE-HOUR-AGO}', '__topic__': 'topic1', 'key1': 'null', 'key2': 'null', 'key3': 'v3', 'key4': 'v4'

#> 'key3': 'null', 'key2': 'v2', 'key1': 'v1', '__time__': '${TIME-ONE-HOUR-AGO}', '__topic__': 'topic1', '__line__': 'null', 'key4': 'null', '__date__': 'null', '__source__': 'source1'
#> 'key3': 'v3', 'key2': 'null', 'key1': 'null', '__time__': '${TIME-ONE-HOUR-AGO}', '__topic__': 'topic1', '__line__': 'null', 'key4': 'v4', '__date__': 'null', '__source__': 'source1'

> [{'__date__': 'null', '__line__': 'null', '__sls_query_parameter__': 'null', '__source__': 'source1', '__time__': '${TIME-ONE-HOUR-AGO}', '__topic__': 'topic1', 'key1': 'v1', 'key2': 'v2', 'key3': 'null', 'key4': 'null'}, {'__date__': 'null', '__line__': 'null', '__sls_query_parameter__': 'null', '__source__': 'source1', '__time__': '${TIME-ONE-HOUR-AGO}', '__topic__': 'topic1', 'key1': 'null', 'key2': 'null', 'key3': 'v3', 'key4': 'v4'}]


aliyun log get_histograms --request="{\"project\": \"dlq-test-cli-${RANDOM_NUMBER}\", \"logstore\": \"logstore1\", \"topic\": \"topic1\", \"fromTime\": \"${TIME-ONE-HOUR-AGO}\", \"toTime\": \"${TIME-NOW}\"}"


aliyun log pull_logs --project_name="dlq-test-cli-${RANDOM_NUMBER}" --logstore_name="logstore1" --shard_id=0 --cursor="MTUxMDMwODcyMDA5NjE0Mzg1MQ==" --count="10" --end_cursor="MTUxMDMwODcyMDA5NjE0Mzg1MQ==" --compress="false"


#test compression
aliyun log pull_logs --project_name="dlq-test-cli-${RANDOM_NUMBER}" --logstore_name="logstore1" --shard_id=0 --cursor="MTUxMDMwODcyMDA5NjE0Mzg1MQ==" --count="10" --end_cursor="MTUxMDMwODcyMDA5NjE0Mzg1MQ==" --compress="true"



aliyun log list_consumer_group --project="dlq-test-cli-${RANDOM_NUMBER}" --logstore="logstore1"
> [{'name': 'consumer_group1', 'timeout': 20, 'order': True}]
> [{'name': 'consumer_group1', 'order': True, 'timeout': 20}]

aliyun log list_logstore --project_name="dlq-test-cli-${RANDOM_NUMBER}" --logstore_name_pattern="testdata1" --offset=0 --size=100
> {'count': 3, 'logstores': ['logstore3', 'logstore1', 'logstore2'], 'total': 3}
Expand All @@ -153,14 +144,16 @@ aliyun log list_logtail_config --project_name="dlq-test-cli-${RANDOM_NUMBER}" --
> {'configs': ['config_name1', 'config_name2'], 'count': 2, 'total': 2}

aliyun log list_machine_group --project_name="dlq-test-cli-${RANDOM_NUMBER}" --offset=0 --size=100
> {'machinegroups': ['group_name1', 'group_name2'], 'count': 2, 'total': 2}
> {'count': 2, 'machinegroups': ['group_name1', 'group_name2'], 'total': 2}

aliyun log list_machines --project_name="dlq-test-cli-${RANDOM_NUMBER}" --group_name="group_name1" --offset=0 --size=100
> {'machines': [], 'count': 0, 'total': 0}
> {'count': 0, 'machines': [], 'total': 0}

aliyun log list_shards --project_name="dlq-test-cli-${RANDOM_NUMBER}" --logstore_name="logstore2"
> {'shardID': 0, 'status': 'readwrite', 'inclusiveBeginKey': '00000000000000000000000000000000', 'exclusiveEndKey': '80000000000000000000000000000000'
> {'shardID': 1, 'status': 'readwrite', 'inclusiveBeginKey': '80000000000000000000000000000000', 'exclusiveEndKey': 'ffffffffffffffffffffffffffffffff'
#> [{'shardID': 0, 'status': 'readwrite', 'inclusiveBeginKey': '00000000000000000000000000000000',
#> 'exclusiveEndKey': '80000000000000000000000000000000'}, {'shardID': 1, 'status': 'readwrite', 'inclusiveBeginKey': '80000000000000000000000000000000',
#> 'exclusiveEndKey': 'ffffffffffffffffffffffffffffffff'}]


aliyun log list_shipper --project_name="dlq-test-cli-${RANDOM_NUMBER}" --logstore_name="logstore2"
> {'count': 0, 'shipper': [], 'total': 0}
Expand All @@ -173,14 +166,16 @@ aliyun log list_topics --request="{\"project\":\"dlq-test-cli-${RANDOM_NUMBER}\"


aliyun log merge_shard --project_name="dlq-test-cli-${RANDOM_NUMBER}" --logstore_name="logstore2" --shardId="0"
> {'shardID': 2, 'status': 'readwrite', 'inclusiveBeginKey': '00000000000000000000000000000000', 'exclusiveEndKey': 'ffffffffffffffffffffffffffffffff'
> {'shardID': 0, 'status': 'readonly', 'inclusiveBeginKey': '00000000000000000000000000000000', 'exclusiveEndKey': '80000000000000000000000000000000'
> {'shardID': 1, 'status': 'readonly', 'inclusiveBeginKey': '80000000000000000000000000000000', 'exclusiveEndKey': 'ffffffffffffffffffffffffffffffff'
#> [{'shardID': 2, 'status': 'readwrite', 'inclusiveBeginKey': '00000000000000000000000000000000',
#> 'exclusiveEndKey': 'ffffffffffffffffffffffffffffffff'}, {'shardID': 0, 'status': 'readonly', 'inclusiveBeginKey': '00000000000000000000000000000000',
#> 'exclusiveEndKey': '80000000000000000000000000000000'}, {'shardID': 1, 'status': 'readonly', 'inclusiveBeginKey': '80000000000000000000000000000000',
#> 'exclusiveEndKey': 'ffffffffffffffffffffffffffffffff'}]

aliyun log split_shard --project_name="dlq-test-cli-${RANDOM_NUMBER}" --logstore_name="logstore3" --shardId="0" --split_hash="40000000000000000000000000000000"
> {'shardID': 0, 'status': 'readonly', 'inclusiveBeginKey': '00000000000000000000000000000000', 'exclusiveEndKey': '80000000000000000000000000000000'
> {'shardID': 2, 'status': 'readwrite', 'inclusiveBeginKey': '00000000000000000000000000000000', 'exclusiveEndKey': '40000000000000000000000000000000'
> {'shardID': 3, 'status': 'readwrite', 'inclusiveBeginKey': '40000000000000000000000000000000', 'exclusiveEndKey': '80000000000000000000000000000000'
#> [{'shardID': 0, 'status': 'readonly', 'inclusiveBeginKey': '00000000000000000000000000000000',
#> 'exclusiveEndKey': '80000000000000000000000000000000'}, {'shardID': 2, 'status': 'readwrite', 'inclusiveBeginKey': '00000000000000000000000000000000',
#> 'exclusiveEndKey': '40000000000000000000000000000000'}, {'shardID': 3, 'status': 'readwrite', 'inclusiveBeginKey': '40000000000000000000000000000000',
#> 'exclusiveEndKey': '80000000000000000000000000000000'}]

# check-point
aliyun log update_check_point --project="dlq-test-cli-${RANDOM_NUMBER}" --logstore="logstore1" --consumer_group="consumer_group1" --shard="0" --check_point="MTUxMDMwODcyMDA5NjE0Mzg1MQ==" --consumer="testdata1" --force_success="true"
Expand All @@ -204,7 +199,8 @@ aliyun log retry_shipper_tasks --project_name="dlq-test-cli-${RANDOM_NUMBER}" --
> {"errorCode":"ParameterInvalid","errorMessage":"shipperName shipper1 not exist"}

aliyun log get_check_point --project="dlq-test-cli-${RANDOM_NUMBER}" --logstore="logstore1" --consumer_group="consumer_group1" --shard="0"
> 'shard': 0, 'checkpoint': 'MTU
> [{'checkpoint': 'MTU


# copy project
aliyun log copy_project --from_project="dlq-test-cli-${RANDOM_NUMBER}" --to_project="dlq-test-cli-${RANDOM_NUMBER}-copied"
Expand Down Expand Up @@ -259,8 +255,6 @@ sleep 40

aliyun log delete_project --project_name="dlq-test-cli-${RANDOM_NUMBER}-copied"



aliyun configure access_id access_key endpoint123

cat ~/.aliyunlogcli
Expand All @@ -282,3 +276,6 @@ cat ~/.aliyunlogcli
> access-key = b
> region-endpoint = c




0 comments on commit d775904

Please sign in to comment.