diff --git a/aliyun/log/logtail_config_detail.py b/aliyun/log/logtail_config_detail.py
index 652b0b4b..0861f131 100755
--- a/aliyun/log/logtail_config_detail.py
+++ b/aliyun/log/logtail_config_detail.py
@@ -13,7 +13,7 @@ from .logexception import LogException
 import logging
 
-__all__ = ['SeperatorFileConfigDetail', 'SimpleFileConfigDetail', 'FullRegFileConfigDetail',
+__all__ = ['PluginConfigDetail', 'SeperatorFileConfigDetail', 'SimpleFileConfigDetail', 'FullRegFileConfigDetail',
            'JsonFileConfigDetail', 'ApsaraFileConfigDetail', 'SyslogConfigDetail',
            'LogtailConfigGenerator', 'CommonRegLogConfigDetail']
 
 
@@ -430,6 +430,40 @@ def to_json(self):
         return self.value
 
 
+class PluginConfigDetail(ConfigDetailBase):
+    """The logtail config for plugin mode (e.g. docker stdout, MySQL binlog, MySQL raw SQL, nginx status)
+
+    :type logstoreName: string
+    :param logstoreName: the logstore name
+
+    :type configName: string
+    :param configName: the config name
+
+    :type plugin: dict
+    :param plugin: the plugin detail, e.g. {"inputs": [{"type": "service_docker_stdout", "detail": {"IncludeLabel": {}, "ExcludeLabel": {}}}]}
+
+    :type createTime: int
+    :param createTime: timestamp of creation, only useful when getting data from REST
+
+    :type modifyTime: int
+    :param modifyTime: timestamp of last modification, only useful when getting data from REST
+
+    :type extended_items: dict
+    :param extended_items: extended items, merged into inputDetail
+
+    """
+    MANDATORY_FIELDS_DETAIL = ConfigDetailBase.MANDATORY_FIELDS_DETAIL + ['plugin']
+    DEFAULT_DETAIL_FIELDS = ConfigDetailBase.DEFAULT_DETAIL_FIELDS
+
+    def __init__(self, logstoreName, configName, plugin, **extended_items):
+        input_detail = {
+            "plugin": plugin
+        }
+        input_detail.update(extended_items)
+
+        ConfigDetailBase.__init__(self, logstoreName, configName, "plugin", **input_detail)
+
+
 class SeperatorFileConfigDetail(ConfigDetailBase):
     """The logtail config for separator mode
 
@@ -954,10 +1018,16 @@ def generate_full_regex_config(json_value):
     def generate_apsara_config(json_value):
         return ApsaraFileConfigDetail.from_json(json_value)
 
+    @staticmethod
+    def generate_plugin_config(json_value):
+        return PluginConfigDetail.from_json(json_value)
+
     @staticmethod
     def generate_config(json_value):
         input_type = json_value.get("inputType", "")
-        if input_type == LogtailInputType.SYSLOG.value:
+        if input_type == LogtailInputType.PLUGIN.value:
+            return LogtailConfigGenerator.generate_plugin_config(json_value)
+        elif input_type == LogtailInputType.SYSLOG.value:
             return LogtailConfigGenerator.generate_syslog_config(json_value)
         elif input_type == LogtailInputType.FILE.value:
             log_type = json_value["inputDetail"].get("logType", "")
diff --git a/doc/source/api.rst b/doc/source/api.rst
index b43f1227..2910f0e8 100644
--- a/doc/source/api.rst
+++ b/doc/source/api.rst
@@ -26,6 +26,7 @@ Request and Config Class
    ListLogstoresRequest
    PutLogsRequest
    LogtailConfigGenerator
+   PluginConfigDetail
    SeperatorFileConfigDetail
    SimpleFileConfigDetail
    FullRegFileConfigDetail
@@ -215,6 +216,7 @@ Logs
    put_logs
    pull_logs
    pull_log
+   pull_log_dump
    get_log
    get_logs
    get_log_all
@@ -291,6 +293,7 @@ Definitions
 .. autoclass:: ListLogstoresRequest
 .. autoclass:: LogtailConfigGenerator
    :members:
+.. autoclass:: PluginConfigDetail
 .. autoclass:: SeperatorFileConfigDetail
 .. autoclass:: SimpleFileConfigDetail
 .. autoclass:: FullRegFileConfigDetail
diff --git a/tests/integration_test/data/docker-stdout-config.json b/tests/integration_test/data/docker-stdout-config.json
new file mode 100644
index 00000000..df046a3b
--- /dev/null
+++ b/tests/integration_test/data/docker-stdout-config.json
@@ -0,0 +1,20 @@
+{
+  "configName": "docker-stdout-config",
+  "inputDetail": {
+    "plugin": {
+      "inputs": [
+        {
+          "detail": {
+            "ExcludeLabel": {},
+            "IncludeLabel": {}
+          },
+          "type": "service_docker_stdout"
+        }
+      ]
+    }
+  },
+  "inputType": "plugin",
+  "outputDetail": {
+    "logstoreName": "logstore"
+  }
+}
diff --git a/tests/integration_test/data/json_4_docker.json b/tests/integration_test/data/json_4_docker.json
new file mode 100644
index 00000000..73ddea91
--- /dev/null
+++ b/tests/integration_test/data/json_4_docker.json
@@ -0,0 +1,49 @@
+{
+  "configName": "json_4_docker",
+  "inputDetail": {
+    "dockerExcludeLabel": {
+      "k2": "v2"
+    },
+    "dockerFile": true,
+    "dockerIncludeLabel": {
+      "k1": "v1"
+    },
+
+    "logType": "json_log",
+    "filePattern": "test.log",
+    "logPath": "/json_1",
+    "timeFormat": "%Y-%M-%D",
+    "timeKey": "my_key_time",
+
+    "localStorage": false,
+    "enableRawLog": true,
+    "topicFormat": "(file_reg_for_topic).*",
+    "fileEncoding": "gbk",
+    "maxDepth": 200,
+    "preserve": false,
+    "preserveDepth": 3,
+    "filterKey": [
+      "filter_reg1",
+      "filter_reg2"
+    ],
+    "filterRegex": [
+      "`12[];',./~!@#$%^&*(){}:\"<>?",
+      "`12[];',./~!@#$%^&*(){}:\"<>?"
+    ],
+
+    "adjustTimezone": false,
+    "delayAlarmBytes": 0,
+    "discardNonUtf8": false,
+    "discardUnmatch": true,
+    "enableTag": false,
+    "maxSendRate": -1,
+    "mergeType": "topic",
+    "shardHashKey": [],
+    "tailExisted": false
+
+  },
+  "inputType": "file",
+  "outputDetail": {
+    "logstoreName": "logstore"
+  }
+}
\ No newline at end of file
diff --git a/tests/integration_test/data/mysql-binlog-config.json b/tests/integration_test/data/mysql-binlog-config.json
new file mode 100644
index 00000000..bf082764
--- /dev/null
+++ b/tests/integration_test/data/mysql-binlog-config.json
@@ -0,0 +1,29 @@
+{
+  "configName": "mysql-binlog-config",
+  "inputDetail": {
+    "plugin": {
+      "inputs": [
+        {
+          "detail": {
+            "ExcludeTables": [
+              "mysql\\..*"
+            ],
+            "Host": "************.mysql.rds.aliyuncs.com",
+            "IncludeTables": [
+              ".*\\..*"
+            ],
+            "Password": "*******",
+            "ServerID": 32355,
+            "TextToString": true,
+            "User": "********"
+          },
+          "type": "service_canal"
+        }
+      ]
+    }
+  },
+  "inputType": "plugin",
+  "outputDetail": {
+    "logstoreName": "logstore"
+  }
+}
diff --git a/tests/integration_test/data/mysql-rawsql-config.json b/tests/integration_test/data/mysql-rawsql-config.json
new file mode 100644
index 00000000..b0cc599f
--- /dev/null
+++ b/tests/integration_test/data/mysql-rawsql-config.json
@@ -0,0 +1,31 @@
+{
+  "configName": "mysql-rawsql-config",
+  "inputDetail": {
+    "plugin": {
+      "inputs": [
+        {
+          "detail": {
+            "Address": "************.mysql.rds.aliyuncs.com",
+            "CheckPoint": true,
+            "CheckPointColumn": "time",
+            "CheckPointColumnType": "time",
+            "CheckPointSavePerPage": true,
+            "CheckPointStart": "2018-01-01 00:00:00",
+            "DataBase": "****",
+            "IntervalMs": 60000,
+            "Limit": true,
+            "PageSize": 100,
+            "Password": "*******",
+            "StateMent": "select * from db.VersionOs where time > ?",
+            "User": "****"
+          },
+          "type": "service_mysql"
+        }
+      ]
+    }
+  },
+  "inputType": "plugin",
+  "outputDetail": {
+    "logstoreName": "logstore"
+  }
+}
diff --git a/tests/integration_test/data/nginx-status-config.json b/tests/integration_test/data/nginx-status-config.json
new file mode 100644
index 00000000..135b0b04
--- /dev/null
+++ b/tests/integration_test/data/nginx-status-config.json
@@ -0,0 +1,45 @@
+{
+  "configName": "nginx-status-config",
+  "inputDetail": {
+    "plugin": {
+      "inputs": [
+        {
+          "detail": {
+            "Addresses": [
+              "http://**********/****"
+            ],
+            "IncludeBody": true,
+            "IntervalMs": 10000
+          },
+          "type": "metric_http"
+        }
+      ],
+      "processors": [
+        {
+          "detail": {
+            "FullMatch": true,
+            "KeepSource": false,
+            "Keys": [
+              "connection",
+              "accepts",
+              "handled",
+              "requests",
+              "reading",
+              "writing",
+              "waiting"
+            ],
+            "NoKeyError": true,
+            "NoMatchError": true,
+            "Regex": "Active connections: (\\d+)\\s+server accepts handled requests\\s+(\\d+)\\s+(\\d+)\\s+(\\d+)\\s+Reading: (\\d+) Writing: (\\d+) Waiting: (\\d+)[\\s\\S]*",
+            "SourceKey": "content"
+          },
+          "type": "processor_regex"
+        }
+      ]
+    }
+  },
+  "inputType": "plugin",
+  "outputDetail": {
+    "logstoreName": "logstore"
+  }
+}
diff --git a/tests/integration_test/data/reg_4_docker.json b/tests/integration_test/data/reg_4_docker.json
new file mode 100644
index 00000000..2c6fa482
--- /dev/null
+++ b/tests/integration_test/data/reg_4_docker.json
@@ -0,0 +1,49 @@
+{
+  "configName": "reg_4_docker",
+  "inputDetail": {
+    "dockerExcludeLabel": {
+      "k2": "v2"
+    },
+    "dockerFile": true,
+    "dockerIncludeLabel": {
+      "k1": "v1"
+    },
+
+    "logType": "json_log",
+    "filePattern": "test.log",
+    "logPath": "/json_1",
+    "timeFormat": "%Y-%M-%D",
+    "timeKey": "my_key_time",
+
+    "localStorage": false,
+    "enableRawLog": true,
+    "topicFormat": "(file_reg_for_topic).*",
+    "fileEncoding": "gbk",
+    "maxDepth": 200,
+    "preserve": false,
+    "preserveDepth": 3,
+    "filterKey": [
+      "filter_reg1",
+      "filter_reg2"
+    ],
+    "filterRegex": [
+      "`12[];',./~!@#$%^&*(){}:\"<>?",
+      "`12[];',./~!@#$%^&*(){}:\"<>?"
+    ],
+
+    "adjustTimezone": false,
+    "delayAlarmBytes": 0,
+    "discardNonUtf8": false,
+    "discardUnmatch": true,
+    "enableTag": false,
+    "maxSendRate": -1,
+    "mergeType": "topic",
+    "shardHashKey": [],
+    "tailExisted": false
+
+  },
+  "inputType": "file",
+  "outputDetail": {
+    "logstoreName": "logstore"
+  }
+}
\ No newline at end of file
diff --git a/tests/integration_test/data/sep_4_docker.json b/tests/integration_test/data/sep_4_docker.json
new file mode 100644
index 00000000..d6841a36
--- /dev/null
+++ b/tests/integration_test/data/sep_4_docker.json
@@ -0,0 +1,58 @@
+{
+  "configName": "sep_4_docker",
+  "logSample": "\u8fd9\u662f||\u6d4b\u8bd5||\u7528\u4f8b||\u54c8\u54c8",
+  "inputDetail": {
+    "dockerExcludeLabel": {
+      "k2": "v2"
+    },
+    "dockerFile": true,
+    "dockerIncludeLabel": {
+      "k1": "v1"
+    },
+
+    "logType": "delimiter_log",
+    "logPath": "/user",
+    "filePattern": "test.log",
+
+    "separator": "||",
+    "key": [
+      "\u8fd9\u662f",
+      "\u6d4b\u8bd5",
+      "\u7528\u4f8b",
+      "\u54c8\u54c8"
+    ],
+    "timeFormat": "2017-1-1",
+    "timeKey": "\u8fd9\u662f",
+
+    "localStorage": false,
+    "enableRawLog": true,
+    "topicFormat": "(file_reg_for_topic).*",
+    "fileEncoding": "gbk",
+    "maxDepth": 200,
+    "preserve": false,
+    "preserveDepth": 3,
+    "filterKey": [
+      "filter_reg1",
+      "filter_reg2"
+    ],
+    "filterRegex": [
+      "`12[];',./~!@#$%^&*(){}:\"<>?",
+      "`12[];',./~!@#$%^&*(){}:\"<>?"
+    ],
+
+    "adjustTimezone": false,
+    "delayAlarmBytes": 0,
+    "discardNonUtf8": false,
+    "discardUnmatch": true,
+    "enableTag": false,
+    "maxSendRate": -1,
+    "mergeType": "topic",
+    "shardHashKey": [],
+    "tailExisted": false
+
+  },
+  "inputType": "file",
+  "outputDetail": {
+    "logstoreName": "logstore"
+  }
+}
\ No newline at end of file
diff --git a/tests/integration_test/data/simple_4_docker.json b/tests/integration_test/data/simple_4_docker.json
new file mode 100644
index 00000000..d38371a7
--- /dev/null
+++ b/tests/integration_test/data/simple_4_docker.json
@@ -0,0 +1,46 @@
+{
+  "configName": "simple_4_docker",
+  "inputDetail": {
+    "dockerExcludeLabel": {
+      "k2": "v2"
+    },
+    "dockerFile": true,
+    "dockerIncludeLabel": {
+      "k1": "v1"
+    },
+
+    "logType": "common_reg_log",
+    "logPath": "/user",
+    "filePattern": "test2.log",
+
+    "localStorage": false,
+    "enableRawLog": true,
+    "topicFormat": "(file_reg_for_topic).*",
+    "fileEncoding": "gbk",
+    "maxDepth": 200,
+    "preserve": false,
+    "preserveDepth": 3,
+    "filterKey": [
+      "filter_reg1",
+      "filter_reg2"
+    ],
+    "filterRegex": [
+      "`12[];',./~!@#$%^&*(){}:\"<>?",
+      "`12[];',./~!@#$%^&*(){}:\"<>?"
+    ],
+
+    "adjustTimezone": false,
+    "delayAlarmBytes": 0,
+    "discardNonUtf8": false,
+    "discardUnmatch": true,
+    "enableTag": false,
+    "maxSendRate": -1,
+    "mergeType": "topic",
+    "shardHashKey": [],
+    "tailExisted": false
+  },
+  "inputType": "file",
+  "outputDetail": {
+    "logstoreName": "logstore"
+  }
+}
\ No newline at end of file
diff --git a/tests/integration_test/test_logtail_config.py b/tests/integration_test/test_logtail_config.py
index 78e7d05f..831e7733 100755
--- a/tests/integration_test/test_logtail_config.py
+++ b/tests/integration_test/test_logtail_config.py
@@ -39,13 +39,15 @@ def clean_project(client, project):
 def test_logtail_config(client, project):
     dir_path = os.sep.join([os.path.dirname(__file__), "data"])
     file_names = [
-        'simple_1', 'simple_2', 'simple_3',
+        'simple_1', 'simple_2', 'simple_3', 'simple_4_docker',
         'feitian_1', 'feitian_2',
-        'json_1', 'json_2', 'json_3',
+        'json_1', 'json_2', 'json_3', 'json_4_docker',
         'ngnix_1',
-        'reg_1', 'reg_2', 'reg_3',
-        'sep_1', 'sep_2', 'sep_3',
-        'syslog_1'
+        'reg_1', 'reg_2', 'reg_3', 'reg_4_docker',
+        'sep_1', 'sep_2', 'sep_3', 'sep_4_docker',
+        'syslog_1',
+        'docker-stdout-config', 'mysql-binlog-config',
+        'mysql-rawsql-config', 'nginx-status-config'
     ]
 
     for file_name in file_names:
@@ -63,6 +65,14 @@ def test_logtail_config(client, project):
         res = client.get_logtail_config(project, config_name)
         res.log_print()
 
+    for file_name in file_names:
+        json_path = os.sep.join([dir_path, file_name + '.json'])
+        with open(json_path, "r") as f:
+            json_value = json.load(f)
+        detail = LogtailConfigGenerator.generate_config(json_value)
+        print("****update config", file_name)
+        res = client.update_logtail_config(project, detail)
+        res.log_print()
 
 def main():
     endpoint = os.environ.get('ALIYUN_LOG_SAMPLE_ENDPOINT', '')
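
Usage note (reviewer sketch, not part of the patch): the snippet below shows how the new PluginConfigDetail and the plugin branch of LogtailConfigGenerator.generate_config might be exercised. The endpoint, access key, project name and file path are placeholders, and it assumes the target project and logstore already exist.

import json

from aliyun.log import LogClient
from aliyun.log.logtail_config_detail import LogtailConfigGenerator, PluginConfigDetail

# Placeholder endpoint, credentials and project - replace with real values.
client = LogClient("cn-hangzhou.log.aliyuncs.com", "<access_key_id>", "<access_key_secret>")
project = "<your-project>"

# Build a plugin-type config in code, mirroring data/docker-stdout-config.json.
plugin = {
    "inputs": [
        {
            "type": "service_docker_stdout",
            "detail": {"IncludeLabel": {}, "ExcludeLabel": {}}
        }
    ]
}
detail = PluginConfigDetail("logstore", "docker-stdout-config", plugin)
client.create_logtail_config(project, detail)

# Or load a stored JSON config and let the generator pick the matching detail
# class (inputType "plugin" now routes to PluginConfigDetail), as the updated
# integration test does.
with open("tests/integration_test/data/mysql-binlog-config.json") as f:
    detail = LogtailConfigGenerator.generate_config(json.load(f))
client.update_logtail_config(project, detail)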