Skip to content

Commit

Permalink
fix issue #67 to support logtail plug-in config CRUD
Browse files Browse the repository at this point in the history
- add test cases / API reference as well
  • Loading branch information
wjo1212 committed Mar 19, 2018
1 parent cf98b8f commit f99ced0
Show file tree
Hide file tree
Showing 11 changed files with 417 additions and 7 deletions.
74 changes: 72 additions & 2 deletions aliyun/log/logtail_config_detail.py
Expand Up @@ -13,7 +13,7 @@
from .logexception import LogException
import logging

__all__ = ['SeperatorFileConfigDetail', 'SimpleFileConfigDetail', 'FullRegFileConfigDetail',
__all__ = ['PluginConfigDetail', 'SeperatorFileConfigDetail', 'SimpleFileConfigDetail', 'FullRegFileConfigDetail',
'JsonFileConfigDetail', 'ApsaraFileConfigDetail', 'SyslogConfigDetail',
'LogtailConfigGenerator', 'CommonRegLogConfigDetail']

Expand Down Expand Up @@ -430,6 +430,70 @@ def to_json(self):
return self.value


class PluginConfigDetail(ConfigDetailBase):
    """The logtail config for plugin mode (``inputType == "plugin"``).

    The ``plugin`` dict is passed through verbatim as the ``plugin`` key of
    ``inputDetail``; it typically holds ``inputs`` (and optionally
    ``processors``) definitions such as ``service_docker_stdout``,
    ``service_canal`` (mysql binlog), ``service_mysql`` or ``metric_http``.

    :type logstoreName: string
    :param logstoreName: the logstore name
    :type configName: string
    :param configName: the config name
    :type plugin: dict
    :param plugin: the plugin detail config, e.g. ``{"inputs": [{"type": "service_docker_stdout", "detail": {...}}]}``
    :type extended_items: dict
    :param extended_items: extra key-value pairs merged into ``inputDetail`` (e.g. values returned by the REST API such as ``createTime``/``modifyTime``)
    """
    # "plugin" must be present in inputDetail for this config type.
    MANDATORY_FIELDS_DETAIL = ConfigDetailBase.MANDATORY_FIELDS_DETAIL + ['plugin']
    DEFAULT_DETAIL_FIELDS = ConfigDetailBase.DEFAULT_DETAIL_FIELDS

    def __init__(self, logstoreName, configName, plugin, **extended_items):
        # inputDetail is the plugin definition plus any extra items
        # (extended_items may override nothing here except extra keys).
        input_detail = {
            "plugin": plugin
        }
        input_detail.update(extended_items)

        # third positional argument is the logtail input type ("plugin")
        ConfigDetailBase.__init__(self, logstoreName, configName, "plugin", **input_detail)


class SeperatorFileConfigDetail(ConfigDetailBase):
"""The logtail config for separator mode
Expand Down Expand Up @@ -954,10 +1018,16 @@ def generate_full_regex_config(json_value):
def generate_apsara_config(json_value):
return ApsaraFileConfigDetail.from_json(json_value)

@staticmethod
def generate_plugin_config(json_value):
    """Build a :class:`PluginConfigDetail` from a logtail config JSON dict.

    :type json_value: dict
    :param json_value: the config as returned by the GetLogtailConfig API
        (must contain ``inputType == "plugin"``)

    :return: PluginConfigDetail
    """
    return PluginConfigDetail.from_json(json_value)

@staticmethod
def generate_config(json_value):
input_type = json_value.get("inputType", "")
if input_type == LogtailInputType.SYSLOG.value:
if input_type == LogtailInputType.PLUGIN.value:
return LogtailConfigGenerator.generate_plugin_config(json_value)
elif input_type == LogtailInputType.SYSLOG.value:
return LogtailConfigGenerator.generate_syslog_config(json_value)
elif input_type == LogtailInputType.FILE.value:
log_type = json_value["inputDetail"].get("logType", "")
Expand Down
3 changes: 3 additions & 0 deletions doc/source/api.rst
Expand Up @@ -26,6 +26,7 @@ Request and Config Class
ListLogstoresRequest
PutLogsRequest
LogtailConfigGenerator
PluginConfigDetail
SeperatorFileConfigDetail
SimpleFileConfigDetail
FullRegFileConfigDetail
Expand Down Expand Up @@ -215,6 +216,7 @@ Logs
put_logs
pull_logs
pull_log
pull_log_dump
get_log
get_logs
get_log_all
Expand Down Expand Up @@ -291,6 +293,7 @@ Definitions
.. autoclass:: ListLogstoresRequest
.. autoclass:: LogtailConfigGenerator
:members:
.. autoclass:: PluginConfigDetail
.. autoclass:: SeperatorFileConfigDetail
.. autoclass:: SimpleFileConfigDetail
.. autoclass:: FullRegFileConfigDetail
Expand Down
20 changes: 20 additions & 0 deletions tests/integration_test/data/docker-stdout-config.json
@@ -0,0 +1,20 @@
{
"configName": "docker-stdout-config",
"inputDetail": {
"plugin": {
"inputs": [
{
"detail": {
"ExcludeLabel": {},
"IncludeLabel": {}
},
"type": "service_docker_stdout"
}
]
}
},
"inputType": "plugin",
"outputDetail": {
"logstoreName": "logstore"
}
}
49 changes: 49 additions & 0 deletions tests/integration_test/data/json_4_docker.json
@@ -0,0 +1,49 @@
{
"configName": "json_4_docker",
"inputDetail": {
"dockerExcludeLabel": {
"k2": "v2"
},
"dockerFile": true,
"dockerIncludeLabel": {
"k1": "v1"
},

"logType": "json_log",
"filePattern": "test.log",
"logPath": "/json_1",
"timeFormat": "%Y-%M-%D",
"timeKey": "my_key_time",

"localStorage": false,
"enableRawLog": true,
"topicFormat": "(file_reg_for_topic).*",
"fileEncoding": "gbk",
"maxDepth": 200,
"preserve": false,
"preserveDepth": 3,
"filterKey": [
"filter_reg1",
"filter_reg2"
],
"filterRegex": [
"`12[];',./~!@#$%^&*(){}:\"<>?",
"`12[];',./~!@#$%^&*(){}:\"<>?"
],

"adjustTimezone": false,
"delayAlarmBytes": 0,
"discardNonUtf8": false,
"discardUnmatch": true,
"enableTag": false,
"maxSendRate": -1,
"mergeType": "topic",
"shardHashKey": [],
"tailExisted": false

},
"inputType": "file",
"outputDetail": {
"logstoreName": "logstore"
}
}
29 changes: 29 additions & 0 deletions tests/integration_test/data/mysql-binlog-config.json
@@ -0,0 +1,29 @@
{
"configName": "mysql-binlog-config",
"inputDetail": {
"plugin": {
"inputs": [
{
"detail": {
"ExcludeTables": [
"mysql\\..*"
],
"Host": "************.mysql.rds.aliyuncs.com",
"IncludeTables": [
".*\\..*"
],
"Password": "*******",
"ServerID": 32355,
"TextToString": true,
"User": "********"
},
"type": "service_canal"
}
]
}
},
"inputType": "plugin",
"outputDetail": {
"logstoreName": "logstore"
}
}
31 changes: 31 additions & 0 deletions tests/integration_test/data/mysql-rawsql-config.json
@@ -0,0 +1,31 @@
{
"configName": "mysql-rawsql-config",
"inputDetail": {
"plugin": {
"inputs": [
{
"detail": {
"Address": "************.mysql.rds.aliyuncs.com",
"CheckPoint": true,
"CheckPointColumn": "time",
"CheckPointColumnType": "time",
"CheckPointSavePerPage": true,
"CheckPointStart": "2018-01-01 00:00:00",
"DataBase": "****",
"IntervalMs": 60000,
"Limit": true,
"PageSize": 100,
"Password": "*******",
"StateMent": "select * from db.VersionOs where time > ?",
"User": "****"
},
"type": "service_mysql"
}
]
}
},
"inputType": "plugin",
"outputDetail": {
"logstoreName": "logstore"
}
}
45 changes: 45 additions & 0 deletions tests/integration_test/data/nginx-status-config.json
@@ -0,0 +1,45 @@
{
"configName": "nginx-status-config",
"inputDetail": {
"plugin": {
"inputs": [
{
"detail": {
"Addresses": [
"http://**********/****"
],
"IncludeBody": true,
"IntervalMs": 10000
},
"type": "metric_http"
}
],
"processors": [
{
"detail": {
"FullMatch": true,
"KeepSource": false,
"Keys": [
"connection",
"accepts",
"handled",
"requests",
"reading",
"writing",
"waiting"
],
"NoKeyError": true,
"NoMatchError": true,
"Regex": "Active connections: (\\d+)\\s+server accepts handled requests\\s+(\\d+)\\s+(\\d+)\\s+(\\d+)\\s+Reading: (\\d+) Writing: (\\d+) Waiting: (\\d+)[\\s\\S]*",
"SourceKey": "content"
},
"type": "processor_regex"
}
]
}
},
"inputType": "plugin",
"outputDetail": {
"logstoreName": "logstore"
}
}
49 changes: 49 additions & 0 deletions tests/integration_test/data/reg_4_docker.json
@@ -0,0 +1,49 @@
{
"configName": "reg_4_docker",
"inputDetail": {
"dockerExcludeLabel": {
"k2": "v2"
},
"dockerFile": true,
"dockerIncludeLabel": {
"k1": "v1"
},

"logType": "json_log",
"filePattern": "test.log",
"logPath": "/json_1",
"timeFormat": "%Y-%M-%D",
"timeKey": "my_key_time",

"localStorage": false,
"enableRawLog": true,
"topicFormat": "(file_reg_for_topic).*",
"fileEncoding": "gbk",
"maxDepth": 200,
"preserve": false,
"preserveDepth": 3,
"filterKey": [
"filter_reg1",
"filter_reg2"
],
"filterRegex": [
"`12[];',./~!@#$%^&*(){}:\"<>?",
"`12[];',./~!@#$%^&*(){}:\"<>?"
],

"adjustTimezone": false,
"delayAlarmBytes": 0,
"discardNonUtf8": false,
"discardUnmatch": true,
"enableTag": false,
"maxSendRate": -1,
"mergeType": "topic",
"shardHashKey": [],
"tailExisted": false

},
"inputType": "file",
"outputDetail": {
"logstoreName": "logstore"
}
}

0 comments on commit f99ced0

Please sign in to comment.