From c555ece9797aad86da075b4984a141421a14632a Mon Sep 17 00:00:00 2001 From: "zhanglei.sec" Date: Thu, 24 Dec 2020 21:02:16 +0800 Subject: [PATCH 1/8] Update agent. --- agent/CODE_OF_CONDUCT.md | 76 ++ agent/README-zh_CN.md | 149 +++ agent/README.md | 133 +++ agent/common/common.go | 125 +++ agent/config/config.go | 130 +++ agent/driver/.gitignore | 3 + agent/driver/Cargo.toml | 34 + agent/driver/Makefile | 10 + agent/driver/README-zh_CN.md | 67 ++ agent/driver/README.md | 63 ++ agent/driver/build.rs | 372 +++++++ agent/driver/src/cache.rs | 81 ++ agent/driver/src/config.rs | 8 + agent/driver/src/main.rs | 95 ++ agent/driver/src/parser.rs | 929 +++++++++++++++++ agent/driver/src/prepare.rs | 163 +++ agent/driver/template.toml | 162 +++ agent/go.mod | 17 + agent/go.sum | 157 +++ agent/health/health.go | 174 ++++ agent/journal_watcher/.gitignore | 3 + agent/journal_watcher/Cargo.toml | 15 + agent/journal_watcher/Makefile | 6 + agent/journal_watcher/README-zh_CN.md | 34 + agent/journal_watcher/README.md | 35 + agent/journal_watcher/src/main.rs | 72 ++ agent/journal_watcher/src/watcher.rs | 191 ++++ agent/log/log.go | 36 + agent/main.go | 132 +++ agent/plugin/plugin.go | 182 ++++ agent/plugin/procotol/regist.go | 8 + agent/plugin/procotol/regist_gen.go | 160 +++ agent/plugin/procotol/regist_gen_test.go | 123 +++ agent/plugin/server.go | 162 +++ agent/rust-toolchain | 1 + agent/spec/spec.go | 10 + agent/spec/spec_gen.go | 313 ++++++ agent/spec/spec_gen_test.go | 236 +++++ agent/support/README-zh_CN.md | 47 + agent/support/README.md | 49 + agent/support/rust/flexi_logger/.gitignore | 17 + agent/support/rust/flexi_logger/.travis.yml | 36 + agent/support/rust/flexi_logger/CHANGELOG.md | 436 ++++++++ agent/support/rust/flexi_logger/Cargo.toml | 56 ++ .../support/rust/flexi_logger/LICENSE-APACHE | 201 ++++ agent/support/rust/flexi_logger/LICENSE-MIT | 25 + agent/support/rust/flexi_logger/README.md | 124 +++ .../benches/bench_reconfigurable.rs | 43 + .../flexi_logger/benches/bench_standard.rs | 45 + .../rust/flexi_logger/examples/colors.rs | 75 ++ .../rust/flexi_logger/examples/performance.rs | 51 + .../rust/flexi_logger/scripts/cleanup.rs | 46 + .../rust/flexi_logger/scripts/qualify.rs | 85 ++ .../rust/flexi_logger/scripts/qualify_fast.rs | 58 ++ .../rust/flexi_logger/src/code_examples.rs | 301 ++++++ .../rust/flexi_logger/src/deferred_now.rs | 23 + .../rust/flexi_logger/src/flexi_error.rs | 63 ++ .../rust/flexi_logger/src/flexi_logger.rs | 151 +++ .../support/rust/flexi_logger/src/formats.rs | 416 ++++++++ agent/support/rust/flexi_logger/src/lib.rs | 63 ++ .../flexi_logger/src/log_specification.rs | 927 +++++++++++++++++ agent/support/rust/flexi_logger/src/logger.rs | 936 ++++++++++++++++++ .../rust/flexi_logger/src/parameters.rs | 145 +++ .../rust/flexi_logger/src/primary_writer.rs | 251 +++++ .../src/reconfiguration_handle.rs | 157 +++ .../support/rust/flexi_logger/src/writers.rs | 105 ++ .../src/writers/file_log_writer.rs | 502 ++++++++++ .../src/writers/file_log_writer/builder.rs | 293 ++++++ .../src/writers/file_log_writer/config.rs | 45 + .../src/writers/file_log_writer/state.rs | 713 +++++++++++++ .../flexi_logger/src/writers/log_writer.rs | 49 + .../flexi_logger/src/writers/syslog_writer.rs | 321 ++++++ .../flexi_logger/tests/test_age_or_size.rs | 109 ++ .../rust/flexi_logger/tests/test_colors.rs | 16 + .../tests/test_custom_log_writer.rs | 62 ++ .../tests/test_custom_log_writer_format.rs | 78 ++ .../tests/test_default_file_and_writer.rs | 43 + .../tests/test_default_files_dir.rs | 21 + 
.../tests/test_default_files_dir_rot.rs | 17 + .../tests/test_detailed_files_rot.rs | 23 + .../test_detailed_files_rot_timestamp.rs | 24 + .../tests/test_env_logger_style.rs | 12 + .../rust/flexi_logger/tests/test_mods.rs | 66 ++ .../rust/flexi_logger/tests/test_mods_off.rs | 47 + .../flexi_logger/tests/test_multi_logger.rs | 104 ++ .../tests/test_multi_threaded_cleanup.rs | 155 +++ .../tests/test_multi_threaded_dates.rs | 158 +++ .../tests/test_multi_threaded_numbers.rs | 158 +++ .../rust/flexi_logger/tests/test_no_logger.rs | 16 + .../tests/test_opt_files_dir_dscr.rs | 24 + .../tests/test_opt_files_dir_dscr_rot.rs | 41 + .../flexi_logger/tests/test_parse_errors.rs | 80 ++ .../tests/test_reconfigure_methods.rs | 113 +++ .../rust/flexi_logger/tests/test_recursion.rs | 32 + .../rust/flexi_logger/tests/test_specfile.rs | 147 +++ .../rust/flexi_logger/tests/test_syslog.rs | 69 ++ .../flexi_logger/tests/test_textfilter.rs | 54 + .../tests/test_windows_line_ending.rs | 55 + .../flexi_logger/tests/version_numbers.rs | 6 + agent/support/rust/plugin/.gitignore | 1 + agent/support/rust/plugin/Cargo.lock | 231 +++++ agent/support/rust/plugin/Cargo.toml | 13 + agent/support/rust/plugin/src/lib.rs | 105 ++ agent/support/rust/plugin_builder/.gitignore | 1 + agent/support/rust/plugin_builder/Cargo.lock | 429 ++++++++ agent/support/rust/plugin_builder/Cargo.toml | 13 + agent/support/rust/plugin_builder/src/lib.rs | 75 ++ agent/transport/fileout/fileout.go | 48 + agent/transport/kafka/kafka.go | 43 + agent/transport/stdout/stdout.go | 30 + agent/transport/transport.go | 41 + 111 files changed, 14282 insertions(+) create mode 100644 agent/CODE_OF_CONDUCT.md create mode 100644 agent/README-zh_CN.md create mode 100644 agent/README.md create mode 100644 agent/common/common.go create mode 100644 agent/config/config.go create mode 100644 agent/driver/.gitignore create mode 100644 agent/driver/Cargo.toml create mode 100644 agent/driver/Makefile create mode 100644 agent/driver/README-zh_CN.md create mode 100644 agent/driver/README.md create mode 100644 agent/driver/build.rs create mode 100644 agent/driver/src/cache.rs create mode 100644 agent/driver/src/config.rs create mode 100644 agent/driver/src/main.rs create mode 100644 agent/driver/src/parser.rs create mode 100644 agent/driver/src/prepare.rs create mode 100644 agent/driver/template.toml create mode 100644 agent/go.mod create mode 100644 agent/go.sum create mode 100644 agent/health/health.go create mode 100644 agent/journal_watcher/.gitignore create mode 100644 agent/journal_watcher/Cargo.toml create mode 100644 agent/journal_watcher/Makefile create mode 100644 agent/journal_watcher/README-zh_CN.md create mode 100644 agent/journal_watcher/README.md create mode 100644 agent/journal_watcher/src/main.rs create mode 100644 agent/journal_watcher/src/watcher.rs create mode 100644 agent/log/log.go create mode 100644 agent/main.go create mode 100644 agent/plugin/plugin.go create mode 100644 agent/plugin/procotol/regist.go create mode 100644 agent/plugin/procotol/regist_gen.go create mode 100644 agent/plugin/procotol/regist_gen_test.go create mode 100644 agent/plugin/server.go create mode 100644 agent/rust-toolchain create mode 100644 agent/spec/spec.go create mode 100644 agent/spec/spec_gen.go create mode 100644 agent/spec/spec_gen_test.go create mode 100644 agent/support/README-zh_CN.md create mode 100644 agent/support/README.md create mode 100644 agent/support/rust/flexi_logger/.gitignore create mode 100644 agent/support/rust/flexi_logger/.travis.yml create mode 
100644 agent/support/rust/flexi_logger/CHANGELOG.md create mode 100644 agent/support/rust/flexi_logger/Cargo.toml create mode 100644 agent/support/rust/flexi_logger/LICENSE-APACHE create mode 100644 agent/support/rust/flexi_logger/LICENSE-MIT create mode 100644 agent/support/rust/flexi_logger/README.md create mode 100644 agent/support/rust/flexi_logger/benches/bench_reconfigurable.rs create mode 100644 agent/support/rust/flexi_logger/benches/bench_standard.rs create mode 100644 agent/support/rust/flexi_logger/examples/colors.rs create mode 100644 agent/support/rust/flexi_logger/examples/performance.rs create mode 100644 agent/support/rust/flexi_logger/scripts/cleanup.rs create mode 100644 agent/support/rust/flexi_logger/scripts/qualify.rs create mode 100644 agent/support/rust/flexi_logger/scripts/qualify_fast.rs create mode 100644 agent/support/rust/flexi_logger/src/code_examples.rs create mode 100644 agent/support/rust/flexi_logger/src/deferred_now.rs create mode 100644 agent/support/rust/flexi_logger/src/flexi_error.rs create mode 100644 agent/support/rust/flexi_logger/src/flexi_logger.rs create mode 100644 agent/support/rust/flexi_logger/src/formats.rs create mode 100644 agent/support/rust/flexi_logger/src/lib.rs create mode 100644 agent/support/rust/flexi_logger/src/log_specification.rs create mode 100644 agent/support/rust/flexi_logger/src/logger.rs create mode 100644 agent/support/rust/flexi_logger/src/parameters.rs create mode 100644 agent/support/rust/flexi_logger/src/primary_writer.rs create mode 100644 agent/support/rust/flexi_logger/src/reconfiguration_handle.rs create mode 100644 agent/support/rust/flexi_logger/src/writers.rs create mode 100644 agent/support/rust/flexi_logger/src/writers/file_log_writer.rs create mode 100644 agent/support/rust/flexi_logger/src/writers/file_log_writer/builder.rs create mode 100644 agent/support/rust/flexi_logger/src/writers/file_log_writer/config.rs create mode 100644 agent/support/rust/flexi_logger/src/writers/file_log_writer/state.rs create mode 100644 agent/support/rust/flexi_logger/src/writers/log_writer.rs create mode 100644 agent/support/rust/flexi_logger/src/writers/syslog_writer.rs create mode 100644 agent/support/rust/flexi_logger/tests/test_age_or_size.rs create mode 100644 agent/support/rust/flexi_logger/tests/test_colors.rs create mode 100644 agent/support/rust/flexi_logger/tests/test_custom_log_writer.rs create mode 100644 agent/support/rust/flexi_logger/tests/test_custom_log_writer_format.rs create mode 100644 agent/support/rust/flexi_logger/tests/test_default_file_and_writer.rs create mode 100644 agent/support/rust/flexi_logger/tests/test_default_files_dir.rs create mode 100644 agent/support/rust/flexi_logger/tests/test_default_files_dir_rot.rs create mode 100644 agent/support/rust/flexi_logger/tests/test_detailed_files_rot.rs create mode 100644 agent/support/rust/flexi_logger/tests/test_detailed_files_rot_timestamp.rs create mode 100644 agent/support/rust/flexi_logger/tests/test_env_logger_style.rs create mode 100644 agent/support/rust/flexi_logger/tests/test_mods.rs create mode 100644 agent/support/rust/flexi_logger/tests/test_mods_off.rs create mode 100644 agent/support/rust/flexi_logger/tests/test_multi_logger.rs create mode 100644 agent/support/rust/flexi_logger/tests/test_multi_threaded_cleanup.rs create mode 100644 agent/support/rust/flexi_logger/tests/test_multi_threaded_dates.rs create mode 100644 agent/support/rust/flexi_logger/tests/test_multi_threaded_numbers.rs create mode 100644 
agent/support/rust/flexi_logger/tests/test_no_logger.rs create mode 100644 agent/support/rust/flexi_logger/tests/test_opt_files_dir_dscr.rs create mode 100644 agent/support/rust/flexi_logger/tests/test_opt_files_dir_dscr_rot.rs create mode 100644 agent/support/rust/flexi_logger/tests/test_parse_errors.rs create mode 100644 agent/support/rust/flexi_logger/tests/test_reconfigure_methods.rs create mode 100644 agent/support/rust/flexi_logger/tests/test_recursion.rs create mode 100644 agent/support/rust/flexi_logger/tests/test_specfile.rs create mode 100644 agent/support/rust/flexi_logger/tests/test_syslog.rs create mode 100644 agent/support/rust/flexi_logger/tests/test_textfilter.rs create mode 100644 agent/support/rust/flexi_logger/tests/test_windows_line_ending.rs create mode 100644 agent/support/rust/flexi_logger/tests/version_numbers.rs create mode 100644 agent/support/rust/plugin/.gitignore create mode 100644 agent/support/rust/plugin/Cargo.lock create mode 100644 agent/support/rust/plugin/Cargo.toml create mode 100644 agent/support/rust/plugin/src/lib.rs create mode 100644 agent/support/rust/plugin_builder/.gitignore create mode 100644 agent/support/rust/plugin_builder/Cargo.lock create mode 100644 agent/support/rust/plugin_builder/Cargo.toml create mode 100644 agent/support/rust/plugin_builder/src/lib.rs create mode 100644 agent/transport/fileout/fileout.go create mode 100644 agent/transport/kafka/kafka.go create mode 100644 agent/transport/stdout/stdout.go create mode 100644 agent/transport/transport.go diff --git a/agent/CODE_OF_CONDUCT.md b/agent/CODE_OF_CONDUCT.md new file mode 100644 index 000000000..9b278a268 --- /dev/null +++ b/agent/CODE_OF_CONDUCT.md @@ -0,0 +1,76 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, sex characteristics, gender identity and expression, +level of experience, education, socio-economic status, nationality, personal +appearance, race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. 
+ +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at hids@bytedance.com. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see +https://www.contributor-covenant.org/faq diff --git a/agent/README-zh_CN.md b/agent/README-zh_CN.md new file mode 100644 index 000000000..52ca54db9 --- /dev/null +++ b/agent/README-zh_CN.md @@ -0,0 +1,149 @@ +[![License](https://img.shields.io/badge/License-Apache%20v2-blue.svg)](https://github.com/DianrongSecurity/AgentSmith-HIDS/blob/master/LICENSE) +[![Project Status: Active – The project has reached a stable, usable state and is being actively developed.](https://www.repostatus.org/badges/latest/active.svg)](https://www.repostatus.org/#active) + +[English](README.md) | 简体中文 +## 关于 AgentSmith-HIDS Agent +AgentSmith-HIDS Agent 是一个用户态的程序,主要是用来转发其他功能插件发送来的数据以及通过配置来控制其他插件。 + +AgentSmith-HIDS Agent基于Golang构建,但其他功能插件可以用不同的语言去完成([目前已经支持Rust](support/rust),下一个受到支持的语言是Golang)。 + +插件是一个具有特定功能并且可以独立配置与更新的程序。当插件向Agent注册之后,插件的资源使用情况会被受到监视,并且插件本身产生的的日志也会被转发给Agent。 + +在[driver](driver/) 与 [journal_watcher](journal_watcher/)下你可以看到两个示例插件。前者用来解析并丰富AgentSmith-HIDS Driver从内核发来的数据,后者用来监控系统日志。 + +通过Agent-Plugin的这种模式,我们可以将基础模块(例如通信与控制和资源监控等)与功能模块(例如进程监控和文件监控以及漏洞分析等)解耦,进而实现动态增减相关模块。 + +## 平台兼容性 +理论上,所有Linux下的发行版都是兼容的,但是只有Debian(包括Ubuntu)与RHEL(包括CentOS)经过了充分测试。当前,我们只在x86_64平台上进行了测试。 + +另外,为了更好的与插件兼容,建议将Agent运行在物理机或者虚拟机中,而不是容器中。 + +为了功能的完整性,你可能需要以root权限运行AgentSmith-HIDS Agent。 + +## 需要的编译环境 +* Golang 1.15(推荐) +## 快速开始 +``` +git clone https://github.com/bytedance/AgentSmith-HIDS +cd AgentSmith-HIDS/agent +go build +``` +在当前目录下,你将会看见`agent`二进制文件。 +## 参数和选项 +如果你想查看当前Agent支持的参数,请执行: +``` +./agent --help +``` +你将会看到: +``` +Usage: + agent [OPTIONS] +Application Options: + -v, --version Print agent version + --plugin= Plugin socket path (default: plugin.sock) + --log= Log file path (default: log/agent_smith.log) + --config= Config file path(.yaml) 
(default: config.yaml) + --data=[file|stdout|kafka] Set data output (default: stdout) + --file_path= If data option is file ,this option is used to set the file path (default: data.log) + --addr= If data option is kafka ,this option is used to set kafka addr + --topic= If data option is kafka ,this option is used to set kafka topic name + +Help Options: + -h, --help Show this help message + +``` +配置文件是用来控制当前运行的插件实例的。如果你只是想简单快速的开始运行Agent本身,不想开启功能插件,那么你可以直接执行`./agent`,你将会在当前终端的stdout上看到数据输出: + +``` +[{"data_type":"1001","level":"error","msg":"no such file or directory","source":"config/config.go:114","timestamp":"${current_timestamp}"}] +[{"cpu":"0.00000","data_type":"1000","distro":"${your_distro}","io":"8192","kernel_version":"${your_kernel_version}","memory":"${current_agent_memory_usage}","plugins":"[]","slab":"${current_sys_slab_usage}","timestamp":"${current_timestamp}"}] +``` +第一行的错误数据是因为配置文件没有被找到,在这里我们可以暂时忽略。 +第二行是当前Agent的心跳数据,里面的字段描述了当前Agent和当前已加载 +插件的相关信息。 +## 数据输出 +当前版本的AgentSmith-HIDS Agent更多是用于本地的测试,它不支持远程控制与配置,但是支持将数据发送到远端(通过sarama/kafka)。 + +注意:请不要用于生产环境。 +### Stdout(默认) +将所有数据输出到stdout。 + +注意:这个方式不会持久化保存任何数据,当数据发送速度过快时可能会导致当前终端运行缓慢。 +### File +将所有数据保存到特定的文件中,默认是当前Agent工作目录下的`data.log`文件。 +### Kafka +Agent将会产生一个同步生产者去发送数据到Kafka,在此之前请配置`addr`和`topic` 参数。 +### 其他方法 +你可以通过实现[transport](transport/transport.go)下的`Transport interface`来完成自定义的方法。 + +实现后请修改`main`函数,将自定义的方法设置为默认的。在未来,我们会支持gRPC数据传输。 +## 日志 +你可以通过配置`log`参数来配置Agent日志的存放位置(默认是Agent工作目录下的`log/agent_smith.log`)。 + +更加具体的日志配置,请修改`main`函数中的相应日志选项。所有等级大于等于Error的日志都将会被转发到[数据输出](#数据输出)中。 +## 配置文件 +当前,处于测试目的,我们提供了一个配置文件去控制Agent中插件的添加与删除。这将会带来较大的安全风险,所以请不要在生产环境中使用。 + +当Agent开始运行时,`config`参数中所配置的文件(默认是Agent工作目录下的`config.yaml`)将会被监视(通过inotify)。每当文件的修改事件被触发,配置文件都会被重新解析并与当前加载的Agent插件列表进行对比,进而实现对已加载插件的动态修改。请注意,不要使用类似vim/gedit等工具进行修改,因为它们[不会触发inotify的修改事件](https://stackoverflow.com/questions/13312794/inotify-dont-treat-vim-editting-as-a-modification-event)。 + +一个正确的配置文件如下所示: +``` +plugins: + - + name: exmple1 + version: 0.0.0.1 + path: /opt/plugins/exmple1 + sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 + - + name: exmple2 + version: 0.0.1.0 + path: /opt/plugins/exmple2 + sha256: 01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b +``` +其中,`name` 与 `version`需要与[插件](support/README-zh_CN.md#注册)配置中的保持一致。`path`用于查找插件的二进制文件,`sha256`用于验证启动的文件。 + +所有与插件相关的事件都可以在[日志文件](#日志)中看到。 + +## 与AgentSmith-HIDS Driver兼容运行的示例 +### 前提条件 +* [Linux Kernrl Module](../driver) (一个ko文件) +* [Driver Plugin](driver) (一个二进制文件) +* [Agent](#快速开始) (一个二进制文件) +### 选择工作目录 +在接下来的步骤中,我将会以`/etc/hids`作为Agent的工作目录: +``` +mkdir -p /etc/hids +``` +### 安装 +创建插件的工作目录并将相关文件复制到对应的目录中: +``` +cp agent /etc/hids/agent +mkdir -p /etc/hids/plugin/driver/ +cp driver /etc/hids/plugin/driver/driver +cp hids_driver.ko /etc/hids/plugin/driver/hids_driver-latest.ko +``` +### 创建配置文件 +首先先计算插件二进制文件的`sha256`: +``` +shasum -a 256 /etc/hids/plugin/driver/driver +5b76d3da59d45be3dd5d2326c1f2a87bd454ed4028201750b5b3eebb29cc6eac /etc/hids/plugin/driver/driver +``` +然后,修改`/etc/hids/config.yaml`的内容: +``` +echo "plugins: [{name: hids_driver,version: 1.5.0.0,path: ./plugin/driver/driver,sha256: 5b76d3da59d45be3dd5d2326c1f2a87bd454ed4028201750b5b3eebb29cc6eac}]" > /etc/hids/config.yaml +``` +### 运行Agent +执行下面的命令 +``` +cd /etc/hids/ && ./agent +``` +在当前屏幕上你将会看到来自内核的相关数据。 + +如果你想关闭这个插件,请修改配置文件移除相关内容: +``` +echo "plugins : []" > /etc/hids/config.yaml +``` +如果你想再次开启这个插件,请[恢复配置文件](#配置文件)。 + +## License +AgentSmith-HIDS Agent are distributed under the Apache-2.0 license. 
\ No newline at end of file diff --git a/agent/README.md b/agent/README.md new file mode 100644 index 000000000..88b15407e --- /dev/null +++ b/agent/README.md @@ -0,0 +1,133 @@ +[![License](https://img.shields.io/badge/License-Apache%20v2-blue.svg)](https://github.com/DianrongSecurity/AgentSmith-HIDS/blob/master/LICENSE) +[![Project Status: Active – The project has reached a stable, usable state and is being actively developed.](https://www.repostatus.org/badges/latest/active.svg)](https://www.repostatus.org/#active) + +English | [简体中文](README-zh_CN.md) +## About AgentSmith-HIDS Agent +AgentSmith-HIDS Agent is a user space program, which is used to forward data sent by other plugins to the remote end, and to control other plugins according to the configuration. + +AgentSmith-HIDS Agent is written in Golang, but plugins can be built in other languages ([Rust is currently supported](support/rust), and the next supported one will be Golang). + +A plugin is a program with a specific function that can be independently updated and configured. After the plugin is registered to the agent, the resource usage of the plugin will be monitored, and the log of the plugin will also be passed to the Agent. + +You can see two example plugins in the [driver](driver/) and [journal_watcher](journal_watcher/) directories. The former is used to parse and enrich the data transmitted by the AgentSmith-HIDS Driver from the kernel, and the latter is used for log monitoring. + +Through this Agent-Plugin structure, we can decouple basic modules (such as communication and control/resource monitoring, etc.) from functional modules (such as process monitoring/file monitoring/vulnerability analysis, etc.) to achieve dynamic addition and removal of modules. + +## Supported Platforms +In theory, all Linux distributions are compatible, but Debian (including Ubuntu) and RHEL (including CentOS) have been fully tested. Currently, we have only tested on the x86_64 platform. +In addition, for better compatibility with the plugins, it is recommended to run the AgentSmith-HIDS Agent in a physical machine or a virtual machine instead of a container. +For maximum functionality, you should probably run with root privileges. +## Compilation Environment Requirements +* Golang 1.15 (recommended) +## To Start Using AgentSmith-HIDS Agent +``` +git clone https://github.com/bytedance/AgentSmith-HIDS +cd AgentSmith-HIDS/agent +go build +``` +You will see the `agent` binary program in the current directory. +## Parameters And Options +If you want to see the parameters supported by the agent, please execute: +``` +./agent --help +``` +You will see: +``` +Usage: + agent [OPTIONS] +Application Options: + -v, --version Print agent version + --plugin= Plugin socket path (default: plugin.sock) + --log= Log file path (default: log/agent_smith.log) + --config= Config file path(.yaml) (default: config.yaml) + --data=[file|stdout|kafka] Set data output (default: stdout) + --file_path= If data option is file ,this option is used to set the file path (default: data.log) + --addr= If data option is kafka ,this option is used to set kafka addr + --topic= If data option is kafka ,this option is used to set kafka topic name + +Help Options: + -h, --help Show this help message + +``` +The configuration file is used to control the currently running plugin instance.
If you want to start running the Agent itself simply and quickly without enabling any plugin functions, then you can directly execute `./agent`, and you will see the data output on the stdout of the current terminal: +``` +[{"data_type":"1001","level":"error","msg":"no such file or directory","source":"config/config.go:114","timestamp":"${current_timestamp}"}] +[{"cpu":"0.00000","data_type":"1000","distro":"${your_distro}","io":"8192","kernel_version":"${your_kernel_version}","memory":"${current_agent_memory_usage}","plugins":"[]","slab":"${current_sys_slab_usage}","timestamp":"${current_timestamp}"}] +``` +The error in the first line is caused by the configuration file not being found and can be ignored for now. The second line is the agent's heartbeat data; each field in it describes the current Agent and Plugin information. +## Data Output +The current version of AgentSmith-HIDS Agent is intended more for local testing. It does not support remote control and configuration, but it does support transmitting data to a remote end (via sarama/kafka). Note: please do not use it in a production environment. +### Stdout (Default) +Flush all data to stdout. Note: This method does not save the data persistently. When data sending speed is too fast, it may cause the current terminal to run slowly. +### File +Save the data to the specified file; the default is `data.log` in the agent working directory. +### Kafka +The Agent will start a synchronous producer to send data to Kafka; please remember to configure the `addr` and `topic` parameters. +### Other Methods +You can use a custom data output by implementing the `Transport` interface under [transport](transport/transport.go). Next, modify the `main` function and set it as the default transport method. In the future, we will support gRPC. +## Logs +You can configure the storage path of the log file by setting the `log` parameter (default is `log/agent_smith.log`), but for more detailed log configuration, please modify the corresponding configuration in the `main` function. All logs of error level or above will be sent to [Data Output](#data-output). +## Config File +Currently, for testing purposes, a configuration file is provided to control the addition and deletion of plugins. This poses a great security risk, so please do not use it in a production environment. + +When the Agent starts, the config file set by `--config` (default is `config.yaml` in the working directory) will be monitored (via inotify). Whenever a modification event is triggered, the configuration file will be parsed and compared with the currently loaded plugins to achieve dynamic modification. Note: Please don't use vim/gedit or similar tools when modifying it, because [they will not trigger the modification event of inotify](https://stackoverflow.com/questions/13312794/inotify-dont-treat-vim-editting-as-a-modification-event). + +A correct configuration file looks like this: +``` +plugins: + - + name: example1 + version: 0.0.0.1 + path: /opt/plugins/example1 + sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 + - + name: example2 + version: 0.0.1.0 + path: /opt/plugins/example2 + sha256: 01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b +``` +Among them, `name` and `version` need to be the same as the [plugin](support/README.md#registration) config, `path` is used to find the plugin binary file, and `sha256` is used to verify the actual startup file (see the example below). + +All events related to the plugin can be seen in the [log](#logs) file.
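+
+The `sha256` value is the lowercase hex encoding of the plugin binary's SHA-256 digest, which is what the agent itself computes before starting a plugin (see `config/config.go`). As an illustrative sketch (not part of the agent), the following standalone Go program produces the same value as `shasum -a 256`, so you can paste its output into the `sha256` field; the path used here is just the one from the sample config above:
+```
+package main
+
+import (
+	"crypto/sha256"
+	"encoding/hex"
+	"fmt"
+	"io"
+	"os"
+)
+
+// fileSHA256 returns the hex-encoded SHA-256 digest of the file at path,
+// mirroring the check the agent performs before launching a plugin.
+func fileSHA256(path string) (string, error) {
+	f, err := os.Open(path)
+	if err != nil {
+		return "", err
+	}
+	defer f.Close()
+	hasher := sha256.New()
+	if _, err := io.Copy(hasher, f); err != nil {
+		return "", err
+	}
+	return hex.EncodeToString(hasher.Sum(nil)), nil
+}
+
+func main() {
+	// Path of the plugin binary referenced by the sample config.yaml above.
+	sum, err := fileSHA256("/opt/plugins/example1")
+	if err != nil {
+		fmt.Fprintln(os.Stderr, err)
+		os.Exit(1)
+	}
+	fmt.Println(sum)
+}
+```
+If the digest in `config.yaml` does not match the one computed from the binary, the agent logs a checksum error and skips loading that plugin.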
+## Example With AgentSmith-HIDS Driver +### Precondition +* The [Linux Kernel Module](../driver) (a ko file). +* The [Driver Plugin](driver) (a binary file). +* The [Agent](#to-start-using-agentsmith-hids-agent) (a binary file). +### Select a working directory +I will use `/etc/hids` as the working directory for the following steps: +``` +mkdir -p /etc/hids +``` +### Install +Create the working directory of the plugin and copy the files to it: +``` +cp agent /etc/hids/agent +mkdir -p /etc/hids/plugin/driver/ +cp driver /etc/hids/plugin/driver/driver +cp hids_driver.ko /etc/hids/plugin/driver/hids_driver-latest.ko +``` +### Create config file +Calculate `sha256` of the plugin: +``` +shasum -a 256 /etc/hids/plugin/driver/driver +5b76d3da59d45be3dd5d2326c1f2a87bd454ed4028201750b5b3eebb29cc6eac /etc/hids/plugin/driver/driver +``` +Content of `/etc/hids/config.yaml`: +``` +echo "plugins: [{name: hids_driver,version: 1.5.0.0,path: ./plugin/driver/driver,sha256: 5b76d3da59d45be3dd5d2326c1f2a87bd454ed4028201750b5b3eebb29cc6eac}]" > /etc/hids/config.yaml +``` +### Run Agent +Execute the following command (the agent must be started from its working directory, since the config and plugin paths are relative): +``` +cd /etc/hids && ./agent +``` +You will see the data from the kernel module on the screen. +If you want to disable this plugin, modify the configuration file and delete the driver-related fields: +``` +echo "plugins: []" > /etc/hids/config.yaml +``` +If you want to enable the Driver Plugin again, just [restore the configuration file](#create-config-file). + +## License +AgentSmith-HIDS Agent is distributed under the Apache-2.0 license. \ No newline at end of file diff --git a/agent/common/common.go b/agent/common/common.go new file mode 100644 index 000000000..56fae400b --- /dev/null +++ b/agent/common/common.go @@ -0,0 +1,125 @@ +package common + +import ( + "fmt" + "io/ioutil" + "net" + "os" + "regexp" + "strings" + + "github.com/google/uuid" +) + +var ( + PrivateIPv4 []string + PublicIPv4 []string + PrivateIPv6 []string + PublicIPv6 []string + AgentID string + Hostname string + Distro string + KernelVersion string +) + +const ( + Version = "0.0.0.1" +) + +var IDPath = "agent-id" + +func init() { + id, err := ioutil.ReadFile(IDPath) + if err != nil { + AgentID = uuid.New().String() + err = ioutil.WriteFile(IDPath, []byte(AgentID), 0700) + if err != nil { + AgentID = "PLACEHOLDER-WRITE-AGENT-ID-ERROR-" + err.Error() + fmt.Fprintf(os.Stderr, "Failed to write agent id file:%v", err) + } + } else { + _, err = uuid.Parse(string(id)) + if err != nil { + AgentID = uuid.New().String() + err = ioutil.WriteFile(IDPath, []byte(AgentID), 0700) + if err != nil { + AgentID = "PLACEHOLDER-WRITE-AGENT-ID-ERROR-" + err.Error() + fmt.Fprintf(os.Stderr, "Failed to write agent id file:%v", err) + } + } else { + AgentID = string(id) + } + } + interfaces, err := net.Interfaces() + if err != nil { + fmt.Fprintf(os.Stderr, "Can't get interfaces:%v", err) + } + for _, i := range interfaces { + if strings.HasPrefix(i.Name, "docker") || strings.HasPrefix(i.Name, "lo") || strings.HasPrefix(i.Name, "br-") { + continue + } + addr, err := i.Addrs() + if err != nil { + continue + } + for _, j := range addr { + ip, _, err := net.ParseCIDR(j.String()) + if err != nil { + continue + } + if ip.To4() == nil { + if strings.HasPrefix(ip.String(), "fe80") { + continue + } + if strings.HasPrefix(ip.String(), "fd") { + PrivateIPv6 = append(PrivateIPv6, ip.String()) + } else { + PublicIPv6 = append(PublicIPv6, ip.String()) + } + } else { + if strings.HasPrefix(ip.String(), "169.254.") { + continue + } + if strings.HasPrefix(ip.String(), "10.")
|| strings.HasPrefix(ip.String(), "192.168.") || regexp.MustCompile(`^172\.([1][6-9]|[2]\d|[3][0-1])\.`).MatchString(ip.String()) { + PrivateIPv4 = append(PrivateIPv4, ip.String()) + } else { + PublicIPv4 = append(PublicIPv4, ip.String()) + } + + } + } + } + Hostname, err = os.Hostname() + if err != nil { + fmt.Fprintf(os.Stderr, "Can't get hostname:%v", err) + Hostname = "PLACEHOLDER-GET-HOSTNAME-ERROR-" + err.Error() + } + kcontent, err := ioutil.ReadFile("/proc/sys/kernel/osrelease") + if err != nil { + fmt.Fprintf(os.Stderr, "Can't get kernel version:%v", err) + KernelVersion = "PLACEHOLDER-GET-KVERSION-ERROR-" + err.Error() + } else { + KernelVersion = strings.TrimSpace(string(kcontent)) + } + files, err := ioutil.ReadDir("/etc") + if err != nil { + fmt.Fprintf(os.Stderr, "Can't get distribution version:%v", err) + Distro = "PLACEHOLDER-GET-DISTRIBUTION-ERROR-" + err.Error() + } else { + for _, i := range files { + if strings.HasSuffix(i.Name(), "release") && i.Size() < 1024*1024 { + content, err := ioutil.ReadFile("/etc/" + i.Name()) + if err != nil { + continue + } + if strings.Contains(string(content), "Debian") { + Distro = "debian" + } else if strings.Contains(string(content), "CentOS") { + Distro = "centos" + } else { + Distro = "else" + } + } + } + } +} diff --git a/agent/config/config.go b/agent/config/config.go new file mode 100644 index 000000000..6992a2dbe --- /dev/null +++ b/agent/config/config.go @@ -0,0 +1,130 @@ +package config + +import ( + "crypto/sha256" + "encoding/hex" + "io" + "os" + "time" + + "github.com/bytedance/AgentSmith-HIDS/agent/plugin" + "github.com/fsnotify/fsnotify" + "go.uber.org/config" + "go.uber.org/zap" +) + +var ConfigPath = "" + +type cfg struct { + Name string + Version string + Path string + SHA256 string +} + +func parseConfig() error { + f, err := os.Open("config.yaml") + if err != nil { + return err + } + config, err := config.NewYAML(config.Source(f)) + if err != nil { + return err + } + var plugins []cfg + err = config.Get("plugins").Populate(&plugins) + if err != nil { + return err + } + s, err := plugin.GetServer() + if err != nil { + return err + } + for _, c := range plugins { + p, ok := s.Get(c.Name) + if !ok || p.Version() != c.Version { + zap.S().Infof("Update config:%+v", c) + s.Delete(c.Name) + f, e := os.Open(c.Path) + if e == nil { + hasher := sha256.New() + io.Copy(hasher, f) + checksum := hasher.Sum(nil) + if hex.EncodeToString(checksum) != c.SHA256 { + zap.S().Error("Checksum doesn't match") + continue + } + } else { + zap.S().Error(e) + continue + } + new, err := plugin.NewPlugin(c.Name, c.Version, c.SHA256, c.Path) + if err != nil { + zap.S().Error(err) + continue + } + s.Insert(c.Name, new) + if err := new.Run(); err != nil { + zap.S().Error(err) + s.Delete(c.Name) + } else { + go func(n string) { + time.Sleep(time.Second * 30) + if !new.Connected() { + zap.S().Errorf("Plugin seems to be dead:%v", new) + s.Delete(n) + } + }(c.Name) + } + } + } + loadedPlugins := s.PluginList() + for _, name := range loadedPlugins { + del := true + for _, c := range plugins { + if name == c.Name { + del = false + break + } + } + if del { + zap.S().Infof("Delete plugin:%v", name) + s.Delete(name) + } + } + return nil +} +func Watcher() { + watcher, err := fsnotify.NewWatcher() + if err != nil { + zap.S().Error(err) + } + err = watcher.Add("config.yaml") + if err == nil { + err := parseConfig() + if err != nil { + zap.S().Error(err) + } + for { + select { + case event, ok := <-watcher.Events: + if !ok { + return + } + if
event.Op&fsnotify.Write == fsnotify.Write { + err := parseConfig() + if err != nil { + zap.S().Error(err) + } + } + case err, ok := <-watcher.Errors: + if !ok { + zap.S().Error(err) + return + } + } + } + } else { + zap.S().Error(err) + } +} diff --git a/agent/driver/.gitignore b/agent/driver/.gitignore new file mode 100644 index 000000000..5dd81401e --- /dev/null +++ b/agent/driver/.gitignore @@ -0,0 +1,3 @@ +target +Cargo.lock +*.log* \ No newline at end of file diff --git a/agent/driver/Cargo.toml b/agent/driver/Cargo.toml new file mode 100644 index 000000000..2fdcacb6b --- /dev/null +++ b/agent/driver/Cargo.toml @@ -0,0 +1,34 @@ +[package] +authors = ["zhanglei.sec "] +edition = "2018" +name = "driver" +version = "0.1.0" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[build-dependencies] +serde = { version = "1.0", features = ["derive"] } +toml = "0.5" +codegen = "0.1" +heck = "0.3" + +[dependencies] +anyhow = "1.0" +hex = "0.4.2" +ipnet = "2.3.0" +iprange = "0.6.3" +lazy_static = "1.4.0" +log = "0.4" +sha2 = "0.9.2" +plugin = { path = "../support/rust/plugin" } +plugin_builder = { path = "../support/rust/plugin_builder" } +regex = "1" +reqwest = { version = "0.10.8", features = ["blocking", "rustls-tls"], default-features = false } +ttl_cache = "0.5.1" +twox-hash = "1.6.0" +users = "0.11.0" +walkdir = "2" +clru = "0.3.0" +fnv = "1.0.7" +serde = { version = "1.0", features = ["derive"] } +parking_lot = "0.11" +coarsetime = "0.1" diff --git a/agent/driver/Makefile b/agent/driver/Makefile new file mode 100644 index 000000000..b1a04c031 --- /dev/null +++ b/agent/driver/Makefile @@ -0,0 +1,10 @@ +run: + cargo run --release +run-musl: + cargo run --release --target x86_64-unknown-linux-musl +build: + cargo build --release +build-musl: + cargo build --release --target x86_64-unknown-linux-musl +clean: + cargo clean \ No newline at end of file diff --git a/agent/driver/README-zh_CN.md b/agent/driver/README-zh_CN.md new file mode 100644 index 000000000..69501ade2 --- /dev/null +++ b/agent/driver/README-zh_CN.md @@ -0,0 +1,67 @@ +[English](README.md) | 简体中文 +## 关于Driver插件 +Driver Plugin用于管理内核模块(安装/卸载/升级)。它可以接收并解析来自内核模块的数据,并将其进一步丰富,然后将数据转发给Agent。 + + +## 平台兼容性 +与[AgentSmith-HIDS Agent](../README-zh_CN.md#平台兼容性)相同。 + +## 需要的编译环境 +* Rust 1.48.0 + +快速安装 [rust](https://www.rust-lang.org/tools/install) 环境: +``` +curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh +``` + +## 编译 +执行以下命令: +``` +make build +``` +或者: +``` +cargo build --release +``` +你将会在`target/release/`下面找到`driver`二进制文件。 + +如果你想获得完全静态链接的二进制文件(更易于分发),执行以下命令: +``` +make build-musl +``` +或者: +``` +cargo build --release --target x86_64-unknown-linux-musl +``` +你将会在`target/x86_64-unknown-linux-musl/release/`下面找到`driver`二进制文件。 + +详情请查阅: +https://doc.rust-lang.org/edition-guide/rust-2018/platform-and-target-support/musl-support-for-fully-static-binaries.html + +## 模版生成 +根据[Driver中的数据定义](../../driver),我们使用代码生成实现了解析器,模版数据的结构被定义在[template.toml](template.toml)。 + +`metadata`字段定义了[LKM](../../driver)的版本与这个模版的维护者; + +`config`由如下几个配置项组成:`ko_url`定义了ko的分发下载地址(如果需要的话);`pipe_path`字段定义了[LKM](../../driver)的pipe文件路径;`name`字段定义了所要管理的ko文件的名字;`socket_path`定义了与Agent通信的Socket地址。请注意:`socket_path`必须与[Agent中的相应参数](../README-zh_CN.md#参数和选项)保持一致。 + + +`structures` 字段描述了不同的数据类型,请根据需要进行修改。关于`toml`文件的详细信息,请参阅:https://github.com/toml-lang/toml + +## 分发 ko +你可以将不同内核版本的ko文件放在文件服务器上以方便分发。请按照如下要求对ko文件进行命名: + +文件名应该由三部分组成: `NAME-VERSION-KERNEL_VERSION.ko`。 + +`NAME`应该与`template.toml`中的`config.name`字段保持一致。 + 
+`VERSION`应该与`config.version`字段保持一致（也就是[LKM](../../driver)的版本）。 + +`KERNEL_VERSION`应该与`uname -r`保持一致。 + +除此之外，对于每个ko文件都应该与一个命名为`NAME-VERSION-KERNEL_VERSION.sha256`的文本文件一起上传，这个文件中包含着`NAME-VERSION-KERNEL_VERSION.ko`文件的`sha256`字符串（经过hex编码）。例如： +``` +cat hids_driver-1.0.0.0-4.14-amd64.sha256 +3ca9eb8143e99fac18a50613247cadb900ba79bf6f7d9a073b61e4ab303d3635 +``` +最后，将你的文件服务器地址填入到`config.ko_url`列表中（可以有多个）。这样当插件启动时，与当前插件和内核版本保持一致的ko文件将会被自动下载。 \ No newline at end of file diff --git a/agent/driver/README.md b/agent/driver/README.md new file mode 100644 index 000000000..28b518e8d --- /dev/null +++ b/agent/driver/README.md @@ -0,0 +1,63 @@ +English | [简体中文](README-zh_CN.md) +## About Driver Plugin +The Driver Plugin is used to manage the kernel module (install/uninstall/update). + +It receives and parses data from the kernel module, further enriches the information, and then forwards it to the Agent. + +## Supported Platforms +Same as [AgentSmith-HIDS Agent](../README.md#supported-platforms) + +## Compilation Environment Requirements +* Rust 1.48.0 + +Quickly install the [rust](https://www.rust-lang.org/tools/install) environment: +``` +curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh +``` + +## Building +Just run: +``` +make build +``` +or +``` +cargo build --release +``` +You will find the driver binary file under `target/release/`. + +If you want to get a fully statically linked binary plugin (easier to distribute), do the following: +``` +make build-musl +``` +or +``` +cargo build --release --target x86_64-unknown-linux-musl +``` +You will find the driver binary file under `target/x86_64-unknown-linux-musl/release/`. + +For details, please refer to: +https://doc.rust-lang.org/edition-guide/rust-2018/platform-and-target-support/musl-support-for-fully-static-binaries.html + +## Template Generation +According to the [data definition in Driver](../../driver), we use code generation to build the parser. The structures are defined in [template.toml](template.toml). + +The `metadata` field defines the [LKM](../../driver) version and the maintainers of this template. The `config` field defines the download address of the ko distribution (if needed), the pipe path of the [LKM](../../driver), the name of the ko file to be managed, and the socket path of the Agent. + +Please note: The socket path must be consistent with the [parameters in the Agent](../README.md#parameters-and-options). + +The `structures` field describes various data types, please modify as needed. For details, please refer to: https://github.com/toml-lang/toml + +## Distribute ko +You can put ko files of different kernel versions on a file server for easy distribution. Please rename each ko file according to the following requirements: + +The file name consists of three parts: `NAME-VERSION-KERNEL_VERSION.ko`. + +`NAME` needs to be consistent with the `config.name` field in `template.toml`, `VERSION` should be consistent with the `config.version` field (that is, the [LKM](../../driver) version), and `KERNEL_VERSION` should be consistent with `uname -r`. + +In addition, for each ko file, a text file named `NAME-VERSION-KERNEL_VERSION.sha256` should be uploaded alongside it, containing the hex-encoded `sha256` value of the `NAME-VERSION-KERNEL_VERSION.ko` file.
E.g: +``` +cat hids_driver-1.0.0.0-4.14-amd64.sha256 +3ca9eb8143e99fac18a50613247cadb900ba79bf6f7d9a073b61e4ab303d3635 +``` +Finally, set your file server address in the `config.ko_url` ist (there can be multiple addresses), so that when the plugin starts, the ko file that is compatible with the [LKM](../../driver) and kernel version will be automatically downloaded. \ No newline at end of file diff --git a/agent/driver/build.rs b/agent/driver/build.rs new file mode 100644 index 000000000..0f7fddc57 --- /dev/null +++ b/agent/driver/build.rs @@ -0,0 +1,372 @@ +use codegen; +use heck::CamelCase; +use serde::Deserialize; +use std::collections::HashMap; +use std::fs; +use toml; +#[derive(Deserialize, Debug)] +struct Template { + metadata: Metadata, + config: Config, + structures: HashMap, +} +#[derive(Deserialize, Debug)] +struct Metadata { + version: String, + maintainers: Vec, +} +#[derive(Deserialize, Debug)] +struct Config { + ko_url: Vec, + name: String, + pipe_path: String, + socket_path: String, +} +#[derive(Deserialize, Debug)] +struct Structure { + common_fields: bool, + data_type: usize, + #[serde(rename = "additional_fields")] + fields: HashMap, +} +#[derive(Deserialize, Debug, Clone)] +struct Field { + index: usize, + #[serde(rename = "type")] + _type: String, +} +const COMMON_FIELDS_KERNEL: &[&str] = &[ + "uid", + "data_type", + "exe", + "pid", + "ppid", + "pgid", + "tgid", + "comm", + "nodename", + "sessionid", +]; +const COMMON_FIELDS_USER: &[&str] = &[ + "username", + "timestamp", + "exe_hash", + "ppid_argv", + "pgid_argv", + "argv", + "pid_tree", +]; +const PARSER_PATH: &str = "src/parser.rs"; +const CONFIG_PATH: &str = "src/config.rs"; +fn generate_parser(content: &mut Template) { + content.structures.iter_mut().for_each(|(_, v)| { + if v.common_fields { + v.fields.iter_mut().for_each(|(_, f)| { + f.index += 10; + }); + for i in 0..COMMON_FIELDS_KERNEL.len() { + v.fields.insert( + String::from(COMMON_FIELDS_KERNEL[i]), + Field { + index: i, + _type: String::from("kernel"), + }, + ); + } + for i in 0..COMMON_FIELDS_USER.len() { + if !v.fields.contains_key(COMMON_FIELDS_USER[i]) { + v.fields.insert( + String::from(COMMON_FIELDS_USER[i]), + Field { + index: v.fields.len() + 1, + _type: String::from("user"), + }, + ); + } + } + } + }); + let mut scope = codegen::Scope::new(); + scope.import("crate::cache", "{ArgvCache, FileHashCache}"); + scope.import("anyhow", "*"); + scope.import("clru", "CLruCache"); + scope.import("coarsetime", "Clock"); + scope.import("fnv", "FnvBuildHasher"); + scope.import("ipnet", "{Ipv4Net, Ipv6Net}"); + scope.import("iprange", "IpRange"); + scope.import("plugin", "*"); + scope.import("log", "*"); + scope.import("serde", "Serialize"); + scope.import("users", "{Users, UsersCache}"); + content.structures.iter().for_each(|(k, v)| { + let s = scope.new_struct(&k.to_camel_case()); + s.generic("\'a").derive("Debug").derive("Serialize"); + for (i, _) in &v.fields { + s.field(i, "&'a str"); + } + }); + scope + .new_struct("Parser") + .field("sender", "Sender") + .field("user_cache", "UsersCache") + .field("argv_cache", "ArgvCache") + .field("pid_tree_cache", "CLruCache") + .field("file_hash_cache", "FileHashCache") + .vis("pub"); + let mut parser_impl = codegen::Impl::new("Parser"); + parser_impl + .new_fn("new") + .arg("sender", "Sender") + .ret("Self") + .line("let mut ipv4_range: IpRange = IpRange::new();") + .line("let mut ipv6_range: IpRange = IpRange::new();") + .line("ipv4_range.add(\"127.0.0.1/8\".parse().unwrap());") + 
.line("ipv6_range.add(\"::1/128\".parse().unwrap());") + .line("ipv6_range.add(\"fe80::/10\".parse().unwrap());") + .line("Self {sender,user_cache: UsersCache::default(),") + .line("argv_cache: ArgvCache::new(10240),") + .line("pid_tree_cache: CLruCache::with_hasher(10240, FnvBuildHasher::default()),") + .line("file_hash_cache: FileHashCache::new(10240),}") + .vis("pub"); + let parser_func = parser_impl + .new_fn("parse") + .arg_mut_self() + .arg("fields", "Vec<&str>") + .ret("Result<()>") + .line("match fields[1] {") + .vis("pub"); + for (k, v) in content.structures.iter() { + parser_func.line(format!("\"{}\"=>{{", v.data_type)); + // Add timestamp + if v.fields.contains_key("timestamp") { + parser_func.line("let timestamp = Clock::now_since_epoch().as_secs().to_string();"); + } + // Add username + if let Some(f) = v.fields.get("uid") { + parser_func.line(format!( + "let username = if let Ok(uid) = fields[{}].parse::() {{ + match self.user_cache.get_user_by_uid(uid) {{ + Some(n) => n.name().to_str().unwrap_or_default().to_owned(), + None => \"-3\".to_string(), + }} + }} else {{ + \"-3\".to_string() + }};", + f.index + )); + } + // Add old_username + if v.fields.contains_key("old_username") { + parser_func.line(format!( + "let old_username = if let Ok(old_uid) = fields[{}].parse::() {{ + match self.user_cache.get_user_by_uid(old_uid) {{ + Some(n) => n.name().to_str().unwrap_or_default().to_owned(), + None => \"-3\".to_string(), + }} + }} else {{ + \"-3\".to_string() + }};", + v.fields.get("old_uid").unwrap().index + )); + } + // Add exe_hash + if v.fields.contains_key("exe_hash") { + parser_func.line( + "let exe_hash = if fields[2] != \"-1\" && fields[2] != \"\" { + self.file_hash_cache.get(fields[2]) + } else { + \"-3\".to_string() + };", + ); + } + // Add pid tree or put in cache + if let Some(f) = v.fields.get("pid_tree") { + match f._type.as_ref() { + "user" | "placeholder" => { + parser_func.line( + "let pid_tree = if let Ok(pid) = fields[3].parse::() { + let pid_tree = match self.pid_tree_cache.get(&pid) { + Some(t) => t, + None => \"-3\", + }; + pid_tree + } else { + \"-3\" + };", + ); + } + "kernel" => { + parser_func.line(format!( + "if let Ok(pid) = fields[3].parse::() {{ + if fields[{}] != \"\" && fields[{}] != \"-1\" {{ + self.pid_tree_cache.put(pid, fields[{}].to_string()); + }} + }}", + f.index, f.index, f.index + )); + } + _ => {} + } + } + // Add argv or put in cache + if let Some(f) = v.fields.get("argv") { + match f._type.as_ref() { + "user" | "placeholder" => { + parser_func.line( + "let argv = if let Ok(pid) = fields[3].parse::() { + self.argv_cache.get(&pid) + } else { + \"-3\".to_string() + };", + ); + } + "kernel" => { + parser_func.line(format!( + "if let Ok(pid) = fields[3].parse::() {{ + if fields[{}] != \"\" && fields[{}] != \"-1\" {{ + self.argv_cache.put(pid, fields[{}].to_string()); + }} + }}", + f.index, f.index, f.index + )); + } + _ => {} + } + } + // Add pgid_argv + if let Some(f) = v.fields.get("pgid_argv") { + match f._type.as_ref() { + "user" | "placeholder" => { + parser_func.line( + "let pgid_argv = if let Ok(pgid_id) = fields[5].parse::() { + self.argv_cache.get(&pgid_id) + } else { + \"-3\".to_string() + };", + ); + } + _ => {} + } + } + // Add ppid_argv + if let Some(f) = v.fields.get("ppid_argv") { + match f._type.as_ref() { + "user" | "placeholder" => { + parser_func.line( + "let ppid_argv = if let Ok(ppid) = fields[4].parse::() { + self.argv_cache.get(&ppid) + } else { + \"-3\".to_string() + };", + ); + } + _ => {} + } + } + // Add socket_argv + 
if let Some(f) = v.fields.get("socket_argv") { + match f._type.as_ref() { + "user" | "placeholder" => { + parser_func.line(format!( + "let socket_argv = if let Ok(socket_pid) = fields[{}].parse::() {{ + self.argv_cache.get(&socket_pid) + }} else {{ + \"-3\".to_string() + }};", + v.fields.get("socket_pid").unwrap().index, + )); + } + _ => {} + } + } + // Add send func + parser_func.line(format!("self.sender.send(&{} {{", k.to_camel_case())); + // Add data struct + for (i, j) in v.fields.iter() { + match j._type.as_str() { + "kernel" => { + parser_func.line(format!("{}:fields[{}],", i, j.index)); + } + "placeholder" | "user" => { + parser_func.line(format!("{}:&{},", i, i)); + } + _ => {} + } + } + parser_func.line("})},"); + } + + // Unsupported type + parser_func + .line("_ => {") + .line("warn!(\"Datatype does not support:{:?}\", fields);") + .line("Ok(())}"); + parser_func.line("}"); + scope.push_impl(parser_impl); + // Write to file + fs::write( + PARSER_PATH, + format!( + "// Code generated by build.rs DO NOT EDIT.\n// VERSION: {}\n// Maintainers: {:?}\n{}", + content.metadata.version, + content.metadata.maintainers, + scope.to_string() + ), + ) + .unwrap(); + // Format file + std::process::Command::new("rustfmt") + .arg("--edition") + .arg("2018") + .arg(PARSER_PATH) + .spawn() + .unwrap(); +} +fn generate_config(content: &mut Template) { + let mut scope = String::new(); + scope.push_str(&format!( + "pub const KO_URL: &[&str] = &{:?};\n", + content.config.ko_url + )); + scope.push_str(&format!( + "pub const NAME: &str = \"{}\";\n", + content.config.name + )); + scope.push_str(&format!( + "pub const PIPE_PATH: &str = \"{}\";\n", + content.config.pipe_path + )); + scope.push_str(&format!( + "pub const SOCKET_PATH: &str = \"{}\";\n", + content.config.socket_path + )); + scope.push_str(&format!( + "pub const VERSION: &str = \"{}\";\n", + content.metadata.version + )); + // Write to file + fs::write( + CONFIG_PATH, + format!( + "// Code generated by build.rs DO NOT EDIT.\n// VERSION: {}\n// Maintainers: {:?}\n{}", + content.metadata.version, + content.metadata.maintainers, + scope.to_string() + ), + ) + .unwrap(); + // Format file + std::process::Command::new("rustfmt") + .arg("--edition") + .arg("2018") + .arg(CONFIG_PATH) + .spawn() + .unwrap(); +} +fn main() { + println!("cargo:rerun-if-changed=template.toml"); + let template = fs::read_to_string("template.toml").unwrap(); + let mut content: Template = toml::from_str(&template).unwrap(); + generate_parser(&mut content); + generate_config(&mut content); +} diff --git a/agent/driver/src/cache.rs b/agent/driver/src/cache.rs new file mode 100644 index 000000000..4d0114bd9 --- /dev/null +++ b/agent/driver/src/cache.rs @@ -0,0 +1,81 @@ +use clru::CLruCache; +use fnv::FnvBuildHasher; +use hex::encode; +use std::fs::read_to_string; +use std::fs::File; +use std::hash::Hasher; +use std::io::{Error, Read}; +use twox_hash::{RandomXxh3HashBuilder64, XxHash64}; + +const XXHASH_BUFF_SIZE: usize = 32 * 1024; + +pub struct ArgvCache { + i: CLruCache, +} + +impl ArgvCache { + pub fn new(cap: usize) -> Self { + Self { + i: CLruCache::<_, _, FnvBuildHasher>::with_hasher(cap, FnvBuildHasher::default()), + } + } + + pub fn get(&mut self, key: &u32) -> String { + return match self.i.get(key) { + Some(v) => v.to_owned(), + None => { + if let Ok(v) = read_to_string(format!("/proc/{}/cmdline", key)) { + let normalized = v.replace('\0', " "); + self.i.put(*key, normalized.clone()); + normalized + } else { + "-3".to_owned() + } + } + }; + } + + pub fn put(&mut 
self, key: u32, value: String) { + self.i.put(key, value); + } +} + +pub struct FileHashCache { + i: CLruCache, +} + +impl FileHashCache { + pub fn new(cap: usize) -> Self { + Self { + i: CLruCache::<_, _, RandomXxh3HashBuilder64>::with_hasher( + cap, + Default::default(), + ), + } + } + + pub fn get>(&mut self, key: T) -> String { + match self.i.get(key.as_ref()) { + Some(h) => h.to_owned(), + None => { + let mut buffer = [0; XXHASH_BUFF_SIZE]; + + File::open(key.as_ref()) + .map(|mut f| (f.metadata(), f.read(&mut buffer[..]))) + .and_then(|stat| match stat { + (Ok(l), Ok(_)) => Ok(l.len()), + (_, _) => Err(Error::last_os_error()), + }) + .map(|len| { + let mut hasher = XxHash64::default(); + hasher.write(&buffer[..XXHASH_BUFF_SIZE]); + hasher.write(&len.to_be_bytes()); + let hash = encode(hasher.finish().to_be_bytes()); + self.i.put(key.as_ref().to_owned(), hash.to_owned()); + hash + }) + .unwrap_or_else(|_| "-3".to_string()) + } + } + } +} diff --git a/agent/driver/src/config.rs b/agent/driver/src/config.rs new file mode 100644 index 000000000..703f19963 --- /dev/null +++ b/agent/driver/src/config.rs @@ -0,0 +1,8 @@ +// Code generated by build.rs DO NOT EDIT. +// VERSION: 1.5.0.0 +// Maintainers: ["zhanglei.sec@bytedance.com"] +pub const KO_URL: &[&str] = &["http://example.com/download/ko/"]; +pub const NAME: &str = "hids_driver"; +pub const PIPE_PATH: &str = "/proc/hids_driver/1"; +pub const SOCKET_PATH: &str = "/etc/hids/plugin.sock"; +pub const VERSION: &str = "1.5.0.0"; diff --git a/agent/driver/src/main.rs b/agent/driver/src/main.rs new file mode 100644 index 000000000..b44678352 --- /dev/null +++ b/agent/driver/src/main.rs @@ -0,0 +1,95 @@ +use config::*; +use log::*; +use plugin_builder::Builder; +use prepare::*; +use std::fs::*; +use std::io::{BufRead, BufReader}; +use std::time::Duration; + +mod cache; +mod config; +mod parser; +mod prepare; + +const SLEEP_INTERVAL: Duration = Duration::from_millis(126); + +fn safety_exit() { + std::thread::sleep(SLEEP_INTERVAL); + warn!("Safety exit"); + let _ = std::process::Command::new("rmmod") + .arg("hids_driver") + .env("PATH", "/sbin:/bin:/usr/bin:/usr/sbin") + .spawn(); +} + +fn main() { + let (sender, receiver) = Builder::new(SOCKET_PATH, NAME, VERSION).unwrap().build(); + if let Some(dmesg) = check_crash() { + error!("Detect latest kernel panic, dmesg:{}", dmesg); + std::thread::sleep(SLEEP_INTERVAL); + return; + } else { + info!("Crash check passed"); + } + + if let Err(version) = check_kernel_version() { + error!("Unsupported kernel version:{}", version); + std::thread::sleep(SLEEP_INTERVAL); + return; + } else { + info!("Kernel version check passed"); + } + if let Err(e) = prepare_ko() { + error!("{}", e); + std::thread::sleep(SLEEP_INTERVAL); + return;} + + let handle = std::thread::spawn(move || { + let mut parser = parser::Parser::new(sender); + loop { + let pipe = match File::open(PIPE_PATH) { + Ok(pipe) => pipe, + Err(e) => { + error!("{}", e); + return; + } + }; + let pipe = BufReader::new(pipe); + let lines = pipe.split(b'\x17'); + for line in lines { + match line { + Ok(content) => { + let content = match String::from_utf8(content) { + Ok(c) => c, + Err(e) => { + warn!("{}", e); + continue; + } + }; + let fields: Vec<&str> = content.split('\x1e').collect(); + if parser.parse(fields).is_err() { + return; + }; + } + Err(e) => { + error!("{}", e); + break; + } + } + } + warn!("Pipe read end"); + std::thread::sleep(Duration::from_secs(10)); + } + }); + loop { + match receiver.receive() { + Ok(t) => println!("{:?}", t), + 
Err(e) => { + error!("{}", e); + break; + } + } + } + let _ = handle.join(); + safety_exit(); +} diff --git a/agent/driver/src/parser.rs b/agent/driver/src/parser.rs new file mode 100644 index 000000000..0024bdf6c --- /dev/null +++ b/agent/driver/src/parser.rs @@ -0,0 +1,929 @@ +// Code generated by build.rs DO NOT EDIT. +// VERSION: 1.5.0.0 +// Maintainers: ["zhanglei.sec@bytedance.com"] +use crate::cache::{ArgvCache, FileHashCache}; +use anyhow::*; +use clru::CLruCache; +use coarsetime::Clock; +use fnv::FnvBuildHasher; +use ipnet::{Ipv4Net, Ipv6Net}; +use iprange::IpRange; +use log::*; +use plugin::*; +use serde::Serialize; +use users::{Users, UsersCache}; + +#[derive(Debug, Serialize)] +struct CreateFile<'a> { + pgid_argv: &'a str, + ppid_argv: &'a str, + pgid: &'a str, + uid: &'a str, + sessionid: &'a str, + pid_tree: &'a str, + exe_hash: &'a str, + ppid: &'a str, + timestamp: &'a str, + pid: &'a str, + username: &'a str, + exe: &'a str, + argv: &'a str, + file_path: &'a str, + comm: &'a str, + tgid: &'a str, + nodename: &'a str, + data_type: &'a str, +} + +#[derive(Debug, Serialize)] +struct ProcFileHook<'a> { + timestamp: &'a str, + module_name: &'a str, + hidden: &'a str, + data_type: &'a str, +} + +#[derive(Debug, Serialize)] +struct LoadModule<'a> { + exe_hash: &'a str, + timestamp: &'a str, + run_path: &'a str, + ppid: &'a str, + argv: &'a str, + nodename: &'a str, + pid: &'a str, + ppid_argv: &'a str, + username: &'a str, + sessionid: &'a str, + mod_info: &'a str, + tgid: &'a str, + uid: &'a str, + pgid_argv: &'a str, + data_type: &'a str, + pid_tree: &'a str, + exe: &'a str, + pgid: &'a str, + comm: &'a str, +} + +#[derive(Debug, Serialize)] +struct SyscallHook<'a> { + hidden: &'a str, + timestamp: &'a str, + data_type: &'a str, + syscall_number: &'a str, + module_name: &'a str, +} + +#[derive(Debug, Serialize)] +struct Ptrace<'a> { + ptrace_request: &'a str, + addr: &'a str, + uid: &'a str, + pgid: &'a str, + nodename: &'a str, + pid_tree: &'a str, + target_pid: &'a str, + username: &'a str, + ppid_argv: &'a str, + sessionid: &'a str, + tgid: &'a str, + data_type: &'a str, + pgid_argv: &'a str, + data: &'a str, + comm: &'a str, + exe_hash: &'a str, + pid: &'a str, + timestamp: &'a str, + ppid: &'a str, + argv: &'a str, + exe: &'a str, +} + +#[derive(Debug, Serialize)] +struct UpdateCred<'a> { + ppid: &'a str, + timestamp: &'a str, + old_username: &'a str, + data_type: &'a str, + pid: &'a str, + pid_tree: &'a str, + ppid_argv: &'a str, + sessionid: &'a str, + res: &'a str, + old_uid: &'a str, + uid: &'a str, + pgid: &'a str, + comm: &'a str, + username: &'a str, + exe_hash: &'a str, + pgid_argv: &'a str, + argv: &'a str, + exe: &'a str, + tgid: &'a str, + nodename: &'a str, +} + +#[derive(Debug, Serialize)] +struct Connect<'a> { + sport: &'a str, + nodename: &'a str, + exe_hash: &'a str, + sip: &'a str, + timestamp: &'a str, + argv: &'a str, + data_type: &'a str, + pid_tree: &'a str, + pgid_argv: &'a str, + res: &'a str, + sa_family: &'a str, + uid: &'a str, + sessionid: &'a str, + pgid: &'a str, + username: &'a str, + dport: &'a str, + dip: &'a str, + tgid: &'a str, + comm: &'a str, + ppid: &'a str, + pid: &'a str, + exe: &'a str, + connect_type: &'a str, + ppid_argv: &'a str, +} + +#[derive(Debug, Serialize)] +struct Dns<'a> { + qr: &'a str, + data_type: &'a str, + uid: &'a str, + comm: &'a str, + pid: &'a str, + sip: &'a str, + sport: &'a str, + nodename: &'a str, + ppid_argv: &'a str, + ppid: &'a str, + sessionid: &'a str, + pgid: &'a str, + username: &'a str, + dport: &'a 
str, + query: &'a str, + sa_family: &'a str, + tgid: &'a str, + dip: &'a str, + exe_hash: &'a str, + pid_tree: &'a str, + opcode: &'a str, + rcode: &'a str, + timestamp: &'a str, + pgid_argv: &'a str, + argv: &'a str, + exe: &'a str, +} + +#[derive(Debug, Serialize)] +struct LkmHidden<'a> { + data_type: &'a str, + hidden: &'a str, + module_name: &'a str, + timestamp: &'a str, +} + +#[derive(Debug, Serialize)] +struct Bind<'a> { + pgid_argv: &'a str, + data_type: &'a str, + nodename: &'a str, + res: &'a str, + exe: &'a str, + exe_hash: &'a str, + ppid_argv: &'a str, + ppid: &'a str, + pid: &'a str, + argv: &'a str, + pid_tree: &'a str, + uid: &'a str, + comm: &'a str, + sport: &'a str, + sa_family: &'a str, + sip: &'a str, + pgid: &'a str, + tgid: &'a str, + sessionid: &'a str, + username: &'a str, + timestamp: &'a str, +} + +#[derive(Debug, Serialize)] +struct InterruptsHook<'a> { + data_type: &'a str, + interrupts_number: &'a str, + hidden: &'a str, + timestamp: &'a str, + module_name: &'a str, +} + +#[derive(Debug, Serialize)] +struct Execve<'a> { + dip: &'a str, + sa_family: &'a str, + pid: &'a str, + ppid: &'a str, + comm: &'a str, + sip: &'a str, + socket_argv: &'a str, + run_path: &'a str, + username: &'a str, + exe: &'a str, + res: &'a str, + stdin: &'a str, + stdout: &'a str, + nodename: &'a str, + ssh: &'a str, + exe_hash: &'a str, + sport: &'a str, + argv: &'a str, + ld_preload: &'a str, + uid: &'a str, + data_type: &'a str, + pgid_argv: &'a str, + socket_pid: &'a str, + tgid: &'a str, + tty: &'a str, + pid_tree: &'a str, + timestamp: &'a str, + sessionid: &'a str, + ppid_argv: &'a str, + dport: &'a str, + pgid: &'a str, +} + +#[derive(Debug, Serialize)] +struct Mprotect<'a> { + owner_pid: &'a str, + pid: &'a str, + comm: &'a str, + nodename: &'a str, + tgid: &'a str, + username: &'a str, + timestamp: &'a str, + ppid_argv: &'a str, + pgid_argv: &'a str, + mprotect_prot: &'a str, + uid: &'a str, + argv: &'a str, + exe: &'a str, + owner_file: &'a str, + ppid: &'a str, + pid_tree: &'a str, + exe_hash: &'a str, + vm_pid: &'a str, + data_type: &'a str, + pgid: &'a str, + sessionid: &'a str, +} + +pub struct Parser { + sender: Sender, + user_cache: UsersCache, + argv_cache: ArgvCache, + pid_tree_cache: CLruCache, + file_hash_cache: FileHashCache, +} + +impl Parser { + pub fn new(sender: Sender) -> Self { + let mut ipv4_range: IpRange = IpRange::new(); + let mut ipv6_range: IpRange = IpRange::new(); + ipv4_range.add("127.0.0.1/8".parse().unwrap()); + ipv6_range.add("::1/128".parse().unwrap()); + ipv6_range.add("fe80::/10".parse().unwrap()); + Self { + sender, + user_cache: UsersCache::default(), + argv_cache: ArgvCache::new(10240), + pid_tree_cache: CLruCache::with_hasher(10240, FnvBuildHasher::default()), + file_hash_cache: FileHashCache::new(10240), + } + } + + pub fn parse(&mut self, fields: Vec<&str>) -> Result<()> { + match fields[1] { + "602" => { + let timestamp = Clock::now_since_epoch().as_secs().to_string(); + let username = if let Ok(uid) = fields[0].parse::() { + match self.user_cache.get_user_by_uid(uid) { + Some(n) => n.name().to_str().unwrap_or_default().to_owned(), + None => "-3".to_string(), + } + } else { + "-3".to_string() + }; + let exe_hash = if fields[2] != "-1" && fields[2] != "" { + self.file_hash_cache.get(fields[2]) + } else { + "-3".to_string() + }; + let pid_tree = if let Ok(pid) = fields[3].parse::() { + let pid_tree = match self.pid_tree_cache.get(&pid) { + Some(t) => t, + None => "-3", + }; + pid_tree + } else { + "-3" + }; + let argv = if let Ok(pid) = 
fields[3].parse::() { + self.argv_cache.get(&pid) + } else { + "-3".to_string() + }; + let pgid_argv = if let Ok(pgid_id) = fields[5].parse::() { + self.argv_cache.get(&pgid_id) + } else { + "-3".to_string() + }; + let ppid_argv = if let Ok(ppid) = fields[4].parse::() { + self.argv_cache.get(&ppid) + } else { + "-3".to_string() + }; + self.sender.send(&CreateFile { + pgid_argv: &pgid_argv, + ppid_argv: &ppid_argv, + pgid: fields[5], + uid: fields[0], + sessionid: fields[9], + pid_tree: &pid_tree, + exe_hash: &exe_hash, + ppid: fields[4], + timestamp: ×tamp, + pid: fields[3], + username: &username, + exe: fields[2], + argv: &argv, + file_path: fields[10], + comm: fields[7], + tgid: fields[6], + nodename: fields[8], + data_type: fields[1], + }) + } + "700" => { + let timestamp = Clock::now_since_epoch().as_secs().to_string(); + self.sender.send(&ProcFileHook { + timestamp: ×tamp, + module_name: fields[2], + hidden: fields[3], + data_type: fields[1], + }) + } + "603" => { + let timestamp = Clock::now_since_epoch().as_secs().to_string(); + let username = if let Ok(uid) = fields[0].parse::() { + match self.user_cache.get_user_by_uid(uid) { + Some(n) => n.name().to_str().unwrap_or_default().to_owned(), + None => "-3".to_string(), + } + } else { + "-3".to_string() + }; + let exe_hash = if fields[2] != "-1" && fields[2] != "" { + self.file_hash_cache.get(fields[2]) + } else { + "-3".to_string() + }; + if let Ok(pid) = fields[3].parse::() { + if fields[11] != "" && fields[11] != "-1" { + self.pid_tree_cache.put(pid, fields[11].to_string()); + } + } + let argv = if let Ok(pid) = fields[3].parse::() { + self.argv_cache.get(&pid) + } else { + "-3".to_string() + }; + let pgid_argv = if let Ok(pgid_id) = fields[5].parse::() { + self.argv_cache.get(&pgid_id) + } else { + "-3".to_string() + }; + let ppid_argv = if let Ok(ppid) = fields[4].parse::() { + self.argv_cache.get(&ppid) + } else { + "-3".to_string() + }; + self.sender.send(&LoadModule { + exe_hash: &exe_hash, + timestamp: ×tamp, + run_path: fields[12], + ppid: fields[4], + argv: &argv, + nodename: fields[8], + pid: fields[3], + ppid_argv: &ppid_argv, + username: &username, + sessionid: fields[9], + mod_info: fields[10], + tgid: fields[6], + uid: fields[0], + pgid_argv: &pgid_argv, + data_type: fields[1], + pid_tree: fields[11], + exe: fields[2], + pgid: fields[5], + comm: fields[7], + }) + } + "701" => { + let timestamp = Clock::now_since_epoch().as_secs().to_string(); + self.sender.send(&SyscallHook { + hidden: fields[3], + timestamp: ×tamp, + data_type: fields[1], + syscall_number: fields[4], + module_name: fields[2], + }) + } + "101" => { + let timestamp = Clock::now_since_epoch().as_secs().to_string(); + let username = if let Ok(uid) = fields[0].parse::() { + match self.user_cache.get_user_by_uid(uid) { + Some(n) => n.name().to_str().unwrap_or_default().to_owned(), + None => "-3".to_string(), + } + } else { + "-3".to_string() + }; + let exe_hash = if fields[2] != "-1" && fields[2] != "" { + self.file_hash_cache.get(fields[2]) + } else { + "-3".to_string() + }; + if let Ok(pid) = fields[3].parse::() { + if fields[14] != "" && fields[14] != "-1" { + self.pid_tree_cache.put(pid, fields[14].to_string()); + } + } + let argv = if let Ok(pid) = fields[3].parse::() { + self.argv_cache.get(&pid) + } else { + "-3".to_string() + }; + let pgid_argv = if let Ok(pgid_id) = fields[5].parse::() { + self.argv_cache.get(&pgid_id) + } else { + "-3".to_string() + }; + let ppid_argv = if let Ok(ppid) = fields[4].parse::() { + self.argv_cache.get(&ppid) + } else { 
+ "-3".to_string() + }; + self.sender.send(&Ptrace { + ptrace_request: fields[10], + addr: fields[12], + uid: fields[0], + pgid: fields[5], + nodename: fields[8], + pid_tree: fields[14], + target_pid: fields[11], + username: &username, + ppid_argv: &ppid_argv, + sessionid: fields[9], + tgid: fields[6], + data_type: fields[1], + pgid_argv: &pgid_argv, + data: fields[13], + comm: fields[7], + exe_hash: &exe_hash, + pid: fields[3], + timestamp: ×tamp, + ppid: fields[4], + argv: &argv, + exe: fields[2], + }) + } + "604" => { + let timestamp = Clock::now_since_epoch().as_secs().to_string(); + let username = if let Ok(uid) = fields[0].parse::() { + match self.user_cache.get_user_by_uid(uid) { + Some(n) => n.name().to_str().unwrap_or_default().to_owned(), + None => "-3".to_string(), + } + } else { + "-3".to_string() + }; + let old_username = if let Ok(old_uid) = fields[11].parse::() { + match self.user_cache.get_user_by_uid(old_uid) { + Some(n) => n.name().to_str().unwrap_or_default().to_owned(), + None => "-3".to_string(), + } + } else { + "-3".to_string() + }; + let exe_hash = if fields[2] != "-1" && fields[2] != "" { + self.file_hash_cache.get(fields[2]) + } else { + "-3".to_string() + }; + if let Ok(pid) = fields[3].parse::() { + if fields[10] != "" && fields[10] != "-1" { + self.pid_tree_cache.put(pid, fields[10].to_string()); + } + } + let argv = if let Ok(pid) = fields[3].parse::() { + self.argv_cache.get(&pid) + } else { + "-3".to_string() + }; + let pgid_argv = if let Ok(pgid_id) = fields[5].parse::() { + self.argv_cache.get(&pgid_id) + } else { + "-3".to_string() + }; + let ppid_argv = if let Ok(ppid) = fields[4].parse::() { + self.argv_cache.get(&ppid) + } else { + "-3".to_string() + }; + self.sender.send(&UpdateCred { + ppid: fields[4], + timestamp: ×tamp, + old_username: &old_username, + data_type: fields[1], + pid: fields[3], + pid_tree: fields[10], + ppid_argv: &ppid_argv, + sessionid: fields[9], + res: fields[12], + old_uid: fields[11], + uid: fields[0], + pgid: fields[5], + comm: fields[7], + username: &username, + exe_hash: &exe_hash, + pgid_argv: &pgid_argv, + argv: &argv, + exe: fields[2], + tgid: fields[6], + nodename: fields[8], + }) + } + "42" => { + let timestamp = Clock::now_since_epoch().as_secs().to_string(); + let username = if let Ok(uid) = fields[0].parse::() { + match self.user_cache.get_user_by_uid(uid) { + Some(n) => n.name().to_str().unwrap_or_default().to_owned(), + None => "-3".to_string(), + } + } else { + "-3".to_string() + }; + let exe_hash = if fields[2] != "-1" && fields[2] != "" { + self.file_hash_cache.get(fields[2]) + } else { + "-3".to_string() + }; + let pid_tree = if let Ok(pid) = fields[3].parse::() { + let pid_tree = match self.pid_tree_cache.get(&pid) { + Some(t) => t, + None => "-3", + }; + pid_tree + } else { + "-3" + }; + let argv = if let Ok(pid) = fields[3].parse::() { + self.argv_cache.get(&pid) + } else { + "-3".to_string() + }; + let pgid_argv = if let Ok(pgid_id) = fields[5].parse::() { + self.argv_cache.get(&pgid_id) + } else { + "-3".to_string() + }; + let ppid_argv = if let Ok(ppid) = fields[4].parse::() { + self.argv_cache.get(&ppid) + } else { + "-3".to_string() + }; + self.sender.send(&Connect { + sport: fields[15], + nodename: fields[8], + exe_hash: &exe_hash, + sip: fields[14], + timestamp: ×tamp, + argv: &argv, + data_type: fields[1], + pid_tree: &pid_tree, + pgid_argv: &pgid_argv, + res: fields[16], + sa_family: fields[11], + uid: fields[0], + sessionid: fields[9], + pgid: fields[5], + username: &username, + dport: fields[13], + 
dip: fields[12], + tgid: fields[6], + comm: fields[7], + ppid: fields[4], + pid: fields[3], + exe: fields[2], + connect_type: fields[10], + ppid_argv: &ppid_argv, + }) + } + "601" => { + let timestamp = Clock::now_since_epoch().as_secs().to_string(); + let username = if let Ok(uid) = fields[0].parse::() { + match self.user_cache.get_user_by_uid(uid) { + Some(n) => n.name().to_str().unwrap_or_default().to_owned(), + None => "-3".to_string(), + } + } else { + "-3".to_string() + }; + let exe_hash = if fields[2] != "-1" && fields[2] != "" { + self.file_hash_cache.get(fields[2]) + } else { + "-3".to_string() + }; + let pid_tree = if let Ok(pid) = fields[3].parse::() { + let pid_tree = match self.pid_tree_cache.get(&pid) { + Some(t) => t, + None => "-3", + }; + pid_tree + } else { + "-3" + }; + let argv = if let Ok(pid) = fields[3].parse::() { + self.argv_cache.get(&pid) + } else { + "-3".to_string() + }; + let pgid_argv = if let Ok(pgid_id) = fields[5].parse::() { + self.argv_cache.get(&pgid_id) + } else { + "-3".to_string() + }; + let ppid_argv = if let Ok(ppid) = fields[4].parse::() { + self.argv_cache.get(&ppid) + } else { + "-3".to_string() + }; + self.sender.send(&Dns { + qr: fields[16], + data_type: fields[1], + uid: fields[0], + comm: fields[7], + pid: fields[3], + sip: fields[14], + sport: fields[15], + nodename: fields[8], + ppid_argv: &ppid_argv, + ppid: fields[4], + sessionid: fields[9], + pgid: fields[5], + username: &username, + dport: fields[13], + query: fields[10], + sa_family: fields[11], + tgid: fields[6], + dip: fields[12], + exe_hash: &exe_hash, + pid_tree: &pid_tree, + opcode: fields[17], + rcode: fields[18], + timestamp: ×tamp, + pgid_argv: &pgid_argv, + argv: &argv, + exe: fields[2], + }) + } + "702" => { + let timestamp = Clock::now_since_epoch().as_secs().to_string(); + self.sender.send(&LkmHidden { + data_type: fields[1], + hidden: fields[3], + module_name: fields[2], + timestamp: ×tamp, + }) + } + "49" => { + let timestamp = Clock::now_since_epoch().as_secs().to_string(); + let username = if let Ok(uid) = fields[0].parse::() { + match self.user_cache.get_user_by_uid(uid) { + Some(n) => n.name().to_str().unwrap_or_default().to_owned(), + None => "-3".to_string(), + } + } else { + "-3".to_string() + }; + let exe_hash = if fields[2] != "-1" && fields[2] != "" { + self.file_hash_cache.get(fields[2]) + } else { + "-3".to_string() + }; + let pid_tree = if let Ok(pid) = fields[3].parse::() { + let pid_tree = match self.pid_tree_cache.get(&pid) { + Some(t) => t, + None => "-3", + }; + pid_tree + } else { + "-3" + }; + let argv = if let Ok(pid) = fields[3].parse::() { + self.argv_cache.get(&pid) + } else { + "-3".to_string() + }; + let pgid_argv = if let Ok(pgid_id) = fields[5].parse::() { + self.argv_cache.get(&pgid_id) + } else { + "-3".to_string() + }; + let ppid_argv = if let Ok(ppid) = fields[4].parse::() { + self.argv_cache.get(&ppid) + } else { + "-3".to_string() + }; + self.sender.send(&Bind { + pgid_argv: &pgid_argv, + data_type: fields[1], + nodename: fields[8], + res: fields[13], + exe: fields[2], + exe_hash: &exe_hash, + ppid_argv: &ppid_argv, + ppid: fields[4], + pid: fields[3], + argv: &argv, + pid_tree: &pid_tree, + uid: fields[0], + comm: fields[7], + sport: fields[12], + sa_family: fields[10], + sip: fields[11], + pgid: fields[5], + tgid: fields[6], + sessionid: fields[9], + username: &username, + timestamp: ×tamp, + }) + } + "703" => { + let timestamp = Clock::now_since_epoch().as_secs().to_string(); + self.sender.send(&InterruptsHook { + data_type: 
fields[1], + interrupts_number: fields[4], + hidden: fields[3], + timestamp: ×tamp, + module_name: fields[2], + }) + } + "59" => { + let timestamp = Clock::now_since_epoch().as_secs().to_string(); + let username = if let Ok(uid) = fields[0].parse::() { + match self.user_cache.get_user_by_uid(uid) { + Some(n) => n.name().to_str().unwrap_or_default().to_owned(), + None => "-3".to_string(), + } + } else { + "-3".to_string() + }; + let exe_hash = if fields[2] != "-1" && fields[2] != "" { + self.file_hash_cache.get(fields[2]) + } else { + "-3".to_string() + }; + if let Ok(pid) = fields[3].parse::() { + if fields[21] != "" && fields[21] != "-1" { + self.pid_tree_cache.put(pid, fields[21].to_string()); + } + } + if let Ok(pid) = fields[3].parse::() { + if fields[11] != "" && fields[11] != "-1" { + self.argv_cache.put(pid, fields[11].to_string()); + } + } + let pgid_argv = if let Ok(pgid_id) = fields[5].parse::() { + self.argv_cache.get(&pgid_id) + } else { + "-3".to_string() + }; + let ppid_argv = if let Ok(ppid) = fields[4].parse::() { + self.argv_cache.get(&ppid) + } else { + "-3".to_string() + }; + let socket_argv = if let Ok(socket_pid) = fields[23].parse::() { + self.argv_cache.get(&socket_pid) + } else { + "-3".to_string() + }; + self.sender.send(&Execve { + dip: fields[16], + sa_family: fields[20], + pid: fields[3], + ppid: fields[4], + comm: fields[7], + sip: fields[18], + socket_argv: &socket_argv, + run_path: fields[12], + username: &username, + exe: fields[2], + res: fields[26], + stdin: fields[14], + stdout: fields[15], + nodename: fields[8], + ssh: fields[24], + exe_hash: &exe_hash, + sport: fields[19], + argv: fields[11], + ld_preload: fields[25], + uid: fields[0], + data_type: fields[1], + pgid_argv: &pgid_argv, + socket_pid: fields[23], + tgid: fields[6], + tty: fields[22], + pid_tree: fields[21], + timestamp: ×tamp, + sessionid: fields[9], + ppid_argv: &ppid_argv, + dport: fields[17], + pgid: fields[5], + }) + } + "10" => { + let timestamp = Clock::now_since_epoch().as_secs().to_string(); + let username = if let Ok(uid) = fields[0].parse::() { + match self.user_cache.get_user_by_uid(uid) { + Some(n) => n.name().to_str().unwrap_or_default().to_owned(), + None => "-3".to_string(), + } + } else { + "-3".to_string() + }; + let exe_hash = if fields[2] != "-1" && fields[2] != "" { + self.file_hash_cache.get(fields[2]) + } else { + "-3".to_string() + }; + if let Ok(pid) = fields[3].parse::() { + if fields[14] != "" && fields[14] != "-1" { + self.pid_tree_cache.put(pid, fields[14].to_string()); + } + } + let argv = if let Ok(pid) = fields[3].parse::() { + self.argv_cache.get(&pid) + } else { + "-3".to_string() + }; + let pgid_argv = if let Ok(pgid_id) = fields[5].parse::() { + self.argv_cache.get(&pgid_id) + } else { + "-3".to_string() + }; + let ppid_argv = if let Ok(ppid) = fields[4].parse::() { + self.argv_cache.get(&ppid) + } else { + "-3".to_string() + }; + self.sender.send(&Mprotect { + owner_pid: fields[11], + pid: fields[3], + comm: fields[7], + nodename: fields[8], + tgid: fields[6], + username: &username, + timestamp: ×tamp, + ppid_argv: &ppid_argv, + pgid_argv: &pgid_argv, + mprotect_prot: fields[10], + uid: fields[0], + argv: &argv, + exe: fields[2], + owner_file: fields[12], + ppid: fields[4], + pid_tree: fields[14], + exe_hash: &exe_hash, + vm_pid: fields[13], + data_type: fields[1], + pgid: fields[5], + sessionid: fields[9], + }) + } + _ => { + warn!("Datatype does not support:{:?}", fields); + Ok(()) + } + } + } +} diff --git a/agent/driver/src/prepare.rs 
b/agent/driver/src/prepare.rs new file mode 100644 index 000000000..6fd7a50a0 --- /dev/null +++ b/agent/driver/src/prepare.rs @@ -0,0 +1,163 @@ +use super::config::*; +use anyhow::*; +use lazy_static::lazy_static; +use log::*; +use regex::Regex; +use reqwest::blocking; +use sha2::{Digest, Sha256}; +use std::fs::*; +use walkdir::WalkDir; +lazy_static! { + pub static ref KERNEL_VERSION_RE: Regex = + Regex::new(r"(^3\.1[0-9]\.)|(^4\.1[0-9]\.)|(^4\.[0-9]\.)|(^4\.20\.)|(^5\.[0-4]\.)") + .unwrap(); +} + +fn download_sha(url: &str) -> Result { + // shasum is short. we can clone it + let resp = blocking::get(url)?; + Ok(resp.error_for_status()?.text()?) +} + +fn download_and_verify_sha(file_url: &str, file_path: &str, sha_url: &str) -> Result<()> { + info!("Downloading checksum from {}", sha_url); + let digest = download_sha(sha_url)?; + + info!("Downloading from {}", file_url); + let resp = blocking::get(file_url)?; + let bin = resp.error_for_status()?.bytes()?; + + info!("Download success"); + + if digest != format!("{:x}", Sha256::digest(&bin)) { + Err(anyhow!("Checksum check failed")) + } else { + info!("Checksum check passed"); + Ok(write(file_path, bin)?) + } +} + +// Check if there is a recent crash +pub fn check_crash() -> Option { + // walk through all crashes + for entry in WalkDir::new("/var/crash") + .into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.file_name().to_str().is_some()) + { + let file_name = entry.file_name().to_str().unwrap_or_default(); + let is_expired = std::time::SystemTime::now() + .duration_since(entry.metadata().unwrap().modified().unwrap()) + .unwrap() + > std::time::Duration::from_secs(48 * 60 * 60); + if is_expired { + continue; + } + if file_name.starts_with("dmesg") { + let content = read_to_string(entry.path()).unwrap_or_default(); + if content.find("[hids_driver]").is_some() { + return Some(content); + } else { + return None; + } + } + } + None +} + +// Check if the kernel version meets the requirements +pub fn check_kernel_version() -> std::result::Result<(), String> { + let kernel_version = read_to_string("/proc/sys/kernel/osrelease").unwrap_or_default(); + if (*KERNEL_VERSION_RE).is_match(&kernel_version) { + Ok(()) + } else { + Err(kernel_version) + } +} + +// Uninstall the driver +pub fn uninstall_driver() { + let _ = std::process::Command::new("rmmod") + .arg("hids_driver") + .env("PATH", "/sbin:/bin:/usr/bin:/usr/sbin") + .output(); +} + +fn get_modinfo(arg: &str) -> Result { + std::process::Command::new("modinfo") + .arg(arg) + .env("PATH", "/sbin:/bin:/usr/bin:/usr/sbin") + .output() + .map_err(|e| anyhow!("{}", e)) +} + +fn get_info_from_modinfo(arg: &str, prefix: &str) -> Result { + let output = get_modinfo(arg)?; + let lines = std::str::from_utf8(&output.stdout)?.lines(); + for i in lines { + if i.starts_with(prefix) { + let fields: Vec<&str> = i.split_whitespace().collect(); + if fields.len() == 2 { + return Ok(fields[1].trim().to_owned()); + } + } + } + + Err(anyhow!("cannot find {} in modinfo output", prefix)) +} +fn insmodprobe(cmd: &str, arg: &str) -> Result<()> { + let output = std::process::Command::new(cmd) + .arg(arg) + .env("PATH", "/sbin:/bin:/usr/bin:/usr/sbin") + .output()?; + if output.status.success() { + info!("{} hids_driver success", cmd); + Ok(()) + } else { + Err(anyhow!(String::from_utf8(output.stderr).unwrap_or_default())) + } +} + +// Prepare compatible ko +pub fn prepare_ko() -> Result<()> { + // Uninstall LKM in case the user mode process aborted abnormally + uninstall_driver(); + + // Get the previous driver version + 
let ko_file = format!("./{}-latest.ko", NAME); + let last_version = get_info_from_modinfo(&ko_file, "version:").unwrap_or_default(); + + // If the version is unchanged, insmod it and return + if last_version == VERSION { + info!("Local ko version matches the current version"); + return insmodprobe("insmod", &ko_file); + } + + let kernel_version = read_to_string("/proc/sys/kernel/osrelease").unwrap_or_default(); + + // Version differs: download the matching ko and install it + for i in KO_URL { + // Generate the download links + let checksum_url = format!("{}{}-{}-{}.sha256", i, NAME, VERSION, kernel_version); + let ko_url = format!("{}{}-{}-{}.ko", i, NAME, VERSION, kernel_version); + + info!("Downloading ko from {}", ko_url); + + if let Err(e) = download_and_verify_sha(&ko_url, &ko_file, &checksum_url) { + warn!("{}", e); + continue; + } + + info!("Ko written to file successfully."); + + // Verify ko's vermagic + if let Ok(info) = get_info_from_modinfo(&ko_file, "vermagic:") { + if info.find(&kernel_version).is_none() { + return Err(anyhow!("Ko vermagic verification failed")); + } + } + // Install after verification + return insmodprobe("insmod", &ko_file); + } + Err(anyhow!("Couldn't download ko")) +} diff --git a/agent/driver/template.toml b/agent/driver/template.toml new file mode 100644 index 000000000..fd227c420 --- /dev/null +++ b/agent/driver/template.toml @@ -0,0 +1,162 @@ +[metadata] +version = "1.5.0.0" +maintainers = ["zhanglei.sec@bytedance.com"] + +[config] +ko_url = ["http://example.com/download/ko/"] +pipe_path = "/proc/hids_driver/1" +name = "hids_driver" +socket_path = "/etc/hids/plugin.sock" + +[structures.execve] +# common_fields contains [uid,data_type,exe,pid,ppid,pgid,tgid,comm,nodename,sessionid] from the kernel and [username,timestamp,exe_hash,ppid_argv,pgid_argv,argv,pid_tree] from user space. +common_fields = true +data_type = 59 +# additional_fields can overwrite the common fields. Each key is a field name; its type is one of [kernel, placeholder, user].
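+# Note (inferred from the generated parser.rs in this patch; build.rs defines the exact semantics): for structures with common_fields = true, the common fields occupy columns 0-9 of each record read from the pipe, and a hypothetical entry such as foo = { index = 2, type = "kernel" } is copied verbatim from column 10+2 of that record. +# Fields declared as "placeholder" or "user" are not taken from their column; the agent fills them in user space (for example, socket_argv is resolved from socket_pid through the argv cache).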
+[structures.execve.additional_fields] +socket_argv = { index = 0, type = "placeholder" } +argv = { index = 1, type = "kernel" } +run_path = { index = 2, type = "kernel" } +pgid_argv = { index = 3, type = "placeholder" } +stdin = { index = 4, type = "kernel" } +stdout = { index = 5, type = "kernel" } +dip = { index = 6, type = "kernel" } +dport = { index = 7, type = "kernel" } +sip = { index = 8, type = "kernel" } +sport = { index = 9, type = "kernel" } +sa_family = { index = 10, type = "kernel" } +pid_tree = { index = 11, type = "kernel" } +tty = { index = 12, type = "kernel" } +socket_pid = { index = 13, type = "kernel" } +ssh = { index = 14, type = "kernel" } +ld_preload = { index = 15, type = "kernel" } +res = { index = 16, type = "kernel" } + +[structures.connect] +common_fields = true +data_type = 42 + +[structures.connect.additional_fields] +connect_type = { index = 0, type = "kernel" } +sa_family = { index = 1, type = "kernel" } +dip = { index = 2, type = "kernel" } +dport = { index = 3, type = "kernel" } +sip = { index = 4, type = "kernel" } +sport = { index = 5, type = "kernel" } +res = { index = 6, type = "kernel" } + +[structures.ptrace] +common_fields = true +data_type = 101 + +[structures.ptrace.additional_fields] +ptrace_request = { index = 0, type = "kernel" } +target_pid = { index = 1, type = "kernel" } +addr = { index = 2, type = "kernel" } +data = { index = 3, type = "kernel" } +pid_tree = { index = 4, type = "kernel" } + +[structures.bind] +common_fields = true +data_type = 49 + +[structures.bind.additional_fields] +sa_family = { index = 0, type = "kernel" } +sip = { index = 1, type = "kernel" } +sport = { index = 2, type = "kernel" } +res = { index = 3, type = "kernel" } + +[structures.update_cred] +common_fields = true +data_type = 604 + +[structures.update_cred.additional_fields] +pid_tree = { index = 0, type = "kernel" } +old_uid = { index = 1, type = "kernel" } +res = { index = 2, type = "kernel" } +old_username = { index = 3, type = "user" } + +[structures.load_module] +common_fields = true +data_type = 603 + +[structures.load_module.additional_fields] +mod_info = { index = 0, type = "kernel" } +pid_tree = { index = 1, type = "kernel" } +run_path = { index = 2, type = "kernel" } + +[structures.mprotect] +common_fields = true +data_type = 10 + +[structures.mprotect.additional_fields] +mprotect_prot = { index = 0, type = "kernel" } +owner_pid = { index = 1, type = "kernel" } +owner_file = { index = 2, type = "kernel" } +vm_pid = { index = 3, type = "kernel" } +pid_tree = { index = 4, type = "kernel" } + +[structures.create_file] +common_fields = true +data_type = 602 + +[structures.create_file.additional_fields] +file_path = { index = 0, type = "kernel" } + +[structures.dns] +common_fields = true +data_type = 601 + +[structures.dns.additional_fields] +query = { index = 0, type = "kernel" } +sa_family = { index = 1, type = "kernel" } +dip = { index = 2, type = "kernel" } + +dport = { index = 3, type = "kernel" } +sip = { index = 4, type = "kernel" } +sport = { index = 5, type = "kernel" } +qr = { index = 6, type = "kernel" } +opcode = { index = 7, type = "kernel" } +rcode = { index = 8, type = "kernel" } + +[structures.proc_file_hook] +common_fields = false +data_type = 700 + +[structures.proc_file_hook.additional_fields] +data_type = { index = 1, type = "kernel" } +module_name = { index = 2, type = "kernel" } +hidden = { index = 3, type = "kernel" } +timestamp = { index = 4, type = "user" } + +[structures.syscall_hook] +common_fields = false +data_type = 701 + 
+[structures.syscall_hook.additional_fields] +data_type = { index = 1, type = "kernel" } +module_name = { index = 2, type = "kernel" } +hidden = { index = 3, type = "kernel" } +syscall_number = { index = 4, type = "kernel" } +timestamp = { index = 5, type = "user" } + +[structures.lkm_hidden] +common_fields = false +data_type = 702 + +[structures.lkm_hidden.additional_fields] +data_type = { index = 1, type = "kernel" } +module_name = { index = 2, type = "kernel" } +hidden = { index = 3, type = "kernel" } +timestamp = { index = 4, type = "user" } + +[structures.interrupts_hook] +common_fields = false +data_type = 703 + +[structures.interrupts_hook.additional_fields] +data_type = { index = 1, type = "kernel" } +module_name = { index = 2, type = "kernel" } +hidden = { index = 3, type = "kernel" } +interrupts_number = { index = 4, type = "kernel" } +timestamp = { index = 5, type = "user" } diff --git a/agent/go.mod b/agent/go.mod new file mode 100644 index 000000000..70556a683 --- /dev/null +++ b/agent/go.mod @@ -0,0 +1,17 @@ +module github.com/bytedance/AgentSmith-HIDS/agent + +go 1.15 + +require ( + github.com/Shopify/sarama v1.27.2 + github.com/fsnotify/fsnotify v1.4.9 + github.com/google/uuid v1.1.2 + github.com/jessevdk/go-flags v1.4.0 + github.com/nightlyone/lockfile v1.0.0 + github.com/prometheus/procfs v0.2.0 + github.com/tinylib/msgp v1.1.4 + go.uber.org/atomic v1.7.0 + go.uber.org/config v1.4.0 + go.uber.org/zap v1.16.0 + gopkg.in/natefinch/lumberjack.v2 v2.0.0 +) diff --git a/agent/go.sum b/agent/go.sum new file mode 100644 index 000000000..723838968 --- /dev/null +++ b/agent/go.sum @@ -0,0 +1,157 @@ +github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/Shopify/sarama v1.27.2 h1:1EyY1dsxNDUQEv0O/4TsjosHI2CgB1uo9H/v56xzTxc= +github.com/Shopify/sarama v1.27.2/go.mod h1:g5s5osgELxgM+Md9Qni9rzo7Rbt+vvFQI4bt/Mc93II= +github.com/Shopify/toxiproxy v2.1.4+incompatible h1:TKdv8HiTLgE5wdJuEML90aBgNWsokNbMijUGhmcoBJc= +github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/eapache/go-resiliency v1.2.0 h1:v7g92e/KSN71Rq7vSThKaWIq68fL4YHvWyiUKorFR1Q= +github.com/eapache/go-resiliency v1.2.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs= +github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21 h1:YEetp8/yCZMuEPMUDHG0CW/brkkEp8mzqk2+ODEitlw= +github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU= +github.com/eapache/queue v1.1.0 h1:YOEu7KNc61ntiQlcEeUIoDTJ2o8mQznoNvUhiigpIqc= +github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I= +github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= +github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= +github.com/frankban/quicktest v1.10.2 h1:19ARM85nVi4xH7xPXuc5eM/udya5ieh7b/Sv+d844Tk= +github.com/frankban/quicktest v1.10.2/go.mod h1:K+q6oSqb0W0Ininfk863uOk1lMy69l/P6txr3mVT54s= +github.com/fsnotify/fsnotify v1.4.9 
h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4= +github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= +github.com/golang/snappy v0.0.1 h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4= +github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.5.2 h1:X2ev0eStA3AbceY54o37/0PQ/UWqKEiiO2dKL5OPaFM= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.1.2 h1:EVhdT+1Kseyi1/pUmXKaFxYsDNy9RQYkMWRH68J/W7Y= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/hashicorp/go-uuid v1.0.2 h1:cfejS+Tpcp13yd5nYHWDI6qVCny6wyX2Mt5SGur2IGE= +github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/jcmturner/gofork v1.0.0 h1:J7uCkflzTEhUZ64xqKnkDxq3kzc96ajM1Gli5ktUem8= +github.com/jcmturner/gofork v1.0.0/go.mod h1:MK8+TM0La+2rjBD4jE12Kj1pCCxK7d2LK/UM3ncEo0o= +github.com/jessevdk/go-flags v1.4.0 h1:4IU2WS7AumrZ/40jfhf4QVDMsQwqA7VEHozFRrGARJA= +github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.11.0 h1:wJbzvpYMVGG9iTI9VxpnNZfd4DzMPoCWze3GgSqz8yg= +github.com/klauspost/compress v1.11.0/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI= +github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/nightlyone/lockfile v1.0.0 h1:RHep2cFKK4PonZJDdEl4GmkabuhbsRMgk/k3uAmxBiA= +github.com/nightlyone/lockfile v1.0.0/go.mod h1:rywoIealpdNse2r832aiD9jRk8ErCatROs6LzC841CI= +github.com/philhofer/fwd v1.1.0 h1:PAdZw9+/BCf4gc/kA2L/PbGPkFe72Kl2GLZXTG8HpU8= +github.com/philhofer/fwd v1.1.0/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG3ZVNU= +github.com/pierrec/lz4 v2.5.2+incompatible h1:WCjObylUIOlKy/+7Abdn34TLIkXiA4UWUMhxq9m9ZXI= +github.com/pierrec/lz4 v2.5.2+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= +github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/procfs v0.2.0 h1:wH4vA7pcjKuZzjF7lM8awk4fnuJO6idemZXoKnULUx4= +github.com/prometheus/procfs v0.2.0/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/rcrowley/go-metrics v0.0.0-20200313005456-10cdbea86bc0 
h1:MkV+77GLUNo5oJ0jf870itWm3D0Sjh7+Za9gazKc5LQ= +github.com/rcrowley/go-metrics v0.0.0-20200313005456-10cdbea86bc0/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/tinylib/msgp v1.1.4 h1:LoJjc8YHnBUXK7kR6ocUJ0xHuonGLzpzV3RxMZ/4G4M= +github.com/tinylib/msgp v1.1.4/go.mod h1:fw0zyanbVLI0CNimiAzGT53nQhEXzCaKYmTfeon9xHc= +github.com/ttacon/chalk v0.0.0-20160626202418-22c06c80ed31/go.mod h1:onvgF043R+lC5RZ8IT9rBXDaEDnpnw/Cl+HFiw+v/7Q= +github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= +github.com/xdg/stringprep v1.0.0/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw= +go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/config v1.4.0 h1:upnMPpMm6WlbZtXoasNkK4f0FhxwS+W4Iqz5oNznehQ= +go.uber.org/config v1.4.0/go.mod h1:aCyrMHmUAc/s2h9sv1koP84M9ZF/4K+g2oleyESO/Ig= +go.uber.org/multierr v1.4.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= +go.uber.org/multierr v1.5.0 h1:KCa4XfM8CWFCpxXRGok+Q0SS/0XBhMDbHHGABQLvD2A= +go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= +go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee h1:0mgffUl7nfd+FpvXMVz4IDEaUSmT1ysygQC7qYo7sG4= +go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= +go.uber.org/zap v1.16.0 h1:uFRZXykJGK9lLY4HtgSw44DnIcAM+kRBP7x5m+NpAOM= +go.uber.org/zap v1.16.0/go.mod h1:MA8QOfq0BHJwdXa996Y4dYkAqRKB8/1K1QMMZVaNZjQ= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a h1:vclmkQCjlDX5OydZ9wv8rBCcS0QyQY66Mpf/7BZbInM= +golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de h1:5hukYrvBGR8/eNkX5mdUezrA6JiaEZDtJb9Ei+1LlBs= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net 
v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200904194848-62affa334b73/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974 h1:IX6qOQeG5uLjB/hjjwjedwfjND0hgjPMMyO1RoIXQNI= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f h1:+Nyd8tzPX9R7BWHguqsrbFdRx3WQ/1ib8I44HXV5yTA= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191104232314-dc038396d1f0/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20201022035929-9cf592e881e9 h1:sEvmEcJVKBNUvgCUClbUQeHOAa9U0I2Ce1BooMvVCY4= +golang.org/x/tools v0.0.0-20201022035929-9cf592e881e9/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= 
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b h1:QRR6H1YWRnHb4Y/HeNFCTJLFVxaq6wH4YuVdsUOr75U= +gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/jcmturner/aescts.v1 v1.0.1 h1:cVVZBK2b1zY26haWB4vbBiZrfFQnfbTVrE3xZq6hrEw= +gopkg.in/jcmturner/aescts.v1 v1.0.1/go.mod h1:nsR8qBOg+OucoIW+WMhB3GspUQXq9XorLnQb9XtvcOo= +gopkg.in/jcmturner/dnsutils.v1 v1.0.1 h1:cIuC1OLRGZrld+16ZJvvZxVJeKPsvd5eUIvxfoN5hSM= +gopkg.in/jcmturner/dnsutils.v1 v1.0.1/go.mod h1:m3v+5svpVOhtFAP/wSz+yzh4Mc0Fg7eRhxkJMWSIz9Q= +gopkg.in/jcmturner/goidentity.v3 v3.0.0 h1:1duIyWiTaYvVx3YX2CYtpJbUFd7/UuPYCfgXtQ3VTbI= +gopkg.in/jcmturner/goidentity.v3 v3.0.0/go.mod h1:oG2kH0IvSYNIu80dVAyu/yoefjq1mNfM5bm88whjWx4= +gopkg.in/jcmturner/gokrb5.v7 v7.5.0 h1:a9tsXlIDD9SKxotJMK3niV7rPZAJeX2aD/0yg3qlIrg= +gopkg.in/jcmturner/gokrb5.v7 v7.5.0/go.mod h1:l8VISx+WGYp+Fp7KRbsiUuXTTOnxIc3Tuvyavf11/WM= +gopkg.in/jcmturner/rpc.v1 v1.1.0 h1:QHIUxTX1ISuAv9dD2wJ9HWQVuWDX/Zc0PfeC2tjc4rU= +gopkg.in/jcmturner/rpc.v1 v1.1.0/go.mod h1:YIdkC4XfD6GXbzje11McwsDuOlZQSb9W4vfLvuNnlv8= +gopkg.in/natefinch/lumberjack.v2 v2.0.0 h1:1Lc07Kr7qY4U2YPouBjpCLxpiyxIVoxqXgkXLknAOE8= +gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5 h1:ymVxjfMaHvXD8RqPRmzHHsB3VvucivSkIAvJFDI5O3c= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 h1:tQIYjPdBoyREyB9XMu+nnTclpTYkz2zFM+lzLJFO4gQ= +gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +honnef.co/go/tools v0.0.1-2019.2.3 h1:3JgtbtFHMiCmsznwGVTUWbgGov+pVqnlf1dEJTNAXeM= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= diff --git a/agent/health/health.go b/agent/health/health.go new file mode 100644 index 000000000..2c86a9e9b --- /dev/null +++ b/agent/health/health.go @@ -0,0 +1,174 @@ +package health + +import ( + "encoding/json" + "os" + "runtime" + "runtime/debug" + "strconv" + "time" + + "github.com/bytedance/AgentSmith-HIDS/agent/common" + "github.com/bytedance/AgentSmith-HIDS/agent/plugin" + "github.com/bytedance/AgentSmith-HIDS/agent/spec" + "github.com/bytedance/AgentSmith-HIDS/agent/transport" + "github.com/prometheus/procfs" + "go.uber.org/zap" +) + +func getMemoryMap() map[string]string { + memoryMap := make(map[string]string, 20) + memoryStats := runtime.MemStats{} + runtime.ReadMemStats(&memoryStats) + memoryMap["data_type"] = "1003" + memoryMap["alloc"] = strconv.FormatUint(memoryStats.Alloc, 10) + memoryMap["total_alloc"] = strconv.FormatUint(memoryStats.TotalAlloc, 10) + memoryMap["sys"] = strconv.FormatUint(memoryStats.Sys, 10) + memoryMap["lookups"] = strconv.FormatUint(memoryStats.Lookups, 10) + memoryMap["mallocs"] = strconv.FormatUint(memoryStats.Mallocs, 10) + memoryMap["frees"] = strconv.FormatUint(memoryStats.Frees, 10) + memoryMap["heap_alloc"] = strconv.FormatUint(memoryStats.HeapAlloc, 10) + 
memoryMap["heap_sys"] = strconv.FormatUint(memoryStats.HeapSys, 10) + memoryMap["heap_idle"] = strconv.FormatUint(memoryStats.HeapIdle, 10) + memoryMap["heap_inuse"] = strconv.FormatUint(memoryStats.HeapInuse, 10) + memoryMap["heap_released"] = strconv.FormatUint(memoryStats.HeapReleased, 10) + memoryMap["heap_objects"] = strconv.FormatUint(memoryStats.HeapObjects, 10) + memoryMap["stack_inuse"] = strconv.FormatUint(memoryStats.StackInuse, 10) + memoryMap["stack_sys"] = strconv.FormatUint(memoryStats.StackSys, 10) + memoryMap["mspan_inuse"] = strconv.FormatUint(memoryStats.MSpanInuse, 10) + memoryMap["mspan_sys"] = strconv.FormatUint(memoryStats.MSpanSys, 10) + memoryMap["buckhash_sys"] = strconv.FormatUint(memoryStats.BuckHashSys, 10) + memoryMap["gc_sys"] = strconv.FormatUint(memoryStats.GCSys, 10) + memoryMap["other_sys"] = strconv.FormatUint(memoryStats.OtherSys, 10) + return memoryMap +} + +type Heart struct { + io uint64 + cpu float64 + sys float64 + lastFree time.Time +} + +func (h *Heart) Beat() { + report := make(map[string]string) + p, err := procfs.Self() + if err != nil { + zap.Error(err) + return + } + stat, err := p.Stat() + if err != nil { + zap.Error(err) + return + } + io, err := p.IO() + if err != nil { + zap.Error(err) + return + } + sys, err := procfs.NewDefaultFS() + if err != nil { + zap.Error(err) + return + } + sysStat, err := sys.Stat() + if err != nil { + zap.Error(err) + return + } + sysMem, err := sys.Meminfo() + if err != nil { + zap.Error(err) + return + } + if stat.RSS*os.Getpagesize() > 100000000 { + if time.Now().Sub(h.lastFree) <= time.Minute*5 { + zap.S().Panic("Force GC frequency too fast") + } + debug.FreeOSMemory() + h.lastFree = time.Now() + if err != nil { + zap.S().Panic(err) + } + } + report["kernel_version"] = common.KernelVersion + report["distro"] = common.Distro + report["memory"] = strconv.Itoa(stat.RSS * os.Getpagesize()) + report["data_type"] = "1000" + report["timestamp"] = strconv.FormatInt(time.Now().Unix(), 10) + if h.sys == 0 { + report["cpu"] = strconv.FormatFloat(0, 'f', 5, 64) + + } else { + report["cpu"] = strconv.FormatFloat(float64(runtime.NumCPU())*(stat.CPUTime()-h.cpu)/(getTotal(sysStat)-h.sys), 'f', 5, 64) + } + report["io"] = strconv.FormatUint(io.ReadBytes+io.WriteBytes-h.io, 10) + report["slab"] = strconv.FormatUint(sysMem.Slab, 10) + plugins := []struct { + RSS int `json:"rss"` + IO uint64 `json:"io"` + CPU float64 `json:"cpu"` + Name string `json:"name"` + Version string `json:"version"` + Pid int `json:"pid"` + QPS float64 `json:"qps"` + }{} + s, err := plugin.GetServer() + if err == nil { + s.ForEach(func(k string, p *plugin.Plugin) { + item := struct { + RSS int `json:"rss"` + IO uint64 `json:"io"` + CPU float64 `json:"cpu"` + Name string `json:"name"` + Version string `json:"version"` + Pid int `json:"pid"` + QPS float64 `json:"qps"` + }{Name: p.Name(), Version: p.Version(), Pid: p.PID()} + proc, err := procfs.NewProc(p.PID()) + if err == nil { + stat, err := proc.Stat() + if err == nil { + item.RSS = stat.RSS * os.Getpagesize() + } + if p.CPU != 0 { + item.CPU = float64(runtime.NumCPU()) * (stat.CPUTime() - p.CPU) / (getTotal(sysStat) - h.sys) + } + io, err := proc.IO() + if err == nil { + item.IO = io.ReadBytes + io.WriteBytes - p.IO + } + item.QPS = float64(p.Counter.Swap(0)) / 30.0 + p.CPU = stat.CPUTime() + p.IO = io.ReadBytes + io.WriteBytes + } + plugins = append(plugins, item) + }) + } + encodedPlugins, err := json.Marshal(plugins) + report["plugins"] = string(encodedPlugins) + zap.S().Infof("%+v", report) + 
err = transport.Send(&spec.Data{report}) + if err != nil { + zap.S().Error(err) + } + h.sys = getTotal(sysStat) + h.cpu = stat.CPUTime() + h.io = io.ReadBytes + io.WriteBytes +} +func getTotal(sysStat procfs.Stat) float64 { + return sysStat.CPUTotal.Idle + sysStat.CPUTotal.IRQ + sysStat.CPUTotal.Iowait + sysStat.CPUTotal.Nice + sysStat.CPUTotal.SoftIRQ + sysStat.CPUTotal.System + sysStat.CPUTotal.User +} + +func Start() { + ticker := time.NewTicker(time.Second * 30) + h := &Heart{} + h.Beat() + for { + select { + case <-ticker.C: + h.Beat() + } + } +} diff --git a/agent/journal_watcher/.gitignore b/agent/journal_watcher/.gitignore new file mode 100644 index 000000000..bfb5ae532 --- /dev/null +++ b/agent/journal_watcher/.gitignore @@ -0,0 +1,3 @@ +Cargo.lock +target +*log* \ No newline at end of file diff --git a/agent/journal_watcher/Cargo.toml b/agent/journal_watcher/Cargo.toml new file mode 100644 index 000000000..0ecce8b8c --- /dev/null +++ b/agent/journal_watcher/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "journal_watcher" +version = "0.1.0" +authors = ["zhanglei.sec "] +edition = "2018" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +libsystemd-sys = "0.8.0" +anyhow = "1" +plugin_builder = { path = "../support/rust/plugin_builder" } +plugin = { path = "../support/rust/plugin" } +serde = { version = "1.0", features = ["derive"] } +log = "0.4" diff --git a/agent/journal_watcher/Makefile b/agent/journal_watcher/Makefile new file mode 100644 index 000000000..79281e943 --- /dev/null +++ b/agent/journal_watcher/Makefile @@ -0,0 +1,6 @@ +run: + cargo run --release +build: + cargo build --release +clean: + cargo clean \ No newline at end of file diff --git a/agent/journal_watcher/README-zh_CN.md b/agent/journal_watcher/README-zh_CN.md new file mode 100644 index 000000000..461ce7137 --- /dev/null +++ b/agent/journal_watcher/README-zh_CN.md @@ -0,0 +1,34 @@ +[English](README.md) | 简体中文 +## 关于Journal Watcher插件 +Journal Wacher是一个用来监视systemd日志的插件。当前插件会解析并产生ssh相关的事件,这些事件可以用来监视不正常的登陆行为,例如ssh暴力破解或者krb5的不正常登陆等。 + +## 平台兼容性 +与[AgentSmith-HIDS Agent](../README-zh_CN.md#平台兼容性)相同。 + +## 需要的编译环境 +* Rust 1.48.0 + +快速安装 [rust](https://www.rust-lang.org/tools/install) 环境: +``` +curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh +``` + +## 编译 +执行以下命令: +``` +make build +``` +或者: +``` +cargo build --release +``` +你将会在`target/release/`下面找到`journal_watcher`二进制文件。 + +## 配置 +在[main.rs](./src/main.rs)中,有下面几个常量: +``` +const AGENT_SOCK_PATH:&str = "/etc/hids/plugin.sock"; +const PLUGIN_NAME:&str = "journal_watcher"; +const PLUGIN_VERSION:&str = "1.0.0.0"; +``` +这些常量可以根据需要进行修改,但是要注意:他们需要与[Agent参数](../README-zh_CN.md#参数和选项)以及[Agent的配置文件](../README-zh_CN.md#配置文件)保持一致。 \ No newline at end of file diff --git a/agent/journal_watcher/README.md b/agent/journal_watcher/README.md new file mode 100644 index 000000000..5267a8576 --- /dev/null +++ b/agent/journal_watcher/README.md @@ -0,0 +1,35 @@ +English | [简体中文](README-zh_CN.md) +## About Journal Watcher Plugin +Journal Wacher is a plugin for monitoring systemd logs. +Currently, ssh-related events are mainly generated, which can be used to monitor abnormal login behaviors, such as ssh brute-force attack or krb5 abnormal logins, etc. 
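+ +For reference, a failed password login for an invalid user is reported with a schema roughly like the following (field values are illustrative and the actual wire encoding is handled by the agent; see `src/watcher.rs` for the exact fields): +``` +{"data_type":"4000","timestamp":"1608804136","status":"false","types":"password","invalid":"true","user":"root","sip":"10.0.0.1","sport":"40022","rawlog":"Failed password for invalid user root from 10.0.0.1 port 40022 ssh2"} +```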
+ +## Supported Platforms +Same as [AgentSmith-HIDS Agent](../README.md#supported-platforms) + +## Compilation Environment Requirements +* Rust 1.48.0 + +Quickly install [rust](https://www.rust-lang.org/tools/install) environment: +``` +curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh +``` + +## Building +Just run: +``` +make build +``` +or +``` +cargo build --release +``` +You will find the journal_watcher binary file under `target/release/`. + +## Config +In `main.rs`, there are the following constants: +``` +const AGENT_SOCK_PATH:&str = "/etc/hids/plugin.sock"; +const PLUGIN_NAME:&str = "journal_watcher"; +const PLUGIN_VERSION:&str = "1.0.0.0"; +``` +They can be configured as required, but note: it needs to be consistent with the [agent's parameters ](../README.md#parameters-and-options) and [agent's config.yaml](../README.md#config-file). \ No newline at end of file diff --git a/agent/journal_watcher/src/main.rs b/agent/journal_watcher/src/main.rs new file mode 100644 index 000000000..43476925b --- /dev/null +++ b/agent/journal_watcher/src/main.rs @@ -0,0 +1,72 @@ +use log::{error, info}; +use std::sync::{atomic, Arc}; +use std::thread; +use std::time::Duration; + +mod watcher; + +const SENDER_SLEEP_INTERVAL: Duration = Duration::from_millis(126); +const AGENT_SOCK_PATH:&str = "/etc/hids/plugin.sock"; +const PLUGIN_NAME:&str = "journal_watcher"; +const PLUGIN_VERSION:&str = "1.0.0.0"; + +fn main() { + let (sender, receiver) = plugin_builder::Builder::new( + AGENT_SOCK_PATH, + PLUGIN_NAME, + PLUGIN_VERSION, + ) + .unwrap() + .build(); + + let exit = Arc::new(atomic::AtomicBool::new(false)); + let sender_exit = exit.clone(); + let receiver_exit = sender_exit.clone(); + + let sender_handler = thread::spawn(move || { + let mut watcher = match watcher::JournalWatcher::new(sender) { + Ok(w) => w, + Err(e) => { + error!("{}", e); + thread::sleep(SENDER_SLEEP_INTERVAL); + return; + } + }; + loop { + if sender_exit.load(atomic::Ordering::Relaxed) { + error!("Sender detect exit"); + return; + } + if let Err(e) = watcher.parse() { + error!("{}", e); + thread::sleep(SENDER_SLEEP_INTERVAL); + break; + } + } + sender_exit.store(true, atomic::Ordering::Relaxed); + }); + let receiver_handler = thread::spawn(move || { + loop { + if receiver_exit.load(atomic::Ordering::Relaxed) { + error!("Sender detect exit"); + return; + } + match receiver.receive() { + Ok(t) => info!("{:?}", t), + Err(e) => { + error!("{}", e); + break; + } + } + } + receiver_exit.store(true, atomic::Ordering::Relaxed); + }); + if sender_handler.join().is_err() { + error!("Sender panic"); + exit.store(true, atomic::Ordering::Relaxed); + } + if receiver_handler.join().is_err() { + error!("Receiver panic"); + exit.store(true, atomic::Ordering::Relaxed); + } +} diff --git a/agent/journal_watcher/src/watcher.rs b/agent/journal_watcher/src/watcher.rs new file mode 100644 index 000000000..331491f3d --- /dev/null +++ b/agent/journal_watcher/src/watcher.rs @@ -0,0 +1,191 @@ +use anyhow::{anyhow, Result}; +use libsystemd_sys::journal::{ + sd_journal, sd_journal_add_match, sd_journal_close, sd_journal_get_data, sd_journal_next, + sd_journal_open, sd_journal_wait, SD_JOURNAL_SYSTEM, +}; +use log::error; +use serde::Serialize; +use std::cmp::Ordering; +use std::ffi::CString; +use std::io::Error; +use std::mem::MaybeUninit; + +#[derive(Serialize, Debug)] +struct SshEvent { + data_type: String, + timestamp: String, + status: String, + types: String, + invalid: String, + user: String, + sip: String, + sport: String, + rawlog: String, 
+} + +#[derive(Serialize, Debug)] +struct Krb5Event { + data_type: String, + timestamp: String, + authorized: String, + principal: String, + rawlog: String, +} + +pub struct JournalWatcher { + journal_context: *mut sd_journal, + sender: plugin::Sender, +} + +impl Drop for JournalWatcher { + fn drop(&mut self) { + if !self.journal_context.is_null() { + let _ = unsafe { sd_journal_close(self.journal_context) }; + self.sender.close(); + } + } +} + +impl JournalWatcher { + pub fn new(sender: plugin::Sender) -> Result { + let mut p = MaybeUninit::uninit(); + let r = unsafe { sd_journal_open(p.as_mut_ptr(), SD_JOURNAL_SYSTEM) }; + if r != 0 { + return Err(anyhow!(Error::last_os_error())); + } + let journal_context = unsafe { p.assume_init() }; + let inf_cstr = CString::new("_SYSTEMD_UNIT=ssh.service")?; + let r = unsafe { sd_journal_add_match(journal_context, inf_cstr.as_ptr() as _, 0) }; + if r != 0 { + Err(anyhow!(Error::last_os_error())) + } else { + Ok(Self { + journal_context, + sender, + }) + } + } + + pub fn parse(&mut self) -> Result<()> { + + let r = unsafe { sd_journal_next(self.journal_context) }; + + match r.cmp(&0) { + Ordering::Less => Err(anyhow!(Error::last_os_error())), // 没有 + Ordering::Equal => { + + let r = unsafe { sd_journal_wait(self.journal_context, 1_000_000) }; + if r < 0 { + Err(anyhow!(Error::last_os_error())) + + } else { + Ok(()) + } + } + Ordering::Greater => { + + let message = self.get_field("MESSAGE")?; + let mut timestamp = self.get_field("_SOURCE_REALTIME_TIMESTAMP")?; + timestamp.truncate(10); + let fields: Vec<&str> = message.split_whitespace().collect(); + if fields.len() < 4 { + error!("Unexpected len. Raw message: {}", message); + return Ok(()); + } + match fields[0] { + "Authorized" => { + if fields.len() < 6 { + error!("Unexpected len. Raw message: {}", message); + return Ok(()); + } + self.sender.send(&Krb5Event { + timestamp, + data_type: "4001".to_owned(), + authorized: fields[2].replace(",", ""), + principal: fields[5].to_string(), + rawlog: message, + }) + } + "Accepted" => { + if fields.len() < 8 { + error!("Parse len is unexpected,raw message: {}", message); + return Ok(()); + } + self.sender.send(&SshEvent { + timestamp, + data_type: "4000".to_owned(), + status: "true".to_string(), + types: fields[1].to_string(), + invalid: "false".to_string(), + user: fields[3].to_string(), + sip: fields[5].to_string(), + sport: fields[7].to_string(), + rawlog: message, + }) + } + "Failed" => match fields[3] { + "invalid" => { + if fields.len() < 10 { + error!("Parse len is unexpected,raw message: {}", message); + return Ok(()); + } + self.sender.send(&SshEvent { + timestamp, + data_type: "4000".to_owned(), + status: "false".to_owned(), + types: fields[1].to_string(), + invalid: "true".to_owned(), + user: fields[5].to_string(), + sip: fields[7].to_string(), + sport: fields[9].to_string(), + rawlog: message, + }) + } + _ => { + if fields.len() < 8 { + error!("Unexpected len. 
Raw message: {}", message); + return Ok(()); + } + self.sender.send(&SshEvent { + timestamp, + data_type: "4000".to_owned(), + status: "false".to_owned(), + types: fields[1].to_string(), + invalid: "false".to_owned(), + user: fields[3].to_string(), + sip: fields[5].to_string(), + sport: fields[7].to_string(), + rawlog: message, + }) + } + }, + _ => Ok(()), + } + } + } + } + + fn get_field(&self, field: &str) -> Result { + let mut d = MaybeUninit::uninit(); + let mut l = MaybeUninit::uninit(); + let field_c = CString::new(field)?; + let r = unsafe { + sd_journal_get_data( + self.journal_context, + field_c.as_ptr(), + d.as_mut_ptr(), + l.as_mut_ptr(), + ) + }; + if r != 0 { + return Err(anyhow!(Error::last_os_error())); + } + let s = unsafe { std::slice::from_raw_parts(d.assume_init(), l.assume_init()) }; + Ok(std::str::from_utf8(s)? + .to_string() + .splitn(2, '=') + .last() + .unwrap_or_default() + .to_owned()) + } +} diff --git a/agent/log/log.go b/agent/log/log.go new file mode 100644 index 000000000..e6e53c993 --- /dev/null +++ b/agent/log/log.go @@ -0,0 +1,36 @@ +package log + +import ( + "encoding/json" + "fmt" + + "github.com/bytedance/AgentSmith-HIDS/agent/spec" + "github.com/bytedance/AgentSmith-HIDS/agent/transport" +) + +type ErrorLog struct { + Level string `json:"level"` + Timestamp string `json:"timestamp"` + Source string `json:"source"` + Msg string `json:"msg"` +} +type LoggerWriter struct{} + +func (*LoggerWriter) Write(p []byte) (n int, err error) { + l := ErrorLog{} + e := json.Unmarshal(p, &l) + if err != nil { + return 0, e + } + m := make(map[string]string) + m["level"] = l.Level + m["timestamp"] = l.Timestamp + m["source"] = l.Source + m["msg"] = l.Msg + m["data_type"] = "1001" + err = transport.Send(&spec.Data{m}) + if err != nil { + fmt.Println(err) + } + return len(p), nil +} diff --git a/agent/main.go b/agent/main.go new file mode 100644 index 000000000..9ff4f22e2 --- /dev/null +++ b/agent/main.go @@ -0,0 +1,132 @@ +package main + +import ( + "fmt" + "os" + "os/signal" + "runtime" + "strconv" + "syscall" + "time" + + "github.com/Shopify/sarama" + "github.com/bytedance/AgentSmith-HIDS/agent/common" + "github.com/bytedance/AgentSmith-HIDS/agent/config" + "github.com/bytedance/AgentSmith-HIDS/agent/health" + "github.com/bytedance/AgentSmith-HIDS/agent/log" + "github.com/bytedance/AgentSmith-HIDS/agent/plugin" + "github.com/bytedance/AgentSmith-HIDS/agent/transport" + "github.com/bytedance/AgentSmith-HIDS/agent/transport/fileout" + "github.com/bytedance/AgentSmith-HIDS/agent/transport/kafka" + "github.com/jessevdk/go-flags" + + "github.com/nightlyone/lockfile" + "go.uber.org/zap" + "go.uber.org/zap/zapcore" + "gopkg.in/natefinch/lumberjack.v2" +) + +var opts struct { + Version bool `short:"v" long:"version" description:"Print agent version"` + Plugin string `long:"plugin" description:"Plugin socket path" default:"plugin.sock"` + Log string `long:"log" description:"Log file path" default:"log/agent_smith.log"` + Config string `long:"config" description:"Config file path(.yaml)" default:"config.yaml"` + Data string `long:"data" choice:"file" choice:"stdout" choice:"kafka" description:"Set data output" default:"stdout"` + FilePath string `long:"file_path" description:"If data option is file ,this option is used to set the file path" default:"data.log"` + Addr string `long:"addr" description:"If data option is kafka ,this option is used to set kafka addr"` + Topic string `long:"topic" description:"If data option is kafka ,this option is used to set kafka topic name"` +} 
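+
+// Example invocations (illustrative only; the binary name, paths and addresses
+// below are placeholders):
+//   ./agent --data stdout
+//   ./agent --data file --file_path /var/log/agent_smith/data.log
+//   ./agent --data kafka --addr 127.0.0.1:9092 --topic hids --config config.yaml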
+ +func init() { + if _, err := flags.ParseArgs(&opts, os.Args); err != nil { + switch flagsErr := err.(type) { + case *flags.Error: + if flagsErr.Type == flags.ErrHelp { + os.Exit(0) + } + } + os.Exit(1) + } + if opts.Version { + fmt.Println("Agent version :", common.Version) + os.Exit(0) + } + if runtime.NumCPU() >= 8 { + runtime.GOMAXPROCS(8) + } +} +func main() { + plugin.SocketPath = opts.Plugin + config.ConfigPath = opts.Config + switch opts.Data { + case "stdout": + case "file": + fo, err := fileout.NewFileOut(opts.FilePath) + defer fo.Close() + if err != nil { + zap.S().Panic(err) + } else { + transport.SetTransport(fo) + } + case "kafka": + cfg := sarama.NewConfig() + cfg.Producer.Return.Successes = true + client, err := sarama.NewClient([]string{opts.Addr}, cfg) + if err != nil { + zap.S().Panic(err) + } + k, err := kafka.NewKafka(client, opts.Topic) + if err != nil { + zap.S().Panic(err) + } + transport.SetTransport(k) + } + lock, err := lockfile.New("/var/run/agent_smith.pid") + if err != nil { + zap.S().Panicf("%v", err) + } + if err = lock.TryLock(); err != nil { + zap.S().Panicf("%v", err) + } + defer lock.Unlock() + + logConfig := zap.NewProductionEncoderConfig() + logConfig.CallerKey = "source" + logConfig.TimeKey = "timestamp" + logConfig.EncodeTime = func(t time.Time, z zapcore.PrimitiveArrayEncoder) { + z.AppendString(strconv.FormatInt(t.Unix(), 10)) + } + remoteEncoder := zapcore.NewJSONEncoder(logConfig) + remoteWriter := zapcore.AddSync(&log.LoggerWriter{}) + fileEncoder := zapcore.NewConsoleEncoder(zap.NewDevelopmentEncoderConfig()) + fileWriter := zapcore.AddSync(&lumberjack.Logger{ + Filename: opts.Log, + MaxSize: 1, // megabytes + MaxBackups: 10, + MaxAge: 10, //days + Compress: true, // disabled by default + }) + core := zapcore.NewTee(zapcore.NewCore(remoteEncoder, remoteWriter, zap.ErrorLevel), zapcore.NewCore(fileEncoder, fileWriter, zap.InfoLevel)) + logger := zap.New(core, zap.AddCaller()) + defer logger.Sync() + undo := zap.ReplaceGlobals(logger) + defer undo() + + zap.S().Infof("Agent Smith Version:v%s", common.Version) + zap.S().Infof("Agent ID:%s", common.AgentID) + zap.S().Infof("Private IPv4:%v", common.PrivateIPv4) + zap.S().Infof("Public IPv4:%v", common.PublicIPv4) + zap.S().Infof("Private IPv6:%v", common.PrivateIPv6) + zap.S().Infof("Public IPv6:%v", common.PublicIPv6) + zap.S().Infof("Hostname:%v", common.Hostname) + + go health.Start() + go config.Watcher() + go plugin.Run() + sigs := make(chan os.Signal, 1) + signal.Notify(sigs, syscall.SIGTERM, syscall.SIGINT) + select { + case sig := <-sigs: + zap.S().Infof("Receive signal %v", sig.String()) + } +} diff --git a/agent/plugin/plugin.go b/agent/plugin/plugin.go new file mode 100644 index 000000000..8690c3605 --- /dev/null +++ b/agent/plugin/plugin.go @@ -0,0 +1,182 @@ +package plugin + +import ( + "errors" + "net" + "os" + "os/exec" + "path" + "path/filepath" + "strconv" + "syscall" + "time" + + "github.com/bytedance/AgentSmith-HIDS/agent/plugin/procotol" + "github.com/bytedance/AgentSmith-HIDS/agent/spec" + "github.com/prometheus/procfs" + "github.com/tinylib/msgp/msgp" + "go.uber.org/atomic" + "go.uber.org/zap" +) + +// The time to wait before forcing the plug-in to kill, +// this is to leave the necessary time for the plugin to the clean environment normally +const exitTimeout = 1 * time.Second + +// Plugin contains the process, socket, metadata and other information of a plugin +type Plugin struct { + name string + version string + checksum string + cmd *exec.Cmd + conn net.Conn + 
runtimePID int + pgid int + IO uint64 + CPU float64 + reader *msgp.Reader + Counter atomic.Uint64 +} + +// Name func returns the name of the plugin +func (p *Plugin) Name() string { + return p.name +} + +// Version func returns the version of the plugin +func (p *Plugin) Version() string { + return p.version +} + +// Checksum func returns the checksum of the plugin +func (p *Plugin) Checksum() string { + return p.checksum +} + +// PID func returns the real run pid of the plugin +func (p *Plugin) PID() int { + return p.runtimePID +} + +// Close func is used to close this plugin, +// when closing it will kill all processes under the same process group +func (p *Plugin) Close(timeout bool) { + if p.conn != nil { + p.conn.Close() + } + if timeout { + time.Sleep(exitTimeout) + } + if p.pgid != 0 { + syscall.Kill(-p.pgid, syscall.SIGKILL) + } + if p.cmd != nil && p.cmd.Process != nil { + p.cmd.Process.Kill() + } +} + +// Receive func is used to read data from the socket connection of plugin +func (p *Plugin) Receive() (*spec.Data, error) { + data := &spec.Data{} + err := data.DecodeMsg(p.reader) + p.Counter.Add(uint64(len(*data))) + return data, err +} + +// Send func is used to send tasks to this plugin +func (p *Plugin) Send(t spec.Task) error { + w := msgp.NewWriter(p.conn) + err := t.EncodeMsg(w) + if err != nil { + return err + } + err = w.Flush() + return err +} + +func (p *Plugin) Run() error { + if p.cmd == nil { + return errors.New("Plugin cmd is nil") + } + err := p.cmd.Start() + if err != nil { + return err + } + go p.cmd.Wait() + if p.cmd.Process == nil { + return errors.New("Plugin cmd process is nil") + } + pgid, err := syscall.Getpgid(p.cmd.Process.Pid) + if err != nil { + return err + } + p.pgid = pgid + return nil +} + +func (p *Plugin) Connected() bool { + return p.conn != nil +} + +// Connect func is used to verify the connection request, +// if the pgid is inconsistent, an error will be returned +// Note that it is necessary to call Server's Delete func to clean up after this func returns error +func (p *Plugin) Connect(req procotol.RegistRequest, conn net.Conn) error { + if p.conn != nil { + return errors.New("The same plugin has been connected, it may be a malicious attack") + } + reqPgid, err := syscall.Getpgid(int(req.Pid)) + if err != nil { + return errors.New("Cann't get req process which pid is " + strconv.FormatUint(uint64(req.Pid), 10)) + } + cmdPgid, err := syscall.Getpgid(p.cmd.Process.Pid) + if err != nil { + return errors.New("Cann't get cmd process which pid is " + strconv.FormatUint(uint64(p.cmd.Process.Pid), 10)) + } + if reqPgid != cmdPgid { + return errors.New("Pgid does not match") + } + p.runtimePID = int(req.Pid) + proc, err := procfs.NewProc(p.runtimePID) + if err == nil { + procIO, err := proc.IO() + if err == nil { + p.IO = procIO.ReadBytes + procIO.WriteBytes + } + procStat, err := proc.Stat() + if err == nil { + p.CPU = procStat.CPUTime() + } + } + p.conn = conn + p.version = req.Version + p.name = req.Name + p.reader = msgp.NewReaderSize(conn, 8*1024) + + return nil +} + +// NewPlugin func creates a new plugin instance +func NewPlugin(name, version, checksum, runPath string) (*Plugin, error) { + var err error + runPath, err = filepath.Abs(runPath) + if err != nil { + return nil, err + } + dir, file := path.Split(runPath) + zap.S().Infof("Plugin work directory: %s", dir) + c := exec.Command(runPath) + c.Dir = dir + c.Stderr, err = os.OpenFile(dir+file+".stderr", os.O_RDWR|os.O_CREATE, 0700) + if err != nil { + return nil, err + } + c.Stdin = nil + 
c.Stdout, err = os.OpenFile(dir+file+".stdout", os.O_RDWR|os.O_CREATE, 0700) + if err != nil { + return nil, err + } + c.SysProcAttr = &syscall.SysProcAttr{Setpgid: true, Pgid: 0} + p := Plugin{cmd: c, name: name, version: version, checksum: checksum} + return &p, nil +} diff --git a/agent/plugin/procotol/regist.go b/agent/plugin/procotol/regist.go new file mode 100644 index 000000000..9f14bbdab --- /dev/null +++ b/agent/plugin/procotol/regist.go @@ -0,0 +1,8 @@ +package procotol + +//go:generate msgp +type RegistRequest struct { + Name string `msg:"name"` + Version string `msg:"version"` + Pid int `msg:"pid"` +} diff --git a/agent/plugin/procotol/regist_gen.go b/agent/plugin/procotol/regist_gen.go new file mode 100644 index 000000000..5cb25c177 --- /dev/null +++ b/agent/plugin/procotol/regist_gen.go @@ -0,0 +1,160 @@ +package procotol + +// Code generated by github.com/tinylib/msgp DO NOT EDIT. + +import ( + "github.com/tinylib/msgp/msgp" +) + +// DecodeMsg implements msgp.Decodable +func (z *RegistRequest) DecodeMsg(dc *msgp.Reader) (err error) { + var field []byte + _ = field + var zb0001 uint32 + zb0001, err = dc.ReadMapHeader() + if err != nil { + err = msgp.WrapError(err) + return + } + for zb0001 > 0 { + zb0001-- + field, err = dc.ReadMapKeyPtr() + if err != nil { + err = msgp.WrapError(err) + return + } + switch msgp.UnsafeString(field) { + case "name": + z.Name, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "Name") + return + } + case "version": + z.Version, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "Version") + return + } + case "pid": + z.Pid, err = dc.ReadInt() + if err != nil { + err = msgp.WrapError(err, "Pid") + return + } + default: + err = dc.Skip() + if err != nil { + err = msgp.WrapError(err) + return + } + } + } + return +} + +// EncodeMsg implements msgp.Encodable +func (z RegistRequest) EncodeMsg(en *msgp.Writer) (err error) { + // map header, size 3 + // write "name" + err = en.Append(0x83, 0xa4, 0x6e, 0x61, 0x6d, 0x65) + if err != nil { + return + } + err = en.WriteString(z.Name) + if err != nil { + err = msgp.WrapError(err, "Name") + return + } + // write "version" + err = en.Append(0xa7, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e) + if err != nil { + return + } + err = en.WriteString(z.Version) + if err != nil { + err = msgp.WrapError(err, "Version") + return + } + // write "pid" + err = en.Append(0xa3, 0x70, 0x69, 0x64) + if err != nil { + return + } + err = en.WriteInt(z.Pid) + if err != nil { + err = msgp.WrapError(err, "Pid") + return + } + return +} + +// MarshalMsg implements msgp.Marshaler +func (z RegistRequest) MarshalMsg(b []byte) (o []byte, err error) { + o = msgp.Require(b, z.Msgsize()) + // map header, size 3 + // string "name" + o = append(o, 0x83, 0xa4, 0x6e, 0x61, 0x6d, 0x65) + o = msgp.AppendString(o, z.Name) + // string "version" + o = append(o, 0xa7, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e) + o = msgp.AppendString(o, z.Version) + // string "pid" + o = append(o, 0xa3, 0x70, 0x69, 0x64) + o = msgp.AppendInt(o, z.Pid) + return +} + +// UnmarshalMsg implements msgp.Unmarshaler +func (z *RegistRequest) UnmarshalMsg(bts []byte) (o []byte, err error) { + var field []byte + _ = field + var zb0001 uint32 + zb0001, bts, err = msgp.ReadMapHeaderBytes(bts) + if err != nil { + err = msgp.WrapError(err) + return + } + for zb0001 > 0 { + zb0001-- + field, bts, err = msgp.ReadMapKeyZC(bts) + if err != nil { + err = msgp.WrapError(err) + return + } + switch msgp.UnsafeString(field) { + case "name": + z.Name, bts, 
err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "Name") + return + } + case "version": + z.Version, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "Version") + return + } + case "pid": + z.Pid, bts, err = msgp.ReadIntBytes(bts) + if err != nil { + err = msgp.WrapError(err, "Pid") + return + } + default: + bts, err = msgp.Skip(bts) + if err != nil { + err = msgp.WrapError(err) + return + } + } + } + o = bts + return +} + +// Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message +func (z RegistRequest) Msgsize() (s int) { + s = 1 + 5 + msgp.StringPrefixSize + len(z.Name) + 8 + msgp.StringPrefixSize + len(z.Version) + 4 + msgp.IntSize + return +} diff --git a/agent/plugin/procotol/regist_gen_test.go b/agent/plugin/procotol/regist_gen_test.go new file mode 100644 index 000000000..b55dd4a12 --- /dev/null +++ b/agent/plugin/procotol/regist_gen_test.go @@ -0,0 +1,123 @@ +package procotol + +// Code generated by github.com/tinylib/msgp DO NOT EDIT. + +import ( + "bytes" + "testing" + + "github.com/tinylib/msgp/msgp" +) + +func TestMarshalUnmarshalRegistRequest(t *testing.T) { + v := RegistRequest{} + bts, err := v.MarshalMsg(nil) + if err != nil { + t.Fatal(err) + } + left, err := v.UnmarshalMsg(bts) + if err != nil { + t.Fatal(err) + } + if len(left) > 0 { + t.Errorf("%d bytes left over after UnmarshalMsg(): %q", len(left), left) + } + + left, err = msgp.Skip(bts) + if err != nil { + t.Fatal(err) + } + if len(left) > 0 { + t.Errorf("%d bytes left over after Skip(): %q", len(left), left) + } +} + +func BenchmarkMarshalMsgRegistRequest(b *testing.B) { + v := RegistRequest{} + b.ReportAllocs() + b.ResetTimer() + for i := 0; i < b.N; i++ { + v.MarshalMsg(nil) + } +} + +func BenchmarkAppendMsgRegistRequest(b *testing.B) { + v := RegistRequest{} + bts := make([]byte, 0, v.Msgsize()) + bts, _ = v.MarshalMsg(bts[0:0]) + b.SetBytes(int64(len(bts))) + b.ReportAllocs() + b.ResetTimer() + for i := 0; i < b.N; i++ { + bts, _ = v.MarshalMsg(bts[0:0]) + } +} + +func BenchmarkUnmarshalRegistRequest(b *testing.B) { + v := RegistRequest{} + bts, _ := v.MarshalMsg(nil) + b.ReportAllocs() + b.SetBytes(int64(len(bts))) + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, err := v.UnmarshalMsg(bts) + if err != nil { + b.Fatal(err) + } + } +} + +func TestEncodeDecodeRegistRequest(t *testing.T) { + v := RegistRequest{} + var buf bytes.Buffer + msgp.Encode(&buf, &v) + + m := v.Msgsize() + if buf.Len() > m { + t.Log("WARNING: TestEncodeDecodeRegistRequest Msgsize() is inaccurate") + } + + vn := RegistRequest{} + err := msgp.Decode(&buf, &vn) + if err != nil { + t.Error(err) + } + + buf.Reset() + msgp.Encode(&buf, &v) + err = msgp.NewReader(&buf).Skip() + if err != nil { + t.Error(err) + } +} + +func BenchmarkEncodeRegistRequest(b *testing.B) { + v := RegistRequest{} + var buf bytes.Buffer + msgp.Encode(&buf, &v) + b.SetBytes(int64(buf.Len())) + en := msgp.NewWriter(msgp.Nowhere) + b.ReportAllocs() + b.ResetTimer() + for i := 0; i < b.N; i++ { + v.EncodeMsg(en) + } + en.Flush() +} + +func BenchmarkDecodeRegistRequest(b *testing.B) { + v := RegistRequest{} + var buf bytes.Buffer + msgp.Encode(&buf, &v) + b.SetBytes(int64(buf.Len())) + rd := msgp.NewEndlessReader(buf.Bytes(), b) + dc := msgp.NewReader(rd) + b.ReportAllocs() + b.ResetTimer() + for i := 0; i < b.N; i++ { + err := v.DecodeMsg(dc) + if err != nil { + b.Fatal(err) + } + } +} diff --git a/agent/plugin/server.go b/agent/plugin/server.go new file mode 100644 
index 000000000..eb6a8863b --- /dev/null +++ b/agent/plugin/server.go @@ -0,0 +1,162 @@ +package plugin + +import ( + "io" + "net" + "os" + "sync" + "time" + + "github.com/bytedance/AgentSmith-HIDS/agent/plugin/procotol" + "github.com/bytedance/AgentSmith-HIDS/agent/transport" + + "github.com/tinylib/msgp/msgp" + "go.uber.org/zap" +) + +var SocketPath = "plugin.sock" + +// Server is the unix doamin socket listener of the plugin, and it maintains a plugin map +type Server struct { + m map[string]*Plugin + mu sync.Mutex + l net.Listener +} + +// ForEach is used to traverse all plugin instances with specified operations, +// for efficiency reasons, do not perform longer operations +func (s *Server) ForEach(f func(string, *Plugin)) { + s.mu.Lock() + for k, p := range s.m { + f(k, p) + } + s.mu.Unlock() +} + +// PluginList func traverses the server map and returns all plugin names +func (s *Server) PluginList() []string { + s.mu.Lock() + l := []string{} + for k := range s.m { + l = append(l, k) + } + s.mu.Unlock() + return l +} + +// Insert a new plugin, note: if there is a plugin with the same name before, please close it first +func (s *Server) Insert(k string, p *Plugin) { + s.mu.Lock() + s.m[k] = p + s.mu.Unlock() +} + +// Get func gets the plugin instance with the corresponding name from the Server +func (s *Server) Get(k string) (*Plugin, bool) { + s.mu.Lock() + p, ok := s.m[k] + s.mu.Unlock() + return p, ok +} + +// Delete func deletes a plugin instance from the server, +// the Close() method of the plugin will be called before deleting from the map +func (s *Server) Delete(k string) { + p, ok := s.Get(k) + if ok { + p.Close(true) + s.mu.Lock() + delete(s.m, k) + s.mu.Unlock() + } +} + +// Close func closes the unix domain socket listener in the server and deletes all plugin instances +func (s *Server) Close() { + s.l.Close() + time.Sleep(exitTimeout) + s.mu.Lock() + for _, v := range s.m { + v.Close(false) + } + s.m = make(map[string]*Plugin, 10) + s.mu.Unlock() +} + +// Globally unique server instance +var instance *Server + +// GetServer func is used to obtain the server instance, please note: this function is not concurrently safe +func GetServer() (*Server, error) { + if instance == nil { + err := os.RemoveAll("plugin.sock") + if err != nil { + return nil, err + } + l, err := net.Listen("unix", "plugin.sock") + if err != nil { + return nil, err + } + instance = &Server{l: l, m: make(map[string]*Plugin, 10)} + } + return instance, nil +} + +// Run is used for plugin server. 
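+// It accepts connections on the plugin socket, decodes each registration request,
+// looks the plugin up in the server map and validates it via Connect(), and then
+// forwards everything the plugin sends to the transport until the connection breaks.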
+func Run() { + defer func() { + if err := recover(); err != nil { + time.Sleep(time.Second) + panic(err) + } + }() + s, err := GetServer() + if err != nil { + zap.S().Panic(err) + } + for { + conn, err := s.l.Accept() + if err != nil { + zap.S().Panicf("Accept connect error: %v", err) + } + go func() { + r := msgp.NewReader(conn) + req := procotol.RegistRequest{} + err := (&req).DecodeMsg(r) + if err != nil { + zap.S().Error(err) + conn.Close() + return + } + zap.S().Infof("Received a registration:%+v", req) + p, ok := s.Get(req.Name) + if !ok { + zap.S().Errorf("Plugin %v isn't in map", req.Name) + conn.Close() + return + } + err = p.Connect(req, conn) + if err != nil { + zap.S().Error(err) + if err.Error() != "The same plugin has been connected, it may be a malicious attack" { + s.Delete(req.Name) + } + return + } + zap.S().Infof("Plugin has been successfully connected:%+v", p) + go func() { + for { + data, err := p.Receive() + if err != nil { + if err != io.EOF { + zap.S().Error(err) + } + s.Delete(req.Name) + return + } + transport.Send(data) + } + }() + }() + } +} diff --git a/agent/rust-toolchain b/agent/rust-toolchain new file mode 100644 index 000000000..9db5ea12f --- /dev/null +++ b/agent/rust-toolchain @@ -0,0 +1 @@ +1.48.0 diff --git a/agent/spec/spec.go b/agent/spec/spec.go new file mode 100644 index 000000000..7102505d8 --- /dev/null +++ b/agent/spec/spec.go @@ -0,0 +1,10 @@ +package spec + +type Data []map[string]string + +//go:generate msgp +type Task struct { + ID uint32 `msg:"id" json:"id"` + Content string `msg:"content" json:"content"` + Token string `msg:"token" json:"token"` +} diff --git a/agent/spec/spec_gen.go b/agent/spec/spec_gen.go new file mode 100644 index 000000000..c9fa5acb6 --- /dev/null +++ b/agent/spec/spec_gen.go @@ -0,0 +1,313 @@ +package spec + +// Code generated by github.com/tinylib/msgp DO NOT EDIT. 
+ +import ( + "github.com/tinylib/msgp/msgp" +) + +// DecodeMsg implements msgp.Decodable +func (z *Data) DecodeMsg(dc *msgp.Reader) (err error) { + var zb0004 uint32 + zb0004, err = dc.ReadArrayHeader() + if err != nil { + err = msgp.WrapError(err) + return + } + if cap((*z)) >= int(zb0004) { + (*z) = (*z)[:zb0004] + } else { + (*z) = make(Data, zb0004) + } + for zb0001 := range *z { + var zb0005 uint32 + zb0005, err = dc.ReadMapHeader() + if err != nil { + err = msgp.WrapError(err, zb0001) + return + } + if (*z)[zb0001] == nil { + (*z)[zb0001] = make(map[string]string, zb0005) + } else if len((*z)[zb0001]) > 0 { + for key := range (*z)[zb0001] { + delete((*z)[zb0001], key) + } + } + for zb0005 > 0 { + zb0005-- + var zb0002 string + var zb0003 string + zb0002, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, zb0001) + return + } + zb0003, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, zb0001, zb0002) + return + } + (*z)[zb0001][zb0002] = zb0003 + } + } + return +} + +// EncodeMsg implements msgp.Encodable +func (z Data) EncodeMsg(en *msgp.Writer) (err error) { + err = en.WriteArrayHeader(uint32(len(z))) + if err != nil { + err = msgp.WrapError(err) + return + } + for zb0006 := range z { + err = en.WriteMapHeader(uint32(len(z[zb0006]))) + if err != nil { + err = msgp.WrapError(err, zb0006) + return + } + for zb0007, zb0008 := range z[zb0006] { + err = en.WriteString(zb0007) + if err != nil { + err = msgp.WrapError(err, zb0006) + return + } + err = en.WriteString(zb0008) + if err != nil { + err = msgp.WrapError(err, zb0006, zb0007) + return + } + } + } + return +} + +// MarshalMsg implements msgp.Marshaler +func (z Data) MarshalMsg(b []byte) (o []byte, err error) { + o = msgp.Require(b, z.Msgsize()) + o = msgp.AppendArrayHeader(o, uint32(len(z))) + for zb0006 := range z { + o = msgp.AppendMapHeader(o, uint32(len(z[zb0006]))) + for zb0007, zb0008 := range z[zb0006] { + o = msgp.AppendString(o, zb0007) + o = msgp.AppendString(o, zb0008) + } + } + return +} + +// UnmarshalMsg implements msgp.Unmarshaler +func (z *Data) UnmarshalMsg(bts []byte) (o []byte, err error) { + var zb0004 uint32 + zb0004, bts, err = msgp.ReadArrayHeaderBytes(bts) + if err != nil { + err = msgp.WrapError(err) + return + } + if cap((*z)) >= int(zb0004) { + (*z) = (*z)[:zb0004] + } else { + (*z) = make(Data, zb0004) + } + for zb0001 := range *z { + var zb0005 uint32 + zb0005, bts, err = msgp.ReadMapHeaderBytes(bts) + if err != nil { + err = msgp.WrapError(err, zb0001) + return + } + if (*z)[zb0001] == nil { + (*z)[zb0001] = make(map[string]string, zb0005) + } else if len((*z)[zb0001]) > 0 { + for key := range (*z)[zb0001] { + delete((*z)[zb0001], key) + } + } + for zb0005 > 0 { + var zb0002 string + var zb0003 string + zb0005-- + zb0002, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, zb0001) + return + } + zb0003, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, zb0001, zb0002) + return + } + (*z)[zb0001][zb0002] = zb0003 + } + } + o = bts + return +} + +// Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message +func (z Data) Msgsize() (s int) { + s = msgp.ArrayHeaderSize + for zb0006 := range z { + s += msgp.MapHeaderSize + if z[zb0006] != nil { + for zb0007, zb0008 := range z[zb0006] { + _ = zb0008 + s += msgp.StringPrefixSize + len(zb0007) + msgp.StringPrefixSize + len(zb0008) + } + } + } + return +} + +// DecodeMsg implements msgp.Decodable +func (z *Task) 
DecodeMsg(dc *msgp.Reader) (err error) { + var field []byte + _ = field + var zb0001 uint32 + zb0001, err = dc.ReadMapHeader() + if err != nil { + err = msgp.WrapError(err) + return + } + for zb0001 > 0 { + zb0001-- + field, err = dc.ReadMapKeyPtr() + if err != nil { + err = msgp.WrapError(err) + return + } + switch msgp.UnsafeString(field) { + case "id": + z.ID, err = dc.ReadUint32() + if err != nil { + err = msgp.WrapError(err, "ID") + return + } + case "content": + z.Content, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "Content") + return + } + case "token": + z.Token, err = dc.ReadString() + if err != nil { + err = msgp.WrapError(err, "Token") + return + } + default: + err = dc.Skip() + if err != nil { + err = msgp.WrapError(err) + return + } + } + } + return +} + +// EncodeMsg implements msgp.Encodable +func (z Task) EncodeMsg(en *msgp.Writer) (err error) { + // map header, size 3 + // write "id" + err = en.Append(0x83, 0xa2, 0x69, 0x64) + if err != nil { + return + } + err = en.WriteUint32(z.ID) + if err != nil { + err = msgp.WrapError(err, "ID") + return + } + // write "content" + err = en.Append(0xa7, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74) + if err != nil { + return + } + err = en.WriteString(z.Content) + if err != nil { + err = msgp.WrapError(err, "Content") + return + } + // write "token" + err = en.Append(0xa5, 0x74, 0x6f, 0x6b, 0x65, 0x6e) + if err != nil { + return + } + err = en.WriteString(z.Token) + if err != nil { + err = msgp.WrapError(err, "Token") + return + } + return +} + +// MarshalMsg implements msgp.Marshaler +func (z Task) MarshalMsg(b []byte) (o []byte, err error) { + o = msgp.Require(b, z.Msgsize()) + // map header, size 3 + // string "id" + o = append(o, 0x83, 0xa2, 0x69, 0x64) + o = msgp.AppendUint32(o, z.ID) + // string "content" + o = append(o, 0xa7, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74) + o = msgp.AppendString(o, z.Content) + // string "token" + o = append(o, 0xa5, 0x74, 0x6f, 0x6b, 0x65, 0x6e) + o = msgp.AppendString(o, z.Token) + return +} + +// UnmarshalMsg implements msgp.Unmarshaler +func (z *Task) UnmarshalMsg(bts []byte) (o []byte, err error) { + var field []byte + _ = field + var zb0001 uint32 + zb0001, bts, err = msgp.ReadMapHeaderBytes(bts) + if err != nil { + err = msgp.WrapError(err) + return + } + for zb0001 > 0 { + zb0001-- + field, bts, err = msgp.ReadMapKeyZC(bts) + if err != nil { + err = msgp.WrapError(err) + return + } + switch msgp.UnsafeString(field) { + case "id": + z.ID, bts, err = msgp.ReadUint32Bytes(bts) + if err != nil { + err = msgp.WrapError(err, "ID") + return + } + case "content": + z.Content, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "Content") + return + } + case "token": + z.Token, bts, err = msgp.ReadStringBytes(bts) + if err != nil { + err = msgp.WrapError(err, "Token") + return + } + default: + bts, err = msgp.Skip(bts) + if err != nil { + err = msgp.WrapError(err) + return + } + } + } + o = bts + return +} + +// Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message +func (z Task) Msgsize() (s int) { + s = 1 + 3 + msgp.Uint32Size + 8 + msgp.StringPrefixSize + len(z.Content) + 6 + msgp.StringPrefixSize + len(z.Token) + return +} diff --git a/agent/spec/spec_gen_test.go b/agent/spec/spec_gen_test.go new file mode 100644 index 000000000..a7c78c4a0 --- /dev/null +++ b/agent/spec/spec_gen_test.go @@ -0,0 +1,236 @@ +package spec + +// Code generated by github.com/tinylib/msgp DO NOT EDIT. 
+ +import ( + "bytes" + "testing" + + "github.com/tinylib/msgp/msgp" +) + +func TestMarshalUnmarshalData(t *testing.T) { + v := Data{} + bts, err := v.MarshalMsg(nil) + if err != nil { + t.Fatal(err) + } + left, err := v.UnmarshalMsg(bts) + if err != nil { + t.Fatal(err) + } + if len(left) > 0 { + t.Errorf("%d bytes left over after UnmarshalMsg(): %q", len(left), left) + } + + left, err = msgp.Skip(bts) + if err != nil { + t.Fatal(err) + } + if len(left) > 0 { + t.Errorf("%d bytes left over after Skip(): %q", len(left), left) + } +} + +func BenchmarkMarshalMsgData(b *testing.B) { + v := Data{} + b.ReportAllocs() + b.ResetTimer() + for i := 0; i < b.N; i++ { + v.MarshalMsg(nil) + } +} + +func BenchmarkAppendMsgData(b *testing.B) { + v := Data{} + bts := make([]byte, 0, v.Msgsize()) + bts, _ = v.MarshalMsg(bts[0:0]) + b.SetBytes(int64(len(bts))) + b.ReportAllocs() + b.ResetTimer() + for i := 0; i < b.N; i++ { + bts, _ = v.MarshalMsg(bts[0:0]) + } +} + +func BenchmarkUnmarshalData(b *testing.B) { + v := Data{} + bts, _ := v.MarshalMsg(nil) + b.ReportAllocs() + b.SetBytes(int64(len(bts))) + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, err := v.UnmarshalMsg(bts) + if err != nil { + b.Fatal(err) + } + } +} + +func TestEncodeDecodeData(t *testing.T) { + v := Data{} + var buf bytes.Buffer + msgp.Encode(&buf, &v) + + m := v.Msgsize() + if buf.Len() > m { + t.Log("WARNING: TestEncodeDecodeData Msgsize() is inaccurate") + } + + vn := Data{} + err := msgp.Decode(&buf, &vn) + if err != nil { + t.Error(err) + } + + buf.Reset() + msgp.Encode(&buf, &v) + err = msgp.NewReader(&buf).Skip() + if err != nil { + t.Error(err) + } +} + +func BenchmarkEncodeData(b *testing.B) { + v := Data{} + var buf bytes.Buffer + msgp.Encode(&buf, &v) + b.SetBytes(int64(buf.Len())) + en := msgp.NewWriter(msgp.Nowhere) + b.ReportAllocs() + b.ResetTimer() + for i := 0; i < b.N; i++ { + v.EncodeMsg(en) + } + en.Flush() +} + +func BenchmarkDecodeData(b *testing.B) { + v := Data{} + var buf bytes.Buffer + msgp.Encode(&buf, &v) + b.SetBytes(int64(buf.Len())) + rd := msgp.NewEndlessReader(buf.Bytes(), b) + dc := msgp.NewReader(rd) + b.ReportAllocs() + b.ResetTimer() + for i := 0; i < b.N; i++ { + err := v.DecodeMsg(dc) + if err != nil { + b.Fatal(err) + } + } +} + +func TestMarshalUnmarshalTask(t *testing.T) { + v := Task{} + bts, err := v.MarshalMsg(nil) + if err != nil { + t.Fatal(err) + } + left, err := v.UnmarshalMsg(bts) + if err != nil { + t.Fatal(err) + } + if len(left) > 0 { + t.Errorf("%d bytes left over after UnmarshalMsg(): %q", len(left), left) + } + + left, err = msgp.Skip(bts) + if err != nil { + t.Fatal(err) + } + if len(left) > 0 { + t.Errorf("%d bytes left over after Skip(): %q", len(left), left) + } +} + +func BenchmarkMarshalMsgTask(b *testing.B) { + v := Task{} + b.ReportAllocs() + b.ResetTimer() + for i := 0; i < b.N; i++ { + v.MarshalMsg(nil) + } +} + +func BenchmarkAppendMsgTask(b *testing.B) { + v := Task{} + bts := make([]byte, 0, v.Msgsize()) + bts, _ = v.MarshalMsg(bts[0:0]) + b.SetBytes(int64(len(bts))) + b.ReportAllocs() + b.ResetTimer() + for i := 0; i < b.N; i++ { + bts, _ = v.MarshalMsg(bts[0:0]) + } +} + +func BenchmarkUnmarshalTask(b *testing.B) { + v := Task{} + bts, _ := v.MarshalMsg(nil) + b.ReportAllocs() + b.SetBytes(int64(len(bts))) + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, err := v.UnmarshalMsg(bts) + if err != nil { + b.Fatal(err) + } + } +} + +func TestEncodeDecodeTask(t *testing.T) { + v := Task{} + var buf bytes.Buffer + msgp.Encode(&buf, &v) + + m := v.Msgsize() + if 
buf.Len() > m { + t.Log("WARNING: TestEncodeDecodeTask Msgsize() is inaccurate") + } + + vn := Task{} + err := msgp.Decode(&buf, &vn) + if err != nil { + t.Error(err) + } + + buf.Reset() + msgp.Encode(&buf, &v) + err = msgp.NewReader(&buf).Skip() + if err != nil { + t.Error(err) + } +} + +func BenchmarkEncodeTask(b *testing.B) { + v := Task{} + var buf bytes.Buffer + msgp.Encode(&buf, &v) + b.SetBytes(int64(buf.Len())) + en := msgp.NewWriter(msgp.Nowhere) + b.ReportAllocs() + b.ResetTimer() + for i := 0; i < b.N; i++ { + v.EncodeMsg(en) + } + en.Flush() +} + +func BenchmarkDecodeTask(b *testing.B) { + v := Task{} + var buf bytes.Buffer + msgp.Encode(&buf, &v) + b.SetBytes(int64(buf.Len())) + rd := msgp.NewEndlessReader(buf.Bytes(), b) + dc := msgp.NewReader(rd) + b.ReportAllocs() + b.ResetTimer() + for i := 0; i < b.N; i++ { + err := v.DecodeMsg(dc) + if err != nil { + b.Fatal(err) + } + } +} diff --git a/agent/support/README-zh_CN.md b/agent/support/README-zh_CN.md new file mode 100644 index 000000000..3712f6843 --- /dev/null +++ b/agent/support/README-zh_CN.md @@ -0,0 +1,47 @@ +[English](README.md) | 简体中文 +## 关于插件 +插件通过Unix Domain Socket(UDS)与Agent进行通信,通信消息的序列化方式为[messagepack](https://msgpack.org/)。目前版本的插件-Agent协议还比较粗糙,之后会持续完善通信协议。 +## 启动 +插件一般是被Agent启动的,在这里我们有一个假设:Agent接收到的指令(配置)是可信的,否则会导致一些恶意命令被执行。 + +Agent执行的目标需要是一个具有可执行权限的文件(二进制或者脚本)。在执行前会校验所要执行文件的`sha256`,如果与命令(配置)中的不一致,则会拒绝启动。 + +在启动前,Agent会将插件相关信息以插件名为key放入一个map中,以方便后续查询。如果这时map中有同名的插件,那么会对比两者的版本信息,如果版本一致则不会启动。 + +在启动时Agent会设置插件的工作目录为`plugin/${plugin_name}/`,并将插件进程的`stdout`与`stderr`重定向到文件中,另外还会重设插件`pgig`以方便管理。 + +启动时,Agent会记录下子进程的`pid`及其`pgid`,等待后续校验。 +## 注册 +插件进程在被启动后,首先会连接Agent的Socket,并发出一个注册请求。 + +注册请求中包含着插件的名字、插件的版本以及插件进程的`pid`。 + +Agent在接收到注册请求后,会根据请求中的插件名字查询map,如果没有查询到则会断开这个连接(只有map中的插件才允许建立连接)。 + +如果查询到了相应的插件数据,则会查找请求中的`pid`对应的`pgid`,并与启动时记录的`pgid`进行对比,如果不一致则会断开连接。 + +## 数据传输 +在上述注册过程完成之后,Plugin可以开始进行数据传输了。 + +数据传输是双向的,Plugin向Agent发送Data,Agent向Plugin发送Task。 + +其中,Data在传输时会被序列化为`map[string]string`结构,在Rust中结构体也会被映射成为上述结构。结构中一个必要的字段为`data_type`以区分不同的数据类型。 + +Task数据结构如下所示: +``` +id uint32 +content string +token string +``` +`id`用于判别数据类型,进而解析`content`。如果需要根据Task返回数据,则可以在Data字段中增加`token`以进行数据对账。 + +## 异常与退出 +Socket连接贯穿于Plugin的整个生命周期,如果连接中断,则插件需要自行退出。 +对等的,如果Agent发现与某个Plugin的连接中断,那么会从map中删除相应插件,并强制结束相关子进程。 + +## Plugin SDK +### Rust +* [plugin](rust/plugin):封装了底层通信细节,返回Sender/Receiver两端。 +* [plugin_builder](rust/plugin_builder):以工厂模式对日志/通信做了高级封装。 +### Golang +* TODO \ No newline at end of file diff --git a/agent/support/README.md b/agent/support/README.md new file mode 100644 index 000000000..293d8becc --- /dev/null +++ b/agent/support/README.md @@ -0,0 +1,49 @@ +English | [简体中文](README-zh_CN.md) +## About Plugin +The plugin communicates with the agent through Unix Domain Socket (UDS), and the serialization method of the message is [messagepack](https://msgpack.org/). + +The current version of the plugin-agent protocol is still relatively rough, and the communication protocol will continue to be improved in the future. +## Startup +The plugin is generally started by the agent. Here we have an assumption: the command (configuration) received by the agent are credible, otherwise some malicious commands will be executed. + +The target executed by the agent needs to be a file (binary or script) with executable permissions. Before execution, it will check the `sha256` of the file to be executed. If it is inconsistent with the command (configuration), the plugin will refuse to start. 
+
+Before startup, the agent will put the plugin-related information into a map keyed by the plugin name. If a plugin with the same name is already in the map, the versions of the two will be compared, and if they are identical, the plugin will not be started.
+
+At startup, the agent will set the working directory of the plugin to `plugin/${plugin_name}/`, redirect the `stdout` and `stderr` of the plugin process to files, and reset the plugin's `pgid` for easier management.
+
+At startup, the agent will also record the `pid` and `pgid` of the plugin process for later verification.
+
+## Registration
+After the plugin process is started, it will first connect to the socket of the agent and send a registration request.
+
+The registration request contains the name, the version, and the `pid` of the plugin process.
+
+After the agent receives the registration request, it will query the map according to the plugin name in the request. If it is not found, the connection will be disconnected (only plugins in the map are allowed to establish a connection).
+
+If the corresponding plugin is in the map, the `pgid` corresponding to the `pid` in the request will be compared with the `pgid` recorded at startup. If they are inconsistent, the connection will be disconnected.
+
+## Data Transmission
+After the above registration process is completed, the plugin can start transmitting data.
+
+Data transmission is two-way: the plugin sends `Data` to the agent, and the agent sends `Task` to the plugin.
+
+`Data` is serialized into a `map[string]string` structure during transmission, and a Rust `struct` is mapped to the same structure. A required field in the structure is `data_type`, which distinguishes the different data types.
+
+The `Task` structure is as follows:
+```
+id      uint32
+content string
+token   string
+```
+`id` identifies the task type and determines how `content` is parsed. If you need to return `Data` in response to a `Task`, you can include the `token` in the `Data` for correlation.
+## Exception and Exit
+The socket connection lasts for the entire life cycle of the plugin. If the connection is broken, the plugin needs to exit on its own.
+The same applies to the agent: if the agent finds that the connection to a plugin has been broken, it will delete the corresponding plugin from the map and forcibly terminate the related child processes.
+
+## Plugin SDK
+### Rust
+* [plugin](rust/plugin): Encapsulates the underlying communication details and returns the Sender/Receiver.
+* [plugin_builder](rust/plugin_builder): High-level, factory-style encapsulation of logging and communication; see the sketch below.
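+
+A minimal plugin skeleton based on `plugin_builder` could look like the following sketch. The socket path, plugin name, version and the `MyEvent` type are placeholders, and `serde` with the `derive` feature is assumed to be available; the real values must match the agent's parameters and config file:
+```
+use serde::Serialize;
+
+#[derive(Serialize)]
+struct MyEvent {
+    data_type: String, // required field, used to distinguish data types
+    msg: String,
+}
+
+fn main() {
+    // Connect to the agent's socket and register this plugin.
+    let (mut sender, receiver) = plugin_builder::Builder::new(
+        "/etc/hids/plugin.sock", // must match the agent's plugin socket path
+        "my_plugin",
+        "1.0.0.0",
+    )
+    .unwrap()
+    .build();
+
+    // Send one Data record to the agent.
+    sender
+        .send(&MyEvent {
+            data_type: "9000".to_owned(),
+            msg: "hello from plugin".to_owned(),
+        })
+        .unwrap();
+
+    // Receive Tasks from the agent until the connection is closed.
+    while let Ok(task) = receiver.receive() {
+        println!("received task: {:?}", task);
+    }
+}
+```
+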
+### Golang +* TODO \ No newline at end of file diff --git a/agent/support/rust/flexi_logger/.gitignore b/agent/support/rust/flexi_logger/.gitignore new file mode 100644 index 000000000..6a3af2091 --- /dev/null +++ b/agent/support/rust/flexi_logger/.gitignore @@ -0,0 +1,17 @@ +examples/_* +target +Cargo.lock +*.alerts +*.log +*.seclog +*.gz +*.zip +link_to_log +link_to_mt_log +log_files +todo +*logspec.toml +tests/*logspec.toml +*~ +.*~ +.vscode \ No newline at end of file diff --git a/agent/support/rust/flexi_logger/.travis.yml b/agent/support/rust/flexi_logger/.travis.yml new file mode 100644 index 000000000..e625ae7d8 --- /dev/null +++ b/agent/support/rust/flexi_logger/.travis.yml @@ -0,0 +1,36 @@ +language: rust + +rust: + - stable + - beta + - nightly + # - 1.37.0 // does not work since cargo test is executed automatically + +os: + - linux + - windows + - osx + +jobs: + fast_finish: true + allow_failures: + - rust: nightly + - os: windows + include: + - if: rust = stable + script: + - rustup component add rustfmt + - cargo fmt --all -- --check + + - if: rust = stable OR rust = beta OR rust = nightly + script: + - cargo test --release --all-features + - cargo doc --all-features --no-deps + - cargo test --release + - cargo test --release --no-default-features + + - if: rust = "1.37.0" + script: + - cargo build --release + - cargo build --release --no-default-features + - cargo build --release --all-features diff --git a/agent/support/rust/flexi_logger/CHANGELOG.md b/agent/support/rust/flexi_logger/CHANGELOG.md new file mode 100644 index 000000000..118f36af3 --- /dev/null +++ b/agent/support/rust/flexi_logger/CHANGELOG.md @@ -0,0 +1,436 @@ +# Changelog for flexi_logger + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) and this +project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [0.16.2] - 2020-11-18 + +Add module +[code-examples](https://docs.rs/flexi_logger/latest/flexi_logger/code_examples/index.html) +with additional usage documentation. +This is a follow-up of a PR, kudos goes to [devzbysiu](https://github.com/devzbysiu)! + +## [0.16.1] - 2020-09-20 + +Support empty toml spec files (kudos to ijackson for +[pull request 66](https://github.com/emabee/flexi_logger/pull/66)) +(was supposed to be part of 0.16.0, but I had forgotten to merge it). + +## [0.16.0] - 2020-09-19 + +If file logging is used, do not create the output file if no log is written. +Solves [issue-62](https://github.com/emabee/flexi_logger/issues/62). + +Improve color handling + +- introduce AdaptiveFormat for a clearer API +- Support using feature `atty` without provided coloring +- Extend example `colors` to provide insight in how AdaptiveFormat works +- Remove the deprecated method `Logger::do_not_log()`; use `log_target()` with `LogTarget::DevNull` instead. +- Remove deprecated method `Logger::o_log_to_file()`; use `log_target()` instead. The clearer convenience method `Logger::log_to_file()` is still available. + +Improve the compression feature. Solves [issue-65](https://github.com/emabee/flexi_logger/issues/65). 
+ +- breaking change: change the file suffix for the compressed log files from `.zip` to `.gz` +- Fix wrong wording in code and documentation +- deprecate the feature name `ziplog` and call the feature now `compress` +- rename `Cleanup::KeepZipFiles` into `Cleanup::KeepCompressedFiles` + and `Cleanup::KeepLogAndZipFiles` into `Cleanup::KeepLogAndCompressedFiles` + - the old names still work but are deprecated + +If file logging is used, do not create the output file if no log is written +Solves issue [issue-62](https://github.com/emabee/flexi_logger/issues/62). + +## [0.15.12] - 2020-28-08 + +Make `1.37.0` the minimal rust version for `flexi_logger`. + +## [0.15.11] - 2020-08-07 + +Introduce feature `specfile_without_notification` to allow coping with OS issues +(solves [issue-59](https://github.com/emabee/flexi_logger/issues/59)). + +## [0.15.10] - 2020-07-22 + +Minor code maintenance. + +## [0.15.9] - 2020-07-21 + +Allow using the log target with fantasy names, like with `env_logger`. +Solves [issue-56](https://github.com/emabee/flexi_logger/issues/56). + +## [0.15.8] - 2020-07-20 + +Allow modifying the coloring palette through the environment variable `FLEXI_LOGGER_PALETTE`. +See function [style](https://docs.rs/flexi_logger/latest/flexi_logger/fn.style.html) for details. +Solves [issue-55](https://github.com/emabee/flexi_logger/issues/55). + +By default, don't use colors if stdout or stderr are not a terminal +Solves [issue-57](https://github.com/emabee/flexi_logger/issues/57). + +Add variant Criterion::AgeOrSize +(kudos to +[pscott](https://github.com/pscott), +[PR-54](https://github.com/emabee/flexi_logger/pull/54)). + +## [0.15.7] - 2020-07-02 + +Add some Debug derives +(kudos to +[pscott](https://github.com/pscott), +[PR-52](https://github.com/emabee/flexi_logger/pull/52)). + +## [0.15.6] - 2020-07-02 + +Introduce separate formatting for stdout +(kudos to +[pscott](https://github.com/pscott), +[PR-51](https://github.com/emabee/flexi_logger/pull/51)). + +Deprecate `Logger::do_not_log()`. + +## [0.15.5] - 2020-06-18 + +Add `Logger::duplicate_to_stdout()` to fix +[issue-47](https://github.com/emabee/flexi_logger/issues/47). + +## [0.15.4] - 2020-06-09 + +Fix [issue-45](https://github.com/emabee/flexi_logger/issues/45), which was a panic in +the specfile watcher when some log files were deleted manually while the program was running +(kudos to +[avl](https://github.com/avl), +[PR-46](https://github.com/emabee/flexi_logger/pull/46)). + +## [0.15.3] - 2020-06-04 + +Add compatibility with multi_log by adding methods +`Logger::build` and `Logger::build_with_specfile` (fixes issue-44). + +Add `LogSpecBuilder::insert_modules_from()` (fixes issue-43). + +## [0.15.2] - 2020-03-24 + +Improve handling of parse-errors. + +Fix default format for files (was and is documented to be uncolored, but was colored). + +## [0.15.1] - 2020-03-04 + +Make the textfilter functionality an optional default feature; +deselecting it removes the regex crate as a required dependency, +which reduces the size overhead for any binary using `flexi_logger` +(kudos to [Petre Eftime](petre.eftime@gmail.com)). + +## [0.15.0] - 2020-02-27 + +Refine and rename error variants to allow e.g. differentiating +between errors related to the output (files) +and errors related to the specfile. + +## [0.14.8] - 2020-02-06 + +Make cleanup more robust, and allow controlling the cleanup-thread also with +`Logger::start_with_specfile()`. 
+ +## [0.14.7] - 2020-02-04 + +If rotation is used with cleanup, do the cleanup by default in a background thread +(solves [issue 39](https://github.com/emabee/flexi_logger/issues/39)). + +For the ziplog feature, switch from `zip` crate to `flate2`. + +## [0.14.6] - 2020-01-28 + +Fix [issue 38](https://github.com/emabee/flexi_logger/issues/38) +(Old log files are not removed if rCURRENT doesn't overflow). + +## [0.14.5] - 2019-11-06 + +Pass format option into custom loggers (pull request 37). + +## [0.14.4] - 2019-09-25 + +Fix bug in specfile handling ([issue 36](https://github.com/emabee/flexi_logger/issues/36)). + +Improve docu and implementation of create_symlink. + +Minor other stuff. + +## [0.14.3] - 2019-08-04 + +Allow defining custom handlers for the default log target +(solves [issue 32](https://github.com/emabee/flexi_logger/issues/32)). + +## [0.14.2] - 2019-08-04 + +Use implicit locking of stderr in StdErrWriter. + +Allow failures in travis' windows build. + +Add license files. + +## [0.14.1] - 2019-08-04 + +Support recursive logging also with FileLogWriter, sharing the buffer with the PrimaryWriter. + +Fix multi-threading issue (incorrect line-break handling with stderr). + +## [0.14.0] - 2019-07-22 + +Further stabilize the specfile feature. + +Remove `LogSpecification::ensure_specfile_exists()` and `LogSpecification::from_file()` +from public API, where they should not be (-> version bump). + +Harmonize all eprintln! calls to +prefix the output with "`[flexi_logger]` ". + +## [0.13.4] - 2019-07-19 + +Only relevant for the `specfile` feature: +initialize the logger before dealing in any way with the specfile, +and do the initial read of the specfile in the main thread, +i.e. synchronously, to ensure a deterministic behavior during startup +(fixes [issue 31](https://github.com/emabee/flexi_logger/issues/31)). + +## [0.13.3] - 2019-07-08 + +Improve the file watch for the specfile to make the `specfile` feature more robust. +E.g. allow editing the specfile on linux +with editors that move the original file to a backup name. + +Add an option to write the log to stdout, as recommended for +[twelve-factor apps](https://12factor.net/logs). + +## [0.13.2] - 2019-06-02 + +Make get_creation_date() more robust on all platforms. + +## [0.13.1] - 2019-05-29 + +Fix fatal issue with get_creation_date() on linux +(see ). + +## [0.13.0] - 2019-05-28 + +Improve performance for plain stderr logging. + +Improve robustnesss for recursive log calls. + +## [0.12.0] - 2019-05-24 + +Revise handling of record.metadata().target() versus record.module_path(). + +Incompatible API modification: Logger.rotate() takes now three parameters. + +Suppport different formatting for stderr and files. + +Add feature `colors` (see `README.md` for details). + +Remove the deprecated `Logger::start_reconfigurable()` and `Logger::rotate_over_size()`. + +## [0.11.5] - 2019-05-15 + +Fix [issue 26](https://github.com/emabee/flexi_logger/issues/26) (logging off for specific modules). + +Fix [issue 27](https://github.com/emabee/flexi_logger/issues/27) (log files blank after restart). + +Fix [issue 28](https://github.com/emabee/flexi_logger/issues/28) +(add a corresponding set of unit tests to FileLogWriter). + +## [0.11.4] - 2019-04-01 + +Version updates of dependencies. + +## [0.11.3] - 2019-03-28 + +Add SyslogWriter. + +## [0.11.2] - 2019-03-22 + +Change API to more idiomatic parameter types, in a compatible way. + +Add first implementation of a SyslogWriter. 
+ +## [0.11.1] - 2019-03-06 + +Add option to write windows line endings, rather than a plain `\n`. + +## [0.11.0] - 2019-03-02 + +Add options to cleanup rotated log files, by deleting and/or zipping older files. + +Remove some deprecated methods. + +## [0.10.7] - 2019-02-27 + +Let the BlackHoleLogger, although it doesn't write a log, still duplicate to stderr. + +## [0.10.6] - 2019-02-26 + +Deprecate `Logger::start_reconfigurable()`, let `Logger::start()` return a reconfiguration handle. + +Add an option to write all logs to nowhere (i.e., do not write any logs). + +## [0.10.5] - 2019-01-15 + +Eliminate performance penalty for using reconfigurability. + +## [0.10.4] - 2019-01-07 + +Add methods to modify the log spec temporarily. + +## [0.10.3] - 2018-12-08 + +Advance to edition 2018. + +## [0.10.2] - 2018-12-07 + +Log-spec parsing is improved, more whitespace is tolerated. + +## [0.10.1] - 2018-11-08 + +When file rotation is used, the name of the file to which the logs are written is now stable. + +Details: + +- the logs are always written to a file with infix _rCURRENT +- if this file exceeds the specified rotate-over-size, it is closed and renamed + to a file with a sequential number infix, and then the logging continues again + to the (fresh) file with infix _rCURRENT + +Example: + +After some logging with your program my_prog, you will find files like + +```text +my_prog_r00000.log +my_prog_r00001.log +my_prog_r00002.log +my_prog_rCURRENT.log +``` + +## [0.10.0] - 2018-10-30 + +`LogSpecification::parse()` now returns a `Result`, rather than +a log spec directly (-> version bump). +This enables a more reliable usage of FlexiLogger in non-trivial cases. + +For the sake of compatibility for the normal usecases, the Logger methods `with_str()` etc. +remain unchanged. An extra method is added to retrieve parser errors, if desired. + +## [0.9.3] - 2018-10-27 + +Docu improvement. + +## [0.9.2] - 2018-08-13 + +Fix incorrect filename generation with rotation, +i.e., switch off timestamp usage when rotation is used. + +## [0.9.1] - 2018-08-12 + +Introduce `Logger::duplicate_to_stderr()`, as a more flexible replacement for `duplicate_error()` +and `duplicate_info()`. + +## [0.9.0] - 2018-07-06 + +### Eliminate String allocation + +Get rid of the unneccessary String allocation we've been +carrying with us since ages. This implies changing the signature of the format functions. + +In case you provide your own format function, you'll need to adapt it to the new signature. +Luckily, the effort is low. + +As an example, here is how the definition of the `opt_format` function changed: + +```rust +- pub fn opt_format(record: &Record) -> String { +- format!( +--- ++ pub fn opt_format(w: &mut io::Write, record: &Record) -> Result<(), io::Error> { ++ write!( ++ w, +``` + +Similarly, if you're using the advanced feature of providing your own implementation of LogWriter, +you need to adapt it. The change again is trivial, and should even slightly +simplify your code (you can return io errors and don't have to catch them yourself). + +### Misc + +The docu generation on docs.rs is now configured to considers all features, we thus +expose `Logger.start_with_specfile()` only if the specfile feature is used. So we can revert the +change done with 0.8.1. 
+ +## [0.8.4] - 2018-06-18 + +Add flexi_logger to category `development-tools::debugging` + +## [0.8.3] - 2018-05-14 + +Make append() also work for rotating log files + +## [0.8.2] - 2018-04-03 + +Add option to append to existing log files, rather than always truncating them + +## [0.8.1] - 2018-3-19 + +Expose `Logger.start_with_specfile()` always +...and not only if the feature "specfile" is used - otherwise it does not appear +in the auto-generated docu (because it does not use --allfeatures) + +## [0.8.0] - 2018-03-18 + +Add specfile feature + +- Add a feature that allows to specify the LogSpecification via a file + that can be edited while the program is running +_ Remove/hide deprecated APIs +- As a consequence, cleanup code, get rid of duplicate stuff. + +## [0.7.1] - 2018-03-07 + +Bugfix: do not create empty files when used in env_logger style. +Update docu and the description in cargo.toml + +## [0.7.0] - 2018-02-25 + +Add support for multiple log output streams + +- replace FlexiWriter with DefaultLogWriter, which wraps a FileLogWriter +- add test where a SecurityWriter and an AlertWriter are added +- add docu +- move deprecated structs to separate package +- move benches to folder benches + +## [0.6.13] 2018-02-09 + +Add Logger::with_env_or_str() + +## [0.6.12] 2018-2-07 + +Add ReconfigurationHandle::parse_new_spec() + +## [0.6.11] 2017-12-29 + +Fix README.md + +## [0.6.10] 2017-12-29 + +Publish version based on log 0.4 + +## (...) + +## [0.6.0] 2017-07-13 + +Use builder pattern for LogSpecification and Logger + +- deprecate outdated API +- "objectify" LogSpecification +- improve documentation, e.g. document the dash/underscore issue diff --git a/agent/support/rust/flexi_logger/Cargo.toml b/agent/support/rust/flexi_logger/Cargo.toml new file mode 100644 index 000000000..2d954c7c8 --- /dev/null +++ b/agent/support/rust/flexi_logger/Cargo.toml @@ -0,0 +1,56 @@ +[package] +name = "flexi_logger" +version = "0.16.2" +authors = ["emabee "] +edition = "2018" +license = "MIT OR Apache-2.0" +readme = "README.md" +repository = "https://github.com/emabee/flexi_logger" +documentation = "https://docs.rs/flexi_logger" +homepage = "https://crates.io/crates/flexi_logger" +description = """ +An easy-to-configure and flexible logger that writes logs to stderr and/or to files. \ +It allows custom logline formats, and it allows changing the log specification at runtime. \ +It also allows defining additional log streams, e.g. 
for alert or security messages.\ +""" +keywords = ["file", "logger"] +categories = ["development-tools::debugging"] + +[package.metadata.docs.rs] +all-features = true + +[features] +default = ["colors", "textfilter"] +colors = ["atty", "lazy_static", "yansi"] +specfile = ["specfile_without_notification", "notify"] +specfile_without_notification = ["serde", "toml", "serde_derive"] +syslog_writer = ["libc", "hostname"] +ziplog = ["compress"] # for backwards compatibility +compress = ["flate2"] +textfilter = ["regex"] + +[dependencies] +plugin = { path = "../plugin" } +anyhow = "1" +atty = { version = "0.2", optional = true } +chrono = "0.4" +glob = "0.3" +hostname = { version = "0.3", optional = true } +lazy_static = { version = "1.4", optional = true } +log = { version = "0.4", features = ["std"] } +notify = { version = "4.0", optional = true } +regex = { version = "1.1", optional = true } +serde = { version = "1.0", optional = true } +serde_derive = { version = "1.0", optional = true } +thiserror = "1.0" +toml = { version = "0.5", optional = true } +yansi = { version = "0.5", optional = true } +flate2 = { version = "1.0", optional = true } + +[target.'cfg(linux)'.dependencies] +libc = { version = "^0.2.50", optional = true } + +[dev-dependencies] +serde_derive = "1.0" +version-sync = "0.9" +#env_logger = '*' # optionally needed for the performance example diff --git a/agent/support/rust/flexi_logger/LICENSE-APACHE b/agent/support/rust/flexi_logger/LICENSE-APACHE new file mode 100644 index 000000000..f8e5e5ea0 --- /dev/null +++ b/agent/support/rust/flexi_logger/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. \ No newline at end of file diff --git a/agent/support/rust/flexi_logger/LICENSE-MIT b/agent/support/rust/flexi_logger/LICENSE-MIT new file mode 100644 index 000000000..695259257 --- /dev/null +++ b/agent/support/rust/flexi_logger/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2018 The AUTHORS + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/agent/support/rust/flexi_logger/README.md b/agent/support/rust/flexi_logger/README.md new file mode 100644 index 000000000..d01754051 --- /dev/null +++ b/agent/support/rust/flexi_logger/README.md @@ -0,0 +1,124 @@ +# flexi_logger + +**A flexible and easy-to-use logger that writes logs to stderr and/or to files, and/or to +other output streams, and that can be influenced while the program is running.** + +[![Latest version](https://img.shields.io/crates/v/flexi_logger.svg)](https://crates.io/crates/flexi_logger) +[![Documentation](https://docs.rs/flexi_logger/badge.svg)](https://docs.rs/flexi_logger) +![License](https://img.shields.io/crates/l/flexi_logger.svg) +[![Travis CI](https://travis-ci.org/emabee/flexi_logger.svg?branch=master)](https://travis-ci.org/emabee/flexi_logger) + +## Usage + +Add flexi_logger to the dependencies section in your project's `Cargo.toml`, with + +```toml +[dependencies] +flexi_logger = "0.16" +log = "0.4" +``` + +or, if you want to use some of the optional features, with something like + +```toml +[dependencies] +flexi_logger = { version = "0.16", features = ["specfile", "compress"] } +log = "0.4" +``` + +or, to get the smallest footprint (and no colors), with + +```toml +[dependencies] +flexi_logger = { version = "0.16", default_features = false } +log = "0.4" +``` + +Note: `log` is needed because `flexi_logger` plugs into the standard Rust logging facade given +by the [log crate](https://crates.io/crates/log), +and you use the ```log``` macros to write log lines from your code. + +## Code examples + +See the documentation of module +[code_examples](https://docs.rs/flexi_logger/latest/flexi_logger/code_examples/index.html). + +## Options + +There are configuration options to e.g. + +* decide whether you want to write your logs to stderr or to a file, +* configure the path and the filenames of the log files, +* use file rotation, +* specify the line format for the log lines, +* define additional log streams, e.g for alert or security messages, +* support changing the log specification on the fly, while the program is running, + +See the API documentation for a complete reference. + +## Crate Features + +Make use of any of these features by specifying them in your `Cargo.toml` +(see above in the usage section). + +### **`colors`** + +Getting colored output is also possible without this feature, +by implementing and using your own coloring format function. + +The default feature `colors` simplifies this by doing three things: + +* it activates the optional dependency to `yansi` and +* provides additional colored pendants to the existing uncolored format functions +* it uses `colored_default_format()` for the output to stderr, + and the non-colored `default_format()` for the output to files +* it activates the optional dependency to `atty` to being able to switch off + coloring if the output is not sent to a terminal but e.g. piped to another program. + +**Colors**, +or styles in general, are a matter of taste, and no choice will fit every need. 
So you can override the default formatting and coloring in various ways. + +With `--no-default-features --features="atty"` you can remove the yansi-based coloring but keep the capability to switch off your own coloring. + +### **`compress`** + +The `compress` feature adds two options to the `Logger::Cleanup` `enum`, which allow keeping some +or all rotated log files in compressed form (`.gz`) rather than as plain text files. + +The feature was previously called `ziplogs`. The old name still works, but is deprecated and +should be replaced. + +### **`specfile`** + +The `specfile` feature adds a method `Logger::start_with_specfile(specfile)`. + +If started with this method, `flexi_logger` uses the log specification +that was given to the factory method (one of `Logger::with...()`) as initial spec +and then tries to read the log specification from the named file. + +If the file does not exist, it is created and filled with the initial spec. + +By editing the log specification in the file while the program is running, +you can change the logging behavior in real-time. + +The implementation of this feature uses some additional crates that you might +not want to depend on with your program if you don't use this functionality. +For that reason the feature is not active by default. + +### **`specfile_without_notification`** + +Pretty much like `specfile`, except that updates to the file are being ignored. +See [issue-59](https://github.com/emabee/flexi_logger/issues/59) for more details. + +### **`textfilter`** + +Removes the ability to filter logs by text, but also removes the dependency on the regex crate. + +### **`syslog`** + +This is still an experimental feature, likely working, but not well tested. +Feedback of all kinds is highly appreciated. + +## Versions + +See the [change log](https://github.com/emabee/flexi_logger/blob/master/CHANGELOG.md). 
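+
+As a quick orientation, a minimal usage sketch that combines the options listed above
+(the directory name and the log specification are placeholders; only builder calls that
+also appear in this repository's examples are used):
+
+```rust
+use flexi_logger::{Duplicate, Logger};
+use log::{info, warn};
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+    Logger::with_str("info")                     // example log specification
+        .log_to_file()                           // write logs to a file instead of stderr
+        .directory("log_files")                  // placeholder folder for the log files
+        .duplicate_to_stderr(Duplicate::Warn)    // keep warnings and errors on the terminal
+        .start()?;
+
+    info!("this line goes to the log file");
+    warn!("this line is also duplicated to stderr");
+    Ok(())
+}
+```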
diff --git a/agent/support/rust/flexi_logger/benches/bench_reconfigurable.rs b/agent/support/rust/flexi_logger/benches/bench_reconfigurable.rs new file mode 100644 index 000000000..98ebb5873 --- /dev/null +++ b/agent/support/rust/flexi_logger/benches/bench_reconfigurable.rs @@ -0,0 +1,43 @@ +#![feature(extern_crate_item_prelude)] +#![feature(test)] + +extern crate test; +use log::{error, trace}; + +use flexi_logger::Logger; +use test::Bencher; + +#[bench] +fn b10_no_logger_active(b: &mut Bencher) { + b.iter(use_error); +} + +#[bench] +fn b20_initialize_logger(_: &mut Bencher) { + Logger::with_str("info") + .log_to_file() + .directory("log_files") + .start() + .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); +} + +#[bench] +fn b30_relevant_logs(b: &mut Bencher) { + b.iter(use_error); +} + +#[bench] +fn b40_suppressed_logs(b: &mut Bencher) { + b.iter(use_trace); +} + +fn use_error() { + for _ in 1..100 { + error!("This is an error message"); + } +} +fn use_trace() { + for _ in 1..100 { + trace!("This is a trace message"); + } +} diff --git a/agent/support/rust/flexi_logger/benches/bench_standard.rs b/agent/support/rust/flexi_logger/benches/bench_standard.rs new file mode 100644 index 000000000..5dbe4b565 --- /dev/null +++ b/agent/support/rust/flexi_logger/benches/bench_standard.rs @@ -0,0 +1,45 @@ +#![feature(extern_crate_item_prelude)] +#![feature(test)] + +extern crate flexi_logger; +extern crate test; +#[macro_use] +extern crate log; + +use flexi_logger::Logger; +use test::Bencher; + +#[bench] +fn b10_no_logger_active(b: &mut Bencher) { + b.iter(use_error); +} + +#[bench] +fn b20_initialize_logger(_: &mut Bencher) { + Logger::with_str("info") + .log_to_file() + .directory("log_files") + .start() + .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); +} + +#[bench] +fn b30_relevant_logs(b: &mut Bencher) { + b.iter(use_error); +} + +#[bench] +fn b40_suppressed_logs(b: &mut Bencher) { + b.iter(use_trace); +} + +fn use_error() { + for _ in 1..100 { + error!("This is an error message"); + } +} +fn use_trace() { + for _ in 1..100 { + trace!("This is a trace message"); + } +} diff --git a/agent/support/rust/flexi_logger/examples/colors.rs b/agent/support/rust/flexi_logger/examples/colors.rs new file mode 100644 index 000000000..cf1e5cf09 --- /dev/null +++ b/agent/support/rust/flexi_logger/examples/colors.rs @@ -0,0 +1,75 @@ +fn main() { + #[cfg(not(feature = "colors"))] + println!("Feature color is switched off"); + + #[cfg(feature = "colors")] + { + use atty::Stream::{Stderr, Stdout}; + use yansi::{Color, Paint, Style}; + + for i in 0..=255 { + println!("{}: {}", i, Paint::fixed(i, i)); + } + + println!(""); + + if atty::is(Stdout) { + println!( + "Stdout is considered a tty - \ + flexi_logger::AdaptiveFormat will use colors", + ); + } else { + println!( + "Stdout is not considered a tty - \ + flexi_logger::AdaptiveFormat will NOT use colors" + ); + } + + if atty::is(Stderr) { + println!( + "Stderr is considered a tty - \ + flexi_logger::AdaptiveFormat will use colors", + ); + } else { + println!( + "Stderr is not considered a tty - \ + flexi_logger::AdaptiveFormat will NOT use colors!" 
+ ); + } + + // Enable ASCII escape sequence support on Windows consoles, + // but disable coloring on unsupported Windows consoles + if cfg!(windows) { + if !Paint::enable_windows_ascii() { + println!("unsupported windows console detected => coloring disabled"); + Paint::disable(); + return; + } + } + + println!( + "\n{}", + Style::new(Color::Fixed(196)) + .bold() + .paint("This is red output like by default with err!") + ); + println!( + "{}", + Style::new(Color::Fixed(208)) + .bold() + .paint("This is yellow output like by default with warn!") + ); + println!( + "{}", + Style::new(Color::Unset).paint("This is normal output like by default with info!") + ); + println!( + "{}", + Style::new(Color::Fixed(7)).paint("This is output like by default with debug!") + ); + println!( + "{}", + Style::new(Color::Fixed(8)).paint("This is grey output like by default with trace!") + ); + } +} diff --git a/agent/support/rust/flexi_logger/examples/performance.rs b/agent/support/rust/flexi_logger/examples/performance.rs new file mode 100644 index 000000000..113cf7fb5 --- /dev/null +++ b/agent/support/rust/flexi_logger/examples/performance.rs @@ -0,0 +1,51 @@ +use std::fmt; +use std::time::Instant; + +struct Struct { + data: [u8; 32], +} + +impl fmt::Display for Struct { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{:?}", self.data) + } +} + +fn main() { + // -------------------------------- + println!("flexi_logger"); + flexi_logger::Logger::with_str("off") + .format(flexi_logger::detailed_format) + .start() + .unwrap(); + // -------------------------------- + // $> Set-Item -Path Env:RUST_LOG -Value "trace" + // println!("env_logger"); + // env_logger::init(); + // $> Set-Item -Path Env:RUST_LOG + // -------------------------------- + let mut structs = Vec::new(); + for i in 0..100 { + structs.push(Struct { + data: [i as u8; 32], + }); + } + + { + // With format + let start = Instant::now(); + for s in &structs { + log::info!("{}", format!("{}", s)); + } + eprintln!("with format: {:?}", start.elapsed()); // 2-7ms + } + + { + // Plain logger + let start = Instant::now(); + for s in &structs { + log::info!("{}", s); + } + eprintln!("plain: {:?}", start.elapsed()); // 17-26ms + } +} diff --git a/agent/support/rust/flexi_logger/scripts/cleanup.rs b/agent/support/rust/flexi_logger/scripts/cleanup.rs new file mode 100644 index 000000000..e867b2d2d --- /dev/null +++ b/agent/support/rust/flexi_logger/scripts/cleanup.rs @@ -0,0 +1,46 @@ +//! Cleans up all files and folders that were produced by test runs. +//! +//! ```cargo +//! [dependencies] +//! glob = "*" +//! 
``` +extern crate glob; + +fn main() { + for pattern in &[ + "./*.log", + "./*.alerts", + "./*.seclog", + "./*logspec.toml", + "./log_files/**/*.log", + "./log_files/**/*.zip", + "./log_files/**/*.gz", + "./test_spec/*.toml", + ] { + for globresult in glob::glob(pattern).unwrap() { + match globresult { + Err(e) => eprintln!("Evaluating pattern {:?} produced error {}", pattern, e), + Ok(pathbuf) => { + std::fs::remove_file(&pathbuf).unwrap(); + } + } + } + } + + let dirs: Vec = glob::glob("./log_files/**") + .unwrap() + .filter_map(|r| match r { + Err(e) => { + eprintln!("Searching for folders produced error {}", e); + None + } + Ok(_) => Some(r.unwrap()), + }) + .collect(); + for pathbuf in dirs.iter().rev() { + std::fs::remove_dir(&pathbuf).unwrap(); + } + + std::fs::remove_dir("./log_files/").ok(); + std::fs::remove_dir("./test_spec/").ok(); +} diff --git a/agent/support/rust/flexi_logger/scripts/qualify.rs b/agent/support/rust/flexi_logger/scripts/qualify.rs new file mode 100644 index 000000000..c0e824c54 --- /dev/null +++ b/agent/support/rust/flexi_logger/scripts/qualify.rs @@ -0,0 +1,85 @@ +//! ```cargo +//! [dependencies] +//! yansi = "0.5" +//! ``` +extern crate yansi; +use std::process::Command; + +macro_rules! run_command { + ($cmd:expr , $($arg:expr),*) => ( + let mut command = command!($cmd, $($arg),*); + let mut child = command.spawn().unwrap(); + let status = child.wait().unwrap(); + if !status.success() { + print!("> {}",yansi::Paint::red("qualify terminates due to error")); + std::process::exit(-1); + } + ) +} + +macro_rules! command { + ($cmd:expr , $($arg:expr),*) => ( + { + print!("\n> {}",yansi::Paint::yellow($cmd)); + let mut command = Command::new($cmd); + $( + print!(" {}",yansi::Paint::yellow(&$arg)); + command.arg($arg); + )* + print!("\n"); + command + } + ) +} + +fn run_script(s: &str) { + let mut path = std::path::PathBuf::from(std::env::var("CARGO_SCRIPT_BASE_PATH").unwrap()); + path.push(s); + let script = path.to_string_lossy().to_owned().to_string(); + run_command!("cargo", "script", script); +} + +fn main() { + // format + run_command!("cargo", "fmt"); + + // Build in important variants + std::fs::remove_file("Cargo.lock"); + run_command!("cargo", "+1.37.0", "build", "--no-default-features"); + run_command!("cargo", "+1.37.0", "build", "--all-features"); + + std::fs::remove_file("Cargo.lock"); + run_command!("cargo", "build"); + run_command!("cargo", "build", "--no-default-features"); + run_command!("cargo", "build", "--no-default-features", "--features=atty"); + run_command!("cargo", "build", "--all-features"); + run_command!("cargo", "build", "--release"); + run_command!("cargo", "build", "--release", "--all-features"); + + // Clippy in important variants + run_command!("cargo", "clippy", "--", "-D", "warnings"); + run_command!("cargo", "clippy", "--all-features", "--", "-D", "warnings"); + + // Run tests in important variants + run_command!("cargo", "test", "--release", "--all-features"); + run_command!("cargo", "test", "--no-default-features"); + run_command!("cargo", "test", "--release"); + #[rustfmt::skip] + run_command!("cargo", "test", "--release", "--features", "specfile_without_notification"); + run_script("cleanup"); + + // doc + run_command!("cargo", "doc", "--all-features", "--no-deps", "--open"); + + // check git status + let mut cmd = command!("git", "status", "-s"); + let child = cmd.stdout(std::process::Stdio::piped()).spawn().unwrap(); + let output = child.wait_with_output().unwrap(); + if output.stdout.len() > 0 { + print!("> {}", 
yansi::Paint::red("there are unsubmitted files")); + std::process::exit(-1); + } + + // say goodbye + println!("\n> all done :-) Looks like you're ready to \"cargo publish\"?"); +} diff --git a/agent/support/rust/flexi_logger/scripts/qualify_fast.rs b/agent/support/rust/flexi_logger/scripts/qualify_fast.rs new file mode 100644 index 000000000..ec34153fe --- /dev/null +++ b/agent/support/rust/flexi_logger/scripts/qualify_fast.rs @@ -0,0 +1,58 @@ +//! ```cargo +//! [dependencies] +//! yansi = "0.5" +//! ``` +extern crate yansi; +use std::process::Command; + +macro_rules! run_command { + ($cmd:expr , $($arg:expr),*) => ( + let mut command = command!($cmd, $($arg),*); + let mut child = command.spawn().unwrap(); + let status = child.wait().unwrap(); + if !status.success() { + print!("> {}",yansi::Paint::red("qualify terminates due to error")); + std::process::exit(-1); + } + ) +} + +macro_rules! command { + ($cmd:expr , $($arg:expr),*) => ( + { + print!("\n> {}",yansi::Paint::yellow($cmd)); + let mut command = Command::new($cmd); + $( + print!(" {}",yansi::Paint::yellow(&$arg)); + command.arg($arg); + )* + print!("\n"); + command + } + ) +} + +fn run_script(s: &str) { + let mut path = std::path::PathBuf::from(std::env::var("CARGO_SCRIPT_BASE_PATH").unwrap()); + path.push(s); + let script = path.to_string_lossy().to_owned().to_string(); + run_command!("cargo", "script", script); +} + +fn main() { + // Build in important variants + run_command!("cargo", "build", "--release", "--all-features"); + + // Clippy in important variants + run_command!("cargo", "clippy", "--all-features", "--", "-D", "warnings"); + + // Run tests in important variants + run_command!("cargo", "test", "--release", "--all-features"); + run_script("cleanup"); + + // doc + run_command!("cargo", "doc", "--all-features", "--no-deps", "--open"); + + // say goodbye + println!("\n> fast qualification is done :-) Looks like you're ready to do the full qualification?"); +} diff --git a/agent/support/rust/flexi_logger/src/code_examples.rs b/agent/support/rust/flexi_logger/src/code_examples.rs new file mode 100644 index 000000000..cade9098b --- /dev/null +++ b/agent/support/rust/flexi_logger/src/code_examples.rs @@ -0,0 +1,301 @@ +//! Here are some examples for the `flexi_logger` initialization. +//! +//! +//! ## Write logs to stderr +//! +//! Expect the log specification in the environment variable `RUST_LOG`: +//! +//! ` Logger::`[`with_env()`](../struct.Logger.html#method.with_env)`.start()?;` +//! +//! (if `RUST_LOG` is not set, or if its value cannot be interpreted, nothing is logged) +//! +//! or provide the log spec programmatically: +//! +//! ` Logger::`[`with_str("info")`](../struct.Logger.html#method.with_str)`.start()?;` +//! +//! or combine both options: +//! +//! ` Logger::`[`with_env_or_str("info")`](../struct.Logger.html#method.with_env_or_str)`.start()?;` +//! +//! After that, you just use the log-macros from the log crate. +//! +//! ## Choose the log output channel +//! +//! By default, logs are written to `stderr`. +//! With [`Logger::log_target`](../struct.Logger.html#method.log_target) +//! you can send the logs to `stdout`, a file, an implementation of `LogWriter`, +//! or write them not at all. +//! +//! When writing to files, you sometimes want to have parts of the log still on the terminal; +//! this can be achieved with +//! [`Logger::duplicate_to_stderr`](../struct.Logger.html#method.duplicate_to_stderr) or +//! [`Logger::duplicate_to_stdout`](../struct.Logger.html#method.duplicate_to_stdout), +//! 
which duplicate log messages to the terminal. +//! +//! ```rust +//! # use flexi_logger::{LogTarget,Logger,Duplicate}; +//! # fn main() -> Result<(), Box> { +//! Logger::with_str("info") +//! .log_target(LogTarget::File) // write logs to file +//! .duplicate_to_stderr(Duplicate::Warn) // print warnings and errors also to the console +//! .start()?; +//! # Ok(()) +//! # } +//! ``` +//! +//! ## Influence the location and name of the log file +//! +//! By default, the log files are created in the current directory (where the program was started). +//! With [`Logger:directory`](../struct.Logger.html#method.directory) +//! you can specify a concrete folder in which the files should be created. +//! +//! Using [`Logger::discriminant`](../struct.Logger.html#method.discriminant) +//! you can add a discriminating infix to the log file name. +//! +//! With [`Logger::suffix`](../struct.Logger.html#method.suffix) +//! you can change the suffix that is used for the log files. +//! +//! When writing to files, especially when they are in a distant folder, you may want to let the +//! user know where the log file is. +//! +//! [`Logger::print_message`](../struct.Logger.html#method.print_message) +//! prints an info to `stdout` to which file the log is written. +//! +//! `create_symlink(path)` creates (on unix-systems only) a symbolic link at the +//! specified path that points to the log file. +//! +//! ```rust +//! # use flexi_logger::Logger; +//! # fn main() -> Result<(), Box> { +//! Logger::with_str("info") +//! .log_to_file() // write logs to file +//! .directory("traces") // create files in folder ./traces +//! .discriminant("Sample4711A") // use infix in log file name +//! .suffix("trc") // use suffix .trc instead of .log +//! .print_message() // +//! .create_symlink("current_run") // create a symbolic link to the current log file +//! .start()?; +//! # Ok(()) +//! # } +//! ``` +//! +//! This example will print a message +//! "Log is written to `./traces/foo_Sample4711A_2020-11-17_19-24-35.trc`" +//! and, on unix, create a symbolic link called `current_run`. +//! +//! ## Specify the format for the log lines explicitly +//! +//! With [`Logger::format`](../struct.Logger.html#method.format) +//! you set the format for all used output channels of `flexi_logger`. +//! +//! `flexi_logger` provides a couple of format functions, and you can also create and use your own, +//! e.g. by copying and modifying one of the provided format functions. +//! +//! Depending on the configuration, `flexi_logger` can write logs to multiple channels +//! (stdout, stderr, files, or additional writers) +//! at the same time. You can control the format for each output channel individually, using +//! [`Logger::format_for_files`](../struct.Logger.html#method.format_for_files), +//! [`Logger::format_for_stderr`](../struct.Logger.html#method.format_for_stderr), +//! [`Logger::format_for_stdout`](../struct.Logger.html#method.format_for_stdout), or +//! [`Logger::format_for_writer`](../struct.Logger.html#method.format_for_writer). +//! +//! As argument for these functions you can use one of the provided non-coloring format functions +//! +//! - [`default_format`](../fn.default_format.html) +//! - [`detailed_format`](../fn.detailed_format.html) +//! - [`opt_format`](../fn.opt_format.html) +//! - [`with_thread`](../fn.with_thread.html), +//! +//! or one of their coloring pendants +//! +//! - [`colored_default_format`](../fn.colored_default_format.html) +//! - [`colored_detailed_format`](../fn.colored_detailed_format.html) +//! 
- [`colored_opt_format`](../fn.colored_opt_format.html). +//! - [`colored_with_thread`](../fn.colored_with_thread.html). +//! +//! ### Adaptive Coloring +//! +//! You can use coloring for `stdout` and/or `stderr` +//! conditionally, such that colors are used when the output goes to a tty, +//! and suppressed if you e.g. pipe the output to some other program. +//! With +//! [`Logger::adaptive_format_for_stderr`](../struct.Logger.html#method.adaptive_format_for_stderr) or +//! [`Logger::adaptive_format_for_stdout`](../struct.Logger.html#method.adaptive_format_for_stdout) +//! you can specify one of the provided format pairs +//! (which are based on the format functions listed above), +//! or you can provide your own colored and non-colored format functions. +//! +//! ### Defaults +//! +//! `flexi_logger` initializes by default equivalently to this: +//! +//! ```rust,ignore +//! # use flexi_logger::{Logger,AdaptiveFormat,default_format}; +//! # use log::{debug, error, info, trace, warn}; +//! # fn main() -> Result<(), Box> { +//! # Logger::with_str("info") // Write all error, warn, and info messages +//! # .directory(std::env::temp_dir()) +//! .adaptive_format_for_stderr(AdaptiveFormat::Default) +//! .adaptive_format_for_stdout(AdaptiveFormat::Default) +//! .format_for_files(default_format) +//! .format_for_writer(default_format) +//! # .start()?; +//! # error!("This is an error message"); +//! # warn!("This is a warning"); +//! # info!("This is an info message"); +//! # debug!("This is a debug message - you must not see it!"); +//! # trace!("This is a trace message - you must not see it!"); +//! # run() +//! # } +//! # fn run() -> Result<(), Box> {Ok(())} +//! ``` +//! +//! ## Use a fixed log file, and truncate or append the file on each program start +//! +//! With [`Logger::log_to_file`](../struct.Logger.html#method.log_to_file) and without rotation, +//! `flexi_logger` uses by default files with a timestamp in the name, like +//! `foo_2020-11-16_08-37-44.log` (for a program called `foo`), which are quite unique for each +//! program start. +//! +//! With [`Logger::suppress_timestamp`](../struct.Logger.html#method.suppress_timestamp) +//! you get a simple fixed filename, like `foo.log`. +//! +//! In that case, a restart of the program will truncate an existing log file. +//! +//! Use additionally [`Logger::append`](../struct.Logger.html#method.append) +//! to append the logs of each new run to the existing file. +//! +//! ```rust +//! # use flexi_logger::Logger; +//! # use log::{debug, error, info, trace, warn}; +//! # fn main() -> Result<(), Box> { +//! Logger::with_str("info") // Write all error, warn, and info messages +//! # .directory(std::env::temp_dir()) +//! .log_to_file() // Write the log to a file +//! .suppress_timestamp() // use a simple filename without a timestamp +//! .append() // do not truncate the log file when the program is restarted +//! .start()?; +//! +//! # error!("This is an error message"); +//! # warn!("This is a warning"); +//! # info!("This is an info message"); +//! # debug!("This is a debug message - you must not see it!"); +//! # trace!("This is a trace message - you must not see it!"); +//! # run() +//! # } +//! # fn run() -> Result<(), Box> {Ok(())} +//! ``` +//! +//! ## Rotate the log file +//! +//! With rotation, the logs are always written to a file +//! with the infix `rCURRENT`, like e.g. `foo_rCURRENT.log`. +//! +//! [`Logger::rotation`](../struct.Logger.html#method.rotation) +//! takes three enum arguments to define its behavior: +//! +//! 
- [`Criterion`](../enum.Criterion.html) +//! - with `Criterion::Age` the rotation happens +//! when the clock switches to a new day, hour, minute, or second +//! - with `Criterion::Size` the rotation happens when the current log file exceeds +//! the specified limit +//! - with `Criterion::AgeOrSize` the rotation happens when either of the two limits is reached +//! +//! - [`Naming`](../enum.Naming.html)
The current file is then renamed +//! - with `Naming::Timestamps` to something like `foo_r2020-11-16_08-56-52.log` +//! - with `Naming::Numbers` to something like `foo_r00000.log` +//! +//! and a fresh `rCURRENT` file is created. +//! +//! - [`Cleanup`](../enum.Cleanup.html) defines if and how you +//! avoid accumulating log files indefinitely: +//! - with `Cleanup::KeepLogFiles` you specify the number of log files that should be retained; +//! if there are more, the older ones are getting deleted +//! - with `Cleanup::KeepCompressedFiles` you specify the number of log files that should be +//! retained, and these are being compressed additionally +//! - with `Cleanup::KeepLogAndCompressedFiles` you specify the number of log files that should be +//! retained as is, and an additional number that are being compressed +//! - with `Cleanup::Never` no cleanup is done, all files are retained. +//! +//! ```rust +//! # use flexi_logger::{Age, Cleanup, Criterion, Logger, Naming}; +//! # use log::{debug, error, info, trace, warn}; +//! # fn main() -> Result<(), Box> { +//! Logger::with_str("info") // Write all error, warn, and info messages +//! # .directory(std::env::temp_dir()) +//! .log_to_file() // Write the log to a file +//! .rotate( // If the program runs long enough, +//! Criterion::Age(Age::Day), // - create a new file every day +//! Naming::Timestamps, // - let the rotated files have a timestamp in their name +//! Cleanup::KeepLogFiles(7), // - keep at most 7 log files +//! ) +//! .start()?; +//! +//! # error!("This is an error message"); +//! # warn!("This is a warning"); +//! # info!("This is an info message"); +//! # debug!("This is a debug message - you must not see it!"); +//! # trace!("This is a trace message - you must not see it!"); +//! # run() +//! # } +//! # fn run() -> Result<(), Box> {Ok(())} +//! ``` +//! +//! ## Reconfigure the log specification programmatically +//! +//! This can be especially handy in debugging situations where you want to see +//! traces only for a short instant. +//! +//! Obtain the `ReconfigurationHandle` +//! +//! ```rust +//! # use flexi_logger::Logger; +//! let mut log_handle = Logger::with_str("info") +//! // ... logger configuration ... +//! .start() +//! .unwrap(); +//! ``` +//! +//! and modify the effective log specification from within your code: +//! +//! ```rust, ignore +//! // ... +//! log_handle.parse_and_push_temp_spec("info, critical_mod = trace"); +//! // ... critical calls ... +//! log_handle.pop_temp_spec(); +//! // ... continue with the log spec you had before. +//! ``` +//! +//! ## Reconfigure the log specification dynamically by editing a spec-file +//! +//! If you start `flexi_logger` with a specfile, +//! +//! ```rust,ignore +//! # use flexi_logger::Logger; +//! Logger::with_str("info") +//! // ... logger configuration ... +//! .start_with_specfile("/server/config/logspec.toml") +//! .unwrap(); +//! ``` +//! +//! then you can change the log specification dynamically, *while your program is running*, +//! by editing the specfile. This can be a great help e.g. if you want to get detailed traces +//! for _some_ requests to a long running server. +//! +//! See [`Logger::start_with_specfile`](../struct.Logger.html#method.start_with_specfile) +//! for more information. +//! +//! ## Miscellaneous +//! +//! For the sake of completeness, we refer here to some more configuration methods. +//! See their documentation for more details. +//! +//! [`Logger::check_parser_error`](../struct.Logger.html#method.check_parser_error) +//! +//! 
[`Logger::set_palette`](../struct.Logger.html#method.set_palette) +//! +//! [`Logger::cleanup_in_background_thread`](../struct.Logger.html#method.cleanup_in_background_thread) +//! +//! [`Logger::use_windows_line_ending`](../struct.Logger.html#method.use_windows_line_ending) +//! +//! [`Logger::add_writer`](../struct.Logger.html#method.add_writer) diff --git a/agent/support/rust/flexi_logger/src/deferred_now.rs b/agent/support/rust/flexi_logger/src/deferred_now.rs new file mode 100644 index 000000000..199a23891 --- /dev/null +++ b/agent/support/rust/flexi_logger/src/deferred_now.rs @@ -0,0 +1,23 @@ +use chrono::{DateTime, Local}; + +/// Deferred timestamp creation. +/// +/// Is used to ensure that a log record that is sent to multiple outputs +/// (in maybe different formats) always uses the same timestamp. +#[derive(Debug)] +pub struct DeferredNow(Option>); +impl<'a> DeferredNow { + pub(crate) fn new() -> Self { + Self(None) + } + + /// Retrieve the timestamp. + /// + /// Requires mutability because the first caller will generate the timestamp. + pub fn now(&'a mut self) -> &'a DateTime { + if self.0.is_none() { + self.0 = Some(Local::now()); + } + self.0.as_ref().unwrap() + } +} diff --git a/agent/support/rust/flexi_logger/src/flexi_error.rs b/agent/support/rust/flexi_logger/src/flexi_error.rs new file mode 100644 index 000000000..273773a6c --- /dev/null +++ b/agent/support/rust/flexi_logger/src/flexi_error.rs @@ -0,0 +1,63 @@ +use crate::log_specification::LogSpecification; +// use std::backtrace::Backtrace; +use thiserror::Error; + +/// Describes errors in the initialization of `flexi_logger`. +#[derive(Error, Debug)] +pub enum FlexiLoggerError { + /// Log file cannot be written because the specified path is not a directory. + #[error("Log file cannot be written because the specified path is not a directory")] + OutputBadDirectory, + + /// Spawning the cleanup thread failed. + /// + /// This error can safely be avoided with `Logger::cleanup_in_background_thread(false)`. + #[error("Spawning the cleanup thread failed.")] + OutputCleanupThread(std::io::Error), + + /// Log cannot be written, e.g. because the configured output directory is not accessible. + #[error( + "Log cannot be written, e.g. because the configured output directory is not accessible" + )] + OutputIo(#[from] std::io::Error), + + /// Filesystem notifications for the specfile could not be set up. + #[error("Filesystem notifications for the specfile could not be set up")] + #[cfg(feature = "specfile")] + SpecfileNotify(#[from] notify::Error), + + /// Parsing the configured logspec toml-file failed. + #[error("Parsing the configured logspec toml-file failed")] + #[cfg(feature = "specfile_without_notification")] + SpecfileToml(#[from] toml::de::Error), + + /// Specfile cannot be accessed or created. + #[error("Specfile cannot be accessed or created")] + #[cfg(feature = "specfile_without_notification")] + SpecfileIo(std::io::Error), + + /// Specfile has an unsupported extension. + #[error("Specfile has an unsupported extension")] + #[cfg(feature = "specfile_without_notification")] + SpecfileExtension(&'static str), + + /// Invalid level filter. + #[error("Invalid level filter")] + LevelFilter(String), + + /// Failed to parse log specification. + #[error("Failed to parse log specification: {0}")] + Parse(String, LogSpecification), + + /// Logger initialization failed. + #[error("Logger initialization failed")] + Log(#[from] log::SetLoggerError), + + /// Some synchronization object is poisoned. 
+ #[error("Some synchronization object is poisoned")] + Poison, + + /// Palette parsing failed + #[error("Palette parsing failed")] + Palette(#[from] std::num::ParseIntError), +} diff --git a/agent/support/rust/flexi_logger/src/flexi_logger.rs b/agent/support/rust/flexi_logger/src/flexi_logger.rs new file mode 100644 index 000000000..7b643ec35 --- /dev/null +++ b/agent/support/rust/flexi_logger/src/flexi_logger.rs @@ -0,0 +1,151 @@ +use crate::primary_writer::PrimaryWriter; +use crate::writers::LogWriter; +use crate::LogSpecification; + +#[cfg(feature = "textfilter")] +use regex::Regex; +use std::collections::HashMap; +use std::sync::{Arc, RwLock}; + +// Implements log::Log to plug into the log crate. +// +// Delegates the real logging to the configured PrimaryWriter and optionally to other writers. +// The `PrimaryWriter` is either a `StdErrWriter` or an `ExtendedFileWriter`. +// An ExtendedFileWriter logs to a file, by delegating to a FileWriter, +// and can additionally duplicate log lines to stderr. +pub(crate) struct FlexiLogger { + log_specification: Arc>, + primary_writer: Arc, + other_writers: Arc>>, +} + +impl FlexiLogger { + pub fn new( + log_specification: Arc>, + primary_writer: Arc, + other_writers: Arc>>, + ) -> Self { + Self { + log_specification, + primary_writer, + other_writers, + } + } + + fn primary_enabled(&self, level: log::Level, module: &str) -> bool { + self.log_specification.read().as_ref() + .unwrap(/* catch and expose error? */) + .enabled(level, module) + } +} + +impl log::Log for FlexiLogger { + // If other writers are configured and the metadata target addresses them correctly, + // - we should determine if the metadata-level is digested by any of the writers + // (including the primary writer) + // else we fall back to default behavior: + // Return true if + // - target is filled with module path and level is accepted by log specification + // - target is filled with crap and ??? + // + // Caveat: + // Rocket e.g. sets target explicitly to several fantasy names; + // these hopefully do not collide with any of the modules in the log specification; + // since they do not conform with the {} syntax expected by flexi_logger, they're treated as + // module names. 
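+    // For example (hypothetical writer name "Alert", registered via Logger::add_writer()):
+    //     log::error!(target: "{Alert,_Default}", "something critical happened");
+    // sends the record to the "Alert" writer and, because of "_Default", also to the
+    // primary writer; a target without braces is treated as a module path, as usual.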
+ fn enabled(&self, metadata: &log::Metadata) -> bool { + let target = metadata.target(); + let level = metadata.level(); + + if !self.other_writers.is_empty() && target.starts_with('{') { + // at least one other writer is configured _and_ addressed + let targets: Vec<&str> = target[1..(target.len() - 1)].split(',').collect(); + for t in targets { + if t != "_Default" { + match self.other_writers.get(t) { + None => eprintln!("[flexi_logger] bad writer spec: {}", t), + Some(writer) => { + if level < writer.max_log_level() { + return true; + } + } + } + } + } + } + + self.primary_enabled(level, target) + } + + fn log(&self, record: &log::Record) { + let target = record.metadata().target(); + let mut now = crate::DeferredNow::new(); + if target.starts_with('{') { + let mut use_default = false; + let targets: Vec<&str> = target[1..(target.len() - 1)].split(',').collect(); + for t in targets { + if t == "_Default" { + use_default = true; + } else { + match self.other_writers.get(t) { + None => eprintln!("[flexi_logger] found bad writer spec: {}", t), + Some(writer) => { + writer.write(&mut now, record).unwrap_or_else(|e| { + eprintln!( + "[flexi_logger] writing log line to custom writer \"{}\" \ + failed with: \"{}\"", + t, e + ); + }); + } + } + } + } + if !use_default { + return; + } + } + + let effective_target = if target.starts_with('{') { + record.module_path().unwrap_or_default() + } else { + target + }; + if !self.primary_enabled(record.level(), effective_target) { + return; + } + + #[cfg(feature = "textfilter")] + { + // closure that we need below + let check_text_filter = |text_filter: &Option| { + text_filter + .as_ref() + .map_or(true, |filter| filter.is_match(&*record.args().to_string())) + }; + + if !check_text_filter( + self.log_specification.read().as_ref().unwrap(/* expose this? */).text_filter(), + ) { + return; + } + } + + self.primary_writer + .write(&mut now, record) + .unwrap_or_else(|e| { + eprintln!("[flexi_logger] writing log line failed with {}", e); + }); + } + + fn flush(&self) { + self.primary_writer.flush().unwrap_or_else(|e| { + eprintln!("[flexi_logger] flushing primary writer failed with {}", e); + }); + for writer in self.other_writers.values() { + writer.flush().unwrap_or_else(|e| { + eprintln!("[flexi_logger] flushing custom writer failed with {}", e); + }); + } + } +} diff --git a/agent/support/rust/flexi_logger/src/formats.rs b/agent/support/rust/flexi_logger/src/formats.rs new file mode 100644 index 000000000..61e1e6ab4 --- /dev/null +++ b/agent/support/rust/flexi_logger/src/formats.rs @@ -0,0 +1,416 @@ +use crate::DeferredNow; +use log::Record; +use std::thread; +#[cfg(feature = "colors")] +use yansi::{Color, Paint, Style}; + +/// Function type for Format functions. +/// +/// If you want to write the log lines in your own format, +/// implement a function with this signature and provide it to one of the methods +/// [`Logger::format()`](struct.Logger.html#method.format), +/// [`Logger::format_for_files()`](struct.Logger.html#method.format_for_files), +/// or [`Logger::format_for_stderr()`](struct.Logger.html#method.format_for_stderr). +/// +/// Checkout the code of the provided [format functions](index.html#functions) +/// if you want to start with a template. +/// +/// ## Parameters +/// +/// - `write`: the output stream +/// +/// - `now`: the timestamp that you should use if you want a timestamp to appear in the log line +/// +/// - `record`: the log line's content and metadata, as provided by the log crate's macros. 
+/// +pub type FormatFunction = fn( + write: &mut dyn std::io::Write, + now: &mut DeferredNow, + record: &Record, +) -> Result<(), std::io::Error>; + +/// A logline-formatter that produces log lines like
+/// ```INFO [my_prog::some_submodule] Task successfully read from conf.json``` +/// +/// # Errors +/// +/// See `std::write` +pub fn default_format( + w: &mut dyn std::io::Write, + _now: &mut DeferredNow, + record: &Record, +) -> Result<(), std::io::Error> { + write!( + w, + "{} [{}] {}", + record.level(), + record.module_path().unwrap_or(""), + record.args() + ) +} + +#[allow(clippy::doc_markdown)] +/// A colored version of the logline-formatter `default_format` +/// that produces log lines like
+/// ERROR [my_prog::some_submodule] File not found +/// +/// See method [style](fn.style.html) if you want to influence coloring. +/// +/// Only available with feature `colors`. +/// +/// # Errors +/// +/// See `std::write` +#[cfg(feature = "colors")] +pub fn colored_default_format( + w: &mut dyn std::io::Write, + _now: &mut DeferredNow, + record: &Record, +) -> Result<(), std::io::Error> { + let level = record.level(); + write!( + w, + "{} [{}] {}", + style(level, level), + record.module_path().unwrap_or(""), + style(level, record.args()) + ) +} + +/// A logline-formatter that produces log lines with timestamp and file location, like +///
+/// ```[2016-01-13 15:25:01.640870 +01:00] INFO [src/foo/bar:26] Task successfully read from conf.json``` +///
+/// +/// # Errors +/// +/// See `std::write` +pub fn opt_format( + w: &mut dyn std::io::Write, + now: &mut DeferredNow, + record: &Record, +) -> Result<(), std::io::Error> { + write!( + w, + "[{}] {} [{}:{}] {}", + now.now().format("%Y-%m-%d %H:%M:%S%.6f %:z"), + record.level(), + record.file().unwrap_or(""), + record.line().unwrap_or(0), + &record.args() + ) +} + +/// A colored version of the logline-formatter `opt_format`. +/// +/// See method [style](fn.style.html) if you want to influence coloring. +/// +/// Only available with feature `colors`. +/// +/// # Errors +/// +/// See `std::write` +#[cfg(feature = "colors")] +pub fn colored_opt_format( + w: &mut dyn std::io::Write, + now: &mut DeferredNow, + record: &Record, +) -> Result<(), std::io::Error> { + let level = record.level(); + write!( + w, + "[{}] {} [{}:{}] {}", + style(level, now.now().format("%Y-%m-%d %H:%M:%S%.6f %:z")), + style(level, level), + record.file().unwrap_or(""), + record.line().unwrap_or(0), + style(level, &record.args()) + ) +} + +/// A logline-formatter that produces log lines like +///
+/// ```[2016-01-13 15:25:01.640870 +01:00] INFO [foo::bar] src/foo/bar.rs:26: Task successfully read from conf.json``` +///
+/// i.e. with timestamp, module path and file location. +/// +/// # Errors +/// +/// See `std::write` +pub fn detailed_format( + w: &mut dyn std::io::Write, + now: &mut DeferredNow, + record: &Record, +) -> Result<(), std::io::Error> { + write!( + w, + "[{}] {} [{}] {}:{}: {}", + now.now().format("%Y-%m-%d %H:%M:%S%.6f %:z"), + record.level(), + record.module_path().unwrap_or(""), + record.file().unwrap_or(""), + record.line().unwrap_or(0), + &record.args() + ) +} + +/// A colored version of the logline-formatter `detailed_format`. +/// +/// See method [style](fn.style.html) if you want to influence coloring. +/// +/// Only available with feature `colors`. +/// +/// # Errors +/// +/// See `std::write` +#[cfg(feature = "colors")] +pub fn colored_detailed_format( + w: &mut dyn std::io::Write, + now: &mut DeferredNow, + record: &Record, +) -> Result<(), std::io::Error> { + let level = record.level(); + write!( + w, + "[{}] {} [{}] {}:{}: {}", + style(level, now.now().format("%Y-%m-%d %H:%M:%S%.6f %:z")), + style(level, record.level()), + record.module_path().unwrap_or(""), + record.file().unwrap_or(""), + record.line().unwrap_or(0), + style(level, &record.args()) + ) +} + +/// A logline-formatter that produces log lines like +///
+/// ```[2016-01-13 15:25:01.640870 +01:00] T[taskreader] INFO [src/foo/bar:26] Task successfully read from conf.json``` +///
+/// i.e. with timestamp, thread name and file location. +/// +/// # Errors +/// +/// See `std::write` +pub fn with_thread( + w: &mut dyn std::io::Write, + now: &mut DeferredNow, + record: &Record, +) -> Result<(), std::io::Error> { + write!( + w, + "[{}] T[{:?}] {} [{}:{}] {}", + now.now().format("%Y-%m-%d %H:%M:%S%.6f %:z"), + thread::current().name().unwrap_or(""), + record.level(), + record.file().unwrap_or(""), + record.line().unwrap_or(0), + &record.args() + ) +} + +/// A colored version of the logline-formatter `with_thread`. +/// +/// See method [style](fn.style.html) if you want to influence coloring. +/// +/// Only available with feature `colors`. +/// +/// # Errors +/// +/// See `std::write` +#[cfg(feature = "colors")] +pub fn colored_with_thread( + w: &mut dyn std::io::Write, + now: &mut DeferredNow, + record: &Record, +) -> Result<(), std::io::Error> { + let level = record.level(); + write!( + w, + "[{}] T[{:?}] {} [{}:{}] {}", + style(level, now.now().format("%Y-%m-%d %H:%M:%S%.6f %:z")), + style(level, thread::current().name().unwrap_or("")), + style(level, level), + record.file().unwrap_or(""), + record.line().unwrap_or(0), + style(level, &record.args()) + ) +} + +/// Helper function that is used in the provided coloring format functions to apply +/// colors based on the log level and the effective color palette. +/// +/// See [`Logger::set_palette`](struct.Logger.html#method.set_palette) if you want to +/// modify the color palette. +/// +/// Only available with feature `colors`. +#[cfg(feature = "colors")] +pub fn style(level: log::Level, item: T) -> Paint { + let palette = &*(PALETTE.read().unwrap()); + match level { + log::Level::Error => palette.error, + log::Level::Warn => palette.warn, + log::Level::Info => palette.info, + log::Level::Debug => palette.debug, + log::Level::Trace => palette.trace, + } + .paint(item) +} + +#[cfg(feature = "colors")] +lazy_static::lazy_static! { + static ref PALETTE: std::sync::RwLock = std::sync::RwLock::new(Palette::default()); +} + +// Overwrites the default PALETTE value either from the environment, if set, +// or from the parameter, if filled. +// Returns an error if parsing failed. 
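+// For example, with one ANSI-256 style number per level in the order
+// error;warn;info;debug;trace ("-" meaning "no color"):
+//     FLEXI_LOGGER_PALETTE="196;208;-;7;8"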
+#[cfg(feature = "colors")] +pub(crate) fn set_palette(input: &Option) -> Result<(), std::num::ParseIntError> { + match std::env::var_os("FLEXI_LOGGER_PALETTE") { + Some(ref env_osstring) => { + *(PALETTE.write().unwrap()) = Palette::from(env_osstring.to_string_lossy().as_ref())?; + } + None => match input { + Some(ref input_string) => { + *(PALETTE.write().unwrap()) = Palette::from(input_string)?; + } + None => {} + }, + } + Ok(()) +} + +#[cfg(feature = "colors")] +#[derive(Debug)] +struct Palette { + pub error: Style, + pub warn: Style, + pub info: Style, + pub debug: Style, + pub trace: Style, +} +#[cfg(feature = "colors")] +impl Palette { + fn default() -> Palette { + Palette { + error: Style::new(Color::Fixed(196)).bold(), + warn: Style::new(Color::Fixed(208)).bold(), + info: Style::new(Color::Unset), + debug: Style::new(Color::Fixed(7)), + trace: Style::new(Color::Fixed(8)), + } + } + + fn from(palette: &str) -> Result { + let mut items = palette.split(';'); + Ok(Palette { + error: parse_style(items.next().unwrap_or("196").trim())?, + warn: parse_style(items.next().unwrap_or("208").trim())?, + info: parse_style(items.next().unwrap_or("-").trim())?, + debug: parse_style(items.next().unwrap_or("7").trim())?, + trace: parse_style(items.next().unwrap_or("8").trim())?, + }) + } +} + +#[cfg(feature = "colors")] +fn parse_style(input: &str) -> Result { + Ok(if input == "-" { + Style::new(Color::Unset) + } else { + Style::new(Color::Fixed(input.parse()?)) + }) +} + +/// Specifies the `FormatFunction` and decides if coloring should be used. +/// +/// Is used in +/// [`Logger::adaptive_format_for_stderr`](struct.Logger.html#method.adaptive_format_for_stderr) and +/// [`Logger::adaptive_format_for_stdout`](struct.Logger.html#method.adaptive_format_for_stdout). +/// The coloring format functions are used if the output channel is a tty. +/// +/// Only available with feature `atty`. +#[cfg(feature = "atty")] +#[derive(Clone, Copy)] +pub enum AdaptiveFormat { + /// Chooses between [`default_format`](fn.default_format.html) + /// and [`colored_default_format`](fn.colored_default_format.html). + /// + /// Only available with feature `colors`. + #[cfg(feature = "colors")] + Default, + /// Chooses between [`detailed_format`](fn.detailed_format.html) + /// and [`colored_detailed_format`](fn.colored_detailed_format.html). + /// + /// Only available with feature `colors`. + #[cfg(feature = "colors")] + Detailed, + /// Chooses between [`opt_format`](fn.opt_format.html) + /// and [`colored_opt_format`](fn.colored_opt_format.html). + /// + /// Only available with feature `colors`. + #[cfg(feature = "colors")] + Opt, + /// Chooses between [`with_thread`](fn.with_thread.html) + /// and [`colored_with_thread`](fn.colored_with_thread.html). + /// + /// Only available with feature `colors`. + #[cfg(feature = "colors")] + WithThread, + /// Chooses between the first format function (which is supposed to be uncolored) + /// and the second (which is supposed to be colored). + /// + /// Allows providing own format functions, with freely choosable coloring technique, + /// _and_ making use of the tty detection. 
+ Custom(FormatFunction, FormatFunction), +} + +#[cfg(feature = "atty")] +impl AdaptiveFormat { + #[must_use] + pub(crate) fn format_function(self, stream: Stream) -> FormatFunction { + if stream.is_tty() { + match self { + #[cfg(feature = "colors")] + Self::Default => colored_default_format, + #[cfg(feature = "colors")] + Self::Detailed => colored_detailed_format, + #[cfg(feature = "colors")] + Self::Opt => colored_opt_format, + #[cfg(feature = "colors")] + Self::WithThread => colored_with_thread, + Self::Custom(_, colored) => colored, + } + } else { + match self { + #[cfg(feature = "colors")] + Self::Default => default_format, + #[cfg(feature = "colors")] + Self::Detailed => detailed_format, + #[cfg(feature = "colors")] + Self::Opt => opt_format, + #[cfg(feature = "colors")] + Self::WithThread => with_thread, + Self::Custom(uncolored, _) => uncolored, + } + } + } +} + +#[cfg(feature = "atty")] +#[derive(Clone, Copy)] +pub(crate) enum Stream { + StdOut, + StdErr, +} +#[cfg(feature = "atty")] +impl Stream { + #[must_use] + pub fn is_tty(self) -> bool { + match self { + Self::StdOut => atty::is(atty::Stream::Stdout), + Self::StdErr => atty::is(atty::Stream::Stderr), + } + } +} diff --git a/agent/support/rust/flexi_logger/src/lib.rs b/agent/support/rust/flexi_logger/src/lib.rs new file mode 100644 index 000000000..a94d17915 --- /dev/null +++ b/agent/support/rust/flexi_logger/src/lib.rs @@ -0,0 +1,63 @@ +#![deny(missing_docs)] +#![deny(clippy::all)] +#![deny(clippy::pedantic)] +#![allow(clippy::unused_self)] +#![allow(clippy::needless_doctest_main)] + +//! A flexible and easy-to-use logger that writes logs to stderr and/or to files +//! or other output streams. +//! +//! To read the log specification from an environment variable and get the log written to `stderr`, +//! start `flexi_logger` e.g. like this: +//! ```rust +//! flexi_logger::Logger::with_env().start().unwrap(); +//! ``` +//! +//! See +//! +//! * [Logger](struct.Logger.html) for a full description of all configuration options, +//! * and the [writers](writers/index.html) module for the usage of additional log writers, +//! * and [the homepage](https://crates.io/crates/flexi_logger) for how to get started. +//! +//! There are configuration options to e.g. +//! +//! * decide whether you want to write your logs to stderr or to a file, +//! * configure the path and the filenames of the log files, +//! * use file rotation, +//! * specify the line format for the log lines, +//! * define additional log output streams, e.g for alert or security messages, +//! * support changing the log specification while the program is running, +//! +//! `flexi_logger` uses a similar syntax as [`env_logger`](http://crates.io/crates/env_logger/) +//! for specifying which logs should really be written (but is more graceful with the syntax, +//! and can provide error information). +//! +//! By default, i.e. if feature `colors` is not switched off, the log lines that appear on your +//! terminal are coloured. In case the chosen colors don't fit to your terminal's color theme, +//! you can adapt the colors to improve readability. +//! See the documentation of method [style](fn.style.html) +//! for a description how this can be done. 
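+//!
+//! A minimal sketch (assuming the default `colors` feature is active; see
+//! [`Logger::set_palette`](struct.Logger.html#method.set_palette) for the exact semantics
+//! of the palette string):
+//!
+//! ```rust,ignore
+//! flexi_logger::Logger::with_str("info")
+//!     .set_palette("196;208;-;7;8".to_string()) // error;warn;info;debug;trace
+//!     .start()
+//!     .unwrap();
+//! ```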
+
+mod deferred_now;
+mod flexi_error;
+mod flexi_logger;
+mod formats;
+mod log_specification;
+mod logger;
+mod parameters;
+mod primary_writer;
+mod reconfiguration_handle;
+
+pub mod code_examples;
+pub mod writers;
+
+/// Re-exports from log crate
+pub use log::{Level, LevelFilter, Record};
+
+pub use crate::deferred_now::DeferredNow;
+pub use crate::flexi_error::FlexiLoggerError;
+pub use crate::formats::*;
+pub use crate::log_specification::{LogSpecBuilder, LogSpecification, ModuleFilter};
+pub use crate::logger::{Duplicate, LogTarget, Logger};
+pub use crate::parameters::{Age, Cleanup, Criterion, Naming};
+pub use crate::reconfiguration_handle::ReconfigurationHandle;
diff --git a/agent/support/rust/flexi_logger/src/log_specification.rs b/agent/support/rust/flexi_logger/src/log_specification.rs
new file mode 100644
index 000000000..684b7caf9
--- /dev/null
+++ b/agent/support/rust/flexi_logger/src/log_specification.rs
@@ -0,0 +1,927 @@
+use crate::flexi_error::FlexiLoggerError;
+use crate::LevelFilter;
+
+#[cfg(feature = "textfilter")]
+use regex::Regex;
+use std::collections::HashMap;
+use std::env;
+
+///
+/// Immutable struct that defines which loglines are to be written,
+/// based on the module, the log level, and the text.
+///
+/// The loglevel specification via string (relevant for methods
+/// [parse()](struct.LogSpecification.html#method.parse) and
+/// [env()](struct.LogSpecification.html#method.env))
+/// works essentially like with `env_logger`,
+/// but we are a bit more tolerant with spaces. Its functionality can be
+/// described with some Backus-Naur-form:
+///
+/// ```text
+/// <log_level_spec> ::= single_log_level_spec[{,single_log_level_spec}][/<text_filter>]
+/// <single_log_level_spec> ::= <path_to_module>|<log_level>|<path_to_module>=<log_level>
+/// <text_filter> ::= <regex>
+/// ```
+///
+/// * Examples:
+///
+///   * `"info"`: all logs with info, warn, or error level are written
+///   * `"crate1"`: all logs of this crate are written, but nothing else
+///   * `"warn, crate2::mod_a=debug, mod_x::mod_y=trace"`: all crates log warnings and errors,
+///     `mod_a` additionally debug messages, and `mod_x::mod_y` is fully traced
+///
+/// * If you just specify the module, without `log_level`, all levels will be traced for this
+///   module.
+/// * If you just specify a log level, this will be applied as default to all modules without
+///   explicit log level assignment.
+///   (You see that for modules named error, warn, info, debug or trace,
+///   it is necessary to specify their loglevel explicitly).
+/// * The module names are compared as Strings, with the side effect that a specified module filter
+///   affects all modules whose name starts with this String.
+/// Example: ```"foo"``` affects e.g. +/// +/// * `foo` +/// * `foo::bar` +/// * `foobaz` (!) +/// * `foobaz::bar` (!) +/// +/// The optional text filter is applied for all modules. +/// +/// Note that external module names are to be specified like in ```"extern crate ..."```, i.e., +/// for crates with a dash in their name this means: the dash is to be replaced with +/// the underscore (e.g. ```karl_heinz```, not ```karl-heinz```). +/// See +/// [https://github.com/rust-lang/rfcs/pull/940/files](https://github.com/rust-lang/rfcs/pull/940/files) +/// for an explanation of the different naming conventions in Cargo (packages allow hyphen) and +/// rustc (“extern crate” does not allow hyphens). +#[derive(Clone, Debug, Default)] +pub struct LogSpecification { + module_filters: Vec, + #[cfg(feature = "textfilter")] + textfilter: Option, +} + +/// Defines which loglevel filter to use for the specified module. +/// +/// A `ModuleFilter`, whose `module_name` is not set, describes the default loglevel filter. +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct ModuleFilter { + /// The module name. + pub module_name: Option, + /// The level filter. + pub level_filter: LevelFilter, +} + +impl LogSpecification { + pub(crate) fn update_from(&mut self, other: Self) { + self.module_filters = other.module_filters; + + #[cfg(feature = "textfilter")] + { + self.textfilter = other.textfilter; + } + } + + pub(crate) fn max_level(&self) -> log::LevelFilter { + self.module_filters + .iter() + .map(|d| d.level_filter) + .max() + .unwrap_or(log::LevelFilter::Off) + } + + /// Returns true if messages on the specified level from the writing module should be written + pub fn enabled(&self, level: log::Level, writing_module: &str) -> bool { + // Search for the longest match, the vector is assumed to be pre-sorted. + for module_filter in &self.module_filters { + match module_filter.module_name { + Some(ref module_name) => { + if writing_module.starts_with(module_name) { + return level <= module_filter.level_filter; + } + } + None => return level <= module_filter.level_filter, + } + } + false + } + + /// Returns a `LogSpecification` where all traces are switched off. + #[must_use] + pub fn off() -> Self { + #[allow(clippy::default_trait_access)] + Default::default() + } + + /// Returns a log specification from a String. + /// + /// # Errors + /// + /// `FlexiLoggerError::Parse` if the input is malformed. 
+ pub fn parse(spec: &str) -> Result { + let mut parse_errs = String::new(); + let mut dirs = Vec::::new(); + + let mut parts = spec.split('/'); + let mods = parts.next(); + #[cfg(feature = "textfilter")] + let filter = parts.next(); + if parts.next().is_some() { + push_err( + &format!("invalid log spec '{}' (too many '/'s), ignoring it", spec), + &mut parse_errs, + ); + return parse_err(parse_errs, Self::off()); + } + if let Some(m) = mods { + for s in m.split(',') { + let s = s.trim(); + if s.is_empty() { + continue; + } + let mut parts = s.split('='); + let (log_level, name) = match ( + parts.next().map(str::trim), + parts.next().map(str::trim), + parts.next(), + ) { + (Some(part_0), None, None) => { + if contains_whitespace(part_0, &mut parse_errs) { + continue; + } + // if the single argument is a log-level string or number, + // treat that as a global fallback setting + match parse_level_filter(part_0.trim()) { + Ok(num) => (num, None), + Err(_) => (LevelFilter::max(), Some(part_0)), + } + } + + (Some(part_0), Some(""), None) => { + if contains_whitespace(part_0, &mut parse_errs) { + continue; + } + (LevelFilter::max(), Some(part_0)) + } + + (Some(part_0), Some(part_1), None) => { + if contains_whitespace(part_0, &mut parse_errs) { + continue; + } + match parse_level_filter(part_1.trim()) { + Ok(num) => (num, Some(part_0.trim())), + Err(e) => { + push_err(&e.to_string(), &mut parse_errs); + continue; + } + } + } + _ => { + push_err( + &format!("invalid part in log spec '{}', ignoring it", s), + &mut parse_errs, + ); + continue; + } + }; + dirs.push(ModuleFilter { + module_name: name.map(ToString::to_string), + level_filter: log_level, + }); + } + } + + #[cfg(feature = "textfilter")] + let textfilter = filter.and_then(|filter| match Regex::new(filter) { + Ok(re) => Some(re), + Err(e) => { + push_err(&format!("invalid regex filter - {}", e), &mut parse_errs); + None + } + }); + + let logspec = Self { + module_filters: dirs.level_sort(), + #[cfg(feature = "textfilter")] + textfilter, + }; + + if parse_errs.is_empty() { + Ok(logspec) + } else { + parse_err(parse_errs, logspec) + } + } + + /// Returns a log specification based on the value of the environment variable `RUST_LOG`, + /// or an empty one. + /// + /// # Errors + /// + /// `FlexiLoggerError::Parse` if the input is malformed. + pub fn env() -> Result { + match env::var("RUST_LOG") { + Ok(spec) => Self::parse(&spec), + Err(..) => Ok(Self::off()), + } + } + + /// Returns a log specification based on the value of the environment variable `RUST_LOG`, + /// if it exists and can be parsed, or on the given String. + /// + /// # Errors + /// + /// `FlexiLoggerError::Parse` if the given spec is malformed. + pub fn env_or_parse>(given_spec: S) -> Result { + env::var("RUST_LOG") + .map_err(|_e| FlexiLoggerError::Poison /*wrong, but only dummy*/) + .and_then(|value| Self::parse(&value)) + .or_else(|_| Self::parse(given_spec.as_ref())) + } + + /// Reads a log specification from an appropriate toml document. + /// + /// This method is only avaible with feature `specfile`. + /// + /// # Errors + /// + /// `FlexiLoggerError::Parse` if the input is malformed. 
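+    ///
+    /// A minimal sketch of such a document (the same format that `to_toml` writes):
+    /// ```toml
+    /// global_level = 'info'
+    ///
+    /// [modules]
+    /// 'mod1::mod2' = 'debug'
+    /// 'mod3' = 'trace'
+    /// ```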
+ #[cfg(feature = "specfile_without_notification")] + pub fn from_toml(s: &str) -> Result { + #[derive(Clone, Debug, serde_derive::Deserialize)] + struct LogSpecFileFormat { + pub global_level: Option, + pub global_pattern: Option, + pub modules: Option>, + } + + let logspec_ff: LogSpecFileFormat = toml::from_str(s)?; + let mut parse_errs = String::new(); + let mut module_filters = Vec::::new(); + + if let Some(s) = logspec_ff.global_level { + module_filters.push(ModuleFilter { + module_name: None, + level_filter: parse_level_filter(s)?, + }); + } + + for (k, v) in logspec_ff.modules.unwrap_or_default() { + module_filters.push(ModuleFilter { + module_name: Some(k), + level_filter: parse_level_filter(v)?, + }); + } + + #[cfg(feature = "textfilter")] + let textfilter = match logspec_ff.global_pattern { + None => None, + Some(s) => match Regex::new(&s) { + Ok(re) => Some(re), + Err(e) => { + push_err(&format!("invalid regex filter - {}", e), &mut parse_errs); + None + } + }, + }; + + let logspec = Self { + module_filters: module_filters.level_sort(), + #[cfg(feature = "textfilter")] + textfilter, + }; + if parse_errs.is_empty() { + Ok(logspec) + } else { + parse_err(parse_errs, logspec) + } + } + + /// Serializes itself in toml format. + /// + /// This method is only avaible with feature `specfile`. + /// + /// # Errors + /// + /// `FlexiLoggerError::Io` if writing fails. + #[cfg(feature = "specfile_without_notification")] + pub fn to_toml(&self, w: &mut dyn std::io::Write) -> Result<(), FlexiLoggerError> { + w.write_all(b"### Optional: Default log level\n")?; + let last = self.module_filters.last(); + if last.is_some() && last.as_ref().unwrap().module_name.is_none() { + w.write_all( + format!( + "global_level = '{}'\n", + last.as_ref() + .unwrap() + .level_filter + .to_string() + .to_lowercase() + ) + .as_bytes(), + )?; + } else { + w.write_all(b"#global_level = 'info'\n")?; + } + + w.write_all( + b"\n### Optional: specify a regular expression to suppress all messages that don't match\n", + )?; + w.write_all(b"#global_pattern = 'foo'\n")?; + + w.write_all( + b"\n### Specific log levels per module are optionally defined in this section\n", + )?; + w.write_all(b"[modules]\n")?; + if self.module_filters.is_empty() || self.module_filters[0].module_name.is_none() { + w.write_all(b"#'mod1' = 'warn'\n")?; + w.write_all(b"#'mod2' = 'debug'\n")?; + w.write_all(b"#'mod2::mod3' = 'trace'\n")?; + } + for mf in &self.module_filters { + if mf.module_name.is_some() { + w.write_all( + format!( + "'{}' = '{}'\n", + mf.module_name.as_ref().unwrap(), + mf.level_filter.to_string().to_lowercase() + ) + .as_bytes(), + )?; + } + } + Ok(()) + } + + /// Creates a `LogSpecBuilder`, setting the default log level. + #[must_use] + pub fn default(level_filter: LevelFilter) -> LogSpecBuilder { + LogSpecBuilder::from_module_filters(&[ModuleFilter { + module_name: None, + level_filter, + }]) + } + + /// Provides a reference to the module filters. + pub fn module_filters(&self) -> &Vec { + &self.module_filters + } + + /// Provides a reference to the text filter. + /// + /// This method is only avaible with feature `textfilter`, which is a default feature. 
+ #[cfg(feature = "textfilter")] + pub fn text_filter(&self) -> &Option { + &(self.textfilter) + } +} + +fn push_err(s: &str, parse_errs: &mut String) { + if !parse_errs.is_empty() { + parse_errs.push_str("; "); + } + parse_errs.push_str(s); +} + +fn parse_err( + errors: String, + logspec: LogSpecification, +) -> Result { + Err(FlexiLoggerError::Parse(errors, logspec)) +} + +fn parse_level_filter>(s: S) -> Result { + match s.as_ref().to_lowercase().as_ref() { + "off" => Ok(LevelFilter::Off), + "error" => Ok(LevelFilter::Error), + "warn" => Ok(LevelFilter::Warn), + "info" => Ok(LevelFilter::Info), + "debug" => Ok(LevelFilter::Debug), + "trace" => Ok(LevelFilter::Trace), + _ => Err(FlexiLoggerError::LevelFilter(format!( + "unknown level filter: {}", + s.as_ref() + ))), + } +} + +fn contains_whitespace(s: &str, parse_errs: &mut String) -> bool { + let result = s.chars().any(char::is_whitespace); + if result { + push_err( + &format!( + "ignoring invalid part in log spec '{}' (contains a whitespace)", + s + ), + parse_errs, + ); + } + result +} + +#[allow(clippy::needless_doctest_main)] +/// Builder for `LogSpecification`. +/// +/// # Example +/// +/// Use the reconfigurability feature and build the log spec programmatically. +/// +/// ```rust +/// use flexi_logger::{Logger, LogSpecBuilder}; +/// use log::LevelFilter; +/// +/// fn main() { +/// // Build the initial log specification +/// let mut builder = LogSpecBuilder::new(); // default is LevelFilter::Off +/// builder.default(LevelFilter::Info); +/// builder.module("karl", LevelFilter::Debug); +/// +/// // Initialize Logger, keep builder alive +/// let mut logger_reconf_handle = Logger::with(builder.build()) +/// // your logger configuration goes here, as usual +/// .start() +/// .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); +/// +/// // ... +/// +/// // Modify builder and update the logger +/// builder.default(LevelFilter::Error); +/// builder.remove("karl"); +/// builder.module("emma", LevelFilter::Trace); +/// +/// logger_reconf_handle.set_new_spec(builder.build()); +/// +/// // ... +/// } +/// ``` +#[derive(Clone, Debug, Default)] +pub struct LogSpecBuilder { + module_filters: HashMap, LevelFilter>, +} + +impl LogSpecBuilder { + /// Creates a `LogSpecBuilder` with all logging turned off. + #[must_use] + pub fn new() -> Self { + let mut modfilmap = HashMap::new(); + modfilmap.insert(None, LevelFilter::Off); + Self { + module_filters: modfilmap, + } + } + + /// Creates a `LogSpecBuilder` from given module filters. + #[must_use] + pub fn from_module_filters(module_filters: &[ModuleFilter]) -> Self { + let mut modfilmap = HashMap::new(); + for mf in module_filters { + modfilmap.insert(mf.module_name.clone(), mf.level_filter); + } + Self { + module_filters: modfilmap, + } + } + + /// Adds a default log level filter, or updates the default log level filter. + pub fn default(&mut self, lf: LevelFilter) -> &mut Self { + self.module_filters.insert(None, lf); + self + } + + /// Adds a log level filter, or updates the log level filter, for a module. + pub fn module>(&mut self, module_name: M, lf: LevelFilter) -> &mut Self { + self.module_filters + .insert(Some(module_name.as_ref().to_owned()), lf); + self + } + + /// Adds a log level filter, or updates the log level filter, for a module. + pub fn remove>(&mut self, module_name: M) -> &mut Self { + self.module_filters + .remove(&Some(module_name.as_ref().to_owned())); + self + } + + /// Adds log level filters from a `LogSpecification`. 
+    pub fn insert_modules_from(&mut self, other: LogSpecification) -> &mut Self {
+        for module_filter in other.module_filters {
+            self.module_filters
+                .insert(module_filter.module_name, module_filter.level_filter);
+        }
+        self
+    }
+
+    /// Creates a log specification without text filter.
+    #[must_use]
+    pub fn finalize(self) -> LogSpecification {
+        LogSpecification {
+            module_filters: self.module_filters.into_vec_module_filter(),
+            #[cfg(feature = "textfilter")]
+            textfilter: None,
+        }
+    }
+
+    /// Creates a log specification with text filter.
+    ///
+    /// This method is only available with feature `textfilter`, which is a default feature.
+    #[cfg(feature = "textfilter")]
+    pub fn finalize_with_textfilter(self, tf: Regex) -> LogSpecification {
+        LogSpecification {
+            module_filters: self.module_filters.into_vec_module_filter(),
+            textfilter: Some(tf),
+        }
+    }
+
+    /// Creates a log specification without being consumed.
+    #[must_use]
+    pub fn build(&self) -> LogSpecification {
+        LogSpecification {
+            module_filters: self.module_filters.clone().into_vec_module_filter(),
+            #[cfg(feature = "textfilter")]
+            textfilter: None,
+        }
+    }
+
+    /// Creates a log specification without being consumed, optionally with a text filter.
+    ///
+    /// This method is only available with feature `textfilter`, which is a default feature.
+    #[cfg(feature = "textfilter")]
+    pub fn build_with_textfilter(&self, tf: Option<Regex>) -> LogSpecification {
+        LogSpecification {
+            module_filters: self.module_filters.clone().into_vec_module_filter(),
+            textfilter: tf,
+        }
+    }
+}
+
+trait IntoVecModuleFilter {
+    fn into_vec_module_filter(self) -> Vec<ModuleFilter>;
+}
+impl IntoVecModuleFilter for HashMap<Option<String>, LevelFilter> {
+    fn into_vec_module_filter(self) -> Vec<ModuleFilter> {
+        let mf: Vec<ModuleFilter> = self
+            .into_iter()
+            .map(|(k, v)| ModuleFilter {
+                module_name: k,
+                level_filter: v,
+            })
+            .collect();
+        mf.level_sort()
+    }
+}
+
+trait LevelSort {
+    fn level_sort(self) -> Vec<ModuleFilter>;
+}
+impl LevelSort for Vec<ModuleFilter> {
+    /// Sort the module filters by length of their name,
+    /// this allows a little more efficient lookup at runtime.
+ fn level_sort(mut self) -> Vec { + self.sort_by(|a, b| { + let a_len = a.module_name.as_ref().map_or(0, String::len); + let b_len = b.module_name.as_ref().map_or(0, String::len); + b_len.cmp(&a_len) + }); + self + } +} + +#[cfg(test)] +mod tests { + use crate::LogSpecification; + use log::{Level, LevelFilter}; + + #[test] + fn parse_logging_spec_valid() { + let spec = LogSpecification::parse("crate1::mod1=error,crate1::mod2,crate2=debug").unwrap(); + assert_eq!(spec.module_filters().len(), 3); + assert_eq!( + spec.module_filters()[0].module_name, + Some("crate1::mod1".to_string()) + ); + assert_eq!(spec.module_filters()[0].level_filter, LevelFilter::Error); + + assert_eq!( + spec.module_filters()[1].module_name, + Some("crate1::mod2".to_string()) + ); + assert_eq!(spec.module_filters()[1].level_filter, LevelFilter::max()); + + assert_eq!( + spec.module_filters()[2].module_name, + Some("crate2".to_string()) + ); + assert_eq!(spec.module_filters()[2].level_filter, LevelFilter::Debug); + + #[cfg(feature = "textfilter")] + assert!(spec.text_filter().is_none()); + } + + #[test] + fn parse_logging_spec_invalid_crate() { + // test parse_logging_spec with multiple = in specification + assert!(LogSpecification::parse("crate1::mod1=warn=info,crate2=debug").is_err()); + } + + #[test] + fn parse_logging_spec_wrong_log_level() { + assert!(LogSpecification::parse("crate1::mod1=wrong, crate2=warn").is_err()); + } + + #[test] + fn parse_logging_spec_empty_log_level() { + assert!(LogSpecification::parse("crate1::mod1=wrong, crate2=").is_err()); + } + + #[test] + fn parse_logging_spec_global() { + let spec = LogSpecification::parse("warn,crate2=debug").unwrap(); + assert_eq!(spec.module_filters().len(), 2); + + assert_eq!(spec.module_filters()[1].module_name, None); + assert_eq!(spec.module_filters()[1].level_filter, LevelFilter::Warn); + + assert_eq!( + spec.module_filters()[0].module_name, + Some("crate2".to_string()) + ); + assert_eq!(spec.module_filters()[0].level_filter, LevelFilter::Debug); + + #[cfg(feature = "textfilter")] + assert!(spec.text_filter().is_none()); + } + + #[test] + #[cfg(feature = "textfilter")] + fn parse_logging_spec_valid_filter() { + let spec = LogSpecification::parse(" crate1::mod1 = error , crate1::mod2,crate2=debug/abc") + .unwrap(); + assert_eq!(spec.module_filters().len(), 3); + + assert_eq!( + spec.module_filters()[0].module_name, + Some("crate1::mod1".to_string()) + ); + assert_eq!(spec.module_filters()[0].level_filter, LevelFilter::Error); + + assert_eq!( + spec.module_filters()[1].module_name, + Some("crate1::mod2".to_string()) + ); + assert_eq!(spec.module_filters()[1].level_filter, LevelFilter::max()); + + assert_eq!( + spec.module_filters()[2].module_name, + Some("crate2".to_string()) + ); + assert_eq!(spec.module_filters()[2].level_filter, LevelFilter::Debug); + assert!( + spec.text_filter().is_some() + && spec.text_filter().as_ref().unwrap().to_string() == "abc" + ); + } + + #[test] + fn parse_logging_spec_invalid_crate_filter() { + assert!(LogSpecification::parse("crate1::mod1=error=warn,crate2=debug/a.c").is_err()); + } + + #[test] + #[cfg(feature = "textfilter")] + fn parse_logging_spec_empty_with_filter() { + let spec = LogSpecification::parse("crate1/a*c").unwrap(); + assert_eq!(spec.module_filters().len(), 1); + assert_eq!( + spec.module_filters()[0].module_name, + Some("crate1".to_string()) + ); + assert_eq!(spec.module_filters()[0].level_filter, LevelFilter::max()); + assert!( + spec.text_filter().is_some() + && 
spec.text_filter().as_ref().unwrap().to_string() == "a*c" + ); + } + + #[test] + fn reuse_logspec_builder() { + let mut builder = crate::LogSpecBuilder::new(); + + builder.default(LevelFilter::Info); + builder.module("carlo", LevelFilter::Debug); + builder.module("toni", LevelFilter::Warn); + let spec1 = builder.build(); + + assert_eq!( + spec1.module_filters()[0].module_name, + Some("carlo".to_string()) + ); + assert_eq!(spec1.module_filters()[0].level_filter, LevelFilter::Debug); + + assert_eq!( + spec1.module_filters()[1].module_name, + Some("toni".to_string()) + ); + assert_eq!(spec1.module_filters()[1].level_filter, LevelFilter::Warn); + + assert_eq!(spec1.module_filters().len(), 3); + assert_eq!(spec1.module_filters()[2].module_name, None); + assert_eq!(spec1.module_filters()[2].level_filter, LevelFilter::Info); + + builder.default(LevelFilter::Error); + builder.remove("carlo"); + builder.module("greta", LevelFilter::Trace); + let spec2 = builder.build(); + + assert_eq!(spec2.module_filters().len(), 3); + assert_eq!(spec2.module_filters()[2].module_name, None); + assert_eq!(spec2.module_filters()[2].level_filter, LevelFilter::Error); + + assert_eq!( + spec2.module_filters()[0].module_name, + Some("greta".to_string()) + ); + assert_eq!(spec2.module_filters()[0].level_filter, LevelFilter::Trace); + + assert_eq!( + spec2.module_filters()[1].module_name, + Some("toni".to_string()) + ); + assert_eq!(spec2.module_filters()[1].level_filter, LevelFilter::Warn); + } + + /////////////////////////////////////////////////////// + /////////////////////////////////////////////////////// + #[test] + fn match_full_path() { + let spec = LogSpecification::parse("crate2=info,crate1::mod1=warn").unwrap(); + assert!(spec.enabled(Level::Warn, "crate1::mod1")); + assert!(!spec.enabled(Level::Info, "crate1::mod1")); + assert!(spec.enabled(Level::Info, "crate2")); + assert!(!spec.enabled(Level::Debug, "crate2")); + } + + #[test] + fn no_match() { + let spec = LogSpecification::parse("crate2=info,crate1::mod1=warn").unwrap(); + assert!(!spec.enabled(Level::Warn, "crate3")); + } + + #[test] + fn match_beginning() { + let spec = LogSpecification::parse("crate2=info,crate1::mod1=warn").unwrap(); + assert!(spec.enabled(Level::Info, "crate2::mod1")); + } + + #[test] + fn match_beginning_longest_match() { + let spec = LogSpecification::parse( + "abcd = info, abcd::mod1 = error, klmn::mod = debug, klmn = info", + ) + .unwrap(); + assert!(spec.enabled(Level::Error, "abcd::mod1::foo")); + assert!(!spec.enabled(Level::Warn, "abcd::mod1::foo")); + assert!(spec.enabled(Level::Warn, "abcd::mod2::foo")); + assert!(!spec.enabled(Level::Debug, "abcd::mod2::foo")); + + assert!(!spec.enabled(Level::Debug, "klmn")); + assert!(!spec.enabled(Level::Debug, "klmn::foo::bar")); + assert!(spec.enabled(Level::Info, "klmn::foo::bar")); + } + + #[test] + fn match_default1() { + let spec = LogSpecification::parse("info,abcd::mod1=warn").unwrap(); + assert!(spec.enabled(Level::Warn, "abcd::mod1")); + assert!(spec.enabled(Level::Info, "crate2::mod2")); + } + + #[test] + fn match_default2() { + let spec = LogSpecification::parse("modxyz=error, info, abcd::mod1=warn").unwrap(); + assert!(spec.enabled(Level::Warn, "abcd::mod1")); + assert!(spec.enabled(Level::Info, "crate2::mod2")); + } + + #[test] + fn rocket() { + let spec = LogSpecification::parse("info, rocket=off, serenity=off").unwrap(); + assert!(spec.enabled(Level::Info, "itsme")); + assert!(spec.enabled(Level::Warn, "abcd::mod1")); + assert!(!spec.enabled(Level::Debug, 
"abcd::mod1")); + assert!(!spec.enabled(Level::Error, "rocket::rocket")); + assert!(!spec.enabled(Level::Warn, "rocket::rocket")); + assert!(!spec.enabled(Level::Info, "rocket::rocket")); + } + + #[test] + fn add_filters() { + let mut builder = crate::LogSpecBuilder::new(); + + builder.default(LevelFilter::Debug); + builder.module("carlo", LevelFilter::Debug); + builder.module("toni", LevelFilter::Warn); + + builder.insert_modules_from( + LogSpecification::parse("info, may=error, toni::heart = trace").unwrap(), + ); + let spec = builder.build(); + + assert_eq!(spec.module_filters().len(), 5); + + assert_eq!( + spec.module_filters()[0].module_name, + Some("toni::heart".to_string()) + ); + assert_eq!(spec.module_filters()[0].level_filter, LevelFilter::Trace); + + assert_eq!( + spec.module_filters()[1].module_name, + Some("carlo".to_string()) + ); + assert_eq!(spec.module_filters()[1].level_filter, LevelFilter::Debug); + + assert_eq!( + spec.module_filters()[2].module_name, + Some("toni".to_string()) + ); + assert_eq!(spec.module_filters()[2].level_filter, LevelFilter::Warn); + + assert_eq!( + spec.module_filters()[3].module_name, + Some("may".to_string()) + ); + assert_eq!(spec.module_filters()[3].level_filter, LevelFilter::Error); + + assert_eq!(spec.module_filters()[4].module_name, None); + assert_eq!(spec.module_filters()[4].level_filter, LevelFilter::Info); + } + + #[test] + fn zero_level() { + let spec = LogSpecification::parse("info,crate1::mod1=off").unwrap(); + assert!(!spec.enabled(Level::Error, "crate1::mod1")); + assert!(spec.enabled(Level::Info, "crate2::mod2")); + } +} + +#[cfg(test)] +#[cfg(feature = "specfile_without_notification")] +mod test_with_specfile { + #[cfg(feature = "specfile_without_notification")] + use crate::LogSpecification; + + #[test] + fn specfile() { + compare_specs("", ""); + + compare_specs( + "[modules]\n\ + ", + "", + ); + + compare_specs( + "global_level = 'info'\n\ + \n\ + [modules]\n\ + ", + "info", + ); + + compare_specs( + "global_level = 'info'\n\ + \n\ + [modules]\n\ + 'mod1::mod2' = 'debug'\n\ + 'mod3' = 'trace'\n\ + ", + "info, mod1::mod2 = debug, mod3 = trace", + ); + + compare_specs( + "global_level = 'info'\n\ + global_pattern = 'Foo'\n\ + \n\ + [modules]\n\ + 'mod1::mod2' = 'debug'\n\ + 'mod3' = 'trace'\n\ + ", + "info, mod1::mod2 = debug, mod3 = trace /Foo", + ); + } + + #[cfg(feature = "specfile_without_notification")] + fn compare_specs(toml: &str, spec_string: &str) { + let ls_toml = LogSpecification::from_toml(toml).unwrap(); + let ls_spec = LogSpecification::parse(spec_string).unwrap(); + + assert_eq!(ls_toml.module_filters, ls_spec.module_filters); + assert_eq!(ls_toml.textfilter.is_none(), ls_spec.textfilter.is_none()); + if ls_toml.textfilter.is_some() && ls_spec.textfilter.is_some() { + assert_eq!( + ls_toml.textfilter.unwrap().to_string(), + ls_spec.textfilter.unwrap().to_string() + ); + } + } +} diff --git a/agent/support/rust/flexi_logger/src/logger.rs b/agent/support/rust/flexi_logger/src/logger.rs new file mode 100644 index 000000000..4ed85a2e2 --- /dev/null +++ b/agent/support/rust/flexi_logger/src/logger.rs @@ -0,0 +1,936 @@ +use crate::flexi_logger::FlexiLogger; +use crate::formats::default_format; +#[cfg(feature = "atty")] +use crate::formats::{AdaptiveFormat, Stream}; +use crate::primary_writer::PrimaryWriter; +use crate::writers::{FileLogWriter, FileLogWriterBuilder, LogWriter}; +use crate::{ + Cleanup, Criterion, FlexiLoggerError, FormatFunction, LogSpecification, Naming, + ReconfigurationHandle, +}; + +#[cfg(feature = 
"specfile")] +use notify::{watcher, DebouncedEvent, RecursiveMode, Watcher}; +use std::collections::HashMap; +#[cfg(feature = "specfile_without_notification")] +use std::io::Read; +use std::path::PathBuf; +use std::sync::{Arc, RwLock}; + +/// The entry-point for using `flexi_logger`. +/// +/// A simple example with file logging might look like this: +/// +/// ```rust +/// use flexi_logger::{Duplicate,Logger}; +/// +/// Logger::with_str("info, mycrate = debug") +/// .log_to_file() +/// .duplicate_to_stderr(Duplicate::Warn) +/// .start() +/// .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); +/// +/// ``` +/// +/// +/// `Logger` is a builder class that allows you to +/// * specify your desired (initial) loglevel-specification +/// * either programmatically as a String +/// ([`Logger::with_str()`](struct.Logger.html#method.with_str)) +/// * or by providing a String in the environment +/// ([`Logger::with_env()`](struct.Logger.html#method.with_env)), +/// * or by combining both options +/// ([`Logger::with_env_or_str()`](struct.Logger.html#method.with_env_or_str)), +/// * or by building a `LogSpecification` programmatically +/// ([`Logger::with()`](struct.Logger.html#method.with)), +/// * use the desired configuration methods, +/// * and finally start the logger with +/// +/// * [`start()`](struct.Logger.html#method.start), +/// * or [`start_with_specfile()`](struct.Logger.html#method.start_with_specfile). +pub struct Logger { + spec: LogSpecification, + parse_errs: Option, + log_target: LogTarget, + duplicate_err: Duplicate, + duplicate_out: Duplicate, + format_for_file: FormatFunction, + format_for_stderr: FormatFunction, + format_for_stdout: FormatFunction, + format_for_writer: FormatFunction, + #[cfg(feature = "colors")] + o_palette: Option, + flwb: FileLogWriterBuilder, + other_writers: HashMap>, +} + +/// Describes the default log target. +/// +/// All log messages, in which no target is explicitly defined, will be written to +/// the default log target. +/// +/// See the [writers](writers/index.html) module for +/// how to specify non-default log targets in log macro calls, +/// and the usage of non-default log writers. +pub enum LogTarget { + /// Log is written to stderr. + /// + /// This is the default behavior of `flexi_logger`. + StdErr, + /// Log is written to stdout. + StdOut, + /// Log is written to a file. + /// + /// The default pattern for the filename is '\\_\\_\.\', + /// e.g. `myprog_2015-07-08_10-44-11.log`. + File, + /// Log is written to an alternative `LogWriter` implementation. + /// + Writer(Box), + /// Log is written to a file, as with `LogTarget::File`, _and_ to an alternative + /// `LogWriter` implementation. + FileAndWriter(Box), + /// Log is processed, including duplication, but not written to a primary target destination. + /// + /// This can be useful e.g. for running application tests with all log-levels active and still + /// avoiding tons of log files etc. + /// Such tests ensure that the log calls which are normally not active + /// will not cause undesired side-effects when activated + /// (note that the log macros may prevent arguments of inactive log-calls from being evaluated). + /// + /// Combined with + /// [`duplicate_to_stdout()`](struct.Logger.html#method.duplicate_to_stdout) + /// and + /// [`duplicate_to_stderr()`](struct.Logger.html#method.duplicate_to_stderr) + /// it can also be used if you want to get logs both to stdout and stderr, but not to a file. 
+ DevNull, +} + +/// Create a Logger instance and define how to access the (initial) +/// loglevel-specification. +impl Logger { + /// Creates a Logger that you provide with an explicit `LogSpecification`. + /// By default, logs are written with `default_format` to `stderr`. + #[must_use] + pub fn with(logspec: LogSpecification) -> Self { + Self::from_spec_and_errs(logspec, None) + } + + /// Creates a Logger that reads the `LogSpecification` from a String or &str. + /// [See `LogSpecification`](struct.LogSpecification.html) for the syntax. + #[must_use] + pub fn with_str>(s: S) -> Self { + Self::from_result(LogSpecification::parse(s.as_ref())) + } + + /// Creates a Logger that reads the `LogSpecification` from the environment variable `RUST_LOG`. + #[must_use] + pub fn with_env() -> Self { + Self::from_result(LogSpecification::env()) + } + + /// Creates a Logger that reads the `LogSpecification` from the environment variable `RUST_LOG`, + /// or derives it from the given String, if `RUST_LOG` is not set. + #[must_use] + pub fn with_env_or_str>(s: S) -> Self { + Self::from_result(LogSpecification::env_or_parse(s)) + } + + fn from_result(result: Result) -> Self { + match result { + Ok(logspec) => Self::from_spec_and_errs(logspec, None), + Err(e) => match e { + FlexiLoggerError::Parse(parse_errs, logspec) => { + Self::from_spec_and_errs(logspec, Some(parse_errs)) + } + _ => Self::from_spec_and_errs(LogSpecification::off(), None), + }, + } + } + + fn from_spec_and_errs(spec: LogSpecification, parse_errs: Option) -> Self { + #[cfg(feature = "colors")] + { + // Enable ASCII escape sequence support on Windows consoles, + // but disable coloring on unsupported Windows consoles + if cfg!(windows) && !yansi::Paint::enable_windows_ascii() { + yansi::Paint::disable(); + } + } + + Self { + spec, + parse_errs, + log_target: LogTarget::StdErr, + duplicate_err: Duplicate::None, + duplicate_out: Duplicate::None, + format_for_file: default_format, + #[cfg(feature = "colors")] + format_for_stdout: AdaptiveFormat::Default.format_function(Stream::StdOut), + #[cfg(feature = "colors")] + format_for_stderr: AdaptiveFormat::Default.format_function(Stream::StdErr), + + #[cfg(not(feature = "colors"))] + format_for_stdout: default_format, + #[cfg(not(feature = "colors"))] + format_for_stderr: default_format, + + format_for_writer: default_format, + #[cfg(feature = "colors")] + o_palette: None, + flwb: FileLogWriter::builder(), + other_writers: HashMap::>::new(), + } + } +} + +/// Simple methods for influencing the behavior of the Logger. +impl Logger { + /// Allows verifying that no parsing errors have occured in the used factory method, + /// and examining the parse error. + /// + /// Most of the factory methods for Logger (`Logger::with_...()`) + /// parse a log specification String, and deduce from it a `LogSpecification` object. + /// If parsing fails, errors are reported to stdout, but effectively ignored. + /// In worst case, nothing is logged! + /// + /// This method gives programmatic access to parse errors, if there were any, so that errors + /// don't happen unnoticed. 
+ /// + /// In the following example we just panic if the spec was not free of errors: + /// + /// ```should_panic + /// # use flexi_logger::{Logger,LogTarget}; + /// Logger::with_str("hello world") + /// .check_parser_error() + /// .unwrap() // <-- here we could do better than panic + /// .log_target(LogTarget::File) + /// .start(); + /// ``` + /// + /// # Errors + /// + /// `FlexiLoggerError::Parse` if the input for the log specification is malformed. + pub fn check_parser_error(self) -> Result { + match self.parse_errs { + Some(parse_errs) => Err(FlexiLoggerError::Parse(parse_errs, self.spec)), + None => Ok(self), + } + } + + /// Is equivalent to + /// [`log_target`](struct.Logger.html#method.log_target)`(`[`LogTarget::File`]( + /// enum.LogTarget.html#variant.File)`)`. + pub fn log_to_file(self) -> Self { + self.log_target(LogTarget::File) + } + + /// Write the main log output to the specified target. + /// + /// By default, i.e. if this method is not called, the log target `LogTarget::StdErr` is used. + pub fn log_target(mut self, log_target: LogTarget) -> Self { + self.log_target = log_target; + self + } + + /// Makes the logger print an info message to stdout with the name of the logfile + /// when a logfile is opened for writing. + pub fn print_message(mut self) -> Self { + self.flwb = self.flwb.print_message(); + self + } + + /// Makes the logger write messages with the specified minimum severity additionally to stderr. + /// + /// Works with all log targets except `StdErr` and `StdOut`. + pub fn duplicate_to_stderr(mut self, dup: Duplicate) -> Self { + self.duplicate_err = dup; + self + } + + /// Makes the logger write messages with the specified minimum severity additionally to stdout. + /// + /// Works with all log targets except `StdErr` and `StdOut`. + pub fn duplicate_to_stdout(mut self, dup: Duplicate) -> Self { + self.duplicate_out = dup; + self + } + + /// Makes the logger use the provided format function for all messages + /// that are written to files, stderr, stdout, or to an additional writer. + /// + /// You can either choose one of the provided log-line formatters, + /// or you create and use your own format function with the signature
+ /// ```rust,ignore + /// fn( + /// write: &mut dyn std::io::Write, + /// now: &mut DeferredNow, + /// record: &Record, + /// ) -> Result<(), std::io::Error> + /// ``` + /// + /// By default, + /// [`default_format()`](fn.default_format.html) is used for output to files + /// and to custom writers, and [`AdaptiveFormat::Default`](enum.AdaptiveFormat.html#variant.Default) + /// is used for output to `stderr` and `stdout`. + /// + /// If the feature `colors` is switched off, + /// `default_format()` is used for all outputs. + pub fn format(mut self, format: FormatFunction) -> Self { + self.format_for_file = format; + self.format_for_stderr = format; + self.format_for_stdout = format; + self.format_for_writer = format; + self + } + + /// Makes the logger use the provided format function for messages + /// that are written to files. + /// + /// Regarding the default, see [`Logger::format`](struct.Logger.html#method.format). + pub fn format_for_files(mut self, format: FormatFunction) -> Self { + self.format_for_file = format; + self + } + + /// Makes the logger use the specified format for messages that are written to `stderr`. + /// Coloring is used if `stderr` is a tty. + /// + /// Regarding the default, see [`Logger::format`](struct.Logger.html#method.format). + /// + /// Only available with feature `colors`. + #[cfg(feature = "atty")] + pub fn adaptive_format_for_stderr(mut self, adaptive_format: AdaptiveFormat) -> Self { + self.format_for_stderr = adaptive_format.format_function(Stream::StdErr); + self + } + + /// Makes the logger use the specified format for messages that are written to `stdout`. + /// Coloring is used if `stdout` is a tty. + /// + /// Regarding the default, see [`Logger::format`](struct.Logger.html#method.format). + /// + /// Only available with feature `colors`. + #[cfg(feature = "atty")] + pub fn adaptive_format_for_stdout(mut self, adaptive_format: AdaptiveFormat) -> Self { + self.format_for_stdout = adaptive_format.format_function(Stream::StdOut); + self + } + + /// Makes the logger use the provided format function for messages + /// that are written to stderr. + /// + /// Regarding the default, see [`Logger::format`](struct.Logger.html#method.format). + pub fn format_for_stderr(mut self, format: FormatFunction) -> Self { + self.format_for_stderr = format; + self + } + + /// Makes the logger use the provided format function to format messages + /// that are written to stdout. + /// + /// Regarding the default, see [`Logger::format`](struct.Logger.html#method.format). + pub fn format_for_stdout(mut self, format: FormatFunction) -> Self { + self.format_for_stdout = format; + self + } + + /// Allows specifying a format function for an additional writer. + /// Note that it is up to the implementation of the additional writer + /// whether it evaluates this setting or not. + /// + /// Regarding the default, see [`Logger::format`](struct.Logger.html#method.format). + pub fn format_for_writer(mut self, format: FormatFunction) -> Self { + self.format_for_writer = format; + self + } + + /// Sets the color palette for function [`style`](fn.style.html), which is used in the + /// provided coloring format functions. + /// + /// The palette given here overrides the default palette. + /// + /// The palette is specified in form of a String that contains a semicolon-separated list + /// of numbers (0..=255) and/or dashes (´-´). + /// The first five values denote the fixed color that is + /// used for coloring `error`, `warn`, `info`, `debug`, and `trace` messages. 
+ /// + /// The String `"196;208;-;7;8"` describes the default palette, where color 196 is + /// used for error messages, and so on. The `-` means that no coloring is done, + /// i.e., with `"-;-;-;-;-"` all coloring is switched off. + /// + /// The palette can further be overridden at runtime by setting the environment variable + /// `FLEXI_LOGGER_PALETTE` to a palette String. This allows adapting the used text colors to + /// differently colored terminal backgrounds. + /// + /// For your convenience, if you want to specify your own palette, + /// you can produce a colored list with all 255 colors with `cargo run --example colors`. + /// + /// Only available with feature `colors`. + #[cfg(feature = "colors")] + pub fn set_palette(mut self, palette: String) -> Self { + self.o_palette = Some(palette); + self + } + + /// Specifies a folder for the log files. + /// + /// This parameter only has an effect if `log_to_file()` is used, too. + /// The specified folder will be created if it does not exist. + /// By default, the log files are created in the folder where the program was started. + pub fn directory>(mut self, directory: S) -> Self { + self.flwb = self.flwb.directory(directory); + self + } + + /// Specifies a suffix for the log files. + /// + /// This parameter only has an effect if `log_to_file()` is used, too. + pub fn suffix>(mut self, suffix: S) -> Self { + self.flwb = self.flwb.suffix(suffix); + self + } + + /// Makes the logger not include a timestamp into the names of the log files. + /// + /// This option only has an effect if `log_to_file()` is used, too, + /// and is ignored if rotation is used. + pub fn suppress_timestamp(mut self) -> Self { + self.flwb = self.flwb.suppress_timestamp(); + self + } + + /// When rotation is used with some `Cleanup` variant, then this option defines + /// if the cleanup activities (finding files, deleting files, evtl compressing files) is done + /// in the current thread (in the current log-call), or whether cleanup is delegated to a + /// background thread. + /// + /// As of `flexi_logger` version `0.14.7`, + /// the cleanup activities are done by default in a background thread. + /// This minimizes the blocking impact to your application caused by IO operations. + /// + /// In earlier versions of `flexi_logger`, or if you call this method with + /// `use_background_thread = false`, + /// the cleanup is done in the thread that is currently causing a file rotation. + #[must_use] + pub fn cleanup_in_background_thread(mut self, use_background_thread: bool) -> Self { + self.flwb = self + .flwb + .cleanup_in_background_thread(use_background_thread); + self + } + + /// Prevent indefinite growth of the log file by applying file rotation + /// and a clean-up strategy for older log files. + /// + /// By default, the log file is fixed while your program is running and will grow indefinitely. + /// With this option being used, when the log file reaches the specified criterion, + /// the file will be closed and a new file will be opened. + /// + /// Note that also the filename pattern changes: + /// + /// - by default, no timestamp is added to the filename + /// - the logs are always written to a file with infix `_rCURRENT` + /// - when the rotation criterion is fulfilled, it is closed and renamed to a file + /// with another infix (see `Naming`), + /// and then the logging continues again to the (fresh) file with infix `_rCURRENT`. 
+ /// + /// Example: + /// + /// After some logging with your program `my_prog` and rotation with `Naming::Numbers`, + /// you will find files like + /// + /// ```text + /// my_prog_r00000.log + /// my_prog_r00001.log + /// my_prog_r00002.log + /// my_prog_rCURRENT.log + /// ``` + /// + /// ## Parameters + /// + /// `rotate_over_size` is given in bytes, e.g. `10_000_000` will rotate + /// files once they reach a size of 10 MiB. + /// + /// `cleanup` defines the strategy for dealing with older files. + /// See [Cleanup](enum.Cleanup.html) for details. + pub fn rotate(mut self, criterion: Criterion, naming: Naming, cleanup: Cleanup) -> Self { + self.flwb = self.flwb.rotate(criterion, naming, cleanup); + self + } + + /// Makes the logger append to the specified output file, if it exists already; + /// by default, the file would be truncated. + /// + /// This option only has an effect if `log_to_file()` is used, too. + /// This option will hardly make an effect if `suppress_timestamp()` is not used. + pub fn append(mut self) -> Self { + self.flwb = self.flwb.append(); + self + } + + /// The specified String is added to the log file name after the program name. + /// + /// This option only has an effect if `log_to_file()` is used, too. + pub fn discriminant>(mut self, discriminant: S) -> Self { + self.flwb = self.flwb.discriminant(discriminant); + self + } + + /// The specified path will be used on linux systems to create a symbolic link + /// to the current log file. + /// + /// This option has no effect on filesystems where symlinks are not supported, + /// and it only has an effect if `log_to_file()` is used, too. + /// + /// ### Example + /// + /// You can use the symbolic link to follow the log output with `tail`, + /// even if the log files are rotated. + /// + /// Assuming the link has the name `link_to_log_file`, then use: + /// + /// ```text + /// tail --follow=name --max-unchanged-stats=1 --retry link_to_log_file + /// ``` + /// + pub fn create_symlink>(mut self, symlink: P) -> Self { + self.flwb = self.flwb.create_symlink(symlink); + self + } + + /// Registers a `LogWriter` implementation under the given target name. + /// + /// The target name must not start with an underscore. + /// + /// See [the module documentation of `writers`](writers/index.html). + pub fn add_writer>( + mut self, + target_name: S, + writer: Box, + ) -> Self { + self.other_writers.insert(target_name.into(), writer); + self + } + + /// Use this function to set send handler for sending logs to server. + pub fn send_handler(mut self, sender: plugin::Sender) -> Self { + self.flwb = self.flwb.sender(sender); + self + } + /// Set name which will be used for sending logs to server. + pub fn plugin_name(mut self, plugin_name: String) -> Self { + self.flwb = self.flwb.name(plugin_name); + self + } + + /// Use Windows line endings, rather than just `\n`. + pub fn use_windows_line_ending(mut self) -> Self { + self.flwb = self.flwb.use_windows_line_ending(); + self + } +} + +/// Alternative set of methods to control the behavior of the Logger. +/// Use these methods when you want to control the settings flexibly, +/// e.g. with commandline arguments via `docopts` or `clap`. +impl Logger { + /// With true, makes the logger print an info message to stdout, each time + /// when a new file is used for log-output. + pub fn o_print_message(mut self, print_message: bool) -> Self { + self.flwb = self.flwb.o_print_message(print_message); + self + } + + /// Specifies a folder for the log files. 
+    ///
+    /// This parameter only has an effect if `log_to_file` is set to true.
+    /// If the specified folder does not exist, the initialization will fail.
+    /// With None, the log files are created in the folder where the program was started.
+    pub fn o_directory<P: Into<PathBuf>>(mut self, directory: Option<P>) -> Self {
+        self.flwb = self.flwb.o_directory(directory);
+        self
+    }
+
+    /// By default, and with None, the log file will grow indefinitely.
+    /// If a `rotate_config` is set, when the log file reaches or exceeds the specified size,
+    /// the file will be closed and a new file will be opened.
+    /// Also the filename pattern changes: instead of the timestamp, a serial number
+    /// is included into the filename.
+    ///
+    /// The size is given in bytes, e.g. `o_rotate_over_size(Some(1_000))` will rotate
+    /// files once they reach a size of 1 kB.
+    ///
+    /// The cleanup strategy allows delimiting the used space on disk.
+    pub fn o_rotate(mut self, rotate_config: Option<(Criterion, Naming, Cleanup)>) -> Self {
+        self.flwb = self.flwb.o_rotate(rotate_config);
+        self
+    }
+
+    /// With true, makes the logger include a timestamp into the names of the log files.
+    /// `true` is the default, but `rotate_over_size` sets it to `false`.
+    /// With this method you can set it to `true` again.
+    ///
+    /// This parameter only has an effect if `log_to_file` is set to true.
+    pub fn o_timestamp(mut self, timestamp: bool) -> Self {
+        self.flwb = self.flwb.o_timestamp(timestamp);
+        self
+    }
+
+    /// This option only has an effect if `log_to_file` is set to true.
+    ///
+    /// If append is set to true, makes the logger append to the specified output file, if it exists.
+    /// By default, or with false, the file would be truncated.
+    ///
+    /// This option will hardly make an effect if `suppress_timestamp()` is not used.
+    pub fn o_append(mut self, append: bool) -> Self {
+        self.flwb = self.flwb.o_append(append);
+        self
+    }
+
+    /// This option only has an effect if `log_to_file` is set to true.
+    ///
+    /// The specified String is added to the log file name.
+    pub fn o_discriminant<S: Into<String>>(mut self, discriminant: Option<S>) -> Self {
+        self.flwb = self.flwb.o_discriminant(discriminant);
+        self
+    }
+
+    /// This option only has an effect if `log_to_file` is set to true.
+    ///
+    /// If a String is specified, it will be used on linux systems to create in the current folder
+    /// a symbolic link with this name to the current log file.
+    pub fn o_create_symlink<S: Into<PathBuf>>(mut self, symlink: Option<S>
) -> Self { + self.flwb = self.flwb.o_create_symlink(symlink); + self + } +} + +/// Finally, start logging, optionally with a spec-file. +impl Logger { + /// Consumes the Logger object and initializes `flexi_logger`. + /// + /// The returned reconfiguration handle allows updating the log specification programmatically + /// later on, e.g. to intensify logging for (buggy) parts of a (test) program, etc. + /// See [`ReconfigurationHandle`](struct.ReconfigurationHandle.html) for an example. + /// + /// # Errors + /// + /// Several variants of `FlexiLoggerError` can occur. + pub fn start(self) -> Result { + let (boxed_logger, handle) = self.build()?; + log::set_boxed_logger(boxed_logger)?; + Ok(handle) + } + + /// Builds a boxed logger and a `ReconfigurationHandle` for it, + /// but does not initialize the global logger. + /// + /// The returned boxed logger implements the Log trait and can be installed manually + /// or nested within another logger. + /// + /// The reconfiguration handle allows updating the log specification programmatically + /// later on, e.g. to intensify logging for (buggy) parts of a (test) program, etc. + /// See [`ReconfigurationHandle`](struct.ReconfigurationHandle.html) for an example. + /// + /// # Errors + /// + /// Several variants of `FlexiLoggerError` can occur. + pub fn build(mut self) -> Result<(Box, ReconfigurationHandle), FlexiLoggerError> { + let max_level = self.spec.max_level(); + let spec = Arc::new(RwLock::new(self.spec)); + let other_writers = Arc::new(self.other_writers); + + #[cfg(feature = "colors")] + crate::formats::set_palette(&self.o_palette)?; + + let primary_writer = Arc::new(match self.log_target { + LogTarget::File => { + self.flwb = self.flwb.format(self.format_for_file); + PrimaryWriter::multi( + self.duplicate_err, + self.duplicate_out, + self.format_for_stderr, + self.format_for_stdout, + vec![Box::new(self.flwb.try_build()?)], + ) + } + LogTarget::Writer(mut w) => { + w.format(self.format_for_writer); + PrimaryWriter::multi( + self.duplicate_err, + self.duplicate_out, + self.format_for_stderr, + self.format_for_stdout, + vec![w], + ) + } + LogTarget::FileAndWriter(mut w) => { + self.flwb = self.flwb.format(self.format_for_file); + w.format(self.format_for_writer); + PrimaryWriter::multi( + self.duplicate_err, + self.duplicate_out, + self.format_for_stderr, + self.format_for_stdout, + vec![Box::new(self.flwb.try_build()?), w], + ) + } + LogTarget::StdOut => PrimaryWriter::stdout(self.format_for_stdout), + LogTarget::StdErr => PrimaryWriter::stderr(self.format_for_stderr), + LogTarget::DevNull => PrimaryWriter::black_hole( + self.duplicate_err, + self.duplicate_out, + self.format_for_stderr, + self.format_for_stdout, + ), + }); + + let flexi_logger = FlexiLogger::new( + Arc::clone(&spec), + Arc::clone(&primary_writer), + Arc::clone(&other_writers), + ); + + let handle = ReconfigurationHandle::new(spec, primary_writer, other_writers); + handle.reconfigure(max_level); + Ok((Box::new(flexi_logger), handle)) + } + + /// Consumes the Logger object and initializes `flexi_logger` in a way that + /// subsequently the log specification can be updated manually. + /// + /// Uses the spec that was given to the factory method (`Logger::with()` etc) + /// as initial spec and then tries to read the logspec from a file. + /// + /// If the file does not exist, `flexi_logger` creates the file and fills it + /// with the initial spec (and in the respective file format, of course). 
+ /// + /// ## Feature dependency + /// + /// The implementation of this configuration method uses some additional crates + /// that you might not want to depend on with your program if you don't use this functionality. + /// For that reason the method is only available if you activate the + /// `specfile` feature. See `flexi_logger`'s [usage](index.html#usage) section for details. + /// + /// ## Usage + /// + /// A logger initialization like + /// + /// ```ignore + /// use flexi_logger::Logger; + /// Logger::with_str("info")/*...*/.start_with_specfile("logspecification.toml"); + /// ``` + /// + /// will create the file `logspecification.toml` (if it does not yet exist) with this content: + /// + /// ```toml + /// ### Optional: Default log level + /// global_level = 'info' + /// ### Optional: specify a regular expression to suppress all messages that don't match + /// #global_pattern = 'foo' + /// + /// ### Specific log levels per module are optionally defined in this section + /// [modules] + /// #'mod1' = 'warn' + /// #'mod2' = 'debug' + /// #'mod2::mod3' = 'trace' + /// ``` + /// + /// You can subsequently edit and modify the file according to your needs, + /// while the program is running, and it will immediately take your changes into account. + /// + /// Currently only toml-files are supported, the file suffix thus must be `.toml`. + /// + /// The initial spec remains valid if the file cannot be read. + /// + /// If you update the specfile subsequently while the program is running, `flexi_logger` + /// re-reads it automatically and adapts its behavior according to the new content. + /// If the file cannot be read anymore, e.g. because the format is not correct, the + /// previous logspec remains active. + /// If the file is corrected subsequently, the log spec update will work again. + /// + /// # Errors + /// + /// Several variants of `FlexiLoggerError` can occur. + /// + /// # Returns + /// + /// A `ReconfigurationHandle` is returned, predominantly to allow using its + /// [`shutdown`](struct.ReconfigurationHandle.html#method.shutdown) method. + #[cfg(feature = "specfile_without_notification")] + pub fn start_with_specfile>( + self, + specfile: P, + ) -> Result { + // Make logging work, before caring for the specfile + let (boxed_logger, handle) = self.build()?; + log::set_boxed_logger(boxed_logger)?; + setup_specfile(specfile, handle.clone())?; + Ok(handle) + } + + /// Builds a boxed logger and a `ReconfigurationHandle` for it, + /// but does not initialize the global logger. + /// + /// + /// The returned boxed logger implements the Log trait and can be installed manually + /// or nested within another logger. + /// + /// For the properties of the returned logger, + /// see [`start_with_specfile()`](struct.Logger.html#method.start_with_specfile). + /// + /// # Errors + /// + /// Several variants of `FlexiLoggerError` can occur. + /// + /// # Returns + /// + /// A `ReconfigurationHandle` is returned, predominantly to allow using its + /// [`shutdown`](struct.ReconfigurationHandle.html#method.shutdown) method. 
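+    ///
+    /// A minimal sketch (file name invented for illustration):
+    /// ```rust,ignore
+    /// let (boxed_logger, handle) = flexi_logger::Logger::with_str("info")
+    ///     .build_with_specfile("logspec.toml")
+    ///     .unwrap();
+    /// // install the logger manually, e.g. when nesting it in another logger
+    /// log::set_boxed_logger(boxed_logger).unwrap();
+    /// // keep `handle` around to call set_new_spec() or shutdown() later
+    /// ```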
+ #[cfg(feature = "specfile_without_notification")] + pub fn build_with_specfile>( + self, + specfile: P, + ) -> Result<(Box, ReconfigurationHandle), FlexiLoggerError> { + let (boxed_log, handle) = self.build()?; + setup_specfile(specfile, handle.clone())?; + Ok((boxed_log, handle)) + } +} + +#[cfg(feature = "specfile_without_notification")] +fn setup_specfile>( + specfile: P, + mut handle: ReconfigurationHandle, +) -> Result<(), FlexiLoggerError> { + let specfile = specfile.as_ref().to_owned(); + synchronize_handle_with_specfile(&mut handle, &specfile)?; + + #[cfg(feature = "specfile")] + { + // Now that the file exists, we can canonicalize the path + let specfile = specfile + .canonicalize() + .map_err(FlexiLoggerError::SpecfileIo)?; + + // Watch the parent folder of the specfile, using debounced events + let (tx, rx) = std::sync::mpsc::channel(); + let debouncing_delay = std::time::Duration::from_millis(1000); + let mut watcher = watcher(tx, debouncing_delay)?; + watcher.watch(&specfile.parent().unwrap(), RecursiveMode::NonRecursive)?; + + // in a separate thread, reread the specfile when it was updated + std::thread::Builder::new() + .name("flexi_logger-specfile-watcher".to_string()) + .stack_size(128 * 1024) + .spawn(move || { + let _anchor_for_watcher = watcher; // keep it alive! + loop { + match rx.recv() { + Ok(debounced_event) => { + // println!("got debounced event {:?}", debounced_event); + match debounced_event { + DebouncedEvent::Create(ref path) + | DebouncedEvent::Write(ref path) => { + if path.canonicalize().map(|x| x == specfile).unwrap_or(false) { + match log_spec_string_from_file(&specfile) + .map_err(FlexiLoggerError::SpecfileIo) + .and_then(|s| LogSpecification::from_toml(&s)) + { + Ok(spec) => handle.set_new_spec(spec), + Err(e) => eprintln!( + "[flexi_logger] rereading the log specification file \ + failed with {:?}, \ + continuing with previous log specification", + e + ), + } + } + } + _event => {} + } + } + Err(e) => { + eprintln!("[flexi_logger] error while watching the specfile: {:?}", e) + } + } + } + })?; + } + Ok(()) +} + +// If the specfile exists, read the file and update the log_spec from it; +// otherwise try to create the file, with the current spec as content, under the specified name. +#[cfg(feature = "specfile_without_notification")] +pub(crate) fn synchronize_handle_with_specfile( + handle: &mut ReconfigurationHandle, + specfile: &std::path::PathBuf, +) -> Result<(), FlexiLoggerError> { + if specfile + .extension() + .unwrap_or_else(|| std::ffi::OsStr::new("")) + .to_str() + .unwrap_or("") + != "toml" + { + return Err(FlexiLoggerError::SpecfileExtension( + "only spec files with extension toml are supported", + )); + } + + if std::path::Path::is_file(specfile) { + let s = log_spec_string_from_file(specfile).map_err(FlexiLoggerError::SpecfileIo)?; + handle.set_new_spec(LogSpecification::from_toml(&s)?); + } else { + if let Some(specfolder) = specfile.parent() { + std::fs::DirBuilder::new() + .recursive(true) + .create(specfolder) + .map_err(FlexiLoggerError::SpecfileIo)?; + } + let mut file = std::fs::OpenOptions::new() + .write(true) + .create_new(true) + .open(specfile) + .map_err(FlexiLoggerError::SpecfileIo)?; + + handle + .current_spec() + .read() + .map_err(|_e| FlexiLoggerError::Poison)? 
+ .to_toml(&mut file)?; + } + Ok(()) +} + +#[cfg(feature = "specfile_without_notification")] +pub(crate) fn log_spec_string_from_file>( + specfile: P, +) -> Result { + let mut buf = String::new(); + let mut file = std::fs::File::open(specfile)?; + file.read_to_string(&mut buf)?; + Ok(buf) +} + +/// Used to control which messages are to be duplicated to stderr, when `log_to_file()` is used. +#[derive(Debug)] +pub enum Duplicate { + /// No messages are duplicated. + None, + /// Only error messages are duplicated. + Error, + /// Error and warn messages are duplicated. + Warn, + /// Error, warn, and info messages are duplicated. + Info, + /// Error, warn, info, and debug messages are duplicated. + Debug, + /// All messages are duplicated. + Trace, + /// All messages are duplicated. + All, +} diff --git a/agent/support/rust/flexi_logger/src/parameters.rs b/agent/support/rust/flexi_logger/src/parameters.rs new file mode 100644 index 000000000..38db2a33d --- /dev/null +++ b/agent/support/rust/flexi_logger/src/parameters.rs @@ -0,0 +1,145 @@ +/// Criterion when to rotate the log file. +/// +/// Used in [`Logger::rotate`](struct.Logger.html#method.rotate). +#[derive(Copy, Clone, Debug)] +pub enum Criterion { + /// Rotate the log file when it exceeds the specified size in bytes. + Size(u64), + /// Rotate the log file when it has become older than the specified age. + /// + /// ## Minor limitation + /// + /// ### TL,DR + /// the combination of `Logger::append()` + /// with `Criterion::Age` works OK, but not perfectly correct on Windows or Linux + /// when the program is restarted. + /// + /// ### Details + /// Applying the age criterion works fine while your program is running. + /// Ideally, we should also apply it to the rCURRENT file when the program is restarted + /// and you chose the `Logger::append()` option. + /// + /// Unfortunately, this does not work on Windows, and it does not work on linux, + /// for different reasons. + /// + /// To minimize the impact on age-based file-rotation, + /// `flexi_logger` uses on Windows and linux its initialization time + /// rather than the real file property + /// as the created_at-info of an rCURRENT file that already exists, and the + /// current timestamp when file rotation happens during further execution. + /// Consequently, a left-over rCURRENT file from a previous program run will look newer + /// than it is, and will be used longer than it should be. + /// + /// #### Issue on Windows + /// + /// For compatibility with DOS (sic!), Windows magically transfers the created_at-info + /// of a file that is deleted (or renamed) to its successor, + /// when the recreation happens within some seconds [[1]](#ref-1). + /// + /// [1] [https://superuser.com/questions/966490/windows-7-what-is-date-created-file-property-referring-to](https://superuser.com/questions/966490/windows-7-what-is-date-created-file-property-referring-to). + /// + /// If the file property were used by `flexi_logger`, + /// the rCURRENT file would always appear to be as old as the + /// first one that ever was created - rotation by time would completely fail. + /// + /// #### Issue on Linux + /// + /// `std::fs::metadata.created()` returns `Err`, because linux does not maintain a + /// created-at-timestamp. + /// + Age(Age), + /// Rotate the file when it has either become older than the specified age, or when it has + /// exceeded the specified size in bytes. + /// + /// See documentation for Age and Size. 
+ AgeOrSize(Age, u64), +} + +/// The age after which a log file rotation will be triggered, +/// when [`Criterion::Age`](enum.Criterion.html#variant.Age) is chosen. +#[derive(Copy, Clone, Debug)] +pub enum Age { + /// Rotate the log file when the local clock has started a new day since the + /// current file had been created. + Day, + /// Rotate the log file when the local clock has started a new hour since the + /// current file had been created. + Hour, + /// Rotate the log file when the local clock has started a new minute since the + /// current file had been created. + Minute, + /// Rotate the log file when the local clock has started a new second since the + /// current file had been created. + Second, +} + +/// The naming convention for rotated log files. +/// +/// With file rotation, the logs are written to a file with infix `_rCURRENT`. +/// When rotation happens, the CURRENT log file will be renamed to a file with +/// another infix of the form `"_r..."`. `Naming` defines which other infix will be used. +/// +/// Used in [`Logger::rotate`](struct.Logger.html#method.rotate). +#[derive(Copy, Clone, Debug)] +pub enum Naming { + /// File rotation rotates to files with a timestamp-infix, like `"r2020-01-27_14-41-08"`. + Timestamps, + /// File rotation rotates to files with a number-infix. + Numbers, +} +/// Defines the strategy for handling older log files. +/// +/// Is used in [`Logger::rotate`](struct.Logger.html#method.rotate). +/// +/// Note that if you use a strategy other than `Cleanup::Never`, then the cleanup work is +/// by default done in an extra thread, to minimize the impact on the program. +/// See +/// [`Logger::cleanup_in_background_thread`](struct.Logger.html#method.cleanup_in_background_thread) +/// if you want to control whether this extra thread is created and used. +#[allow(deprecated)] +#[derive(Copy, Clone, Debug)] +pub enum Cleanup { + /// Older log files are not touched - they remain for ever. + Never, + /// The specified number of rotated log files are kept. + /// Older files are deleted, if necessary. + KeepLogFiles(usize), + /// The specified number of rotated log files are compressed and kept. + /// Older files are deleted, if necessary. + /// + /// This option is only available with feature `compress`. + #[cfg(feature = "compress")] + KeepCompressedFiles(usize), + /// Outdated + #[cfg(feature = "compress")] + #[deprecated(since = "0.16.0", note = "use KeepCompressedFiles instead")] + KeepZipFiles(usize), + /// Allows keeping some files as text files and some as compressed files. + /// + /// ## Example + /// + /// `KeepLogAndCompressedFiles(5,30)` ensures that the youngest five log files are + /// kept as text files, the next 30 are kept as compressed files with additional suffix `.gz`, + /// and older files are removed. + /// + /// This option is only available with feature `compress`. + #[cfg(feature = "compress")] + KeepLogAndCompressedFiles(usize, usize), + /// Outdated + #[deprecated(since = "0.16.0", note = "use KeepLogAndCompressedFiles instead")] + #[cfg(feature = "compress")] + KeepLogAndZipFiles(usize, usize), +} + +impl Cleanup { + // Returns true if some cleanup is to be done. 
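The types above (`Criterion`, `Age`, `Naming`, `Cleanup`) are the parameters of `Logger::rotate`. A minimal sketch of how they are typically combined (illustrative, not part of the patch; the concrete size and retention values are just examples):

```rust
use flexi_logger::{Age, Cleanup, Criterion, Logger, Naming};

fn main() {
    Logger::with_str("info")
        .log_to_file()
        .rotate(
            Criterion::AgeOrSize(Age::Day, 10_000_000), // rotate daily, or at ~10 MB
            Naming::Timestamps,                         // rotated files get a timestamp infix
            Cleanup::KeepLogFiles(7),                   // keep only the 7 newest rotated files
        )
        .start()
        .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e));
}
```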
+ #[must_use] + #[allow(clippy::match_like_matches_macro)] + pub(crate) fn do_cleanup(&self) -> bool { + // !matches!(self, Self::Never) would be nicer, but is not possible with 1.37 + match self { + Self::Never => false, + _ => true, + } + } +} diff --git a/agent/support/rust/flexi_logger/src/primary_writer.rs b/agent/support/rust/flexi_logger/src/primary_writer.rs new file mode 100644 index 000000000..aaa486d06 --- /dev/null +++ b/agent/support/rust/flexi_logger/src/primary_writer.rs @@ -0,0 +1,251 @@ +use log::Record; +use std::cell::RefCell; +use std::io::Write; + +use crate::deferred_now::DeferredNow; +use crate::logger::Duplicate; +use crate::writers::LogWriter; +use crate::FormatFunction; + +// Writes either to stdout, or to stderr, +// or to a file (with optional duplication to stderr), +// or to nowhere (with optional "duplication" to stderr). +#[allow(clippy::large_enum_variant)] +pub(crate) enum PrimaryWriter { + StdOut(StdOutWriter), + StdErr(StdErrWriter), + Multi(MultiWriter), +} +impl PrimaryWriter { + pub fn multi( + duplicate_stderr: Duplicate, + duplicate_stdout: Duplicate, + format_for_stderr: FormatFunction, + format_for_stdout: FormatFunction, + writers: Vec>, + ) -> Self { + Self::Multi(MultiWriter { + duplicate_stderr, + duplicate_stdout, + format_for_stderr, + format_for_stdout, + writers, + }) + } + pub fn stderr(format: FormatFunction) -> Self { + Self::StdErr(StdErrWriter::new(format)) + } + + pub fn stdout(format: FormatFunction) -> Self { + Self::StdOut(StdOutWriter::new(format)) + } + + pub fn black_hole( + duplicate_err: Duplicate, + duplicate_out: Duplicate, + format_for_stderr: FormatFunction, + format_for_stdout: FormatFunction, + ) -> Self { + Self::multi( + duplicate_err, + duplicate_out, + format_for_stderr, + format_for_stdout, + vec![], + ) + } + + // Write out a log line. + pub fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()> { + match *self { + Self::StdErr(ref w) => w.write(now, record), + Self::StdOut(ref w) => w.write(now, record), + Self::Multi(ref w) => w.write(now, record), + } + } + + // Flush any buffered records. + pub fn flush(&self) -> std::io::Result<()> { + match *self { + Self::StdErr(ref w) => w.flush(), + Self::StdOut(ref w) => w.flush(), + Self::Multi(ref w) => w.flush(), + } + } + + pub fn validate_logs(&self, expected: &[(&'static str, &'static str, &'static str)]) { + if let Self::Multi(ref w) = *self { + w.validate_logs(expected); + } + } +} + +// `StdErrWriter` writes logs to stderr. +pub(crate) struct StdErrWriter { + format: FormatFunction, +} + +impl StdErrWriter { + fn new(format: FormatFunction) -> Self { + Self { format } + } + #[inline] + fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()> { + write_buffered(self.format, now, record, &mut std::io::stderr()) + } + + #[inline] + fn flush(&self) -> std::io::Result<()> { + std::io::stderr().flush() + } +} + +// `StdOutWriter` writes logs to stdout. +pub(crate) struct StdOutWriter { + format: FormatFunction, +} + +impl StdOutWriter { + fn new(format: FormatFunction) -> Self { + Self { format } + } + #[inline] + fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()> { + write_buffered(self.format, now, record, &mut std::io::stdout()) + } + + #[inline] + fn flush(&self) -> std::io::Result<()> { + std::io::stdout().flush() + } +} + +// The `MultiWriter` writes logs to stderr or to a set of `Writer`s, and in the latter case +// can duplicate messages to stderr. 
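The `MultiWriter` below checks the `Duplicate` level for stderr and stdout independently. On the `Logger` side these levels are normally set through the `duplicate_to_stderr` and `duplicate_to_stdout` builder methods; a minimal sketch (illustrative, and it assumes both builder methods are present in this vendored version, as the `duplicate_out` plumbing suggests):

```rust
use flexi_logger::{Duplicate, Logger};

fn main() {
    Logger::with_str("debug")
        .log_to_file()
        .duplicate_to_stderr(Duplicate::Warn) // errors and warnings also go to stderr
        .duplicate_to_stdout(Duplicate::Info) // info and above also go to stdout
        .start()
        .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e));

    log::warn!("this line appears in the log file, on stderr, and on stdout");
}
```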
+pub(crate) struct MultiWriter { + duplicate_stderr: Duplicate, + duplicate_stdout: Duplicate, + format_for_stderr: FormatFunction, + format_for_stdout: FormatFunction, + writers: Vec>, +} + +impl LogWriter for MultiWriter { + fn validate_logs(&self, expected: &[(&'static str, &'static str, &'static str)]) { + for writer in &self.writers { + (*writer).validate_logs(expected); + } + } + + fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()> { + if match self.duplicate_stderr { + Duplicate::Error => record.level() == log::Level::Error, + Duplicate::Warn => record.level() <= log::Level::Warn, + Duplicate::Info => record.level() <= log::Level::Info, + Duplicate::Debug => record.level() <= log::Level::Debug, + Duplicate::Trace | Duplicate::All => true, + Duplicate::None => false, + } { + write_buffered(self.format_for_stderr, now, record, &mut std::io::stderr())?; + } + + if match self.duplicate_stdout { + Duplicate::Error => record.level() == log::Level::Error, + Duplicate::Warn => record.level() <= log::Level::Warn, + Duplicate::Info => record.level() <= log::Level::Info, + Duplicate::Debug => record.level() <= log::Level::Debug, + Duplicate::Trace | Duplicate::All => true, + Duplicate::None => false, + } { + write_buffered(self.format_for_stdout, now, record, &mut std::io::stdout())?; + } + + for writer in &self.writers { + writer.write(now, record)?; + } + Ok(()) + } + + /// Provides the maximum log level that is to be written. + fn max_log_level(&self) -> log::LevelFilter { + self.writers + .iter() + .map(|w| w.max_log_level()) + .max() + .unwrap() + } + + fn flush(&self) -> std::io::Result<()> { + for writer in &self.writers { + writer.flush()?; + } + std::io::stderr().flush() + } + fn shutdown(&self) { + for writer in &self.writers { + writer.shutdown(); + } + } +} + +// Use a thread-local buffer for writing to stderr or stdout +fn write_buffered( + format_function: FormatFunction, + now: &mut DeferredNow, + record: &Record, + w: &mut dyn Write, +) -> Result<(), std::io::Error> { + let mut result: Result<(), std::io::Error> = Ok(()); + + buffer_with(|tl_buf| match tl_buf.try_borrow_mut() { + Ok(mut buffer) => { + (format_function)(&mut *buffer, now, record) + .unwrap_or_else(|e| write_err(ERR_FORMATTING, &e)); + buffer + .write_all(b"\n") + .unwrap_or_else(|e| write_err(ERR_FORMATTING, &e)); + + result = w.write_all(&*buffer).map_err(|e| { + write_err(ERR_WRITING, &e); + e + }); + + buffer.clear(); + } + Err(_e) => { + // We arrive here in the rare cases of recursive logging + // (e.g. log calls in Debug or Display implementations) + // we print the inner calls, in chronological order, before finally the + // outer most message is printed + let mut tmp_buf = Vec::::with_capacity(200); + (format_function)(&mut tmp_buf, now, record) + .unwrap_or_else(|e| write_err(ERR_FORMATTING, &e)); + tmp_buf + .write_all(b"\n") + .unwrap_or_else(|e| write_err(ERR_FORMATTING, &e)); + + result = w.write_all(&tmp_buf).map_err(|e| { + write_err(ERR_WRITING, &e); + e + }); + } + }); + result +} + +pub(crate) fn buffer_with(f: F) +where + F: FnOnce(&RefCell>), +{ + thread_local! 
{ + static BUFFER: RefCell> = RefCell::new(Vec::with_capacity(200)); + } + BUFFER.with(f); +} + +const ERR_FORMATTING: &str = "formatting failed with "; +const ERR_WRITING: &str = "writing failed with "; + +fn write_err(msg: &str, err: &std::io::Error) { + eprintln!("[flexi_logger] {} with {}", msg, err); +} diff --git a/agent/support/rust/flexi_logger/src/reconfiguration_handle.rs b/agent/support/rust/flexi_logger/src/reconfiguration_handle.rs new file mode 100644 index 000000000..d7454a5de --- /dev/null +++ b/agent/support/rust/flexi_logger/src/reconfiguration_handle.rs @@ -0,0 +1,157 @@ +use crate::log_specification::LogSpecification; +use crate::primary_writer::PrimaryWriter; +use crate::writers::LogWriter; +use std::collections::HashMap; +use std::sync::{Arc, RwLock}; + +/// Allows reconfiguring the logger programmatically. +/// +/// # Example +/// +/// Obtain the `ReconfigurationHandle` (using `.start()`): +/// ```rust +/// # use flexi_logger::{Logger, LogSpecBuilder}; +/// let mut log_handle = Logger::with_str("info") +/// // ... your logger configuration goes here, as usual +/// .start() +/// .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); +/// +/// // ... +/// ``` +/// +/// You can permanently exchange the log specification programmatically, anywhere in your code: +/// +/// ```rust +/// # use flexi_logger::{Logger, LogSpecBuilder}; +/// # let mut log_handle = Logger::with_str("info") +/// # .start() +/// # .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); +/// // ... +/// log_handle.parse_new_spec("warn"); +/// // ... +/// ``` +/// +/// However, when debugging, you often want to modify the log spec only temporarily, for +/// one or few method calls only; this is easier done with the following method, because +/// it allows switching back to the previous spec: +/// +/// ```rust +/// # use flexi_logger::{Logger, LogSpecBuilder}; +/// # let mut log_handle = Logger::with_str("info") +/// # .start() +/// # .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); +/// log_handle.parse_and_push_temp_spec("trace"); +/// // ... +/// // critical calls +/// // ... +/// +/// log_handle.pop_temp_spec(); +/// // Continue with the log spec you had before. +/// // ... +/// ``` +#[derive(Clone)] +pub struct ReconfigurationHandle { + spec: Arc>, + spec_stack: Vec, + primary_writer: Arc, + other_writers: Arc>>, +} +impl ReconfigurationHandle { + pub(crate) fn new( + spec: Arc>, + primary_writer: Arc, + other_writers: Arc>>, + ) -> Self { + Self { + spec, + spec_stack: Vec::default(), + primary_writer, + other_writers, + } + } + + #[cfg(feature = "specfile_without_notification")] + pub(crate) fn current_spec(&self) -> Arc> { + Arc::clone(&self.spec) + } + + // + pub(crate) fn reconfigure(&self, mut max_level: log::LevelFilter) { + for w in self.other_writers.as_ref().values() { + max_level = std::cmp::max(max_level, w.max_log_level()); + } + log::set_max_level(max_level); + } + + /// Replaces the active `LogSpecification`. + pub fn set_new_spec(&mut self, new_spec: LogSpecification) { + let max_level = new_spec.max_level(); + self.spec.write().unwrap(/* catch and expose error? */).update_from(new_spec); + self.reconfigure(max_level); + } + + /// Tries to replace the active `LogSpecification` with the result from parsing the given String. 
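Besides the string-based methods shown in the doc examples above, a specification can also be built programmatically with `LogSpecBuilder` (which those examples already import) and pushed temporarily through the handle. A minimal sketch (illustrative, not part of the patch; the module name `my_app::parser` is just an example):

```rust
use flexi_logger::{LogSpecBuilder, Logger};
use log::LevelFilter;

fn main() {
    let mut handle = Logger::with_str("info")
        .start()
        .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e));

    // Build a temporary spec: warn by default, trace for one module.
    let mut builder = LogSpecBuilder::new();
    builder.default(LevelFilter::Warn);
    builder.module("my_app::parser", LevelFilter::Trace);

    handle.push_temp_spec(builder.build());
    // ... critical calls ...
    handle.pop_temp_spec(); // back to the previous spec
}
```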
+ pub fn parse_new_spec(&mut self, spec: &str) { + self.set_new_spec(LogSpecification::parse(spec).unwrap_or_else(|e| { + eprintln!( + "[flexi_logger] ReconfigurationHandle::parse_new_spec(): failed with {}", + e + ); + LogSpecification::off() + })) + } + + /// Replaces the active `LogSpecification` and pushes the previous one to a Stack. + pub fn push_temp_spec(&mut self, new_spec: LogSpecification) { + self.spec_stack + .push(self.spec.read().unwrap(/* catch and expose error? */).clone()); + self.set_new_spec(new_spec); + } + + /// Tries to replace the active `LogSpecification` with the result from parsing the given String + /// and pushes the previous one to a Stack. + pub fn parse_and_push_temp_spec(&mut self, new_spec: &str) { + self.spec_stack + .push(self.spec.read().unwrap(/* catch and expose error? */).clone()); + self.set_new_spec(LogSpecification::parse(new_spec).unwrap_or_else(|e| { + eprintln!( + "[flexi_logger] ReconfigurationHandle::parse_new_spec(): failed with {}, \ + falling back to empty log spec", + e + ); + LogSpecification::off() + })); + } + + /// Reverts to the previous `LogSpecification`, if any. + pub fn pop_temp_spec(&mut self) { + if let Some(previous_spec) = self.spec_stack.pop() { + self.set_new_spec(previous_spec); + } + } + + /// Shutdown all participating writers. + /// + /// This method is supposed to be called at the very end of your program, in case you use + /// your own writers, or if you want to securely shutdown the cleanup-thread of the + /// `FileLogWriter`. If you use a [`Cleanup`](enum.Cleanup.html) strategy with compressing, + /// and your process terminates + /// without correctly shutting down the cleanup-thread, then you might stop the cleanup-thread + /// while it is compressing a log file, which can leave unexpected files in the filesystem. + /// + /// See also [`LogWriter::shutdown`](writers/trait.LogWriter.html#method.shutdown). + pub fn shutdown(&self) { + if let PrimaryWriter::Multi(writer) = &*self.primary_writer { + writer.shutdown(); + } + for writer in self.other_writers.values() { + writer.shutdown(); + } + } + + // Allows checking the logs written so far to the writer + #[doc(hidden)] + pub fn validate_logs(&self, expected: &[(&'static str, &'static str, &'static str)]) { + self.primary_writer.validate_logs(expected) + } +} diff --git a/agent/support/rust/flexi_logger/src/writers.rs b/agent/support/rust/flexi_logger/src/writers.rs new file mode 100644 index 000000000..6bf7e04ae --- /dev/null +++ b/agent/support/rust/flexi_logger/src/writers.rs @@ -0,0 +1,105 @@ +//! Contains a trait ([`LogWriter`](trait.LogWriter.html)) for extending `flexi_logger` +//! with additional log writers, +//! and two concrete implementations +//! for writing to files +//! ([`FileLogWriter`](struct.FileLogWriter.html)) +//! or to the syslog +//! ([`SyslogWriter`](struct.SyslogWriter.html)). +//! You can also use your own implementations of [`LogWriter`](trait.LogWriter.html). +//! +//! Such log writers can be used in two ways: +//! +//! * With [`Logger::log_target(...)`](../struct.Logger.html#method.log_target) +//! you can influence to which output stream normal log messages will be written, +//! i.e. from log macro calls without explicit target specification. +//! +//! See [`LogTarget`](../enum.LogTarget.html) for the available options. +//! +//! These log calls will only be written if they match the current +//! [`log specification`](../struct.LogSpecification.html). +//! +//! * [`Logger::add_writer()`](../struct.Logger.html#method.add_writer) +//! 
can be used to register an additional log writer under a target name. +//! The target name can then be used in calls to the +//! [log macro](https://docs.rs/log/latest/log/macro.log.html) +//! for directing log messages to the desired writers. +//! +//! A log call with a target value that has the form `{Name1,Name2,...}`, i.e., +//! a comma-separated list of target names, within braces, is not sent to the default logger, +//! but to the loggers specified explicitly in the list. +//! In such a list you can again specify the default logger with the target name `_Default`. +//! +//! These log calls will not be affected by the value of `flexi_logger`'s log specification; +//! they will always be written, as you might want it for alerts or auditing. +//! +//! In the following example we define an alert writer, and a macro to facilitate using it +//! (and avoid using the explicit target specification in the macro call), and +//! show some example calls. +//! +//! ```rust +//! use log::*; +//! +//! use flexi_logger::Logger; +//! use flexi_logger::writers::FileLogWriter; +//! +//! // Configure a FileLogWriter for alert messages +//! pub fn alert_logger() -> Box { +//! Box::new(FileLogWriter::builder() +//! .discriminant("Alert") +//! .suffix("alerts") +//! .print_message() +//! .try_build() +//! .unwrap()) +//! } +//! +//! // Define a macro for writing messages to the alert log and to the normal log +//! #[macro_use] +//! mod macros { +//! #[macro_export] +//! macro_rules! alert_error { +//! ($($arg:tt)*) => ( +//! error!(target: "{Alert,_Default}", $($arg)*); +//! ) +//! } +//! } +//! +//! fn main() { +//! Logger::with_env_or_str("info") +//! .print_message() +//! .log_to_file() +//! .add_writer("Alert", alert_logger()) +//! .start() +//! .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); +//! +//! +//! // Explicitly send logs to different loggers +//! error!(target : "{Alert}", "This is only an alert"); +//! error!(target : "{Alert,_Default}", "This is an alert and log message"); +//! +//! // Nicer: use the explicit macro +//! alert_error!("This is another alert and log message"); +//! +//! // Standard log macros write only to the normal log +//! error!("This is a normal error message"); +//! warn!("This is a warning"); +//! info!("This is an info message"); +//! debug!("This is a debug message - you will not see it"); +//! trace!("This is a trace message - you will not see it"); +//! } +//! +//! ``` +//! 
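For writers that are not file-based, the `LogWriter` trait can be implemented directly. A minimal sketch (illustrative; it assumes that `validate_logs` and `shutdown` have default implementations, as their selective use elsewhere in this patch suggests):

```rust
use flexi_logger::writers::LogWriter;
use flexi_logger::DeferredNow;
use log::Record;

// A trivial writer that sends everything it receives to stderr.
pub struct StderrAlertWriter;

impl LogWriter for StderrAlertWriter {
    fn write(&self, _now: &mut DeferredNow, record: &Record) -> std::io::Result<()> {
        eprintln!("[ALERT] {}", record.args());
        Ok(())
    }

    fn flush(&self) -> std::io::Result<()> {
        Ok(())
    }

    fn max_log_level(&self) -> log::LevelFilter {
        log::LevelFilter::Warn
    }
}
```

Such a writer would be registered like the file-based one in the example above, e.g. with `.add_writer("Alert", Box::new(StderrAlertWriter))`.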
+
+mod file_log_writer;
+mod log_writer;
+
+#[cfg(feature = "syslog_writer")]
+mod syslog_writer;
+
+#[cfg(feature = "syslog_writer")]
+pub use self::syslog_writer::{
+    LevelToSyslogSeverity, SyslogConnector, SyslogFacility, SyslogSeverity, SyslogWriter,
+};
+
+pub use self::file_log_writer::{FileLogWriter, FileLogWriterBuilder};
+pub use self::log_writer::LogWriter;
diff --git a/agent/support/rust/flexi_logger/src/writers/file_log_writer.rs b/agent/support/rust/flexi_logger/src/writers/file_log_writer.rs
new file mode 100644
index 000000000..667d8a870
--- /dev/null
+++ b/agent/support/rust/flexi_logger/src/writers/file_log_writer.rs
@@ -0,0 +1,502 @@
+mod builder;
+mod config;
+mod state;
+
+pub use self::builder::FileLogWriterBuilder;
+
+use self::config::{Config, FilenameConfig, RotationConfig};
+use crate::primary_writer::buffer_with;
+use crate::writers::LogWriter;
+use crate::{DeferredNow, FormatFunction};
+use log::Record;
+use state::State;
+use std::io::Write;
+use std::path::PathBuf;
+use std::sync::Mutex;
+/// A configurable `LogWriter` implementation that writes to a file or a sequence of files.
+///
+/// See the [module description](index.html) for usage guidance.
+#[allow(clippy::module_name_repetitions)]
+pub struct FileLogWriter {
+    format: FormatFunction,
+    line_ending: &'static [u8],
+    // the state needs to be mutable; since `Log.log()` requires an immutable self,
+    // which translates into a non-mutating `LogWriter::write()`,
+    // we need internal mutability and thread-safety.
+    state: Mutex<State>,
+    max_log_level: log::LevelFilter,
+    sender: Option<plugin::Sender>,
+    name: String,
+}
+impl FileLogWriter {
+    pub(crate) fn new(
+        format: FormatFunction,
+        line_ending: &'static [u8],
+        state: Mutex<State>,
+        max_log_level: log::LevelFilter,
+        sender: Option<plugin::Sender>,
+        name: String,
+    ) -> FileLogWriter {
+        FileLogWriter {
+            format,
+            line_ending,
+            state,
+            max_log_level,
+            sender,
+            name,
+        }
+    }
+
+    /// Instantiates a builder for `FileLogWriter`.
+    #[must_use]
+    pub fn builder() -> FileLogWriterBuilder {
+        FileLogWriterBuilder::new()
+    }
+
+    /// Returns a reference to its configured output format function.
+ #[inline] + pub fn format(&self) -> FormatFunction { + self.format + } + + #[doc(hidden)] + pub fn current_filename(&self) -> PathBuf { + self.state.lock().unwrap().current_filename() + } +} + +impl LogWriter for FileLogWriter { + #[inline] + fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()> { + if record.level().eq(&log::Level::Error) { + match &self.sender { + Some(s) => { + let mut data = std::collections::HashMap::new(); + data.insert("data_type", "1002"); + data.insert("level", "error"); + let timestamp = std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap_or_default() + .as_secs() + .to_string(); + data.insert("timestamp", timestamp.as_str()); + data.insert( + "source", + record + .module_path() + .unwrap_or_else(|| record.file().unwrap_or_else(|| record.target())), + ); + let msg = record.args().to_string(); + data.insert("msg", msg.as_str()); + data.insert("plugin", self.name.as_str()); + match s.send(&data) { + Ok(_) => {} + Err(e) => println!("Log send failed:{}", e), + }; + } + None => {} + } + } + buffer_with(|tl_buf| match tl_buf.try_borrow_mut() { + Ok(mut buffer) => { + (self.format)(&mut *buffer, now, record).unwrap_or_else(|e| write_err(ERR_1, &e)); + + let mut state_guard = self.state.lock().unwrap(); + let state = &mut *state_guard; + + buffer + .write_all(self.line_ending) + .unwrap_or_else(|e| write_err(ERR_2, &e)); + + state + .write_buffer(&*buffer) + .unwrap_or_else(|e| write_err(ERR_2, &e)); + buffer.clear(); + } + Err(_e) => { + // We arrive here in the rare cases of recursive logging + // (e.g. log calls in Debug or Display implementations) + // we print the inner calls, in chronological order, before finally the + // outer most message is printed + let mut tmp_buf = Vec::::with_capacity(200); + (self.format)(&mut tmp_buf, now, record).unwrap_or_else(|e| write_err(ERR_1, &e)); + + let mut state_guard = self.state.lock().unwrap(); + let state = &mut *state_guard; + + tmp_buf + .write_all(self.line_ending) + .unwrap_or_else(|e| write_err(ERR_2, &e)); + + state + .write_buffer(&tmp_buf) + .unwrap_or_else(|e| write_err(ERR_2, &e)); + } + }); + + Ok(()) + } + + #[inline] + fn flush(&self) -> std::io::Result<()> { + if let Ok(ref mut state) = self.state.lock() { + state.flush() + } else { + Ok(()) + } + } + + #[inline] + fn max_log_level(&self) -> log::LevelFilter { + self.max_log_level + } + + #[doc(hidden)] + fn validate_logs(&self, expected: &[(&'static str, &'static str, &'static str)]) { + if let Ok(ref mut state) = self.state.lock() { + state.validate_logs(expected) + } + } + + fn shutdown(&self) { + // do nothing in case of poison errors + if let Ok(ref mut state) = self.state.lock() { + state.shutdown(); + } + } +} + +const ERR_1: &str = "FileLogWriter: formatting failed with "; +const ERR_2: &str = "FileLogWriter: writing failed with "; + +fn write_err(msg: &str, err: &std::io::Error) { + eprintln!("[flexi_logger] {} with {}", msg, err); +} + +#[cfg(test)] +mod test { + use crate::writers::LogWriter; + use crate::{Cleanup, Criterion, DeferredNow, Naming}; + use chrono::Local; + + use std::ops::Add; + use std::path::{Path, PathBuf}; + const DIRECTORY: &str = r"log_files/rotate"; + const ONE: &str = "ONE"; + const TWO: &str = "TWO"; + const THREE: &str = "THREE"; + const FOUR: &str = "FOUR"; + const FIVE: &str = "FIVE"; + const SIX: &str = "SIX"; + const SEVEN: &str = "SEVEN"; + const EIGHT: &str = "EIGHT"; + const NINE: &str = "NINE"; + + // cargo test --lib -- --nocapture + + #[test] + fn 
test_rotate_no_append_numbers() { + // we use timestamp as discriminant to allow repeated runs + let ts = Local::now() + .format("false-numbers-%Y-%m-%d_%H-%M-%S") + .to_string(); + let naming = Naming::Numbers; + + // ensure we start with -/-/- + assert!(not_exists("00000", &ts)); + assert!(not_exists("00001", &ts)); + assert!(not_exists("CURRENT", &ts)); + + // ensure this produces -/-/ONE + write_loglines(false, naming, &ts, &[ONE]); + assert!(not_exists("00000", &ts)); + assert!(not_exists("00001", &ts)); + assert!(contains("CURRENT", &ts, ONE)); + + // ensure this produces ONE/-/TWO + write_loglines(false, naming, &ts, &[TWO]); + assert!(contains("00000", &ts, ONE)); + assert!(not_exists("00001", &ts)); + assert!(contains("CURRENT", &ts, TWO)); + + // ensure this also produces ONE/-/TWO + remove("CURRENT", &ts); + assert!(not_exists("CURRENT", &ts)); + write_loglines(false, naming, &ts, &[TWO]); + assert!(contains("00000", &ts, ONE)); + assert!(not_exists("00001", &ts)); + assert!(contains("CURRENT", &ts, TWO)); + + // ensure this produces ONE/TWO/THREE + write_loglines(false, naming, &ts, &[THREE]); + assert!(contains("00000", &ts, ONE)); + assert!(contains("00001", &ts, TWO)); + assert!(contains("CURRENT", &ts, THREE)); + } + + #[allow(clippy::cognitive_complexity)] + #[test] + fn test_rotate_with_append_numbers() { + // we use timestamp as discriminant to allow repeated runs + let ts = Local::now() + .format("true-numbers-%Y-%m-%d_%H-%M-%S") + .to_string(); + let naming = Naming::Numbers; + + // ensure we start with -/-/- + assert!(not_exists("00000", &ts)); + assert!(not_exists("00001", &ts)); + assert!(not_exists("CURRENT", &ts)); + + // ensure this produces 12/-/3 + write_loglines(true, naming, &ts, &[ONE, TWO, THREE]); + assert!(contains("00000", &ts, ONE)); + assert!(contains("00000", &ts, TWO)); + assert!(not_exists("00001", &ts)); + assert!(contains("CURRENT", &ts, THREE)); + + // ensure this produces 12/34/56 + write_loglines(true, naming, &ts, &[FOUR, FIVE, SIX]); + assert!(contains("00000", &ts, ONE)); + assert!(contains("00000", &ts, TWO)); + assert!(contains("00001", &ts, THREE)); + assert!(contains("00001", &ts, FOUR)); + assert!(contains("CURRENT", &ts, FIVE)); + assert!(contains("CURRENT", &ts, SIX)); + + // ensure this also produces 12/34/56 + remove("CURRENT", &ts); + remove("00001", &ts); + assert!(not_exists("CURRENT", &ts)); + write_loglines(true, naming, &ts, &[THREE, FOUR, FIVE, SIX]); + assert!(contains("00000", &ts, ONE)); + assert!(contains("00000", &ts, TWO)); + assert!(contains("00001", &ts, THREE)); + assert!(contains("00001", &ts, FOUR)); + assert!(contains("CURRENT", &ts, FIVE)); + assert!(contains("CURRENT", &ts, SIX)); + + // ensure this produces 12/34/56/78/9 + write_loglines(true, naming, &ts, &[SEVEN, EIGHT, NINE]); + assert!(contains("00002", &ts, FIVE)); + assert!(contains("00002", &ts, SIX)); + assert!(contains("00003", &ts, SEVEN)); + assert!(contains("00003", &ts, EIGHT)); + assert!(contains("CURRENT", &ts, NINE)); + } + + #[test] + fn test_rotate_no_append_timestamps() { + // we use timestamp as discriminant to allow repeated runs + let ts = Local::now() + .format("false-timestamps-%Y-%m-%d_%H-%M-%S") + .to_string(); + + let basename = String::from(DIRECTORY).add("/").add( + &Path::new(&std::env::args().next().unwrap()) + .file_stem().unwrap(/*cannot fail*/) + .to_string_lossy().to_string(), + ); + let naming = Naming::Timestamps; + + // ensure we start with -/-/- + assert!(list_rotated_files(&basename, &ts).is_empty()); + 
assert!(not_exists("CURRENT", &ts)); + + // ensure this produces -/-/ONE + write_loglines(false, naming, &ts, &[ONE]); + assert!(list_rotated_files(&basename, &ts).is_empty()); + assert!(contains("CURRENT", &ts, ONE)); + + std::thread::sleep(std::time::Duration::from_secs(2)); + // ensure this produces ONE/-/TWO + write_loglines(false, naming, &ts, &[TWO]); + assert_eq!(list_rotated_files(&basename, &ts).len(), 1); + assert!(contains("CURRENT", &ts, TWO)); + + std::thread::sleep(std::time::Duration::from_secs(2)); + // ensure this produces ONE/TWO/THREE + write_loglines(false, naming, &ts, &[THREE]); + assert_eq!(list_rotated_files(&basename, &ts).len(), 2); + assert!(contains("CURRENT", &ts, THREE)); + } + + #[test] + fn test_rotate_with_append_timestamps() { + // we use timestamp as discriminant to allow repeated runs + let ts = Local::now() + .format("true-timestamps-%Y-%m-%d_%H-%M-%S") + .to_string(); + + let basename = String::from(DIRECTORY).add("/").add( + &Path::new(&std::env::args().next().unwrap()) + .file_stem().unwrap(/*cannot fail*/) + .to_string_lossy().to_string(), + ); + let naming = Naming::Timestamps; + + // ensure we start with -/-/- + assert!(list_rotated_files(&basename, &ts).is_empty()); + assert!(not_exists("CURRENT", &ts)); + + // ensure this produces 12/-/3 + write_loglines(true, naming, &ts, &[ONE, TWO, THREE]); + assert_eq!(list_rotated_files(&basename, &ts).len(), 1); + assert!(contains("CURRENT", &ts, THREE)); + + // // ensure this produces 12/34/56 + write_loglines(true, naming, &ts, &[FOUR, FIVE, SIX]); + assert!(contains("CURRENT", &ts, FIVE)); + assert!(contains("CURRENT", &ts, SIX)); + assert_eq!(list_rotated_files(&basename, &ts).len(), 2); + + // // ensure this produces 12/34/56/78/9 + // write_loglines(true, naming, &ts, &[SEVEN, EIGHT, NINE]); + // assert_eq!(list_rotated_files(&basename, &ts).len(), 4); + // assert!(contains("CURRENT", &ts, NINE)); + } + + #[test] + fn issue_38() { + const NUMBER_OF_FILES: usize = 5; + const NUMBER_OF_PSEUDO_PROCESSES: usize = 11; + const ISSUE_38: &str = "issue_38"; + const LOG_FOLDER: &str = "log_files/issue_38"; + + for _ in 0..NUMBER_OF_PSEUDO_PROCESSES { + let flw = super::FileLogWriter::builder() + .directory(LOG_FOLDER) + .discriminant(ISSUE_38) + .rotate( + Criterion::Size(500), + Naming::Timestamps, + Cleanup::KeepLogFiles(NUMBER_OF_FILES), + ) + .o_append(false) + .try_build() + .unwrap(); + + // write some lines, but not enough to rotate + for i in 0..4 { + flw.write( + &mut DeferredNow::new(), + &log::Record::builder() + .args(format_args!("{}", i)) + .level(log::Level::Error) + .target("myApp") + .file(Some("server.rs")) + .line(Some(144)) + .module_path(Some("server")) + .build(), + ) + .unwrap(); + } + } + + // give the cleanup thread a short moment of time + std::thread::sleep(std::time::Duration::from_millis(50)); + + let fn_pattern = String::with_capacity(180) + .add( + &String::from(LOG_FOLDER).add("/").add( + &Path::new(&std::env::args().next().unwrap()) + .file_stem().unwrap(/*cannot fail*/) + .to_string_lossy().to_string(), + ), + ) + .add("_") + .add(ISSUE_38) + .add("_r[0-9]*") + .add(".log"); + + assert_eq!( + glob::glob(&fn_pattern) + .unwrap() + .filter_map(Result::ok) + .count(), + NUMBER_OF_FILES + ); + } + + fn remove(s: &str, discr: &str) { + std::fs::remove_file(get_hackyfilepath(s, discr)).unwrap(); + } + + fn not_exists(s: &str, discr: &str) -> bool { + !get_hackyfilepath(s, discr).exists() + } + + fn contains(s: &str, discr: &str, text: &str) -> bool { + match 
std::fs::read_to_string(get_hackyfilepath(s, discr)) { + Err(_) => false, + Ok(s) => s.contains(text), + } + } + + fn get_hackyfilepath(infix: &str, discr: &str) -> Box { + let arg0 = std::env::args().next().unwrap(); + let mut s_filename = Path::new(&arg0) + .file_stem() + .unwrap() + .to_string_lossy() + .to_string(); + s_filename += "_"; + s_filename += discr; + s_filename += "_r"; + s_filename += infix; + s_filename += ".log"; + let mut path_buf = PathBuf::from(DIRECTORY); + path_buf.push(s_filename); + path_buf.into_boxed_path() + } + + fn write_loglines(append: bool, naming: Naming, discr: &str, texts: &[&'static str]) { + let flw = get_file_log_writer(append, naming, discr); + for text in texts { + flw.write( + &mut DeferredNow::new(), + &log::Record::builder() + .args(format_args!("{}", text)) + .level(log::Level::Error) + .target("myApp") + .file(Some("server.rs")) + .line(Some(144)) + .module_path(Some("server")) + .build(), + ) + .unwrap(); + } + } + + fn get_file_log_writer( + append: bool, + naming: Naming, + discr: &str, + ) -> crate::writers::FileLogWriter { + super::FileLogWriter::builder() + .directory(DIRECTORY) + .discriminant(discr) + .rotate( + Criterion::Size(if append { 28 } else { 10 }), + naming, + Cleanup::Never, + ) + .o_append(append) + .try_build() + .unwrap() + } + + fn list_rotated_files(basename: &str, discr: &str) -> Vec { + let fn_pattern = String::with_capacity(180) + .add(basename) + .add("_") + .add(discr) + .add("_r2[0-9]*") // Year 3000 problem!!! + .add(".log"); + + glob::glob(&fn_pattern) + .unwrap() + .map(|r| r.unwrap().into_os_string().to_string_lossy().to_string()) + .collect() + } +} diff --git a/agent/support/rust/flexi_logger/src/writers/file_log_writer/builder.rs b/agent/support/rust/flexi_logger/src/writers/file_log_writer/builder.rs new file mode 100644 index 000000000..8fea05bae --- /dev/null +++ b/agent/support/rust/flexi_logger/src/writers/file_log_writer/builder.rs @@ -0,0 +1,293 @@ +use crate::flexi_error::FlexiLoggerError; +use crate::formats::default_format; +use crate::FormatFunction; +use crate::{Cleanup, Criterion, Naming}; +use chrono::Local; +use std::env; +use std::path::{Path, PathBuf}; +use std::sync::Mutex; + +use super::{Config, FileLogWriter, RotationConfig, State}; + +/// Builder for `FileLogWriter`. +#[allow(clippy::module_name_repetitions)] +pub struct FileLogWriterBuilder { + discriminant: Option, + config: Config, + format: FormatFunction, + o_rotation_config: Option, + max_log_level: log::LevelFilter, + cleanup_in_background_thread: bool, + sender: Option, + name: String, +} + +/// Simple methods for influencing the behavior of the `FileLogWriter`. +impl FileLogWriterBuilder { + pub(crate) fn new() -> FileLogWriterBuilder { + FileLogWriterBuilder { + name: String::from("default"), + sender: None, + discriminant: None, + o_rotation_config: None, + config: Config::default(), + format: default_format, + max_log_level: log::LevelFilter::Trace, + cleanup_in_background_thread: true, + } + } + /// Set name. + #[must_use] + pub fn name(mut self, name: String) -> Self { + self.name = name; + self + } + /// Add a grpc sender. + #[must_use] + pub fn sender(mut self, sender: plugin::Sender) -> Self { + self.sender = Some(sender); + self + } + /// Makes the `FileLogWriter` print an info message to stdout + /// when a new file is used for log-output. 
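The `name()` and `sender()` builder methods above are the agent-specific additions: together with the error-forwarding branch in `FileLogWriter::write`, they ship every `error!` record (as a map with `data_type` "1002", `level`, `timestamp`, `source`, `msg` and `plugin`) through the plugin sender. A minimal sketch of wiring them up (illustrative; how a `plugin::Sender` is obtained is specific to the agent's plugin crate and is assumed here, and "driver" is only an example plugin name):

```rust
use flexi_logger::writers::FileLogWriter;

fn plugin_file_writer(sender: plugin::Sender) -> FileLogWriter {
    FileLogWriter::builder()
        .name("driver".to_string()) // reported as the "plugin" field of forwarded errors
        .sender(sender)             // error records are forwarded through this sender
        .discriminant("plugin")
        .try_build()
        .unwrap()
}
```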
+ #[must_use] + pub fn print_message(mut self) -> Self { + self.config.print_message = true; + self + } + + /// Makes the `FileLogWriter` use the provided format function for the log entries, + /// rather than the default ([`formats::default_format`](fn.default_format.html)). + pub fn format(mut self, format: FormatFunction) -> Self { + self.format = format; + self + } + + /// Specifies a folder for the log files. + /// + /// If the specified folder does not exist, the initialization will fail. + /// By default, the log files are created in the folder where the program was started. + pub fn directory>(mut self, directory: P) -> Self { + self.config.filename_config.directory = directory.into(); + self + } + + /// Specifies a suffix for the log files. The default is "log". + pub fn suffix>(mut self, suffix: S) -> Self { + self.config.filename_config.suffix = suffix.into(); + self + } + + /// Makes the logger not include a timestamp into the names of the log files + #[must_use] + pub fn suppress_timestamp(mut self) -> Self { + self.config.filename_config.use_timestamp = false; + self + } + + /// When rotation is used with some `Cleanup` variant, then this option defines + /// if the cleanup activities (finding files, deleting files, evtl compressing files) is done + /// in the current thread (in the current log-call), or whether cleanup is delegated to a + /// background thread. + /// + /// As of `flexi_logger` version `0.14.7`, + /// the cleanup activities are done by default in a background thread. + /// This minimizes the blocking impact to your application caused by IO operations. + /// + /// In earlier versions of `flexi_logger`, or if you call this method with + /// `use_background_thread = false`, + /// the cleanup is done in the thread that is currently causing a file rotation. + #[must_use] + pub fn cleanup_in_background_thread(mut self, use_background_thread: bool) -> Self { + self.cleanup_in_background_thread = use_background_thread; + self + } + + /// Use rotation to prevent indefinite growth of log files. + /// + /// By default, the log file is fixed while your program is running and will grow indefinitely. + /// With this option being used, when the log file reaches the specified criterion, + /// the file will be closed and a new file will be opened. + /// + /// Note that also the filename pattern changes: + /// + /// - by default, no timestamp is added to the filename + /// - the logs are always written to a file with infix `_rCURRENT` + /// - when the rotation criterion is fulfilled, it is closed and renamed to a file + /// with another infix (see `Naming`), + /// and then the logging continues again to the (fresh) file with infix `_rCURRENT`. + /// + /// Example: + /// + /// After some logging with your program `my_prog` and rotation with `Naming::Numbers`, + /// you will find files like + /// + /// ```text + /// my_prog_r00000.log + /// my_prog_r00001.log + /// my_prog_r00002.log + /// my_prog_rCURRENT.log + /// ``` + /// + /// The cleanup parameter allows defining the strategy for dealing with older files. + /// See [Cleanup](enum.Cleanup.html) for details. + #[must_use] + pub fn rotate(mut self, criterion: Criterion, naming: Naming, cleanup: Cleanup) -> Self { + self.o_rotation_config = Some(RotationConfig { + criterion, + naming, + cleanup, + }); + self.config.filename_config.use_timestamp = false; + self + } + + /// Makes the logger append to the given file, if it exists; by default, the file would be + /// truncated. 
+ #[must_use] + pub fn append(mut self) -> Self { + self.config.append = true; + self + } + + /// The specified String is added to the log file name. + pub fn discriminant>(mut self, discriminant: S) -> Self { + self.discriminant = Some(discriminant.into()); + self + } + + /// The specified String will be used on linux systems to create in the current folder + /// a symbolic link to the current log file. + pub fn create_symlink>(mut self, symlink: P) -> Self { + self.config.o_create_symlink = Some(symlink.into()); + self + } + + /// Use Windows line endings, rather than just `\n`. + #[must_use] + pub fn use_windows_line_ending(mut self) -> Self { + self.config.use_windows_line_ending = true; + self + } + + /// Produces the `FileLogWriter`. + /// + /// # Errors + /// + /// `FlexiLoggerError::Io`. + pub fn try_build(mut self) -> Result { + // make sure the folder exists or create it + let p_directory = Path::new(&self.config.filename_config.directory); + std::fs::create_dir_all(&p_directory)?; + if !std::fs::metadata(&p_directory)?.is_dir() { + return Err(FlexiLoggerError::OutputBadDirectory); + }; + + let arg0 = env::args().next().unwrap_or_else(|| "rs".to_owned()); + self.config.filename_config.file_basename = + Path::new(&arg0).file_stem().unwrap(/*cannot fail*/).to_string_lossy().to_string(); + + if let Some(discriminant) = self.discriminant { + self.config.filename_config.file_basename += &format!("_{}", discriminant); + } + if self.config.filename_config.use_timestamp { + self.config.filename_config.file_basename += + &Local::now().format("_%Y-%m-%d_%H-%M-%S").to_string(); + }; + + Ok(FileLogWriter::new( + self.format, + if self.config.use_windows_line_ending { + b"\r\n" + } else { + b"\n" + }, + Mutex::new(State::try_new( + self.config, + self.o_rotation_config, + self.cleanup_in_background_thread, + )?), + self.max_log_level, + self.sender, + self.name, + )) + } +} + +/// Alternative set of methods to control the behavior of the `FileLogWriterBuilder`. +/// Use these methods when you want to control the settings flexibly, +/// e.g. with commandline arguments via `docopts` or `clap`. +impl FileLogWriterBuilder { + /// With true, makes the `FileLogWriterBuilder` print an info message to stdout, each time + /// when a new file is used for log-output. + #[must_use] + pub fn o_print_message(mut self, print_message: bool) -> Self { + self.config.print_message = print_message; + self + } + + /// Specifies a folder for the log files. + /// + /// If the specified folder does not exist, the initialization will fail. + /// With None, the log files are created in the folder where the program was started. + pub fn o_directory>(mut self, directory: Option
) -> Self { + self.config.filename_config.directory = + directory.map_or_else(|| PathBuf::from("."), Into::into); + self + } + + /// With true, makes the `FileLogWriterBuilder` include a timestamp into the names of the + /// log files. + #[must_use] + pub fn o_timestamp(mut self, use_timestamp: bool) -> Self { + self.config.filename_config.use_timestamp = use_timestamp; + self + } + + /// By default, and with None, the log file will grow indefinitely. + /// If a `rotate_config` is set, when the log file reaches or exceeds the specified size, + /// the file will be closed and a new file will be opened. + /// Also the filename pattern changes: instead of the timestamp, a serial number + /// is included into the filename. + /// + /// The size is given in bytes, e.g. `o_rotate_over_size(Some(1_000))` will rotate + /// files once they reach a size of 1 kB. + /// + /// The cleanup strategy allows delimiting the used space on disk. + #[must_use] + pub fn o_rotate(mut self, rotate_config: Option<(Criterion, Naming, Cleanup)>) -> Self { + if let Some((criterion, naming, cleanup)) = rotate_config { + self.o_rotation_config = Some(RotationConfig { + criterion, + naming, + cleanup, + }); + self.config.filename_config.use_timestamp = false; + } else { + self.o_rotation_config = None; + self.config.filename_config.use_timestamp = true; + } + self + } + + /// If append is set to true, makes the logger append to the given file, if it exists. + /// By default, or with false, the file would be truncated. + #[must_use] + pub fn o_append(mut self, append: bool) -> Self { + self.config.append = append; + self + } + + /// The specified String is added to the log file name. + pub fn o_discriminant>(mut self, discriminant: Option) -> Self { + self.discriminant = discriminant.map(Into::into); + self + } + + /// If a String is specified, it will be used on linux systems to create in the current folder + /// a symbolic link with this name to the current log file. + pub fn o_create_symlink>(mut self, symlink: Option) -> Self { + self.config.o_create_symlink = symlink.map(Into::into); + self + } +} diff --git a/agent/support/rust/flexi_logger/src/writers/file_log_writer/config.rs b/agent/support/rust/flexi_logger/src/writers/file_log_writer/config.rs new file mode 100644 index 000000000..87a59ba9c --- /dev/null +++ b/agent/support/rust/flexi_logger/src/writers/file_log_writer/config.rs @@ -0,0 +1,45 @@ +use crate::{Cleanup, Criterion, Naming}; +use std::path::PathBuf; + +// Describes how rotation should work +pub(crate) struct RotationConfig { + // Defines if rotation should be based on size or date + pub(crate) criterion: Criterion, + // Defines if rotated files should be numbered or get a date-based name + pub(crate) naming: Naming, + // Defines the cleanup strategy + pub(crate) cleanup: Cleanup, +} +#[derive(Clone)] +pub(crate) struct FilenameConfig { + pub(crate) directory: PathBuf, + pub(crate) file_basename: String, + pub(crate) suffix: String, + pub(crate) use_timestamp: bool, +} + +// The immutable configuration of a FileLogWriter. +pub(crate) struct Config { + pub(crate) print_message: bool, + pub(crate) append: bool, + pub(crate) filename_config: FilenameConfig, + pub(crate) o_create_symlink: Option, + pub(crate) use_windows_line_ending: bool, +} +impl Config { + // Factory method; uses the same defaults as Logger. 
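The `o_*` variants of the builder methods shown above all take `Option`s or plain `bool`s, which makes them easy to wire to optional command-line flags (the docs mention `docopts` or `clap`). A minimal sketch (illustrative, not part of the patch; the `opt_*` parameters are assumptions standing in for parsed CLI arguments):

```rust
use flexi_logger::writers::FileLogWriter;
use flexi_logger::{Cleanup, Criterion, Naming};

fn writer_from_cli(
    opt_dir: Option<String>,
    opt_rotate_size: Option<u64>,
    append: bool,
) -> FileLogWriter {
    FileLogWriter::builder()
        .o_directory(opt_dir)
        .o_rotate(opt_rotate_size.map(|size| {
            (Criterion::Size(size), Naming::Numbers, Cleanup::KeepLogFiles(10))
        }))
        .o_append(append)
        .try_build()
        .unwrap()
}
```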
+ pub fn default() -> Self { + Self { + print_message: false, + filename_config: FilenameConfig { + directory: PathBuf::from("."), + file_basename: String::new(), + suffix: "log".to_string(), + use_timestamp: true, + }, + append: false, + o_create_symlink: None, + use_windows_line_ending: false, + } + } +} diff --git a/agent/support/rust/flexi_logger/src/writers/file_log_writer/state.rs b/agent/support/rust/flexi_logger/src/writers/file_log_writer/state.rs new file mode 100644 index 000000000..62cdc0288 --- /dev/null +++ b/agent/support/rust/flexi_logger/src/writers/file_log_writer/state.rs @@ -0,0 +1,713 @@ +use crate::{Age, Cleanup, Criterion, FlexiLoggerError, Naming}; +use chrono::{DateTime, Datelike, Local, Timelike}; +use std::cmp::max; +use std::fs::{File, OpenOptions}; +use std::io::{BufRead, BufReader, Write}; +use std::ops::Add; +use std::path::{Path, PathBuf}; + +use super::{Config, FilenameConfig, RotationConfig}; + +const CURRENT_INFIX: &str = "_rCURRENT"; +fn number_infix(idx: u32) -> String { + format!("_r{:0>5}", idx) +} + +// Describes the latest existing numbered log file. +#[derive(Clone, Copy)] +enum IdxState { + // We rotate to numbered files, and no rotated numbered file exists yet + Start, + // highest index of rotated numbered files + Idx(u32), +} + +// Created_at is needed both for +// is_rotation_necessary() -> if Criterion::Age -> NamingState::CreatedAt +// and rotate_to_date() -> if Naming::Timestamps -> RollState::Age +enum NamingState { + CreatedAt, + IdxState(IdxState), +} + +enum RollState { + Size(u64, u64), // max_size, current_size + Age(Age), + AgeOrSize(Age, u64, u64), // age, max_size, current_size +} + +enum MessageToCleanupThread { + Act, + Die, +} +struct CleanupThreadHandle { + sender: std::sync::mpsc::Sender, + join_handle: std::thread::JoinHandle<()>, +} + +struct RotationState { + naming_state: NamingState, + roll_state: RollState, + created_at: DateTime, + cleanup: Cleanup, + o_cleanup_thread_handle: Option, +} +impl RotationState { + fn size_rotation_necessary(max_size: u64, current_size: u64) -> bool { + current_size > max_size + } + + fn age_rotation_necessary(&self, age: Age) -> bool { + let now = Local::now(); + match age { + Age::Day => self.created_at.num_days_from_ce() != now.num_days_from_ce(), + Age::Hour => { + self.created_at.num_days_from_ce() != now.num_days_from_ce() + || self.created_at.hour() != now.hour() + } + Age::Minute => { + self.created_at.num_days_from_ce() != now.num_days_from_ce() + || self.created_at.hour() != now.hour() + || self.created_at.minute() != now.minute() + } + Age::Second => { + self.created_at.num_days_from_ce() != now.num_days_from_ce() + || self.created_at.hour() != now.hour() + || self.created_at.minute() != now.minute() + || self.created_at.second() != now.second() + } + } + } + + fn rotation_necessary(&self) -> bool { + match &self.roll_state { + RollState::Size(max_size, current_size) => { + Self::size_rotation_necessary(*max_size, *current_size) + } + RollState::Age(age) => self.age_rotation_necessary(*age), + RollState::AgeOrSize(age, max_size, current_size) => { + Self::size_rotation_necessary(*max_size, *current_size) + || self.age_rotation_necessary(*age) + } + } + } + + fn shutdown(&mut self) { + // this sets o_cleanup_thread_handle in self.state.o_rotation_state to None: + let o_cleanup_thread_handle = self.o_cleanup_thread_handle.take(); + if let Some(cleanup_thread_handle) = o_cleanup_thread_handle { + cleanup_thread_handle + .sender + .send(MessageToCleanupThread::Die) + .ok(); + 
cleanup_thread_handle.join_handle.join().ok(); + } + } +} + +// Could not implement `std::convert::From` because other parameters are required. +fn try_roll_state_from_criterion( + criterion: Criterion, + config: &Config, + p_path: &Path, +) -> Result { + Ok(match criterion { + Criterion::Age(age) => RollState::Age(age), + Criterion::Size(size) => { + let written_bytes = if config.append { + std::fs::metadata(p_path)?.len() + } else { + 0 + }; + RollState::Size(size, written_bytes) + } // max_size, current_size + Criterion::AgeOrSize(age, size) => { + let written_bytes = if config.append { + std::fs::metadata(&p_path)?.len() + } else { + 0 + }; + RollState::AgeOrSize(age, size, written_bytes) + } // age, max_size, current_size + }) +} + +enum Inner { + Initial(Option, bool), + Active(Option, File), +} + +// The mutable state of a FileLogWriter. +pub(crate) struct State { + config: Config, + inner: Inner, +} +impl State { + pub fn try_new( + config: Config, + o_rotation_config: Option, + cleanup_in_background_thread: bool, + ) -> Result { + let mut state = Self { + inner: Inner::Initial(o_rotation_config, cleanup_in_background_thread), + config, + }; + if false { + // early initialize + state.initialize()?; + } + Ok(state) + } + + fn initialize(&mut self) -> Result<(), std::io::Error> { + if let Inner::Initial(o_rotation_config, cleanup_in_background_thread) = &self.inner { + match o_rotation_config { + None => { + let (log_file, _created_at, _p_path) = open_log_file(&self.config, false)?; + self.inner = Inner::Active(None, log_file); + } + Some(rotate_config) => { + // first rotate, then open the log file + let naming_state = match rotate_config.naming { + Naming::Timestamps => { + if !self.config.append { + rotate_output_file_to_date( + &get_creation_date(&get_filepath( + Some(CURRENT_INFIX), + &self.config.filename_config, + )), + &self.config, + )?; + } + NamingState::CreatedAt + } + Naming::Numbers => { + let mut rotation_state = + get_highest_rotate_idx(&self.config.filename_config); + if !self.config.append { + rotation_state = + rotate_output_file_to_idx(rotation_state, &self.config)?; + } + NamingState::IdxState(rotation_state) + } + }; + let (log_file, created_at, p_path) = open_log_file(&self.config, true)?; + + let roll_state = try_roll_state_from_criterion( + rotate_config.criterion, + &self.config, + &p_path, + )?; + let mut o_cleanup_thread_handle = None; + if rotate_config.cleanup.do_cleanup() { + remove_or_compress_too_old_logfiles( + &None, + &rotate_config.cleanup, + &self.config.filename_config, + )?; + if *cleanup_in_background_thread { + let cleanup = rotate_config.cleanup; + let filename_config = self.config.filename_config.clone(); + let (sender, receiver) = std::sync::mpsc::channel(); + let join_handle = std::thread::Builder::new() + .name("flexi_logger-cleanup".to_string()) + .stack_size(512 * 1024) + .spawn(move || loop { + match receiver.recv() { + Ok(MessageToCleanupThread::Act) => { + remove_or_compress_too_old_logfiles_impl( + &cleanup, + &filename_config, + ) + .ok(); + } + Ok(MessageToCleanupThread::Die) | Err(_) => { + return; + } + } + })?; + // .map_err(FlexiLoggerError::OutputCleanupThread)?; + o_cleanup_thread_handle = Some(CleanupThreadHandle { + sender, + join_handle, + }); + } + } + self.inner = Inner::Active( + Some(RotationState { + naming_state, + roll_state, + created_at, + cleanup: rotate_config.cleanup, + o_cleanup_thread_handle, + }), + log_file, + ); + } + } + } + Ok(()) + } + + pub fn flush(&mut self) -> std::io::Result<()> { + if let 
Inner::Active(_, ref mut file) = self.inner { + file.flush() + } else { + Ok(()) + } + } + + // With rotation, the logger always writes into a file with infix `_rCURRENT`. + // On overflow, an existing `_rCURRENT` file is renamed to the next numbered file, + // before writing into `_rCURRENT` goes on. + #[inline] + fn mount_next_linewriter_if_necessary(&mut self) -> Result<(), FlexiLoggerError> { + if let Inner::Active(Some(ref mut rotation_state), ref mut file) = self.inner { + if rotation_state.rotation_necessary() { + match rotation_state.naming_state { + NamingState::CreatedAt => { + rotate_output_file_to_date(&rotation_state.created_at, &self.config)?; + } + NamingState::IdxState(ref mut idx_state) => { + *idx_state = rotate_output_file_to_idx(*idx_state, &self.config)?; + } + } + + let (line_writer, created_at, _) = open_log_file(&self.config, true)?; + *file = line_writer; + rotation_state.created_at = created_at; + if let RollState::Size(_, ref mut current_size) + | RollState::AgeOrSize(_, _, ref mut current_size) = rotation_state.roll_state + { + *current_size = 0; + } + + remove_or_compress_too_old_logfiles( + &rotation_state.o_cleanup_thread_handle, + &rotation_state.cleanup, + &self.config.filename_config, + )?; + } + } + + Ok(()) + } + + pub fn write_buffer(&mut self, buf: &[u8]) -> std::io::Result<()> { + if let Inner::Initial(_, _) = self.inner { + self.initialize()?; + } + // rotate if necessary + self.mount_next_linewriter_if_necessary() + .unwrap_or_else(|e| { + eprintln!("[flexi_logger] opening file failed with {}", e); + }); + + if let Inner::Active(ref mut o_rotation_state, ref mut log_file) = self.inner { + log_file.write_all(buf)?; + if let Some(ref mut rotation_state) = o_rotation_state { + if let RollState::Size(_, ref mut current_size) + | RollState::AgeOrSize(_, _, ref mut current_size) = rotation_state.roll_state + { + *current_size += buf.len() as u64; + } + }; + } + Ok(()) + } + + pub fn current_filename(&self) -> PathBuf { + let o_infix = match &self.inner { + Inner::Initial(o_rotation_config, _) => { + if o_rotation_config.is_some() { + Some(CURRENT_INFIX) + } else { + None + } + } + Inner::Active(o_rotation_state, _) => { + if o_rotation_state.is_some() { + Some(CURRENT_INFIX) + } else { + None + } + } + }; + get_filepath(o_infix, &self.config.filename_config) + } + + pub fn validate_logs(&mut self, expected: &[(&'static str, &'static str, &'static str)]) { + if let Inner::Initial(_, _) = self.inner { + self.initialize().unwrap(); + } + if let Inner::Active(ref mut o_rotation_state, _) = self.inner { + let path = get_filepath( + o_rotation_state + .as_ref() + .map(|_| super::state::CURRENT_INFIX), + &self.config.filename_config, + ); + let f = File::open(path).unwrap(); + let mut reader = BufReader::new(f); + let mut buf = String::new(); + for tuple in expected { + buf.clear(); + reader.read_line(&mut buf).unwrap(); + assert!(buf.contains(&tuple.0), "Did not find tuple.0 = {}", tuple.0); + assert!(buf.contains(&tuple.1), "Did not find tuple.1 = {}", tuple.1); + assert!(buf.contains(&tuple.2), "Did not find tuple.2 = {}", tuple.2); + } + buf.clear(); + reader.read_line(&mut buf).unwrap(); + assert!( + buf.is_empty(), + "Found more log lines than expected: {} ", + buf + ); + } + } + + pub fn shutdown(&mut self) { + if let Inner::Active(ref mut o_rotation_state, _) = self.inner { + if let Some(ref mut rotation_state) = o_rotation_state { + rotation_state.shutdown(); + } + } + } +} + +fn get_filepath(o_infix: Option<&str>, config: &FilenameConfig) -> PathBuf { 
+ let mut s_filename = String::with_capacity( + config.file_basename.len() + o_infix.map_or(0, str::len) + 1 + config.suffix.len(), + ) + &config.file_basename; + if let Some(infix) = o_infix { + s_filename += infix; + }; + s_filename += "."; + s_filename += &config.suffix; + let mut p_path = config.directory.to_path_buf(); + p_path.push(s_filename); + p_path +} + +fn open_log_file( + config: &Config, + with_rotation: bool, +) -> Result<(File, DateTime, PathBuf), std::io::Error> { + let o_infix = if with_rotation { + Some(CURRENT_INFIX) + } else { + None + }; + let p_path = get_filepath(o_infix, &config.filename_config); + if config.print_message { + println!("Log is written to {}", &p_path.display()); + } + if let Some(ref link) = config.o_create_symlink { + self::platform::create_symlink_if_possible(link, &p_path); + } + + let log_file = OpenOptions::new() + .write(true) + .create(true) + .append(config.append) + .truncate(!config.append) + .open(&p_path)?; + + Ok((log_file, get_creation_date(&p_path), p_path)) +} + +fn get_highest_rotate_idx(filename_config: &FilenameConfig) -> IdxState { + match list_of_log_and_compressed_files(filename_config) { + Err(e) => { + eprintln!("[flexi_logger] listing rotated log files failed with {}", e); + IdxState::Start // hope and pray ...?? + } + Ok(files) => { + let mut highest_idx = IdxState::Start; + for file in files { + let filename = file.file_stem().unwrap(/*ok*/).to_string_lossy(); + let mut it = filename.rsplit("_r"); + match it.next() { + Some(next) => { + let idx: u32 = next.parse().unwrap_or(0); + highest_idx = match highest_idx { + IdxState::Start => IdxState::Idx(idx), + IdxState::Idx(prev) => IdxState::Idx(max(prev, idx)), + }; + } + None => continue, // ignore unexpected files + } + } + highest_idx + } + } +} + +#[allow(clippy::type_complexity)] +fn list_of_log_and_compressed_files( + filename_config: &FilenameConfig, +) -> std::result::Result< + std::iter::Chain< + std::iter::Chain< + std::vec::IntoIter, + std::vec::IntoIter, + >, + std::vec::IntoIter, + >, + std::io::Error, +> { + let fn_pattern = String::with_capacity(180) + .add(&filename_config.file_basename) + .add("_r[0-9]*") + .add("."); + + let mut log_pattern = filename_config.directory.clone(); + log_pattern.push(fn_pattern.clone().add(&filename_config.suffix)); + let log_pattern = log_pattern.as_os_str().to_string_lossy(); + + let mut zip_pattern = filename_config.directory.clone(); + zip_pattern.push(fn_pattern.clone().add("zip")); + let zip_pattern = zip_pattern.as_os_str().to_string_lossy(); + + let mut gz_pattern = filename_config.directory.clone(); + gz_pattern.push(fn_pattern.add("gz")); + let gz_pattern = gz_pattern.as_os_str().to_string_lossy(); + + Ok(list_of_files(&log_pattern) + .chain(list_of_files(&gz_pattern)) + .chain(list_of_files(&zip_pattern))) +} + +fn list_of_files(pattern: &str) -> std::vec::IntoIter { + let mut log_files: Vec = glob::glob(pattern) + .unwrap(/* failure should be impossible */) + .filter_map(Result::ok) + .collect(); + log_files.reverse(); + log_files.into_iter() +} + +fn remove_or_compress_too_old_logfiles( + o_cleanup_thread_handle: &Option, + cleanup_config: &Cleanup, + filename_config: &FilenameConfig, +) -> Result<(), std::io::Error> { + o_cleanup_thread_handle.as_ref().map_or( + remove_or_compress_too_old_logfiles_impl(cleanup_config, filename_config), + |cleanup_thread_handle| { + cleanup_thread_handle + .sender + .send(MessageToCleanupThread::Act) + .ok(); + Ok(()) + }, + ) +} + +fn remove_or_compress_too_old_logfiles_impl( + 
cleanup_config: &Cleanup, + filename_config: &FilenameConfig, +) -> Result<(), std::io::Error> { + let (log_limit, compress_limit) = match *cleanup_config { + Cleanup::Never => { + return Ok(()); + } + Cleanup::KeepLogFiles(log_limit) => (log_limit, 0), + + #[cfg(feature = "compress")] + #[allow(deprecated)] + Cleanup::KeepCompressedFiles(compress_limit) | Cleanup::KeepZipFiles(compress_limit) => { + (0, compress_limit) + } + + #[cfg(feature = "compress")] + #[allow(deprecated)] + Cleanup::KeepLogAndCompressedFiles(log_limit, compress_limit) + | Cleanup::KeepLogAndZipFiles(log_limit, compress_limit) => (log_limit, compress_limit), + }; + + for (index, file) in list_of_log_and_compressed_files(&filename_config)?.enumerate() { + if index >= log_limit + compress_limit { + // delete (log or log.gz) + std::fs::remove_file(&file)?; + } else if index >= log_limit { + #[cfg(feature = "compress")] + { + // compress, if not yet compressed + if let Some(extension) = file.extension() { + if extension != "gz" { + let mut old_file = File::open(file.clone())?; + let mut compressed_file = file.clone(); + compressed_file.set_extension("log.gz"); + let mut gz_encoder = flate2::write::GzEncoder::new( + File::create(compressed_file)?, + flate2::Compression::fast(), + ); + std::io::copy(&mut old_file, &mut gz_encoder)?; + gz_encoder.finish()?; + std::fs::remove_file(&file)?; + } + } + } + } + } + + Ok(()) +} + +// Moves the current file to the timestamp of the CURRENT file's creation date. +// If the rotation comes very fast, the new timestamp would be equal to the old one. +// To avoid file collisions, we insert an additional string to the filename (".restart-"). +// The number is incremented in case of repeated collisions. +// Cleaning up can leave some restart-files with higher numbers; if we still are in the same +// second, we need to continue with the restart-incrementing. 
+fn rotate_output_file_to_date(
+    creation_date: &DateTime<Local>,
+    config: &Config,
+) -> Result<(), std::io::Error> {
+    let current_path = get_filepath(Some(CURRENT_INFIX), &config.filename_config);
+
+    let mut rotated_path = get_filepath(
+        Some(&creation_date.format("_r%Y-%m-%d_%H-%M-%S").to_string()),
+        &config.filename_config,
+    );
+
+    // Search for rotated_path as is and for restart-siblings;
+    // if any exists, find highest restart and add 1, else continue without restart
+    let mut pattern = rotated_path.clone();
+    pattern.set_extension("");
+    let mut pattern = pattern.to_string_lossy().to_string();
+    pattern.push_str(".restart-*");
+
+    let file_list = glob::glob(&pattern).unwrap(/*ok*/);
+    let mut vec: Vec<PathBuf> = file_list.map(Result::unwrap).collect();
+    vec.sort_unstable();
+
+    if (*rotated_path).exists() || !vec.is_empty() {
+        let mut number = if vec.is_empty() {
+            0
+        } else {
+            rotated_path = vec.pop().unwrap(/*Ok*/);
+            let file_stem = rotated_path
+                .file_stem()
+                .unwrap(/*ok*/)
+                .to_string_lossy()
+                .to_string();
+            let index = file_stem.find(".restart-").unwrap();
+            file_stem[(index + 9)..].parse::<usize>().unwrap()
+        };
+
+        while (*rotated_path).exists() {
+            rotated_path = get_filepath(
+                Some(
+                    &creation_date
+                        .format("_r%Y-%m-%d_%H-%M-%S")
+                        .to_string()
+                        .add(&format!(".restart-{:04}", number)),
+                ),
+                &config.filename_config,
+            );
+            number += 1;
+        }
+    }
+
+    match std::fs::rename(&current_path, &rotated_path) {
+        Ok(()) => Ok(()),
+        Err(e) => {
+            if e.kind() == std::io::ErrorKind::NotFound {
+                // current did not exist, so we had nothing to do
+                Ok(())
+            } else {
+                Err(e)
+            }
+        }
+    }
+}
+
+// Moves the current file to the name with the next rotate_idx and returns the next rotate_idx.
+// The current file must be closed already.
+fn rotate_output_file_to_idx(
+    idx_state: IdxState,
+    config: &Config,
+) -> Result<IdxState, std::io::Error> {
+    let new_idx = match idx_state {
+        IdxState::Start => 0,
+        IdxState::Idx(idx) => idx + 1,
+    };
+
+    match std::fs::rename(
+        get_filepath(Some(CURRENT_INFIX), &config.filename_config),
+        get_filepath(Some(&number_infix(new_idx)), &config.filename_config),
+    ) {
+        Ok(()) => Ok(IdxState::Idx(new_idx)),
+        Err(e) => {
+            if e.kind() == std::io::ErrorKind::NotFound {
+                // current did not exist, so we had nothing to do
+                Ok(idx_state)
+            } else {
+                Err(e)
+            }
+        }
+    }
+}
+
+// See documentation of Criterion::Age.
+#[allow(unused_variables)]
+fn get_creation_date(path: &PathBuf) -> DateTime<Local> {
+    // On windows, we know that try_get_creation_date() returns a result, but it is wrong.
+    // On linux, we know that try_get_creation_date() returns an error.
+    #[cfg(any(target_os = "windows", target_os = "linux"))]
+    return get_fake_creation_date();
+
+    // On all others of the many platforms, we give the real creation date a try,
+    // and fall back to the fake if it is not available.
+ #[cfg(not(any(target_os = "windows", target_os = "linux")))] + match try_get_creation_date(path) { + Ok(d) => d, + Err(e) => get_fake_creation_date(), + } +} + +fn get_fake_creation_date() -> DateTime { + Local::now() +} + +#[cfg(not(any(target_os = "windows", target_os = "linux")))] +fn try_get_creation_date(path: &PathBuf) -> Result, FlexiLoggerError> { + Ok(std::fs::metadata(path)?.created()?.into()) +} + +mod platform { + use std::path::{Path, PathBuf}; + + pub fn create_symlink_if_possible(link: &PathBuf, path: &Path) { + linux_create_symlink(link, path); + } + + #[cfg(target_os = "linux")] + fn linux_create_symlink(link: &PathBuf, logfile: &Path) { + if std::fs::symlink_metadata(link).is_ok() { + // remove old symlink before creating a new one + if let Err(e) = std::fs::remove_file(link) { + eprintln!( + "[flexi_logger] deleting old symlink to log file failed with {:?}", + e + ); + } + } + + // create new symlink + if let Err(e) = std::os::unix::fs::symlink(&logfile, link) { + eprintln!( + "[flexi_logger] cannot create symlink {:?} for logfile \"{}\" due to {:?}", + link, + &logfile.display(), + e + ); + } + } + + #[cfg(not(target_os = "linux"))] + fn linux_create_symlink(_: &PathBuf, _: &Path) {} +} diff --git a/agent/support/rust/flexi_logger/src/writers/log_writer.rs b/agent/support/rust/flexi_logger/src/writers/log_writer.rs new file mode 100644 index 000000000..ec4ebbf86 --- /dev/null +++ b/agent/support/rust/flexi_logger/src/writers/log_writer.rs @@ -0,0 +1,49 @@ +use crate::deferred_now::DeferredNow; +use crate::FormatFunction; +use log::Record; + +/// Writes to a single log output stream. +/// +/// Boxed instances of `LogWriter` can be used as additional log targets +/// (see [module description](index.html) for more details). +pub trait LogWriter: Sync + Send { + /// Writes out a log line. + /// + /// # Errors + /// + /// `std::io::Error` + fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()>; + + /// Flushes any buffered records. + /// + /// # Errors + /// + /// `std::io::Error` + fn flush(&self) -> std::io::Result<()>; + + /// Provides the maximum log level that is to be written. + fn max_log_level(&self) -> log::LevelFilter; + + /// Sets the format function. + /// Defaults to ([`formats::default_format`](fn.default_format.html)), + /// but can be changed with a call to + /// [`Logger::format_for_writer`](struct.Logger.html#method.format_for_writer). + /// + /// The default implementation is a no-op. + fn format(&mut self, format: FormatFunction) { + let _ = format; + } + + /// Cleanup open resources, if necessary. + fn shutdown(&self) {} + + /// Takes a vec with three patterns per line that represent the log out, + /// compares the written log with the expected lines, + /// and asserts that both are in sync. + /// + /// This function is not meant for productive code, only for tests. 
+ #[doc(hidden)] + fn validate_logs(&self, _expected: &[(&'static str, &'static str, &'static str)]) { + unimplemented!("only useful for tests"); + } +} diff --git a/agent/support/rust/flexi_logger/src/writers/syslog_writer.rs b/agent/support/rust/flexi_logger/src/writers/syslog_writer.rs new file mode 100644 index 000000000..c389abc51 --- /dev/null +++ b/agent/support/rust/flexi_logger/src/writers/syslog_writer.rs @@ -0,0 +1,321 @@ +use crate::deferred_now::DeferredNow; +use crate::writers::log_writer::LogWriter; +use std::cell::RefCell; +use std::ffi::OsString; +use std::io::Error as IoError; +use std::io::Result as IoResult; +use std::io::{BufWriter, ErrorKind, Write}; +use std::net::{TcpStream, ToSocketAddrs, UdpSocket}; +#[cfg(target_os = "linux")] +use std::path::Path; +use std::sync::Mutex; + +/// Syslog Facility. +/// +/// See [RFC 5424](https://datatracker.ietf.org/doc/rfc5424). +#[derive(Copy, Clone, Debug)] +pub enum SyslogFacility { + /// kernel messages. + Kernel = 0 << 3, + /// user-level messages. + UserLevel = 1 << 3, + /// mail system. + MailSystem = 2 << 3, + /// system daemons. + SystemDaemons = 3 << 3, + /// security/authorization messages. + Authorization = 4 << 3, + /// messages generated internally by syslogd. + SyslogD = 5 << 3, + /// line printer subsystem. + LinePrinter = 6 << 3, + /// network news subsystem. + News = 7 << 3, + /// UUCP subsystem. + Uucp = 8 << 3, + /// clock daemon. + Clock = 9 << 3, + /// security/authorization messages. + Authorization2 = 10 << 3, + /// FTP daemon. + Ftp = 11 << 3, + /// NTP subsystem. + Ntp = 12 << 3, + /// log audit. + LogAudit = 13 << 3, + /// log alert. + LogAlert = 14 << 3, + /// clock daemon (note 2). + Clock2 = 15 << 3, + /// local use 0 (local0). + LocalUse0 = 16 << 3, + /// local use 1 (local1). + LocalUse1 = 17 << 3, + /// local use 2 (local2). + LocalUse2 = 18 << 3, + /// local use 3 (local3). + LocalUse3 = 19 << 3, + /// local use 4 (local4). + LocalUse4 = 20 << 3, + /// local use 5 (local5). + LocalUse5 = 21 << 3, + /// local use 6 (local6). + LocalUse6 = 22 << 3, + /// local use 7 (local7). + LocalUse7 = 23 << 3, +} + +/// `SyslogConnector`'s severity. +/// +/// See [RFC 5424](https://datatracker.ietf.org/doc/rfc5424). +#[derive(Debug)] +pub enum SyslogSeverity { + /// System is unusable. + Emergency = 0, + /// Action must be taken immediately. + Alert = 1, + /// Critical conditions. + Critical = 2, + /// Error conditions. + Error = 3, + /// Warning conditions + Warning = 4, + /// Normal but significant condition + Notice = 5, + /// Informational messages. + Info = 6, + /// Debug-level messages. + Debug = 7, +} + +/// Signature for a custom mapping function that maps the rust log levels to +/// values of the syslog Severity. +pub type LevelToSyslogSeverity = fn(level: log::Level) -> SyslogSeverity; + +fn default_mapping(level: log::Level) -> SyslogSeverity { + match level { + log::Level::Error => SyslogSeverity::Error, + log::Level::Warn => SyslogSeverity::Warning, + log::Level::Info => SyslogSeverity::Info, + log::Level::Debug | log::Level::Trace => SyslogSeverity::Debug, + } +} + +/// An experimental configurable `LogWriter` implementation that writes log messages to the syslog +/// (see [RFC 5424](https://datatracker.ietf.org/doc/rfc5424)). +/// +/// Only available with optional crate feature `syslog_writer`. 
+///
+/// For using the `SyslogWriter`, you need to know how the syslog is managed on your system,
+/// how you can access it and with which protocol you can write to it,
+/// so that you can choose a variant of the `SyslogConnector` that fits to your environment.
+///
+/// See the [module description](index.html) for guidance how to use additional log writers.
+pub struct SyslogWriter {
+    hostname: OsString,
+    process: String,
+    pid: u32,
+    facility: SyslogFacility,
+    message_id: String,
+    determine_severity: LevelToSyslogSeverity,
+    syslog: Mutex<RefCell<SyslogConnector>>,
+    max_log_level: log::LevelFilter,
+}
+impl SyslogWriter {
+    /// Returns a configured boxed instance.
+    ///
+    /// ## Parameters
+    ///
+    /// `facility`: A value representing a valid syslog facility according to RFC 5424.
+    ///
+    /// `determine_severity`: (optional) A function that maps the rust log levels
+    /// to the syslog severities. If None is given, a trivial default mapping is used, which
+    /// should be good enough in most cases.
+    ///
+    /// `message_id`: The value being used as syslog's MSGID, which
+    /// should identify the type of message. The value itself
+    /// is a string without further semantics. It is intended for filtering
+    /// messages on a relay or collector.
+    ///
+    /// `syslog`: A [`SyslogConnector`](enum.SyslogConnector.html).
+    ///
+    /// # Errors
+    ///
+    /// `std::io::Error`
+    pub fn try_new(
+        facility: SyslogFacility,
+        determine_severity: Option<LevelToSyslogSeverity>,
+        max_log_level: log::LevelFilter,
+        message_id: String,
+        syslog: SyslogConnector,
+    ) -> IoResult<Box<Self>> {
+        Ok(Box::new(Self {
+            hostname: hostname::get().unwrap_or_else(|_| OsString::from("")),
+            process: std::env::args()
+                .next()
+                .ok_or_else(|| IoError::new(ErrorKind::Other, "".to_owned()))?,
+            pid: std::process::id(),
+            facility,
+            max_log_level,
+            message_id,
+            determine_severity: determine_severity.unwrap_or_else(|| default_mapping),
+            syslog: Mutex::new(RefCell::new(syslog)),
+        }))
+    }
+}
+
+impl LogWriter for SyslogWriter {
+    fn write(&self, now: &mut DeferredNow, record: &log::Record) -> IoResult<()> {
+        let mr_syslog = self.syslog.lock().unwrap();
+        let mut syslog = mr_syslog.borrow_mut();
+
+        let severity = (self.determine_severity)(record.level());
+        write!(
+            syslog,
+            "{}",
+            format!(
+                "<{}>1 {} {:?} {} {} {} - {}\n",
+                self.facility as u8 | severity as u8,
+                now.now()
+                    .to_rfc3339_opts(chrono::SecondsFormat::Micros, false),
+                self.hostname,
+                self.process,
+                self.pid,
+                self.message_id,
+                &record.args()
+            )
+        )
+    }
+
+    fn flush(&self) -> IoResult<()> {
+        let mr_syslog = self.syslog.lock().unwrap();
+        let mut syslog = mr_syslog.borrow_mut();
+        syslog.flush()?;
+        Ok(())
+    }
+
+    fn max_log_level(&self) -> log::LevelFilter {
+        self.max_log_level
+    }
+}
+
+/// Helper struct that connects to the syslog and implements Write.
+///
+/// Is used in [`SyslogWriter::try_new`](struct.SyslogWriter.html#method.try_new).
+///
+/// ## Example
+///
+/// ```rust,no_run
+/// use flexi_logger::writers::SyslogConnector;
+/// let syslog_connector = SyslogConnector::try_tcp("localhost:7777").unwrap();
+/// ```
+///
+#[derive(Debug)]
+pub enum SyslogConnector {
+    /// Sends log lines to the syslog via a
+    /// [UnixStream](https://doc.rust-lang.org/std/os/unix/net/struct.UnixStream.html).
+    ///
+    /// Is only available on linux.
+    #[cfg(target_os = "linux")]
+    Stream(BufWriter<std::os::unix::net::UnixStream>),
+
+    /// Sends log lines to the syslog via a
+    /// [UnixDatagram](https://doc.rust-lang.org/std/os/unix/net/struct.UnixDatagram.html).
+    ///
+    /// Is only available on linux.
+    #[cfg(target_os = "linux")]
+    Datagram(std::os::unix::net::UnixDatagram),
+
+    /// Sends log lines to the syslog via UDP.
+    ///
+    /// UDP is fragile and thus discouraged except for local communication.
+    Udp(UdpSocket),
+
+    /// Sends log lines to the syslog via TCP.
+    Tcp(BufWriter<TcpStream>),
+}
+impl SyslogConnector {
+    /// Returns a `SyslogConnector::Datagram` to the specified path.
+    ///
+    /// Is only available on linux.
+    #[cfg(target_os = "linux")]
+    pub fn try_datagram<P: AsRef<Path>>(path: P) -> IoResult<Self> {
+        let ud = std::os::unix::net::UnixDatagram::unbound()?;
+        ud.connect(&path)?;
+        Ok(SyslogConnector::Datagram(ud))
+    }
+
+    /// Returns a `SyslogConnector::Stream` to the specified path.
+    ///
+    /// Is only available on linux.
+    #[cfg(target_os = "linux")]
+    pub fn try_stream<P: AsRef<Path>>(path: P) -> IoResult<Self> {
+        Ok(SyslogConnector::Stream(BufWriter::new(
+            std::os::unix::net::UnixStream::connect(path)?,
+        )))
+    }
+
+    /// Returns a `SyslogConnector` which sends the log lines via TCP to the specified address.
+    ///
+    /// # Errors
+    ///
+    /// `std::io::Error` if opening the stream fails.
+    pub fn try_tcp<T: ToSocketAddrs>(server: T) -> IoResult<Self> {
+        Ok(Self::Tcp(BufWriter::new(TcpStream::connect(server)?)))
+    }
+
+    /// Returns a `SyslogConnector` which sends the log lines via the fragile UDP protocol from local to server.
+    ///
+    /// # Errors
+    ///
+    /// `std::io::Error` if opening the stream fails.
+    pub fn try_udp<T: ToSocketAddrs>(local: T, server: T) -> IoResult<Self> {
+        let socket = UdpSocket::bind(local)?;
+        socket.connect(server)?;
+        Ok(Self::Udp(socket))
+    }
+}
+
+impl Write for SyslogConnector {
+    fn write(&mut self, message: &[u8]) -> IoResult<usize> {
+        // eprintln!(
+        //     "syslog: got message \"{}\" ",
+        //     String::from_utf8_lossy(message)
+        // );
+        match *self {
+            #[cfg(target_os = "linux")]
+            Self::Datagram(ref ud) => {
+                // todo: reconnect of conn is broken
+                ud.send(&message[..])
+            }
+            #[cfg(target_os = "linux")]
+            Self::Stream(ref mut w) => {
+                // todo: reconnect of conn is broken
+                w.write(&message[..])
+                    .and_then(|sz| w.write_all(&[0; 1]).map(|_| sz))
+            }
+            Self::Tcp(ref mut w) => {
+                // todo: reconnect of conn is broken
+                w.write(&message[..])
+            }
+            Self::Udp(ref socket) => {
+                // ??
+ socket.send(&message[..]) + } + } + } + + fn flush(&mut self) -> IoResult<()> { + match *self { + #[cfg(target_os = "linux")] + Self::Datagram(_) => Ok(()), + + #[cfg(target_os = "linux")] + Self::Stream(ref mut w) => w.flush(), + + Self::Udp(_) => Ok(()), + + Self::Tcp(ref mut w) => w.flush(), + } + } +} diff --git a/agent/support/rust/flexi_logger/tests/test_age_or_size.rs b/agent/support/rust/flexi_logger/tests/test_age_or_size.rs new file mode 100644 index 000000000..76c374e18 --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/test_age_or_size.rs @@ -0,0 +1,109 @@ +use chrono::Local; +use flexi_logger::{Age, Cleanup, Criterion, Duplicate, Logger, Naming}; +use glob::glob; +use log::*; +use std::fs::File; +use std::io::{BufRead, BufReader}; +use std::ops::Add; + +#[test] +fn test_age_or_size() { + let directory = define_directory(); + Logger::with_str("trace") + .log_to_file() + .duplicate_to_stderr(Duplicate::Info) + .directory(directory.clone()) + .rotate( + Criterion::AgeOrSize(Age::Second, 80), + Naming::Numbers, + Cleanup::Never, + ) + .start() + .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); + // info!("test correct rotation by age or size"); + + write_log_lines(); + + verify_logs(&directory); +} + +fn write_log_lines() { + // Fill first three files by size + trace!("{}", 'a'); + trace!("{}", 'b'); + trace!("{}", 'c'); + + trace!("{}", 'd'); + trace!("{}", 'e'); + trace!("{}", 'f'); + + trace!("{}", 'g'); + trace!("{}", 'h'); + trace!("{}", 'i'); + + trace!("{}", 'j'); + + // now wait to enforce a rotation with a smaller file + std::thread::sleep(std::time::Duration::from_secs(2)); + trace!("{}", 'k'); + + // now wait to enforce a rotation with a smaller file + std::thread::sleep(std::time::Duration::from_secs(2)); + trace!("{}", 'l'); + + // then again fill a file by size + trace!("{}", 'm'); + trace!("{}", 'n'); + + // and do the final rotation: + trace!("{}", 'o'); + + // trace!("{}",'p'); + // trace!("{}",'q'); + // trace!("{}",'r'); + // trace!("{}",'s'); + // trace!("{}",'t'); +} + +fn define_directory() -> String { + format!( + "./log_files/age_or_size/{}", + Local::now().format("%Y-%m-%d_%H-%M-%S") + ) +} + +fn verify_logs(directory: &str) { + let expected_line_counts = [3, 3, 3, 1, 1, 3, 1]; + // read all files + let pattern = String::from(directory).add("/*"); + let globresults = match glob(&pattern) { + Err(e) => panic!( + "Is this ({}) really a directory? 
Listing failed with {}", + pattern, e + ), + Ok(globresults) => globresults, + }; + let mut no_of_log_files = 0; + let mut total_line_count = 0_usize; + for (index, globresult) in globresults.into_iter().enumerate() { + let mut line_count = 0_usize; + let pathbuf = globresult.unwrap_or_else(|e| panic!("Ups - error occured: {}", e)); + let f = File::open(&pathbuf) + .unwrap_or_else(|e| panic!("Cannot open file {:?} due to {}", pathbuf, e)); + no_of_log_files += 1; + let mut reader = BufReader::new(f); + let mut buffer = String::new(); + while reader.read_line(&mut buffer).unwrap() > 0 { + line_count += 1; + buffer.clear(); + } + assert_eq!( + line_count, expected_line_counts[index], + "file has wrong size" + ); + total_line_count += line_count; + } + + assert_eq!(no_of_log_files, 7, "wrong file count"); + assert_eq!(total_line_count, 15, "wrong line count!"); +} diff --git a/agent/support/rust/flexi_logger/tests/test_colors.rs b/agent/support/rust/flexi_logger/tests/test_colors.rs new file mode 100644 index 000000000..9037446ab --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/test_colors.rs @@ -0,0 +1,16 @@ +use flexi_logger::{LogTarget, Logger}; +use log::*; + +#[test] +fn test_mods() { + Logger::with_str("trace") + .log_target(LogTarget::StdOut) + .start() + .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); + + error!("This is an error message"); + warn!("This is a warning"); + info!("This is an info message"); + debug!("This is a debug message"); + trace!("This is a trace message"); +} diff --git a/agent/support/rust/flexi_logger/tests/test_custom_log_writer.rs b/agent/support/rust/flexi_logger/tests/test_custom_log_writer.rs new file mode 100644 index 000000000..f01c669a5 --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/test_custom_log_writer.rs @@ -0,0 +1,62 @@ +use std::sync::Mutex; + +use flexi_logger::writers::LogWriter; +use flexi_logger::{default_format, DeferredNow, LogTarget, Logger}; +use log::*; + +pub struct CustomWriter { + data: Mutex>, +} + +impl LogWriter for CustomWriter { + fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()> { + let mut data = self.data.lock().unwrap(); + default_format(&mut *data, now, record) + } + + fn flush(&self) -> std::io::Result<()> { + Ok(()) + } + + fn max_log_level(&self) -> log::LevelFilter { + log::LevelFilter::Trace + } + + fn validate_logs(&self, expected: &[(&'static str, &'static str, &'static str)]) { + let data = self.data.lock().unwrap(); + let expected_data = + expected + .iter() + .fold(Vec::new(), |mut acc, (level, module, message)| { + acc.extend(format!("{} [{}] {}", level, module, message).bytes()); + acc + }); + assert_eq!(*data, expected_data); + } +} + +#[test] +fn test_custom_log_writer() { + let handle = Logger::with_str("info") + .log_target(LogTarget::Writer(Box::new(CustomWriter { + data: Mutex::new(Vec::new()), + }))) + .start() + .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); + + error!("This is an error message"); + warn!("This is a warning"); + info!("This is an info message"); + debug!("This is a debug message - you must not see it!"); + trace!("This is a trace message - you must not see it!"); + + handle.validate_logs(&[ + ( + "ERROR", + "test_custom_log_writer", + "This is an error message", + ), + ("WARN", "test_custom_log_writer", "This is a warning"), + ("INFO", "test_custom_log_writer", "This is an info message"), + ]); +} diff --git a/agent/support/rust/flexi_logger/tests/test_custom_log_writer_format.rs 
b/agent/support/rust/flexi_logger/tests/test_custom_log_writer_format.rs new file mode 100644 index 000000000..9d675016e --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/test_custom_log_writer_format.rs @@ -0,0 +1,78 @@ +use std::sync::Mutex; + +use flexi_logger::writers::LogWriter; +use flexi_logger::{default_format, DeferredNow, FormatFunction, LogTarget, Logger}; +use log::*; + +pub struct CustomWriter { + data: Mutex>, + format: FormatFunction, +} + +impl LogWriter for CustomWriter { + fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()> { + let mut data = self.data.lock().unwrap(); + (self.format)(&mut *data, now, record) + } + + fn flush(&self) -> std::io::Result<()> { + Ok(()) + } + + fn format(&mut self, format: FormatFunction) { + self.format = format; + } + + fn max_log_level(&self) -> log::LevelFilter { + log::LevelFilter::Trace + } + + fn validate_logs(&self, expected: &[(&'static str, &'static str, &'static str)]) { + let data = self.data.lock().unwrap(); + let expected_data = + expected + .iter() + .fold(Vec::new(), |mut acc, (level, _module, message)| { + acc.extend(format!("{}: {}", level, message).bytes()); + acc + }); + assert_eq!(*data, expected_data); + } +} + +fn custom_format( + writer: &mut dyn std::io::Write, + _now: &mut DeferredNow, + record: &Record, +) -> Result<(), std::io::Error> { + // Only write the message and the level, without the module + write!(writer, "{}: {}", record.level(), &record.args()) +} + +#[test] +fn test_custom_log_writer_custom_format() { + let handle = Logger::with_str("info") + .log_target(LogTarget::Writer(Box::new(CustomWriter { + data: Mutex::new(Vec::new()), + format: default_format, + }))) + .format(custom_format) + .start() + .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); + + error!("This is an error message"); + warn!("This is a warning"); + info!("This is an info message"); + debug!("This is a debug message - you must not see it!"); + trace!("This is a trace message - you must not see it!"); + + handle.validate_logs(&[ + ( + "ERROR", + "test_custom_log_writer", + "This is an error message", + ), + ("WARN", "test_custom_log_writer", "This is a warning"), + ("INFO", "test_custom_log_writer", "This is an info message"), + ]); +} diff --git a/agent/support/rust/flexi_logger/tests/test_default_file_and_writer.rs b/agent/support/rust/flexi_logger/tests/test_default_file_and_writer.rs new file mode 100644 index 000000000..e46676ee9 --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/test_default_file_and_writer.rs @@ -0,0 +1,43 @@ +use flexi_logger::writers::{FileLogWriter, LogWriter}; +use flexi_logger::{detailed_format, LogTarget, Logger}; +use log::*; + +#[test] +fn test_default_file_and_writer() { + let w = FileLogWriter::builder() + .format(detailed_format) + .discriminant("bar") + .try_build() + .unwrap(); + + let handle = Logger::with_str("info") + .log_target(LogTarget::FileAndWriter(Box::new(w))) + .format(detailed_format) + .discriminant("foo") + .start() + .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); + + error!("This is an error message"); + warn!("This is a warning"); + info!("This is an info message"); + debug!("This is a debug message - you must not see it!"); + trace!("This is a trace message - you must not see it!"); + + handle.validate_logs(&[ + ("ERROR", "test_default_file_and_writer", "error"), + ("WARN", "test_default_file_and_writer", "warning"), + ("INFO", "test_default_file_and_writer", "info"), + ]); + + let w = 
FileLogWriter::builder() + .format(detailed_format) + .discriminant("bar") + .append() + .try_build() + .unwrap(); + w.validate_logs(&[ + ("ERROR", "test_default_file_and_writer", "error"), + ("WARN", "test_default_file_and_writer", "warning"), + ("INFO", "test_default_file_and_writer", "info"), + ]); +} diff --git a/agent/support/rust/flexi_logger/tests/test_default_files_dir.rs b/agent/support/rust/flexi_logger/tests/test_default_files_dir.rs new file mode 100644 index 000000000..c68c5826c --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/test_default_files_dir.rs @@ -0,0 +1,21 @@ +use log::*; + +#[test] +fn test_default_files_dir() { + let handle = flexi_logger::Logger::with_str("info") + .log_to_file() + .directory("log_files") + .start() + .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); + + error!("This is an error message"); + warn!("This is a warning"); + info!("This is an info message"); + debug!("This is a debug message - you must not see it!"); + trace!("This is a trace message - you must not see it!"); + handle.validate_logs(&[ + ("ERROR", "test_default_files_dir", "error"), + ("WARN", "test_default_files_dir", "warning"), + ("INFO", "test_default_files_dir", "info"), + ]); +} diff --git a/agent/support/rust/flexi_logger/tests/test_default_files_dir_rot.rs b/agent/support/rust/flexi_logger/tests/test_default_files_dir_rot.rs new file mode 100644 index 000000000..6833a4bed --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/test_default_files_dir_rot.rs @@ -0,0 +1,17 @@ +use flexi_logger::*; +use log::*; +#[test] +fn test_default_files_dir_rot() { + Logger::with_str("info") + .log_target(LogTarget::File) + .directory("log_files") + .rotate(Criterion::Size(2000), Naming::Numbers, Cleanup::Never) + .start() + .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); + + error!("This is an error message"); + warn!("This is a warning"); + info!("This is an info message"); + debug!("This is a debug message - you must not see it!"); + trace!("This is a trace message - you must not see it!"); +} diff --git a/agent/support/rust/flexi_logger/tests/test_detailed_files_rot.rs b/agent/support/rust/flexi_logger/tests/test_detailed_files_rot.rs new file mode 100644 index 000000000..8b30f2f59 --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/test_detailed_files_rot.rs @@ -0,0 +1,23 @@ +use flexi_logger::{detailed_format, Cleanup, Criterion, Logger, Naming}; +use log::*; + +#[test] +fn test_detailed_files_rot() { + let handle = Logger::with_str("info") + .format(detailed_format) + .log_to_file() + .rotate(Criterion::Size(2000), Naming::Numbers, Cleanup::Never) + .start() + .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); + + error!("This is an error message"); + warn!("This is a warning"); + info!("This is an info message"); + debug!("This is a debug message - you must not see it!"); + trace!("This is a trace message - you must not see it!"); + handle.validate_logs(&[ + ("ERROR", "test_detailed_files_rot", "error"), + ("WARN", "test_detailed_files_rot", "warning"), + ("INFO", "test_detailed_files_rot", "info"), + ]); +} diff --git a/agent/support/rust/flexi_logger/tests/test_detailed_files_rot_timestamp.rs b/agent/support/rust/flexi_logger/tests/test_detailed_files_rot_timestamp.rs new file mode 100644 index 000000000..55cdff0f0 --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/test_detailed_files_rot_timestamp.rs @@ -0,0 +1,24 @@ +use flexi_logger::{detailed_format, Cleanup, Criterion, Logger, Naming}; 
+use log::*; + +#[test] +fn test_detailed_files_rot_timestamp() { + let handle = Logger::with_str("info") + .format(detailed_format) + .log_to_file() + .rotate(Criterion::Size(2000), Naming::Numbers, Cleanup::Never) + .o_timestamp(true) + .start() + .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); + + error!("This is an error message"); + warn!("This is a warning"); + info!("This is an info message"); + debug!("This is a debug message - you must not see it!"); + trace!("This is a trace message - you must not see it!"); + handle.validate_logs(&[ + ("ERROR", "test_detailed_files_rot", "error"), + ("WARN", "test_detailed_files_rot", "warning"), + ("INFO", "test_detailed_files_rot", "info"), + ]); +} diff --git a/agent/support/rust/flexi_logger/tests/test_env_logger_style.rs b/agent/support/rust/flexi_logger/tests/test_env_logger_style.rs new file mode 100644 index 000000000..ed9ce4a15 --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/test_env_logger_style.rs @@ -0,0 +1,12 @@ +use log::*; + +#[test] +fn you_must_see_exactly_three_messages_above_1_err_1_warn_1_info() { + flexi_logger::Logger::with_str("info").start().unwrap(); + + error!("This is an error message"); + warn!("This is a warning"); + info!("This is an info message"); + debug!("This is a debug message - you must not see it!"); + trace!("This is a trace message - you must not see it!"); +} diff --git a/agent/support/rust/flexi_logger/tests/test_mods.rs b/agent/support/rust/flexi_logger/tests/test_mods.rs new file mode 100644 index 000000000..a8b9f76b2 --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/test_mods.rs @@ -0,0 +1,66 @@ +use flexi_logger::{detailed_format, Logger, ReconfigurationHandle}; +use log::*; + +#[test] +fn test_mods() { + let handle: ReconfigurationHandle = Logger::with_env_or_str( + "info, test_mods::mymod1=debug, test_mods::mymod2=error, test_mods::mymod1::mysubmod = off", + ) + .format(detailed_format) + .log_to_file() + .start() + .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); + + error!("This is an error message"); + warn!("This is a warning"); + info!("This is an info message"); + debug!("This is a debug message - you must not see it!"); + trace!("This is a trace message - you must not see it!"); + + mymod1::test_traces(); + mymod2::test_traces(); + + handle.validate_logs(&[ + ("ERROR", "test_mods", "error"), + ("WARN", "test_mods", "warning"), + ("INFO", "test_mods", "info"), + ("ERROR", "test_mods::mymod1", "error"), + ("WARN", "test_mods::mymod1", "warning"), + ("INFO", "test_mods::mymod1", "info"), + ("DEBUG", "test_mods::mymod1", "debug"), + ("ERROR", "test_mods::mymod2", "error"), + ]); +} + +mod mymod1 { + use log::*; + pub fn test_traces() { + error!("This is an error message"); + warn!("This is a warning"); + info!("This is an info message"); + debug!("This is a debug message"); + trace!("This is a trace message - you must not see it!"); + + self::mysubmod::test_traces(); + } + mod mysubmod { + use log::*; + pub fn test_traces() { + error!("This is an error message - you must not see it!"); + warn!("This is a warning - you must not see it!"); + info!("This is an info message - you must not see it!"); + debug!("This is a debug message - you must not see it!"); + trace!("This is a trace message - you must not see it!"); + } + } +} +mod mymod2 { + use log::*; + pub fn test_traces() { + error!("This is an error message"); + warn!("This is a warning - you must not see it!"); + info!("This is an info message - you must not see it!"); + 
debug!("This is a debug message - you must not see it!"); + trace!("This is a trace message - you must not see it!"); + } +} diff --git a/agent/support/rust/flexi_logger/tests/test_mods_off.rs b/agent/support/rust/flexi_logger/tests/test_mods_off.rs new file mode 100644 index 000000000..89459fdb6 --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/test_mods_off.rs @@ -0,0 +1,47 @@ +use flexi_logger::{detailed_format, Logger, ReconfigurationHandle}; +use log::*; + +#[test] +fn test_mods_off() { + let handle: ReconfigurationHandle = Logger::with_env_or_str("info, test_mods_off::mymod1=off") + .format(detailed_format) + .log_to_file() + .start() + .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); + + error!("This is an error message"); + warn!("This is a warning"); + mymod1::test_traces(); + info!("This is an info message"); + debug!("This is a debug message - you must not see it!"); + trace!("This is a trace message - you must not see it!"); + + handle.validate_logs(&[ + ("ERROR", "test_mods", "error"), + ("WARN", "test_mods", "warning"), + ("INFO", "test_mods", "info"), + ]); +} + +mod mymod1 { + use log::*; + pub fn test_traces() { + error!("This is an error message"); + warn!("This is a warning"); + info!("This is an info message"); + debug!("This is a debug message"); + trace!("This is a trace message - you must not see it!"); + + self::mysubmod::test_traces(); + } + mod mysubmod { + use log::*; + pub fn test_traces() { + error!("This is an error message - you must not see it!"); + warn!("This is a warning - you must not see it!"); + info!("This is an info message - you must not see it!"); + debug!("This is a debug message - you must not see it!"); + trace!("This is a trace message - you must not see it!"); + } + } +} diff --git a/agent/support/rust/flexi_logger/tests/test_multi_logger.rs b/agent/support/rust/flexi_logger/tests/test_multi_logger.rs new file mode 100644 index 000000000..911229a68 --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/test_multi_logger.rs @@ -0,0 +1,104 @@ +use flexi_logger::writers::{FileLogWriter, LogWriter}; +use flexi_logger::{detailed_format, DeferredNow, Logger, Record}; +use log::*; +use std::sync::Arc; + +#[macro_use] +mod macros { + #[macro_export] + macro_rules! 
sec_alert_error { + ($($arg:tt)*) => ( + error!(target: "{Sec,Alert,_Default}", $($arg)*); + ) + } +} + +#[test] +fn test() { + // more complex just to support validation: + let (sec_writer, sec_handle) = SecWriter::new(); + let mut log_handle = Logger::with_str("info, fantasy = trace") + .format(detailed_format) + .print_message() + .log_to_file() + .add_writer("Sec", sec_writer) + .add_writer("Alert", alert_logger()) + .start() + .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); + + // Explicitly send logs to different loggers + error!(target : "{Sec}", "This is a security-relevant error message"); + error!(target : "{Sec,Alert}", "This is a security-relevant alert message"); + error!(target : "{Sec,Alert,_Default}", "This is a security-relevant alert and log message"); + error!(target : "{Alert}", "This is an alert"); + + // Nicer: use explicit macros + sec_alert_error!("This is another security-relevant alert and log message"); + warn!("This is a warning"); + info!("This is an info message"); + debug!("This is a debug message - you must not see it!"); + trace!("This is a trace message - you must not see it!"); + + trace!(target: "phantasia", "this is a trace you should not see"); + trace!(target: "fantasy", "this is a trace you should see"); + + // Switching off logging has no effect on non-default targets + log_handle.parse_new_spec("Off"); + sec_alert_error!("This is a further security-relevant alert and log message"); + + // Verification: + #[rustfmt::skip] + log_handle.validate_logs(&[ + ("ERROR", "multi_logger", "a security-relevant alert and log message"), + ("ERROR", "multi_logger", "another security-relevant alert and log message"), + ("WARN", "multi_logger", "warning"), + ("INFO", "multi_logger", "info"), + ("TRACE", "multi_logger", "this is a trace you should see"), + ]); + #[rustfmt::skip] + sec_handle.validate_logs(&[ + ("ERROR", "multi_logger", "security-relevant error"), + ("ERROR", "multi_logger", "a security-relevant alert"), + ("ERROR", "multi_logger", "security-relevant alert and log message"), + ("ERROR", "multi_logger", "another security-relevant alert"), + ("ERROR", "multi_logger", "a further security-relevant alert"), + ]); +} + +struct SecWriter(Arc); + +impl SecWriter { + pub fn new() -> (Box, Arc) { + let a_flw = Arc::new( + FileLogWriter::builder() + .discriminant("Security") + .suffix("seclog") + .print_message() + .try_build() + .unwrap(), + ); + (Box::new(SecWriter(Arc::clone(&a_flw))), a_flw) + } +} +impl LogWriter for SecWriter { + fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()> { + self.0.write(now, record) + } + fn flush(&self) -> std::io::Result<()> { + self.0.flush() + } + fn max_log_level(&self) -> log::LevelFilter { + log::LevelFilter::Error + } +} + +pub fn alert_logger() -> Box { + Box::new( + FileLogWriter::builder() + .discriminant("Alert") + .suffix("alerts") + .print_message() + .try_build() + .unwrap(), + ) +} diff --git a/agent/support/rust/flexi_logger/tests/test_multi_threaded_cleanup.rs b/agent/support/rust/flexi_logger/tests/test_multi_threaded_cleanup.rs new file mode 100644 index 000000000..e77308349 --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/test_multi_threaded_cleanup.rs @@ -0,0 +1,155 @@ +#[cfg(feature = "compress")] +mod d { + use chrono::Local; + use flexi_logger::{ + Cleanup, Criterion, DeferredNow, Duplicate, LogSpecification, Logger, Naming, Record, + }; + use glob::glob; + use log::*; + use std::ops::Add; + use std::thread::{self, JoinHandle}; + + const 
NO_OF_THREADS: usize = 5; + const NO_OF_LOGLINES_PER_THREAD: usize = 100_000; + const ROTATE_OVER_SIZE: u64 = 3_000_000; + const NO_OF_LOG_FILES: usize = 2; + const NO_OF_GZ_FILES: usize = 5; + + #[test] + fn multi_threaded() { + // we use a special log line format that starts with a special string so that it is easier to + // verify that all log lines are written correctly + + let start = Local::now(); + let directory = define_directory(); + let mut reconf_handle = Logger::with_str("debug") + .log_to_file() + .format(test_format) + .duplicate_to_stderr(Duplicate::Info) + .directory(directory.clone()) + .rotate( + Criterion::Size(ROTATE_OVER_SIZE), + Naming::Timestamps, + Cleanup::KeepLogAndCompressedFiles(NO_OF_LOG_FILES, NO_OF_GZ_FILES), + ) + .start() + .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); + info!( + "create a huge number of log lines with a considerable number of threads, \ + verify the log" + ); + + let worker_handles = start_worker_threads(NO_OF_THREADS); + let new_spec = LogSpecification::parse("trace").unwrap(); + thread::sleep(std::time::Duration::from_millis(1000)); + reconf_handle.set_new_spec(new_spec); + + wait_for_workers_to_close(worker_handles); + + let delta = Local::now().signed_duration_since(start).num_milliseconds(); + debug!( + "Task executed with {} threads in {}ms.", + NO_OF_THREADS, delta + ); + + reconf_handle.shutdown(); + verify_logs(&directory); + } + + // Starts given number of worker threads and lets each execute `do_work` + fn start_worker_threads(no_of_workers: usize) -> Vec> { + let mut worker_handles: Vec> = Vec::with_capacity(no_of_workers); + trace!("Starting {} worker threads", no_of_workers); + for thread_number in 0..no_of_workers { + trace!("Starting thread {}", thread_number); + worker_handles.push( + thread::Builder::new() + .name(thread_number.to_string()) + .spawn(move || { + do_work(thread_number); + 0 as u8 + }) + .unwrap(), + ); + } + trace!("All {} worker threads started.", worker_handles.len()); + worker_handles + } + + fn do_work(thread_number: usize) { + trace!("({}) Thread started working", thread_number); + trace!("ERROR_IF_PRINTED"); + for idx in 0..NO_OF_LOGLINES_PER_THREAD { + debug!("({}) writing out line number {}", thread_number, idx); + } + trace!("MUST_BE_PRINTED"); + } + + fn wait_for_workers_to_close(worker_handles: Vec>) { + for worker_handle in worker_handles { + worker_handle + .join() + .unwrap_or_else(|e| panic!("Joining worker thread failed: {:?}", e)); + } + trace!("All worker threads joined."); + } + + fn define_directory() -> String { + format!( + "./log_files/mt_logs/{}", + Local::now().format("%Y-%m-%d_%H-%M-%S") + ) + } + + pub fn test_format( + w: &mut dyn std::io::Write, + now: &mut DeferredNow, + record: &Record, + ) -> std::io::Result<()> { + write!( + w, + "XXXXX [{}] T[{:?}] {} [{}:{}] {}", + now.now().format("%Y-%m-%d %H:%M:%S%.6f %:z"), + thread::current().name().unwrap_or(""), + record.level(), + record.file().unwrap_or(""), + record.line().unwrap_or(0), + &record.args() + ) + } + + fn verify_logs(directory: &str) { + // Since the cleanup deleted log files, we just can confirm that the correct number of + // log files and compressed files exist + + let basename = String::from(directory).add("/").add( + &std::path::Path::new(&std::env::args().next().unwrap()) + .file_stem().unwrap(/*cannot fail*/) + .to_string_lossy().to_string(), + ); + + let fn_pattern = String::with_capacity(180) + .add(&basename) + .add("_r[0-9][0-9]*."); + + let log_pattern = 
fn_pattern.clone().add("log"); + println!("log_pattern = {}", log_pattern); + let no_of_log_files = glob(&log_pattern) + .unwrap() + .map(Result::unwrap) + .inspect(|p| println!("found: {:?}", p)) + .count(); + + let gz_pattern = fn_pattern.add("gz"); + let no_of_gz_files = glob(&gz_pattern) + .unwrap() + .map(Result::unwrap) + .inspect(|p| println!("found: {:?}", p)) + .count(); + + assert_eq!(no_of_log_files, NO_OF_LOG_FILES); + assert_eq!(no_of_gz_files, NO_OF_GZ_FILES); + + info!("Found correct number of log and compressed files"); + } +} diff --git a/agent/support/rust/flexi_logger/tests/test_multi_threaded_dates.rs b/agent/support/rust/flexi_logger/tests/test_multi_threaded_dates.rs new file mode 100644 index 000000000..344f7e815 --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/test_multi_threaded_dates.rs @@ -0,0 +1,158 @@ +use chrono::Local; +use flexi_logger::{ + Age, Cleanup, Criterion, DeferredNow, Duplicate, LogSpecification, Logger, Naming, Record, +}; +use glob::glob; +use log::*; +use std::fs::File; +use std::io::{BufRead, BufReader}; +use std::ops::Add; +use std::thread::JoinHandle; +use std::time; + +const NO_OF_THREADS: usize = 5; +const NO_OF_LOGLINES_PER_THREAD: usize = 100_000; + +#[test] +fn multi_threaded() { + // we use a special log line format that starts with a special string so that it is easier to + // verify that all log lines are written correctly + + let start = Local::now(); + let directory = define_directory(); + let mut reconf_handle = Logger::with_str("debug") + .log_to_file() + .format(test_format) + .create_symlink("link_to_mt_log") + .duplicate_to_stderr(Duplicate::Info) + .directory(directory.clone()) + .rotate( + Criterion::Age(Age::Minute), + Naming::Timestamps, + Cleanup::Never, + ) + .start() + .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); + info!( + "create a huge number of log lines with a considerable number of threads, verify the log" + ); + + let worker_handles = start_worker_threads(NO_OF_THREADS); + let new_spec = LogSpecification::parse("trace").unwrap(); + std::thread::Builder::new() + .spawn(move || { + std::thread::sleep(time::Duration::from_millis(1000)); + reconf_handle.set_new_spec(new_spec); + 0 as u8 + }) + .unwrap(); + + wait_for_workers_to_close(worker_handles); + + let delta = Local::now().signed_duration_since(start).num_milliseconds(); + debug!( + "Task executed with {} threads in {}ms.", + NO_OF_THREADS, delta + ); + verify_logs(&directory); +} + +// Starts given number of worker threads and lets each execute `do_work` +fn start_worker_threads(no_of_workers: usize) -> Vec> { + let mut worker_handles: Vec> = Vec::with_capacity(no_of_workers); + trace!("Starting {} worker threads", no_of_workers); + for thread_number in 0..no_of_workers { + trace!("Starting thread {}", thread_number); + worker_handles.push( + std::thread::Builder::new() + .name(thread_number.to_string()) + .spawn(move || { + do_work(thread_number); + 0 as u8 + }) + .unwrap(), + ); + } + trace!("All {} worker threads started.", worker_handles.len()); + worker_handles +} + +fn do_work(thread_number: usize) { + trace!("({}) Thread started working", thread_number); + trace!("ERROR_IF_PRINTED"); + for idx in 0..NO_OF_LOGLINES_PER_THREAD { + debug!("({}) writing out line number {}", thread_number, idx); + } + trace!("MUST_BE_PRINTED"); +} + +fn wait_for_workers_to_close(worker_handles: Vec>) { + for worker_handle in worker_handles { + worker_handle + .join() + .unwrap_or_else(|e| panic!("Joining worker thread failed: {:?}", e)); + 
} + trace!("All worker threads joined."); +} + +fn define_directory() -> String { + format!( + "./log_files/mt_logs/{}", + Local::now().format("%Y-%m-%d_%H-%M-%S") + ) +} + +pub fn test_format( + w: &mut dyn std::io::Write, + now: &mut DeferredNow, + record: &Record, +) -> std::io::Result<()> { + write!( + w, + "XXXXX [{}] T[{:?}] {} [{}:{}] {}", + now.now().format("%Y-%m-%d %H:%M:%S%.6f %:z"), + std::thread::current().name().unwrap_or(""), + record.level(), + record.file().unwrap_or(""), + record.line().unwrap_or(0), + &record.args() + ) +} + +fn verify_logs(directory: &str) { + // read all files + let pattern = String::from(directory).add("/*"); + let globresults = match glob(&pattern) { + Err(e) => panic!( + "Is this ({}) really a directory? Listing failed with {}", + pattern, e + ), + Ok(globresults) => globresults, + }; + let mut no_of_log_files = 0; + let mut line_count = 0_usize; + for globresult in globresults { + let pathbuf = globresult.unwrap_or_else(|e| panic!("Ups - error occured: {}", e)); + let f = File::open(&pathbuf) + .unwrap_or_else(|e| panic!("Cannot open file {:?} due to {}", pathbuf, e)); + no_of_log_files += 1; + let mut reader = BufReader::new(f); + let mut buffer = String::new(); + while reader.read_line(&mut buffer).unwrap() > 0 { + if buffer.starts_with("XXXXX") { + line_count += 1; + } else { + panic!("irregular line in log file {:?}: \"{}\"", pathbuf, buffer); + } + buffer.clear(); + } + } + assert_eq!( + line_count, + NO_OF_THREADS * NO_OF_LOGLINES_PER_THREAD + 3 + NO_OF_THREADS + ); + info!( + "Wrote {} log lines from {} threads into {} files", + line_count, NO_OF_THREADS, no_of_log_files + ); +} diff --git a/agent/support/rust/flexi_logger/tests/test_multi_threaded_numbers.rs b/agent/support/rust/flexi_logger/tests/test_multi_threaded_numbers.rs new file mode 100644 index 000000000..92aa18491 --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/test_multi_threaded_numbers.rs @@ -0,0 +1,158 @@ +use chrono::Local; +use flexi_logger::{ + Cleanup, Criterion, DeferredNow, Duplicate, LogSpecification, Logger, Naming, Record, +}; +use glob::glob; +use log::*; +use std::fs::File; +use std::io::{BufRead, BufReader}; +use std::ops::Add; +use std::thread::JoinHandle; +use std::time; + +const NO_OF_THREADS: usize = 5; +const NO_OF_LOGLINES_PER_THREAD: usize = 100_000; +const ROTATE_OVER_SIZE: u64 = 4_000_000; + +#[test] +fn multi_threaded() { + // we use a special log line format that starts with a special string so that it is easier to + // verify that all log lines are written correctly + + let start = Local::now(); + let directory = define_directory(); + let mut reconf_handle = Logger::with_str("debug") + .log_to_file() + .format(test_format) + .duplicate_to_stderr(Duplicate::Info) + .directory(directory.clone()) + .rotate( + Criterion::Size(ROTATE_OVER_SIZE), + Naming::Numbers, + Cleanup::Never, + ) + .start() + .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); + info!( + "create a huge number of log lines with a considerable number of threads, verify the log" + ); + + let worker_handles = start_worker_threads(NO_OF_THREADS); + let new_spec = LogSpecification::parse("trace").unwrap(); + std::thread::Builder::new() + .spawn(move || { + std::thread::sleep(time::Duration::from_millis(1000)); + reconf_handle.set_new_spec(new_spec); + 0 as u8 + }) + .unwrap(); + + wait_for_workers_to_close(worker_handles); + + let delta = Local::now().signed_duration_since(start).num_milliseconds(); + debug!( + "Task executed with {} threads in {}ms.", + 
NO_OF_THREADS, delta + ); + verify_logs(&directory); +} + +// Starts given number of worker threads and lets each execute `do_work` +fn start_worker_threads(no_of_workers: usize) -> Vec> { + let mut worker_handles: Vec> = Vec::with_capacity(no_of_workers); + trace!("Starting {} worker threads", no_of_workers); + for thread_number in 0..no_of_workers { + trace!("Starting thread {}", thread_number); + worker_handles.push( + std::thread::Builder::new() + .name(thread_number.to_string()) + .spawn(move || { + do_work(thread_number); + 0 as u8 + }) + .unwrap(), + ); + } + trace!("All {} worker threads started.", worker_handles.len()); + worker_handles +} + +fn do_work(thread_number: usize) { + trace!("({}) Thread started working", thread_number); + trace!("ERROR_IF_PRINTED"); + for idx in 0..NO_OF_LOGLINES_PER_THREAD { + debug!("({}) writing out line number {}", thread_number, idx); + } + trace!("MUST_BE_PRINTED"); +} + +fn wait_for_workers_to_close(worker_handles: Vec>) { + for worker_handle in worker_handles { + worker_handle + .join() + .unwrap_or_else(|e| panic!("Joining worker thread failed: {:?}", e)); + } + trace!("All worker threads joined."); +} + +fn define_directory() -> String { + format!( + "./log_files/mt_logs/{}", + Local::now().format("%Y-%m-%d_%H-%M-%S") + ) +} + +pub fn test_format( + w: &mut dyn std::io::Write, + now: &mut DeferredNow, + record: &Record, +) -> std::io::Result<()> { + write!( + w, + "XXXXX [{}] T[{:?}] {} [{}:{}] {}", + now.now().format("%Y-%m-%d %H:%M:%S%.6f %:z"), + std::thread::current().name().unwrap_or(""), + record.level(), + record.file().unwrap_or(""), + record.line().unwrap_or(0), + &record.args() + ) +} + +fn verify_logs(directory: &str) { + // read all files + let pattern = String::from(directory).add("/*"); + let globresults = match glob(&pattern) { + Err(e) => panic!( + "Is this ({}) really a directory? 
Listing failed with {}", + pattern, e + ), + Ok(globresults) => globresults, + }; + let mut no_of_log_files = 0; + let mut line_count = 0_usize; + for globresult in globresults { + let pathbuf = globresult.unwrap_or_else(|e| panic!("Ups - error occured: {}", e)); + let f = File::open(&pathbuf) + .unwrap_or_else(|e| panic!("Cannot open file {:?} due to {}", pathbuf, e)); + no_of_log_files += 1; + let mut reader = BufReader::new(f); + let mut buffer = String::new(); + while reader.read_line(&mut buffer).unwrap() > 0 { + if buffer.starts_with("XXXXX") { + line_count += 1; + } else { + panic!("irregular line in log file {:?}: \"{}\"", pathbuf, buffer); + } + buffer.clear(); + } + } + assert_eq!( + line_count, + NO_OF_THREADS * NO_OF_LOGLINES_PER_THREAD + 3 + NO_OF_THREADS + ); + info!( + "Wrote {} log lines from {} threads into {} files", + line_count, NO_OF_THREADS, no_of_log_files + ); +} diff --git a/agent/support/rust/flexi_logger/tests/test_no_logger.rs b/agent/support/rust/flexi_logger/tests/test_no_logger.rs new file mode 100644 index 000000000..bf132995c --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/test_no_logger.rs @@ -0,0 +1,16 @@ +use flexi_logger::{LogTarget, Logger}; +use log::*; + +#[test] +fn you_must_not_see_anything() { + Logger::with_str("info") + .log_target(LogTarget::DevNull) + .start() + .unwrap(); + + error!("This is an error message - you must not see it!"); + warn!("This is a warning - you must not see it!"); + info!("This is an info message - you must not see it!"); + debug!("This is a debug message - you must not see it!"); + trace!("This is a trace message - you must not see it!"); +} diff --git a/agent/support/rust/flexi_logger/tests/test_opt_files_dir_dscr.rs b/agent/support/rust/flexi_logger/tests/test_opt_files_dir_dscr.rs new file mode 100644 index 000000000..f04f44e90 --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/test_opt_files_dir_dscr.rs @@ -0,0 +1,24 @@ +use flexi_logger::{opt_format, Logger}; +use log::*; + +#[test] +fn test_opt_files_dir_dscr() { + let handle = Logger::with_str("info") + .format(opt_format) + .log_to_file() + .directory("log_files") + .discriminant("foo") + .start() + .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); + + error!("This is an error message"); + warn!("This is a warning"); + info!("This is an info message"); + debug!("This is a debug message - you must not see it!"); + trace!("This is a trace message - you must not see it!"); + handle.validate_logs(&[ + ("ERROR", "test_opt_files_dir_dscr", "error"), + ("WARN", "test_opt_files_dir_dscr", "warning"), + ("INFO", "test_opt_files_dir_dscr", "info"), + ]); +} diff --git a/agent/support/rust/flexi_logger/tests/test_opt_files_dir_dscr_rot.rs b/agent/support/rust/flexi_logger/tests/test_opt_files_dir_dscr_rot.rs new file mode 100644 index 000000000..56fcb2c22 --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/test_opt_files_dir_dscr_rot.rs @@ -0,0 +1,41 @@ +use flexi_logger::{opt_format, Cleanup, Criterion, Logger, Naming}; +use log::*; + +#[test] +fn test_opt_files_dir_dscr_rot() { + let link_name = "link_to_log".to_string(); + let handle = Logger::with_str("info") + .format(opt_format) + .log_to_file() + .directory("log_files") + .discriminant("foo".to_string()) + .rotate(Criterion::Size(2000), Naming::Numbers, Cleanup::Never) + .create_symlink(link_name.clone()) + .start() + .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); + + error!("This is an error message"); + warn!("This is a warning"); + info!("This 
is an info message"); + debug!("This is a debug message - you must not see it!"); + trace!("This is a trace message - you must not see it!"); + handle.validate_logs(&[ + ("ERROR", "test_opt_files_dir_dscr_rot", "error"), + ("WARN", "test_opt_files_dir_dscr_rot", "warning"), + ("INFO", "test_opt_files_dir_dscr_rot", "info"), + ]); + self::platform::check_link(&link_name); +} + +mod platform { + #[cfg(target_os = "linux")] + pub fn check_link(link_name: &str) { + match std::fs::symlink_metadata(link_name) { + Err(e) => panic!("error with symlink: {}", e), + Ok(metadata) => assert!(metadata.file_type().is_symlink(), "not a symlink"), + } + } + + #[cfg(not(target_os = "linux"))] + pub fn check_link(_: &str) {} +} diff --git a/agent/support/rust/flexi_logger/tests/test_parse_errors.rs b/agent/support/rust/flexi_logger/tests/test_parse_errors.rs new file mode 100644 index 000000000..87bc6b85e --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/test_parse_errors.rs @@ -0,0 +1,80 @@ +use flexi_logger::{FlexiLoggerError, LogSpecification, Logger}; +use log::*; + +#[test] +fn parse_errors_logspec() { + match LogSpecification::parse("info, foo=bar, fuzz=debug") + .err() + .unwrap() + { + FlexiLoggerError::Parse(_, logspec) => { + assert_eq!( + logspec.module_filters(), + LogSpecification::parse("info, fuzz=debug") + .unwrap() + .module_filters() + ); + #[cfg(feature = "textfilter")] + assert!(logspec.text_filter().is_none()); + } + _ => panic!("Wrong error from parsing (1)"), + } + + match LogSpecification::parse("info, ene mene dubbedene") + .err() + .unwrap() + { + FlexiLoggerError::Parse(_, logspec) => { + assert_eq!( + logspec.module_filters(), + LogSpecification::parse("info").unwrap().module_filters() + ); + #[cfg(feature = "textfilter")] + assert!(logspec.text_filter().is_none()); + } + _ => panic!("Wrong error from parsing (2)"), + } + + match LogSpecification::parse("ene mene dubbedene").err().unwrap() { + FlexiLoggerError::Parse(_, logspec) => { + assert_eq!( + logspec.module_filters(), + LogSpecification::off().module_filters() + ); + #[cfg(feature = "textfilter")] + assert!(logspec.text_filter().is_none()); + } + _ => panic!("Wrong error from parsing (3)"), + } + + match LogSpecification::parse("INFO, ene / mene / dubbedene") + .err() + .unwrap() + { + FlexiLoggerError::Parse(_, logspec) => { + assert_eq!( + logspec.module_filters(), + LogSpecification::off().module_filters() + ); + #[cfg(feature = "textfilter")] + assert!(logspec.text_filter().is_none()); + } + _ => panic!("Wrong error from parsing (4)"), + } +} + +#[test] +fn parse_errors_logger() { + let result = Logger::with_str("info, foo=baz").check_parser_error(); + assert!(result.is_err()); + let error = result.err().unwrap(); + println!("err: {}", error); + + Logger::with_str("info, foo=debug") + .check_parser_error() + .unwrap() + .start() + .unwrap(); + info!("logging works"); + info!("logging works"); +} diff --git a/agent/support/rust/flexi_logger/tests/test_reconfigure_methods.rs b/agent/support/rust/flexi_logger/tests/test_reconfigure_methods.rs new file mode 100644 index 000000000..433948d7f --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/test_reconfigure_methods.rs @@ -0,0 +1,113 @@ +use flexi_logger::{Logger, ReconfigurationHandle}; +use log::*; + +#[test] +fn test_reconfigure_methods() { + let mut log_handle = Logger::with_str("info") + .log_to_file() + .start() + .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); + + test_parse_new_spec(&mut log_handle); + test_push_new_spec(&mut 
log_handle); + validate_logs(&mut log_handle); +} + +fn test_parse_new_spec(log_handle: &mut ReconfigurationHandle) { + error!("1-error message"); + warn!("1-warning"); + info!("1-info message"); + debug!("1-debug message - you must not see it!"); + trace!("1-trace message - you must not see it!"); + + log_handle.parse_new_spec("error"); + error!("1-error message"); + warn!("1-warning - you must not see it!"); + info!("1-info message - you must not see it!"); + debug!("1-debug message - you must not see it!"); + trace!("1-trace message - you must not see it!"); + + log_handle.parse_new_spec("trace"); + error!("1-error message"); + warn!("1-warning"); + info!("1-info message"); + debug!("1-debug message"); + trace!("1-trace message"); + + log_handle.parse_new_spec("info"); +} + +#[allow(clippy::cognitive_complexity)] +fn test_push_new_spec(log_handle: &mut ReconfigurationHandle) { + error!("2-error message"); + warn!("2-warning"); + info!("2-info message"); + debug!("2-debug message - you must not see it!"); + trace!("2-trace message - you must not see it!"); + + log_handle.parse_and_push_temp_spec("error"); + error!("2-error message"); + warn!("2-warning - you must not see it!"); + info!("2-info message - you must not see it!"); + debug!("2-debug message - you must not see it!"); + trace!("2-trace message - you must not see it!"); + + log_handle.parse_and_push_temp_spec("trace"); + error!("2-error message"); + warn!("2-warning"); + info!("2-info message"); + debug!("2-debug message"); + trace!("2-trace message"); + + log_handle.pop_temp_spec(); // we should be back on error + error!("2-error message"); + warn!("2-warning - you must not see it!"); + info!("2-info message - you must not see it!"); + debug!("2-debug message - you must not see it!"); + trace!("2-trace message - you must not see it!"); + + log_handle.pop_temp_spec(); // we should be back on info + + error!("2-error message"); + warn!("2-warning"); + info!("2-info message"); + debug!("2-debug message - you must not see it!"); + trace!("2-trace message - you must not see it!"); + + log_handle.pop_temp_spec(); // should be a no-op +} + +#[allow(clippy::cognitive_complexity)] +fn validate_logs(log_handle: &mut ReconfigurationHandle) { + log_handle.validate_logs(&[ + ("ERROR", "test_reconfigure_methods", "1-error"), + ("WARN", "test_reconfigure_methods", "1-warning"), + ("INFO", "test_reconfigure_methods", "1-info"), + // + ("ERROR", "test_reconfigure_methods", "1-error"), + // + ("ERROR", "test_reconfigure_methods", "1-error"), + ("WARN", "test_reconfigure_methods", "1-warning"), + ("INFO", "test_reconfigure_methods", "1-info"), + ("DEBUG", "test_reconfigure_methods", "1-debug"), + ("TRACE", "test_reconfigure_methods", "1-trace"), + // ----- + ("ERROR", "test_reconfigure_methods", "2-error"), + ("WARN", "test_reconfigure_methods", "2-warning"), + ("INFO", "test_reconfigure_methods", "2-info"), + // + ("ERROR", "test_reconfigure_methods", "2-error"), + // + ("ERROR", "test_reconfigure_methods", "2-error"), + ("WARN", "test_reconfigure_methods", "2-warning"), + ("INFO", "test_reconfigure_methods", "2-info"), + ("DEBUG", "test_reconfigure_methods", "2-debug"), + ("TRACE", "test_reconfigure_methods", "2-trace"), + // + ("ERROR", "test_reconfigure_methods", "2-error"), + // + ("ERROR", "test_reconfigure_methods", "2-error"), + ("WARN", "test_reconfigure_methods", "2-warning"), + ("INFO", "test_reconfigure_methods", "2-info"), + ]); +} diff --git a/agent/support/rust/flexi_logger/tests/test_recursion.rs 
b/agent/support/rust/flexi_logger/tests/test_recursion.rs new file mode 100644 index 000000000..4554bd030 --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/test_recursion.rs @@ -0,0 +1,32 @@ +use flexi_logger::{detailed_format, Logger}; +use log::*; + +#[test] +fn test_recursion() { + Logger::with_str("info") + .format(detailed_format) + .log_to_file() + // .duplicate_to_stderr(Duplicate::All) + // .duplicate_to_stdout(Duplicate::All) + .start() + .unwrap_or_else(|e| panic!("Logger initialization failed because: {}", e)); + + let dummy = Dummy(); + + for _ in 0..10 { + error!("This is an error message for {}", dummy); + warn!("This is a warning for {}", dummy); + info!("This is an info message for {}", dummy); + debug!("This is a debug message for {}", dummy); + trace!("This is a trace message for {}", dummy); + } +} + +struct Dummy(); +impl std::fmt::Display for Dummy { + fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { + info!("Here comes the inner message :-| "); + f.write_str("Dummy!!")?; + Ok(()) + } +} diff --git a/agent/support/rust/flexi_logger/tests/test_specfile.rs b/agent/support/rust/flexi_logger/tests/test_specfile.rs new file mode 100644 index 000000000..7e64b9f98 --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/test_specfile.rs @@ -0,0 +1,147 @@ +#[cfg(feature = "specfile_without_notification")] +mod a { + use flexi_logger::{detailed_format, Logger}; + use log::*; + use std::io::{BufRead, Write}; + use std::ops::Add; + + const WAIT_MILLIS: u64 = 2000; + + /// Test of the specfile feature, using the file ./tests/logspec.toml. + #[test] + fn test_specfile() { + let specfile = "test_spec/test_specfile_logspec.toml"; + + std::fs::remove_file(specfile).ok(); + assert!(!std::path::Path::new(specfile).exists()); + + Logger::with_str("info") + .format(detailed_format) + .log_to_file() + .suppress_timestamp() + .start_with_specfile(specfile) + .unwrap_or_else(|e| panic!("Logger initialization failed because: {}", e)); + + error!("This is an error-0"); + warn!("This is a warning-0"); + info!("This is an info-0"); + debug!("This is a debug-0"); + trace!("This is a trace-0"); + + eprintln!( + "[{}] ===== behave like many editors: rename and recreate, as warn", + chrono::Local::now() + ); + { + std::fs::rename(&specfile, "old_logspec.toml").unwrap(); + let mut file = std::fs::OpenOptions::new() + .create(true) + .write(true) + .open(specfile) + .unwrap(); + file.write_all( + b" + global_level = 'warn' + [modules] + ", + ) + .unwrap(); + } + + std::thread::sleep(std::time::Duration::from_millis(WAIT_MILLIS)); + + error!("This is an error-1"); + warn!("This is a warning-1"); + info!("This is an info-1"); + debug!("This is a debug-1"); + trace!("This is a trace-1"); + + eprintln!( + "[{}] ===== truncate and rewrite, update to error", + chrono::Local::now() + ); + { + let mut file = std::fs::OpenOptions::new() + .truncate(true) + .write(true) + .open(specfile) + .unwrap(); + file.write_all( + b" + global_level = 'error' + [modules] + ", + ) + .unwrap(); + } + + std::thread::sleep(std::time::Duration::from_millis(WAIT_MILLIS)); + + error!("This is an error-2"); + warn!("This is a warning-2"); + info!("This is an info-2"); + debug!("This is a debug-2"); + trace!("This is a trace-2"); + + let logfile = std::path::Path::new(&std::env::args().nth(0).unwrap()) + .file_stem() + .unwrap() + .to_string_lossy() + .to_string() + .add(".log"); + + if cfg!(feature = "specfile") { + eprintln!("feature is: specfile!"); + validate_logs( + &logfile, + &[ + 
("ERROR", "test_specfile::a", "error-0"), + ("WARN", "test_specfile::a", "warning-0"), + ("INFO", "test_specfile::a", "info-0"), + ("ERROR", "test_specfile::a", "error-1"), + ("WARN", "test_specfile::a", "warning-1"), + ("ERROR", "test_specfile::a", "error-2"), + ], + ); + } else { + eprintln!("feature is: specfile_without_notification!"); + validate_logs( + &logfile, + &[ + ("ERROR", "test_specfile::a", "error-0"), + ("WARN", "test_specfile::a", "warning-0"), + ("INFO", "test_specfile::a", "info-0"), + ("ERROR", "test_specfile::a", "error-1"), + ("WARN", "test_specfile::a", "warning-1"), + ("INFO", "test_specfile::a", "info-1"), + ("ERROR", "test_specfile::a", "error-2"), + ("WARN", "test_specfile::a", "warning-2"), + ("INFO", "test_specfile::a", "info-2"), + ], + ); + } + } + + fn validate_logs(logfile: &str, expected: &[(&'static str, &'static str, &'static str)]) { + println!("validating log file = {}", logfile); + + let f = std::fs::File::open(logfile).unwrap(); + let mut reader = std::io::BufReader::new(f); + + let mut buf = String::new(); + for tuple in expected { + buf.clear(); + reader.read_line(&mut buf).unwrap(); + assert!(buf.contains(&tuple.0), "Did not find tuple.0 = {}", tuple.0); + assert!(buf.contains(&tuple.1), "Did not find tuple.1 = {}", tuple.1); + assert!(buf.contains(&tuple.2), "Did not find tuple.2 = {}", tuple.2); + } + buf.clear(); + reader.read_line(&mut buf).unwrap(); + assert!( + buf.is_empty(), + "Found more log lines than expected: {} ", + buf + ); + } +} diff --git a/agent/support/rust/flexi_logger/tests/test_syslog.rs b/agent/support/rust/flexi_logger/tests/test_syslog.rs new file mode 100644 index 000000000..84a625aca --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/test_syslog.rs @@ -0,0 +1,69 @@ +#[cfg(feature = "syslog_writer")] +mod test { + use flexi_logger::writers::{SyslogConnector, SyslogFacility, SyslogWriter}; + use flexi_logger::{detailed_format, Logger}; + use log::*; + + #[macro_use] + mod macros { + #[macro_export] + macro_rules! 
syslog_error { + ($($arg:tt)*) => ( + error!(target: "{Syslog,_Default}", $($arg)*); + ) + } + } + + #[test] + fn test_syslog() -> std::io::Result<()> { + let syslog_connector = SyslogConnector::try_udp("127.0.0.1:5555", "127.0.0.1:514")?; + // let syslog_connector = SyslogConnector::try_tcp("localhost:601")?; + + let boxed_syslog_writer = SyslogWriter::try_new( + SyslogFacility::LocalUse0, + None, + log::LevelFilter::Trace, + "JustForTest".to_owned(), + syslog_connector, + ) + .unwrap(); + let log_handle = Logger::with_str("info") + .format(detailed_format) + .print_message() + .log_to_file() + .add_writer("Syslog", boxed_syslog_writer) + .start() + .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); + + // Explicitly send logs to different loggers + error!(target : "{Syslog}", "This is a syslog-relevant error message"); + warn!(target : "{Syslog}", "This is a syslog-relevant error message"); + info!(target : "{Syslog}", "This is a syslog-relevant error message"); + debug!(target : "{Syslog}", "This is a syslog-relevant error message"); + trace!(target : "{Syslog}", "This is a syslog-relevant error message"); + + error!(target : "{Syslog,_Default}", "This is a syslog- and log-relevant error message"); + + // Nicer: use explicit macros + syslog_error!("This is another syslog- and log-relevant error message"); + warn!("This is a warning message"); + debug!("This is a debug message - you must not see it!"); + trace!("This is a trace message - you must not see it!"); + + // Verification: + log_handle.validate_logs(&[ + ( + "ERROR", + "syslog", + "a syslog- and log-relevant error message", + ), + ( + "ERROR", + "syslog", + "another syslog- and log-relevant error message", + ), + ("WARN", "syslog", "This is a warning message"), + ]); + Ok(()) + } +} diff --git a/agent/support/rust/flexi_logger/tests/test_textfilter.rs b/agent/support/rust/flexi_logger/tests/test_textfilter.rs new file mode 100644 index 000000000..11a66a1ae --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/test_textfilter.rs @@ -0,0 +1,54 @@ +#[test] +#[cfg(feature = "textfilter")] +fn test_textfilter() { + use flexi_logger::{default_format, LogSpecification, Logger}; + use log::*; + + use std::env; + use std::fs::File; + use std::io::{BufRead, BufReader}; + use std::path::Path; + + let logspec = LogSpecification::parse("info/Hello").unwrap(); + Logger::with(logspec) + .format(default_format) + .print_message() + .log_to_file() + .suppress_timestamp() + .start() + .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); + + error!("This is an error message"); + warn!("This is a warning"); + info!("This is an info message"); + debug!("This is a debug message - you must not see it!"); + trace!("This is a trace message - you must not see it!"); + + error!("Hello, this is an error message"); + warn!("This is a warning! Hello!!"); + info!("Hello, this is an info message! 
Hello"); + debug!("Hello, this is a debug message - you must not see it!"); + trace!("Hello, this is a trace message - you must not see it!"); + + let arg0 = env::args().next().unwrap(); + let progname = Path::new(&arg0).file_stem().unwrap().to_string_lossy(); + let filename = format!("{}.log", &progname); + + let f = File::open(&filename) + .unwrap_or_else(|e| panic!("Cannot open file {:?} due to {}", filename, e)); + let mut reader = BufReader::new(f); + let mut buffer = String::new(); + let mut count = 0; + while reader.read_line(&mut buffer).unwrap() > 0 { + if buffer.find("Hello").is_none() { + panic!( + "line in log file without Hello {:?}: \"{}\"", + filename, buffer + ); + } else { + count += 1; + } + buffer.clear(); + } + assert_eq!(count, 3); +} diff --git a/agent/support/rust/flexi_logger/tests/test_windows_line_ending.rs b/agent/support/rust/flexi_logger/tests/test_windows_line_ending.rs new file mode 100644 index 000000000..e74a176a8 --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/test_windows_line_ending.rs @@ -0,0 +1,55 @@ +use flexi_logger::{detailed_format, Logger, ReconfigurationHandle}; +use log::*; + +#[test] +fn test_mods() { + let handle: ReconfigurationHandle = Logger::with_env_or_str( + "info, test_windows_line_ending::mymod1=debug, test_windows_line_ending::mymod2=error", + ) + .format(detailed_format) + .log_to_file() + .use_windows_line_ending() + .start() + .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); + + error!("This is an error message"); + warn!("This is a warning"); + info!("This is an info message"); + debug!("This is a debug message - you must not see it!"); + trace!("This is a trace message - you must not see it!"); + + mymod1::test_traces(); + mymod2::test_traces(); + + handle.validate_logs(&[ + ("ERROR", "test_windows_line_ending", "error"), + ("WARN", "test_windows_line_ending", "warning"), + ("INFO", "test_windows_line_ending", "info"), + ("ERROR", "test_windows_line_ending", "error"), + ("WARN", "test_windows_line_ending", "warning"), + ("INFO", "test_windows_line_ending", "info"), + ("DEBUG", "test_windows_line_ending", "debug"), + ("ERROR", "test_windows_line_ending", "error"), + ]); +} + +mod mymod1 { + use log::*; + pub fn test_traces() { + error!("This is an error message"); + warn!("This is a warning"); + info!("This is an info message"); + debug!("This is a debug message"); + trace!("This is a trace message - you must not see it!"); + } +} +mod mymod2 { + use log::*; + pub fn test_traces() { + error!("This is an error message"); + warn!("This is a warning - you must not see it!"); + info!("This is an info message - you must not see it!"); + debug!("This is a debug message - you must not see it!"); + trace!("This is a trace message - you must not see it!"); + } +} diff --git a/agent/support/rust/flexi_logger/tests/version_numbers.rs b/agent/support/rust/flexi_logger/tests/version_numbers.rs new file mode 100644 index 000000000..0f903a63c --- /dev/null +++ b/agent/support/rust/flexi_logger/tests/version_numbers.rs @@ -0,0 +1,6 @@ +use version_sync::assert_markdown_deps_updated; + +#[test] +fn test_readme_deps() { + assert_markdown_deps_updated!("README.md"); +} diff --git a/agent/support/rust/plugin/.gitignore b/agent/support/rust/plugin/.gitignore new file mode 100644 index 000000000..1de565933 --- /dev/null +++ b/agent/support/rust/plugin/.gitignore @@ -0,0 +1 @@ +target \ No newline at end of file diff --git a/agent/support/rust/plugin/Cargo.lock b/agent/support/rust/plugin/Cargo.lock new file mode 100644 
index 000000000..ed9070540 --- /dev/null +++ b/agent/support/rust/plugin/Cargo.lock @@ -0,0 +1,231 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +[[package]] +name = "anyhow" +version = "1.0.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf8dcb5b4bbaa28653b647d8c77bd4ed40183b48882e130c1f1ffb73de069fd7" + +[[package]] +name = "autocfg" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" + +[[package]] +name = "bitflags" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" + +[[package]] +name = "byteorder" +version = "1.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" + +[[package]] +name = "cfg-if" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "cloudabi" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4344512281c643ae7638bbabc3af17a11307803ec8f0fcad9fae512a8bf36467" +dependencies = [ + "bitflags", +] + +[[package]] +name = "instant" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61124eeebbd69b8190558df225adf7e4caafce0d743919e5d6b19652314ec5ec" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "libc" +version = "0.2.80" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d58d1b70b004888f764dfbf6a26a3b0342a1632d33968e4a179d8011c760614" + +[[package]] +name = "lock_api" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd96ffd135b2fd7b973ac026d28085defbe8983df057ced3eb4f2130b0831312" +dependencies = [ + "scopeguard", +] + +[[package]] +name = "num-traits" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" +dependencies = [ + "autocfg", +] + +[[package]] +name = "parking_lot" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d7744ac029df22dca6284efe4e898991d28e3085c706c972bcd7da4a27a15eb" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c361aa727dd08437f2f1447be8b59a33b0edd15e0fcee698f935613d9efbca9b" +dependencies = [ + "cfg-if 0.1.10", + "cloudabi", + "instant", + "libc", + "redox_syscall", + "smallvec", + "winapi", +] + +[[package]] +name = "plugin" +version = "0.1.0" +dependencies = [ + "anyhow", + "parking_lot", + "rmp-serde", + "serde", +] + +[[package]] +name = "proc-macro2" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71" +dependencies = [ + "unicode-xid", +] + +[[package]] +name = "quote" +version = "1.0.7" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "redox_syscall" +version = "0.1.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" + +[[package]] +name = "rmp" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f10b46df14cf1ee1ac7baa4d2fbc2c52c0622a4b82fa8740e37bc452ac0184f" +dependencies = [ + "byteorder", + "num-traits", +] + +[[package]] +name = "rmp-serde" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ce7d70c926fe472aed493b902010bccc17fa9f7284145cb8772fd22fdb052d8" +dependencies = [ + "byteorder", + "rmp", + "serde", +] + +[[package]] +name = "scopeguard" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" + +[[package]] +name = "serde" +version = "1.0.117" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b88fa983de7720629c9387e9f517353ed404164b1e482c970a90c1a4aaf7dc1a" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.117" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbd1ae72adb44aab48f325a02444a5fc079349a8d804c1fc922aed3f7454c74e" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "smallvec" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7acad6f34eb9e8a259d3283d1e8c1d34d7415943d4895f65cc73813c7396fc85" + +[[package]] +name = "syn" +version = "1.0.53" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8833e20724c24de12bbaba5ad230ea61c3eafb05b881c7c9d3cfe8638b187e68" +dependencies = [ + "proc-macro2", + "quote", + "unicode-xid", +] + +[[package]] +name = "unicode-xid" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff --git a/agent/support/rust/plugin/Cargo.toml b/agent/support/rust/plugin/Cargo.toml new file mode 100644 index 000000000..166fb4fca --- /dev/null +++ b/agent/support/rust/plugin/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "plugin" +version = "0.1.0" +authors = ["zhanglei.sec "] +edition = "2018" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +anyhow = "1" +rmp-serde = "0.14.4" +serde = { version = "1.0", features = ["derive"] } +parking_lot = "0.11.1" diff --git a/agent/support/rust/plugin/src/lib.rs 
b/agent/support/rust/plugin/src/lib.rs new file mode 100644 index 000000000..664c7066e --- /dev/null +++ b/agent/support/rust/plugin/src/lib.rs @@ -0,0 +1,105 @@ +use anyhow::{anyhow, Result}; +use parking_lot::Mutex; +use serde::{Deserialize, Serialize}; +use std::io::{BufWriter, Write}; +use std::os::unix::net::UnixStream; +use std::sync::{atomic, Arc}; + +const DEFAULT_SENDER_BUF_SIZE: usize = 10240; +/// Task defined control message +#[derive(Debug, PartialEq, Deserialize)] +pub struct Task { + pub id: u32, + pub content: String, + pub token: String, +} +/// Sender is used by plugin to send data +#[derive(Clone)] +pub struct Sender { + buffer: Arc>>, + signal: Arc, +} + +impl Sender { + pub fn new(signal: Arc, stream: UnixStream) -> Self { + let thread_signal = signal.clone(); + let mut buf = Vec::::with_capacity(DEFAULT_SENDER_BUF_SIZE); + buf.push(0xdc); + buf.push(0); + buf.push(0); + let rbuffer = Arc::new(Mutex::new(buf)); + let wbuffer = rbuffer.clone(); + let mut w = BufWriter::new(stream); + std::thread::spawn(move || loop { + if thread_signal.load(atomic::Ordering::Relaxed) { + if let Ok(inner) = w.into_inner() { + let _ = inner.shutdown(std::net::Shutdown::Both); + } + return; + } + let mut buf = wbuffer.lock(); + if ((buf[1] as u16) << 8) + (buf[2] as u16) != 0 { + match w.write(buf.as_slice()).and(w.flush()) { + Err(e) => { + thread_signal.store(true, atomic::Ordering::Relaxed); + println!("{:?}", e); + return; + } + Ok(_) => {} + } + buf.clear(); + buf.push(0xdc); + buf.push(0); + buf.push(0); + } + drop(buf); + std::thread::sleep(std::time::Duration::from_millis(125)); + }); + Self { + buffer: rbuffer, + signal, + } + } + pub fn send(&self, data: &T) -> Result<()> { + if self.signal.load(atomic::Ordering::Relaxed) { + return Err(anyhow!("Send error. Must exit.")); + } + let mut buf = self.buffer.lock(); + buf.extend(rmp_serde::encode::to_vec_named(data)?); + let mut len = ((buf[1] as u16) << 8) + (buf[2] as u16); + if len == u16::MAX { + return Err(anyhow!("Reached maximum length. Cannot send.")); + } + len += 1; + buf[1] = (len >> 8) as u8; + buf[2] = len as u8; + + Ok(()) + } + pub fn get_ctrl(&self) -> Arc { + self.signal.clone() + } + pub fn close(&self) { + self.signal.store(true, atomic::Ordering::Relaxed) + } +} +/// Receiver is used by plugin to receive task +pub struct Receiver { + stream: UnixStream, + signal: Arc, +} + +impl Receiver { + pub fn new(signal: Arc, stream: UnixStream) -> Self { + Self { signal, stream } + } + pub fn receive(&self) -> Result { + if self.signal.load(atomic::Ordering::Relaxed) { + return Err(anyhow!("Should exit.")); + } + rmp_serde::decode::from_read(&self.stream).map_err(|e| { + self.signal.store(true, atomic::Ordering::Relaxed); + anyhow!(e) + }) + } +} diff --git a/agent/support/rust/plugin_builder/.gitignore b/agent/support/rust/plugin_builder/.gitignore new file mode 100644 index 000000000..1de565933 --- /dev/null +++ b/agent/support/rust/plugin_builder/.gitignore @@ -0,0 +1 @@ +target \ No newline at end of file diff --git a/agent/support/rust/plugin_builder/Cargo.lock b/agent/support/rust/plugin_builder/Cargo.lock new file mode 100644 index 000000000..f3ff093e0 --- /dev/null +++ b/agent/support/rust/plugin_builder/Cargo.lock @@ -0,0 +1,429 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+[[package]] +name = "adler" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee2a4ec343196209d6594e19543ae87a39f96d5534d7174822a3ad825dd6ed7e" + +[[package]] +name = "aho-corasick" +version = "0.7.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7404febffaa47dac81aa44dba71523c9d069b1bdc50a77db41195149e17f68e5" +dependencies = [ + "memchr", +] + +[[package]] +name = "anyhow" +version = "1.0.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68803225a7b13e47191bab76f2687382b60d259e8cf37f6e1893658b84bb9479" + +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi", + "libc", + "winapi", +] + +[[package]] +name = "autocfg" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" + +[[package]] +name = "byteorder" +version = "1.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" + +[[package]] +name = "cfg-if" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73" +dependencies = [ + "libc", + "num-integer", + "num-traits", + "time", + "winapi", +] + +[[package]] +name = "crc32fast" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "flate2" +version = "1.0.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7411863d55df97a419aa64cb4d2f167103ea9d767e2c54a1868b7ac3f6b47129" +dependencies = [ + "cfg-if 1.0.0", + "crc32fast", + "libc", + "miniz_oxide", +] + +[[package]] +name = "flexi_logger" +version = "0.16.2" +dependencies = [ + "anyhow", + "atty", + "chrono", + "flate2", + "glob", + "lazy_static", + "log", + "plugin", + "regex", + "thiserror", + "yansi", +] + +[[package]] +name = "glob" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" + +[[package]] +name = "hermit-abi" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aca5565f760fb5b220e499d72710ed156fdb74e631659e99377d9ebfbd13ae8" +dependencies = [ + "libc", +] + +[[package]] +name = "instant" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61124eeebbd69b8190558df225adf7e4caafce0d743919e5d6b19652314ec5ec" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + 
+[[package]] +name = "libc" +version = "0.2.81" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1482821306169ec4d07f6aca392a4681f66c75c9918aa49641a2595db64053cb" + +[[package]] +name = "lock_api" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd96ffd135b2fd7b973ac026d28085defbe8983df057ced3eb4f2130b0831312" +dependencies = [ + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b" +dependencies = [ + "cfg-if 0.1.10", +] + +[[package]] +name = "memchr" +version = "2.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ee1c47aaa256ecabcaea351eae4a9b01ef39ed810004e298d2511ed284b1525" + +[[package]] +name = "miniz_oxide" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f2d26ec3309788e423cfbf68ad1800f061638098d76a83681af979dc4eda19d" +dependencies = [ + "adler", + "autocfg", +] + +[[package]] +name = "num-integer" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db" +dependencies = [ + "autocfg", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" +dependencies = [ + "autocfg", +] + +[[package]] +name = "parking_lot" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d7744ac029df22dca6284efe4e898991d28e3085c706c972bcd7da4a27a15eb" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ccb628cad4f84851442432c60ad8e1f607e29752d0bf072cbd0baf28aa34272" +dependencies = [ + "cfg-if 1.0.0", + "instant", + "libc", + "redox_syscall", + "smallvec", + "winapi", +] + +[[package]] +name = "plugin" +version = "0.1.0" +dependencies = [ + "anyhow", + "parking_lot", + "rmp-serde", + "serde", +] + +[[package]] +name = "plugin_builder" +version = "0.1.0" +dependencies = [ + "anyhow", + "flexi_logger", + "plugin", + "rmp-serde", + "serde", +] + +[[package]] +name = "proc-macro2" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71" +dependencies = [ + "unicode-xid", +] + +[[package]] +name = "quote" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "991431c3519a3f36861882da93630ce66b52918dcf1b8e2fd66b397fc96f28df" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "redox_syscall" +version = "0.1.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" + +[[package]] +name = "regex" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38cf2c13ed4745de91a5eb834e11c00bcc3709e773173b2ce4c56c9fbde04b9c" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", + "thread_local", +] + +[[package]] +name = "regex-syntax" +version = "0.6.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3b181ba2dcf07aaccad5448e8ead58db5b742cf85dfe035e2227f137a539a189" + +[[package]] +name = "rmp" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f10b46df14cf1ee1ac7baa4d2fbc2c52c0622a4b82fa8740e37bc452ac0184f" +dependencies = [ + "byteorder", + "num-traits", +] + +[[package]] +name = "rmp-serde" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ce7d70c926fe472aed493b902010bccc17fa9f7284145cb8772fd22fdb052d8" +dependencies = [ + "byteorder", + "rmp", + "serde", +] + +[[package]] +name = "scopeguard" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" + +[[package]] +name = "serde" +version = "1.0.118" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06c64263859d87aa2eb554587e2d23183398d617427327cf2b3d0ed8c69e4800" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.118" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c84d3526699cd55261af4b941e4e725444df67aa4f9e6a3564f18030d12672df" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "smallvec" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae524f056d7d770e174287294f562e95044c68e88dec909a00d2094805db9d75" + +[[package]] +name = "syn" +version = "1.0.55" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a571a711dddd09019ccc628e1b17fe87c59b09d513c06c026877aa708334f37a" +dependencies = [ + "proc-macro2", + "quote", + "unicode-xid", +] + +[[package]] +name = "thiserror" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e9ae34b84616eedaaf1e9dd6026dbe00dcafa92aa0c8077cb69df1fcfe5e53e" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ba20f23e85b10754cd195504aebf6a27e2e6cbe28c17778a0c930724628dd56" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thread_local" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "time" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255" +dependencies = [ + "libc", + "wasi", + "winapi", +] + +[[package]] +name = "unicode-xid" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564" + +[[package]] +name = "wasi" +version = "0.10.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "yansi" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fc79f4a1e39857fc00c3f662cbf2651c771f00e9c15fe2abc341806bd46bd71" diff --git a/agent/support/rust/plugin_builder/Cargo.toml b/agent/support/rust/plugin_builder/Cargo.toml new file mode 100644 index 000000000..d02e2da82 --- /dev/null +++ b/agent/support/rust/plugin_builder/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "plugin_builder" +version = "0.1.0" +authors = ["zhanglei.sec "] +edition = "2018" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[dependencies] +anyhow = "1" +flexi_logger = { path = "../flexi_logger", features = ["compress"] } +plugin = { path = "../plugin" } +serde = { version = "1.0", features = ["derive"] } +rmp-serde = "0.14.4" diff --git a/agent/support/rust/plugin_builder/src/lib.rs b/agent/support/rust/plugin_builder/src/lib.rs new file mode 100644 index 000000000..499d61b65 --- /dev/null +++ b/agent/support/rust/plugin_builder/src/lib.rs @@ -0,0 +1,75 @@ +use anyhow::{anyhow, Result}; +use flexi_logger::{Age, Cleanup, Criterion, Logger, Naming}; +use plugin::{Receiver, Sender}; +use serde::Serialize; +use std::io::Write; +use std::os::unix::net::UnixStream; +use std::sync::{atomic, Arc}; +/// Use this struct to send a regist request. +#[derive(Debug, PartialEq, Serialize)] +struct RegistRequest { + pid: u32, + name: &'static str, + version: &'static str, +} +/// Builder is used to build a framework to work with the plugin. +pub struct Builder { + stream: UnixStream, + logger: Logger, +} + +impl Builder { + /// TODO: we need doc comment here + pub fn new( + socket_path: &'static str, + name: &'static str, + version: &'static str, + ) -> Result { + let mut stream = match UnixStream::connect(socket_path) { + Ok(s) => s, + Err(e) => return Err(anyhow!(e)), + }; + + let req = RegistRequest { + pid: std::process::id(), + name, + version, + }; + rmp_serde::encode::write_named(&mut stream, &req)?; + stream.flush()?; + // TODO: make the immediate numbers as constants + // TODO: default directory need to be documented + Ok(Self { + stream, + logger: Logger::with_str("info") + .rotate( + Criterion::AgeOrSize(Age::Day, 1024 * 1024 * 10), + Naming::Numbers, + Cleanup::KeepLogAndCompressedFiles(5, 10), + ) + .cleanup_in_background_thread(true) + .log_to_file() + .format(flexi_logger::colored_detailed_format) + .directory("./"), + }) + } + + /// set_logger is used to set a custom logger. + pub fn set_logger(mut self, logger: Logger) -> Self { + self.logger = logger; + self + } + /// set_name is used to set a plugin name for logger,default value is "default". 
+ pub fn set_name(mut self, name: String) -> Self { + self.logger = self.logger.plugin_name(name); + self + } + + // Complete building,get a couple of (Sender, Receiver) + pub fn build(self) -> (Sender, Receiver) { + let signal = Arc::new(atomic::AtomicBool::new(false)); + let sender = Sender::new(signal.clone(), self.stream.try_clone().unwrap()); + self.logger.send_handler(sender.clone()).start().unwrap(); + (sender, Receiver::new(signal, self.stream)) + } +} diff --git a/agent/transport/fileout/fileout.go b/agent/transport/fileout/fileout.go new file mode 100644 index 000000000..8d5f49dd6 --- /dev/null +++ b/agent/transport/fileout/fileout.go @@ -0,0 +1,48 @@ +package fileout + +import ( + "bufio" + "encoding/json" + "os" + + "github.com/bytedance/AgentSmith-HIDS/agent/spec" + "github.com/bytedance/AgentSmith-HIDS/agent/transport" +) + +type FileOut struct { + f *os.File + w *bufio.Writer +} + +func (fo *FileOut) Send(d *spec.Data) error { + content, err := json.Marshal(d) + if err != nil { + return err + } + _, err = fo.w.Write(append(content, '\n')) + if err != nil { + return err + } + err = fo.w.Flush() + if err != nil { + return err + } + return nil +} + +func (fo *FileOut) Receive() (spec.Task, error) { + select {} +} + +func (fo *FileOut) Close() { + fo.w.Flush() + fo.f.Close() +} + +func NewFileOut(path string) (transport.Transport, error) { + f, err := os.OpenFile(path, os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0600) + if err != nil { + return nil, err + } + return &FileOut{f, bufio.NewWriter(f)}, nil +} diff --git a/agent/transport/kafka/kafka.go b/agent/transport/kafka/kafka.go new file mode 100644 index 000000000..9066d29c9 --- /dev/null +++ b/agent/transport/kafka/kafka.go @@ -0,0 +1,43 @@ +package kafka + +import ( + "encoding/json" + + "github.com/Shopify/sarama" + "github.com/bytedance/AgentSmith-HIDS/agent/spec" + "github.com/bytedance/AgentSmith-HIDS/agent/transport" + "go.uber.org/zap" +) + +type Kafka struct { + c sarama.Client + p sarama.SyncProducer + t string +} + +func NewKafka(c sarama.Client, topic string) (transport.Transport, error) { + producer, err := sarama.NewSyncProducerFromClient(c) + if err != nil { + return nil, err + } + return &Kafka{c, producer, topic}, nil +} + +func (k *Kafka) Send(d *spec.Data) error { + content, err := json.Marshal(d) + if err != nil { + return err + } + partition, offset, err := k.p.SendMessage(&sarama.ProducerMessage{Topic: k.t, Value: sarama.ByteEncoder(content)}) + zap.S().Debug("Kafka send message:%v %v", partition, offset) + return err +} + +func (k *Kafka) Receive() (spec.Task, error) { + select {} +} + +func (k *Kafka) Close() { + k.p.Close() + k.c.Close() +} diff --git a/agent/transport/stdout/stdout.go b/agent/transport/stdout/stdout.go new file mode 100644 index 000000000..3a2ee084a --- /dev/null +++ b/agent/transport/stdout/stdout.go @@ -0,0 +1,30 @@ +package stdout + +import ( + "encoding/json" + "os" + + "github.com/bytedance/AgentSmith-HIDS/agent/spec" +) + +type Stdout struct { +} + +func (so *Stdout) Send(d *spec.Data) error { + content, err := json.Marshal(d) + if err != nil { + return err + } + _, err = os.Stdout.Write(append(content, '\n')) + if err != nil { + return err + } + return nil +} + +func (fo *Stdout) Receive() (spec.Task, error) { + select {} +} + +func (fo *Stdout) Close() { +} diff --git a/agent/transport/transport.go b/agent/transport/transport.go new file mode 100644 index 000000000..effb68495 --- /dev/null +++ b/agent/transport/transport.go @@ -0,0 +1,41 @@ +package transport + +import ( + "sync" + + 
"github.com/bytedance/AgentSmith-HIDS/agent/spec" + "github.com/bytedance/AgentSmith-HIDS/agent/transport/stdout" +) + +var ( + mu sync.Mutex + defaultTransport Transport +) + +func init() { defaultTransport = &stdout.Stdout{} } + +type Transport interface { + Send(*spec.Data) error + Receive() (spec.Task, error) + Close() +} + +func SetTransport(t Transport) { + defaultTransport = t +} + +func Send(d *spec.Data) error { + mu.Lock() + defer mu.Unlock() + return defaultTransport.Send(d) +} +func Receive() (spec.Task, error) { + mu.Lock() + defer mu.Unlock() + return defaultTransport.Receive() +} +func Close() { + mu.Lock() + defer mu.Unlock() + defaultTransport.Close() +} From ff0b42f0d352be18d89a604368f7b63131563ab3 Mon Sep 17 00:00:00 2001 From: "zhanglei.sec" Date: Thu, 24 Dec 2020 21:07:16 +0800 Subject: [PATCH 2/8] Update readme. --- agent/README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/agent/README.md b/agent/README.md index 88b15407e..639e0982d 100644 --- a/agent/README.md +++ b/agent/README.md @@ -66,7 +66,7 @@ Agent will start a synchronous producer to send data to Kafka, please remember t ### Other Methods You can use custom data output by implementing `Transport interface` under [transport](transport/transport.go).Next, modify the `main` function and set it as the default transport method.In the future, we will support gRPC. ## Logs -You can configure the storage path of the log file by setting the `log` parameter(default is `log/agent_smith.log`), but for more detailed log configuration, please modify the corresponding configuration in the `main` function. All logs of error level or above will be sent to [Data Output](#About-Data-Output). +You can configure the storage path of the log file by setting the `log` parameter(default is `log/agent_smith.log`), but for more detailed log configuration, please modify the corresponding configuration in the `main` function. All logs of error level or above will be sent to [Data Output](#about-data-output). ## Config File Currently for testing purposes, a configuration file is provided to control the addition and deletion of plugins. This poses a great security risk, please do not use it in a production environment. @@ -88,12 +88,12 @@ plugins : ``` Among them, `name` and `version` need to be the same as the [plugin](support/README.md#registration) config, `path` is used to find the plugin binary file, and `sha256` is used to verify the actual startup file. -All events related to the plugin can be seen in the [log](#Logs) file. +All events related to the plugin can be seen in the [log](#logs) file. ## Example With AgentSmith-HIDS Driver ### Precondition * The [Linux Kernrl Module](../driver) (a ko file). * The [Driver Plugin](driver) (a binary file). -* The [Agent](#To-Start-Using-AgentSmith-HIDS-Agent) (a binary file). +* The [Agent](#to-start-using-agentsmith-hids-agent) (a binary file). ### Select a working directory I will use `/etc/hids` as the working directory for the following steps: ``` @@ -127,7 +127,7 @@ If you want to disable this plugin, modify the configuration file and delete dri ``` echo "plugins: []" > /etc/hids/config.yaml ``` -If you want to enable the Driver Plugin again, just [restore the configuration file](#Create-config-file). +If you want to enable the Driver Plugin again, just [restore the configuration file](#create-config-file). ## License AgentSmith-HIDS Agent are distributed under the Apache-2.0 license. 
\ No newline at end of file From 1e0f996293f39dc8b668826f87aa93e0feb7e3fb Mon Sep 17 00:00:00 2001 From: "zhanglei.sec" Date: Thu, 24 Dec 2020 21:08:06 +0800 Subject: [PATCH 3/8] Fix link. --- agent/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/agent/README.md b/agent/README.md index 639e0982d..03eaf2b93 100644 --- a/agent/README.md +++ b/agent/README.md @@ -66,7 +66,7 @@ Agent will start a synchronous producer to send data to Kafka, please remember t ### Other Methods You can use custom data output by implementing `Transport interface` under [transport](transport/transport.go).Next, modify the `main` function and set it as the default transport method.In the future, we will support gRPC. ## Logs -You can configure the storage path of the log file by setting the `log` parameter(default is `log/agent_smith.log`), but for more detailed log configuration, please modify the corresponding configuration in the `main` function. All logs of error level or above will be sent to [Data Output](#about-data-output). +You can configure the storage path of the log file by setting the `log` parameter(default is `log/agent_smith.log`), but for more detailed log configuration, please modify the corresponding configuration in the `main` function. All logs of error level or above will be sent to [Data Output](#data-output). ## Config File Currently for testing purposes, a configuration file is provided to control the addition and deletion of plugins. This poses a great security risk, please do not use it in a production environment. From 814207fa8c8ba4fb6e1b175b46da7738401852b9 Mon Sep 17 00:00:00 2001 From: "zhanglei.sec" Date: Thu, 24 Dec 2020 21:11:27 +0800 Subject: [PATCH 4/8] Update gits. --- agent/.github/ISSUE_TEMPLATE/bug_report.md | 36 +++++++++++++++++ .../.github/ISSUE_TEMPLATE/feature_request.md | 20 ++++++++++ agent/.github/PULL_REQUEST_TEMPLATE.md | 40 +++++++++++++++++++ agent/.gitmodules | 3 ++ 4 files changed, 99 insertions(+) create mode 100644 agent/.github/ISSUE_TEMPLATE/bug_report.md create mode 100644 agent/.github/ISSUE_TEMPLATE/feature_request.md create mode 100644 agent/.github/PULL_REQUEST_TEMPLATE.md create mode 100644 agent/.gitmodules diff --git a/agent/.github/ISSUE_TEMPLATE/bug_report.md b/agent/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 000000000..57a63fe62 --- /dev/null +++ b/agent/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,36 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: '' +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**To Reproduce** +Steps to reproduce the behavior: +1. Go to '...' +2. Click on '....' +3. Scroll down to '....' +4. See error + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Screenshots** +If applicable, add screenshots to help explain your problem. + +**OS information (please complete the following information):** + - Distribution: [e.g. Ubuntu] + - Version [e.g. 18.04.1] + - Kernel info [e.g. 5.4.0-56-generic] + +**Hardware info (if you think it's helpful) + - CPU: [e.g. Intel i7 8700k] + - Motherboard: [e.g. AORUS Z370] + +**Additional context** +Add any other context about the problem here. 
diff --git a/agent/.github/ISSUE_TEMPLATE/feature_request.md b/agent/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 000000000..bbcbbe7d6 --- /dev/null +++ b/agent/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,20 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: '' +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. diff --git a/agent/.github/PULL_REQUEST_TEMPLATE.md b/agent/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 000000000..2f148c050 --- /dev/null +++ b/agent/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,40 @@ +A similar PR may already be submitted! +Please search among the [Pull request](../) before creating one. + +Thanks for submitting a pull request! Please provide enough information so that others can review your pull request: + +For more information, see the `CONTRIBUTING` guide. + + +**Summary** + + + +This PR fixes/implements the following **bugs/features** + +* [ ] Bug 1 +* [ ] Bug 2 +* [ ] Feature 1 +* [ ] Feature 2 +* [ ] Breaking changes + + + +Explain the **motivation** for making this change. What existing problem does the pull request solve? + + + +**Test plan (required)** + +Demonstrate the code is solid. Example: The exact commands you ran and their output, screenshots / videos if the pull request changes UI. + + + +**Code formatting** + + + +**Closing issues** + + +Fixes # diff --git a/agent/.gitmodules b/agent/.gitmodules new file mode 100644 index 000000000..c040cb986 --- /dev/null +++ b/agent/.gitmodules @@ -0,0 +1,3 @@ +[submodule "support/rust/flexi_logger"] + path = support/rust/flexi_logger + url = git@github.com:bytedance/flexi_logger.git From 0a76f4a58aace06a9865f3ccd6ff9b4774511c81 Mon Sep 17 00:00:00 2001 From: "zhanglei.sec" Date: Thu, 24 Dec 2020 21:15:58 +0800 Subject: [PATCH 5/8] Delete error deps. 
--- agent/.gitmodules | 3 - agent/support/rust/flexi_logger/.gitignore | 17 - agent/support/rust/flexi_logger/.travis.yml | 36 - agent/support/rust/flexi_logger/CHANGELOG.md | 436 -------- agent/support/rust/flexi_logger/Cargo.toml | 56 -- .../support/rust/flexi_logger/LICENSE-APACHE | 201 ---- agent/support/rust/flexi_logger/LICENSE-MIT | 25 - agent/support/rust/flexi_logger/README.md | 124 --- .../benches/bench_reconfigurable.rs | 43 - .../flexi_logger/benches/bench_standard.rs | 45 - .../rust/flexi_logger/examples/colors.rs | 75 -- .../rust/flexi_logger/examples/performance.rs | 51 - .../rust/flexi_logger/scripts/cleanup.rs | 46 - .../rust/flexi_logger/scripts/qualify.rs | 85 -- .../rust/flexi_logger/scripts/qualify_fast.rs | 58 -- .../rust/flexi_logger/src/code_examples.rs | 301 ------ .../rust/flexi_logger/src/deferred_now.rs | 23 - .../rust/flexi_logger/src/flexi_error.rs | 63 -- .../rust/flexi_logger/src/flexi_logger.rs | 151 --- .../support/rust/flexi_logger/src/formats.rs | 416 -------- agent/support/rust/flexi_logger/src/lib.rs | 63 -- .../flexi_logger/src/log_specification.rs | 927 ----------------- agent/support/rust/flexi_logger/src/logger.rs | 936 ------------------ .../rust/flexi_logger/src/parameters.rs | 145 --- .../rust/flexi_logger/src/primary_writer.rs | 251 ----- .../src/reconfiguration_handle.rs | 157 --- .../support/rust/flexi_logger/src/writers.rs | 105 -- .../src/writers/file_log_writer.rs | 502 ---------- .../src/writers/file_log_writer/builder.rs | 293 ------ .../src/writers/file_log_writer/config.rs | 45 - .../src/writers/file_log_writer/state.rs | 713 ------------- .../flexi_logger/src/writers/log_writer.rs | 49 - .../flexi_logger/src/writers/syslog_writer.rs | 321 ------ .../flexi_logger/tests/test_age_or_size.rs | 109 -- .../rust/flexi_logger/tests/test_colors.rs | 16 - .../tests/test_custom_log_writer.rs | 62 -- .../tests/test_custom_log_writer_format.rs | 78 -- .../tests/test_default_file_and_writer.rs | 43 - .../tests/test_default_files_dir.rs | 21 - .../tests/test_default_files_dir_rot.rs | 17 - .../tests/test_detailed_files_rot.rs | 23 - .../test_detailed_files_rot_timestamp.rs | 24 - .../tests/test_env_logger_style.rs | 12 - .../rust/flexi_logger/tests/test_mods.rs | 66 -- .../rust/flexi_logger/tests/test_mods_off.rs | 47 - .../flexi_logger/tests/test_multi_logger.rs | 104 -- .../tests/test_multi_threaded_cleanup.rs | 155 --- .../tests/test_multi_threaded_dates.rs | 158 --- .../tests/test_multi_threaded_numbers.rs | 158 --- .../rust/flexi_logger/tests/test_no_logger.rs | 16 - .../tests/test_opt_files_dir_dscr.rs | 24 - .../tests/test_opt_files_dir_dscr_rot.rs | 41 - .../flexi_logger/tests/test_parse_errors.rs | 80 -- .../tests/test_reconfigure_methods.rs | 113 --- .../rust/flexi_logger/tests/test_recursion.rs | 32 - .../rust/flexi_logger/tests/test_specfile.rs | 147 --- .../rust/flexi_logger/tests/test_syslog.rs | 69 -- .../flexi_logger/tests/test_textfilter.rs | 54 - .../tests/test_windows_line_ending.rs | 55 - .../flexi_logger/tests/version_numbers.rs | 6 - 60 files changed, 8492 deletions(-) delete mode 100644 agent/.gitmodules delete mode 100644 agent/support/rust/flexi_logger/.gitignore delete mode 100644 agent/support/rust/flexi_logger/.travis.yml delete mode 100644 agent/support/rust/flexi_logger/CHANGELOG.md delete mode 100644 agent/support/rust/flexi_logger/Cargo.toml delete mode 100644 agent/support/rust/flexi_logger/LICENSE-APACHE delete mode 100644 agent/support/rust/flexi_logger/LICENSE-MIT delete mode 100644 
agent/support/rust/flexi_logger/README.md delete mode 100644 agent/support/rust/flexi_logger/benches/bench_reconfigurable.rs delete mode 100644 agent/support/rust/flexi_logger/benches/bench_standard.rs delete mode 100644 agent/support/rust/flexi_logger/examples/colors.rs delete mode 100644 agent/support/rust/flexi_logger/examples/performance.rs delete mode 100644 agent/support/rust/flexi_logger/scripts/cleanup.rs delete mode 100644 agent/support/rust/flexi_logger/scripts/qualify.rs delete mode 100644 agent/support/rust/flexi_logger/scripts/qualify_fast.rs delete mode 100644 agent/support/rust/flexi_logger/src/code_examples.rs delete mode 100644 agent/support/rust/flexi_logger/src/deferred_now.rs delete mode 100644 agent/support/rust/flexi_logger/src/flexi_error.rs delete mode 100644 agent/support/rust/flexi_logger/src/flexi_logger.rs delete mode 100644 agent/support/rust/flexi_logger/src/formats.rs delete mode 100644 agent/support/rust/flexi_logger/src/lib.rs delete mode 100644 agent/support/rust/flexi_logger/src/log_specification.rs delete mode 100644 agent/support/rust/flexi_logger/src/logger.rs delete mode 100644 agent/support/rust/flexi_logger/src/parameters.rs delete mode 100644 agent/support/rust/flexi_logger/src/primary_writer.rs delete mode 100644 agent/support/rust/flexi_logger/src/reconfiguration_handle.rs delete mode 100644 agent/support/rust/flexi_logger/src/writers.rs delete mode 100644 agent/support/rust/flexi_logger/src/writers/file_log_writer.rs delete mode 100644 agent/support/rust/flexi_logger/src/writers/file_log_writer/builder.rs delete mode 100644 agent/support/rust/flexi_logger/src/writers/file_log_writer/config.rs delete mode 100644 agent/support/rust/flexi_logger/src/writers/file_log_writer/state.rs delete mode 100644 agent/support/rust/flexi_logger/src/writers/log_writer.rs delete mode 100644 agent/support/rust/flexi_logger/src/writers/syslog_writer.rs delete mode 100644 agent/support/rust/flexi_logger/tests/test_age_or_size.rs delete mode 100644 agent/support/rust/flexi_logger/tests/test_colors.rs delete mode 100644 agent/support/rust/flexi_logger/tests/test_custom_log_writer.rs delete mode 100644 agent/support/rust/flexi_logger/tests/test_custom_log_writer_format.rs delete mode 100644 agent/support/rust/flexi_logger/tests/test_default_file_and_writer.rs delete mode 100644 agent/support/rust/flexi_logger/tests/test_default_files_dir.rs delete mode 100644 agent/support/rust/flexi_logger/tests/test_default_files_dir_rot.rs delete mode 100644 agent/support/rust/flexi_logger/tests/test_detailed_files_rot.rs delete mode 100644 agent/support/rust/flexi_logger/tests/test_detailed_files_rot_timestamp.rs delete mode 100644 agent/support/rust/flexi_logger/tests/test_env_logger_style.rs delete mode 100644 agent/support/rust/flexi_logger/tests/test_mods.rs delete mode 100644 agent/support/rust/flexi_logger/tests/test_mods_off.rs delete mode 100644 agent/support/rust/flexi_logger/tests/test_multi_logger.rs delete mode 100644 agent/support/rust/flexi_logger/tests/test_multi_threaded_cleanup.rs delete mode 100644 agent/support/rust/flexi_logger/tests/test_multi_threaded_dates.rs delete mode 100644 agent/support/rust/flexi_logger/tests/test_multi_threaded_numbers.rs delete mode 100644 agent/support/rust/flexi_logger/tests/test_no_logger.rs delete mode 100644 agent/support/rust/flexi_logger/tests/test_opt_files_dir_dscr.rs delete mode 100644 agent/support/rust/flexi_logger/tests/test_opt_files_dir_dscr_rot.rs delete mode 100644 
agent/support/rust/flexi_logger/tests/test_parse_errors.rs delete mode 100644 agent/support/rust/flexi_logger/tests/test_reconfigure_methods.rs delete mode 100644 agent/support/rust/flexi_logger/tests/test_recursion.rs delete mode 100644 agent/support/rust/flexi_logger/tests/test_specfile.rs delete mode 100644 agent/support/rust/flexi_logger/tests/test_syslog.rs delete mode 100644 agent/support/rust/flexi_logger/tests/test_textfilter.rs delete mode 100644 agent/support/rust/flexi_logger/tests/test_windows_line_ending.rs delete mode 100644 agent/support/rust/flexi_logger/tests/version_numbers.rs diff --git a/agent/.gitmodules b/agent/.gitmodules deleted file mode 100644 index c040cb986..000000000 --- a/agent/.gitmodules +++ /dev/null @@ -1,3 +0,0 @@ -[submodule "support/rust/flexi_logger"] - path = support/rust/flexi_logger - url = git@github.com:bytedance/flexi_logger.git diff --git a/agent/support/rust/flexi_logger/.gitignore b/agent/support/rust/flexi_logger/.gitignore deleted file mode 100644 index 6a3af2091..000000000 --- a/agent/support/rust/flexi_logger/.gitignore +++ /dev/null @@ -1,17 +0,0 @@ -examples/_* -target -Cargo.lock -*.alerts -*.log -*.seclog -*.gz -*.zip -link_to_log -link_to_mt_log -log_files -todo -*logspec.toml -tests/*logspec.toml -*~ -.*~ -.vscode \ No newline at end of file diff --git a/agent/support/rust/flexi_logger/.travis.yml b/agent/support/rust/flexi_logger/.travis.yml deleted file mode 100644 index e625ae7d8..000000000 --- a/agent/support/rust/flexi_logger/.travis.yml +++ /dev/null @@ -1,36 +0,0 @@ -language: rust - -rust: - - stable - - beta - - nightly - # - 1.37.0 // does not work since cargo test is executed automatically - -os: - - linux - - windows - - osx - -jobs: - fast_finish: true - allow_failures: - - rust: nightly - - os: windows - include: - - if: rust = stable - script: - - rustup component add rustfmt - - cargo fmt --all -- --check - - - if: rust = stable OR rust = beta OR rust = nightly - script: - - cargo test --release --all-features - - cargo doc --all-features --no-deps - - cargo test --release - - cargo test --release --no-default-features - - - if: rust = "1.37.0" - script: - - cargo build --release - - cargo build --release --no-default-features - - cargo build --release --all-features diff --git a/agent/support/rust/flexi_logger/CHANGELOG.md b/agent/support/rust/flexi_logger/CHANGELOG.md deleted file mode 100644 index 118f36af3..000000000 --- a/agent/support/rust/flexi_logger/CHANGELOG.md +++ /dev/null @@ -1,436 +0,0 @@ -# Changelog for flexi_logger - -All notable changes to this project will be documented in this file. - -The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) and this -project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). - -## [0.16.2] - 2020-11-18 - -Add module -[code-examples](https://docs.rs/flexi_logger/latest/flexi_logger/code_examples/index.html) -with additional usage documentation. -This is a follow-up of a PR, kudos goes to [devzbysiu](https://github.com/devzbysiu)! - -## [0.16.1] - 2020-09-20 - -Support empty toml spec files (kudos to ijackson for -[pull request 66](https://github.com/emabee/flexi_logger/pull/66)) -(was supposed to be part of 0.16.0, but I had forgotten to merge it). - -## [0.16.0] - 2020-09-19 - -If file logging is used, do not create the output file if no log is written. -Solves [issue-62](https://github.com/emabee/flexi_logger/issues/62). 
- -Improve color handling - -- introduce AdaptiveFormat for a clearer API -- Support using feature `atty` without provided coloring -- Extend example `colors` to provide insight in how AdaptiveFormat works -- Remove the deprecated method `Logger::do_not_log()`; use `log_target()` with `LogTarget::DevNull` instead. -- Remove deprecated method `Logger::o_log_to_file()`; use `log_target()` instead. The clearer convenience method `Logger::log_to_file()` is still available. - -Improve the compression feature. Solves [issue-65](https://github.com/emabee/flexi_logger/issues/65). - -- breaking change: change the file suffix for the compressed log files from `.zip` to `.gz` -- Fix wrong wording in code and documentation -- deprecate the feature name `ziplog` and call the feature now `compress` -- rename `Cleanup::KeepZipFiles` into `Cleanup::KeepCompressedFiles` - and `Cleanup::KeepLogAndZipFiles` into `Cleanup::KeepLogAndCompressedFiles` - - the old names still work but are deprecated - -If file logging is used, do not create the output file if no log is written -Solves issue [issue-62](https://github.com/emabee/flexi_logger/issues/62). - -## [0.15.12] - 2020-28-08 - -Make `1.37.0` the minimal rust version for `flexi_logger`. - -## [0.15.11] - 2020-08-07 - -Introduce feature `specfile_without_notification` to allow coping with OS issues -(solves [issue-59](https://github.com/emabee/flexi_logger/issues/59)). - -## [0.15.10] - 2020-07-22 - -Minor code maintenance. - -## [0.15.9] - 2020-07-21 - -Allow using the log target with fantasy names, like with `env_logger`. -Solves [issue-56](https://github.com/emabee/flexi_logger/issues/56). - -## [0.15.8] - 2020-07-20 - -Allow modifying the coloring palette through the environment variable `FLEXI_LOGGER_PALETTE`. -See function [style](https://docs.rs/flexi_logger/latest/flexi_logger/fn.style.html) for details. -Solves [issue-55](https://github.com/emabee/flexi_logger/issues/55). - -By default, don't use colors if stdout or stderr are not a terminal -Solves [issue-57](https://github.com/emabee/flexi_logger/issues/57). - -Add variant Criterion::AgeOrSize -(kudos to -[pscott](https://github.com/pscott), -[PR-54](https://github.com/emabee/flexi_logger/pull/54)). - -## [0.15.7] - 2020-07-02 - -Add some Debug derives -(kudos to -[pscott](https://github.com/pscott), -[PR-52](https://github.com/emabee/flexi_logger/pull/52)). - -## [0.15.6] - 2020-07-02 - -Introduce separate formatting for stdout -(kudos to -[pscott](https://github.com/pscott), -[PR-51](https://github.com/emabee/flexi_logger/pull/51)). - -Deprecate `Logger::do_not_log()`. - -## [0.15.5] - 2020-06-18 - -Add `Logger::duplicate_to_stdout()` to fix -[issue-47](https://github.com/emabee/flexi_logger/issues/47). - -## [0.15.4] - 2020-06-09 - -Fix [issue-45](https://github.com/emabee/flexi_logger/issues/45), which was a panic in -the specfile watcher when some log files were deleted manually while the program was running -(kudos to -[avl](https://github.com/avl), -[PR-46](https://github.com/emabee/flexi_logger/pull/46)). - -## [0.15.3] - 2020-06-04 - -Add compatibility with multi_log by adding methods -`Logger::build` and `Logger::build_with_specfile` (fixes issue-44). - -Add `LogSpecBuilder::insert_modules_from()` (fixes issue-43). - -## [0.15.2] - 2020-03-24 - -Improve handling of parse-errors. - -Fix default format for files (was and is documented to be uncolored, but was colored). 
- -## [0.15.1] - 2020-03-04 - -Make the textfilter functionality an optional default feature; -deselecting it removes the regex crate as a required dependency, -which reduces the size overhead for any binary using `flexi_logger` -(kudos to [Petre Eftime](petre.eftime@gmail.com)). - -## [0.15.0] - 2020-02-27 - -Refine and rename error variants to allow e.g. differentiating -between errors related to the output (files) -and errors related to the specfile. - -## [0.14.8] - 2020-02-06 - -Make cleanup more robust, and allow controlling the cleanup-thread also with -`Logger::start_with_specfile()`. - -## [0.14.7] - 2020-02-04 - -If rotation is used with cleanup, do the cleanup by default in a background thread -(solves [issue 39](https://github.com/emabee/flexi_logger/issues/39)). - -For the ziplog feature, switch from `zip` crate to `flate2`. - -## [0.14.6] - 2020-01-28 - -Fix [issue 38](https://github.com/emabee/flexi_logger/issues/38) -(Old log files are not removed if rCURRENT doesn't overflow). - -## [0.14.5] - 2019-11-06 - -Pass format option into custom loggers (pull request 37). - -## [0.14.4] - 2019-09-25 - -Fix bug in specfile handling ([issue 36](https://github.com/emabee/flexi_logger/issues/36)). - -Improve docu and implementation of create_symlink. - -Minor other stuff. - -## [0.14.3] - 2019-08-04 - -Allow defining custom handlers for the default log target -(solves [issue 32](https://github.com/emabee/flexi_logger/issues/32)). - -## [0.14.2] - 2019-08-04 - -Use implicit locking of stderr in StdErrWriter. - -Allow failures in travis' windows build. - -Add license files. - -## [0.14.1] - 2019-08-04 - -Support recursive logging also with FileLogWriter, sharing the buffer with the PrimaryWriter. - -Fix multi-threading issue (incorrect line-break handling with stderr). - -## [0.14.0] - 2019-07-22 - -Further stabilize the specfile feature. - -Remove `LogSpecification::ensure_specfile_exists()` and `LogSpecification::from_file()` -from public API, where they should not be (-> version bump). - -Harmonize all eprintln! calls to -prefix the output with "`[flexi_logger]` ". - -## [0.13.4] - 2019-07-19 - -Only relevant for the `specfile` feature: -initialize the logger before dealing in any way with the specfile, -and do the initial read of the specfile in the main thread, -i.e. synchronously, to ensure a deterministic behavior during startup -(fixes [issue 31](https://github.com/emabee/flexi_logger/issues/31)). - -## [0.13.3] - 2019-07-08 - -Improve the file watch for the specfile to make the `specfile` feature more robust. -E.g. allow editing the specfile on linux -with editors that move the original file to a backup name. - -Add an option to write the log to stdout, as recommended for -[twelve-factor apps](https://12factor.net/logs). - -## [0.13.2] - 2019-06-02 - -Make get_creation_date() more robust on all platforms. - -## [0.13.1] - 2019-05-29 - -Fix fatal issue with get_creation_date() on linux -(see ). - -## [0.13.0] - 2019-05-28 - -Improve performance for plain stderr logging. - -Improve robustnesss for recursive log calls. - -## [0.12.0] - 2019-05-24 - -Revise handling of record.metadata().target() versus record.module_path(). - -Incompatible API modification: Logger.rotate() takes now three parameters. - -Suppport different formatting for stderr and files. - -Add feature `colors` (see `README.md` for details). - -Remove the deprecated `Logger::start_reconfigurable()` and `Logger::rotate_over_size()`. 
- -## [0.11.5] - 2019-05-15 - -Fix [issue 26](https://github.com/emabee/flexi_logger/issues/26) (logging off for specific modules). - -Fix [issue 27](https://github.com/emabee/flexi_logger/issues/27) (log files blank after restart). - -Fix [issue 28](https://github.com/emabee/flexi_logger/issues/28) -(add a corresponding set of unit tests to FileLogWriter). - -## [0.11.4] - 2019-04-01 - -Version updates of dependencies. - -## [0.11.3] - 2019-03-28 - -Add SyslogWriter. - -## [0.11.2] - 2019-03-22 - -Change API to more idiomatic parameter types, in a compatible way. - -Add first implementation of a SyslogWriter. - -## [0.11.1] - 2019-03-06 - -Add option to write windows line endings, rather than a plain `\n`. - -## [0.11.0] - 2019-03-02 - -Add options to cleanup rotated log files, by deleting and/or zipping older files. - -Remove some deprecated methods. - -## [0.10.7] - 2019-02-27 - -Let the BlackHoleLogger, although it doesn't write a log, still duplicate to stderr. - -## [0.10.6] - 2019-02-26 - -Deprecate `Logger::start_reconfigurable()`, let `Logger::start()` return a reconfiguration handle. - -Add an option to write all logs to nowhere (i.e., do not write any logs). - -## [0.10.5] - 2019-01-15 - -Eliminate performance penalty for using reconfigurability. - -## [0.10.4] - 2019-01-07 - -Add methods to modify the log spec temporarily. - -## [0.10.3] - 2018-12-08 - -Advance to edition 2018. - -## [0.10.2] - 2018-12-07 - -Log-spec parsing is improved, more whitespace is tolerated. - -## [0.10.1] - 2018-11-08 - -When file rotation is used, the name of the file to which the logs are written is now stable. - -Details: - -- the logs are always written to a file with infix _rCURRENT -- if this file exceeds the specified rotate-over-size, it is closed and renamed - to a file with a sequential number infix, and then the logging continues again - to the (fresh) file with infix _rCURRENT - -Example: - -After some logging with your program my_prog, you will find files like - -```text -my_prog_r00000.log -my_prog_r00001.log -my_prog_r00002.log -my_prog_rCURRENT.log -``` - -## [0.10.0] - 2018-10-30 - -`LogSpecification::parse()` now returns a `Result`, rather than -a log spec directly (-> version bump). -This enables a more reliable usage of FlexiLogger in non-trivial cases. - -For the sake of compatibility for the normal usecases, the Logger methods `with_str()` etc. -remain unchanged. An extra method is added to retrieve parser errors, if desired. - -## [0.9.3] - 2018-10-27 - -Docu improvement. - -## [0.9.2] - 2018-08-13 - -Fix incorrect filename generation with rotation, -i.e., switch off timestamp usage when rotation is used. - -## [0.9.1] - 2018-08-12 - -Introduce `Logger::duplicate_to_stderr()`, as a more flexible replacement for `duplicate_error()` -and `duplicate_info()`. - -## [0.9.0] - 2018-07-06 - -### Eliminate String allocation - -Get rid of the unneccessary String allocation we've been -carrying with us since ages. This implies changing the signature of the format functions. - -In case you provide your own format function, you'll need to adapt it to the new signature. -Luckily, the effort is low. - -As an example, here is how the definition of the `opt_format` function changed: - -```rust -- pub fn opt_format(record: &Record) -> String { -- format!( ---- -+ pub fn opt_format(w: &mut io::Write, record: &Record) -> Result<(), io::Error> { -+ write!( -+ w, -``` - -Similarly, if you're using the advanced feature of providing your own implementation of LogWriter, -you need to adapt it. 
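As a hedged illustration of that adaptation, the sketch below shows a complete custom format function under the post-0.9.0 signature from the diff above; the function name and output layout are made up, `dyn` is added for the 2018 edition, and note that later releases extend the signature again (e.g. with a `DeferredNow` argument), so this matches the 0.9.x API only.

```rust
use log::Record;
use std::io;

// Illustrative custom format function in the post-0.9.0 style: write directly
// into `w` and propagate io errors instead of building and returning a String.
pub fn my_format(w: &mut dyn io::Write, record: &Record) -> Result<(), io::Error> {
    write!(
        w,
        "[{}] {}: {}",
        record.level(),
        record.module_path().unwrap_or("<unnamed>"),
        record.args()
    )
}
```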
The change again is trivial, and should even slightly -simplify your code (you can return io errors and don't have to catch them yourself). - -### Misc - -The docu generation on docs.rs is now configured to considers all features, we thus -expose `Logger.start_with_specfile()` only if the specfile feature is used. So we can revert the -change done with 0.8.1. - -## [0.8.4] - 2018-06-18 - -Add flexi_logger to category `development-tools::debugging` - -## [0.8.3] - 2018-05-14 - -Make append() also work for rotating log files - -## [0.8.2] - 2018-04-03 - -Add option to append to existing log files, rather than always truncating them - -## [0.8.1] - 2018-3-19 - -Expose `Logger.start_with_specfile()` always -...and not only if the feature "specfile" is used - otherwise it does not appear -in the auto-generated docu (because it does not use --allfeatures) - -## [0.8.0] - 2018-03-18 - -Add specfile feature - -- Add a feature that allows to specify the LogSpecification via a file - that can be edited while the program is running -_ Remove/hide deprecated APIs -- As a consequence, cleanup code, get rid of duplicate stuff. - -## [0.7.1] - 2018-03-07 - -Bugfix: do not create empty files when used in env_logger style. -Update docu and the description in cargo.toml - -## [0.7.0] - 2018-02-25 - -Add support for multiple log output streams - -- replace FlexiWriter with DefaultLogWriter, which wraps a FileLogWriter -- add test where a SecurityWriter and an AlertWriter are added -- add docu -- move deprecated structs to separate package -- move benches to folder benches - -## [0.6.13] 2018-02-09 - -Add Logger::with_env_or_str() - -## [0.6.12] 2018-2-07 - -Add ReconfigurationHandle::parse_new_spec() - -## [0.6.11] 2017-12-29 - -Fix README.md - -## [0.6.10] 2017-12-29 - -Publish version based on log 0.4 - -## (...) - -## [0.6.0] 2017-07-13 - -Use builder pattern for LogSpecification and Logger - -- deprecate outdated API -- "objectify" LogSpecification -- improve documentation, e.g. document the dash/underscore issue diff --git a/agent/support/rust/flexi_logger/Cargo.toml b/agent/support/rust/flexi_logger/Cargo.toml deleted file mode 100644 index 2d954c7c8..000000000 --- a/agent/support/rust/flexi_logger/Cargo.toml +++ /dev/null @@ -1,56 +0,0 @@ -[package] -name = "flexi_logger" -version = "0.16.2" -authors = ["emabee "] -edition = "2018" -license = "MIT OR Apache-2.0" -readme = "README.md" -repository = "https://github.com/emabee/flexi_logger" -documentation = "https://docs.rs/flexi_logger" -homepage = "https://crates.io/crates/flexi_logger" -description = """ -An easy-to-configure and flexible logger that writes logs to stderr and/or to files. \ -It allows custom logline formats, and it allows changing the log specification at runtime. \ -It also allows defining additional log streams, e.g. 
for alert or security messages.\ -""" -keywords = ["file", "logger"] -categories = ["development-tools::debugging"] - -[package.metadata.docs.rs] -all-features = true - -[features] -default = ["colors", "textfilter"] -colors = ["atty", "lazy_static", "yansi"] -specfile = ["specfile_without_notification", "notify"] -specfile_without_notification = ["serde", "toml", "serde_derive"] -syslog_writer = ["libc", "hostname"] -ziplog = ["compress"] # for backwards compatibility -compress = ["flate2"] -textfilter = ["regex"] - -[dependencies] -plugin = { path = "../plugin" } -anyhow = "1" -atty = { version = "0.2", optional = true } -chrono = "0.4" -glob = "0.3" -hostname = { version = "0.3", optional = true } -lazy_static = { version = "1.4", optional = true } -log = { version = "0.4", features = ["std"] } -notify = { version = "4.0", optional = true } -regex = { version = "1.1", optional = true } -serde = { version = "1.0", optional = true } -serde_derive = { version = "1.0", optional = true } -thiserror = "1.0" -toml = { version = "0.5", optional = true } -yansi = { version = "0.5", optional = true } -flate2 = { version = "1.0", optional = true } - -[target.'cfg(linux)'.dependencies] -libc = { version = "^0.2.50", optional = true } - -[dev-dependencies] -serde_derive = "1.0" -version-sync = "0.9" -#env_logger = '*' # optionally needed for the performance example diff --git a/agent/support/rust/flexi_logger/LICENSE-APACHE b/agent/support/rust/flexi_logger/LICENSE-APACHE deleted file mode 100644 index f8e5e5ea0..000000000 --- a/agent/support/rust/flexi_logger/LICENSE-APACHE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - -4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. \ No newline at end of file diff --git a/agent/support/rust/flexi_logger/LICENSE-MIT b/agent/support/rust/flexi_logger/LICENSE-MIT deleted file mode 100644 index 695259257..000000000 --- a/agent/support/rust/flexi_logger/LICENSE-MIT +++ /dev/null @@ -1,25 +0,0 @@ -Copyright (c) 2018 The AUTHORS - -Permission is hereby granted, free of charge, to any -person obtaining a copy of this software and associated -documentation files (the "Software"), to deal in the -Software without restriction, including without -limitation the rights to use, copy, modify, merge, -publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software -is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice -shall be included in all copies or substantial portions -of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. diff --git a/agent/support/rust/flexi_logger/README.md b/agent/support/rust/flexi_logger/README.md deleted file mode 100644 index d01754051..000000000 --- a/agent/support/rust/flexi_logger/README.md +++ /dev/null @@ -1,124 +0,0 @@ -# flexi_logger - -**A flexible and easy-to-use logger that writes logs to stderr and/or to files, and/or to -other output streams, and that can be influenced while the program is running.** - -[![Latest version](https://img.shields.io/crates/v/flexi_logger.svg)](https://crates.io/crates/flexi_logger) -[![Documentation](https://docs.rs/flexi_logger/badge.svg)](https://docs.rs/flexi_logger) -![License](https://img.shields.io/crates/l/flexi_logger.svg) -[![Travis CI](https://travis-ci.org/emabee/flexi_logger.svg?branch=master)](https://travis-ci.org/emabee/flexi_logger) - -## Usage - -Add flexi_logger to the dependencies section in your project's `Cargo.toml`, with - -```toml -[dependencies] -flexi_logger = "0.16" -log = "0.4" -``` - -or, if you want to use some of the optional features, with something like - -```toml -[dependencies] -flexi_logger = { version = "0.16", features = ["specfile", "compress"] } -log = "0.4" -``` - -or, to get the smallest footprint (and no colors), with - -```toml -[dependencies] -flexi_logger = { version = "0.16", default_features = false } -log = "0.4" -``` - -Note: `log` is needed because `flexi_logger` plugs into the standard Rust logging facade given -by the [log crate](https://crates.io/crates/log), -and you use the ```log``` macros to write log lines from your code. - -## Code examples - -See the documentation of module -[code_examples](https://docs.rs/flexi_logger/latest/flexi_logger/code_examples/index.html). - -## Options - -There are configuration options to e.g. - -* decide whether you want to write your logs to stderr or to a file, -* configure the path and the filenames of the log files, -* use file rotation, -* specify the line format for the log lines, -* define additional log streams, e.g for alert or security messages, -* support changing the log specification on the fly, while the program is running, - -See the API documentation for a complete reference. - -## Crate Features - -Make use of any of these features by specifying them in your `Cargo.toml` -(see above in the usage section). - -### **`colors`** - -Getting colored output is also possible without this feature, -by implementing and using your own coloring format function. - -The default feature `colors` simplifies this by doing three things: - -* it activates the optional dependency to `yansi` and -* provides additional colored pendants to the existing uncolored format functions -* it uses `colored_default_format()` for the output to stderr, - and the non-colored `default_format()` for the output to files -* it activates the optional dependency to `atty` to being able to switch off - coloring if the output is not sent to a terminal but e.g. piped to another program. - -**Colors**, -or styles in general, are a matter of taste, and no choice will fit every need. 
So you can override the default formatting and coloring in various ways. - -With `--no-default-features --features="atty"` you can remove the yansi-based coloring but keep the capability to switch off your own coloring. - -### **`compress`** - -The `compress` feature adds two options to the `Logger::Cleanup` `enum`, which allow keeping some -or all rotated log files in compressed form (`.gz`) rather than as plain text files. - -The feature was previously called `ziplogs`. The old name still works, but is deprecated and -should be replaced. - -### **`specfile`** - -The `specfile` feature adds a method `Logger::start_with_specfile(specfile)`. - -If started with this method, `flexi_logger` uses the log specification -that was given to the factory method (one of `Logger::with...()`) as initial spec -and then tries to read the log specification from the named file. - -If the file does not exist, it is created and filled with the initial spec. - -By editing the log specification in the file while the program is running, -you can change the logging behavior in real-time. - -The implementation of this feature uses some additional crates that you might -not want to depend on with your program if you don't use this functionality. -For that reason the feature is not active by default. - -### **`specfile_without_notification`** - -Pretty much like `specfile`, except that updates to the file are being ignored. -See [issue-59](https://github.com/emabee/flexi_logger/issues/59) for more details. - -### **`textfilter`** - -Removes the ability to filter logs by text, but also removes the dependency on the regex crate. - -### **`syslog`** - -This is still an experimental feature, likely working, but not well tested. -Feedback of all kinds is highly appreciated. - -## Versions - -See the [change log](https://github.com/emabee/flexi_logger/blob/master/CHANGELOG.md). 
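To make the `specfile` feature description above concrete, here is a minimal usage sketch. It assumes flexi_logger 0.16 with `features = ["specfile"]` plus the `log` crate in Cargo.toml; the file name `logspec.toml` is arbitrary.

```rust
use flexi_logger::Logger;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // "info" is only the initial spec; the effective spec is read from
    // logspec.toml, which is created and filled with the initial spec
    // if it does not exist yet.
    let _handle = Logger::with_str("info").start_with_specfile("logspec.toml")?;

    log::info!("edit logspec.toml while the program runs to change log levels");
    Ok(())
}
```

Editing the spec file while the process is running then adjusts the logging behavior without a restart, as described above; keep the returned handle if you also want to reconfigure programmatically.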
diff --git a/agent/support/rust/flexi_logger/benches/bench_reconfigurable.rs b/agent/support/rust/flexi_logger/benches/bench_reconfigurable.rs deleted file mode 100644 index 98ebb5873..000000000 --- a/agent/support/rust/flexi_logger/benches/bench_reconfigurable.rs +++ /dev/null @@ -1,43 +0,0 @@ -#![feature(extern_crate_item_prelude)] -#![feature(test)] - -extern crate test; -use log::{error, trace}; - -use flexi_logger::Logger; -use test::Bencher; - -#[bench] -fn b10_no_logger_active(b: &mut Bencher) { - b.iter(use_error); -} - -#[bench] -fn b20_initialize_logger(_: &mut Bencher) { - Logger::with_str("info") - .log_to_file() - .directory("log_files") - .start() - .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); -} - -#[bench] -fn b30_relevant_logs(b: &mut Bencher) { - b.iter(use_error); -} - -#[bench] -fn b40_suppressed_logs(b: &mut Bencher) { - b.iter(use_trace); -} - -fn use_error() { - for _ in 1..100 { - error!("This is an error message"); - } -} -fn use_trace() { - for _ in 1..100 { - trace!("This is a trace message"); - } -} diff --git a/agent/support/rust/flexi_logger/benches/bench_standard.rs b/agent/support/rust/flexi_logger/benches/bench_standard.rs deleted file mode 100644 index 5dbe4b565..000000000 --- a/agent/support/rust/flexi_logger/benches/bench_standard.rs +++ /dev/null @@ -1,45 +0,0 @@ -#![feature(extern_crate_item_prelude)] -#![feature(test)] - -extern crate flexi_logger; -extern crate test; -#[macro_use] -extern crate log; - -use flexi_logger::Logger; -use test::Bencher; - -#[bench] -fn b10_no_logger_active(b: &mut Bencher) { - b.iter(use_error); -} - -#[bench] -fn b20_initialize_logger(_: &mut Bencher) { - Logger::with_str("info") - .log_to_file() - .directory("log_files") - .start() - .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); -} - -#[bench] -fn b30_relevant_logs(b: &mut Bencher) { - b.iter(use_error); -} - -#[bench] -fn b40_suppressed_logs(b: &mut Bencher) { - b.iter(use_trace); -} - -fn use_error() { - for _ in 1..100 { - error!("This is an error message"); - } -} -fn use_trace() { - for _ in 1..100 { - trace!("This is a trace message"); - } -} diff --git a/agent/support/rust/flexi_logger/examples/colors.rs b/agent/support/rust/flexi_logger/examples/colors.rs deleted file mode 100644 index cf1e5cf09..000000000 --- a/agent/support/rust/flexi_logger/examples/colors.rs +++ /dev/null @@ -1,75 +0,0 @@ -fn main() { - #[cfg(not(feature = "colors"))] - println!("Feature color is switched off"); - - #[cfg(feature = "colors")] - { - use atty::Stream::{Stderr, Stdout}; - use yansi::{Color, Paint, Style}; - - for i in 0..=255 { - println!("{}: {}", i, Paint::fixed(i, i)); - } - - println!(""); - - if atty::is(Stdout) { - println!( - "Stdout is considered a tty - \ - flexi_logger::AdaptiveFormat will use colors", - ); - } else { - println!( - "Stdout is not considered a tty - \ - flexi_logger::AdaptiveFormat will NOT use colors" - ); - } - - if atty::is(Stderr) { - println!( - "Stderr is considered a tty - \ - flexi_logger::AdaptiveFormat will use colors", - ); - } else { - println!( - "Stderr is not considered a tty - \ - flexi_logger::AdaptiveFormat will NOT use colors!" 
- ); - } - - // Enable ASCII escape sequence support on Windows consoles, - // but disable coloring on unsupported Windows consoles - if cfg!(windows) { - if !Paint::enable_windows_ascii() { - println!("unsupported windows console detected => coloring disabled"); - Paint::disable(); - return; - } - } - - println!( - "\n{}", - Style::new(Color::Fixed(196)) - .bold() - .paint("This is red output like by default with err!") - ); - println!( - "{}", - Style::new(Color::Fixed(208)) - .bold() - .paint("This is yellow output like by default with warn!") - ); - println!( - "{}", - Style::new(Color::Unset).paint("This is normal output like by default with info!") - ); - println!( - "{}", - Style::new(Color::Fixed(7)).paint("This is output like by default with debug!") - ); - println!( - "{}", - Style::new(Color::Fixed(8)).paint("This is grey output like by default with trace!") - ); - } -} diff --git a/agent/support/rust/flexi_logger/examples/performance.rs b/agent/support/rust/flexi_logger/examples/performance.rs deleted file mode 100644 index 113cf7fb5..000000000 --- a/agent/support/rust/flexi_logger/examples/performance.rs +++ /dev/null @@ -1,51 +0,0 @@ -use std::fmt; -use std::time::Instant; - -struct Struct { - data: [u8; 32], -} - -impl fmt::Display for Struct { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{:?}", self.data) - } -} - -fn main() { - // -------------------------------- - println!("flexi_logger"); - flexi_logger::Logger::with_str("off") - .format(flexi_logger::detailed_format) - .start() - .unwrap(); - // -------------------------------- - // $> Set-Item -Path Env:RUST_LOG -Value "trace" - // println!("env_logger"); - // env_logger::init(); - // $> Set-Item -Path Env:RUST_LOG - // -------------------------------- - let mut structs = Vec::new(); - for i in 0..100 { - structs.push(Struct { - data: [i as u8; 32], - }); - } - - { - // With format - let start = Instant::now(); - for s in &structs { - log::info!("{}", format!("{}", s)); - } - eprintln!("with format: {:?}", start.elapsed()); // 2-7ms - } - - { - // Plain logger - let start = Instant::now(); - for s in &structs { - log::info!("{}", s); - } - eprintln!("plain: {:?}", start.elapsed()); // 17-26ms - } -} diff --git a/agent/support/rust/flexi_logger/scripts/cleanup.rs b/agent/support/rust/flexi_logger/scripts/cleanup.rs deleted file mode 100644 index e867b2d2d..000000000 --- a/agent/support/rust/flexi_logger/scripts/cleanup.rs +++ /dev/null @@ -1,46 +0,0 @@ -//! Cleans up all files and folders that were produced by test runs. -//! -//! ```cargo -//! [dependencies] -//! glob = "*" -//! 
``` -extern crate glob; - -fn main() { - for pattern in &[ - "./*.log", - "./*.alerts", - "./*.seclog", - "./*logspec.toml", - "./log_files/**/*.log", - "./log_files/**/*.zip", - "./log_files/**/*.gz", - "./test_spec/*.toml", - ] { - for globresult in glob::glob(pattern).unwrap() { - match globresult { - Err(e) => eprintln!("Evaluating pattern {:?} produced error {}", pattern, e), - Ok(pathbuf) => { - std::fs::remove_file(&pathbuf).unwrap(); - } - } - } - } - - let dirs: Vec = glob::glob("./log_files/**") - .unwrap() - .filter_map(|r| match r { - Err(e) => { - eprintln!("Searching for folders produced error {}", e); - None - } - Ok(_) => Some(r.unwrap()), - }) - .collect(); - for pathbuf in dirs.iter().rev() { - std::fs::remove_dir(&pathbuf).unwrap(); - } - - std::fs::remove_dir("./log_files/").ok(); - std::fs::remove_dir("./test_spec/").ok(); -} diff --git a/agent/support/rust/flexi_logger/scripts/qualify.rs b/agent/support/rust/flexi_logger/scripts/qualify.rs deleted file mode 100644 index c0e824c54..000000000 --- a/agent/support/rust/flexi_logger/scripts/qualify.rs +++ /dev/null @@ -1,85 +0,0 @@ -//! ```cargo -//! [dependencies] -//! yansi = "0.5" -//! ``` -extern crate yansi; -use std::process::Command; - -macro_rules! run_command { - ($cmd:expr , $($arg:expr),*) => ( - let mut command = command!($cmd, $($arg),*); - let mut child = command.spawn().unwrap(); - let status = child.wait().unwrap(); - if !status.success() { - print!("> {}",yansi::Paint::red("qualify terminates due to error")); - std::process::exit(-1); - } - ) -} - -macro_rules! command { - ($cmd:expr , $($arg:expr),*) => ( - { - print!("\n> {}",yansi::Paint::yellow($cmd)); - let mut command = Command::new($cmd); - $( - print!(" {}",yansi::Paint::yellow(&$arg)); - command.arg($arg); - )* - print!("\n"); - command - } - ) -} - -fn run_script(s: &str) { - let mut path = std::path::PathBuf::from(std::env::var("CARGO_SCRIPT_BASE_PATH").unwrap()); - path.push(s); - let script = path.to_string_lossy().to_owned().to_string(); - run_command!("cargo", "script", script); -} - -fn main() { - // format - run_command!("cargo", "fmt"); - - // Build in important variants - std::fs::remove_file("Cargo.lock"); - run_command!("cargo", "+1.37.0", "build", "--no-default-features"); - run_command!("cargo", "+1.37.0", "build", "--all-features"); - - std::fs::remove_file("Cargo.lock"); - run_command!("cargo", "build"); - run_command!("cargo", "build", "--no-default-features"); - run_command!("cargo", "build", "--no-default-features", "--features=atty"); - run_command!("cargo", "build", "--all-features"); - run_command!("cargo", "build", "--release"); - run_command!("cargo", "build", "--release", "--all-features"); - - // Clippy in important variants - run_command!("cargo", "clippy", "--", "-D", "warnings"); - run_command!("cargo", "clippy", "--all-features", "--", "-D", "warnings"); - - // Run tests in important variants - run_command!("cargo", "test", "--release", "--all-features"); - run_command!("cargo", "test", "--no-default-features"); - run_command!("cargo", "test", "--release"); - #[rustfmt::skip] - run_command!("cargo", "test", "--release", "--features", "specfile_without_notification"); - run_script("cleanup"); - - // doc - run_command!("cargo", "doc", "--all-features", "--no-deps", "--open"); - - // check git status - let mut cmd = command!("git", "status", "-s"); - let child = cmd.stdout(std::process::Stdio::piped()).spawn().unwrap(); - let output = child.wait_with_output().unwrap(); - if output.stdout.len() > 0 { - print!("> {}", 
yansi::Paint::red("there are unsubmitted files")); - std::process::exit(-1); - } - - // say goodbye - println!("\n> all done :-) Looks like you're ready to \"cargo publish\"?"); -} diff --git a/agent/support/rust/flexi_logger/scripts/qualify_fast.rs b/agent/support/rust/flexi_logger/scripts/qualify_fast.rs deleted file mode 100644 index ec34153fe..000000000 --- a/agent/support/rust/flexi_logger/scripts/qualify_fast.rs +++ /dev/null @@ -1,58 +0,0 @@ -//! ```cargo -//! [dependencies] -//! yansi = "0.5" -//! ``` -extern crate yansi; -use std::process::Command; - -macro_rules! run_command { - ($cmd:expr , $($arg:expr),*) => ( - let mut command = command!($cmd, $($arg),*); - let mut child = command.spawn().unwrap(); - let status = child.wait().unwrap(); - if !status.success() { - print!("> {}",yansi::Paint::red("qualify terminates due to error")); - std::process::exit(-1); - } - ) -} - -macro_rules! command { - ($cmd:expr , $($arg:expr),*) => ( - { - print!("\n> {}",yansi::Paint::yellow($cmd)); - let mut command = Command::new($cmd); - $( - print!(" {}",yansi::Paint::yellow(&$arg)); - command.arg($arg); - )* - print!("\n"); - command - } - ) -} - -fn run_script(s: &str) { - let mut path = std::path::PathBuf::from(std::env::var("CARGO_SCRIPT_BASE_PATH").unwrap()); - path.push(s); - let script = path.to_string_lossy().to_owned().to_string(); - run_command!("cargo", "script", script); -} - -fn main() { - // Build in important variants - run_command!("cargo", "build", "--release", "--all-features"); - - // Clippy in important variants - run_command!("cargo", "clippy", "--all-features", "--", "-D", "warnings"); - - // Run tests in important variants - run_command!("cargo", "test", "--release", "--all-features"); - run_script("cleanup"); - - // doc - run_command!("cargo", "doc", "--all-features", "--no-deps", "--open"); - - // say goodbye - println!("\n> fast qualification is done :-) Looks like you're ready to do the full qualification?"); -} diff --git a/agent/support/rust/flexi_logger/src/code_examples.rs b/agent/support/rust/flexi_logger/src/code_examples.rs deleted file mode 100644 index cade9098b..000000000 --- a/agent/support/rust/flexi_logger/src/code_examples.rs +++ /dev/null @@ -1,301 +0,0 @@ -//! Here are some examples for the `flexi_logger` initialization. -//! -//! -//! ## Write logs to stderr -//! -//! Expect the log specification in the environment variable `RUST_LOG`: -//! -//! ` Logger::`[`with_env()`](../struct.Logger.html#method.with_env)`.start()?;` -//! -//! (if `RUST_LOG` is not set, or if its value cannot be interpreted, nothing is logged) -//! -//! or provide the log spec programmatically: -//! -//! ` Logger::`[`with_str("info")`](../struct.Logger.html#method.with_str)`.start()?;` -//! -//! or combine both options: -//! -//! ` Logger::`[`with_env_or_str("info")`](../struct.Logger.html#method.with_env_or_str)`.start()?;` -//! -//! After that, you just use the log-macros from the log crate. -//! -//! ## Choose the log output channel -//! -//! By default, logs are written to `stderr`. -//! With [`Logger::log_target`](../struct.Logger.html#method.log_target) -//! you can send the logs to `stdout`, a file, an implementation of `LogWriter`, -//! or write them not at all. -//! -//! When writing to files, you sometimes want to have parts of the log still on the terminal; -//! this can be achieved with -//! [`Logger::duplicate_to_stderr`](../struct.Logger.html#method.duplicate_to_stderr) or -//! [`Logger::duplicate_to_stdout`](../struct.Logger.html#method.duplicate_to_stdout), -//! 
which duplicate log messages to the terminal. -//! -//! ```rust -//! # use flexi_logger::{LogTarget,Logger,Duplicate}; -//! # fn main() -> Result<(), Box> { -//! Logger::with_str("info") -//! .log_target(LogTarget::File) // write logs to file -//! .duplicate_to_stderr(Duplicate::Warn) // print warnings and errors also to the console -//! .start()?; -//! # Ok(()) -//! # } -//! ``` -//! -//! ## Influence the location and name of the log file -//! -//! By default, the log files are created in the current directory (where the program was started). -//! With [`Logger:directory`](../struct.Logger.html#method.directory) -//! you can specify a concrete folder in which the files should be created. -//! -//! Using [`Logger::discriminant`](../struct.Logger.html#method.discriminant) -//! you can add a discriminating infix to the log file name. -//! -//! With [`Logger::suffix`](../struct.Logger.html#method.suffix) -//! you can change the suffix that is used for the log files. -//! -//! When writing to files, especially when they are in a distant folder, you may want to let the -//! user know where the log file is. -//! -//! [`Logger::print_message`](../struct.Logger.html#method.print_message) -//! prints an info to `stdout` to which file the log is written. -//! -//! `create_symlink(path)` creates (on unix-systems only) a symbolic link at the -//! specified path that points to the log file. -//! -//! ```rust -//! # use flexi_logger::Logger; -//! # fn main() -> Result<(), Box> { -//! Logger::with_str("info") -//! .log_to_file() // write logs to file -//! .directory("traces") // create files in folder ./traces -//! .discriminant("Sample4711A") // use infix in log file name -//! .suffix("trc") // use suffix .trc instead of .log -//! .print_message() // -//! .create_symlink("current_run") // create a symbolic link to the current log file -//! .start()?; -//! # Ok(()) -//! # } -//! ``` -//! -//! This example will print a message -//! "Log is written to `./traces/foo_Sample4711A_2020-11-17_19-24-35.trc`" -//! and, on unix, create a symbolic link called `current_run`. -//! -//! ## Specify the format for the log lines explicitly -//! -//! With [`Logger::format`](../struct.Logger.html#method.format) -//! you set the format for all used output channels of `flexi_logger`. -//! -//! `flexi_logger` provides a couple of format functions, and you can also create and use your own, -//! e.g. by copying and modifying one of the provided format functions. -//! -//! Depending on the configuration, `flexi_logger` can write logs to multiple channels -//! (stdout, stderr, files, or additional writers) -//! at the same time. You can control the format for each output channel individually, using -//! [`Logger::format_for_files`](../struct.Logger.html#method.format_for_files), -//! [`Logger::format_for_stderr`](../struct.Logger.html#method.format_for_stderr), -//! [`Logger::format_for_stdout`](../struct.Logger.html#method.format_for_stdout), or -//! [`Logger::format_for_writer`](../struct.Logger.html#method.format_for_writer). -//! -//! As argument for these functions you can use one of the provided non-coloring format functions -//! -//! - [`default_format`](../fn.default_format.html) -//! - [`detailed_format`](../fn.detailed_format.html) -//! - [`opt_format`](../fn.opt_format.html) -//! - [`with_thread`](../fn.with_thread.html), -//! -//! or one of their coloring pendants -//! -//! - [`colored_default_format`](../fn.colored_default_format.html) -//! - [`colored_detailed_format`](../fn.colored_detailed_format.html) -//! 
- [`colored_opt_format`](../fn.colored_opt_format.html). -//! - [`colored_with_thread`](../fn.colored_with_thread.html). -//! -//! ### Adaptive Coloring -//! -//! You can use coloring for `stdout` and/or `stderr` -//! conditionally, such that colors are used when the output goes to a tty, -//! and suppressed if you e.g. pipe the output to some other program. -//! With -//! [`Logger::adaptive_format_for_stderr`](../struct.Logger.html#method.adaptive_format_for_stderr) or -//! [`Logger::adaptive_format_for_stdout`](../struct.Logger.html#method.adaptive_format_for_stdout) -//! you can specify one of the provided format pairs -//! (which are based on the format functions listed above), -//! or you can provide your own colored and non-colored format functions. -//! -//! ### Defaults -//! -//! `flexi_logger` initializes by default equivalently to this: -//! -//! ```rust,ignore -//! # use flexi_logger::{Logger,AdaptiveFormat,default_format}; -//! # use log::{debug, error, info, trace, warn}; -//! # fn main() -> Result<(), Box> { -//! # Logger::with_str("info") // Write all error, warn, and info messages -//! # .directory(std::env::temp_dir()) -//! .adaptive_format_for_stderr(AdaptiveFormat::Default) -//! .adaptive_format_for_stdout(AdaptiveFormat::Default) -//! .format_for_files(default_format) -//! .format_for_writer(default_format) -//! # .start()?; -//! # error!("This is an error message"); -//! # warn!("This is a warning"); -//! # info!("This is an info message"); -//! # debug!("This is a debug message - you must not see it!"); -//! # trace!("This is a trace message - you must not see it!"); -//! # run() -//! # } -//! # fn run() -> Result<(), Box> {Ok(())} -//! ``` -//! -//! ## Use a fixed log file, and truncate or append the file on each program start -//! -//! With [`Logger::log_to_file`](../struct.Logger.html#method.log_to_file) and without rotation, -//! `flexi_logger` uses by default files with a timestamp in the name, like -//! `foo_2020-11-16_08-37-44.log` (for a program called `foo`), which are quite unique for each -//! program start. -//! -//! With [`Logger::suppress_timestamp`](../struct.Logger.html#method.suppress_timestamp) -//! you get a simple fixed filename, like `foo.log`. -//! -//! In that case, a restart of the program will truncate an existing log file. -//! -//! Use additionally [`Logger::append`](../struct.Logger.html#method.append) -//! to append the logs of each new run to the existing file. -//! -//! ```rust -//! # use flexi_logger::Logger; -//! # use log::{debug, error, info, trace, warn}; -//! # fn main() -> Result<(), Box> { -//! Logger::with_str("info") // Write all error, warn, and info messages -//! # .directory(std::env::temp_dir()) -//! .log_to_file() // Write the log to a file -//! .suppress_timestamp() // use a simple filename without a timestamp -//! .append() // do not truncate the log file when the program is restarted -//! .start()?; -//! -//! # error!("This is an error message"); -//! # warn!("This is a warning"); -//! # info!("This is an info message"); -//! # debug!("This is a debug message - you must not see it!"); -//! # trace!("This is a trace message - you must not see it!"); -//! # run() -//! # } -//! # fn run() -> Result<(), Box> {Ok(())} -//! ``` -//! -//! ## Rotate the log file -//! -//! With rotation, the logs are always written to a file -//! with the infix `rCURRENT`, like e.g. `foo_rCURRENT.log`. -//! -//! [`Logger::rotation`](../struct.Logger.html#method.rotation) -//! takes three enum arguments to define its behavior: -//! -//! 
- [`Criterion`](../enum.Criterion.html) -//! - with `Criterion::Age` the rotation happens -//! when the clock switches to a new day, hour, minute, or second -//! - with `Criterion::Size` the rotation happens when the current log file exceeds -//! the specified limit -//! - with `Criterion::AgeOrSize` the rotation happens when either of the two limits is reached -//! -//! - [`Naming`](../enum.Naming.html)
The current file is then renamed -//! - with `Naming::Timestamps` to something like `foo_r2020-11-16_08-56-52.log` -//! - with `Naming::Numbers` to something like `foo_r00000.log` -//! -//! and a fresh `rCURRENT` file is created. -//! -//! - [`Cleanup`](../enum.Cleanup.html) defines if and how you -//! avoid accumulating log files indefinitely: -//! - with `Cleanup::KeepLogFiles` you specify the number of log files that should be retained; -//! if there are more, the older ones are getting deleted -//! - with `Cleanup::KeepCompressedFiles` you specify the number of log files that should be -//! retained, and these are being compressed additionally -//! - with `Cleanup::KeepLogAndCompressedFiles` you specify the number of log files that should be -//! retained as is, and an additional number that are being compressed -//! - with `Cleanup::Never` no cleanup is done, all files are retained. -//! -//! ```rust -//! # use flexi_logger::{Age, Cleanup, Criterion, Logger, Naming}; -//! # use log::{debug, error, info, trace, warn}; -//! # fn main() -> Result<(), Box> { -//! Logger::with_str("info") // Write all error, warn, and info messages -//! # .directory(std::env::temp_dir()) -//! .log_to_file() // Write the log to a file -//! .rotate( // If the program runs long enough, -//! Criterion::Age(Age::Day), // - create a new file every day -//! Naming::Timestamps, // - let the rotated files have a timestamp in their name -//! Cleanup::KeepLogFiles(7), // - keep at most 7 log files -//! ) -//! .start()?; -//! -//! # error!("This is an error message"); -//! # warn!("This is a warning"); -//! # info!("This is an info message"); -//! # debug!("This is a debug message - you must not see it!"); -//! # trace!("This is a trace message - you must not see it!"); -//! # run() -//! # } -//! # fn run() -> Result<(), Box> {Ok(())} -//! ``` -//! -//! ## Reconfigure the log specification programmatically -//! -//! This can be especially handy in debugging situations where you want to see -//! traces only for a short instant. -//! -//! Obtain the `ReconfigurationHandle` -//! -//! ```rust -//! # use flexi_logger::Logger; -//! let mut log_handle = Logger::with_str("info") -//! // ... logger configuration ... -//! .start() -//! .unwrap(); -//! ``` -//! -//! and modify the effective log specification from within your code: -//! -//! ```rust, ignore -//! // ... -//! log_handle.parse_and_push_temp_spec("info, critical_mod = trace"); -//! // ... critical calls ... -//! log_handle.pop_temp_spec(); -//! // ... continue with the log spec you had before. -//! ``` -//! -//! ## Reconfigure the log specification dynamically by editing a spec-file -//! -//! If you start `flexi_logger` with a specfile, -//! -//! ```rust,ignore -//! # use flexi_logger::Logger; -//! Logger::with_str("info") -//! // ... logger configuration ... -//! .start_with_specfile("/server/config/logspec.toml") -//! .unwrap(); -//! ``` -//! -//! then you can change the log specification dynamically, *while your program is running*, -//! by editing the specfile. This can be a great help e.g. if you want to get detailed traces -//! for _some_ requests to a long running server. -//! -//! See [`Logger::start_with_specfile`](../struct.Logger.html#method.start_with_specfile) -//! for more information. -//! -//! ## Miscellaneous -//! -//! For the sake of completeness, we refer here to some more configuration methods. -//! See their documentation for more details. -//! -//! [`Logger::check_parser_error`](../struct.Logger.html#method.check_parser_error) -//! -//! 
[`Logger::set_palette`](../struct.Logger.html#method.set_palette) -//! -//! [`Logger::cleanup_in_background_thread`](../struct.Logger.html#method.cleanup_in_background_thread) -//! -//! [`Logger::use_windows_line_ending`](../struct.Logger.html#method.use_windows_line_ending) -//! -//! [`Logger::add_writer`](../struct.Logger.html#method.add_writer) diff --git a/agent/support/rust/flexi_logger/src/deferred_now.rs b/agent/support/rust/flexi_logger/src/deferred_now.rs deleted file mode 100644 index 199a23891..000000000 --- a/agent/support/rust/flexi_logger/src/deferred_now.rs +++ /dev/null @@ -1,23 +0,0 @@ -use chrono::{DateTime, Local}; - -/// Deferred timestamp creation. -/// -/// Is used to ensure that a log record that is sent to multiple outputs -/// (in maybe different formats) always uses the same timestamp. -#[derive(Debug)] -pub struct DeferredNow(Option>); -impl<'a> DeferredNow { - pub(crate) fn new() -> Self { - Self(None) - } - - /// Retrieve the timestamp. - /// - /// Requires mutability because the first caller will generate the timestamp. - pub fn now(&'a mut self) -> &'a DateTime { - if self.0.is_none() { - self.0 = Some(Local::now()); - } - self.0.as_ref().unwrap() - } -} diff --git a/agent/support/rust/flexi_logger/src/flexi_error.rs b/agent/support/rust/flexi_logger/src/flexi_error.rs deleted file mode 100644 index 273773a6c..000000000 --- a/agent/support/rust/flexi_logger/src/flexi_error.rs +++ /dev/null @@ -1,63 +0,0 @@ -use crate::log_specification::LogSpecification; -// use std::backtrace::Backtrace; -use thiserror::Error; - -/// Describes errors in the initialization of `flexi_logger`. -#[derive(Error, Debug)] -pub enum FlexiLoggerError { - /// Log file cannot be written because the specified path is not a directory. - #[error("Log file cannot be written because the specified path is not a directory")] - OutputBadDirectory, - - /// Spawning the cleanup thread failed. - /// - /// This error can safely be avoided with `Logger::cleanup_in_background_thread(false)`. - #[error("Spawning the cleanup thread failed.")] - OutputCleanupThread(std::io::Error), - - /// Log cannot be written, e.g. because the configured output directory is not accessible. - #[error( - "Log cannot be written, e.g. because the configured output directory is not accessible" - )] - OutputIo(#[from] std::io::Error), - - /// Filesystem notifications for the specfile could not be set up. - #[error("Filesystem notifications for the specfile could not be set up")] - #[cfg(feature = "specfile")] - SpecfileNotify(#[from] notify::Error), - - /// Parsing the configured logspec toml-file failed. - #[error("Parsing the configured logspec toml-file failed")] - #[cfg(feature = "specfile_without_notification")] - SpecfileToml(#[from] toml::de::Error), - - /// Specfile cannot be accessed or created. - #[error("Specfile cannot be accessed or created")] - #[cfg(feature = "specfile_without_notification")] - SpecfileIo(std::io::Error), - - /// Specfile has an unsupported extension. - #[error("Specfile has an unsupported extension")] - #[cfg(feature = "specfile_without_notification")] - SpecfileExtension(&'static str), - - /// Invalid level filter. - #[error("Invalid level filter")] - LevelFilter(String), - - /// Failed to parse log specification. - #[error("Failed to parse log specification: {0}")] - Parse(String, LogSpecification), - - /// Logger initialization failed. - #[error("Logger initialization failed")] - Log(#[from] log::SetLoggerError), - - /// Some synchronization object is poisoned. 
- #[error("Some synchronization object is poisoned")] - Poison, - - /// Palette parsing failed - #[error("Palette parsing failed")] - Palette(#[from] std::num::ParseIntError), -} diff --git a/agent/support/rust/flexi_logger/src/flexi_logger.rs b/agent/support/rust/flexi_logger/src/flexi_logger.rs deleted file mode 100644 index 7b643ec35..000000000 --- a/agent/support/rust/flexi_logger/src/flexi_logger.rs +++ /dev/null @@ -1,151 +0,0 @@ -use crate::primary_writer::PrimaryWriter; -use crate::writers::LogWriter; -use crate::LogSpecification; - -#[cfg(feature = "textfilter")] -use regex::Regex; -use std::collections::HashMap; -use std::sync::{Arc, RwLock}; - -// Implements log::Log to plug into the log crate. -// -// Delegates the real logging to the configured PrimaryWriter and optionally to other writers. -// The `PrimaryWriter` is either a `StdErrWriter` or an `ExtendedFileWriter`. -// An ExtendedFileWriter logs to a file, by delegating to a FileWriter, -// and can additionally duplicate log lines to stderr. -pub(crate) struct FlexiLogger { - log_specification: Arc>, - primary_writer: Arc, - other_writers: Arc>>, -} - -impl FlexiLogger { - pub fn new( - log_specification: Arc>, - primary_writer: Arc, - other_writers: Arc>>, - ) -> Self { - Self { - log_specification, - primary_writer, - other_writers, - } - } - - fn primary_enabled(&self, level: log::Level, module: &str) -> bool { - self.log_specification.read().as_ref() - .unwrap(/* catch and expose error? */) - .enabled(level, module) - } -} - -impl log::Log for FlexiLogger { - // If other writers are configured and the metadata target addresses them correctly, - // - we should determine if the metadata-level is digested by any of the writers - // (including the primary writer) - // else we fall back to default behavior: - // Return true if - // - target is filled with module path and level is accepted by log specification - // - target is filled with crap and ??? - // - // Caveat: - // Rocket e.g. sets target explicitly to several fantasy names; - // these hopefully do not collide with any of the modules in the log specification; - // since they do not conform with the {} syntax expected by flexi_logger, they're treated as - // module names. 
- fn enabled(&self, metadata: &log::Metadata) -> bool { - let target = metadata.target(); - let level = metadata.level(); - - if !self.other_writers.is_empty() && target.starts_with('{') { - // at least one other writer is configured _and_ addressed - let targets: Vec<&str> = target[1..(target.len() - 1)].split(',').collect(); - for t in targets { - if t != "_Default" { - match self.other_writers.get(t) { - None => eprintln!("[flexi_logger] bad writer spec: {}", t), - Some(writer) => { - if level < writer.max_log_level() { - return true; - } - } - } - } - } - } - - self.primary_enabled(level, target) - } - - fn log(&self, record: &log::Record) { - let target = record.metadata().target(); - let mut now = crate::DeferredNow::new(); - if target.starts_with('{') { - let mut use_default = false; - let targets: Vec<&str> = target[1..(target.len() - 1)].split(',').collect(); - for t in targets { - if t == "_Default" { - use_default = true; - } else { - match self.other_writers.get(t) { - None => eprintln!("[flexi_logger] found bad writer spec: {}", t), - Some(writer) => { - writer.write(&mut now, record).unwrap_or_else(|e| { - eprintln!( - "[flexi_logger] writing log line to custom writer \"{}\" \ - failed with: \"{}\"", - t, e - ); - }); - } - } - } - } - if !use_default { - return; - } - } - - let effective_target = if target.starts_with('{') { - record.module_path().unwrap_or_default() - } else { - target - }; - if !self.primary_enabled(record.level(), effective_target) { - return; - } - - #[cfg(feature = "textfilter")] - { - // closure that we need below - let check_text_filter = |text_filter: &Option| { - text_filter - .as_ref() - .map_or(true, |filter| filter.is_match(&*record.args().to_string())) - }; - - if !check_text_filter( - self.log_specification.read().as_ref().unwrap(/* expose this? */).text_filter(), - ) { - return; - } - } - - self.primary_writer - .write(&mut now, record) - .unwrap_or_else(|e| { - eprintln!("[flexi_logger] writing log line failed with {}", e); - }); - } - - fn flush(&self) { - self.primary_writer.flush().unwrap_or_else(|e| { - eprintln!("[flexi_logger] flushing primary writer failed with {}", e); - }); - for writer in self.other_writers.values() { - writer.flush().unwrap_or_else(|e| { - eprintln!("[flexi_logger] flushing custom writer failed with {}", e); - }); - } - } -} diff --git a/agent/support/rust/flexi_logger/src/formats.rs b/agent/support/rust/flexi_logger/src/formats.rs deleted file mode 100644 index 61e1e6ab4..000000000 --- a/agent/support/rust/flexi_logger/src/formats.rs +++ /dev/null @@ -1,416 +0,0 @@ -use crate::DeferredNow; -use log::Record; -use std::thread; -#[cfg(feature = "colors")] -use yansi::{Color, Paint, Style}; - -/// Function type for Format functions. -/// -/// If you want to write the log lines in your own format, -/// implement a function with this signature and provide it to one of the methods -/// [`Logger::format()`](struct.Logger.html#method.format), -/// [`Logger::format_for_files()`](struct.Logger.html#method.format_for_files), -/// or [`Logger::format_for_stderr()`](struct.Logger.html#method.format_for_stderr). -/// -/// Checkout the code of the provided [format functions](index.html#functions) -/// if you want to start with a template. -/// -/// ## Parameters -/// -/// - `write`: the output stream -/// -/// - `now`: the timestamp that you should use if you want a timestamp to appear in the log line -/// -/// - `record`: the log line's content and metadata, as provided by the log crate's macros. 
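Since the surrounding documentation invites writing your own format function, here is a minimal sketch of one that matches the signature described above; the name `pipe_format` and the exact layout are invented for illustration.

```rust
use flexi_logger::DeferredNow;
use log::Record;

// Writes "HH:MM:SS | LEVEL | module | message".
pub fn pipe_format(
    w: &mut dyn std::io::Write,
    now: &mut DeferredNow,
    record: &Record,
) -> Result<(), std::io::Error> {
    write!(
        w,
        "{} | {} | {} | {}",
        now.now().format("%H:%M:%S"),
        record.level(),
        record.module_path().unwrap_or("<unnamed>"),
        record.args()
    )
}
```

Such a function can then be handed to `Logger::format` or to one of the channel-specific `format_for_*` methods.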
-/// -pub type FormatFunction = fn( - write: &mut dyn std::io::Write, - now: &mut DeferredNow, - record: &Record, -) -> Result<(), std::io::Error>; - -/// A logline-formatter that produces log lines like
-/// ```INFO [my_prog::some_submodule] Task successfully read from conf.json``` -/// -/// # Errors -/// -/// See `std::write` -pub fn default_format( - w: &mut dyn std::io::Write, - _now: &mut DeferredNow, - record: &Record, -) -> Result<(), std::io::Error> { - write!( - w, - "{} [{}] {}", - record.level(), - record.module_path().unwrap_or(""), - record.args() - ) -} - -#[allow(clippy::doc_markdown)] -/// A colored version of the logline-formatter `default_format` -/// that produces log lines like
-/// ERROR [my_prog::some_submodule] File not found -/// -/// See method [style](fn.style.html) if you want to influence coloring. -/// -/// Only available with feature `colors`. -/// -/// # Errors -/// -/// See `std::write` -#[cfg(feature = "colors")] -pub fn colored_default_format( - w: &mut dyn std::io::Write, - _now: &mut DeferredNow, - record: &Record, -) -> Result<(), std::io::Error> { - let level = record.level(); - write!( - w, - "{} [{}] {}", - style(level, level), - record.module_path().unwrap_or(""), - style(level, record.args()) - ) -} - -/// A logline-formatter that produces log lines with timestamp and file location, like -///
-/// ```[2016-01-13 15:25:01.640870 +01:00] INFO [src/foo/bar:26] Task successfully read from conf.json``` -///
-/// -/// # Errors -/// -/// See `std::write` -pub fn opt_format( - w: &mut dyn std::io::Write, - now: &mut DeferredNow, - record: &Record, -) -> Result<(), std::io::Error> { - write!( - w, - "[{}] {} [{}:{}] {}", - now.now().format("%Y-%m-%d %H:%M:%S%.6f %:z"), - record.level(), - record.file().unwrap_or(""), - record.line().unwrap_or(0), - &record.args() - ) -} - -/// A colored version of the logline-formatter `opt_format`. -/// -/// See method [style](fn.style.html) if you want to influence coloring. -/// -/// Only available with feature `colors`. -/// -/// # Errors -/// -/// See `std::write` -#[cfg(feature = "colors")] -pub fn colored_opt_format( - w: &mut dyn std::io::Write, - now: &mut DeferredNow, - record: &Record, -) -> Result<(), std::io::Error> { - let level = record.level(); - write!( - w, - "[{}] {} [{}:{}] {}", - style(level, now.now().format("%Y-%m-%d %H:%M:%S%.6f %:z")), - style(level, level), - record.file().unwrap_or(""), - record.line().unwrap_or(0), - style(level, &record.args()) - ) -} - -/// A logline-formatter that produces log lines like -///
-/// ```[2016-01-13 15:25:01.640870 +01:00] INFO [foo::bar] src/foo/bar.rs:26: Task successfully read from conf.json``` -///
-/// i.e. with timestamp, module path and file location. -/// -/// # Errors -/// -/// See `std::write` -pub fn detailed_format( - w: &mut dyn std::io::Write, - now: &mut DeferredNow, - record: &Record, -) -> Result<(), std::io::Error> { - write!( - w, - "[{}] {} [{}] {}:{}: {}", - now.now().format("%Y-%m-%d %H:%M:%S%.6f %:z"), - record.level(), - record.module_path().unwrap_or(""), - record.file().unwrap_or(""), - record.line().unwrap_or(0), - &record.args() - ) -} - -/// A colored version of the logline-formatter `detailed_format`. -/// -/// See method [style](fn.style.html) if you want to influence coloring. -/// -/// Only available with feature `colors`. -/// -/// # Errors -/// -/// See `std::write` -#[cfg(feature = "colors")] -pub fn colored_detailed_format( - w: &mut dyn std::io::Write, - now: &mut DeferredNow, - record: &Record, -) -> Result<(), std::io::Error> { - let level = record.level(); - write!( - w, - "[{}] {} [{}] {}:{}: {}", - style(level, now.now().format("%Y-%m-%d %H:%M:%S%.6f %:z")), - style(level, record.level()), - record.module_path().unwrap_or(""), - record.file().unwrap_or(""), - record.line().unwrap_or(0), - style(level, &record.args()) - ) -} - -/// A logline-formatter that produces log lines like -///
-/// ```[2016-01-13 15:25:01.640870 +01:00] T[taskreader] INFO [src/foo/bar:26] Task successfully read from conf.json``` -///
-/// i.e. with timestamp, thread name and file location. -/// -/// # Errors -/// -/// See `std::write` -pub fn with_thread( - w: &mut dyn std::io::Write, - now: &mut DeferredNow, - record: &Record, -) -> Result<(), std::io::Error> { - write!( - w, - "[{}] T[{:?}] {} [{}:{}] {}", - now.now().format("%Y-%m-%d %H:%M:%S%.6f %:z"), - thread::current().name().unwrap_or(""), - record.level(), - record.file().unwrap_or(""), - record.line().unwrap_or(0), - &record.args() - ) -} - -/// A colored version of the logline-formatter `with_thread`. -/// -/// See method [style](fn.style.html) if you want to influence coloring. -/// -/// Only available with feature `colors`. -/// -/// # Errors -/// -/// See `std::write` -#[cfg(feature = "colors")] -pub fn colored_with_thread( - w: &mut dyn std::io::Write, - now: &mut DeferredNow, - record: &Record, -) -> Result<(), std::io::Error> { - let level = record.level(); - write!( - w, - "[{}] T[{:?}] {} [{}:{}] {}", - style(level, now.now().format("%Y-%m-%d %H:%M:%S%.6f %:z")), - style(level, thread::current().name().unwrap_or("")), - style(level, level), - record.file().unwrap_or(""), - record.line().unwrap_or(0), - style(level, &record.args()) - ) -} - -/// Helper function that is used in the provided coloring format functions to apply -/// colors based on the log level and the effective color palette. -/// -/// See [`Logger::set_palette`](struct.Logger.html#method.set_palette) if you want to -/// modify the color palette. -/// -/// Only available with feature `colors`. -#[cfg(feature = "colors")] -pub fn style(level: log::Level, item: T) -> Paint { - let palette = &*(PALETTE.read().unwrap()); - match level { - log::Level::Error => palette.error, - log::Level::Warn => palette.warn, - log::Level::Info => palette.info, - log::Level::Debug => palette.debug, - log::Level::Trace => palette.trace, - } - .paint(item) -} - -#[cfg(feature = "colors")] -lazy_static::lazy_static! { - static ref PALETTE: std::sync::RwLock = std::sync::RwLock::new(Palette::default()); -} - -// Overwrites the default PALETTE value either from the environment, if set, -// or from the parameter, if filled. -// Returns an error if parsing failed. 
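The palette string accepted here (and via the `FLEXI_LOGGER_PALETTE` environment variable checked in the function below) consists of five `;`-separated entries, one per level from error to trace; a minimal sketch with assumed 256-color codes:

```rust
fn override_palette() {
    // "196;208;-;7;8": error = 196, warn = 208, info = terminal default ("-"),
    // debug = 7, trace = 8. Must be set before the logger is started.
    std::env::set_var("FLEXI_LOGGER_PALETTE", "196;208;-;7;8");
}
```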
-#[cfg(feature = "colors")] -pub(crate) fn set_palette(input: &Option) -> Result<(), std::num::ParseIntError> { - match std::env::var_os("FLEXI_LOGGER_PALETTE") { - Some(ref env_osstring) => { - *(PALETTE.write().unwrap()) = Palette::from(env_osstring.to_string_lossy().as_ref())?; - } - None => match input { - Some(ref input_string) => { - *(PALETTE.write().unwrap()) = Palette::from(input_string)?; - } - None => {} - }, - } - Ok(()) -} - -#[cfg(feature = "colors")] -#[derive(Debug)] -struct Palette { - pub error: Style, - pub warn: Style, - pub info: Style, - pub debug: Style, - pub trace: Style, -} -#[cfg(feature = "colors")] -impl Palette { - fn default() -> Palette { - Palette { - error: Style::new(Color::Fixed(196)).bold(), - warn: Style::new(Color::Fixed(208)).bold(), - info: Style::new(Color::Unset), - debug: Style::new(Color::Fixed(7)), - trace: Style::new(Color::Fixed(8)), - } - } - - fn from(palette: &str) -> Result { - let mut items = palette.split(';'); - Ok(Palette { - error: parse_style(items.next().unwrap_or("196").trim())?, - warn: parse_style(items.next().unwrap_or("208").trim())?, - info: parse_style(items.next().unwrap_or("-").trim())?, - debug: parse_style(items.next().unwrap_or("7").trim())?, - trace: parse_style(items.next().unwrap_or("8").trim())?, - }) - } -} - -#[cfg(feature = "colors")] -fn parse_style(input: &str) -> Result { - Ok(if input == "-" { - Style::new(Color::Unset) - } else { - Style::new(Color::Fixed(input.parse()?)) - }) -} - -/// Specifies the `FormatFunction` and decides if coloring should be used. -/// -/// Is used in -/// [`Logger::adaptive_format_for_stderr`](struct.Logger.html#method.adaptive_format_for_stderr) and -/// [`Logger::adaptive_format_for_stdout`](struct.Logger.html#method.adaptive_format_for_stdout). -/// The coloring format functions are used if the output channel is a tty. -/// -/// Only available with feature `atty`. -#[cfg(feature = "atty")] -#[derive(Clone, Copy)] -pub enum AdaptiveFormat { - /// Chooses between [`default_format`](fn.default_format.html) - /// and [`colored_default_format`](fn.colored_default_format.html). - /// - /// Only available with feature `colors`. - #[cfg(feature = "colors")] - Default, - /// Chooses between [`detailed_format`](fn.detailed_format.html) - /// and [`colored_detailed_format`](fn.colored_detailed_format.html). - /// - /// Only available with feature `colors`. - #[cfg(feature = "colors")] - Detailed, - /// Chooses between [`opt_format`](fn.opt_format.html) - /// and [`colored_opt_format`](fn.colored_opt_format.html). - /// - /// Only available with feature `colors`. - #[cfg(feature = "colors")] - Opt, - /// Chooses between [`with_thread`](fn.with_thread.html) - /// and [`colored_with_thread`](fn.colored_with_thread.html). - /// - /// Only available with feature `colors`. - #[cfg(feature = "colors")] - WithThread, - /// Chooses between the first format function (which is supposed to be uncolored) - /// and the second (which is supposed to be colored). - /// - /// Allows providing own format functions, with freely choosable coloring technique, - /// _and_ making use of the tty detection. 
- Custom(FormatFunction, FormatFunction), -} - -#[cfg(feature = "atty")] -impl AdaptiveFormat { - #[must_use] - pub(crate) fn format_function(self, stream: Stream) -> FormatFunction { - if stream.is_tty() { - match self { - #[cfg(feature = "colors")] - Self::Default => colored_default_format, - #[cfg(feature = "colors")] - Self::Detailed => colored_detailed_format, - #[cfg(feature = "colors")] - Self::Opt => colored_opt_format, - #[cfg(feature = "colors")] - Self::WithThread => colored_with_thread, - Self::Custom(_, colored) => colored, - } - } else { - match self { - #[cfg(feature = "colors")] - Self::Default => default_format, - #[cfg(feature = "colors")] - Self::Detailed => detailed_format, - #[cfg(feature = "colors")] - Self::Opt => opt_format, - #[cfg(feature = "colors")] - Self::WithThread => with_thread, - Self::Custom(uncolored, _) => uncolored, - } - } - } -} - -#[cfg(feature = "atty")] -#[derive(Clone, Copy)] -pub(crate) enum Stream { - StdOut, - StdErr, -} -#[cfg(feature = "atty")] -impl Stream { - #[must_use] - pub fn is_tty(self) -> bool { - match self { - Self::StdOut => atty::is(atty::Stream::Stdout), - Self::StdErr => atty::is(atty::Stream::Stderr), - } - } -} diff --git a/agent/support/rust/flexi_logger/src/lib.rs b/agent/support/rust/flexi_logger/src/lib.rs deleted file mode 100644 index a94d17915..000000000 --- a/agent/support/rust/flexi_logger/src/lib.rs +++ /dev/null @@ -1,63 +0,0 @@ -#![deny(missing_docs)] -#![deny(clippy::all)] -#![deny(clippy::pedantic)] -#![allow(clippy::unused_self)] -#![allow(clippy::needless_doctest_main)] - -//! A flexible and easy-to-use logger that writes logs to stderr and/or to files -//! or other output streams. -//! -//! To read the log specification from an environment variable and get the log written to `stderr`, -//! start `flexi_logger` e.g. like this: -//! ```rust -//! flexi_logger::Logger::with_env().start().unwrap(); -//! ``` -//! -//! See -//! -//! * [Logger](struct.Logger.html) for a full description of all configuration options, -//! * and the [writers](writers/index.html) module for the usage of additional log writers, -//! * and [the homepage](https://crates.io/crates/flexi_logger) for how to get started. -//! -//! There are configuration options to e.g. -//! -//! * decide whether you want to write your logs to stderr or to a file, -//! * configure the path and the filenames of the log files, -//! * use file rotation, -//! * specify the line format for the log lines, -//! * define additional log output streams, e.g for alert or security messages, -//! * support changing the log specification while the program is running, -//! -//! `flexi_logger` uses a similar syntax as [`env_logger`](http://crates.io/crates/env_logger/) -//! for specifying which logs should really be written (but is more graceful with the syntax, -//! and can provide error information). -//! -//! By default, i.e. if feature `colors` is not switched off, the log lines that appear on your -//! terminal are coloured. In case the chosen colors don't fit to your terminal's color theme, -//! you can adapt the colors to improve readability. -//! See the documentation of method [style](fn.style.html) -//! for a description how this can be done. 
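As a compact illustration of the points in this crate-level description, a typical initialization might look like the following sketch; the method names are taken from the documentation above, while the chosen spec and format are arbitrary.

```rust
use flexi_logger::{detailed_format, Duplicate, Logger};

fn init_logging() {
    Logger::with_env_or_str("info")          // RUST_LOG wins, "info" is the fallback
        .log_to_file()                        // write to a file instead of stderr
        .duplicate_to_stderr(Duplicate::Warn) // keep warnings/errors on the terminal
        .format(detailed_format)              // one format for all channels
        .start()
        .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e));
}
```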
- -mod deferred_now; -mod flexi_error; -mod flexi_logger; -mod formats; -mod log_specification; -mod logger; -mod parameters; -mod primary_writer; -mod reconfiguration_handle; - -pub mod code_examples; -pub mod writers; - -/// Re-exports from log crate -pub use log::{Level, LevelFilter, Record}; - -pub use crate::deferred_now::DeferredNow; -pub use crate::flexi_error::FlexiLoggerError; -pub use crate::formats::*; -pub use crate::log_specification::{LogSpecBuilder, LogSpecification, ModuleFilter}; -pub use crate::logger::{Duplicate, LogTarget, Logger}; -pub use crate::parameters::{Age, Cleanup, Criterion, Naming}; -pub use crate::reconfiguration_handle::ReconfigurationHandle; diff --git a/agent/support/rust/flexi_logger/src/log_specification.rs b/agent/support/rust/flexi_logger/src/log_specification.rs deleted file mode 100644 index 684b7caf9..000000000 --- a/agent/support/rust/flexi_logger/src/log_specification.rs +++ /dev/null @@ -1,927 +0,0 @@ -use crate::flexi_error::FlexiLoggerError; -use crate::LevelFilter; - -#[cfg(feature = "textfilter")] -use regex::Regex; -use std::collections::HashMap; -use std::env; - -/// -/// Immutable struct that defines which loglines are to be written, -/// based on the module, the log level, and the text. -/// -/// The loglevel specification via string (relevant for methods -/// [parse()](struct.LogSpecification.html#method.parse) and -/// [env()](struct.LogSpecification.html#method.env)) -/// works essentially like with `env_logger`, -/// but we are a bit more tolerant with spaces. Its functionality can be -/// described with some Backus-Naur-form: -/// -/// ```text -/// ::= single_log_level_spec[{,single_log_level_spec}][/] -/// ::= ||= -/// ::= -/// ``` -/// -/// * Examples: -/// -/// * `"info"`: all logs with info, warn, or error level are written -/// * `"crate1"`: all logs of this crate are written, but nothing else -/// * `"warn, crate2::mod_a=debug, mod_x::mod_y=trace"`: all crates log warnings and errors, -/// `mod_a` additionally debug messages, and `mod_x::mod_y` is fully traced -/// -/// * If you just specify the module, without `log_level`, all levels will be traced for this -/// module. -/// * If you just specify a log level, this will be applied as default to all modules without -/// explicit log level assigment. -/// (You see that for modules named error, warn, info, debug or trace, -/// it is necessary to specify their loglevel explicitly). -/// * The module names are compared as Strings, with the side effect that a specified module filter -/// affects all modules whose name starts with this String.
-/// Example: ```"foo"``` affects e.g. -/// -/// * `foo` -/// * `foo::bar` -/// * `foobaz` (!) -/// * `foobaz::bar` (!) -/// -/// The optional text filter is applied for all modules. -/// -/// Note that external module names are to be specified like in ```"extern crate ..."```, i.e., -/// for crates with a dash in their name this means: the dash is to be replaced with -/// the underscore (e.g. ```karl_heinz```, not ```karl-heinz```). -/// See -/// [https://github.com/rust-lang/rfcs/pull/940/files](https://github.com/rust-lang/rfcs/pull/940/files) -/// for an explanation of the different naming conventions in Cargo (packages allow hyphen) and -/// rustc (“extern crate” does not allow hyphens). -#[derive(Clone, Debug, Default)] -pub struct LogSpecification { - module_filters: Vec, - #[cfg(feature = "textfilter")] - textfilter: Option, -} - -/// Defines which loglevel filter to use for the specified module. -/// -/// A `ModuleFilter`, whose `module_name` is not set, describes the default loglevel filter. -#[derive(Clone, Debug, Eq, PartialEq)] -pub struct ModuleFilter { - /// The module name. - pub module_name: Option, - /// The level filter. - pub level_filter: LevelFilter, -} - -impl LogSpecification { - pub(crate) fn update_from(&mut self, other: Self) { - self.module_filters = other.module_filters; - - #[cfg(feature = "textfilter")] - { - self.textfilter = other.textfilter; - } - } - - pub(crate) fn max_level(&self) -> log::LevelFilter { - self.module_filters - .iter() - .map(|d| d.level_filter) - .max() - .unwrap_or(log::LevelFilter::Off) - } - - /// Returns true if messages on the specified level from the writing module should be written - pub fn enabled(&self, level: log::Level, writing_module: &str) -> bool { - // Search for the longest match, the vector is assumed to be pre-sorted. - for module_filter in &self.module_filters { - match module_filter.module_name { - Some(ref module_name) => { - if writing_module.starts_with(module_name) { - return level <= module_filter.level_filter; - } - } - None => return level <= module_filter.level_filter, - } - } - false - } - - /// Returns a `LogSpecification` where all traces are switched off. - #[must_use] - pub fn off() -> Self { - #[allow(clippy::default_trait_access)] - Default::default() - } - - /// Returns a log specification from a String. - /// - /// # Errors - /// - /// `FlexiLoggerError::Parse` if the input is malformed. 
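A short sketch of how such a spec string is parsed and then consulted (the module names are made up):

```rust
use flexi_logger::LogSpecification;
use log::Level;

fn spec_example() {
    // Global fallback "warn"; crate1::mod_a additionally logs debug.
    let spec = LogSpecification::parse("warn, crate1::mod_a = debug").unwrap();
    assert!(spec.enabled(Level::Debug, "crate1::mod_a"));
    assert!(spec.enabled(Level::Warn, "some_other_module"));
    assert!(!spec.enabled(Level::Info, "some_other_module"));
}
```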
- pub fn parse(spec: &str) -> Result { - let mut parse_errs = String::new(); - let mut dirs = Vec::::new(); - - let mut parts = spec.split('/'); - let mods = parts.next(); - #[cfg(feature = "textfilter")] - let filter = parts.next(); - if parts.next().is_some() { - push_err( - &format!("invalid log spec '{}' (too many '/'s), ignoring it", spec), - &mut parse_errs, - ); - return parse_err(parse_errs, Self::off()); - } - if let Some(m) = mods { - for s in m.split(',') { - let s = s.trim(); - if s.is_empty() { - continue; - } - let mut parts = s.split('='); - let (log_level, name) = match ( - parts.next().map(str::trim), - parts.next().map(str::trim), - parts.next(), - ) { - (Some(part_0), None, None) => { - if contains_whitespace(part_0, &mut parse_errs) { - continue; - } - // if the single argument is a log-level string or number, - // treat that as a global fallback setting - match parse_level_filter(part_0.trim()) { - Ok(num) => (num, None), - Err(_) => (LevelFilter::max(), Some(part_0)), - } - } - - (Some(part_0), Some(""), None) => { - if contains_whitespace(part_0, &mut parse_errs) { - continue; - } - (LevelFilter::max(), Some(part_0)) - } - - (Some(part_0), Some(part_1), None) => { - if contains_whitespace(part_0, &mut parse_errs) { - continue; - } - match parse_level_filter(part_1.trim()) { - Ok(num) => (num, Some(part_0.trim())), - Err(e) => { - push_err(&e.to_string(), &mut parse_errs); - continue; - } - } - } - _ => { - push_err( - &format!("invalid part in log spec '{}', ignoring it", s), - &mut parse_errs, - ); - continue; - } - }; - dirs.push(ModuleFilter { - module_name: name.map(ToString::to_string), - level_filter: log_level, - }); - } - } - - #[cfg(feature = "textfilter")] - let textfilter = filter.and_then(|filter| match Regex::new(filter) { - Ok(re) => Some(re), - Err(e) => { - push_err(&format!("invalid regex filter - {}", e), &mut parse_errs); - None - } - }); - - let logspec = Self { - module_filters: dirs.level_sort(), - #[cfg(feature = "textfilter")] - textfilter, - }; - - if parse_errs.is_empty() { - Ok(logspec) - } else { - parse_err(parse_errs, logspec) - } - } - - /// Returns a log specification based on the value of the environment variable `RUST_LOG`, - /// or an empty one. - /// - /// # Errors - /// - /// `FlexiLoggerError::Parse` if the input is malformed. - pub fn env() -> Result { - match env::var("RUST_LOG") { - Ok(spec) => Self::parse(&spec), - Err(..) => Ok(Self::off()), - } - } - - /// Returns a log specification based on the value of the environment variable `RUST_LOG`, - /// if it exists and can be parsed, or on the given String. - /// - /// # Errors - /// - /// `FlexiLoggerError::Parse` if the given spec is malformed. - pub fn env_or_parse>(given_spec: S) -> Result { - env::var("RUST_LOG") - .map_err(|_e| FlexiLoggerError::Poison /*wrong, but only dummy*/) - .and_then(|value| Self::parse(&value)) - .or_else(|_| Self::parse(given_spec.as_ref())) - } - - /// Reads a log specification from an appropriate toml document. - /// - /// This method is only avaible with feature `specfile`. - /// - /// # Errors - /// - /// `FlexiLoggerError::Parse` if the input is malformed. 
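The TOML layout that `from_toml` expects mirrors the spec-string syntax; a sketch using the same shape as the spec-file tests further down (it requires one of the `specfile*` features), equivalent to `"info, mod1::mod2 = debug, mod3 = trace"`:

```rust
use flexi_logger::LogSpecification;

fn toml_example() {
    let toml = "\
        global_level = 'info'\n\
        \n\
        [modules]\n\
        'mod1::mod2' = 'debug'\n\
        'mod3' = 'trace'\n";
    let spec = LogSpecification::from_toml(toml).unwrap();
    // One entry for the global level plus one per module.
    assert_eq!(spec.module_filters().len(), 3);
}
```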
- #[cfg(feature = "specfile_without_notification")] - pub fn from_toml(s: &str) -> Result { - #[derive(Clone, Debug, serde_derive::Deserialize)] - struct LogSpecFileFormat { - pub global_level: Option, - pub global_pattern: Option, - pub modules: Option>, - } - - let logspec_ff: LogSpecFileFormat = toml::from_str(s)?; - let mut parse_errs = String::new(); - let mut module_filters = Vec::::new(); - - if let Some(s) = logspec_ff.global_level { - module_filters.push(ModuleFilter { - module_name: None, - level_filter: parse_level_filter(s)?, - }); - } - - for (k, v) in logspec_ff.modules.unwrap_or_default() { - module_filters.push(ModuleFilter { - module_name: Some(k), - level_filter: parse_level_filter(v)?, - }); - } - - #[cfg(feature = "textfilter")] - let textfilter = match logspec_ff.global_pattern { - None => None, - Some(s) => match Regex::new(&s) { - Ok(re) => Some(re), - Err(e) => { - push_err(&format!("invalid regex filter - {}", e), &mut parse_errs); - None - } - }, - }; - - let logspec = Self { - module_filters: module_filters.level_sort(), - #[cfg(feature = "textfilter")] - textfilter, - }; - if parse_errs.is_empty() { - Ok(logspec) - } else { - parse_err(parse_errs, logspec) - } - } - - /// Serializes itself in toml format. - /// - /// This method is only avaible with feature `specfile`. - /// - /// # Errors - /// - /// `FlexiLoggerError::Io` if writing fails. - #[cfg(feature = "specfile_without_notification")] - pub fn to_toml(&self, w: &mut dyn std::io::Write) -> Result<(), FlexiLoggerError> { - w.write_all(b"### Optional: Default log level\n")?; - let last = self.module_filters.last(); - if last.is_some() && last.as_ref().unwrap().module_name.is_none() { - w.write_all( - format!( - "global_level = '{}'\n", - last.as_ref() - .unwrap() - .level_filter - .to_string() - .to_lowercase() - ) - .as_bytes(), - )?; - } else { - w.write_all(b"#global_level = 'info'\n")?; - } - - w.write_all( - b"\n### Optional: specify a regular expression to suppress all messages that don't match\n", - )?; - w.write_all(b"#global_pattern = 'foo'\n")?; - - w.write_all( - b"\n### Specific log levels per module are optionally defined in this section\n", - )?; - w.write_all(b"[modules]\n")?; - if self.module_filters.is_empty() || self.module_filters[0].module_name.is_none() { - w.write_all(b"#'mod1' = 'warn'\n")?; - w.write_all(b"#'mod2' = 'debug'\n")?; - w.write_all(b"#'mod2::mod3' = 'trace'\n")?; - } - for mf in &self.module_filters { - if mf.module_name.is_some() { - w.write_all( - format!( - "'{}' = '{}'\n", - mf.module_name.as_ref().unwrap(), - mf.level_filter.to_string().to_lowercase() - ) - .as_bytes(), - )?; - } - } - Ok(()) - } - - /// Creates a `LogSpecBuilder`, setting the default log level. - #[must_use] - pub fn default(level_filter: LevelFilter) -> LogSpecBuilder { - LogSpecBuilder::from_module_filters(&[ModuleFilter { - module_name: None, - level_filter, - }]) - } - - /// Provides a reference to the module filters. - pub fn module_filters(&self) -> &Vec { - &self.module_filters - } - - /// Provides a reference to the text filter. - /// - /// This method is only avaible with feature `textfilter`, which is a default feature. 
- #[cfg(feature = "textfilter")] - pub fn text_filter(&self) -> &Option { - &(self.textfilter) - } -} - -fn push_err(s: &str, parse_errs: &mut String) { - if !parse_errs.is_empty() { - parse_errs.push_str("; "); - } - parse_errs.push_str(s); -} - -fn parse_err( - errors: String, - logspec: LogSpecification, -) -> Result { - Err(FlexiLoggerError::Parse(errors, logspec)) -} - -fn parse_level_filter>(s: S) -> Result { - match s.as_ref().to_lowercase().as_ref() { - "off" => Ok(LevelFilter::Off), - "error" => Ok(LevelFilter::Error), - "warn" => Ok(LevelFilter::Warn), - "info" => Ok(LevelFilter::Info), - "debug" => Ok(LevelFilter::Debug), - "trace" => Ok(LevelFilter::Trace), - _ => Err(FlexiLoggerError::LevelFilter(format!( - "unknown level filter: {}", - s.as_ref() - ))), - } -} - -fn contains_whitespace(s: &str, parse_errs: &mut String) -> bool { - let result = s.chars().any(char::is_whitespace); - if result { - push_err( - &format!( - "ignoring invalid part in log spec '{}' (contains a whitespace)", - s - ), - parse_errs, - ); - } - result -} - -#[allow(clippy::needless_doctest_main)] -/// Builder for `LogSpecification`. -/// -/// # Example -/// -/// Use the reconfigurability feature and build the log spec programmatically. -/// -/// ```rust -/// use flexi_logger::{Logger, LogSpecBuilder}; -/// use log::LevelFilter; -/// -/// fn main() { -/// // Build the initial log specification -/// let mut builder = LogSpecBuilder::new(); // default is LevelFilter::Off -/// builder.default(LevelFilter::Info); -/// builder.module("karl", LevelFilter::Debug); -/// -/// // Initialize Logger, keep builder alive -/// let mut logger_reconf_handle = Logger::with(builder.build()) -/// // your logger configuration goes here, as usual -/// .start() -/// .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); -/// -/// // ... -/// -/// // Modify builder and update the logger -/// builder.default(LevelFilter::Error); -/// builder.remove("karl"); -/// builder.module("emma", LevelFilter::Trace); -/// -/// logger_reconf_handle.set_new_spec(builder.build()); -/// -/// // ... -/// } -/// ``` -#[derive(Clone, Debug, Default)] -pub struct LogSpecBuilder { - module_filters: HashMap, LevelFilter>, -} - -impl LogSpecBuilder { - /// Creates a `LogSpecBuilder` with all logging turned off. - #[must_use] - pub fn new() -> Self { - let mut modfilmap = HashMap::new(); - modfilmap.insert(None, LevelFilter::Off); - Self { - module_filters: modfilmap, - } - } - - /// Creates a `LogSpecBuilder` from given module filters. - #[must_use] - pub fn from_module_filters(module_filters: &[ModuleFilter]) -> Self { - let mut modfilmap = HashMap::new(); - for mf in module_filters { - modfilmap.insert(mf.module_name.clone(), mf.level_filter); - } - Self { - module_filters: modfilmap, - } - } - - /// Adds a default log level filter, or updates the default log level filter. - pub fn default(&mut self, lf: LevelFilter) -> &mut Self { - self.module_filters.insert(None, lf); - self - } - - /// Adds a log level filter, or updates the log level filter, for a module. - pub fn module>(&mut self, module_name: M, lf: LevelFilter) -> &mut Self { - self.module_filters - .insert(Some(module_name.as_ref().to_owned()), lf); - self - } - - /// Adds a log level filter, or updates the log level filter, for a module. - pub fn remove>(&mut self, module_name: M) -> &mut Self { - self.module_filters - .remove(&Some(module_name.as_ref().to_owned())); - self - } - - /// Adds log level filters from a `LogSpecification`. 
- pub fn insert_modules_from(&mut self, other: LogSpecification) -> &mut Self { - for module_filter in other.module_filters { - self.module_filters - .insert(module_filter.module_name, module_filter.level_filter); - } - self - } - - /// Creates a log specification without text filter. - #[must_use] - pub fn finalize(self) -> LogSpecification { - LogSpecification { - module_filters: self.module_filters.into_vec_module_filter(), - #[cfg(feature = "textfilter")] - textfilter: None, - } - } - - /// Creates a log specification with text filter. - /// - /// This method is only avaible with feature `textfilter`, which is a default feature. - #[cfg(feature = "textfilter")] - pub fn finalize_with_textfilter(self, tf: Regex) -> LogSpecification { - LogSpecification { - module_filters: self.module_filters.into_vec_module_filter(), - textfilter: Some(tf), - } - } - - /// Creates a log specification without being consumed. - #[must_use] - pub fn build(&self) -> LogSpecification { - LogSpecification { - module_filters: self.module_filters.clone().into_vec_module_filter(), - #[cfg(feature = "textfilter")] - textfilter: None, - } - } - - /// Creates a log specification without being consumed, optionally with a text filter. - /// - /// This method is only avaible with feature `textfilter`, which is a default feature. - #[cfg(feature = "textfilter")] - pub fn build_with_textfilter(&self, tf: Option) -> LogSpecification { - LogSpecification { - module_filters: self.module_filters.clone().into_vec_module_filter(), - textfilter: tf, - } - } -} - -trait IntoVecModuleFilter { - fn into_vec_module_filter(self) -> Vec; -} -impl IntoVecModuleFilter for HashMap, LevelFilter> { - fn into_vec_module_filter(self) -> Vec { - let mf: Vec = self - .into_iter() - .map(|(k, v)| ModuleFilter { - module_name: k, - level_filter: v, - }) - .collect(); - mf.level_sort() - } -} - -trait LevelSort { - fn level_sort(self) -> Vec; -} -impl LevelSort for Vec { - /// Sort the module filters by length of their name, - /// this allows a little more efficient lookup at runtime. 
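The effect of this longest-name-first ordering is easiest to see with overlapping module filters, as in this sketch (module names invented, mirroring the tests below):

```rust
use flexi_logger::LogSpecification;
use log::Level;

fn longest_match_first() {
    let spec = LogSpecification::parse("abcd = info, abcd::mod1 = error").unwrap();
    // The longer, more specific "abcd::mod1" entry is checked first ...
    assert!(!spec.enabled(Level::Warn, "abcd::mod1::foo"));
    // ... while other submodules of "abcd" fall back to the shorter entry.
    assert!(spec.enabled(Level::Info, "abcd::mod2"));
}
```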
- fn level_sort(mut self) -> Vec { - self.sort_by(|a, b| { - let a_len = a.module_name.as_ref().map_or(0, String::len); - let b_len = b.module_name.as_ref().map_or(0, String::len); - b_len.cmp(&a_len) - }); - self - } -} - -#[cfg(test)] -mod tests { - use crate::LogSpecification; - use log::{Level, LevelFilter}; - - #[test] - fn parse_logging_spec_valid() { - let spec = LogSpecification::parse("crate1::mod1=error,crate1::mod2,crate2=debug").unwrap(); - assert_eq!(spec.module_filters().len(), 3); - assert_eq!( - spec.module_filters()[0].module_name, - Some("crate1::mod1".to_string()) - ); - assert_eq!(spec.module_filters()[0].level_filter, LevelFilter::Error); - - assert_eq!( - spec.module_filters()[1].module_name, - Some("crate1::mod2".to_string()) - ); - assert_eq!(spec.module_filters()[1].level_filter, LevelFilter::max()); - - assert_eq!( - spec.module_filters()[2].module_name, - Some("crate2".to_string()) - ); - assert_eq!(spec.module_filters()[2].level_filter, LevelFilter::Debug); - - #[cfg(feature = "textfilter")] - assert!(spec.text_filter().is_none()); - } - - #[test] - fn parse_logging_spec_invalid_crate() { - // test parse_logging_spec with multiple = in specification - assert!(LogSpecification::parse("crate1::mod1=warn=info,crate2=debug").is_err()); - } - - #[test] - fn parse_logging_spec_wrong_log_level() { - assert!(LogSpecification::parse("crate1::mod1=wrong, crate2=warn").is_err()); - } - - #[test] - fn parse_logging_spec_empty_log_level() { - assert!(LogSpecification::parse("crate1::mod1=wrong, crate2=").is_err()); - } - - #[test] - fn parse_logging_spec_global() { - let spec = LogSpecification::parse("warn,crate2=debug").unwrap(); - assert_eq!(spec.module_filters().len(), 2); - - assert_eq!(spec.module_filters()[1].module_name, None); - assert_eq!(spec.module_filters()[1].level_filter, LevelFilter::Warn); - - assert_eq!( - spec.module_filters()[0].module_name, - Some("crate2".to_string()) - ); - assert_eq!(spec.module_filters()[0].level_filter, LevelFilter::Debug); - - #[cfg(feature = "textfilter")] - assert!(spec.text_filter().is_none()); - } - - #[test] - #[cfg(feature = "textfilter")] - fn parse_logging_spec_valid_filter() { - let spec = LogSpecification::parse(" crate1::mod1 = error , crate1::mod2,crate2=debug/abc") - .unwrap(); - assert_eq!(spec.module_filters().len(), 3); - - assert_eq!( - spec.module_filters()[0].module_name, - Some("crate1::mod1".to_string()) - ); - assert_eq!(spec.module_filters()[0].level_filter, LevelFilter::Error); - - assert_eq!( - spec.module_filters()[1].module_name, - Some("crate1::mod2".to_string()) - ); - assert_eq!(spec.module_filters()[1].level_filter, LevelFilter::max()); - - assert_eq!( - spec.module_filters()[2].module_name, - Some("crate2".to_string()) - ); - assert_eq!(spec.module_filters()[2].level_filter, LevelFilter::Debug); - assert!( - spec.text_filter().is_some() - && spec.text_filter().as_ref().unwrap().to_string() == "abc" - ); - } - - #[test] - fn parse_logging_spec_invalid_crate_filter() { - assert!(LogSpecification::parse("crate1::mod1=error=warn,crate2=debug/a.c").is_err()); - } - - #[test] - #[cfg(feature = "textfilter")] - fn parse_logging_spec_empty_with_filter() { - let spec = LogSpecification::parse("crate1/a*c").unwrap(); - assert_eq!(spec.module_filters().len(), 1); - assert_eq!( - spec.module_filters()[0].module_name, - Some("crate1".to_string()) - ); - assert_eq!(spec.module_filters()[0].level_filter, LevelFilter::max()); - assert!( - spec.text_filter().is_some() - && 
spec.text_filter().as_ref().unwrap().to_string() == "a*c" - ); - } - - #[test] - fn reuse_logspec_builder() { - let mut builder = crate::LogSpecBuilder::new(); - - builder.default(LevelFilter::Info); - builder.module("carlo", LevelFilter::Debug); - builder.module("toni", LevelFilter::Warn); - let spec1 = builder.build(); - - assert_eq!( - spec1.module_filters()[0].module_name, - Some("carlo".to_string()) - ); - assert_eq!(spec1.module_filters()[0].level_filter, LevelFilter::Debug); - - assert_eq!( - spec1.module_filters()[1].module_name, - Some("toni".to_string()) - ); - assert_eq!(spec1.module_filters()[1].level_filter, LevelFilter::Warn); - - assert_eq!(spec1.module_filters().len(), 3); - assert_eq!(spec1.module_filters()[2].module_name, None); - assert_eq!(spec1.module_filters()[2].level_filter, LevelFilter::Info); - - builder.default(LevelFilter::Error); - builder.remove("carlo"); - builder.module("greta", LevelFilter::Trace); - let spec2 = builder.build(); - - assert_eq!(spec2.module_filters().len(), 3); - assert_eq!(spec2.module_filters()[2].module_name, None); - assert_eq!(spec2.module_filters()[2].level_filter, LevelFilter::Error); - - assert_eq!( - spec2.module_filters()[0].module_name, - Some("greta".to_string()) - ); - assert_eq!(spec2.module_filters()[0].level_filter, LevelFilter::Trace); - - assert_eq!( - spec2.module_filters()[1].module_name, - Some("toni".to_string()) - ); - assert_eq!(spec2.module_filters()[1].level_filter, LevelFilter::Warn); - } - - /////////////////////////////////////////////////////// - /////////////////////////////////////////////////////// - #[test] - fn match_full_path() { - let spec = LogSpecification::parse("crate2=info,crate1::mod1=warn").unwrap(); - assert!(spec.enabled(Level::Warn, "crate1::mod1")); - assert!(!spec.enabled(Level::Info, "crate1::mod1")); - assert!(spec.enabled(Level::Info, "crate2")); - assert!(!spec.enabled(Level::Debug, "crate2")); - } - - #[test] - fn no_match() { - let spec = LogSpecification::parse("crate2=info,crate1::mod1=warn").unwrap(); - assert!(!spec.enabled(Level::Warn, "crate3")); - } - - #[test] - fn match_beginning() { - let spec = LogSpecification::parse("crate2=info,crate1::mod1=warn").unwrap(); - assert!(spec.enabled(Level::Info, "crate2::mod1")); - } - - #[test] - fn match_beginning_longest_match() { - let spec = LogSpecification::parse( - "abcd = info, abcd::mod1 = error, klmn::mod = debug, klmn = info", - ) - .unwrap(); - assert!(spec.enabled(Level::Error, "abcd::mod1::foo")); - assert!(!spec.enabled(Level::Warn, "abcd::mod1::foo")); - assert!(spec.enabled(Level::Warn, "abcd::mod2::foo")); - assert!(!spec.enabled(Level::Debug, "abcd::mod2::foo")); - - assert!(!spec.enabled(Level::Debug, "klmn")); - assert!(!spec.enabled(Level::Debug, "klmn::foo::bar")); - assert!(spec.enabled(Level::Info, "klmn::foo::bar")); - } - - #[test] - fn match_default1() { - let spec = LogSpecification::parse("info,abcd::mod1=warn").unwrap(); - assert!(spec.enabled(Level::Warn, "abcd::mod1")); - assert!(spec.enabled(Level::Info, "crate2::mod2")); - } - - #[test] - fn match_default2() { - let spec = LogSpecification::parse("modxyz=error, info, abcd::mod1=warn").unwrap(); - assert!(spec.enabled(Level::Warn, "abcd::mod1")); - assert!(spec.enabled(Level::Info, "crate2::mod2")); - } - - #[test] - fn rocket() { - let spec = LogSpecification::parse("info, rocket=off, serenity=off").unwrap(); - assert!(spec.enabled(Level::Info, "itsme")); - assert!(spec.enabled(Level::Warn, "abcd::mod1")); - assert!(!spec.enabled(Level::Debug, 
"abcd::mod1")); - assert!(!spec.enabled(Level::Error, "rocket::rocket")); - assert!(!spec.enabled(Level::Warn, "rocket::rocket")); - assert!(!spec.enabled(Level::Info, "rocket::rocket")); - } - - #[test] - fn add_filters() { - let mut builder = crate::LogSpecBuilder::new(); - - builder.default(LevelFilter::Debug); - builder.module("carlo", LevelFilter::Debug); - builder.module("toni", LevelFilter::Warn); - - builder.insert_modules_from( - LogSpecification::parse("info, may=error, toni::heart = trace").unwrap(), - ); - let spec = builder.build(); - - assert_eq!(spec.module_filters().len(), 5); - - assert_eq!( - spec.module_filters()[0].module_name, - Some("toni::heart".to_string()) - ); - assert_eq!(spec.module_filters()[0].level_filter, LevelFilter::Trace); - - assert_eq!( - spec.module_filters()[1].module_name, - Some("carlo".to_string()) - ); - assert_eq!(spec.module_filters()[1].level_filter, LevelFilter::Debug); - - assert_eq!( - spec.module_filters()[2].module_name, - Some("toni".to_string()) - ); - assert_eq!(spec.module_filters()[2].level_filter, LevelFilter::Warn); - - assert_eq!( - spec.module_filters()[3].module_name, - Some("may".to_string()) - ); - assert_eq!(spec.module_filters()[3].level_filter, LevelFilter::Error); - - assert_eq!(spec.module_filters()[4].module_name, None); - assert_eq!(spec.module_filters()[4].level_filter, LevelFilter::Info); - } - - #[test] - fn zero_level() { - let spec = LogSpecification::parse("info,crate1::mod1=off").unwrap(); - assert!(!spec.enabled(Level::Error, "crate1::mod1")); - assert!(spec.enabled(Level::Info, "crate2::mod2")); - } -} - -#[cfg(test)] -#[cfg(feature = "specfile_without_notification")] -mod test_with_specfile { - #[cfg(feature = "specfile_without_notification")] - use crate::LogSpecification; - - #[test] - fn specfile() { - compare_specs("", ""); - - compare_specs( - "[modules]\n\ - ", - "", - ); - - compare_specs( - "global_level = 'info'\n\ - \n\ - [modules]\n\ - ", - "info", - ); - - compare_specs( - "global_level = 'info'\n\ - \n\ - [modules]\n\ - 'mod1::mod2' = 'debug'\n\ - 'mod3' = 'trace'\n\ - ", - "info, mod1::mod2 = debug, mod3 = trace", - ); - - compare_specs( - "global_level = 'info'\n\ - global_pattern = 'Foo'\n\ - \n\ - [modules]\n\ - 'mod1::mod2' = 'debug'\n\ - 'mod3' = 'trace'\n\ - ", - "info, mod1::mod2 = debug, mod3 = trace /Foo", - ); - } - - #[cfg(feature = "specfile_without_notification")] - fn compare_specs(toml: &str, spec_string: &str) { - let ls_toml = LogSpecification::from_toml(toml).unwrap(); - let ls_spec = LogSpecification::parse(spec_string).unwrap(); - - assert_eq!(ls_toml.module_filters, ls_spec.module_filters); - assert_eq!(ls_toml.textfilter.is_none(), ls_spec.textfilter.is_none()); - if ls_toml.textfilter.is_some() && ls_spec.textfilter.is_some() { - assert_eq!( - ls_toml.textfilter.unwrap().to_string(), - ls_spec.textfilter.unwrap().to_string() - ); - } - } -} diff --git a/agent/support/rust/flexi_logger/src/logger.rs b/agent/support/rust/flexi_logger/src/logger.rs deleted file mode 100644 index 4ed85a2e2..000000000 --- a/agent/support/rust/flexi_logger/src/logger.rs +++ /dev/null @@ -1,936 +0,0 @@ -use crate::flexi_logger::FlexiLogger; -use crate::formats::default_format; -#[cfg(feature = "atty")] -use crate::formats::{AdaptiveFormat, Stream}; -use crate::primary_writer::PrimaryWriter; -use crate::writers::{FileLogWriter, FileLogWriterBuilder, LogWriter}; -use crate::{ - Cleanup, Criterion, FlexiLoggerError, FormatFunction, LogSpecification, Naming, - ReconfigurationHandle, -}; - 
-#[cfg(feature = "specfile")] -use notify::{watcher, DebouncedEvent, RecursiveMode, Watcher}; -use std::collections::HashMap; -#[cfg(feature = "specfile_without_notification")] -use std::io::Read; -use std::path::PathBuf; -use std::sync::{Arc, RwLock}; - -/// The entry-point for using `flexi_logger`. -/// -/// A simple example with file logging might look like this: -/// -/// ```rust -/// use flexi_logger::{Duplicate,Logger}; -/// -/// Logger::with_str("info, mycrate = debug") -/// .log_to_file() -/// .duplicate_to_stderr(Duplicate::Warn) -/// .start() -/// .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); -/// -/// ``` -/// -/// -/// `Logger` is a builder class that allows you to -/// * specify your desired (initial) loglevel-specification -/// * either programmatically as a String -/// ([`Logger::with_str()`](struct.Logger.html#method.with_str)) -/// * or by providing a String in the environment -/// ([`Logger::with_env()`](struct.Logger.html#method.with_env)), -/// * or by combining both options -/// ([`Logger::with_env_or_str()`](struct.Logger.html#method.with_env_or_str)), -/// * or by building a `LogSpecification` programmatically -/// ([`Logger::with()`](struct.Logger.html#method.with)), -/// * use the desired configuration methods, -/// * and finally start the logger with -/// -/// * [`start()`](struct.Logger.html#method.start), -/// * or [`start_with_specfile()`](struct.Logger.html#method.start_with_specfile). -pub struct Logger { - spec: LogSpecification, - parse_errs: Option, - log_target: LogTarget, - duplicate_err: Duplicate, - duplicate_out: Duplicate, - format_for_file: FormatFunction, - format_for_stderr: FormatFunction, - format_for_stdout: FormatFunction, - format_for_writer: FormatFunction, - #[cfg(feature = "colors")] - o_palette: Option, - flwb: FileLogWriterBuilder, - other_writers: HashMap>, -} - -/// Describes the default log target. -/// -/// All log messages, in which no target is explicitly defined, will be written to -/// the default log target. -/// -/// See the [writers](writers/index.html) module for -/// how to specify non-default log targets in log macro calls, -/// and the usage of non-default log writers. -pub enum LogTarget { - /// Log is written to stderr. - /// - /// This is the default behavior of `flexi_logger`. - StdErr, - /// Log is written to stdout. - StdOut, - /// Log is written to a file. - /// - /// The default pattern for the filename is '\\_\\_\.\', - /// e.g. `myprog_2015-07-08_10-44-11.log`. - File, - /// Log is written to an alternative `LogWriter` implementation. - /// - Writer(Box), - /// Log is written to a file, as with `LogTarget::File`, _and_ to an alternative - /// `LogWriter` implementation. - FileAndWriter(Box), - /// Log is processed, including duplication, but not written to a primary target destination. - /// - /// This can be useful e.g. for running application tests with all log-levels active and still - /// avoiding tons of log files etc. - /// Such tests ensure that the log calls which are normally not active - /// will not cause undesired side-effects when activated - /// (note that the log macros may prevent arguments of inactive log-calls from being evaluated). - /// - /// Combined with - /// [`duplicate_to_stdout()`](struct.Logger.html#method.duplicate_to_stdout) - /// and - /// [`duplicate_to_stderr()`](struct.Logger.html#method.duplicate_to_stderr) - /// it can also be used if you want to get logs both to stdout and stderr, but not to a file. 
- DevNull, -} - -/// Create a Logger instance and define how to access the (initial) -/// loglevel-specification. -impl Logger { - /// Creates a Logger that you provide with an explicit `LogSpecification`. - /// By default, logs are written with `default_format` to `stderr`. - #[must_use] - pub fn with(logspec: LogSpecification) -> Self { - Self::from_spec_and_errs(logspec, None) - } - - /// Creates a Logger that reads the `LogSpecification` from a String or &str. - /// [See `LogSpecification`](struct.LogSpecification.html) for the syntax. - #[must_use] - pub fn with_str>(s: S) -> Self { - Self::from_result(LogSpecification::parse(s.as_ref())) - } - - /// Creates a Logger that reads the `LogSpecification` from the environment variable `RUST_LOG`. - #[must_use] - pub fn with_env() -> Self { - Self::from_result(LogSpecification::env()) - } - - /// Creates a Logger that reads the `LogSpecification` from the environment variable `RUST_LOG`, - /// or derives it from the given String, if `RUST_LOG` is not set. - #[must_use] - pub fn with_env_or_str>(s: S) -> Self { - Self::from_result(LogSpecification::env_or_parse(s)) - } - - fn from_result(result: Result) -> Self { - match result { - Ok(logspec) => Self::from_spec_and_errs(logspec, None), - Err(e) => match e { - FlexiLoggerError::Parse(parse_errs, logspec) => { - Self::from_spec_and_errs(logspec, Some(parse_errs)) - } - _ => Self::from_spec_and_errs(LogSpecification::off(), None), - }, - } - } - - fn from_spec_and_errs(spec: LogSpecification, parse_errs: Option) -> Self { - #[cfg(feature = "colors")] - { - // Enable ASCII escape sequence support on Windows consoles, - // but disable coloring on unsupported Windows consoles - if cfg!(windows) && !yansi::Paint::enable_windows_ascii() { - yansi::Paint::disable(); - } - } - - Self { - spec, - parse_errs, - log_target: LogTarget::StdErr, - duplicate_err: Duplicate::None, - duplicate_out: Duplicate::None, - format_for_file: default_format, - #[cfg(feature = "colors")] - format_for_stdout: AdaptiveFormat::Default.format_function(Stream::StdOut), - #[cfg(feature = "colors")] - format_for_stderr: AdaptiveFormat::Default.format_function(Stream::StdErr), - - #[cfg(not(feature = "colors"))] - format_for_stdout: default_format, - #[cfg(not(feature = "colors"))] - format_for_stderr: default_format, - - format_for_writer: default_format, - #[cfg(feature = "colors")] - o_palette: None, - flwb: FileLogWriter::builder(), - other_writers: HashMap::>::new(), - } - } -} - -/// Simple methods for influencing the behavior of the Logger. -impl Logger { - /// Allows verifying that no parsing errors have occured in the used factory method, - /// and examining the parse error. - /// - /// Most of the factory methods for Logger (`Logger::with_...()`) - /// parse a log specification String, and deduce from it a `LogSpecification` object. - /// If parsing fails, errors are reported to stdout, but effectively ignored. - /// In worst case, nothing is logged! - /// - /// This method gives programmatic access to parse errors, if there were any, so that errors - /// don't happen unnoticed. 
- /// - /// In the following example we just panic if the spec was not free of errors: - /// - /// ```should_panic - /// # use flexi_logger::{Logger,LogTarget}; - /// Logger::with_str("hello world") - /// .check_parser_error() - /// .unwrap() // <-- here we could do better than panic - /// .log_target(LogTarget::File) - /// .start(); - /// ``` - /// - /// # Errors - /// - /// `FlexiLoggerError::Parse` if the input for the log specification is malformed. - pub fn check_parser_error(self) -> Result { - match self.parse_errs { - Some(parse_errs) => Err(FlexiLoggerError::Parse(parse_errs, self.spec)), - None => Ok(self), - } - } - - /// Is equivalent to - /// [`log_target`](struct.Logger.html#method.log_target)`(`[`LogTarget::File`]( - /// enum.LogTarget.html#variant.File)`)`. - pub fn log_to_file(self) -> Self { - self.log_target(LogTarget::File) - } - - /// Write the main log output to the specified target. - /// - /// By default, i.e. if this method is not called, the log target `LogTarget::StdErr` is used. - pub fn log_target(mut self, log_target: LogTarget) -> Self { - self.log_target = log_target; - self - } - - /// Makes the logger print an info message to stdout with the name of the logfile - /// when a logfile is opened for writing. - pub fn print_message(mut self) -> Self { - self.flwb = self.flwb.print_message(); - self - } - - /// Makes the logger write messages with the specified minimum severity additionally to stderr. - /// - /// Works with all log targets except `StdErr` and `StdOut`. - pub fn duplicate_to_stderr(mut self, dup: Duplicate) -> Self { - self.duplicate_err = dup; - self - } - - /// Makes the logger write messages with the specified minimum severity additionally to stdout. - /// - /// Works with all log targets except `StdErr` and `StdOut`. - pub fn duplicate_to_stdout(mut self, dup: Duplicate) -> Self { - self.duplicate_out = dup; - self - } - - /// Makes the logger use the provided format function for all messages - /// that are written to files, stderr, stdout, or to an additional writer. - /// - /// You can either choose one of the provided log-line formatters, - /// or you create and use your own format function with the signature
- /// ```rust,ignore - /// fn( - /// write: &mut dyn std::io::Write, - /// now: &mut DeferredNow, - /// record: &Record, - /// ) -> Result<(), std::io::Error> - /// ``` - /// - /// By default, - /// [`default_format()`](fn.default_format.html) is used for output to files - /// and to custom writers, and [`AdaptiveFormat::Default`](enum.AdaptiveFormat.html#variant.Default) - /// is used for output to `stderr` and `stdout`. - /// - /// If the feature `colors` is switched off, - /// `default_format()` is used for all outputs. - pub fn format(mut self, format: FormatFunction) -> Self { - self.format_for_file = format; - self.format_for_stderr = format; - self.format_for_stdout = format; - self.format_for_writer = format; - self - } - - /// Makes the logger use the provided format function for messages - /// that are written to files. - /// - /// Regarding the default, see [`Logger::format`](struct.Logger.html#method.format). - pub fn format_for_files(mut self, format: FormatFunction) -> Self { - self.format_for_file = format; - self - } - - /// Makes the logger use the specified format for messages that are written to `stderr`. - /// Coloring is used if `stderr` is a tty. - /// - /// Regarding the default, see [`Logger::format`](struct.Logger.html#method.format). - /// - /// Only available with feature `colors`. - #[cfg(feature = "atty")] - pub fn adaptive_format_for_stderr(mut self, adaptive_format: AdaptiveFormat) -> Self { - self.format_for_stderr = adaptive_format.format_function(Stream::StdErr); - self - } - - /// Makes the logger use the specified format for messages that are written to `stdout`. - /// Coloring is used if `stdout` is a tty. - /// - /// Regarding the default, see [`Logger::format`](struct.Logger.html#method.format). - /// - /// Only available with feature `colors`. - #[cfg(feature = "atty")] - pub fn adaptive_format_for_stdout(mut self, adaptive_format: AdaptiveFormat) -> Self { - self.format_for_stdout = adaptive_format.format_function(Stream::StdOut); - self - } - - /// Makes the logger use the provided format function for messages - /// that are written to stderr. - /// - /// Regarding the default, see [`Logger::format`](struct.Logger.html#method.format). - pub fn format_for_stderr(mut self, format: FormatFunction) -> Self { - self.format_for_stderr = format; - self - } - - /// Makes the logger use the provided format function to format messages - /// that are written to stdout. - /// - /// Regarding the default, see [`Logger::format`](struct.Logger.html#method.format). - pub fn format_for_stdout(mut self, format: FormatFunction) -> Self { - self.format_for_stdout = format; - self - } - - /// Allows specifying a format function for an additional writer. - /// Note that it is up to the implementation of the additional writer - /// whether it evaluates this setting or not. - /// - /// Regarding the default, see [`Logger::format`](struct.Logger.html#method.format). - pub fn format_for_writer(mut self, format: FormatFunction) -> Self { - self.format_for_writer = format; - self - } - - /// Sets the color palette for function [`style`](fn.style.html), which is used in the - /// provided coloring format functions. - /// - /// The palette given here overrides the default palette. - /// - /// The palette is specified in form of a String that contains a semicolon-separated list - /// of numbers (0..=255) and/or dashes (´-´). - /// The first five values denote the fixed color that is - /// used for coloring `error`, `warn`, `info`, `debug`, and `trace` messages. 
- /// - /// The String `"196;208;-;7;8"` describes the default palette, where color 196 is - /// used for error messages, and so on. The `-` means that no coloring is done, - /// i.e., with `"-;-;-;-;-"` all coloring is switched off. - /// - /// The palette can further be overridden at runtime by setting the environment variable - /// `FLEXI_LOGGER_PALETTE` to a palette String. This allows adapting the used text colors to - /// differently colored terminal backgrounds. - /// - /// For your convenience, if you want to specify your own palette, - /// you can produce a colored list with all 255 colors with `cargo run --example colors`. - /// - /// Only available with feature `colors`. - #[cfg(feature = "colors")] - pub fn set_palette(mut self, palette: String) -> Self { - self.o_palette = Some(palette); - self - } - - /// Specifies a folder for the log files. - /// - /// This parameter only has an effect if `log_to_file()` is used, too. - /// The specified folder will be created if it does not exist. - /// By default, the log files are created in the folder where the program was started. - pub fn directory>(mut self, directory: S) -> Self { - self.flwb = self.flwb.directory(directory); - self - } - - /// Specifies a suffix for the log files. - /// - /// This parameter only has an effect if `log_to_file()` is used, too. - pub fn suffix>(mut self, suffix: S) -> Self { - self.flwb = self.flwb.suffix(suffix); - self - } - - /// Makes the logger not include a timestamp into the names of the log files. - /// - /// This option only has an effect if `log_to_file()` is used, too, - /// and is ignored if rotation is used. - pub fn suppress_timestamp(mut self) -> Self { - self.flwb = self.flwb.suppress_timestamp(); - self - } - - /// When rotation is used with some `Cleanup` variant, then this option defines - /// if the cleanup activities (finding files, deleting files, evtl compressing files) is done - /// in the current thread (in the current log-call), or whether cleanup is delegated to a - /// background thread. - /// - /// As of `flexi_logger` version `0.14.7`, - /// the cleanup activities are done by default in a background thread. - /// This minimizes the blocking impact to your application caused by IO operations. - /// - /// In earlier versions of `flexi_logger`, or if you call this method with - /// `use_background_thread = false`, - /// the cleanup is done in the thread that is currently causing a file rotation. - #[must_use] - pub fn cleanup_in_background_thread(mut self, use_background_thread: bool) -> Self { - self.flwb = self - .flwb - .cleanup_in_background_thread(use_background_thread); - self - } - - /// Prevent indefinite growth of the log file by applying file rotation - /// and a clean-up strategy for older log files. - /// - /// By default, the log file is fixed while your program is running and will grow indefinitely. - /// With this option being used, when the log file reaches the specified criterion, - /// the file will be closed and a new file will be opened. - /// - /// Note that also the filename pattern changes: - /// - /// - by default, no timestamp is added to the filename - /// - the logs are always written to a file with infix `_rCURRENT` - /// - when the rotation criterion is fulfilled, it is closed and renamed to a file - /// with another infix (see `Naming`), - /// and then the logging continues again to the (fresh) file with infix `_rCURRENT`. 
- /// - /// Example: - /// - /// After some logging with your program `my_prog` and rotation with `Naming::Numbers`, - /// you will find files like - /// - /// ```text - /// my_prog_r00000.log - /// my_prog_r00001.log - /// my_prog_r00002.log - /// my_prog_rCURRENT.log - /// ``` - /// - /// ## Parameters - /// - /// `rotate_over_size` is given in bytes, e.g. `10_000_000` will rotate - /// files once they reach a size of 10 MiB. - /// - /// `cleanup` defines the strategy for dealing with older files. - /// See [Cleanup](enum.Cleanup.html) for details. - pub fn rotate(mut self, criterion: Criterion, naming: Naming, cleanup: Cleanup) -> Self { - self.flwb = self.flwb.rotate(criterion, naming, cleanup); - self - } - - /// Makes the logger append to the specified output file, if it exists already; - /// by default, the file would be truncated. - /// - /// This option only has an effect if `log_to_file()` is used, too. - /// This option will hardly make an effect if `suppress_timestamp()` is not used. - pub fn append(mut self) -> Self { - self.flwb = self.flwb.append(); - self - } - - /// The specified String is added to the log file name after the program name. - /// - /// This option only has an effect if `log_to_file()` is used, too. - pub fn discriminant>(mut self, discriminant: S) -> Self { - self.flwb = self.flwb.discriminant(discriminant); - self - } - - /// The specified path will be used on linux systems to create a symbolic link - /// to the current log file. - /// - /// This option has no effect on filesystems where symlinks are not supported, - /// and it only has an effect if `log_to_file()` is used, too. - /// - /// ### Example - /// - /// You can use the symbolic link to follow the log output with `tail`, - /// even if the log files are rotated. - /// - /// Assuming the link has the name `link_to_log_file`, then use: - /// - /// ```text - /// tail --follow=name --max-unchanged-stats=1 --retry link_to_log_file - /// ``` - /// - pub fn create_symlink>(mut self, symlink: P) -> Self { - self.flwb = self.flwb.create_symlink(symlink); - self - } - - /// Registers a `LogWriter` implementation under the given target name. - /// - /// The target name must not start with an underscore. - /// - /// See [the module documentation of `writers`](writers/index.html). - pub fn add_writer>( - mut self, - target_name: S, - writer: Box, - ) -> Self { - self.other_writers.insert(target_name.into(), writer); - self - } - - /// Use this function to set send handler for sending logs to server. - pub fn send_handler(mut self, sender: plugin::Sender) -> Self { - self.flwb = self.flwb.sender(sender); - self - } - /// Set name which will be used for sending logs to server. - pub fn plugin_name(mut self, plugin_name: String) -> Self { - self.flwb = self.flwb.name(plugin_name); - self - } - - /// Use Windows line endings, rather than just `\n`. - pub fn use_windows_line_ending(mut self) -> Self { - self.flwb = self.flwb.use_windows_line_ending(); - self - } -} - -/// Alternative set of methods to control the behavior of the Logger. -/// Use these methods when you want to control the settings flexibly, -/// e.g. with commandline arguments via `docopts` or `clap`. -impl Logger { - /// With true, makes the logger print an info message to stdout, each time - /// when a new file is used for log-output. - pub fn o_print_message(mut self, print_message: bool) -> Self { - self.flwb = self.flwb.o_print_message(print_message); - self - } - - /// Specifies a folder for the log files. 
- /// - /// This parameter only has an effect if `log_to_file` is set to true. - /// If the specified folder does not exist, the initialization will fail. - /// With None, the log files are created in the folder where the program was started. - pub fn o_directory<P: Into<PathBuf>>(mut self, directory: Option<P>
) -> Self { - self.flwb = self.flwb.o_directory(directory); - self - } - - /// By default, and with None, the log file will grow indefinitely. - /// If a `rotate_config` is set, when the log file reaches or exceeds the specified size, - /// the file will be closed and a new file will be opened. - /// Also the filename pattern changes: instead of the timestamp, a serial number - /// is included into the filename. - /// - /// The size is given in bytes, e.g. `o_rotate_over_size(Some(1_000))` will rotate - /// files once they reach a size of 1 kB. - /// - /// The cleanup strategy allows delimiting the used space on disk. - pub fn o_rotate(mut self, rotate_config: Option<(Criterion, Naming, Cleanup)>) -> Self { - self.flwb = self.flwb.o_rotate(rotate_config); - self - } - - /// With true, makes the logger include a timestamp into the names of the log files. - /// `true` is the default, but `rotate_over_size` sets it to `false`. - /// With this method you can set it to `true` again. - /// - /// This parameter only has an effect if `log_to_file` is set to true. - pub fn o_timestamp(mut self, timestamp: bool) -> Self { - self.flwb = self.flwb.o_timestamp(timestamp); - self - } - - /// This option only has an effect if `log_to_file` is set to true. - /// - /// If append is set to true, makes the logger append to the specified output file, if it exists. - /// By default, or with false, the file would be truncated. - /// - /// This option will hardly make an effect if `suppress_timestamp()` is not used. - - pub fn o_append(mut self, append: bool) -> Self { - self.flwb = self.flwb.o_append(append); - self - } - - /// This option only has an effect if `log_to_file` is set to true. - /// - /// The specified String is added to the log file name. - pub fn o_discriminant>(mut self, discriminant: Option) -> Self { - self.flwb = self.flwb.o_discriminant(discriminant); - self - } - - /// This option only has an effect if `log_to_file` is set to true. - /// - /// If a String is specified, it will be used on linux systems to create in the current folder - /// a symbolic link with this name to the current log file. - pub fn o_create_symlink>(mut self, symlink: Option
<P>
) -> Self { - self.flwb = self.flwb.o_create_symlink(symlink); - self - } -} - -/// Finally, start logging, optionally with a spec-file. -impl Logger { - /// Consumes the Logger object and initializes `flexi_logger`. - /// - /// The returned reconfiguration handle allows updating the log specification programmatically - /// later on, e.g. to intensify logging for (buggy) parts of a (test) program, etc. - /// See [`ReconfigurationHandle`](struct.ReconfigurationHandle.html) for an example. - /// - /// # Errors - /// - /// Several variants of `FlexiLoggerError` can occur. - pub fn start(self) -> Result { - let (boxed_logger, handle) = self.build()?; - log::set_boxed_logger(boxed_logger)?; - Ok(handle) - } - - /// Builds a boxed logger and a `ReconfigurationHandle` for it, - /// but does not initialize the global logger. - /// - /// The returned boxed logger implements the Log trait and can be installed manually - /// or nested within another logger. - /// - /// The reconfiguration handle allows updating the log specification programmatically - /// later on, e.g. to intensify logging for (buggy) parts of a (test) program, etc. - /// See [`ReconfigurationHandle`](struct.ReconfigurationHandle.html) for an example. - /// - /// # Errors - /// - /// Several variants of `FlexiLoggerError` can occur. - pub fn build(mut self) -> Result<(Box, ReconfigurationHandle), FlexiLoggerError> { - let max_level = self.spec.max_level(); - let spec = Arc::new(RwLock::new(self.spec)); - let other_writers = Arc::new(self.other_writers); - - #[cfg(feature = "colors")] - crate::formats::set_palette(&self.o_palette)?; - - let primary_writer = Arc::new(match self.log_target { - LogTarget::File => { - self.flwb = self.flwb.format(self.format_for_file); - PrimaryWriter::multi( - self.duplicate_err, - self.duplicate_out, - self.format_for_stderr, - self.format_for_stdout, - vec![Box::new(self.flwb.try_build()?)], - ) - } - LogTarget::Writer(mut w) => { - w.format(self.format_for_writer); - PrimaryWriter::multi( - self.duplicate_err, - self.duplicate_out, - self.format_for_stderr, - self.format_for_stdout, - vec![w], - ) - } - LogTarget::FileAndWriter(mut w) => { - self.flwb = self.flwb.format(self.format_for_file); - w.format(self.format_for_writer); - PrimaryWriter::multi( - self.duplicate_err, - self.duplicate_out, - self.format_for_stderr, - self.format_for_stdout, - vec![Box::new(self.flwb.try_build()?), w], - ) - } - LogTarget::StdOut => PrimaryWriter::stdout(self.format_for_stdout), - LogTarget::StdErr => PrimaryWriter::stderr(self.format_for_stderr), - LogTarget::DevNull => PrimaryWriter::black_hole( - self.duplicate_err, - self.duplicate_out, - self.format_for_stderr, - self.format_for_stdout, - ), - }); - - let flexi_logger = FlexiLogger::new( - Arc::clone(&spec), - Arc::clone(&primary_writer), - Arc::clone(&other_writers), - ); - - let handle = ReconfigurationHandle::new(spec, primary_writer, other_writers); - handle.reconfigure(max_level); - Ok((Box::new(flexi_logger), handle)) - } - - /// Consumes the Logger object and initializes `flexi_logger` in a way that - /// subsequently the log specification can be updated manually. - /// - /// Uses the spec that was given to the factory method (`Logger::with()` etc) - /// as initial spec and then tries to read the logspec from a file. - /// - /// If the file does not exist, `flexi_logger` creates the file and fills it - /// with the initial spec (and in the respective file format, of course). 
- /// - /// ## Feature dependency - /// - /// The implementation of this configuration method uses some additional crates - /// that you might not want to depend on with your program if you don't use this functionality. - /// For that reason the method is only available if you activate the - /// `specfile` feature. See `flexi_logger`'s [usage](index.html#usage) section for details. - /// - /// ## Usage - /// - /// A logger initialization like - /// - /// ```ignore - /// use flexi_logger::Logger; - /// Logger::with_str("info")/*...*/.start_with_specfile("logspecification.toml"); - /// ``` - /// - /// will create the file `logspecification.toml` (if it does not yet exist) with this content: - /// - /// ```toml - /// ### Optional: Default log level - /// global_level = 'info' - /// ### Optional: specify a regular expression to suppress all messages that don't match - /// #global_pattern = 'foo' - /// - /// ### Specific log levels per module are optionally defined in this section - /// [modules] - /// #'mod1' = 'warn' - /// #'mod2' = 'debug' - /// #'mod2::mod3' = 'trace' - /// ``` - /// - /// You can subsequently edit and modify the file according to your needs, - /// while the program is running, and it will immediately take your changes into account. - /// - /// Currently only toml-files are supported, the file suffix thus must be `.toml`. - /// - /// The initial spec remains valid if the file cannot be read. - /// - /// If you update the specfile subsequently while the program is running, `flexi_logger` - /// re-reads it automatically and adapts its behavior according to the new content. - /// If the file cannot be read anymore, e.g. because the format is not correct, the - /// previous logspec remains active. - /// If the file is corrected subsequently, the log spec update will work again. - /// - /// # Errors - /// - /// Several variants of `FlexiLoggerError` can occur. - /// - /// # Returns - /// - /// A `ReconfigurationHandle` is returned, predominantly to allow using its - /// [`shutdown`](struct.ReconfigurationHandle.html#method.shutdown) method. - #[cfg(feature = "specfile_without_notification")] - pub fn start_with_specfile>( - self, - specfile: P, - ) -> Result { - // Make logging work, before caring for the specfile - let (boxed_logger, handle) = self.build()?; - log::set_boxed_logger(boxed_logger)?; - setup_specfile(specfile, handle.clone())?; - Ok(handle) - } - - /// Builds a boxed logger and a `ReconfigurationHandle` for it, - /// but does not initialize the global logger. - /// - /// - /// The returned boxed logger implements the Log trait and can be installed manually - /// or nested within another logger. - /// - /// For the properties of the returned logger, - /// see [`start_with_specfile()`](struct.Logger.html#method.start_with_specfile). - /// - /// # Errors - /// - /// Several variants of `FlexiLoggerError` can occur. - /// - /// # Returns - /// - /// A `ReconfigurationHandle` is returned, predominantly to allow using its - /// [`shutdown`](struct.ReconfigurationHandle.html#method.shutdown) method. 
- #[cfg(feature = "specfile_without_notification")] - pub fn build_with_specfile>( - self, - specfile: P, - ) -> Result<(Box, ReconfigurationHandle), FlexiLoggerError> { - let (boxed_log, handle) = self.build()?; - setup_specfile(specfile, handle.clone())?; - Ok((boxed_log, handle)) - } -} - -#[cfg(feature = "specfile_without_notification")] -fn setup_specfile>( - specfile: P, - mut handle: ReconfigurationHandle, -) -> Result<(), FlexiLoggerError> { - let specfile = specfile.as_ref().to_owned(); - synchronize_handle_with_specfile(&mut handle, &specfile)?; - - #[cfg(feature = "specfile")] - { - // Now that the file exists, we can canonicalize the path - let specfile = specfile - .canonicalize() - .map_err(FlexiLoggerError::SpecfileIo)?; - - // Watch the parent folder of the specfile, using debounced events - let (tx, rx) = std::sync::mpsc::channel(); - let debouncing_delay = std::time::Duration::from_millis(1000); - let mut watcher = watcher(tx, debouncing_delay)?; - watcher.watch(&specfile.parent().unwrap(), RecursiveMode::NonRecursive)?; - - // in a separate thread, reread the specfile when it was updated - std::thread::Builder::new() - .name("flexi_logger-specfile-watcher".to_string()) - .stack_size(128 * 1024) - .spawn(move || { - let _anchor_for_watcher = watcher; // keep it alive! - loop { - match rx.recv() { - Ok(debounced_event) => { - // println!("got debounced event {:?}", debounced_event); - match debounced_event { - DebouncedEvent::Create(ref path) - | DebouncedEvent::Write(ref path) => { - if path.canonicalize().map(|x| x == specfile).unwrap_or(false) { - match log_spec_string_from_file(&specfile) - .map_err(FlexiLoggerError::SpecfileIo) - .and_then(|s| LogSpecification::from_toml(&s)) - { - Ok(spec) => handle.set_new_spec(spec), - Err(e) => eprintln!( - "[flexi_logger] rereading the log specification file \ - failed with {:?}, \ - continuing with previous log specification", - e - ), - } - } - } - _event => {} - } - } - Err(e) => { - eprintln!("[flexi_logger] error while watching the specfile: {:?}", e) - } - } - } - })?; - } - Ok(()) -} - -// If the specfile exists, read the file and update the log_spec from it; -// otherwise try to create the file, with the current spec as content, under the specified name. -#[cfg(feature = "specfile_without_notification")] -pub(crate) fn synchronize_handle_with_specfile( - handle: &mut ReconfigurationHandle, - specfile: &std::path::PathBuf, -) -> Result<(), FlexiLoggerError> { - if specfile - .extension() - .unwrap_or_else(|| std::ffi::OsStr::new("")) - .to_str() - .unwrap_or("") - != "toml" - { - return Err(FlexiLoggerError::SpecfileExtension( - "only spec files with extension toml are supported", - )); - } - - if std::path::Path::is_file(specfile) { - let s = log_spec_string_from_file(specfile).map_err(FlexiLoggerError::SpecfileIo)?; - handle.set_new_spec(LogSpecification::from_toml(&s)?); - } else { - if let Some(specfolder) = specfile.parent() { - std::fs::DirBuilder::new() - .recursive(true) - .create(specfolder) - .map_err(FlexiLoggerError::SpecfileIo)?; - } - let mut file = std::fs::OpenOptions::new() - .write(true) - .create_new(true) - .open(specfile) - .map_err(FlexiLoggerError::SpecfileIo)?; - - handle - .current_spec() - .read() - .map_err(|_e| FlexiLoggerError::Poison)? 
- .to_toml(&mut file)?; - } - Ok(()) -} - -#[cfg(feature = "specfile_without_notification")] -pub(crate) fn log_spec_string_from_file>( - specfile: P, -) -> Result { - let mut buf = String::new(); - let mut file = std::fs::File::open(specfile)?; - file.read_to_string(&mut buf)?; - Ok(buf) -} - -/// Used to control which messages are to be duplicated to stderr, when `log_to_file()` is used. -#[derive(Debug)] -pub enum Duplicate { - /// No messages are duplicated. - None, - /// Only error messages are duplicated. - Error, - /// Error and warn messages are duplicated. - Warn, - /// Error, warn, and info messages are duplicated. - Info, - /// Error, warn, info, and debug messages are duplicated. - Debug, - /// All messages are duplicated. - Trace, - /// All messages are duplicated. - All, -} diff --git a/agent/support/rust/flexi_logger/src/parameters.rs b/agent/support/rust/flexi_logger/src/parameters.rs deleted file mode 100644 index 38db2a33d..000000000 --- a/agent/support/rust/flexi_logger/src/parameters.rs +++ /dev/null @@ -1,145 +0,0 @@ -/// Criterion when to rotate the log file. -/// -/// Used in [`Logger::rotate`](struct.Logger.html#method.rotate). -#[derive(Copy, Clone, Debug)] -pub enum Criterion { - /// Rotate the log file when it exceeds the specified size in bytes. - Size(u64), - /// Rotate the log file when it has become older than the specified age. - /// - /// ## Minor limitation - /// - /// ### TL,DR - /// the combination of `Logger::append()` - /// with `Criterion::Age` works OK, but not perfectly correct on Windows or Linux - /// when the program is restarted. - /// - /// ### Details - /// Applying the age criterion works fine while your program is running. - /// Ideally, we should also apply it to the rCURRENT file when the program is restarted - /// and you chose the `Logger::append()` option. - /// - /// Unfortunately, this does not work on Windows, and it does not work on linux, - /// for different reasons. - /// - /// To minimize the impact on age-based file-rotation, - /// `flexi_logger` uses on Windows and linux its initialization time - /// rather than the real file property - /// as the created_at-info of an rCURRENT file that already exists, and the - /// current timestamp when file rotation happens during further execution. - /// Consequently, a left-over rCURRENT file from a previous program run will look newer - /// than it is, and will be used longer than it should be. - /// - /// #### Issue on Windows - /// - /// For compatibility with DOS (sic!), Windows magically transfers the created_at-info - /// of a file that is deleted (or renamed) to its successor, - /// when the recreation happens within some seconds [[1]](#ref-1). - /// - /// [1] [https://superuser.com/questions/966490/windows-7-what-is-date-created-file-property-referring-to](https://superuser.com/questions/966490/windows-7-what-is-date-created-file-property-referring-to). - /// - /// If the file property were used by `flexi_logger`, - /// the rCURRENT file would always appear to be as old as the - /// first one that ever was created - rotation by time would completely fail. - /// - /// #### Issue on Linux - /// - /// `std::fs::metadata.created()` returns `Err`, because linux does not maintain a - /// created-at-timestamp. - /// - Age(Age), - /// Rotate the file when it has either become older than the specified age, or when it has - /// exceeded the specified size in bytes. - /// - /// See documentation for Age and Size. 
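/// A typical configuration sketch, assuming the `Logger::rotate` builder method and
/// the `Age`, `Naming`, and `Cleanup` types defined in this module: rotate daily or
/// at roughly 10 MB, whichever comes first, and keep the last seven rotated files.
///
/// ```rust,ignore
/// Logger::with_str("info")
///     .log_to_file()
///     .rotate(
///         Criterion::AgeOrSize(Age::Day, 10_000_000),
///         Naming::Timestamps,
///         Cleanup::KeepLogFiles(7),
///     )
///     .start()?;
/// ```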
- AgeOrSize(Age, u64), -} - -/// The age after which a log file rotation will be triggered, -/// when [`Criterion::Age`](enum.Criterion.html#variant.Age) is chosen. -#[derive(Copy, Clone, Debug)] -pub enum Age { - /// Rotate the log file when the local clock has started a new day since the - /// current file had been created. - Day, - /// Rotate the log file when the local clock has started a new hour since the - /// current file had been created. - Hour, - /// Rotate the log file when the local clock has started a new minute since the - /// current file had been created. - Minute, - /// Rotate the log file when the local clock has started a new second since the - /// current file had been created. - Second, -} - -/// The naming convention for rotated log files. -/// -/// With file rotation, the logs are written to a file with infix `_rCURRENT`. -/// When rotation happens, the CURRENT log file will be renamed to a file with -/// another infix of the form `"_r..."`. `Naming` defines which other infix will be used. -/// -/// Used in [`Logger::rotate`](struct.Logger.html#method.rotate). -#[derive(Copy, Clone, Debug)] -pub enum Naming { - /// File rotation rotates to files with a timestamp-infix, like `"r2020-01-27_14-41-08"`. - Timestamps, - /// File rotation rotates to files with a number-infix. - Numbers, -} -/// Defines the strategy for handling older log files. -/// -/// Is used in [`Logger::rotate`](struct.Logger.html#method.rotate). -/// -/// Note that if you use a strategy other than `Cleanup::Never`, then the cleanup work is -/// by default done in an extra thread, to minimize the impact on the program. -/// See -/// [`Logger::cleanup_in_background_thread`](struct.Logger.html#method.cleanup_in_background_thread) -/// if you want to control whether this extra thread is created and used. -#[allow(deprecated)] -#[derive(Copy, Clone, Debug)] -pub enum Cleanup { - /// Older log files are not touched - they remain for ever. - Never, - /// The specified number of rotated log files are kept. - /// Older files are deleted, if necessary. - KeepLogFiles(usize), - /// The specified number of rotated log files are compressed and kept. - /// Older files are deleted, if necessary. - /// - /// This option is only available with feature `compress`. - #[cfg(feature = "compress")] - KeepCompressedFiles(usize), - /// Outdated - #[cfg(feature = "compress")] - #[deprecated(since = "0.16.0", note = "use KeepCompressedFiles instead")] - KeepZipFiles(usize), - /// Allows keeping some files as text files and some as compressed files. - /// - /// ## Example - /// - /// `KeepLogAndCompressedFiles(5,30)` ensures that the youngest five log files are - /// kept as text files, the next 30 are kept as compressed files with additional suffix `.gz`, - /// and older files are removed. - /// - /// This option is only available with feature `compress`. - #[cfg(feature = "compress")] - KeepLogAndCompressedFiles(usize, usize), - /// Outdated - #[deprecated(since = "0.16.0", note = "use KeepLogAndCompressedFiles instead")] - #[cfg(feature = "compress")] - KeepLogAndZipFiles(usize, usize), -} - -impl Cleanup { - // Returns true if some cleanup is to be done. 
- #[must_use] - #[allow(clippy::match_like_matches_macro)] - pub(crate) fn do_cleanup(&self) -> bool { - // !matches!(self, Self::Never) would be nicer, but is not possible with 1.37 - match self { - Self::Never => false, - _ => true, - } - } -} diff --git a/agent/support/rust/flexi_logger/src/primary_writer.rs b/agent/support/rust/flexi_logger/src/primary_writer.rs deleted file mode 100644 index aaa486d06..000000000 --- a/agent/support/rust/flexi_logger/src/primary_writer.rs +++ /dev/null @@ -1,251 +0,0 @@ -use log::Record; -use std::cell::RefCell; -use std::io::Write; - -use crate::deferred_now::DeferredNow; -use crate::logger::Duplicate; -use crate::writers::LogWriter; -use crate::FormatFunction; - -// Writes either to stdout, or to stderr, -// or to a file (with optional duplication to stderr), -// or to nowhere (with optional "duplication" to stderr). -#[allow(clippy::large_enum_variant)] -pub(crate) enum PrimaryWriter { - StdOut(StdOutWriter), - StdErr(StdErrWriter), - Multi(MultiWriter), -} -impl PrimaryWriter { - pub fn multi( - duplicate_stderr: Duplicate, - duplicate_stdout: Duplicate, - format_for_stderr: FormatFunction, - format_for_stdout: FormatFunction, - writers: Vec>, - ) -> Self { - Self::Multi(MultiWriter { - duplicate_stderr, - duplicate_stdout, - format_for_stderr, - format_for_stdout, - writers, - }) - } - pub fn stderr(format: FormatFunction) -> Self { - Self::StdErr(StdErrWriter::new(format)) - } - - pub fn stdout(format: FormatFunction) -> Self { - Self::StdOut(StdOutWriter::new(format)) - } - - pub fn black_hole( - duplicate_err: Duplicate, - duplicate_out: Duplicate, - format_for_stderr: FormatFunction, - format_for_stdout: FormatFunction, - ) -> Self { - Self::multi( - duplicate_err, - duplicate_out, - format_for_stderr, - format_for_stdout, - vec![], - ) - } - - // Write out a log line. - pub fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()> { - match *self { - Self::StdErr(ref w) => w.write(now, record), - Self::StdOut(ref w) => w.write(now, record), - Self::Multi(ref w) => w.write(now, record), - } - } - - // Flush any buffered records. - pub fn flush(&self) -> std::io::Result<()> { - match *self { - Self::StdErr(ref w) => w.flush(), - Self::StdOut(ref w) => w.flush(), - Self::Multi(ref w) => w.flush(), - } - } - - pub fn validate_logs(&self, expected: &[(&'static str, &'static str, &'static str)]) { - if let Self::Multi(ref w) = *self { - w.validate_logs(expected); - } - } -} - -// `StdErrWriter` writes logs to stderr. -pub(crate) struct StdErrWriter { - format: FormatFunction, -} - -impl StdErrWriter { - fn new(format: FormatFunction) -> Self { - Self { format } - } - #[inline] - fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()> { - write_buffered(self.format, now, record, &mut std::io::stderr()) - } - - #[inline] - fn flush(&self) -> std::io::Result<()> { - std::io::stderr().flush() - } -} - -// `StdOutWriter` writes logs to stdout. -pub(crate) struct StdOutWriter { - format: FormatFunction, -} - -impl StdOutWriter { - fn new(format: FormatFunction) -> Self { - Self { format } - } - #[inline] - fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()> { - write_buffered(self.format, now, record, &mut std::io::stdout()) - } - - #[inline] - fn flush(&self) -> std::io::Result<()> { - std::io::stdout().flush() - } -} - -// The `MultiWriter` writes logs to stderr or to a set of `Writer`s, and in the latter case -// can duplicate messages to stderr. 
-pub(crate) struct MultiWriter { - duplicate_stderr: Duplicate, - duplicate_stdout: Duplicate, - format_for_stderr: FormatFunction, - format_for_stdout: FormatFunction, - writers: Vec>, -} - -impl LogWriter for MultiWriter { - fn validate_logs(&self, expected: &[(&'static str, &'static str, &'static str)]) { - for writer in &self.writers { - (*writer).validate_logs(expected); - } - } - - fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()> { - if match self.duplicate_stderr { - Duplicate::Error => record.level() == log::Level::Error, - Duplicate::Warn => record.level() <= log::Level::Warn, - Duplicate::Info => record.level() <= log::Level::Info, - Duplicate::Debug => record.level() <= log::Level::Debug, - Duplicate::Trace | Duplicate::All => true, - Duplicate::None => false, - } { - write_buffered(self.format_for_stderr, now, record, &mut std::io::stderr())?; - } - - if match self.duplicate_stdout { - Duplicate::Error => record.level() == log::Level::Error, - Duplicate::Warn => record.level() <= log::Level::Warn, - Duplicate::Info => record.level() <= log::Level::Info, - Duplicate::Debug => record.level() <= log::Level::Debug, - Duplicate::Trace | Duplicate::All => true, - Duplicate::None => false, - } { - write_buffered(self.format_for_stdout, now, record, &mut std::io::stdout())?; - } - - for writer in &self.writers { - writer.write(now, record)?; - } - Ok(()) - } - - /// Provides the maximum log level that is to be written. - fn max_log_level(&self) -> log::LevelFilter { - self.writers - .iter() - .map(|w| w.max_log_level()) - .max() - .unwrap() - } - - fn flush(&self) -> std::io::Result<()> { - for writer in &self.writers { - writer.flush()?; - } - std::io::stderr().flush() - } - fn shutdown(&self) { - for writer in &self.writers { - writer.shutdown(); - } - } -} - -// Use a thread-local buffer for writing to stderr or stdout -fn write_buffered( - format_function: FormatFunction, - now: &mut DeferredNow, - record: &Record, - w: &mut dyn Write, -) -> Result<(), std::io::Error> { - let mut result: Result<(), std::io::Error> = Ok(()); - - buffer_with(|tl_buf| match tl_buf.try_borrow_mut() { - Ok(mut buffer) => { - (format_function)(&mut *buffer, now, record) - .unwrap_or_else(|e| write_err(ERR_FORMATTING, &e)); - buffer - .write_all(b"\n") - .unwrap_or_else(|e| write_err(ERR_FORMATTING, &e)); - - result = w.write_all(&*buffer).map_err(|e| { - write_err(ERR_WRITING, &e); - e - }); - - buffer.clear(); - } - Err(_e) => { - // We arrive here in the rare cases of recursive logging - // (e.g. log calls in Debug or Display implementations) - // we print the inner calls, in chronological order, before finally the - // outer most message is printed - let mut tmp_buf = Vec::::with_capacity(200); - (format_function)(&mut tmp_buf, now, record) - .unwrap_or_else(|e| write_err(ERR_FORMATTING, &e)); - tmp_buf - .write_all(b"\n") - .unwrap_or_else(|e| write_err(ERR_FORMATTING, &e)); - - result = w.write_all(&tmp_buf).map_err(|e| { - write_err(ERR_WRITING, &e); - e - }); - } - }); - result -} - -pub(crate) fn buffer_with(f: F) -where - F: FnOnce(&RefCell>), -{ - thread_local! 
{ - static BUFFER: RefCell> = RefCell::new(Vec::with_capacity(200)); - } - BUFFER.with(f); -} - -const ERR_FORMATTING: &str = "formatting failed with "; -const ERR_WRITING: &str = "writing failed with "; - -fn write_err(msg: &str, err: &std::io::Error) { - eprintln!("[flexi_logger] {} with {}", msg, err); -} diff --git a/agent/support/rust/flexi_logger/src/reconfiguration_handle.rs b/agent/support/rust/flexi_logger/src/reconfiguration_handle.rs deleted file mode 100644 index d7454a5de..000000000 --- a/agent/support/rust/flexi_logger/src/reconfiguration_handle.rs +++ /dev/null @@ -1,157 +0,0 @@ -use crate::log_specification::LogSpecification; -use crate::primary_writer::PrimaryWriter; -use crate::writers::LogWriter; -use std::collections::HashMap; -use std::sync::{Arc, RwLock}; - -/// Allows reconfiguring the logger programmatically. -/// -/// # Example -/// -/// Obtain the `ReconfigurationHandle` (using `.start()`): -/// ```rust -/// # use flexi_logger::{Logger, LogSpecBuilder}; -/// let mut log_handle = Logger::with_str("info") -/// // ... your logger configuration goes here, as usual -/// .start() -/// .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); -/// -/// // ... -/// ``` -/// -/// You can permanently exchange the log specification programmatically, anywhere in your code: -/// -/// ```rust -/// # use flexi_logger::{Logger, LogSpecBuilder}; -/// # let mut log_handle = Logger::with_str("info") -/// # .start() -/// # .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); -/// // ... -/// log_handle.parse_new_spec("warn"); -/// // ... -/// ``` -/// -/// However, when debugging, you often want to modify the log spec only temporarily, for -/// one or few method calls only; this is easier done with the following method, because -/// it allows switching back to the previous spec: -/// -/// ```rust -/// # use flexi_logger::{Logger, LogSpecBuilder}; -/// # let mut log_handle = Logger::with_str("info") -/// # .start() -/// # .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); -/// log_handle.parse_and_push_temp_spec("trace"); -/// // ... -/// // critical calls -/// // ... -/// -/// log_handle.pop_temp_spec(); -/// // Continue with the log spec you had before. -/// // ... -/// ``` -#[derive(Clone)] -pub struct ReconfigurationHandle { - spec: Arc>, - spec_stack: Vec, - primary_writer: Arc, - other_writers: Arc>>, -} -impl ReconfigurationHandle { - pub(crate) fn new( - spec: Arc>, - primary_writer: Arc, - other_writers: Arc>>, - ) -> Self { - Self { - spec, - spec_stack: Vec::default(), - primary_writer, - other_writers, - } - } - - #[cfg(feature = "specfile_without_notification")] - pub(crate) fn current_spec(&self) -> Arc> { - Arc::clone(&self.spec) - } - - // - pub(crate) fn reconfigure(&self, mut max_level: log::LevelFilter) { - for w in self.other_writers.as_ref().values() { - max_level = std::cmp::max(max_level, w.max_log_level()); - } - log::set_max_level(max_level); - } - - /// Replaces the active `LogSpecification`. - pub fn set_new_spec(&mut self, new_spec: LogSpecification) { - let max_level = new_spec.max_level(); - self.spec.write().unwrap(/* catch and expose error? */).update_from(new_spec); - self.reconfigure(max_level); - } - - /// Tries to replace the active `LogSpecification` with the result from parsing the given String. 
- pub fn parse_new_spec(&mut self, spec: &str) { - self.set_new_spec(LogSpecification::parse(spec).unwrap_or_else(|e| { - eprintln!( - "[flexi_logger] ReconfigurationHandle::parse_new_spec(): failed with {}", - e - ); - LogSpecification::off() - })) - } - - /// Replaces the active `LogSpecification` and pushes the previous one to a Stack. - pub fn push_temp_spec(&mut self, new_spec: LogSpecification) { - self.spec_stack - .push(self.spec.read().unwrap(/* catch and expose error? */).clone()); - self.set_new_spec(new_spec); - } - - /// Tries to replace the active `LogSpecification` with the result from parsing the given String - /// and pushes the previous one to a Stack. - pub fn parse_and_push_temp_spec(&mut self, new_spec: &str) { - self.spec_stack - .push(self.spec.read().unwrap(/* catch and expose error? */).clone()); - self.set_new_spec(LogSpecification::parse(new_spec).unwrap_or_else(|e| { - eprintln!( - "[flexi_logger] ReconfigurationHandle::parse_new_spec(): failed with {}, \ - falling back to empty log spec", - e - ); - LogSpecification::off() - })); - } - - /// Reverts to the previous `LogSpecification`, if any. - pub fn pop_temp_spec(&mut self) { - if let Some(previous_spec) = self.spec_stack.pop() { - self.set_new_spec(previous_spec); - } - } - - /// Shutdown all participating writers. - /// - /// This method is supposed to be called at the very end of your program, in case you use - /// your own writers, or if you want to securely shutdown the cleanup-thread of the - /// `FileLogWriter`. If you use a [`Cleanup`](enum.Cleanup.html) strategy with compressing, - /// and your process terminates - /// without correctly shutting down the cleanup-thread, then you might stop the cleanup-thread - /// while it is compressing a log file, which can leave unexpected files in the filesystem. - /// - /// See also [`LogWriter::shutdown`](writers/trait.LogWriter.html#method.shutdown). - pub fn shutdown(&self) { - if let PrimaryWriter::Multi(writer) = &*self.primary_writer { - writer.shutdown(); - } - for writer in self.other_writers.values() { - writer.shutdown(); - } - } - - // Allows checking the logs written so far to the writer - #[doc(hidden)] - pub fn validate_logs(&self, expected: &[(&'static str, &'static str, &'static str)]) { - self.primary_writer.validate_logs(expected) - } -} diff --git a/agent/support/rust/flexi_logger/src/writers.rs b/agent/support/rust/flexi_logger/src/writers.rs deleted file mode 100644 index 6bf7e04ae..000000000 --- a/agent/support/rust/flexi_logger/src/writers.rs +++ /dev/null @@ -1,105 +0,0 @@ -//! Contains a trait ([`LogWriter`](trait.LogWriter.html)) for extending `flexi_logger` -//! with additional log writers, -//! and two concrete implementations -//! for writing to files -//! ([`FileLogWriter`](struct.FileLogWriter.html)) -//! or to the syslog -//! ([`SyslogWriter`](struct.SyslogWriter.html)). -//! You can also use your own implementations of [`LogWriter`](trait.LogWriter.html). -//! -//! Such log writers can be used in two ways: -//! -//! * With [`Logger::log_target(...)`](../struct.Logger.html#method.log_target) -//! you can influence to which output stream normal log messages will be written, -//! i.e. from log macro calls without explicit target specification. -//! -//! See [`LogTarget`](../enum.LogTarget.html) for the available options. -//! -//! These log calls will only be written if they match the current -//! [`log specification`](../struct.LogSpecification.html). -//! -//! 
* [`Logger::add_writer()`](../struct.Logger.html#method.add_writer) -//! can be used to register an additional log writer under a target name. -//! The target name can then be used in calls to the -//! [log macro](https://docs.rs/log/latest/log/macro.log.html) -//! for directing log messages to the desired writers. -//! -//! A log call with a target value that has the form `{Name1,Name2,...}`, i.e., -//! a comma-separated list of target names, within braces, is not sent to the default logger, -//! but to the loggers specified explicitly in the list. -//! In such a list you can again specify the default logger with the target name `_Default`. -//! -//! These log calls will not be affected by the value of `flexi_logger`'s log specification; -//! they will always be written, as you might want it for alerts or auditing. -//! -//! In the following example we define an alert writer, and a macro to facilitate using it -//! (and avoid using the explicit target specification in the macro call), and -//! show some example calls. -//! -//! ```rust -//! use log::*; -//! -//! use flexi_logger::Logger; -//! use flexi_logger::writers::FileLogWriter; -//! -//! // Configure a FileLogWriter for alert messages -//! pub fn alert_logger() -> Box { -//! Box::new(FileLogWriter::builder() -//! .discriminant("Alert") -//! .suffix("alerts") -//! .print_message() -//! .try_build() -//! .unwrap()) -//! } -//! -//! // Define a macro for writing messages to the alert log and to the normal log -//! #[macro_use] -//! mod macros { -//! #[macro_export] -//! macro_rules! alert_error { -//! ($($arg:tt)*) => ( -//! error!(target: "{Alert,_Default}", $($arg)*); -//! ) -//! } -//! } -//! -//! fn main() { -//! Logger::with_env_or_str("info") -//! .print_message() -//! .log_to_file() -//! .add_writer("Alert", alert_logger()) -//! .start() -//! .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); -//! -//! -//! // Explicitly send logs to different loggers -//! error!(target : "{Alert}", "This is only an alert"); -//! error!(target : "{Alert,_Default}", "This is an alert and log message"); -//! -//! // Nicer: use the explicit macro -//! alert_error!("This is another alert and log message"); -//! -//! // Standard log macros write only to the normal log -//! error!("This is a normal error message"); -//! warn!("This is a warning"); -//! info!("This is an info message"); -//! debug!("This is a debug message - you will not see it"); -//! trace!("This is a trace message - you will not see it"); -//! } -//! -//! ``` -//! 
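//! A custom writer can be as small as the following rough sketch; the method set shown
//! here is inferred from the writers in this crate and is an assumption, not a complete
//! rendering of the `LogWriter` trait:
//!
//! ```rust,ignore
//! use flexi_logger::{writers::LogWriter, DeferredNow};
//! use log::Record;
//!
//! struct StderrEcho;
//!
//! impl LogWriter for StderrEcho {
//!     fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()> {
//!         // echo every record to stderr with its deferred timestamp
//!         eprintln!("[{}] {} -- {}", now.now(), record.level(), record.args());
//!         Ok(())
//!     }
//!     fn flush(&self) -> std::io::Result<()> {
//!         Ok(())
//!     }
//!     fn max_log_level(&self) -> log::LevelFilter {
//!         log::LevelFilter::Trace
//!     }
//! }
//!
//! // registered like any other writer:
//! // .add_writer("Echo", Box::new(StderrEcho))
//! ```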
- -mod file_log_writer; -mod log_writer; - -#[cfg(feature = "syslog_writer")] -mod syslog_writer; - -#[cfg(feature = "syslog_writer")] -pub use self::syslog_writer::{ - LevelToSyslogSeverity, SyslogConnector, SyslogFacility, SyslogSeverity, SyslogWriter, -}; - -pub use self::file_log_writer::{FileLogWriter, FileLogWriterBuilder}; -pub use self::log_writer::LogWriter; diff --git a/agent/support/rust/flexi_logger/src/writers/file_log_writer.rs b/agent/support/rust/flexi_logger/src/writers/file_log_writer.rs deleted file mode 100644 index 667d8a870..000000000 --- a/agent/support/rust/flexi_logger/src/writers/file_log_writer.rs +++ /dev/null @@ -1,502 +0,0 @@ -mod builder; -mod config; -mod state; - -pub use self::builder::FileLogWriterBuilder; - -use self::config::{Config, FilenameConfig, RotationConfig}; -use crate::primary_writer::buffer_with; -use crate::writers::LogWriter; -use crate::{DeferredNow, FormatFunction}; -use log::Record; -use state::State; -use std::io::Write; -use std::path::PathBuf; -use std::sync::Mutex; -/// A configurable `LogWriter` implementation that writes to a file or a sequence of files. -/// -/// See the [module description](index.html) for usage guidance. -#[allow(clippy::module_name_repetitions)] -pub struct FileLogWriter { - format: FormatFunction, - line_ending: &'static [u8], - // the state needs to be mutable; since `Log.log()` requires an unmutable self, - // which translates into a non-mutating `LogWriter::write()`, - // we need internal mutability and thread-safety. - state: Mutex, - max_log_level: log::LevelFilter, - sender: Option, - name: String, -} -impl FileLogWriter { - pub(crate) fn new( - format: FormatFunction, - line_ending: &'static [u8], - state: Mutex, - max_log_level: log::LevelFilter, - sender: Option, - name: String, - ) -> FileLogWriter { - FileLogWriter { - format, - line_ending, - state, - max_log_level, - sender, - name, - } - } - - /// Instantiates a builder for `FileLogWriter`. - #[must_use] - pub fn builder() -> FileLogWriterBuilder { - FileLogWriterBuilder::new() - } - - /// Returns a reference to its configured output format function. 
- #[inline] - pub fn format(&self) -> FormatFunction { - self.format - } - - #[doc(hidden)] - pub fn current_filename(&self) -> PathBuf { - self.state.lock().unwrap().current_filename() - } -} - -impl LogWriter for FileLogWriter { - #[inline] - fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()> { - if record.level().eq(&log::Level::Error) { - match &self.sender { - Some(s) => { - let mut data = std::collections::HashMap::new(); - data.insert("data_type", "1002"); - data.insert("level", "error"); - let timestamp = std::time::SystemTime::now() - .duration_since(std::time::UNIX_EPOCH) - .unwrap_or_default() - .as_secs() - .to_string(); - data.insert("timestamp", timestamp.as_str()); - data.insert( - "source", - record - .module_path() - .unwrap_or_else(|| record.file().unwrap_or_else(|| record.target())), - ); - let msg = record.args().to_string(); - data.insert("msg", msg.as_str()); - data.insert("plugin", self.name.as_str()); - match s.send(&data) { - Ok(_) => {} - Err(e) => println!("Log send failed:{}", e), - }; - } - None => {} - } - } - buffer_with(|tl_buf| match tl_buf.try_borrow_mut() { - Ok(mut buffer) => { - (self.format)(&mut *buffer, now, record).unwrap_or_else(|e| write_err(ERR_1, &e)); - - let mut state_guard = self.state.lock().unwrap(); - let state = &mut *state_guard; - - buffer - .write_all(self.line_ending) - .unwrap_or_else(|e| write_err(ERR_2, &e)); - - state - .write_buffer(&*buffer) - .unwrap_or_else(|e| write_err(ERR_2, &e)); - buffer.clear(); - } - Err(_e) => { - // We arrive here in the rare cases of recursive logging - // (e.g. log calls in Debug or Display implementations) - // we print the inner calls, in chronological order, before finally the - // outer most message is printed - let mut tmp_buf = Vec::::with_capacity(200); - (self.format)(&mut tmp_buf, now, record).unwrap_or_else(|e| write_err(ERR_1, &e)); - - let mut state_guard = self.state.lock().unwrap(); - let state = &mut *state_guard; - - tmp_buf - .write_all(self.line_ending) - .unwrap_or_else(|e| write_err(ERR_2, &e)); - - state - .write_buffer(&tmp_buf) - .unwrap_or_else(|e| write_err(ERR_2, &e)); - } - }); - - Ok(()) - } - - #[inline] - fn flush(&self) -> std::io::Result<()> { - if let Ok(ref mut state) = self.state.lock() { - state.flush() - } else { - Ok(()) - } - } - - #[inline] - fn max_log_level(&self) -> log::LevelFilter { - self.max_log_level - } - - #[doc(hidden)] - fn validate_logs(&self, expected: &[(&'static str, &'static str, &'static str)]) { - if let Ok(ref mut state) = self.state.lock() { - state.validate_logs(expected) - } - } - - fn shutdown(&self) { - // do nothing in case of poison errors - if let Ok(ref mut state) = self.state.lock() { - state.shutdown(); - } - } -} - -const ERR_1: &str = "FileLogWriter: formatting failed with "; -const ERR_2: &str = "FileLogWriter: writing failed with "; - -fn write_err(msg: &str, err: &std::io::Error) { - eprintln!("[flexi_logger] {} with {}", msg, err); -} - -#[cfg(test)] -mod test { - use crate::writers::LogWriter; - use crate::{Cleanup, Criterion, DeferredNow, Naming}; - use chrono::Local; - - use std::ops::Add; - use std::path::{Path, PathBuf}; - const DIRECTORY: &str = r"log_files/rotate"; - const ONE: &str = "ONE"; - const TWO: &str = "TWO"; - const THREE: &str = "THREE"; - const FOUR: &str = "FOUR"; - const FIVE: &str = "FIVE"; - const SIX: &str = "SIX"; - const SEVEN: &str = "SEVEN"; - const EIGHT: &str = "EIGHT"; - const NINE: &str = "NINE"; - - // cargo test --lib -- --nocapture - - #[test] - fn 
test_rotate_no_append_numbers() { - // we use timestamp as discriminant to allow repeated runs - let ts = Local::now() - .format("false-numbers-%Y-%m-%d_%H-%M-%S") - .to_string(); - let naming = Naming::Numbers; - - // ensure we start with -/-/- - assert!(not_exists("00000", &ts)); - assert!(not_exists("00001", &ts)); - assert!(not_exists("CURRENT", &ts)); - - // ensure this produces -/-/ONE - write_loglines(false, naming, &ts, &[ONE]); - assert!(not_exists("00000", &ts)); - assert!(not_exists("00001", &ts)); - assert!(contains("CURRENT", &ts, ONE)); - - // ensure this produces ONE/-/TWO - write_loglines(false, naming, &ts, &[TWO]); - assert!(contains("00000", &ts, ONE)); - assert!(not_exists("00001", &ts)); - assert!(contains("CURRENT", &ts, TWO)); - - // ensure this also produces ONE/-/TWO - remove("CURRENT", &ts); - assert!(not_exists("CURRENT", &ts)); - write_loglines(false, naming, &ts, &[TWO]); - assert!(contains("00000", &ts, ONE)); - assert!(not_exists("00001", &ts)); - assert!(contains("CURRENT", &ts, TWO)); - - // ensure this produces ONE/TWO/THREE - write_loglines(false, naming, &ts, &[THREE]); - assert!(contains("00000", &ts, ONE)); - assert!(contains("00001", &ts, TWO)); - assert!(contains("CURRENT", &ts, THREE)); - } - - #[allow(clippy::cognitive_complexity)] - #[test] - fn test_rotate_with_append_numbers() { - // we use timestamp as discriminant to allow repeated runs - let ts = Local::now() - .format("true-numbers-%Y-%m-%d_%H-%M-%S") - .to_string(); - let naming = Naming::Numbers; - - // ensure we start with -/-/- - assert!(not_exists("00000", &ts)); - assert!(not_exists("00001", &ts)); - assert!(not_exists("CURRENT", &ts)); - - // ensure this produces 12/-/3 - write_loglines(true, naming, &ts, &[ONE, TWO, THREE]); - assert!(contains("00000", &ts, ONE)); - assert!(contains("00000", &ts, TWO)); - assert!(not_exists("00001", &ts)); - assert!(contains("CURRENT", &ts, THREE)); - - // ensure this produces 12/34/56 - write_loglines(true, naming, &ts, &[FOUR, FIVE, SIX]); - assert!(contains("00000", &ts, ONE)); - assert!(contains("00000", &ts, TWO)); - assert!(contains("00001", &ts, THREE)); - assert!(contains("00001", &ts, FOUR)); - assert!(contains("CURRENT", &ts, FIVE)); - assert!(contains("CURRENT", &ts, SIX)); - - // ensure this also produces 12/34/56 - remove("CURRENT", &ts); - remove("00001", &ts); - assert!(not_exists("CURRENT", &ts)); - write_loglines(true, naming, &ts, &[THREE, FOUR, FIVE, SIX]); - assert!(contains("00000", &ts, ONE)); - assert!(contains("00000", &ts, TWO)); - assert!(contains("00001", &ts, THREE)); - assert!(contains("00001", &ts, FOUR)); - assert!(contains("CURRENT", &ts, FIVE)); - assert!(contains("CURRENT", &ts, SIX)); - - // ensure this produces 12/34/56/78/9 - write_loglines(true, naming, &ts, &[SEVEN, EIGHT, NINE]); - assert!(contains("00002", &ts, FIVE)); - assert!(contains("00002", &ts, SIX)); - assert!(contains("00003", &ts, SEVEN)); - assert!(contains("00003", &ts, EIGHT)); - assert!(contains("CURRENT", &ts, NINE)); - } - - #[test] - fn test_rotate_no_append_timestamps() { - // we use timestamp as discriminant to allow repeated runs - let ts = Local::now() - .format("false-timestamps-%Y-%m-%d_%H-%M-%S") - .to_string(); - - let basename = String::from(DIRECTORY).add("/").add( - &Path::new(&std::env::args().next().unwrap()) - .file_stem().unwrap(/*cannot fail*/) - .to_string_lossy().to_string(), - ); - let naming = Naming::Timestamps; - - // ensure we start with -/-/- - assert!(list_rotated_files(&basename, &ts).is_empty()); - 
assert!(not_exists("CURRENT", &ts)); - - // ensure this produces -/-/ONE - write_loglines(false, naming, &ts, &[ONE]); - assert!(list_rotated_files(&basename, &ts).is_empty()); - assert!(contains("CURRENT", &ts, ONE)); - - std::thread::sleep(std::time::Duration::from_secs(2)); - // ensure this produces ONE/-/TWO - write_loglines(false, naming, &ts, &[TWO]); - assert_eq!(list_rotated_files(&basename, &ts).len(), 1); - assert!(contains("CURRENT", &ts, TWO)); - - std::thread::sleep(std::time::Duration::from_secs(2)); - // ensure this produces ONE/TWO/THREE - write_loglines(false, naming, &ts, &[THREE]); - assert_eq!(list_rotated_files(&basename, &ts).len(), 2); - assert!(contains("CURRENT", &ts, THREE)); - } - - #[test] - fn test_rotate_with_append_timestamps() { - // we use timestamp as discriminant to allow repeated runs - let ts = Local::now() - .format("true-timestamps-%Y-%m-%d_%H-%M-%S") - .to_string(); - - let basename = String::from(DIRECTORY).add("/").add( - &Path::new(&std::env::args().next().unwrap()) - .file_stem().unwrap(/*cannot fail*/) - .to_string_lossy().to_string(), - ); - let naming = Naming::Timestamps; - - // ensure we start with -/-/- - assert!(list_rotated_files(&basename, &ts).is_empty()); - assert!(not_exists("CURRENT", &ts)); - - // ensure this produces 12/-/3 - write_loglines(true, naming, &ts, &[ONE, TWO, THREE]); - assert_eq!(list_rotated_files(&basename, &ts).len(), 1); - assert!(contains("CURRENT", &ts, THREE)); - - // // ensure this produces 12/34/56 - write_loglines(true, naming, &ts, &[FOUR, FIVE, SIX]); - assert!(contains("CURRENT", &ts, FIVE)); - assert!(contains("CURRENT", &ts, SIX)); - assert_eq!(list_rotated_files(&basename, &ts).len(), 2); - - // // ensure this produces 12/34/56/78/9 - // write_loglines(true, naming, &ts, &[SEVEN, EIGHT, NINE]); - // assert_eq!(list_rotated_files(&basename, &ts).len(), 4); - // assert!(contains("CURRENT", &ts, NINE)); - } - - #[test] - fn issue_38() { - const NUMBER_OF_FILES: usize = 5; - const NUMBER_OF_PSEUDO_PROCESSES: usize = 11; - const ISSUE_38: &str = "issue_38"; - const LOG_FOLDER: &str = "log_files/issue_38"; - - for _ in 0..NUMBER_OF_PSEUDO_PROCESSES { - let flw = super::FileLogWriter::builder() - .directory(LOG_FOLDER) - .discriminant(ISSUE_38) - .rotate( - Criterion::Size(500), - Naming::Timestamps, - Cleanup::KeepLogFiles(NUMBER_OF_FILES), - ) - .o_append(false) - .try_build() - .unwrap(); - - // write some lines, but not enough to rotate - for i in 0..4 { - flw.write( - &mut DeferredNow::new(), - &log::Record::builder() - .args(format_args!("{}", i)) - .level(log::Level::Error) - .target("myApp") - .file(Some("server.rs")) - .line(Some(144)) - .module_path(Some("server")) - .build(), - ) - .unwrap(); - } - } - - // give the cleanup thread a short moment of time - std::thread::sleep(std::time::Duration::from_millis(50)); - - let fn_pattern = String::with_capacity(180) - .add( - &String::from(LOG_FOLDER).add("/").add( - &Path::new(&std::env::args().next().unwrap()) - .file_stem().unwrap(/*cannot fail*/) - .to_string_lossy().to_string(), - ), - ) - .add("_") - .add(ISSUE_38) - .add("_r[0-9]*") - .add(".log"); - - assert_eq!( - glob::glob(&fn_pattern) - .unwrap() - .filter_map(Result::ok) - .count(), - NUMBER_OF_FILES - ); - } - - fn remove(s: &str, discr: &str) { - std::fs::remove_file(get_hackyfilepath(s, discr)).unwrap(); - } - - fn not_exists(s: &str, discr: &str) -> bool { - !get_hackyfilepath(s, discr).exists() - } - - fn contains(s: &str, discr: &str, text: &str) -> bool { - match 
std::fs::read_to_string(get_hackyfilepath(s, discr)) { - Err(_) => false, - Ok(s) => s.contains(text), - } - } - - fn get_hackyfilepath(infix: &str, discr: &str) -> Box { - let arg0 = std::env::args().next().unwrap(); - let mut s_filename = Path::new(&arg0) - .file_stem() - .unwrap() - .to_string_lossy() - .to_string(); - s_filename += "_"; - s_filename += discr; - s_filename += "_r"; - s_filename += infix; - s_filename += ".log"; - let mut path_buf = PathBuf::from(DIRECTORY); - path_buf.push(s_filename); - path_buf.into_boxed_path() - } - - fn write_loglines(append: bool, naming: Naming, discr: &str, texts: &[&'static str]) { - let flw = get_file_log_writer(append, naming, discr); - for text in texts { - flw.write( - &mut DeferredNow::new(), - &log::Record::builder() - .args(format_args!("{}", text)) - .level(log::Level::Error) - .target("myApp") - .file(Some("server.rs")) - .line(Some(144)) - .module_path(Some("server")) - .build(), - ) - .unwrap(); - } - } - - fn get_file_log_writer( - append: bool, - naming: Naming, - discr: &str, - ) -> crate::writers::FileLogWriter { - super::FileLogWriter::builder() - .directory(DIRECTORY) - .discriminant(discr) - .rotate( - Criterion::Size(if append { 28 } else { 10 }), - naming, - Cleanup::Never, - ) - .o_append(append) - .try_build() - .unwrap() - } - - fn list_rotated_files(basename: &str, discr: &str) -> Vec { - let fn_pattern = String::with_capacity(180) - .add(basename) - .add("_") - .add(discr) - .add("_r2[0-9]*") // Year 3000 problem!!! - .add(".log"); - - glob::glob(&fn_pattern) - .unwrap() - .map(|r| r.unwrap().into_os_string().to_string_lossy().to_string()) - .collect() - } -} diff --git a/agent/support/rust/flexi_logger/src/writers/file_log_writer/builder.rs b/agent/support/rust/flexi_logger/src/writers/file_log_writer/builder.rs deleted file mode 100644 index 8fea05bae..000000000 --- a/agent/support/rust/flexi_logger/src/writers/file_log_writer/builder.rs +++ /dev/null @@ -1,293 +0,0 @@ -use crate::flexi_error::FlexiLoggerError; -use crate::formats::default_format; -use crate::FormatFunction; -use crate::{Cleanup, Criterion, Naming}; -use chrono::Local; -use std::env; -use std::path::{Path, PathBuf}; -use std::sync::Mutex; - -use super::{Config, FileLogWriter, RotationConfig, State}; - -/// Builder for `FileLogWriter`. -#[allow(clippy::module_name_repetitions)] -pub struct FileLogWriterBuilder { - discriminant: Option, - config: Config, - format: FormatFunction, - o_rotation_config: Option, - max_log_level: log::LevelFilter, - cleanup_in_background_thread: bool, - sender: Option, - name: String, -} - -/// Simple methods for influencing the behavior of the `FileLogWriter`. -impl FileLogWriterBuilder { - pub(crate) fn new() -> FileLogWriterBuilder { - FileLogWriterBuilder { - name: String::from("default"), - sender: None, - discriminant: None, - o_rotation_config: None, - config: Config::default(), - format: default_format, - max_log_level: log::LevelFilter::Trace, - cleanup_in_background_thread: true, - } - } - /// Set name. - #[must_use] - pub fn name(mut self, name: String) -> Self { - self.name = name; - self - } - /// Add a grpc sender. - #[must_use] - pub fn sender(mut self, sender: plugin::Sender) -> Self { - self.sender = Some(sender); - self - } - /// Makes the `FileLogWriter` print an info message to stdout - /// when a new file is used for log-output. 
- #[must_use] - pub fn print_message(mut self) -> Self { - self.config.print_message = true; - self - } - - /// Makes the `FileLogWriter` use the provided format function for the log entries, - /// rather than the default ([`formats::default_format`](fn.default_format.html)). - pub fn format(mut self, format: FormatFunction) -> Self { - self.format = format; - self - } - - /// Specifies a folder for the log files. - /// - /// If the specified folder does not exist, the initialization will fail. - /// By default, the log files are created in the folder where the program was started. - pub fn directory>(mut self, directory: P) -> Self { - self.config.filename_config.directory = directory.into(); - self - } - - /// Specifies a suffix for the log files. The default is "log". - pub fn suffix>(mut self, suffix: S) -> Self { - self.config.filename_config.suffix = suffix.into(); - self - } - - /// Makes the logger not include a timestamp into the names of the log files - #[must_use] - pub fn suppress_timestamp(mut self) -> Self { - self.config.filename_config.use_timestamp = false; - self - } - - /// When rotation is used with some `Cleanup` variant, then this option defines - /// if the cleanup activities (finding files, deleting files, evtl compressing files) is done - /// in the current thread (in the current log-call), or whether cleanup is delegated to a - /// background thread. - /// - /// As of `flexi_logger` version `0.14.7`, - /// the cleanup activities are done by default in a background thread. - /// This minimizes the blocking impact to your application caused by IO operations. - /// - /// In earlier versions of `flexi_logger`, or if you call this method with - /// `use_background_thread = false`, - /// the cleanup is done in the thread that is currently causing a file rotation. - #[must_use] - pub fn cleanup_in_background_thread(mut self, use_background_thread: bool) -> Self { - self.cleanup_in_background_thread = use_background_thread; - self - } - - /// Use rotation to prevent indefinite growth of log files. - /// - /// By default, the log file is fixed while your program is running and will grow indefinitely. - /// With this option being used, when the log file reaches the specified criterion, - /// the file will be closed and a new file will be opened. - /// - /// Note that also the filename pattern changes: - /// - /// - by default, no timestamp is added to the filename - /// - the logs are always written to a file with infix `_rCURRENT` - /// - when the rotation criterion is fulfilled, it is closed and renamed to a file - /// with another infix (see `Naming`), - /// and then the logging continues again to the (fresh) file with infix `_rCURRENT`. - /// - /// Example: - /// - /// After some logging with your program `my_prog` and rotation with `Naming::Numbers`, - /// you will find files like - /// - /// ```text - /// my_prog_r00000.log - /// my_prog_r00001.log - /// my_prog_r00002.log - /// my_prog_rCURRENT.log - /// ``` - /// - /// The cleanup parameter allows defining the strategy for dealing with older files. - /// See [Cleanup](enum.Cleanup.html) for details. - #[must_use] - pub fn rotate(mut self, criterion: Criterion, naming: Naming, cleanup: Cleanup) -> Self { - self.o_rotation_config = Some(RotationConfig { - criterion, - naming, - cleanup, - }); - self.config.filename_config.use_timestamp = false; - self - } - - /// Makes the logger append to the given file, if it exists; by default, the file would be - /// truncated. 
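// Illustrative sketch of the rotation setup explained in the `rotate` documentation
// above: size-based rotation with numbered files, producing names like
// <progname>_demo_r00000.log and <progname>_demo_rCURRENT.log. The directory,
// discriminant and limits are invented for the example.
use flexi_logger::writers::FileLogWriter;
use flexi_logger::{Cleanup, Criterion, Naming};

fn build_rotating_writer() -> FileLogWriter {
    FileLogWriter::builder()
        .directory("log_files")          // created by try_build() if it does not exist
        .discriminant("demo")            // added to the log file name
        .rotate(
            Criterion::Size(10_000_000), // rotate after ~10 MB
            Naming::Numbers,             // _r00000, _r00001, ... plus _rCURRENT
            Cleanup::KeepLogFiles(7),    // keep the 7 newest rotated files, delete older ones
        )
        .try_build()
        .unwrap_or_else(|e| panic!("FileLogWriter initialization failed with {}", e))
}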
- #[must_use] - pub fn append(mut self) -> Self { - self.config.append = true; - self - } - - /// The specified String is added to the log file name. - pub fn discriminant>(mut self, discriminant: S) -> Self { - self.discriminant = Some(discriminant.into()); - self - } - - /// The specified String will be used on linux systems to create in the current folder - /// a symbolic link to the current log file. - pub fn create_symlink>(mut self, symlink: P) -> Self { - self.config.o_create_symlink = Some(symlink.into()); - self - } - - /// Use Windows line endings, rather than just `\n`. - #[must_use] - pub fn use_windows_line_ending(mut self) -> Self { - self.config.use_windows_line_ending = true; - self - } - - /// Produces the `FileLogWriter`. - /// - /// # Errors - /// - /// `FlexiLoggerError::Io`. - pub fn try_build(mut self) -> Result { - // make sure the folder exists or create it - let p_directory = Path::new(&self.config.filename_config.directory); - std::fs::create_dir_all(&p_directory)?; - if !std::fs::metadata(&p_directory)?.is_dir() { - return Err(FlexiLoggerError::OutputBadDirectory); - }; - - let arg0 = env::args().next().unwrap_or_else(|| "rs".to_owned()); - self.config.filename_config.file_basename = - Path::new(&arg0).file_stem().unwrap(/*cannot fail*/).to_string_lossy().to_string(); - - if let Some(discriminant) = self.discriminant { - self.config.filename_config.file_basename += &format!("_{}", discriminant); - } - if self.config.filename_config.use_timestamp { - self.config.filename_config.file_basename += - &Local::now().format("_%Y-%m-%d_%H-%M-%S").to_string(); - }; - - Ok(FileLogWriter::new( - self.format, - if self.config.use_windows_line_ending { - b"\r\n" - } else { - b"\n" - }, - Mutex::new(State::try_new( - self.config, - self.o_rotation_config, - self.cleanup_in_background_thread, - )?), - self.max_log_level, - self.sender, - self.name, - )) - } -} - -/// Alternative set of methods to control the behavior of the `FileLogWriterBuilder`. -/// Use these methods when you want to control the settings flexibly, -/// e.g. with commandline arguments via `docopts` or `clap`. -impl FileLogWriterBuilder { - /// With true, makes the `FileLogWriterBuilder` print an info message to stdout, each time - /// when a new file is used for log-output. - #[must_use] - pub fn o_print_message(mut self, print_message: bool) -> Self { - self.config.print_message = print_message; - self - } - - /// Specifies a folder for the log files. - /// - /// If the specified folder does not exist, the initialization will fail. - /// With None, the log files are created in the folder where the program was started. - pub fn o_directory>(mut self, directory: Option

) -> Self { - self.config.filename_config.directory = - directory.map_or_else(|| PathBuf::from("."), Into::into); - self - } - - /// With true, makes the `FileLogWriterBuilder` include a timestamp into the names of the - /// log files. - #[must_use] - pub fn o_timestamp(mut self, use_timestamp: bool) -> Self { - self.config.filename_config.use_timestamp = use_timestamp; - self - } - - /// By default, and with None, the log file will grow indefinitely. - /// If a `rotate_config` is set, when the log file reaches or exceeds the specified size, - /// the file will be closed and a new file will be opened. - /// Also the filename pattern changes: instead of the timestamp, a serial number - /// is included into the filename. - /// - /// The size is given in bytes, e.g. `o_rotate_over_size(Some(1_000))` will rotate - /// files once they reach a size of 1 kB. - /// - /// The cleanup strategy allows delimiting the used space on disk. - #[must_use] - pub fn o_rotate(mut self, rotate_config: Option<(Criterion, Naming, Cleanup)>) -> Self { - if let Some((criterion, naming, cleanup)) = rotate_config { - self.o_rotation_config = Some(RotationConfig { - criterion, - naming, - cleanup, - }); - self.config.filename_config.use_timestamp = false; - } else { - self.o_rotation_config = None; - self.config.filename_config.use_timestamp = true; - } - self - } - - /// If append is set to true, makes the logger append to the given file, if it exists. - /// By default, or with false, the file would be truncated. - #[must_use] - pub fn o_append(mut self, append: bool) -> Self { - self.config.append = append; - self - } - - /// The specified String is added to the log file name. - pub fn o_discriminant>(mut self, discriminant: Option) -> Self { - self.discriminant = discriminant.map(Into::into); - self - } - - /// If a String is specified, it will be used on linux systems to create in the current folder - /// a symbolic link with this name to the current log file. - pub fn o_create_symlink>(mut self, symlink: Option) -> Self { - self.config.o_create_symlink = symlink.map(Into::into); - self - } -} diff --git a/agent/support/rust/flexi_logger/src/writers/file_log_writer/config.rs b/agent/support/rust/flexi_logger/src/writers/file_log_writer/config.rs deleted file mode 100644 index 87a59ba9c..000000000 --- a/agent/support/rust/flexi_logger/src/writers/file_log_writer/config.rs +++ /dev/null @@ -1,45 +0,0 @@ -use crate::{Cleanup, Criterion, Naming}; -use std::path::PathBuf; - -// Describes how rotation should work -pub(crate) struct RotationConfig { - // Defines if rotation should be based on size or date - pub(crate) criterion: Criterion, - // Defines if rotated files should be numbered or get a date-based name - pub(crate) naming: Naming, - // Defines the cleanup strategy - pub(crate) cleanup: Cleanup, -} -#[derive(Clone)] -pub(crate) struct FilenameConfig { - pub(crate) directory: PathBuf, - pub(crate) file_basename: String, - pub(crate) suffix: String, - pub(crate) use_timestamp: bool, -} - -// The immutable configuration of a FileLogWriter. -pub(crate) struct Config { - pub(crate) print_message: bool, - pub(crate) append: bool, - pub(crate) filename_config: FilenameConfig, - pub(crate) o_create_symlink: Option, - pub(crate) use_windows_line_ending: bool, -} -impl Config { - // Factory method; uses the same defaults as Logger. 
- pub fn default() -> Self { - Self { - print_message: false, - filename_config: FilenameConfig { - directory: PathBuf::from("."), - file_basename: String::new(), - suffix: "log".to_string(), - use_timestamp: true, - }, - append: false, - o_create_symlink: None, - use_windows_line_ending: false, - } - } -} diff --git a/agent/support/rust/flexi_logger/src/writers/file_log_writer/state.rs b/agent/support/rust/flexi_logger/src/writers/file_log_writer/state.rs deleted file mode 100644 index 62cdc0288..000000000 --- a/agent/support/rust/flexi_logger/src/writers/file_log_writer/state.rs +++ /dev/null @@ -1,713 +0,0 @@ -use crate::{Age, Cleanup, Criterion, FlexiLoggerError, Naming}; -use chrono::{DateTime, Datelike, Local, Timelike}; -use std::cmp::max; -use std::fs::{File, OpenOptions}; -use std::io::{BufRead, BufReader, Write}; -use std::ops::Add; -use std::path::{Path, PathBuf}; - -use super::{Config, FilenameConfig, RotationConfig}; - -const CURRENT_INFIX: &str = "_rCURRENT"; -fn number_infix(idx: u32) -> String { - format!("_r{:0>5}", idx) -} - -// Describes the latest existing numbered log file. -#[derive(Clone, Copy)] -enum IdxState { - // We rotate to numbered files, and no rotated numbered file exists yet - Start, - // highest index of rotated numbered files - Idx(u32), -} - -// Created_at is needed both for -// is_rotation_necessary() -> if Criterion::Age -> NamingState::CreatedAt -// and rotate_to_date() -> if Naming::Timestamps -> RollState::Age -enum NamingState { - CreatedAt, - IdxState(IdxState), -} - -enum RollState { - Size(u64, u64), // max_size, current_size - Age(Age), - AgeOrSize(Age, u64, u64), // age, max_size, current_size -} - -enum MessageToCleanupThread { - Act, - Die, -} -struct CleanupThreadHandle { - sender: std::sync::mpsc::Sender, - join_handle: std::thread::JoinHandle<()>, -} - -struct RotationState { - naming_state: NamingState, - roll_state: RollState, - created_at: DateTime, - cleanup: Cleanup, - o_cleanup_thread_handle: Option, -} -impl RotationState { - fn size_rotation_necessary(max_size: u64, current_size: u64) -> bool { - current_size > max_size - } - - fn age_rotation_necessary(&self, age: Age) -> bool { - let now = Local::now(); - match age { - Age::Day => self.created_at.num_days_from_ce() != now.num_days_from_ce(), - Age::Hour => { - self.created_at.num_days_from_ce() != now.num_days_from_ce() - || self.created_at.hour() != now.hour() - } - Age::Minute => { - self.created_at.num_days_from_ce() != now.num_days_from_ce() - || self.created_at.hour() != now.hour() - || self.created_at.minute() != now.minute() - } - Age::Second => { - self.created_at.num_days_from_ce() != now.num_days_from_ce() - || self.created_at.hour() != now.hour() - || self.created_at.minute() != now.minute() - || self.created_at.second() != now.second() - } - } - } - - fn rotation_necessary(&self) -> bool { - match &self.roll_state { - RollState::Size(max_size, current_size) => { - Self::size_rotation_necessary(*max_size, *current_size) - } - RollState::Age(age) => self.age_rotation_necessary(*age), - RollState::AgeOrSize(age, max_size, current_size) => { - Self::size_rotation_necessary(*max_size, *current_size) - || self.age_rotation_necessary(*age) - } - } - } - - fn shutdown(&mut self) { - // this sets o_cleanup_thread_handle in self.state.o_rotation_state to None: - let o_cleanup_thread_handle = self.o_cleanup_thread_handle.take(); - if let Some(cleanup_thread_handle) = o_cleanup_thread_handle { - cleanup_thread_handle - .sender - .send(MessageToCleanupThread::Die) - .ok(); - 
cleanup_thread_handle.join_handle.join().ok(); - } - } -} - -// Could not implement `std::convert::From` because other parameters are required. -fn try_roll_state_from_criterion( - criterion: Criterion, - config: &Config, - p_path: &Path, -) -> Result { - Ok(match criterion { - Criterion::Age(age) => RollState::Age(age), - Criterion::Size(size) => { - let written_bytes = if config.append { - std::fs::metadata(p_path)?.len() - } else { - 0 - }; - RollState::Size(size, written_bytes) - } // max_size, current_size - Criterion::AgeOrSize(age, size) => { - let written_bytes = if config.append { - std::fs::metadata(&p_path)?.len() - } else { - 0 - }; - RollState::AgeOrSize(age, size, written_bytes) - } // age, max_size, current_size - }) -} - -enum Inner { - Initial(Option, bool), - Active(Option, File), -} - -// The mutable state of a FileLogWriter. -pub(crate) struct State { - config: Config, - inner: Inner, -} -impl State { - pub fn try_new( - config: Config, - o_rotation_config: Option, - cleanup_in_background_thread: bool, - ) -> Result { - let mut state = Self { - inner: Inner::Initial(o_rotation_config, cleanup_in_background_thread), - config, - }; - if false { - // early initialize - state.initialize()?; - } - Ok(state) - } - - fn initialize(&mut self) -> Result<(), std::io::Error> { - if let Inner::Initial(o_rotation_config, cleanup_in_background_thread) = &self.inner { - match o_rotation_config { - None => { - let (log_file, _created_at, _p_path) = open_log_file(&self.config, false)?; - self.inner = Inner::Active(None, log_file); - } - Some(rotate_config) => { - // first rotate, then open the log file - let naming_state = match rotate_config.naming { - Naming::Timestamps => { - if !self.config.append { - rotate_output_file_to_date( - &get_creation_date(&get_filepath( - Some(CURRENT_INFIX), - &self.config.filename_config, - )), - &self.config, - )?; - } - NamingState::CreatedAt - } - Naming::Numbers => { - let mut rotation_state = - get_highest_rotate_idx(&self.config.filename_config); - if !self.config.append { - rotation_state = - rotate_output_file_to_idx(rotation_state, &self.config)?; - } - NamingState::IdxState(rotation_state) - } - }; - let (log_file, created_at, p_path) = open_log_file(&self.config, true)?; - - let roll_state = try_roll_state_from_criterion( - rotate_config.criterion, - &self.config, - &p_path, - )?; - let mut o_cleanup_thread_handle = None; - if rotate_config.cleanup.do_cleanup() { - remove_or_compress_too_old_logfiles( - &None, - &rotate_config.cleanup, - &self.config.filename_config, - )?; - if *cleanup_in_background_thread { - let cleanup = rotate_config.cleanup; - let filename_config = self.config.filename_config.clone(); - let (sender, receiver) = std::sync::mpsc::channel(); - let join_handle = std::thread::Builder::new() - .name("flexi_logger-cleanup".to_string()) - .stack_size(512 * 1024) - .spawn(move || loop { - match receiver.recv() { - Ok(MessageToCleanupThread::Act) => { - remove_or_compress_too_old_logfiles_impl( - &cleanup, - &filename_config, - ) - .ok(); - } - Ok(MessageToCleanupThread::Die) | Err(_) => { - return; - } - } - })?; - // .map_err(FlexiLoggerError::OutputCleanupThread)?; - o_cleanup_thread_handle = Some(CleanupThreadHandle { - sender, - join_handle, - }); - } - } - self.inner = Inner::Active( - Some(RotationState { - naming_state, - roll_state, - created_at, - cleanup: rotate_config.cleanup, - o_cleanup_thread_handle, - }), - log_file, - ); - } - } - } - Ok(()) - } - - pub fn flush(&mut self) -> std::io::Result<()> { - if let 
Inner::Active(_, ref mut file) = self.inner { - file.flush() - } else { - Ok(()) - } - } - - // With rotation, the logger always writes into a file with infix `_rCURRENT`. - // On overflow, an existing `_rCURRENT` file is renamed to the next numbered file, - // before writing into `_rCURRENT` goes on. - #[inline] - fn mount_next_linewriter_if_necessary(&mut self) -> Result<(), FlexiLoggerError> { - if let Inner::Active(Some(ref mut rotation_state), ref mut file) = self.inner { - if rotation_state.rotation_necessary() { - match rotation_state.naming_state { - NamingState::CreatedAt => { - rotate_output_file_to_date(&rotation_state.created_at, &self.config)?; - } - NamingState::IdxState(ref mut idx_state) => { - *idx_state = rotate_output_file_to_idx(*idx_state, &self.config)?; - } - } - - let (line_writer, created_at, _) = open_log_file(&self.config, true)?; - *file = line_writer; - rotation_state.created_at = created_at; - if let RollState::Size(_, ref mut current_size) - | RollState::AgeOrSize(_, _, ref mut current_size) = rotation_state.roll_state - { - *current_size = 0; - } - - remove_or_compress_too_old_logfiles( - &rotation_state.o_cleanup_thread_handle, - &rotation_state.cleanup, - &self.config.filename_config, - )?; - } - } - - Ok(()) - } - - pub fn write_buffer(&mut self, buf: &[u8]) -> std::io::Result<()> { - if let Inner::Initial(_, _) = self.inner { - self.initialize()?; - } - // rotate if necessary - self.mount_next_linewriter_if_necessary() - .unwrap_or_else(|e| { - eprintln!("[flexi_logger] opening file failed with {}", e); - }); - - if let Inner::Active(ref mut o_rotation_state, ref mut log_file) = self.inner { - log_file.write_all(buf)?; - if let Some(ref mut rotation_state) = o_rotation_state { - if let RollState::Size(_, ref mut current_size) - | RollState::AgeOrSize(_, _, ref mut current_size) = rotation_state.roll_state - { - *current_size += buf.len() as u64; - } - }; - } - Ok(()) - } - - pub fn current_filename(&self) -> PathBuf { - let o_infix = match &self.inner { - Inner::Initial(o_rotation_config, _) => { - if o_rotation_config.is_some() { - Some(CURRENT_INFIX) - } else { - None - } - } - Inner::Active(o_rotation_state, _) => { - if o_rotation_state.is_some() { - Some(CURRENT_INFIX) - } else { - None - } - } - }; - get_filepath(o_infix, &self.config.filename_config) - } - - pub fn validate_logs(&mut self, expected: &[(&'static str, &'static str, &'static str)]) { - if let Inner::Initial(_, _) = self.inner { - self.initialize().unwrap(); - } - if let Inner::Active(ref mut o_rotation_state, _) = self.inner { - let path = get_filepath( - o_rotation_state - .as_ref() - .map(|_| super::state::CURRENT_INFIX), - &self.config.filename_config, - ); - let f = File::open(path).unwrap(); - let mut reader = BufReader::new(f); - let mut buf = String::new(); - for tuple in expected { - buf.clear(); - reader.read_line(&mut buf).unwrap(); - assert!(buf.contains(&tuple.0), "Did not find tuple.0 = {}", tuple.0); - assert!(buf.contains(&tuple.1), "Did not find tuple.1 = {}", tuple.1); - assert!(buf.contains(&tuple.2), "Did not find tuple.2 = {}", tuple.2); - } - buf.clear(); - reader.read_line(&mut buf).unwrap(); - assert!( - buf.is_empty(), - "Found more log lines than expected: {} ", - buf - ); - } - } - - pub fn shutdown(&mut self) { - if let Inner::Active(ref mut o_rotation_state, _) = self.inner { - if let Some(ref mut rotation_state) = o_rotation_state { - rotation_state.shutdown(); - } - } - } -} - -fn get_filepath(o_infix: Option<&str>, config: &FilenameConfig) -> PathBuf { 
- let mut s_filename = String::with_capacity( - config.file_basename.len() + o_infix.map_or(0, str::len) + 1 + config.suffix.len(), - ) + &config.file_basename; - if let Some(infix) = o_infix { - s_filename += infix; - }; - s_filename += "."; - s_filename += &config.suffix; - let mut p_path = config.directory.to_path_buf(); - p_path.push(s_filename); - p_path -} - -fn open_log_file( - config: &Config, - with_rotation: bool, -) -> Result<(File, DateTime, PathBuf), std::io::Error> { - let o_infix = if with_rotation { - Some(CURRENT_INFIX) - } else { - None - }; - let p_path = get_filepath(o_infix, &config.filename_config); - if config.print_message { - println!("Log is written to {}", &p_path.display()); - } - if let Some(ref link) = config.o_create_symlink { - self::platform::create_symlink_if_possible(link, &p_path); - } - - let log_file = OpenOptions::new() - .write(true) - .create(true) - .append(config.append) - .truncate(!config.append) - .open(&p_path)?; - - Ok((log_file, get_creation_date(&p_path), p_path)) -} - -fn get_highest_rotate_idx(filename_config: &FilenameConfig) -> IdxState { - match list_of_log_and_compressed_files(filename_config) { - Err(e) => { - eprintln!("[flexi_logger] listing rotated log files failed with {}", e); - IdxState::Start // hope and pray ...?? - } - Ok(files) => { - let mut highest_idx = IdxState::Start; - for file in files { - let filename = file.file_stem().unwrap(/*ok*/).to_string_lossy(); - let mut it = filename.rsplit("_r"); - match it.next() { - Some(next) => { - let idx: u32 = next.parse().unwrap_or(0); - highest_idx = match highest_idx { - IdxState::Start => IdxState::Idx(idx), - IdxState::Idx(prev) => IdxState::Idx(max(prev, idx)), - }; - } - None => continue, // ignore unexpected files - } - } - highest_idx - } - } -} - -#[allow(clippy::type_complexity)] -fn list_of_log_and_compressed_files( - filename_config: &FilenameConfig, -) -> std::result::Result< - std::iter::Chain< - std::iter::Chain< - std::vec::IntoIter, - std::vec::IntoIter, - >, - std::vec::IntoIter, - >, - std::io::Error, -> { - let fn_pattern = String::with_capacity(180) - .add(&filename_config.file_basename) - .add("_r[0-9]*") - .add("."); - - let mut log_pattern = filename_config.directory.clone(); - log_pattern.push(fn_pattern.clone().add(&filename_config.suffix)); - let log_pattern = log_pattern.as_os_str().to_string_lossy(); - - let mut zip_pattern = filename_config.directory.clone(); - zip_pattern.push(fn_pattern.clone().add("zip")); - let zip_pattern = zip_pattern.as_os_str().to_string_lossy(); - - let mut gz_pattern = filename_config.directory.clone(); - gz_pattern.push(fn_pattern.add("gz")); - let gz_pattern = gz_pattern.as_os_str().to_string_lossy(); - - Ok(list_of_files(&log_pattern) - .chain(list_of_files(&gz_pattern)) - .chain(list_of_files(&zip_pattern))) -} - -fn list_of_files(pattern: &str) -> std::vec::IntoIter { - let mut log_files: Vec = glob::glob(pattern) - .unwrap(/* failure should be impossible */) - .filter_map(Result::ok) - .collect(); - log_files.reverse(); - log_files.into_iter() -} - -fn remove_or_compress_too_old_logfiles( - o_cleanup_thread_handle: &Option, - cleanup_config: &Cleanup, - filename_config: &FilenameConfig, -) -> Result<(), std::io::Error> { - o_cleanup_thread_handle.as_ref().map_or( - remove_or_compress_too_old_logfiles_impl(cleanup_config, filename_config), - |cleanup_thread_handle| { - cleanup_thread_handle - .sender - .send(MessageToCleanupThread::Act) - .ok(); - Ok(()) - }, - ) -} - -fn remove_or_compress_too_old_logfiles_impl( - 
cleanup_config: &Cleanup, - filename_config: &FilenameConfig, -) -> Result<(), std::io::Error> { - let (log_limit, compress_limit) = match *cleanup_config { - Cleanup::Never => { - return Ok(()); - } - Cleanup::KeepLogFiles(log_limit) => (log_limit, 0), - - #[cfg(feature = "compress")] - #[allow(deprecated)] - Cleanup::KeepCompressedFiles(compress_limit) | Cleanup::KeepZipFiles(compress_limit) => { - (0, compress_limit) - } - - #[cfg(feature = "compress")] - #[allow(deprecated)] - Cleanup::KeepLogAndCompressedFiles(log_limit, compress_limit) - | Cleanup::KeepLogAndZipFiles(log_limit, compress_limit) => (log_limit, compress_limit), - }; - - for (index, file) in list_of_log_and_compressed_files(&filename_config)?.enumerate() { - if index >= log_limit + compress_limit { - // delete (log or log.gz) - std::fs::remove_file(&file)?; - } else if index >= log_limit { - #[cfg(feature = "compress")] - { - // compress, if not yet compressed - if let Some(extension) = file.extension() { - if extension != "gz" { - let mut old_file = File::open(file.clone())?; - let mut compressed_file = file.clone(); - compressed_file.set_extension("log.gz"); - let mut gz_encoder = flate2::write::GzEncoder::new( - File::create(compressed_file)?, - flate2::Compression::fast(), - ); - std::io::copy(&mut old_file, &mut gz_encoder)?; - gz_encoder.finish()?; - std::fs::remove_file(&file)?; - } - } - } - } - } - - Ok(()) -} - -// Moves the current file to the timestamp of the CURRENT file's creation date. -// If the rotation comes very fast, the new timestamp would be equal to the old one. -// To avoid file collisions, we insert an additional string to the filename (".restart-"). -// The number is incremented in case of repeated collisions. -// Cleaning up can leave some restart-files with higher numbers; if we still are in the same -// second, we need to continue with the restart-incrementing. 
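// Illustrative sketch of how the cleanup limits evaluated above are usually selected
// from application code via the Logger-level API (the same calls appear in the test
// files further down in this patch). `Cleanup::KeepLogAndCompressedFiles` requires
// the optional "compress" feature; the concrete numbers are invented for the example.
use flexi_logger::{Age, Cleanup, Criterion, Logger, Naming};

fn init_logging() {
    Logger::with_str("info")
        .log_to_file()
        .rotate(
            Criterion::Age(Age::Day),                  // rotate once per day
            Naming::Timestamps,                        // date-based infixes for rotated files
            Cleanup::KeepLogAndCompressedFiles(3, 30), // keep 3 plain and 30 compressed files
        )
        .start()
        .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e));
}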
-fn rotate_output_file_to_date( - creation_date: &DateTime, - config: &Config, -) -> Result<(), std::io::Error> { - let current_path = get_filepath(Some(CURRENT_INFIX), &config.filename_config); - - let mut rotated_path = get_filepath( - Some(&creation_date.format("_r%Y-%m-%d_%H-%M-%S").to_string()), - &config.filename_config, - ); - - // Search for rotated_path as is and for restart-siblings; - // if any exists, find highest restart and add 1, else continue without restart - let mut pattern = rotated_path.clone(); - pattern.set_extension(""); - let mut pattern = pattern.to_string_lossy().to_string(); - pattern.push_str(".restart-*"); - - let file_list = glob::glob(&pattern).unwrap(/*ok*/); - let mut vec: Vec = file_list.map(Result::unwrap).collect(); - vec.sort_unstable(); - - if (*rotated_path).exists() || !vec.is_empty() { - let mut number = if vec.is_empty() { - 0 - } else { - rotated_path = vec.pop().unwrap(/*Ok*/); - let file_stem = rotated_path - .file_stem() - .unwrap(/*ok*/) - .to_string_lossy() - .to_string(); - let index = file_stem.find(".restart-").unwrap(); - file_stem[(index + 9)..].parse::().unwrap() - }; - - while (*rotated_path).exists() { - rotated_path = get_filepath( - Some( - &creation_date - .format("_r%Y-%m-%d_%H-%M-%S") - .to_string() - .add(&format!(".restart-{:04}", number)), - ), - &config.filename_config, - ); - number += 1; - } - } - - match std::fs::rename(¤t_path, &rotated_path) { - Ok(()) => Ok(()), - Err(e) => { - if e.kind() == std::io::ErrorKind::NotFound { - // current did not exist, so we had nothing to do - Ok(()) - } else { - Err(e) - } - } - } -} - -// Moves the current file to the name with the next rotate_idx and returns the next rotate_idx. -// The current file must be closed already. -fn rotate_output_file_to_idx( - idx_state: IdxState, - config: &Config, -) -> Result { - let new_idx = match idx_state { - IdxState::Start => 0, - IdxState::Idx(idx) => idx + 1, - }; - - match std::fs::rename( - get_filepath(Some(CURRENT_INFIX), &config.filename_config), - get_filepath(Some(&number_infix(new_idx)), &config.filename_config), - ) { - Ok(()) => Ok(IdxState::Idx(new_idx)), - Err(e) => { - if e.kind() == std::io::ErrorKind::NotFound { - // current did not exist, so we had nothing to do - Ok(idx_state) - } else { - Err(e) - } - } - } -} - -// See documentation of Criterion::Age. -#[allow(unused_variables)] -fn get_creation_date(path: &PathBuf) -> DateTime { - // On windows, we know that try_get_creation_date() returns a result, but it is wrong. - // On linux, we know that try_get_creation_date() returns an error. - #[cfg(any(target_os = "windows", target_os = "linux"))] - return get_fake_creation_date(); - - // On all others of the many platforms, we give the real creation date a try, - // and fall back to the fake if it is not available. 
- #[cfg(not(any(target_os = "windows", target_os = "linux")))] - match try_get_creation_date(path) { - Ok(d) => d, - Err(e) => get_fake_creation_date(), - } -} - -fn get_fake_creation_date() -> DateTime { - Local::now() -} - -#[cfg(not(any(target_os = "windows", target_os = "linux")))] -fn try_get_creation_date(path: &PathBuf) -> Result, FlexiLoggerError> { - Ok(std::fs::metadata(path)?.created()?.into()) -} - -mod platform { - use std::path::{Path, PathBuf}; - - pub fn create_symlink_if_possible(link: &PathBuf, path: &Path) { - linux_create_symlink(link, path); - } - - #[cfg(target_os = "linux")] - fn linux_create_symlink(link: &PathBuf, logfile: &Path) { - if std::fs::symlink_metadata(link).is_ok() { - // remove old symlink before creating a new one - if let Err(e) = std::fs::remove_file(link) { - eprintln!( - "[flexi_logger] deleting old symlink to log file failed with {:?}", - e - ); - } - } - - // create new symlink - if let Err(e) = std::os::unix::fs::symlink(&logfile, link) { - eprintln!( - "[flexi_logger] cannot create symlink {:?} for logfile \"{}\" due to {:?}", - link, - &logfile.display(), - e - ); - } - } - - #[cfg(not(target_os = "linux"))] - fn linux_create_symlink(_: &PathBuf, _: &Path) {} -} diff --git a/agent/support/rust/flexi_logger/src/writers/log_writer.rs b/agent/support/rust/flexi_logger/src/writers/log_writer.rs deleted file mode 100644 index ec4ebbf86..000000000 --- a/agent/support/rust/flexi_logger/src/writers/log_writer.rs +++ /dev/null @@ -1,49 +0,0 @@ -use crate::deferred_now::DeferredNow; -use crate::FormatFunction; -use log::Record; - -/// Writes to a single log output stream. -/// -/// Boxed instances of `LogWriter` can be used as additional log targets -/// (see [module description](index.html) for more details). -pub trait LogWriter: Sync + Send { - /// Writes out a log line. - /// - /// # Errors - /// - /// `std::io::Error` - fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()>; - - /// Flushes any buffered records. - /// - /// # Errors - /// - /// `std::io::Error` - fn flush(&self) -> std::io::Result<()>; - - /// Provides the maximum log level that is to be written. - fn max_log_level(&self) -> log::LevelFilter; - - /// Sets the format function. - /// Defaults to ([`formats::default_format`](fn.default_format.html)), - /// but can be changed with a call to - /// [`Logger::format_for_writer`](struct.Logger.html#method.format_for_writer). - /// - /// The default implementation is a no-op. - fn format(&mut self, format: FormatFunction) { - let _ = format; - } - - /// Cleanup open resources, if necessary. - fn shutdown(&self) {} - - /// Takes a vec with three patterns per line that represent the log out, - /// compares the written log with the expected lines, - /// and asserts that both are in sync. - /// - /// This function is not meant for productive code, only for tests. 
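// Illustrative sketch of how `validate_logs` is used by the test files further down
// in this patch: log through the normal macros, then assert on
// (level, module, message-fragment) triples. It assumes the code lives in an
// integration test file named validate_example.rs, so the module path is
// "validate_example"; all other names are taken from this patch.
use flexi_logger::{detailed_format, Logger};
use log::{error, warn};

#[test]
fn validate_example() {
    let handle = Logger::with_str("info")
        .format(detailed_format)
        .log_to_file()
        .start()
        .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e));

    error!("This is an error message");
    warn!("This is a warning");

    handle.validate_logs(&[
        ("ERROR", "validate_example", "error message"),
        ("WARN", "validate_example", "warning"),
    ]);
}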
- #[doc(hidden)] - fn validate_logs(&self, _expected: &[(&'static str, &'static str, &'static str)]) { - unimplemented!("only useful for tests"); - } -} diff --git a/agent/support/rust/flexi_logger/src/writers/syslog_writer.rs b/agent/support/rust/flexi_logger/src/writers/syslog_writer.rs deleted file mode 100644 index c389abc51..000000000 --- a/agent/support/rust/flexi_logger/src/writers/syslog_writer.rs +++ /dev/null @@ -1,321 +0,0 @@ -use crate::deferred_now::DeferredNow; -use crate::writers::log_writer::LogWriter; -use std::cell::RefCell; -use std::ffi::OsString; -use std::io::Error as IoError; -use std::io::Result as IoResult; -use std::io::{BufWriter, ErrorKind, Write}; -use std::net::{TcpStream, ToSocketAddrs, UdpSocket}; -#[cfg(target_os = "linux")] -use std::path::Path; -use std::sync::Mutex; - -/// Syslog Facility. -/// -/// See [RFC 5424](https://datatracker.ietf.org/doc/rfc5424). -#[derive(Copy, Clone, Debug)] -pub enum SyslogFacility { - /// kernel messages. - Kernel = 0 << 3, - /// user-level messages. - UserLevel = 1 << 3, - /// mail system. - MailSystem = 2 << 3, - /// system daemons. - SystemDaemons = 3 << 3, - /// security/authorization messages. - Authorization = 4 << 3, - /// messages generated internally by syslogd. - SyslogD = 5 << 3, - /// line printer subsystem. - LinePrinter = 6 << 3, - /// network news subsystem. - News = 7 << 3, - /// UUCP subsystem. - Uucp = 8 << 3, - /// clock daemon. - Clock = 9 << 3, - /// security/authorization messages. - Authorization2 = 10 << 3, - /// FTP daemon. - Ftp = 11 << 3, - /// NTP subsystem. - Ntp = 12 << 3, - /// log audit. - LogAudit = 13 << 3, - /// log alert. - LogAlert = 14 << 3, - /// clock daemon (note 2). - Clock2 = 15 << 3, - /// local use 0 (local0). - LocalUse0 = 16 << 3, - /// local use 1 (local1). - LocalUse1 = 17 << 3, - /// local use 2 (local2). - LocalUse2 = 18 << 3, - /// local use 3 (local3). - LocalUse3 = 19 << 3, - /// local use 4 (local4). - LocalUse4 = 20 << 3, - /// local use 5 (local5). - LocalUse5 = 21 << 3, - /// local use 6 (local6). - LocalUse6 = 22 << 3, - /// local use 7 (local7). - LocalUse7 = 23 << 3, -} - -/// `SyslogConnector`'s severity. -/// -/// See [RFC 5424](https://datatracker.ietf.org/doc/rfc5424). -#[derive(Debug)] -pub enum SyslogSeverity { - /// System is unusable. - Emergency = 0, - /// Action must be taken immediately. - Alert = 1, - /// Critical conditions. - Critical = 2, - /// Error conditions. - Error = 3, - /// Warning conditions - Warning = 4, - /// Normal but significant condition - Notice = 5, - /// Informational messages. - Info = 6, - /// Debug-level messages. - Debug = 7, -} - -/// Signature for a custom mapping function that maps the rust log levels to -/// values of the syslog Severity. -pub type LevelToSyslogSeverity = fn(level: log::Level) -> SyslogSeverity; - -fn default_mapping(level: log::Level) -> SyslogSeverity { - match level { - log::Level::Error => SyslogSeverity::Error, - log::Level::Warn => SyslogSeverity::Warning, - log::Level::Info => SyslogSeverity::Info, - log::Level::Debug | log::Level::Trace => SyslogSeverity::Debug, - } -} - -/// An experimental configurable `LogWriter` implementation that writes log messages to the syslog -/// (see [RFC 5424](https://datatracker.ietf.org/doc/rfc5424)). -/// -/// Only available with optional crate feature `syslog_writer`. 
-/// -/// For using the `SyslogWriter`, you need to know how the syslog is managed on your system, -/// how you can access it and with which protocol you can write to it, -/// so that you can choose a variant of the `SyslogConnector` that fits to your environment. -/// -/// See the [module description](index.html) for guidance how to use additional log writers. -pub struct SyslogWriter { - hostname: OsString, - process: String, - pid: u32, - facility: SyslogFacility, - message_id: String, - determine_severity: LevelToSyslogSeverity, - syslog: Mutex>, - max_log_level: log::LevelFilter, -} -impl SyslogWriter { - /// Returns a configured boxed instance. - /// - /// ## Parameters - /// - /// `facility`: An value representing a valid syslog facility value according to RFC 5424. - /// - /// `determine_severity`: (optional) A function that maps the rust log levels - /// to the syslog severities. If None is given, a trivial default mapping is used, which - /// should be good enough in most cases. - /// - /// `message_id`: The value being used as syslog's MSGID, which - /// should identify the type of message. The value itself - /// is a string without further semantics. It is intended for filtering - /// messages on a relay or collector. - /// - /// `syslog`: A [`SyslogConnector`](enum.SyslogConnector.html). - /// - /// # Errors - /// - /// `std::io::Error` - pub fn try_new( - facility: SyslogFacility, - determine_severity: Option, - max_log_level: log::LevelFilter, - message_id: String, - syslog: SyslogConnector, - ) -> IoResult> { - Ok(Box::new(Self { - hostname: hostname::get().unwrap_or_else(|_| OsString::from("")), - process: std::env::args() - .next() - .ok_or_else(|| IoError::new(ErrorKind::Other, "".to_owned()))?, - pid: std::process::id(), - facility, - max_log_level, - message_id, - determine_severity: determine_severity.unwrap_or_else(|| default_mapping), - syslog: Mutex::new(RefCell::new(syslog)), - })) - } -} - -impl LogWriter for SyslogWriter { - fn write(&self, now: &mut DeferredNow, record: &log::Record) -> IoResult<()> { - let mr_syslog = self.syslog.lock().unwrap(); - let mut syslog = mr_syslog.borrow_mut(); - - let severity = (self.determine_severity)(record.level()); - write!( - syslog, - "{}", - format!( - "<{}>1 {} {:?} {} {} {} - {}\n", - self.facility as u8 | severity as u8, - now.now() - .to_rfc3339_opts(chrono::SecondsFormat::Micros, false), - self.hostname, - self.process, - self.pid, - self.message_id, - &record.args() - ) - ) - } - - fn flush(&self) -> IoResult<()> { - let mr_syslog = self.syslog.lock().unwrap(); - let mut syslog = mr_syslog.borrow_mut(); - syslog.flush()?; - Ok(()) - } - - fn max_log_level(&self) -> log::LevelFilter { - self.max_log_level - } -} - -/// Helper struct that connects to the syslog and implements Write. -/// -/// Is used in [`SyslogWriter::try_new`](struct.SyslogWriter.html#method.try_new). -/// -/// ## Example -/// -/// ```rust,no_run -/// use flexi_logger::writers::SyslogConnector; -/// let syslog_connector = SyslogConnector::try_tcp("localhost:7777").unwrap(); -/// ``` -/// -#[derive(Debug)] -pub enum SyslogConnector { - /// Sends log lines to the syslog via a - /// [UnixStream](https://doc.rust-lang.org/std/os/unix/net/struct.UnixStream.html). - /// - /// Is only available on linux. - #[cfg(target_os = "linux")] - Stream(BufWriter), - - /// Sends log lines to the syslog via a - /// [UnixDatagram](https://doc.rust-lang.org/std/os/unix/net/struct.UnixDatagram.html). - /// - /// Is only available on linux. 
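// Illustrative sketch of wiring the SyslogWriter described above into a Logger as an
// additional writer; this needs the optional "syslog_writer" feature. The UDP
// addresses and the "demo" message id are invented for the example.
use flexi_logger::writers::{SyslogConnector, SyslogFacility, SyslogWriter};
use flexi_logger::Logger;
use log::error;

fn main() -> std::io::Result<()> {
    let syslog_writer = SyslogWriter::try_new(
        SyslogFacility::LocalUse0,
        None,                   // keep the default level-to-severity mapping
        log::LevelFilter::Warn,
        "demo".to_string(),
        SyslogConnector::try_udp("127.0.0.1:0", "127.0.0.1:514")?,
    )?;

    Logger::with_str("info")
        .add_writer("Syslog", syslog_writer)
        .start()
        .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e));

    // Written to the syslog writer and to the default logger.
    error!(target: "{Syslog,_Default}", "something went wrong");
    Ok(())
}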
- #[cfg(target_os = "linux")] - Datagram(std::os::unix::net::UnixDatagram), - - /// Sends log lines to the syslog via UDP. - /// - /// UDP is fragile and thus discouraged except for local communication. - Udp(UdpSocket), - - /// Sends log lines to the syslog via TCP. - Tcp(BufWriter), -} -impl SyslogConnector { - /// Returns a `SyslogConnector::Datagram` to the specified path. - /// - /// Is only available on linux. - #[cfg(target_os = "linux")] - pub fn try_datagram>(path: P) -> IoResult { - let ud = std::os::unix::net::UnixDatagram::unbound()?; - ud.connect(&path)?; - Ok(SyslogConnector::Datagram(ud)) - } - - /// Returns a `SyslogConnector::Stream` to the specified path. - /// - /// Is only available on linux. - #[cfg(target_os = "linux")] - pub fn try_stream>(path: P) -> IoResult { - Ok(SyslogConnector::Stream(BufWriter::new( - std::os::unix::net::UnixStream::connect(path)?, - ))) - } - - /// Returns a `SyslogConnector` which sends the log lines via TCP to the specified address. - /// - /// # Errors - /// - /// `std::io::Error` if opening the stream fails. - pub fn try_tcp(server: T) -> IoResult { - Ok(Self::Tcp(BufWriter::new(TcpStream::connect(server)?))) - } - - /// Returns a `SyslogConnector` which sends log via the fragile UDP protocol from local to server. - /// - /// # Errors - /// - /// `std::io::Error` if opening the stream fails. - pub fn try_udp(local: T, server: T) -> IoResult { - let socket = UdpSocket::bind(local)?; - socket.connect(server)?; - Ok(Self::Udp(socket)) - } -} - -impl Write for SyslogConnector { - fn write(&mut self, message: &[u8]) -> IoResult { - // eprintln!( - // "syslog: got message \"{}\" ", - // String::from_utf8_lossy(message) - // ); - match *self { - #[cfg(target_os = "linux")] - Self::Datagram(ref ud) => { - // todo: reconnect of conn is broken - ud.send(&message[..]) - } - #[cfg(target_os = "linux")] - Self::Stream(ref mut w) => { - // todo: reconnect of conn is broken - w.write(&message[..]) - .and_then(|sz| w.write_all(&[0; 1]).map(|_| sz)) - } - Self::Tcp(ref mut w) => { - // todo: reconnect of conn is broken - w.write(&message[..]) - } - Self::Udp(ref socket) => { - // ?? 
- socket.send(&message[..]) - } - } - } - - fn flush(&mut self) -> IoResult<()> { - match *self { - #[cfg(target_os = "linux")] - Self::Datagram(_) => Ok(()), - - #[cfg(target_os = "linux")] - Self::Stream(ref mut w) => w.flush(), - - Self::Udp(_) => Ok(()), - - Self::Tcp(ref mut w) => w.flush(), - } - } -} diff --git a/agent/support/rust/flexi_logger/tests/test_age_or_size.rs b/agent/support/rust/flexi_logger/tests/test_age_or_size.rs deleted file mode 100644 index 76c374e18..000000000 --- a/agent/support/rust/flexi_logger/tests/test_age_or_size.rs +++ /dev/null @@ -1,109 +0,0 @@ -use chrono::Local; -use flexi_logger::{Age, Cleanup, Criterion, Duplicate, Logger, Naming}; -use glob::glob; -use log::*; -use std::fs::File; -use std::io::{BufRead, BufReader}; -use std::ops::Add; - -#[test] -fn test_age_or_size() { - let directory = define_directory(); - Logger::with_str("trace") - .log_to_file() - .duplicate_to_stderr(Duplicate::Info) - .directory(directory.clone()) - .rotate( - Criterion::AgeOrSize(Age::Second, 80), - Naming::Numbers, - Cleanup::Never, - ) - .start() - .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); - // info!("test correct rotation by age or size"); - - write_log_lines(); - - verify_logs(&directory); -} - -fn write_log_lines() { - // Fill first three files by size - trace!("{}", 'a'); - trace!("{}", 'b'); - trace!("{}", 'c'); - - trace!("{}", 'd'); - trace!("{}", 'e'); - trace!("{}", 'f'); - - trace!("{}", 'g'); - trace!("{}", 'h'); - trace!("{}", 'i'); - - trace!("{}", 'j'); - - // now wait to enforce a rotation with a smaller file - std::thread::sleep(std::time::Duration::from_secs(2)); - trace!("{}", 'k'); - - // now wait to enforce a rotation with a smaller file - std::thread::sleep(std::time::Duration::from_secs(2)); - trace!("{}", 'l'); - - // then again fill a file by size - trace!("{}", 'm'); - trace!("{}", 'n'); - - // and do the final rotation: - trace!("{}", 'o'); - - // trace!("{}",'p'); - // trace!("{}",'q'); - // trace!("{}",'r'); - // trace!("{}",'s'); - // trace!("{}",'t'); -} - -fn define_directory() -> String { - format!( - "./log_files/age_or_size/{}", - Local::now().format("%Y-%m-%d_%H-%M-%S") - ) -} - -fn verify_logs(directory: &str) { - let expected_line_counts = [3, 3, 3, 1, 1, 3, 1]; - // read all files - let pattern = String::from(directory).add("/*"); - let globresults = match glob(&pattern) { - Err(e) => panic!( - "Is this ({}) really a directory? 
Listing failed with {}", - pattern, e - ), - Ok(globresults) => globresults, - }; - let mut no_of_log_files = 0; - let mut total_line_count = 0_usize; - for (index, globresult) in globresults.into_iter().enumerate() { - let mut line_count = 0_usize; - let pathbuf = globresult.unwrap_or_else(|e| panic!("Ups - error occured: {}", e)); - let f = File::open(&pathbuf) - .unwrap_or_else(|e| panic!("Cannot open file {:?} due to {}", pathbuf, e)); - no_of_log_files += 1; - let mut reader = BufReader::new(f); - let mut buffer = String::new(); - while reader.read_line(&mut buffer).unwrap() > 0 { - line_count += 1; - buffer.clear(); - } - assert_eq!( - line_count, expected_line_counts[index], - "file has wrong size" - ); - total_line_count += line_count; - } - - assert_eq!(no_of_log_files, 7, "wrong file count"); - assert_eq!(total_line_count, 15, "wrong line count!"); -} diff --git a/agent/support/rust/flexi_logger/tests/test_colors.rs b/agent/support/rust/flexi_logger/tests/test_colors.rs deleted file mode 100644 index 9037446ab..000000000 --- a/agent/support/rust/flexi_logger/tests/test_colors.rs +++ /dev/null @@ -1,16 +0,0 @@ -use flexi_logger::{LogTarget, Logger}; -use log::*; - -#[test] -fn test_mods() { - Logger::with_str("trace") - .log_target(LogTarget::StdOut) - .start() - .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); - - error!("This is an error message"); - warn!("This is a warning"); - info!("This is an info message"); - debug!("This is a debug message"); - trace!("This is a trace message"); -} diff --git a/agent/support/rust/flexi_logger/tests/test_custom_log_writer.rs b/agent/support/rust/flexi_logger/tests/test_custom_log_writer.rs deleted file mode 100644 index f01c669a5..000000000 --- a/agent/support/rust/flexi_logger/tests/test_custom_log_writer.rs +++ /dev/null @@ -1,62 +0,0 @@ -use std::sync::Mutex; - -use flexi_logger::writers::LogWriter; -use flexi_logger::{default_format, DeferredNow, LogTarget, Logger}; -use log::*; - -pub struct CustomWriter { - data: Mutex>, -} - -impl LogWriter for CustomWriter { - fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()> { - let mut data = self.data.lock().unwrap(); - default_format(&mut *data, now, record) - } - - fn flush(&self) -> std::io::Result<()> { - Ok(()) - } - - fn max_log_level(&self) -> log::LevelFilter { - log::LevelFilter::Trace - } - - fn validate_logs(&self, expected: &[(&'static str, &'static str, &'static str)]) { - let data = self.data.lock().unwrap(); - let expected_data = - expected - .iter() - .fold(Vec::new(), |mut acc, (level, module, message)| { - acc.extend(format!("{} [{}] {}", level, module, message).bytes()); - acc - }); - assert_eq!(*data, expected_data); - } -} - -#[test] -fn test_custom_log_writer() { - let handle = Logger::with_str("info") - .log_target(LogTarget::Writer(Box::new(CustomWriter { - data: Mutex::new(Vec::new()), - }))) - .start() - .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); - - error!("This is an error message"); - warn!("This is a warning"); - info!("This is an info message"); - debug!("This is a debug message - you must not see it!"); - trace!("This is a trace message - you must not see it!"); - - handle.validate_logs(&[ - ( - "ERROR", - "test_custom_log_writer", - "This is an error message", - ), - ("WARN", "test_custom_log_writer", "This is a warning"), - ("INFO", "test_custom_log_writer", "This is an info message"), - ]); -} diff --git a/agent/support/rust/flexi_logger/tests/test_custom_log_writer_format.rs 
b/agent/support/rust/flexi_logger/tests/test_custom_log_writer_format.rs deleted file mode 100644 index 9d675016e..000000000 --- a/agent/support/rust/flexi_logger/tests/test_custom_log_writer_format.rs +++ /dev/null @@ -1,78 +0,0 @@ -use std::sync::Mutex; - -use flexi_logger::writers::LogWriter; -use flexi_logger::{default_format, DeferredNow, FormatFunction, LogTarget, Logger}; -use log::*; - -pub struct CustomWriter { - data: Mutex>, - format: FormatFunction, -} - -impl LogWriter for CustomWriter { - fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()> { - let mut data = self.data.lock().unwrap(); - (self.format)(&mut *data, now, record) - } - - fn flush(&self) -> std::io::Result<()> { - Ok(()) - } - - fn format(&mut self, format: FormatFunction) { - self.format = format; - } - - fn max_log_level(&self) -> log::LevelFilter { - log::LevelFilter::Trace - } - - fn validate_logs(&self, expected: &[(&'static str, &'static str, &'static str)]) { - let data = self.data.lock().unwrap(); - let expected_data = - expected - .iter() - .fold(Vec::new(), |mut acc, (level, _module, message)| { - acc.extend(format!("{}: {}", level, message).bytes()); - acc - }); - assert_eq!(*data, expected_data); - } -} - -fn custom_format( - writer: &mut dyn std::io::Write, - _now: &mut DeferredNow, - record: &Record, -) -> Result<(), std::io::Error> { - // Only write the message and the level, without the module - write!(writer, "{}: {}", record.level(), &record.args()) -} - -#[test] -fn test_custom_log_writer_custom_format() { - let handle = Logger::with_str("info") - .log_target(LogTarget::Writer(Box::new(CustomWriter { - data: Mutex::new(Vec::new()), - format: default_format, - }))) - .format(custom_format) - .start() - .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); - - error!("This is an error message"); - warn!("This is a warning"); - info!("This is an info message"); - debug!("This is a debug message - you must not see it!"); - trace!("This is a trace message - you must not see it!"); - - handle.validate_logs(&[ - ( - "ERROR", - "test_custom_log_writer", - "This is an error message", - ), - ("WARN", "test_custom_log_writer", "This is a warning"), - ("INFO", "test_custom_log_writer", "This is an info message"), - ]); -} diff --git a/agent/support/rust/flexi_logger/tests/test_default_file_and_writer.rs b/agent/support/rust/flexi_logger/tests/test_default_file_and_writer.rs deleted file mode 100644 index e46676ee9..000000000 --- a/agent/support/rust/flexi_logger/tests/test_default_file_and_writer.rs +++ /dev/null @@ -1,43 +0,0 @@ -use flexi_logger::writers::{FileLogWriter, LogWriter}; -use flexi_logger::{detailed_format, LogTarget, Logger}; -use log::*; - -#[test] -fn test_default_file_and_writer() { - let w = FileLogWriter::builder() - .format(detailed_format) - .discriminant("bar") - .try_build() - .unwrap(); - - let handle = Logger::with_str("info") - .log_target(LogTarget::FileAndWriter(Box::new(w))) - .format(detailed_format) - .discriminant("foo") - .start() - .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); - - error!("This is an error message"); - warn!("This is a warning"); - info!("This is an info message"); - debug!("This is a debug message - you must not see it!"); - trace!("This is a trace message - you must not see it!"); - - handle.validate_logs(&[ - ("ERROR", "test_default_file_and_writer", "error"), - ("WARN", "test_default_file_and_writer", "warning"), - ("INFO", "test_default_file_and_writer", "info"), - ]); - - let w = 
FileLogWriter::builder() - .format(detailed_format) - .discriminant("bar") - .append() - .try_build() - .unwrap(); - w.validate_logs(&[ - ("ERROR", "test_default_file_and_writer", "error"), - ("WARN", "test_default_file_and_writer", "warning"), - ("INFO", "test_default_file_and_writer", "info"), - ]); -} diff --git a/agent/support/rust/flexi_logger/tests/test_default_files_dir.rs b/agent/support/rust/flexi_logger/tests/test_default_files_dir.rs deleted file mode 100644 index c68c5826c..000000000 --- a/agent/support/rust/flexi_logger/tests/test_default_files_dir.rs +++ /dev/null @@ -1,21 +0,0 @@ -use log::*; - -#[test] -fn test_default_files_dir() { - let handle = flexi_logger::Logger::with_str("info") - .log_to_file() - .directory("log_files") - .start() - .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); - - error!("This is an error message"); - warn!("This is a warning"); - info!("This is an info message"); - debug!("This is a debug message - you must not see it!"); - trace!("This is a trace message - you must not see it!"); - handle.validate_logs(&[ - ("ERROR", "test_default_files_dir", "error"), - ("WARN", "test_default_files_dir", "warning"), - ("INFO", "test_default_files_dir", "info"), - ]); -} diff --git a/agent/support/rust/flexi_logger/tests/test_default_files_dir_rot.rs b/agent/support/rust/flexi_logger/tests/test_default_files_dir_rot.rs deleted file mode 100644 index 6833a4bed..000000000 --- a/agent/support/rust/flexi_logger/tests/test_default_files_dir_rot.rs +++ /dev/null @@ -1,17 +0,0 @@ -use flexi_logger::*; -use log::*; -#[test] -fn test_default_files_dir_rot() { - Logger::with_str("info") - .log_target(LogTarget::File) - .directory("log_files") - .rotate(Criterion::Size(2000), Naming::Numbers, Cleanup::Never) - .start() - .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); - - error!("This is an error message"); - warn!("This is a warning"); - info!("This is an info message"); - debug!("This is a debug message - you must not see it!"); - trace!("This is a trace message - you must not see it!"); -} diff --git a/agent/support/rust/flexi_logger/tests/test_detailed_files_rot.rs b/agent/support/rust/flexi_logger/tests/test_detailed_files_rot.rs deleted file mode 100644 index 8b30f2f59..000000000 --- a/agent/support/rust/flexi_logger/tests/test_detailed_files_rot.rs +++ /dev/null @@ -1,23 +0,0 @@ -use flexi_logger::{detailed_format, Cleanup, Criterion, Logger, Naming}; -use log::*; - -#[test] -fn test_detailed_files_rot() { - let handle = Logger::with_str("info") - .format(detailed_format) - .log_to_file() - .rotate(Criterion::Size(2000), Naming::Numbers, Cleanup::Never) - .start() - .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); - - error!("This is an error message"); - warn!("This is a warning"); - info!("This is an info message"); - debug!("This is a debug message - you must not see it!"); - trace!("This is a trace message - you must not see it!"); - handle.validate_logs(&[ - ("ERROR", "test_detailed_files_rot", "error"), - ("WARN", "test_detailed_files_rot", "warning"), - ("INFO", "test_detailed_files_rot", "info"), - ]); -} diff --git a/agent/support/rust/flexi_logger/tests/test_detailed_files_rot_timestamp.rs b/agent/support/rust/flexi_logger/tests/test_detailed_files_rot_timestamp.rs deleted file mode 100644 index 55cdff0f0..000000000 --- a/agent/support/rust/flexi_logger/tests/test_detailed_files_rot_timestamp.rs +++ /dev/null @@ -1,24 +0,0 @@ -use flexi_logger::{detailed_format, Cleanup, Criterion, 
Logger, Naming}; -use log::*; - -#[test] -fn test_detailed_files_rot_timestamp() { - let handle = Logger::with_str("info") - .format(detailed_format) - .log_to_file() - .rotate(Criterion::Size(2000), Naming::Numbers, Cleanup::Never) - .o_timestamp(true) - .start() - .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); - - error!("This is an error message"); - warn!("This is a warning"); - info!("This is an info message"); - debug!("This is a debug message - you must not see it!"); - trace!("This is a trace message - you must not see it!"); - handle.validate_logs(&[ - ("ERROR", "test_detailed_files_rot", "error"), - ("WARN", "test_detailed_files_rot", "warning"), - ("INFO", "test_detailed_files_rot", "info"), - ]); -} diff --git a/agent/support/rust/flexi_logger/tests/test_env_logger_style.rs b/agent/support/rust/flexi_logger/tests/test_env_logger_style.rs deleted file mode 100644 index ed9ce4a15..000000000 --- a/agent/support/rust/flexi_logger/tests/test_env_logger_style.rs +++ /dev/null @@ -1,12 +0,0 @@ -use log::*; - -#[test] -fn you_must_see_exactly_three_messages_above_1_err_1_warn_1_info() { - flexi_logger::Logger::with_str("info").start().unwrap(); - - error!("This is an error message"); - warn!("This is a warning"); - info!("This is an info message"); - debug!("This is a debug message - you must not see it!"); - trace!("This is a trace message - you must not see it!"); -} diff --git a/agent/support/rust/flexi_logger/tests/test_mods.rs b/agent/support/rust/flexi_logger/tests/test_mods.rs deleted file mode 100644 index a8b9f76b2..000000000 --- a/agent/support/rust/flexi_logger/tests/test_mods.rs +++ /dev/null @@ -1,66 +0,0 @@ -use flexi_logger::{detailed_format, Logger, ReconfigurationHandle}; -use log::*; - -#[test] -fn test_mods() { - let handle: ReconfigurationHandle = Logger::with_env_or_str( - "info, test_mods::mymod1=debug, test_mods::mymod2=error, test_mods::mymod1::mysubmod = off", - ) - .format(detailed_format) - .log_to_file() - .start() - .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); - - error!("This is an error message"); - warn!("This is a warning"); - info!("This is an info message"); - debug!("This is a debug message - you must not see it!"); - trace!("This is a trace message - you must not see it!"); - - mymod1::test_traces(); - mymod2::test_traces(); - - handle.validate_logs(&[ - ("ERROR", "test_mods", "error"), - ("WARN", "test_mods", "warning"), - ("INFO", "test_mods", "info"), - ("ERROR", "test_mods::mymod1", "error"), - ("WARN", "test_mods::mymod1", "warning"), - ("INFO", "test_mods::mymod1", "info"), - ("DEBUG", "test_mods::mymod1", "debug"), - ("ERROR", "test_mods::mymod2", "error"), - ]); -} - -mod mymod1 { - use log::*; - pub fn test_traces() { - error!("This is an error message"); - warn!("This is a warning"); - info!("This is an info message"); - debug!("This is a debug message"); - trace!("This is a trace message - you must not see it!"); - - self::mysubmod::test_traces(); - } - mod mysubmod { - use log::*; - pub fn test_traces() { - error!("This is an error message - you must not see it!"); - warn!("This is a warning - you must not see it!"); - info!("This is an info message - you must not see it!"); - debug!("This is a debug message - you must not see it!"); - trace!("This is a trace message - you must not see it!"); - } - } -} -mod mymod2 { - use log::*; - pub fn test_traces() { - error!("This is an error message"); - warn!("This is a warning - you must not see it!"); - info!("This is an info message - you 
must not see it!"); - debug!("This is a debug message - you must not see it!"); - trace!("This is a trace message - you must not see it!"); - } -} diff --git a/agent/support/rust/flexi_logger/tests/test_mods_off.rs b/agent/support/rust/flexi_logger/tests/test_mods_off.rs deleted file mode 100644 index 89459fdb6..000000000 --- a/agent/support/rust/flexi_logger/tests/test_mods_off.rs +++ /dev/null @@ -1,47 +0,0 @@ -use flexi_logger::{detailed_format, Logger, ReconfigurationHandle}; -use log::*; - -#[test] -fn test_mods_off() { - let handle: ReconfigurationHandle = Logger::with_env_or_str("info, test_mods_off::mymod1=off") - .format(detailed_format) - .log_to_file() - .start() - .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); - - error!("This is an error message"); - warn!("This is a warning"); - mymod1::test_traces(); - info!("This is an info message"); - debug!("This is a debug message - you must not see it!"); - trace!("This is a trace message - you must not see it!"); - - handle.validate_logs(&[ - ("ERROR", "test_mods", "error"), - ("WARN", "test_mods", "warning"), - ("INFO", "test_mods", "info"), - ]); -} - -mod mymod1 { - use log::*; - pub fn test_traces() { - error!("This is an error message"); - warn!("This is a warning"); - info!("This is an info message"); - debug!("This is a debug message"); - trace!("This is a trace message - you must not see it!"); - - self::mysubmod::test_traces(); - } - mod mysubmod { - use log::*; - pub fn test_traces() { - error!("This is an error message - you must not see it!"); - warn!("This is a warning - you must not see it!"); - info!("This is an info message - you must not see it!"); - debug!("This is a debug message - you must not see it!"); - trace!("This is a trace message - you must not see it!"); - } - } -} diff --git a/agent/support/rust/flexi_logger/tests/test_multi_logger.rs b/agent/support/rust/flexi_logger/tests/test_multi_logger.rs deleted file mode 100644 index 911229a68..000000000 --- a/agent/support/rust/flexi_logger/tests/test_multi_logger.rs +++ /dev/null @@ -1,104 +0,0 @@ -use flexi_logger::writers::{FileLogWriter, LogWriter}; -use flexi_logger::{detailed_format, DeferredNow, Logger, Record}; -use log::*; -use std::sync::Arc; - -#[macro_use] -mod macros { - #[macro_export] - macro_rules! 
sec_alert_error { - ($($arg:tt)*) => ( - error!(target: "{Sec,Alert,_Default}", $($arg)*); - ) - } -} - -#[test] -fn test() { - // more complex just to support validation: - let (sec_writer, sec_handle) = SecWriter::new(); - let mut log_handle = Logger::with_str("info, fantasy = trace") - .format(detailed_format) - .print_message() - .log_to_file() - .add_writer("Sec", sec_writer) - .add_writer("Alert", alert_logger()) - .start() - .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); - - // Explicitly send logs to different loggers - error!(target : "{Sec}", "This is a security-relevant error message"); - error!(target : "{Sec,Alert}", "This is a security-relevant alert message"); - error!(target : "{Sec,Alert,_Default}", "This is a security-relevant alert and log message"); - error!(target : "{Alert}", "This is an alert"); - - // Nicer: use explicit macros - sec_alert_error!("This is another security-relevant alert and log message"); - warn!("This is a warning"); - info!("This is an info message"); - debug!("This is a debug message - you must not see it!"); - trace!("This is a trace message - you must not see it!"); - - trace!(target: "phantasia", "this is a trace you should not see"); - trace!(target: "fantasy", "this is a trace you should see"); - - // Switching off logging has no effect on non-default targets - log_handle.parse_new_spec("Off"); - sec_alert_error!("This is a further security-relevant alert and log message"); - - // Verification: - #[rustfmt::skip] - log_handle.validate_logs(&[ - ("ERROR", "multi_logger", "a security-relevant alert and log message"), - ("ERROR", "multi_logger", "another security-relevant alert and log message"), - ("WARN", "multi_logger", "warning"), - ("INFO", "multi_logger", "info"), - ("TRACE", "multi_logger", "this is a trace you should see"), - ]); - #[rustfmt::skip] - sec_handle.validate_logs(&[ - ("ERROR", "multi_logger", "security-relevant error"), - ("ERROR", "multi_logger", "a security-relevant alert"), - ("ERROR", "multi_logger", "security-relevant alert and log message"), - ("ERROR", "multi_logger", "another security-relevant alert"), - ("ERROR", "multi_logger", "a further security-relevant alert"), - ]); -} - -struct SecWriter(Arc); - -impl SecWriter { - pub fn new() -> (Box, Arc) { - let a_flw = Arc::new( - FileLogWriter::builder() - .discriminant("Security") - .suffix("seclog") - .print_message() - .try_build() - .unwrap(), - ); - (Box::new(SecWriter(Arc::clone(&a_flw))), a_flw) - } -} -impl LogWriter for SecWriter { - fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()> { - self.0.write(now, record) - } - fn flush(&self) -> std::io::Result<()> { - self.0.flush() - } - fn max_log_level(&self) -> log::LevelFilter { - log::LevelFilter::Error - } -} - -pub fn alert_logger() -> Box { - Box::new( - FileLogWriter::builder() - .discriminant("Alert") - .suffix("alerts") - .print_message() - .try_build() - .unwrap(), - ) -} diff --git a/agent/support/rust/flexi_logger/tests/test_multi_threaded_cleanup.rs b/agent/support/rust/flexi_logger/tests/test_multi_threaded_cleanup.rs deleted file mode 100644 index e77308349..000000000 --- a/agent/support/rust/flexi_logger/tests/test_multi_threaded_cleanup.rs +++ /dev/null @@ -1,155 +0,0 @@ -#[cfg(feature = "compress")] -mod d { - use chrono::Local; - use flexi_logger::{ - Cleanup, Criterion, DeferredNow, Duplicate, LogSpecification, Logger, Naming, Record, - }; - use glob::glob; - use log::*; - use std::ops::Add; - use std::thread::{self, JoinHandle}; - - const 
NO_OF_THREADS: usize = 5; - const NO_OF_LOGLINES_PER_THREAD: usize = 100_000; - const ROTATE_OVER_SIZE: u64 = 3_000_000; - const NO_OF_LOG_FILES: usize = 2; - const NO_OF_GZ_FILES: usize = 5; - - #[test] - fn multi_threaded() { - // we use a special log line format that starts with a special string so that it is easier to - // verify that all log lines are written correctly - - let start = Local::now(); - let directory = define_directory(); - let mut reconf_handle = Logger::with_str("debug") - .log_to_file() - .format(test_format) - .duplicate_to_stderr(Duplicate::Info) - .directory(directory.clone()) - .rotate( - Criterion::Size(ROTATE_OVER_SIZE), - Naming::Timestamps, - Cleanup::KeepLogAndCompressedFiles(NO_OF_LOG_FILES, NO_OF_GZ_FILES), - ) - .start() - .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); - info!( - "create a huge number of log lines with a considerable number of threads, \ - verify the log" - ); - - let worker_handles = start_worker_threads(NO_OF_THREADS); - let new_spec = LogSpecification::parse("trace").unwrap(); - thread::sleep(std::time::Duration::from_millis(1000)); - reconf_handle.set_new_spec(new_spec); - - wait_for_workers_to_close(worker_handles); - - let delta = Local::now().signed_duration_since(start).num_milliseconds(); - debug!( - "Task executed with {} threads in {}ms.", - NO_OF_THREADS, delta - ); - - reconf_handle.shutdown(); - verify_logs(&directory); - } - - // Starts given number of worker threads and lets each execute `do_work` - fn start_worker_threads(no_of_workers: usize) -> Vec> { - let mut worker_handles: Vec> = Vec::with_capacity(no_of_workers); - trace!("Starting {} worker threads", no_of_workers); - for thread_number in 0..no_of_workers { - trace!("Starting thread {}", thread_number); - worker_handles.push( - thread::Builder::new() - .name(thread_number.to_string()) - .spawn(move || { - do_work(thread_number); - 0 as u8 - }) - .unwrap(), - ); - } - trace!("All {} worker threads started.", worker_handles.len()); - worker_handles - } - - fn do_work(thread_number: usize) { - trace!("({}) Thread started working", thread_number); - trace!("ERROR_IF_PRINTED"); - for idx in 0..NO_OF_LOGLINES_PER_THREAD { - debug!("({}) writing out line number {}", thread_number, idx); - } - trace!("MUST_BE_PRINTED"); - } - - fn wait_for_workers_to_close(worker_handles: Vec>) { - for worker_handle in worker_handles { - worker_handle - .join() - .unwrap_or_else(|e| panic!("Joining worker thread failed: {:?}", e)); - } - trace!("All worker threads joined."); - } - - fn define_directory() -> String { - format!( - "./log_files/mt_logs/{}", - Local::now().format("%Y-%m-%d_%H-%M-%S") - ) - } - - pub fn test_format( - w: &mut dyn std::io::Write, - now: &mut DeferredNow, - record: &Record, - ) -> std::io::Result<()> { - write!( - w, - "XXXXX [{}] T[{:?}] {} [{}:{}] {}", - now.now().format("%Y-%m-%d %H:%M:%S%.6f %:z"), - thread::current().name().unwrap_or(""), - record.level(), - record.file().unwrap_or(""), - record.line().unwrap_or(0), - &record.args() - ) - } - - fn verify_logs(directory: &str) { - // Since the cleanup deleted log files, we just can confirm that the correct number of - // log files and compressed files exist - - let basename = String::from(directory).add("/").add( - &std::path::Path::new(&std::env::args().next().unwrap()) - .file_stem().unwrap(/*cannot fail*/) - .to_string_lossy().to_string(), - ); - - let fn_pattern = String::with_capacity(180) - .add(&basename) - .add("_r[0-9][0-9]*."); - - let log_pattern = 
fn_pattern.clone().add("log"); - println!("log_pattern = {}", log_pattern); - let no_of_log_files = glob(&log_pattern) - .unwrap() - .map(Result::unwrap) - .inspect(|p| println!("found: {:?}", p)) - .count(); - - let gz_pattern = fn_pattern.add("gz"); - let no_of_gz_files = glob(&gz_pattern) - .unwrap() - .map(Result::unwrap) - .inspect(|p| println!("found: {:?}", p)) - .count(); - - assert_eq!(no_of_log_files, NO_OF_LOG_FILES); - assert_eq!(no_of_gz_files, NO_OF_GZ_FILES); - - info!("Found correct number of log and compressed files"); - } -} diff --git a/agent/support/rust/flexi_logger/tests/test_multi_threaded_dates.rs b/agent/support/rust/flexi_logger/tests/test_multi_threaded_dates.rs deleted file mode 100644 index 344f7e815..000000000 --- a/agent/support/rust/flexi_logger/tests/test_multi_threaded_dates.rs +++ /dev/null @@ -1,158 +0,0 @@ -use chrono::Local; -use flexi_logger::{ - Age, Cleanup, Criterion, DeferredNow, Duplicate, LogSpecification, Logger, Naming, Record, -}; -use glob::glob; -use log::*; -use std::fs::File; -use std::io::{BufRead, BufReader}; -use std::ops::Add; -use std::thread::JoinHandle; -use std::time; - -const NO_OF_THREADS: usize = 5; -const NO_OF_LOGLINES_PER_THREAD: usize = 100_000; - -#[test] -fn multi_threaded() { - // we use a special log line format that starts with a special string so that it is easier to - // verify that all log lines are written correctly - - let start = Local::now(); - let directory = define_directory(); - let mut reconf_handle = Logger::with_str("debug") - .log_to_file() - .format(test_format) - .create_symlink("link_to_mt_log") - .duplicate_to_stderr(Duplicate::Info) - .directory(directory.clone()) - .rotate( - Criterion::Age(Age::Minute), - Naming::Timestamps, - Cleanup::Never, - ) - .start() - .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); - info!( - "create a huge number of log lines with a considerable number of threads, verify the log" - ); - - let worker_handles = start_worker_threads(NO_OF_THREADS); - let new_spec = LogSpecification::parse("trace").unwrap(); - std::thread::Builder::new() - .spawn(move || { - std::thread::sleep(time::Duration::from_millis(1000)); - reconf_handle.set_new_spec(new_spec); - 0 as u8 - }) - .unwrap(); - - wait_for_workers_to_close(worker_handles); - - let delta = Local::now().signed_duration_since(start).num_milliseconds(); - debug!( - "Task executed with {} threads in {}ms.", - NO_OF_THREADS, delta - ); - verify_logs(&directory); -} - -// Starts given number of worker threads and lets each execute `do_work` -fn start_worker_threads(no_of_workers: usize) -> Vec> { - let mut worker_handles: Vec> = Vec::with_capacity(no_of_workers); - trace!("Starting {} worker threads", no_of_workers); - for thread_number in 0..no_of_workers { - trace!("Starting thread {}", thread_number); - worker_handles.push( - std::thread::Builder::new() - .name(thread_number.to_string()) - .spawn(move || { - do_work(thread_number); - 0 as u8 - }) - .unwrap(), - ); - } - trace!("All {} worker threads started.", worker_handles.len()); - worker_handles -} - -fn do_work(thread_number: usize) { - trace!("({}) Thread started working", thread_number); - trace!("ERROR_IF_PRINTED"); - for idx in 0..NO_OF_LOGLINES_PER_THREAD { - debug!("({}) writing out line number {}", thread_number, idx); - } - trace!("MUST_BE_PRINTED"); -} - -fn wait_for_workers_to_close(worker_handles: Vec>) { - for worker_handle in worker_handles { - worker_handle - .join() - .unwrap_or_else(|e| panic!("Joining worker thread failed: {:?}", 
e)); - } - trace!("All worker threads joined."); -} - -fn define_directory() -> String { - format!( - "./log_files/mt_logs/{}", - Local::now().format("%Y-%m-%d_%H-%M-%S") - ) -} - -pub fn test_format( - w: &mut dyn std::io::Write, - now: &mut DeferredNow, - record: &Record, -) -> std::io::Result<()> { - write!( - w, - "XXXXX [{}] T[{:?}] {} [{}:{}] {}", - now.now().format("%Y-%m-%d %H:%M:%S%.6f %:z"), - std::thread::current().name().unwrap_or(""), - record.level(), - record.file().unwrap_or(""), - record.line().unwrap_or(0), - &record.args() - ) -} - -fn verify_logs(directory: &str) { - // read all files - let pattern = String::from(directory).add("/*"); - let globresults = match glob(&pattern) { - Err(e) => panic!( - "Is this ({}) really a directory? Listing failed with {}", - pattern, e - ), - Ok(globresults) => globresults, - }; - let mut no_of_log_files = 0; - let mut line_count = 0_usize; - for globresult in globresults { - let pathbuf = globresult.unwrap_or_else(|e| panic!("Ups - error occured: {}", e)); - let f = File::open(&pathbuf) - .unwrap_or_else(|e| panic!("Cannot open file {:?} due to {}", pathbuf, e)); - no_of_log_files += 1; - let mut reader = BufReader::new(f); - let mut buffer = String::new(); - while reader.read_line(&mut buffer).unwrap() > 0 { - if buffer.starts_with("XXXXX") { - line_count += 1; - } else { - panic!("irregular line in log file {:?}: \"{}\"", pathbuf, buffer); - } - buffer.clear(); - } - } - assert_eq!( - line_count, - NO_OF_THREADS * NO_OF_LOGLINES_PER_THREAD + 3 + NO_OF_THREADS - ); - info!( - "Wrote {} log lines from {} threads into {} files", - line_count, NO_OF_THREADS, no_of_log_files - ); -} diff --git a/agent/support/rust/flexi_logger/tests/test_multi_threaded_numbers.rs b/agent/support/rust/flexi_logger/tests/test_multi_threaded_numbers.rs deleted file mode 100644 index 92aa18491..000000000 --- a/agent/support/rust/flexi_logger/tests/test_multi_threaded_numbers.rs +++ /dev/null @@ -1,158 +0,0 @@ -use chrono::Local; -use flexi_logger::{ - Cleanup, Criterion, DeferredNow, Duplicate, LogSpecification, Logger, Naming, Record, -}; -use glob::glob; -use log::*; -use std::fs::File; -use std::io::{BufRead, BufReader}; -use std::ops::Add; -use std::thread::JoinHandle; -use std::time; - -const NO_OF_THREADS: usize = 5; -const NO_OF_LOGLINES_PER_THREAD: usize = 100_000; -const ROTATE_OVER_SIZE: u64 = 4_000_000; - -#[test] -fn multi_threaded() { - // we use a special log line format that starts with a special string so that it is easier to - // verify that all log lines are written correctly - - let start = Local::now(); - let directory = define_directory(); - let mut reconf_handle = Logger::with_str("debug") - .log_to_file() - .format(test_format) - .duplicate_to_stderr(Duplicate::Info) - .directory(directory.clone()) - .rotate( - Criterion::Size(ROTATE_OVER_SIZE), - Naming::Numbers, - Cleanup::Never, - ) - .start() - .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); - info!( - "create a huge number of log lines with a considerable number of threads, verify the log" - ); - - let worker_handles = start_worker_threads(NO_OF_THREADS); - let new_spec = LogSpecification::parse("trace").unwrap(); - std::thread::Builder::new() - .spawn(move || { - std::thread::sleep(time::Duration::from_millis(1000)); - reconf_handle.set_new_spec(new_spec); - 0 as u8 - }) - .unwrap(); - - wait_for_workers_to_close(worker_handles); - - let delta = Local::now().signed_duration_since(start).num_milliseconds(); - debug!( - "Task executed with {} threads in 
{}ms.", - NO_OF_THREADS, delta - ); - verify_logs(&directory); -} - -// Starts given number of worker threads and lets each execute `do_work` -fn start_worker_threads(no_of_workers: usize) -> Vec> { - let mut worker_handles: Vec> = Vec::with_capacity(no_of_workers); - trace!("Starting {} worker threads", no_of_workers); - for thread_number in 0..no_of_workers { - trace!("Starting thread {}", thread_number); - worker_handles.push( - std::thread::Builder::new() - .name(thread_number.to_string()) - .spawn(move || { - do_work(thread_number); - 0 as u8 - }) - .unwrap(), - ); - } - trace!("All {} worker threads started.", worker_handles.len()); - worker_handles -} - -fn do_work(thread_number: usize) { - trace!("({}) Thread started working", thread_number); - trace!("ERROR_IF_PRINTED"); - for idx in 0..NO_OF_LOGLINES_PER_THREAD { - debug!("({}) writing out line number {}", thread_number, idx); - } - trace!("MUST_BE_PRINTED"); -} - -fn wait_for_workers_to_close(worker_handles: Vec>) { - for worker_handle in worker_handles { - worker_handle - .join() - .unwrap_or_else(|e| panic!("Joining worker thread failed: {:?}", e)); - } - trace!("All worker threads joined."); -} - -fn define_directory() -> String { - format!( - "./log_files/mt_logs/{}", - Local::now().format("%Y-%m-%d_%H-%M-%S") - ) -} - -pub fn test_format( - w: &mut dyn std::io::Write, - now: &mut DeferredNow, - record: &Record, -) -> std::io::Result<()> { - write!( - w, - "XXXXX [{}] T[{:?}] {} [{}:{}] {}", - now.now().format("%Y-%m-%d %H:%M:%S%.6f %:z"), - std::thread::current().name().unwrap_or(""), - record.level(), - record.file().unwrap_or(""), - record.line().unwrap_or(0), - &record.args() - ) -} - -fn verify_logs(directory: &str) { - // read all files - let pattern = String::from(directory).add("/*"); - let globresults = match glob(&pattern) { - Err(e) => panic!( - "Is this ({}) really a directory? 
Listing failed with {}", - pattern, e - ), - Ok(globresults) => globresults, - }; - let mut no_of_log_files = 0; - let mut line_count = 0_usize; - for globresult in globresults { - let pathbuf = globresult.unwrap_or_else(|e| panic!("Ups - error occured: {}", e)); - let f = File::open(&pathbuf) - .unwrap_or_else(|e| panic!("Cannot open file {:?} due to {}", pathbuf, e)); - no_of_log_files += 1; - let mut reader = BufReader::new(f); - let mut buffer = String::new(); - while reader.read_line(&mut buffer).unwrap() > 0 { - if buffer.starts_with("XXXXX") { - line_count += 1; - } else { - panic!("irregular line in log file {:?}: \"{}\"", pathbuf, buffer); - } - buffer.clear(); - } - } - assert_eq!( - line_count, - NO_OF_THREADS * NO_OF_LOGLINES_PER_THREAD + 3 + NO_OF_THREADS - ); - info!( - "Wrote {} log lines from {} threads into {} files", - line_count, NO_OF_THREADS, no_of_log_files - ); -} diff --git a/agent/support/rust/flexi_logger/tests/test_no_logger.rs b/agent/support/rust/flexi_logger/tests/test_no_logger.rs deleted file mode 100644 index bf132995c..000000000 --- a/agent/support/rust/flexi_logger/tests/test_no_logger.rs +++ /dev/null @@ -1,16 +0,0 @@ -use flexi_logger::{LogTarget, Logger}; -use log::*; - -#[test] -fn you_must_not_see_anything() { - Logger::with_str("info") - .log_target(LogTarget::DevNull) - .start() - .unwrap(); - - error!("This is an error message - you must not see it!"); - warn!("This is a warning - you must not see it!"); - info!("This is an info message - you must not see it!"); - debug!("This is a debug message - you must not see it!"); - trace!("This is a trace message - you must not see it!"); -} diff --git a/agent/support/rust/flexi_logger/tests/test_opt_files_dir_dscr.rs b/agent/support/rust/flexi_logger/tests/test_opt_files_dir_dscr.rs deleted file mode 100644 index f04f44e90..000000000 --- a/agent/support/rust/flexi_logger/tests/test_opt_files_dir_dscr.rs +++ /dev/null @@ -1,24 +0,0 @@ -use flexi_logger::{opt_format, Logger}; -use log::*; - -#[test] -fn test_opt_files_dir_dscr() { - let handle = Logger::with_str("info") - .format(opt_format) - .log_to_file() - .directory("log_files") - .discriminant("foo") - .start() - .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); - - error!("This is an error message"); - warn!("This is a warning"); - info!("This is an info message"); - debug!("This is a debug message - you must not see it!"); - trace!("This is a trace message - you must not see it!"); - handle.validate_logs(&[ - ("ERROR", "test_opt_files_dir_dscr", "error"), - ("WARN", "test_opt_files_dir_dscr", "warning"), - ("INFO", "test_opt_files_dir_dscr", "info"), - ]); -} diff --git a/agent/support/rust/flexi_logger/tests/test_opt_files_dir_dscr_rot.rs b/agent/support/rust/flexi_logger/tests/test_opt_files_dir_dscr_rot.rs deleted file mode 100644 index 56fcb2c22..000000000 --- a/agent/support/rust/flexi_logger/tests/test_opt_files_dir_dscr_rot.rs +++ /dev/null @@ -1,41 +0,0 @@ -use flexi_logger::{opt_format, Cleanup, Criterion, Logger, Naming}; -use log::*; - -#[test] -fn test_opt_files_dir_dscr_rot() { - let link_name = "link_to_log".to_string(); - let handle = Logger::with_str("info") - .format(opt_format) - .log_to_file() - .directory("log_files") - .discriminant("foo".to_string()) - .rotate(Criterion::Size(2000), Naming::Numbers, Cleanup::Never) - .create_symlink(link_name.clone()) - .start() - .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); - - error!("This is an error message"); - warn!("This is a warning"); - 
info!("This is an info message"); - debug!("This is a debug message - you must not see it!"); - trace!("This is a trace message - you must not see it!"); - handle.validate_logs(&[ - ("ERROR", "test_opt_files_dir_dscr_rot", "error"), - ("WARN", "test_opt_files_dir_dscr_rot", "warning"), - ("INFO", "test_opt_files_dir_dscr_rot", "info"), - ]); - self::platform::check_link(&link_name); -} - -mod platform { - #[cfg(target_os = "linux")] - pub fn check_link(link_name: &str) { - match std::fs::symlink_metadata(link_name) { - Err(e) => panic!("error with symlink: {}", e), - Ok(metadata) => assert!(metadata.file_type().is_symlink(), "not a symlink"), - } - } - - #[cfg(not(target_os = "linux"))] - pub fn check_link(_: &str) {} -} diff --git a/agent/support/rust/flexi_logger/tests/test_parse_errors.rs b/agent/support/rust/flexi_logger/tests/test_parse_errors.rs deleted file mode 100644 index 87bc6b85e..000000000 --- a/agent/support/rust/flexi_logger/tests/test_parse_errors.rs +++ /dev/null @@ -1,80 +0,0 @@ -use flexi_logger::{FlexiLoggerError, LogSpecification, Logger}; -use log::*; - -#[test] -fn parse_errors_logspec() { - match LogSpecification::parse("info, foo=bar, fuzz=debug") - .err() - .unwrap() - { - FlexiLoggerError::Parse(_, logspec) => { - assert_eq!( - logspec.module_filters(), - LogSpecification::parse("info, fuzz=debug") - .unwrap() - .module_filters() - ); - #[cfg(feature = "textfilter")] - assert!(logspec.text_filter().is_none()); - } - _ => panic!("Wrong error from parsing (1)"), - } - - match LogSpecification::parse("info, ene mene dubbedene") - .err() - .unwrap() - { - FlexiLoggerError::Parse(_, logspec) => { - assert_eq!( - logspec.module_filters(), - LogSpecification::parse("info").unwrap().module_filters() - ); - #[cfg(feature = "textfilter")] - assert!(logspec.text_filter().is_none()); - } - _ => panic!("Wrong error from parsing (2)"), - } - - match LogSpecification::parse("ene mene dubbedene").err().unwrap() { - FlexiLoggerError::Parse(_, logspec) => { - assert_eq!( - logspec.module_filters(), - LogSpecification::off().module_filters() - ); - #[cfg(feature = "textfilter")] - assert!(logspec.text_filter().is_none()); - } - _ => panic!("Wrong error from parsing (3)"), - } - - match LogSpecification::parse("INFO, ene / mene / dubbedene") - .err() - .unwrap() - { - FlexiLoggerError::Parse(_, logspec) => { - assert_eq!( - logspec.module_filters(), - LogSpecification::off().module_filters() - ); - #[cfg(feature = "textfilter")] - assert!(logspec.text_filter().is_none()); - } - _ => panic!("Wrong error from parsing (4)"), - } -} - -#[test] -fn parse_errors_logger() { - let result = Logger::with_str("info, foo=baz").check_parser_error(); - assert!(result.is_err()); - let error = result.err().unwrap(); - println!("err: {}", error); - - Logger::with_str("info, foo=debug") - .check_parser_error() - .unwrap() - .start() - .unwrap(); - info!("logging works"); - info!("logging works"); -} diff --git a/agent/support/rust/flexi_logger/tests/test_reconfigure_methods.rs b/agent/support/rust/flexi_logger/tests/test_reconfigure_methods.rs deleted file mode 100644 index 433948d7f..000000000 --- a/agent/support/rust/flexi_logger/tests/test_reconfigure_methods.rs +++ /dev/null @@ -1,113 +0,0 @@ -use flexi_logger::{Logger, ReconfigurationHandle}; -use log::*; - -#[test] -fn test_reconfigure_methods() { - let mut log_handle = Logger::with_str("info") - .log_to_file() - .start() - .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); - - test_parse_new_spec(&mut log_handle); - 
test_push_new_spec(&mut log_handle); - validate_logs(&mut log_handle); -} - -fn test_parse_new_spec(log_handle: &mut ReconfigurationHandle) { - error!("1-error message"); - warn!("1-warning"); - info!("1-info message"); - debug!("1-debug message - you must not see it!"); - trace!("1-trace message - you must not see it!"); - - log_handle.parse_new_spec("error"); - error!("1-error message"); - warn!("1-warning - you must not see it!"); - info!("1-info message - you must not see it!"); - debug!("1-debug message - you must not see it!"); - trace!("1-trace message - you must not see it!"); - - log_handle.parse_new_spec("trace"); - error!("1-error message"); - warn!("1-warning"); - info!("1-info message"); - debug!("1-debug message"); - trace!("1-trace message"); - - log_handle.parse_new_spec("info"); -} - -#[allow(clippy::cognitive_complexity)] -fn test_push_new_spec(log_handle: &mut ReconfigurationHandle) { - error!("2-error message"); - warn!("2-warning"); - info!("2-info message"); - debug!("2-debug message - you must not see it!"); - trace!("2-trace message - you must not see it!"); - - log_handle.parse_and_push_temp_spec("error"); - error!("2-error message"); - warn!("2-warning - you must not see it!"); - info!("2-info message - you must not see it!"); - debug!("2-debug message - you must not see it!"); - trace!("2-trace message - you must not see it!"); - - log_handle.parse_and_push_temp_spec("trace"); - error!("2-error message"); - warn!("2-warning"); - info!("2-info message"); - debug!("2-debug message"); - trace!("2-trace message"); - - log_handle.pop_temp_spec(); // we should be back on error - error!("2-error message"); - warn!("2-warning - you must not see it!"); - info!("2-info message - you must not see it!"); - debug!("2-debug message - you must not see it!"); - trace!("2-trace message - you must not see it!"); - - log_handle.pop_temp_spec(); // we should be back on info - - error!("2-error message"); - warn!("2-warning"); - info!("2-info message"); - debug!("2-debug message - you must not see it!"); - trace!("2-trace message - you must not see it!"); - - log_handle.pop_temp_spec(); // should be a no-op -} - -#[allow(clippy::cognitive_complexity)] -fn validate_logs(log_handle: &mut ReconfigurationHandle) { - log_handle.validate_logs(&[ - ("ERROR", "test_reconfigure_methods", "1-error"), - ("WARN", "test_reconfigure_methods", "1-warning"), - ("INFO", "test_reconfigure_methods", "1-info"), - // - ("ERROR", "test_reconfigure_methods", "1-error"), - // - ("ERROR", "test_reconfigure_methods", "1-error"), - ("WARN", "test_reconfigure_methods", "1-warning"), - ("INFO", "test_reconfigure_methods", "1-info"), - ("DEBUG", "test_reconfigure_methods", "1-debug"), - ("TRACE", "test_reconfigure_methods", "1-trace"), - // ----- - ("ERROR", "test_reconfigure_methods", "2-error"), - ("WARN", "test_reconfigure_methods", "2-warning"), - ("INFO", "test_reconfigure_methods", "2-info"), - // - ("ERROR", "test_reconfigure_methods", "2-error"), - // - ("ERROR", "test_reconfigure_methods", "2-error"), - ("WARN", "test_reconfigure_methods", "2-warning"), - ("INFO", "test_reconfigure_methods", "2-info"), - ("DEBUG", "test_reconfigure_methods", "2-debug"), - ("TRACE", "test_reconfigure_methods", "2-trace"), - // - ("ERROR", "test_reconfigure_methods", "2-error"), - // - ("ERROR", "test_reconfigure_methods", "2-error"), - ("WARN", "test_reconfigure_methods", "2-warning"), - ("INFO", "test_reconfigure_methods", "2-info"), - ]); -} diff --git a/agent/support/rust/flexi_logger/tests/test_recursion.rs 
b/agent/support/rust/flexi_logger/tests/test_recursion.rs deleted file mode 100644 index 4554bd030..000000000 --- a/agent/support/rust/flexi_logger/tests/test_recursion.rs +++ /dev/null @@ -1,32 +0,0 @@ -use flexi_logger::{detailed_format, Logger}; -use log::*; - -#[test] -fn test_recursion() { - Logger::with_str("info") - .format(detailed_format) - .log_to_file() - // .duplicate_to_stderr(Duplicate::All) - // .duplicate_to_stdout(Duplicate::All) - .start() - .unwrap_or_else(|e| panic!("Logger initialization failed because: {}", e)); - - let dummy = Dummy(); - - for _ in 0..10 { - error!("This is an error message for {}", dummy); - warn!("This is a warning for {}", dummy); - info!("This is an info message for {}", dummy); - debug!("This is a debug message for {}", dummy); - trace!("This is a trace message for {}", dummy); - } -} - -struct Dummy(); -impl std::fmt::Display for Dummy { - fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { - info!("Here comes the inner message :-| "); - f.write_str("Dummy!!")?; - Ok(()) - } -} diff --git a/agent/support/rust/flexi_logger/tests/test_specfile.rs b/agent/support/rust/flexi_logger/tests/test_specfile.rs deleted file mode 100644 index 7e64b9f98..000000000 --- a/agent/support/rust/flexi_logger/tests/test_specfile.rs +++ /dev/null @@ -1,147 +0,0 @@ -#[cfg(feature = "specfile_without_notification")] -mod a { - use flexi_logger::{detailed_format, Logger}; - use log::*; - use std::io::{BufRead, Write}; - use std::ops::Add; - - const WAIT_MILLIS: u64 = 2000; - - /// Test of the specfile feature, using the file ./tests/logspec.toml. - #[test] - fn test_specfile() { - let specfile = "test_spec/test_specfile_logspec.toml"; - - std::fs::remove_file(specfile).ok(); - assert!(!std::path::Path::new(specfile).exists()); - - Logger::with_str("info") - .format(detailed_format) - .log_to_file() - .suppress_timestamp() - .start_with_specfile(specfile) - .unwrap_or_else(|e| panic!("Logger initialization failed because: {}", e)); - - error!("This is an error-0"); - warn!("This is a warning-0"); - info!("This is an info-0"); - debug!("This is a debug-0"); - trace!("This is a trace-0"); - - eprintln!( - "[{}] ===== behave like many editors: rename and recreate, as warn", - chrono::Local::now() - ); - { - std::fs::rename(&specfile, "old_logspec.toml").unwrap(); - let mut file = std::fs::OpenOptions::new() - .create(true) - .write(true) - .open(specfile) - .unwrap(); - file.write_all( - b" - global_level = 'warn' - [modules] - ", - ) - .unwrap(); - } - - std::thread::sleep(std::time::Duration::from_millis(WAIT_MILLIS)); - - error!("This is an error-1"); - warn!("This is a warning-1"); - info!("This is an info-1"); - debug!("This is a debug-1"); - trace!("This is a trace-1"); - - eprintln!( - "[{}] ===== truncate and rewrite, update to error", - chrono::Local::now() - ); - { - let mut file = std::fs::OpenOptions::new() - .truncate(true) - .write(true) - .open(specfile) - .unwrap(); - file.write_all( - b" - global_level = 'error' - [modules] - ", - ) - .unwrap(); - } - - std::thread::sleep(std::time::Duration::from_millis(WAIT_MILLIS)); - - error!("This is an error-2"); - warn!("This is a warning-2"); - info!("This is an info-2"); - debug!("This is a debug-2"); - trace!("This is a trace-2"); - - let logfile = std::path::Path::new(&std::env::args().nth(0).unwrap()) - .file_stem() - .unwrap() - .to_string_lossy() - .to_string() - .add(".log"); - - if cfg!(feature = "specfile") { - eprintln!("feature is: specfile!"); - validate_logs( - &logfile, - &[ - 
("ERROR", "test_specfile::a", "error-0"), - ("WARN", "test_specfile::a", "warning-0"), - ("INFO", "test_specfile::a", "info-0"), - ("ERROR", "test_specfile::a", "error-1"), - ("WARN", "test_specfile::a", "warning-1"), - ("ERROR", "test_specfile::a", "error-2"), - ], - ); - } else { - eprintln!("feature is: specfile_without_notification!"); - validate_logs( - &logfile, - &[ - ("ERROR", "test_specfile::a", "error-0"), - ("WARN", "test_specfile::a", "warning-0"), - ("INFO", "test_specfile::a", "info-0"), - ("ERROR", "test_specfile::a", "error-1"), - ("WARN", "test_specfile::a", "warning-1"), - ("INFO", "test_specfile::a", "info-1"), - ("ERROR", "test_specfile::a", "error-2"), - ("WARN", "test_specfile::a", "warning-2"), - ("INFO", "test_specfile::a", "info-2"), - ], - ); - } - } - - fn validate_logs(logfile: &str, expected: &[(&'static str, &'static str, &'static str)]) { - println!("validating log file = {}", logfile); - - let f = std::fs::File::open(logfile).unwrap(); - let mut reader = std::io::BufReader::new(f); - - let mut buf = String::new(); - for tuple in expected { - buf.clear(); - reader.read_line(&mut buf).unwrap(); - assert!(buf.contains(&tuple.0), "Did not find tuple.0 = {}", tuple.0); - assert!(buf.contains(&tuple.1), "Did not find tuple.1 = {}", tuple.1); - assert!(buf.contains(&tuple.2), "Did not find tuple.2 = {}", tuple.2); - } - buf.clear(); - reader.read_line(&mut buf).unwrap(); - assert!( - buf.is_empty(), - "Found more log lines than expected: {} ", - buf - ); - } -} diff --git a/agent/support/rust/flexi_logger/tests/test_syslog.rs b/agent/support/rust/flexi_logger/tests/test_syslog.rs deleted file mode 100644 index 84a625aca..000000000 --- a/agent/support/rust/flexi_logger/tests/test_syslog.rs +++ /dev/null @@ -1,69 +0,0 @@ -#[cfg(feature = "syslog_writer")] -mod test { - use flexi_logger::writers::{SyslogConnector, SyslogFacility, SyslogWriter}; - use flexi_logger::{detailed_format, Logger}; - use log::*; - - #[macro_use] - mod macros { - #[macro_export] - macro_rules! 
syslog_error { - ($($arg:tt)*) => ( - error!(target: "{Syslog,_Default}", $($arg)*); - ) - } - } - - #[test] - fn test_syslog() -> std::io::Result<()> { - let syslog_connector = SyslogConnector::try_udp("127.0.0.1:5555", "127.0.0.1:514")?; - // let syslog_connector = SyslogConnector::try_tcp("localhost:601")?; - - let boxed_syslog_writer = SyslogWriter::try_new( - SyslogFacility::LocalUse0, - None, - log::LevelFilter::Trace, - "JustForTest".to_owned(), - syslog_connector, - ) - .unwrap(); - let log_handle = Logger::with_str("info") - .format(detailed_format) - .print_message() - .log_to_file() - .add_writer("Syslog", boxed_syslog_writer) - .start() - .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); - - // Explicitly send logs to different loggers - error!(target : "{Syslog}", "This is a syslog-relevant error message"); - warn!(target : "{Syslog}", "This is a syslog-relevant error message"); - info!(target : "{Syslog}", "This is a syslog-relevant error message"); - debug!(target : "{Syslog}", "This is a syslog-relevant error message"); - trace!(target : "{Syslog}", "This is a syslog-relevant error message"); - - error!(target : "{Syslog,_Default}", "This is a syslog- and log-relevant error message"); - - // Nicer: use explicit macros - syslog_error!("This is another syslog- and log-relevant error message"); - warn!("This is a warning message"); - debug!("This is a debug message - you must not see it!"); - trace!("This is a trace message - you must not see it!"); - - // Verification: - log_handle.validate_logs(&[ - ( - "ERROR", - "syslog", - "a syslog- and log-relevant error message", - ), - ( - "ERROR", - "syslog", - "another syslog- and log-relevant error message", - ), - ("WARN", "syslog", "This is a warning message"), - ]); - Ok(()) - } -} diff --git a/agent/support/rust/flexi_logger/tests/test_textfilter.rs b/agent/support/rust/flexi_logger/tests/test_textfilter.rs deleted file mode 100644 index 11a66a1ae..000000000 --- a/agent/support/rust/flexi_logger/tests/test_textfilter.rs +++ /dev/null @@ -1,54 +0,0 @@ -#[test] -#[cfg(feature = "textfilter")] -fn test_textfilter() { - use flexi_logger::{default_format, LogSpecification, Logger}; - use log::*; - - use std::env; - use std::fs::File; - use std::io::{BufRead, BufReader}; - use std::path::Path; - - let logspec = LogSpecification::parse("info/Hello").unwrap(); - Logger::with(logspec) - .format(default_format) - .print_message() - .log_to_file() - .suppress_timestamp() - .start() - .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); - - error!("This is an error message"); - warn!("This is a warning"); - info!("This is an info message"); - debug!("This is a debug message - you must not see it!"); - trace!("This is a trace message - you must not see it!"); - - error!("Hello, this is an error message"); - warn!("This is a warning! Hello!!"); - info!("Hello, this is an info message! 
Hello"); - debug!("Hello, this is a debug message - you must not see it!"); - trace!("Hello, this is a trace message - you must not see it!"); - - let arg0 = env::args().next().unwrap(); - let progname = Path::new(&arg0).file_stem().unwrap().to_string_lossy(); - let filename = format!("{}.log", &progname); - - let f = File::open(&filename) - .unwrap_or_else(|e| panic!("Cannot open file {:?} due to {}", filename, e)); - let mut reader = BufReader::new(f); - let mut buffer = String::new(); - let mut count = 0; - while reader.read_line(&mut buffer).unwrap() > 0 { - if buffer.find("Hello").is_none() { - panic!( - "line in log file without Hello {:?}: \"{}\"", - filename, buffer - ); - } else { - count += 1; - } - buffer.clear(); - } - assert_eq!(count, 3); -} diff --git a/agent/support/rust/flexi_logger/tests/test_windows_line_ending.rs b/agent/support/rust/flexi_logger/tests/test_windows_line_ending.rs deleted file mode 100644 index e74a176a8..000000000 --- a/agent/support/rust/flexi_logger/tests/test_windows_line_ending.rs +++ /dev/null @@ -1,55 +0,0 @@ -use flexi_logger::{detailed_format, Logger, ReconfigurationHandle}; -use log::*; - -#[test] -fn test_mods() { - let handle: ReconfigurationHandle = Logger::with_env_or_str( - "info, test_windows_line_ending::mymod1=debug, test_windows_line_ending::mymod2=error", - ) - .format(detailed_format) - .log_to_file() - .use_windows_line_ending() - .start() - .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); - - error!("This is an error message"); - warn!("This is a warning"); - info!("This is an info message"); - debug!("This is a debug message - you must not see it!"); - trace!("This is a trace message - you must not see it!"); - - mymod1::test_traces(); - mymod2::test_traces(); - - handle.validate_logs(&[ - ("ERROR", "test_windows_line_ending", "error"), - ("WARN", "test_windows_line_ending", "warning"), - ("INFO", "test_windows_line_ending", "info"), - ("ERROR", "test_windows_line_ending", "error"), - ("WARN", "test_windows_line_ending", "warning"), - ("INFO", "test_windows_line_ending", "info"), - ("DEBUG", "test_windows_line_ending", "debug"), - ("ERROR", "test_windows_line_ending", "error"), - ]); -} - -mod mymod1 { - use log::*; - pub fn test_traces() { - error!("This is an error message"); - warn!("This is a warning"); - info!("This is an info message"); - debug!("This is a debug message"); - trace!("This is a trace message - you must not see it!"); - } -} -mod mymod2 { - use log::*; - pub fn test_traces() { - error!("This is an error message"); - warn!("This is a warning - you must not see it!"); - info!("This is an info message - you must not see it!"); - debug!("This is a debug message - you must not see it!"); - trace!("This is a trace message - you must not see it!"); - } -} diff --git a/agent/support/rust/flexi_logger/tests/version_numbers.rs b/agent/support/rust/flexi_logger/tests/version_numbers.rs deleted file mode 100644 index 0f903a63c..000000000 --- a/agent/support/rust/flexi_logger/tests/version_numbers.rs +++ /dev/null @@ -1,6 +0,0 @@ -use version_sync::assert_markdown_deps_updated; - -#[test] -fn test_readme_deps() { - assert_markdown_deps_updated!("README.md"); -} From 20372d4596b451bd49fa5a0a8b9becea0b955848 Mon Sep 17 00:00:00 2001 From: "zhanglei.sec" Date: Thu, 24 Dec 2020 21:18:02 +0800 Subject: [PATCH 6/8] Update md. 
---
 .gitmodules                     | 3 +++
 agent/support/rust/flexi_logger | 1 +
 2 files changed, 4 insertions(+)
 create mode 100644 .gitmodules
 create mode 160000 agent/support/rust/flexi_logger

diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 000000000..8bf9906a7
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,3 @@
+[submodule "agent/support/rust/flexi_logger"]
+	path = agent/support/rust/flexi_logger
+	url = git@github.com:bytedance/flexi_logger.git
diff --git a/agent/support/rust/flexi_logger b/agent/support/rust/flexi_logger
new file mode 160000
index 000000000..6d2494a00
--- /dev/null
+++ b/agent/support/rust/flexi_logger
@@ -0,0 +1 @@
+Subproject commit 6d2494a001034174f156f07a7b84727b5cb99f5e

From 20372d4596b451bd49fa5a0a8b9becea0b955848 Mon Sep 17 00:00:00 2001
From: "zhanglei.sec"
Date: Thu, 24 Dec 2020 21:18:11 +0800
Subject: [PATCH 7/8] Update md.

---
 {agent/.github => .github}/ISSUE_TEMPLATE/bug_report.md      | 0
 {agent/.github => .github}/ISSUE_TEMPLATE/feature_request.md | 0
 {agent/.github => .github}/PULL_REQUEST_TEMPLATE.md          | 0
 agent/CODE_OF_CONDUCT.md => CODE_OF_CONDUCT.md               | 0
 4 files changed, 0 insertions(+), 0 deletions(-)
 rename {agent/.github => .github}/ISSUE_TEMPLATE/bug_report.md (100%)
 rename {agent/.github => .github}/ISSUE_TEMPLATE/feature_request.md (100%)
 rename {agent/.github => .github}/PULL_REQUEST_TEMPLATE.md (100%)
 rename agent/CODE_OF_CONDUCT.md => CODE_OF_CONDUCT.md (100%)

diff --git a/agent/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
similarity index 100%
rename from agent/.github/ISSUE_TEMPLATE/bug_report.md
rename to .github/ISSUE_TEMPLATE/bug_report.md
diff --git a/agent/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
similarity index 100%
rename from agent/.github/ISSUE_TEMPLATE/feature_request.md
rename to .github/ISSUE_TEMPLATE/feature_request.md
diff --git a/agent/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
similarity index 100%
rename from agent/.github/PULL_REQUEST_TEMPLATE.md
rename to .github/PULL_REQUEST_TEMPLATE.md
diff --git a/agent/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
similarity index 100%
rename from agent/CODE_OF_CONDUCT.md
rename to CODE_OF_CONDUCT.md

From 0ffa426aa62dc74763a36bb802898ebab61dd19b Mon Sep 17 00:00:00 2001
From: "zhanglei.sec"
Date: Thu, 24 Dec 2020 21:21:35 +0800
Subject: [PATCH 8/8] Add submodule --recursive

---
 agent/README-zh_CN.md | 2 +-
 agent/README.md       | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/agent/README-zh_CN.md b/agent/README-zh_CN.md
index 52ca54db9..95432e4e1 100644
--- a/agent/README-zh_CN.md
+++ b/agent/README-zh_CN.md
@@ -24,7 +24,7 @@ AgentSmith-HIDS Agent基于Golang构建，但其他功能插件可以用不同
 * Golang 1.15(推荐)
 ## 快速开始
 ```
-git clone https://github.com/bytedance/AgentSmith-HIDS
+git clone --recursive https://github.com/bytedance/AgentSmith-HIDS
 cd AgentSmith-HIDS/agent
 go build
 ```
diff --git a/agent/README.md b/agent/README.md
index 03eaf2b93..9263728cd 100644
--- a/agent/README.md
+++ b/agent/README.md
@@ -21,7 +21,7 @@ For maximum functionality, you should probably run with root privileges.
 * Golang 1.15(Recommended)
 ## To Start Using AgentSmith-HIDS Agent
 ```
-git clone https://github.com/bytedance/AgentSmith-HIDS
+git clone --recursive https://github.com/bytedance/AgentSmith-HIDS
 cd AgentSmith-HIDS/agent
 go build
 ```
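The `--recursive` flag added in PATCH 8/8 exists to pull in the `agent/support/rust/flexi_logger` submodule declared in `.gitmodules` by PATCH 6/8. For a working copy that was already cloned without that flag, a minimal sketch of fetching the submodule after the fact (standard git commands, run from the repository root; note the URL configured in `.gitmodules` is an SSH remote, so SSH access to GitHub is assumed):

```
git submodule update --init --recursive
cd agent
go build
```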