From 1ceb60a802b570034d342ef6c0ea6c247619ffe1 Mon Sep 17 00:00:00 2001 From: Lorenzo Mangani Date: Mon, 30 Jan 2017 09:51:43 +0100 Subject: [PATCH] fork sync --- Readme.markdown | 338 ++++++++ bin/node-logstash-agent | 240 ++++++ build_zmq.sh | 29 + changelog.md | 59 ++ dists/README-RedHat.txt | 89 +++ dists/rpmbuild/.rpmmacros | 3 + dists/rpmbuild/SOURCES/logstash | 100 +++ dists/rpmbuild/SOURCES/run_node.sh | 10 + dists/rpmbuild/SPECS/node-logstash.spec | 102 +++ docs/archi.jpg | Bin 0 -> 41471 bytes docs/archi.pptx | Bin 0 -> 76973 bytes docs/cache.md | 12 + docs/common_params.md | 43 + docs/elastic_mapping.md | 20 + docs/filters/bunyan.md | 18 + docs/filters/compute_date_field.md | 25 + docs/filters/compute_field.md | 37 + docs/filters/eval.md | 37 + docs/filters/geoip.md | 65 ++ docs/filters/grep.md | 52 ++ docs/filters/grok.md | 68 ++ docs/filters/http_status_classifier.md | 23 + docs/filters/json_fields.md | 22 + docs/filters/multiline.md | 24 + docs/filters/mutate_replace.md | 27 + docs/filters/regex.md | 62 ++ docs/filters/remove_field_when_equal.md | 24 + docs/filters/rename.md | 25 + docs/filters/reverse_dns.md | 27 + docs/filters/split.md | 24 + docs/filters/tags_fields.md | 38 + docs/filters/truncate.md | 23 + docs/inputs/amqp.md | 50 ++ docs/inputs/file.md | 70 ++ docs/inputs/gae.md | 127 +++ docs/inputs/http.md | 28 + docs/inputs/redis.md | 36 + docs/inputs/sqs.md | 28 + docs/inputs/syslog.md | 39 + docs/inputs/tags_fields.md | 11 + docs/inputs/tcp_tls.md | 46 ++ docs/inputs/unserializers.md | 13 + docs/inputs/ws.md | 45 ++ docs/inputs/zeromq.md | 22 + docs/interpolation.md | 10 + docs/outputs/amqp.md | 53 ++ docs/outputs/elasticsearch.md | 65 ++ docs/outputs/file.md | 59 ++ docs/outputs/gelf.md | 28 + docs/outputs/hep.md | 43 + docs/outputs/http_post.md | 35 + docs/outputs/http_proxy.md | 13 + docs/outputs/logio.md | 28 + docs/outputs/lumberjack.md | 29 + docs/outputs/redis.md | 36 + docs/outputs/serializers.md | 11 + docs/outputs/sqs.md | 27 + 
docs/outputs/statsd.md | 36 + docs/outputs/tcp_tls.md | 43 + docs/outputs/ws.md | 46 ++ docs/outputs/zeromq.md | 45 ++ docs/ssl.md | 17 + generate_parser.sh | 1 + jshint.sh | 3 + lib/agent.js | 274 +++++++ lib/filters/filter_add_host.js | 29 + lib/filters/filter_add_timestamp.js | 22 + lib/filters/filter_add_version.js | 22 + lib/filters/filter_app_janus.js | 91 +++ lib/filters/filter_bunyan.js | 68 ++ lib/filters/filter_compute_date_field.js | 32 + lib/filters/filter_compute_field.js | 32 + lib/filters/filter_eval.js | 44 ++ lib/filters/filter_geoip.js | 148 ++++ lib/filters/filter_grep.js | 36 + lib/filters/filter_grok.js | 171 ++++ lib/filters/filter_http_status_classifier.js | 62 ++ lib/filters/filter_json_fields.js | 28 + lib/filters/filter_multiline.js | 37 + lib/filters/filter_mutate_replace.js | 34 + lib/filters/filter_regex.js | 74 ++ lib/filters/filter_remove_field_when_equal.js | 31 + lib/filters/filter_rename.js | 32 + lib/filters/filter_reverse_dns.js | 59 ++ lib/filters/filter_split.js | 49 ++ lib/filters/filter_syslog_pri.js | 72 ++ lib/filters/filter_truncate.js | 30 + lib/inputs/input_amqp.js | 82 ++ lib/inputs/input_file.js | 141 ++++ lib/inputs/input_freeswitch.js | 49 ++ lib/inputs/input_gae.js | 96 +++ lib/inputs/input_http.js | 79 ++ lib/inputs/input_redis.js | 120 +++ lib/inputs/input_sqs.js | 95 +++ lib/inputs/input_stdin.js | 38 + lib/inputs/input_tcp.js | 101 +++ lib/inputs/input_udp.js | 53 ++ lib/inputs/input_unix.js | 53 ++ lib/inputs/input_ws.js | 81 ++ lib/inputs/input_zeromq.js | 54 ++ lib/lib/amqp_driver.js | 88 +++ lib/lib/async_helper.js | 23 + lib/lib/base_component.js | 382 +++++++++ lib/lib/base_filter.js | 63 ++ lib/lib/base_filter_buffer.js | 66 ++ lib/lib/base_input.js | 92 +++ lib/lib/base_output.js | 65 ++ lib/lib/cache_helper.js | 43 + lib/lib/condition_evaluator.js | 129 +++ lib/lib/config_mapper.js | 70 ++ lib/lib/directory_detector.js | 99 +++ lib/lib/directory_watcher.js | 89 +++ lib/lib/elastic_search_helper.js | 15 
+ lib/lib/error_buffer.js | 57 ++ lib/lib/file_filter.js | 14 + lib/lib/file_loader.js | 71 ++ lib/lib/monitor_file.js | 373 +++++++++ lib/lib/patterns_loader.js | 65 ++ lib/lib/redis_connection_manager.js | 48 ++ lib/lib/regex_helper.js | 53 ++ lib/lib/sig_listener.js | 14 + lib/lib/sqs_wrapper.js | 22 + lib/lib/ssl_helper.js | 63 ++ lib/lib/tail_file.js | 88 +++ lib/lib/url_parser.js | 37 + lib/logstash_config.jison | 165 ++++ lib/logstash_config.js | 729 +++++++++++++++++ lib/outputs/abstract_http.js | 89 +++ lib/outputs/abstract_tcp.js | 105 +++ lib/outputs/abstract_udp.js | 65 ++ lib/outputs/abstract_zeromq.js | 79 ++ lib/outputs/output_amqp.js | 77 ++ lib/outputs/output_elasticsearch.js | 89 +++ lib/outputs/output_elasticsearch_zeromq.js | 31 + lib/outputs/output_file.js | 192 +++++ lib/outputs/output_gelf.js | 54 ++ lib/outputs/output_hep.js | 68 ++ lib/outputs/output_http_post.js | 44 ++ lib/outputs/output_logio.js | 29 + lib/outputs/output_lumberjack.js | 62 ++ lib/outputs/output_redis.js | 96 +++ lib/outputs/output_sqs.js | 50 ++ lib/outputs/output_statsd.js | 67 ++ lib/outputs/output_stdout.js | 25 + lib/outputs/output_tcp.js | 28 + lib/outputs/output_udp.js | 24 + lib/outputs/output_unix.js | 30 + lib/outputs/output_ws.js | 102 +++ lib/outputs/output_zeromq.js | 24 + lib/patterns/grok/firewalls | 60 ++ lib/patterns/grok/grok-patterns | 94 +++ lib/patterns/grok/haproxy | 37 + lib/patterns/grok/java | 7 + lib/patterns/grok/junos | 9 + lib/patterns/grok/linux-syslog | 16 + lib/patterns/grok/mcollective | 1 + lib/patterns/grok/mcollective-patterns | 4 + lib/patterns/grok/mongodb | 4 + lib/patterns/grok/nagios | 108 +++ lib/patterns/grok/postgresql | 3 + lib/patterns/grok/redis | 3 + lib/patterns/grok/ruby | 2 + lib/patterns/http_combined | 6 + lib/patterns/http_vhost_combined | 6 + lib/patterns/syslog | 7 + lib/patterns/syslog_no_prio | 6 + license.txt | 13 + package.json | 55 ++ packager/Procfile | 1 + packager/Procfile_debian8 | 1 + packager/postinst | 18 
+ remote_test.sh | 34 + test-runner.sh | 31 + .../500_real_life/copytruncate_logrotate.conf | 5 + test/500_real_life/run.js | 22 + test/500_real_life/std_logrotate.conf | 7 + test/file_loader_test/comment | 5 + test/file_loader_test/empty | 0 test/file_loader_test/empty_spaces | 1 + test/file_loader_test/multiple | 7 + test/file_loader_test/only_one_cr | 1 + test/file_loader_test/simple | 1 + test/filter_helper.js | 60 ++ test/grok/extra | 4 + test/grok/wrong | 1 + test/integration_helper.js | 36 + test/maxmind_db.sh | 5 + test/mock_helper.js | 32 + test/not_readable_helper.js | 12 + test/parser/special_chars_new_line | 6 + test/parser/special_chars_quotes | 7 + .../parser/special_chars_quotes_single_quotes | 7 + test/parser/special_chars_space | 6 + test/parser/special_chars_utf8 | 6 + test/redis_driver.js | 28 + test/ssl/client.crt | 60 ++ test/ssl/client.csr | 11 + test/ssl/client.key | 15 + test/ssl/index.txt | 2 + test/ssl/index.txt.attr | 1 + test/ssl/index.txt.attr.old | 1 + test/ssl/index.txt.old | 1 + test/ssl/newcerts/00.pem | 60 ++ test/ssl/newcerts/01.pem | 60 ++ test/ssl/openssl.cnf | 313 ++++++++ test/ssl/root-ca.crt | 19 + test/ssl/root-ca.key | 15 + test/ssl/serial | 1 + test/ssl/serial.old | 1 + test/ssl/server.crt | 60 ++ test/ssl/server.csr | 11 + test/ssl/server.key | 15 + test/test_100_file_loader.js | 113 +++ test/test_101_url_parser.js | 128 +++ test/test_102_error_buffer.js | 41 + test/test_103_file_filter.js | 58 ++ test/test_104_logstash_parser.js | 579 ++++++++++++++ test/test_105_condition_evaluator.js | 235 ++++++ test/test_200_filter_add_timestamp.js | 14 + test/test_201_filter_add_host.js | 14 + test/test_202_filter_regex.js | 345 ++++++++ test/test_203_filter_grep.js | 62 ++ test/test_204_filter_mutate_replace.js | 38 + test/test_205_filter_compute_field.js | 134 ++++ test/test_206_message_filtering.js | 213 +++++ test/test_207_filter_split.js | 77 ++ test/test_208_filter_multiline.js | 131 ++++ test/test_209_filter_syslog_pri.js | 
61 ++ test/test_210_compute_date_field.js | 17 + test/test_211_reverse_dns.js | 178 +++++ test/test_212_filter_json_fields.js | 151 ++++ test/test_213_filter_add_version.js | 13 + test/test_214_filter_geoip.js | 270 +++++++ test/test_215_filter_eval.js | 122 +++ test/test_216_filter_bunyan.js | 21 + .../test_217_filter_http_status_classifier.js | 156 ++++ test/test_218_filter_grok.js | 163 ++++ test/test_219_filter_truncate.js | 20 + test/test_220_remove_field_when_equal.js | 29 + test/test_221_filter_rename.js | 20 + test/test_300_monitor_file.js | 738 ++++++++++++++++++ test/test_301_tail_file.js | 229 ++++++ test/test_302_directory_detector.js | 267 +++++++ test/test_303_output_file.js | 214 +++++ test/test_400_error.js | 174 +++++ test/test_401_file2file.js | 337 ++++++++ test/test_402_json_logstash.js | 74 ++ test/test_403_elasticsearch.js | 290 +++++++ test/test_404_http_post.js | 136 ++++ test/test_405_net2file.js | 115 +++ test/test_406_logio.js | 57 ++ test/test_407_statsd.js | 128 +++ test/test_408_gelf.js | 79 ++ test/test_409_multiline.js | 44 ++ test/test_410_file2x2x2file.js | 210 +++++ test/test_411_http_proxy.js | 146 ++++ test/test_412_tls_appendcert.js | 122 +++ test/test_413_closing_inputs.js | 158 ++++ test/test_414_output_zeromq.js | 223 ++++++ test/test_415_output_elasticsearch_zeromq.js | 100 +++ test/test_416_wildcard.js | 168 ++++ ...test_417_filter_no_params_and_only_type.js | 41 + test/test_418_good_tcp_closing.js | 35 + test/test_419_tcp_output_auto_reconnect.js | 64 ++ test/test_420_input_gae.js | 159 ++++ test/test_421_logstash.js | 165 ++++ test/test_421_logstash/base | 5 + test/test_421_logstash/else_else_if | 27 + test/test_421_logstash/fields_tags_1 | 15 + test/test_421_logstash/fields_tags_2 | 15 + test/test_421_logstash/fields_tags_3 | 18 + test/test_421_logstash/grep | 14 + test/test_421_logstash/if_regex | 9 + test/test_421_logstash/regex | 18 + test/test_421_logstash/simple | 7 + test/test_421_logstash/simple_if | 9 + 
test/test_421_logstash/upper | 16 + test/test_422_input_tags_fields.js | 89 +++ test/test_500_real_life.js | 249 ++++++ test/test_600_no_directory_watched.js | 15 + test/zmq_injector.js | 45 ++ 281 files changed, 19639 insertions(+) create mode 100644 Readme.markdown create mode 100755 bin/node-logstash-agent create mode 100755 build_zmq.sh create mode 100644 changelog.md create mode 100644 dists/README-RedHat.txt create mode 100644 dists/rpmbuild/.rpmmacros create mode 100755 dists/rpmbuild/SOURCES/logstash create mode 100755 dists/rpmbuild/SOURCES/run_node.sh create mode 100644 dists/rpmbuild/SPECS/node-logstash.spec create mode 100644 docs/archi.jpg create mode 100644 docs/archi.pptx create mode 100644 docs/cache.md create mode 100644 docs/common_params.md create mode 100644 docs/elastic_mapping.md create mode 100644 docs/filters/bunyan.md create mode 100644 docs/filters/compute_date_field.md create mode 100644 docs/filters/compute_field.md create mode 100644 docs/filters/eval.md create mode 100644 docs/filters/geoip.md create mode 100644 docs/filters/grep.md create mode 100644 docs/filters/grok.md create mode 100644 docs/filters/http_status_classifier.md create mode 100644 docs/filters/json_fields.md create mode 100644 docs/filters/multiline.md create mode 100644 docs/filters/mutate_replace.md create mode 100644 docs/filters/regex.md create mode 100644 docs/filters/remove_field_when_equal.md create mode 100644 docs/filters/rename.md create mode 100644 docs/filters/reverse_dns.md create mode 100644 docs/filters/split.md create mode 100644 docs/filters/tags_fields.md create mode 100644 docs/filters/truncate.md create mode 100644 docs/inputs/amqp.md create mode 100644 docs/inputs/file.md create mode 100644 docs/inputs/gae.md create mode 100644 docs/inputs/http.md create mode 100644 docs/inputs/redis.md create mode 100644 docs/inputs/sqs.md create mode 100644 docs/inputs/syslog.md create mode 100644 docs/inputs/tags_fields.md create mode 100644 
docs/inputs/tcp_tls.md create mode 100644 docs/inputs/unserializers.md create mode 100644 docs/inputs/ws.md create mode 100644 docs/inputs/zeromq.md create mode 100644 docs/interpolation.md create mode 100644 docs/outputs/amqp.md create mode 100644 docs/outputs/elasticsearch.md create mode 100644 docs/outputs/file.md create mode 100644 docs/outputs/gelf.md create mode 100644 docs/outputs/hep.md create mode 100644 docs/outputs/http_post.md create mode 100644 docs/outputs/http_proxy.md create mode 100644 docs/outputs/logio.md create mode 100644 docs/outputs/lumberjack.md create mode 100644 docs/outputs/redis.md create mode 100644 docs/outputs/serializers.md create mode 100644 docs/outputs/sqs.md create mode 100644 docs/outputs/statsd.md create mode 100644 docs/outputs/tcp_tls.md create mode 100644 docs/outputs/ws.md create mode 100644 docs/outputs/zeromq.md create mode 100644 docs/ssl.md create mode 100755 generate_parser.sh create mode 100755 jshint.sh create mode 100644 lib/agent.js create mode 100644 lib/filters/filter_add_host.js create mode 100644 lib/filters/filter_add_timestamp.js create mode 100644 lib/filters/filter_add_version.js create mode 100644 lib/filters/filter_app_janus.js create mode 100644 lib/filters/filter_bunyan.js create mode 100644 lib/filters/filter_compute_date_field.js create mode 100644 lib/filters/filter_compute_field.js create mode 100644 lib/filters/filter_eval.js create mode 100644 lib/filters/filter_geoip.js create mode 100644 lib/filters/filter_grep.js create mode 100644 lib/filters/filter_grok.js create mode 100644 lib/filters/filter_http_status_classifier.js create mode 100644 lib/filters/filter_json_fields.js create mode 100644 lib/filters/filter_multiline.js create mode 100644 lib/filters/filter_mutate_replace.js create mode 100644 lib/filters/filter_regex.js create mode 100644 lib/filters/filter_remove_field_when_equal.js create mode 100644 lib/filters/filter_rename.js create mode 100644 lib/filters/filter_reverse_dns.js create 
mode 100644 lib/filters/filter_split.js create mode 100644 lib/filters/filter_syslog_pri.js create mode 100644 lib/filters/filter_truncate.js create mode 100644 lib/inputs/input_amqp.js create mode 100644 lib/inputs/input_file.js create mode 100644 lib/inputs/input_freeswitch.js create mode 100644 lib/inputs/input_gae.js create mode 100644 lib/inputs/input_http.js create mode 100644 lib/inputs/input_redis.js create mode 100644 lib/inputs/input_sqs.js create mode 100644 lib/inputs/input_stdin.js create mode 100644 lib/inputs/input_tcp.js create mode 100644 lib/inputs/input_udp.js create mode 100644 lib/inputs/input_unix.js create mode 100644 lib/inputs/input_ws.js create mode 100644 lib/inputs/input_zeromq.js create mode 100644 lib/lib/amqp_driver.js create mode 100644 lib/lib/async_helper.js create mode 100644 lib/lib/base_component.js create mode 100644 lib/lib/base_filter.js create mode 100644 lib/lib/base_filter_buffer.js create mode 100644 lib/lib/base_input.js create mode 100644 lib/lib/base_output.js create mode 100644 lib/lib/cache_helper.js create mode 100644 lib/lib/condition_evaluator.js create mode 100644 lib/lib/config_mapper.js create mode 100644 lib/lib/directory_detector.js create mode 100644 lib/lib/directory_watcher.js create mode 100644 lib/lib/elastic_search_helper.js create mode 100644 lib/lib/error_buffer.js create mode 100644 lib/lib/file_filter.js create mode 100644 lib/lib/file_loader.js create mode 100644 lib/lib/monitor_file.js create mode 100644 lib/lib/patterns_loader.js create mode 100644 lib/lib/redis_connection_manager.js create mode 100644 lib/lib/regex_helper.js create mode 100644 lib/lib/sig_listener.js create mode 100644 lib/lib/sqs_wrapper.js create mode 100644 lib/lib/ssl_helper.js create mode 100644 lib/lib/tail_file.js create mode 100644 lib/lib/url_parser.js create mode 100644 lib/logstash_config.jison create mode 100644 lib/logstash_config.js create mode 100644 lib/outputs/abstract_http.js create mode 100644 
lib/outputs/abstract_tcp.js create mode 100644 lib/outputs/abstract_udp.js create mode 100644 lib/outputs/abstract_zeromq.js create mode 100644 lib/outputs/output_amqp.js create mode 100644 lib/outputs/output_elasticsearch.js create mode 100644 lib/outputs/output_elasticsearch_zeromq.js create mode 100644 lib/outputs/output_file.js create mode 100644 lib/outputs/output_gelf.js create mode 100644 lib/outputs/output_hep.js create mode 100644 lib/outputs/output_http_post.js create mode 100644 lib/outputs/output_logio.js create mode 100644 lib/outputs/output_lumberjack.js create mode 100644 lib/outputs/output_redis.js create mode 100644 lib/outputs/output_sqs.js create mode 100644 lib/outputs/output_statsd.js create mode 100644 lib/outputs/output_stdout.js create mode 100644 lib/outputs/output_tcp.js create mode 100644 lib/outputs/output_udp.js create mode 100644 lib/outputs/output_unix.js create mode 100644 lib/outputs/output_ws.js create mode 100644 lib/outputs/output_zeromq.js create mode 100755 lib/patterns/grok/firewalls create mode 100755 lib/patterns/grok/grok-patterns create mode 100755 lib/patterns/grok/haproxy create mode 100755 lib/patterns/grok/java create mode 100755 lib/patterns/grok/junos create mode 100755 lib/patterns/grok/linux-syslog create mode 100755 lib/patterns/grok/mcollective create mode 100755 lib/patterns/grok/mcollective-patterns create mode 100755 lib/patterns/grok/mongodb create mode 100755 lib/patterns/grok/nagios create mode 100755 lib/patterns/grok/postgresql create mode 100755 lib/patterns/grok/redis create mode 100755 lib/patterns/grok/ruby create mode 100644 lib/patterns/http_combined create mode 100644 lib/patterns/http_vhost_combined create mode 100644 lib/patterns/syslog create mode 100644 lib/patterns/syslog_no_prio create mode 100644 license.txt create mode 100644 package.json create mode 100644 packager/Procfile create mode 100644 packager/Procfile_debian8 create mode 100644 packager/postinst create mode 100755 remote_test.sh 
create mode 100755 test-runner.sh create mode 100644 test/500_real_life/copytruncate_logrotate.conf create mode 100644 test/500_real_life/run.js create mode 100644 test/500_real_life/std_logrotate.conf create mode 100644 test/file_loader_test/comment create mode 100644 test/file_loader_test/empty create mode 100644 test/file_loader_test/empty_spaces create mode 100644 test/file_loader_test/multiple create mode 100644 test/file_loader_test/only_one_cr create mode 100644 test/file_loader_test/simple create mode 100644 test/filter_helper.js create mode 100644 test/grok/extra create mode 100644 test/grok/wrong create mode 100644 test/integration_helper.js create mode 100755 test/maxmind_db.sh create mode 100644 test/mock_helper.js create mode 100644 test/not_readable_helper.js create mode 100644 test/parser/special_chars_new_line create mode 100644 test/parser/special_chars_quotes create mode 100644 test/parser/special_chars_quotes_single_quotes create mode 100644 test/parser/special_chars_space create mode 100644 test/parser/special_chars_utf8 create mode 100644 test/redis_driver.js create mode 100644 test/ssl/client.crt create mode 100644 test/ssl/client.csr create mode 100644 test/ssl/client.key create mode 100644 test/ssl/index.txt create mode 100644 test/ssl/index.txt.attr create mode 100644 test/ssl/index.txt.attr.old create mode 100644 test/ssl/index.txt.old create mode 100644 test/ssl/newcerts/00.pem create mode 100644 test/ssl/newcerts/01.pem create mode 100644 test/ssl/openssl.cnf create mode 100644 test/ssl/root-ca.crt create mode 100644 test/ssl/root-ca.key create mode 100644 test/ssl/serial create mode 100644 test/ssl/serial.old create mode 100644 test/ssl/server.crt create mode 100644 test/ssl/server.csr create mode 100644 test/ssl/server.key create mode 100644 test/test_100_file_loader.js create mode 100644 test/test_101_url_parser.js create mode 100644 test/test_102_error_buffer.js create mode 100644 test/test_103_file_filter.js create mode 100644 
test/test_104_logstash_parser.js create mode 100644 test/test_105_condition_evaluator.js create mode 100644 test/test_200_filter_add_timestamp.js create mode 100644 test/test_201_filter_add_host.js create mode 100644 test/test_202_filter_regex.js create mode 100644 test/test_203_filter_grep.js create mode 100644 test/test_204_filter_mutate_replace.js create mode 100644 test/test_205_filter_compute_field.js create mode 100644 test/test_206_message_filtering.js create mode 100644 test/test_207_filter_split.js create mode 100644 test/test_208_filter_multiline.js create mode 100644 test/test_209_filter_syslog_pri.js create mode 100644 test/test_210_compute_date_field.js create mode 100644 test/test_211_reverse_dns.js create mode 100644 test/test_212_filter_json_fields.js create mode 100644 test/test_213_filter_add_version.js create mode 100644 test/test_214_filter_geoip.js create mode 100644 test/test_215_filter_eval.js create mode 100644 test/test_216_filter_bunyan.js create mode 100644 test/test_217_filter_http_status_classifier.js create mode 100644 test/test_218_filter_grok.js create mode 100644 test/test_219_filter_truncate.js create mode 100644 test/test_220_remove_field_when_equal.js create mode 100644 test/test_221_filter_rename.js create mode 100644 test/test_300_monitor_file.js create mode 100644 test/test_301_tail_file.js create mode 100644 test/test_302_directory_detector.js create mode 100644 test/test_303_output_file.js create mode 100644 test/test_400_error.js create mode 100644 test/test_401_file2file.js create mode 100644 test/test_402_json_logstash.js create mode 100644 test/test_403_elasticsearch.js create mode 100644 test/test_404_http_post.js create mode 100644 test/test_405_net2file.js create mode 100644 test/test_406_logio.js create mode 100644 test/test_407_statsd.js create mode 100644 test/test_408_gelf.js create mode 100644 test/test_409_multiline.js create mode 100644 test/test_410_file2x2x2file.js create mode 100644 
test/test_411_http_proxy.js create mode 100644 test/test_412_tls_appendcert.js create mode 100644 test/test_413_closing_inputs.js create mode 100644 test/test_414_output_zeromq.js create mode 100644 test/test_415_output_elasticsearch_zeromq.js create mode 100644 test/test_416_wildcard.js create mode 100644 test/test_417_filter_no_params_and_only_type.js create mode 100644 test/test_418_good_tcp_closing.js create mode 100644 test/test_419_tcp_output_auto_reconnect.js create mode 100644 test/test_420_input_gae.js create mode 100644 test/test_421_logstash.js create mode 100644 test/test_421_logstash/base create mode 100644 test/test_421_logstash/else_else_if create mode 100644 test/test_421_logstash/fields_tags_1 create mode 100644 test/test_421_logstash/fields_tags_2 create mode 100644 test/test_421_logstash/fields_tags_3 create mode 100644 test/test_421_logstash/grep create mode 100644 test/test_421_logstash/if_regex create mode 100644 test/test_421_logstash/regex create mode 100644 test/test_421_logstash/simple create mode 100644 test/test_421_logstash/simple_if create mode 100644 test/test_421_logstash/upper create mode 100644 test/test_422_input_tags_fields.js create mode 100644 test/test_500_real_life.js create mode 100644 test/test_600_no_directory_watched.js create mode 100644 test/zmq_injector.js diff --git a/Readme.markdown b/Readme.markdown new file mode 100644 index 00000000..d4c6f6e1 --- /dev/null +++ b/Readme.markdown @@ -0,0 +1,338 @@ +node-logstash +==== + +[![Build Status](https://travis-ci.org/bpaquet/node-logstash.png)](https://travis-ci.org/bpaquet/node-logstash) + +What is it ? +--- + +It's a [NodeJS](http://nodejs.org) implementation of [Logstash](http://logstash.net/). + + +What to do with node-logstash ? +--- + +node-logstash is a tool to collect logs on servers. It allow to send its to a central server and to [ElasticSearch](http://www.elasticsearch.org/) for indexing. 
+ +In top of elastic search, you can use a specialized interface like [kibana](https://github.com/elastic/kibana) to dive into your logs. + +![Archi](https://raw.github.com/bpaquet/node-logstash/master/docs/archi.jpg) + +Why a new implementation ? +--- + +When I tried logstash, I had some problems. This version should have: + +* lower memory footprint +* lower cpu footprint +* faster startup delay + +Moreover it's written in NodeJS, which is a perfect language for programs with many IO. + +node-logstash is compatible with logstash. You can replace a node-logstash node by a logstash one. The data are formatted in the same way to be compatible with logstash UIs. + +How does it works ? +=== + +The architecture is identical to logstash architecture. You have to instanciates plugins with the node-logstash core. There are three type of modules: + +* [inputs plugins](#inputs): where datas come into node-logstash. Examples: file, zeromq transport layer +* [filter plugins](#filters): extract fields from logs, like timestamps. Example: regex plugin +* [outputs plugins](#outputs): where datas leave from node-logstash: Examples: ElasticSearch , zeromq transport layer. + + +A typical node-logstash deployement contains agents to crawl logs and a log server. + +On agent, node-logstash is configured whith inputs plugins to get logs from your software stack, and one output plugin to send logs to log server (eg. zeromq output plugin). + +On log server, logs come trough a zeromq input plugin, are processed (fields and timestamps extraction), and send to ElasticSearch. + +How to get help ? +=== + +Please open an [issue](https://github.com/bpaquet/node-logstash/issues). + +Future of this project +=== + +October 25th 2015. + +When I started node-logstash, the ecosystem around logstash and ElasticSearch were almost non-existant. 
In 2015, the siutation is not the same : +* Great ecosystem around ElasticSearch and logstash, FileBeat project +* Logstash is now the only way to push events to ElasticSearch ([deprecation of rivers](https://www.elastic.co/blog/deprecating-rivers)) + +So, what is the future of node-logstash ? +* as a tool to collect logs on files and send them through network, node-losgstash is still useful with lower size, instant start, lower CPU / Memory footprint (in my tests with logstash 1.5.0). The comparison is different with Lumberjack and FileBeat. +* as log processing tool, it has the same advantages, but the plugin ecosystem is smaller than Logstash. +* as an injection tool in ElasticSearch : ZeroMQ river will soon be unusable ([deprecation of rivers](https://www.elastic.co/blog/deprecating-rivers)). You have to use bulk api to inject data. It should be less efficient than starting an embedded ElasticSearch node, as in the original Logstash. + +Current project status +--- + +Node-logstash is production ready, and used in production. Installation is a classical node project installation, with some scripts for native packaging. + +Maintainers : currently I, @bpaquet, am the only maintainer. I will keep dependencies up to date, update the core to follow node version, but I do not have time to add features in the core. See Contributing below. + +Weaknesses : +* tests are difficult to maintain, even if they are many and the code coverage is good. Replace vows by mocha is a good way to improve that, but it's a big rework. + +Contributing +=== + +What Pull Request (PR) will be merged ? + +Add plugin (output, input or filter) +--- + +Conditions to have a PR merged : + +* respect jslint +* provide documentation in /docs +* do not modify core. Modifications allowed : + * add plugin in ``Readme.md``. + * add optional dependencies in ``package.json`` +* If you provide unit tests, you can write in plugin documentation that the plugin is a plugin core. 
+* If you do not provide unit tests, please indicate in the documentation : "Status : contributed plugin, maintained by @xxxx. Producion ready.", and indicate your Github login. + +You are encouraged to ask to merge plugins without tests, which are not production ready. + +Core modification +--- + +Please respect jslint, and provide all needed unit tests. +How to use it ? +=== + +Installation +--- + +### Simple way + +Use [prepackaged deb files](https://packager.io/gh/nodelogstashpackager/node-logstash/install). + +After install, just add your config files to ``/etc/node-logstash/plugins.conf.d``, and restart node-logstash ``service node-logstash restart``. + +To see what options are passed to node-logstash, see [here](packager/Procfile). + +To change log level, do ``node-logstash config:set LOG_LEVEL=debug``, and restart node-logstash. + +### Manual install + +* Install NodeJS, version >= 0.12 +* Install build tools + * Debian based system: `apt-get install build-essential` + * Centos system: `yum install gcc gcc-c++ make` +* Install zmq dev libraries: This is required to build the [node zeromq module](https://github.com/JustinTulloss/zeromq.node). + * Debian based system: `apt-get install libzmq1`. Under recent releases, this package is present in default repositories. On ubuntu lucid, use this [ppa](https://launchpad.net/~chris-lea/+archive/zeromq). On debian squeeze, use [backports](http://backports-master.debian.org/Instructions/). + * Centos 6: `yum install zeromq zeromq-devel`. Before, you have to add the rpm zeromq repo : `curl http://download.opensuse.org/repositories/home:/fengshuo:/zeromq/CentOS_CentOS-6/home:fengshuo:zeromq.repo > /etc/yum.repos.d/zeromq.repo` +* Clone repository: `git clone git://github.com/bpaquet/node-logstash.git && cd node-logstash` +* Install dependencies: `npm install`. + +The executable is ``bin/node-logstash-agent`` + +Configuration formats +--- + +There are two format for configuration. The legacy format use urls. 
The new one is identical to the [logstash config format](https://www.elastic.co/guide/en/logstash/current/configuration.html). + +Note : if you are using multiple config files, you can mix formats. + +Configuration by url (legacy) +--- + +A plugin is instanciated by an url. Example: ``input://file:///tmp/toto.log``. This url +instanciate an input file plugin which monitor the file ``/tmp/toto.log``. + +The urls can be specified: + +* directly on the command line +* in a file (use the ``--config_file`` switch) +* in all files in a directory (use the ``--config_dir`` switch) + +Configuration by logstash config files (recommended) +--- + +Example for an input file +```` +input { + file { + path => '/tmp/toto.log' + } +} +```` + +You can use ``if`` to have an [event dependent configuration](https://www.elastic.co/guide/en/logstash/current/event-dependent-configuration.html). See [here for details](docs/common_params.md). +As for urls, config can be specified + +* directly on the command line +* in a file (use the ``--config_file`` switch) +* in all files in a directory (use the ``--config_dir`` switch) + +Note : the implementation is young, all bugs reports are welcome. +Note : both formats can be mixed. + +Command lines params +--- + +* ``--log_level`` to change the log level (emergency, alert, critical, error, warning, notice, info, debug) +* ``--log_file`` to redirect log to a log file. +* ``--patterns_directories`` to add some directories (separated by ,), for loading config for regex plugin and grok plugins. Grok patterns files must be located under a ``grok`` subdirectory for each specified directory. +* ``--db_file`` to specify the file to use as database for file inputs (see below) +* ``--http_max_sockets`` to specify the max sockets of [http.globalAgent.maxSockets](http://nodejs.org/api/http.html#http_agent_maxsockets). Default to 100. +* ``--alarm_file`` to specify a file which will be created if node-logstash goes in alarm mode (see below). 
+ +Examples +--- + +Config file for an agent: +```` +input { + file { + path => "/var/log/nginx/access.log" + } +} + +output { + zeromq { + address => ["tcp://log_server:5555"] + } +} +```` + +Config file for log server: +```` +input { + zeromq { + address => ["tcp://0.0.0.0:5555"] + } +} + +filter { + regex { + pattern => http_combined + } +} + +output { + elasticsearch { + host => localhost + port => 9200 + } +} +``` + +Adding your plugins +--- + +You can add easily add your plugins : + +Manually : + +* create a directory layout on the path of your choice : ``/var/my_plugins/inputs``, ``/var/my_plugins/outputs``, ``/var/my_plugins/filters`` +* set the NODE_PATH variable to ``NODE_PATH=/var/my_plugins:/node_logstash_path/lib`` +* add your plugins in ``inputs``, ``outputs`` or ``filters`` directory. In the plugin code, you can reference base plugins with ``var base_filter = require('lib/base_filter');`` +* reference your plugin as usual. + + +With native packaging + +The plugins must be deployed in ``/var/db/node-logstash/custom_plugins``. All subdirectories already exists. The NODE_PATH is already set. + + +Signals +--- + +* USR1: stoping or starting all inputs plugins. 
Can be used to close input when output targer are failing +* USR2: see below file output plugin + +Changelog +=== + +[Changelog](changelog.md) + +Plugins list +=== + +Input plugins +--- + +* [File](docs/inputs/file.md) +* [Syslog](docs/inputs/syslog.md) +* [ZeroMQ](docs/inputs/zeromq.md) +* [Redis](docs/inputs/redis.md) +* [HTTP](docs/inputs/http.md) +* [Websocket](docs/inputs/ws.md) +* [TCP / TLS](docs/inputs/tcp_tls.md) +* [Google app engine](docs/inputs/gae.md) +* [AMQP](docs/inputs/amqp.md) +* [SQS](docs/inputs/sqs.md) + +Common concepts / parameters : + +* [Unserializers](docs/inputs/unserializers.md) +* [Tags/fields](docs/inputs/tags_fields.md) + +Filter plugins +--- + +* [Regex](docs/filters/regex.md) +* [Grok](docs/filters/grok.md) +* [Mutate Replace](docs/filters/mutate_replace.md) +* [Grep](docs/filters/grep.md) +* [Reverse DNS](docs/filters/reverse_dns.md) +* [Compute field](docs/filters/compute_field.md) +* [Compute date field](docs/filters/compute_date_field.md) +* [Split](docs/filters/split.md) +* [Truncate](docs/filters/truncate.md) +* [Rename](docs/filters/rename.md) +* [Multiline](docs/filters/multiline.md) +* [Json fields](docs/filters/json_fields.md) +* [Geoip](docs/filters/geoip.md) +* [Eval](docs/filters/eval.md) +* [Bunyan](docs/filters/bunyan.md) +* [HTTP Status Classifier](docs/filters/http_status_classifier.md) +* [Remove field when equal](docs/filters/remove_field_when_equal.md) + +Common concepts / parameters : + +* [Common parameters](docs/common_params.md) +* [Tags/fields](docs/filters/tags_fields.md) + +Outputs +--- + +* [ZeroMQ](docs/outputs/zeromq.md) +* [ElasticSearch](docs/outputs/elasticsearch.md) +* [Statsd](docs/outputs/statsd.md) +* [Gelf](docs/outputs/gelf.md) +* [File](docs/outputs/file.md) +* [HTTP Post](docs/outputs/http_post.md) +* [Websocket](docs/outputs/ws.md) +* [Redis](docs/outputs/redis.md) +* [Logio](docs/outputs/logio.md) +* [TCP / TLS](docs/outputs/tcp_tls.md) +* [AMQP](docs/outputs/amqp.md) +* 
[SQS](docs/outputs/sqs.md) +* [HEP](docs/outputs/hep.md) + +Common concepts / parameters : + +* [Common parameters](docs/common_params.md) +* [Serializers](docs/outputs/serializers.md) + + +Misc +--- + +* [Elasticsearch mapping](docs/elastic_mapping.md) + +License +=== + +Copyright 2012 - 2014 Bertrand Paquet + +Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. diff --git a/bin/node-logstash-agent b/bin/node-logstash-agent new file mode 100755 index 00000000..fafee0dc --- /dev/null +++ b/bin/node-logstash-agent @@ -0,0 +1,240 @@ +#!/usr/bin/env node + +var optimist = require('optimist'), + fs = require('fs'), + path = require('path'), + http = require('http'), + https = require('https'), + logger = require('log4node'), + agent = require('../lib/agent'), + monitor_file = require('../lib/lib/monitor_file'), + file_loader = require('../lib/lib/file_loader'), + patterns_loader = require('../lib/lib/patterns_loader'); + +var argv = optimist.argv; + +function help() { + console.log('Syntax : node-logstasth-agent [--log_level=info] [--log_file=file] [--config_file=toto] [--config_dir=/etc/config] urls'); + console.log('Url examples: '); + console.log('- input://stdin://'); + console.log('- input://file://main_input.txt?type=toto'); + console.log('- output://stdout://'); + console.log('- output://file:///tmp/toto.txt'); +} + +function log_error() { + if (argv.log_file) { + console.log.apply(console, arguments); + } + logger.error.apply(logger, arguments); +} + +if (argv.help) { + help(); + process.exit(0); 
+} + +process.on('uncaughtException', function(err) { + log_error('Exception has been catch, it\' a bug'); + log_error('Please submit an issue on https://github.com/bpaquet/node-logstash'); + log_error('Exception:', err); + log_error(err.stack); +}); + +if (argv.log_file) { + logger.info('Log to file', argv.log_file, ', log_level', argv.log_level); + logger.reconfigure({ + level: argv.log_level || process.env.LOG_LEVEL, + file: argv.log_file + }); +} +else { + var log_level = argv.log_level || process.env.LOG_LEVEL; + if (log_level) { + logger.info('Changing log_level', log_level); + logger.setLogLevel(log_level); + } +} + +var package_json_file = path.join(__dirname, '..', 'package.json'); +var version = JSON.parse(fs.readFileSync(package_json_file)).version; + +logger.notice('Starting node-logstasth-agent', version); + +var http_max_sockets = argv.http_max_sockets || 100; +logger.info('Max http socket', http_max_sockets); +http.globalAgent.maxSocket = http_max_sockets; +https.globalAgent.maxSocket = http_max_sockets; + +var a = agent.create(); + +a.on('error', function(module_name, error) { + logger.error('[' + module_name + '] ' + error); +}); + +function removeIfExists(file) { + fs.exists(file, function(exists) { + if (exists) { + fs.unlink(file, function(err) { + if (err) { + logger.error('Unable to remove file', file, err); + } + }); + } + }); +} +if (argv.alarm_file) { + logger.info('Alarm file', argv.alarm_file); + removeIfExists(argv.alarm_file); + a.on('alarm_mode', function(a) { + if (a) { + fs.writeFile(argv.alarm_file, '1', function(err) { + if (err) { + logger.error('Unable to write alarm file', argv.alarm_file, err); + } + }); + } + else { + removeIfExists(argv.alarm_file); + } + }); +} + +patterns_loader.add(path.join(__dirname, '..', 'lib', 'patterns')); + +if (argv.patterns_directories) { + argv.patterns_directories.split(/,/).forEach(function(d) { + patterns_loader.add(d); + }); +} + +function after_config_directory(config) { + 
logger.info('Loading config : ' + config.length + ' urls'); + + a.start(config, function(err) { + if (err) { + log_error('Unable to load urls from command line'); + log_error(err.stack); + setTimeout(function() { + process.exit(2); + }, 50); + return; + } + logger.info('Config loaded.'); + }); +} + +function after_config_file(config) { + if (argv.config_dir) { + logger.info('Loading config files from : ' + argv.config_dir); + file_loader.loadDirectory(argv.config_dir, true, function(err, result) { + if (err) { + log_error('Unable to load config from directory'); + log_error(err.stack); + setTimeout(function() { + process.exit(1); + }, 50); + return; + } + logger.info('Files loaded from directory, ' + result.length + ' urls found'); + after_config_directory(config.concat(result)); + }); + } + else { + after_config_directory(config); + } +} + +function load_config_file(config) { + if (argv.config_file) { + logger.info('Loading config file : ' + argv.config_file); + file_loader.loadFile(argv.config_file, true, function(err, result) { + if (err) { + log_error('Unable to load config file', argv.config_file); + log_error(err.stack); + setTimeout(function() { + process.exit(1); + }, 50); + return; + } + logger.info('File loaded, ' + result.length + ' urls found'); + after_config_file(config.concat(result)); + }); + } + else { + after_config_file(config); + } +} + +function main_start() { + load_config_file([ + 'filter://add_host://', + 'filter://add_timestamp://', + 'filter://add_version://', + ].concat(argv._.map(function(x) { + return file_loader.filter(true, x)[0]; + }))); +} + +function close() { + a.close(function() { + logger.info('Quitting.'); + if (argv.db_file) { + fs.writeFile(argv.db_file, JSON.stringify(monitor_file.getFileStatus()), function(err) { + if (err) { + log_error('Error while writing', argv.db_file, ':', err); + } + setTimeout(function() { + process.exit(1); + }, 50); + }); + } + else { + setTimeout(function() { + process.exit(1); + }, 50); + } + 
}); +} + +process.on('SIGTERM', function() { + logger.info('SIGTERM received.'); + close(); +}); + +process.on('SIGINT', function() { + logger.info('SIGINT received.'); + close(); +}); + +if (argv.db_file) { + fs.exists(argv.db_file, function(exists) { + if (!exists) { + logger.info('Db file not found', argv.db_file); + main_start(); + } + else { + fs.readFile(argv.db_file, function(err, data) { + if (err) { + log_error('Error while reading', argv.db_file, ':', err); + } + else { + try { + var parsed = JSON.parse(data); + monitor_file.setFileStatus(parsed); + } + catch(err) { + log_error('Error while parsing db file, please delete it', argv.db_file, ':', err); + } + } + main_start(); + }); + } + }); +} +else { + main_start(); +} + +// avoid stop if no config is given +setInterval(function() {}, 3600 * 1000); diff --git a/build_zmq.sh b/build_zmq.sh new file mode 100755 index 00000000..5545c9dd --- /dev/null +++ b/build_zmq.sh @@ -0,0 +1,29 @@ +#!/bin/sh + +set -e + +target=$1 +version=$2 + +if [ "$target" = "" -o "$version" = "" ]; then + echo "Usage $0 target version" + exit 1 +fi + +echo "Installing ZeroMQ version $version to $target" +cd /tmp +rm -rf zeromq-$version.tar.gz $target +wget http://download.zeromq.org/zeromq-$version.tar.gz +tar xzf zeromq-$version.tar.gz +mv zeromq-$version $target + +echo "Compiling" +cd $target +./configure +make + +echo "ZeromMQ version $version ready in $target" +echo "Env var to set" +echo "export CPLUS_INCLUDE_PATH=$target/include" +echo "export LB_LIBRARY_PATH=$target/src/.libs" +echo "export LIBRARY_PATH=$target/src/.libs" \ No newline at end of file diff --git a/changelog.md b/changelog.md new file mode 100644 index 00000000..0e82cf31 --- /dev/null +++ b/changelog.md @@ -0,0 +1,59 @@ +* 22/04/2016 : Fields and tag management for input plugins +* 21/04/2016 : Fix #123 : Do not close file automatically in output file plugin +* 19/04/2016 : Fix #120 : Grok filter use match param, not grok +* 18/09/2015 : allow to load plugins 
from NODE_PATH +* 11/09/2015 : publish 0.0.5 on NPM +* 11/09/2015 : new config format, based on logstash config format +* 11/09/2015 : new installer using [packager.io](packager.io), thx to @crohr +* 11/09/2015 : mass documentation update +* 11/09/2015 : allow to use node-maxmind plugin for geoip filter. Geoip filter can now fetch ASN. +* 11/09/2015 : add cache on reverse dns and geoip filters +* 25/10/2015 : Reorganize doc +* 25/10/2015 : Update http proxy support for node > 0.10 +* 24/10/2015 : drop 0.10 support, update test for node 4 +* 24/10/2015 : use aws-sdk for aws SQS plugin + +* 04/10/2015 : publish 0.0.4 on NPM +* 16/05/2015 : Allow to specify dates in computed values +* 13/05/2015 : Add basic auth for HTTP Output plugins (#100) +* 13/05/2015 : Add websockets support (thx to @fujifish) +* 4/04/2015 : Add raw unserializer (thx to @nfisher) +* 12/03/2015 : Allow wildcard in path for input file plugin +* 7/03/2015 : Allow to use fixed index name for ElasticSearch output +* 21/01/2015 : AMQP plain authentication, AMQP vhost +* 3/01/2015 : Add SQS Input / Output +* 9/11/2014 : publish 0.0.3 on NPM + +* Add SSL Suport to AMPQ plugins +* Add bulk insert for ElasticSearch (thx to @fujifish) +* Add index_prefix configuration parameter for ElasticSearch (thx to @fujifish) +* Add AMQP / RabbitMQ input and output +* End of NodeJS 0.8 compatibility +* Add Grok filter (thx to @fujifish) +* Add GAE input +* Fix issue #70 with reconnect on TCP Output +* Fix issue #75 when stopping with TCP input +* Add only\_field\_match\_ options +* Do not log error with Geo IP filter and local ips +* Fix bug #62 : only_type not honored when component have no config (thx to @ryepup) +* Allow ZeroMQ output to multiple hosts (thx to @dax) +* Add bunyan filter (thx to @JonGretar) +* Implement BLPOP / RPUSH mechanism for redis, and use it by default. Thx to @perrinood. 
+* ElasticSearch indexes now use UTC, and default type value is logs instead of data +* Add wildcard for input file plugin +* Add delimiter for file and tcp plugins +* Auth on redis +* Improve dns reverse filter +* Compatibility with ZeroMQ 2.2.x, 3.x, 4.x +* Add USR1 signal to stop and start inputs plugins +* Add TCP / TLS plugin, thx to @dlanderson +* Add input HTTP plugin, thx to @fujifish +* Refactor SSL management +* Add GeopIP filter, thx to @subutux +* Add serializer and unserializer support +* Allow to use input file plugin on non existent directory +* Utf-8 is now the default encoding for input file plugin +* Add [Log.io](http://logio.org) output +* Use the 1.2 logstash json format +* Add redis input and output plugin +* Add tail -f input file plugin \ No newline at end of file diff --git a/dists/README-RedHat.txt b/dists/README-RedHat.txt new file mode 100644 index 00000000..c6ee60be --- /dev/null +++ b/dists/README-RedHat.txt @@ -0,0 +1,89 @@ + + How to build node-logstash rpm ot of this node-logstash.spec in order to + install it on the RedHat based Linux OS. + ======================================================================== + +Read this document through entirely before star building your rpm package. + + 1. + Read the main documantation page +https://github.com/bpaquet/node-logstash/blob/master/Readme.markdown + - follow through the steps of "Installation" chapter, except ZeroMQ. I've built my .rpm against newer ZeroMQ library v3.2.5, +and the 'node-logstash.spec' is configured this way. Then do NOT do 'yum install zeromq zeromq-devel', instead of this +enable EPEL repo, i.e. 
+rpm -Uvh --replacepkgs http://dl.fedoraproject.org/pub/epel/6/x86_64/epel-release-6-8.noarch.rpm + - the latest ZeroMQ lib within EPEL is v3.2.5 (as per April 2015), so do: +yum install zeromq3 zeromq3-devel + ...Btw, the latest source version is v4 already, so if you want the latest - do your own workaround, +BUT then keep in mind, you will need to adjust the 'node-logstash.spec' accordingly. + + 2. + Follow this document to setup your rpmbuild environment +https://fedoraproject.org/wiki/How_to_create_an_RPM_package +OR + being non-root and while staying inside the directory where you're reading this manual, copy the whole tree to your home dir: +cp -r ./rpmbuild ~/ + then copy the settings +cp ./rpmbuild/.rpmmacros ~/ + +Here is the file checklist, so far you must have: +~/.rpmmacros + +~/rpmbuild/ + BUILD + BUILDROOT + RPMS + SOURCES + SPECS + SRPMS + +~/rpmbuild/SOURCES/ + logstash + run_node.sh + - these are the RHEL-customized scripts to start 'logstash' as a service. + +~/rpmbuild/SPECS/node-logstash.spec + + 3. + Check for the latest version of NodeJS ( https://nodejs.org/ ), by the time of writing this document it is v0.12.2, +so if there is newer version, then edit to adjust the settings inside ~/rpmbuild/SPECS/node-logstash.spec + + 4. + An important dependency is 'start-stop-daemon' utility which exists in Debian/Ubuntu repositories. +But there is no such a package in the EPEL/CentOS repositories. The 'node-logstash' is not written to fork in the background, +that why we need this special tool. I was trying to solve this problem using 'nohup' and '/etc/rc.d/init.d/functions', +but it is rather headache and waste of time. 
Fortunately there is a 3rd party port of the 'start-stop-daemon' tool, +you can obtain the source here in a form of .src.rpm which makes things even much easier: +wget ftp://ftp.pbone.net/mirror/ftp5.gwdg.de/pub/opensuse/repositories/home%3A/sschapiro%3A/openstack%3A/IS24/RedHat_RHEL-6/src/start-stop-daemon-1.9.18-2.2.src.rpm + then +rpm -i start-stop-daemon-1.9.18-2.2.src.rpm + - if your rpmbuild setup was correct, then you'll see 'start-stop-daemon.spec' inside ~/rpmbuild/SPECS +cd ~/rpmbuild/SPECS +rpmbuild -bb start-stop-daemon.spec + - this program is a small nice piece of C code, so you'll have your 'start-stop-daemon-1.9.18-2.2.x86_64.rpm' +and 'start-stop-daemon-debuginfo-1.9.18-2.2.x86_64.rpm' in less that a minute of building. + + 5. +cd ~/rpmbuild/SPECS +rpmbuild -bb node-logstash.spec + - depending on your hardware it can take ~10 min to build then you'll finally have node-logstash-0.0.3-1.el6.x86_64.rpm package. + + 6. + Finally, +to deploy 'node-logstash' package on your RHEL or CentOS machines you need: + - enable EPEL repo then +yum install zeromq3 + - copy over then install the following +rpm -i start-stop-daemon-1.9.18-2.2.x86_64.rpm +rpm -i node-logstash-0.0.3-1.el6.x86_64.rpm + Do your config file then place it inside /etc/logstash.d/, then +/etc/init.d/logstash start + +p.s. Use Ansible or salt-stack or Puppet etc to automate your installation tasks for big clusters. + + That is really it. :) + Good luck. 
+ +Serge Dudko +sergdudko (at) yandex.ru +Apr-20 2015 diff --git a/dists/rpmbuild/.rpmmacros b/dists/rpmbuild/.rpmmacros new file mode 100644 index 00000000..ff3e9f41 --- /dev/null +++ b/dists/rpmbuild/.rpmmacros @@ -0,0 +1,3 @@ +%_topdir %(echo $HOME)/rpmbuild +%_smp_mflags -j3 +%__arch_install_post /usr/lib/rpm/check-rpaths /usr/lib/rpm/check-buildroot diff --git a/dists/rpmbuild/SOURCES/logstash b/dists/rpmbuild/SOURCES/logstash new file mode 100755 index 00000000..846f6474 --- /dev/null +++ b/dists/rpmbuild/SOURCES/logstash @@ -0,0 +1,100 @@ +#!/bin/bash + +### BEGIN INIT INFO ### +# Provides: logstash +# Required-Start: $local_fs $remote_fs $network +# Required-Stop: $local_fs $remote_fs $network +# Default-Start: 2 3 4 5 +# Default-Stop: 0 1 6 +# Short-Description: start and stop logstash service +### END INIT INFO + +# set -e + +NAME="logstash" +DESC="logstash" +PID_FILE="/opt/logstash/shared/logstash.pid" +DAEMON="/opt/logstash/shared/run_node.sh" +OPTIONS="/opt/logstash/current/bin/node-logstash-agent" +START_STOP_DAEMON=/sbin/start-stop-daemon + +test -f /opt/logstash/current/bin/node-logstash-agent || exit 0 + +test -x $DAEMON || exit 0 + +test -d /opt/logstash/current/node_modules || exit 0 + +# unset NVM_DIR + + + +# umask 022 + +# Source function library. +# RH +. /etc/rc.d/init.d/functions +# Deb +# . /lib/lsb/init-functions + +status_logstash() { + # status_of_proc -p "$PID_FILE" "logstash" "$NAME" + status -p "$PID_FILE" "logstash" "$NAME" + RETVAL=$? +} + +kill_logstash() { + SIGNAL=$1 + + if [ ! 
"$PID_FILE" = "" ]; then + if [ -f $PID_FILE ]; then + kill $SIGNAL `cat $PID_FILE` || true + rm -f $PID_FILE + fi + fi +} + +run_logstash() { + echo "Running $NAME :" + su logstash -c "cd /opt/logstash/current && $DAEMON $OPTIONS " +} + +start_logstash() { + # $START_STOP_DAEMON -m -b -c logstash -d /opt/logstash/current --oknodo --start --pidfile $PID_FILE --exec /bin/sh -- -c "exec $DAEMON $OPTIONS " + + # the RedHat port of start-stop-daemon is slightly different + $START_STOP_DAEMON -m -b -c logstash --oknodo --pidfile $PID_FILE --start --exec /bin/sh -- -c "exec $DAEMON $OPTIONS " + sleep 1 + status_logstash + chown logstash $PID_FILE +} + +case "$1" in + run) + run_logstash + ;; + start) + echo -n "Starting $DESC: " + start_logstash + echo "$NAME." + ;; + stop) + echo -n "Stopping $DESC: " + kill_logstash + echo "$NAME." + ;; + restart) + echo -n "Restarting $DESC: " + kill_logstash || true + sleep 1 + start_logstash + echo "$NAME." + ;; + status) + status_logstash + ;; + *) + echo "Usage: /etc/init.d/logstash {start|stop|restart|status|run}" + exit 1 +esac + +exit 0 diff --git a/dists/rpmbuild/SOURCES/run_node.sh b/dists/rpmbuild/SOURCES/run_node.sh new file mode 100755 index 00000000..fc7e6f6a --- /dev/null +++ b/dists/rpmbuild/SOURCES/run_node.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +[ -f /etc/default/logstash ] && . 
/etc/default/logstash + +NODE_OPTS="$NODE_OPTS --log_file /opt/logstash/shared/log/logstash.log" + +export NODE_ENV=production +export PATH=/opt/logstash/node/bin:$PATH +cd /opt/logstash/current +exec node $* $NODE_OPTS diff --git a/dists/rpmbuild/SPECS/node-logstash.spec b/dists/rpmbuild/SPECS/node-logstash.spec new file mode 100644 index 00000000..2cc727e9 --- /dev/null +++ b/dists/rpmbuild/SPECS/node-logstash.spec @@ -0,0 +1,102 @@ +%define NodeJSVer 0.12.2 +%define zmqMajorVer 3 +%define zmqVer 3.2.5 + +Name: node-logstash +Version: 0.0.3 +Release: 1%{?dist} +Summary: It's a NodeJS implementation of Logstash +Group: Applications/File +License: Free +URL: https://github.com/bpaquet/node-logstash +# Source0: https://github.com/bpaquet/node-logstash/archive/master.zip +# Source1: http://nodejs.org/dist/v%{NodeJSVer}/node-v%{NodeJSVer}-linux-x64.tar.gz +Source0: logstash +Source1: run_node.sh +# AutoReqProv: no +BuildRequires: zeromq%{zmqMajorVer}-devel >= %{zmqVer} +BuildRequires: gcc +BuildRequires: gcc-c++ +BuildRequires: make +Requires: zeromq%{zmqMajorVer} >= %{zmqVer} + +%description +node-logstash is a tool to collect logs on servers. It allows to send log data to a central server and to ElasticSearch for indexing. +This implementation has advantages: +- lower memory footprint +- lower cpu footprint +- faster startup delay. + + +%prep +# %setup -q +mkdir -p opt/logstash/shared/log +mkdir -p etc/logstash.d + +cd opt/logstash +pwd +echo "Get the node-logstash code " +git clone https://github.com/bpaquet/node-logstash.git current +echo " " +echo " Wget then unpack NodeJS and rename it to 'node' " +wget http://nodejs.org/dist/v%{NodeJSVer}/node-v%{NodeJSVer}-linux-x64.tar.gz +tar xvzf node-v%{NodeJSVer}-linux-x64.tar.gz +mv node-v%{NodeJSVer}-linux-x64 node +echo " " +echo " Installing npm ... 
" +export PATH=$RPM_BUILD_DIR/opt/logstash/node/bin:$PATH +cd $RPM_BUILD_DIR/opt/logstash/current +npm install --production +cd $RPM_BUILD_DIR +cp %{SOURCE0} $RPM_BUILD_DIR/opt/logstash/current/bin/ +cp %{SOURCE1} $RPM_BUILD_DIR/opt/logstash/shared/ +echo "Done." +exit 0 + +# no build needed +# %build +# %configure +# make %{?_smp_mflags} +# =================== +# switch to node-logstash code folder + +# no install needed +%install +cp -r $RPM_BUILD_DIR/* $RPM_BUILD_ROOT +exit 0 + +%clean +rm -rf $RPM_BUILD_DIR/* +exit 0 + +%files +%{?filter_setup: +%filter_provides_in %{_docdir} +%filter_requires_in /opt/logstash/current/node_modules/aws-lib/node_modules/sax/examples +%filter_setup +} +%defattr(-,root,root,-) +# %doc +/etc/* +/opt/* + +%post +echo "NODE_OPTS=\"--config_dir /etc/logstash.d --log_level info \" " > /etc/default/logstash +cp /opt/logstash/current/bin/logstash /etc/init.d/ +chkconfig --add logstash +useradd -M -r logstash +chown -R logstash /opt/logstash/shared +/etc/init.d/logstash start + +%preun +/etc/init.d/logstash stop || true +sleep 1 +chkconfig --del logstash || true +rm -f /etc/default/logstash || true +rm -f /etc/init.d/logstash +userdel logstash || true +rm -f /opt/logstash/shared/*.pid /opt/logstash/shared/log/* || true + +%changelog +* Fri Apr 17 2015 Serge Dudko +- first rpm release for v0.0.3 and NodeJS v0.12.2 diff --git a/docs/archi.jpg b/docs/archi.jpg new file mode 100644 index 0000000000000000000000000000000000000000..6c4109eee1f1bb79939635f3f22ade57f07c07bc GIT binary patch literal 41471 zcmd432UJr{*C>4GSO66eqy-hJQbdZYHMh~E*Tk|*S@F;761U#n`<|2xX}s%fV+pUw~^LaK{Im;L8bwK7B~d# z0eFBt*X(?5s9(Bt5dbeLEcN%F*G3-zVgi6}$$u~ZC-i^3?Xh?8u>$}aW-#B=-p9@z zOa}r0gPYwAZ(jgloCNbv1^C_o(@cC|n!_6;5KPNZ(~dvV=csAhA8E54m@7saAPkjO zRy(I__Fy^`Odq%XE8p?2^bZ<92oR5!gNK8+vz;JS7MHv|uRFUr>=gWX{4a|Cg8ZK) z-QD~^o_8(*@VVE=^|CJbD@x_v$@_xQ&-4qsYnqqAv=Es8`G)UVYQ8X-Ui5P{RtMAK z06=@-y7zhGAMms)!A@69!Sq2e-`?3*`^wM!U=Le;127HNrG4e~bUHr%m#O(Xc!HfY^uaWUhtAvOn$9IK 
z%>$+%czYUA+X88(d*$G!MQsbjPuFn6_tH-q=w?0K^r>xu{L-^J`0Vfq;-s7Pb-Duf zQ)*p$1z+zgKkL$4Tz5XN{S!X)hTBeG1Zkts@b)vJ(gxzE@3`izsRgD%JoKv$9>!F; zfoTSA`)lW@vXuhUYJdsNHNXMz1Z=^l9pC|w071YA&;rhazc&DHu;e=647ht0=R;C{|vCm;)*OfyWE*16!ujVFH|e#N#LWyN=*(Z;4Cp-@4!7mzqcAma2tc?forl2f&T>EGsYTQPy*; z5Y}U?7k@(uvU0JWV7{Moq?ZjOu_Oqd4Pf#uJP>)by{B$0!1p zod%zpzp4K*a(+Yng9C@Zpg^ho#v{4Y>!^K|TH7C_+YO}K!Pg-WTsr_j!}CUvx3i;@ zub`}qjG~}AxRf~vYJ1p89T&Xj<|epPM}TIb^OV*@B@H%J^;Wa_a{#joD(^b08px6=jZMJ10T5XwgQ09^^>wY?=*YQ zf^*X_g@Oa;E2hT)K#HSK2(KsF)=XEGcq$VGgI$fyIJ<^+O>B#GxHv{J$qSL!E4u^ zef!y1_fykUB-H9)Ek=5JM%G=-yQmQVdl%&+u%DSGfM$)3<_JK$pN4Kf4W$l%fSfRZ zmJZd}`FYXM($O<8GJ!Ph0Rf8k0kkx9bhPwzAiba}dpjLg?rZ?fO!7Zes1 zmz0*3f2yskZ)p77)ZEqG)7#hobzpF8d}4BHdS-TR9=nQLTi@8kZ*5cgq5 z|KMvs$QLa=Jsmw0l`k6F0Pv>UPtS1p1mgj9L#At9>_=p8F>{=adzn|W>*z@%45zJk z=k9|-a%0D^RMvJl`~Sz-?f)arelzxmuR&lh9Sx{Fy8QqYAQ4|hi30x*+_V`<8<)P= zB`)`!W-<=CvLDZWk=)&lo*}o8DF6!J|55EoaZwc>J)te-HdXOtN51?vRL9&mSM88WS>dw_NMI@CDl7Bn zrEmM-JR{`8^ArF{0k))x?9lZ*3eb*fB-3n*kutO3?Ia|L0>H2tf49^>YMF=TTL{wW2su9`yuCefJlcq&FT6{936i;6L62V*+H zKp$+DAWa~nCt#_tfk6}aB!3nK7`aCQb{i}%NkCUwK2QMk9s}~>5fYRFG{Hw7lHB3A z1uO-)8UFB}>^hOVsA6vaA?6vajqoSXY5r>zAp9WWFI-wgjS>m~B3p&w50i`0Tbp0SXIOwkSXW*x&+L4mlVcK%@Wz@R4;2z(brRw{^j1e_%@@K(!l*cM2&00fQa` z@u9cj6yPxhs9{?#`d3af|C2QTcnWZ42}&kP%Is($aBeRI@K?*Q4GC}Py2me-M=ill;wILQsBELjV z=0Fc@8<6P!OD=bM$UpS?FI*zGn9SfK_oyUNfERudB)kQ5oF5H}`F{|Y^xvH=OZcUo z(A&6Si_epz;$Je5wenl7e>#_otFr4J7{@Hv!M?;qTw|^|$Kz9<@+m+@eFy2JxhaO! 
zoh3$akB74n56odNKY6?n0yg*&s^u-_!geCys%&2SbxzTF37pSDH@_ywWo|=F4&cA( zrrDax8EMH1y@7RDJbtckSfniB#C-lTIbmM%V9c>EX?&K?PB?f;Z)~?r@1@7l1qH3I zDtay0h>VvN6R%@G&5*D2qDibDDyPtf3(T4D++`@SM>NOxe%q&J9BRa z#qX4+ttdQU(p2&aXM^t7rT1a5e=c1z8vF`wXA=rV)_J;KbC$1^9c*#j!)cN_a8Xt~ zQH17D)6$be?-M65+p&QNV9@G74Y{4ok!Sy)ulOXbV&LgYEK^%^F_R*J< zoA>O((goJSUZnMu(LEo;l`d${AS@biTT5CQq`PUkz$ zhBiAUz}fED#TW1RX1uYMgYC{fE-{{7{7V&DLf$eCYN4TbQbMAUaw6Hz^jyEomTlk7xj~m@6#eRL#L`Qq0#8Qgx{Q~NlQCiP60aZ`55YlcXPL!LfX)o-bLhEU82OQ);PWi zUSvQtz$rlm!D&@v%P5E?&7LH|TR%t)=fpdp0`FfKgnuxv%sW&1Wp!fzxsycwDGs*F zR(*FSDvoumxl3DCJ>INOH&PPL|9%W9V4g9drX_JQ zpIhGvClo+drvPaYs~P(g;C&M@d{f&$91Nm5}W`8Ku)*iD|>A!hUfCp*Q`?5!HZ^!G&`@_396by&YKf<@DIm^;1Gq|3pvH#4dt@^j^)|e zaW1^EFLo~9#VqJM$bC=f1nrwyIA`UF_X|o1RfRn3nVE^Pei1#MZhH^r53Izg%ub3t zx@cr~^G2B!O+MC;;ck;J@_ZN$DX8vQbbr~mr`Gk23QvwRLO89VWS4u8J z7YvD{ksHN*PB?NN<9ajhN`_bBs zaqG**_j{_|ovY`2CG&Av%E;_%XodbVuiz-TACAMo4E@n!!$B;_b!4XWBlyfqV$LmW zQuK@K%BLxNv`UGAC}@tTCJAe(dA36Iey zPH()|_=MiSCe0m~qIXiepnn275I66?Y3k3&EtM^>*PLOZ!sJ&xWue~T{ zBKnSXS-6t%bgo4DUSl@<)ZVAn0cGz}{jKmWHU`_XArzqd%ImGAc?oHY=Oq!xD8TzX z_-qf#+pq&y)~%_A9h74N)*+KDA+Cf>gIpf4HZ*6tXr~iAG^IBcz=choaJoRQ1d)IL$;8H z2lwNXq(%lc#&^ee*C|53m~zed)S%|Ryyp7Y2^y`$IK01_fBh8-z)ox{sZZe?d9V)M zZ4sHvu2Il;`BJalqts1WhU>TG=}M4OW$oM-?Vd*`SMxgIh)#V(cT&zf&4BDVN!yJZ zZP_f9(P@{5HE)%9@W@1>RrlFdd#iPf412aA3KcioXRO%A27LnJ%ewatdzt7o=-Fu5?iZkX0_e6DctokUxZD+piw+P9Y6^>BVu1aIb!}PKq)I0V%fkWh)RCg2TT42EE zVf$;hcpIOd=jI&svQd3S6vm##Z`SX56BgBeu|KnSX4nwPcZQcfXh;*G)oPpVq!~IBin1%qFmXrDza_X|iLxsmv28j)02t>1!% z#q~zC+KDcxna-39qd#x+HXxbhJb8%f4`Ee(EAPJ8*1G3gs>_~rTfX-tT=zmoP1Ws4 zg{$RnS7pQ<@#cH*igne8D8OAb3{jhsv8kv~?95qBQVLoQ@11TGJKH+d@;GYi(k^i2LE{=76PUP9|u*VYngg923Nl#tBR!`fg= zCo3uE~gj^Il9!s&D>yJef7bG9EFeJ_=S?Gx5GO_=t^1;v~Fax zSF!d=N-tvkeh?zJ z0R^b<+JdiP-%x?Uzw<5Ot*?z_V&UuH4`K%Lja3zi;m3b&$1`LTj z%m6#k+=LP6K!Vmfz8pF(08U28mSy-fWWfsh)2n80&r28SLMyAFw*!0Nm}_8oflGG^ zv4#Kj0!A>VJhzC|GvsUJ;>#ccID$yGW&njXr)am+t)bdRq{(UWKPl~;LjI|#`&Z4P 
zT5=c}iec-NF$lII!NrM-0~El32SMH|LIK9-{%-xhXi%i16Pf!z2o)yrSK}05-)aew z4D+D?G8DjK?pH+P7&2GbAT!+mZ!so;f^vcV5LEhaLa*ZALvSlQF`7t6;&?Q{bmn>+ zd}8Le+5})=wV(3M1l!yA)1IZ+ul7XZ=X8;%DOhUSU(gw==(#`YhvFj=erwU-r)*@M zL;CCCyIz;x*?7`%EeecmHKjFRug9z$^S!a3xw&BfqhK$XvnVRAEkD`eE5c69#|b$c zQs%E}I~Q^;H#fpilXEid=3ibA%@z$-#~NMOUw1f;0vu&%#BqMws_iRBJTzJ)WrrWx zyN0C)ZI((xJN-H*NjJWcbQ^)U+d7JIYlbcP!Rsi%Nyj#U!k~4;3?T}18P|gJ1$^w< z1YRSc!&J{QSY`XP#`6K(EG{C~E@(6Wk}8c{OMh~@6`>N`?>R;R=oZq1F`FA{DcgZ1 zIE#}r`o_0xDs3xk+h%gZv@zEgBXRa^{m7?*S;*eI)>)W8B(!nqkE zRjGrx=RtWl>m2hwtJa_@G9I6LDXV)#lY73c<_u8}+Q0rjR4e+>Qs7|5)&A#(MFRY_ zh5@f9s~snhk>Q-=J~ckl8#!Ljy_#?FOV9g zf6UEzabcjOLf(L4=z|BPSZk=fM`M#yB;ZSfS?MBukaowOKK7!%7y3U;pIIjD` z&y4;qU%uE%h|UrPc+g*$3M%F%I1ino0KHah$X^IR`2Pul1KNBtDh1knhN3OXAB+v> zwMTb{L9jgd_(~`Sf)^|`rJLDAO3%Ysh}xNf;hUGs<8=x0?<9{`NbOSA5~1TuS*<#M zbx;M=_5t{;E6hF+Gcj5cg?ODTy^4B(y59DfxM@IQHeE{N^74Q_kTISgUkcP|ZXUFl zX~HsEYcZJY&*~M3&%9{xaP0(gD{2r9PVfYwDhaIXxhD`lC%V&ZxlYhkTP0G4&4><; zsAqzTSc9jirQb@O>s{BFeHBHlBz*iU$WF&zhhEh$S|KojGwiFuY94%GTG&R+frjh; zos+wt1ssIhy8+72`@dC##{OeJ6#T0aoOG~vYaruu(^#r#PPt6MLk6?kYMRRuYaaBk z<~!5pi^Gs-d*Cp{$uh@6?Z=HPyD`5Ruak#j; zj!7@CS$`gHD;LP#!OtYL)@^2oHubEe4-ISi4w$ zzo#sf?_R>eaiM#uW)~(HT12O0Wx{frF0KZtnI=moJYO94pXuip)2sXPhR1o}(lwe# zkJ3AQ>Q7G@Kc5^kO;Grrgft}u(|bLd(r#F?^r)YkSpGb^>MDe!{fe{i7U{Flef4zWQD1541+m#_~PYKt))&Qa6 zjq$dFP@yAd$4;%QAT1)`{4Oto6~kNyqJxaI*b~0DSd!|c$V$C>BNmDzmwVpR6hK3J zI%MZP%xO>JCay1reE;prczi(L%`#WGYW7fL?&Xp7bU66x`DK$k9mM#0C6{+=<&0Gc zI=1i`sooeY2BB4X$21%_keGH-5NH<67NUwH)=&}E`-|Z&(t9aDf9eYPp$_3LT!-iw z3vM?!QUDKB4{6VR2m#XiP?N*70eSA1g+l!emkPa>P@1g)37C3}j1uVvh zlk~n?D4GI%3HKx=R<^Wnw!)1kTgdFQ@cYm>;#RUQDIbMLPP+xI1%az#Bd+~TCTL}K zO`QrFKy5@BKo`sf;8g+vgezd&4mCq^4&83VRn6i^bE^@`3Mv#JH_Q`rx+HX;Bn2>3 zModdguql&X!fZpCQx0Q}PA3XM2b1{ZZbB+m^}^*RqIt)@4sA_rBTZ{?AVcBNvuej( zQz$?L`G97~*s}#yMN_18_i>}?`5vqDkzOHtWkc^Jd3{}q4W2;hV0jUdX`%)ATLGi+ zeW9A&M6qkHT~b;L)8r~Y6vwRC7sPfsNt#;)NgUPkk|^#>y1?3z@mMY2***D2*N_a; zDQ6DcLf@-6xvg|D**V7&6?g1}sM}3dSnbGuvfu)| 
zP%Yi%u@l;^2rHG9ITC2IVpwo{Tq4Bx(U2ak&yDVu2M%i2kDSnpi%AdYLT!jSm>(nT zeJpy|dgG+1>KTzZy`gIxD@9A)=j-LtjaJX&nqw`C6=dj96me#CrrvLdPZs=z6`>&Z`KnqMB`Y!6JD;CfK0 z>=2fvj-?%{6M(?DhUUBi8=BjiJb1+9nwB#n_v8;YiRw7Zz(@BxTm* zViuE;WED4uG;o>nqX5i!pE~JRx+>jsNfxRt-<(U7j5i1DO9EojL*2J7IM^~@)zavp z(@8M&eXOt)ekFm`{||z)Lzn>4s={pJQ1Updn|2iCi?oh zX!pzX*-sxB<$E_9JkDy645)!&3|1IMEQvb3erO&`yvC(I;nM0m2K$rWC1?y!!|5SC3*w?h{XpN$;N9R=DMS&CbDLR}|f}L`%`r3Xey`FXjVJ9|sAg ze3hAtX1vzbCM3Hz!;v+(gwR+v?_7>GA|FgktG2nebR^Ug8`79EMFDD2vDJLN>n(>C zQQ?pgYbOPZfPt*y;tPvQ7KVYcQisdTuY1#l&rR6Zu%v7&e&k7t)T)E<2b$KUEQSk( z34Jwi3_X6J_mkDoN?_E>8e?DgtoL0s$Inm9j;o$BIDT7IPFMV1!l%Zv9^bto2)oO> z-0u{CH>9$rhkY(S&6TH!FMCxSv;&}}`WbFP{SDXqO*ahi_sNG6`}l)=CGOa6o>+Ns zflHz^;LrpX)sAGu4~{f|EBlMy$4q(XSc3wW!~>9FbL;aTb=kw%1lPpX`S!!o;rpiV z5}C0#+0Tq%#h%>~JiQfOv~_6w69r&Vlb`^fLat|q9)0&TH`H`DX~>`cxZ+oE)Ag>; zcV;1C0*Pky-P^Gb+)*o|GdjBYQ#PATE0Zf7_*l_fVM*dDRcrd;amDIi_U&5ovgP{Pi$bItd5lF^(E6nCs z^04QtyQJ6BClvAov1vuv6{0-69=eRDZbw52;v`|?bgt{6{#kR06T0X=^hxMg4p|Hy zPz%R{_T&KQ0I^Hk*I@nK5 ziALm=`xF2}M*;HF$vm~=$jwki(1wGM5+G);4?51J;a0Xf|Dr?pHnX7cHb@33Sv2K4;Fp;$47CW0HFi=ZxemB1T3~kDbm(~Dg0;HQFtdlbAy-rMDB#tDY(u{| zrTj1*Vfg86y#9WRlB#zVkQl5YOAF&vWa_z|siLC3#WCUvYDy~=^xcuwDoc*xMSxeSE@jFuH}h->;^Th{T0MFnSa~`D^D)7+idY0(^MaNUQ?g zS#An2p|!z@B$B|FgVYyQ*c1^y$|MP$CbHgXQ(FrfvH1A2A>AJhK|j3v)ev~RfQJKg zJEf@`?x&q8fa0jHfTt=25PzwfkkHzmEO&fmo*Oag3MK4rCx;hF6ZgQe>A%u@;n;Wc z=Z!hD==+hx6NX-hT7_y&)MBb4Ic$VXR>XIb;$MR7I7lrqMzs<>GeOja4JdgM2_+Z?H zbo$4eGf5vw+(L|z)tA5t9|{?_*r|Ev2L;f4-;u3NPXc~q!WjKS2)|lqy|+VK5}6T- z?_5`c5{nn$ZDkQ@^jVhec|cyF85xHW@n^>jBxpHjqjuX@O zFi4v;=dA8^OI-xZ@Wlbea`<4nfVs%KM+f$8ieGxHEl(19e?wxc6pRY5yL*~pstd;0 zJ-HBts@K6VTy6ZtdOO#Q1Rc%8d|O74xW;sd4rw0GkM%i^A0jBvl23KH&vbhXYPnwhT5SIo^>tTO^)f zSK+eIj&6rrrqzJsdPZyT9*5x>hMzZg+CN>l^w)~S%NCx{)j2NmO^LM%>WZ@d%K=26 z@STBTVx33Usij5R2ynt>ffEAO)gaMo;xK5(7GWdTjoca+i^j^`0k>9AZ9Jrdr0gMR zfj9iCJ{xH_TFG_g1nkf%#|F5aQ}*!3p3aL$G$J>ZI%qUqG+(*;%*&b&&8GC$FM4Mf zSvF=hcV)!NKDZ*K@uA81sAMe78>zLWv!5gmNxyslu4}bZYKh0GIdasy#^K`nS>u}? 
z5zcGbmmdzqe#z97Vh!ez@rmXmiQoPN#(S85t)UW2249I2hS($mcq+#N|Gv7N6ldwa z5)Jy>!%c{LlJH{Yegn$Ziv zn~xlT&iW)#00t8@Q8Vl{+Z~)ScW(IM7hVAvKpvZc(n^c+PFgHOI$ zhu|IHchGB6np?Y}V+mwda4jn1nv`fkk2!{d0hX6-Fe0Y~1;BE^Kege(*-s+_jTbKh zgIJ;zz!+S}NfaRd!yn@Q&w2ef;@Ex?r|}nYY#?zI0Hh8K7rpHIze(L+qBHnIC^%u_ z!KoDN9lpPM2Lw}4Ed=kFlP2#5cfzBfzmy6s_C{}ko%nHj&u`hYO^U()QxE^wIsK^z z$@;C`Ag9PX$esRzWBV}*e)DP7MHVHh!w%uYl~L@^2)Z#AgTo;K-MKv%9_zfR6~7tF zcF9%g+v&8+Sz^t_k+5-x%dM+T+L}fqgg0wLYy(MS0?1qo*ifJcTcM?X{5f+i)(qLS z)CY`}8jTAgi)942dTiQf6&OSHVQb4dt7^z&kxjHmk(Cj_ z)0K8(l1z=Ty?!Ii_-FdXy=(k7&APe^%6HlLIxl3Jn+Y%rrr?9WMir&bYq#`%+YY|s z+-);ygtqrJEZ64tb?_71*X+%_&Uq}VrhTTye#CQk;$-)_R;1?6JI_Ol#A?L{#Fw>QzE2U;=XY2CQ~#XQixphaKC^pDlShtS!WN;|C47CGWp zc#k`Bpa!}q!bH9YOYPm}A{eBO6r{kv>OjEBT~%Tki>y;Szq8PYy`WBk#;zUuF@eDG zi>v^@L90x|jXei;#S+NJ#)aQXlU~~P2<>3U!easRkURghB8+rS!c^SUQ-JRXPMfb`#@1bjkr8B2heA?ym zEr%{<=()SgNLNN4F=0SgeO_2G$)Ase4p)F?!&(`{GJ;4jQAS59nZ&&$caT*(E7k0j zSP&091-nDb&b<*d|8}0UAwbj$@b(5K*WZgj5`W8JY4D%OEV7AXF?}c{S`Mxd@vYh9C$(hXq!KXp&sh_Ylu%8r&sG;{`^r3DpAN ziRV2#3*qqk))q#xfMA9PJJTk5o7QwzB$$|v=q)fH_=RW9T8WV-aB#3gaALK({x&}# zY5A6^8WRVLUPsak&(w2WT7lMJux)nfg+QSKTCJ<_ z-IKfxN)?5}c5^tb<6bZCTZ#?}t@gV)T;?`07NS)S?p44FylE9vJR_HwXL(W_@IR-a zbz5oQ6~6WHx7-(N2AW+Kmgl;L(HQ!LF+Uxt5yZ5gz+ueVHp6r8XQj)!`WPjS)vrp? 
z%XA{%0=G)%x{9Vca*G^0$1#%%-sV=#kDgJ0qLVt0xgW|J93n(adCSP08BM`e=~|Wc z3d>q+X1%K)m`oh*JQ8GHUXVbj>&1(ryejywE?guyKwhrWRhdYO$5{7!_r!j9Wn^a0H%4_TnA~zcSi-jEooDkPy z%*G-GV{fRpo=_3-li$|x45H8Wk}vd^&EHod5X{wH+-o8-7d3GV%P`a!z-Ktu=Vg87 zaGJ8N+`U5@5wpt6z;p(9LLuGkAdh_hh^%Jjg)TGqOM5TsYqy(hw4b>!ceeIAAG*$z zTr^ajmU0Jme;RKc+vmBf(e3fKZwq@EqD}6PYq{Dw(Ab-8Y+Z|-{2=c0d9D7&v|(xV z3{poJ%QBf~-4Hid9QUnuO20VKnb~g`@+kfNC@Y*bAr2h+Mb?44xu3pXXkN)$x7({} z;f}ig&ULFW&9lmWi@X*z!LJ#x`qU?d>hvWao?*|j^wwrhNPiwhy8F&G0kdbjdUGl;U*1 ztib`8na-nO%XyQoGb;k?ChTJL+4HK|X>fvZB&t1?5!}xS1wGHsoXbi&hR(U%=hOA% z!r0jAS03D0;BmEbRkH4_m_LZsW82Vqy%rS5?ZNiwMq>)$)NbM+PS-`rtPMWy3c72V zC;I0UPNN!6jY!pwNf=$Zn(n)?4m~?oX>EadmMy&4iDO%m#$;JzBW|xrKY+0c-7&zo zj4F5M;B}TYilnT>*v-D2IbBkQ@YR>`DwW}lyRg=N2g+FpiGdWLBZ%;qnqIagZ#rF+ ztUG9DbgCkw0~CBvm==z=v$aD$`d@w(*Zf$Mq7r8RC`Kjj&doUYKPp)xTj`?TXYbfFJQ0!-)ActTGhsJ;t( zj6flLX{7^$8gwrpD8Dx#)?EYb(Hz{&(VlR>?Ov3Vg=eJPll>uCriLG;fhJSjC z@g&i8leZvau$SX9-Ora1FA3WrY5`yrGN)z$^c5JuqbOQ<&_o5_(Q9l5 zgh}|lYwP0o2!Y6_$*>jane$q2hzn~AWKPpNU~ofV4+RKCU@|GdJ>$`pZKf>JD)bW~ z`wFBaaw{@!9;zg(sx<@4f@9uR@`DE=cI`l?Q!9H>071~Mqc*lKetQE8JwXNifqjJv zfS##=uEdc!4KCrW$CwKx28?Pnax3yvb82>ny>+;o%`sw|ZX%fx?sR}g@Hr%Sry;LU zW(-#Zi1zEkSaCtlnaz}h>09*uCz}IQVoei;Rt?HfpxsF&bqlk@n&bVKN zR*BNCY*EF6yniA5%-UaK8NrRFlWC!!inkLqAfD_vc2I=A_V7SZ{6FM~#*HF3KZ7DP zz#H3u-olGu@H>*DMA7YpGgLmk|AWt2fgjM&G2=jGawt1hQCdB3nSP0oz_wv{RXWtN z-3c9s3fv)oEx@W5u0GC~j3SkzFRyv9Qvk0mbccoLt7@U` ztAFpW;@Io*?Z#2jOeC!p=$em-%%jIlK9>XY-O=1C_SGq*8FH*keblIYk4A4J4mIai zX33xpo|%ZlSIfedPQr%a$pyu*EqU(2f44D;w(j`?2dkUdH(#_yh%D8^G5Lsr#GX|Q{M$Vb09B- z`|dmwARF(Wgz5~+^sG(y(c3uZafx~;z(ZQv7DLA04&ooy_V=5147ir7P8O@jO!{x; zeeN6cDJ?b9nc$mFX-zHPGoO0gW{=_~{G@Iqy21(_5$c(~P`#%^-Zkf;>TY*i&a{S4 z+t>qR6Y)Oh{1te88Wfirv;^1W3qv)>rZBax?kJ~pO%w6KTh)lgdG$f{c>ixR(T=&t zo-6j!)fAOGEalHEBR_UV!plcejgzc91*?jROZy5RNO|ZybjqlTv2p*HzeGQk&k+~< zg69E9??f$R2t=CmYFXT~bq(%F&z}+WtawJnPSpin1bbASJ=s$4K%OIs4oRmQ`}VCl zhn^|F7~GH`^|m9juSu%&wG%ip_VEveVa+TWV?&kG--WF6^7io*^0*HUeQ#)J){X>P 
zaiKTuL{z#GJ;vV;eweu9?JU&x%x&a5hkFvCNXKBGlgzAtO2*)H3~RD~b8*RIn}!c< zmdkg&Z`5tnyhE|$+chw`Rj8rmmx&wG^TW(;wy6xYky#hc-P%R}s^Ip8rE>+2ocU%&^H2iXD*VVpU_9Jhb;GNfFDhenC^@$&u^nXTTE8^ zjH1DFXn4JGHU@l_eo3z{i){nw)tPt5YV&3rF>muMIHB4M^3>!=PeWbDA$tOoV5B6n zs7rw2unI;pA9`Q@*;a|cIEpPan_*03rJd%;a5y1BNO0E`(}6I}&|HFEs3ex_fr{$b z$V14U>UL*r))=X&CsbL2nDge7*G8`Fb;xSN^sy7qCnbWsfs3|PA!A(i(yT;#QW{Yn z%T^0Jo9bFe0{hrA6RIUC=Gc)dof57&dcyF%Bvd%^gu zf&p1Pp5O?sOD`I7!O(I(9D4=sBt6BJqc*buO+k*p2(yOMS;RH)*L1Rdqi?4P*v}@i zp~Xf2px`$7(ssdectKDnJPdxq#tVCWrM`HCRz7EiR@;SqxX`#Xw$H)D@^iuc9yot+32Dzc%nSnj$H8*(CA8Ub!8#sMxVJXxu_!3v16qk>mWn@(9?@4nJ2*{e2 zST#{KHICwpRW-eGj_#gi69Bm>2f9By693tCc+bLax<_UZl@@(hpTh|ImQxWaED*Hyi#>60|mci#{7X)<*3n zyEta{vb|>szxYpWSHB}Ad#-pmV$W1mR+Kj;Ak?nM^9ekQtasMs&XVV4S7H_4gkFF< zPPZ?)zJz~O*aSb&)FE;3>xKJ-x9cGXM>GUd+;`S)S)~-BXPxvG4|t3=XC10N#uduc zO1|9*x~@z;zoO>UUr0PzF5a^N&BL~h_zs3k`rh#dp~tT=Wyir&t+3yQ!j>4y zm~xf(5DXF%`<^D-iEw|3XXbKlxN@3b;z3V15B`-wOto-t;R1pU!lKLBkoRiH#`96* z`*s(1>&b__;cfdoM<*-u4=ATCkPoQpl7m5GIJ{njC=?tE?*rpL;q`$%E)G~pLlW4Okoqon-&ko5{FoKpq(ck_n_j@ zCEUE*sJrs;LwLmrmO2GPv-#jbsfd@2!LJ!~mCt`l+|s@hlrq*p!=SUjuL6O;uznmo z9rKV(+6@EK(pw7<(l+Ol!0X@#2Uu`uPPZibKLd|v&k1;iU>53h!BdLQbTDg)9f8#= zb$Nle#aiYsA5|B$G!*z))3zcMCp;p$*ZYRbow4PFci)K$@wH7azAIFXL)c~q`KOEA z_Xzo$&}8CXJmqzY4%o$O#a?>)+w8kqL1Z?3>_`uM<{f-<@iHRPPhmk8@|Mo5<(`X= zaE40(ezkDIr#;jabE{Hsyq$koDl>`hE3zn^zQWT_=fz2gWJ}*U;IB=2Kmk|c`MdoGv3aB;mD?kRPI7)f}jXP z@awZYaH!>Jfx18t*AZk=)L_gBWRQ5`g(_zV|7w|V2m?zt5*ud6?DpOpM}5iLwxPFDSdM2toM*)%Wu z1GNgNoNwUk4hQW0Y`L82Pn@Rjg$vk(UPANut=tT~<~T+e(j&WPPABkx&zyBHJs@S= zAald8huC=fJ&hVPR1=f)AY7QJTopT@&6XRe2QQQa4@ZP8eP(|hwof~DhOa;MLDO6( zQu0FS`;BhYmDK8B{vMy*33jcF&zAbv(4SYPFc5@ixJsapWoZd`IQ*q^%A(?j`mFC- zJcC0qs-uCu;qBA9q6rVab%}FtjH&ZpKhaVxkxpQ5$}$=&`}>z#PrKw3Cs!V-D<}M!r85u1ycw zCjstj{FFF+B+t6AG+f?PMK~jMBY?zekzAQwZGOJh^5tYoL=6}-=eHPkvi(rrbpSjT zsw*7l`sUaNf_qqA@J+Bg&o0~cI2_3V%&0~FeO!0wz-}R!rP8EreEWK=e@CJ*n zQICaX>uI^M-A%Od`<+>j9D{h@msJ>L6X`{d3HA7pi=_!0U~v5?wfDg`7md|8gBM8d 
zUv<@j`LyZ`fIxKPU|sxHFymu1CfapSE(DeF^mKVj$6&$DDF5;^WS2Wnz5&vkIqji- zV+c;?l&O*^Q|%5FsqOn!nHmB}`w{27!h?zZmhYF%ET33Ut20Zov*gvuzEg*Re$5{d zfO|W;gfm#rb>3Ox&Vt>Wzfw_++FsP-Tb5hQeRUYGO|LLg(0@On}XxOH;i-VE6W^s74`Uc`Z0*KNJRZBNpntHaWy zy{6fsvo{)o)4SycHUlfpz^rZC(IgcuSPp0|;7unz;-psi0(~9d7f=Hj`}-G?uw+g= z1&Fk;6{UW&kbc9mO1%0F=w6}a)p}gT^B;YbIZDri*D`Bg%q954KN%$#0DR|gxvEWrXz*|A(7)%fs846H7k9_xW*%^oAUfQDX6 z5Yyh+3_gK6KYDLw#+%BQoLwiO1M!W+N4wXh_mkVu97VfU!bJjUxyki!t)iikzKXWR z!8cWTjQ32o$37Nbq&d>xe|7Q_Z72A}Y~(m}OJtA&knVuH{GINwN5$Yth69$;+nYbk zto8pWMr&cFwpHNU%&1=CNt@-P)FV9Y_>u8+9Et59=~Y`eO&WY^8v-5-{$#uf zU1A-orbBL%BdA}tVd}&Bp!lq&v1;&tXGlafo-x#Pb>cZkJ`7I15kzHIIfYG&>Ri_> zb-5Q$VwOcN7=LBexg#jTH_DSNm+-aAec+IsY2G}1cVPvDH?aR~9`V)d|nv+OSZX`DT3E$-uH zYZCYlp(KT_Lf4~BQ%t1sBQj(9W{)`drb{c35!ENasJtMSr6j^HXJofCPfUG6U(tqv z@V<_Vw%*3if**ZkOYHk8!5Pb~8kBYLzw4p>I zJUkNn21&LIA_*C*3HyDjnjS4OT)90Wpiv*0r#+nue&;gLvRB4Sr6=&udAC*Z5n~@D-;k@%ax42+mi9MKQc4n9V5Enret% zW#{^{L(z(fie8I}Odplaf=BPns?#4}mVajXI;H%ibJiPhb7oNx^$vRd=tBetXzM^H zT_?A<3pTF1>5aqR!9Vw(-ygok-H$)CZAeh&BllZ~knTvs*Emp}*q;9B{>AA55=st6 zU<6me|BtdakB7Q#`^U9PLW_MHqLe*Ll4X+1mLy~enPf{sNCv|cWoL>|#AFLu#=gaj zoruZ4Z)43mV;#&)-_vzn_kCZ_^ZcIg@Adt2W~ zp^q{A`9AP~C6MlwZ`>XAgeD;kXThmy4We6sZ(&)FTrq|o@GYD=q2uWrHBKCzJ96HD zZPi)BY=rtWvQhI(dE(i@x6mZ1q&2Ne9*I_7DsTARwsK)g<0z%%FHs-{tLjxDZ-32Y zHbjRhuea4l?pZO*kCXGtQ%f)til9620iK#gBYPyZX2nb!-PYK3gWVShX}Zbl2Bj;N zOSVHkEvC;$^D^+ss^7B!bR}|!b_7Z`5-zNcQ;BvgkCuAoY=)YJ0a+&89+;jWLBycD z0Wk78z%2PSK;MLHpQxn_lVEjH3}oytDNhzgeb<~nScbO5t_cUM9)`E+@$(^%SO!xB zaeN524Y*O`26;p~g4fYXyvSR)b|Kq~^Q=@~zdGz|^H3ex}4`Q&oZZhN{~MeGSKx zH%CMg1?lMS7sk{ciTP=2_4)ju=cQJC*S3LQu+Ob}W z0}Kj_Z9R1wLJCO)Vas|{5o|u3ag9QcBw|Ws{Peg~zkGudRDrYcSss{$(DiG$-UT4v ziCS{;azf~Dx_F)`5HMb&v7&8zq4UjP{~rN4us43$F-w|$9S|UBbbdn9X)ns zusP&MPl4U$EmGHOqtJ}_FBc@MqV$W+K zzrq2Y^Q0L_rmELr-LpSpZLt8P5naS zCMRq+G$48WSuN5KHe_${_+dTV$JzW!`}Xh}QF~x5do)WkwCb3M)D2lp6EOtSAXO>E zQQ~|f^u(Bab9l~(cQ|k0)Bd9D%ktIjeHX(t;A1SJt!fwsG7m}DiWI-XK+r!ifzT)F zH|a4?)cQ!`*l2;7E%Y%elz|-Kr(JD}U917JoKpel50$&9V?sLnqW`lgi! 
z=PNYsmeDv#a*1W5R~F)S2F(&ba~zia{xOoyZ%IQyCHpa;#~|1BOMlbt(*tN)>M)Mi znG-ln=E#Agz;V{_Z-Jv|I@`E|`~wc8-=~e!P|JyTkh1~Eh4Z-k#FnS})Ngy}WK#Q1 z76t5i0Py!%2>nU|Y-j+Ov~Uv(!IaUUSN)Gess8E*6P6r}otLJ`sv0p8fdgtOqHVJF zrZj}2L^7)PKcB$hlG3%c#CJ>k{6l`@@~W^G8X_-$2nn2P*6y%2FuIg|@x%1Rq^?bc zP*q8?uMs(I8hK^}PqJyuAwU_Oe6E#~9(2Sl9mgdo3Yl4rYeF4vVtTIq zaS7()%uBLB-c%?(Nk22k1WYZQ<7I0MK<4jky~i7 zf7{6^?YtG&s!d6S{$_Of$y+6cf!vkjHPWt&Fb)JmE;;*A|8oqd6P7hxw~s@&1WTNI z#q&6Rr&wKN_;XpPM(KC{`=?yo?}zwR(nICD?yfINy1W>Gwd?^_rCC*&sLGj5NFyxL z2N@y1l%#%(xSH}vzp)+4S?8Li!w?egEYsN`=*Gd*J{;E;aTIZiUkuuaK22ftRA_d> zFak8hBP=;G?Fsu{@b*|=nwF!Rm-n&n3^PgHH&Yc)7gLr`Jn=&=di~plwLfk}OE)V=c|4cN^ElHbfTN8CbW5koSpn%2707$uz zF`$uR{}Wf&hNksZ%LkBqia8`0AAp5&QlLu&%Ezs09h1WNxN>pH z+$_&It;fZ1*qeP)bEFX^?0a2sg^oZ_!VF3r6=1fa@}ewZ{s_o;U;{A z7A`^B{px}j>Lb(K=TmCPBE&ZGU>e+Y*`%zD-Q>*uL72YN)1UWM_vPR=&!PmMz8_^C z{sl6DM;5sCqa?Sea!c$A3a0K>`i{()XJt~#foFBMooOc9i`rIZam@rhSJA;cx|fPD zJp6hXgat~6T<4bcE1tt&h&Y2iS3>LTac+q&SJ=5;Q5-R+am?G(#HRM{9hMhMFg};f zV>Mk{NXB2>u!!ZY;GTSEjPlJwN1Oy|B<_!IS5}=#h^#NtO?5{Pecy6Di+~bQ;TU;F z)ahT)VcBuV!e1NOF1DKMHlHk;N^%ZI#T+`{TH*Qdg!0J^7(0TYdrNPL|Y?yoDy!kq6`@Z1ZCAVxdt9d=v1P~I%zvwyd+hb1t# z8k#dsR`LAiEbPkY@ht0cem?ZPb&6l0nkRSW zo$b~9KR&|MMPt?9R@BxC8>~os6{EH==+BsQA*7wA4o77Ul6`|l53ZO=NjY){;k)YNphOj(O#~xtE^Z;W!Z{EzK<3UzB6S zJk@>zI2cUxi%gBvs#g}>fst8+LzP~VTdJ{g@?&?u{-3w6Z{GRh$72*SV%4<4j3}Rthc;Fk4Gt{KHxlTV*QO0MZ**Uv#qMTK>mPy} zHFLY?7iQF5_+dyNuqbVld>lU!rO_bdn|&OYN?{3-urvE;;FzKEXm39!{}*+EEGe1{ zSz?_wRvPi_u^RK=^y8Rv)FET0)IhR8upN3(zu}2(l;R}ZKJr4ChUmiL$pRI%<2Zo$ zVUlWQ2|}KsD7mFsDm3AkN1pU|Yo$Li62JbLQARq7C!hVMTM0w3@J#D5{}F^ZH4Dp* zxJxpnoPV@qNQ5+g=uJc)C1VHnm5gsnb2di2uxw^%Npx8kT<%+hM~22)YVbe4R+7}S zwa_224VQWC*p<_ML3ns%YuPu4>~SMT%THxfPjH#rK+Hn^z5V&=G`5k5{b~6OzKoI! 
z5z^{s?Cv?gdlhT&@){rAg3`TR{*uz&uI(?*Whj7%q$c3?dRClYW1ofb*#0r`X|0xX zEZL(!mr+bjw4$_}x#*+uYjwfF(-xVA4Br<$OtL;R%ibo^zu82FC!ixsz8$j3gZ(<~ z=ymw`(4io%%%l?`*6M4O@8g@zPP2@$sS`3AV5bld$P5HjaI$oPp4N>4xm^j#RN;Ul z0SaQw#;@v5DIPs6_A~1?X56w znLF%{2`(=KH(457L#(*S*+iuV*l7gBZ+nf!DNfh($#RA*N?FeE@~p7tsi@3YJAuv5 zOlxP6e`V_U6b>?V4@+IivF0FbCJy{aHMV`=DEad*O>DCO@!xcoU#uR8i+91#SzHj!HYE@L$GmU{m;I2LynMR4L;^h-aW3PaQP+lI`#Ov zGj2Wtwo{8yC8vFUf)yl-diKFDU_WFeWjC6}dZo~`IeuXok0?CYxw(s;ck3Q1?$;ZFONKZR< z!8d-n`12DVh2_dL`IQ;-pwO(5rYy(1tskR*+^UR|80+!BwRa?JPyc&4@On;z9Krd5 z_GdATQ`RO{OVKrvZhdL$XGERuCFT43A^-mr0J3>l4SpaSVlYs^*nt9Kmd3UI9~8ie z`%3}ATeE`{VA1|ZR}f+#Igy5V2>k2}%um?G2^t)DCWw>DiLSdl;j~>=q#Y6tvs#C$ z;{K^Bbqak~sRa82PkByu_V+1zMaPdG!k-7G$oI0Ay%6NuyE__D%Daz4lfRjA5Va_bPaVAJh4-Q67gfn9l(&uQb_wzVR)umrV#bR2nUX>+5-{|Z5h z0&LF%3R?!zff7!0-2~JUEbHDssu`%aer9QaMVxb{e)T_($qbQ%Zi;RII}oQ`?Yz%} zWCv53x%z8hU;m(lmTAP&1^9-(dw&s#|kqOSA-s zIY2nm9P{nbrHo^Xhkw&~q7}bWXP#sI_>mR)PA)aSzWBR>-&N%B9lzW1RuwE#MnmC_)KLiajLBc$&v^O;2_MaIozE>^KZc zOL;ayr33MmLt4n~errG|n;7?nQY6}#$PM``bjk|>eoTpJNTbaH`;U;* ze06C&!J1+oS@oN4=lP$5kKcRKfZRL;p&X;%9zSRb)Lk0_Bn&SKn1Ctq4LGuXCV&WN z`zs)<@sx=1gSe6($W)V(s&>A_@7Da)IUnRA2NUqek0ObzQY*O)G$c)=7(BUpu#c#V z1pDKNsDoy-4mt_>yOaOw($IlAXmU^g5<8fM6|S_GLV#A9@|(`t9R) zpR$@WjhU1gcFzd>oSm)oO}O-0;kB=Wp_h%bUsU5C67Qv8Wy#lgd6)VTkUjD(>T_}m zAZ{X?Sxy0NEqSfp$D}BPVBtOUnjtR(@oGhDvFa|jW-Y$ z2lG~hL9v3&a_P7xzAf*=FAuwZW`16RcMl`z?UCyb~z615GYpmFHn-`I zOhI~8q)Qq8+UD5H|12Rlq8q-Hn(_MXm$}+SHwzr`4ZL=f*bgFJFSqurL}Y8H=D(Yl zJlaY##+LCSX_>)EE6Y25utMjw6h&4Zog<^iVXJj!zwTA9p6JVx5*pvXeD=mLRphJlT(ibawy5I0&9 z9F~)azv=XF#HbnvU*|M+R-*0`<1|lBQLQYD(Vwq`Y>F>I$jnYLqIj9s$!w93H@l|8 z4{GjwYG-^vSd^zHsiG?J9E2F8$`Sxsfj&YuC7fVn7M;K4V&1P$Es&r1r(iy|Y$XPo!|kHkbrdr*>~T)cChM zqemcMUf>ruG7`vbGM<$@t<(R1AXQ7drHZ}C3IG$Ax$L<|Tiff_JI0iYfX46sT{ z3^WjeTPc6T!Vlo#hyC1OM=3hgw_Bh%;&YNhxJ0!cU%GkT{P0nUlW3_~;})9I-ar>9 z6*0lGFfQ9yVw6gbqB*NhFK_6&vRd7L_O(%R{^<0xdA1}F8UautaNIaFp?4NpXi6~s z_fy5yyftNK&sw(ox|A#PHh(b28dK#D_R7O>C@cdmQX?O=(3NNq5&UnLy5K5GsHHYs= 
zVnf0Xl?glsDy`EvUBTIsQU=*4mTqNIq*jiU2PyxvwV%)hGm$kis^^bp-9Q3Rb%=jwM6mA*@=KX)~3XBZfqX%7+_`7R=_Y5+GQ2AHKeAbX0 zH<2qq6Oj;NgU^~P$4%&;pZpww;P!7oH^kw^G%A>cEnphGp?*gGEF|FfH|T%U@ulsX zhWoa!-lVlpP0Yf4Rl!;h;u-KISDgcslO092uJNJ;4MNxI{}fvZ5ZnJc&@O6DQ_bv- z;BPwnXFKbd8uSIgj$&HEZ7Qe(L32>RyFt^q1=rw%lKlT^4-ER>YWSz%{%%iz_T!&j zLoT?0|D#Ai2_6SZ!2U}KV@GIjQ0&Nw4^Z+7goyiZU424w_4aGk75kq}N&G1@>cJ%l z6K^ni&?)3&upz+9|L+1*2fbSRkHG%wl@z(_H{B_P{2kJB5!&EC$N0ZZj(?BwzltCJ z*L(oVIHd@-#h+swps4>Ip2o5I3T&z^;n1}o5Xy^KRVe)F*ho4JLek4M{PB(}`NXX+ zJ0}?$Ur!4=9R6~@oW1EmrQ-~)mEE+VV@MO~tLi{(S+>Yoz?~`Kvq!RN37h+;F!h|T zml?OnnI<*)htR>l@Aa6z)5oBGY(&^wUur+6YG0-rb12Tb!0pLvZ=+EVq29Pd%sAM{RPWIC_)Ti z3ZL9)88`$_2ITcM4u+$bTFCiHi zWIU0HW|c_(f?HURgT7FBJST(}_kdZwWwCOKA{|6ck5kl$p4+dcA~iZvpVPi!(O6bUF}g`VKn9$m%MB!cLQBIt z>^9kB`gpwI&-)(HagF6@o7$7999IN>)5VEoJje1-G0!DL8ejOIqkgWw{FQQ;^gLfM zY|ZSwCERE4=8Y!om(QBtK1>--steM7O};&~HjJB^580gUfj&mRi95eUa0qxRXBRJCz*xMur@ZH);bD#LUSz2s`D0r*j#kS`a%$aRHZ>~Ry$i{YzPe`KOh@TvTjnnj`A}^mpxEc`?G}DjJ$r*f{a7&k0-^=?_)CF%|E$%p#p;m)MZN7yF z3r7LdtB*0NiG=co5M>@a0t3Q2UGkl`a)CuhWw`R&`A)r8UV;1!h^#@yrze|9_-5EK zxRm;<&dSt9^P2Aa?KThNU-Ep&&p*qV&U}bY@sW|pPNKHuMr2yh*Wp`ItK7G@zFrSZ zraFBHY{s6gB*(Z-F*+ml(T#88Ssvzk&C)sCbt_F$zifN7D1d?P%zYnJ)Uq51XyJlO zQALjCCW%wV1NmFNIq!H41S{aT$~4(-=#3OlaAqt!_*(oB?_8kEV!L6maM|f4*i&z1bINAGCJD=t^PlXb#fKR_JtzaKezRx1;Ikv5vX`!j9}+z=hsx=jhz90tU9l$cdy;__IY~7oD z_AJ+jwiq2B!F!L1+wmY@eC6^|vOYewVT|IXSP)xYE7MP}X!dWn-|t6=rRiqF9CI$; z$l(@>^?vbQy|gQ&^JY=Bv|8qtd3FCRxW4Ahu+~=t%Yh(-&6F#bnbj!S!m?+p_37%d zni2C>{TM-ww_=A6acP>;bJ{&@5D0uAVRz}4WVd-eml9_KHXsRfQV6XUWIO6 zT3Q(1EuS6%U!bYJqrj>$82Dv%219NunAqN-MdRs1kuwjBW_QzUw(mWq4VlBJuhx+7 zJ@*pC){D0d6sg4}`28aX!GtYsPqw{@5O95r0x0TWfMEMgcYbZ(n--Xr8b+2~3iumK ze>iH>VEm=(A(GKK(nQM{Nd<)v;UkMCSIlL}CUc;sM_lWn8ng0!W|{$|3}b_lIlrds%Mv7G`F@o~P32!m-gxZI z`}xM<{F_-vdOszsG~l@xGlX}ak4co)?5p5f=X16^kM|4=~!cW;Q$NeDb21H^At})1#=S8?r6OG$oDTiy3Zuemz`Pt zX#3Dac1N3cKlwv;CQP~tRg6pQ$t&w|!Ou*XUb4>X_+j9rqUJL9a&Rm()$YE{8NOD_ zDK-AQWR)jw=}ubroX(_ABsQrDzP_pJEOH3@aau*pq^BjWzYy-PUudc8?JCM=sA+e8 
z)+RDF+{O2e`<+#VL-+El?=$pBE$7XCi(`+Y}j^%JcU)(WYLYeEJ!R-zGcblSQ#s&7;!yZM=OdW1uq38n>dhP5%!NZ)=wod;W97z9?hR*fXGg1dB9SM%CJ5+qj?>%2TJl{6L(PE zKgju)9SPE;8CO5!)v^H$Ui~XI!e2>k8a?c#PE_e>$X@FAVgDkvWoDM^b#YwVCd}6I zVQ%Z(9hNpt#ZITK7dU3PHx72bY%ilX37 z^k(Xmy3^9)mYh--OaxpNqO6tYB?pZSh_}0#6%(xJT@5`UdLk8@*VfWm4AL@i|b!hY{o#I3`Wl}cj=lPps=&nK7yLZL?Q*_1 zZxs~chDYz#iRanxFo^4k1vrOiy)G~jh#sn!e>g#T)*JjQp$P0jR;RS6g&-y3An1g%DkoSa=bssr#$UI!ZX zm;B1C!CEYFxmNd_Z%&0>;L~etyQpEZJ}4=}$N?{&L~YOP6?ARwbWg=MY`-u^j@;hC zv-Ra?Qd4+il)hJ)arGGxMj0M72=L2#x<*!EgVUGHzoex;c5^)Q-J2bK{ehnN%i%?7 zvu~ZRWbW+}ZWPW?UVB=j?h?zx^Ws{p5JHJW!Af;k-y6u-%2RL3$y!vva`(8{p>757UG%;5 zUNbs(JziGYfs0Q1*t0`?!N5ok#)_8rVIkyXT?vJ$s=>!yW{Xu9lRQ0y<6n?&5Y?QX z<<4DP@BD6P^wDNcWl&si%wqa@rQP%dubBn>QC$g|IXq-|ZtbpiMx5EB0OLmA@}iEp z57Wb+WU@t=UUjIGikr$!r1H!i{m<$ut#I6`=-yOn&pwYWuc+?LD0*ID{q~fwK&-Kj zmSI}H(+iR$sJp=S0#r$-93{WX_5WcGZD1jrao^+Ch<_theSL}BN{y}l*4MXI+68TS z!l954c}Eo9>#p)<*}&il1cK-lnTKqM{Bolv&s(@+T5d? zN{b%&?@RBXY`YM$lCdPCX8-d}6?VZE>{bt|G>udCl`jfa%+o8|@X#t5Bk9{V1Np1T#k_x-yv6I)Zg22U#5U5US@*1&%)KZhV$GaV^KSPQc4 z2GmnMX1;SP*WV;q%9M2LARV9nP&F|fEGnyoUkFuR5sEW2?@5>+qrI*u!y{6$&&$Xd zf`p6qYG+85YCJRUaDD$k)&`!_w=uf=@eccamZ7fVQmu}XQ`sUPf6}hPqxE1$eV7d5 zo4H8&<_J!+C(a(nU|DQ~e9`(?z zH4os0Yr(XJ$A)3%eyMG9hdS);{Yz{ZfI&Dcr{mU*KhihfB zS;v!cn{gm#at=q<%U-8+CIiZ1tBXGM@by_x z8Ge+Ex`-cEYH7Fz$P1(BJ<03SuP3hY6~nr@WXfP5;pz!EVIEiF_l@;QvKHWQ@0`ah z89mis&78d*Q70}YuE~{Z9sB%YTSM=ey7Y}pR&5uNrY|ZX0O|}bfba{YHc`GRTqV6f zTYV<}+KuJo(-D&nn8PtYrutYG*H^XX(dJ&Cp1>raRhM6g>~O4SQZtzAmLcwtYkOhJ z8~U07#uDe$MT10doxUM38Ko3Bj4p*%;9?=ecm3#>h1KTYjiZzSlqz!&Jql}kn|{MC z_QqDq++CW1Fa7B2J0AV?1;|AG2R%{`tBz=DIC! 
zFR|U0Uc=qFFN5eYSX<;lr?Y5X!4J1tD(RE`kmjf6_lL5}ngVXzX3p%63?0(EEZ)ZQ zSy}npTaeH`xK>TS@Ss-)HFhDIHyERRPWDG#mA{z!PBxu@ac@cXa}0VqIux;012U;y z-|zb)D>A~Ihm(_KKTV7v7iRKE@2HJb$czR9ZCy2s`cQxCQIekimd$}xqv_v!VAZ^O z^Ji&1Qz!w+26wHqqVGlM5mtP)i1^@KL7DMI^NeAOk=mEGab5z<$t8z=nkFFh0a##q za_wbssHzaTSdZBGX>HbA>xzu!#4Z2jt3L%~MsJO=U+epj9Dudy1w_|^27t#)Y zLj4Qec`zNvig3-m6({Jivq@EcAGb_uj4m;7=R$P7d$7|WHDpn*dX9vrh_moLslqp5 z!g{pIrkj#4#k#n><+Z=YdWf&Y5}ES`9P@K8l@l#VEh~Kcf54$8{B&JgEJG7hxK#};O-57KC(^Sww}bB6tqFbp;gT4jN_^c z51*Ckg%4DHzMMUw0Ya2iu=%j&*N`eit!Tud{W475i4iKhB>EXrl2es@|)p!haZuf^!kpyoQGn4Ll26!y5T`v<1|^5-n>$}L&C*S%tMue|)- z(XcSwGsMt-4gKdy6P4gs2#E}eRu#!EE}?6wxH`W_zA-9fsEWy~>+}~R= z&2hB0^1TlfBE2`4nfk@hd8|<7j#4APFi|QdHTVoOa)U8w5 zr=Qx=)jIR?PS;pGu&FD!lLb)zL!E+ALafmvO`<|?&&s3+9C}0tv$|s0(x@M-Gu<93 zqgmA>fyHMoUX^9+xMmp~wm$mDt=XUf&6BmC0$W=IqJZZx$sUvCxEF@^%23(ri_3O# znd$19F{ud}zTmF&{R*RXlsD5$Q?`LSa+XH#hwbj0Z9&eAz@;Ai1U!4#w{AqZl<4aA z6-0rqt#RYKA3t)r#jgjQa*IA@5BGgQwo6+u#X^2ot4)PSC_|qYj5m5-8XVTYb`JBQ zb7(_&qkLY`nYPuaPrNzotlPN5;+-+kk*&OLZfVu>!oJM$OOXB1^u=_s`Zrw0X3n=` zN>=myh^F@|iOJtGTBi9_qKdZH7ri_lzq0@KPF%G`(Bs^fzDH*tW|st=N{@Xcrg4(D z8r!2b0INg!>}P=a{saS2XSoN(R9GD36AnqjpX^pwVCyN zOE_9*Xl6GMO{i^_y{A>DOVw|-60hpFCk|{sXij^!?ZzkZdZ%6Tqob`}Z&w=^f!bc7 zR{H7;WkR6s-eY;njS^q9+K6PVpRKF2P>+ z_^YHm@E%&v_nW1KzUN&l<~O=VVdIWX zjq^@N1=$98%u*DE3k+E#s)`TFiTt7|EnMHCHoe~SPfS}p?ftf4_9OqClLz-P@s^n= z_R*mRcFrRi9$C$+>pv5@RYiyjZzwiSjw~T^bCpq}+J(Ea)%3QZv){QcY0lX0-b%jm z>w%1gg_aX5gX_r#RwrJ^g3}?N>Cy%t`WvgT#h1k9xmmuFkT_ov_jpCZCRvk7IP_%D z3qGz8E@7Vp)G|BWMRD=t-<2DiikMK;sTQX1Gae=j8D66&eO@Hlq%V{WyL@zFijAyV z2|5wPc3cPgQ5cjPpBd;tmgqEpo7`A({p-ZO>xGF!KJJM`){Ud>K@#F;DyF;oyceNb zfm#FZb1I%EXVNrcRbdJ8NZX6}l}u{@E4IK>Ghw-b8UDKdE#O!-sYUj(Z|rKp@`N({ z%L)3FC>YshFTNIJ+>tBuf1-?Q(U1O(GWJT1+B~%DOS%b|M=hupeex$9Ej+5?P2x_@ z2J&=1n$SA}E*yYhZaXmnB!71`)y3_6H2kOo8&xbuqq+(OXo#rB4XzO$BXD7*oa)6V zUi}aT*W40sZa(DwI2Fh(Pp-%FRj!w`>D6bmy_wM04-3ianJwuf67Pxl zFRAq&kl>V}-InBRcZ z-8(rKr5ov1vPuJR4l=~;+Ye%!X_)RFjOf47<_b>FsP-nG_a+HJfqZ4*`H%PHtoWiw 
zJXiDhMooU~oR|%~%ct)IMZ2eS!SkRVcGR z@6gh9m6>eJ6LszQAKg>mKHjo`C_2=Me(k1_42s1Rk7+V#+)lexHpJ1kPy!+S%Vzcla}Mp$=7C_NSh^k@I}tXC!h*sA2um!Nc?Obx+dze?7nne zeC0~Q&)asW_V+bah3{nU^y7`cEAxwSG#Zd?8foWnU7kw5DK>~Mp4nH2<=dJU>v#do zHYr6L+VJ*@sdyp}>nP9q1fF(?QKtTT{`2bJbSz|%1Ph8L3HuQP_oJA_u`gvUXc8v$ z3d*AFeK(v{M(+Qp*Oxh4m~PE0s5QJRdMk{uhVaXWhu2;Il9Ua{)F%s-#5W8dugFWY zIAPVVo2Q```9XUm=H%hCM|iJ1ovXn$(XMnU%cfYt>nG}}fuS?c z^o5kPxKC8qUvF*2nza=j)@f(LW@ab^J%=$do$v1Su-gumv{H)QxE=jn?DlF?abhT!VAi8ZTl zp9=TpV&oaN7@K+pby2F`GcrmA)`8z^r(e>s!lPU2#zl!g66~|T%w3HXih?`8vwvmK zRo_f*JC#aQx~H5e?KU<(0d5ejm9XcOSBb?p}}2fud$1!KkCfio{McP-$Vbha+q>GK^Cg_7b)l~*~lBSa@>1b z<&rSq66Nfi0Uy_H5Y5qvi-IQHTv^|mnJ?%ob@J_fdE-LcS&L^amZoVj58dv4@+4JDj9#-@#K~?8Li@rzFp+Kx^FdXZ*5`kMnAs?X3HCqSQwoik;IOW5pVDWP(V>1aJDivG;M~m6MP@f4 z?}$T#5pc}az-=TMLz*@WzE90UJW8vN_ya6TM#N?DxIJ#>&sN%QS5v2jA=0_RHah|v zH+YgSQga@=<@mJ;*dPEPKYS6uwn%;{~CBl1Ifjo-t7 z?6GZFNkT=H;6uPl3YS!sfj4*BRrEan7NQ(^;$tBy4{-(=qVcW_K zs=homK~H|vqe<+4m$@)(SMunZ#L7dnZuuj0bjRtMp5HrU*xB5pUl=vqHqP_-w6&oe z-64~O)R$M3wSLnH?}K!F0b*iLB8PYeLgiLPz64ImK8Xw5?}r3QY>*{92nbV!gIn*Q zpM#&m>mv)H+iQ#dHiV0tP*D1B3-ZSQ(ocj4DPhEn#GiRQCL=F`lo&v1 z2|g9v{B6>DRtIJHiCF*B$e%lDAlEO0BmpGOE?EuW564jeKl>e!oCfNC(~0buuBTn5 z_EBd%3@z=NnKLF-7I#y)$ zH(eDjR(_J%0HB&=0aO>+3LW?gfZ8ianCu!083sK8UGN2{gSC%PasqS;K>_*dlr^9Z zEC_lKoF*xm2cww*Rkd~iJ(tcMwa0*_>C)6_HKqUt?zFdyhW`&Cfad=09`vv7!RRUI zG5xmptgBhblQ7T<`hT?I1&}NMO_u+&nG^nN``NpOh=Z5|nsS3TjTYbl(gnZi-Xbx^ zpu+hu4eK!eFKPZ?bhN7v^sV)ml>Z#liT`y>@BUWoe>(HG$ibK@4>~gkT@{(tr3o+_ z{eMUpi2uKdm#jS!C8VudWCsbx_x_sI zOO^~jdPcH(yzR$WWlz!W_bGmSd0)#l=APMGnwoM2$8xC&<8yw~*_7c|iXVb-)Y}X5 zv>wa=^VaUUecr2UBTw3K6zw(Wc9MV9{2b;3NsOii*Egw2cGP&jA#4x618oI#&7G3Nd)~m(~$~`-@Z;nud?S(_Yce6BPKQ6QA z6409`3lZ}j^2WXhplNd!0ka?0nby$)E{-lz-ZOGbo~yCC_iQ6!+JAfxCoUxWs=IAF~ULTWS8=DyQdMF+1=INKDkUbl8SCjKFC>S0J zE;R=L4>OOHt`f+UOchIDZgz>5yj90vH})NmRFGgwg!rO?t(IO z&RT%X#*I_RnT9eMheu3ZttQ}p>^Ktpt@XxbboPojJ|h&#ZzfZ|O=ULf-T1Yj`hc;q zcJYOuk61B7&(B9U>35zqLb$hZ%xz^jR)5HnD(^|@z0S~Y_j*F{hHQT0Lu+6u 
z6QKMj1aLR@-tCQ-YADo0kB({@I*I+dw>9=ce-k55E`Mt{z$E%^efAF2BMyBrYWCf< zD3be1e9|z|hw6f`uptFb^=E!1yARwV*~flGx_lQlf4UtVD(HRJwz2b?&WZ0Q1|>=h zyK8vxs#_ftb!vbz<1{|Hho-`pR$|NS%P|^zv*^&!2POMvi|caxSNzy2_I{~wL>Y#kRKapU#ERBING5N z*CbHh0n7kD0Q>@q0K3#$HgeL&@SspD9Mlf*`%^0@a^$EI1^Csqrr=)KoXAxySY-4a z<1du@Rzxky4H{O8y(_p)chmAEG!x}=CRxe^7XX|pe0g!4+0KJPE@NR^n|Cmk1*6?$ zSt8A+EYs;XU0O7@SaA9j{pm?JHtEn|hD=ZAzRpvc78b`sGP(1f^p9sA0OE3B8IoG= zVjZ*#6ic!v(RXTyq|oTf=7{C$VOU>@dm-iKE2(PcJbA-5*<_)xIrqmMI!s(*^KZHm zuN7`RT!Wv85j9Db9bZRMt#srerx}FI&y|%v>oOBNe8MrXJ!0+}UY^VYePY4COY+zb zB0Cb@byZ{=uP5~vPV1@ZbSpzXTO_VH7$~!hjfjYT6q0E*l<&O%5<`b-fRtpeoVS97 zn8&s(?+{bnU%+)6u7~>h8oZuAyl-&Zn~VN~vs(akpyC?FjjX z@@1U$MGQgDeomKaWcU`zyJN&2XEf;Yi1i^I-QgO_DLQ)AQyWr2NO~F{MTjT?YI7c& zau#VAetKWY*||@*P|?Ar??bfCBOSm&^l>>R!Akdc9Q}~GLGix-E2K?D=CH)L#CYQS z2wLgxGSb?l^IWqmE5z%$*eBD4CbnO*m7NNQI<}rfDU6S6p(EQ0KD7J7FIzaFKTQzp zi;zF3rKr1rH}nRaNh!^+{q@p4Sq&fbQnDx5bI)D_cOqbDh_sDF8mEc=9;!Zk1&VCI(KpOB6u$2x5z&1#=aN2MMw7fRIWZzaI5`g?$>M41x7Jyp$#;<4wDeXXaKe=UvqjkdSfVbq zto-0w$X$wO`UbVbW|u-;R9@SD<3kBfT46#%CpI$_=vACJ4k$h#aY1=JYrVpR0bhUF zrEJNg=0QdAFu-M}mAKO@7SGSu_-PSU$|)CnS*RMb5b})YIs{GY^K&3_l~ee7SzzQ( zVA_4wf}zM05OVVb8C(%*P=6K+odGHy%9nJ`lZY5FJ#Aip;F7>U~Z zc4S;b`6bc)MM`ap$dR!1N`E$T%q{Y#y_Mh~WkG`8$%$t3{0xVw_MFmyW98LF0m2^- zh5n!Ft~9ENWDA2DKoC%21OW{KP6XM)<_N)N6J!vO#bsC(R6t~tVOR!fl^s+DN5KI? 
z9>}I4EE1N-Dh8tPMBWGzWywN>K@sRgmH@Gxg5!AR`1ieY-g&>e`$tvZy0=f)S5^1> z?mhbxrzU$t=sm>5Pz~&x4EZ1@h1->@*asSU=J|A>cRF{aUFibz+o90$TT(hE!BaDP z3L&abiG5p=O$vC#B*>)O782ow!F|aHg4K$qba}ruyTsV8cgAeE_E?rCNOwIhs5P5+u`;eBW*>i*Al* z8C1y}d8#;BONG*y8v(iGM`!s!R2Nxzo=CpqZNjVO-U!{MG_vqo$T_T?ykw22PnCXoMyv zRmlHbC{K&`ERG4Y9SYUcqnv9Spx&?7G+4_xAJ!97 z-_c#hI;V3~{XS66$`tv^_yrCUOGnh35LsGFmRP($!&uo&TFxHnW+YM_O_&2tdqi65 z4){gmwUR|jYASx+dskON)HZ@B`8{#4etDxy?P$#8O24B(Z+y)9So5`B`iNaq&iHtM zWVY&!^z*oXZ0OLL?@uqp^X4**9L?CXxw|OT#-VBKb>k7+RV{pW_~K$0**P&WXJ69k zYqZ3bTy9`7i9MHbhPdMt$-ZezrGW&$-$M}4*@Nl0*{X?Y4U+eFtCM`7FFxnm5r=rn zdQ|;z;}}UhjMA7e{ch2&o3|^@o#%eAD0<)_qtR`i!O41G%?XndygsBFZ#fjzk;(ez zP1Z$Ji+7Y9L9pQBN9ZCML%QO#Y#SfZg9nWlY$ELB_p2PqF20U&xuW^6C5&mSSV9I! zZ!GysUjIM7+Qsdgv%qGOs79qw2$W`x7Fz?ha%oIIk)_t2D_BBk<1&<0?--W*3!z>G z^DX0Ku|q6Zq%w1&;wcb=`vrbX5)|ExS0;lQbE7bL#ZA5c(r|2)#Pe1+mm41eGu6S8p~W5U2g|+)k8tXMtlauITn8m%>4&|vJ?=;XHd|Z2{7r& z3ODbD7j@5oN%Gv1VcL2r-|0qxG%b17XwsM`x{T%82{i%f5j+3wJJy=?yb?5|LPoFc z0eD{(?FOX1XYG^WXu{7gmeFybbLx(jho+ofg7t7VYD7ZHQ5-ne$S1=rwXB6yE$~-Q zgV_#3+Ny#+zy=1ZzjejFp7UWK3@$c>fMJ_als#&B(tU$@Nx70kCVuX(dapz_Me_5w z5XV`#HUP7=0UXACWz#YfS4vx+m3nu#@@Q;m;Vt{I%YP(zru2m$Z=;yXpb-HrFwHSW z3cb495LEj*4HoA2Ax@caBjB`g_HLgT5#DmtIAi#2c3&7rl zHk-5}2@90K`N9;`5DBF6VS$wS2&xqP2%4ARkx(h|bpoXk<4VWWHZCOGA&Y9Y;!t_e z_LInpTvXU98wL!KFRxL{5+oPM0SE??E5!8ppf1=FdXG|TiF`!i;|4+ysO|%R ze79@XaO9)YywaG#IIg1#;LI#=ut>$Buk8I>(5#+s2Hq|QVXEe1hyj3b0~Y#qV;x}F zm;xe>p0q;Lhj~8&zX&#wmNyCdK{2&N5a?>q42=9sPYz%y0guBUH#c{DL?HU#Ew&RT zt_GWfw?qSuS=|`SDQm?5k7g;s?3y*H>91Tq{TTin#Lr(L0E3|GRM2(fM?OG;X+LyA z^T1h%ACP*0FaE*$zyYz*UfdPAn1(pjvWfoo^#cU2+><%3opO%l7>j2~TZp4u@(@S@ zoSr4;b8K+gWDgKj*tG!wDDc7b3)9#7hUm@M%|bOn9!Xwktj*l|(l5pQ1k--kpvZ%V z|2OXSi*tI8LoRl?*d;lB`R^86_yen%%Pte^X(Rvp$FfV`*m0ac Q?{oaM-|>Hbg+9Ih6Hu~51^@s6 literal 0 HcmV?d00001 diff --git a/docs/archi.pptx b/docs/archi.pptx new file mode 100644 index 0000000000000000000000000000000000000000..5b1f2166674ecafb5ecc25e96295abb577a258bc GIT binary patch literal 76973 
zcmeEuRdifil4XjSnZaU~#bhxvGc#Ju%*@Qp%oej`F*7q+%wUb)tM2Kl={Kuu)|#*G zx*rMWp8SYd5&P`e5jW)|K|oOf-~dPf06+wIpV9Q40RjNpVF3UX03@)c;CCBGV;e_Z zB{y4R2W>i6Yb(NhP+*E&0PyGje_j8BJusfKrqIWL7<`#^jmP=J$*kuKCn`O+mRmX! z0bw(Y&>Tqu4aN7%W})0>5GtvjR=gwZD2~Z4US&P-wh{!XfyhW0rn`lLB3shhs~< z>%s`|Ubf19mF(_nceJ-piAwiw=uQ<8?u@As0 z_syi-*+O3|?nfel#2JE#3im8FXByuX3GTPhMA*GWV*2`^GwFB1yok1bB{6OAJv5|E z7}*O~9n8jZGV1-`XmgP!Y`DgtE66+?hF%MF<0vUek))pIfqL1kk${tJKsIlSZXmaG zS-z|CRU`^n(ra|YyfNytNGA#kQ{M`HCVVqn-qY?}o)Y2<^_>2~YXfj`0rpqCrP!0g zV5t%oBhCsU#r9ySBNihImNUqEU2+bml4o5`mNj@*w!`+7@qC=*HWXV5ed~4()xrxm zhu)%W?D`5m;GfXe*}6P!x}QP(@c{ym`v)W_IQJ)m`$U4>&qRm$L;_uVV=D)Gx<6k3 zg#iD9)Bf*FuZ-`N>1Tiqz7*>a+<#5CAq-9snDnF~Zhq)hd zqi|v+iBr~6m}U%tpiHrr~*qjSL~fY|qptb#4&ttI_IHO7VIW5LHVgQ{6TXfpv9 z`hs{f<};HF!ia5@xGJgsQ_}!!=rsC^vVG&~aQflP1Z9uw<#HUhPu~hj{N8BG1U6f*!$@!w$HnSpYOJ|j`R*z=0?U2^nXjFKff^kai0E} zQh_nO7X1u}LYF~r0m~jFTpBV`o6%U$ag}&_WEca9kagV!?N?u2#@OpO{We#21cb~UM;6)j5l;tE7F9Dn z)Oq}HGeU}=E|L=w9OC3_o~)@+xy%K94ylbp|B{o>Xb2bQwTl}8uHjc#m-{YZtVKPN zQhlg`nv)OSB^FQWvIm=-V=R*K)zC(9v)*h;z5DiKrMTP(XsY zklqg^BR-!2@}E=fr=pXk+UFtF^hsG!|1H)2&nP=j-m+L@Kpnb-@PNo$i+hp9U1PC> zK7&N%gp_^&l+%(I*Bg;=NWR>0iP~Av8%{%9WfeJ0lApMIGomGsi6RuOmaa0A3E;{> zrh`#lw9lE-ZNH}}8c-vXBum`q;BgQnP|@wCS*u)Fw>3`-QULxsYeZ85VYxPbD`mG0 z$K+CefNYR73)jFf?@=><&Njl<2jj*2fUP3Ax^y-*Ur}BT^IH&28)QsZh#A9%;83pR zuzb;5#M(2wyTxcZ?t8mx|1S-3OJ8`>%7#;U7BwSQCw;D`kuq7;p8n7qa;D_7lAD#~ z3@n8d4!8h{uvs@;Af>Dw*nRMMP1??n#F(P&(TEXZeB}_qdv=#snQ%p!%K(ZSuVZt& zN%KrP1-9BFbWU^Vi64?g0NpPc)*nI7^~a{kDR9N0i5cfJUuTxO|_dj=o@DxO?d0BSHLrkOqB`j}V| zL}Da>bY}g%lnjC>315yAAi#O`JM|*BfbVnXD^jj&oK#Aq#|t6aBmD2jFsPWlYih51 zh`)+N=-}wQD<}ZqRR#b+`?pB^H_-nNxIfogv)OAy^P->jLA;v>o={6H&0x$95I3>6 zop!*IDyE%gi>BcMcH~Oj6_=6E)m6OXeIb93r;Be}iL)AZ{+h<;CWfn7C~j_d+4Qiv ze|Bk;pr#qr5nAt9oS+(99b_ImQQ*ii_i=M_Gd|apZ*-D5sE$2(*Iql_?Q#3M)j3%r z%)K~{_X7{@P~HE<_zCg@s&4%aOsyw?n< zHM0wk+})$Rw$Im76$n}phpoD8$@QQu!Z}I=N+~~m6t#je5~2>r+Q?ZuwXgOkELjKF z8`sjJGknOs3y?B`TZ+(2*#aImwqH`REd$OTc~oUj>3bm4>CqY#nA&*p)~b(g8DX!c 
zQ|wAt8q+e&Y`+^g(Fb*K+_SMN+T#r~+Ga3-%w2Q$vo4vMA`R!x&$2Lz?s5k2aP5Yh zoLi;e=D&R#N0#nLJ;4>68-z2cAc#=V*3c#X-i!@(sJ=Pq>S~2uLeLMEjbTF;M@uTv zACYQFjXvdw4(bbB)}^9Dn@*e0ZiB7Sj-#?PX4;~rmnd!M;Idb1L)6V7xi%!8Xwp#= z(28{xSHRQ|~ZL1t}x)yS_)i)&z zvxr}b51}05Y6GSmo`Y2W{@@u%MZ+0+0&-rinX%9vaO(w(1jS+wfm2Z)Hd)A()Z8rT z=zIkXDpZ!xW#3aJ0>$kHwdiA9yign|y>Z-7ccMDA}|d zu$OyY%lO(JNkvtV+4QtcqT^O?;E8NH_A^*=^I%&L-W{;HxOY5foVfX|wmsWMhMkyy zJA4nXaBALpzAv|HNA1RcSRFGQ zk$QAZ8+U>Vz|Iv>pw6_09+JBQyg-?YKO8O`>cSaeLUPUpKCO!#k&?SV zAsj+uZyw)_J)uG2L`#4gz~JeBU$Vl@-LD`tpWEtv?9P3+wiZMeb>lk>V>4GF@um}+ z^0?0(+O4sKp`I?#8^orSaZ5wR(hLn^jr8%zE3hwCLYk#N2e&Q|=3}G;xKIqA&^Rr= zeL-1$f_ zL2TG|e|V=|KL(d=ExmlnN`VR}IQB%b-;g=d2b>OWPG8XFVCY!-PQ$F(35a}8&kn=Q zywaQJBNG5ezH19Rjf2GW-&kJ)IbSCY%A(C zpBzAo>!ld&nAm0(@5?nWoPl+&igA@Ec$z;+l1ZTp2q{;%Y_`swxB2pTA=h85>}sWi zL9d{l$}>5_J&CoFxLh3Ly%QN~EH4)6WZFnZUwY!+MY*wHOKeejRSB(_lh*OXsQIm` z10Z)HYyz~ysrJRc7U4if%* za%?2uY_iphnt9#Yq&J@S@7bSo8pcZ}qjYlB^qgy4?%HqQEq=iHdAm1)qKkcK=_ zZRUZp%oFUU9(<;tT*}02of3#&x2zrPf+i6evO5UA6rV{MbMBR z=^v%+$cO31tGRGtf7Yu`=tQ4_>?yUX@g?Z5>;=)q=+iGqpj_~2%PoipK0%Yob;+ldesm(V1a!WmB!ZH| zRFMi6ugk)@id`CoQEX?bkKyfTP6@TZu~1- z_FBvNqFqa`AK1hrfEKn5n2B9s;{1FL&oU8u z)A77Pk{iX{?xM4^cJl+ppP1Yc`$`VK@Ilav5O=e~1yU0kjGMY%_?c=X9!d#P#vx;W zB_;Mqodm)0i-;3dCbxb65T#wTIp-+MXdaOc^~Y#$I9~k%hc*AWkHIrNH5T%~wS)19 zT`R`sXq*7@C|F8!N@&@hXUnIc_7}yi*C#dnLl>a=dx|Kf@Alov@gLNIzb&!; z?<)CIBY^%CT!k+EKM)^mX5A6vigkppwuSFNl8x5D*GP;Cj<+%l!Fu=38NNHGzguDt zCLp(Ov&`26^KXKldDiSW^HetzvKO?lgUe!X4%B{6i8vJaa)`X!D~ukrtKJ{$uueH> zk_)9CEY`)YErxPPtXM;w&h=2ZsN5uPKI!`qK`Emm$hm?&aE~W>>*IXPX?lz}Zb+Qt zqeOw9Rm5aptW=xoK8jtKx#dP@p}q;RzNjo z0UVM;zFI@Q1n)kr)#c3trrJ6lN*5_irN1=7Y7FL8j|_v#X6(e`Puj9%FeTp*0Pp?D zwB`<7%$a32)OW1Gc#>Z)sww8D^T9T##{}w?$0t>YRyd2T@ZzlvMp=o}@fDLpjMh@? 
zt;G(%Ke%F?(UGhTC;k%rdbl<0T2yy8EMhwK{TlB7Pr$MNw+y)d_r&A)Z|?`%e+qyY z|0DDMJ0Z~D2gE<=2!{Xm5%CXD{C_=?*#6~*KTqqg|MJ71eqj2SAO7?M^S}J?ryp4U z<%d80@Za;`lv3eR7XR$)L45K};=f`Le`?)-J3Riz9U4_vZ1z}D|B$>1=S_tpn$$yi zVq5b0$i&HQw$#I-!z73W74SX6feszgWzBSN$!|$J3kzFUG}^^=8|6CII39I$vbxq( zNLLYwC@VyhU3N7>=Q*g&ym;!pHR#R<1Qd4KdPpmNj;tb?cjEZGPEg7kt7q=uS)u~t zHA!8MXiyI-4#z#GE2(8O$XGlz8|3#v>kKw2LcSq6suu0F+w3c)E~=2z!^fFcpiTUY zJdhT)SfD~G9DsgHu^ct7i6Y34f+6OQit|2>U$qNG_m!6{mxx~&w6`Q`(6!o*FpUE8 ze=Dbvi=tNZwe~k7L7T7(3l#{rLStMb(S;UhvJNf3g zHf{;Gw%zMS;=N(ncj8Q1eoZL@gx83t&bKS0%MV$y4K}(q=-HiKL!Z5pQFAnP?(7b- z-P=A07sbT8b*JHUwF<9Wkc?9FT#JZ; znrcaTTDV#TUaX^mUFz|12^3E3gV{jD4cJ4Ig-li>VI!0sE+8O}UV?nlx|)RJwyI;16R(S4Y>FREt57J59wx3l%JI z-m17~F8&?+M*K(-O!-Itl@>B6A*4O@0L4gz3|?7;B+XTbs0K?uJ6OfG!)1k?Zo$jf zkbvy|AN%a&V(S|c#H=nYrDm)1o8;{^{ec$8>aVgL7I*kF&`m&LYvhAFci{T48-u95 z4(B4(Y*KfR9}(JOoJuyM9@sG^84V(Lo&i+Ec@ToXM|d9ifF0Guq9DYlk*$}5B`<@3 zu0k_2;!Gqjy=x%LPlC4(0Vrf3(F}E{k)vO%VOI?}F~3Li^aNXk1AFK$1Lw!;6tX(A1Cj3c!LD?RM7 z`dh`;>eS(~CGG5Gg*(*RtX|L~6*N1G)K1Uimf>342XnF{VoGJ{*~v9r#fZNKho27H z7;ja$GjES9`sk7gi+j57`l>R1}9&3Sb)$X%3G~V!fsAn@Usu#+*wyrcD`e`ND}o4>aL| z9t<$ZT(_p#ctJn8BOqdT)SYt{>I=dAwh?3wYjl4jVZ6rpm-yai;o!5C6a@{6T0E{X6gY&`r(r zYk(>7(cZ~AF2#B;-5vA2@I5kDGDYXG@j!X1Fxp0w^zH(63y z28qz+817{rH}99GLqcXDQ@f-@xU{JMK_4_M=B&L7dpp?oXU_J>-2=E zwswJfs;C_+&M|K{K8{~?Ql=4-_)R4IoutO8Y;@QAq1_C9!Z8*lhcWim$mln$V~5EI z?-IpehxGUxd7Ww4#bORgs1Q-sGoi>g=#IHUL#K8fa#FkEipLe!%cD7S+}Q-~N`-X0 zpMf)hw(LF}F%k8LDpvhfdX1p0yT78o;Pt-usfq_~C8WK1ikE2M((+4eRF@I(ka^k3 za!RB&6aM7O8JFsGxN8ll0n}mdj2U02%ZC?OU4u~TpdG;y^c_VwfOGp5RU4ClJrje} z<&F#(f*ZwO51lonqnyx}&m+I<_ixCP z+>8cWoZSXn80~a?rRZr#YOMEZWj``t~(XfCnHn3sKhxflEPT_M~VMsq4Q};+ZH19L-;b%!^HWoe|!(K(VB7 zg*=Bj1eKJUAJ`h3peNdJoXK!=!&EK7EkM`A4PYsg`bA~DRjQD?r__L0v{sHPA+GA| zjWh9;KS*vYFwzobU5<9RX+7I?%V$AgV_7_-=e*H~XkJ0f$MhG~P58nKSfv>hBjjBZ z>V=>loHr;jZ0TFlWgz(q|FEoCQzY;2GzHoSj6@|(DuxrbA#LpGkjxgJe1DB)4FIh;CXap_J-(HBf%-v z=9W2O&sL7?Dm5C}A~Q?xlwmtgT^x<;fx2RTWkG1!;HIhyH7^uaq8^E~=o|lT`9)ET 
z8MOR6RbMWwoHH>9cCr52YM8Z#OjfM+cMATg_j0(R$(4(ovU63z&J1SeirE+fwP*?K z!NQTTRKcf(K-rF}YJsYk4v88?Refl#A5K$FzcLo?Eg#S7ds=-)SD)@LrXPkjr&(mU zw^+_m$Qgnc!^~Z6Y&q|&ude&`_m(6fg;5h_L5r=|pGzKnydF>9w>+*6^DoLYL1LE# z?nBXFlVQ*zAYQ%?5A#f6gAEVWokYSN6&~UA*BxOSE;(n;*fHR)lw59a2c6sriU@z% zGcUxZVsI;hiXS2x8}l3`2y$DE+enm&`;btN6YK~I4qs8$#+sSCqTxDTt)`^bvty2X zn-2-XnfHok(4P{^3oEl&xAup0pm+qromCzf?%?0uD)a@eW_OufZZxlnTw`u^N>x$mX~ zOuVI8lAv`_5KVjDeYKd0AUvJe=cjHy7^Ci6Hf=w)5{NEZmiR^AZpN!^*d{7vl-PkPtxmcXldw z@>aVbB8OQX{NS%-h{y1+`4Ca@FQ5@B&RzsERDPeRJTMf8#|s={(%>=l(g<%>!VjZ;?Oz4yqY{E*0uUKMBqnp zmCw2N9l>8xi=0L@;)Tx|M+WKtpcenGiT!y`U24riPB9h=jTrPg!w@Gh-*EXMc`V&~vl})2LY(JQ%!D2IePm@VlHxY~)Hd1m+ zGG%$yl;p?JCdi6Ro)LrqTukOB}YxRbl)fm zxRau$Sq)e`QDbTqh5K^!O8+hNCYrKf(I1ZX;#&c~;<>uchGfJeMo%Bc3?G&vgyZVW zvsg$g94&>u%CKy!RtS`Crn3*F$4dEOHN0|=bfRrn(J`B%GmhUz9Op?{Pex%%<*tdAb`KlOK4$eFCzSwzgvVkL*Gt;3K$NH9*tOtjA{Kc<3 zBlb=+5y~>g2_up{N}(kx^b*YGSafPHo+ON57dSE+gRFY3n*FYgg*Moosrr^yKHM$7s7UoGI>&9^_ESJvnbnEB(IUvdFbjF)sbp{l}&?Fjh+op(IbTYGT$3Jb;~_~=w%l~6Xo3npFewl)ju z1}DD6v^MJotF063YB#JJTg-?HH}xFuM+-4|DEiUYi#KSLXAD&&5pR1RV2ccHs^Jh? 
z50ZHS^ktQamh*EDJ}w}-U?B7TD5XDi7I)#@I_B8tydJ{|1VhGSBS1(mw~U|1*FZ_= zmyMzD4Y&;{*v+;N8n8Y+sENdMB+~O;X{r`4Prm;_;=U3Sa;5;*J~qiTgxqc-{r2Y9 z)7;Kh`K9#Z-R(_KY_!NYRS)IcUA(R(EtuFHiXvVa{+=ikeFIT*=2{5z@yf+y^mC_%HBd3-hxrBI9J zhH5h+;RX|jk>d(3a)C=q?-i7s_c>gl3g+l;3M?T(?3IWLMPmwR}R|6F#qj_0B82c8T>7*G0e z98ZitLuTaM3`wdN@OB?eNSaYG-Usca4ouHJTa~A}QN7B&K=xo`Joy`{5deD(c_zK`h3 z4&0jlU5}Ui)xZC;Wh+<3sj>Un)V_fEuQBmYWxP^l!)A>YwTqzEg|N|5m^ax-Jyd(d zr{9-GJmVZlEv){B_^0kuzLCdAVCoil&-#6qe1(X#$T9==VHZcdaPraxo;UkewY^-3 z7Ad~o-NMbZI!G+5E^{%X?0)UnYBVhgTD5~#eY#W?Puya~N(^u2tx)na*k*$WQLX$K zsBBf00=FG*nzeY)0oGn&un!mtno2Dy4mrjX-g33Xu*=3s8ADZ-jxKKgRJgy1(a-u~ z;-=F`NCgA1>oM8```RIdx*@PQy&*Fny!cgzk>t+z4FM6c#@f3|to^KwM2>PhFq{@8 zDF09atO~R;oLk)+P5#j2%C#J@?wzH+Qtjp?;2EuqlLgw6kC})xV>pAW9nXR;ESeo7 z)lduPXp|n{U&ZK&Ry8@ffhhF>dTkTuV>Zp5?gFf9)!DJm;1Gzt1&XDtmtSAL;rWP_ z2=;r7m!k~ky7$)el9XfZm(5+yj?u04rcSD79BIx2)s#|+(=WXdtH)v zHN+#4P0)qt!+3fY7u%5U1GH2CNp2-M<_a(Bt7#ObEx4i(@B-gP;iNi0Im$bb5HRU8 z^c%chz*p{I3MmuOd3W)2Jg=t;;?iQ;-U%Q&DWGUuJ8H}%(HG1UVuGDLr+_5(Sy9`< z7_h3#bW<_!L=u{3EFgfM{CTD*Ss1bMU$VYyf77$EFNCt395ZHR-q6lK_JE&h$u)azr!wYiH7}HE#63jC(YLJZd^Ghr=`SP7 z1(kv;V;Sr==dQm~WP%|ZZ_Lk7zUk(G+ez3bq6}OI6V92^(<I(d{ZqC4Q=aRFw@r6x1v+!ih1ffwzs#~3p=Xm$?-e54a=*wx$9;|~ag0caQNH4_EA>CM8G%SPX ztP-xv*lppLX$ze4B{MW=WtTzKW%UDj<_Chq%lTiErm>dlDm*X%px~b%O@FqG#?>_c z(1qXWr+E-hTMRl$am2x+4O_rwNm2`8^hh|-C3u1hxhGLU#+_&J8oO71Ru+aN8gVg+ zpZx^o_qGZnrl0D3)Vhg%{jLH=E=gvjlLV36exw$dWS(?zaPN2jdN3|SLyShtG+Ggl zr^Y6w@g3ghO}`soJUeSTqg)=8Re~cQVo{*Xof&h)u@{=^u{=HLiM2+EA5CZQo)G1Z z#|p!9n?r_~!c0eF!?kYg$>Q6TqO(?!bg#q$Hd3S_*qeRaQhkfcr9lCj3?<0XnQO;7h$?96#0lLs9Y(w zZ?&al(|H)Knp$`1#IpXTQq6RFUyUKd_wq3}SOw7*C4S)Ex|e46?+FW@#NZFxTZWky zb2QW9y~$)}PPj{E5Vvf@FI$jL+BlBY*&#?PFm5a5Q!WynMoKvq*s$Hlnv%Q#M#_7` zZW`zX<{VvqqfOrr0fA98n50g4N+bqOytP@iA({2sBKDX5G{;*nP*FS4L4Rs9p8|$u zC(K7>#@CdK`-%5o|6wsqXuD-B+#>e+#oU6(^Bn$OdBI&Lc8R zPU@Hv{JIMZ{zYbEp-%ysEZL=n#4;2Z1M9l6q%`B(ifn#>j9<_yP@TsEcNp5{beI9d zfKe@d(rYCo-mvs89glUN|tLj>94gO@kg*M?UnWENPjR^KDJ8gy-7g?46pD=QU!u 
zK1>S43%u>NpD=w3m`!VZ7vHzxwdY@|Z3{30D;S+^1K_llVzi?STCOanV1aae;joQD z*w*_pT*G8;&iX6(om~sybfzQ`HGVsU7 z5KY$t2Jfu*PcRoNkdAVVll=oR%(O$a&>}g^na(hCz}-?#A$StFh%AqWL`R zi<5v7Gv94HxSm`_F_Apr@#NP|Dndm$-f2ac(W6dmGd7;8Bx#2G{-7|+iv7rkV=!s@ zgYoRO%lX4WAuo92nuF(@;pn}n5`L=?arVX#@$J<;5_l^YY_jY&*d(+>dcc&j>0tWCshO0lNZb#f#y(iZWlcvBYBU^9NuRg!;9%XZQ-)~{H zSS{Ll`2`Dd#Bg;j9BQqFqHnl4u|+mXIO=28^-Lg!`Rv1j{FvFgn@}M|@1x2-kFP+fQM7}+E@kZiV zQU7A>)ng(r!Kc-legr*S?;rf$_)RmN6qlNg3ivC&Gz*;|pZkW>obc=|mA6>WbKf7)k9uZ&< zpIx_QuZf)V6h|>}sQ4%tk0ttgVYm>cqjB8H=L~E0++av?b(XpgrX_(w33ZW*wXpkP zcVkiXcxY8DIyGUk#b~j!iMykmNjI8^bKpz0D#WK!qe${a5lVcnXeqYBftyG>h2ECT zFl1_1v=$k%D~~H}^yter!IFW-`nLDo*ms&Ka|v{sDoKU1#V|rJ6~yj!IXaK;Z;0Qu z5TR_wFOG=H@ItJBFq<|tMwY|CRk$so%SzaUgQ>Lo`qHWR5#`0=s)(^AgcY?gnSL($ zBX;Ilc%!`q>%ug=*8e~cHCfd>8nkai$Vzjo>Vf^uPT#!skZs#BA&)n-A*vs|2UK)opA0}#4$rMQ}0i|mpfzMM1Xzs7H{kY=>%)H~Ti zR))QqKWRBBN|C;lI;z~bg|ncPQ$j0ju;j(&cK3iv#jc%q9;(lhd82<~iD0KIY?E*I z$K)fl!VPe61-yYRqGiK)+{06KDBb$&9s9y_vt(i1Rg)1gH;^_8%Ix^?{~HVQO`XIm ztGA@NYRAxVvSANaB-;MQ`Lx#8LNLIOu&ZF7iA;ijL5eid&og?8AsqasNLYDY|JnBA z%%gRK2}hwSDLKMimIjfc@&5=EnV#ts0k#beFLo7rHB(@(5SE2$B}Qh3HcOU81X9W6 z>)B?i*7ycXd39XC;D#@c4Wsb=q`#Y#AI%!F8iBnJ(P;q5vbU7`CXyfKu~+Z6jv;9@ z_ZRk?NRFzD(@s-gv8+ZX|Mx1v+#UH~{yaD-!kx}T&lDIJBBr_h?7}U;FGPbONfRs`--}t0@NC4>*Lw?kfA^mpoE=SLb$k|^hXqu?Gy0~D+qYj;5>%U7eQHf6vDSLh?JM(j{D zLPQYOqxkHs^fPFX6#yx@#&fGQ(U^%4J_kfRpOs8?d%oWtfHqm#oOuTZxSb00RZUG8 zDZ@bj;>d^!`OTfa!bGpO(rdt7pRmBtsJcT4dDuXahLB7^WP1T-M-`gJb{9j|haP;cB-LNF=FvEs zRfNa>wkoK%x26IIRsw}DI=!|d|6Sxj=^f#ZcK<#=lF#nH8w zw><*%S@teptSL^Qxw$c7K{Psj1A%l>i8G1#a-^36J=IJ^unKfu2j`!3tvU|Pt=pj{ z5+W^&pSXc1^-&1`2QjsNZi)*`FrFMs_>u|0Fmto=8&f%@r|E-Imq=_*IN7t} zKAM9w8@2*qb6Y`gnp6qXPh3ZFs4Cg}cR*BVPy`p1IK7ncDZ@Ea-D=*>HUuFRTdC}> z;5rQ`s>i}FdCz-db^v5p)a^OS8Uq)XGktE4jv?mgD*l|}ZNn0zkqz}^_Uf(1J z)A!A&3Z9MLUYlhRHzOBjT?h6ST_F!6(3?HfeKkxZo-E&~X~(4CXnu1r|H|=no8-%n zeD#yd;a|l)*C+tkcev$k%0&QRZOzvy#{dpc+bdc5C|1o%cd>wm7jYpZ=O zbo{Y^ciz_w7<5We7-i%v&?5zgLMU^v4X$h3AsgmHMY0{|n}kNYLw>0ZKpt<4dOs1?znD2V`-IG# 
z7O%8HWU|+2Km>T5<`zTA=sWo0x(-w>@c&Zt>L?v8gpi#|lXhZ&-faKknbo9+7Y z_1o|lJZ8BF-N)SU=GPMb$Unf1(NkLP$xGYgtWfJ_@pBO1Fuo_IIb>^QfMB7eAp z=S@l4&1cwBSYCv&0xW4dIPaJx)!#I%p?FdbVk!cQfcA zDXyp!!I^S7+vd0$f)ldP64;P_+JZCn3*Q$V(+0VV4HCpxEbw=1{kVmPvyq3E+Y zylUR-GkER^1cxKDoeaQYTvUX`Pw9z)eu0#4&KNFF`1niJ9yl78UF(x1?S2jg{KG)N zU#RwfK2}kwqHVLz`nQ3A(}v2NBEb&7s=?CB#hd z%->$VzmLPuS%=o=_lZJrI)>^U3AW(0Xq94{E&R~P{H>$6Z}=$;QQlC4LTNbmdU>ZR z#S6Tb>N<><|a!lYh}jsg8>{6Q^}nhBDFbH>8_|z)?1wXFefD zGRQO7ChY6;Z~L(_%1soonWXtsm#+L48hP8_P{WE=ZaFTSH8U9%dq#IxIt#i-It?E*9iSCA#38xME6FZ@3s*&MQ?DT zN7$#{h3NcUyJtARG-58A%uy|5SKsr|<3e6YxpX+0$?h1ax6CYj^u+hV(d48}g5He1|3KGS_RfM z5FG zSKXy_b+lG-A5GWq3~M4xY%;q)xMcHtCgRoi!PnTK-fi)}FSR2~#(syHs)cs8+M;MR zkG!3Y1ap{@?I3)M$BXR2&SB%7iMgFa{Fz(p7Vn5)WFJkEByz9rz9wf3VDri!+T;+F z%}mL8>V7KAIKAeFl9cZ)MNP+A1P+_*MU}}rD|Sz=tk#~?TJUB~yC_yaw<&H|&a%oM znCO$^TzQGZZcNN)f}e2a*9o*VA=dA_^OVVbvp{E2mh2p>cpl|#1@6(YiD zHML;z4Q}b@s5aQlWEAUdTE?&9y7mZi`9>Ym75DmgU%xRz2?wx^(6=f~5fP-ZCBv9U!D&XChA92)A8Do;%wt4dU?w{03uyy+Mj$7Yy3)^VlZ+BdvFt z1L+6?^7P#tFz10b6!Eu|>utK+?gW}#y%--GUX?cDU{5PqEG$n$2 z(aA$=a|Ax@l;FnLISE`UP@tG@9E5^~L_&nI8rmE6V`sdug@Mn=Himns=P}<38Arvz*@kGSA<4ym+TW|HYuQN@$#uP@VGxA7 zXjQeUb?s4vkqR`-9<%r`8=RWL8`#cxG`@*^`TV1V$%Z+UuK<_ag=yc?ji0h=Lg0?1 za!Ol~tS$wE2PpM31}}k*vflyNm25%27)`=!hs&#(6+*V*SF7b3i9NFp=t^024%g46Ubd^xkHa(V&-2|Kg!YikBGo(L}#Ac3^# z6)S=Hz!|f;YqZze;1-()d&Ry2l(#|rlaSRR9c(5y1&NFdG8KS>W?MBfqv!Swg?Wjn zX>qCrTjp=*SYH)}qXXWB3O}XJB%NR2+3ju9#d3MbhDI$(B2a z6e48hmv|a>#Ab( z7)!))63IQAn=Mz*-)Us5NFX|y3qrH_u9nM`F0z&f#=HBbPl7kaXczHgDB;=^NR)yv zY~&h{_jI9NfL4$YwDB3R)at;nHVPK?=hY%t_}#PBHskq2(@(2>-r$E}wZ%&c39+EA z!(O%pp=N9kIfvSks044`^1tug7A8ti3s+iY$upC-0akB0q;Got#;jTkU7kzE>6=f= z48hlM5bWsdwLOE7`Q-Z!uh0I-5gegLXQo3WOmRiCh!D?1z*2vOL6XG=tJ)Cbc}|5e z8u4%|GGxKlzrt?Y-{TRyli3xLXHzJI5v~Zk5WTaQU?r%J*BBu;CpLN312c z8Ji#Od)mpBkbmi(4CgMW+EQ`Z4w*?ct zN!K-+5{f&$z$b3R(_XD2;mqXJ=lJ&2{T?PD8otB zIliCpJ~nVb1+NY((J|9>-pYtJ&51Grr_;AjYZT!G81uo}6y00a&+~77?%E%BIk&Pp#*exj z33TBo3bkL6t`5WIJ@<=FU_%r!lphKkx|sG?hSkq1n?<5|Z$+~?u0f_8$!yG`&!Jeo 
z=rf~%A-;MtYnVEvkugMr|7s( zQzY+q;T1()P4Ox;!EhM`aEp#bgjTC#X#Ot+t#w0fl<;Sp!SA0aX#aeMuky3au*Q!1 zPN4U|XV#^x{#`v(@ri>XR@|Sg0U(lZt$(LSIohO&dhA>un6`CZNS)?E`7;{3-AU_% z1lh~YG3Q9FPS4k4?$)LbXl@6LiC!-TQPiEF}|(U}yL6ns_HX$ZVeOWU2;@{RmL4uY5l^38kZ zYF!F#+_{`R_9yuZ{NeL!q2CHaj6dDBmWN#MNo6uBUuR_0^eCGYikr0hzOLGSOVPlV zKTRy+5l_lHXsw3d$rZnW%QauDescJNIDyn@L~EZ1{sEV zXRAsTQHy>>pXc%lZkJ7|L?fmz;ahJpeXiorZDiOr%3bYcowS#nz3E)Z22gxq3w?bs z^*(LQq)hTSk|ySjd5NPgsd;(S1}t&q;zZ=y_?qH)2_3n!l3x=?)v@gj{jqPO5MF5I zK78MyWMk}F{(N;QFhs}idqL&%oUf&ORS&op`HmMk_Zj&&kH7a`_CpiN=}OF>t7RL~8LMDk*v>}JpI%QafJ zAEc66waB~7>B;NntQRs6GqoDG{1~%by%6(t@%L%x<^s+F>YGcOpPpDB{#5bPCDB`r z;V8Zv5!&?fy~E8QJ;8gNjymG6N1WF(Q>n8ZPMXY>%LS6O?n~rHntDD{V$paN6UAqk zN}_XL5Jhcss`HRAzW8B(Vq4gbebYo9b$4rSD5If*)8&NTVqr1q}F232&mx%UNC}7IvoWBH`E9>c7Ydk_ga!fA0%HI zfL_{==Jnuu6T?mM=p*5S%H>x>sNdd;{(ffnd-gVrf#$)aw$?i5)phoScG?7;+?a_s zyohf0ZwvQw?tN8H?_<8BJCj!zb%$AMa*Mz7 zu32V%%5!O%CXxr4>`9p&wMv$HXWKGzr!p;{M*2&Ixpaxow+D@dT^^Lm^fJ)E?4SN) zZd>R&=9-{JHi>*W@<3(r0+&aJrA^Y;`^Ap073ss()(>Z*w+EbGT3;Y47{&4is~`$2Q-D^9{2w(!Up=2q^R!l5mNA(E5?0 z@=iuMx5+N{Rf61|thYZjPMhR1pMQ|^bnaw~f4Ml39Zc}Gb~7a5(H80Q!x;;B^HTJ7 zJb&Z^T{_Q`TfBF2o0Nt9PzfZK99z6!cf^i``AAE5-m(ted-oX_xyU)n_{oqi70LR5 z;%N~&dOst@+Kt_UJK~eslG`ZZ;|GITm(%Ag!DB%F?fHHmBUse9z?F6q`1t;RT511! 
zTC~(~+9l)Om7wwwsL7yR>+Gc`(yjExY7SR99p=S9PH;E7_|iL8MUzpp=RtM*Fn`ar&v~j36V-fFC3rrDoPND!XZ@{XE-}{q277E2uAZaT z=J%t|&7&z{$;__Jj~qnqWoOd!6uXgxl|NTY*8E7OrGlw)l=JZE)2j85biCu9V$KAg|*=6)TPvlno*b~qFm#&{p z8yB3(iSJ#c3V+#As_>H~PPW;UB<$Kt^lTY>Lv6hxkNia?o>U@JDwBF5hO4ZDD{Ake zE}W(&Q+j#KkSkYeREB@1*m|JCS-C*<`WpS)O9mIT)EAAORP}8=j-YRDcyHjcE;?KF z)1nn|XX)dWeTyHY@9hcQB);G4F@nL=hbwbaq{n3E$$mEb3drtvesC$1nZLW+Y%Z|m zeN;{GMznu?S^flFK8QCM2@rU2f>=#QvT^hHool@2JbC@4@n7HE$fhT#IRF-Y=8CRj4=Hon!X< zfJ@kyoxBB@u(ZP?iNz~Mzs*F1-VrtlKKmBn(Z`t_pH>_tW3({JWcm=#X5_%+l-#gJB_eZwXD8(7@zu{VVhgqA zCy|^jiP?$kaQ(+pKheqV!T`14xX*TQ_ZU2W`BcmGybwkhnRfR&q)0)8YFF zT&arXJ?*04TuZOofhT%)s#blfHqeBKU}M@edt*$@9f}8aYPey)MdA+H`*VNk9>1Vn8BGiG+r;!bq-hC`@TxlPQR4_Q+IynuDgje*Zvr)qu3Jk9YPSMuPUs=;_iY z)X!mD7(oUNgE40;!{V_o?Q%3m`sz4`<8W)RDGi6PwFz+3$85QYu3g@eyO$u zgAabE_m2y8v;RK8k)nr)RCK1kPQcqZHal7u)H59y(;VhO@w;+;?BwCM%j2JzfYJ)W z?GH9$fQc`$kxYM&jn0`Ic?dEKHvflGhDuK|b)tjFK+P7>y@V*@?+58M;R1){$ zGt|l!2RDl1WE5N{Yjt8sHSk&l5b8;&4xig z{7$|w$@g$|QTlufYclV>e*ivB8iYg7-=*QG&sG_EeWP==z)o~;llSXw!o)Z4p+)yG zLsGQS%7)#JVN@p$qPHFo8qFxN8j!Q39(RJAf}dM zD1Be2cCAuhg=-o0`VsEv5C&@sIyBt^m@hAs_N04&x3Lkc~%ia`?L2o znHUxms1>RVmcmGblLv?+Mjp=lWu5(#F((D1q373c0Jk&o+o2)=IR86;p7DvRjgE(_ zo2QVMy|=Tq%VURUf{t#scK8YW0&qo5NmU6TARqu#z#jm=L|mh)pkSq=r>&%_p$KMx zPhS^4c5{1nfgJ#xUA#Q>l<%{fn3}N@PlE4dU*#2llRw8j|NhtO zXzY(8mxl%Zeg3~<|Cj&Bz_YAv0Du6z*8;dc@w9OUVLt%4_{_%5!wUc|t$^tgzFux1 zObiBn0U8gGK@h%u4%__!KRAc2|A0+?;TY&CfH>#8Qrg%*egeXQAk1(5Pw9651pmPU zhym84Z0lm{;b6mlt_x7JPaU4w{>u2{<3D8o6XyR8bbjUy%KYnM1wY9>o%GegZ|-y1 z?LAcV{)FGzJXX{PVGc0;o154D^YmLFyyNX;C=bHC0C3^zQx7G>KkzTe_}d#8gD@SK z{=~sc)!@%`e-~>lZ4d^_UU={1s(xP2InFm5PX$ny=a1iCn6tN4J#S->A41@X?E^&+ z<_BR`dvBFL>nBw6^wB?0|5cB_y#jbX?RhI1gd;p$_0DO5JQKdReWrX) z3#^~8!_7-g@V}f%Ov&JoZpj24S!sqJ3Kz!*jiX@I{6vj~|@tRuF{c03(9O zfGyw(Sc9K7fD3R2umgI4GH@UKb^|=XoTq>T@C;0`1#@k|6bAwdzzIzI_jo$LaQ-Os zH-_D>(i`WcK`Q?q=MM^-f8w|j(G%S!5&{3Di8P30i6n?50d^t?(LEwLq6c8AB$4!A z9O(QLpX`7gunBVdM?LO5oT(-25%2>)-icJ%33I z^6dK08UQ>&4g5nofq&LhXJ7-vUpeffL^T$}FTK6QD`^mop= 
zCTzpCj2YNd-QhB?Y!ID|3c&M8vjL0gh_-|gpGu?gmr}90d~S>!WKdV zVFzJ3Vf|lq{`{-M=>PwRXHH~@wqCk}uwi0c7r^<2X)z;m$H ze;y})a!SNXBuu3GUwW;?d0+kq7nc+-J-8$fuwUZ6BzZ~rlG-`^kCAtY3(S)QKNbJt z{*RIK7skIxVEa#4pjQ5>N8nelJMXLKrF}rYpMiYadfEDca|Zw@xVk;}aImxYV!w0y zwm7>yIF;G5tGd_-^0PmF_Kf{kmM8o9YXv-fY@Y}M=kwrsIdIF9-mm$MfUf`VG%rm6 zko^JLtbxDN%)9}h)DxVRMgLCY25llO3;=5G+IV~T{81k`@%93M=Tnh8zy2p6zYp4` zIXwOVv{%H50C1Lo$79~(@n^YU{WAd2{0zSccq#fhJOuzv&2y^{zGaDsfC&KCbiX*0 zBVhguRwCdA4fuKb1po+l0Eqs?Ik(m4*SUQq`0M+^y9@uoAOPMG{sRV@K@0#t4Je#@ z5C5&}ypsP`1?PqF%>eafpqT(5B%lT^P!kYR6X4syH$nge7yqdHoZa(_-~u7h#Y@DO zNl3{+gsLmR1p-3C3q*t$FA{-@AP55U0V3*)SAP?hzjRIaG4Ty|nmaENaxUMz|EYym zZxG2LV(syggp`h+fsu*x78f@Uuc+8vaS2H&g$IgC$||aA`UZwZ#$fZ>Jh6RhXYb(X z>E-R?>-XIMRY+)9ctm7WVp4KSYTBE(>F;y%;Q0lGMa7>hDyzO!*VNXvwzYrl==|2z zJv2NrIyOErIkmX7yt2BszOlK5+CTVtcyx?DIX#!_T+V-7|5h$g2?Q62hzN;@&*dVx z;Cn7OHPOZ2gfCr{*Cl@Je(lDc7nf=7C**u;A-O4{horUk7$l|R5MAU%os0HMvi~!| zUj9Ex_LpFP%QXd%6M~(RnvfcR05}{s@b1Fzq@WA%fB!|*I~_lrv~;lpUf!7#NRMcgNG6(^>_r$6C$Tho<&++_vhXrulp5rv*tF znUAoDmQ#rt1VJo}dKAv@LQE2q!o7kjV$!A!FNp9ORZbuFriD0k^=-Cg&&(tsc(Kv+ zOToNhBFte0uNR9I$YMJl-_*-@H%i{(7rga>ZAvZEBmGnURsffrqS59`eE{u$(qW)y+!qQV8T?0wa6J-arcj0e2y=CP}U7?(gp zwu1*v*YUvO4vGs8NK)YeD@$cjXgggS9vIX##{=ZA@xXdBwB4WsaZ-SU$^57W(G+mZ zu1nQt)I#Awcwp@dI_XpA88w?3WQpuC9@xk)1G!Ow9xOnXcx|8?uvX~e55ym(JeSI$ zFRds$&&K+%;(>#Bh{6whJP;uejtAbSSK}Ce!vp=lg9$~k&;#v72=dtcunQyx7CR?q zw~4A~2T}dPPgY{${-=U3@Bqjr$TuVe)WNJP&LbRqcm1o?=?yDN1o8+C?!wB9WkX&3 zhzEY^8-XYskQ1a3I_k~*DHjrk4k*V1eLBAsS%L>1C@;?ASbuT(8hf_!@0kR-hLYfc z@H#wzU8O=hBckSiJ_x>kj0e&um$WUQgI98K>|fY$QI7NICm`c;gt!~|VtAnUPY%a9 z!9x8}1vq3+&B4^!rh>()WcZO8OJ+`na zi4$1PRZC?mb*Bs;g^NYxkoHRPe)&zYrK@ABv^G)i*xcRfJ+kk);BVayPxV_%S=P<3 z-S@iraLI=8@pfODsFRe$b4^V}ZL{0%&Yg4BBF9W86&wF?s1hPywaDEb#>zsMzljNm zW0V+i99Z@J!N8ZszU}dho5!2%Z`pXfO}i%k`Z<#Vu}#P9YC2NwHcNIYN?a}d+l zGc(xG$*lA_a4>iW`6g5El~J-V8I>+%HY+hu3E9RRmee-*uJKD#)IQCZmv$L_s7qNT ziqBkD>IrxDCnoM@(4b7(yx*U+{%D?gGbUW}?Lcg2M1)6uVl+oeB*pZrd>cVzt5z|b zNfVYcb7G$*UNf~vZ?J2bg! 
zVz(RShN26HlnkrmL`4UkONG4Fnwq{0Rhdf&-inNcS5_yrh<%D1^p>y|c_}!hX-!*E zr!Q95;}FDIn;l&D)<|;Maf#bHg;dPl#BB6861Y*ReQTbh=V&UE$BJ)WZb(L%>i&p( ziM)ZqV!yUXYbihXSX>UV(MP1QR0k>v%n2Qc~^Ue zX#0#F0!zaXt}Gt@0~L-`WrypLM{YHa$!&Z7yTXK6@k;8J3JBt~nRh)=DTS zUY_FNNR%nn;uUimEmBK|Yw0HysB*(e9i}oLR(j}E%Ty=t@!XxZrP_Up!^wf>WE=cl z7aQK0_dSNT2p=_h;F_$VPU0Ve%W!gML8y6}#*82+75e5;fB(8G`UW0w%x*v4?1bt_ z9Uim9t%YkIWAH$}(6hPY6R7$rq8CyXU%sLQ?cChWbXTr8z(N+{ry-Cm#6tK{1vCJn zpFM*I0uVPxJp1lY*{!3YE%a3f(DB3aH;5^4w=_;RevQ}?(VxhD6buc=Qe!|A7Pe+T z(QT_}xyvYul+wFRH(*EQL(i;CtL5qPrhIajq=Tez-{yY17L<+As;l`_AMVq&vz|Sr zgY3N_-i;a+6Qo^Io#A3}o7HH!gD*-AdOu^ixxlQAjX|uQxJX ziq16GTg~!EW)L-=W;V=y-| zKeBXVj&)2dI!M}h{3qPhX!FvkR=A%>qdhh^Cyabr^EDdKeZEZNmL@+zmOu&!*idqWiJ;bgoF+KY9O~LEcseEhj^r zZiZfCfuVYWIW(wM)zqbQcB;t;j-p>|N-A(n&+3o)$@p%t%sZog!)w=i+IZx?-}4Z& z-o_6}@Sf*ITTG#`J)O3KZ*3er3$H5$1j!0ypDtbI=(bRlQ1bMP5z*22(ACLA_~s3T zm}kQ2PM%w5)}}`pJ4nZA_i}&LEq`9aU292ww)WA*Q%yJ~Whyy6p4X!>>4^{IY7_mI zlwo$Y>vBI?en!ya#aK1Q@@qcxQ9 z#0naz)|bR{`9>W{g!jE$?CBK~a?mt-sl`*kqwg4qxN7XoCv91Y>TlZ~uZ^Q(@PqbM zV4NPBYnM$<&6>9r%vnrW%=S3=ii-0+K8jY4zo{b}6n1n`y+=JdTJ>?ZQq9Y}47Dc4 zs$Q=Vn#7pQCo@lmR~sI}T}=7Z1oGbs@u-c%%`MHvd@>UC)HcO>9r!x1UG<__gJzHj zC?!WN>SAfO@IV$!)&3++ny4O7OCtuq%AfOb7+6Wum#ltOHKP)*{F zd(klD77Gk^REGTirk`#9uEUC5wzhsIYePC3CmmX&9hseOYnX=9D_C8Nf44}QIit|l zecJFN$pH^cC>j?W9>azjp>%9xi~k$T_{(Y%U~BzgdiydCC{q6Tq$R8N4|qUaP6l<& z^MEkhZ#h3`8#5pJTW=eS%+6t#M=xTBG7_uZF816Li*kLoEP@9{rf_z&%Lo$e9duSe z(F}TQzMUzPDK2SYmz9q%Ac9}%t=(dfugSQWgy{5izze9e>+(16NExx^Z|vFJ4XTUG z_M&`wbZd4{Li!eORElF?mDwGAgAtp4GePPCBe*Ijr}EnXZ{4AGj$NZDgSj-<$u@Uy zeh!|FEZ#x4qeKt)0zn6Z0#Zjky&?xGK~gv|$E;L&mh{AGC0ocmNuXq@qtt53c%19u z^?(jmR#bRIx!A5zzj4#pEo9c*t71P-jYOO~?TnFGaM>aiv1PAhsQP)K&OKUOmiK7gk9+^CkPHg4GmWg*iEgn%hWo{L1YfB` z3Pp%gSUpHxqqx~V_GQdW`BgqL=qUAzc$$&!7ReH`{m&qE11lQUC11qnEi^&09rd!z37!)4kD|F(s(lGcEOT?WG!PpWdrE7|T#} z*{)j^AMejCB?s8vT0X6R%M_dohdum(~ zPq(IgBalapj;!4h>7rXTdapjrns!_Ed2w+`V7NWi@(PvoeT_2gKejf&AXGCFXLV`V~XA`g_W%>+`SH&M}T!Qv?5 
zSgbrs+8k{KUF1X1p##iNck7XzU(1#*A)nNhhabw?DW`q4qG*7WD?VQEZfF=-A6Tgh zZ!_ylDw~_2g4|u?Jqh9-@gp~~Q))Cj=+G8)FR~0N)+RHaR;LiVUuNd=fi+8!g~PFr zGAaI3Ca*-(PDK=tO|it8Cs~?Nj#jJ;zc4CAN-S54AziM^+PXv^E~ZVb$1+i*AFHVM zqtceBE0_`8b0m#ED|YZ@B}LaR^|~(6zxwH<*obWW0e4-S6DA?DdPnoOfw_1gzZWYpF(O2T7#YWDZlg!$Mq*BCXPpx$aanpiwo-9ZV* z@@r96iHG~y59GdCbf>+2xT!#S!d^6PgPeZ8pjG0!z(uL_?5H|Ih;n7AN4LdrH!$2j8vWRID27SO_3Bz z!`_9}v_`_~Raz#ahV=YF`J_H8M^zTKs4jzK$L>4<+LaGtA^K`54>I699<}-&DMEI3 zT;v+e>jp&!O4#TGwM>V;eSZkD2zRaPop_k5m{6otT%wHjk`SYLTTr>f_#HF9r1mE2 zI;8hp33=G0#yF2np<`WpzrWygyBz=6;U2wTGdj&`T+9j6g9mi^Fyg99H@O4Ms>a0} z%R0);w4&3YMx%w@p2xL~aEFmH^Qn;{NfkU`v3Y<|S)59%2&M1QB9Zbc;Vq^*p7vA{ zHma_GU)F{1EMER9vdGm2+P&Y<(I~QxQ=v#1)gp&ENW4}ml+w}PCy8%z`?iBWMQRD? zwmc*I;srBr7_=Mx@bipJ8#xy6LXzql~|m~9ZuFecSi^Q?URK~*w@D!y4hDtZ57_~Cb${D%3Njez-Ye9e+QQ| z$avlI*GLlr=|ql*`c-Qzumk# z!l^}jK>n2ZqLQ~o3is&O@laBa!bQejef`4Bna7gcvxQw7CnDs|GW7&;-<_25HWqHb z$#gc+)AhF1CR6g>QcBOvq1V%WrK6W=y)rKP{=;~uD#m+LE0P_}h;nr*uIYm#U$fjM z2j?+*kq?)9gY&UW{oo9^XvKjCnk(3Fk08>PJho+AXVID4vLb<=tPbLVD^7ACQj;o(^b~?^W<$|<%%5IehoI?9;_eWcyMbKcq{eto6SEk2j6#Lp(GGQ>72!P7WO~nHPX_8>e zeMaE?!m;97X<$H3WN92nj=Ksy`2L?1#~FZ_?os+&RohU5W=6R`s;tgld-_meQRH^i z+^uvynn3lQD^V)CT{4uKVheqNQ&v+aoYU${-~I8xtc)H?n?J^uD{=j9l!yOn0H-e= z5V^JsB}FG`q2fca;!C@1{Zh(NUe|B7`&&)D-xq{dR5i-Y>wU&Nkj_+qOn$IAu%=w( zn8}%-=-dyqWXh+ajche?_jap^nsEF;(M<^_O@xcn4Y|k+2A1VbRXDKaExOa7{GQ1P zrip+4s*@X0&CXaCYpS>~Gg$^6m8lDahp;ei>ioSdlwvN~2J>noY)O|#a)(#!+q4lf(Z4x)v$zDbh}%m z;3CN=_+DP2;N8NmO{a(1om7u9HWkEmG( zCU|Cd^L6w$IV0Pj`9+!Fh_pw0CM?-$7U`AqkJFPIk_Z$i&}tKtE0TOjgd0)#%4#6m zo+%luf=6e3cS9CtIu4qciL{MH&(R`B`LsgqX+d2 zyWvHCX$yC&-$P!N;b_nmswnz)HfBdwmcfo(--RG`6Li{=_EmGxFtzF~mTBuXM0!)G z4SzNEWo2t@99SE*i^oVUYwUkUhn_Ap25Q*apaAY9$^xy)Gt%r3LRh?Aw7R(26S^vgB4# zetN?Lft6mu11B+%U!?-TI$EV5*!s2S>tBUl`amhMYynFV4VnRkT~6Kw(*mVqSEf7W zo$I3}>eZ_CZcng$B+$|#kmxEOXb?D6S_}RS^~JE&FPpx3wbGN8~XqM0^f zn)RnaUAR7l)*VCE>-tHa7R6d>yCx@n{7CSeptp2F5(Vkw?fqo@caFqvk8E~0Hr+}g ze%?`d*oS5S{(|F&8} 
z@5S9xspvNHE4{236UEY@*%tTQ`R>%C%u2`?VH1CEUj1@yYHiU1F17b@tVwOWI)*nT z@54!l+mt+u^scU!=#GXsXl36Gxvn-VX8AC@T;jghmTvY1cf*@JMujDHiEfGd1!kG< zl)@oT$}a~|@;**qh8fx- zK8;ww2R_%OlTE9}=218c=oPff_1!6vqAV9+P=i(EG8`CY`B^&_-S@Uw4%KTLZ~Ne= zO|3@DZTPYr;zUxUe@j0xgip%F(3X)m#a7TvH&=>gUdCa+LEW%$+^1-#K*F$qON+J7 zRPMJ1WYf3dwgK(h4rX0*L;MoTP*m{KHub!PE3W_>b0C0tk+DOlDvF{A8!JJNrD z%d}b){-%$DS4THgLNCf2&QsT)eyxZ{FPxu_NqbUrsBqNZql8|(CobFS$w%R=S=Bjj zEHMt(u4ibZ`l}}{H3G1Pon)-<^S8;=TXHd9*L;1np)EOfG`Ak1zdu-_}cm4z# z%940^0y+^7%hA4bXJEg4sFd+y3);UrUX9rD=&4}Juneva!eW~(pp{eu>*LvYVC9PE zP@F1MyB?Q8eCre(ztEqDRFwg^QYh@CSI~GeT&5mO9G?@KqAEO7K4gmz^pKPk>k`_G zhmG$c)({`6%^2o5YVbf*fFwrEk)pGsPTs}Lx0%Zn1YK4+sq7M#sWpEPe|1P<{-@lBDKpM)qP%joZ z?`u?>BTg?CtU+Njw&rP3=s20F()HUzdmnD)PC_3BWc!OHLSvz~1FoGNj2z%RQWF&+b7cU`Z&UIvU*pO(~qZC!DQv6TBtin7y`I>tz*?!aobZg6`$z;$T- zdpa%ytF1#%r5&sM;R^&&o zi=@nQaHnqt-3^s8I!wp4v-e-Kq;K*Uc^2{2^4XVo6mj0yb@OOgR??3(8FiMz#*G~F z%``P0R_zXrHGOH^J1YA$?=)q+8}1}(_93*?&r^TokctG$H?$tb=Pa%~REAZ}v$tXt zY|t&-3T7CSQCxDCRvi|DT;FCR)9*|~>zj{?oiwpdZuz^uEU%)Xwof<8y{nsQG0wj% z0oQA&@<`5-)F#hi7>RMHRTIsPIIzzVvC63L^R2!{T{d~5+ z7jCw^#>}c+JM?%?a?ndv?CciC9Ci1_ z2Q-r_+vKL6mAxfp9dc^Q${vNTU-HSh`(l|yicf(b&Z8ulW#v&El3X%UpTe}2ypqF7 z>Je(I*u`Ubl%{{=b+cb3zbOB7W9P@b@>7iTWaBW-uvkbi1rvadS02hmAgYjIRZjVi zOx-hz#ep)}e6qovmZ*>^*2vP{+zDAktj(CHta6)~WyOt+Y)a%&lG7sETg7V^YKan? z$FdLiCbxQg_GVwnwPTuU>41qY_wo(7CuZh8AIZ!-F^}OXe}?dB81Fk8!fe!4mAIS< zl!J5C#j7JY{tf77Sfq?DGMz$52fR;Mg}CIG>UwGGN~ZuUzOqc*0v>zvGtie&`~C z9_xU-Rh8exuj%aTa%)Uoz)J z8`NR%p~pbgvqdi?nI(Z1RmH?$b@bot^^DC z#doq^M~a932-ICFvz&i`#3ebo3g#Bg^7mfM6CAw%xtl%8^b3FDg&&_jMvANRs3fd_ zJ5sJD*V>ef7n?t9oPRP6NiDDxbqG*PS`9I96*SuYK4O%hS>FYJACS!oan_!U#zjLy zpnGdMTiRH3u;pQJM}aN4oM;1C0T+E$Zb42~xS7&MpMvQh{y*4|Q_u=3Xbg@T%cg^? zgPurkNajwA>>!X5(0%!{l2>>DHh-c8dW(EeBxctB5BxJaC`s_uEeSj@M1b}ems>d! 
zlNQ2KvSD9C9Up;f@`IE6)mE7BV;}!5;-itXHvxfm$oR!u?`1I)tau=yYaPz|H&ax* z7Hn7?9_aRlo`9}t_}unAaGS{R*&FCa1zH)08G)QwLSYD5?!Sm2s@z})C(wmr1U9^X zL>ui4nS6CFG-7YqK6mR zFnfC2pht}dI2YxPdcyIH=ieGjQ#< zYP($}H}5&_S%=)~a1Fx)bb-Q4nDTH-!C@J5r#y#hpQ)^lRKbP})gM1Gwd|oQHD6ws zsosp7e_}fc=-+DLu#LBq=w|6zN=+{aFgwjZsuQr`Ug9x6GL$@FKRKIJni))~OxE{R zotpLCh@Zm)lgBs}p%x(w2b2O298wOJprNv0ldO5-0UL`>JfOGDHujbc8|e{`q5U%j~{(T8>8$Nt=g#~S;bL^HW<@}CDnM7Ai7O!0IMJ#=3NpiWLP_L2%~m9 z^_>vhDg9wx3$8#kKGq|u((elQIMbaqXBr>-hFf^Io`k67&W!G((S?fT{ zlAV;5G5mf?K&_r?v7w_nPal3A;)luR^~b_M_y{8nz8zd{IZiPcq$GR?rJs<7_2EYh7QAn(+*G7agRl4 z@qpJUE`H}28hO&)vt51?-2+87h2sIPbv!`Hg}wD<8TJVsUybd~=GhURS3mAVALe3| z(k$-U;DHdZ^a&kg2lNJh(x*WegV=By+Gx!(#Cm@_1g(mOfx$j6nbsuGx3lP6{~ZjI z9SuV5jJKoCX~g3J4$}&B_^m8o=R$|@uTw5LwlcnV{lj33d-N0YrZep=OEp#XRl~V8 z)#-N`O|26wSi{SnPgarV%JsOvm2*wD5k7HwtUA!~XnPeMu1u|3#ijK=Dki-_e@J!K za^2_>W1M+(PJHuI3G%xl6cDHT4Z;(_{@(oKqR%r^cBV$fK6E;PU7r~xjf;YGfHuLM zDhhJu+Bx4Og#$CLShbX&iZfVY#leUa;?v>$b|)JcA~V;<17vP&*h0u+ZZH?@q;Q@Y zf_#hu$0?X$WhtfmUrIJW#^bL*Hp&X{KxhpW&T!^eIbU=^cq=-DHWb&Jg1adg^ zKts;=wVR?$xmasQgByBB@L_DMqi*|PQTT`V>w4~It4viq-KgN5m6UStmbdOg40a4s zwZV4n$-;h8eHkV9^TmmB#KCpbFnESzTD=~1dSz;fdG2>`EIxFH+PO|2F9@{H`MgD5 zJ4!yw3nczjMHUcycX|=AOH%3=u2B)u_#ua)^3*k~Ny37?Qd^8}YI-RstMntEI*NMT z>NV4=HnY*~EJ?lR##hApR(U;iQ)F%hXeFsn6hwh)Ra?-KuGM@>WwHu@T2mQ-x0s44= z1#!%{WgA<#jA#Knu1YD?&Z=S$``YTI)sYI08C?2b&oxGV2*3k3zpvvuYU6S6F-YYd z$jQ}Jwj;hC&|Y3m*dASFeFtu(VarBTu|*(k=BI>OdywTzb{&kOJ+7qaS+S0^Rz0hr ze6}kabm@EHc2xnW#aTPe`Y(}UL0TkZRKmf-EG|i}s3}hENNjO8kYj?`0d12?yjd1T zBtxNQHU6}ZjY*=sobrpt(vzRh@`~N$?2_ga@8MCh%5*o9{#_!8L?#K+%Waq&)=~Vq zK<&$os|C?*0ggh)$qwDuR*q60b5~0|oDD$5iYWpmhnsIPkJFNUkCI+0)Yg_2U4^AR0tf^P$VbfWNzqJ5inRC zh(Nv~#{*J}IJh4d9*_sETfKfDXq7IV-`;#+t5Q+x* ze*wDJeMf(iLSV%gp(lP&u*9Ea7(o|?@c%-hTIizX-&*(#SwmpuLDvI}&aVe3pKdSb zf*Jz5VkN zMZ(FS(&bwzW!chgT05xS3EYzDy6;v*RtXd3WHeLX^)qjayU?=qQAV|Gu*}Gd%gDsS zP$tM>*i$W^wbYKbi>rHPQ7?HXLrd%?_ngosY(jZCy#1l&_!9k28b-|#4@^O;pgUP| zp^N=Y;OS5%m8 
z($j$di?sWKou1AW3f`EYS~EZDhCz24QJT`wR!-0DDe_}Ea4#ztCX8?H73H7LkSi+!wY_|3Ll0UR1LU1 z8p4{?jf|(>R|y>$8CzE$@;d9glu$Oi39cEcE(eO-O(=TqcWQE)g7j!LSk2WkWEF%} z9(j~zB5_?xJ}xqGB1O`CBXFiS-rQ1KrLr0qEr_^nZbpZT`6s+=HE=shfOC>7S9!#a zi&yG}N<5s(GSzPv6O$i?)%dmPoW)m7XX)z?H%-E8kd`@4`J1Okua-0=KU6hBnu8fW zB1czSr&XPDHgSA4=*Y)Q(&bm+O)2gRXXedQS+U5P;Xqf~EsGNqKX*>f%i4(r7z0kp zkus(Cv5ZToYvn|%y=H#iud*&GcE4{Q{@UtRiHwtJ%DxdOZKR(mnS<+eC`+YW^pDYSjYb;YFcEjFt>=PNBQ#xcLh2{+EX(^Wl7@4mjbA4xd*K1_i-g zvL!Z)%;qFzE>CJlz4^8q@8V?5^c@N-1za6UHmBNsd&m#$)4rtpq|Cm=O~>(Ox;ORt z*@mT6mC!4CTHA_o7Y>Kq)Dui!3q2{&@|qD$F;VlDKp`eRw;86AO(|SO2_G5cmB?wx z*l>4s=dJR39>u5nR65>&pOpcZXodBr>}Io{&jw#W8>e2Y0lnOGm;CI^0|-Jbb~VPA z87ArMXTor=eOLSDAC4=N8e>Y>rmS`_pc_cPkam^`0X_cHaQjg#7tA5sjt`eV8IQ|1 zf>e2k2PSa^?Kd=nRug`0yrfI^-a8^$s(}IGMXI)y;U7PtSKwb)bS&kU<7uYNIZm6> z;EF8tO(LjQ4;=-?1Q0H99VXAl)Xg20P?5ZX{m838>Q0y2A#XQ> zh8C}07R#-RdTn19e-x@UI2t*YnT$lGh}LozLwh_5l)mtZuNUU%Nj3>)iScWa%h>!6 z#@;)wscl;uW^ec276e3;Dr74H0t%vZk=OtMAxiHN5orP3vI8T0{uFh%`YU zLMQ@4lu(owLZr8Z5*DQRPS1Jox%b@r-QV~6m%p%*xz?C-jyaxZJY%dwFFpm-Rt|dv zTMeel78z&fOP8vCuf2TCbxJf+KM&ib>Kk*f#?ZD;U%BWRcUi@Cy(ppct?wEX&Q*AO zJGUscdwNbcB!744zueN9Z8R5{c9WEIHn&;B#d-v1x$H2AlS75v2jvrIrk#y^hx2uh z1s1Kk8@p%a3!4`?*oGSZa?tozXitpcKBG8hH)~A zjodFfkkV~hj(g@$wNNR!gG2E5<1~}AOAUN(1Kk^Sb)x9Eg)XJy;jA1O;mxF;YwIOa zrKq){ukT@1vSTb6kvO%qE^Qbq*DyN@6pWzM_(~A)8>tKvc5a4uHUdB$)riy~TE-_iO`#r29_f^U; z5sf*Vs}nRuWLD8R_zHh5;=}jjUaiRC=5WpKGPw6lw^wj(5eNkAc{5TtSmGYDd$p>@ zn={3?>cgH}XIbM zqaPiHcQ6<-6vgNUTkgeW-B5&aE*qg0_H9?Y1$=^}O)o3XVZizL8@!GkfWD7enR>@* zlu>45Q}O)4n03teZY1*@`ZG*`1*?QiKQIDXk2lkVV(_k*WSR*LwS#0ANfq-SK| zRtb<3JIH~BctS=q!dj-Z`58=vA66J;Xd9{FIW=!ETruL`FmzVI;?ks~4M}FRwgq!- zD6f9FR@|%a_Ln*jTO)(6rLjNxj6rdj)QVIo@m#?0*d_McG z>FRG+rd2zR_7>{f@^@qvq@^oAfK4RdiE5F#-&i@BZ1*MA|LxdnkQYW`WDXF1(>U2W zZXNrEK8vGIZ0ZM%wx8f%LcfC=tQtCj6aF5{I@kcRrpqul5TEuf!NFU%%d~!QmEOZI zNHlP`vAg?O7l_?_dq>wWG->h=t~lJPEQYv*ghU;;Z&|YUjR4r``-2Ot&PZzr@JH9| ziJafYLA2=?Yy;oi%i#PGw)CQEZ+``dDCapK();&u7nugge_Vy#jT%-$Zt>XyE|Fiuxi|)5N6hi 
z`Fz=%B?RB#gPgI6`#JafQhsoGtCCsvyf~>AcVHVdra06TFeX>F@ie<1TpvLm8m*4` zoB=HLR29^9*(?|(OR(W)$bH&hQW2YTE8W^2i9B!i2_7snB0sX&c56hZIZ zprT@(u=lFPomP$2#0cMCT<5Pi23Vi;IeYa}wfv0@x$-X;myNpIKWDr2$+ub*)ZJ-l zDQzJ&wJ_Hb<1cI03QCT(`|VOC9PSVAI$c`W{!&-b0DtTKKS?t~I3Uf|`~gsZ2+)$L zJZvmbh!u0en|a*%pE@z9NmzWY}Xe%4*PedFFRg8tCs7`eE;mCBdfXAwad zNB~62L2Dw-oDD#G-e+Q-L}g9?;7V`$!POv1=Cno)RbO6IWCkJ2ky`|{qB0pa1E3c$ z>Q8CG|M*2MgJlpV|C#|HxwJQ?6&rS*gXQEspUNCK@3CTuVSESMZE>DF2Q%r|DFj|@ z5wmeBCGcl;pr8|Z0(}hRQ!stVqOVcjd%L5UCCblRG~`TWEDN#E_pk4yvhDFu3bdCS zw86I;0^1nfZHM8^*&%DKxR`YS-NuPv%7i!UZzM^k$^BJLP*UgG5*Cr!-C4IW`9!NL zZBKQwHz=U~HgWfLc20_S&U=3^VvnD6p=gV`LBYg{ds%M!Qm-~soRhZ%}RbJ*?qxksUn0jn+VgSID<7(p$YXsHnS@cIDsP?-XJimqGSF`qsE zB#7fr&&G&7N;-NO^W82TKSPiXmHBOerSrEg(9x)t$W#T+hn|<+oV{acx#JXB(coOh`oU#{paxj<^sFxIEwnO5r%*7KYSPN$4=x=Uu&Z02 zuCHO(p?t#mJD7eU;6D^MWtNqHaJ7J{u4Cvicnua4qX}nkUDn?Q49pg_LeaM%e{{8y zOyguNzsZ#rKhe6rytn=aLvTe8Ah%5XfN$AH+(vHhrc7Ym#h)bGYh#9|8s3CHkno=M z9{$1Q&;+s=@R}EkbQmyrYt(4t<(_`ndk&x&r#<#Iw_zi0pwMd#t^s#N#Af7bumX|SshT&Xt#*0iUSn#W(Wp;yf?B!}@wo0p>y-a- z%~+o}ICv6DEye}DC#wmvJWu%+$n(o7$cMY|OgJP8IMktC#c~p7XObO4;D}vk_+Cbz zULY(?H3u=4p-e@Imk~qKN zKOwD?wy?W90vs_t23TCLQvg#zzOu6e2M!_L9KF8#8f@NKj&tY(>`v=CBvSxgr}F*9 zk%t008m9J*u5L4p6vk!-Il<<>$J7L@5f^Sak5^}Rsa=p4GCyM{*CTd5)wcQJ%8oHx zJBVZ1`d)Hex8T9+shCL>R?m1*%5ve}3dZ>VgSa!s0Jk==%<1|kh^pOC+&y}q%#Z`D z(Du=o<>BwS#kYTzh+1&nw;TL^s5uAO@x^POOMJ8Yc|=v5O3L;+wyFWS?scN5pRg;o zo;tg?49+bO(-dtYJ4J?#?dH{N7Be4+H&Nv;ra}pMAy#yu`03GB2~_u0TDSx3%NByi z%~G^(B+J*M)D5jNa=7RSLUh&2*}UNpB*osay=k&PDJ;aY zY{_L>>BZr(N~vMOHQaF{-Ru*K26wXM2avWoEjfk*@lH`?k#q58%O*Lp@4i%MU_B$c zW@~BmqUapWN-uNid=t6bX~KF-pXV5e*>%-I$xzK4eQXw2UF+t?EJTMK_L^i0Y4xVx!$Ww!scdV!{zUD--Q3)x0u=e=Pk?+Neq(tc)>Ew=51267< zL|K!x@6{LF8YH|NrjTQr-JQ9IXbvq*yO1;WP=Z2C;qoY!ceWAo%9uJ!I@fy2y&-e0 z!uu7Rz|AM*9&`iEBRF+bjU&-aTajKP^gpjnFbS;d+2vH_5cse2Sf0n7uq_fL9#|YS zmmjox*_0-!KTUP;8y34WU$|K5`u<%%Q?II_*X3I4soX>P?WM^@+c?e2V=ia4%+(D_ zI}PF=EITxuL%8lbGxFU%D(k?KT8&pp;=WAoOp3}UJaG`KsiQl`LScjy2y0K5b04$s 
zA1En;qR~VP8|gF;slSISX8%H6K+Ta_`wMMtzAo)xX)j#a@`^LYK}LN$cM8+2iXF4zUUh}j0tN!x!PmWx075vDeG&KExXZwrWebI%g)&qXTNo~ zOi6v+l1c{{DhgZA7qtcFbJY~;UjLQooCjU9BB?F=ER01KskWkzqV#UihD?{$U_4$5 zipusvGigGN_o>f@=o=qfB$ZD%>ONx6?w4)Cn&ls| z%xY^mbNyJEk$#oi$LS^JlUZZ6v)w04G*fa;XD?rKm~|E|#n@J8)<5e!U8E(SZ`5_} zdXAAPLp4Dk5kOmKU~517!6BViSxC)*?vMlP4EEGBAJTfD(x~Ff@|^L&ABRjBfhvmO zLfR6Mn}nvc&V=+5o}#CotCh;)r{)c4^DN1~m+4heoFBgC@&E zKD;~MGob4DjfJRfc}K%^oa5PgU)FzW!$`5i;lyx-*WF>Qi&Xf5d>FDj2qhWm?iEi%m06A>#iZYOhFLwZG6V$*T*5U#r`tcR=pSy=O$oK z<`J(Z_kbnzGhVW+HQ+8%p_hqljeWaaRazhZTPiLF09_x$S(YqR&PRu$V7uO3m)IUP zd=jpPy@>hOa#;Zi5r4@(r z5&vC31@pB`5fBFkGF6tzL3vjW1i|uN@qMRuWMZGsV1+w-zEKe(7@-qTzQtu#tR2=T z_Zw_MhT9a+^If6OuIQK6A6#ogtRxEhWLY29MLzfes6t^Ilrmq!#;BxCjm7R)AW+!E zB7?u}>VPQGC26$<&X1|%V0XU!;1a!@k&3DMJJ@Rn6X3ncw;CUe`GDu$-R`DtwXHN~ z%t(yS1B?_&_Pwqjyy|UIGwAo;H=uY)09TjOkaJi!kbLr#wT$cQ!yS$R9~&QX1svf^ z%YE<-$?I%2v3{x3^uuhRzm%;=T$<*#)3ub6L+-}85@|N17X5bBBIC!+^>qW~yfC4c zGYiv?%E4MiQm{6{$tA}h2LV$-TGdbq-Ik#kMNLfmBaZGdZSj=aC$Dy4 zv8t+@vJA-BM)HZ*o7aXpQ%#jo=#=Y+zaSTd(k-7G9X8d|>^tU#PndI1MhRpMn$;-C z^eO!v*P>=CO~jd`R$?!U;=gGR#*_3*2D7Czjg$vnCZ^Sq9k(j;%QB{wzUuebpu5GM z#mgzqmLZmN2g)Cg`ILxs7LttAFxaRS8RoqZ1(4IODeC;eC8Af**0{7$M_}1=ZZDN{yD<9Q{Lg;K$x@T0ZWxUX+$6&;8v4PSodUUT5>$EihozzU4`E=oyg>9OXT20!m zvmR!B@{RBy#8Rp|(RRcn<@p@EX1xHs??m-J#4ZNSTQJcQ>YufayUg%#cc%C8%t!d2gBf^SzGPaDy%uIkk0{J{sj}Ic z#okOj6qsY9G2)X-dD}xNA@aPqE8!ehbmE4X`ALh2TRbOv&FD(0E(ngmD_WhToVl~j zQFDrQRRpwWZGPf`>2|rU%1ClW236uvq7-FmqsccmKeod$&y;s4-x?vvZk7?PjyTwE z=Dc?keS~EV@>81NOr6lv1u~vx*7i~;LYumXpSi1b0)*zIgJ4aT|KI}2PMKazv7HOx zr8LP}#`}J7W$oh}-`|7yirPxF6z z#!ug`-Y4BMB}Vrk`66hD?Jb(9Z46`l*g`PMc>dE5!~Pm{Cn?%N(Wi<&w>&v)wF9)cQ4a>~+f4ol623 zrY#pk9zmCRQSe$`timOpv5wWz)_{GZ4Hcdj`*{p*@)nFVW>wU;MXS;*V`@n<-&C-?s5nHJ2}w~$V@ovrh{>aREa;) z<6sh|3*RDNSfVxwXG_&UFMAg&RA#>@t^?24wu>e51l~KuwI?%;??{Sl-T&D?7%mDq zF}*Dsp&Xh^r-fR~G@b{QOwp2@&hfAx((<7&J)LYvmEZUOir}}MT9jpYT)J#PK-vm8 zmuu%7v)D1GK0uv)B3GwD@sa4wex#8r@n~;2=-`ZxS;=*78?!}|64BoH{5zWlk8*^x 
zgWNrPWopu$9;w@yOUCA{x@$eLjgC(jO$gWyIcHPTdBTBh?_Qu*ch8+9%j#MFs?6LeaT|T(Ox<|{x-3=82yt0-2oQVf@ z3*1fT4au4#icu#kwEj5t*i7!KCOtJ@t|l+8ph(W=lck)woTG z{C7C&?6@PS*5#X!2DRZ`sG14#S2vrkva-Iyt7Uqgk_)CNKn5V7e5c}mr^^#FWMq`# zjhA;x)9(jydadu?*3U%>frI{AzU9Ss+uW?qA)Ik$R^j)Sw}dX#!8T(h%hyYn)YL>{ zie>u>ttv&3)@fP|g5%*MM{{h_P8`<}Tr}iYccNNF$#P-;kd{m7)vQhGxZGS-`~UEWh~RcM{Ahz#q{3}xg{u+guVX4bEe zwjgg6Z$1PCe*m(s^#5jA#uMI8NkGNxstUZG%hOa!)6m5o0J;BO!KfkEri<~*)7eg< zq^tzZ-d33-`TE*+#)`7J+)G>MeIuNO)Gboob#GTGt}GBUy~V68!YS9qKFt~&F)7jc zO!z!kO3aj>(7N)9Zxjp3gy$mL@iF98R07Z$q0r>zWqU{mg&0|1yJkEP%+A=*CvU>v zVgDzUEcV~2T;h+GzbipJ@q+wp$Ng}c$pwRV@{9k}v zXOoX_1OJip%Sx}hwmHU zGx1&hnE!2;Bfh;pgtXq|v{Ff}hu!aCVmVxK=m01SA*?+JEAqj5VUZuTf{VNlsLw7I zTIv(EpN7C6aFgI@mfK_{F!sfb6(wyTlZ#bESoF>hU;X; z#&y<4r$S~cF>N{=Avd8hW8wkS;Ie-5sA=Po|RwEuitJ zJjE-O~I^^gYu)im>d9`-(dHgR2XWe(@^e*Y>*GGU3sQ zX?}F-iTd*CS<%$Sd`)^r^E|i7UX#gOVIlR?vbGCoz@gr0P)_+&*(he&#B>O@tz&M# zwQVTrKzVZx$_#w?u?l$<5)zy#(e%wrMqCg*QlaT%yLfS$RMJ&~njvD1$(0xb! zp+6gWQ_`q0a`es+Ww`e1*2hY(we+tZT3CLgVbe05COf)2wm!>WQrCRq)x$EMDJ+Y(EZ_?`s&5OREEiWqq4Gqy4gOjoWNE#GdF@6 z1g+nJyqTBXa}`L<+%x3pFk?C*BI7n~{pst$HI|9;`?&eYP1C`xep}PQlOr%gJ=^yQ z3M0Gzm&$HryFJ+%?fX4}xQy33VfMyB{{GkEyY#@Y{J#bYXfFz`1| z6iU!Kr?s=z<1s`2ZFywRJGNQr39->c-1}y=TWos6MZGBa)Pzs=Y#{y@AG-~fz2>m& zo#YP3BGFW(uhyDd6^9>i^NXUI&u+a<2)gYxwSHlU<@9W$oG4565-v(=ABdM5HOY$( z&x>xmm%mclpe3!wC-=hcmcLPNgf$KmG_b&Vj+WxIgL1O7AGxzRT68F5pixZCuA=#VQrLF8zEIKP#LXqXOVHTcHo z*HJeRGXy5rhAkG7(p%~DFTnEYE5=(A%4i%CWy0o|DxRA{$|s#eOQ5(ZUx&q=p!*F% znfPy<6ARi3_4e0h)^)$If`ezgkGX1UXZXx`-V^(q&x|ItqCz86T)K#dSWYzFs^Ud` z$}%i^&k0gpzDm2%bh$N)&fSStrI)!m2oC#^54%Z7(<-0ld7w3iDr$)bZ9FWsc|^@~ zu@@l}l+V20vG>>h0Sq4>>(VBp3G=k!1qtJX9mjNQOVKean@`54Kcn}dN1Nsu*m!N( zkYA4~d5DX(!E0UP>>&az_S5Brw4}}~M4SXqt-eCD&$N-$6>U1Z81sD$k`Ge~? 
zh~0ntf6C&#WT8kZK+*6g9UE0!EZsPylE6zfs*RKn9`CwHzA{pM@v5U|AnJ?W>4nW& zL6Y+AsMNfhI)#c=M!*?u<29^?Bf<*Fv3nG>(^e5okWi;761v3qxyrrMYN3p%@t@EYArYF*W)Xa=uzJ$iP+Yu2>vok# zelx&ux1eUsoFJTLNKN`_mIv=E6u(!2VuTk&&TSv2EC6LNmRviq8(^0r4IzM9uDn5K zDkcy&Xi>ZKpTD?aq%f@0U6_r(7^WO|IAk6Hlx1x|gpfhUaGqjzZ~PR+UpV%^SQ~V> zMG6sF?cJP%AzpB7IEF|C#ZzfOb(13UQ}y}{v0Dr*Zvntz1}qO*y-rvc2vMZTB-J90 zS@(GchL-%`a=S7|u&H@w80eSuHX>m=LD4V}r+@DLX8M6ytC5>(@7%5>UH=_zP?rAM zok-5N$>_)=jgg{M%N%US4cDq7tlotfWAP1AHs;#LE7;ldeNA<%p`O7Pnf7cf=KC`i zIP%`=(a1lzZm){spI%(wm6%5|R6k+1FI2PppJP6z$bdYi1z0l0sR)Ov)7#hnI zk|p~|Bj<@vg0+I|%x@`A`m2Gp=ULe8g-6J7J!ao-?gRz^&C!w8oNJ1!U|4TOZStrm zUlyaP1}VeA40mwG;hTfnlfR&u%Y&ApJ}>Lx8rnb%Ed{Rrx+5Al!r3E30v8h$n9r_c9^ub}JStO;idCk8~B zyJ|Ktw3=JTU;=4dH9PN|;6J!5YgXQCqzhWr{Z*k^#apzJ;}E3c_81Vr$2?axDk<_~ zlU||VIr(_#mTZ3fVGjJ5KaBGPn4$s$1LEvdKo{i4=>d@vSXo3oJ>)_Vmah$?!|$}&!BWF$<6@5x;u?}Ia40zwzvbhGnNgF1n%6mE zE|p;KMzgPTVn9(5^f>7Um(n53bY8CPWi$`!E*pJjNv24W+r#| zSK~O6+ps<9pUz9C<6#R80R;6(X7j+gwW;;}lMPGXhLWH&ma)Mh!nBW6ZP%q<(s?fK?0gyWyj8N%T58^3Q-y#?};Y(SLKgUvkDstC(pV z{ugETCx0a0I7)4LXSIhSeo9gEI-`m&gEk80nyG8(;wfH-dq+|LFXF9V{WKb2IMbrf z=x1qT+_2{80a$(a$|TdQuVR6Gkn^Vu>n!p>R|S3FG&XU|(}Pmgjfva{#5?=b$!%nD zs|5$`&kqxp`dpl5WR>-sWvdJir$F8gBM)x{0GY7$6$u`u5t=CRJG&Hp09u|7=nx!Q zsl^C=9qYrPvIwKeZfFG>UxKpuHByK6J}6~6)cvS}vf>Gr7CkcKHPqd%IsE>h-)gXK z^}CMe<9L_7r`TV39d*24PBj(P8003RFtLGS6J_ zW?NB`_Q&GCznFnDkIcjNsybkCN)sufAs<5GSWzY7d~j0;yLD=0(FxM;M*`Y?l*|cA zRFh7c^Lb)XXQLFMO-u%X2bgp};-N-VC9yLrUl`pku;k?BY6cRHoAH8w2CZ z>(9m@0|IuQz9U}eU{34+BL3OHKdHQkhp(yUV!4-4 z_`57_U7Si(IL)o?3r+>TYH9gUdE*EMdHx3OplMG1M}lSk2{60lr+7YI{? 
zX^(+CrtKwXZjg_zvhK`yI*8=a66PxMCl;n;<17W1o|FhTCC}RpZM|Pyb-r)?J*G0o zE$=qD1oEFbjB-*-mx-oQHh;mIgSsk^qYmz4HOTg`n@}>;n@4I*oN4q9*N~d929d z76YX~Ak>P|K@VfL&Osoq;011)x&@bj_} zUyyZ3HaA)bzGDJTFjziElKd;U0^Y$lq7LH(?q^-81gF6Ny{|Ps$j@@snoK|o0`59_ zDjS5!==n?x9i}eCiQ)VS#C(t7ETF%9s^tsjomXdvirJll{mTSs_)!EH1jfOCtb&q4 z4uVc!KdZW?)fLE$3%mI9;28zF;aj)ABhe~9xYo7)^Y(rYhk>?XbP4ERBf(g7Fp{0m zzebu+WNFGX%0Lprqn!*r&H`p5fuG#{fZam)0JA+~Uj(=KuOZYx9%2SVc#U@_?g_Ha z0sBee%ja2px!~yg=a+uk`YGF28b<#WHP4AX5x3q2$gF1MPb*l0ff81-T`$(ZFKrcQRl!M$AoX_fCMc!kS(O#8vV2HaXgh9 zDW+qZucf1PG*3kHb9RnCsmr{KR-!y4Jy9Wae>Z_#X zSQpVYM;x?y3sRvP8GG*7tarlsmoB9IxqlE|Q?xqCR7aC+MsU;cKcFg88nTPbYa@mi z;S9h;LN1qvNVD^_L{2!ONxo&QbM$x{bYtm)>c;s9EuU%1Oj6WX7Hdy2PDNcr`YvA* ze7YQVK+Bh+7}YArZbEY{K6Zy6Z{=a}LV2`xD)#A0>`7>w`6pC#eQ1qy64JeiMl_&y zrYD&VUF2sZ5tGH{tFKW-6n1m98}`^X`Qlf~%C&ZvU5RspW%iD~@ij*lJ||sv?@d%q zur!EQXj~eaoh=fpbgQi@`92Q?#C@zOmn$)j>zLI@6t}2Quu;x=DDI9PEU%j_;`MS* z9{FN~1sQ~2{k`|ItcP1g+=GnsJ!ZW3K`hsx^o)|lQIm?o>Q^51K2uhpOuPMyarJ}^ zf~Vt_$Z7kw`?s&_!fQf|XR^I?C`m`?-aPjf^@JjO-@Id0iLI2}E8dv#vATKfEiO8I z(LBC*Bqg@wkIHSkKEzC`a1SMJhQ+IseY-@s`HAGg#P&vmw425ea%+CGnw|y6y~JIS^U_@oN3 zyH0>hmAL`RBLO1lFR*{0%Omhz&=l|`Q5ZA~%LgYCB}@{8m%jk*!ZdzQ5V$BpA{^kE z1DHzKefT<7<*t_^@&%XglOD(GNZ`6aEA$KZt+>W7@-&B+QiS;&duNxj1+G|gG_Pt?V zigNDp(vV|IhFDd_wmX&1zmvW9hMWI{6mmuDwRYz{iYO|g=kN(PGsfTHmg|mp+DD5% zPGl|ND5HmNx6jbbXe#Nj6STpOHfnXUzr<1Bl535PMMS&e z-Y?)0rjA$0b&)M4V&8hE@Ij^+{9FZc5$Z{ST z%S`fsvq105TjY4;)}6b+mTk|0O9S06|7mgXTNh7Y%)R#x?CwReF6#aJNbngKEil!u zllFt@Kb!=J6!4$_Vshbijw|0!YrAmn{+wTJFuxc6=lpX0k5vH^;|xw6pp6px18xn5 zD7u_W%h`#P9#XLI=zIU|bfa3s$nK`XT-x)Hcl_51KkMJqr@}@641)2aYC3Ku#n3}6 zlW<8L&W-5;JmEQK1^Ir>)2%=1?{ZLYzi#-0$(w~Zaj_gytf4Y0>{y81&^@S*Zu3Q%FBU8E@b}0Rx`f_&>Flh7xm_|aoi!^NxWn=7oI9Zt*RLM&d0^AV?~O*i zFQ2oaQs3pe^i8!^3 z6l(U*s&-+&^Nwi9!U&GM1@w-u)u!O)O2YZtKZvVv*SbxntaG-w`Deq(3m=;X!;z}y zPuYDH+pxP!8=LkPoG)z~P1?iP$MBvjEjbRi?Gu(4%2p;?G8t%6F7ItAY-bO(A-!*=p__ZFv5r(ZT%j$8n!CYWda)w=n%|B7-t(tSG8HS#xh3?pCU9_Ik`6Ia(I+b0j+p726aZ>YLEV8xljeZX$Ftm2p>7YUO 
z(d-lAs6l^F$F^9~-Xdc5V&+|`Pk4!o#;U)jpPY=La<|Gu!(`lXS+hGOs#GZ{{UNgn zxh}5@IP+^~rlFSeFJD1e`hfpmUV(R^nwdpMInO-2>9NHMEz`(1If}R2H}=;aohWg2 z%)Gf~t+$Wx93k`Zq+G^q{6_DQ%sRKvA+(&|}b-Sd^IXr=CR~?|a#; zN-TU;IeTLCxO(kkR|WOVs86O}VQFVu@78W%R=kWImD*enH_io?tM&BrnY8_# zc;TBzZ+^|Mg#0sfPrl_Hd3Zz@r9wtTplpD;HH!Cp;_*{Of*YTdZR6_@>3&X9eruH} z)je9v+C>_sxtJW;cH;|%YMlmyMum;}4*K61-X;54r>8E#H?29!Nz5_{WlmEuE0pLJ z3AFbTV6I*V0LMu15(4c+1lTRPn9m@j0UuO?4UvICMq}C$RQ%pqtSFwg0)6tIZ><HNssUqCo$eG_$~MVp0*tSPrC1*8aU$?YC_yt=(8x|9w@D5V~Trl zoMIZ0E$~u?BL?~&RzxuvfY;Ia1F&6DKNTYi`(`lWm@f~02Cz|1U3z$S#OC(4R^k~@ zK8WC9F&q6L1b?runE0 zrEC6RV!rRwkln0|@YunPF4@p1Bo_M!w9DAvkFQ0pL@_lu!($3B3qqbi_ODRA>x2f6 z66hj$69bu-r3VF9bo5E#qqR@43~2o$fQSCYgHdzZ_i7nr+97JR(R9Y`<%NQMn=Xt< z$QA^8f3Q>0TrBSz$dPV}V86$^Jf}s?TuuWPCXVS@H_$MvrnDWxTS(2UuIW7uvC98? z@hHlLdP`G0K4YwiB}o?3f;m&M{QbWPf>&6b3e*~7K<@ssJ}|x^oUT=D?PhbS-@>)q z;mbpj+TnS+7mKL}49b(Afv#vm@;wDvT3(Xja*r6n#yD5X;!sb`O|vTX0iXPXcgwo` zaiZ?#QldUyUgwP;>lYVEKWD{oIx+Qta&HB+ZQIq>!h9w4*| zLTPz@(+Q^20nyD)ZZm8|ck3@C$4!4DnT}l8kd})lh>nj!*E#&-%Sn8;@B}UE!o{Z9 zeq7|!`xpzlcl`OUyho_$L+#TS5335Pe!#d=XpOld(`EO~bKGCBND4*AmcDd4F5P>x zywekSaqq3RVh8mtOIKbl`;Aq+g>AIenDwlvPhNOm z&7tHvxpxnBv{c-ohc+GU>X~u-!%IxUWlWlhD;s#C6XWk!d@nf)SD6M?arv!~X>fu5 z^z$M}9x>!+8Ab4z*~c@0qcW>vnsy@BSXYH>yid68sU#69)F^ei_yE4AS(YRI-&jaz zjH9T%oA|qcXN)J61*Mzj$vN{q{-Sc>(s=FBG&Fb|S5-NR zSE;KXm=Ev7p~)9e4FsE(sfs-7Z_&;B3sLRkh_JRjVr2Rd>Y)Qv2o{Hfc*|(BTBxtq zl!~n({J8znSW9jhUsRS&6lL@Nq6yqK-6YRM?qUjirKEo8S^y)xu+&J`4ZXCM8E~d=aAc^4O#-WW-bphNWs3MRD~l0lngixe5$bY+ZQV&c@y5Tua9n&Zpuxw32>f z%Xqn7OJm}3SzlSb^sKsrDi-GFTuTj7j4$48gvo^thHj^fFfU6qtWbe1T{BbP0UV%t_tcy!4YgZ@0+9Z7|Re3miDuGdyvTU*v?ZN$^h zMTV6_0!B1qgnxo>2d~Gu_#(bZ)?>TAWfdpkZUG@75!vfmnPI8-E2LaC+H1g(@^sB% zV0>wKvSq#7aAz&ctIxY_48B?xIz!}I=3E@!TGb}qpb^7g(Q}i$eJElWUR0Rf=sIWL zRr;we@-;v*)UD0FrH!N&u$)Gk2m&g#uvQI z9_J^PGOIse0_sJyt#z(yH1-zJ;@bPYpR8;*Ic(Q$&o8UAZTa>yU!Ox5d&W3>CS+_jyWznUt_G+mAx;jP8#C zD%(uV97YF-Hs@J?Bc8z-H(SO2*&k>2<-bdFU;Z~~?&wcx?(baaU+kgRp!E_xsf*iI 
zzN!XC>>?-jJkol1{~li_h(`IN5^%aGsU!9i?b1*4vwGiUD*dT1Zp*a%^8idpo?*Hr8VouG}?3n77k8WcuCw;>tE+ z(&+u%3usa`E56a)_{8VgjI;A%V_L0{9k5);JBU|vhp+mjoxL%OZ8XqSv3+^%vjY)3KFVk%ri_8MB1Q1pv~PS#bsx`B zYM-&LdU`P)w;i^qukJqHpCVM>a^)N}Bng*Xg3+1En8w^d$@VC%qJ5evWhPOV4W5^Y z449W0m=xJdS;S|Ru3xh)2VJ-1dyi4&-#TU9qZ}Y5cv)Q~rJgDoG`Lu>n|H@X)FST7 z2rm8+rSNKKX2?S3mQ6 zW2P|fW_Di5tkuB*f9I%YB_;(+X4%f5?6Jr=O=Ppl*Z5J1al&!U$1NI7kFxV*75vg& z;%+CXI5-R(F{6l_88{#BZdN8?E~Q|74QEp+;J^wkNWK61ljVqmUZD7nR=oY!4ePJz z545(=>a%>u-XA}6O-||U9sSdFZ!PrQu?T|0r#b&|FQZKj)$x(lhJbMpe%wN=Ixb_G z3z)0h1Hrz}9wBRz=0}M=_|5I%xUpRhjHbCJ!)U;7!a%vC=>lpU@vU>+c@q?Kg5tj` zj44bIC}CUTSRPv^&VdR< zSB6JV$T|l@+RY!w7-3R=DNVYww+FkX>9?&xES9NpKte(+*fQ}EqhWdX38!jKhVfxQ z4Mah$QFl^?a&zEBzHdLc0_gZ9iBTdAIa>8{4?wctb}|MaIMo`1r)PEZ?0$pL__Po^ z3##05{XPJn3tzJLQ-bFxe$2VRC{A|Fjz9VyI`(Hvtu@zu^lk>&E6<`)wq4l6cvs>x zcy$ti?akX8%y z=gtFf-%xk2*UyKd2(3q-(L5|TWFAfWjMg(NSJRSc>nXDZ(=rWHoJmvnZNEH=0!I_G z#et%;F6yN0e8KN|(&a2KNi%1{w~Y>CoYtkD8XX7R8M!n^ooSEKnj&MhYvgil3&oo8?*c5@CLr{Nhj;GHgthfrnR3FZ2g`mZ28!bQlpJ=3Kl)JBD5 zX-+Dy@>Vgre&;QX6ZTua@sQTlk|mo?2EK$?Ay816RrsxH4Sf*`&{F%IP85Cc?vtwI zg+R#?IpM>~v=;_etOxLY;U$_rIR0A4PeQGcK!P8h5G zNjY40Foxg^is6(B%c#}oJMS&cLJzfN_z#$6pESsktE&!4$v(eT@}=ln{Fy5EC9`5M zJAARo-$0+QR%ud*z9Y58wmm@emZ+H9;39U6f8y(;E1s5lHTd_sxwLZEAYA23s`#a@ zw{Dzs_A(Y$3Q7MSQ+CDP3^}<+hxoVL7!rnb7at(3VZBH*ISy4o%x%vg#%N?a@ zI7z6q*3EjBrs%rly4Yx-aw7l9JW)YsRqBk|&5-}o-g}2NwQOPIC<>w=N|zRu4$^y# zihzIw3B5){dheZhM0!)IQlK z9B~5zFPF1IbD8U}M`XJ>?CdRdx*Xlmacfo?JNV*UdgNxkBk?Hyg#mpva^khgD26B_2Ou0<(nQ!qijRGg=NQ#~VAcXq>F z0j=347U5TvHqvY>YK$V!N==6wZ>}OF{royp@}|*N<{I=gC^9~`;U%rQim^)P+6h7K z>z0(pz@aEH@bZrBw3+J>=1@V>ShZ+c<9_02Q})iHnId45v_vB_*$mCLu`EejA0;~M zpyIp|XWH=Q*vvuYXafhhO&K2z*F4Y1@PF8Bx)srebo1>H0^eLFb@ShBiHo00Lg*^# zLMOazt@Gw4Ehu#oZ^AZWqt=d(b*f^F?Q@kq(;AEY3y{mX+ZzKSSEE8_PHwMxUCj&q zutWu2R${G}$(50Xs#NT%dR^8+J+-TFq?Ti7}WA{bQ@@)n|#1O+~<3X-j|30|od7GYzao^6wHu8N;8j+rf* z-fzliM)oH&3WG>028n%!VWju4cjwWLAFEZA&KsB7D~7n8=C9AYx!#{J_0>ToH%g^v zxg`UKs_qEByWJGusxMg^D=it#dV1ZhgX37nl7O&5u5#_S}3CN 
z_e-o^OnLg2!V~e)bydyGl@h0gJLx&%^W8bj>GpO*`W9sy6y+9TN=LSI8q34eQETm~z{WkPorX0G0_YStz(20BGBr zZt0I3>PEw}Q{9s1HJYoETbCe6ji_{Kvods^C7sxA>nZHji2Uc3(h=)|?Spd*tf;^V z;WJ37$fxH<=7x(9dAV4p6ICUJf{fYRTtTS1UREaaOgd#BNu2Pb2vB z+!Zut&Ya5kl~lEcy*QBHa3IuH)u-;B#UQt|Gw6tbHdHZkf*v%=5IHWcT)9yPYhTeq zFQCXp7ez+`Dsm0fhNIW_$HQnP-1R=}2IjMM66>3H3&bpg3O+u|3`xH(-zmt_eU&%C zJ`AD*%{S1SDOMmGH5M2i&d}dA8ywt*TY(ACWz+5BFyaPogtsbe_>d0X>Y+bg03;Y8 zE9Qi&8qt=iZZPGhrc#vpTsCtQBH|`S7wMc-l$~ilF*K#aRqdWH6hjnK5!aw7ny#)M zP9LK^`Z^C9l2WdgYm(o4lSK)S-F*`9>~@36BHze4GVgr-%=29{~~KtwR5WMgj)JoR~KKAZJj2nbElsz9(z?Qq$KeGBYN(bl#&lxv+$j#2!xo^zm zd1`f!q#6`nx0H=uVp9N4dV#3Zil$}vtkN7T9W@PGIdf7fj7rhE9c#nM6|*c1sK+qO z$pldk>X5}@65ig6-2Qc;du|C`P4p8)t3ggGX>LVFA{E@_=HoguE@|dP+gx|4;a07D zubKSYmB#I2*oKa-C+2FlD}+-hjkT*LZAwSxA9GR-Ow^h6o^e4#muY3LcuJX#`oXO7 z*elG~zXW1;!XE1+o~$#kBqnmdNb1wqJi?%PGuJgdM%J1nQj|vzbmwoH?MoK>3Pfys zlS1glA~&$TSQ2HLwZPhHfXb7Qxoy5tPO=Bv8qaN=b0aZP}i^Ws2;fm+-g<7 zz5P()U}<|g+B@qm9nT)X=IR|p8?mW9uKD_|R;exdRkTTNzd=PU3%8*$x2deRWrrw7 zr$mfiyxO$DcC+QX^b|Q^pohs9>u$2NG=W}mU+!P*4M_l!qEfWW%j-WM5?~AwaC~iP zczcV{55|O7$d}%w#H(Qp9T)X_}Cep@HGW&ci%(Hfd1&=rMJ3l~uh% zyMs+O2Q{_P&F3r%F`|wS1@A6j*{@``NVBkiYIE;^`!&20|F6(&U z>#6m4YQcs_g%qXOC5LI4dF;d1F4Z9kO~8d_)$JiDvRg5$DhGJE+C2Q<6UH6YeSM?1 z9+uW~$hZNQgF$<|{LSFWUREtWr9CtExYEMR)w0vha~PHn%^Mjofp3iY!n z&sc)WKjd{5ow87V-|4j2ZKJEe4YlcbbxQ`duq{e||zVgKW^zl<5M5uyt`wR(Rs19pheC);{hl<$d zI8!yyo}OJ#DQEdeB-QnOBCOcdMS+fdno)8Zb)y3FxL((gkLH|QSvtX#80AHpYnxz8Qv%A=l=BlSyz)ke(wU>V2;CMTtA{JL)V#%;wU~0?- zWN%9{B^tf}C&b{OGqcd1tx`mv59W!`jxr^_A9eOR%(Ki+WhyiM64^y3FI~A6B5pkS z?)=wHJ&DQT-MsDaqdNoOLs%dd0_6GYatv-wQaSn z{(?wgci_`l;zgH4(ve#12&$f~ue|uGNbkKKJCVdL-j3xOH^UMw1!!SVJ47`6dPb^3 zG-=}3RD%ADgo+n~ixI@7U-T%cUuYGWFvrpm$x-U4TTaAhDrnK-r0$1? 
zW1oN*bDK>gPC1$YTKv0PaY+q`A-v)CLx>D;?xq9ogPtyIon72HnL8Lu32>$-{M_2- z05f9(yP=gNtclKDsILcCuGhWlIxn`z?eb2y(1;(1alhPA9c6FpoV`&+;d&I+a?hd7 z*_te;C0ZSDP?9a2q<_?dCdC8Bfh^lP;}YXY)(C_%D|x*XUev$@_zu2X9RY;k!R=(( zsgy0k@hS57jrnVfzHQWp$3YnIk-n-*d4k-;A%~>6@~EKhwFKRFHqYuBN{0nD75%lV zdabQW%HGY%05M7k3b(4n`O}Dwu+dl9;$P9ZY!8H)Zn4N`MMYWm*IQ_wZngr&=#-DP zOmC73*yO!e$^8YS+*~%g?Og1i>z;+yg(5W)_bfdc+jZc6&Iug-V9g;bYa3hUii*lH zPG`FN7I<@4RvMB|_jVB4++5j7)5z+Ms(3Hr4i{$zARwq!ib+&01i~MsRy3odQSVZw z%Lv-6z6u|bkJ?E&!-725uu^A~5}XM57vC0Fm#KgNFWZH4@YB)&gNhMu)sDH_!ql=U z2yuX7JbN7ZKauD7yU<4DtBj>S~w?A9QVx@f_i{V@?bvTO~1 zj^Jdu6dB#@xGTTc^}c6EffrgA3WaTccYOeNcm0L~Z`++cJH;HAHFd$$F&^h}PLj$h z<5oVLXcHx6wULUXyL-nfDB0}%S^3O0O^;=4+zT6LAUBGdLvfyDvzoUevZ226)?Gdt zGtS%9Jq98RJ*Vjf6;YZ7S zSJ3ozF{Ht~$1UE5H!+nt?Q7bU(q=9YfjkJ?yWeyLKKU=}`_<|%*7x*ZTHm!^%41Uf zDBGncQYN!0v{HihFMW~)`97I6aa(mln z9v54ho(#%O@U=JdlMvGww73dlWLw40Ld5CBR-fwgwZrtjm`B3;v$geR3!A4{Dobic z57`P9V&#ITtYuHNpu+XFd@fCNJ|vhn>))>gt<{Nd{Eio4}TakHb382int*|ln+(Q)@OuR0-SY_d(3XezR)dV5qtV{--TO_vJgAQhD( zTa2Y8C8brb26}o1l}f8B$}1{^467?%RIbp0(# zyu;weXN)isj_?mdrqSUlHw-Z%i_(!@Em(21{u1b#-ul~h%Z27!XmOQ_5oglHPn>C5 zh496jC}BTA4l~BuD$24+y?!&StZB=rJ(Y*rUf&~!brq;@Iy7LyP9&a-Hbt|;a1|N@ zGp6Cv^lvxZVEkG;qZ@L_x5JV9uundz{ zK>LGwDXnftda(JK9(L#(4t_?xazsjb?}nJn*pz`K2$*1ud%WT}4mrN+1)- zi=(k*q>0PSCe}qLHd!-ZTk75nXJug`3zS#3blPph&#SQ8)QexEkdkAxU&N(~fhU#a zMUs`QuBfy?>P`n25M3auE#o-7c&P6o8mY{JU{~9i%rUM#q^6?o)~{6XHzb#+Dqo3C z5p-Yfu4d^jod4hjIZFqza3v-#kc8S<)@MJG(7Vn!R2X;hwX> zZEKvRrF(zkv?aCeVWh*Ov8G~7Lf&?9o0-n>Y1Iw$*#66N+2m<+$~4n!I^-4kjQy*+YaaJV zc0=SxhUX{=Ukv5554Fl@#8+(Oj;b>jlA9~0yq{IgN}9_{Bptq42@EeLh(I!Lor-Ii z@D&l6vtcE<*k0{Xi(dEsLQ{@>OXJC;ESmQryg13(MWL;B; z_59nfVmnGzht}0;vm2+bU@IW(>iwSkeECr`mt!+xB8*0=w&JtsHmr-DkHfx`7d}dX z3v4mSe_Grs*WWS<^6~)#l*6N=<|*Z)E+9F`_!`|6FFjOx*Akdoku#-D*%y0l2r&OZ zG)){ErFGKhambNH;n&vAFluz+@QSXRAf3*j0*nZ4Z^O`pf>fujKet3TbAXkk9sm(J zsz)d1HTX#=fRm!ckft2H7>#rA5|4%ppe}DkNM#}-Xxy?@V)xsd1W_9Kj*Qhh#X(-G`4&@inU43O=3q(;{?aG?+I=Nwu{7Fp%r0y~Ao>mq)vTj9$A-K1 
z4oQ_^q$w4xSV+5(LqQwC1Bl)m=TZ@cQFFF|!B{TG2!VpFqOHN@Z#Y=Gk+T;|AY_g$ z82JI&g7s)V5j@WX78k2+yQhR*Kpe*pyLL%)0TxJ9efZ>ReKRH$2tp~x(%NFgQ;t3X zVIp845XE_?l111;~6z0r$Y{5dz;NCA=ulRf!DBxJYb2gtEcl= zpAaCBWc9oZA2YOB++~1lf* zo9UI4&m=C^?0gKjcTv z1Dl?!X!SX)gr>u{`V8VUnvo5?0ly{RmQ{&rSn6%3@CD z=kK$3a&L^~0EM;>uPX2#Iitwg+mhM3*&MnZnV6&M>SwTny*!6J?`>eO^LsWH?L?kP zYT_;9L&L5*t@cbQb;WbXR3zwBL_8|Ot@*wg>({|0jm#r>*gS3OOdG(*h9-fh(C-oc zhl40M7_faFxZO>N!$amz7|?g0!m!U+L9Mh#`qCk8$(-9ME9c@%DB?qji8IUd60j8J zxNC9CeHG_ixzt+~&W-k|l5*TsK4}Iu?|Pg7F4Jx-R;mFmJ(7Byxs{SKdmOUKQGe$5 z{Wxmd{iQ>2aq3bd*m#+H^DDTJYquD(10?S(6=jfd1f&KTXGflu>#5#wzo z#E-92L`m*nNbR$2K^4sJrz7}$X9=3PB+~4!IeKbv{fMfG^%L?-uoH@mV@2SOMwo|P zu0jEgd26O&{yClGLK&=M~IKr zR|i{a2{c~sYV(-3n5vxlY#vLg!2+qPk5EaT8u!)=^kG#@o^kFzkM zDh>(nnXC=zSoS>&y>6W?_(t<}abW!<9%*g~tT`dLg#h`)b5@`4?3sKir3`=T!s6f* zJvhR3p2&x$3$>{hKA}Ik~)E~Jm+eYMjj^O zt!G6l>XMj*%YKccwO`gj_JPO48Dhl(Bt=YA`g!N4oVM}ufIGgn^!5?XX1N;ae7@lj zBU+8mnzVVZw<-NzrSX@`aZ1?|gm!MmuTvP8)I}G^xQ(T>I|WIj=`y0L@CK$DJL`yp7()lsZiE`<`nq0$NyL=kY-^CYVRu7Jlqq zSoq0VnbfCGj8RN~ATtL;QB^f=OWmtfcWvQhJNrGiO+~kU5odZlji;hFv}aSHc9bib z=9<0+c&j($U2(Fwt38UigP@ibH9;>-?#_Lf9>`Szom?uK4uQ(+*PM0hKK*{cvm-WW z22iF%BE+y${C$ZtAl3j4MyeipqmJ8m$#mU7AE`n9`2HD9D77g_T$Z=*nPSJ(^)PE| zTSrS1Zl(BNNv*Kc*)QRy((GA1*&srbIC{>6k4Jq6jj9l`e=v{5Q70qV= z>K!*_dlq|Z!C{8!o8wB0Rem{NUe#y~oTUVGdX`|S zt)1KGAQX@nsq)mI*Osz{?!oqmDr{6jv#X15iPnHG8AR!yu6fP(h_~x$EagpTWY#E0 zgQVPcPHRSjjtA}>B{dPox-jmp}lBjV*lL0Rs@GMWvFweZ!xVBwX?B`=+n~< z@u5O79odXENFlvvC%J`oH@4HKKc(dOqw*vW6swO4cwOP`2(W4*&ygE?ycAG65zAZfFZ9i;&Esbr2i1dE z(5Y#Ux8XM&;T2%k1y}8l8=VOIcxG3l_k$(l;t=2SA+eqjn?&cC$2?-6!F!YS2lN>r z<^2Ko^htLZphWO@_xC8rL7wNp+2rDIe_&%00a#%2<*~y?1tQ_cz{!yx<~Kb2rr&KB z6j)b^yBFCMF>(Yh`U{x89lZLobRCxDRvoicu1q8{&E0GFF4P5ltL@7v)e~lFlET1t zjU=#34ExiD4T!Q?htOPa>LD#@M5-U6&-Z3423i^v?<*?qf|fF-w(1PnbU_v7PQnDiNuecaXj>A*5? 
z_y(BpR1kBXF?g{NZG48m!Q83)ePc??hO&BOa*=H&%EfoZ07x`G%nmx34@hQctbh@5 zMIX;y8^-7tnos#8T~ERHuk3Zd9n~369L%+#7{6&ii!P;qQG*{88JSB1H#DJsSf0)P zSve|x%oxJw;Na-nT$Us-#b%#^miO2&{6KS9LiKx5l%Lip^3=iW-el*ZWc2 zZPbEU0jauZLu-t1k=ZGE5@0OFwHn+((r0Jrt;bSJcqoMxRWHnwZTisHOMyy;Yy^5a z-Z3HDv7pA0gUx)srN!c4lmmPNL@wWBlerpcLK>M;;b+#Zpf-~brehz!>Hsy*3$}nN z!Hc|rJ}N^2Ic%!Ih`ZKBPE5gMqxLr){m{BVR=eur(OVptf zi4j35R3t0&mC5kL2P=9$%up0_V1l6ySrR0unB?1~k3wCvCMIG(PF zV#4TfR%IiUY@bv{Zg7%5ohWuDO;^f8N2rdZSC4z?BKqa@l-p9sXA3{lD$>X+NYq}J zWYwOJB^a@|8C4$tN^hnl6xhg10pMwiOKb z-ui=b&p{ZGrO=d)InuuJ0ph{3t#ZQs?&tRJ{ndk0U#GJtiY7wa>MAXvPkm$AGp|6c z=cg6oBN~ks8Wa$TA>=LY10b$JK8!3^@hAf%oRUVtc|-0@5pR>VHF4%^V{py}K%8@p zo-|IUWWtdO<`TP;E(pEVK6KI&-e8fn0Q)(2=a>z+kJ5@6|NC-x9Yg6gga2Ecmx(CQ z8P$AZHNOUIEdC#q8`%YPBT5cSaEGN`IS78E>9!22W?g=%mVgIaqLd z+kQ)GxDNAhLAw!EtsBA4AlAvmE35b6eDf}9SM<}KRWVEE+vTzK<#)6~cuD36j#g(w z|4izqxgt68Y(2aXjVMiA0haR(MtJKBJ;F9`TLp2FMfU!)v>`{2m< z06-HHqaDt>&M5&N>$Nz1Uk1p^`nT)rzQTd)fw-;+;CgH|UfoJ z98?1vSLB}py7~i<4CeA_9y7QP!Y~6x>3swYdTwQ4aH8Nye_O1XIlR4VHRS-ZoPt&b za>!!vF^W}k-*BWcUEeDL-vcr`0=R%#S_TPUygrW^QUw6t41(8^fD>c$nu{@~#OOGy zC~Pp{_lmCp6`!kb+SM#d1(vu5DcBHTzE~uj$9x8&#DXGzh=Gw=K)?t4cd;;Fh5=H> z0Rzth{C#=bJCbrz9P*#4ul{>;{ZxIRxekD{Qel)p1DgO1?4N>p;{T(80j2Br4~hC0 zApk|g_xmlX`vWkd9&jM*^Y?p5D`J_!65q2~?g1CX1EIsf;#t2;`4qGQMk`4JE5ekV zlrr0`ZOW|{WaRu}uv;>WvQOecq1&rn?I>k^PI_+3R?xfidzAdOIB<{9V; zvJ>>0PeMxGuYJ5@I2Br{Y^%fj`s=1r&rE5Z0!NG`o6u;cQ${){i(`2(>+1JI`+gks z{r~5$Z<9|IaW9eK;Ne`s!NIwMV`nN|PKS?!vqpyl7$rD3c6LtOPG-i|#=rmM<#MyO z+S2Z~rI8_PWZASLbx0cHCU!CzHndM1pK{B=6q2a~1Kb zorv+0ngtWra(fC1>#Mtmu!a7<9S;_IVF&@})yiiNQ}}MbXY(;H$n$W@{$ZnZ-(^Gd z-eOqM2|WatB2vKO#1*emakGhqQ9=5VcCaq?(zS0>E7P|6GBkXh1V{I!$Z2wxyPzgC zii&q%8ZvT~6J{fA+TC6;O6-}jeOAl z1vd+;+zqRP@y+drrk7v%3~cO-YFHA{z=oRnj2z2nZHk2)bwi;3_k>zBo z<(rn`fd!iFLWpVqk2n1bo|^H1Q1Js^ru7y=oy!#yy_IoqZl;Sv&|)7aEJeM7UsdZZQ6q_&lrbR-Eu9nlkQhfb?1RF9DnuF>)2*+T8ppug>p2_+T#^Q)u_2Vw|ZYI zr^2rCTJ@gfVL08ht%XhcI~2wBj0;an-X84SEDj5dio9@3XHtNC>JCA>F@o;~!rH#v 
zbHb8;W6y&IH1#;=>La>NVxCH~hVV`jCaxIV=+Eqn!f!+MWsg`Sau9V7A7(H-c|So^ zdV}lzG%cp^(m9?JvFjJtb;i=SdXczs0||kp><$TZPS2xXa9O2f*pr`yi@YS0nWK;5 zLUWaKJd%{AeJUnA<4RD~>g^(2BYn3w!~9i_u+dK7vU6VnW?Omfis7uqhhp7ru{-Mt zbB%VpbBK0r3Gr;PBZ--HsaZn_#&1}m#<0$HdjFnx`uTHC>%#g|-v(XR-4DG`jqX$7 zsjB&nQIk%aoNj!s_)>SHkCtB}5+2&NUcUFxkQgr}QOXCg_b9dlsjANRMaA6wvO6# zwe-T^gT<Ci!OsZS(LJ9N(`x5zv=nBQ(zS%Kz2wL=2gZ(i@RgDJ`h zZf4cYGsl1(WjOKeLP|*Tu3WO~?|qxip1I7=WrU~C8Y2IBsbpt!?!x3n%}ONor+{9Q zb8uI{23*xqHh{}ggs{h(#69t&!(F3r6nzC#lD8>q{;FbxioBzYSUUSkaNj%0_JDWS z)rj>}CBGa$f9yP~v?uhzK6`)AXMn?uDpyjWa;^ZP!IhMS2Sl`mu$kCxj2b*#wrzo% zKd-G*)&w&YcZ3L*Z3KGo&N2xG=ao=2oA9S>2r6m4xKLD3nPt@P*3h6sg|Cu$NQKvX z{shnULGmg7IJgzpLP{^^vh&BI)M^hTT`O|+fd&k!PsI0F@bSLol{0#Nvqx7#4Dq7D4Uo0iLV!Bi<5wkKObV!3xYz z2;M6*bx>%h$GlUhfZBC9o0VL4Y#e8h=xf|z<^1Kz64vqYg&$M- z#8$g+uMk!U_zkNMfGN0L83sOI(+`|7`*cF_2CQUj%PgeJrqwJii$l%-c0co2DCBOm zqS>YEp_i2^hihcruIiJMLkb9La?=fIvsDHEnHDEMrQlWU-lrXHC3V+h z5AhT@0|{*65M(7`rOg!(P9OB)RV30+8s|hWACYRglzwLddwonD__nkOlcP&jlz zBrW}>0!j5f*80Xil4_qGlg?ZP^Q!I>{m5|DfvUwALSKoE&1$@hW5h!#so;iN{gFXm ztL?=H3horOWkk(aA9oL#?RMW}dV8YO*E^FZRU1Bo-2d?@emM_|C;kK<0(2B zKDRPfb#k{d{_z|M`{b{`;-R{~*cjw$hV|x@R$(M-z0Kjeae+Ph&iZ|`bdsf6$2H5Y$#Ry%T1izHu2(k@{GfyTuq{I!|soDJ&ue-n-r z%DyZjlI{4~%Jtc+DZk+-r_&!oKMWL`M0iLKVt=Dj3ABH8X_*8-k1!y@-PKC3OGg3@ zoVL!OcKVvCd7D%E>UmL{HwvIg{h1KweKJ}i?vHW_%+o?49Pbv)t{IU8lnhK!GZO_t zLkWsn4@ljJg7%ZA!{f2qpCv6lcc?ekXvb2%EfCDzG9Fu?NFrH_CpC9Ak>!w*VRGBq zeAIRE*~=cmcplqZ34v-N1RU>7@~?KPylcs`q1Kuhvv|*V$AARW_6g5}cDDVC85ZWI z*_W#X#@cwB_Wb9bRm@VJMF}+gt4laIH*l_8cwuYwqusd;?d-m*hudkFF0g*U0Upo) z-LDL_mhcgt8;yXP)ZqGPnBbC2k)v1qR5PERl_qy`*=kGPXz@#|bt@OF?d^FumMSVk zURBHfW#U|edva(>S619{46`8mLxDDPdAGowXRl*=wqm=jeY}r^?%Z);sI9__?Z$om zHBu5T%Wq4E&bfW#o?$B=n~_t908!fO-~fUwB9p6|9Q3>g^`jJiBzDFf6IY)+8O$CD zA=_b~BG~IAx*S!6NDKd_qoy--K)JJrA3($(KmEzMi2d2FnFrw-%ck)&O4WFmz$RA+ zM+?JLi;#jFjn&O=|7*r+L`>aV!pQd>fmueJrxz{=j}BtkTypcI&%u482 z;F#F(d|ds?QY%%nU_lHgmi|f|-dx{<&KTm*aeEct!11ULO-qb@qTxO*%cYqCs&Qi!Jx?adt*cH$i3ECb7>Rf`8(+B4V&XHF5Kki5(V$Y*){r0WU 
zJaw9{%HBe@D9ztabph4R(jW@*9C4Wx<#+L&si-!bS$Q_N{x#Ip=8aQl1^%nq^YOtR zjqn8gCxmOhe6ZHS-fePVgY_de`D&))+bEx-6NU#9ZUY9wG8#<6;%^#P1|K;F?bNaO z^{+&+`D`1n*P0;Dx_r-pA@lE(tpMBTtQ9a+@_uNtKMf`$TLN}TW3mMI7B3}eig}Ex7;fCqfSH!e1oX|GC+kPpUMrlyBda!w-oG?i2 zOuqGiCiTT{k*u;89%t3OnZ77l9Jfezxf9xLp2sot(^VOdYv@dC$xB+}!5tEhXD2p6Elo_=?T&d#*fB z#lA#Cd0DARtkWN|Y^&7<^$z=sDD;UPNgsJ0C$_gR-Z2)BenZcm*|~e-4X2M$$P`8% z?|A2CB%kYOjit07KCwt)@Ar_-CDG0HHy>1vNtnzTS0C^oKWsx<@SGJTY0~u`XsOqq z@ZBQLQy@xhTGG0dx@e^}#=Nv%a8=}nShHGQ4{>MW;1x+p$uZt$^@;UYu!A=%(102% z!3!@0adK$-o-Vw{Il~jRvv?=Asot}}pnZrV6WC|3lrSbJ!NRb?#p7k*m05I>Fh!_4 z(XM*4nfTcqH!%tRL%G(CIz^uJqL*y>^tcC(>$UhElr!ooXx+@jpy3xUhFtX$&boQ+D-(c4cD4 zDZeZtK_A!qL=Hiek7lCyR-3=R)hG2Z`3x~RC}VeEpsc+)%7(o4p?l#KjG#BrN4JRe zl2yY1L58}r;^HPDS?}U9t6`g^3Pl{mo4|sSKWd>fP$V;G=aWwQLh<;;`Q3B)|08${E(q=BQjx#tq~-6>}*FHA^{_vw=zp*mv@ISUc#cjyG4*p*wJE7esbJ2~4u8&9Jug8g|Q z78Dfkp%+pZ)j~flMPGfW{*`#nGloIy8pd7sW#z{=8@-pGe83HF(agS{sZqjuqw412 zruM~w>F3WVI~nDcJt^OT_e3QeLOr`5pJZ0}DXb}&`}$k!zvp52NTqZut$b8YpsD^n zN#`@3?cZGRM$>kjj5kZD>V!G@=;)7B$2!5#tIj~-f^aAusXxYAyvu{xdJXh(e<`F%r!6T0{NNp)5fe^ zr{1tl@$`}e8O?`$(|WVFI`M_;oiAezLg#J3xC(tlc7(J4d{IM=4MOW!Bi65S{#q4t z*}BAZ66W$hb;gB@oB;j$Pg3@LNcZIsFtODW{=WYBLz931OIO+!&>uEVFg15OV@Lh( z6WvqAp8?XhUNI8^00>}ay!MmyRlx9sIT%|xa&vwE`KzFj9z#V7ARr6~-~LJPF|gqM zP7XU-nZGc0Be@;)$WVt+Uflt@QgxLVsC3f&SV75P;eRwDZr%W>kMd zRx)%1=8m6`fB)#OpTb|4dFa-kkpHsGB7a%tzeb*<{yWH`e+!xOr*>YX`8&wGf3G=y zhTfq4ThKgztJFU~!+pTL@~_eJmn|*)KOmpc{~ct${{b2I_TNF~{~wUA-T6Dn0{;Ut z>D|ABEcibl(=hxUlqvT za0--;8JJ3beirWl&g@@p_n#{JWAOiq7v4oOQUEmGKFP1O9hUy@ct861uW%Q`X+`va z;vs;?`*Y(pKK}1;Kc<&o;X2@#@Cfm6a83aA_A{K;lm8C)W1Rd7XI)+rCIdW+-~O$# z>CpcU_e0(P3Reh6AM&0eIV=;lgzPJKT?H z?squAp7`;)gTrc#|9&_Z2j}}YFv0)nrDF)N{QqR{oXni9pW7IkTX9*~8JqrEreA&R zf1ns%yYQpC{pxZ572#L6!XF4FB)=p41VH}*#6c_$z#eZ<1ze@j1^ZZ$wj`~m1|0I2W75{Y!{s+KE+JBb) b|GXf7s(2X~LV(HuzS04uc5wRw@b3Qu>q5mc literal 0 HcmV?d00001 diff --git a/docs/cache.md b/docs/cache.md new file mode 100644 index 00000000..38601edf --- /dev/null +++ b/docs/cache.md @@ -0,0 
+1,12 @@ +Cache Params +--- + +Status : core feature, unit tested and maintained. + +Cache can be used to store results of costly requests (reverse dns, geoip request ...). +The cache is an [LRU cache](https://github.com/isaacs/node-lru-cache). + +Params : +* ``cache_enabled``: enable or disable cache. Default value : true +* ``cache_size``: cache size (number of items). Default value : 10000 +* ``cache_ttl``: ttl of cached items, in seconds. Default value : 10800 (3h). \ No newline at end of file diff --git a/docs/common_params.md b/docs/common_params.md new file mode 100644 index 00000000..dceaaf5b --- /dev/null +++ b/docs/common_params.md @@ -0,0 +1,43 @@ +Common params for all output and filter plugins +--- + +Status : core feature, unit tested and maintained. + +#### In url format + +* ``only_type``: execute the filter / output plugin only on lines with specified type. Example: ``only_type=nginx`` +* ``only_field_exist_toto``: execute the filter / output plugin only on lines with a field ``toto``. You can specify it multiple times, all fields have to exist. +* ``only_field_equal_toto=aaa``: execute the filter / output plugin only on lines with a field ``toto``, with value ``aaa``. You can specify it multiple times, all fields have to exist and have the specified value. +* ``only_field_match_toto=aaa$``: execute the filter / output plugin only on lines with a field ``toto``, with value match the regular expression ``aaa$``. You can specify it multiple times, all fields have to exist and match the regular expression. + +#### In logstash config format + +As in logstash, you can have an [event dependent configuration](https://www.elastic.co/guide/en/logstash/current/event-dependent-configuration.html). + +Example 1: use statsd output only for a given type. 
+```` +output { + if [type] == nginx { + statsd { + host => localhost + port => 8125 + metric_type => increment + metric_key => nginx.request + } + } +} +```` + +As in logstash, you can use complex conditions: ``if [loglevel] == "ERROR" and [deployment] == "production" {`` + +You can use the following comparison operators: +* equality: ``==``, ``!=``, ``<``, ``>``, ``<=``, ``>=`` +* regexp: ``=~``, ``!~`` +* inclusion: ``in``, ``not in`` + +The supported boolean operators are: ``and``, ``or``, ``nand``, ``xor``. +The supported unary operators are: ``!``. + + +Conditions can be long and complex. You can use ``if``, ``elsif``, ``else``. Conditions can contain other expressions, you can negate expressions with !, and you can group them with parentheses (...). + diff --git a/docs/elastic_mapping.md b/docs/elastic_mapping.md new file mode 100644 index 00000000..2f194fa7 --- /dev/null +++ b/docs/elastic_mapping.md @@ -0,0 +1,20 @@ +Force fields mapping in ElasticSearch +--- + +If you have a custom field with an hashcode +- if the first hashcode of the day contains only digits, ElasticSearch will guess the field type and will choose integer. After that, it will fail to index the next values that contains letters. +- by default ElasticSearch will tokenize it like some real text instead of treating it like a blob, it won't impact tools like kibana but may prevent you from doing custom queries. + +For both cases you should add a `default-mapping.json` file in ElasticSearch config directory : + +```json +{ + "_default_": { + "properties": { + "my_hash_field": { + "type" : "string", + "index" : "not_analyzed" + } + } + } +} diff --git a/docs/filters/bunyan.md b/docs/filters/bunyan.md new file mode 100644 index 00000000..832c8e1c --- /dev/null +++ b/docs/filters/bunyan.md @@ -0,0 +1,18 @@ +Bunyan filter +--- + +Status : core plugin, unit tested and maintained. + +The bunyan filter parse the [bunyan log format](https://github.com/trentm/node-bunyan). 
+ +Example 1 : parse the logs with type toto, using the bunyan log format. +Config using url: ``filter://bunyan://?only_type=toto`` + +Config using logstash format: +```` +filter { + if [type] == 'toto' { + bunyan {} + } +} +```` diff --git a/docs/filters/compute_date_field.md b/docs/filters/compute_date_field.md new file mode 100644 index 00000000..a1ea90a3 --- /dev/null +++ b/docs/filters/compute_date_field.md @@ -0,0 +1,25 @@ +Compute date field filter +--- + +Status : core plugin, unit tested and maintained. + +The compute date field filter is used to compute a date field from the ``timestamp`` field, using [moment](http://momentjs.com/docs/#/parsing/string-format/) date format. + +Example 1: add a field named ``toto``, containing timestamp formatted with ``DD/MMMM/YYYY`` +Config using url: ``filter://compute_date_field://toto?date_format=DD/MMMM/YYYY`` + +Config using logstash format: + +```` +filter { + compute_date_field { + field => toto + date_format => 'DD/MMMM/YYYY' + } +} +```` + +Parameters: + +* ``field``: which field to work on. +* ``date_format``: date format string, using [moment](http://momentjs.com/docs/#/parsing/string-format/). diff --git a/docs/filters/compute_field.md b/docs/filters/compute_field.md new file mode 100644 index 00000000..50a94d72 --- /dev/null +++ b/docs/filters/compute_field.md @@ -0,0 +1,37 @@ +Compute field filter +--- + +Status : core plugin, unit tested and maintained. + +The compute field filter is used to add a new field to a line, with a fixed value, or with a value computed from other fields.
+ +Example 1: add a field named ``toto`` with value ``abc`` +Config using url: ``filter://compute_field://toto?value=abc`` + +Config using logstash format: +```` +filter { + compute_field { + field => toto + value => abc + } +} +```` + +Example 2: add a field named ``toto`` with value ``abcef``, if line contains a field ``titi`` with value ``ef`` +Config using url: ``filter://compute_field://toto?value=abc#{titi}`` + +Config using logstash format: +```` +filter { + compute_field { + field => toto + value => "abc#{titi}" + } +} +```` + +Parameters: + +* ``field``: which field to work on. +* ``value``: value to be placed in the given field. diff --git a/docs/filters/eval.md b/docs/filters/eval.md new file mode 100644 index 00000000..e93a5601 --- /dev/null +++ b/docs/filters/eval.md @@ -0,0 +1,37 @@ +Eval filter +--- + +Status : core plugin, unit tested and maintained. + +The eval filter is used to process a field with javascript code. + +Example 1: multiply the value of field ``delay`` by 1000. +Config using url: ``filter://eval://delay?operation=x*1000`` + +Config using logstash format: +```` +filter { + eval { + field => delay + operation => "x * 1000" + } +} +```` + +Example 2: add ``a`` character to the field ``toto``. +Config using url: ``filter://eval://toto?operation=x+%22a%22`` + +Config using logstash format: +```` +filter { + eval { + field => toto + operation => "x + 'a'" + } +} +```` +Parameters: + +* ``field``: which field to work on. +* ``operation``: javascript code to execute. The input field is in the ``x`` variable. +* ``target_field``: field to store the result. Default : source field. diff --git a/docs/filters/geoip.md b/docs/filters/geoip.md new file mode 100644 index 00000000..ec10a80d --- /dev/null +++ b/docs/filters/geoip.md @@ -0,0 +1,65 @@ +Geoip filter +--- + +Status : core plugin, unit tested and maintained. + +The geoip filter is used to perform a geoip lookup from a given field, and store the result into the current object.
+ +There are two mode : +* [node-geoip-lite](https://github.com/bluesmoon/node-geoip) plugin : the database is automatically fetch when you run npm install. To update the geoip database from maxmind, go to `node_modules/geoip-lite/` and execute `npm run-script updatedb`. This mode does not resolve ASN. +* [node-maxmind](https://github.com/runk/node-maxmind) plugin. You have to provide the ``maxmind_dir`` param to specify the directory in which you deployed the Geolite db files. This mode support ASN resolving. You can use the node module [maxmind-geolite-mirror](https://github.com/msimerson/maxmind-geolite-mirror) to fill up the geolite directory. The plugin needs two file in the maxmind directory : ``GeoIPCity.dat`` and ``GeoIPASNum.dat``. + +The reverse dns filter can be used before geop filter to resolve hostname. + +Example 1: will lookup for ``ip`` field in the geoip database, using node-geoip-lite. The resulting object will contains following fields: ``ip_geo_country``, ``ip_geo_region``, ``ip_geo_city``, ``ip_geo_lonlat``, filled with geoip lookup result. +Config using url: ``filter://geoip://ip`` + +Config using logstash format: +```` +filter { + geoip { + field => ip + } +} +```` + +Example 2: will lookup for ``http_remote_ip`` field in provided maxmind directory, using node-maxmin. The resulting object will contains following fields: ``http_remote_ip_geo_country``, ``http_remote_ip_geo_region``, ``http_remote_ip_geo_city``, ``http_remote_ip_geo_lonlat``, ``http_remote_ip_geo_asn`` filled with geoip lookup result. +Config using url: ``filter://geoip://http_remote_ip?maxmind_dir=/var/db/maxmind&cache_size=1000`` + +Config using logstash format: +```` +filter { + geoip { + field => http_remote_ip + cache_size => 1000 + maxmind_dir => /var/db/maxmind + } +} +```` + +Parameters: + +* ``field``: which field to work on. +* ``country_field``: field in which to store the geo ip country result. 
Default value : ``ip_geo_country``, if the field containing the ip is ``ip``. If you specify ``none``, the geo ip country result will not be stored. +* ``region_field``: field in which to store the geo ip region result. Default value : ``ip_geo_region``, if the field containing the ip is ``ip``. If you specify ``none``, the geo ip region result will not be stored. +* ``city_field``: field in which to store the geo ip city result. Default value : ``ip_geo_city``, if the field containing the ip is ``ip``. If you specify ``none``, the geo ip city result will not be stored. +* ``lonlat_field``: field in which to store the geo ip longitude and latitude result. Default value : ``ip_geo_lonlat``, if the field containing the ip is ``ip``. If you specify ``none``, the geo ip longitude and latitude result will not be stored. +* ``asn_field``: field in which to store the asn result. Default value : ``ip_geo_asn``, if the field containing the ip is ``ip``. If you specify ``none``, the geo ip asn result will not be stored. +* ``cache_*``: cache configuration. More doc at [cache](../cache.md). + +#### Note if you are using the native package + +For reduce the size of the package, the native package does not contains any geoip database. +The recommended mode is ``node-maxmind``. + +To enable it, just type + + node-logstash config:set MAXMIND_DB_DIR=/var/db/node-logstash/maxmind/ + node-logstash run node_modules/.bin/maxmind-geolite-mirror + service node-logstash restart + +The geoip plugin will use the env var ``MAXMIND_DB_DIR`` be auto configured (the ``maxmind_dir``is not needed.). + +To refresh the database, just add a weekly cron + + 2 2 0 * * node-logstash run node_modules/.bin/maxmind-geolite-mirror diff --git a/docs/filters/grep.md b/docs/filters/grep.md new file mode 100644 index 00000000..47d2c83e --- /dev/null +++ b/docs/filters/grep.md @@ -0,0 +1,52 @@ +Grep filter +--- + +Status : core plugin, unit tested and maintained. 
+ +The grep filter can remove lines which match or do not match a given regex. + +Example 1: remove all lines which do not contain ``abc``. Equivalent to ``grep`` +Config using url: ``filter://grep://?regex=abc`` + +Config using logstash format: +```` +filter { + grep { + regex => 'abc' + } +} +```` + +Example 2: remove all lines which contain ``abc``. Equivalent to ``grep -v`` +Config using url: ``filter://grep://?regex=abc&invert=true`` + +Config using logstash format: +```` +filter { + grep { + regex => /abc/ + invert => true + } +} +```` + +Example 3: remove all lines with type ``nginx`` which do not contain ``abc`` +Config using url: ``filter://grep://?type=nginx&regex=abc`` + +Config using logstash format: +```` +filter { + if [type] == 'nginx' { + grep { + regex => 'abc' + invert => true + } + } +} +```` + +Parameters: + +* ``regex``: regex to be matched. You have to escape special characters. +* ``regex_flags``: regex flags (eg : g, i, m). +* ``invert``: if ``true``, remove lines which match. Default value: false. diff --git a/docs/filters/grok.md b/docs/filters/grok.md new file mode 100644 index 00000000..9875e392 --- /dev/null +++ b/docs/filters/grok.md @@ -0,0 +1,68 @@ +Grok filter +--- + +Status : core plugin, unit tested and maintained. + +The grok filter is used to extract data using [grok patterns](http://logstash.net/docs/latest/filters/grok). The lines of logs are not modified by this filter. + +Grok is a simple pattern defining language. The syntax for a grok pattern is ``%{SYNTAX:SEMANTIC}``. + +The ``SYNTAX`` is the name of the pattern that will match the text. + +The ``SEMANTIC`` is the field name to assign the value of the matched text. + +Grok rides on the Oniguruma regular expressions library, so any valid regular expression in that syntax is valid for grok. +You can find the fully supported syntax on the [Oniguruma site](http://www.geocities.jp/kosako3/oniguruma/doc/RE.txt). + +The grok filter has many built-in grok patterns.
The full list can be found in the [patterns folder](lib/patterns/grok). +(Note: patterns were copied from [elasticsearch/patterns](https://github.com/elasticsearch/logstash/tree/master/patterns)). + +Example 1: if the ``message`` field is ``hello 123``, the filter will add the field ``w1`` with value ``hello`` and field ``num1`` with value ``123``. +Config using url: ``filter://grok://?match=%{WORD:w1} %{NUMBER:num1}`` + +Config using logstash format: +```` +filter { + grok { + match => '%{WORD:w1} %{NUMBER:num1}' + } +} +```` + +Example 2: to extract fields from a haproxy log. The ``HAPROXYHTTP`` pattern is already built-in to the grok filter. +Config using url: ``filter://grok://only_type=haproxy&match=%{HAPROXYHTTP}`` + +Config using logstash format: +```` +filter { + if [type] == haproxy { + grok { + match => %{HAPROXYHTTP} + } + } +} +```` + +Example 3: to load custom patterns from the ``/path/to/file`` file that defines the ``MY_PATTERN`` pattern. +Config using url: ``filter://grok://?extra_patterns_file=/path/to/file&match=%{MY_PATTERN}`` + +Config using logstash format: +```` +filter { + grok { + extra_patterns_file => '/path/to/file' + match => '%{MY_PATTERN}' + } +} +```` + +Parameters: + +* ``match``: the grok pattern to apply. +* ``extra_patterns_file``: path to a file containing custom patterns to load. +* ``numerical_fields``: name of fields which have to contain a numerical value. If value is not numerical, field will not be set. +* ``date_format``: if ``date_format`` is specified and a ``@timestamp`` field is extracted, the filter will process the data extracted with the date\_format, using [moment](http://momentjs.com/docs/#/parsing/string-format/). The result will replace the original timestamp of the log line. +* ``tag_on_failure``: if the parse fails, tags in this parameter will be added. In url config format, use ``,`` for splitting. Eg : ``tag_on_failure=a,b``. Default: ``["_grokparsefailure"]``.
+* ``add_tags``, ``add_field``, ``add_fields``, ``remove_tags``, ``remove_fields``: tags and fields to add or remove when parsing is ok. More doc at [tags and fields](./tags_fields.md). + +Note: fields with empty values will not be set. diff --git a/docs/filters/http_status_classifier.md b/docs/filters/http_status_classifier.md new file mode 100644 index 00000000..6b159a77 --- /dev/null +++ b/docs/filters/http_status_classifier.md @@ -0,0 +1,23 @@ +HTTP Status classifier filter +--- + +Status : core plugin, unit tested and maintained. + +The http status classifier filter parse the status code. + +Example 1: parse the ``http_status`` field and fill the ``http_class`` field with value like ``2xx``, ``3xx``. +Config using url: ``filter://http_status_classifier://http_status`` + +Config using logstash format: +```` +filter { + http_status_classifier { + field => http_status + } +} +``` + +Parameters: +* ``field``: which field to work on. +* ``target_field``: field to store the result. Default : ``http_class``. +* ``special_codes``: http status codes to be kept as is. Eg, with ``498,499`` value in ``special_codes``, the filter will put 499 in the ``http_class`` field when receiving a ``499`` http code, and not ``4xx``. Mutlipe values must be separated with ``,``. Default value: empty. diff --git a/docs/filters/json_fields.md b/docs/filters/json_fields.md new file mode 100644 index 00000000..3d810ac2 --- /dev/null +++ b/docs/filters/json_fields.md @@ -0,0 +1,22 @@ +Json fields filter +--- + +Status : core plugin, unit tested and maintained. + +The json fields filter is used to parse the message payload as a JSON object, and merge it into current object. + +This allows to automatically index fields for messages that already contain a well-formatted JSON payload. The JSON object is parsed starting from the first ``{`` character found in the message. + +Filter does nothing in case of error while parsing the message. 
Existing attributes in current line are kept, but overwritten if they conflict with attributes from the parsed payload. + +Example 1: will parse, as JSON, the given stream of messages which ``type`` matches ``json_stream``. +Config using url: ``filter://json_fields://?only_type=json_stream`` + +Config using logstash format: +```` +filter { + if [type] == 'json_stream' { + json_fields + } +} +``` diff --git a/docs/filters/multiline.md b/docs/filters/multiline.md new file mode 100644 index 00000000..77207d41 --- /dev/null +++ b/docs/filters/multiline.md @@ -0,0 +1,24 @@ +Multiline filter +--- + +Status : core plugin, unit tested and maintained. + +The multiline filter is used to regroup lines into blocks. For example, you can group lines from a Java stacktrace into single line of log. To do that, you have to provide a regular expression which match the first line of each block. Standard way is to detect a timestamp. + +Example 1: to regroup lines by blocks, each block have to start with a line with a date like ``2012-12-02`` +Config using url: ``filter://multiline://?start_line_regex=^\\d{4}-\\d{2}-\\d{2}`` + +Config using logstash format: +```` +filter { + multiline { + start_line_regex => /^\d{4}-\d{2}-\d{2}/ + } +} +```` + +Parameters: + +* ``start_line_regex``: regular expression which is used to find lines which start blocks. You have to escape special characters. +* ``regex_flags: regex flags (eg : g, i, m). +* ``max_delay``: delay to wait the end of a block. Default value: 50 ms. Softwares which write logs by block usually write blocks in one time, this parameter is used to send lines without waiting the next matching start line. diff --git a/docs/filters/mutate_replace.md b/docs/filters/mutate_replace.md new file mode 100644 index 00000000..75644175 --- /dev/null +++ b/docs/filters/mutate_replace.md @@ -0,0 +1,27 @@ +Mutate replace filter +--- + +Status : core plugin, unit tested and maintained. 
+ +The mutate replace filter is used to run a regex on a specified field. + +Example 1: replace all ``.`` in ``toto`` field by ``-`` + +Config using url: ``filter://mutate_replace://toto?from=\\.&to=-`` + +Config using logstash format: +```` +filter { + mutate_replace { + field => toto + from => /\./ + to => - + } +} +```` + +Parameters: + +* ``field``: which field to work on. +* ``from``: regex to find pattern which will be replaced. You have to escape special characters. +* ``to``: replacement string. diff --git a/docs/filters/regex.md b/docs/filters/regex.md new file mode 100644 index 00000000..80227b0b --- /dev/null +++ b/docs/filters/regex.md @@ -0,0 +1,62 @@ +Regex filter +--- + +Status : core plugin, unit tested and maintained. + +The regex filter is used to extract data from lines of logs. The lines of logs are not modified by this filter. + +Example 1: to extract the first word of a line of logs, and place it into the ``toto`` field. +Config using url: ``filter://regex://?regex=^(\S)+ &fields=toto`` + +Config using logstash format: +```` +filter { + regex { + regex => /^(\S)+/ + fields => [toto] + } +} +```` + +Example 2: to extract fields following the configuration of the ``http_combined`` pattern. node-logstash is bundled with [some configurations](https://github.com/bpaquet/node-logstash/tree/master/lib/patterns). You can add your custom patterns directories, see options ``--patterns_directories``. + +Config using url: ``filter://regex://http_combined?only_type=nginx`` + +Config using logstash format: +```` +filter { + if [type] == 'nginx' { + regex { + builtin_regex => http_combined + } + } +} +```` + +Example 3: to force number extraction. If the matched string is not a number but ``-``, the field ``a`` will not be set.
+ +Config using url: ``filter://regex://?regex=(\d+|-)&fields=a&numerical_fields=a`` + +Config using logstash format: +```` +filter { + regex { + regex => /(\d+|-)/ + fields => [a] + numerical_fields => [a] + } +} +```` + +Parameters: + +* ``field``: the field to work on. Default to : message. +* ``regex``: regex to apply. +* ``regex_flags``: regex flags (eg : g, i, m). +* ``fields``: name of fields which will receive the pattern extracted (see below for the special field @timestamp). +* ``numerical_fields``: name of fields which have to contain a numerical value. If value is not numerical, field will not be set. +* ``date_format``: if ``date_format`` is specified and a ``@timestamp`` field is extracted, the filter will process the data extracted with the date\_format, using [moment](http://momentjs.com/docs/#/parsing/string-format/). The result will replace the original timestamp of the log line. + +Note: fields with empty values will not be set. + +See also [Grok filter](grok.md) \ No newline at end of file diff --git a/docs/filters/remove_field_when_equal.md b/docs/filters/remove_field_when_equal.md new file mode 100644 index 00000000..45337802 --- /dev/null +++ b/docs/filters/remove_field_when_equal.md @@ -0,0 +1,24 @@ +Remove field when equal filter +--- + +Status : core plugin, unit tested and maintained. + +The remove field when equal filter allow to remove a message when equal to a given value. Typical usage is to remove field containing ``-`` in apache or nginx logs. + +Example 1: will remove the field ``http_user`` when equal to ``-``. +Config using url: ``filter://remove_field_when_equal://http_user?value=-`` + +Config using logstash format: +```` +filter { + remove_field_when_equal { + field => http_user + value => '-' + } +} +```` + +Parameters: + +* ``field``: which field to work on. +* ``value``: value to check. Required params. 
diff --git a/docs/filters/rename.md b/docs/filters/rename.md new file mode 100644 index 00000000..7ef5c922 --- /dev/null +++ b/docs/filters/rename.md @@ -0,0 +1,25 @@ +Rename filter +--- + +Status : core plugin, unit tested and maintained. + +The truncate filter is used to rename a field + +Example 1: rename the ``ts`` field to ``timestamp``. + +Config using url: ``filter://rename://ts?to=timestamp`` + +Config using logstash format: +```` +filter { + rename { + from => ts + to => timestamp + } +} +```` + +Parameters: + +* ``from``: Source field name. +* ``to``: Target field name. diff --git a/docs/filters/reverse_dns.md b/docs/filters/reverse_dns.md new file mode 100644 index 00000000..31a4a9fc --- /dev/null +++ b/docs/filters/reverse_dns.md @@ -0,0 +1,27 @@ +Reverse DNS filter +--- + +Status : core plugin, unit tested and maintained. + +The reverse dns filter replace an ip in a field by the hostname, performing a dns resolution. This is useful with syslog. + +Example 1: performs a dns resolution on the field ``host``. + +Config using url: ``filter://reverse_dns://host`` + +Config using logstash format: +```` +filter { + reverse_dns { + field => dns + cache_size => 1000 + } +} +```` + +Parameters: + +* ``field``: which field to work on. +* ``target_field``: field to store the result. Default: field used for resolution. +* ``only_hostname``: after dns resolution, the filter will keep only the first word of dns name. Example : 'www.free.fr' will be transformed to 'www'. Default value: true. +* ``cache_*``: cache configuration. More doc at [cache](../cache.md). \ No newline at end of file diff --git a/docs/filters/split.md b/docs/filters/split.md new file mode 100644 index 00000000..28011eb5 --- /dev/null +++ b/docs/filters/split.md @@ -0,0 +1,24 @@ +Split filter +--- + +Status : core plugin, unit tested and maintained. + +The split filter is used to split a line of log into multiple lines, on a given delimiter. 
+ +Example 1: ``filter://split://?delimiter=|`` split all lines of logs on ``|`` char. + +Config using url: ``filter://split://?delimiter=|``. +You have to url encode special chars (%0A for ``\n``). + +Config using logstash format: +```` +filter { + split { + delimiter => '|' + } +} +```` + +Parameters: + +* ``delimiter``: delimiter used to split. diff --git a/docs/filters/tags_fields.md b/docs/filters/tags_fields.md new file mode 100644 index 00000000..861563ef --- /dev/null +++ b/docs/filters/tags_fields.md @@ -0,0 +1,38 @@ +Tags and fields for filters plugins +--- + +Status : core feature, unit tested and maintained. + +Some filter plugins support ``add_tags``, ``add_field``, ``add_fields``, ``remove_tags``, ``remove_fields`` params. + +* ``add_tags``: arbitrary tags to add to each data if filter is successful. Must be an array. In url config format, use ``,`` for splitting. Eg : ``add_tags=a,b``. +* ``add_field`` and ``add_fields``: arbitrary fields to add to each data if filter is successful. Must be a hash. In url config format, use ``:`` and ``,``. Eg : ``add_fields=key1:value1,key2:value2``. Note : interpolated strings can be used in values. +* ``remove_tags``: arbitrary tags to remove from each data if filter is successful. Must be an array. In url config format, use ``,`` for splitting. Eg : ``remove_tags=a,b``. +* ``remove_field`` and ``remove_fields``: arbitrary fields to remove from each data if filter is successful. Must be an array. In url config format, use ``,``.
Eg : ``remove_fields=key1,key2` + +Example using logstash format: + +```` +filter { + grok { + match => '%{IP}' + add_tags => toto + add_fields => { + a => b + c => d + } + } +} +```` + +or + +```` +filter { + grok { + match => '%{IP}' + add_tags => [a , b] + add_field => { a => b } + } +} +```` diff --git a/docs/filters/truncate.md b/docs/filters/truncate.md new file mode 100644 index 00000000..0331a899 --- /dev/null +++ b/docs/filters/truncate.md @@ -0,0 +1,23 @@ +Truncate filter +--- + +Status : core plugin, unit tested and maintained. + +The truncate filter is used to truncate the log message at a certain size. + +Example 1: truncate the message field to a max size of 200. + +Config using url: ``filter://truncate://?max_size=200`` + +Config using logstash format: +```` +filter { + truncate { + max_size => 200 + } +} +```` + +Parameters: + +* ``max_size``: Maximum size of a message. diff --git a/docs/inputs/amqp.md b/docs/inputs/amqp.md new file mode 100644 index 00000000..b3e181e4 --- /dev/null +++ b/docs/inputs/amqp.md @@ -0,0 +1,50 @@ +AMQP input plugin +--- + +Status : core plugin, unit tested and maintained. + +This plugin is used to get logs from an [AMQP exchange](https://www.rabbitmq.com/tutorials/amqp-concepts.html), like a [RabbitMQ](http://www.rabbitmq.com/) exchange. This plugin is compatible with the original AMQP logstash plugin. 
+ +Example 1: Fanout mode: Receive message from fanout exchange ``toto`` +Config using url: ``input://amqp://localhost:5672?exchange_name=toto`` + +Config using logstash format: +```` +input { + amqp { + host => localhost + port => 5672 + exchange_name => toto + } +} +```` + +Example 2: Topic mode: Receive message from topic ``test`` on exchange ``toto_topic`` +Config using url: ``input://amqp://localhost:5672?exchange_name=toto_topic&topic=test`` + +Config using logstash format: +```` +input { + amqp { + host => localhost + port => 5672 + exchange_name => toto_topic + topic => test + } +} +```` + +Parameters: + +* ``host``: ip of the AMQP broker. +* ``port``: port of the AMQP broker. +* ``topic``: topic to use in topic mode. Default : none, fanout mode is used. +* ``durable``: set exchange durability. Default : true. +* ``retry_delay``: retry delay (in ms) to connect AMQP broker. Default : 3000. +* ``heartbeat``: AMQP heartbeat in s. Default: 10 +* ``type``: to specify the log type, to faciliate crawling in kibana. Example: ``type=rabbit``. No default value. +* ``username``: username for PLAIN authentication to amqp broker. No default value. +* ``password``: password for PLAIN authentication to amqp broker. No default value. +* ``vhost``: amqp vhost to use. No default value. +* ``ssl``: enable SSL mode. More doc at [ssl](../ssl.md). Default : false +* ``unserializer``: more doc at [unserializers](unserializers.md). Default value to ``json_logstash``. diff --git a/docs/inputs/file.md b/docs/inputs/file.md new file mode 100644 index 00000000..99f99512 --- /dev/null +++ b/docs/inputs/file.md @@ -0,0 +1,70 @@ +File input plugin +--- + +Status : core plugin, unit tested and maintained. + +This plugin monitor log files. + +Wildcard (* and ?) can be used, in path, and basename. + +This plugin is compatible with logrotate (copytruncate or normal mode). 
+ +If a db file is specified on node-logstash command line (``--db_file``), this plugin stores the last line read for each file, to allow restart at the same place, even the monitored file grows when node-logstash were down. + +Example 1: to monitor ``/tmp/toto.log`` +Config using url: ``input://file:///tmp/toto.log`` + +Config using logstash format: +```` +input { + file { + path => "/tmp/toto.log" + } +} +```` + +Example 2: to monitor all log file in ``/var/log``. +Config using url: ``input://file:///var/log/*.log`` + +Config using logstash format: +```` +input { + file { + path => "/var/log/*.log" + } +} +```` + +Example 3: to monitor all log ``access.log`` files in directories ``/var/log/httpd/*``. +Config using url: ``input://file:///var/log/httpd/*/access.log`` + +Config using logstash format: +```` +input { + file { + path => "/var/log/httpd/*/access.log" + } +} +```` + +Example 4: to monitor all files matching ``auth?.log`` in ``/var/log``. +Config using url: ``input://file:///var/log/auth%3F.log``. ``%3F`` is the encoding of ``?``. + +Config using logstash format: +```` +input { + file { + path => "/var/log/auth?.log" + } +} +```` + +Parameters: + +* ``path``: the path to monitor. +* ``start_index``: add ``?start_index=0`` to reread files from begining. Without this params, only new lines are read. +* ``use_tail``: use system ``tail -f`` command to monitor file, instead of built in file monitoring. Can be used when node-logstash is unable to follow files with strange rotation log behaviour (builtin in a sofware for example). Defaut value: false. +* ``type``: to specify the log type, to faciliate crawling in kibana. Example: ``type=nginx_error_log``. +* ``unserializer``: more doc at [unserializers](unserializers.md). Default value to ``json_logstash``. + +Note: this plugin can be used on FIFO pipes. 
diff --git a/docs/inputs/gae.md b/docs/inputs/gae.md new file mode 100644 index 00000000..b1a46ef0 --- /dev/null +++ b/docs/inputs/gae.md @@ -0,0 +1,127 @@ +Google App Engine input plugin +--- + +Status : core plugin, unit tested and maintained. + +This plugin is used to collect logs from a running Google App Engine Application. + +You have to add a servlet in your App Engine App (see below). The plugin will poll the logs from this servlet. + +This plugin collects logs 10s in the past to allow GAE internal logs propagation. + +Example 1: to grab the logs from myapp GAE app, every minutes, on url ``http://myapp.appspot.com:80/logs?log_key=toto`` +Config using url: ``input://gae://myapp.appspot.com:80?key=toto`` + +Config using logstash format: +```` +input { + gae { + host => myapp.appspot.com + port => 80 + key => toto + } +} +```` + +Parameters: + +* ``host``: hostname of the GAE webapp. +* ``port``: port of the GAE webapp. +* ``type``: to specify the log type, to faciliate crawling in kibana. Example: ``type=mygaeappp``. No default value. +* ``key``. the security key which will be sent in the http query to Google App Engine. +* ``ssl``: use ssl for grabbing logs. Use port 443 in this case. Default : false. +* ``polling``: polling delay. Default: 60s. +* ``servlet_name``: name of the servlet which serve logs. Default : ``logs``. +* ``access_logs_field_name`` and ``access_logs_type``. If the received line of log has a field ``access_logs_field_name``, the plugin will set the type of the line to ``access_logs_type``. It's used to differentiate access logs from application logs, to apply specific filter on access_logs. Standard config is : ``access_logs_type=nginx_access_logs&access_logs_field_name=http_method``. No default value. + +Servlet +--- + +This servlet should render the logs in text format. +Each line must be a JSON that logstash can use. + +Here is an exemple of a servlet rendering the access logs & application logs. +We use Gson for generating Json. 
+ +We use a very simple authentication system : a query string param ``log_key``. + + +````java + //2014-09-09T13:18:00.000+0000 + protected SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ"); + + @Override + void doGet(HttpServletRequest req, HttpServletResponse response) throws ServletException, IOException { + if(!"my_key".equals(request.getParameter("log_key"))) { + return response.sendError(401); + } + + StringBuilder builder = new StringBuilder(); + + //Get the timestamp from when getting the logs + Long startTimestamp; + try { + startTimestamp = new Long(req.getParameter("start_timestamp")); + } catch (NumberFormatException | NullPointerException e) { + startTimestamp = new Date().getTime() - 1000 * 10; //10 sec + } + + Long endTimestamp = new Date().getTime(); + + LogQuery query = LogQuery.Builder. + withDefaults(). + startTimeMillis(startTimestamp - 10 * 1000). + endTimeMillis(endTimestamp - 10 * 1000). + includeAppLogs(true); + + //Iterate over the access logs + for (RequestLogs record : LogServiceFactory.getLogService().fetch(query)) { + String requestId = record.getRequestId(); + if(!record.getResource().startsWith("/logs")) { + builder.append(buildJsonForAccessLog(record)). + append("\n"); + + //Iterate over the app logs of this access log + for (AppLogLine appLog : record.getAppLogLines()) { + builder. + append(buildJsonForApplicationLog(requestId, appLog)). 
+ append("\n"); + } + } + } + + response.getWriter().append(builder); + response.addHeader("X-Log-End-Timestamp", endTimestamp.toString()); + } + + private String buildJsonForApplicationLog(String requestId, AppLogLine appLog) { + JsonObject jsonObject = new JsonObject(); + jsonObject.addProperty("request_id", requestId); + jsonObject.addProperty("message", appLog.getLogMessage().trim()); + jsonObject.addProperty("log_level", appLog.getLogLevel().name()); + jsonObject.addProperty("@timestamp", format.format(new Date(appLog.getTimeUsec() / 1000))); + return gson.toJson(jsonObject); + } + + // ip,user,@timestamp,request,status,bytes_sent,referer,user_agent + public String buildJsonForAccessLog(RequestLogs record) { + JsonObject jsonObject = new JsonObject(); + jsonObject.addProperty("http_remote_ip", record.getIp()); + jsonObject.addProperty("http_path", record.getResource()); + jsonObject.addProperty("http_status", record.getStatus()); + jsonObject.addProperty("http_bytes_sent", record.getResponseSize()); + jsonObject.addProperty("http_referer", record.getReferrer()); + jsonObject.addProperty("http_user_agent", record.getUserAgent()); + jsonObject.addProperty("http_delay", record.getLatencyUsec() / 1000); + jsonObject.addProperty("http_method", record.getMethod()); + jsonObject.addProperty("http_host", record.getHost()); + jsonObject.addProperty("cost", record.getCost()); + + jsonObject.addProperty("@timestamp", format.format(new Date(record.getStartTimeUsec() / 1000))); + jsonObject.addProperty("request_id", record.getRequestId()); + jsonObject.addProperty("message", record.getCombined()); + + return gson.toJson(jsonObject); + } +``` + diff --git a/docs/inputs/http.md b/docs/inputs/http.md new file mode 100644 index 00000000..98d3628d --- /dev/null +++ b/docs/inputs/http.md @@ -0,0 +1,28 @@ +HTTP input plugin +--- + +Status : core plugin, unit tested and maintained. + +This plugin is used on log server to receive logs from an HTTP/HTTPS stream. 
This is useful +in case the agent can only output logs through an HTTP/HTTPS channel. + +Example 1: +Config using url: ``input://http://localhost:8080`` + +Config using logstash format: +```` +input { + http { + host => 127.0.0.1 + port => 8080 + } +} +```` + +Parameters: + +* ``host``: listen address for the HTTP server : can be 0.0.0.0, 127.0.0.1 ... +* ``port``: port for the HTTP server. +* ``type``: to specify the log type, to faciliate crawling in kibana. Example: ``type=http``. No default value. +* ``unserializer``: more doc at [unserializers](unserializers.md). Default value to ``json_logstash``. +* ``ssl``: enable SSL mode. More doc at [ssl](../ssl.md). Default : false diff --git a/docs/inputs/redis.md b/docs/inputs/redis.md new file mode 100644 index 00000000..bc654d0b --- /dev/null +++ b/docs/inputs/redis.md @@ -0,0 +1,36 @@ +Redis input plugin +--- + +Status : core plugin, unit tested and maintained. + +This plugin is used on log server to receive logs from redis channels. json_event format is expected. + +They are two method to get message from redis : +* Publish / subscribe : The ``subscribe`` redis command will be used. Parameters ``channel`` and ``pattern_channel`` are needed. +* Queue. This ``blpop`` redis command will be used. ``key`` parameter is needed. + +Example 1: +Config using url: ``input://redis://localhost:6379?channel=logstash_channel`` + +Config using logstash format: +```` +input { + redis { + host => localhost + port => 6379 + channel => logstash_channel + } +} +```` + +Parameters: + +* ``host``: ip of the redis server. +* ``port``: port of the redis server. +* ``auth_pass``: password to use when connecting to Redis +* ``type``: to specify the log type, to faciliate crawling in kibana. Example: ``type=redis``. No default value. +* ``method``: ``pubsub`` or ``queue`` Default value: ``queue``. +* ``channel``: Channel for publish / subscribe. No default value. +* ``pattern_channel``: use channel as pattern. Default value : false. 
+* ``key``: Queue name for queue. No default value. +* ``unserializer``: more doc at [unserializers](unserializers.md). Default value to ``json_logstash``. diff --git a/docs/inputs/sqs.md b/docs/inputs/sqs.md new file mode 100644 index 00000000..fc269d18 --- /dev/null +++ b/docs/inputs/sqs.md @@ -0,0 +1,28 @@ +SQS input plugin +--- + +Status : core plugin, unit tested and maintained. + +This plugin is used to get logs from [SQS](https://aws.amazon.com/en/sqs/). This plugin uses [long polling](http://docs.aws.amazon.com/AWSSimpleQueueService/latest/APIReference/API_ReceiveMessage.html) to get messages faster. + +Example 1: get messages from the SQS queue ``sqs.eu-west-1.amazonaws.com/66171255634/test`` +Config using url: ``input://sqs://sqs.eu-west-1.amazonaws.com/66171255634/test?aws_access_key_id=key&aws_secret_access_key=secret`` + +Config using logstash format: +```` +input { + sqs { + aws_queue => "sqs.eu-west-1.amazonaws.com/66171255634/test" + aws_access_key_id => key + aws_secret_access_key => secret + } +} +```` + +Parameters : +* ``aws_queue``: the aws queue url. +* ``aws_access_key_id``: your AWS Access Key Id. Required. +* ``aws_secret_access_key``: your AWS Secret Access Key. Required. +* ``polling_delay``: the long polling max delay, in seconds. Default value : 10. +* ``unserializer``: more doc at [unserializers](unserializers.md). Default value to ``json_logstash``. diff --git a/docs/inputs/syslog.md b/docs/inputs/syslog.md new file mode 100644 index 00000000..55b1e1cf --- /dev/null +++ b/docs/inputs/syslog.md @@ -0,0 +1,39 @@ +Syslog input plugin +--- + +Status : core plugin, unit tested and maintained. + +There is no syslog plugin, but it's easy to emulate with udp plugin. 
+ +Example 1: +Config using url : + + input://udp://0.0.0.0:514?type=syslog + filter://regex://syslog?only_type=syslog + filter://syslog_pri://?only_type=syslog + +Config using logstash format: +```` +input { + udp { + host => 0.0.0.0 + port => 514 + type => syslog + } +} + +filter { + if [type] == syslog { + regex { + builtin_regex => syslog + } + syslog_pri {} + } +} +```` + +The first filter will parse the syslog line, and extract ``syslog_priority``, ``syslog_program``, ``syslog_pid`` fields, parse timestamp, and will replace ``host`` and ``message`` field. + +The second filter will extract from ``syslog_priority`` field severity and facility. + +You can also use the regex ``syslog_no_prio`` if there is no timestamp in syslog lines. diff --git a/docs/inputs/tags_fields.md b/docs/inputs/tags_fields.md new file mode 100644 index 00000000..52ce69bc --- /dev/null +++ b/docs/inputs/tags_fields.md @@ -0,0 +1,11 @@ +Tags and fields for input plugins +--- + +Status : core feature, unit tested and maintained. + +All inputs plugins supports ``tags``, ``add_field`` and ``add_fields`` params. + +* ``tags``: arbitrary tags to add to each data. Must be an array. In url config format, use ``,`` for splitting. Eg : ``tags=a,b``. +* ``add_fields`` and ``add_fields``: arbiraty tags to add to each data. Must be an hash. In url config format, use ``:`` and ``,``. Eg : ``add_fields=key1:value1,key2:value2` + + diff --git a/docs/inputs/tcp_tls.md b/docs/inputs/tcp_tls.md new file mode 100644 index 00000000..436521a6 --- /dev/null +++ b/docs/inputs/tcp_tls.md @@ -0,0 +1,46 @@ +TCP / TLS input plugin +--- + +Status : core plugin, unit tested and maintained. + +This plugin is used on log server to receive data over TCP, optionnaly with SSL/TLS encryption. 
+ +Example 1: TCP mode: +Config using url: ``input://tcp://0.0.0.0:12345`` + +Config using logstash format: +```` +input { + tcp { + host => 0.0.0.0 + port => 12345 + } +} +```` + +Example 2: SSL mode: +Config using url: ``input://tcp://0.0.0.0:443?ssl=true&ssl_key=/etc/ssl/private/logstash-server.key&ssl_cert=/etc/ssl/private/logstash-server.crt&ssl_requestCert=true&ssl_rejectUnauthorized=true`` + +Config using logstash format: +```` +input { + tcp { + host => 0.0.0.0 + port => 12345 + ssl => true + ssl_key => "/etc/ssl/private/logstash-server.key" + ssl_cert => "/etc/ssl/private/logstash-server.crt" + ssl_requestCert => true + ssl_rejectUnauthorized => true + } +} +```` + +Parameters: + +* ``host``: listen address for the tcp server : can be 0.0.0.0, 127.0.0.1 ... +* ``port``: port for the tcp server. +* ``ssl``: enable SSL mode. More doc at [ssl](../ssl.md). Default : false +* ``appendPeerCert``: in SSL mode, adds details of the peer certificate to the @tls field if the peer certificate was received from the client using requestCert option. Default: true in SSL mode +* ``type``: to specify the log type, to faciliate crawling in kibana. Example: ``type=tls``. No default value. +* ``unserializer``: more doc at [unserializers](unserializers.md). Default value to ``json_logstash``. diff --git a/docs/inputs/unserializers.md b/docs/inputs/unserializers.md new file mode 100644 index 00000000..266731b8 --- /dev/null +++ b/docs/inputs/unserializers.md @@ -0,0 +1,13 @@ +Unserializers for input plugins +--- + +Status : core feature, unit tested and maintained. + +Some inputs plugins supports the ``unserializer`` params. +Supported unserializer for input plugin : + +* ``json_logstash``: the unserializer try to parse data as a json object. If fail, raw data is returned. Some input plugins can not accept raw data. +* ``msgpack``: the unserializer try to parse data as a [msgpack](http://msgpack.org) object. If fail, raw data is returned. 
Some input plugins can not accept raw data. +* ``raw``: the unserializer does not try to parse the input line. Best for performances. + + diff --git a/docs/inputs/ws.md b/docs/inputs/ws.md new file mode 100644 index 00000000..ecc09552 --- /dev/null +++ b/docs/inputs/ws.md @@ -0,0 +1,45 @@ +Websocket input plugin +--- + +Status : core plugin, unit tested and maintained. + +This plugin is used on log server to receive data over a websocket, optionally with SSL/TLS encryption. Websockets are like TCP, but are proxy and firewall friendly. + +Example 1: Regular mode: +Config using url: ``input://ws://0.0.0.0:12345`` + +Config using logstash format: +```` +input { + ws { + host => 0.0.0.0 + port => 12345 + } +} +```` + +Example 2: TLS mode: +Config using url: ``input://ws://0.0.0.0:443?ssl=true&ssl_key=/etc/ssl/private/logstash-server.key&ssl_cert=/etc/ssl/private/logstash-server.crt&ssl_requestCert=true&ssl_rejectUnauthorized=true`` + +Config using logstash format: +```` +input { + ws { + host => 0.0.0.0 + port => 12345 + ssl => true + ssl_key => /etc/ssl/private/logstash-server.key + ssl_cert => /etc/ssl/private/logstash-server.crt + ssl_requestCert => true + ssl_rejectUnauthorized => true + } +} +```` + +Parameters: + +* ``host``: listen address for the ws server : can be 0.0.0.0, 127.0.0.1 ... +* ``port``: port for the ws server. +* ``ssl``: enable SSL mode. More doc at [ssl](../ssl.md). Default : false +* ``type``: to specify the log type, to faciliate crawling in kibana. Example: ``type=tls``. No default value. +* ``unserializer``: more doc at [unserializers](unserializers.md). Default value to ``json_logstash``. \ No newline at end of file diff --git a/docs/inputs/zeromq.md b/docs/inputs/zeromq.md new file mode 100644 index 00000000..4efbc785 --- /dev/null +++ b/docs/inputs/zeromq.md @@ -0,0 +1,22 @@ +ZeroMQ input plugin +--- + +Status : core plugin, unit tested and maintained. + +This plugin is used on log server to receive logs from agents. 
+ +Example 1: to open a zeromq socket on port 5555. +Config using url: ``input://zeromq://tcp://0.0.0.0:5555`` + +Config using logstash format: +```` +input { + zeromq { + address => ['tcp://0.0.0.0:5555'] + } +} +```` + +Parameters : +* ``address``: array of ZeroMQ url to open. +* ``unserializer``: more doc at [unserializers](unserializers.md). Default value to ``json_logstash``. This plugin does not support raw data. diff --git a/docs/interpolation.md b/docs/interpolation.md new file mode 100644 index 00000000..06174181 --- /dev/null +++ b/docs/interpolation.md @@ -0,0 +1,10 @@ +Interpolation +--- + +When a plugin param is a string, you can use string interpolation to reference line data: + +* ``#{message}`` will return the full log line +* ``#{type}`` will return the type of log line +* ``#{toto}`` will return the value of the field ``toto``, which have to be extracted with a regex filter +* ``2#{toto}`` will return ``2`` followed by the value of the field ``toto``. +* ``#{now:YYYY}`` will return the current year. YYYY is a date format passed to [moment](http://momentjs.com/docs/#/parsing/string-format/) to format current date. diff --git a/docs/outputs/amqp.md b/docs/outputs/amqp.md new file mode 100644 index 00000000..ce429ab3 --- /dev/null +++ b/docs/outputs/amqp.md @@ -0,0 +1,53 @@ +AMQP output plugin +--- + +Status : core plugin, unit tested and maintained. + +This plugin is used to send logs to an [AMQP exchange](https://www.rabbitmq.com/tutorials/amqp-concepts.html), like a [RabbitMQ](http://www.rabbitmq.com/) exchange. This plugin is compatible with the original AMQP logstash plugin. 
+ +Examples: + +* Fanout mode: Receive message from fanout exchange ``toto`` +Config using url: ``output://amqp://localhost:5672?exchange_name=toto`` + +Config using logstash format: +```` +output { + amqp { + host => localhost + port => 5672 + exchange_name => toto + } +} +```` + +* Topic mode: Receive message from topic ``test`` on exchange ``toto_topic`` +Config using url: ``output://amqp://localhost:5672?exchange_name=toto_topic&topic=test`` + +Config using logstash format: +```` +output { + amqp { + host => localhost + port => 5672 + exchange_name => toto_topic + topic => test + } +} +```` + +Parameters: + +* ``host``: ip of the AMQP broker. +* ``port``: port of the AMQP broker. +* ``topic``: Optional. Topic to use in topic mode. Default : none, fanout mode is used. +* ``durable``: Optional. Set exchange durability. Default : true. +* ``persistent``: Optional. Set persistent flag on each send message. Default: false. +* ``retry_delay``: Optional. Retry delay (in ms) to connect AMQP broker. Default : 3000. +* ``heartbeat``: Optional. AMQP heartbeat in s. Default: 10 +* ``username``: username for PLAIN authentication to amqp broker. No default value. +* ``password``: password for PLAIN authentication to amqp broker. No default value. +* ``vhost``: amqp vhost to use. No default value. +* ``ssl``: enable SSL mode. More doc at [ssl](../ssl.md). Default : false +* ``serializer``: more doc at [serializers](serializers.md). Default value to ``json_logstash``. +* ``format``: params used by the ``raw`` [serializer](serializers.md). diff --git a/docs/outputs/elasticsearch.md b/docs/outputs/elasticsearch.md new file mode 100644 index 00000000..5944e7e1 --- /dev/null +++ b/docs/outputs/elasticsearch.md @@ -0,0 +1,65 @@ +ElasticSearch output plugin +--- + +Status : core plugin, unit tested and maintained. + +This plugin is used on log server to ship to ElasticSearch, using HTTP REST interface. + +By default, each incoming message generate one HTTP request to ElasticSearch. 
The bulk feature allows to send grouped messages. For example, under heavy traffic, you can send messages to ElasticSearch by bulk of 1000 messages. In this mode, the bulk is send even if incomplete after a configured timeout (100 ms by default). + +Note : for better performance, you can use the ZeroMQ plugin and the [ZeroMQ Logasth river for ES < 2.0](https://github.com/bpaquet/elasticsearch-river-zeromq), or the [ElasticSearch ZeroMQ Torrent for ES >= 2.x](https://github.com/bpaquet/elasticsearch-zeromq-torrent). + +Example 1: to send to the HTTP interface of an ElasticSearch server listening on port 9001. +Config using url: ``output://elasticsearch://localhost:9001`` + +Config using logstash format: +```` +output { + elasticsearch { + host => localhost + port => 9001 + } +} +```` + +Example 2: to send to index ``audit-`` and type ``audits``. +Config using url: ``output://elasticsearch://localhost:9001&index_prefix=audit&data_type=audits`` + +Config using logstash format: +```` +output { + elasticsearch { + host => localhost + port => 9001 + index_prefix => audit + data_type => audits + } +} +```` + +Example 3: to perform bulk updates with a limit of 1000 messages per bulk update and a timeout of 100 ms to wait for 'limit' messages. +Config using url: ``output://elasticsearch://localhost:9001?bulk_limit=1000&bulk_timeout=100`` + +Config using logstash format: +```` +output { + elasticsearch { + host => localhost + port => 9001 + bulk_limit => 1000 + bulk_timeout => 100 + } +} +```` + +Parameters: +* ``host``: ip of the elasticsearch server. +* ``port``: port of the elasticsearch server. +* ``index_prefix``: specifies the index prefix that messages will be stored under. Default : ``logstash``. Default index will be ``logstash-`` +* ``index_name``: specifies a fixed name for the index that messages will be stored under. Disable the ``index_prefix`` option. No default value. +* ``data_type``: specifies the type under the index that messages will be stored under. 
(default is ``logs``) +* ``bulk_limit``: Enable bulk mode. Dpecifies the maximum number of messages to store in memory before bulking to ElasticSearch. No default value. +* ``bulk_timeout``: Specifies the maximum number of milliseconds to wait for ``bulk_limit`` messages,. Default is 100. +* ``ssl``: enable SSL mode. More doc at [ssl](../ssl.md). Default : false +* ``proxy``: use http proxy. More doc at [http proxy](http_proxy.md). Default : none. +* ``basic_auth_user`` and ``basic_auth_password``: user and password for HTTP Basic Auth required by server. Default: none. diff --git a/docs/outputs/file.md b/docs/outputs/file.md new file mode 100644 index 00000000..d775ad01 --- /dev/null +++ b/docs/outputs/file.md @@ -0,0 +1,59 @@ +File output plugin +--- + +Status : core plugin, unit tested and maintained. + +This plugin is used to write data into files. There are two modes: JSON, and raw (default). This plugin will create directory and sub directories if needed. Variables can be used in filename or in path. + +In JSON mode, each line of log is dumped to target file as JSON object, containing all fields. + +In raw mode, each line of log is dumped to target file as specified in ``format`` parameter. Default format is ``#{message}``, which means the original log line. + +Note: target files can be reopened by sending USR2 signal to node-logstash. + +Example 1: to write each ``nginx`` log lines to ``/var/log/toto.log``. +Config using url: ``output://file:///var/log/toto.log?only_type=nginx`` + +Config using logstash format: +```` +output { + if [type] == nginx { + file { + path => "/var/log/toto.log" + } + } +} +```` + +Example 2: to write each ``nginx`` log lines to ``/var/log/log_nginx.log``. +Config using url: ``output://file:///var/log/log_#{type}.log`` + +Config using logstash format: +```` +output { + file { + path => "/var/log/log_#{type}.log" + } +} +```` + +Example 3: to create a new directory for each month, and write to a file ``http.log``. 
+Config using url: ``output://file:///var/log/http/#{now:YYYY-MM}/http.log`` + +Config using logstash format: +```` +output { + file { + path => "/var/log/http/#{now:YYYY-MM}/http.log" + } +} +```` + +Parameters: + +* ``path``: the target path. +* ``serializer``: more doc at [serializers](serializers.md). Default value to ``raw``. +* ``format``: params used by the ``raw`` [serializer](serializers.md). +* ``delimiter``: Optional. Delimiter inserted between message. Default : ``\n``. Must be encoded in url (eg ``%0A`` for ``\n``). Can be empty. +* ``idle_timeout``: delay before closing a file without activity, in seconds. Set it to 0 to never close files. Why closing file ? Because you can use current date in filename, so node-logtash can close them automatically to avoid keeping useless files open). Default : 0. +* ``retry_delay``: after an error, delay before retry, in seconds. Default : 300. diff --git a/docs/outputs/gelf.md b/docs/outputs/gelf.md new file mode 100644 index 00000000..3cfe7858 --- /dev/null +++ b/docs/outputs/gelf.md @@ -0,0 +1,28 @@ +Gelf output plugin +--- + +Status : core plugin, unit tested and maintained. + +This plugin is used to send data to a GELF enabled server, eg [Graylog2](http://graylog2.org/). Documentation of GELF messages is [here](https://github.com/Graylog2/graylog2-docs/wiki/GELF). + +Example: to send logs to 192.168.1.1 port 1221. +Config using url: ``output://gelf://192.168.1.1:12201`` + +Config using logstash format: +```` +output { + gelf { + host => 192.168.1.1 + port => 12201 + } +} +```` + +Parameters: + +* ``host``: ip of the gelf server. +* ``port``: port of the gelf server. +* ``message``: ``short_message`` field. Default value: ``#{message}``, the line of log. Can reference log line properties (see [interpolation](../interpolation.md)). +* ``facility``: ``facility`` field. Default value: ``#{type}``, the line type. ``no_facility`` if no value. 
Can reference log line properties (see [interpolation](../interpolation.md)). +* ``level``: ``level`` field. Default value: ``6``. Can reference log line properties (see [interpolation](../interpolation.md)). +* ``version``: ``version`` field. Default value: ``1.0``. diff --git a/docs/outputs/hep.md b/docs/outputs/hep.md new file mode 100644 index 00000000..3f1769ef --- /dev/null +++ b/docs/outputs/hep.md @@ -0,0 +1,43 @@ +HEP/EEP output plugin +--- + +Status : experimental + + +This plugin is used to send correlated data to a collector supporting the [HEP](http://hep.sipcapture.org) encapsulation protocol. + +Example: +Config using url: ``output://hep://localhost:9060`` + +Config using logstash format: +```` + input { + file { + path => "/var/log/kamailio/kamailio.log" + } + } + + filter { + regex { + regex => /ID=([^&]\\S*)/ + fields => [hep_cid] + } + } + + output { + hep { + host => localhost + port => 9060 + } + } +```` + +Parameters: + +* ``host``: ip of the HEP server. +* ``port``: port of the HEP server. +* ``hep_id`` to change the HEP id of the stream. Default value: ``2001``. +* ``hep_pass``: to enable HEP authentication support. Default : none +* ``hep_cid``: to insert a (required) correaltion id, interpolated from string. Default : none. + + diff --git a/docs/outputs/http_post.md b/docs/outputs/http_post.md new file mode 100644 index 00000000..4c3e88a9 --- /dev/null +++ b/docs/outputs/http_post.md @@ -0,0 +1,35 @@ +HTTP Post output plugin +--- + +Status : core plugin, unit tested and maintained. + +This plugin is used to send data to an HTTP server, with a POST request. For filling request body, there are two modes: JSON, and raw (default). + +In JSON mode, the HTTP POST body request will contain a JSON dump of log line, containing all fields. Content-Type will be set to ``text/plain``. + +In raw mode, the HTTP POST body request will contain the log line. Content-Type will be set to ``application/json``. 
+ +Example 1: Send data to [Loggly](http://loggly.com/) +Config using url: ``output://http_post://logs.loggly.com:80?path=/inputs/YOUR_INPUT_KEY`` + +Config using logstash format: +```` +output { + http_post { + host => logs.loggly.com + port => 80 + path => "/inputs/YOUR_INPUT_KEY" + } +} +```` + +Parameters: + +* ``host``: ip of the target HTTP server. +* ``port``: port of the target HTTP server. +* ``path``: path to use in the HTTP request. Can reference log line properties (see [interpolation](../interpolation.md)). +* ``serializer``: more doc at [serializers](serializers.md). Default value to ``raw``. +* ``format``: params used by the ``raw`` [serializer](serializers.md). +* ``ssl``: enable SSL mode. More doc at [ssl](../ssl.md). Default : false +* ``proxy``: use http proxy. More doc at [http proxy](http_proxy.md). Default : none. +* ``basic_auth_user`` and ``basic_auth_password``: user and password for HTTP Basic Auth required by server. Default: none. diff --git a/docs/outputs/http_proxy.md b/docs/outputs/http_proxy.md new file mode 100644 index 00000000..ca024299 --- /dev/null +++ b/docs/outputs/http_proxy.md @@ -0,0 +1,13 @@ +Use HTTP proxy in HTTP output plugins +--- + +Status : core feature, unit tested and maintained. + +The proxy parameter allow to use an http proxy. + +The proxy url must have the format ``http[s]://[userinfo@]hostname[:port]`` which gives support for: + * http and https proxies + * proxy authentication via userinfo ``username:password`` in plain text. + * proxy port + +WARN : The HTTP agent API has changed between Node 0.10 and 0.12. This feature is not compatible with Node < 0.12 \ No newline at end of file diff --git a/docs/outputs/logio.md b/docs/outputs/logio.md new file mode 100644 index 00000000..c28c8bae --- /dev/null +++ b/docs/outputs/logio.md @@ -0,0 +1,28 @@ +Logio output plugin +--- + +Status : core plugin, unit tested and maintained. + + +This plugin is used to sent data to a [Log.io](http://logio.org) server. 
+ +Example: +Config using url: ``output://logio://localhost:28777`` + +Config using logstash format: +```` +output { + logio { + host => localhost + port => 28777 + } +} +```` + +Parameters: + +* ``host``: ip of the logio server. +* ``port``: port of the logio server. +* ``priority`` to change the line priority. Can reference log line properties. Default value: ``info``. +* ``ssl``: enable SSL mode. More doc at [ssl](../ssl.md). Default : false +* ``proxy``: use http proxy. More doc at [http proxy](http_proxy.md). Default : none. diff --git a/docs/outputs/lumberjack.md b/docs/outputs/lumberjack.md new file mode 100644 index 00000000..8be06a53 --- /dev/null +++ b/docs/outputs/lumberjack.md @@ -0,0 +1,29 @@ +Lumberjack output plugin +--- + +Status : core plugin, maintained. + + +This plugin is used to send data to a logstash server, using lumberjack protocol. +The connection must be secured with TLS. + +Example: +Config using url: ``output://lumberjack://localhost:5044?ca=ca.crt`` + +Config using logstash format: +```` +output { + lumberjack { + host => localhost + port => 5044 + ca => "ca.crt" + } +} +```` + +Parameters: + +* ``host``: ip of the logstash server. +* ``port``: port of the logstash server. +* ``ssl_ca``, ``ssl_key``, ``ssl_cert``, ``ssl_rejectUnauthorized``: TLS params. More doc at [ssl](../ssl.md). +* ``max_queue_size``: number of messages to store in memory before dropping. Default: 500. diff --git a/docs/outputs/redis.md b/docs/outputs/redis.md new file mode 100644 index 00000000..f7751595 --- /dev/null +++ b/docs/outputs/redis.md @@ -0,0 +1,36 @@ +Redis output plugin +--- + +Status : core plugin, unit tested and maintained. + +This plugin is used to send data on a Redis channel. + +There are two methods to send messages with redis : +* Publish / subscribe : The ``publish`` redis command will be used. ``channel`` parameter is needed. +* Queue. The ``rpush`` redis command will be used. ``key`` parameter is needed. 
+ +Example: +Config using url: ``output://redis://localhost:6379?channel=logstash_channel`` + +Config using logstash format: +```` +input { + redis { + host => localhost + port => 6379 + channel => logstash_channel + } +} +```` + +Parameters: + +* ``host``: ip of the redis server. +* ``port``: port of the redis server. +* ``auth_pass``: password to use when connecting to Redis +* ``type``: to specify the log type, to faciliate crawling in kibana. Example: ``type=app_name_log``. +* ``method``: ``pubsub`` or ``queue``. Method to use for redis messaging. +* ``channel``: Channel for publish / subscribe. No default value. +* ``key``: Queue name for queue. No default value. +* ``serializer``: more doc at [serializers](serializers.md). Default value to ``json_logstash``. +* ``format``: params used by the ``raw`` [serializer](serializers.md). diff --git a/docs/outputs/serializers.md b/docs/outputs/serializers.md new file mode 100644 index 00000000..a51703e4 --- /dev/null +++ b/docs/outputs/serializers.md @@ -0,0 +1,11 @@ +Serializers for output plugins +--- + +Status : core feature, unit tested and maintained. + +Some outputs plugins support the ``serializer`` params. +Supported serializer for output plugin : + +* ``json_logstash``: this serializer dumps the log line to a JSON Object. +* ``msgpack``: this serializer dumps the log line to a [msgpack](http://msgpack.org) Object. +* ``raw``: this serializer dumps the log line to a string, given in the ``format`` parameter. The ``format`` string can reference log lines properties (see [interpolation](../interpolation.md)). Default ``format`` value is ``#{message}``. diff --git a/docs/outputs/sqs.md b/docs/outputs/sqs.md new file mode 100644 index 00000000..1505b798 --- /dev/null +++ b/docs/outputs/sqs.md @@ -0,0 +1,27 @@ +SQS output plugin +--- + +Status : core plugin, unit tested and maintained. + +This plugin is used to send logs to [SQS](https://aws.amazon.com/en/sqs/). 
+ +Example : send messages to the SQS queue ``sqs.eu-west-1.amazonaws.com/66171255634/test`` +Config using url: ``output://sqs://sqs.eu-west-1.amazonaws.com/66171255634/test?aws_access_key_id=key&aws_secret_access_key=secret`` + +Config using logstash format: +```` +output { + sqs { + aws_queue => "sqs.eu-west-1.amazonaws.com/66171255634/test" + aws_access_key_id => key + aws_secret_access_key => secret + } +} +```` + +Parameters : +* ``aws_queue``: the aws queue url. +* ``aws_access_key_id``: your AWS Access Key Id. Required. +* ``aws_secret_access_key``: your AWS Secret Access Key Id. Required. +* ``serializer``: more doc at [serializers](serializers.md). Default value to ``json_logstash``. +* ``format``: params used by the ``raw`` [serializer](serializers.md). diff --git a/docs/outputs/statsd.md b/docs/outputs/statsd.md new file mode 100644 index 00000000..13c4ba5b --- /dev/null +++ b/docs/outputs/statsd.md @@ -0,0 +1,36 @@ +Statsd output plugin +--- + +Status : core plugin, unit tested and maintained. + +This plugin is used to send data to statsd. + +Example: to send, for each line of nginx log, a counter with value 1, key ``nginx.request``, on a statsd instance located on port 8125. +Config using url: ``output://statsd://localhost:8125?only_type=nginx&metric_type=increment&metric_key=nginx.request`` + +Config using logstash format: +```` +output { + if [type] == nginx { + statsd { + host => localhost + port => 8125 + metric_type => increment + metric_key => nginx.request + } + } +} +```` + +Parameters: + +* ``host``: ip of the statsd server. +* ``port``: port of the statsd server. +* ``metric_type``: one of ``increment``, ``decrement``, ``counter``, ``timer``, ``gauge``. Type of value to send to statsd. +* ``metric_key``: key to send to statsd. +* ``metric_value``: metric value to send to statsd. Mandatory for ``timer``, ``counter`` and ``gauge`` type. +* ``cache_*``: cache configuration for resolving ``host``. More doc at [cache](../cache.md). 
(By default, the UDP node stack will resolve the ``host`` field for each write. This behaviour can be expensive). + +``metric_key`` and ``metric_value`` can reference log line properties (see [interpolation](../interpolation.md)). + +Example: ``metric_key=nginx.response.#{status}`` diff --git a/docs/outputs/tcp_tls.md b/docs/outputs/tcp_tls.md new file mode 100644 index 00000000..fefb1488 --- /dev/null +++ b/docs/outputs/tcp_tls.md @@ -0,0 +1,43 @@ +TCP / TLS output plugin +--- + +Status : core plugin, unit tested and maintained. + +This plugin is used on log clients to send data over TCP, optionnaly with SSL/TLS encryption. + +Example 1: TCP mode: +Config using url: ``output://tcp://192.168.1.1:12345`` + +Config using logstash format: +```` +output { + tcp { + host => 192.168.1.1 + port => 12345 + } +} +```` + +Example 2: SSL mode: +Config using url: ``output://tcp://192.168.1.1:443?ssl=true&ssl_key=/etc/ssl/private/logstash-client.key&ssl_cert=/etc/ssl/private/logstash-client.crt&ssl_rejectUnauthorized=true`` +```` +output { + tcp { + host => 192.168.1.1 + port => 12345 + ssl => true + ssl_key => "/etc/ssl/private/logstash-client.key" + ssl_cert => "/etc/ssl/private/logstash-client.crt" + ssl_requestCert => true + ssl_rejectUnauthorized => true + } +} +```` +Parameters: + +* ``host``: ip of the tcp server. +* ``port``: port of the tcp server. +* ``ssl``: enable SSL mode. More doc at [ssl](../ssl.md). Default : false +* ``serializer``: more doc at [serializers](serializers.md). Default value to ``json_logstash``. +* ``format``: params used by the ``raw`` [serializer](serializers.md). +* ``delimiter``: Optional. Delimiter inserted between message. Default : ``\n``. Must be encoded in url (eg ``%0A`` for ``\n``). Can be empty. diff --git a/docs/outputs/ws.md b/docs/outputs/ws.md new file mode 100644 index 00000000..e1da1159 --- /dev/null +++ b/docs/outputs/ws.md @@ -0,0 +1,46 @@ +Websocket output plugin +--- + +Status : core plugin, unit tested and maintained. 
+ +This plugin is used on log clients to send data over a websocket, optionally with SSL/TLS encryption. Websockets are like +TCP connections but they are proxy and firewall friendly. + +Example 1: Regular mode: +Config using url: ``output://ws://192.168.1.1:12345`` + +Config using logstash format: +```` +output { + ws { + host => 192.168.1.1 + port => 12345 + } +} +```` + +Example 2: TLS mode: +Config using url: ``output://ws://192.168.1.1:443?ssl=true&ssl_key=/etc/ssl/private/logstash-client.key&ssl_cert=/etc/ssl/private/logstash-client.crt&ssl_rejectUnauthorized=true`` +```` +output { + ws { + host => 192.168.1.1 + port => 12345 + ssl => true + ssl_key => "/etc/ssl/private/logstash-client.key" + ssl_cert => "/etc/ssl/private/logstash-client.crt" + ssl_requestCert => true + ssl_rejectUnauthorized => true + } +} +```` + +Parameters: + +* ``host``: ip of the ws server. +* ``port``: port of the ws server. +* ``serializer``: more doc at [serializers](serializers.md). Default value to ``json_logstash``. +* ``format``: params used by the ``raw`` [serializer](serializers.md). +* ``ssl``: enable SSL mode. More doc at [ssl](../ssl.md). Default : false +* ``proxy``: use http proxy. More doc at [http proxy](http_proxy.md). Default : none. +* ``basic_auth_user`` and ``basic_auth_password``: user and password for HTTP Basic Auth required by server. Default: none. diff --git a/docs/outputs/zeromq.md b/docs/outputs/zeromq.md new file mode 100644 index 00000000..e8677db0 --- /dev/null +++ b/docs/outputs/zeromq.md @@ -0,0 +1,45 @@ +ZeroMQ output plugin +--- + +Status : core plugin, unit tested and maintained. + +This plugin is used on agents to ship to logs servers, or to send logs to [Elasticsearch Logstash River (ES < 2.0)](https://github.com/bpaquet/elasticsearch-river-zeromq) or to [Elasticsarch ZeroMQ torrent (ES >= 2.x)](https://github.com/bpaquet/elasticsearch-zeromq-torrent). + +Example 1: to send logs to 192.168.1.1 port 5555. 
+Config using url: ``output://zeromq://tcp://192.168.1.1:5555`` + +Config using logstash format: +```` +output { + zeromq { + address => ['tcp://192.168.1.1:5555'] + } +} +```` + +Example 2: to send logs to 192.168.1.1 and 192.168.1.2, using built in ZeroMQ load balancing feature. +Config using url: ``output://zeromq://tcp://192.168.1.1:5555,tcp://192.168.1.2:5555`` + +Config using logstash format: +```` +output { + zeromq { + address => ['tcp://192.168.1.1:5555', 'tcp://192.168.1.2:5555'] + } +} +```` + +There are two queues in ZeroMQ output plugin : + +* in the ZeroMQ library (see high watermark below). Default size: unlimited +* in the ZeroMQ NodeJS driver. Size is unlimited. + +Parameters: + +* ``address``: array of target ZeroMQ url. +* ``serializer``: more doc at [serializers](serializers.md). Default value to ``json_logstash``. +* ``format``: params used by the ``raw`` [serializer](serializers.md). +* ``zmq_high_watermark``: set the high watermark param on [ZeroMQ socket](http://api.zeromq.org/2-1:zmq-setsockopt). Default : no value. +* ``zmq_threshold_up``: if the NodeJS driver queues size rises above this threshold, node-logstash will stop every input plugin to avoid memory exhaustion. Default : no value. +* ``zmq_threshold_down``: if the NodeJS driver queues size falls below this threshold and inputs plugins are stopped, node-logstash will start every input plugin. Default : no value. +* ``zmq_check_interval``: if set, the plugin will check the NodeJS driver queue status to go out of alarm mode. Default : no value. Unit is milliseconds diff --git a/docs/ssl.md b/docs/ssl.md new file mode 100644 index 00000000..26ff134e --- /dev/null +++ b/docs/ssl.md @@ -0,0 +1,17 @@ +SSL Params +--- + +Status : core feature, unit tested and maintained. + +When you are in SSL mode (client or server), you can use [all the parameters used by node for SSL / TLS](http://nodejs.org/api/tls.html#tls_tls_createserver_options_secureconnectionlistener), prefixed by ``ssl_``. 
+You have to give path for certificate and key params, node-logstash will load them before initializing SSL / TLS stack. + +For example, for a HTTPS server : ``ssl=true&ssl_cert=/path/to/cert&ssl_key=/path/to/key`` + +For using a Certificate authority, add ``&ssl_ca=/path/to/ca``. + +For changing SSL ciphers, add ``ssl_ciphers=AES128-GCM-SHA256``. + +To use a client certificate, add ``ssl_cert=/client.cer&ssl_key=/client.key&ssl_ca=/tmp/ca.key``. + +To ignore ssl errors, add ``ssl_rejectUnauthorized=false`. diff --git a/generate_parser.sh b/generate_parser.sh new file mode 100755 index 00000000..f06380ad --- /dev/null +++ b/generate_parser.sh @@ -0,0 +1 @@ +node_modules/.bin/jison lib/logstash_config.jison --outfile lib/logstash_config.js --parser-type lalr \ No newline at end of file diff --git a/jshint.sh b/jshint.sh new file mode 100755 index 00000000..c9fa51c2 --- /dev/null +++ b/jshint.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +node_modules/.bin/jshint bin/* lib/* test/* diff --git a/lib/agent.js b/lib/agent.js new file mode 100644 index 00000000..e894adaa --- /dev/null +++ b/lib/agent.js @@ -0,0 +1,274 @@ +var url_parser = require('./lib/url_parser'), + sig_listener = require('./lib/sig_listener').sig_listener, + events = require('events'), + util = require('util'), + path = require('path'), + async = require('async'), + logger = require('log4node'); + +function LogstashAgent() { + events.EventEmitter.call(this); + this.setMaxListeners(0); + this.modules = []; + this.first_filter = new events.EventEmitter(); + this.first_filter.setMaxListeners(0); + this.last_filter_output_func = function(data) { + this.emit('last_filter', data); + }.bind(this); + this.first_filter.on('output', this.last_filter_output_func); + this.on('first_filter', function(data) { + this.first_filter.emit('output', data); + }.bind(this)); + this.last_filter = this.first_filter; + this.sig_listener = function() { + if (this.closed_inputs) { + logger.info('Starting inputs plugins'); + 
this.start_inputs(); + } + else { + logger.info('Closing inputs plugins'); + this.close_inputs(); + } + }.bind(this); + sig_listener.on('SIGUSR1', this.sig_listener); + this.alarm_count = 0; + this.on('alarm', function(mode, module) { + if (mode) { + this.alarm_count += 1; + logger.warning('Alarm on for module', module, 'number of alarms', this.alarm_count); + if (this.closed_inputs === false) { + logger.warning('Alarm, closing inputs plugins'); + this.close_inputs(); + this.emit('alarm_mode', true); + } + } + if (!mode) { + this.alarm_count -= 1; + logger.warning('Alarm off for module', module, 'number of alarms', this.alarm_count); + if (this.alarm_count === 0 && this.closed_inputs === true) { + logger.warning('Alarm off, starting inputs plugins'); + this.start_inputs(); + this.emit('alarm_mode', false); + } + } + }); +} + +util.inherits(LogstashAgent, events.EventEmitter); + +LogstashAgent.prototype.close = function(callback) { + this.close_inputs(function() { + this.close_filters(function() { + this.close_outputs(function() { + logger.info('Closing agent'); + if (this.sig_listener) { + sig_listener.removeListener('SIGUSR1', this.sig_listener); + } + return callback(); + }.bind(this)); + }.bind(this)); + }.bind(this)); +}; + +LogstashAgent.prototype.close_modules = function(l, callback) { + async.eachSeries(l, function(m, callback) { + if (m.module) { + m.module.close(function() { + delete m.module; + callback(); + }.bind(this)); + } + else { + callback(); + } + }.bind(this), callback); +}; + +LogstashAgent.prototype.close_inputs = function(callback) { + if (!callback) { + callback = function(err) { + if (err) { + logger.error('Unable to close plugins', err); + } + else { + logger.info('All plugins closed'); + } + }; + } + this.close_modules(this.modules.inputs, function(err) { + if (err) { + return callback(err); + } + this.closed_inputs = true; + callback(); + }.bind(this)); +}; + +LogstashAgent.prototype.close_filters = function(callback) { + 
this.close_modules(this.modules.filters, callback); +}; + +LogstashAgent.prototype.close_outputs = function(callback) { + this.close_modules(this.modules.outputs, callback); +}; + +LogstashAgent.prototype.configure = function(url, type, callback) { + var parsed = url_parser.extractProtocol(url); + if (!parsed) { + return callback(new Error('Unable to extract plugin name from ' + url)); + } + try { + logger.debug('Initializing module', type); + var directory = type + 's'; + var module_name = type + '_' + parsed.protocol; + var module; + try { + module = require('./' + path.join(directory, module_name)); + } + catch(e) { + module = require(path.join(directory, module_name)); + } + module = module.create(); + var callback_called = false; + var on_error = function(err) { + if (!callback_called) { + callback_called = true; + callback(err, module); + } + }; + module.once('error', on_error); + module.init(parsed.next, function(err) { + module.removeListener('error', on_error); + if (!callback_called) { + callback_called = true; + callback(err, module); + } + module.on('error', function(err) { + this.emit('error', module_name, err); + }.bind(this)); + }.bind(this)); + } + catch (err) { + callback(err); + } +}; + +LogstashAgent.prototype.start = function(urls, callback) { + this.modules = { + inputs: [], + filters: [], + outputs: [], + }; + logger.debug('Loading urls', urls); + for (var k in urls) { + var url = urls[k]; + var parsed = url_parser.extractProtocol(url); + if (!parsed) { + return callback(new Error('Unable to extract protocol from ' + url)); + } + if (parsed.protocol === 'input') { + this.modules.inputs.push({ + url: parsed.next + }); + } + else if (parsed.protocol === 'output') { + this.modules.outputs.push({ + url: parsed.next + }); + } + else if (parsed.protocol === 'filter') { + this.modules.filters.push({ + url: parsed.next + }); + } + else { + return callback(new Error('Unknown protocol : ' + parsed.protocol)); + } + } + this.start_outputs(function(err) { 
+ if (err) { + return callback(err); + } + this.start_filters(function(err) { + if (err) { + return callback(err); + } + this.start_inputs(callback); + }.bind(this)); + }.bind(this)); +}; + +LogstashAgent.prototype.start_modules = function(l, callback, type, module_callback) { + async.eachSeries(l, function(m, callback) { + this.configure(m.url, type, function(err, module) { + if (err) { + return callback(err); + } + m.module = module; + module_callback(module, callback); + }.bind(this)); + }.bind(this), callback); +}; + +LogstashAgent.prototype.start_inputs = function(callback) { + if (!callback) { + callback = function(err) { + if (err) { + logger.error('Unable to start plugins', err); + } + else { + logger.info('All plugins started'); + } + }; + } + this.start_modules(this.modules.inputs, function(err) { + if (err) { + return callback(err); + } + this.closed_inputs = false; + callback(); + }.bind(this), 'input', function(module, module_callback) { + + module.on('data', function(data) { + module.add_tags_function(data, 'tags'); + module.add_fields_function(data); + this.emit('first_filter', data); + }.bind(this)); + + module_callback(); + }.bind(this)); +}; + +LogstashAgent.prototype.start_filters = function(callback) { + this.start_modules(this.modules.filters, callback, 'filter', function(module, module_callback) { + + module.on('output', this.last_filter_output_func); + this.last_filter.removeListener('output', this.last_filter_output_func); + this.last_filter.on('output', function(data) { + module.emit('input', data); + }); + + this.last_filter = module; + + module_callback(); + }.bind(this)); +}; + +LogstashAgent.prototype.start_outputs = function(callback) { + this.start_modules(this.modules.outputs, callback, 'output', function(module, module_callback) { + + this.on('last_filter', function(data) { + module.emit('data', data); + }); + + module.on('alarm', function(mode, name) { + this.emit('alarm', mode, name); + }.bind(this)); + + module_callback(); + 
}.bind(this)); +}; + +exports.create = function() { + return new LogstashAgent(); +}; diff --git a/lib/filters/filter_add_host.js b/lib/filters/filter_add_host.js new file mode 100644 index 00000000..65274170 --- /dev/null +++ b/lib/filters/filter_add_host.js @@ -0,0 +1,29 @@ +var base_filter = require('../lib/base_filter'), + os = require('os'), + util = require('util'); + +function FilterAddHost() { + base_filter.BaseFilter.call(this); + this.mergeConfig({ + name: 'AddHost', + start_hook: this.start, + }); +} + +util.inherits(FilterAddHost, base_filter.BaseFilter); + +FilterAddHost.prototype.start = function(callback) { + this.os = os.hostname(); + callback(); +}; + +FilterAddHost.prototype.process = function(data) { + if (!data.host) { + data.host = this.os; + } + return data; +}; + +exports.create = function() { + return new FilterAddHost(); +}; diff --git a/lib/filters/filter_add_timestamp.js b/lib/filters/filter_add_timestamp.js new file mode 100644 index 00000000..9938ad35 --- /dev/null +++ b/lib/filters/filter_add_timestamp.js @@ -0,0 +1,22 @@ +var base_filter = require('../lib/base_filter'), + util = require('util'); + +function FilterAddTimestamp() { + base_filter.BaseFilter.call(this); + this.mergeConfig({ + name: 'AddTimestamp', + }); +} + +util.inherits(FilterAddTimestamp, base_filter.BaseFilter); + +FilterAddTimestamp.prototype.process = function(data) { + if (!data['@timestamp']) { + data['@timestamp'] = (new Date()).toISOString(); + } + return data; +}; + +exports.create = function() { + return new FilterAddTimestamp(); +}; diff --git a/lib/filters/filter_add_version.js b/lib/filters/filter_add_version.js new file mode 100644 index 00000000..a2b8c40d --- /dev/null +++ b/lib/filters/filter_add_version.js @@ -0,0 +1,22 @@ +var base_filter = require('../lib/base_filter'), + util = require('util'); + +function FilterAddVersion() { + base_filter.BaseFilter.call(this); + this.mergeConfig({ + name: 'AddVersion', + }); +} + +util.inherits(FilterAddVersion, 
base_filter.BaseFilter); + +FilterAddVersion.prototype.process = function(data) { + if (!data['@version']) { + data['@version'] = '1'; + } + return data; +}; + +exports.create = function() { + return new FilterAddVersion(); +}; diff --git a/lib/filters/filter_app_janus.js b/lib/filters/filter_app_janus.js new file mode 100644 index 00000000..6d75846f --- /dev/null +++ b/lib/filters/filter_app_janus.js @@ -0,0 +1,91 @@ +var base_filter = require('../lib/base_filter'), + dirty = require('dirty'), + util = require('util'); + +function FilterAppJanus() { + base_filter.BaseFilter.call(this); + this.mergeConfig({ + name: 'AppJanus', + start_hook: this.start, + }); +} + +util.inherits(FilterAppJanus, base_filter.BaseFilter); + +FilterAppJanus.prototype.start = function(callback) { + this.db = dirty(); + callback(); +}; + +FilterAppJanus.prototype.process = function(data) { + // Process MEETECHO JANUS Events + if (data.type == 1) { + // session create/destroy + // store session_id for transport lookups + if(data.session_id && data.event.transport && data.event.transport.id ) { + db.set(data.session_id, { transport_id: data.event.transport.id }, function() { + }); + } + if (data.event.name == "created" && data.session_id) { + db.set("sess_"+data.event.transport.id, { session_id: data.session_id.toString() }, function() { + }); + } else if (data.event.name == "destroyed") { + // cleanup db + try { + if (db.get(db.get(data.session_id).transport_id)) { + setTimeout(function() { + try { db.rm(db.get(db.get(data.session_id).transport_id)); } catch(err) { if (debug) console.log(err); } + }, 2000); + } + setTimeout(function() { + try { + db.rm(data.session_id); + db.rm(data.transport_id); + db.rm("sess_"+data.transport_id); + } catch(err) { if (debug) console.log(err); } + }, 2000); + } catch(err) { if (debug) console.log(err); } + } + } else if (data.type == 128) { + // transports, no session_id native + // store IP for Session for transport lookups + if(data.event.id && 
data.event.data['ip'] && data.event.data['port']) { + db.set(data.event.id, {ip: data.event.data['ip'].replace('::ffff:',''),port: data.event.data['port'] }, function() { + }); + } + if (!data.session_id && data.event.id) { + var getsession = db.get("sess_"+data.event.id); + if (getsession && getsession.session_id != undefined) { + data.session_id = getsession.session_id; + }; + } + + } else if (data.type == 32) { + if (!data.session_id) return; + // lookup of media transport IP - ignoring handle_id or grabbing them all + if (db) { + if (data.session_id && db.get(data.session_id)) { + data.ip = { + ip: db.get(db.get(data.session_id).transport_id).ip, + port: db.get(db.get(data.session_id).transport_id).port + }; + } + } + } + + + if(data.session_id) data.session_id = data.session_id.toString(); + if(data.handle_id) data.handle_id = data.handle_id.toString(); + if(data.sender) data.sender = data.sender.toString(); + if(data.type) data.type = data.type.toString(); + if(data.event && data.even.transport) { if (typeof data.event.transport === "string") { data.event.transport = { transport: data.event.transport } } } + if(data.plugindata && data.plugindata.data && data.plugindata.data.result) { + if (typeof data.plugindata.data.result === "string") { data.plugindata.data.result = { result: data.plugindata.data.result } } + } + + return data; +}; + +exports.create = function() { + return new FilterAppJanus(); +}; diff --git a/lib/filters/filter_bunyan.js b/lib/filters/filter_bunyan.js new file mode 100644 index 00000000..59e02784 --- /dev/null +++ b/lib/filters/filter_bunyan.js @@ -0,0 +1,68 @@ +var base_filter = require('../lib/base_filter'), + util = require('util'); + +// https://github.com/trentm/node-bunyan#levels +function parseLevel(level) { + if (typeof level === 'string') { + return level; + } + else if (level >= 60) { + return 'FATAL'; + } + else if (level >= 50) { + return 'ERROR'; + } + else if (level >= 40) { + return 'WARN'; + } + else if (level >= 30) { + 
return 'INFO'; + } + else if (level >= 20) { + return 'DEBUG'; + } + return 'TRACE'; +} + +function FilterBunyan() { + base_filter.BaseFilter.call(this); + this.mergeConfig({ + name: 'Bunyan', + }); +} + +var map = { + time: '@timestamp', + hostname: 'host', + v: 'bunyan_version', + name: 'bunyan_app_name', + msg: 'message', +}; + +util.inherits(FilterBunyan, base_filter.BaseFilter); + +FilterBunyan.prototype.process = function(data) { + try { + var message = data.message; + var fields = JSON.parse(message.substring(message.indexOf('{', 0))); + for (var field in fields) { + if (map[field]) { + data[map[field]] = fields[field]; + } + else if (field === 'level') { + data.bunyan_level_name = parseLevel(fields.level); + data.bunyan_level = fields.level; + } + else { + data[field] = fields[field]; + } + } + } + catch (e) {} + + return data; +}; + +exports.create = function() { + return new FilterBunyan(); +}; diff --git a/lib/filters/filter_compute_date_field.js b/lib/filters/filter_compute_date_field.js new file mode 100644 index 00000000..e22d3b81 --- /dev/null +++ b/lib/filters/filter_compute_date_field.js @@ -0,0 +1,32 @@ +var base_filter = require('../lib/base_filter'), + util = require('util'), + moment = require('moment'), + logger = require('log4node'); + +function FilterComputeDateField() { + base_filter.BaseFilter.call(this); + this.mergeConfig({ + name: 'ComputeDateField', + required_params: ['date_format'], + host_field: 'field', + start_hook: this.start, + }); +} + +util.inherits(FilterComputeDateField, base_filter.BaseFilter); + +FilterComputeDateField.prototype.start = function(callback) { + logger.info('Initialized compute date field filter on field: ' + this.field + ', date_format: ' + this.date_format); + callback(); +}; + +FilterComputeDateField.prototype.process = function(data) { + if (data['@timestamp']) { + data[this.field] = moment(data['@timestamp']).format(this.date_format); + } + return data; +}; + +exports.create = function() { + return new 
FilterComputeDateField(); +}; diff --git a/lib/filters/filter_compute_field.js b/lib/filters/filter_compute_field.js new file mode 100644 index 00000000..65010211 --- /dev/null +++ b/lib/filters/filter_compute_field.js @@ -0,0 +1,32 @@ +var base_filter = require('../lib/base_filter'), + util = require('util'), + logger = require('log4node'); + +function FilterComputeField() { + base_filter.BaseFilter.call(this); + this.mergeConfig({ + name: 'ComputeField', + required_params: ['value'], + host_field: 'field', + start_hook: this.start, + }); +} + +util.inherits(FilterComputeField, base_filter.BaseFilter); + +FilterComputeField.prototype.start = function(callback) { + logger.info('Initialized compute field filter on field: ' + this.field + ', value: ' + this.value); + callback(); +}; + +FilterComputeField.prototype.process = function(data) { + var value = this.replaceByFields(data, this.value); + if (value) { + data[this.field] = value; + } + return data; +}; + +exports.create = function() { + return new FilterComputeField(); +}; diff --git a/lib/filters/filter_eval.js b/lib/filters/filter_eval.js new file mode 100644 index 00000000..7d76d052 --- /dev/null +++ b/lib/filters/filter_eval.js @@ -0,0 +1,44 @@ +var base_filter = require('../lib/base_filter'), + util = require('util'), + logger = require('log4node'); + +function FilterComputeField() { + base_filter.BaseFilter.call(this); + this.mergeConfig({ + name: 'Eval', + required_params: ['operation'], + optional_params: ['target_field'], + host_field: 'source_field', + start_hook: this.start, + }); +} + +util.inherits(FilterComputeField, base_filter.BaseFilter); + +FilterComputeField.prototype.start = function(callback) { + if (!this.target_field) { + this.target_field = this.source_field; + } + logger.info('Initializing eval filter from', this.source_field, 'to', this.target_field, 'operation', this.operation); + callback(); +}; + +FilterComputeField.prototype.process = function(data) { + var x = 
data[this.source_field]; + if (x) { + try { + /* jshint evil: true */ + var result = eval(this.operation); + if (result !== undefined && result !== null && (typeof result === 'string' || ! isNaN(result)) && result !== Infinity) { + data[this.target_field] = result; + } + } + catch(err) { + } + } + return data; +}; + +exports.create = function() { + return new FilterComputeField(); +}; diff --git a/lib/filters/filter_geoip.js b/lib/filters/filter_geoip.js new file mode 100644 index 00000000..dd568da7 --- /dev/null +++ b/lib/filters/filter_geoip.js @@ -0,0 +1,148 @@ +var base_filter = require('../lib/base_filter'), + cache_helper = require('../lib/cache_helper'), + util = require('util'), + logger = require('log4node'); + +function FilterGeoip() { + base_filter.BaseFilter.call(this); + this.mergeConfig(cache_helper.config()); + this.mergeConfig({ + name: 'Geoip', + optional_params: ['country_field', 'region_field', 'city_field', 'lonlat_field', 'maxmind_dir'], + host_field: 'field', + default_values: { + 'country_field': '__default__', + 'region_field': '__default__', + 'city_field': '__default__', + 'lonlat_field': '__default__', + 'asn_field': '__default__', + }, + start_hook: this.start, + }); +} + +util.inherits(FilterGeoip, base_filter.BaseFilter); + +FilterGeoip.prototype.start = function(callback) { + var maxmind; + var geoip; + if (process.env.MAXMIND_DB_DIR) { + this.maxmind_dir = process.env.MAXMIND_DB_DIR; + } + if (this.maxmind_dir) { + maxmind = require('maxmind'); + logger.info('Initializing geoip filter from directory', this.maxmind_dir); + maxmind.init(['GeoIPCity.dat', 'GeoIPASNum.dat'].map(function(x) { + return this.maxmind_dir + '/' + x; + }.bind(this)), {checkForUpdates: true}); + } + else { + geoip = require('geoip-lite'); + geoip.startWatchingDataUpdate(); + } + if (this.country_field === '__default__') { + this.country_field = this.field + '_geo_country'; + } + if (this.region_field === '__default__') { + this.region_field = this.field + 
'_geo_region'; + } + if (this.city_field === '__default__') { + this.city_field = this.field + '_geo_city'; + } + if (this.lonlat_field === '__default__') { + this.lonlat_field = this.field + '_geo_lonlat'; + } + if (this.asn_field === '__default__') { + this.asn_field = this.field + '_geo_asn'; + } + logger.info('Initialized geoip filter, ip field', this.field); + logger.info('Initialized geoip filter, country field', this.country_field); + logger.info('Initialized geoip filter, region field', this.region_field); + logger.info('Initialized geoip filter, city field', this.city_field); + logger.info('Initialized geoip filter, latlon field', this.lonlat_field); + logger.info('Initialized geoip filter, asn field', this.asn_field); + this.cache_miss = function(key, callback) { + var geo; + if (this.maxmind_dir) { + var r = maxmind.getLocation(key); + if (r) { + geo = { + country: r.countryCode, + region: r.region, + city: r.city, + asn: maxmind.getAsn(key), + }; + if (r.latitude && r.longitude) { + geo.ll = [Number((r.latitude).toFixed(4)), Number((r.longitude).toFixed(4))]; + } + } + } + else { + geo = geoip.lookup(key); + } + callback(undefined, geo); + }.bind(this); + callback(); +}; + +FilterGeoip.prototype.process = function(data) { + var ip = data[this.field]; + if (ip && + ip !== '-' && + ip.indexOf('10.') !== 0 && + ip.indexOf('192.168.') !== 0 && + ip.indexOf('172.16') !== 0 && + ip.indexOf('172.17') !== 0 && + ip.indexOf('172.18') !== 0 && + ip.indexOf('172.19') !== 0 && + ip.indexOf('172.20') !== 0 && + ip.indexOf('172.21') !== 0 && + ip.indexOf('172.22') !== 0 && + ip.indexOf('172.23') !== 0 && + ip.indexOf('172.24') !== 0 && + ip.indexOf('172.25') !== 0 && + ip.indexOf('172.26') !== 0 && + ip.indexOf('172.27') !== 0 && + ip.indexOf('172.28') !== 0 && + ip.indexOf('172.29') !== 0 && + ip.indexOf('172.30') !== 0 && + ip.indexOf('172.31') !== 0 && + ip.indexOf('127.0.0.1') !== 0 + ) { + this.cache(ip, function(err, geo) { + if (err) { + logger.info('Unable to 
geoip lookup', ip, ':', err); + } + else if (geo) { + if (this.country_field !== 'none' && geo.country && geo.country !== '') { + logger.debug('Storing country for ip ' + ip, geo.country); + data[this.country_field] = geo.country; + } + if (this.region_field !== 'none' && geo.region && geo.region !== '') { + logger.debug('Storing region for ip ' + ip, geo.region); + data[this.region_field] = geo.region; + } + if (this.city_field !== 'none' && geo.city && geo.city !== '') { + logger.debug('Storing city for ip ' + ip, geo.city); + data[this.city_field] = geo.city; + } + if (this.asn_field !== 'none' && geo.asn && geo.asn !== '') { + logger.debug('Storing asn for ip ' + ip, geo.asn); + data[this.asn_field] = geo.asn; + } + if (this.lonlat_field !== 'none' && geo.ll) { + logger.debug('Storing latlon for ip ' + ip, geo.ll[0] + ',' + geo.ll[1]); + data[this.lonlat_field] = [geo.ll[1], geo.ll[0]]; + } + } + else { + logger.info('Unable to geoip lookup', ip); + } + }.bind(this)); + } + return data; +}; + +exports.create = function() { + return new FilterGeoip(); +}; \ No newline at end of file diff --git a/lib/filters/filter_grep.js b/lib/filters/filter_grep.js new file mode 100644 index 00000000..d669a4f4 --- /dev/null +++ b/lib/filters/filter_grep.js @@ -0,0 +1,36 @@ +var base_filter = require('../lib/base_filter'), + util = require('util'), + logger = require('log4node'); + +function FilterGrep() { + base_filter.BaseFilter.call(this); + this.mergeConfig({ + name: 'Grep', + required_params: ['regex'], + optional_params: ['invert', 'regex_flags'], + default_values: { + 'invert': false, + }, + start_hook: this.start, + }); +} + +util.inherits(FilterGrep, base_filter.BaseFilter); + +FilterGrep.prototype.start = function(callback) { + this.regex = new RegExp(this.regex, this.regex_flags); + logger.info('Initialized grep filter on regex: ' + this.regex + ', invert: ' + this.invert + ', flags: ' + (this.regex_flags || '')); + callback(); +}; + +FilterGrep.prototype.process = 
function(data) { + var match = data.message.match(this.regex); + if (this.invert) { + match = !match; + } + return match ? data : undefined; +}; + +exports.create = function() { + return new FilterGrep(); +}; diff --git a/lib/filters/filter_grok.js b/lib/filters/filter_grok.js new file mode 100644 index 00000000..127ba237 --- /dev/null +++ b/lib/filters/filter_grok.js @@ -0,0 +1,171 @@ +var base_filter = require('../lib/base_filter'), + file_loader = require('../lib/file_loader'), + patterns_loader = require('../lib/patterns_loader'), + util = require('util'), + logger = require('log4node'), + regex_helper = require('../lib/regex_helper'), + OnigRegExp = require('oniguruma').OnigRegExp; + +var onload_callbacks = []; +var loaded = false; +var global_patterns; + +function processLines(lines) { + var patterns = {}; + lines.forEach(function(line) { + // the first space is the separator + var sepIndex = line.indexOf(' '); + if (sepIndex === -1) { + // no space, invalid pattern definition + logger.error('Invalid grok pattern definition "' + line + '"'); + return; + } + var name = line.substring(0, sepIndex); + // remove all unnamed capturing groups from the pattern + patterns[name] = line.substring(sepIndex + 1).trim().replace(/\((?!\?)/g, '(?:'); + }); + return patterns; +} + +function loadPatterns(callback) { + var _leave = function(err) { + loaded = true; + onload_callbacks.forEach(function(cb) { + setImmediate(function() { + cb(err); + }); + }); + onload_callbacks = []; + }; + if (loaded) { + return callback(); + } + onload_callbacks.push(callback); + if (onload_callbacks.length > 1) { + return; + } + logger.info('Loading grok patterns'); + patterns_loader.loadGrokPatterns(function(err, lines) { + if (err) { + return _leave(err); + } + var patterns = processLines(lines); + logger.info('Grok patterns loaded from patterns directories', Object.keys(patterns).length); + global_patterns = patterns; + _leave(); + }); +} + +function FilterGrok() { + 
base_filter.BaseFilter.call(this); + this.mergeConfig(regex_helper.config()); + this.mergeConfig(this.tags_fields_config()); + this.mergeConfig({ + name: 'Grok', + host_field: '', + allow_empty_host: true, + required_params: ['match'], + optional_params: ['extra_patterns_file', 'tag_on_failure'], + default_values: { + 'tag_on_failure': ['_grokparsefailure'] + }, + arrays: ['tag_on_failure'], + config_hook: this.loadPatterns, + start_hook: this.start + }); +} + +util.inherits(FilterGrok, base_filter.BaseFilter); + +FilterGrok.prototype.expandGrokPattern = function(regex, extra_patterns) { + var offset = 0; + var reduced = regex; + var result; + var grokFinder = new OnigRegExp('%{[^}]+}'); + var grokParts = new RegExp('%{([^:]+):?(.*)}'); + + while ((result = grokFinder.searchSync(regex, offset))) { + offset = result[0].end; + var grokExp = result[0].match; + var parts = grokExp.match(grokParts); + + var p = global_patterns[parts[1]] || extra_patterns[parts[1]]; + if (p) { + if (parts[2].length > 0) { + this.fields.push(parts[2]); + } + var reg = this.expandGrokPattern(p, extra_patterns); + if (parts[2].length > 0) { + // create a named capturing group + reg = '(?<' + parts[2] + '>' + reg + ')'; + } + // replace the grok expression with the regular expression + reduced = reduced.replace(grokExp, reg); + } + else { + throw new Error('Unable to find grok pattern ' + parts[1]); + } + } + + return reduced; +}; + +FilterGrok.prototype.start = function(callback) { + logger.info('Initializing grok filter, pattern: ' + this.match); + + this.fields = []; + + this.post_process = regex_helper.process.bind(this); + + var _done = function(extra_patterns) { + try { + var expanded = this.expandGrokPattern(this.match, extra_patterns); + this.regex = new OnigRegExp(expanded); + } + catch(e) { + logger.error('Unable to process grok pattern', this.match, e); + return callback(e); + } + callback(); + }.bind(this); + + loadPatterns(function(err) { + if (err) { + return callback(err); + } 
+ if (this.extra_patterns_file) { + logger.info('Loading extra pattern file', this.extra_patterns_file); + file_loader.loadFile(this.extra_patterns_file, function(err, lines) { + if (err) { + return callback(err); + } + _done(processLines(lines)); + }.bind(this)); + } + else { + _done({}); + } + }.bind(this)); +}; + +FilterGrok.prototype.process = function(data) { + logger.debug('Trying to match on grok', this.match, ', input', data.message); + var result = this.regex.searchSync(data.message); + if (result) { + for (var i = 0; i < this.fields.length; i++) { + this.post_process(data, result[i + 1].match, i); + } + this.add_tags_function(data, 'add_tags'); + this.remove_tags_function(data); + this.add_fields_function(data); + this.remove_fields_function(data); + } + else { + this.add_tags_function(data, 'tag_on_failure'); + } + return data; +}; + +exports.create = function() { + return new FilterGrok(); +}; diff --git a/lib/filters/filter_http_status_classifier.js b/lib/filters/filter_http_status_classifier.js new file mode 100644 index 00000000..07164b44 --- /dev/null +++ b/lib/filters/filter_http_status_classifier.js @@ -0,0 +1,62 @@ +var base_filter = require('../lib/base_filter'), + util = require('util'), + logger = require('log4node'); + +function FilterHttpStatusClassifier() { + base_filter.BaseFilter.call(this); + this.mergeConfig({ + name: 'HttpStatus', + host_field: 'field', + optional_params: ['special_codes', 'target_field'], + default_values: { + target_field: 'http_class', + }, + start_hook: this.start, + }); +} + +util.inherits(FilterHttpStatusClassifier, base_filter.BaseFilter); + +FilterHttpStatusClassifier.prototype.start = function(callback) { + if (this.special_codes) { + this.special_codes = this.special_codes.split(/,/).map(function(x) { + return parseInt(x, 10); + }); + } + else { + this.special_codes = []; + } + logger.info('Initialized http status filter: source ' + this.field + ', target: ' + this.target_field + ', special_codes: ' + 
this.special_codes); + callback(); +}; + +FilterHttpStatusClassifier.prototype.process = function(data) { + if (data[this.field]) { + for(var i in this.special_codes) { + if (data[this.field] === this.special_codes[i]) { + data[this.target_field] = this.special_codes[i].toString(); + return data; + } + } + if (data[this.field] >= 100 && data[this.field] < 200) { + data[this.target_field] = '1xx'; + } + if (data[this.field] >= 200 && data[this.field] < 300) { + data[this.target_field] = '2xx'; + } + if (data[this.field] >= 300 && data[this.field] < 400) { + data[this.target_field] = '3xx'; + } + if (data[this.field] >= 400 && data[this.field] < 500) { + data[this.target_field] = '4xx'; + } + if (data[this.field] >= 500 && data[this.field] < 600) { + data[this.target_field] = '5xx'; + } + } + return data; +}; + +exports.create = function() { + return new FilterHttpStatusClassifier(); +}; diff --git a/lib/filters/filter_json_fields.js b/lib/filters/filter_json_fields.js new file mode 100644 index 00000000..6320d03d --- /dev/null +++ b/lib/filters/filter_json_fields.js @@ -0,0 +1,28 @@ +var base_filter = require('../lib/base_filter'), + util = require('util'); + +function FilterJsonFields() { + base_filter.BaseFilter.call(this); + this.mergeConfig({ + name: 'JsonFields', + }); +} + +util.inherits(FilterJsonFields, base_filter.BaseFilter); + +FilterJsonFields.prototype.process = function(data) { + try { + var message = data.message; + var fields = JSON.parse(message.substring(message.indexOf('{', 0))); + for (var field in fields) { + data[field] = fields[field]; + } + } + catch (e) {} + + return data; +}; + +exports.create = function() { + return new FilterJsonFields(); +}; diff --git a/lib/filters/filter_multiline.js b/lib/filters/filter_multiline.js new file mode 100644 index 00000000..c936941e --- /dev/null +++ b/lib/filters/filter_multiline.js @@ -0,0 +1,37 @@ +var base_filter_buffer = require('../lib/base_filter_buffer'), + util = require('util'), + logger = 
require('log4node'); + +function FilterMultiline() { + base_filter_buffer.BaseFilterBuffer.call(this); + this.mergeConfig({ + name: 'Multiline', + required_params: ['start_line_regex'], + optional_params: ['max_delay', 'regex_flags'], + default_values: { + max_delay: 50, + }, + start_hook: this.start, + }); +} + +util.inherits(FilterMultiline, base_filter_buffer.BaseFilterBuffer); + +FilterMultiline.prototype.start = function(callback) { + this.start_line_regex = new RegExp(this.start_line_regex, this.regex_flags); + logger.info('Initialized multiline filter with start_line_regex: ' + this.start_line_regex + ', flags: ' + (this.regex_flags || '')); + this.setInterval(this.max_delay); + callback(); +}; + +FilterMultiline.prototype.process = function(data) { + var key = this.computeKey(data); + if (data.message.match(this.start_line_regex)) { + this.sendIfNeeded(key); + } + this.store(key, data); +}; + +exports.create = function() { + return new FilterMultiline(); +}; diff --git a/lib/filters/filter_mutate_replace.js b/lib/filters/filter_mutate_replace.js new file mode 100644 index 00000000..22116dd8 --- /dev/null +++ b/lib/filters/filter_mutate_replace.js @@ -0,0 +1,34 @@ +var base_filter = require('../lib/base_filter'), + util = require('util'), + logger = require('log4node'); + +function FilterMutateReplace() { + base_filter.BaseFilter.call(this); + this.mergeConfig({ + name: 'MutateReplace', + host_field: 'field', + required_params: ['from', 'to'], + start_hook: this.start, + }); +} + +util.inherits(FilterMutateReplace, base_filter.BaseFilter); + +FilterMutateReplace.prototype.start = function(callback) { + this.regex = new RegExp(this.from, 'g'); + logger.info('Initialized mutate gsub filter on field: ' + this.field + ', from: ' + this.from + ', to: ' + this.to); + callback(); +}; + +FilterMutateReplace.prototype.process = function(data) { + if (data[this.field]) { + logger.debug('Gsub on field', this.field, ', from', this.from, ', to', this.to, ', current 
value', data[this.field]); + data[this.field] = data[this.field].toString().replace(this.regex, this.to); + logger.debug('New value', data[this.field]); + } + return data; +}; + +exports.create = function() { + return new FilterMutateReplace(); +}; diff --git a/lib/filters/filter_regex.js b/lib/filters/filter_regex.js new file mode 100644 index 00000000..aaf5e32c --- /dev/null +++ b/lib/filters/filter_regex.js @@ -0,0 +1,74 @@ +var base_filter = require('../lib/base_filter'), + util = require('util'), + logger = require('log4node'), + patterns_loader = require('../lib/patterns_loader'), + regex_helper = require('../lib/regex_helper'); + +function FilterRegex() { + base_filter.BaseFilter.call(this); + this.mergeConfig(regex_helper.config()); + this.mergeConfig({ + name: 'Regex', + host_field: 'builtin_regex', + allow_empty_host: true, + required_params: ['regex'], + optional_params: ['fields', 'regex_flags', 'field'], + default_values: { + 'field': 'message', + 'fields': '', + 'numerical_fields': '', + }, + config_hook: this.loadPattern, + start_hook: this.start, + }); +} + +util.inherits(FilterRegex, base_filter.BaseFilter); + +FilterRegex.prototype.loadPattern = function(callback) { + if (this.builtin_regex) { + logger.info('Try to load config from pattern file ' + this.builtin_regex); + patterns_loader.load(this.builtin_regex, function(err, config) { + if (err) { + return callback(new Error('Unable to load pattern : ' + this.builtin_regex + ' : ' + err)); + } + for (var i in config) { + this[i] = config[i]; + } + callback(); + }.bind(this)); + } + else { + callback(); + } +}; + +FilterRegex.prototype.start = function(callback) { + this.regex = new RegExp(this.regex, this.regex_flags); + + this.fields = Array.isArray(this.fields) ? this.fields : this.fields.split(','); + + this.post_process = regex_helper.process.bind(this); + + logger.info('Initializing regex filter, regex : ' + this.regex + ', fields ' + this.fields + (this.date_format ? 
', date format ' + this.date_format : '') + ', flags: ' + (this.regex_flags || '')); + + callback(); +}; + +FilterRegex.prototype.process = function(data) { + logger.debug('Trying to match on regex', this.regex, ', input', data[this.field]); + if (data[this.field]) { + var result = data[this.field].match(this.regex); + logger.debug('Match result:', result); + if (result) { + for (var i = 0; i < this.fields.length; i++) { + this.post_process(data, result[i + 1], i); + } + } + } + return data; +}; + +exports.create = function() { + return new FilterRegex(); +}; diff --git a/lib/filters/filter_remove_field_when_equal.js b/lib/filters/filter_remove_field_when_equal.js new file mode 100644 index 00000000..78d15bd9 --- /dev/null +++ b/lib/filters/filter_remove_field_when_equal.js @@ -0,0 +1,31 @@ +var base_filter = require('../lib/base_filter'), + util = require('util'), + logger = require('log4node'); + +function FilterRemoveFieldWhenEqual() { + base_filter.BaseFilter.call(this); + this.mergeConfig({ + name: 'RemoveFieldWhenEqual', + host_field: 'field', + required_params: ['value'], + start_hook: this.start, + }); +} + +util.inherits(FilterRemoveFieldWhenEqual, base_filter.BaseFilter); + +FilterRemoveFieldWhenEqual.prototype.start = function(callback) { + logger.info('Initialized remove field', this.field, 'when equal to', this.value); + callback(); +}; + +FilterRemoveFieldWhenEqual.prototype.process = function(data) { + if (data[this.field] && data[this.field] === this.value) { + delete data[this.field]; + } + return data; +}; + +exports.create = function() { + return new FilterRemoveFieldWhenEqual(); +}; diff --git a/lib/filters/filter_rename.js b/lib/filters/filter_rename.js new file mode 100644 index 00000000..49bfadf2 --- /dev/null +++ b/lib/filters/filter_rename.js @@ -0,0 +1,32 @@ +var base_filter = require('../lib/base_filter'), + util = require('util'), + logger = require('log4node'); + +function FilterRenameField() { + base_filter.BaseFilter.call(this); + 
this.mergeConfig({ + name: 'RenameField', + required_params: ['to'], + host_field: 'from', + start_hook: this.start, + }); +} + +util.inherits(FilterRenameField, base_filter.BaseFilter); + +FilterRenameField.prototype.start = function(callback) { + logger.info('Initialized rename field filter from ' + this.from + ' to ' + this.to); + callback(); +}; + +FilterRenameField.prototype.process = function(data) { + if (data[this.from]) { + data[this.to] = data[this.from]; + delete data[this.from]; + } + return data; +}; + +exports.create = function() { + return new FilterRenameField(); +}; diff --git a/lib/filters/filter_reverse_dns.js b/lib/filters/filter_reverse_dns.js new file mode 100644 index 00000000..963000c2 --- /dev/null +++ b/lib/filters/filter_reverse_dns.js @@ -0,0 +1,59 @@ +var base_filter = require('../lib/base_filter'), + cache_helper = require('../lib/cache_helper'), + util = require('util'), + dns = require('dns'), + logger = require('log4node'); + +function FilterReverseDns() { + base_filter.BaseFilter.call(this); + this.mergeConfig(cache_helper.config()); + this.mergeConfig({ + name: 'ReverseDns', + optional_params: ['only_hostname', 'target_field'], + host_field: 'field', + default_values: { + 'only_hostname': 'true', + }, + start_hook: this.start, + }); +} + +util.inherits(FilterReverseDns, base_filter.BaseFilter); + +FilterReverseDns.prototype.start = function(callback) { + this.only_hostname = this.only_hostname === 'true'; + if (!this.target_field) { + this.target_field = this.field; + } + logger.info('Initialized reverse dns filter, use only hostname ', this.only_hostname); + this.cache_miss = function(key, callback) { + try { + dns.reverse(key, callback); + } + catch(e) { + callback(e); + } + }; + callback(); +}; + +FilterReverseDns.prototype.process = function(data) { + if (data[this.field]) { + this.cache(data[this.field], function(err, result) { + if (!err) { + data[this.target_field] = result[0]; + if (this.only_hostname) { + 
data[this.target_field] = data[this.target_field].split('.')[0]; + } + } + this.emit('output', data); + }.bind(this)); + } + else { + return data; + } +}; + +exports.create = function() { + return new FilterReverseDns(); +}; diff --git a/lib/filters/filter_split.js b/lib/filters/filter_split.js new file mode 100644 index 00000000..5a215242 --- /dev/null +++ b/lib/filters/filter_split.js @@ -0,0 +1,49 @@ +var base_filter = require('../lib/base_filter'), + util = require('util'), + logger = require('log4node'); + +function FilterSplit() { + base_filter.BaseFilter.call(this); + this.mergeConfig({ + name: 'Split', + required_params: ['delimiter'], + start_hook: this.start, + }); +} + +util.inherits(FilterSplit, base_filter.BaseFilter); + +FilterSplit.prototype.start = function(callback) { + logger.info('Initialized split filter with delimiter: ' + this.delimiter); + callback(); +}; + +FilterSplit.prototype.createMessage = function(data, text) { + var m = JSON.parse(JSON.stringify(data)); + m.message = text; + return m; +}; + +FilterSplit.prototype.process = function(data) { + var result = []; + var current = data.message; + while (true) { + var index = current.indexOf(this.delimiter); + if (index === -1) { + break; + } + var before = current.substring(0, index); + current = current.substring(index + this.delimiter.length); + if (before.length > 0) { + result.push(this.createMessage(data, before)); + } + } + if (current.length > 0) { + result.push(this.createMessage(data, current)); + } + return result; +}; + +exports.create = function() { + return new FilterSplit(); +}; diff --git a/lib/filters/filter_syslog_pri.js b/lib/filters/filter_syslog_pri.js new file mode 100644 index 00000000..f6c841f5 --- /dev/null +++ b/lib/filters/filter_syslog_pri.js @@ -0,0 +1,72 @@ +/* jshint bitwise:false */ + +var base_filter = require('../lib/base_filter'), + util = require('util'); + +function FilterSyslogPri() { + base_filter.BaseFilter.call(this); + this.mergeConfig({ + name: 
'SyslogPri', + optional_params: ['priority_field', 'severity_field', 'facility_field'], + default_values: { + 'priority_field': 'syslog_priority', + 'severity_field': 'syslog_severity', + 'facility_field': 'syslog_facility', + } + }); +} + +var facility_labels = [ + 'kernel', + 'user-level', + 'mail', + 'daemon', + 'security/authorization', + 'syslogd', + 'line printer', + 'network news', + 'uucp', + 'clock', + 'security/authorization', + 'ftp', + 'ntp', + 'log audit', + 'log alert', + 'clock', + 'local0', + 'local1', + 'local2', + 'local3', + 'local4', + 'local5', + 'local6', + 'local7', +]; + +var severity_labels = [ + 'emergency', + 'alert', + 'critical', + 'error', + 'warning', + 'notice', + 'informational', + 'debug', +]; + +util.inherits(FilterSyslogPri, base_filter.BaseFilter); + +FilterSyslogPri.prototype.process = function(data) { + if (data[this.priority_field] > -1 && data[this.priority_field] < 192) { + var priority = data[this.priority_field]; + var severity = priority & 7; + var facility = priority >> 3; + data[this.severity_field] = severity_labels[severity]; + data[this.facility_field] = facility_labels[facility]; + } + return data; +}; + +exports.create = function() { + return new FilterSyslogPri(); +}; diff --git a/lib/filters/filter_truncate.js b/lib/filters/filter_truncate.js new file mode 100644 index 00000000..aeae2520 --- /dev/null +++ b/lib/filters/filter_truncate.js @@ -0,0 +1,30 @@ +var base_filter = require('../lib/base_filter'), + util = require('util'), + logger = require('log4node'); + +function FilterTruncate() { + base_filter.BaseFilter.call(this); + this.mergeConfig({ + name: 'Truncate', + required_params: ['max_size'], + start_hook: this.start, + }); +} + +util.inherits(FilterTruncate, base_filter.BaseFilter); + +FilterTruncate.prototype.start = function(callback) { + logger.info('Initialized truncate filter with max_size: ' + this.max_size); + callback(); +}; + +FilterTruncate.prototype.process = function(data) { + if 
(data.message) { + data.message = data.message.substring(0, this.max_size); + } + return data; +}; + +exports.create = function() { + return new FilterTruncate(); +}; diff --git a/lib/inputs/input_amqp.js b/lib/inputs/input_amqp.js new file mode 100644 index 00000000..44f3f1e8 --- /dev/null +++ b/lib/inputs/input_amqp.js @@ -0,0 +1,82 @@ +var base_input = require('../lib/base_input'), + amqp_driver = require('../lib/amqp_driver'), + ssl_helper = require('../lib/ssl_helper'), + util = require('util'), + logger = require('log4node'); + +function InputAmqp() { + base_input.BaseInput.call(this); + this.mergeConfig(this.unserializer_config()); + this.mergeConfig(ssl_helper.config()); + this.mergeConfig({ + name: 'Ampq', + host_field: 'host', + port_field: 'port', + required_params: ['exchange_name'], + optional_params: ['topic', 'durable', 'retry_delay', 'heartbeat', 'type', 'username', 'password', 'vhost'], + default_values: { + 'durable': true, + 'retry_delay': 3000, + 'heartbeat': 10, + }, + start_hook: this.start, + }); +} + +util.inherits(InputAmqp, base_input.BaseInput); + +InputAmqp.prototype.start = function(callback) { + this.amqp_url = amqp_driver.buildUrl(this); + logger.info('Start AMQP listener to', this.amqp_url, 'exchange', this.exchange_name, 'topic', this.topic); + + this.connected_callback = function(channel) { + channel.assertExchange(this.exchange_name, this.topic ? 
'topic' : 'fanout', {durable: this.durable}, function(err) { + if (err) { + logger.error('Unable to create exchange', err); + } + else { + channel.assertQueue('', {exclusive: true}, function(err, queue) { + if (err) { + logger.error('Unable to assert locale queue', err); + } + else { + channel.bindQueue(queue.queue, this.exchange_name, this.topic || '', {}, function(err) { + if (err) { + logger.error('Unable to bind queue', err); + } + else { + logger.info('Binded to', this.exchange_name, 'topic', this.topic); + channel.consume(queue.queue, function(m) { + var data = m.content.toString(); + this.unserialize_data(data, function(parsed) { + if (this.type) { + parsed.type = this.type; + } + this.emit('data', parsed); + }.bind(this), function(data) { + this.emit('error', 'Unable to parse data ' + data); + }.bind(this)); + }.bind(this), {noAck: true}); + } + }.bind(this)); + } + }.bind(this)); + } + }.bind(this)); + }; + this.disconnected_callback = function() {}; + this.amqp_logger = logger; + + this.driver = amqp_driver.createAmqpClient(this); + + callback(); +}; + +InputAmqp.prototype.close = function(callback) { + logger.info('Closing AMQP input', this.amqp_url, 'exchange ' + this.exchange_name); + this.driver.close(callback); +}; + +exports.create = function() { + return new InputAmqp(); +}; diff --git a/lib/inputs/input_file.js b/lib/inputs/input_file.js new file mode 100644 index 00000000..22e2e958 --- /dev/null +++ b/lib/inputs/input_file.js @@ -0,0 +1,141 @@ +var base_input = require('../lib/base_input'), + util = require('util'), + path = require('path'), + logger = require('log4node'), + directory_detector = require('../lib/directory_detector'), + directory_watcher = require('../lib/directory_watcher'), + file_filter = require('../lib/file_filter'), + fs = require('fs'), + monitor_file = require('../lib/monitor_file'), + tail = require('../lib/tail_file'), + async = require('async'); + +function InputFile() { + base_input.BaseInput.call(this); + 
this.mergeConfig(this.unserializer_config()); + this.mergeConfig({ + name: 'File', + host_field: 'path', + optional_params: ['type', 'buffer_size', 'buffer_encoding', 'wait_delay_after_renaming', 'start_index', 'use_tail'], + default_values: { + 'use_tail': false, + }, + start_hook: this.start, + }); +} + +util.inherits(InputFile, base_input.BaseInput); + +InputFile.prototype.monitorFile = function(dir, local_filename, start_index) { + var filename = (dir === '.' ? path.resolve('.') : dir) + '/' + local_filename; + if (this.monitored_files[filename] || !this.filter.filter(local_filename)) { + return; + } + + logger.info('Start input file', filename); + + var monitor = this.use_tail ? tail.tail(filename) : monitor_file.monitor(filename, { + buffer_size: this.buffer_size, + buffer_encoding: this.buffer_encoding, + wait_delay_after_renaming: this.wait_delay_after_renaming, + }); + + this.monitored_files[filename] = monitor; + + monitor.on('error', function(err) { + this.emit('error', err); + }.bind(this)); + + monitor.on('data', function(data) { + this.unserialize_data(data, function(parsed) { + this.emit('data', parsed); + }.bind(this), function(data) { + this.emit('data', { + 'message': data, + 'path': filename, + 'type': this.type, + }); + }.bind(this)); + }.bind(this)); + + monitor.start(function(err) { + if (err) { + this.emit('error', err); + } + }.bind(this), start_index); + +}; + +InputFile.prototype.start = function(callback) { + logger.info('Start input on file', this.path); + + if (this.start_index) { + this.start_index = parseInt(this.start_index, 10); + } + + this.filter = file_filter.create(path.basename(this.path)); + + var detector = new directory_detector.DirectoryDetector(); + + detector.on('removed', function(dir) { + if (this.monitored_files[dir]) { + this.monitored_files[dir].close(function() {}); + delete this.monitored_files[dir]; + } + Object.keys(this.monitored_files).forEach(function(k) { + if (k.indexOf(dir) === 0) { + 
this.monitored_files[k].close(function() {}); + delete this.monitored_files[k]; + } + }.bind(this)); + }.bind(this)); + + detector.on('exists', function(dir, newly_created) { + logger.info('Parent directory exists', dir, 'for reading', this.path); + + fs.readdir(dir, function(err, l) { + if (err) { + return this.emit('error', new Error('Error while reading ' + dir + ' : ' + err)); + } + l.forEach(function(x) { + this.monitorFile(dir, x, newly_created ? 0 : this.start_index); + }.bind(this)); + try { + var dir_watcher = directory_watcher.register(dir, function(event, filename) { + this.monitorFile(dir, filename, 0); + }.bind(this)); + this.monitored_files[dir] = { + close: function(callback) { + directory_watcher.unregister(dir_watcher); + callback(); + } + }; + } + catch(err) { + return this.emit('error', new Error('Error while reading ' + dir + ' : ' + err)); + } + }.bind(this)); + + }.bind(this)); + + this.monitored_files = {}; + this.monitored_files.__detector__ = detector; + + detector.on('error', function(err) { + this.emit('error', err); + }.bind(this)); + detector.start(path.dirname(this.path), callback); +}; + +InputFile.prototype.close = function(callback) { + logger.info('Closing input file', this.path); + async.eachSeries(Object.keys(this.monitored_files), function(x, callback) { + var o = this.monitored_files[x]; + delete this.monitored_files[x]; + o.close(callback); + }.bind(this), callback); +}; + +exports.create = function() { + return new InputFile(); +}; diff --git a/lib/inputs/input_freeswitch.js b/lib/inputs/input_freeswitch.js new file mode 100644 index 00000000..bf42966c --- /dev/null +++ b/lib/inputs/input_freeswitch.js @@ -0,0 +1,49 @@ +var base_input = require('../lib/base_input'), + util = require('util'), + esl = require('modesl'); + logger = require('log4node'); + +function InputFreeswitch() { + base_input.BaseInput.call(this); + this.mergeConfig(this.unserializer_config()); + this.mergeConfig({ + name: 'Freeswitch', + host_field: 'host', 
+ port_field: 'port', + pass_field: 'pass', + optional_params: ['uuid'], + start_hook: this.start, + }); +} + +util.inherits(InputFreeswitch, base_input.BaseInput); + +InputFreeswitch.prototype.start = function(callback) { + logger.info('Connecting to Freeswitch ESL', this.host, ':', this.port); + + this.socket = new esl.Connection(this.host, this.port, this.pass); + + this.socket.on("error", function (error) { + logger.info('ESL Connection Error ' + JSON.stringify(error)); + setTimeout(function() { socket(this.host, this.port, this.pass);}, 1000); + }).on("esl::end", function () { + logger.info('ESL Connection Ended'); + setTimeout(function() { socket(this.host, this.port, this.pass);}, 1000); + }).on("esl::ready", function () { + eslConn.events('json' , 'ALL', function() { + logger.info('ESL ready - subscribed to receive events.'); + }); + }).on("esl::event::**", function (e, headers, body) { + this.emit('data', e); + }); +}; + +InputFreeswitch.prototype.close = function(callback) { + logger.info('Closing ESL connection', this.host); + this.socket.close(); + callback(); +}; + +exports.create = function() { + return new InputFreeswitch(); +}; diff --git a/lib/inputs/input_gae.js b/lib/inputs/input_gae.js new file mode 100644 index 00000000..0f830458 --- /dev/null +++ b/lib/inputs/input_gae.js @@ -0,0 +1,96 @@ +var base_input = require('../lib/base_input'), + http = require('http'), + https = require('https'), + util = require('util'), + url = require('url'), + logger = require('log4node'); + +function InputGae() { + base_input.BaseInput.call(this); + this.mergeConfig({ + name: 'Gae', + host_field: 'host', + port_field: 'port', + required_params: ['key'], + optional_params: ['ssl', 'polling', 'servlet_name', 'access_logs_type', 'access_logs_field_name', 'type'], + default_values: { + 'ssl': false, + 'polling': 60, + 'servlet_name': 'logs', + }, + start_hook: this.start, + }); +} + +util.inherits(InputGae, base_input.BaseInput); + +InputGae.prototype.start = 
function(callback) { + this.proto = this.ssl ? https : http; + this.base_url = (this.ssl ? 'https' : 'http') + '://' + this.host + ':' + this.port + '/' + this.servlet_name; + this.current_timestamp = (new Date()).getTime(); + logger.info('Start polling log from Google App Engine to', this.base_url, 'polling period', this.polling); + this.interval = setInterval(function() { + this.poll(); + }.bind(this), this.polling * 1000); + this.poll(); + callback(); +}; + +InputGae.prototype.poll = function() { + if (this.in_progress) { + return; + } + this.in_progress = true; + var options = url.parse(this.base_url + '?start_timestamp=' + this.current_timestamp + '&log_key=' + this.key); + options.rejectUnauthorized = false; + var req = this.proto.get(options, function(res) { + if (res.statusCode === 200) { + var current_buffer = ''; + res.on('data', function(l) { + current_buffer += l.toString(); + var lines = current_buffer.split('\n'); + current_buffer = lines.pop(); + lines.forEach(function(l) { + try { + if (l !== '') { + var o = JSON.parse(l); + if (this.access_logs_type && this.access_logs_field_name && o[this.access_logs_field_name]) { + o.type = this.access_logs_type; + } + if (this.type && !o.type) { + o.type = this.type; + } + this.emit('data', o); + } + } + catch(e) { + this.emit('error', e); + } + }.bind(this)); + }.bind(this)); + this.current_timestamp = res.headers['x-log-end-timestamp']; + res.on('end', function() { + this.in_progress = false; + }.bind(this)); + } + else { + this.emit('error', new Error('Google app engine return wrong return code ' + res.statusCode)); + this.in_progress = false; + } + }.bind(this)); + + req.on('error', function(err) { + this.emit('error', err); + this.in_progress = false; + }.bind(this)); +}; + +InputGae.prototype.close = function(callback) { + clearInterval(this.interval); + logger.info('Closing Google App Engine poller to ', this.host + ':' + this.port); + callback(); +}; + +exports.create = function() { + return new 
InputGae(); +}; diff --git a/lib/inputs/input_http.js b/lib/inputs/input_http.js new file mode 100644 index 00000000..fa77121d --- /dev/null +++ b/lib/inputs/input_http.js @@ -0,0 +1,79 @@ +var base_input = require('../lib/base_input'), + http = require('http'), + https = require('https'), + util = require('util'), + ssl_helper = require('../lib/ssl_helper'), + logger = require('log4node'); + +function InputHttp() { + base_input.BaseInput.call(this); + this.mergeConfig(ssl_helper.config()); + this.mergeConfig(this.unserializer_config()); + this.mergeConfig({ + name: 'Http', + host_field: 'host', + port_field: 'port', + optional_params: ['type'], + start_hook: this.start, + }); +} + +util.inherits(InputHttp, base_input.BaseInput); + +InputHttp.prototype.start = function(callback) { + logger.info('Start listening on', this.host + ':' + this.port, 'ssl ' + this.ssl); + + this.serverCallback = function(request, response) { + var data = ''; + + request.on('data', function(chunk) { + data += chunk.toString(); + }); + + request.on('end', function() { + this.unserialize_data(data, function(parsed) { + this.emit('data', parsed); + }.bind(this), function(data) { + this.emit('data', { + 'message': data.trim(), + 'host': response.remoteAddress, + 'http_port': this.port, + 'type': this.type, + }); + }.bind(this)); + response.writeHead(201); + response.end(); + }.bind(this)); + + request.on('error', function(err) { + this.emit('error', err); + }.bind(this)); + }.bind(this); + + if (this.ssl) { + this.server = https.createServer(ssl_helper.merge_options(this, {}), this.serverCallback); + this.server.on('clientError', function(err) { + this.emit('error', err); + }.bind(this)); + } + else { + this.server = http.createServer(this.serverCallback); + } + + this.server.on('error', function(err) { + this.emit('error', err); + }.bind(this)); + + this.server.listen(this.port, this.host); + + this.server.once('listening', callback); +}; + +InputHttp.prototype.close = function(callback) { + 
logger.info('Closing http server', this.host + ':' + this.port, 'ssl ' + this.ssl); + this.server.close(callback); +}; + +exports.create = function() { + return new InputHttp(); +}; diff --git a/lib/inputs/input_redis.js b/lib/inputs/input_redis.js new file mode 100644 index 00000000..a5344ced --- /dev/null +++ b/lib/inputs/input_redis.js @@ -0,0 +1,120 @@ +var base_input = require('../lib/base_input'), + util = require('util'), + redis_connection_manager = require('../lib/redis_connection_manager'), + logger = require('log4node'), + error_buffer = require('../lib/error_buffer'); + +function InputRedis() { + base_input.BaseInput.call(this); + this.mergeConfig(error_buffer.config(function() { + return 'input Redis to ' + this.host + ':' + this.port; + })); + this.mergeConfig(this.unserializer_config()); + this.mergeConfig({ + name: 'Redis', + host_field: 'host', + port_field: 'port', + optional_params: ['type', 'channel', 'key', 'pattern_channel', 'retry', 'auth_pass', 'method'], + default_values: { + 'method': 'queue', + }, + start_hook: this.start, + }); +} + +util.inherits(InputRedis, base_input.BaseInput); + +InputRedis.prototype.processRedisMessage = function(channel, data) { + this.unserialize_data(data, function(parsed) { + if (this.type) { + parsed.type = this.type; + } + if (channel) { + parsed.redis_channel = channel; + } + this.emit('data', parsed); + }.bind(this), function(data) { + this.emit('error', 'Unable to parse data ' + data); + }.bind(this)); +}; + +InputRedis.prototype.receivePubSub = function(client) { + if (this.pattern_channel) { + client.on('pmessage', function(pattern, channel, data) { + this.processRedisMessage(channel, data); + }.bind(this)); + client.psubscribe(this.channel); + this.redis_connection_manager.on('before_quit', function(client) { + client.punsubscribe(this.channel); + }); + } + else { + client.on('message', function(channel, data) { + this.processRedisMessage(channel, data); + }.bind(this)); + 
client.subscribe(this.channel); + this.redis_connection_manager.on('before_quit', function(client) { + client.unsubscribe(this.channel); + }); + } +}; + +InputRedis.prototype.receiveQueue = function(client) { + client.blpop(this.key, 1, function(err, data) { + if (data) { + this.processRedisMessage(undefined, data[1]); + } + if (!this.quitting) { + this.receiveQueue(client); + } + }.bind(this)); +}; + +InputRedis.prototype.start = function(callback) { + if (this.method !== 'queue' && this.method !== 'pubsub') { + return callback(new Error('Wrong method, please use pubsub or queue : ' + this.method)); + } + + var receive; + + if (this.method === 'pubsub') { + if (!this.channel) { + return callback(new Error('You have to specify the channel parameter in pubsub mode')); + } + this.desc = 'using pubsub, channel ' + this.channel + ', pattern_channel ' + this.pattern_channel; + receive = this.receivePubSub.bind(this); + } + + if (this.method === 'queue') { + if (!this.key) { + return callback(new Error('You have to specify the key parameter in queue mode')); + } + this.desc = 'using queue, key ' + this.key; + receive = this.receiveQueue.bind(this); + } + + logger.info('Start listening Redis on', this.host + ':' + this.port, this.desc); + + this.redis_connection_manager = redis_connection_manager.create(this.host, this.port, this.auth_pass); + + this.redis_connection_manager.on('error', function(err) { + this.error_buffer.emit('error', err); + }.bind(this)); + + this.redis_connection_manager.on('connect', function() { + this.error_buffer.emit('ok'); + }.bind(this)); + + this.redis_connection_manager.once('connect', receive); + + callback(); +}; + +InputRedis.prototype.close = function(callback) { + this.quitting = true; + this.redis_connection_manager.quit(callback); +}; + +exports.create = function() { + return new InputRedis(); +}; diff --git a/lib/inputs/input_sqs.js b/lib/inputs/input_sqs.js new file mode 100644 index 00000000..739f8d41 --- /dev/null +++ 
b/lib/inputs/input_sqs.js @@ -0,0 +1,95 @@ +var base_input = require('../lib/base_input'), + sqs_wrapper = require('../lib/sqs_wrapper'), + util = require('util'), + logger = require('log4node'), + error_buffer = require('../lib/error_buffer'); + +function InputSQS() { + base_input.BaseInput.call(this); + this.mergeConfig(this.unserializer_config()); + this.mergeConfig(error_buffer.config(function() { + return 'sqs from ' + this.aws_queue; + })); + this.mergeConfig(sqs_wrapper.config()); + this.mergeConfig({ + name: 'SQS', + optional_params: ['polling_delay', 'type'], + default_values: { + polling_delay: '10' + }, + start_hook: this.start, + }); +} + +util.inherits(InputSQS, base_input.BaseInput); + +InputSQS.prototype.start = function(callback) { + this.polling_delay = parseInt(this.polling_delay, 10); + this.closed_callback = undefined; + logger.info('Creating AWS SQS Input from', this.aws_queue, 'polling delay', this.polling_delay); + this.waitMessage(); + callback(); +}; + +InputSQS.prototype.waitMessage = function() { + this.sqs.receiveMessage({ + QueueUrl: this.queue_url, + MaxNumberOfMessages: 10, + WaitTimeSeconds: this.polling_delay, + }, function(err, result) { + if (err) { + this.error_buffer.emit('error', err); + } + else { + if (result.Messages) { + var ack = { + QueueUrl: this.queue_url, + Entries: [], + }; + result.Messages.forEach(function(mm) { + this.unserialize_data(mm.Body, function(parsed) { + this.emit('data', parsed); + }.bind(this), function(data) { + this.emit('data', { + 'message': data.trim(), + 'sqs_queue': this.aws_queue, + 'type': this.type, + }); + }.bind(this)); + ack.Entries.push({ + Id: mm.MessageId, + ReceiptHandle: mm.ReceiptHandle, + }); + }.bind(this)); + this.sqs.deleteMessageBatch(ack, function(err, result) { + if (err) { + this.error_buffer.emit('error', err); + } + else { + if (result.Successful.length !== ack.Entries.length) { + this.error_buffer.emit('error', new Error('Wrong SQS DeleteBatchMessage Response')); + } + } + 
}.bind(this)); + } + } + if (this.closed_callback) { + this.closed_callback(); + } + else { + this.waitMessage(); + } + }.bind(this)); +}; + +InputSQS.prototype.close = function(callback) { + logger.info('Closing AWS SQS Input from', this.aws_queue, 'waiting end of polling'); + this.closed_callback = function() { + logger.info('Closed AWS SQS Input from', this.aws_queue); + callback(); + }; +}; + +exports.create = function() { + return new InputSQS(); +}; diff --git a/lib/inputs/input_stdin.js b/lib/inputs/input_stdin.js new file mode 100644 index 00000000..1b031334 --- /dev/null +++ b/lib/inputs/input_stdin.js @@ -0,0 +1,38 @@ +var base_input = require('../lib/base_input'), + util = require('util'), + logger = require('log4node'); + +function InputStdin() { + base_input.BaseInput.call(this); + this.mergeConfig({ + name: 'Stdin', + start_hook: this.start, + }); +} + +util.inherits(InputStdin, base_input.BaseInput); + +InputStdin.prototype.start = function(callback) { + process.stdin.resume(); + this.stdin_listener = function(chunk) { + this.emit('data', { + 'source': 'stdin', + 'message': chunk.toString().trim(), + }); + }.bind(this); + process.stdin.on('data', this.stdin_listener); + + callback(); +}; + +InputStdin.prototype.close = function(callback) { + logger.info('Closing stdin'); + if (this.stdin_listener) { + process.stdin.removeListener('data', this.stdin_listener); + } + callback(); +}; + +exports.create = function() { + return new InputStdin(); +}; diff --git a/lib/inputs/input_tcp.js b/lib/inputs/input_tcp.js new file mode 100644 index 00000000..72a300d7 --- /dev/null +++ b/lib/inputs/input_tcp.js @@ -0,0 +1,101 @@ +var base_input = require('../lib/base_input'), + net = require('net'), + tls = require('tls'), + util = require('util'), + ssl_helper = require('../lib/ssl_helper'), + async = require('async'), + async_helper = require('../lib/async_helper'), + logger = require('log4node'); + +function InputTcp() { + base_input.BaseInput.call(this); + 
this.mergeConfig(ssl_helper.config()); + this.mergeConfig(this.unserializer_config()); + this.mergeConfig({ + name: 'Tcp', + host_field: 'host', + port_field: 'port', + optional_params: ['type', 'appendPeerCert'], + default_values: { + 'appendPeerCert': true, + }, + start_hook: this.start, + }); +} + +util.inherits(InputTcp, base_input.BaseInput); + +InputTcp.prototype.start = function(callback) { + logger.info('Start listening on tcp', this.host + ':' + this.port); + this.counter = 0; + this.current = {}; + + var listener = function(c) { + var local_id = this.counter; + this.counter += 1; + this.current[local_id] = c; + c.on('data', function(data) { + this.unserialize_data(data, function(parsed) { + this.emit('data', parsed); + }.bind(this), function(data) { + var obj = { + 'message': data.toString().trim(), + 'host': c.remoteAddress, + 'tcp_port': this.port, + 'type': this.type, + }; + if (this.ssl && this.appendPeerCert) { + var peer_cert = c.getPeerCertificate(); + obj.tls = { + 'authorized': c.authorized, + 'peer_cert': { + 'subject': peer_cert.subject, + 'issuer': peer_cert.issuer, + 'valid_from': peer_cert.valid_from, + 'valid_to': peer_cert.valid_to, + 'fingerprint': peer_cert.fingerprint, + } + }; + } + this.emit('data', obj); + }.bind(this)); + }.bind(this)); + c.on('error', function(err) { + this.emit('error', err); + }.bind(this)); + c.on('close', function() { + delete this.current[local_id]; + }.bind(this)); + }.bind(this); + + if (this.ssl) { + this.server = tls.createServer(ssl_helper.merge_options(this, {}), listener); + this.server.on('clientError', function(err) { + this.emit('error', err); + }.bind(this)); + } + else { + this.server = net.createServer(listener); + } + + this.server.on('error', function(err) { + this.emit('error', err); + }.bind(this)); + + this.server.listen(this.port, this.host); + + this.server.once('listening', callback); +}; + +InputTcp.prototype.close = function(callback) { + logger.info('Closing listening tcp', this.host + 
':' + this.port); + async.eachSeries(Object.keys(this.current), function(x, callback) { + var c = this.current[x]; + c.once('close', callback); + c.end(); + }.bind(this), async_helper.chainedCloseAll([this.server], callback)); +}; + +exports.create = function() { + return new InputTcp(); +}; diff --git a/lib/inputs/input_udp.js b/lib/inputs/input_udp.js new file mode 100644 index 00000000..69e3b0a0 --- /dev/null +++ b/lib/inputs/input_udp.js @@ -0,0 +1,53 @@ +var base_input = require('../lib/base_input'), + dgram = require('dgram'), + util = require('util'), + logger = require('log4node'); + +function InputUdp() { + base_input.BaseInput.call(this); + this.mergeConfig(this.unserializer_config()); + this.mergeConfig({ + name: 'Udp', + host_field: 'host', + port_field: 'port', + optional_params: ['type'], + start_hook: this.start, + }); +} + +util.inherits(InputUdp, base_input.BaseInput); + +InputUdp.prototype.start = function(callback) { + logger.info('Start listening on udp', this.host + ':' + this.port); + + this.server = dgram.createSocket('udp4'); + + this.server.on('message', function(data, remote) { + this.unserialize_data(data, function(parsed) { + this.emit('data', parsed); + }.bind(this), function(data) { + this.emit('data', { + 'message': data.toString().trim(), + 'host': remote.address, + 'udp_port': this.port, + 'type': this.type, + }); + }.bind(this)); + }.bind(this)); + + this.server.on('error', function(err) { + this.emit('error', err); + }.bind(this)); + + this.server.bind(this.port, this.host, callback); +}; + +InputUdp.prototype.close = function(callback) { + logger.info('Closing listening udp', this.host + ':' + this.port); + this.server.close(); + callback(); +}; + +exports.create = function() { + return new InputUdp(); +}; diff --git a/lib/inputs/input_unix.js b/lib/inputs/input_unix.js new file mode 100644 index 00000000..3fe4b689 --- /dev/null +++ b/lib/inputs/input_unix.js @@ -0,0 +1,53 @@ +var base_input = require('../lib/base_input'), + util 
= require('util'), + net = require('net'), + logger = require('log4node'); + +function InputUnix() { + base_input.BaseInput.call(this); + this.mergeConfig(this.unserializer_config()); + this.mergeConfig({ + name: 'Unix', + host_field: 'socket', + optional_params: ['type'], + start_hook: this.start, + }); +} + +util.inherits(InputUnix, base_input.BaseInput); + +InputUnix.prototype.start = function(callback) { + logger.info('Start listening on unix socket', this.socket); + + this.server = net.createServer(function(c) { + c.on('data', function(data) { + this.unserialize_data(data, function(parsed) { + this.emit('data', parsed); + }.bind(this), function(data) { + this.emit('data', { + 'message': data.toString().trim(), + 'path': this.socket, + 'type': this.type, + }); + }.bind(this)); + }.bind(this)); + c.on('error', function(err) { + this.emit('error', err); + }.bind(this)); + }.bind(this)); + + this.server.on('error', function(err) { + this.emit('error', err); + }.bind(this)); + + this.server.listen(this.socket, callback); +}; + +InputUnix.prototype.close = function(callback) { + logger.info('Closing listening unix socket' + this.socket); + this.server.close(callback); +}; + +exports.create = function() { + return new InputUnix(); +}; diff --git a/lib/inputs/input_ws.js b/lib/inputs/input_ws.js new file mode 100644 index 00000000..ae7d9772 --- /dev/null +++ b/lib/inputs/input_ws.js @@ -0,0 +1,81 @@ +var base_input = require('../lib/base_input'), + util = require('util'), + http = require('http'), + https = require('https'), + WebSocketServer = require('ws').Server, + ssl_helper = require('../lib/ssl_helper'), + logger = require('log4node'); + +function InputWebsocket() { + base_input.BaseInput.call(this); + this.mergeConfig(ssl_helper.config()); + this.mergeConfig(this.unserializer_config()); + this.mergeConfig({ + name: 'Websocket', + host_field: 'host', + port_field: 'port', + optional_params: ['type', 'path'], + default_values: { + 'path': '/', + }, + start_hook: 
this.start, + }); +} + +util.inherits(InputWebsocket, base_input.BaseInput); + +InputWebsocket.prototype.start = function(callback) { + logger.info('Start listening on websocket', this.host + ':' + this.port, 'ssl ' + this.ssl); + + if (this.ssl) { + this.server = https.createServer(ssl_helper.merge_options(this, {})); + this.server.on('clientError', function(err) { + this.emit('error', err); + }.bind(this)); + } + else { + this.server = http.createServer(); + } + + this.wss = new WebSocketServer({ server: this.server, path: this.path }); + this.wss.on('connection', function(ws) { + + ws.on('message', function(data) { + this.unserialize_data(data, function(parsed) { + this.emit('data', parsed); + }.bind(this), function(data) { + this.emit('data', { + 'message': data.trim(), + 'host': ws._socket.remoteAddress, + 'ws_port': this.port, + 'type': this.type, + }); + }.bind(this)); + }.bind(this)); + + ws.on('error', function(err) { + this.emit('error', err); + }.bind(this)); + + }.bind(this)); + + this.wss.on('error', function(err) { + this.emit('error', err); + }.bind(this)); + + this.server.once('listening', callback); + this.server.listen(this.port, this.host); +}; + +InputWebsocket.prototype.close = function(callback) { + logger.info('Closing websocket server', this.host + ':' + this.port, 'ssl ' + this.ssl); + // close the server and terminate all clients + this.wss.close(); + this.server.close(function() { + callback(); + }); +}; + +exports.create = function() { + return new InputWebsocket(); +}; diff --git a/lib/inputs/input_zeromq.js b/lib/inputs/input_zeromq.js new file mode 100644 index 00000000..dc5b2756 --- /dev/null +++ b/lib/inputs/input_zeromq.js @@ -0,0 +1,54 @@ +var base_input = require('../lib/base_input'), + util = require('util'), + zmq = require('zmq'), + logger = require('log4node'); + +function InputZeroMQ() { + base_input.BaseInput.call(this); + this.mergeConfig(this.unserializer_config()); + this.mergeConfig({ + name: 'Zeromq', + host_field: 
'address', + optional_params: ['type'], + start_hook: this.start, + }); +} + +util.inherits(InputZeroMQ, base_input.BaseInput); + +InputZeroMQ.prototype.start = function(callback) { + logger.info('Start listening on zeromq', this.address, 'using ZeroMQ', zmq.version); + + this.socket = zmq.socket('pull'); + this.socket.bind(this.address, function(err) { + if (err) { + return callback(err); + } + logger.info('Zeromq ready on ' + this.address); + + callback(); + }.bind(this)); + + this.socket.on('message', function(data) { + this.unserialize_data(data, function(parsed) { + this.emit('data', parsed); + }.bind(this), function(data) { + var obj = { + 'message': data.toString().trim(), + 'zeromq_from': this.address, + 'type': this.type, + }; + this.emit('data', obj); + }.bind(this)); + }.bind(this)); +}; + +InputZeroMQ.prototype.close = function(callback) { + logger.info('Closing input zeromq', this.address); + this.socket.close(); + callback(); +}; + +exports.create = function() { + return new InputZeroMQ(); +}; diff --git a/lib/lib/amqp_driver.js b/lib/lib/amqp_driver.js new file mode 100644 index 00000000..aee2d421 --- /dev/null +++ b/lib/lib/amqp_driver.js @@ -0,0 +1,88 @@ + +var amqp = require('amqplib/callback_api'), + ssl_helper = require('../lib/ssl_helper'); + +exports.createAmqpClient = function(options) { + var current_connection; + var connect_timeout; + var channel; + var closing = false; + + var connect = function() { + connect_timeout = undefined; + options.amqp_logger.info('Try connecting to', options.amqp_url, 'retry delay', options.retry_delay); + amqp.connect(options.amqp_url + '?heartbeat=' + options.heartbeat, ssl_helper.merge_options(options, {}), function(err, conn) { + if (err) { + options.amqp_logger.error('Unable to connect', err); + connect_timeout = setTimeout(connect, options.retry_delay); + return; + } + if (closing) { + return; + } + options.amqp_logger.info('Connected to', options.amqp_url); + current_connection = conn; + + 
conn.on('error', function(err) { + options.amqp_logger.info('AMQP Error', err); + if (err.stack) { + options.amqp_logger.info(err.stack); + } + }); + conn.on('close', function() { + options.disconnected_callback(channel); + current_connection = undefined; + channel = undefined; + options.amqp_logger.info('AMQP Close'); + if (!closing) { + setTimeout(connect, options.retry_delay); + } + }); + + channel = current_connection.createChannel(function(err, c) { + if (err) { + options.amqp_logger.error('Unable to create channel', err); + } + else { + channel = c; + options.connected_callback(channel); + } + }); + }); + }; + + connect(); + + return { + close: function(callback) { + closing = true; + options.amqp_logger.info('Closing AMQP connection to', options.amqp_url); + if (connect_timeout) { + clearTimeout(connect_timeout); + } + if (current_connection) { + current_connection.close(function(err) { + process.nextTick(function() { + callback(err); + }); + }); + } + else { + callback(); + } + } + }; + +}; + +exports.buildUrl = function(options) { + var url = (options.ssl ? 
'amqps' : 'amqp') + '://'; + if (options.username && options.password) { + url += options.username + ':' + options.password + '@'; + } + url += options.host + ':' + options.port; + if (options.vhost) { + url += options.vhost; + } + return url; +}; \ No newline at end of file diff --git a/lib/lib/async_helper.js b/lib/lib/async_helper.js new file mode 100644 index 00000000..bd0d9507 --- /dev/null +++ b/lib/lib/async_helper.js @@ -0,0 +1,23 @@ +var async = require('async'); + +exports.chainedCloseAll = function(array, callback) { + return function(err) { + if (err) { + return callback(err); + } + async.eachSeries(array, function(e, callback) { + if (e === undefined) { + callback(); + } + else { + e.close(callback); + } + }, callback); + }; +}; + +exports.call = function() { + return function(x, callback) { + x(callback); + }; +}; diff --git a/lib/lib/base_component.js b/lib/lib/base_component.js new file mode 100644 index 00000000..cc7b647d --- /dev/null +++ b/lib/lib/base_component.js @@ -0,0 +1,382 @@ +var events = require('events'), + util = require('util'), + url_parser = require('./url_parser'), + async = require('async'), + moment = require('moment'), + condition_evaluator = require('./condition_evaluator'), + logger = require('log4node'); + +function BaseComponent() { + events.EventEmitter.call(this); + this.setMaxListeners(0); + this.required_libs = {}; + this.config = { + optional_params: [], + required_params: [], + default_values: {}, + arrays: [], + hashes: [], + }; + this.start_hooks = []; + this.config_hooks = []; +} + +util.inherits(BaseComponent, events.EventEmitter); + +BaseComponent.prototype.requireLib = function(name) { + if (!this.required_libs[name]) { + try { + this.required_libs[name] = require(name); + } + catch (e) { + console.error('Unable to load module', name); + throw e; + } + } + return this.required_libs[name]; +}; + +BaseComponent.prototype.mergeConfig = function(config) { + ['name', 'host_field', 'port_field', 
'allow_empty_host'].forEach(function(x) { + if (config[x]) { + this.config[x] = config[x]; + } + }.bind(this)); + ['required_params', 'optional_params', 'arrays', 'hashes'].forEach(function(x) { + if (config[x]) { + this.config[x] = this.config[x].concat(config[x]); + } + }.bind(this)); + if (config.default_values) { + for (var x in config.default_values) { + this.config.default_values[x] = config.default_values[x]; + } + } + if (config.start_hook) { + this.start_hooks.push(config.start_hook); + } + if (config.config_hook) { + this.config_hooks.push(config.config_hook); + } +}; + +BaseComponent.prototype.processValue = function(x) { + if (x === 'true') { + return true; + } + else if (x === 'false') { + return false; + } + return x; +}; + +BaseComponent.prototype.loadConfig = function(url, callback) { + this.message_filtering = {}; + + var val, i, res; + + if (url.length === 0) { + if (this.config.host_field || this.config.port_field || this.config.required_params.length > 0) { + return callback(new Error('No empty url for ' + this.config.name)); + } + this.parsed_url = { + params: [], + }; + } + else { + this.parsed_url = url_parser.processUrlContent(url); + if (!this.parsed_url) { + return callback(new Error('Unable to parse config : ' + url)); + } + } + + if (this.config.host_field) { + if (!this.parsed_url.host && this.parsed_url.params[this.config.host_field]) { + this.parsed_url.host = this.parsed_url.params[this.config.host_field]; + } + if (!this.parsed_url.host && !this.config.allow_empty_host) { + return callback(new Error('No host found in url ' + url)); + } + + if (this.config.port_field && this.config.port_field !== -1) { + var p = url_parser.extractPortNumber(this.parsed_url.host); + if (!p && this.parsed_url.params[this.config.port_field]) { + p = {host: this.parsed_url.host, port: this.parsed_url.params[this.config.port_field]}; + } + if (!p) { + return callback(new Error('Unable to extract port from ' + this.parsed_url.host)); + } + 
this[this.config.host_field] = p.host; + this[this.config.port_field] = p.port; + } + else { + this[this.config.host_field] = this.parsed_url.host; + } + } + + if (this.config.required_params) { + for (i = 0; i < this.config.required_params.length; i++) { + val = this.parsed_url.params[this.config.required_params[i]]; + if (val) { + this[this.config.required_params[i]] = this.processValue(val); + } + } + } + + if (this.config.optional_params) { + for (i = 0; i < this.config.optional_params.length; i++) { + val = this.parsed_url.params[this.config.optional_params[i]]; + if (val !== undefined) { + this[this.config.optional_params[i]] = this.processValue(val); + } + } + } + + if (this.config.default_values) { + for (i in this.config.default_values) { + if (this[i] === undefined) { + this[i] = this.config.default_values[i]; + } + } + } + + for (i in this.config.arrays) { + if (this[this.config.arrays[i]]) { + if (!Array.isArray(this[this.config.arrays[i]])) { + this[this.config.arrays[i]] = this[this.config.arrays[i]].split(','); + } + } + } + + for (i in this.config.hashes) { + if (this[this.config.hashes[i]]) { + var x = {}; + if (typeof(this[this.config.hashes[i]]) !== 'object') { + var v = this[this.config.hashes[i]].split(','); + for(var j in v) { + var splitted = v[j].split(':'); + x[splitted[0]] = splitted[1]; + } + } + this[this.config.hashes[i]] = x; + } + } + + if (this.parsed_url.params.only_type) { + this.message_filtering.only_type = this.parsed_url.params.only_type; + } + + for (i in this.parsed_url.params) { + res = i.match(/^only_field_exist_(.+)$/); + if (res) { + if (!this.message_filtering.only_field_exist) { + this.message_filtering.only_field_exist = []; + } + this.message_filtering.only_field_exist.push(res[1]); + } + res = i.match(/^only_field_equal_(.+)$/); + if (res) { + if (!this.message_filtering.only_field_equal) { + this.message_filtering.only_field_equal = {}; + } + this.message_filtering.only_field_equal[res[1]] = 
this.parsed_url.params[i]; + } + res = i.match(/^only_field_match_(.+)$/); + if (res) { + if (!this.message_filtering.only_field_match) { + this.message_filtering.only_field_match = {}; + } + this.message_filtering.only_field_match[res[1]] = new RegExp(this.parsed_url.params[i]); + } + + if (this.parsed_url.params.__dynamic_eval__) { + this.__dynamic_eval__ = JSON.parse(this.parsed_url.params.__dynamic_eval__); + } + } + + async.eachSeries(this.config_hooks, function(hook, callback) { + hook.call(this, callback); + }.bind(this), function(err) { + if (err) { + return callback(err); + } + + if (this.config.required_params) { + for (var i = 0; i < this.config.required_params.length; i++) { + if (!this[this.config.required_params[i]]) { + return callback(new Error('You have to specify ' + this.config.required_params[i] + ' in url ' + url)); + } + } + } + + async.eachSeries(this.start_hooks, function(hook, callback) { + hook.call(this, callback); + }.bind(this), callback); + }.bind(this)); +}; + +BaseComponent.prototype.processMessage = function(data) { + var i; + + if (this.message_filtering.only_type && this.message_filtering.only_type !== data.type) { + return false; + } + + if (this.message_filtering.only_field_exist) { + for (i = 0; i < this.message_filtering.only_field_exist.length; i++) { + if (!data[this.message_filtering.only_field_exist[i]]) { + return false; + } + } + } + + if (this.message_filtering.only_field_equal) { + for (i in this.message_filtering.only_field_equal) { + if (!data[i] || data[i] !== this.message_filtering.only_field_equal[i]) { + return false; + } + } + } + + if (this.message_filtering.only_field_match) { + for (i in this.message_filtering.only_field_match) { + if (!data[i] || ! 
this.message_filtering.only_field_match[i].exec(data[i])) { + return false; + } + } + } + + if (this.__dynamic_eval__) { + try { + var keep = true; + for(i = 0; i < this.__dynamic_eval__.false_clauses.length; i ++) { + var x = this.__dynamic_eval__.false_clauses[i]; + if (condition_evaluator.compute(x, data) === true) { + keep = false; + break; + } + } + if (keep && this.__dynamic_eval__.true_clause) { + if (condition_evaluator.compute(this.__dynamic_eval__.true_clause, data) === false) { + keep = false; + } + } + if (!keep) { + return false; + } + } + catch(e) { + logger.error('Conditional error', e); + return false; + } + } + + return true; +}; + +var replace_cache = {}; + +BaseComponent.prototype.replaceByFields = function(data, s) { + if (!replace_cache[s]) { + var x = []; + var keep = s; + while(true) { + var result = keep.indexOf('#{'); + var end = result === -1 ? -1 : keep.substring(result + 2).indexOf('}'); + if (result !== -1 && end !== -1) { + x.push({ + type: 'string', + string: keep.substring(0, result), + }); + var key = keep.substring(result + 2, result + end + 2); + var date = key.match(/now:(.*)$/); + if (date) { + x.push({ + type: 'now', + format: date[1], + }); + } + else { + x.push({ + type: 'value', + value: key, + }); + } + keep = keep.substring(result + end + 3); + } + else { + if (keep.length > 0) { + x.push({ + type: 'string', + string: keep, + }); + } + break; + } + } + replace_cache[s] = x; + } + var cache = replace_cache[s]; + var r = ''; + for(var i = 0; i < cache.length; i ++) { + if (cache[i].type === 'string') { + r += cache[i].string; + } + else if (cache[i].type === 'now') { + r += moment().format(cache[i].format); + } + else if (cache[i].type === 'value') { + if (data[cache[i].value]) { + r += data[cache[i].value]; + } + else { + logger.debug('Unable to find field', cache[i].value); + return undefined; + } + } + } + return r; +}; + +BaseComponent.prototype.add_tags_function = function(data, field) { + if (this[field]) { + if (! 
data.tags) { + data.tags = []; + } + this[field].forEach(function(tag) { + data.tags.push(tag); + }); + } +}; + +BaseComponent.prototype.remove_tags_function = function(data) { + if (this.remove_tags) { + if (! data.tags) { + data.tags = []; + } + this.remove_tags.forEach(function(tag) { + var index = data.tags.indexOf(tag); + if (index > -1) { + data.tags.splice(index, 1); + } + }); + } +}; + +BaseComponent.prototype.add_fields_function = function(data) { + if (this.add_fields) { + Object.keys(this.add_fields).forEach(function(f) { + data[f] = this.replaceByFields(data, this.add_fields[f]); + }.bind(this)); + } +}; + +BaseComponent.prototype.remove_fields_function = function(data) { + if (this.remove_fields) { + this.remove_fields.forEach(function(f) { + delete data[f]; + }.bind(this)); + } +}; + +exports.BaseComponent = BaseComponent; diff --git a/lib/lib/base_filter.js b/lib/lib/base_filter.js new file mode 100644 index 00000000..ef28288b --- /dev/null +++ b/lib/lib/base_filter.js @@ -0,0 +1,63 @@ +var base_component = require('./base_component'), + util = require('util'), + logger = require('log4node'); + +function BaseFilter() { + base_component.BaseComponent.call(this); +} + +util.inherits(BaseFilter, base_component.BaseComponent); + +BaseFilter.prototype.init = function(url, callback) { + logger.info('Initializing filter', this.config.name); + + this.loadConfig(url, function(err) { + if (err) { + return callback(err); + } + + this.on('input', function(data) { + if (this.processMessage(data)) { + var res = this.process(data); + if (res) { + if (res.length === undefined) { + res = [res]; + } + for (var i = 0; i < res.length; i++) { + this.emit('output', res[i]); + } + } + } + else { + this.emit('output', data); + } + }.bind(this)); + + callback(); + }.bind(this)); +}; + +BaseFilter.prototype.tags_fields_config = function() { + return { + optional_params: ['add_tags', 'remove_tags', 'add_fields', 'add_field', 'remove_field', 'remove_fields'], + arrays: 
['add_tags', 'remove_tags', 'remove_field', 'remove_fields'], + hashes: ['add_fields', 'add_field'], + start_hook: this.start_tags_fields_config.bind(this), + }; +}; + +BaseFilter.prototype.start_tags_fields_config = function(callback) { + if (this.add_field) { + this.add_fields = this.add_field; + } + if (this.remove_field) { + this.remove_fields = this.remove_field; + } + callback(); +}; + +BaseFilter.prototype.close = function(callback) { + callback(); +}; + +exports.BaseFilter = BaseFilter; diff --git a/lib/lib/base_filter_buffer.js b/lib/lib/base_filter_buffer.js new file mode 100644 index 00000000..ae95938a --- /dev/null +++ b/lib/lib/base_filter_buffer.js @@ -0,0 +1,66 @@ +var base_filter = require('./base_filter'), + util = require('util'); + +function BaseFilterBuffer() { + base_filter.BaseFilter.call(this); + this.storage = {}; +} + +util.inherits(BaseFilterBuffer, base_filter.BaseFilter); + +BaseFilterBuffer.prototype.computeKey = function(data) { + return data.type + data.host + data.source; +}; + +BaseFilterBuffer.prototype.store = function(key, data) { + if (!this.storage[key]) { + this.storage[key] = { + first: data, + current: '', + }; + } + else { + this.storage[key].current += '\n'; + } + this.storage[key].current += data.message; + this.storage[key].last = (new Date()).getTime(); +}; + +BaseFilterBuffer.prototype.sendIfNeeded = function(key) { + if (this.storage[key]) { + this.sendMessage(key, this.storage[key].current); + delete this.storage[key]; + } +}; + +BaseFilterBuffer.prototype.setInterval = function(delay) { + var func = function() { + var now = (new Date()).getTime(); + var to_be_deleted = []; + for (var key in this.storage) { + if (now - this.storage[key].last > delay) { + this.sendMessage(key, this.storage[key].current); + to_be_deleted.push(key); + } + } + to_be_deleted.forEach(function(key) { + delete this.storage[key]; + }.bind(this)); + }.bind(this); + this.interval_id = setInterval(func, delay); +}; + 
+BaseFilterBuffer.prototype.sendMessage = function(key, current) { + var m = JSON.parse(JSON.stringify(this.storage[key].first)); + m.message = current; + this.emit('output', m); +}; + +BaseFilterBuffer.prototype.close = function(callback) { + if (this.interval_id) { + clearInterval(this.interval_id); + } + callback(); +}; + +exports.BaseFilterBuffer = BaseFilterBuffer; diff --git a/lib/lib/base_input.js b/lib/lib/base_input.js new file mode 100644 index 00000000..ccf0f2bc --- /dev/null +++ b/lib/lib/base_input.js @@ -0,0 +1,92 @@ +var base_component = require('./base_component'), + util = require('util'), + logger = require('log4node'); + +function BaseInput() { + base_component.BaseComponent.call(this); + this.mergeConfig({ + optional_params: ['add_field', 'tags', 'add_fields'], + arrays: ['tags'], + hashes: ['add_fields', 'add_field'] + }); +} + +util.inherits(BaseInput, base_component.BaseComponent); + +BaseInput.prototype.init = function(url, callback) { + logger.info('Initializing input', this.config.name); + + this.loadConfig(url, function(err) { + if (this.add_field) { + this.add_fields = this.add_field; + } + callback(err); + }.bind(this)); +}; + +BaseInput.prototype.configure_unserialize = function(serializer) { + if (serializer === 'json_logstash') { + this.unserialize_data = this.unserialize_data_json; + } + else if (serializer === 'raw') { + this.unserialize_data = this.unserialize_data_raw; + } + else if (serializer === 'msgpack') { + this.msgpack = this.requireLib('msgpack'); + this.unserialize_data = this.unserialize_data_msgpack; + } + else { + throw new Error('Unknown serializer ' + serializer); + } +}; + +BaseInput.prototype.unserialize_data_raw = function(data, ok_callback, parse_fail_callback) { + parse_fail_callback(data); +}; + +BaseInput.prototype.unserialize_data_json = function(data, ok_callback, parse_fail_callback) { + try { + var ok = false; + var splitted = data.toString().split('\n'); + for (var i = 0; i < splitted.length; i++) { + if 
(splitted[i] !== '') { + var parsed = JSON.parse(splitted[i]); + if (parsed['@timestamp']) { + ok_callback(parsed); + ok = true; + } + } + } + if (ok) { + return; + } + } + catch (e) {} + return parse_fail_callback(data); +}; + +BaseInput.prototype.unserialize_data_msgpack = function(data, ok_callback, parse_fail_callback) { + try { + var parsed = this.msgpack.unpack(data); + if (parsed['@timestamp']) { + return ok_callback(parsed); + } + } + catch (e) {} + return parse_fail_callback(data); +}; + +BaseInput.prototype.unserializer_config = function() { + return { + optional_params: ['unserializer'], + default_values: { + 'unserializer': 'json_logstash', + }, + start_hook: function(callback) { + this.configure_unserialize(this.unserializer); + callback(); + }, + }; +}; + +exports.BaseInput = BaseInput; diff --git a/lib/lib/base_output.js b/lib/lib/base_output.js new file mode 100644 index 00000000..72320f48 --- /dev/null +++ b/lib/lib/base_output.js @@ -0,0 +1,65 @@ +var base_component = require('./base_component'), + util = require('util'), + logger = require('log4node'); + +function BaseOutput() { + base_component.BaseComponent.call(this); +} + +util.inherits(BaseOutput, base_component.BaseComponent); + +BaseOutput.prototype.init = function(url, callback) { + logger.info('Initializing output', this.config.name); + + this.loadConfig(url, function(err) { + if (err) { + return callback(err); + } + + this.on('data', function(data) { + if (this.processMessage(data)) { + this.process(data); + } + }.bind(this)); + + callback(); + }.bind(this)); +}; + +BaseOutput.prototype.configure_serialize = function(serializer, raw_format) { + if (serializer === 'json_logstash') { + this.serialize_data = function(data) { + return JSON.stringify(data); + }; + } + else if (serializer === 'raw') { + this.serialize_data = function(data) { + return this.replaceByFields(data, raw_format); + }; + } + else if (serializer === 'msgpack') { + var msgpack = this.requireLib('msgpack'); + 
this.serialize_data = function(data) { + return msgpack.pack(data); + }; + } + else { + throw new Error('Unknown serializer ' + serializer); + } +}; + +BaseOutput.prototype.serializer_config = function(default_serializer) { + return { + optional_params: ['format', 'serializer'], + default_values: { + 'format': '#{message}', + 'serializer': default_serializer || 'json_logstash', + }, + start_hook: function(callback) { + this.configure_serialize(this.serializer, this.format); + callback(); + }, + }; +}; + +exports.BaseOutput = BaseOutput; diff --git a/lib/lib/cache_helper.js b/lib/lib/cache_helper.js new file mode 100644 index 00000000..01fccb3b --- /dev/null +++ b/lib/lib/cache_helper.js @@ -0,0 +1,43 @@ +var lru = require('lru-cache'); + +function config() { + return { + optional_params: [ + 'cache_enabled', + 'cache_size', + 'cache_ttl', + ], + default_values: { + 'cache_enabled': true, + 'cache_size': 10000, + 'cache_ttl': 60 * 60 * 3, + }, + start_hook: function(callback) { + var __cache__; + if (this.cache_enabled) { + __cache__ = lru({max: this.cache_size, maxAge: this.cache_ttl * 1000}); + } + this.cache = function(key, callback) { + var r; + if (this.cache_enabled) { + r = __cache__.get(key); + } + if (r) { + return callback(undefined, r); + } + this.cache_miss(key, function(err, r) { + if (err) { + return callback(err); + } + if (this.cache_enabled) { + __cache__.set(key, r); + } + return callback(undefined, r); + }.bind(this)); + }.bind(this); + callback(); + }, + }; +} + +exports.config = config; diff --git a/lib/lib/condition_evaluator.js b/lib/lib/condition_evaluator.js new file mode 100644 index 00000000..e0697597 --- /dev/null +++ b/lib/lib/condition_evaluator.js @@ -0,0 +1,129 @@ + +var compute; + +function get(o, data) { + if (o.value) { + return o.value; + } + if (o.field) { + return data[o.field]; + } + throw new Error('Not implemented get for ' + JSON.stringify(o)); +} + +function get_string(o, data) { + var x = get(o, data); + if (typeof x !== 
'string') { + throw new Error('Need a string, got ' + typeof x); + } + return x; +} + +function get_regexp(o, data) { + return new RegExp(get_string(o, data)); +} + +function get_bool(x, data) { + if (x === true || x === false) { + return x; + } + if (x === 'true') { + return true; + } + if (x === 'false') { + return false; + } + if (x.op) { + return compute(x, data); + } + throw new Error('Not a boolean : ' + JSON.stringify(x)); +} + +function force_string(x) { + if (typeof x !== 'string') { + x = x === undefined ? '' : x.toString(); + } + return x; +} + +function force_number(x) { + if (typeof x !== 'number') { + var z = parseFloat(x, 10); + if (isNaN(z)) { + throw new Error('Unable to cast to int : ' + JSON.stringify(x)); + } + else { + x = z; + } + } + return x; +} + +compute = exports.compute = function(cond, data, op_override) { + var op = op_override || cond.op; + if (op === '==') { + return force_string(get(cond.left, data)) === force_string(get(cond.right, data)); + } + else if (op === '!=') { + return ! compute(cond, data, '=='); + } + else if (op === '=~') { + return get_regexp(cond.right, data).test(force_string(get(cond.left, data))); + } + else if (op === '!~') { + return ! compute(cond, data, '=~'); + } + else if (op === 'in') { + var r1 = get(cond.right, data); + var l1 = force_string(get(cond.left, data)); + if (Array.isArray(r1)) { + for(var i = 0; i < r1.length; i ++) { + if (force_string(r1[i]) === l1) { + return true; + } + } + return false; + } + else { + throw new Error('In / not in right args must be an array : ' + JSON.stringify(r1)); + } + } + else if (op === 'not in') { + return ! compute(cond, data, 'in'); + } + else if (op === '>') { + var l2 = force_number(get(cond.left, data)); + var r2 = force_number(get(cond.right, data)); + return l2 > r2; + } + else if (op === '<=') { + return ! 
compute(cond, data, '>'); + } + else if (op === '>=') { + var l3 = force_number(get(cond.left, data)); + var r3 = force_number(get(cond.right, data)); + return l3 >= r3; + } + else if (op === '<') { + return ! compute(cond, data, '>='); + } + else if (op === '!') { + return ! get_bool(cond.left, data); + } + else if (op === 'and') { + return get_bool(cond.left) && get_bool(cond.right); + } + else if (op === 'or') { + return get_bool(cond.left) || get_bool(cond.right); + } + else if (op === 'xor') { + var r4 = get_bool(cond.right); + return get_bool(cond.left) ? ! r4 : r4; + } + else if (op === 'nand') { + return ! compute(cond, data, 'and'); + } + else { + throw new Error('Not implemented op ' + cond.op); + } +}; \ No newline at end of file diff --git a/lib/lib/config_mapper.js b/lib/lib/config_mapper.js new file mode 100644 index 00000000..8870a0fe --- /dev/null +++ b/lib/lib/config_mapper.js @@ -0,0 +1,70 @@ +var querystring = require('querystring'); + +function compute_url(type, o, callback) { + var keys = Object.keys(o); + if (keys.length !== 1) { + throw new Error('Unable to parse'); + } + var plugin = keys[0]; + var url = type + '://' + plugin + '://'; + callback(plugin, o[plugin], url); +} + +function stringify(plugin_conf) { + Object.keys(plugin_conf).forEach(function(key) { + if (typeof(plugin_conf[key]) === 'object' && ! 
Array.isArray(plugin_conf[key])) { + var s = []; + Object.keys(plugin_conf[key]).forEach(function(k) { + s.push(k + ':' + plugin_conf[key][k]); + }); + plugin_conf[key] = s.join(','); + } + }); + return querystring.stringify(plugin_conf); +} + +exports.map = function(x) { + var res = []; + ['input', 'filter', 'output'].forEach(function(type) { + (x[type] || []).forEach(function(z) { + compute_url(type, z, function(plugin_name, plugin_conf, url) { + if (plugin_name === '__if__') { + var cond_stack = []; + plugin_conf.ifs.forEach(function(x) { + var dynamic_eval = { + false_clauses: cond_stack, + true_clause: x.cond + }; + x.then.forEach(function(z) { + compute_url(type, z, function(plugin_name, plugin_conf, url) { + plugin_conf.__dynamic_eval__ = JSON.stringify(dynamic_eval); + url += '?' + stringify(plugin_conf); + res.push(url); + }); + }); + cond_stack.push(x.cond); + }); + if (plugin_conf.else) { + var dynamic_eval = { + false_clauses: cond_stack, + }; + plugin_conf.else.forEach(function(z) { + compute_url(type, z, function(plugin_name, plugin_conf, url) { + plugin_conf.__dynamic_eval__ = JSON.stringify(dynamic_eval); + url += '?' + stringify(plugin_conf); + res.push(url); + }); + }); + } + } + else { + if (Object.keys(plugin_conf).length > 0) { + url += '?' 
+ stringify(plugin_conf); + } + res.push(url); + } + }); + }); + }); + return res; +}; \ No newline at end of file diff --git a/lib/lib/directory_detector.js b/lib/lib/directory_detector.js new file mode 100644 index 00000000..b8d3fd11 --- /dev/null +++ b/lib/lib/directory_detector.js @@ -0,0 +1,99 @@ +var events = require('events'), + logger = require('log4node'), + fs = require('fs'), + path = require('path'), + util = require('util'), + file_filter = require('./file_filter'), + async = require('async'), + directory_watcher = require('./directory_watcher'); + +function DirectoryDetector() { + events.EventEmitter.call(this); + this.setMaxListeners(0); + this.to_be_closed = []; +} + +util.inherits(DirectoryDetector, events.EventEmitter); + +DirectoryDetector.prototype.start = function(directory, start_callback) { + var called = false; + this.parent_start(directory, true, function() { + if (!called) { + start_callback(); + } + called = true; + }); +}; + +DirectoryDetector.prototype.parent_start = function(directory, first, start_callback) { + logger.debug('Starting directory detector in directory', directory, first); + fs.exists(directory, function(exists) { + if (exists) { + this.emit('exists', directory, false); + if (first) { + start_callback(); + } + return; + } + this.parent = path.resolve(path.join(directory, '..')); + var filter = file_filter.create(path.basename(directory)); + var parent_detector = new DirectoryDetector(); + parent_detector.on('exists', function(d) { + logger.debug('Start monitoring', d, 'waiting for', filter); + try { + var dir_watcher = directory_watcher.register(d, function(event, filename) { + if (event === 'rename' && filter.filter(filename)) { + var f = d + '/' + filename; + fs.exists(f, function(exists) { + if (exists) { + logger.debug('Subdirectory', filename, 'appears in', d); + this.emit('exists', f, true); + } + else { + this.emit('removed', f); + } + }.bind(this)); + } + }.bind(this)); + this.to_be_closed.push({ + close: 
function(callback) { + directory_watcher.unregister(dir_watcher); + callback(); + } + }); + } + catch (err) { + logger.error('Unable to start watcher on directory', d, err); + this.emit('error', err); + return; + } + fs.readdir(d, function(err, l) { + if (err) { + logger.error('Unable to read directory', d, err); + this.emit('error', err); + return; + } + start_callback(); + l.forEach(function(filename) { + if (filter.filter(filename)) { + logger.info('Subdirectory', filename, 'appears in', d); + this.emit('exists', d + '/' + filename, true); + } + }.bind(this)); + }.bind(this)); + }.bind(this)); + parent_detector.on('error', function(err) { + this.emit('error', err); + }.bind(this)); + parent_detector.parent_start(this.parent, false, start_callback); + this.to_be_closed.push(parent_detector); + }.bind(this)); +}; + +DirectoryDetector.prototype.close = function(callback) { + async.eachSeries(this.to_be_closed, function(x, callback) { + x.close(callback); + }, callback); +}; + +exports.DirectoryDetector = DirectoryDetector; diff --git a/lib/lib/directory_watcher.js b/lib/lib/directory_watcher.js new file mode 100644 index 00000000..5f27bc85 --- /dev/null +++ b/lib/lib/directory_watcher.js @@ -0,0 +1,89 @@ +var fs = require('fs'), + path = require('path'), + os = require('os'), + logger = require('log4node'); + +var platform = os.platform(); + +var current = {}; + +exports.register = function(dir, callback) { + dir = path.resolve(dir); + if (!current[dir]) { + logger.info('Create watcher for dir', dir); + if (platform === 'darwin') { + var fsevents = require('fsevents'); + current[dir] = fsevents(dir); + current[dir].start(); + current[dir].exists = {}; + fs.readdir(dir, function(err, l) { + if (!err) { + l.forEach(function(x) { + current[dir].exists[x] = true; + }); + } + }); + } + else { + current[dir] = fs.watch(dir); + } + current[dir].setMaxListeners(0); + } + var local_callback = (platform === 'darwin') ? 
function(filename, event) { + filename = path.basename(filename); + if (event.event === 'moved-out' || event.event === 'deleted') { + if (!current[dir].exists[filename]) { + current[dir].exists[filename] = true; + } + callback('rename', filename); + } + else if (event.event === 'moved-in') { + if (current[dir].exists[filename]) { + delete current[dir].exists[filename]; + callback('rename', filename); + } + else { + callback('change', filename); + } + } + else if (event.event === 'modified') { + if (!current[dir].exists[filename]) { + current[dir].exists[filename] = true; + callback('rename', filename); + } + callback('change', filename); + } + else { + logger.warning('Ignored event', event); + } + } : function(event, filename) { + if (filename === null) { + logger.warning('The event \'change\' of NodeJS fs.watch API does not return a filename. Please update NodeJS or use a compatible OS.'); + return; + } + callback(event, filename); + }; + current[dir].on('change', local_callback); + logger.info('Add watcher on dir', dir, 'listeners', current[dir].listeners('change').length); + return { + dir: dir, + callback: local_callback, + }; +}; + +exports.unregister = function(id) { + logger.info('Remove watcher on dir', id.dir); + current[id.dir].removeListener('change', id.callback); + if (current[id.dir].listeners('change').length === 0) { + logger.info('Removing empty listener on', id.dir); + if (platform === 'darwin') { + current[id.dir].stop(); + } + else { + current[id.dir].close(); + } + delete current[id.dir]; + } +}; + +exports.current = current; diff --git a/lib/lib/elastic_search_helper.js b/lib/lib/elastic_search_helper.js new file mode 100644 index 00000000..4e5f294b --- /dev/null +++ b/lib/lib/elastic_search_helper.js @@ -0,0 +1,15 @@ +function fill0(s, k) { + return s.length === k ? 
s : '0' + fill0(s, k - 1); +} + +function formatDate() { + var now = new Date(); + var year = now.getUTCFullYear(); + var month = fill0((now.getUTCMonth() + 1) + '', 2); + var day = fill0((now.getUTCDate()) + '', 2); + return year + '.' + month + '.' + day; +} + +exports.computePath = function(index_prefix, data_type) { + return '/' + index_prefix + '-' + formatDate() + '/' + data_type; +}; diff --git a/lib/lib/error_buffer.js b/lib/lib/error_buffer.js new file mode 100644 index 00000000..b6984a3a --- /dev/null +++ b/lib/lib/error_buffer.js @@ -0,0 +1,57 @@ +var events = require('events'), + util = require('util'); + +function ErrorBuffer() { + events.EventEmitter.call(this); + this.setMaxListeners(0); +} + +util.inherits(ErrorBuffer, events.EventEmitter); + +ErrorBuffer.prototype.init = function(name, delay, target) { + this.target = target; + this.on_error = false; + this.last_check = undefined; + + this.on('error', function(err) { + if (!this.on_error) { + this.on_error = true; + this.last_check = new Date(); + this.target.emit('error', name + ' start failing: ' + err); + } + else { + if ((new Date()) - this.last_check > delay) { + this.last_check = new Date(); + this.target.emit('error', name + ' still failing.'); + } + } + }.bind(this)); + + this.on('ok', function() { + if (this.on_error) { + this.on_error = false; + this.target.emit('error', name + ' is back to normal.'); + } + }.bind(this)); +}; + +function create(name, delay, target) { + var e = new ErrorBuffer(); + e.init(name, delay, target); + return e; +} + +exports.create = create; + +exports.config = function(fname) { + return { + optional_params: ['error_buffer_delay'], + default_values: { + 'error_buffer_delay': 2000, + }, + start_hook: function(callback) { + this.error_buffer = create(fname.call(this), this.error_buffer_delay, this); + callback(); + }, + }; +}; diff --git a/lib/lib/file_filter.js b/lib/lib/file_filter.js new file mode 100644 index 00000000..4eafef26 --- /dev/null +++ 
b/lib/lib/file_filter.js @@ -0,0 +1,14 @@ +function Filter(filter) { + filter = filter.replace(/\./g, '\\.'); + filter = filter.replace(/\*/g, '.*'); + filter = filter.replace(/\?/g, '.'); + this._filter = new RegExp('^' + filter + '$'); +} + +Filter.prototype.filter = function(filename) { + return filename.match(this._filter) ? true : false; +}; + +exports.create = function(filter) { + return new Filter(filter); +}; diff --git a/lib/lib/file_loader.js b/lib/lib/file_loader.js new file mode 100644 index 00000000..fff798e9 --- /dev/null +++ b/lib/lib/file_loader.js @@ -0,0 +1,71 @@ +var fs = require('fs'), + path = require('path'), + logstash_config = require('../logstash_config'), + config_mapper = require('./config_mapper'), + async = require('async'); + +function filter(logstash_config_processing, s) { + var result = []; + if (s.trim() === '') { + return []; + } + if (!logstash_config_processing || s.match('input://') || s.match('output://') || s.match('filter://')) { + s.split('\n').forEach(function(k) { + var ss = k.trim(); + if (ss.length > 0 && ss[0] !== '#') { + result.push(ss); + } + }); + return result; + } + else { + return config_mapper.map(logstash_config.parse(s)); + } +} + +exports.filter = filter; + +function loadFile(filename, logstash_config_processing, callback) { + if (!callback) { + callback = logstash_config_processing; + logstash_config_processing = false; + } + fs.readFile(filename, function(err, content) { + if (err) { + return callback(err); + } + var x; + try { + x = filter(logstash_config_processing, content.toString()); + } + catch(e) { + callback(new Error('Processing error for file ' + filename + ' : ' + e)); + return; + } + callback(null, x); + }); +} + +exports.loadFile = loadFile; + +function loadDirectory(directory, logstash_config_processing, callback) { + if (!callback) { + callback = logstash_config_processing; + logstash_config_processing = false; + } + fs.readdir(directory, function(err, files) { + if (err) { + return 
callback(err); + } + async.reduce(files.sort(), [], function(result, f, callback) { + loadFile(path.join(directory, f), logstash_config_processing, function(err, result2) { + if (err) { + return callback(err); + } + callback(undefined, result.concat(result2)); + }); + }, callback); + }); +} + +exports.loadDirectory = loadDirectory; diff --git a/lib/lib/monitor_file.js b/lib/lib/monitor_file.js new file mode 100644 index 00000000..47b168ad --- /dev/null +++ b/lib/lib/monitor_file.js @@ -0,0 +1,373 @@ +var fs = require('fs'), + path = require('path'), + util = require('util'), + events = require('events'), + logger = require('log4node'), + async = require('async'), + async_helper = require('../lib/async_helper'), + directory_watcher = require('./directory_watcher'); + +var file_status = {}; + +exports.setFileStatus = function(m) { + file_status = m; +}; + +exports.getFileStatus = function() { + return file_status; +}; + +function FdTailer(fd, current_index, buffer_encoding, buffer_size, is_fifo, event_target) { + this.fd = fd; + this.event_target = event_target; + this.to_be_processed = ''; + this.current_index = current_index; + this.is_fifo = is_fifo; + this.buffer = new Buffer(buffer_size); + this.buffer_encoding = buffer_encoding; + this.read_in_progress = false; + this.current_ino = undefined; +} + +FdTailer.prototype.close = function(callback) { + logger.info('Closing file', this.fd); + fs.close(this.fd, function(err) { + if (callback) { + this.event_target.emit('closed'); + if (err) { + this.event_target.emit('error', err); + } + callback(this.last_data); + } + }.bind(this)); +}; + +FdTailer.prototype.read = function(callback_nothing_read) { + if (!this.fd) { + return; + } + if (this.read_in_progress) { + this.need_read_after_read = true; + } + else { + this.read_in_progress = true; + this.need_read_after_read = false; + var current = this.is_fifo ? 
-1 : this.current_index; + logger.debug('Launch reading on', this.fd, ', current_index', current); + fs.read(this.fd, this.buffer, 0, this.buffer.length, current, function(err, read, buffer) { + this.read_in_progress = false; + if (err) { + return logger.error('Error reading file', this.fd, ':', err); + } + if (read === 0 && callback_nothing_read) { + callback_nothing_read(); + } + if (read > 0) { + logger.debug('Read from', this.fd, ':', read, 'bytes'); + this.current_index += read; + this.handle(read); + if (read === buffer.length || this.need_read_after_read) { + process.nextTick(function() { + this.read(); + }.bind(this)); + } + } + }.bind(this)); + } +}; + +FdTailer.prototype.handle = function(length) { + this.last_data_raw_length = length; + this.last_data = this.buffer.toString(this.buffer_encoding, 0, length); + this.to_be_processed += this.last_data; + while (true) { + var index = this.to_be_processed.indexOf('\n'); + if (index === -1) { + break; + } + if (index > 0) { + var line = this.to_be_processed.slice(0, index); + this.event_target.emit('data', line); + } + this.to_be_processed = this.to_be_processed.slice(index + 1); + } +}; + +function MonitoredFile(filename, options) { + this.filename = path.resolve(filename); + this.options = options; + this.fdTailer = undefined; + this.open_in_progress = false; + this.oldFdTailers = []; + this.wait_delay_after_renaming = this.options.wait_delay_after_renaming || 5000; + this.buffer_encoding = this.options.buffer_encoding || 'utf8'; + this.buffer_size = this.options.buffer_size || 1024; +} + +exports.monitor = function(filename, options) { + return new MonitoredFile(filename, options || {}); +}; + +util.inherits(MonitoredFile, events.EventEmitter); + +MonitoredFile.prototype.close = function(callback) { + logger.info('Closing monitoring for', this.filename); + if (this.dir_watcher) { + logger.debug('Closing directory monitoring for', this.dir); + directory_watcher.unregister(this.dir_watcher); + delete 
this.dir_watcher; + } + async.eachSeries([function(callback) { + if (this.fdTailer === undefined) { + return callback(); + } + this.fdTailer.close(function(last_data) { + file_status[this.filename] = { + last_data: last_data, + index: this.fdTailer.current_index, + }; + callback(); + }.bind(this)); + }.bind(this), function(callback) { + async.eachSeries(this.oldFdTailers, function(fd, callback) { + clearTimeout(fd.id); + fd.f(callback); + }, callback); + }.bind(this)], async_helper.call(), callback); +}; + +MonitoredFile.prototype.start = function(callback, start_index) { + logger.info('Starting monitoring', this.filename); + this.watch(function(err) { + if (err) { + return callback(err); + } + this.on('renamed', function() { + if (this.fdTailer) { + logger.info('File', this.filename, 'renamed'); + var old_ino = this.current_ino; + var currentFdTailer = this.fdTailer; + var close_func = function(callback) { + logger.debug('Now closing postponed', currentFdTailer.fd); + currentFdTailer.close(function() { + if (callback) { + callback(); + } + }); + var index = -1; + for (var i = 0; i < this.oldFdTailers.length; i++) { + if (this.oldFdTailers[i].fdTailer === currentFdTailer) { + index = i; + } + } + if (index !== -1) { + this.oldFdTailers.splice(index, 1); + } + }.bind(this); + var id = setTimeout(close_func, this.wait_delay_after_renaming); + this.oldFdTailers.push({ + fdTailer: currentFdTailer, + id: id, + f: close_func + }); + this.fdTailer = undefined; + file_status[this.filename] = undefined; + this.restart(0, function(err) { + if (err) { + return; + } + if (this.current_ino === old_ino) { + // same file, closing old now + logger.info('Have reopen the same file, closing old fd now instead of waiting', this.wait_delay_after_renaming); + clearTimeout(id); + close_func(); + } + }.bind(this)); + } + else { + logger.debug('Reopen file', this.filename, 'from 0 after renaming'); + this.restart(0); + } + }); + this.on('changed', function() { + if (this.fdTailer) { + 
logger.debug('File', this.filename, 'changed'); + this.fdTailer.read(function() { + var last_data = this.fdTailer.last_data; + var last_data_length = this.fdTailer.last_data_raw_length; + if (last_data) { + var buffer = new Buffer(last_data_length); + fs.read(this.fdTailer.fd, buffer, 0, last_data_length, this.fdTailer.current_index - last_data_length, function(err, bytesRead, buffer) { + if (err) { + return this.emit('error', err); + } + if (bytesRead === last_data_length && last_data === buffer.toString(this.buffer_encoding, 0, last_data_length)) { + logger.debug('Event changed received, but no data change and last data match', this.filename, 'fd', this.fdTailer.fd); + } + else { + logger.info('Event changed received, but no data change and last data does not match.', 'Restarting reading', this.filename, 'at 0 fd', this.fdTailer.fd); + this.restart(0); + } + }.bind(this)); + } + }.bind(this)); + } + else { + if (this.last_open_failed) { + this.restart(0); + } + } + }); + this.on('other_changed', function(filename) { + if (this.oldFdTailers.length > 0) { + logger.debug('Something else has changed in directory and we are monitoring an old file for', this.filename); + this.oldFdTailers.forEach(function(o) { + o.fdTailer.read(); + }); + } + else { + logger.debug('Unprocessed event while monitoring', this.filename, ':', 'changed', filename); + } + }); + this.restart(start_index, callback); + }.bind(this)); +}; + +MonitoredFile.prototype.restart = function(start_index, final_callback) { + if (this.fdTailer) { + this.fdTailer.close(); + delete this.fdTailer; + } + if (this.open_in_progress) { + logger.debug('Postponing file opening', this.filename); + this.need_open_after_open = true; + this.need_open_after_open_start_index = start_index; + if (final_callback) { + return final_callback(new Error('Open already in progress')); + } + } + else { + var final_callback_called = false; + var callback = function(err) { + if (final_callback) { + if (!final_callback_called) { + 
final_callback_called = true; + final_callback(err); + } + return; + } + if (err) { + this.emit('error', err); + } + }.bind(this); + this.open_in_progress = true; + this.need_open_after_open = false; + this.last_open_failed = false; + this.current_ino = undefined; + fs.exists(this.filename, function(exists) { + if (exists) { + logger.debug('Open file for reading', this.filename); + fs.stat(this.filename, function(err, stats) { + if (err) { + return callback(err); + } + if (stats.isFIFO()) { + logger.info('File', this.filename, 'is a FIFO pipe'); + // nodeJS fs.open on a FIFO pipe callback is only called when some data arrive + // we can not wait to inform called init is done + callback(); + } + this.current_ino = stats.ino; + fs.open(this.filename, 'r', function(err, fd) { + this.open_in_progress = false; + if (err) { + this.last_open_failed = true; + return callback(err); + } + this.to_be_processed = ''; + // Some data about file in db_file ? + if (file_status[this.filename] && file_status[this.filename].last_data && file_status[this.filename].index) { + var last_data = file_status[this.filename].last_data; + // Enough data to check last_data ? 
+ if (file_status[this.filename].index >= last_data.length && stats.size > last_data.length) { + var buffer = new Buffer(last_data.length); + fs.read(fd, buffer, 0, last_data.length, file_status[this.filename].index - last_data.length, function(err, bytesRead, buffer) { + if (err) { + return callback(err); + } + if (bytesRead === last_data.length && last_data === buffer.toString(this.buffer_encoding, 0, last_data.length)) { + logger.info('Start from last read index', this.filename, 'at', file_status[this.filename].index, 'fd', fd); + this.fdTailer = new FdTailer(fd, file_status[this.filename].index, this.buffer_encoding, this.buffer_size, stats.isFIFO(), this); + this.fdTailer.read(); + } + else { + logger.info('Have last read index, but last data are not correct.', 'Start reading', this.filename, 'at end fd', fd); + this.fdTailer = new FdTailer(fd, stats.size, this.buffer_encoding, this.buffer_size, stats.isFIFO(), this); + this.fdTailer.read(); + } + callback(); + }.bind(this)); + } + else { + logger.info('Have last read index, but file is too small.', 'Start reading', this.filename, 'at end fd', fd); + this.fdTailer = new FdTailer(fd, stats.size, this.buffer_encoding, this.buffer_size, stats.isFIFO(), this); + this.fdTailer.read(); + callback(); + } + } + // No data about file, starting normally + else { + if (start_index === undefined) { + logger.info('Start reading', this.filename, 'at end', 'fd', fd); + this.fdTailer = new FdTailer(fd, stats.size, this.buffer_encoding, this.buffer_size, stats.isFIFO(), this); + } + else { + logger.info('Start reading', this.filename, 'at', start_index, 'fd', fd); + this.fdTailer = new FdTailer(fd, start_index, this.buffer_encoding, this.buffer_size, stats.isFIFO(), this); + this.fdTailer.read(); + } + callback(); + } + }.bind(this)); + }.bind(this)); + } + else { + this.open_in_progress = false; + logger.info('File does not exist', this.filename); + if (this.need_open_after_open) { + logger.debug('Relaunching open for', 
this.filename, 'at', this.need_open_after_open_start_index); + this.restart(this.need_open_after_open_start_index); + } + callback(); + } + }.bind(this)); + } +}; + +MonitoredFile.prototype.watch = function(callback) { + try { + this.dir = path.dirname(this.filename); + var basename = path.basename(this.filename); + logger.info('Watching dir', this.dir, 'for file', basename); + this.dir_watcher = directory_watcher.register(this.dir, function(event, filename) { + logger.debug('Event received for', this.filename, ':', event, filename); + if (event === 'rename' && basename === filename) { + this.emit('renamed'); + } + else if (event === 'change' && basename === filename) { + this.emit('changed'); + } + else if (event === 'change') { + this.emit('other_changed', filename); + } + else { + logger.debug('Unprocessed event while monitoring', this.filename, ':', event, filename); + } + }.bind(this)); + callback(); + } + catch (err) { + logger.error('Unable to monitor', this.dir, ':', err); + callback(err); + } +}; diff --git a/lib/lib/patterns_loader.js b/lib/lib/patterns_loader.js new file mode 100644 index 00000000..3e1950cd --- /dev/null +++ b/lib/lib/patterns_loader.js @@ -0,0 +1,65 @@ +var fs = require('fs'), + path = require('path'), + async = require('async'), + file_loader = require('./file_loader'); + +var directories = []; + +exports.add = function(dir) { + if (directories.indexOf(dir) === -1) { + directories.push(dir); + } +}; + +exports.load = function(file_name, callback) { + async.reduce(directories, undefined, function(value, d, callback) { + var f = path.join(d, file_name); + fs.exists(f, function(exists) { + if (exists) { + fs.readFile(f, function(err, content) { + if (err) { + return callback(err); + } + var json; + try { + json = JSON.parse(content); + } + catch (e) { + return callback(new Error('Unable to parse file ' + file_name + ' : ' + e)); + } + callback(undefined, json); + }); + } + else { + callback(undefined, value); + } + }); + }, function(err, 
result) { + if (err) { + return callback(err); + } + if (result === undefined) { + return callback(new Error('Pattern file ' + file_name + ' not found')); + } + return callback(undefined, result); + }); +}; + +exports.loadGrokPatterns = function(callback) { + async.reduce(directories, [], function(l, dir, callback) { + var d = dir + '/grok'; + fs.exists(d, function(exists) { + if (exists) { + file_loader.loadDirectory(d, function(err, ll) { + if (err) { + return callback(err); + } + callback(undefined, l.concat(ll)); + }); + } + else { + callback(undefined, l); + } + }); + }, callback); +}; \ No newline at end of file diff --git a/lib/lib/redis_connection_manager.js b/lib/lib/redis_connection_manager.js new file mode 100644 index 00000000..7be8a1ad --- /dev/null +++ b/lib/lib/redis_connection_manager.js @@ -0,0 +1,48 @@ +var redis = require('redis'), + util = require('util'), + events = require('events'), + logger = require('log4node'); + +function RedisConnectionManager(host, port, auth_pass) { + events.EventEmitter.call(this); + this.host = host; + this.port = port; + this.options = {}; + if (auth_pass) { + this.options.auth_pass = auth_pass; + } + + logger.info('Connecting to Redis', this.host + ':' + this.port); + + this.client = redis.createClient(this.port, this.host, this.options); + this.end_callback = function() { + logger.info('Redis connection lost to ' + this.host + ':' + this.port); + }.bind(this); + + this.client.on('end', function() { + this.end_callback(); + }.bind(this)); + + this.client.on('error', function(err) { + this.emit('error', err); + }.bind(this)); + + this.client.on('connect', function() { + logger.info('Connected to Redis', this.host + ':' + this.port); + this.emit('connect', this.client); + }.bind(this)); +} + +util.inherits(RedisConnectionManager, events.EventEmitter); + +RedisConnectionManager.prototype.quit = function(callback) { + logger.info('Closing connection to Redis', this.host + ':' + this.port); + this.emit('before_quit', 
this.client); + this.end_callback = callback; + this.client.quit(); + delete this.client; +}; + +exports.create = function(host, port, retry) { + return new RedisConnectionManager(host, port, retry); +}; diff --git a/lib/lib/regex_helper.js b/lib/lib/regex_helper.js new file mode 100644 index 00000000..d399cb9c --- /dev/null +++ b/lib/lib/regex_helper.js @@ -0,0 +1,53 @@ +var logger = require('log4node'), + moment = require('moment'); + +exports.config = function() { + return { + optional_params: ['numerical_fields', 'date_format'], + default_values: { + 'fields': '', + 'numerical_fields': '', + }, + start_hook: function(callback) { + this.numerical_fields = Array.isArray(this.numerical_fields) ? this.numerical_fields : this.numerical_fields.split(','); + callback(); + }, + }; +}; + +exports.process = function(data, v, i) { + if (v !== undefined && v !== null && v !== '') { + if (this.date_format && (this.fields[i] === 'timestamp' || this.fields[i] === '@timestamp')) { + var m = moment(v, this.date_format); + if (m.year() + m.month() + m.date() + m.hours() + m.minutes() + m.seconds() > 1) { + if (m.year() === 0) { + m.year(moment().year()); + } + data['@timestamp'] = m.format('YYYY-MM-DDTHH:mm:ss.SSSZZ'); + logger.debug('Event timestamp modified to', data['@timestamp']); + } + } + else if (this.fields[i] === 'host') { + data.host = v; + } + else if (this.fields[i] === 'message') { + data.message = v; + } + else { + if (v.match(/^[0-9]+$/)) { + v = parseInt(v, 10); + } + else if (v.match(/^[0-9]+[\.,][0-9]+$/)) { + v = parseFloat(v.replace(',', '.')); + } + else { + if (this.numerical_fields.indexOf(this.fields[i]) !== -1) { + v = undefined; + } + } + if (v !== undefined) { + data[this.fields[i]] = v; + } + } + } +}; \ No newline at end of file diff --git a/lib/lib/sig_listener.js b/lib/lib/sig_listener.js new file mode 100644 index 00000000..e4192170 --- /dev/null +++ b/lib/lib/sig_listener.js @@ -0,0 +1,14 @@ +var events = require('events'); + +var sig_listener = 
new events.EventEmitter(); +sig_listener.setMaxListeners(0); + +process.on('SIGUSR2', function() { + sig_listener.emit('SIGUSR2'); +}); + +process.on('SIGUSR1', function() { + sig_listener.emit('SIGUSR1'); +}); + +exports.sig_listener = sig_listener; diff --git a/lib/lib/sqs_wrapper.js b/lib/lib/sqs_wrapper.js new file mode 100644 index 00000000..a6ce5d90 --- /dev/null +++ b/lib/lib/sqs_wrapper.js @@ -0,0 +1,22 @@ +var AWS = require('aws-sdk'), + url = require('url'); + +exports.config = function() { + return { + host_field: 'aws_queue', + required_params: ['aws_access_key_id', 'aws_secret_access_key'], + default_values: { + }, + start_hook: function(callback) { + var parsed = url.parse('http://' + this.aws_queue); + this.sqs = new AWS.SQS({ + accessKeyId: this.aws_access_key_id, + secretAccessKey: this.aws_secret_access_key, + region: parsed.host.split('.')[1], + apiVersion: '2012-11-05', + }); + this.queue_url = 'https://' + this.aws_queue; + callback(); + }, + }; +}; diff --git a/lib/lib/ssl_helper.js b/lib/lib/ssl_helper.js new file mode 100644 index 00000000..2c25b541 --- /dev/null +++ b/lib/lib/ssl_helper.js @@ -0,0 +1,63 @@ +var fs = require('fs'), + async = require('async'), + logger = require('log4node'); + +function config() { + return { + optional_params: [ + 'ssl', + 'ssl_pfx', + 'ssl_key', + 'ssl_passphrase', + 'ssl_cert', + 'ssl_ca', + 'ssl_crl', + 'ssl_ciphers', + 'ssl_handshakeTimeout', + 'ssl_honorCipherOrder', + 'ssl_requestCert', + 'ssl_rejectUnauthorized', + 'ssl_sessionIdContext', + 'ssl_secureProtocol', + ], + default_values: { + 'ssl': false, + }, + start_hook: function(callback) { + async.eachSeries([ + 'ssl_key', + 'ssl_cert', + 'ssl_ca', + 'ssl_crl', + 'ssl_pfx' + ], function(f, callback) { + if (this[f]) { + logger.info('Load SSL file', f, this[f]); + fs.readFile(this[f], function(err, result) { + if (err) { + return callback(err); + } + this[f] = result; + callback(); + }.bind(this)); + } + else { + callback(); + } + }.bind(this), 
callback); + }, + }; +} + +function merge_options(obj, options) { + for (var x in obj) { + var result = x.match(/ssl_(.*)/); + if (result && typeof obj[x] !== 'function') { + options[result[1]] = obj[x]; + } + } + return options; +} + +exports.config = config; +exports.merge_options = merge_options; diff --git a/lib/lib/tail_file.js b/lib/lib/tail_file.js new file mode 100644 index 00000000..2432c33c --- /dev/null +++ b/lib/lib/tail_file.js @@ -0,0 +1,88 @@ +var fs = require('fs'), + path = require('path'), + util = require('util'), + events = require('events'), + directory_watcher = require('./directory_watcher'), + spawn = require('child_process').spawn, + logger = require('log4node'); + +function Tailer(filename, options) { + this.filename = path.resolve(filename); + this.options = options; +} + +util.inherits(Tailer, events.EventEmitter); + +function split_buffer(buffer, callback) { + var data = buffer.toString(); + while (true) { + var index = data.indexOf('\n'); + if (index === -1) { + break; + } + if (index > 0) { + callback(data.slice(0, index)); + } + data = data.slice(index + 1); + } +} + +Tailer.prototype.tail = function(x) { + logger.debug('Launching tail -f on', this.filename); + this.child = spawn(this.tail_path || 'tail', ['-f', '-n', x, this.filename]); + this.child.stdout.on('data', function(data) { + split_buffer(data, function(line) { + this.emit('data', line); + }.bind(this)); + }.bind(this)); + this.child.stderr.on('data', function(data) { + split_buffer(data, function(line) { + logger.error(line); + }.bind(this)); + }.bind(this)); +}; + +Tailer.prototype.start = function(callback, start_index) { + fs.exists(this.filename, function(exists) { + if (exists) { + this.tail(start_index === 0 ? 
2000 : 0); + // give time for tail start + setTimeout(callback, 200); + } + else { + try { + this.dir = path.dirname(this.filename); + var basename = path.basename(this.filename); + logger.info('Watching dir', this.dir, 'for file', basename); + this.dir_watcher = directory_watcher.register(this.dir, function(event, filename) { + if ((event === 'change' || event === 'rename') && basename === filename && !this.child) { + this.tail(2000); + } + }.bind(this)); + callback(); + } + catch (err) { + logger.error('Unable to monitor dir', this.dir, err); + callback(err); + } + } + }.bind(this)); +}; + +Tailer.prototype.close = function(callback) { + if (this.child) { + logger.debug('Killing tail process on', this.filename); + this.child.kill(); + delete this.child; + } + if (this.dir_watcher) { + logger.debug('Closing directory monitoring for', this.dir); + directory_watcher.unregister(this.dir_watcher); + delete this.dir_watcher; + } + callback(); +}; + +exports.tail = function(filename, options) { + return new Tailer(filename, options || {}); +}; diff --git a/lib/lib/url_parser.js b/lib/lib/url_parser.js new file mode 100644 index 00000000..273e2cae --- /dev/null +++ b/lib/lib/url_parser.js @@ -0,0 +1,37 @@ +var querystring = require('querystring'); + +exports.extractProtocol = function(url) { + var index = url.indexOf('://'); + return index === -1 ? undefined : { + protocol: url.substring(0, index), + next: url.substring(index + 3) + }; +}; + +exports.processUrlContent = function(url) { + if (url.length === 0) { + return undefined; + } + var index = url.indexOf('?'); + var host = index === -1 ? url : url.substring(0, index); + host = querystring.parse('a=' + host).a; + var params = index === -1 ? 
{} : querystring.parse(url.substring(index + 1).replace(/\+/g, '%2B')); + return { + host: host, + params: params + }; +}; + +exports.extractPortNumber = function(host) { + var index = host.indexOf(':'); + if (index === -1) { + return undefined; + } + else { + var port = parseInt(host.substring(index + 1), 10); + return isNaN(port) ? undefined : { + host: host.substring(0, index), + port: port + }; + } +}; diff --git a/lib/logstash_config.jison b/lib/logstash_config.jison new file mode 100644 index 00000000..b8c3c60d --- /dev/null +++ b/lib/logstash_config.jison @@ -0,0 +1,165 @@ + +%{ + function process_string(s) { + return s.replace(/\\n/g, '\n').replace(/\\r/g, '\r').replace(/\\t/g, '\t').replace(/\\"/g, '"').replace(/\\'/g, '\'').replace(/\\\//g, '/'); + } +%} + +/* lexical grammar */ +%lex +%% + +\"(\\\"|[^\"])*\" yytext = process_string(yytext.substr(1, yyleng - 2)); return 'VALUE' +\'(\\\'|[^\'])*\' yytext = process_string(yytext.substr(1, yyleng - 2)); return 'VALUE' +\/(\\\/|[^\/])*\/ yytext = process_string(yytext.substr(1, yyleng - 2)); return 'VALUE' +\s+ /* skip whitespace */ +"#".* /* ignore comment */ +[0-9]+\.[0-9]+[^0-9\.] yytext = parseFloat(yytext, 10); return 'VALUE' +[0-9]+[^0-9\.] yytext = parseInt(yytext, 10); return 'VALUE' +"true" yytext = true; return 'VALUE' +"false" yytext = false; return 'VALUE' +"{" return 'START' +"}" return 'STOP' +"[" return 'ARRAY_START' +"]" return 'ARRAY_STOP' +"(" return 'PARENTHESIS_START' +")" return 'PARENTHESIS_STOP' +"=>" return 'SET' +"," return 'COMA' +"if" return 'IF' +"else" return 'ELSE' +"==" return 'BINARY_OPERATOR' +"!=" return 'BINARY_OPERATOR' +"<" return 'BINARY_OPERATOR' +">" return 'BINARY_OPERATOR' +"<=" return 'BINARY_OPERATOR' +">=" return 'BINARY_OPERATOR' +"=~" return 'BINARY_OPERATOR' +"!~" return 'BINARY_OPERATOR' +"not in" return 'BINARY_OPERATOR' +"in" return 'BINARY_OPERATOR' +"!" 
return 'UNARY_OPERATOR' +"and" return 'CONDITION_OPERATOR' +"or" return 'CONDITION_OPERATOR' +"nand" return 'CONDITION_OPERATOR' +"xor" return 'CONDITION_OPERATOR' +[0-9a-zA-Z_\-\./]+ return 'ID' +<> return 'EOF' + +/lex + +%start logstash_config + +%% /* language grammar */ + +logstash_config + : main_lines EOF + { return $1; } + ; + +main_lines + : main_line + { $$ = {}; $$[$1.key] = $1.value; } + | main_lines main_line + { $$ = $1; $$[$2.key] = $2.value; } + ; + +main_line + : ID START lines STOP + { $$ = {key: $1, value: $3} } + ; + +lines + : lines line + { $$ = $1.concat($2); } + | line + { $$ = [$1]; } + ; + +if + : IF condition START lines STOP + { $$ = {__if__: {ifs: [{cond: $2, then: $4}]}}; } + | IF condition START lines STOP ELSE if + { $$ = $7; $$.__if__.ifs = [{cond: $2, then: $4}].concat($$.__if__.ifs); } + | IF condition START lines STOP ELSE START lines STOP + { $$ = {__if__: {ifs: [{cond: $2, then: $4}], else: $8}}; } + ; + +condition + : condition CONDITION_OPERATOR condition + { $$ = {op: $2, left: $1, right: $3}; } + | sub_condition + { $$ = $1; } + ; + +sub_condition + : condition_member BINARY_OPERATOR condition_member + { $$ = {op: $2, left: $1, right: $3}; } + | PARENTHESIS_START condition PARENTHESIS_STOP + { $$ = $2; } + | UNARY_OPERATOR sub_condition + { $$ = {op: $1, left: $2}; } + ; + +condition_member + : ARRAY_START ID ARRAY_STOP + { $$ = {field: $2}; } + | value + { $$ = {value: $1}; } + ; + +line + : ID plugin_params + { $$ = {}; $$[$1] = $2; } + | if + { $$ = $1; } + ; + +plugin_params + : START STOP + { $$ = {}; } + | START params STOP + { $$ = $2; } + ; + +params + : params param + { $$ = $1; $$[$2.key] = $2.value; } + | params COMA param + { $$ = $1; $$[$3.key] = $3.value; } + | param + { $$ = {}; $$[$1.key] = $1.value; } + ; + +param + : ID SET value + { $$ = {key: $1, value: $3}; } + | value SET value + { $$ = {key: $1, value: $3}; } + ; + +value + : VALUE + { $$ = $1; } + | ID + { $$ = $1; } + | ARRAY_START values ARRAY_STOP 
+ { $$ = $2; } + | START params STOP + { $$ = $2; } + ; + +values_member + : VALUE + { $$ = $1; } + | ID + { $$ = $1; } + ; + +values + : values_member + { $$ = [$1]; } + | values COMA values_member + { $$ = $1.concat($3); } + ; + diff --git a/lib/logstash_config.js b/lib/logstash_config.js new file mode 100644 index 00000000..b1d0843a --- /dev/null +++ b/lib/logstash_config.js @@ -0,0 +1,729 @@ +/* parser generated by jison 0.4.15 */ +/* + Returns a Parser object of the following structure: + + Parser: { + yy: {} + } + + Parser.prototype: { + yy: {}, + trace: function(), + symbols_: {associative list: name ==> number}, + terminals_: {associative list: number ==> name}, + productions_: [...], + performAction: function anonymous(yytext, yyleng, yylineno, yy, yystate, $$, _$), + table: [...], + defaultActions: {...}, + parseError: function(str, hash), + parse: function(input), + + lexer: { + EOF: 1, + parseError: function(str, hash), + setInput: function(input), + input: function(), + unput: function(str), + more: function(), + less: function(n), + pastInput: function(), + upcomingInput: function(), + showPosition: function(), + test_match: function(regex_match_array, rule_index), + next: function(), + lex: function(), + begin: function(condition), + popState: function(), + _currentRules: function(), + topState: function(), + pushState: function(condition), + + options: { + ranges: boolean (optional: true ==> token location info will include a .range[] member) + flex: boolean (optional: true ==> flex-like lexing behaviour where the rules are tested exhaustively to find the longest match) + backtrack_lexer: boolean (optional: true ==> lexer regexes are tested in order and for each matching regex the action code is invoked; the lexer terminates the scan when a token is returned by the action code) + }, + + performAction: function(yy, yy_, $avoiding_name_collisions, YY_START), + rules: [...], + conditions: {associative list: name ==> set}, + } + } + + + token location 
info (@$, _$, etc.): { + first_line: n, + last_line: n, + first_column: n, + last_column: n, + range: [start_number, end_number] (where the numbers are indexes into the input string, regular zero-based) + } + + + the parseError function receives a 'hash' object with these members for lexer and parser errors: { + text: (matched text) + token: (the produced terminal token, if any) + line: (yylineno) + } + while parser (grammar) errors will also provide these members, i.e. parser errors deliver a superset of attributes: { + loc: (yylloc) + expected: (string describing the set of expected tokens) + recoverable: (boolean: TRUE when the parser has a error recovery rule available for this particular error) + } +*/ +var parser = (function(){ +var o=function(k,v,o,l){for(o=o||{},l=k.length;l--;o[k[l]]=v);return o},$V0=[1,4],$V1=[5,7],$V2=[1,10],$V3=[1,12],$V4=[7,10,13],$V5=[1,25],$V6=[1,26],$V7=[1,20],$V8=[1,21],$V9=[1,22],$Va=[1,24],$Vb=[1,30],$Vc=[1,32],$Vd=[1,34],$Ve=[8,16,21],$Vf=[1,41],$Vg=[8,16,19,21],$Vh=[7,8,10,16,19,21,23,29,30,31],$Vi=[1,45],$Vj=[7,8,10,23,29,31],$Vk=[1,48],$Vl=[2,31],$Vm=[24,29]; +var parser = {trace: function trace() { }, +yy: {}, +symbols_: {"error":2,"logstash_config":3,"main_lines":4,"EOF":5,"main_line":6,"ID":7,"START":8,"lines":9,"STOP":10,"line":11,"if":12,"IF":13,"condition":14,"ELSE":15,"CONDITION_OPERATOR":16,"sub_condition":17,"condition_member":18,"BINARY_OPERATOR":19,"PARENTHESIS_START":20,"PARENTHESIS_STOP":21,"UNARY_OPERATOR":22,"ARRAY_START":23,"ARRAY_STOP":24,"value":25,"plugin_params":26,"params":27,"param":28,"COMA":29,"SET":30,"VALUE":31,"values":32,"values_member":33,"$accept":0,"$end":1}, +terminals_: {2:"error",5:"EOF",7:"ID",8:"START",10:"STOP",13:"IF",15:"ELSE",16:"CONDITION_OPERATOR",19:"BINARY_OPERATOR",20:"PARENTHESIS_START",21:"PARENTHESIS_STOP",22:"UNARY_OPERATOR",23:"ARRAY_START",24:"ARRAY_STOP",29:"COMA",30:"SET",31:"VALUE"}, +productions_: 
[0,[3,2],[4,1],[4,2],[6,4],[9,2],[9,1],[12,5],[12,7],[12,9],[14,3],[14,1],[17,3],[17,3],[17,2],[18,3],[18,1],[11,2],[11,1],[26,2],[26,3],[27,2],[27,3],[27,1],[28,3],[28,3],[25,1],[25,1],[25,3],[25,3],[33,1],[33,1],[32,1],[32,3]], +performAction: function anonymous(yytext, yyleng, yylineno, yy, yystate /* action[1] */, $$ /* vstack */, _$ /* lstack */) { +/* this == yyval */ + +var $0 = $$.length - 1; +switch (yystate) { +case 1: + return $$[$0-1]; +break; +case 2: case 23: + this.$ = {}; this.$[$$[$0].key] = $$[$0].value; +break; +case 3: case 21: + this.$ = $$[$0-1]; this.$[$$[$0].key] = $$[$0].value; +break; +case 4: + this.$ = {key: $$[$0-3], value: $$[$0-1]} +break; +case 5: + this.$ = $$[$0-1].concat($$[$0]); +break; +case 6: case 32: + this.$ = [$$[$0]]; +break; +case 7: + this.$ = {__if__: {ifs: [{cond: $$[$0-3], then: $$[$0-1]}]}}; +break; +case 8: + this.$ = $$[$0]; this.$.__if__.ifs = [{cond: $$[$0-5], then: $$[$0-3]}].concat(this.$.__if__.ifs); +break; +case 9: + this.$ = {__if__: {ifs: [{cond: $$[$0-7], then: $$[$0-5]}], else: $$[$0-1]}}; +break; +case 10: case 12: + this.$ = {op: $$[$0-1], left: $$[$0-2], right: $$[$0]}; +break; +case 11: case 18: case 26: case 27: case 30: case 31: + this.$ = $$[$0]; +break; +case 13: case 20: case 28: case 29: + this.$ = $$[$0-1]; +break; +case 14: + this.$ = {op: $$[$0-1], left: $$[$0]}; +break; +case 15: + this.$ = {field: $$[$0-1]}; +break; +case 16: + this.$ = {value: $$[$0]}; +break; +case 17: + this.$ = {}; this.$[$$[$0-1]] = $$[$0]; +break; +case 19: + this.$ = {}; +break; +case 22: + this.$ = $$[$0-2]; this.$[$$[$0].key] = $$[$0].value; +break; +case 24: case 25: + this.$ = {key: $$[$0-2], value: $$[$0]}; +break; +case 33: + this.$ = $$[$0-2].concat($$[$0]); +break; +} +}, +table: 
[{3:1,4:2,6:3,7:$V0},{1:[3]},{5:[1,5],6:6,7:$V0},o($V1,[2,2]),{8:[1,7]},{1:[2,1]},o($V1,[2,3]),{7:$V2,9:8,11:9,12:11,13:$V3},{7:$V2,10:[1,13],11:14,12:11,13:$V3},o($V4,[2,6]),{8:[1,16],26:15},o($V4,[2,18]),{7:$V5,8:$V6,14:17,17:18,18:19,20:$V7,22:$V8,23:$V9,25:23,31:$Va},o($V1,[2,4]),o($V4,[2,5]),o($V4,[2,17]),{7:$Vb,8:$V6,10:[1,27],23:$Vc,25:31,27:28,28:29,31:$Va},{8:[1,33],16:$Vd},o($Ve,[2,11]),{19:[1,35]},{7:$V5,8:$V6,14:36,17:18,18:19,20:$V7,22:$V8,23:$V9,25:23,31:$Va},{7:$V5,8:$V6,17:37,18:19,20:$V7,22:$V8,23:$V9,25:23,31:$Va},{7:[1,38],31:$Vf,32:39,33:40},o($Vg,[2,16]),o($Vh,[2,26]),o([7,8,10,16,19,21,23,29,31],[2,27]),{7:$Vb,8:$V6,23:$Vc,25:31,27:42,28:29,31:$Va},o($V4,[2,19]),{7:$Vb,8:$V6,10:[1,43],23:$Vc,25:31,28:44,29:$Vi,31:$Va},o($Vj,[2,23]),{30:[1,46]},{30:[1,47]},{7:$Vk,31:$Vf,32:39,33:40},{7:$V2,9:49,11:9,12:11,13:$V3},{7:$V5,8:$V6,14:50,17:18,18:19,20:$V7,22:$V8,23:$V9,25:23,31:$Va},{7:$V5,8:$V6,18:51,23:$V9,25:23,31:$Va},{16:$Vd,21:[1,52]},o($Ve,[2,14]),{24:[1,53],29:$Vl},{24:[1,54],29:[1,55]},o($Vm,[2,32]),o($Vm,[2,30]),{7:$Vb,8:$V6,10:[1,56],23:$Vc,25:31,28:44,29:$Vi,31:$Va},o($V4,[2,20]),o($Vj,[2,21]),{7:$Vb,8:$V6,23:$Vc,25:31,28:57,31:$Va},{7:$V5,8:$V6,23:$Vc,25:58,31:$Va},{7:$V5,8:$V6,23:$Vc,25:59,31:$Va},o($Vm,$Vl),{7:$V2,10:[1,60],11:14,12:11,13:$V3},o([8,21],[2,10],{16:$Vd}),o($Ve,[2,12]),o($Ve,[2,13]),o($Vg,[2,15]),o($Vh,[2,28]),{7:$Vk,31:$Vf,33:61},o($Vh,[2,29]),o($Vj,[2,22]),o($Vj,[2,24]),o($Vj,[2,25]),o($V4,[2,7],{15:[1,62]}),o($Vm,[2,33]),{8:[1,64],12:63,13:$V3},o($V4,[2,8]),{7:$V2,9:65,11:9,12:11,13:$V3},{7:$V2,10:[1,66],11:14,12:11,13:$V3},o($V4,[2,9])], +defaultActions: {5:[2,1]}, +parseError: function parseError(str, hash) { + if (hash.recoverable) { + this.trace(str); + } else { + throw new Error(str); + } +}, +parse: function parse(input) { + var self = this, stack = [0], tstack = [], vstack = [null], lstack = [], table = this.table, yytext = '', yylineno = 0, yyleng = 0, recovering = 0, TERROR = 2, EOF = 1; + var args = 
lstack.slice.call(arguments, 1); + var lexer = Object.create(this.lexer); + var sharedState = { yy: {} }; + for (var k in this.yy) { + if (Object.prototype.hasOwnProperty.call(this.yy, k)) { + sharedState.yy[k] = this.yy[k]; + } + } + lexer.setInput(input, sharedState.yy); + sharedState.yy.lexer = lexer; + sharedState.yy.parser = this; + if (typeof lexer.yylloc == 'undefined') { + lexer.yylloc = {}; + } + var yyloc = lexer.yylloc; + lstack.push(yyloc); + var ranges = lexer.options && lexer.options.ranges; + if (typeof sharedState.yy.parseError === 'function') { + this.parseError = sharedState.yy.parseError; + } else { + this.parseError = Object.getPrototypeOf(this).parseError; + } + function popStack(n) { + stack.length = stack.length - 2 * n; + vstack.length = vstack.length - n; + lstack.length = lstack.length - n; + } + _token_stack: + function lex() { + var token; + token = lexer.lex() || EOF; + if (typeof token !== 'number') { + token = self.symbols_[token] || token; + } + return token; + } + var symbol, preErrorSymbol, state, action, a, r, yyval = {}, p, len, newState, expected; + while (true) { + state = stack[stack.length - 1]; + if (this.defaultActions[state]) { + action = this.defaultActions[state]; + } else { + if (symbol === null || typeof symbol == 'undefined') { + symbol = lex(); + } + action = table[state] && table[state][symbol]; + } + if (typeof action === 'undefined' || !action.length || !action[0]) { + var errStr = ''; + expected = []; + for (p in table[state]) { + if (this.terminals_[p] && p > TERROR) { + expected.push('\'' + this.terminals_[p] + '\''); + } + } + if (lexer.showPosition) { + errStr = 'Parse error on line ' + (yylineno + 1) + ':\n' + lexer.showPosition() + '\nExpecting ' + expected.join(', ') + ', got \'' + (this.terminals_[symbol] || symbol) + '\''; + } else { + errStr = 'Parse error on line ' + (yylineno + 1) + ': Unexpected ' + (symbol == EOF ? 
'end of input' : '\'' + (this.terminals_[symbol] || symbol) + '\''); + } + this.parseError(errStr, { + text: lexer.match, + token: this.terminals_[symbol] || symbol, + line: lexer.yylineno, + loc: yyloc, + expected: expected + }); + } + if (action[0] instanceof Array && action.length > 1) { + throw new Error('Parse Error: multiple actions possible at state: ' + state + ', token: ' + symbol); + } + switch (action[0]) { + case 1: + stack.push(symbol); + vstack.push(lexer.yytext); + lstack.push(lexer.yylloc); + stack.push(action[1]); + symbol = null; + if (!preErrorSymbol) { + yyleng = lexer.yyleng; + yytext = lexer.yytext; + yylineno = lexer.yylineno; + yyloc = lexer.yylloc; + if (recovering > 0) { + recovering--; + } + } else { + symbol = preErrorSymbol; + preErrorSymbol = null; + } + break; + case 2: + len = this.productions_[action[1]][1]; + yyval.$ = vstack[vstack.length - len]; + yyval._$ = { + first_line: lstack[lstack.length - (len || 1)].first_line, + last_line: lstack[lstack.length - 1].last_line, + first_column: lstack[lstack.length - (len || 1)].first_column, + last_column: lstack[lstack.length - 1].last_column + }; + if (ranges) { + yyval._$.range = [ + lstack[lstack.length - (len || 1)].range[0], + lstack[lstack.length - 1].range[1] + ]; + } + r = this.performAction.apply(yyval, [ + yytext, + yyleng, + yylineno, + sharedState.yy, + action[1], + vstack, + lstack + ].concat(args)); + if (typeof r !== 'undefined') { + return r; + } + if (len) { + stack = stack.slice(0, -1 * len * 2); + vstack = vstack.slice(0, -1 * len); + lstack = lstack.slice(0, -1 * len); + } + stack.push(this.productions_[action[1]][0]); + vstack.push(yyval.$); + lstack.push(yyval._$); + newState = table[stack[stack.length - 2]][stack[stack.length - 1]]; + stack.push(newState); + break; + case 3: + return true; + } + } + return true; +}}; + + function process_string(s) { + return s.replace(/\\n/g, '\n').replace(/\\r/g, '\r').replace(/\\t/g, '\t').replace(/\\"/g, '"').replace(/\\'/g, 
'\'').replace(/\\\//g, '/'); + } +/* generated by jison-lex 0.3.4 */ +var lexer = (function(){ +var lexer = ({ + +EOF:1, + +parseError:function parseError(str, hash) { + if (this.yy.parser) { + this.yy.parser.parseError(str, hash); + } else { + throw new Error(str); + } + }, + +// resets the lexer, sets new input +setInput:function (input, yy) { + this.yy = yy || this.yy || {}; + this._input = input; + this._more = this._backtrack = this.done = false; + this.yylineno = this.yyleng = 0; + this.yytext = this.matched = this.match = ''; + this.conditionStack = ['INITIAL']; + this.yylloc = { + first_line: 1, + first_column: 0, + last_line: 1, + last_column: 0 + }; + if (this.options.ranges) { + this.yylloc.range = [0,0]; + } + this.offset = 0; + return this; + }, + +// consumes and returns one char from the input +input:function () { + var ch = this._input[0]; + this.yytext += ch; + this.yyleng++; + this.offset++; + this.match += ch; + this.matched += ch; + var lines = ch.match(/(?:\r\n?|\n).*/g); + if (lines) { + this.yylineno++; + this.yylloc.last_line++; + } else { + this.yylloc.last_column++; + } + if (this.options.ranges) { + this.yylloc.range[1]++; + } + + this._input = this._input.slice(1); + return ch; + }, + +// unshifts one char (or a string) into the input +unput:function (ch) { + var len = ch.length; + var lines = ch.split(/(?:\r\n?|\n)/g); + + this._input = ch + this._input; + this.yytext = this.yytext.substr(0, this.yytext.length - len); + //this.yyleng -= len; + this.offset -= len; + var oldLines = this.match.split(/(?:\r\n?|\n)/g); + this.match = this.match.substr(0, this.match.length - 1); + this.matched = this.matched.substr(0, this.matched.length - 1); + + if (lines.length - 1) { + this.yylineno -= lines.length - 1; + } + var r = this.yylloc.range; + + this.yylloc = { + first_line: this.yylloc.first_line, + last_line: this.yylineno + 1, + first_column: this.yylloc.first_column, + last_column: lines ? + (lines.length === oldLines.length ? 
this.yylloc.first_column : 0) + + oldLines[oldLines.length - lines.length].length - lines[0].length : + this.yylloc.first_column - len + }; + + if (this.options.ranges) { + this.yylloc.range = [r[0], r[0] + this.yyleng - len]; + } + this.yyleng = this.yytext.length; + return this; + }, + +// When called from action, caches matched text and appends it on next action +more:function () { + this._more = true; + return this; + }, + +// When called from action, signals the lexer that this rule fails to match the input, so the next matching rule (regex) should be tested instead. +reject:function () { + if (this.options.backtrack_lexer) { + this._backtrack = true; + } else { + return this.parseError('Lexical error on line ' + (this.yylineno + 1) + '. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n' + this.showPosition(), { + text: "", + token: null, + line: this.yylineno + }); + + } + return this; + }, + +// retain first n characters of the match +less:function (n) { + this.unput(this.match.slice(n)); + }, + +// displays already matched input, i.e. for error messages +pastInput:function () { + var past = this.matched.substr(0, this.matched.length - this.match.length); + return (past.length > 20 ? '...':'') + past.substr(-20).replace(/\n/g, ""); + }, + +// displays upcoming input, i.e. for error messages +upcomingInput:function () { + var next = this.match; + if (next.length < 20) { + next += this._input.substr(0, 20-next.length); + } + return (next.substr(0,20) + (next.length > 20 ? '...' : '')).replace(/\n/g, ""); + }, + +// displays the character position where the lexing error occurred, i.e. 
for error messages +showPosition:function () { + var pre = this.pastInput(); + var c = new Array(pre.length + 1).join("-"); + return pre + this.upcomingInput() + "\n" + c + "^"; + }, + +// test the lexed token: return FALSE when not a match, otherwise return token +test_match:function (match, indexed_rule) { + var token, + lines, + backup; + + if (this.options.backtrack_lexer) { + // save context + backup = { + yylineno: this.yylineno, + yylloc: { + first_line: this.yylloc.first_line, + last_line: this.last_line, + first_column: this.yylloc.first_column, + last_column: this.yylloc.last_column + }, + yytext: this.yytext, + match: this.match, + matches: this.matches, + matched: this.matched, + yyleng: this.yyleng, + offset: this.offset, + _more: this._more, + _input: this._input, + yy: this.yy, + conditionStack: this.conditionStack.slice(0), + done: this.done + }; + if (this.options.ranges) { + backup.yylloc.range = this.yylloc.range.slice(0); + } + } + + lines = match[0].match(/(?:\r\n?|\n).*/g); + if (lines) { + this.yylineno += lines.length; + } + this.yylloc = { + first_line: this.yylloc.last_line, + last_line: this.yylineno + 1, + first_column: this.yylloc.last_column, + last_column: lines ? 
+ lines[lines.length - 1].length - lines[lines.length - 1].match(/\r?\n?/)[0].length : + this.yylloc.last_column + match[0].length + }; + this.yytext += match[0]; + this.match += match[0]; + this.matches = match; + this.yyleng = this.yytext.length; + if (this.options.ranges) { + this.yylloc.range = [this.offset, this.offset += this.yyleng]; + } + this._more = false; + this._backtrack = false; + this._input = this._input.slice(match[0].length); + this.matched += match[0]; + token = this.performAction.call(this, this.yy, this, indexed_rule, this.conditionStack[this.conditionStack.length - 1]); + if (this.done && this._input) { + this.done = false; + } + if (token) { + return token; + } else if (this._backtrack) { + // recover context + for (var k in backup) { + this[k] = backup[k]; + } + return false; // rule action called reject() implying the next rule should be tested instead. + } + return false; + }, + +// return next match in input +next:function () { + if (this.done) { + return this.EOF; + } + if (!this._input) { + this.done = true; + } + + var token, + match, + tempMatch, + index; + if (!this._more) { + this.yytext = ''; + this.match = ''; + } + var rules = this._currentRules(); + for (var i = 0; i < rules.length; i++) { + tempMatch = this._input.match(this.rules[rules[i]]); + if (tempMatch && (!match || tempMatch[0].length > match[0].length)) { + match = tempMatch; + index = i; + if (this.options.backtrack_lexer) { + token = this.test_match(tempMatch, rules[i]); + if (token !== false) { + return token; + } else if (this._backtrack) { + match = false; + continue; // rule action called reject() implying a rule MISmatch. + } else { + // else: this is a lexer rule which consumes input without producing a token (e.g. 
whitespace) + return false; + } + } else if (!this.options.flex) { + break; + } + } + } + if (match) { + token = this.test_match(match, rules[index]); + if (token !== false) { + return token; + } + // else: this is a lexer rule which consumes input without producing a token (e.g. whitespace) + return false; + } + if (this._input === "") { + return this.EOF; + } else { + return this.parseError('Lexical error on line ' + (this.yylineno + 1) + '. Unrecognized text.\n' + this.showPosition(), { + text: "", + token: null, + line: this.yylineno + }); + } + }, + +// return next match that has a token +lex:function lex() { + var r = this.next(); + if (r) { + return r; + } else { + return this.lex(); + } + }, + +// activates a new lexer condition state (pushes the new lexer condition state onto the condition stack) +begin:function begin(condition) { + this.conditionStack.push(condition); + }, + +// pop the previously active lexer condition state off the condition stack +popState:function popState() { + var n = this.conditionStack.length - 1; + if (n > 0) { + return this.conditionStack.pop(); + } else { + return this.conditionStack[0]; + } + }, + +// produce the lexer rule set which is active for the currently active lexer condition state +_currentRules:function _currentRules() { + if (this.conditionStack.length && this.conditionStack[this.conditionStack.length - 1]) { + return this.conditions[this.conditionStack[this.conditionStack.length - 1]].rules; + } else { + return this.conditions["INITIAL"].rules; + } + }, + +// return the currently active lexer condition state; when an index argument is provided it produces the N-th previous condition state, if available +topState:function topState(n) { + n = this.conditionStack.length - 1 - Math.abs(n || 0); + if (n >= 0) { + return this.conditionStack[n]; + } else { + return "INITIAL"; + } + }, + +// alias for begin(condition) +pushState:function pushState(condition) { + this.begin(condition); + }, + +// return the number of states 
currently on the stack +stateStackSize:function stateStackSize() { + return this.conditionStack.length; + }, +options: {}, +performAction: function anonymous(yy,yy_,$avoiding_name_collisions,YY_START) { +var YYSTATE=YY_START; +switch($avoiding_name_collisions) { +case 0:yy_.yytext = process_string(yy_.yytext.substr(1, yy_.yyleng - 2)); return 31 +break; +case 1:yy_.yytext = process_string(yy_.yytext.substr(1, yy_.yyleng - 2)); return 31 +break; +case 2:yy_.yytext = process_string(yy_.yytext.substr(1, yy_.yyleng - 2)); return 31 +break; +case 3:/* skip whitespace */ +break; +case 4:/* ignore comment */ +break; +case 5:yy_.yytext = parseFloat(yy_.yytext, 10); return 31 +break; +case 6:yy_.yytext = parseInt(yy_.yytext, 10); return 31 +break; +case 7:yy_.yytext = true; return 31 +break; +case 8:yy_.yytext = false; return 31 +break; +case 9:return 8 +break; +case 10:return 10 +break; +case 11:return 23 +break; +case 12:return 24 +break; +case 13:return 20 +break; +case 14:return 21 +break; +case 15:return 30 +break; +case 16:return 29 +break; +case 17:return 13 +break; +case 18:return 15 +break; +case 19:return 19 +break; +case 20:return 19 +break; +case 21:return 19 +break; +case 22:return 19 +break; +case 23:return 19 +break; +case 24:return 19 +break; +case 25:return 19 +break; +case 26:return 19 +break; +case 27:return 19 +break; +case 28:return 19 +break; +case 29:return 22 +break; +case 30:return 16 +break; +case 31:return 16 +break; +case 32:return 16 +break; +case 33:return 16 +break; +case 34:return 7 +break; +case 35:return 5 +break; +} +}, +rules: [/^(?:"(\\"|[^\"])*")/,/^(?:'(\\'|[^\'])*')/,/^(?:\/(\\\/|[^\/])*\/)/,/^(?:\s+)/,/^(?:#.*)/,/^(?:[0-9]+\.[0-9]+[^0-9\.])/,/^(?:[0-9]+[^0-9\.])/,/^(?:true\b)/,/^(?:false\b)/,/^(?:\{)/,/^(?:\})/,/^(?:\[)/,/^(?:\])/,/^(?:\()/,/^(?:\))/,/^(?:=>)/,/^(?:,)/,/^(?:if\b)/,/^(?:else\b)/,/^(?:==)/,/^(?:!=)/,/^(?:<)/,/^(?:>)/,/^(?:<=)/,/^(?:>=)/,/^(?:=~)/,/^(?:!~)/,/^(?:not 
in\b)/,/^(?:in\b)/,/^(?:!)/,/^(?:and\b)/,/^(?:or\b)/,/^(?:nand\b)/,/^(?:xor\b)/,/^(?:[0-9a-zA-Z_\-\.\/]+)/,/^(?:$)/], +conditions: {"INITIAL":{"rules":[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35],"inclusive":true}} +}); +return lexer; +})(); +parser.lexer = lexer; +function Parser () { + this.yy = {}; +} +Parser.prototype = parser;parser.Parser = Parser; +return new Parser; +})(); + + +if (typeof require !== 'undefined' && typeof exports !== 'undefined') { +exports.parser = parser; +exports.Parser = parser.Parser; +exports.parse = function () { return parser.parse.apply(parser, arguments); }; +exports.main = function commonjsMain(args) { + if (!args[1]) { + console.log('Usage: '+args[0]+' FILE'); + process.exit(1); + } + var source = require('fs').readFileSync(require('path').normalize(args[1]), "utf8"); + return exports.parser.parse(source); +}; +if (typeof module !== 'undefined' && require.main === module) { + exports.main(process.argv.slice(1)); +} +} \ No newline at end of file diff --git a/lib/outputs/abstract_http.js b/lib/outputs/abstract_http.js new file mode 100644 index 00000000..958e6cde --- /dev/null +++ b/lib/outputs/abstract_http.js @@ -0,0 +1,89 @@ +var base_output = require('../lib/base_output'), + util = require('util'), + http = require('http'), + https = require('https'), + logger = require('log4node'), + ssl_helper = require('../lib/ssl_helper'), + error_buffer = require('../lib/error_buffer'); + +function AbstractHttp() { + base_output.BaseOutput.call(this); + this.mergeConfig(ssl_helper.config()); + this.mergeConfig(error_buffer.config(function() { + return 'output HTTP Post to ' + this.host; + })); + this.mergeConfig({ + name: 'Abstract Http', + host_field: 'host', + port_field: 'port', + required_params: [], + optional_params: ['proxy', 'basic_auth_password', 'basic_auth_user'], + start_hook: this.startAbstract, + }); +} + +util.inherits(AbstractHttp, base_output.BaseOutput); + 
+AbstractHttp.prototype.startAbstract = function(callback) { // shared start hook: set up proxy agent and basic-auth header
+  logger.info('Start HTTP output to' + this.to());
+
+  if (this.proxy) {
+    var HttpProxyAgent = this.ssl ? require('https-proxy-agent') : require('http-proxy-agent');
+    this.custom_agent = new HttpProxyAgent(this.proxy);
+    logger.info('Using http proxy ' + this.proxy);
+  }
+
+  if (this.basic_auth_user && this.basic_auth_password) {
+    this.basic_auth = 'Basic ' + new Buffer(this.basic_auth_user + ':' + this.basic_auth_password).toString('base64');
+  }
+
+  callback();
+};
+
+AbstractHttp.prototype.sendHttpRequest = function(http_options, body) { // issue one HTTP(S) request, reporting status through error_buffer
+  if (this.custom_agent) {
+    http_options.agent = this.custom_agent;
+  }
+  if (this.basic_auth) {
+    if (!http_options.headers) {
+      http_options.headers = {};
+    }
+    http_options.headers.Authorization = this.basic_auth;
+  }
+  if (this.path_prefix) {
+    http_options.path = this.path_prefix + http_options.path;
+  }
+  var listener = function(res) {
+    if (res.statusCode < 200 || res.statusCode > 299) {
+      this.error_buffer.emit('error', 'Wrong HTTP Post return code: ' + res.statusCode);
+    }
+    else {
+      this.error_buffer.emit('ok');
+    }
+    res.on('data', function() {}); // drain the response so the socket is released
+  }.bind(this);
+
+  var req = this.ssl ? https.request(ssl_helper.merge_options(this, http_options), listener) : http.request(http_options, listener);
+
+  req.on('error', function(e) {
+    this.error_buffer.emit('error', e.message);
+  }.bind(this));
+
+  // waiting for the socket is needed in some proxy scenarios
+  req.once('socket', function() {
+    req.write(body);
+    req.end();
+  });
+};
+
+AbstractHttp.prototype.close = function(callback) {
+  logger.info('Closing HTTP Post output to', this.host, this.port, 'ssl ' + this.ssl);
+  if (this.httpClose) {
+    this.httpClose(callback);
+  }
+  else {
+    callback();
+  }
+};
+
+exports.AbstractHttp = AbstractHttp;
diff --git a/lib/outputs/abstract_tcp.js b/lib/outputs/abstract_tcp.js
new file mode 100644
index 00000000..7f51579f
--- /dev/null
+++ b/lib/outputs/abstract_tcp.js
@@ -0,0 +1,105 @@
+var base_output = require('../lib/base_output'),
+  util = require('util'),
+  net = require('net'),
+  tls = require('tls'),
+  logger = require('log4node'),
+  ssl_helper = require('../lib/ssl_helper'),
+  error_buffer = require('../lib/error_buffer');
+
+function AbstractTcp() { // base class for outputs writing to a TCP / TLS / unix socket
+  base_output.BaseOutput.call(this);
+  this.mergeConfig(ssl_helper.config());
+  this.mergeConfig(error_buffer.config(function() {
+    return 'output tcp to ' + this.host + ':' + this.port;
+  }));
+  this.mergeConfig({
+    name: 'AbstractTcp',
+    host_field: 'host',
+    port_field: 'port',
+    start_hook: this.startAbstract,
+  });
+}
+
+util.inherits(AbstractTcp, base_output.BaseOutput);
+
+AbstractTcp.prototype.startAbstract = function(callback) {
+  logger.info('Start output to' + this.to());
+
+  this.closed_callback = function() {};
+  this.in_progress = false;
+
+  callback();
+};
+
+AbstractTcp.prototype.findConnection = function(callback) { // lazy connect; concurrent callers wait on the 'connected' event
+  if (this.connection) {
+    return callback(this.connection);
+  }
+
+  if (this.in_progress) {
+    return this.once('connected', function() {
+      callback(this.connection);
+    }.bind(this));
+  }
+  this.in_progress = true;
+
+  var connection;
+  var listener = function() {
+    this.error_buffer.emit('ok');
+    this.in_progress = false;
+    this.connection = connection;
+    callback(this.connection);
+    this.emit('connected');
+  }.bind(this);
+
+  if (this.ssl) {
+    connection = tls.connect(ssl_helper.merge_options(this, {
+      host: this.host,
+      port: this.port
+    }), listener);
+  }
+  else if (this.unix_socket) {
+    connection = net.createConnection({
+      path: this.unix_socket
+    }, listener);
+  }
+  else {
+    connection = net.createConnection({
+      host: this.host,
+      port: this.port
+    }, listener);
+  }
+
+  connection.on('error', function(err) {
+    this.error_buffer.emit('error', err);
+  }.bind(this));
+  connection.on('close', function() { // dropping the cached connection triggers a reconnect on the next process()
+    this.in_progress = false;
+    this.connection = null;
+    this.closed_callback();
+  }.bind(this));
+};
+
+AbstractTcp.prototype.process = function(data) {
+  this.formatPayload(data, function(message) {
+    this.findConnection(function(c) {
+      c.write(message);
+    });
+  }.bind(this));
+};
+
+AbstractTcp.prototype.close = function(callback) {
+  logger.info('Closing output to' + this.to());
+  if (this.connection) {
+    this.closed_callback = function() {
+      logger.info('Connection closed to' + this.to());
+      callback();
+    }.bind(this);
+    this.connection.end();
+  }
+  else {
+    callback();
+  }
+};
+
+exports.AbstractTcp = AbstractTcp;
diff --git a/lib/outputs/abstract_udp.js b/lib/outputs/abstract_udp.js
new file mode 100644
index 00000000..59e5ac57
--- /dev/null
+++ b/lib/outputs/abstract_udp.js
@@ -0,0 +1,65 @@
+var base_output = require('../lib/base_output'),
+  cache_helper = require('../lib/cache_helper'),
+  util = require('util'),
+  dgram = require('dgram'),
+  dns = require('dns'),
+  logger = require('log4node'),
+  error_buffer = require('../lib/error_buffer');
+
+function AbstractUdp() { // base class for outputs sending datagrams; DNS results cached via cache_helper
+  base_output.BaseOutput.call(this);
+  this.mergeConfig(cache_helper.config());
+  this.mergeConfig(error_buffer.config(function() {
+    return 'output udp to ' + this.host + ':' + this.port;
+  }));
+  this.mergeConfig({
+    name: 'AbstractUdp',
+    host_field: 'host',
+    port_field: 'port',
+    start_hook: this.startAbstract,
+  });
+}
+
+util.inherits(AbstractUdp, base_output.BaseOutput);
+
+AbstractUdp.prototype.startAbstract = function(callback) {
+  logger.info('Start output to ' + this.to());
+
+  this.socket = dgram.createSocket('udp4');
+
+  this.cache_miss = function(key, callback) {
+    dns.lookup(key, function(err, res) {
+      callback(err, res); // FIX: propagate lookup errors (was callback(undefined, res), making the err branch in process() unreachable) — NOTE(review): assumes cache_helper does not cache error results, verify
+    });
+  };
+
+  callback();
+};
+
+AbstractUdp.prototype.process = function(data) {
+  this.formatPayload(data, function(message) {
+    this.cache(this.host, function(err, host) {
+      if (err) {
+        logger.error('Unable to resolve host', this.host);
+      }
+      else {
+        this.socket.send(message, 0, message.length, this.port, host, function(err, bytes) {
+          if (err || bytes !== message.length) {
+            this.error_buffer.emit('error', new Error('Error while send data to ' + this.host + ':' + this.port + ':' + err));
+          }
+          else {
+            this.error_buffer.emit('ok');
+          }
+        }.bind(this));
+      }
+    }.bind(this));
+  }.bind(this));
+};
+
+AbstractUdp.prototype.close = function(callback) {
+  logger.info('Closing output to ' + this.to());
+  this.socket.close();
+  callback();
+};
+
+exports.AbstractUdp = AbstractUdp;
diff --git a/lib/outputs/abstract_zeromq.js b/lib/outputs/abstract_zeromq.js
new file mode 100644
index 00000000..7bc90b9b
--- /dev/null
+++ b/lib/outputs/abstract_zeromq.js
@@ -0,0 +1,79 @@
+var base_output = require('../lib/base_output'),
+  util = require('util'),
+  zmq = require('zmq'),
+  logger = require('log4node');
+
+function AbstractZeroMQ() { // base class for ZeroMQ push outputs with optional queue-size alarm
+  base_output.BaseOutput.call(this);
+  this.mergeConfig({
+    name: 'Abstract ZeroMQ',
+    host_field: 'address',
+    optional_params: ['zmq_high_watermark', 'zmq_threshold_up', 'zmq_threshold_down', 'zmq_check_interval'],
+    start_hook: this.startAbstract,
+  });
+}
+
+util.inherits(AbstractZeroMQ, base_output.BaseOutput);
+
+AbstractZeroMQ.prototype.startAbstract = function(callback) {
+  logger.info('Start output to', this.to(), 'using ZeroMQ', zmq.version);
+
+  this.socket = zmq.socket('push');
+  if (this.zmq_high_watermark) {
+    logger.info('Setting high watermark on ZeroMQ socket', this.zmq_high_watermark);
+    if (zmq.version.match(/^2\./)) { // ZMQ 2.x uses a single HWM option; 3.x+ split send/recv
+      this.socket.setsockopt(zmq.ZMQ_HWM, parseInt(this.zmq_high_watermark, 10));
+    }
+    else {
+      this.socket.setsockopt(zmq.ZMQ_SNDHWM, parseInt(this.zmq_high_watermark, 10));
+    }
+  }
+  (Array.isArray(this.address) ? this.address : this.address.split(',')).forEach(function(address) {
+    this.socket.connect(address);
+  }.bind(this));
+
+  if (this.zmq_check_interval) {
+    this.check_interval_id = setInterval(function() {
+      this.check();
+    }.bind(this), this.zmq_check_interval);
+  }
+
+  this.on_alarm = false;
+
+  callback();
+};
+
+AbstractZeroMQ.prototype.check = function() { // periodic poll: clear the alarm once the outgoing queue drains below threshold
+  if (this.on_alarm) {
+    if (this.zmq_threshold_down && this.socket._outgoing.length < this.zmq_threshold_down) {
+      logger.warning('Zmq socket end of alarm', this.address, 'current queue size', this.socket._outgoing.length);
+      this.on_alarm = false;
+      this.emit('alarm', false, this.address);
+    }
+    else {
+      logger.info('Still in alarm : queue size : ', this.socket._outgoing.length);
+    }
+  }
+};
+
+AbstractZeroMQ.prototype.process = function(data) {
+  this.formatPayload(data, function(message) {
+    this.socket.send(message);
+    if (!this.on_alarm && this.zmq_threshold_down && this.zmq_threshold_up && this.socket._outgoing.length > this.zmq_threshold_up) {
+      logger.warning('Zmq socket in alarm', this.address, 'current queue size', this.socket._outgoing.length);
+      this.on_alarm = true;
+      this.emit('alarm', true, this.address);
+    }
+  }.bind(this));
+};
+
+AbstractZeroMQ.prototype.close = function(callback) {
+  logger.info('Closing output to zeromq', this.address);
+  if (this.check_interval_id) { // FIX: was this.zmq_check_interval_id, which is never assigned, so the interval was never cleared
+    clearInterval(this.check_interval_id);
+  }
+  this.socket.close();
+  callback();
+};
+
+exports.AbstractZeroMQ = AbstractZeroMQ;
diff --git a/lib/outputs/output_amqp.js b/lib/outputs/output_amqp.js
new file
mode 100644 index 00000000..48eaad7e --- /dev/null +++ b/lib/outputs/output_amqp.js @@ -0,0 +1,77 @@ +var base_output = require('../lib/base_output'), + ssl_helper = require('../lib/ssl_helper'), + amqp_driver = require('../lib/amqp_driver'), + util = require('util'), + logger = require('log4node'), + error_buffer = require('../lib/error_buffer'); + +function OutputAmqp() { + base_output.BaseOutput.call(this); + this.mergeConfig(this.serializer_config('json_logstash')); + this.mergeConfig(ssl_helper.config()); + this.mergeConfig(error_buffer.config(function() { + return 'amqp to ' + this.host + ':' + this.port + ' exchange ' + this.exchange_name; + })); + this.mergeConfig({ + name: 'Ampq', + host_field: 'host', + port_field: 'port', + required_params: ['exchange_name'], + optional_params: ['topic', 'durable', 'retry_delay', 'heartbeat', 'username', 'password', 'vhost', 'persistent'], + default_values: { + 'durable': true, + 'retry_delay': 3000, + 'heartbeat': 10, + 'persistent': false, + }, + start_hook: this.start, + + }); +} + +util.inherits(OutputAmqp, base_output.BaseOutput); + +OutputAmqp.prototype.start = function(callback) { + this.amqp_url = amqp_driver.buildUrl(this); + this.channel = undefined; + logger.info('Start AMQP output to', this.amqp_url, 'exchange', this.exchange_name, 'topic', this.topic); + + + this.connected_callback = function(channel) { + channel.assertExchange(this.exchange_name, this.topic ? 
'topic' : 'fanout', {durable: this.durable}, function(err) { + if (err) { + logger.error('Unable to create exchange', err); + } + else { + this.channel = channel; + } + }.bind(this)); + }.bind(this); + this.disconnected_callback = function() { + this.channel = undefined; + }.bind(this); + this.amqp_logger = logger; + + this.driver = amqp_driver.createAmqpClient(this); + + callback(); +}; + +OutputAmqp.prototype.process = function(data) { + if (this.channel) { + var options = {}; + if (this.persistent) { + options.persistent = true; + } + this.channel.publish(this.exchange_name, this.topic || '', new Buffer(this.serialize_data(data)), options); + } +}; + +OutputAmqp.prototype.close = function(callback) { + logger.info('Closing AMQP output', this.amqp_url + ' exchange ' + this.exchange_name); + this.driver.close(callback); +}; + +exports.create = function() { + return new OutputAmqp(); +}; diff --git a/lib/outputs/output_elasticsearch.js b/lib/outputs/output_elasticsearch.js new file mode 100644 index 00000000..5ceb92e3 --- /dev/null +++ b/lib/outputs/output_elasticsearch.js @@ -0,0 +1,89 @@ +var abstract_http = require('./abstract_http'), + util = require('util'), + elastic_search_helper = require('../lib/elastic_search_helper'); + +function OutputElasticSearch() { + abstract_http.AbstractHttp.call(this); + this.mergeConfig({ + name: 'Elastic search', + optional_params: ['bulk_limit', 'bulk_timeout', 'index_prefix', 'data_type', 'index_name'], + default_values: { + 'index_prefix': 'logstash', + 'data_type': 'logs', + 'bulk': 'false', + 'bulk_timeout': '1000' + }, + start_hook: this.start, + }); +} + +util.inherits(OutputElasticSearch, abstract_http.AbstractHttp); + +OutputElasticSearch.prototype.start = function(callback) { + if (this.bulk_limit) { + this.bulk_limit = parseInt(this.bulk_limit, 10); + this.bulk_timeout = parseInt(this.bulk_timeout, 10); + this.bulk_data = ''; + this.bulk_counter = 0; + this.bulk_interval = setInterval(function() { + if 
(this.bulk_data.length > 0 && ((new Date()).getTime() - this.last_bulk_insert) > this.bulk_timeout) { + this.sendBulk(); + } + }.bind(this), this.bulk_timeout); + } + + callback(); +}; + +OutputElasticSearch.prototype.getPath = function() { + return this.index_name ? '/' + this.index_name + '/' + this.data_type + '/' : elastic_search_helper.computePath(this.index_prefix, this.data_type); +}; + +OutputElasticSearch.prototype.sendBulk = function() { + var payload = this.bulk_data; + this.bulk_data = ''; + this.bulk_counter = 0; + this.postData('/_bulk', payload); +}; + +OutputElasticSearch.prototype.postData = function(path, data) { + var params = { + host: this.host, + port: this.port, + method: 'POST', + path: path, + }; + + this.sendHttpRequest(params, data); +}; + +OutputElasticSearch.prototype.process = function(data) { + if (this.bulk_limit) { + var path = this.getPath().split('/'); + this.bulk_data += JSON.stringify({index: {_index: path[1], _type: path[2]}}) + '\n'; + this.bulk_data += JSON.stringify(data) + '\n'; + this.bulk_counter += 1; + this.last_bulk_insert = (new Date()).getTime(); + if (this.bulk_counter >= this.bulk_limit) { + this.sendBulk(); + } + } + else { + this.postData(this.getPath(), JSON.stringify(data)); + } +}; + +OutputElasticSearch.prototype.httpClose = function(callback) { + if (this.bulk_interval) { + clearInterval(this.bulk_interval); + } + callback(); +}; + +OutputElasticSearch.prototype.to = function() { + return ' Elastic Search Http ' + this.host + ':' + this.port + (this.bulk_limit ? 
' bulk ' + this.bulk_limit : ''); +}; + +exports.create = function() { + return new OutputElasticSearch(); +}; diff --git a/lib/outputs/output_elasticsearch_zeromq.js b/lib/outputs/output_elasticsearch_zeromq.js new file mode 100644 index 00000000..21b4c69e --- /dev/null +++ b/lib/outputs/output_elasticsearch_zeromq.js @@ -0,0 +1,31 @@ +var abstract_zeromq = require('./abstract_zeromq'), + util = require('util'), + elastic_search_helper = require('../lib/elastic_search_helper'); + +function OutputElasticSearchZeroMQ() { + abstract_zeromq.AbstractZeroMQ.call(this); + this.mergeConfig(this.serializer_config()); + this.mergeConfig({ + name: 'Elasticsearch ZeroMQ', + optional_params: ['index_prefix', 'data_type'], + default_values: { + index_prefix: 'logstash', + data_type: 'logs', + }, + }); +} + +util.inherits(OutputElasticSearchZeroMQ, abstract_zeromq.AbstractZeroMQ); + +OutputElasticSearchZeroMQ.prototype.to = function() { + return 'Elasticsearch Zeromq : ' + this.address; +}; + +OutputElasticSearchZeroMQ.prototype.formatPayload = function(data, callback) { + var line = 'POST|' + elastic_search_helper.computePath(this.index_prefix, this.data_type) + '|' + JSON.stringify(data); + callback(line); +}; + +exports.create = function() { + return new OutputElasticSearchZeroMQ(); +}; diff --git a/lib/outputs/output_file.js b/lib/outputs/output_file.js new file mode 100644 index 00000000..1fb954b6 --- /dev/null +++ b/lib/outputs/output_file.js @@ -0,0 +1,192 @@ +var base_output = require('../lib/base_output'), + util = require('util'), + fs = require('fs'), + logger = require('log4node'), + async = require('async'), + mkdirp = require('mkdirp'), + path = require('path'), + sig_listener = require('../lib/sig_listener').sig_listener, + error_buffer = require('../lib/error_buffer'); + +var mkdir_queue = async.queue(mkdirp, 1); +var cache_path = {}; + +function now() { + return (new Date()).getTime(); +} + +function createPath(path, callback) { + if (cache_path[path]) { + 
return callback(); + } + mkdir_queue.push(path, function(err) { + if (!err) { + cache_path[path] = 1; + } + callback(err); + }); +} + +function FileWriter(logger, filename, retry_delay, idle_timeout, event_target, delete_callback) { + this.on_error = false; + this.wait_queue = []; + var suicide = function() { + logger.info('Early closing of', filename); + delete_callback(); + this.end(function(err) { + if (err) { + event_target.emit('error', err); + } + }); + }.bind(this); + createPath(path.dirname(filename), function(err) { + if (err) { + this.on_error = true; + event_target.emit('error', err); + return; + } + var stream = fs.createWriteStream(filename, { + flags: 'a' + }); + stream.on('open', function() { + this.wait_queue.forEach(function(b) { + stream.write(b); + }); + delete this.wait_queue; + this.stream = stream; + }.bind(this)); + stream.on('error', function(err) { + this.on_error = true; + event_target.emit('error', err); + if (!this.suicide_timeout) { + this.suicide_timeout = setTimeout(function() { + delete this.suicide_timeout; + suicide(); + }, retry_delay * 1000); + } + }.bind(this)); + if (idle_timeout > 0) { + this.idle_check_interval = setInterval(function() { + if (this.last_write !== undefined && (now() - this.last_write) > idle_timeout) { + logger.info('Closing file without activity', filename); + suicide(); + } + }.bind(this), idle_timeout * 1000); + } + }.bind(this)); +} + +FileWriter.prototype.write = function(b) { + if (this.on_error) { + return; + } + this.last_write = now(); + if (this.stream) { + this.stream.write(b); + } + else { + this.wait_queue.push(b); + } +}; + +FileWriter.prototype.end = function(callback) { + if (this.suicide_timeout) { + clearTimeout(this.suicide_timeout); + } + if (this.idle_check_interval) { + clearInterval(this.idle_check_interval); + } + if (this.stream) { + this.stream.end(callback); + } + else { + callback(); + } +}; + +function OutputFile() { + base_output.BaseOutput.call(this); + 
this.mergeConfig(this.serializer_config('raw')); + this.mergeConfig(error_buffer.config(function() { + return 'output to file ' + this.path; + })); + this.mergeConfig({ + name: 'file', + host_field: 'path', + optional_params: ['delimiter', 'idle_timeout', 'retry_delay'], + default_values: { + delimiter: '\n', + idle_timeout: 300, + retry_delay: 300, + }, + start_hook: this.start, + }); +} + +util.inherits(OutputFile, base_output.BaseOutput); + +OutputFile.prototype.reopen = function(callback) { + this.closeAll(Object.keys(this.writers), callback); +}; + +OutputFile.prototype.closeAll = function(files, callback) { + async.eachSeries(files, function(f, callback) { + var w = this.writers[f]; + delete this.writers[f]; + w.end(callback); + }.bind(this), callback); +}; + +OutputFile.prototype.start = function(callback) { + logger.info('Start output to file ' + this.path); + + this.writers = {}; + + this.sig_listener = function() { + this.reopen(); + }.bind(this); + + sig_listener.on('SIGUSR2', this.sig_listener); + + callback(); +}; + +OutputFile.prototype.getWriter = function(filename) { + if (!this.writers[filename]) { + this.writers[filename] = new FileWriter(logger, filename, this.retry_delay, this.idle_timeout, this, function() { + delete this.writers[filename]; + }.bind(this)); + } + return this.writers[filename]; +}; + +OutputFile.prototype.process = function(data) { + var line = this.serialize_data(data); + if (line) { + var filename = this.replaceByFields(data, this.path); + if (filename) { + var writer = this.getWriter(filename); + writer.write(line + this.delimiter); + } + else { + this.error_buffer.emit('error', new Error('Unable to compute output filename ' + this.path)); + } + } +}; + +OutputFile.prototype.close = function(callback) { + logger.info('Closing output to file', this.path); + if (this.sig_listener) { + sig_listener.removeListener('SIGUSR2', this.sig_listener); + } + this.closeAll(Object.keys(this.writers), function(err) { + if (err) { + 
this.emit('error', err); + } + callback(); + }.bind(this)); +}; + +exports.create = function() { + return new OutputFile(); +}; diff --git a/lib/outputs/output_gelf.js b/lib/outputs/output_gelf.js new file mode 100644 index 00000000..0df4012e --- /dev/null +++ b/lib/outputs/output_gelf.js @@ -0,0 +1,54 @@ +var abstract_udp = require('./abstract_udp'), + util = require('util'), + logger = require('log4node'), + zlib = require('zlib'); + +function OutputGelf() { + abstract_udp.AbstractUdp.call(this); + this.mergeConfig({ + name: 'Gelf', + optional_params: ['version', 'message', 'facility', 'level'], + default_values: { + 'version': '1.0', + 'message': '#{message}', + 'facility': '#{type}', + 'level': '6', + }, + }); +} + +util.inherits(OutputGelf, abstract_udp.AbstractUdp); + +OutputGelf.prototype.formatPayload = function(data, callback) { + var m = { + version: this.version, + short_message: this.replaceByFields(data, this.message), + facility: this.replaceByFields(data, this.facility) || 'no_facility', + level: this.replaceByFields(data, this.level), + host: data.host, + timestamp: (new Date(data['@timestamp'])).getTime() / 1000, + }; + for (var key in data) { + if (!key.match(/^@/) && key !== 'message' && key !== 'host') { + m['_' + key] = data[key]; + } + } + if (!m.short_message) { + return; + } + logger.debug('Sending GELF', m); + zlib.deflate(new Buffer(JSON.stringify(m)), function(err, message) { + if (err) { + return this.emit('error', new Error('Error while compressing data:' + err)); + } + callback(message); + }.bind(this)); +}; + +OutputGelf.prototype.to = function() { + return ' Gelf ' + this.host + ':' + this.port; +}; + +exports.create = function() { + return new OutputGelf(); +}; diff --git a/lib/outputs/output_hep.js b/lib/outputs/output_hep.js new file mode 100644 index 00000000..af26dd13 --- /dev/null +++ b/lib/outputs/output_hep.js @@ -0,0 +1,68 @@ +var abstract_udp = require('./abstract_udp'), + hepjs = require('hep-js'), + util = 
require('util'); + +function OutputHep() { + abstract_udp.AbstractUdp.call(this); + this.mergeConfig({ + name: 'Udp', + }); + this.mergeConfig(this.serializer_config()); + this.mergeConfig({ + name: 'HEP/EEP Server', + optional_params: ['hep_id', 'hep_pass', 'hep_cid', 'hep_type'], + default_values: { + hep_id: '2001', + hep_pass: 'MyHep', + hep_cid: '#{hep_cid}', + hep_type: 100, + }, + }); +} + +util.inherits(OutputHep, abstract_udp.AbstractUdp); + +OutputHep.prototype.preHep = function(data) { + + var hep_proto = { + 'type': 'HEP', + 'version': 3, + 'payload_type': this.hep_type, + 'captureId': this.hep_id, + 'capturePass': this.hep_pass, + 'ip_family': 2 + }; + + var datenow = (new Date()).getTime(); + hep_proto.time_sec = Math.floor(datenow / 1000); + hep_proto.time_usec = datenow - (hep_proto.time_sec * 1000); + + // Build HEP3 w/ null network parameters + hep_proto.ip_family = 2; + hep_proto.protocol = 6; + hep_proto.srcIp = '127.0.0.1'; + hep_proto.dstIp = '127.0.0.1'; + hep_proto.srcPort = 0; + hep_proto.dstPort = 0; + // pair correlation id from pattern + hep_proto.correlation_id = this.hep_cid; + if (hep_proto.correlation_id !== '') { + // Pack HEP3 + return hepjs.encapsulate(data,hep_proto); + } + else { + return false; + } +}; + +OutputHep.prototype.formatPayload = function(data, callback) { + callback(new Buffer(this.serialize_data(this.preHep(data)))); +}; + +OutputHep.prototype.to = function() { + return 'HEP udp to ' + this.host + ':' + this.port; +}; + +exports.create = function() { + return new OutputHep(); +}; diff --git a/lib/outputs/output_http_post.js b/lib/outputs/output_http_post.js new file mode 100644 index 00000000..6e27200b --- /dev/null +++ b/lib/outputs/output_http_post.js @@ -0,0 +1,44 @@ +var abstract_http = require('./abstract_http'), + util = require('util'); + +function OutputHttpPost() { + abstract_http.AbstractHttp.call(this); + this.mergeConfig(this.serializer_config('raw')); + this.mergeConfig({ + name: 'Http Post', + 
optional_params: ['path'], + default_values: { + 'path': '/', + }, + }); +} + +util.inherits(OutputHttpPost, abstract_http.AbstractHttp); + +OutputHttpPost.prototype.process = function(data) { + var path = this.replaceByFields(data, this.path); + if (path) { + var http_options = { + host: this.host, + port: this.port, + path: path, + method: 'POST', + headers: { + 'Content-Type': this.serializer === 'json_logstash' ? 'application/json' : 'text/plain' + } + }; + var line = this.serialize_data(data); + if (line) { + http_options.headers['Content-Length'] = Buffer.byteLength(line, 'utf-8'); + this.sendHttpRequest(http_options, line); + } + } +}; + +OutputHttpPost.prototype.to = function() { + return ' http ' + this.host + ':' + this.port; +}; + +exports.create = function() { + return new OutputHttpPost(); +}; diff --git a/lib/outputs/output_logio.js b/lib/outputs/output_logio.js new file mode 100644 index 00000000..24eb573e --- /dev/null +++ b/lib/outputs/output_logio.js @@ -0,0 +1,29 @@ +var abstract_tcp = require('./abstract_tcp'), + util = require('util'); + +function OutputLogIO() { + abstract_tcp.AbstractTcp.call(this); + this.mergeConfig({ + name: 'LogIO', + optional_params: ['priority'], + default_values: { + 'priority': 'info', + } + }); +} + +util.inherits(OutputLogIO, abstract_tcp.AbstractTcp); + +OutputLogIO.prototype.formatPayload = function(data, callback) { + var prio = this.replaceByFields(data, this.priority); + var line = '+log|' + data.host + '|' + (data.type || 'no_type') + '|' + prio + '|' + data.message + '\r\n'; + callback(new Buffer(line)); +}; + +OutputLogIO.prototype.to = function() { + return ' Log IO ' + this.host + ':' + this.port; +}; + +exports.create = function() { + return new OutputLogIO(); +}; diff --git a/lib/outputs/output_lumberjack.js b/lib/outputs/output_lumberjack.js new file mode 100644 index 00000000..ac3520fc --- /dev/null +++ b/lib/outputs/output_lumberjack.js @@ -0,0 +1,62 @@ +var base_output = 
require('../lib/base_output'), + util = require('util'), + logger = require('log4node'), + error_buffer = require('../lib/error_buffer'), + ssl_helper = require('../lib/ssl_helper'), + lumberjack = require('lumberjack-protocol'); + +function OutputLumberJack() { + base_output.BaseOutput.call(this); + this.mergeConfig(ssl_helper.config()); + this.mergeConfig(error_buffer.config(function() { + return 'lumber jack to ' + this.host + ':' + this.port; + })); + this.mergeConfig({ + name: 'Lumber Jack', + host_field: 'host', + port_field: 'port', + optional_params: ['crt', 'key', 'ca', 'max_queue_size'], + default_values: { + max_queue_size: 500, + }, + start_hook: this.start, + }); +} + +util.inherits(OutputLumberJack, base_output.BaseOutput); + +OutputLumberJack.prototype.start = function(callback) { + logger.info('Creating LumberJack output to', this.host + ':' + this.port, 'using ca', this.ca); + var options = { + maxQueueSize: this.max_queue_size, + }; + var tls_options = ssl_helper.merge_options(this, { + host: this.host, + port: this.port, + }); + this.client = lumberjack.client(tls_options, options); + this.client.on('dropped', function(count) { + this.error_buffer.emit('error', new Error('Dropping data, queue size :' + count)); + }.bind(this)); + callback(); +}; + +OutputLumberJack.prototype.process = function(data) { + if (this.client) { + this.client.writeDataFrame(data, function() { + this.error_buffer.emit('ok'); + }.bind(this)); + } +}; + +OutputLumberJack.prototype.close = function(callback) { + logger.info('Closing LumberJack output to', this.host + ':' + this.port); + if (this.client) { + this.client.close(); + } + callback(); +}; + +exports.create = function() { + return new OutputLumberJack(); +}; diff --git a/lib/outputs/output_redis.js b/lib/outputs/output_redis.js new file mode 100644 index 00000000..10c8d3e9 --- /dev/null +++ b/lib/outputs/output_redis.js @@ -0,0 +1,96 @@ +var base_output = require('../lib/base_output'), + util = require('util'), + 
redis_connection_manager = require('../lib/redis_connection_manager'), + logger = require('log4node'), + error_buffer = require('../lib/error_buffer'); + +function OutputRedis() { + base_output.BaseOutput.call(this); + this.mergeConfig(this.serializer_config()); + this.mergeConfig(error_buffer.config(function() { + return 'output Redis to ' + this.host + ':' + this.port; + })); + this.mergeConfig({ + name: 'Redis', + host_field: 'host', + port_field: 'port', + optional_params: ['auth_pass', 'method', 'key', 'channel'], + default_values: { + method: 'queue' + }, + start_hook: this.start, + }); +} + +util.inherits(OutputRedis, base_output.BaseOutput); + +OutputRedis.prototype.sendQueue = function(data) { + var key = this.replaceByFields(data, this.key); + if (!this.client.rpush(key, this.serialize_data(data))) { + this.error_buffer.emit('error', 'Unable to rpush message on redis to key ' + key); + } +}; + +OutputRedis.prototype.sendPubSub = function(data) { + var channel = this.replaceByFields(data, this.channel); + if (!this.client.publish(channel, this.serialize_data(data))) { + this.error_buffer.emit('error', 'Unable to publish message on redis to channel ' + channel); + } +}; + +OutputRedis.prototype.start = function(callback) { + if (this.method !== 'queue' && this.method !== 'pubsub') { + return callback(new Error('Wrong method, please use pubsub or queue : ' + this.method)); + } + + if (this.method === 'pubsub') { + if (!this.channel) { + return callback(new Error('You have to specify the channel parameter in pubsub mode')); + } + this.desc = 'using pubsub, channel ' + this.channel; + this.send = this.sendPubSub.bind(this); + } + + if (this.method === 'queue') { + if (!this.key) { + return callback(new Error('You have to specify the key parameter in queue mode')); + } + this.desc = 'using queue, key ' + this.key; + this.send = this.sendQueue.bind(this); + } + + logger.info('Start Redis output to', this.host + ':' + this.port, this.desc); + + 
this.redis_connection_manager = redis_connection_manager.create(this.host, this.port, this.auth_pass); + + this.redis_connection_manager.on('error', function(err) { + this.error_buffer.emit('error', err); + }.bind(this)); + + this.redis_connection_manager.on('connect', function() { + this.error_buffer.emit('ok'); + }.bind(this)); + + this.redis_connection_manager.once('connect', function(client) { + this.client = client; + }.bind(this)); + + callback(); +}; + +OutputRedis.prototype.process = function(data) { + if (this.client) { + this.send(data); + } + else { + this.error_buffer.emit('ok'); + } +}; + +OutputRedis.prototype.close = function(callback) { + this.redis_connection_manager.quit(callback); +}; + +exports.create = function() { + return new OutputRedis(); +}; diff --git a/lib/outputs/output_sqs.js b/lib/outputs/output_sqs.js new file mode 100644 index 00000000..619781ba --- /dev/null +++ b/lib/outputs/output_sqs.js @@ -0,0 +1,50 @@ +var base_output = require('../lib/base_output'), + sqs_wrapper = require('../lib/sqs_wrapper'), + util = require('util'), + logger = require('log4node'), + error_buffer = require('../lib/error_buffer'); + +function OutputSqs() { + base_output.BaseOutput.call(this); + this.mergeConfig(this.serializer_config('json_logstash')); + this.mergeConfig(error_buffer.config(function() { + return 'sqs to ' + this.aws_queue; + })); + this.mergeConfig(sqs_wrapper.config()); + this.mergeConfig({ + name: 'SQS', + start_hook: this.start, + }); +} + +util.inherits(OutputSqs, base_output.BaseOutput); + +OutputSqs.prototype.start = function(callback) { + logger.info('Creating AWS SQS Output to', this.aws_queue); + callback(); +}; + +OutputSqs.prototype.process = function(data) { + this.sqs.sendMessage({ + QueueUrl: this.queue_url, + MessageBody: this.serialize_data(data) + }, function (err, result) { + if (err) { + this.error_buffer.emit('error', err); + } + else { + if (!result.MessageId) { + this.error_buffer.emit('error', new Error('Wrong SQS 
SendMessage result')); + } + } + }.bind(this)); +}; + +OutputSqs.prototype.close = function(callback) { + logger.info('Closing AWS SQS Output to', this.aws_queue); + callback(); +}; + +exports.create = function() { + return new OutputSqs(); +}; diff --git a/lib/outputs/output_statsd.js b/lib/outputs/output_statsd.js new file mode 100644 index 00000000..9db0e51f --- /dev/null +++ b/lib/outputs/output_statsd.js @@ -0,0 +1,67 @@ +var abstract_udp = require('./abstract_udp'), + util = require('util'), + logger = require('log4node'); + +function OutputElasticStatsd() { + abstract_udp.AbstractUdp.call(this); + this.mergeConfig({ + name: 'Statsd', + required_params: ['metric_type', 'metric_key'], + optional_params: ['metric_value'], + start_hook: this.start, + }); +} + +util.inherits(OutputElasticStatsd, abstract_udp.AbstractUdp); + +OutputElasticStatsd.prototype.start = function(callback) { + if (this.metric_type === 'counter') { + if (!this.metric_value) { + return callback(new Error('You have to specify metric_value with metric_type counter')); + } + this.raw = this.metric_key + ':' + this.metric_value + '|c'; + } + else if (this.metric_type === 'increment') { + this.raw = this.metric_key + ':1|c'; + } + else if (this.metric_type === 'decrement') { + this.raw = this.metric_key + ':-1|c'; + } + else if (this.metric_type === 'timer') { + if (!this.metric_value) { + return callback(new Error('You have to specify metric_value with metric_type timer')); + } + this.raw = this.metric_key + ':' + this.metric_value + '|ms'; + } + else if (this.metric_type === 'gauge') { + if (!this.metric_value) { + return callback(new Error('You have to specify metric_value with metric_type gauge')); + } + this.raw = this.metric_key + ':' + this.metric_value + '|g'; + } + else { + return callback(new Error('Wrong metric_type: ' + this.metric_type)); + } + + callback(); +}; + +OutputElasticStatsd.prototype.formatPayload = function(data, callback) { + var raw = this.replaceByFields(data, 
this.raw); + if (raw) { + logger.debug('Send to statsd packet', raw); + var message = new Buffer(raw); + callback(message); + } + else { + logger.debug('Unable to replace fields on', this.raw, 'input', data); + } +}; + +OutputElasticStatsd.prototype.to = function() { + return ' statsd ' + this.host + ':' + this.port + ' , metric_type ' + this.metric_type + ' , metric_key ' + this.metric_key; +}; + +exports.create = function() { + return new OutputElasticStatsd(); +}; diff --git a/lib/outputs/output_stdout.js b/lib/outputs/output_stdout.js new file mode 100644 index 00000000..7583e02d --- /dev/null +++ b/lib/outputs/output_stdout.js @@ -0,0 +1,25 @@ +var base_output = require('../lib/base_output'), + logger = require('log4node'), + util = require('util'); + +function OutputStdout() { + base_output.BaseOutput.call(this); + this.mergeConfig({ + name: 'Stdout', + }); +} + +util.inherits(OutputStdout, base_output.BaseOutput); + +OutputStdout.prototype.process = function(data) { + process.stdout.write('[STDOUT] ' + JSON.stringify(data, null, 2) + '\n'); +}; + +OutputStdout.prototype.close = function(callback) { + logger.info('Closing stdout'); + callback(); +}; + +exports.create = function() { + return new OutputStdout(); +}; diff --git a/lib/outputs/output_tcp.js b/lib/outputs/output_tcp.js new file mode 100644 index 00000000..9a59f159 --- /dev/null +++ b/lib/outputs/output_tcp.js @@ -0,0 +1,28 @@ +var abstract_tcp = require('./abstract_tcp'), + util = require('util'); + +function OutputTcp() { + abstract_tcp.AbstractTcp.call(this); + this.mergeConfig({ + name: 'Tcp', + optional_params: 'delimiter', + default_values: { + delimiter: '\n', + }, + }); + this.mergeConfig(this.serializer_config()); +} + +util.inherits(OutputTcp, abstract_tcp.AbstractTcp); + +OutputTcp.prototype.formatPayload = function(data, callback) { + callback(new Buffer(this.serialize_data(data) + this.delimiter)); +}; + +OutputTcp.prototype.to = function() { + return ' tcp ' + this.host + ':' + 
this.port; +}; + +exports.create = function() { + return new OutputTcp(); +}; diff --git a/lib/outputs/output_udp.js b/lib/outputs/output_udp.js new file mode 100644 index 00000000..50f092dd --- /dev/null +++ b/lib/outputs/output_udp.js @@ -0,0 +1,24 @@ +var abstract_udp = require('./abstract_udp'), + util = require('util'); + +function OutputUdp() { + abstract_udp.AbstractUdp.call(this); + this.mergeConfig({ + name: 'Udp', + }); + this.mergeConfig(this.serializer_config()); +} + +util.inherits(OutputUdp, abstract_udp.AbstractUdp); + +OutputUdp.prototype.formatPayload = function(data, callback) { + callback(new Buffer(this.serialize_data(data))); +}; + +OutputUdp.prototype.to = function() { + return ' udp ' + this.host + ':' + this.port; +}; + +exports.create = function() { + return new OutputUdp(); +}; diff --git a/lib/outputs/output_unix.js b/lib/outputs/output_unix.js new file mode 100644 index 00000000..b30c0fd5 --- /dev/null +++ b/lib/outputs/output_unix.js @@ -0,0 +1,30 @@ +var abstract_tcp = require('./abstract_tcp'), + util = require('util'); + +function OutputUnix() { + abstract_tcp.AbstractTcp.call(this); + this.mergeConfig({ + name: 'Unix', + host_field: 'unix_socket', + port_field: -1, + optional_params: 'delimiter', + default_values: { + delimiter: '\n', + }, + }); + this.mergeConfig(this.serializer_config()); +} + +util.inherits(OutputUnix, abstract_tcp.AbstractTcp); + +OutputUnix.prototype.formatPayload = function(data, callback) { + callback(new Buffer(this.serialize_data(data) + this.delimiter)); +}; + +OutputUnix.prototype.to = function() { + return ' unix ' + this.unix_socket; +}; + +exports.create = function() { + return new OutputUnix(); +}; diff --git a/lib/outputs/output_ws.js b/lib/outputs/output_ws.js new file mode 100644 index 00000000..826cff48 --- /dev/null +++ b/lib/outputs/output_ws.js @@ -0,0 +1,102 @@ +var abstract_http = require('./abstract_http'), + util = require('util'), + logger = require('log4node'), + error_buffer = 
require('../lib/error_buffer'), + WebSocket = require('ws'); + +function OutputWebsocket() { + abstract_http.AbstractHttp.call(this); + this.mergeConfig(this.serializer_config('raw')); + this.mergeConfig(error_buffer.config(function() { + return 'output websocket to ' + this.host + ':' + this.port; + })); + this.mergeConfig({ + name: 'Websocket', + optional_params: ['path'], + default_values: { + 'path': '/', + }, + }); +} + +util.inherits(OutputWebsocket, abstract_http.AbstractHttp); + +OutputWebsocket.prototype.process = function(data) { + var line = this.serialize_data(data); + if (this.open) { + this.send(line); + } else { + this.pending.push(line); + } +}; + +OutputWebsocket.prototype.send = function(line) { + this.ws.send(line, function ack(err) { + if (err) { + this.error_buffer.emit('error', err); + } + }.bind(this)); +}; + +OutputWebsocket.prototype.startAbstract = function(callback) { + logger.info('Start websocket output to',this.host + ':' + this.port); + + if (this.proxy) { + this.setupProxy(); + } + + this.pending = []; + this.connect(); + callback(); +}; + +OutputWebsocket.prototype.connect = function() { + var protocol = this.ssl ? 
'wss' : 'ws'; + var ws_options = {}; + + if (this.proxying_agent) { + ws_options.agent = this.proxying_agent; + } + var ws = new WebSocket(protocol + '://' + this.host + ':' + this.port + this.path, ws_options); + + ws.on('open', function() { + this.open = true; + this.error_buffer.emit('ok'); + if (this.pending.length > 0) { + this.pending.forEach(function(line) { + this.send(line); + }.bind(this)); + this.pending = []; + } + }.bind(this)); + + ws.on('close', function() { + this.open = false; + if (!this.closed) { + this.error_buffer.emit('error', 'websocket closed'); + this.connect(); + } + }.bind(this)); + + ws.on('error', function(err) { + this.open = false; + if (!this.closed) { + this.error_buffer.emit('error', err); + this.connect(); + } + }.bind(this)); + + this.ws = ws; +}; + +OutputWebsocket.prototype.close = function(callback) { + logger.info('Closing websocket output to', this.host, this.port, 'ssl ' + this.ssl); + this.closed = true; + this.ws.close(); + callback(); +}; + +exports.create = function() { + return new OutputWebsocket(); +}; + diff --git a/lib/outputs/output_zeromq.js b/lib/outputs/output_zeromq.js new file mode 100644 index 00000000..a18a5c4d --- /dev/null +++ b/lib/outputs/output_zeromq.js @@ -0,0 +1,24 @@ +var abstract_zeromq = require('./abstract_zeromq'), + util = require('util'); + +function OutputZeroMQ() { + abstract_zeromq.AbstractZeroMQ.call(this); + this.mergeConfig(this.serializer_config()); + this.mergeConfig({ + name: 'ZeroMQ', + }); +} + +util.inherits(OutputZeroMQ, abstract_zeromq.AbstractZeroMQ); + +OutputZeroMQ.prototype.to = function() { + return 'Zeromq : ' + this.address; +}; + +OutputZeroMQ.prototype.formatPayload = function(data, callback) { + callback(this.serialize_data(data)); +}; + +exports.create = function() { + return new OutputZeroMQ(); +}; diff --git a/lib/patterns/grok/firewalls b/lib/patterns/grok/firewalls new file mode 100755 index 00000000..ff7baeae --- /dev/null +++ b/lib/patterns/grok/firewalls @@ 
-0,0 +1,60 @@ +# NetScreen firewall logs +NETSCREENSESSIONLOG %{SYSLOGTIMESTAMP:date} %{IPORHOST:device} %{IPORHOST}: NetScreen device_id=%{WORD:device_id}%{DATA}: start_time=%{QUOTEDSTRING:start_time} duration=%{INT:duration} policy_id=%{INT:policy_id} service=%{DATA:service} proto=%{INT:proto} src zone=%{WORD:src_zone} dst zone=%{WORD:dst_zone} action=%{WORD:action} sent=%{INT:sent} rcvd=%{INT:rcvd} src=%{IPORHOST:src_ip} dst=%{IPORHOST:dst_ip} src_port=%{INT:src_port} dst_port=%{INT:dst_port} src-xlated ip=%{IPORHOST:src_xlated_ip} port=%{INT:src_xlated_port} dst-xlated ip=%{IPORHOST:dst_xlated_ip} port=%{INT:dst_xlated_port} session_id=%{INT:session_id} reason=%{GREEDYDATA:reason} + +#== Cisco ASA == +CISCO_TAGGED_SYSLOG ^<%{POSINT:syslog_pri}>%{CISCOTIMESTAMP:timestamp}( %{SYSLOGHOST:sysloghost})?: %%{CISCOTAG:ciscotag}: +CISCOTIMESTAMP %{MONTH} +%{MONTHDAY}(?: %{YEAR})? %{TIME} +CISCOTAG [A-Z0-9]+-%{INT}-(?:[A-Z0-9_]+) +# Common Particles +CISCO_ACTION Built|Teardown|Deny|Denied|denied|requested|permitted|denied by ACL|discarded|est-allowed|Dropping|created|deleted +CISCO_REASON Duplicate TCP SYN|Failed to locate egress interface|Invalid transport field|No matching connection|DNS Response|DNS Query|(?:%{WORD}\s*)* +CISCO_DIRECTION Inbound|inbound|Outbound|outbound +CISCO_INTERVAL first hit|%{INT}-second interval +CISCO_XLATE_TYPE static|dynamic +# ASA-2-106001 +CISCOFW106001 %{CISCO_DIRECTION:direction} %{WORD:protocol} connection %{CISCO_ACTION:action} from %{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port} flags %{GREEDYDATA:tcp_flags} on interface %{GREEDYDATA:interface} +# ASA-2-106006, ASA-2-106007, ASA-2-106010 +CISCOFW106006_106007_106010 %{CISCO_ACTION:action} %{CISCO_DIRECTION:direction} %{WORD:protocol} (?:from|src) %{IP:src_ip}/%{INT:src_port}(\(%{DATA:src_fwuser}\))? (?:to|dst) %{IP:dst_ip}/%{INT:dst_port}(\(%{DATA:dst_fwuser}\))? 
(?:on interface %{DATA:interface}|due to %{CISCO_REASON:reason}) +# ASA-3-106014 +CISCOFW106014 %{CISCO_ACTION:action} %{CISCO_DIRECTION:direction} %{WORD:protocol} src %{DATA:src_interface}:%{IP:src_ip}(\(%{DATA:src_fwuser}\))? dst %{DATA:dst_interface}:%{IP:dst_ip}(\(%{DATA:dst_fwuser}\))? \(type %{INT:icmp_type}, code %{INT:icmp_code}\) +# ASA-6-106015 +CISCOFW106015 %{CISCO_ACTION:action} %{WORD:protocol} \(%{DATA:policy_id}\) from %{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port} flags %{DATA:tcp_flags} on interface %{GREEDYDATA:interface} +# ASA-1-106021 +CISCOFW106021 %{CISCO_ACTION:action} %{WORD:protocol} reverse path check from %{IP:src_ip} to %{IP:dst_ip} on interface %{GREEDYDATA:interface} +# ASA-4-106023 +CISCOFW106023 %{CISCO_ACTION:action} %{WORD:protocol} src %{DATA:src_interface}:%{IP:src_ip}(/%{INT:src_port})?(\(%{DATA:src_fwuser}\))? dst %{DATA:dst_interface}:%{IP:dst_ip}(/%{INT:dst_port})?(\(%{DATA:dst_fwuser}\))?( \(type %{INT:icmp_type}, code %{INT:icmp_code}\))? by access-group %{DATA:policy_id} \[%{DATA:hashcode1}, %{DATA:hashcode2}\] +# ASA-5-106100 +CISCOFW106100 access-list %{WORD:policy_id} %{CISCO_ACTION:action} %{WORD:protocol} %{DATA:src_interface}/%{IP:src_ip}\(%{INT:src_port}\)(\(%{DATA:src_fwuser}\))? -> %{DATA:dst_interface}/%{IP:dst_ip}\(%{INT:dst_port}\)(\(%{DATA:src_fwuser}\))? hit-cnt %{INT:hit_count} %{CISCO_INTERVAL:interval} \[%{DATA:hashcode1}, %{DATA:hashcode2}\] +# ASA-6-110002 +CISCOFW110002 %{CISCO_REASON:reason} for %{WORD:protocol} from %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port} +# ASA-6-302010 +CISCOFW302010 %{INT:connection_count} in use, %{INT:connection_count_max} most used +# ASA-6-302013, ASA-6-302014, ASA-6-302015, ASA-6-302016 +CISCOFW302013_302014_302015_302016 %{CISCO_ACTION:action}(?: %{CISCO_DIRECTION:direction})? 
%{WORD:protocol} connection %{INT:connection_id} for %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port}( \(%{IP:src_mapped_ip}/%{INT:src_mapped_port}\))?(\(%{DATA:src_fwuser}\))? to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port}( \(%{IP:dst_mapped_ip}/%{INT:dst_mapped_port}\))?(\(%{DATA:dst_fwuser}\))?( duration %{TIME:duration} bytes %{INT:bytes})?(?: %{CISCO_REASON:reason})?( \(%{DATA:user}\))? +# ASA-6-302020, ASA-6-302021 +CISCOFW302020_302021 %{CISCO_ACTION:action}(?: %{CISCO_DIRECTION:direction})? %{WORD:protocol} connection for faddr %{IP:dst_ip}/%{INT:icmp_seq_num}(?:\(%{DATA:fwuser}\))? gaddr %{IP:src_xlated_ip}/%{INT:icmp_code_xlated} laddr %{IP:src_ip}/%{INT:icmp_code}( \(%{DATA:user}\))? +# ASA-6-305011 +CISCOFW305011 %{CISCO_ACTION:action} %{CISCO_XLATE_TYPE:xlate_type} %{WORD:protocol} translation from %{DATA:src_interface}:%{IP:src_ip}(/%{INT:src_port})?(\(%{DATA:src_fwuser}\))? to %{DATA:src_xlated_interface}:%{IP:src_xlated_ip}/%{DATA:src_xlated_port} +# ASA-3-313001, ASA-3-313004, ASA-3-313008 +CISCOFW313001_313004_313008 %{CISCO_ACTION:action} %{WORD:protocol} type=%{INT:icmp_type}, code=%{INT:icmp_code} from %{IP:src_ip} on interface %{DATA:interface}( to %{IP:dst_ip})? +# ASA-4-313005 +CISCOFW313005 %{CISCO_REASON:reason} for %{WORD:protocol} error message: %{WORD:err_protocol} src %{DATA:err_src_interface}:%{IP:err_src_ip}(\(%{DATA:err_src_fwuser}\))? dst %{DATA:err_dst_interface}:%{IP:err_dst_ip}(\(%{DATA:err_dst_fwuser}\))? \(type %{INT:err_icmp_type}, code %{INT:err_icmp_code}\) on %{DATA:interface} interface\. Original IP payload: %{WORD:protocol} src %{IP:orig_src_ip}/%{INT:orig_src_port}(\(%{DATA:orig_src_fwuser}\))? dst %{IP:orig_dst_ip}/%{INT:orig_dst_port}(\(%{DATA:orig_dst_fwuser}\))? 
+# ASA-4-402117 +CISCOFW402117 %{WORD:protocol}: Received a non-IPSec packet \(protocol= %{WORD:orig_protocol}\) from %{IP:src_ip} to %{IP:dst_ip} +# ASA-4-402119 +CISCOFW402119 %{WORD:protocol}: Received an %{WORD:orig_protocol} packet \(SPI= %{DATA:spi}, sequence number= %{DATA:seq_num}\) from %{IP:src_ip} \(user= %{DATA:user}\) to %{IP:dst_ip} that failed anti-replay checking +# ASA-4-419001 +CISCOFW419001 %{CISCO_ACTION:action} %{WORD:protocol} packet from %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port} to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port}, reason: %{GREEDYDATA:reason} +# ASA-4-419002 +CISCOFW419002 %{CISCO_REASON:reason} from %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port} to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port} with different initial sequence number +# ASA-4-500004 +CISCOFW500004 %{CISCO_REASON:reason} for protocol=%{WORD:protocol}, from %{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port} +# ASA-6-602303, ASA-6-602304 +CISCOFW602303_602304 %{WORD:protocol}: An %{CISCO_DIRECTION:direction} %{GREEDYDATA:tunnel_type} SA \(SPI= %{DATA:spi}\) between %{IP:src_ip} and %{IP:dst_ip} \(user= %{DATA:user}\) has been %{CISCO_ACTION:action} +# ASA-7-710001, ASA-7-710002, ASA-7-710003, ASA-7-710005, ASA-7-710006 +CISCOFW710001_710002_710003_710005_710006 %{WORD:protocol} (?:request|access) %{CISCO_ACTION:action} from %{IP:src_ip}/%{INT:src_port} to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port} +# ASA-6-713172 +CISCOFW713172 Group = %{GREEDYDATA:group}, IP = %{IP:src_ip}, Automatic NAT Detection Status:\s+Remote end\s*%{DATA:is_remote_natted}\s*behind a NAT device\s+This\s+end\s*%{DATA:is_local_natted}\s*behind a NAT device +# ASA-4-733100 +CISCOFW733100 \[\s*%{DATA:drop_type}\s*\] drop %{DATA:drop_rate_id} exceeded. 
Current burst rate is %{INT:drop_rate_current_burst} per second, max configured rate is %{INT:drop_rate_max_burst}; Current average rate is %{INT:drop_rate_current_avg} per second, max configured rate is %{INT:drop_rate_max_avg}; Cumulative total count is %{INT:drop_total_count} +#== End Cisco ASA == diff --git a/lib/patterns/grok/grok-patterns b/lib/patterns/grok/grok-patterns new file mode 100755 index 00000000..37c70487 --- /dev/null +++ b/lib/patterns/grok/grok-patterns @@ -0,0 +1,94 @@ +USERNAME [a-zA-Z0-9._-]+ +USER %{USERNAME} +INT (?:[+-]?(?:[0-9]+)) +BASE10NUM (?[+-]?(?:(?:[0-9]+(?:\.[0-9]+)?)|(?:\.[0-9]+))) +NUMBER (?:%{BASE10NUM}) +BASE16NUM (?(?"(?>\\.|[^\\"]+)+"|""|(?>'(?>\\.|[^\\']+)+')|''|(?>`(?>\\.|[^\\`]+)+`)|``)) +UUID [A-Fa-f0-9]{8}-(?:[A-Fa-f0-9]{4}-){3}[A-Fa-f0-9]{12} + +# Networking +MAC (?:%{CISCOMAC}|%{WINDOWSMAC}|%{COMMONMAC}) +CISCOMAC (?:(?:[A-Fa-f0-9]{4}\.){2}[A-Fa-f0-9]{4}) +WINDOWSMAC (?:(?:[A-Fa-f0-9]{2}-){5}[A-Fa-f0-9]{2}) +COMMONMAC (?:(?:[A-Fa-f0-9]{2}:){5}[A-Fa-f0-9]{2}) +IPV6 
((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)? +IPV4 (?/(?>[\w_%!$@:.,~-]+|\\.)*)+ +TTY (?:/dev/(pts|tty([pq])?)(\w+)?/?(?:[0-9]+)) +WINPATH (?>[A-Za-z]+:|\\)(?:\\[^\\?*]*)+ +URIPROTO [A-Za-z]+(\+[A-Za-z+]+)? +URIHOST %{IPORHOST}(?::%{POSINT:port})? +# uripath comes loosely from RFC1738, but mostly from what Firefox +# doesn't turn into %XX +URIPATH (?:/[A-Za-z0-9$.+!*'(){},~:;=@#%_\-]*)+ +#URIPARAM \?(?:[A-Za-z0-9]+(?:=(?:[^&]*))?(?:&(?:[A-Za-z0-9]+(?:=(?:[^&]*))?)?)*)? +URIPARAM \?[A-Za-z0-9$.+!*'|(){},~@#%&/=:;_?\-\[\]]* +URIPATHPARAM %{URIPATH}(?:%{URIPARAM})? +URI %{URIPROTO}://(?:%{USER}(?::[^@]*)?@)?(?:%{URIHOST})?(?:%{URIPATHPARAM})? 
+ +# Months: January, Feb, 3, 03, 12, December +MONTH \b(?:Jan(?:uary)?|Feb(?:ruary)?|Mar(?:ch)?|Apr(?:il)?|May|Jun(?:e)?|Jul(?:y)?|Aug(?:ust)?|Sep(?:tember)?|Oct(?:ober)?|Nov(?:ember)?|Dec(?:ember)?)\b +MONTHNUM (?:0?[1-9]|1[0-2]) +MONTHNUM2 (?:0[1-9]|1[0-2]) +MONTHDAY (?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9]) + +# Days: Monday, Tue, Thu, etc... +DAY (?:Mon(?:day)?|Tue(?:sday)?|Wed(?:nesday)?|Thu(?:rsday)?|Fri(?:day)?|Sat(?:urday)?|Sun(?:day)?) + +# Years? +YEAR (?>\d\d){1,2} +HOUR (?:2[0123]|[01]?[0-9]) +MINUTE (?:[0-5][0-9]) +# '60' is a leap second in most time standards and thus is valid. +SECOND (?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?) +TIME (?!<[0-9])%{HOUR}:%{MINUTE}(?::%{SECOND})(?![0-9]) +# datestamp is YYYY/MM/DD-HH:MM:SS.UUUU (or something like it) +DATE_US %{MONTHNUM}[/-]%{MONTHDAY}[/-]%{YEAR} +DATE_EU %{MONTHDAY}[./-]%{MONTHNUM}[./-]%{YEAR} +ISO8601_TIMEZONE (?:Z|[+-]%{HOUR}(?::?%{MINUTE})) +ISO8601_SECOND (?:%{SECOND}|60) +TIMESTAMP_ISO8601 %{YEAR}-%{MONTHNUM}-%{MONTHDAY}[T ]%{HOUR}:?%{MINUTE}(?::?%{SECOND})?%{ISO8601_TIMEZONE}? +DATE %{DATE_US}|%{DATE_EU} +DATESTAMP %{DATE}[- ]%{TIME} +TZ (?:[PMCE][SD]T|UTC) +DATESTAMP_RFC822 %{DAY} %{MONTH} %{MONTHDAY} %{YEAR} %{TIME} %{TZ} +DATESTAMP_RFC2822 %{DAY}, %{MONTHDAY} %{MONTH} %{YEAR} %{TIME} %{ISO8601_TIMEZONE} +DATESTAMP_OTHER %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{TZ} %{YEAR} +DATESTAMP_EVENTLOG %{YEAR}%{MONTHNUM2}%{MONTHDAY}%{HOUR}%{MINUTE}%{SECOND} + +# Syslog Dates: Month Day HH:MM:SS +SYSLOGTIMESTAMP %{MONTH} +%{MONTHDAY} %{TIME} +PROG (?:[\w._/%-]+) +SYSLOGPROG %{PROG:program}(?:\[%{POSINT:pid}\])? 
+SYSLOGHOST %{IPORHOST} +SYSLOGFACILITY <%{NONNEGINT:facility}.%{NONNEGINT:priority}> +HTTPDATE %{MONTHDAY}/%{MONTH}/%{YEAR}:%{TIME} %{INT} + +# Shortcuts +QS %{QUOTEDSTRING} + +# Log formats +SYSLOGBASE %{SYSLOGTIMESTAMP:timestamp} (?:%{SYSLOGFACILITY} )?%{SYSLOGHOST:logsource} %{SYSLOGPROG}: +COMMONAPACHELOG %{IPORHOST:clientip} %{USER:ident} %{USER:auth} \[%{HTTPDATE:timestamp}\] "(?:%{WORD:verb} %{NOTSPACE:request}(?: HTTP/%{NUMBER:httpversion})?|%{DATA:rawrequest})" %{NUMBER:response} (?:%{NUMBER:bytes}|-) +COMBINEDAPACHELOG %{COMMONAPACHELOG} %{QS:referrer} %{QS:agent} + +# Log Levels +LOGLEVEL ([Aa]lert|ALERT|[Tt]race|TRACE|[Dd]ebug|DEBUG|[Nn]otice|NOTICE|[Ii]nfo|INFO|[Ww]arn?(?:ing)?|WARN?(?:ING)?|[Ee]rr?(?:or)?|ERR?(?:OR)?|[Cc]rit?(?:ical)?|CRIT?(?:ICAL)?|[Ff]atal|FATAL|[Ss]evere|SEVERE|EMERG(?:ENCY)?|[Ee]merg(?:ency)?) diff --git a/lib/patterns/grok/haproxy b/lib/patterns/grok/haproxy new file mode 100755 index 00000000..e10fd970 --- /dev/null +++ b/lib/patterns/grok/haproxy @@ -0,0 +1,37 @@ +## These patterns were tested w/ haproxy-1.4.15 + +## Documentation of the haproxy log formats can be found at the following links: +## http://code.google.com/p/haproxy-docs/wiki/HTTPLogFormat +## http://code.google.com/p/haproxy-docs/wiki/TCPLogFormat + +HAPROXYTIME (?!<[0-9])%{HOUR:haproxy_hour}:%{MINUTE:haproxy_minute}(?::%{SECOND:haproxy_second})(?![0-9]) +HAPROXYDATE %{MONTHDAY:haproxy_monthday}/%{MONTH:haproxy_month}/%{YEAR:haproxy_year}:%{HAPROXYTIME:haproxy_time}.%{INT:haproxy_milliseconds} + +# Override these default patterns to parse out what is captured in your haproxy.cfg +HAPROXYCAPTUREDREQUESTHEADERS %{DATA:captured_request_headers} +HAPROXYCAPTUREDRESPONSEHEADERS %{DATA:captured_response_headers} + +# Example: +# These haproxy config lines will add data to the logs that are captured +# by the patterns below. Place them in your custom patterns directory to +# override the defaults. 
+# +# capture request header Host len 40 +# capture request header X-Forwarded-For len 50 +# capture request header Accept-Language len 50 +# capture request header Referer len 200 +# capture request header User-Agent len 200 +# +# capture response header Content-Type len 30 +# capture response header Content-Encoding len 10 +# capture response header Cache-Control len 200 +# capture response header Last-Modified len 200 +# +# HAPROXYCAPTUREDREQUESTHEADERS %{DATA:request_header_host}\|%{DATA:request_header_x_forwarded_for}\|%{DATA:request_header_accept_language}\|%{DATA:request_header_referer}\|%{DATA:request_header_user_agent} +# HAPROXYCAPTUREDRESPONSEHEADERS %{DATA:response_header_content_type}\|%{DATA:response_header_content_encoding}\|%{DATA:response_header_cache_control}\|%{DATA:response_header_last_modified} + +# parse a haproxy 'httplog' line +HAPROXYHTTP %{SYSLOGTIMESTAMP:syslog_timestamp} %{IPORHOST:syslog_server} %{SYSLOGPROG}: %{IP:client_ip}:%{INT:client_port} \[%{HAPROXYDATE:accept_date}\] %{NOTSPACE:frontend_name} %{NOTSPACE:backend_name}/%{NOTSPACE:server_name} %{INT:time_request}/%{INT:time_queue}/%{INT:time_backend_connect}/%{INT:time_backend_response}/%{NOTSPACE:time_duration} %{INT:http_status_code} %{NOTSPACE:bytes_read} %{DATA:captured_request_cookie} %{DATA:captured_response_cookie} %{NOTSPACE:termination_state} %{INT:actconn}/%{INT:feconn}/%{INT:beconn}/%{INT:srvconn}/%{NOTSPACE:retries} %{INT:srv_queue}/%{INT:backend_queue} (\{%{HAPROXYCAPTUREDREQUESTHEADERS}\})?( )?(\{%{HAPROXYCAPTUREDRESPONSEHEADERS}\})?( )?"(|(%{WORD:http_verb} (%{URIPROTO:http_proto}://)?(?:%{USER:http_user}(?::[^@]*)?@)?(?:%{URIHOST:http_host})?(?:%{URIPATHPARAM:http_request})?( HTTP/%{NUMBER:http_version})?))?" 
+ +# parse a haproxy 'tcplog' line +HAPROXYTCP %{SYSLOGTIMESTAMP:syslog_timestamp} %{IPORHOST:syslog_server} %{SYSLOGPROG}: %{IP:client_ip}:%{INT:client_port} \[%{HAPROXYDATE:accept_date}\] %{NOTSPACE:frontend_name} %{NOTSPACE:backend_name}/%{NOTSPACE:server_name} %{INT:time_queue}/%{INT:time_backend_connect}/%{NOTSPACE:time_duration} %{NOTSPACE:bytes_read} %{NOTSPACE:termination_state} %{INT:actconn}/%{INT:feconn}/%{INT:beconn}/%{INT:srvconn}/%{NOTSPACE:retries} %{INT:srv_queue}/%{INT:backend_queue} diff --git a/lib/patterns/grok/java b/lib/patterns/grok/java new file mode 100755 index 00000000..1d5a0e20 --- /dev/null +++ b/lib/patterns/grok/java @@ -0,0 +1,7 @@ +JAVACLASS (?:[a-zA-Z$_][a-zA-Z$_0-9]*\.)*[a-zA-Z$_][a-zA-Z$_0-9]* +#Space is an allowed character to match special cases like 'Native Method' or 'Unknown Source' +JAVAFILE (?:[A-Za-z0-9_. -]+) +#Allow special method +JAVAMETHOD (?:()|[a-zA-Z$_][a-zA-Z$_0-9]*) +#Line number is optional in special cases 'Native method' or 'Unknown source' +JAVASTACKTRACEPART %{SPACE}at %{JAVACLASS:class}\.%{JAVAMETHOD:method}\(%{JAVAFILE:file}(?::%{NUMBER:line})?\) diff --git a/lib/patterns/grok/junos b/lib/patterns/grok/junos new file mode 100755 index 00000000..bd796961 --- /dev/null +++ b/lib/patterns/grok/junos @@ -0,0 +1,9 @@ +# JUNOS 11.4 RT_FLOW patterns +RT_FLOW_EVENT (RT_FLOW_SESSION_CREATE|RT_FLOW_SESSION_CLOSE|RT_FLOW_SESSION_DENY) + +RT_FLOW1 %{RT_FLOW_EVENT:event}: %{GREEDYDATA:close-reason}: %{IP:src-ip}/%{DATA:src-port}->%{IP:dst-ip}/%{DATA:dst-port} %{DATA:service} %{IP:nat-src-ip}/%{DATA:nat-src-port}->%{IP:nat-dst-ip}/%{DATA:nat-dst-port} %{DATA:src-nat-rule-name} %{DATA:dst-nat-rule-name} %{INT:protocol-id} %{DATA:policy-name} %{DATA:from-zone} %{DATA:to-zone} %{INT:session-id} \d+\(%{DATA:sent}\) \d+\(%{DATA:received}\) %{INT:elapsed-time} .* + +RT_FLOW2 %{RT_FLOW_EVENT:event}: session created %{IP:src-ip}/%{DATA:src-port}->%{IP:dst-ip}/%{DATA:dst-port} %{DATA:service} 
%{IP:nat-src-ip}/%{DATA:nat-src-port}->%{IP:nat-dst-ip}/%{DATA:nat-dst-port} %{DATA:src-nat-rule-name} %{DATA:dst-nat-rule-name} %{INT:protocol-id} %{DATA:policy-name} %{DATA:from-zone} %{DATA:to-zone} %{INT:session-id} .* + +RT_FLOW3 %{RT_FLOW_EVENT:event}: session denied %{IP:src-ip}/%{DATA:src-port}->%{IP:dst-ip}/%{DATA:dst-port} %{DATA:service} %{INT:protocol-id}\(\d\) %{DATA:policy-name} %{DATA:from-zone} %{DATA:to-zone} .* + diff --git a/lib/patterns/grok/linux-syslog b/lib/patterns/grok/linux-syslog new file mode 100755 index 00000000..81c1f86e --- /dev/null +++ b/lib/patterns/grok/linux-syslog @@ -0,0 +1,16 @@ +SYSLOG5424PRINTASCII [!-~]+ + +SYSLOGBASE2 (?:%{SYSLOGTIMESTAMP:timestamp}|%{TIMESTAMP_ISO8601:timestamp8601}) (?:%{SYSLOGFACILITY} )?%{SYSLOGHOST:logsource} %{SYSLOGPROG}: +SYSLOGPAMSESSION %{SYSLOGBASE} (?=%{GREEDYDATA:message})%{WORD:pam_module}\(%{DATA:pam_caller}\): session %{WORD:pam_session_state} for user %{USERNAME:username}(?: by %{GREEDYDATA:pam_by})? + +CRON_ACTION [A-Z ]+ +CRONLOG %{SYSLOGBASE} \(%{USER:user}\) %{CRON_ACTION:action} \(%{DATA:message}\) + +SYSLOGLINE %{SYSLOGBASE2} %{GREEDYDATA:message} + +# IETF 5424 syslog(8) format (see http://www.rfc-editor.org/info/rfc5424) +SYSLOG5424PRI <%{NONNEGINT:syslog5424_pri}> +SYSLOG5424SD \[%{DATA}\]+ +SYSLOG5424BASE %{SYSLOG5424PRI}%{NONNEGINT:syslog5424_ver} +(?:%{TIMESTAMP_ISO8601:syslog5424_ts}|-) +(?:%{HOSTNAME:syslog5424_host}|-) +(-|%{SYSLOG5424PRINTASCII:syslog5424_app}) +(-|%{SYSLOG5424PRINTASCII:syslog5424_proc}) +(-|%{SYSLOG5424PRINTASCII:syslog5424_msgid}) +(?:%{SYSLOG5424SD:syslog5424_sd}|-|) + +SYSLOG5424LINE %{SYSLOG5424BASE} +%{GREEDYDATA:syslog5424_msg} diff --git a/lib/patterns/grok/mcollective b/lib/patterns/grok/mcollective new file mode 100755 index 00000000..648b172e --- /dev/null +++ b/lib/patterns/grok/mcollective @@ -0,0 +1 @@ +MCOLLECTIVEAUDIT %{TIMESTAMP_ISO8601:timestamp}: diff --git a/lib/patterns/grok/mcollective-patterns 
b/lib/patterns/grok/mcollective-patterns new file mode 100755 index 00000000..bb2f7f9b --- /dev/null +++ b/lib/patterns/grok/mcollective-patterns @@ -0,0 +1,4 @@ +# Remember, these can be multi-line events. +MCOLLECTIVE ., \[%{TIMESTAMP_ISO8601:timestamp} #%{POSINT:pid}\]%{SPACE}%{LOGLEVEL:event_level} + +MCOLLECTIVEAUDIT %{TIMESTAMP_ISO8601:timestamp}: diff --git a/lib/patterns/grok/mongodb b/lib/patterns/grok/mongodb new file mode 100755 index 00000000..47a95735 --- /dev/null +++ b/lib/patterns/grok/mongodb @@ -0,0 +1,4 @@ +MONGO_LOG %{SYSLOGTIMESTAMP:timestamp} \[%{WORD:component}\] %{GREEDYDATA:message} +MONGO_QUERY \{ (?<={ ).*(?= } ntoreturn:) \} +MONGO_SLOWQUERY %{WORD} %{MONGO_WORDDASH:database}\.%{MONGO_WORDDASH:collection} %{WORD}: %{MONGO_QUERY:query} %{WORD}:%{NONNEGINT:ntoreturn} %{WORD}:%{NONNEGINT:ntoskip} %{WORD}:%{NONNEGINT:nscanned}.*nreturned:%{NONNEGINT:nreturned}..+ (?<duration>[0-9]+)ms +MONGO_WORDDASH \b[\w-]+\b diff --git a/lib/patterns/grok/nagios b/lib/patterns/grok/nagios new file mode 100755 index 00000000..9d3fa7b5 --- /dev/null +++ b/lib/patterns/grok/nagios @@ -0,0 +1,108 @@ +################################################################################## +################################################################################## +# Chop Nagios log files to smithereens! +# +# A set of GROK filters to process logfiles generated by Nagios. +# While it does not, this set intends to cover all possible Nagios logs. 
+# +# Some more work needs to be done to cover all External Commands: +# http://old.nagios.org/developerinfo/externalcommands/commandlist.php +# +# If you need some support on these rules please contact: +# Jelle Smet http://smetj.net +# +################################################################################# +################################################################################# + +NAGIOSTIME \[%{NUMBER:nagios_epoch}\] + +############################################### +######## Begin nagios log types +############################################### +NAGIOS_TYPE_CURRENT_SERVICE_STATE CURRENT SERVICE STATE +NAGIOS_TYPE_CURRENT_HOST_STATE CURRENT HOST STATE + +NAGIOS_TYPE_SERVICE_NOTIFICATION SERVICE NOTIFICATION +NAGIOS_TYPE_HOST_NOTIFICATION HOST NOTIFICATION + +NAGIOS_TYPE_SERVICE_ALERT SERVICE ALERT +NAGIOS_TYPE_HOST_ALERT HOST ALERT + +NAGIOS_TYPE_SERVICE_FLAPPING_ALERT SERVICE FLAPPING ALERT +NAGIOS_TYPE_HOST_FLAPPING_ALERT HOST FLAPPING ALERT + +NAGIOS_TYPE_SERVICE_DOWNTIME_ALERT SERVICE DOWNTIME ALERT +NAGIOS_TYPE_HOST_DOWNTIME_ALERT HOST DOWNTIME ALERT + +NAGIOS_TYPE_PASSIVE_SERVICE_CHECK PASSIVE SERVICE CHECK +NAGIOS_TYPE_PASSIVE_HOST_CHECK PASSIVE HOST CHECK + +NAGIOS_TYPE_SERVICE_EVENT_HANDLER SERVICE EVENT HANDLER +NAGIOS_TYPE_HOST_EVENT_HANDLER HOST EVENT HANDLER + +NAGIOS_TYPE_EXTERNAL_COMMAND EXTERNAL COMMAND +NAGIOS_TYPE_TIMEPERIOD_TRANSITION TIMEPERIOD TRANSITION +############################################### +######## End nagios log types +############################################### + +############################################### +######## Begin external check types +############################################### +NAGIOS_EC_DISABLE_SVC_CHECK DISABLE_SVC_CHECK +NAGIOS_EC_ENABLE_SVC_CHECK ENABLE_SVC_CHECK +NAGIOS_EC_DISABLE_HOST_CHECK DISABLE_HOST_CHECK +NAGIOS_EC_ENABLE_HOST_CHECK ENABLE_HOST_CHECK +NAGIOS_EC_PROCESS_SERVICE_CHECK_RESULT PROCESS_SERVICE_CHECK_RESULT +NAGIOS_EC_PROCESS_HOST_CHECK_RESULT 
PROCESS_HOST_CHECK_RESULT +NAGIOS_EC_SCHEDULE_SERVICE_DOWNTIME SCHEDULE_SERVICE_DOWNTIME +NAGIOS_EC_SCHEDULE_HOST_DOWNTIME SCHEDULE_HOST_DOWNTIME +############################################### +######## End external check types +############################################### +NAGIOS_WARNING Warning:%{SPACE}%{GREEDYDATA:nagios_message} + +NAGIOS_CURRENT_SERVICE_STATE %{NAGIOS_TYPE_CURRENT_SERVICE_STATE:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{DATA:nagios_statetype};%{DATA:nagios_statecode};%{GREEDYDATA:nagios_message} +NAGIOS_CURRENT_HOST_STATE %{NAGIOS_TYPE_CURRENT_HOST_STATE:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{DATA:nagios_statetype};%{DATA:nagios_statecode};%{GREEDYDATA:nagios_message} + +NAGIOS_SERVICE_NOTIFICATION %{NAGIOS_TYPE_SERVICE_NOTIFICATION:nagios_type}: %{DATA:nagios_notifyname};%{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{DATA:nagios_contact};%{GREEDYDATA:nagios_message} +NAGIOS_HOST_NOTIFICATION %{NAGIOS_TYPE_HOST_NOTIFICATION}: %{DATA:nagios_notifyname};%{DATA:nagios_hostname};%{DATA:nagios_state};%{DATA:nagios_contact};%{GREEDYDATA:nagios_message} + +NAGIOS_SERVICE_ALERT %{NAGIOS_TYPE_SERVICE_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{DATA:nagios_statelevel};%{NUMBER:nagios_attempt};%{GREEDYDATA:nagios_message} +NAGIOS_HOST_ALERT %{NAGIOS_TYPE_HOST_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{DATA:nagios_statelevel};%{NUMBER:nagios_attempt};%{GREEDYDATA:nagios_message} + +NAGIOS_SERVICE_FLAPPING_ALERT %{NAGIOS_TYPE_SERVICE_FLAPPING_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{GREEDYDATA:nagios_message} +NAGIOS_HOST_FLAPPING_ALERT %{NAGIOS_TYPE_HOST_FLAPPING_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{GREEDYDATA:nagios_message} + +NAGIOS_SERVICE_DOWNTIME_ALERT %{NAGIOS_TYPE_SERVICE_DOWNTIME_ALERT:nagios_type}: 
%{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{GREEDYDATA:nagios_comment} +NAGIOS_HOST_DOWNTIME_ALERT %{NAGIOS_TYPE_HOST_DOWNTIME_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{GREEDYDATA:nagios_comment} + +NAGIOS_PASSIVE_SERVICE_CHECK %{NAGIOS_TYPE_PASSIVE_SERVICE_CHECK:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{GREEDYDATA:nagios_comment} +NAGIOS_PASSIVE_HOST_CHECK %{NAGIOS_TYPE_PASSIVE_HOST_CHECK:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{GREEDYDATA:nagios_comment} + +NAGIOS_SERVICE_EVENT_HANDLER %{NAGIOS_TYPE_SERVICE_EVENT_HANDLER:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{DATA:nagios_statelevel};%{DATA:nagios_event_handler_name} +NAGIOS_HOST_EVENT_HANDLER %{NAGIOS_TYPE_HOST_EVENT_HANDLER:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{DATA:nagios_statelevel};%{DATA:nagios_event_handler_name} + +NAGIOS_TIMEPERIOD_TRANSITION %{NAGIOS_TYPE_TIMEPERIOD_TRANSITION:nagios_type}: %{DATA:nagios_service};%{DATA:nagios_unknown1};%{DATA:nagios_unknown2}; + +#################### +#### External checks +#################### + +#Disable host & service check +NAGIOS_EC_LINE_DISABLE_SVC_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_DISABLE_SVC_CHECK:nagios_command};%{DATA:nagios_hostname};%{DATA:nagios_service} +NAGIOS_EC_LINE_DISABLE_HOST_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_DISABLE_HOST_CHECK:nagios_command};%{DATA:nagios_hostname} + +#Enable host & service check +NAGIOS_EC_LINE_ENABLE_SVC_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_ENABLE_SVC_CHECK:nagios_command};%{DATA:nagios_hostname};%{DATA:nagios_service} +NAGIOS_EC_LINE_ENABLE_HOST_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_ENABLE_HOST_CHECK:nagios_command};%{DATA:nagios_hostname} + +#Process host & service check +NAGIOS_EC_LINE_PROCESS_SERVICE_CHECK_RESULT 
%{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_PROCESS_SERVICE_CHECK_RESULT:nagios_command};%{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{GREEDYDATA:nagios_check_result} +NAGIOS_EC_LINE_PROCESS_HOST_CHECK_RESULT %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_PROCESS_HOST_CHECK_RESULT:nagios_command};%{DATA:nagios_hostname};%{DATA:nagios_state};%{GREEDYDATA:nagios_check_result} + +#Schedule host & service downtime +NAGIOS_EC_LINE_SCHEDULE_HOST_DOWNTIME %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_SCHEDULE_HOST_DOWNTIME:nagios_command};%{DATA:nagios_hostname};%{NUMBER:nagios_start_time};%{NUMBER:nagios_end_time};%{NUMBER:nagios_fixed};%{NUMBER:nagios_trigger_id};%{NUMBER:nagios_duration};%{DATA:author};%{DATA:comment} + +#End matching line +NAGIOSLOGLINE %{NAGIOSTIME} (?:%{NAGIOS_WARNING}|%{NAGIOS_CURRENT_SERVICE_STATE}|%{NAGIOS_CURRENT_HOST_STATE}|%{NAGIOS_SERVICE_NOTIFICATION}|%{NAGIOS_HOST_NOTIFICATION}|%{NAGIOS_SERVICE_ALERT}|%{NAGIOS_HOST_ALERT}|%{NAGIOS_SERVICE_FLAPPING_ALERT}|%{NAGIOS_HOST_FLAPPING_ALERT}|%{NAGIOS_SERVICE_DOWNTIME_ALERT}|%{NAGIOS_HOST_DOWNTIME_ALERT}|%{NAGIOS_PASSIVE_SERVICE_CHECK}|%{NAGIOS_PASSIVE_HOST_CHECK}|%{NAGIOS_SERVICE_EVENT_HANDLER}|%{NAGIOS_HOST_EVENT_HANDLER}|%{NAGIOS_TIMEPERIOD_TRANSITION}|%{NAGIOS_EC_LINE_DISABLE_SVC_CHECK}|%{NAGIOS_EC_LINE_ENABLE_SVC_CHECK}|%{NAGIOS_EC_LINE_DISABLE_HOST_CHECK}|%{NAGIOS_EC_LINE_ENABLE_HOST_CHECK}|%{NAGIOS_EC_LINE_PROCESS_HOST_CHECK_RESULT}|%{NAGIOS_EC_LINE_PROCESS_SERVICE_CHECK_RESULT}|%{NAGIOS_EC_LINE_SCHEDULE_HOST_DOWNTIME}) diff --git a/lib/patterns/grok/postgresql b/lib/patterns/grok/postgresql new file mode 100755 index 00000000..c5b3e90b --- /dev/null +++ b/lib/patterns/grok/postgresql @@ -0,0 +1,3 @@ +# Default postgresql pg_log format pattern +POSTGRESQL %{DATESTAMP:timestamp} %{TZ} %{DATA:user_id} %{GREEDYDATA:connection_id} %{POSINT:pid} + diff --git a/lib/patterns/grok/redis b/lib/patterns/grok/redis new file mode 100755 index 
00000000..8655c4f0 --- /dev/null +++ b/lib/patterns/grok/redis @@ -0,0 +1,3 @@ +REDISTIMESTAMP %{MONTHDAY} %{MONTH} %{TIME} +REDISLOG \[%{POSINT:pid}\] %{REDISTIMESTAMP:timestamp} \* + diff --git a/lib/patterns/grok/ruby b/lib/patterns/grok/ruby new file mode 100755 index 00000000..b1729cdd --- /dev/null +++ b/lib/patterns/grok/ruby @@ -0,0 +1,2 @@ +RUBY_LOGLEVEL (?:DEBUG|FATAL|ERROR|WARN|INFO) +RUBY_LOGGER [DFEWI], \[%{TIMESTAMP_ISO8601:timestamp} #%{POSINT:pid}\] *%{RUBY_LOGLEVEL:loglevel} -- +%{DATA:progname}: %{GREEDYDATA:message} diff --git a/lib/patterns/http_combined b/lib/patterns/http_combined new file mode 100644 index 00000000..36f398a6 --- /dev/null +++ b/lib/patterns/http_combined @@ -0,0 +1,6 @@ +{ + "regex": "^(\\S+) \\S+ (\\S+) \\[([^\\]]+)\\] \"([^\"]*)\" (\\d+) (\\d+) \"([^\"]*)\" \"([^\"]*)", + "fields": "ip,user,@timestamp,request,status,bytes_sent,referer,user_agent", + "numerical_fields": "status,bytes_sent", + "date_format": "DD/MMMM/YYYY:HH:mm:ss ZZ" +} diff --git a/lib/patterns/http_vhost_combined b/lib/patterns/http_vhost_combined new file mode 100644 index 00000000..e2e6b012 --- /dev/null +++ b/lib/patterns/http_vhost_combined @@ -0,0 +1,6 @@ +{ + "regex": "^(\\S+) (\\S+) \\S+ (\\S+) \\[([^\\]]+)\\] \"([^\"]*)\" (\\d+) (\\d+) \"([^\"]*)\" \"([^\"]*)", + "fields": "vhost,ip,user,@timestamp,request,status,bytes_sent,referer,user_agent", + "numerical_fields": "status,bytes_sent", + "date_format": "DD/MMMM/YYYY:HH:mm:ss ZZ" +} diff --git a/lib/patterns/syslog b/lib/patterns/syslog new file mode 100644 index 00000000..1c21c7b5 --- /dev/null +++ b/lib/patterns/syslog @@ -0,0 +1,7 @@ +{ + "regex": "^<(\\S+)>(\\S+\\s+\\S+\\s+\\d+:\\d+:\\d+) (\\S+) ([^:\\[]+)\\[?(\\d*)\\]?:\\s+(.*)$", + + "fields": "syslog_priority,timestamp,host,syslog_program,syslog_pid,message", + "numerical_fields": "syslog_pid", + "date_format": "MMM DD HH:mm:ss Z" +} diff --git a/lib/patterns/syslog_no_prio b/lib/patterns/syslog_no_prio new file mode 100644 index 
00000000..c1fae45c --- /dev/null +++ b/lib/patterns/syslog_no_prio @@ -0,0 +1,6 @@ +{ + "regex": "^(\\S+\\s+\\S+\\s+\\d+:\\d+:\\d+) (\\S+) ([^:\\[]+)\\[?(\\d*)\\]?:\\s+(.*)$", + "fields": "timestamp,host,syslog_program,syslog_pid,message", + "numerical_fields": "syslog_pid", + "date_format": "MMM DD HH:mm:ss Z" +} diff --git a/license.txt b/license.txt new file mode 100644 index 00000000..12605f37 --- /dev/null +++ b/license.txt @@ -0,0 +1,13 @@ +Copyright 2012 Bertrand Paquet + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
\ No newline at end of file diff --git a/package.json b/package.json new file mode 100644 index 00000000..4f95b713 --- /dev/null +++ b/package.json @@ -0,0 +1,55 @@ +{ + "name": "hepstash", + "description": "HEP Logstash implementation in node JS", + "version": "0.0.5", + "author": "Bertrand Paquet ", + "keywords": ["logstash", "log", "zmq", "zeromq", "hep", "eep"], + "main": "./lib/log4node.js", + "homepage": "https://github.com/sipcapture/hepstash", + "repository": { + "type": "git", + "url": "git://github.com/sipcapture/hepstash.git" + }, + "license": "Apache-2.0", + "devDependencies": { + "vows": "0.8.0", + "vows-batch-retry": "0.0.4", + "js-beautify": "1.4.x", + "whereis": "0.4.x", + "jshint": "2.x", + "istanbul": "0.1.x", + "rimraf": "2.3.x", + "jison": "0.4.x" + }, + "scripts": { + "test": "./test-runner.sh" + }, + "dependencies": { + "log4node": "0.1.6", + "optimist": "0.6.1", + "mkdirp": "0.5.1", + "async": "0.9.0", + "lru-cache": "2.7.x" + }, + "optionalDependencies":{ + "aws-sdk": "2.7.x", + "amqplib": "0.4.0", + "zmq": "2.13.0", + "moment": "2.9.0", + "redis": "2.2.5", + "ws": "0.8.0", + "oniguruma": "5.1.x", + "msgpack": "1.0.x", + "geoip-lite": "1.1.6", + "maxmind": "0.6.x", + "maxmind-geolite-mirror": "1.0.x", + "http-proxy-agent": "1.x", + "https-proxy-agent": "1.x", + "lumberjack-protocol": "git://github.com/bpaquet/node-lumberjack-protocol.git", + "hep-js": "0.0.4" + }, + "directories": { + "test": "./test", + "bin": "./bin" + } +} diff --git a/packager/Procfile b/packager/Procfile new file mode 100644 index 00000000..f967547b --- /dev/null +++ b/packager/Procfile @@ -0,0 +1 @@ +main: node bin/node-logstash-agent --config_dir /etc/$APP_NAME/plugins.conf.d --patterns_directories /var/db/$APP_NAME/patterns --db_file /var/db/$APP_NAME/files.json --alarm_file /var/db/$APP_NAME/on_alarm diff --git a/packager/Procfile_debian8 b/packager/Procfile_debian8 new file mode 100644 index 00000000..a5f60cca --- /dev/null +++ b/packager/Procfile_debian8 @@ -0,0 
+1 @@ +main: node bin/node-logstash-agent --config_dir /etc/$APP_NAME/plugins.conf.d --patterns_directories /var/db/$APP_NAME/patterns --db_file /var/db/$APP_NAME/files.json --alarm_file /var/db/$APP_NAME/on_alarm --log_file /var/log/$APP_NAME/main-1.log diff --git a/packager/postinst b/packager/postinst new file mode 100644 index 00000000..0df1beb1 --- /dev/null +++ b/packager/postinst @@ -0,0 +1,18 @@ +#!/bin/sh + +set -e + +APP_NAME="node-logstash" +CONFIG_DIR="/etc/node-logstash/plugins.conf.d" +CLI="$APP_NAME" +CUSTOM_PLUGINS="/var/db/node-logstash/custom_plugins" + +mkdir -p "/var/db/$APP_NAME/patterns" +mkdir -p "$CONFIG_DIR" +mkdir -p "$CUSTOM_PLUGINS" "$CUSTOM_PLUGINS/inputs" "$CUSTOM_PLUGINS/outputs" "$CUSTOM_PLUGINS/filters" +chown "$APP_USER" "$CONFIG_DIR" "$CUSTOM_PLUGINS" "$CUSTOM_PLUGINS/inputs" "$CUSTOM_PLUGINS/outputs" "$CUSTOM_PLUGINS/filters" + +$CLI config | grep NODE_PATH || $CLI config:set NODE_PATH=$CUSTOM_PLUGINS:/opt/node-logstash/lib + +$CLI scale main=0 || true +$CLI scale main=1 || true diff --git a/remote_test.sh b/remote_test.sh new file mode 100755 index 00000000..5b2911f0 --- /dev/null +++ b/remote_test.sh @@ -0,0 +1,34 @@ +#!/bin/sh -e + +TARGET=$1 + +if [ "$TARGET" = "" ]; then + echo "Please specify target on command line" + exit 1 +fi + +if [ "$NODE_VERSION" = "" ]; then + NODE_VERSION=v0.12.7 +fi + +COMMAND="npm test" + +if [ "$TEST" != "" ]; then + COMMAND="TEST='$TEST' $COMMAND" +fi + +if [ "$COVER" != "" ]; then + COMMAND="COVER=$COVER $COMMAND" +fi + +if [ "$SSH_LD_LIBRARY_PATH" != "" ]; then + COMMAND="LD_LIBRARY_PATH=$SSH_LD_LIBRARY_PATH $COMMAND" +fi + +echo "Using node version $NODE_VERSION" +rsync -avh --delete --exclude=.git --exclude=test/maxmind --exclude=node_modules --exclude=coverage ../node-logstash/ $TARGET:node-logstash_$NODE_VERSION/ +ssh $TARGET "source .nvm/nvm.sh && nvm use $NODE_VERSION && cd node-logstash_$NODE_VERSION && echo $NODE_VERSION > .node_version && $COMMAND" + +if [ "$COVER" != "" ]; then + 
rsync -avh --delete $TARGET:node-logstash_$NODE_VERSION/coverage/ coverage/ +fi diff --git a/test-runner.sh b/test-runner.sh new file mode 100755 index 00000000..788bfe60 --- /dev/null +++ b/test-runner.sh @@ -0,0 +1,31 @@ +#!/bin/bash + +set -e + +if [ "$TEST" = "" ]; then + TEST=`ls test/test*.js` + RUN_JSHINT=1 +fi + +export PATH="/usr/sbin:node_modules/.bin:$PATH" +export TZ="Etc/GMT" +export NODE_PATH="test:lib:$NODE_PATH" + +if [ ! -d test/maxmind ]; then + test/maxmind_db.sh +fi + +echo "Launching test : $TEST" + +if [ "$COVER" != "" ]; then + rm -rf coverage + istanbul cover node_modules/.bin/vows -- $TEST --spec +else + vows $TEST --spec +fi + +echo "" + +if [ "$RUN_JSHINT" = "1" ]; then + ./jshint.sh +fi \ No newline at end of file diff --git a/test/500_real_life/copytruncate_logrotate.conf b/test/500_real_life/copytruncate_logrotate.conf new file mode 100644 index 00000000..8ba3f8d3 --- /dev/null +++ b/test/500_real_life/copytruncate_logrotate.conf @@ -0,0 +1,5 @@ +"output.txt" { + rotate 5 + weekly + copytruncate +} diff --git a/test/500_real_life/run.js b/test/500_real_life/run.js new file mode 100644 index 00000000..0258b35d --- /dev/null +++ b/test/500_real_life/run.js @@ -0,0 +1,22 @@ +var argv = require('optimist').argv, + log = require('log4node').reconfigure({ + file: argv.file, + prefix: 'a ' + }); + +console.log('Starting loop, count', argv.count, 'period', argv.period); + +var count = 0; + +function toto() { + if (count >= argv.count) { + console.log('Bye.'); + clearInterval(toto); + process.exit(0); + return; + } + log.info('Line ' + count); + count++; +} + +setInterval(toto, parseInt(argv.period, 10)); diff --git a/test/500_real_life/std_logrotate.conf b/test/500_real_life/std_logrotate.conf new file mode 100644 index 00000000..5bd5e314 --- /dev/null +++ b/test/500_real_life/std_logrotate.conf @@ -0,0 +1,7 @@ +"output.txt" { + rotate 5 + weekly + postrotate + kill -USR2 `cat process.pid` + endscript +} diff --git 
a/test/file_loader_test/comment b/test/file_loader_test/comment new file mode 100644 index 00000000..6c124fe7 --- /dev/null +++ b/test/file_loader_test/comment @@ -0,0 +1,5 @@ +#this is a comment + + # this is another comment + + output://stdout:// \ No newline at end of file diff --git a/test/file_loader_test/empty b/test/file_loader_test/empty new file mode 100644 index 00000000..e69de29b diff --git a/test/file_loader_test/empty_spaces b/test/file_loader_test/empty_spaces new file mode 100644 index 00000000..991aa1a5 --- /dev/null +++ b/test/file_loader_test/empty_spaces @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/test/file_loader_test/multiple b/test/file_loader_test/multiple new file mode 100644 index 00000000..77de5c8e --- /dev/null +++ b/test/file_loader_test/multiple @@ -0,0 +1,7 @@ +input://stdin://a +input://stdin://b + + + + +input://stdin://c \ No newline at end of file diff --git a/test/file_loader_test/only_one_cr b/test/file_loader_test/only_one_cr new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/test/file_loader_test/only_one_cr @@ -0,0 +1 @@ + diff --git a/test/file_loader_test/simple b/test/file_loader_test/simple new file mode 100644 index 00000000..cf795823 --- /dev/null +++ b/test/file_loader_test/simple @@ -0,0 +1 @@ +input://stdin:// diff --git a/test/filter_helper.js b/test/filter_helper.js new file mode 100644 index 00000000..50852d30 --- /dev/null +++ b/test/filter_helper.js @@ -0,0 +1,60 @@ +var assert = require('assert'); + +function createWithCallback(filter_name, filter_config, inputs, number_of_events, check_callback, init_callback, end_callback) { + var test = { + topic: function() { + var callback = this.callback; + if (!init_callback) { + init_callback = function(callback) { + callback(); + }; + } + init_callback(function() { + var m = require('../lib/filters/filter_' + filter_name).create(); + var result = []; + m.on('error', function(err) { + assert.ifError(err); + }); + m.on('output', function(x) { + 
result.push(x); + if (result.length === number_of_events) { + m.close(function() { + callback(null, result); + }); + } + }); + m.init(filter_config, function(err) { + if (err) { + console.error('Unable to load filter', filter_name, filter_config, err); + } + assert.ifError(err); + inputs.forEach(function(d) { + m.emit('input', d); + }); + }); + }); + }, + + check: function(err, result) { + assert.ifError(err); + check_callback(result); + }, + }; + if (end_callback) { + test.end = end_callback; + } + return test; +} + +function create(filter_name, filter_config, inputs, outputs, check_callback, init_callback, end_callback) { + return createWithCallback(filter_name, filter_config, inputs, outputs.length, function(r) { + assert.deepEqual(r, outputs); + if (check_callback) { + check_callback(r); + } + }, init_callback, end_callback); +} + +exports.create = create; + +exports.createWithCallback = createWithCallback; diff --git a/test/grok/extra b/test/grok/extra new file mode 100644 index 00000000..6c6dd9c3 --- /dev/null +++ b/test/grok/extra @@ -0,0 +1,4 @@ +TESTPATTERN [A-Z0-9]+ +GROKTEST %{NUMBER:fnumber} %{WORD:fword} %{GREEDYDATA:fgreedy} %{TESTPATTERN:ftestpattern} +GROKTEST2 %{NUMBER98:fnumber} +GROKTEST3 %{TESTPATTERN:x diff --git a/test/grok/wrong b/test/grok/wrong new file mode 100644 index 00000000..61652fd6 --- /dev/null +++ b/test/grok/wrong @@ -0,0 +1 @@ +jjjjj \ No newline at end of file diff --git a/test/integration_helper.js b/test/integration_helper.js new file mode 100644 index 00000000..bc54fc47 --- /dev/null +++ b/test/integration_helper.js @@ -0,0 +1,36 @@ +var agent = require('agent'), + os = require('os'), + assert = require('assert'); + +function createAgent(urls, callback, error_callback) { + var a = agent.create(); + error_callback = error_callback || function(error) { + assert.ifError(error); + }; + a.on('error', function(module_name, error) { + console.log('Error agent detected, ' + module_name + ' : ' + error); + if (error_callback) { + 
error_callback(error); + } + }); + a.start(['filter://add_host://', 'filter://add_timestamp://', 'filter://add_version://'].concat(urls), function(error) { + assert.ifError(error); + callback(a); + }); +} + +exports.createAgent = createAgent; + +function checkResult(line, target, override_host, keep_timestamp) { + var parsed = JSON.parse(line); + if (!keep_timestamp) { + delete parsed['@timestamp']; + } + delete parsed.redis_channel; + if (override_host) { + target.host = os.hostname(); + } + assert.deepEqual(parsed, target); +} + +exports.checkResult = checkResult; diff --git a/test/maxmind_db.sh b/test/maxmind_db.sh new file mode 100755 index 00000000..d4291e98 --- /dev/null +++ b/test/maxmind_db.sh @@ -0,0 +1,5 @@ +#!/bin/sh + +export MAXMIND_DB_DIR="$(dirname $0)/maxmind/" +mkdir -p $MAXMIND_DB_DIR +$(dirname $0)/../node_modules/.bin/maxmind-geolite-mirror \ No newline at end of file diff --git a/test/mock_helper.js b/test/mock_helper.js new file mode 100644 index 00000000..c71be3a9 --- /dev/null +++ b/test/mock_helper.js @@ -0,0 +1,32 @@ +var m = require('module'); + +var original_loader = null; +var original_cache = null; +var mocked_modules = []; + +function mocked_loader(request, parent, isMain) { + if (mocked_modules[request]) { + return mocked_modules[request]; + } + return original_loader(request, parent, isMain); +} + +function mock(modules) { + // console.log('Wrapping original loader'); + original_loader = m._load; + original_cache = m._cache; + + m._load = mocked_loader; + m._cache = {}; + mocked_modules = modules; +} + +function unmock() { + // console.log('Restoring original loader'); + m._load = original_loader; + m._cache = original_cache; +} + +exports.unmock = unmock; + +exports.mock = mock; diff --git a/test/not_readable_helper.js b/test/not_readable_helper.js new file mode 100644 index 00000000..2dd1ee55 --- /dev/null +++ b/test/not_readable_helper.js @@ -0,0 +1,12 @@ +var fs = require('fs'), + rimraf = require('rimraf'); + +exports.create = 
function(name) { + fs.mkdirSync(name); + fs.chmodSync(name, '0000'); +}; + +exports.remove = function(name) { + fs.chmodSync(name, '755'); + rimraf.sync(name); +}; \ No newline at end of file diff --git a/test/parser/special_chars_new_line b/test/parser/special_chars_new_line new file mode 100644 index 00000000..ed6d69a0 --- /dev/null +++ b/test/parser/special_chars_new_line @@ -0,0 +1,6 @@ +output { + elasticsearch { + host => "\nlocalhost" + } + stdout { } +} \ No newline at end of file diff --git a/test/parser/special_chars_quotes b/test/parser/special_chars_quotes new file mode 100644 index 00000000..4cbfb1ee --- /dev/null +++ b/test/parser/special_chars_quotes @@ -0,0 +1,7 @@ +output { + elasticsearch { + host => "\"localhost" + host2 => "'localhost" + } + stdout { } +} \ No newline at end of file diff --git a/test/parser/special_chars_quotes_single_quotes b/test/parser/special_chars_quotes_single_quotes new file mode 100644 index 00000000..5786ee6b --- /dev/null +++ b/test/parser/special_chars_quotes_single_quotes @@ -0,0 +1,7 @@ +output { + elasticsearch { + host => '\'localhost' + host2 => '"localhost' + } + stdout { } +} \ No newline at end of file diff --git a/test/parser/special_chars_space b/test/parser/special_chars_space new file mode 100644 index 00000000..2ecdd1f5 --- /dev/null +++ b/test/parser/special_chars_space @@ -0,0 +1,6 @@ +output { + elasticsearch { + host => "local host" + } + stdout { } +} \ No newline at end of file diff --git a/test/parser/special_chars_utf8 b/test/parser/special_chars_utf8 new file mode 100644 index 00000000..9a654807 --- /dev/null +++ b/test/parser/special_chars_utf8 @@ -0,0 +1,6 @@ +output { + elasticsearch { + host => "éàlocalhost" + } + stdout { } +} \ No newline at end of file diff --git a/test/redis_driver.js b/test/redis_driver.js new file mode 100644 index 00000000..1fa27fda --- /dev/null +++ b/test/redis_driver.js @@ -0,0 +1,28 @@ +var spawn = require('child_process').spawn, + logger = require('log4node'); + 
+function RedisDriver() {} + +RedisDriver.prototype.start = function(options, callback) { + logger.info('Starting redis', options); + this.redis = spawn('redis-server', ['-']); + // this.redis.stdout.pipe(process.stdout); + Object.keys(options).forEach(function(key) { + this.redis.stdin.write(key + ' ' + options[key] + '\r\n'); + }.bind(this)); + this.redis.stdin.end(); + setTimeout(callback, 200); +}; + +RedisDriver.prototype.stop = function(callback) { + if (this.redis) { + this.redis.once('exit', function() { + logger.info('Redis stopped'); + callback(); + }); + this.redis.kill('SIGINT'); + delete this.redis; + } +}; + +exports.RedisDriver = RedisDriver; diff --git a/test/ssl/client.crt b/test/ssl/client.crt new file mode 100644 index 00000000..32426363 --- /dev/null +++ b/test/ssl/client.crt @@ -0,0 +1,60 @@ +Certificate: + Data: + Version: 3 (0x2) + Serial Number: 1 (0x1) + Signature Algorithm: sha1WithRSAEncryption + Issuer: C=FR, ST=Node-Logstash, O=Node-Logstash, CN=ca.node-logstash.testing + Validity + Not Before: Nov 15 10:02:44 2013 GMT + Not After : Nov 13 10:02:44 2023 GMT + Subject: C=FR, ST=Node-Logstash, O=Node-Logstash, CN=client_name + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + RSA Public Key: (1024 bit) + Modulus (1024 bit): + 00:f6:2a:c2:5c:a9:77:1e:10:ae:a7:b2:54:4f:23: + a0:51:d8:94:02:44:0b:ce:0e:4e:a0:3b:a3:ac:78: + bb:12:f7:37:2d:31:e7:ff:d7:1a:9b:50:07:fd:59: + ea:7c:e6:5a:f1:c3:90:b5:eb:62:f9:43:cd:26:fb: + 8c:ee:4e:59:4a:b8:bd:f8:78:dc:bb:2b:e3:0e:4c: + ad:59:5e:6a:f6:16:84:81:95:bb:0b:d7:41:66:e0: + 44:ac:22:9a:cd:de:b7:ea:10:9f:eb:95:b1:4f:85: + 53:c6:36:06:63:1e:8c:ba:dc:58:0d:b8:8e:00:36: + a9:c8:80:e5:55:1c:9c:13:21 + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Basic Constraints: + CA:FALSE + Netscape Comment: + OpenSSL Generated Certificate + X509v3 Subject Key Identifier: + 98:25:3D:7C:3F:F0:39:02:05:90:33:8E:EF:9A:13:C8:66:8E:6B:3A + X509v3 Authority Key Identifier: + 
keyid:F2:D4:B0:88:61:AA:6D:17:EB:05:D4:86:CE:42:46:B1:6A:C3:0F:17 + + Signature Algorithm: sha1WithRSAEncryption + 98:c1:de:1c:18:d9:e3:75:79:2b:4e:4c:ed:90:0a:e9:b3:69: + 6a:6f:bf:c4:49:b2:be:5d:b6:31:33:06:0e:13:e9:40:ad:03: + 7f:0b:ae:90:50:77:84:5e:a3:82:ee:45:4b:6b:7f:6b:40:44: + e1:d6:fe:50:2c:14:52:d9:5e:ec:03:3f:bf:29:74:5b:3d:d8: + 8f:6a:a5:28:48:49:44:5b:de:d9:1f:46:23:88:98:b0:ca:68: + d9:e8:1e:10:e6:71:65:87:dc:3f:e5:9d:51:5f:a7:85:67:1a: + cb:02:8e:9f:68:41:53:37:7c:1c:41:3d:54:6c:b4:de:69:21: + 5f:c3 +-----BEGIN CERTIFICATE----- +MIICpDCCAg2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBgMQswCQYDVQQGEwJGUjEW +MBQGA1UECBMNTm9kZS1Mb2dzdGFzaDEWMBQGA1UEChMNTm9kZS1Mb2dzdGFzaDEh +MB8GA1UEAxMYY2Eubm9kZS1sb2dzdGFzaC50ZXN0aW5nMB4XDTEzMTExNTEwMDI0 +NFoXDTIzMTExMzEwMDI0NFowUzELMAkGA1UEBhMCRlIxFjAUBgNVBAgTDU5vZGUt +TG9nc3Rhc2gxFjAUBgNVBAoTDU5vZGUtTG9nc3Rhc2gxFDASBgNVBAMUC2NsaWVu +dF9uYW1lMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQD2KsJcqXceEK6nslRP +I6BR2JQCRAvODk6gO6OseLsS9zctMef/1xqbUAf9Wep85lrxw5C162L5Q80m+4zu +TllKuL34eNy7K+MOTK1ZXmr2FoSBlbsL10Fm4ESsIprN3rfqEJ/rlbFPhVPGNgZj +Hoy63FgNuI4ANqnIgOVVHJwTIQIDAQABo3sweTAJBgNVHRMEAjAAMCwGCWCGSAGG ++EIBDQQfFh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQU +mCU9fD/wOQIFkDOO75oTyGaOazowHwYDVR0jBBgwFoAU8tSwiGGqbRfrBdSGzkJG +sWrDDxcwDQYJKoZIhvcNAQEFBQADgYEAmMHeHBjZ43V5K05M7ZAK6bNpam+/xEmy +vl22MTMGDhPpQK0DfwuukFB3hF6jgu5FS2t/a0BE4db+UCwUUtle7AM/vyl0Wz3Y +j2qlKEhJRFve2R9GI4iYsMpo2egeEOZxZYfcP+WdUV+nhWcaywKOn2hBUzd8HEE9 +VGy03mkhX8M= +-----END CERTIFICATE----- diff --git a/test/ssl/client.csr b/test/ssl/client.csr new file mode 100644 index 00000000..dc9ced40 --- /dev/null +++ b/test/ssl/client.csr @@ -0,0 +1,11 @@ +-----BEGIN CERTIFICATE REQUEST----- +MIIBkjCB/AIBADBTMQswCQYDVQQGEwJGUjEWMBQGA1UECBMNTm9kZS1Mb2dzdGFz +aDEWMBQGA1UEChMNTm9kZS1Mb2dzdGFzaDEUMBIGA1UEAxQLY2xpZW50X25hbWUw +gZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAPYqwlypdx4QrqeyVE8joFHYlAJE +C84OTqA7o6x4uxL3Ny0x5//XGptQB/1Z6nzmWvHDkLXrYvlDzSb7jO5OWUq4vfh4 
+3Lsr4w5MrVleavYWhIGVuwvXQWbgRKwims3et+oQn+uVsU+FU8Y2BmMejLrcWA24 +jgA2qciA5VUcnBMhAgMBAAGgADANBgkqhkiG9w0BAQUFAAOBgQBN07Hz0M3cj9hr +8VYO8+j7bZeAuyXXHlDXhRFk9YAfKtBwHuHKlU0UOAv3PqueUgRygIbq7cZ0k6dZ ++ocCf7m4XJ8tphNRITPA8D0Y0zBliWzQGYtaYH5Ilrtw5UeY2IAuKU3VZeS582rB +kqsFCrl5avCSp3rYF3k47Np4o7tlGw== +-----END CERTIFICATE REQUEST----- diff --git a/test/ssl/client.key b/test/ssl/client.key new file mode 100644 index 00000000..7befc1f5 --- /dev/null +++ b/test/ssl/client.key @@ -0,0 +1,15 @@ +-----BEGIN RSA PRIVATE KEY----- +MIICXQIBAAKBgQD2KsJcqXceEK6nslRPI6BR2JQCRAvODk6gO6OseLsS9zctMef/ +1xqbUAf9Wep85lrxw5C162L5Q80m+4zuTllKuL34eNy7K+MOTK1ZXmr2FoSBlbsL +10Fm4ESsIprN3rfqEJ/rlbFPhVPGNgZjHoy63FgNuI4ANqnIgOVVHJwTIQIDAQAB +AoGBAO+rO+a5u9bSySlOj5Upi4qEQb5e/9tTxKtuxXFDbmgNt3PSNh26ERaL+etp +4spOANeH31mvXoJvx8qfeVv/fiBAF2o2l9yXsa9x8MW/aEIiEW0fQkcQLXBn0F3j +Jvb5sIVFdvoTXxmSfcB4BmjQSR9sOENJuXjUDaSiLrOU1ONtAkEA/iGqZ+WZsHXO +P7xJxMla2jzTGbPH0C/RUEWRLwiGzg59h5dTmwvZOlX+R0cDORsaZp24HsWNjlFJ +Iif19UlQcwJBAPf6GkMTf+WJop8Nqx/ZFmjcNnW1LUkDd6KeGRJbWPHDPOt1Fzen +UuyFaga8g73YPc0NblG+InMabn7ykKcaTRsCQQCyHQxfJ2gRam/08dXOK8YWgVz/ +fabE/k648RkxmstIrpVCrEg3uhU1MsI2WMokgx983nwGbTWqksQsQU+86/aLAkBC +j3AzJu3Cd+IdnTacktGQDePUTnA8U+Dt1k/WfIxwQD5+2OwFdTUt5VWjP0CBWaSl +AIDnEgj/Kk4UZxlqsQWXAkAhfW5g9kkUjGD0FvVQxCRm5QXyoMsFR1zXt00K/hFz +pf/hWW6i0wDE6kCDcKAVkNmoFWmE8M0dSoY6EkRYRavu +-----END RSA PRIVATE KEY----- diff --git a/test/ssl/index.txt b/test/ssl/index.txt new file mode 100644 index 00000000..eb309290 --- /dev/null +++ b/test/ssl/index.txt @@ -0,0 +1,2 @@ +V 231113095945Z 00 unknown /C=FR/ST=Node-Logstash/O=Node-Logstash/CN=localhost +V 231113100244Z 01 unknown /C=FR/ST=Node-Logstash/O=Node-Logstash/CN=client_name diff --git a/test/ssl/index.txt.attr b/test/ssl/index.txt.attr new file mode 100644 index 00000000..8f7e63a3 --- /dev/null +++ b/test/ssl/index.txt.attr @@ -0,0 +1 @@ +unique_subject = yes diff --git a/test/ssl/index.txt.attr.old b/test/ssl/index.txt.attr.old new file mode 100644 index 
00000000..8f7e63a3 --- /dev/null +++ b/test/ssl/index.txt.attr.old @@ -0,0 +1 @@ +unique_subject = yes diff --git a/test/ssl/index.txt.old b/test/ssl/index.txt.old new file mode 100644 index 00000000..3c87feaa --- /dev/null +++ b/test/ssl/index.txt.old @@ -0,0 +1 @@ +V 231113095945Z 00 unknown /C=FR/ST=Node-Logstash/O=Node-Logstash/CN=localhost diff --git a/test/ssl/newcerts/00.pem b/test/ssl/newcerts/00.pem new file mode 100644 index 00000000..5336bef4 --- /dev/null +++ b/test/ssl/newcerts/00.pem @@ -0,0 +1,60 @@ +Certificate: + Data: + Version: 3 (0x2) + Serial Number: 0 (0x0) + Signature Algorithm: sha1WithRSAEncryption + Issuer: C=FR, ST=Node-Logstash, O=Node-Logstash, CN=ca.node-logstash.testing + Validity + Not Before: Nov 15 09:59:45 2013 GMT + Not After : Nov 13 09:59:45 2023 GMT + Subject: C=FR, ST=Node-Logstash, O=Node-Logstash, CN=localhost + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + RSA Public Key: (1024 bit) + Modulus (1024 bit): + 00:f4:92:9f:a3:53:c7:9c:0c:c6:a2:26:93:53:64: + a6:ee:8d:a8:17:3a:de:ce:0e:a5:b6:58:d6:65:b2: + 27:12:ef:2c:5d:d1:21:0b:00:5c:ff:db:69:d6:ca: + 41:14:45:49:d5:7f:26:6e:c7:f3:ff:61:d9:bf:ff: + f1:6c:79:6f:11:02:17:25:9c:f4:f5:8b:07:43:89: + 03:3d:75:1e:e4:95:5d:6c:85:72:db:6a:fc:c4:d6: + 13:22:7d:19:80:d0:1f:63:9a:de:58:9c:e6:b8:78: + 52:a0:36:6a:76:d0:0f:1d:2c:8e:e9:1f:bb:9d:02: + c8:d2:4e:1c:84:d4:bc:9d:21 + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Basic Constraints: + CA:FALSE + Netscape Comment: + OpenSSL Generated Certificate + X509v3 Subject Key Identifier: + A6:82:98:CD:92:AB:EB:A7:07:76:CF:4D:DB:19:FB:63:A6:9C:94:EA + X509v3 Authority Key Identifier: + keyid:F2:D4:B0:88:61:AA:6D:17:EB:05:D4:86:CE:42:46:B1:6A:C3:0F:17 + + Signature Algorithm: sha1WithRSAEncryption + 06:14:98:b1:50:63:7a:b3:ac:d6:f8:55:16:e8:6a:3d:99:1d: + db:90:83:ef:1b:32:bc:b8:41:8a:69:34:97:93:d3:cf:26:4d: + ac:a6:5e:1d:b4:5e:33:72:8c:a2:c4:5e:2f:35:80:61:6c:82: + 
97:c4:41:37:fe:1b:a5:c9:0a:07:88:40:7a:7e:72:1d:64:97: + 17:91:8c:ca:97:a1:bb:20:0c:f9:8f:92:c0:7b:31:d4:58:95: + e1:ec:15:0b:d2:91:be:73:2b:23:89:4d:4a:4c:15:20:d4:83: + 5f:df:cf:e8:eb:8f:a1:4b:6a:5a:ef:a8:31:90:ee:61:ba:21: + 5c:f2 +-----BEGIN CERTIFICATE----- +MIICojCCAgugAwIBAgIBADANBgkqhkiG9w0BAQUFADBgMQswCQYDVQQGEwJGUjEW +MBQGA1UECBMNTm9kZS1Mb2dzdGFzaDEWMBQGA1UEChMNTm9kZS1Mb2dzdGFzaDEh +MB8GA1UEAxMYY2Eubm9kZS1sb2dzdGFzaC50ZXN0aW5nMB4XDTEzMTExNTA5NTk0 +NVoXDTIzMTExMzA5NTk0NVowUTELMAkGA1UEBhMCRlIxFjAUBgNVBAgTDU5vZGUt +TG9nc3Rhc2gxFjAUBgNVBAoTDU5vZGUtTG9nc3Rhc2gxEjAQBgNVBAMTCWxvY2Fs +aG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA9JKfo1PHnAzGoiaTU2Sm +7o2oFzrezg6ltljWZbInEu8sXdEhCwBc/9tp1spBFEVJ1X8mbsfz/2HZv//xbHlv +EQIXJZz09YsHQ4kDPXUe5JVdbIVy22r8xNYTIn0ZgNAfY5reWJzmuHhSoDZqdtAP +HSyO6R+7nQLI0k4chNS8nSECAwEAAaN7MHkwCQYDVR0TBAIwADAsBglghkgBhvhC +AQ0EHxYdT3BlblNTTCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFKaC +mM2Sq+unB3bPTdsZ+2OmnJTqMB8GA1UdIwQYMBaAFPLUsIhhqm0X6wXUhs5CRrFq +ww8XMA0GCSqGSIb3DQEBBQUAA4GBAAYUmLFQY3qzrNb4VRboaj2ZHduQg+8bMry4 +QYppNJeT088mTaymXh20XjNyjKLEXi81gGFsgpfEQTf+G6XJCgeIQHp+ch1klxeR +jMqXobsgDPmPksB7MdRYleHsFQvSkb5zKyOJTUpMFSDUg1/fz+jrj6FLalrvqDGQ +7mG6IVzy +-----END CERTIFICATE----- diff --git a/test/ssl/newcerts/01.pem b/test/ssl/newcerts/01.pem new file mode 100644 index 00000000..32426363 --- /dev/null +++ b/test/ssl/newcerts/01.pem @@ -0,0 +1,60 @@ +Certificate: + Data: + Version: 3 (0x2) + Serial Number: 1 (0x1) + Signature Algorithm: sha1WithRSAEncryption + Issuer: C=FR, ST=Node-Logstash, O=Node-Logstash, CN=ca.node-logstash.testing + Validity + Not Before: Nov 15 10:02:44 2013 GMT + Not After : Nov 13 10:02:44 2023 GMT + Subject: C=FR, ST=Node-Logstash, O=Node-Logstash, CN=client_name + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + RSA Public Key: (1024 bit) + Modulus (1024 bit): + 00:f6:2a:c2:5c:a9:77:1e:10:ae:a7:b2:54:4f:23: + a0:51:d8:94:02:44:0b:ce:0e:4e:a0:3b:a3:ac:78: + 
bb:12:f7:37:2d:31:e7:ff:d7:1a:9b:50:07:fd:59: + ea:7c:e6:5a:f1:c3:90:b5:eb:62:f9:43:cd:26:fb: + 8c:ee:4e:59:4a:b8:bd:f8:78:dc:bb:2b:e3:0e:4c: + ad:59:5e:6a:f6:16:84:81:95:bb:0b:d7:41:66:e0: + 44:ac:22:9a:cd:de:b7:ea:10:9f:eb:95:b1:4f:85: + 53:c6:36:06:63:1e:8c:ba:dc:58:0d:b8:8e:00:36: + a9:c8:80:e5:55:1c:9c:13:21 + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Basic Constraints: + CA:FALSE + Netscape Comment: + OpenSSL Generated Certificate + X509v3 Subject Key Identifier: + 98:25:3D:7C:3F:F0:39:02:05:90:33:8E:EF:9A:13:C8:66:8E:6B:3A + X509v3 Authority Key Identifier: + keyid:F2:D4:B0:88:61:AA:6D:17:EB:05:D4:86:CE:42:46:B1:6A:C3:0F:17 + + Signature Algorithm: sha1WithRSAEncryption + 98:c1:de:1c:18:d9:e3:75:79:2b:4e:4c:ed:90:0a:e9:b3:69: + 6a:6f:bf:c4:49:b2:be:5d:b6:31:33:06:0e:13:e9:40:ad:03: + 7f:0b:ae:90:50:77:84:5e:a3:82:ee:45:4b:6b:7f:6b:40:44: + e1:d6:fe:50:2c:14:52:d9:5e:ec:03:3f:bf:29:74:5b:3d:d8: + 8f:6a:a5:28:48:49:44:5b:de:d9:1f:46:23:88:98:b0:ca:68: + d9:e8:1e:10:e6:71:65:87:dc:3f:e5:9d:51:5f:a7:85:67:1a: + cb:02:8e:9f:68:41:53:37:7c:1c:41:3d:54:6c:b4:de:69:21: + 5f:c3 +-----BEGIN CERTIFICATE----- +MIICpDCCAg2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBgMQswCQYDVQQGEwJGUjEW +MBQGA1UECBMNTm9kZS1Mb2dzdGFzaDEWMBQGA1UEChMNTm9kZS1Mb2dzdGFzaDEh +MB8GA1UEAxMYY2Eubm9kZS1sb2dzdGFzaC50ZXN0aW5nMB4XDTEzMTExNTEwMDI0 +NFoXDTIzMTExMzEwMDI0NFowUzELMAkGA1UEBhMCRlIxFjAUBgNVBAgTDU5vZGUt +TG9nc3Rhc2gxFjAUBgNVBAoTDU5vZGUtTG9nc3Rhc2gxFDASBgNVBAMUC2NsaWVu +dF9uYW1lMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQD2KsJcqXceEK6nslRP +I6BR2JQCRAvODk6gO6OseLsS9zctMef/1xqbUAf9Wep85lrxw5C162L5Q80m+4zu +TllKuL34eNy7K+MOTK1ZXmr2FoSBlbsL10Fm4ESsIprN3rfqEJ/rlbFPhVPGNgZj +Hoy63FgNuI4ANqnIgOVVHJwTIQIDAQABo3sweTAJBgNVHRMEAjAAMCwGCWCGSAGG ++EIBDQQfFh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQU +mCU9fD/wOQIFkDOO75oTyGaOazowHwYDVR0jBBgwFoAU8tSwiGGqbRfrBdSGzkJG +sWrDDxcwDQYJKoZIhvcNAQEFBQADgYEAmMHeHBjZ43V5K05M7ZAK6bNpam+/xEmy +vl22MTMGDhPpQK0DfwuukFB3hF6jgu5FS2t/a0BE4db+UCwUUtle7AM/vyl0Wz3Y 
+j2qlKEhJRFve2R9GI4iYsMpo2egeEOZxZYfcP+WdUV+nhWcaywKOn2hBUzd8HEE9 +VGy03mkhX8M= +-----END CERTIFICATE----- diff --git a/test/ssl/openssl.cnf b/test/ssl/openssl.cnf new file mode 100644 index 00000000..6ab50876 --- /dev/null +++ b/test/ssl/openssl.cnf @@ -0,0 +1,313 @@ +# +# OpenSSL example configuration file. +# This is mostly being used for generation of certificate requests. +# + +# This definition stops the following lines choking if HOME isn't +# defined. +HOME = . +RANDFILE = $ENV::HOME/.rnd + +# Extra OBJECT IDENTIFIER info: +#oid_file = $ENV::HOME/.oid +oid_section = new_oids + +# To use this configuration file with the "-extfile" option of the +# "openssl x509" utility, name here the section containing the +# X.509v3 extensions to use: +# extensions = +# (Alternatively, use a configuration file that has only +# X.509v3 extensions in its main [= default] section.) + +[ new_oids ] + +# We can add new OIDs in here for use by 'ca' and 'req'. +# Add a simple OID like this: +# testoid1=1.2.3.4 +# Or use config file substitution like this: +# testoid2=${testoid1}.5.6 + +#################################################################### +[ ca ] +default_ca = CA_default # The default ca section + +#################################################################### +[ CA_default ] + +dir = . # Where everything is kept +certs = $dir/certs # Where the issued certs are kept +crl_dir = $dir/crl # Where the issued crl are kept +database = $dir/index.txt # database index file. +#unique_subject = no # Set to 'no' to allow creation of + # several ctificates with same subject. +new_certs_dir = $dir/newcerts # default place for new certs. 
+ +certificate = $dir/root-ca.crt # The CA certificate +serial = $dir/serial # The current serial number +crlnumber = $dir/crlnumber # the current crl number + # must be commented out to leave a V1 CRL +crl = $dir/crl.pem # The current CRL +private_key = $dir/root-ca.key # The private key +RANDFILE = $dir/private/.rand # private random number file + +x509_extensions = usr_cert # The extentions to add to the cert + +# Comment out the following two lines for the "traditional" +# (and highly broken) format. +name_opt = ca_default # Subject Name options +cert_opt = ca_default # Certificate field options + +# Extension copying option: use with caution. +# copy_extensions = copy + +# Extensions to add to a CRL. Note: Netscape communicator chokes on V2 CRLs +# so this is commented out by default to leave a V1 CRL. +# crlnumber must also be commented out to leave a V1 CRL. +# crl_extensions = crl_ext + +default_days = 3650 # how long to certify for +default_crl_days= 30 # how long before next CRL +default_md = sha1 # which md to use. +preserve = no # keep passed DN ordering + +# A few difference way of specifying how similar the request should look +# For type CA, the listed attributes must be the same, and the optional +# and supplied fields are just that :-) +policy = policy_match + +# For the CA policy +[ policy_match ] +countryName = match +stateOrProvinceName = match +organizationName = match +organizationalUnitName = optional +commonName = supplied +emailAddress = optional + +# For the 'anything' policy +# At this point in time, you must list all acceptable 'object' +# types. 
+[ policy_anything ] +countryName = optional +stateOrProvinceName = optional +localityName = optional +organizationName = optional +organizationalUnitName = optional +commonName = supplied +emailAddress = optional + +#################################################################### +[ req ] +default_bits = 1024 +default_keyfile = privkey.pem +distinguished_name = req_distinguished_name +attributes = req_attributes +x509_extensions = v3_ca # The extentions to add to the self signed cert + +# Passwords for private keys if not present they will be prompted for +# input_password = secret +# output_password = secret + +# This sets a mask for permitted string types. There are several options. +# default: PrintableString, T61String, BMPString. +# pkix : PrintableString, BMPString. +# utf8only: only UTF8Strings. +# nombstr : PrintableString, T61String (no BMPStrings or UTF8Strings). +# MASK:XXXX a literal mask value. +# WARNING: current versions of Netscape crash on BMPStrings or UTF8Strings +# so use this option with caution! +string_mask = nombstr + +# req_extensions = v3_req # The extensions to add to a certificate request + +[ req_distinguished_name ] +countryName = Country Name (2 letter code) +countryName_default = FR +countryName_min = 2 +countryName_max = 2 + +stateOrProvinceName = State or Province Name (full name) +stateOrProvinceName_default = Node-Logstash + +localityName = Locality Name (eg, city) + +0.organizationName = Organization Name (eg, company) +0.organizationName_default = Node-Logstash + +# we can do this but it is not needed normally :-) +#1.organizationName = Second Organization Name (eg, company) +#1.organizationName_default = + +organizationalUnitName = Organizational Unit Name (eg, section) +#organizationalUnitName_default = + +commonName = Common Name (e.g. 
server FQDN or YOUR name) +commonName_max = 64 + +emailAddress = Email Address +emailAddress_max = 64 + +# SET-ex3 = SET extension number 3 + +[ req_attributes ] +challengePassword = A challenge password +challengePassword_min = 4 +challengePassword_max = 20 + +unstructuredName = An optional company name + +[ usr_cert ] + +# These extensions are added when 'ca' signs a request. + +# This goes against PKIX guidelines but some CAs do it and some software +# requires this to avoid interpreting an end user certificate as a CA. + +basicConstraints=CA:FALSE + +# Here are some examples of the usage of nsCertType. If it is omitted +# the certificate can be used for anything *except* object signing. + +# This is OK for an SSL server. +# nsCertType = server + +# For an object signing certificate this would be used. +# nsCertType = objsign + +# For normal client use this is typical +# nsCertType = client, email + +# and for everything including object signing: +# nsCertType = client, email, objsign + +# This is typical in keyUsage for a client certificate. +# keyUsage = nonRepudiation, digitalSignature, keyEncipherment + +# This will be displayed in Netscape's comment listbox. +nsComment = "OpenSSL Generated Certificate" + +# PKIX recommendations harmless if included in all certificates. +subjectKeyIdentifier=hash +authorityKeyIdentifier=keyid,issuer + +# This stuff is for subjectAltName and issuerAltname. +# Import the email address. +# subjectAltName=email:copy +# An alternative to produce certificates that aren't +# deprecated according to PKIX. 
+# subjectAltName=email:move + +# Copy subject details +# issuerAltName=issuer:copy + +#nsCaRevocationUrl = http://www.domain.dom/ca-crl.pem +#nsBaseUrl +#nsRevocationUrl +#nsRenewalUrl +#nsCaPolicyUrl +#nsSslServerName + +[ v3_req ] + +# Extensions to add to a certificate request + +basicConstraints = CA:FALSE +keyUsage = nonRepudiation, digitalSignature, keyEncipherment + +[ v3_ca ] + + +# Extensions for a typical CA + + +# PKIX recommendation. + +subjectKeyIdentifier=hash + +authorityKeyIdentifier=keyid:always,issuer:always + +# This is what PKIX recommends but some broken software chokes on critical +# extensions. +#basicConstraints = critical,CA:true +# So we do this instead. +basicConstraints = CA:true + +# Key usage: this is typical for a CA certificate. However since it will +# prevent it being used as an test self-signed certificate it is best +# left out by default. +# keyUsage = cRLSign, keyCertSign + +# Some might want this also +# nsCertType = sslCA, emailCA + +# Include email address in subject alt name: another PKIX recommendation +# subjectAltName=email:copy +# Copy issuer details +# issuerAltName=issuer:copy + +# DER hex encoding of an extension: beware experts only! +# obj=DER:02:03 +# Where 'obj' is a standard or added object +# You can even override a supported extension: +# basicConstraints= critical, DER:30:03:01:01:FF + +[ crl_ext ] + +# CRL extensions. +# Only issuerAltName and authorityKeyIdentifier make any sense in a CRL. + +# issuerAltName=issuer:copy +authorityKeyIdentifier=keyid:always,issuer:always + +[ proxy_cert_ext ] +# These extensions should be added when creating a proxy certificate + +# This goes against PKIX guidelines but some CAs do it and some software +# requires this to avoid interpreting an end user certificate as a CA. + +basicConstraints=CA:FALSE + +# Here are some examples of the usage of nsCertType. If it is omitted +# the certificate can be used for anything *except* object signing. 
+ +# This is OK for an SSL server. +# nsCertType = server + +# For an object signing certificate this would be used. +# nsCertType = objsign + +# For normal client use this is typical +# nsCertType = client, email + +# and for everything including object signing: +# nsCertType = client, email, objsign + +# This is typical in keyUsage for a client certificate. +# keyUsage = nonRepudiation, digitalSignature, keyEncipherment + +# This will be displayed in Netscape's comment listbox. +nsComment = "OpenSSL Generated Certificate" + +# PKIX recommendations harmless if included in all certificates. +subjectKeyIdentifier=hash +authorityKeyIdentifier=keyid,issuer:always + +# This stuff is for subjectAltName and issuerAltname. +# Import the email address. +# subjectAltName=email:copy +# An alternative to produce certificates that aren't +# deprecated according to PKIX. +# subjectAltName=email:move + +# Copy subject details +# issuerAltName=issuer:copy + +#nsCaRevocationUrl = http://www.domain.dom/ca-crl.pem +#nsBaseUrl +#nsRevocationUrl +#nsRenewalUrl +#nsCaPolicyUrl +#nsSslServerName + +# This really needs to be in place for it to be a proxy certificate. 
+proxyCertInfo=critical,language:id-ppl-anyLanguage,pathlen:3,policy:foo diff --git a/test/ssl/root-ca.crt b/test/ssl/root-ca.crt new file mode 100644 index 00000000..947f39f8 --- /dev/null +++ b/test/ssl/root-ca.crt @@ -0,0 +1,19 @@ +-----BEGIN CERTIFICATE----- +MIIDBDCCAm2gAwIBAgIJAM+CM0nccWccMA0GCSqGSIb3DQEBBQUAMGAxCzAJBgNV +BAYTAkZSMRYwFAYDVQQIEw1Ob2RlLUxvZ3N0YXNoMRYwFAYDVQQKEw1Ob2RlLUxv +Z3N0YXNoMSEwHwYDVQQDExhjYS5ub2RlLWxvZ3N0YXNoLnRlc3RpbmcwHhcNMTMx +MTE1MDkzODE1WhcNMjMxMTEzMDkzODE1WjBgMQswCQYDVQQGEwJGUjEWMBQGA1UE +CBMNTm9kZS1Mb2dzdGFzaDEWMBQGA1UEChMNTm9kZS1Mb2dzdGFzaDEhMB8GA1UE +AxMYY2Eubm9kZS1sb2dzdGFzaC50ZXN0aW5nMIGfMA0GCSqGSIb3DQEBAQUAA4GN +ADCBiQKBgQCcbbhi0h+U5unCXxXwJEyBAs2uiDefEUONPUITiE0jbnYwOc8uI7Wv +s/CoxMMnRuS7W4PHAI63rtCE3CKPgKIuEx+V4G8TX0UPbjIWFIYGJxCZld5q8CP3 +lL+VXeUBkw0nCUFj8Zgi7hMTVwnxIkwtpCjd3ddGXkkf699NdiXZpQIDAQABo4HF +MIHCMB0GA1UdDgQWBBTy1LCIYaptF+sF1IbOQkaxasMPFzCBkgYDVR0jBIGKMIGH +gBTy1LCIYaptF+sF1IbOQkaxasMPF6FkpGIwYDELMAkGA1UEBhMCRlIxFjAUBgNV +BAgTDU5vZGUtTG9nc3Rhc2gxFjAUBgNVBAoTDU5vZGUtTG9nc3Rhc2gxITAfBgNV +BAMTGGNhLm5vZGUtbG9nc3Rhc2gudGVzdGluZ4IJAM+CM0nccWccMAwGA1UdEwQF +MAMBAf8wDQYJKoZIhvcNAQEFBQADgYEAkMeaIRulC6En172v5FypoHoZzqkMdVZN +n3vOYcMU+k9Se9ywGXE1/ofHJx44jYdFyKyEY1mw9YifNaeBiU3n+AgCx/YtNljE +GJcTvhzkiV5qmU122G+gfBsNoEIufwdhhfK0BCCcYnFZs25uJGlYoXHnPDYzLwuN +xy23Fck2wPY= +-----END CERTIFICATE----- diff --git a/test/ssl/root-ca.key b/test/ssl/root-ca.key new file mode 100644 index 00000000..42f2c586 --- /dev/null +++ b/test/ssl/root-ca.key @@ -0,0 +1,15 @@ +-----BEGIN RSA PRIVATE KEY----- +MIICXAIBAAKBgQCcbbhi0h+U5unCXxXwJEyBAs2uiDefEUONPUITiE0jbnYwOc8u +I7Wvs/CoxMMnRuS7W4PHAI63rtCE3CKPgKIuEx+V4G8TX0UPbjIWFIYGJxCZld5q +8CP3lL+VXeUBkw0nCUFj8Zgi7hMTVwnxIkwtpCjd3ddGXkkf699NdiXZpQIDAQAB +AoGBAIftf6+/Ttog3TedUH7fy+q++OOUUmDSq5RriqNWaVFEh0bYtQMu3I3VGmZM +tBfJLrDvkM632Uwbh+4rK4zvBf0PZavZdKoOwVzAO/RBcet3P+rzutBSyyDVbSzm +H5TuPV65/s1ciNS2eWRzzY/IEDsHnFU2O/GnxGHNgWwIkboBAkEA0IMOACPuetLF 
+2pppFx177MmPBYjSNamrdNxYTJ7ylzK3dRfoYqZjlTW5VkcE5FB4n07sHW4jTd/6 +cwe2XCspBQJBAMAOCD+qBAZvCPZliqhBzdmhdZuwzC8I+xSM5ljD5WKEKDtT1N/i +6EH7ow5BnvyxaZHvhKpXqN1H2xxeIzP0UCECQB7YdFbC+gGDcq9/DjLUMsP5oUXB +Us+LdQjJm+phzSNe5lLVIZSOZsbo9qzzvp6D5QQ0KavE4kYf7y3bIzgJTn0CQHqV +ulAgjdcpcjgLkhhZgDHkK/RHQdzXx6NTWAfaagSaxC8yEBMjewn6uxXzbq5t8yty +DWTI9QDgx4twztlvnOECQBte9eL6RaGMuBKTAnjjct3/WMUSse+0tvPpUZYVRI1j +sVhet/NotC16xt4Gx4Y4sPihO+SEnY99kOWc6t5vJpo= +-----END RSA PRIVATE KEY----- diff --git a/test/ssl/serial b/test/ssl/serial new file mode 100644 index 00000000..9e22bcb8 --- /dev/null +++ b/test/ssl/serial @@ -0,0 +1 @@ +02 diff --git a/test/ssl/serial.old b/test/ssl/serial.old new file mode 100644 index 00000000..8a0f05e1 --- /dev/null +++ b/test/ssl/serial.old @@ -0,0 +1 @@ +01 diff --git a/test/ssl/server.crt b/test/ssl/server.crt new file mode 100644 index 00000000..5336bef4 --- /dev/null +++ b/test/ssl/server.crt @@ -0,0 +1,60 @@ +Certificate: + Data: + Version: 3 (0x2) + Serial Number: 0 (0x0) + Signature Algorithm: sha1WithRSAEncryption + Issuer: C=FR, ST=Node-Logstash, O=Node-Logstash, CN=ca.node-logstash.testing + Validity + Not Before: Nov 15 09:59:45 2013 GMT + Not After : Nov 13 09:59:45 2023 GMT + Subject: C=FR, ST=Node-Logstash, O=Node-Logstash, CN=localhost + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + RSA Public Key: (1024 bit) + Modulus (1024 bit): + 00:f4:92:9f:a3:53:c7:9c:0c:c6:a2:26:93:53:64: + a6:ee:8d:a8:17:3a:de:ce:0e:a5:b6:58:d6:65:b2: + 27:12:ef:2c:5d:d1:21:0b:00:5c:ff:db:69:d6:ca: + 41:14:45:49:d5:7f:26:6e:c7:f3:ff:61:d9:bf:ff: + f1:6c:79:6f:11:02:17:25:9c:f4:f5:8b:07:43:89: + 03:3d:75:1e:e4:95:5d:6c:85:72:db:6a:fc:c4:d6: + 13:22:7d:19:80:d0:1f:63:9a:de:58:9c:e6:b8:78: + 52:a0:36:6a:76:d0:0f:1d:2c:8e:e9:1f:bb:9d:02: + c8:d2:4e:1c:84:d4:bc:9d:21 + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Basic Constraints: + CA:FALSE + Netscape Comment: + OpenSSL Generated Certificate + X509v3 Subject Key Identifier: + 
A6:82:98:CD:92:AB:EB:A7:07:76:CF:4D:DB:19:FB:63:A6:9C:94:EA + X509v3 Authority Key Identifier: + keyid:F2:D4:B0:88:61:AA:6D:17:EB:05:D4:86:CE:42:46:B1:6A:C3:0F:17 + + Signature Algorithm: sha1WithRSAEncryption + 06:14:98:b1:50:63:7a:b3:ac:d6:f8:55:16:e8:6a:3d:99:1d: + db:90:83:ef:1b:32:bc:b8:41:8a:69:34:97:93:d3:cf:26:4d: + ac:a6:5e:1d:b4:5e:33:72:8c:a2:c4:5e:2f:35:80:61:6c:82: + 97:c4:41:37:fe:1b:a5:c9:0a:07:88:40:7a:7e:72:1d:64:97: + 17:91:8c:ca:97:a1:bb:20:0c:f9:8f:92:c0:7b:31:d4:58:95: + e1:ec:15:0b:d2:91:be:73:2b:23:89:4d:4a:4c:15:20:d4:83: + 5f:df:cf:e8:eb:8f:a1:4b:6a:5a:ef:a8:31:90:ee:61:ba:21: + 5c:f2 +-----BEGIN CERTIFICATE----- +MIICojCCAgugAwIBAgIBADANBgkqhkiG9w0BAQUFADBgMQswCQYDVQQGEwJGUjEW +MBQGA1UECBMNTm9kZS1Mb2dzdGFzaDEWMBQGA1UEChMNTm9kZS1Mb2dzdGFzaDEh +MB8GA1UEAxMYY2Eubm9kZS1sb2dzdGFzaC50ZXN0aW5nMB4XDTEzMTExNTA5NTk0 +NVoXDTIzMTExMzA5NTk0NVowUTELMAkGA1UEBhMCRlIxFjAUBgNVBAgTDU5vZGUt +TG9nc3Rhc2gxFjAUBgNVBAoTDU5vZGUtTG9nc3Rhc2gxEjAQBgNVBAMTCWxvY2Fs +aG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA9JKfo1PHnAzGoiaTU2Sm +7o2oFzrezg6ltljWZbInEu8sXdEhCwBc/9tp1spBFEVJ1X8mbsfz/2HZv//xbHlv +EQIXJZz09YsHQ4kDPXUe5JVdbIVy22r8xNYTIn0ZgNAfY5reWJzmuHhSoDZqdtAP +HSyO6R+7nQLI0k4chNS8nSECAwEAAaN7MHkwCQYDVR0TBAIwADAsBglghkgBhvhC +AQ0EHxYdT3BlblNTTCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFKaC +mM2Sq+unB3bPTdsZ+2OmnJTqMB8GA1UdIwQYMBaAFPLUsIhhqm0X6wXUhs5CRrFq +ww8XMA0GCSqGSIb3DQEBBQUAA4GBAAYUmLFQY3qzrNb4VRboaj2ZHduQg+8bMry4 +QYppNJeT088mTaymXh20XjNyjKLEXi81gGFsgpfEQTf+G6XJCgeIQHp+ch1klxeR +jMqXobsgDPmPksB7MdRYleHsFQvSkb5zKyOJTUpMFSDUg1/fz+jrj6FLalrvqDGQ +7mG6IVzy +-----END CERTIFICATE----- diff --git a/test/ssl/server.csr b/test/ssl/server.csr new file mode 100644 index 00000000..f89f9a6c --- /dev/null +++ b/test/ssl/server.csr @@ -0,0 +1,11 @@ +-----BEGIN CERTIFICATE REQUEST----- +MIIBkDCB+gIBADBRMQswCQYDVQQGEwJGUjEWMBQGA1UECBMNTm9kZS1Mb2dzdGFz +aDEWMBQGA1UEChMNTm9kZS1Mb2dzdGFzaDESMBAGA1UEAxMJbG9jYWxob3N0MIGf +MA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQD0kp+jU8ecDMaiJpNTZKbujagXOt7O 
+DqW2WNZlsicS7yxd0SELAFz/22nWykEURUnVfyZux/P/Ydm///FseW8RAhclnPT1 +iwdDiQM9dR7klV1shXLbavzE1hMifRmA0B9jmt5YnOa4eFKgNmp20A8dLI7pH7ud +AsjSThyE1LydIQIDAQABoAAwDQYJKoZIhvcNAQEFBQADgYEAY7vymlNIMR+URVY1 +jaFJmrG8VcLYAnDgj7XziaCFEd36qrvMGc4WsifrzFyJu6X22H6GRhshTHr4oDwb +5LYQY0bm+YgkcpJzEFtbD3CqsLis3hyYkbBc8Sj45xJtwyrvt/XqlNQbQDpS5kGy +9sEI1oBwHDcKU9prTj98UPNUZiw= +-----END CERTIFICATE REQUEST----- diff --git a/test/ssl/server.key b/test/ssl/server.key new file mode 100644 index 00000000..7d821d5a --- /dev/null +++ b/test/ssl/server.key @@ -0,0 +1,15 @@ +-----BEGIN RSA PRIVATE KEY----- +MIICXgIBAAKBgQD0kp+jU8ecDMaiJpNTZKbujagXOt7ODqW2WNZlsicS7yxd0SEL +AFz/22nWykEURUnVfyZux/P/Ydm///FseW8RAhclnPT1iwdDiQM9dR7klV1shXLb +avzE1hMifRmA0B9jmt5YnOa4eFKgNmp20A8dLI7pH7udAsjSThyE1LydIQIDAQAB +AoGBAO6uISkt3AHI3hjk+KZL9jWc/UweYc5KsSIxtJpusH9j9rJgI5Uu95Oit7ms +jLKRpCq8+e7nKJI1FmKwDxZDtETGgf/qDUbpGUMKOexL+jsloQ6IAObzeB7F3ycw +S7+ve+gbt70uRdp1Rch+rkQUrKsUVUjDkDFFhVjcGiGyXDjZAkEA/tnoU3syFNlU +rUmLpKhzhQbGBwZ611wGO6brU+v9PzTn/xtwpY6upytrh9kTm55bycNx3dq4/Vxi +6ow3odeGfwJBAPWs2t7G39CS9XLcVtfPMww0+sjwQo40PYjX7g8f/DE7FNSbdexd +U69an9zFbTc1D6xckpufHb+LEdd72VQsTF8CQQC0KmLXGaHnIVl+hB7HkhrQsWAv +uUIAm9sAMtns40mOdJ2KOODi+cK5BbYoPKti+YnUUmpeN/FNlBGN1QDDVnWLAkEA +tmumjVsQn9gvhx1TIqYI2pyQRxoX4W2q72yeqP5wDcI4AnJ5Cox18jQRliXGpgKx +Y6PNfN6WgT7rfZzJ0DWgcwJAWFss4W2hwHQNg0pVtE1JzxQ3wwcrrxZfHnqxr37x +PmxrLItohALT0H2Hww/fTKfwry6D/e9377K0RuA1HiAmlQ== +-----END RSA PRIVATE KEY----- diff --git a/test/test_100_file_loader.js b/test/test_100_file_loader.js new file mode 100644 index 00000000..36cc07f2 --- /dev/null +++ b/test/test_100_file_loader.js @@ -0,0 +1,113 @@ +/* jshint unused:false */ +var vows = require('vows'), + assert = require('assert'), + file_loader = require('lib/file_loader'); + +vows.describe('File loader').addBatch({ + 'simple': { + topic: function() { + file_loader.loadFile('test/file_loader_test/simple', this.callback); + }, + + check: function(err, result) { + assert.ifError(err); + 
assert.deepEqual(result, ['input://stdin://']); + } + }, + + 'multiple': { + topic: function() { + file_loader.loadFile('test/file_loader_test/multiple', this.callback); + }, + + check: function(err, result) { + assert.ifError(err); + assert.deepEqual(result, ['input://stdin://a', 'input://stdin://b', 'input://stdin://c']); + } + }, + + 'comment': { + topic: function() { + file_loader.loadFile('test/file_loader_test/comment', this.callback); + }, + + check: function(err, result) { + assert.ifError(err); + assert.deepEqual(result, ['output://stdout://']); + } + }, + + 'file not found': { + topic: function() { + file_loader.loadFile('test/file_loader_test/comment2', this.callback); + }, + check: function(err, result) { + assert.isDefined(err); + assert.match(err.toString(), /ENOENT/); + } + }, + + 'loadDirectory': { + topic: function() { + file_loader.loadDirectory('test/file_loader_test', this.callback); + }, + + check: function(err, result) { + assert.ifError(err); + assert.deepEqual(result, ['output://stdout://', 'input://stdin://a', 'input://stdin://b', 'input://stdin://c', 'input://stdin://']); + } + }, + + 'directory not found': { + topic: function() { + file_loader.loadDirectory('test/file_loader_test2', this.callback); + }, + + check: function(err, result) { + assert.isDefined(err); + assert.match(err.toString(), /ENOENT/); + } + }, + + 'empty standard': { + topic: function() { + file_loader.loadFile('test/file_loader_test/empty', false, this.callback); + }, + check: function(err, result) { + assert.ifError(err); + assert.deepEqual(result, []); + } + }, + + 'empty': { + topic: function() { + file_loader.loadFile('test/file_loader_test/empty', true, this.callback); + }, + check: function(err, result) { + assert.ifError(err); + assert.deepEqual(result, []); + } + }, + + 'empty spaces': { + topic: function() { + file_loader.loadFile('test/file_loader_test/empty_spaces', true, this.callback); + }, + check: function(err, result) { + assert.ifError(err); + 
assert.deepEqual(result, []); + } + }, + + 'only one cr': { + topic: function() { + file_loader.loadFile('test/file_loader_test/only_one_cr', true, this.callback); + }, + check: function(err, result) { + assert.ifError(err); + assert.deepEqual(result, []); + } + }, + + +}).export(module); diff --git a/test/test_101_url_parser.js b/test/test_101_url_parser.js new file mode 100644 index 00000000..284ef4e3 --- /dev/null +++ b/test/test_101_url_parser.js @@ -0,0 +1,128 @@ +var vows = require('vows'), + assert = require('assert'), + url_parser = require('lib/url_parser'); + +function testExtractProtocol(url, target) { + return { + topic: function() { + this.callback(null, url_parser.extractProtocol(url)); + }, + + check: function(result) { + assert.deepEqual(target, result); + } + }; +} + +function testProcessUrlContent(url, target) { + return { + topic: function() { + this.callback(null, url_parser.processUrlContent(url)); + }, + + check: function(result) { + assert.deepEqual(target, result); + } + }; +} + +function testExtractPortNumber(url, target) { + return { + topic: function() { + this.callback(null, url_parser.extractPortNumber(url)); + }, + + check: function(result) { + assert.deepEqual(target, result); + } + }; +} + +vows.describe('Url parser').addBatch({ + 'extract protocol simple': testExtractProtocol('http://www.google.com', { + protocol: 'http', + next: 'www.google.com' + }), + 'extract protocol full path': testExtractProtocol('file:///tmp/toto.txt', { + protocol: 'file', + next: '/tmp/toto.txt' + }), + 'extract protocol zeromq': testExtractProtocol('zeromq://tcp://*:5567', { + protocol: 'zeromq', + next: 'tcp://*:5567' + }), + 'extract protocol qs': testExtractProtocol('file:///toto.txt?type=mon_type&qs=rgr%20abc', { + protocol: 'file', + next: '/toto.txt?type=mon_type&qs=rgr%20abc' + }), + 'extract protocol failed': testExtractProtocol('toto', undefined), + 'extract protocol jsut protocol': testExtractProtocol('stdin://', { + protocol: 'stdin', + next: 
'' + }), + 'process url content empty': testProcessUrlContent('', undefined), + 'process url content simple': testProcessUrlContent('/tmp/toto.txt', { + host: '/tmp/toto.txt', + params: {} + }), + 'process url content qs': testProcessUrlContent('/tmp/toto.txt?type=t', { + host: '/tmp/toto.txt', + params: { + type: 't' + } + }), + 'process url content qs special chars': testProcessUrlContent('/tmp/toto.txt?type=t&z=%20t', { + host: '/tmp/toto.txt', + params: { + type: 't', + z: ' t' + } + }), + 'process url content no host': testProcessUrlContent('?type=t', { + host: '', + params: { + type: 't' + } + }), + 'process url content no host with others params': testProcessUrlContent('?type=t&z=%20t', { + host: '', + params: { + type: 't', + z: ' t' + } + }), + 'process url content special chars in host': testProcessUrlContent('/tmp/toto%202.txt?type=t', { + host: '/tmp/toto 2.txt', + params: { + type: 't' + } + }), + 'process url content wildcard in host': testProcessUrlContent('/tmp/to*to*.txt?type=t', { + host: '/tmp/to*to*.txt', + params: { + type: 't' + } + }), + 'process url content question mark in host': testProcessUrlContent('/tmp/to%3Fto%3F.txt?type=t', { + host: '/tmp/to?to?.txt', + params: { + type: 't' + } + }), + 'process url content with plus': testProcessUrlContent('?type=t+3', { + host: '', + params: { + type: 't+3' + } + }), + 'extract port number hostonly': testExtractPortNumber('localhost', undefined), + 'extract port number ip': testExtractPortNumber('0.0.0.0:80', { + host: '0.0.0.0', + port: 80 + }), + 'extract port number host': testExtractPortNumber('www.google.com:8080', { + host: 'www.google.com', + port: 8080 + }), + 'extract port number wrong port': testExtractPortNumber('www.google.com:abcd', undefined), +}).export(module); diff --git a/test/test_102_error_buffer.js b/test/test_102_error_buffer.js new file mode 100644 index 00000000..95d732e7 --- /dev/null +++ b/test/test_102_error_buffer.js @@ -0,0 +1,41 @@ +var vows = require('vows'), + 
assert = require('assert'), + events = require('events'), + error_buffer = require('lib/error_buffer'); + +vows.describe('Error buffer').addBatch({ + 'standard check': { + topic: function() { + var callback = this.callback; + var errors = []; + var e = new events.EventEmitter(); + var b = error_buffer.create('my name', 250, e); + e.on('error', function(err) { + errors.push(err); + }); + var count = 0; + var i = setInterval(function() { + b.emit('error', 'toto'); + count += 1; + if (count === 500) { + clearInterval(i); + setTimeout(function() { + b.emit('ok', 'toto'); + b.emit('ok', 'toto'); + b.emit('ok', 'toto'); + setTimeout(function() { + callback(undefined, errors); + }, 200); + }, 200); + } + }, 1); + }, + check: function(errors) { + assert.equal(errors[0], 'my name start failing: toto'); + assert.equal(errors[errors.length - 1], 'my name is back to normal.'); + for(var i = 1; i < errors.length - 2; i ++) { + assert.equal(errors[i], 'my name still failing.'); + } + } + } +}).export(module); diff --git a/test/test_103_file_filter.js b/test/test_103_file_filter.js new file mode 100644 index 00000000..543d0f0e --- /dev/null +++ b/test/test_103_file_filter.js @@ -0,0 +1,58 @@ +var vows = require('vows'), + assert = require('assert'), + file_filter = require('../lib/lib/file_filter'); + +vows.describe('File filter').addBatch({ + 'simple': { + check_1: function() { + var f = file_filter.create('*'); + assert.equal(true, f.filter('toto')); + assert.equal(true, f.filter('toto.log')); + }, + + check_2: function() { + var f = file_filter.create('toto.log'); + assert.equal(false, f.filter('toto')); + assert.equal(true, f.filter('toto.log')); + assert.equal(false, f.filter('toto1log')); + assert.equal(false, f.filter('toto.log.2')); + assert.equal(false, f.filter('atoto.log')); + }, + + check_3: function() { + var f = file_filter.create('*.log'); + assert.equal(false, f.filter('toto')); + assert.equal(true, f.filter('toto.log')); + assert.equal(true, 
f.filter('toto2.log')); + assert.equal(false, f.filter('toto2.log.1')); + assert.equal(false, f.filter('toto25log')); + }, + + check_4: function() { + var f = file_filter.create('to*to*.log'); + assert.equal(false, f.filter('toto')); + assert.equal(true, f.filter('toto.log')); + assert.equal(true, f.filter('toto2.log')); + assert.equal(true, f.filter('to256775437Uto2.log')); + assert.equal(false, f.filter('t2oto.log')); + }, + + check_5: function() { + var f = file_filter.create('toto?.log'); + assert.equal(false, f.filter('toto')); + assert.equal(false, f.filter('toto.log')); + assert.equal(true, f.filter('toto2.log')); + assert.equal(false, f.filter('toto34.log')); + assert.equal(false, f.filter('toto2log')); + }, + + check_6: function() { + var f = file_filter.create('to?to?.log'); + assert.equal(false, f.filter('toto')); + assert.equal(false, f.filter('toto.log')); + assert.equal(true, f.filter('to3to2.log')); + assert.equal(false, f.filter('taoto34.log')); + assert.equal(false, f.filter('to3to2log')); + }, + } +}).export(module); diff --git a/test/test_104_logstash_parser.js b/test/test_104_logstash_parser.js new file mode 100644 index 00000000..1f5060f3 --- /dev/null +++ b/test/test_104_logstash_parser.js @@ -0,0 +1,579 @@ +var vows = require('vows'), + assert = require('assert'), + fs = require('fs'), + querystring = require('querystring'), + config_mapper = require('lib/config_mapper'), + logstash_config = require('logstash_config'); + +function build_cond(x) { + return querystring.escape(JSON.stringify(x)); +} + +function check(s, r1, r2) { + return { + topic: function() { + return logstash_config.parse(s); + }, + check: function(result) { + assert.deepEqual(result, r1); + if (r2) { + assert.deepEqual(config_mapper.map(r1), r2); + } + } + }; +} + +function check_file(f, r1, r2) { + return check(fs.readFileSync(f).toString(), r1, r2); +} + +vows.describe('Logstash parser config').addBatch({ + 'simple': check('input {stdin {}}', { + input: [{ + stdin: {} + 
}] + }, ['input://stdin://']), + 'simple with comment': check('# this is a comment\ninput {stdin {}}', { + input: [{ + stdin: {} + }] + }, ['input://stdin://']), + 'simple multi line': check('input {\nstdin {\n\n}}', { + input: [{ + stdin: {} + }] + }, ['input://stdin://']), + 'simple multi line with comment': check('# this is a comment\ninput { #this is a comment\nstdin {}}', { + input: [{ + stdin: {} + }] + }, ['input://stdin://']), + 'two lines': check('output {\nelasticsearch {}\nstdout {}\n}', { + output: [{ + elasticsearch: {} + }, { + stdout: {} + }] + }, ['output://elasticsearch://', 'output://stdout://']), + 'input and output': check('input {stdin {}}\noutput { stdout {}}', { + input: [{ + stdin: {} + }], + output: [{ + stdout: {} + }] + }, ['input://stdin://', 'output://stdout://']), + 'plugin config id value': check('output {\nelasticsearch { host => localhost }\nstdout { }\n}', { + output: [{ + elasticsearch: { + host: 'localhost' + } + }, { + stdout: {} + }] + }, ['output://elasticsearch://?host=localhost', 'output://stdout://']), + 'plugin config id string': check('output {\nelasticsearch { host => "localhost" }\nstdout { }\n}', { + output: [{ + elasticsearch: { + host: 'localhost' + } + }, { + stdout: {} + }] + }, ['output://elasticsearch://?host=localhost', 'output://stdout://']), + 'plugin config id ip': check('output {\nelasticsearch { host => 127.0.0.1 }\nstdout { }\n}', { + output: [{ + elasticsearch: { + host: '127.0.0.1' + } + }, { + stdout: {} + }] + }, ['output://elasticsearch://?host=127.0.0.1', 'output://stdout://']), + 'plugin config id dot': check('output {\nelasticsearch { file => output.txt }\nstdout { }\n}', { + output: [{ + elasticsearch: { + file: 'output.txt' + } + }, { + stdout: {} + }] + }, ['output://elasticsearch://?file=output.txt', 'output://stdout://']), + 'plugin config regex': check('output {\nregex { regex => /localhost/\n fields => [toto, "tata"]}\nstdout { }\n}', { + output: [{ + regex: { + regex: 'localhost', + fields: 
['toto', 'tata'] + } + }, { + stdout: {} + }] + }, ['output://regex://?regex=localhost&fields=toto&fields=tata', 'output://stdout://']), + 'plugin config id string sinle quote': check('output {\nelasticsearch { host => \'localhost\' }\nstdout { }\n}', { + output: [{ + elasticsearch: { + host: 'localhost' + } + }, { + stdout: {} + }] + }, ['output://elasticsearch://?host=localhost', 'output://stdout://']), + 'plugin config id string with "': check_file('test/parser/special_chars_quotes', { + output: [{ + elasticsearch: { + host: '"localhost', + host2: '\'localhost', + } + }, { + stdout: {} + }] + }, ['output://elasticsearch://?host=%22localhost&host2=\'localhost', 'output://stdout://']), + 'plugin config id string with \' single quote': check_file('test/parser/special_chars_quotes_single_quotes', { + output: [{ + elasticsearch: { + host: '\'localhost', + host2 : '"localhost' + } + }, { + stdout: {} + }] + }, ['output://elasticsearch://?host=\'localhost&host2=%22localhost', 'output://stdout://']), + 'plugin config id string with \\n': check_file('test/parser/special_chars_new_line', { + output: [{ + elasticsearch: { + host: '\nlocalhost' + } + }, { + stdout: {} + }] + }), + 'plugin config id string with utf8': check_file('test/parser/special_chars_utf8', { + output: [{ + elasticsearch: { + host: 'éàlocalhost' + } + }, { + stdout: {} + }] + }, ['output://elasticsearch://?host=%C3%A9%C3%A0localhost', 'output://stdout://']), + 'plugin config id string with space': check_file('test/parser/special_chars_space', { + output: [{ + elasticsearch: { + host: 'local host' + } + }, { + stdout: {} + }] + }, ['output://elasticsearch://?host=local%20host', 'output://stdout://']), + 'plugin config id string with strange chars': check('output {\nelasticsearch { host => "[]\'!()localhost" }\nstdout { }\n}', { + output: [{ + elasticsearch: { + host: '[]\'!()localhost' + } + }, { + stdout: {} + }] + }, ['output://elasticsearch://?host=%5B%5D\'!()localhost', 'output://stdout://']), + 
'plugin config id string with empty string': check('output {\nelasticsearch { host => "" }\nstdout { }\n}', { + output: [{ + elasticsearch: { + host: '' + } + }, { + stdout: {} + }] + }, ['output://elasticsearch://?host=', 'output://stdout://']), + 'plugin config id number': check('output {\nelasticsearch { host => 12 }\nstdout { host => 3.4 }\n}', { + output: [{ + elasticsearch: { + host: 12 + } + }, { + stdout: { + host: 3.4 + } + }] + }, ['output://elasticsearch://?host=12', 'output://stdout://?host=3.4']), + 'plugin config bool': check('output {\nelasticsearch { host => true }\nstdout { host => false}\n}', { + output: [{ + elasticsearch: { + host: true + } + }, { + stdout: { + host: false + } + }] + }, ['output://elasticsearch://?host=true', 'output://stdout://?host=false']), + 'plugin config array': check('output {\nelasticsearch { match => [ "timestamp" , "dd/MMM/yyyy:HH:mm:ss Z" ] }\nstdout {}\n}', { + output: [{ + elasticsearch: { + match: ['timestamp', 'dd/MMM/yyyy:HH:mm:ss Z' ] + } + }, { + stdout: {} + }] + }, ['output://elasticsearch://?match=timestamp&match=dd%2FMMM%2Fyyyy%3AHH%3Amm%3Ass%20Z', 'output://stdout://']), + 'plugin multiple params, same line': check('output {\nelasticsearch { host => localhost, port => 354 }\nstdout {}\n}', { + output: [{ + elasticsearch: { + host: 'localhost', + port: 354 + } + }, { + stdout: {} + }] + }, ['output://elasticsearch://?host=localhost&port=354', 'output://stdout://']), + 'plugin multiple params, multi lines': check('output {\nelasticsearch { host => localhost\nport => 354 }\nstdout {}\n}', { + output: [{ + elasticsearch: { + host: 'localhost', + port: 354 + } + }, { + stdout: {} + }] + }, ['output://elasticsearch://?host=localhost&port=354', 'output://stdout://']), + 'special chars in ids': check('input {\ncompute_field {path => input.txt}\n}', { + input: [{ + compute_field: { + path: 'input.txt' + } + }] + }, ['input://compute_field://?path=input.txt']), + 'conditional plugin': check('filter {\nif [action] == 
"login" {\nmutate { remove => "secret" }\n}\n}', { + filter: [{ + __if__: { + ifs: [{ + cond: { + op: '==', + left: {field: 'action'}, + right: {value: 'login'} + }, + then: [{ + mutate: { + remove: 'secret' + } + }] + }] + } + }] + }, ['filter://mutate://?remove=secret&__dynamic_eval__=' + build_cond({ + false_clauses: [], + true_clause: { + op: '==', + left: {field: 'action'}, + right: {value: 'login'} + } + })]), + 'conditional plugin multiple conditions, two plugins in then': check('filter {\nif [action] == "login" and 23 != [action] {\nmutate { remove => "secret" }\nmutate { remove => "secret2" }}\n}', { + filter: [{ + __if__: { + ifs: [{ + cond: { + op: 'and', + left: { + op: '==', + left: {field: 'action'}, + right: {value: 'login'} + }, + right: { + op: '!=', + left:{value: 23}, + right:{field: 'action'} + } + }, + then: [{ + mutate: { + remove: 'secret' + } + }, { + mutate: { + remove: 'secret2' + } + }] + }] + } + }] + }, ['filter://mutate://?remove=secret&__dynamic_eval__=' + build_cond({ + false_clauses: [], + true_clause: { + op: 'and', + left: { + op: '==', + left: {field: 'action'}, + right: {value: 'login'} + }, + right: { + op: '!=', + left:{value: 23}, + right:{field: 'action'} + } + } + }), 'filter://mutate://?remove=secret2&__dynamic_eval__=' + build_cond({ + false_clauses: [], + true_clause: { + op: 'and', + left: { + op: '==', + left: {field: 'action'}, + right: {value: 'login'} + }, + right: { + op: '!=', + left:{value: 23}, + right:{field: 'action'} + } + } + })]), + 'conditional plugin regexp and else': check('filter {\nif [action] =~ /\\/\\dlogin/ {\nmutate { remove => "secret" }}\nelse{ mutate { remove => "secret2"}}\n}', { + filter: [{ + __if__: { + ifs: [{ + cond: { + op: '=~', + left: {field: 'action'}, + right: {value: '/\\dlogin'} + }, + then: [{ + mutate: { + remove: 'secret' + } + }] + }], + else: [{ + mutate: { + remove: 'secret2' + } + }] + } + }] + }), + 'conditional plugin regexp, else, else if': check('filter {\nif [action] =~ 
/login/ {\nmutate { remove => "secret" }}\nelse if [action] == "logout" { mutate { remove => "secret3"}}\nelse{ mutate { remove => "secret2"}}\n}', { + filter: [{ + __if__: { + ifs: [{ + cond: { + op: '=~', + left: {field: 'action'}, + right: {value: 'login'} + }, + then: [{ + mutate: { + remove: 'secret' + } + }] + }, { + cond: { + op: '==', + left: {field: 'action'}, + right: {value: 'logout'} + }, + then: [{ + mutate: { + remove: 'secret3' + } + }] + }], + else: [{ + mutate: { + remove: 'secret2' + } + }] + } + }] + }, ['filter://mutate://?remove=secret&__dynamic_eval__=' + build_cond({ + false_clauses: [], + true_clause: { + op: '=~', + left: {field: 'action'}, + right: {value: 'login'} + } + }), 'filter://mutate://?remove=secret3&__dynamic_eval__=' + build_cond({ + false_clauses: [{ + op: '=~', + left: {field: 'action'}, + right: {value: 'login'} + }], + true_clause: { + op: '==', + left: {field: 'action'}, + right: {value: 'logout'} + } + }), 'filter://mutate://?remove=secret2&__dynamic_eval__=' + build_cond({ + false_clauses: [{ + op: '=~', + left: {field: 'action'}, + right: {value: 'login'} + }, { + op: '==', + left: {field: 'action'}, + right: {value: 'logout'} + }], + })]), + 'conditional plugin not in': check('filter {\nif [action] not in ["login", "logout", "reset"] {\nmutate { remove => "secret" }\n}\n}', { + filter: [{ + __if__: { + ifs: [{ + cond: { + op: 'not in', + left: {field: 'action'}, + right: {value: ['login', 'logout', 'reset']} + }, + then: [{ + mutate: { + remove: 'secret' + } + }] + }] + } + }] + }), + 'conditional plugin with parenthesis': check('filter {\nif ([action] == "login") {\nmutate { remove => "secret" }\n}\n}', { + filter: [{ + __if__: { + ifs: [{ + cond: { + op: '==', + left: {field: 'action'}, + right: {value: 'login'} + }, + then: [{ + mutate: { + remove: 'secret' + } + }] + }] + } + }] + }), + 'conditional plugin not with parenthesis': check('filter {\nif ! 
[action] in ["login"] {\nmutate { remove => "secret" }\n}\n}', { + filter: [{ + __if__: { + ifs: [{ + cond: { + op: '!', + left: { + op: 'in', + left: {field: 'action'}, + right: {value: ['login']} + } + }, + then: [{ + mutate: { + remove: 'secret' + } + }] + }] + } + }] + }), + 'big condition': check('filter {\nif ! [action] in ["login"] or ("a" == "b" xor ("c" == "d" or [action] == "login")) {\nmutate { remove => "secret" }\n}\n}', { + filter: [{ + __if__: { + ifs: [{ + cond: { + op: 'or', + left: { + op: '!', + left: { + op: 'in', + left: {field: 'action'}, + right: {value: ['login']}, + } + }, + right: { + op: 'xor', + left: { + op: '==', + left: { value: 'a' }, + right: { value: 'b' }, + }, + right: { + op: 'or', + left: { + op: '==', + left: { value: 'c' }, + right: { value: 'd' }, + }, + right: { + op: '==', + left: { field: 'action' }, + right: { value: 'login' }, + } + } + } + }, + then: [{ + mutate: { + remove: 'secret' + } + }] + }] + } + }] + }), + 'hash': check('filter {grok { match => {\'message\' => \'toto\'}}}', { + filter: [{ + grok: { + match: { + 'message': 'toto' + } + } + }] + }), + 'fields and tags 1': check('input { stdin { tags => ["b", "c"]\nadd_fields => {\nz => toto}}}', { + input: [{ + stdin: { + tags: ['b', 'c'], + add_fields: { + 'z': 'toto' + } + } + }] + }), + 'fields and tags 2': check('input { stdin { tags => ["b", "c"]\nadd_fields => {\nz => toto, z2 => "toto2"}}}', { + input: [{ + stdin: { + tags: ['b', 'c'], + add_fields: { + 'z': 'toto', + 'z2': 'toto2', + } + } + }] + }), + 'multi if': check('filter { if "GROKED" not in [tags] { drop{} } \n if "GROKED2" not in [tags] { drop{} } }', { + filter: [{ + __if__: { + ifs: [{ + cond: { + op: 'not in', + left: { value: 'GROKED' }, + right: { field: 'tags' } + }, + then: [{ + drop: {} + }] + }] + } + }, + { + __if__: { + ifs: [{ + cond: { + op: 'not in', + left: { value: 'GROKED2' }, + right: { field: 'tags' } + }, + then: [{ + drop: {} + }] + }] + } + }] + }, [ + 
'filter://drop://?__dynamic_eval__=%7B%22false_clauses%22%3A%5B%5D%2C%22true_clause%22%3A%7B%22op%22%3A%22not%20in%22%2C%22left%22%3A%7B%22value%22%3A%22GROKED%22%7D%2C%22right%22%3A%7B%22field%22%3A%22tags%22%7D%7D%7D', + 'filter://drop://?__dynamic_eval__=%7B%22false_clauses%22%3A%5B%5D%2C%22true_clause%22%3A%7B%22op%22%3A%22not%20in%22%2C%22left%22%3A%7B%22value%22%3A%22GROKED2%22%7D%2C%22right%22%3A%7B%22field%22%3A%22tags%22%7D%7D%7D', + ]), +}).export(module); diff --git a/test/test_105_condition_evaluator.js b/test/test_105_condition_evaluator.js new file mode 100644 index 00000000..8f655e20 --- /dev/null +++ b/test/test_105_condition_evaluator.js @@ -0,0 +1,235 @@ +var vows = require('vows'), + assert = require('assert'), + condition_evaluator = require('lib/condition_evaluator'); + +function check(op, data, result) { + return { + topic: function() { + return condition_evaluator.compute(op, data); + }, + check: function(x) { + assert.equal(x, result); + } + }; +} + +function check_error(op, data, result) { + return { + topic: function() { + try { + condition_evaluator.compute(op, data); + } + catch(e) { + return e; + } + return undefined; + }, + check: function(err) { + assert.isDefined(err); + assert.match(err.toString(), result); + } + }; +} + +var op1 = { + op: '==', + left: { + value: 'aa', + }, + right: { + value: 'bb' + } +}; + +var op2 = { + op: '==', + left: { + field: 'type', + }, + right: { + value: 'bb' + } +}; + +var op3 = { + op: '=~', + left: { + field: 'type', + }, + right: { + value: '^a.c' + } +}; + +var op4 = { + op: '!~', + left: { + field: 'type', + }, + right: { + value: '567' + } +}; + +var op5 = { + op: '!=', + left: { + field: 'type', + }, + right: { + value: 567 + } +}; + +var op6 = { + op: 'in', + left: { + field: 'type', + }, + right: { + value: ['a', 'b', 'c', 35] + } +}; + +var op7 = { + op: 'not in', + left: { + field: 'type', + }, + right: { + field: 'array' + } +}; + +var op8 = { + op: '>', + left: { + field: 'type', + }, + 
right: { + field: 'message' + } +}; + +var op9 = { + op: '>=', + left: { + field: 'type', + }, + right: { + field: 'message' + } +}; + +var op10 = { + op: '<=', + left: { + field: 'type', + }, + right: { + field: 'message' + } +}; + +var op11 = { + op: '<', + left: { + field: 'type', + }, + right: { + field: 'message' + } +}; + +var op12 = { + op: '=~', + left: { + field: 'type', + }, + right: { + value: '12' + } +}; + +var op13 = { + op: '=~', + left: { + field: 'type', + }, + right: { + value: '/12' + } +}; + +var op14 = { + op: '=~', + left: { + field: 'type', + }, + right: { + value: '56.' + } +}; + +vows.describe('Condition evaluator').addBatch({ + 'simple equal': check(op1, {}, false), + 'equal 1': check(op2, {}, false), + 'equal 2': check(op2, {type: 12}, false), + 'equal 3': check(op2, {type: 'bb'}, true), + 'equal 4': check(op2, {type: 'bb2'}, false), + 'regex 1': check(op3, {type: 'abc'}, true), + 'regex 2': check(op3, {type: 'adc'}, true), + 'regex 3': check(op3, {type: 'addc'}, false), + 'regex 4': check(op3, {type: 'totoabc'}, false), + 'regex 5': check(op3, {type: 12}, false), + 'not regex 1': check(op4, {type: 12}, true), + 'not regex 2': check(op4, {type: 567}, false), + 'not regex 3': check(op4, {type: '567'}, false), + 'not equal 1': check(op5, {type: '567'}, false), + 'not equal 2': check(op5, {type: '568'}, true), + 'in 1': check(op6, {type: 568}, false), + 'in 2': check(op6, {type: '568'}, false), + 'in 3': check(op6, {type: 'ab'}, false), + 'in 4': check(op6, {type: 'a'}, true), + 'in 5': check(op6, {type: 'c'}, true), + 'in 6': check(op6, {type: 35}, true), + 'not in 1': check_error(op7, {type: 35}, /right args must be an array/), + 'not in 2': check_error(op7, {type: 35, array: 1244}, /right args must be an array/), + 'not in 3': check(op7, {type: 35, array: [1244, 35]}, false), + 'not in 4': check(op7, {type: 35, array: [1244, 36]}, true), + 'not in 5': check(op7, {type: '35', array: ['1244', '36']}, true), + 'not in 6': check(op7, {type: 
'36', array: ['1244', '36']}, false), + '> 1': check(op8, {type: '36', message: '37'}, false), + '> 2': check(op8, {type: '36', message: '35'}, true), + '> 3': check(op8, {type: '36', message: 35}, true), + '> 4': check(op8, {type: '36', message: 35.2}, true), + '> 5': check(op8, {type: '35.3', message: 35.2}, true), + '> 6': check(op8, {type: '35.1', message: 35.2}, false), + '> 7': check_error(op8, {type: 'abc', message: 35.2}, /Unable to cast to int/), + '> 8': check(op8, {type: '35.2', message: 35.2}, false), + '>= 1': check(op9, {type: '35.2', message: 35.2}, true), + '>= 2': check(op9, {type: '35.2', message: 35.3}, false), + '<= 1': check(op10, {type: '35.2', message: 35.2}, true), + '<= 2': check(op10, {type: '35.2', message: 35.1}, false), + '< 1': check(op11, {type: '35.2', message: 35.2}, false), + '< 2': check(op11, {type: '35.2', message: 35.4}, true), + '! < 1': check({op: '!', left: op11}, {type: '35.2', message: 35.4}, false), + '! == 1': check({op: '!', left: op1}, {}, true), + '! string': check_error({op: '!', left: 'aaa'}, {}, /Not a boolean/), + '! 
bool': check({op: '!', left: 'false'}, {}, true), + 'and 1': check({op: 'and', left: 'true', right: true}, {}, true), + 'and 2': check({op: 'and', left: 'true', right: op1}, {}, false), + 'and 3': check({op: 'and', left: 'true', right: {op: '!', left: op1}}, {}, true), + 'or 1': check({op: 'or', left: 'false', right: {op: '!', left: op1}}, {}, true), + 'xor 1': check({op: 'xor', left: 'false', right: {op: '!', left: op1}}, {}, true), + 'xor 2': check({op: 'xor', left: 'true', right: {op: '!', left: op1}}, {}, false), + 'nand 1': check({op: 'nand', left: 'true', right: {op: '!', left: op1}}, {}, false), + 'nand 2': check({op: 'nand', left: 'false', right: {op: '!', left: op1}}, {}, true), + 'regex2 1': check(op12, {type: 12}, true), + 'regex2 2': check(op12, {type: 13}, false), + 'regex2 3': check(op12, {}, false), + 'regex2 4': check(op13, {type: 'aaaa/12'}, true), + 'regex2 5': check(op13, {type: 'aaaa#12'}, false), + 'regex2 6': check(op14, {type: '567'}, true), + 'regex2 7': check(op14, {type: '597'}, false), +}).export(module); diff --git a/test/test_200_filter_add_timestamp.js b/test/test_200_filter_add_timestamp.js new file mode 100644 index 00000000..ddffc771 --- /dev/null +++ b/test/test_200_filter_add_timestamp.js @@ -0,0 +1,14 @@ +var vows = require('vows'), + assert = require('assert'), + filter_helper = require('./filter_helper'); + +vows.describe('Filter add timestamp ').addBatch({ + 'normal': filter_helper.createWithCallback('add_timestamp', '', [{}], 1, function(result) { + assert.isDefined(result[0]['@timestamp']); + }), + 'not overwrite': filter_helper.create('add_timestamp', '', [{ + '@timestamp': 'toto' + }], [{ + '@timestamp': 'toto' + }]), +}).export(module); diff --git a/test/test_201_filter_add_host.js b/test/test_201_filter_add_host.js new file mode 100644 index 00000000..932b7678 --- /dev/null +++ b/test/test_201_filter_add_host.js @@ -0,0 +1,14 @@ +var vows = require('vows'), + os = require('os'), + filter_helper = 
require('./filter_helper'); + +vows.describe('Filter add host ').addBatch({ + 'normal': filter_helper.create('add_host', '', [{}], [{ + 'host': os.hostname() + }]), + 'not overwrite': filter_helper.create('add_host', '', [{ + 'host': 'toto' + }], [{ + 'host': 'toto' + }]), +}).export(module); diff --git a/test/test_202_filter_regex.js b/test/test_202_filter_regex.js new file mode 100644 index 00000000..74f13013 --- /dev/null +++ b/test/test_202_filter_regex.js @@ -0,0 +1,345 @@ +var vows = require('vows'), + assert = require('assert'), + moment = require('moment'), + patterns_loader = require('../lib/lib/patterns_loader'), + filter_helper = require('./filter_helper'); + +var n = moment(); + +patterns_loader.add('/toto42'); +patterns_loader.add('/tata43'); +patterns_loader.add('lib/patterns'); + +vows.describe('Filter regex ').addBatch({ + 'normal': filter_helper.create('regex', '?regex=^(a\\S+) (\\S+)&fields=fa,fb', [ + { + 'message': 'abcd efgh ijk' + }, + { + 'message': 'abcd efgh ijk', + fc: 'toto' + }, + { + 'message': 'Abcd efghijk' + }, + ], [ + { + 'message': 'abcd efgh ijk', + fa: 'abcd', + fb: 'efgh' + }, + { + 'message': 'abcd efgh ijk', + fa: 'abcd', + fb: 'efgh', + fc: 'toto' + }, + { + 'message': 'Abcd efghijk' + }, + ]), + 'regex flags': filter_helper.create('regex', '?regex=^(a\\S+) (\\S+)&fields=fa,fb®ex_flags=i', [ + { + 'message': 'abcd efgh ijk' + }, + { + 'message': 'abcd efgh ijk', + fc: 'toto' + }, + { + 'message': 'Abcd efghijk' + }, + ], [ + { + 'message': 'abcd efgh ijk', + fa: 'abcd', + fb: 'efgh' + }, + { + 'message': 'abcd efgh ijk', + fa: 'abcd', + fb: 'efgh', + fc: 'toto' + }, + { + 'message': 'Abcd efghijk', + fa: 'Abcd', + fb: 'efghijk', + }, + ]), + 'number management': filter_helper.create('regex', '?regex=^(\\S+)$&fields=a', [ + { + 'message': '12' + }, + { + 'message': '90' + }, + { + 'message': '12.3' + }, + { + 'message': '11,67' + }, + { + 'message': 'aa' + }, + { + 'message': '' + }, + ], [ + { + 'message': '12', + a: 12 + }, 
+ { + 'message': '90', + a: 90 + }, + { + 'message': '12.3', + a: 12.3 + }, + { + 'message': '11,67', + a: 11.67 + }, + { + 'message': 'aa', + a: 'aa' + }, + { + 'message': '' + }, + ], function(r) { + assert.equal(typeof(r[0].a), 'number'); + assert.equal(typeof(r[1].a), 'number'); + assert.equal(typeof(r[2].a), 'number'); + assert.equal(typeof(r[3].a), 'number'); + assert.equal(typeof(r[4].a), 'string'); + }), + 'with star': filter_helper.create('regex', '?regex=^(\\S*) (\\S+)&fields=fa,fb', [ + { + 'message': ' efgh ijk' + }, + ], [ + { + 'message': ' efgh ijk', + fb: 'efgh' + }, + ]), + 'type filtering': filter_helper.create('regex', '?only_type=toto®ex=^(\\S+) (\\S+)&fields=fa,fb', [ + { + 'message': 'abcd efgh ijk' + }, + { + 'message': 'abcd efgh ijk', + 'type': 'toto' + }, + { + 'message': 'abcd efgh ijk', + 'type': 'toto2' + }, + ], [ + { + 'message': 'abcd efgh ijk' + }, + { + 'message': 'abcd efgh ijk', + 'type': 'toto', + fa: 'abcd', + fb: 'efgh' + }, + { + 'message': 'abcd efgh ijk', + 'type': 'toto2' + }, + ]), + 'two fields one in regex': filter_helper.create('regex', '?regex=^(\\S+) \\S+&fields=fa,fb', [ + { + 'message': 'abcd efgh ijk' + }, + ], [ + { + 'message': 'abcd efgh ijk', + fa: 'abcd' + }, + ]), + 'one field two in regex': filter_helper.create('regex', '?regex=^(\\S+) (\\S+)&fields=fa', [ + { + 'message': 'abcd efgh ijk' + }, + ], [ + { + 'message': 'abcd efgh ijk', + fa: 'abcd' + }, + ]), + 'numerical_fields': filter_helper.create('regex', '?regex=^(\\S+) (\\d+|-)&fields=fa,fb&numerical_fields=fb', [ + { + 'message': 'abcd 123 ijk' + }, + { + 'message': 'abcd - ijk' + }, + ], [ + { + 'message': 'abcd 123 ijk', + fa: 'abcd', + fb: 123 + }, + { + 'message': 'abcd - ijk', + fa: 'abcd' + }, + ]), + 'date parsing': filter_helper.create('regex', '?regex=^(.*)$&fields=timestamp&date_format=DD/MMMM/YYYY:HH:mm:ss ZZ', [ + { + 'message': '31/Jul/2012:18:02:28 +0200' + }, + { + 'message': '31/Jul/2012' + }, + { + 'message': 'toto' + }, + ], [ + { + 
'message': '31/Jul/2012:18:02:28 +0200', + '@timestamp': '2012-07-31T16:02:28.000+0000' + }, + { + 'message': '31/Jul/2012', + '@timestamp': '2012-07-31T00:00:00.000+0000' + }, + { + 'message': 'toto' + }, + ]), + 'missing fields in date': filter_helper.create('regex', '?regex=^(.*)$&fields=timestamp&date_format=HH:mm:ss ZZ', [ + { + 'message': '18:02:28' + }, + ], [ + { + 'message': '18:02:28', + '@timestamp': n.format().substring(0, 10) + 'T18:02:28.000+0000' + }, + ]), + 'change message': filter_helper.create('regex', '?regex=^abcd(.*)efgh$&fields=message', [ + { + 'message': 'abcd12345efgh' + }, + ], [ + { + 'message': '12345' + }, + ]), + 'change host': filter_helper.create('regex', '?regex=^(abcd)(.*)efgh$&fields=a,host', [ + { + 'message': 'abcd12345efgh' + }, + ], [ + { + 'message': 'abcd12345efgh', + 'a': 'abcd', + 'host': '12345' + }, + ]), + 'nginx parsing': filter_helper.create('regex', '?regex=^(\\S+) - (\\S*) ?- \\[([^\\]]+)\\] "([^"]+)" (\\d+) (\\d+) "([^"]*)" "([^"]*)"&fields=ip,user,timestamp,request,status,bytes_sent,referer,user_agent&date_format=DD/MMMM/YYYY:HH:mm:ss ZZ', [ + { + 'message': '127.0.0.1 - - [31/Jul/2012:18:02:28 +0200] "GET /favicon.ico HTTP/1.1" 502 574 "-" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_4) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1215.0 Safari/537.2"' + }, + { + 'message': '127.0.0.1 - - [31/Jul/2012:18:02:48 +0200] "-" 400 0 "-" "-"' + }, + ], [ + { + 'message': '127.0.0.1 - - [31/Jul/2012:18:02:28 +0200] "GET /favicon.ico HTTP/1.1" 502 574 "-" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_4) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1215.0 Safari/537.2"', + 'ip': '127.0.0.1', + 'request': 'GET /favicon.ico HTTP/1.1', + 'status': 502, + 'bytes_sent': 574, + 'user_agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_4) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1215.0 Safari/537.2', + 'referer': '-', + '@timestamp': '2012-07-31T16:02:28.000+0000', + }, + { + 'message': '127.0.0.1 - - 
[31/Jul/2012:18:02:48 +0200] "-" 400 0 "-" "-"', + 'ip': '127.0.0.1', + 'request': '-', + 'status': 400, + 'bytes_sent': 0, + 'user_agent': '-', + 'referer': '-', + '@timestamp': '2012-07-31T16:02:48.000+0000', + }, + ], function(r) { + assert.equal(typeof(r[0].status), 'number'); + assert.equal(typeof(r[0].bytes_sent), 'number'); + assert.equal(typeof(r[0].referer), 'string'); + }), + 'http combined with predefined type': filter_helper.create('regex', 'http_combined', [ + { + 'message': '127.0.0.1 - - [31/Jul/2012:18:02:28 +0200] "GET /favicon.ico HTTP/1.1" 502 574 "-" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_4) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1215.0 Safari/537.2"' + }, + { + 'message': '88.178.233.127 - cdv [12/Oct/2012:14:23:28 +0000] "GET /public/utils/ejam.jar HTTP/1.1" 304 172 "-" "Mozilla/4.0 (Windows 7 6.1) Java/1.7.0_07"' + } + ], [ + { + 'message': '127.0.0.1 - - [31/Jul/2012:18:02:28 +0200] "GET /favicon.ico HTTP/1.1" 502 574 "-" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_4) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1215.0 Safari/537.2"', + 'ip': '127.0.0.1', + 'request': 'GET /favicon.ico HTTP/1.1', + 'status': 502, + 'bytes_sent': 574, + 'user_agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_4) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1215.0 Safari/537.2', + 'referer': '-', + 'user': '-', + '@timestamp': '2012-07-31T16:02:28.000+0000', + }, + { + 'message': '88.178.233.127 - cdv [12/Oct/2012:14:23:28 +0000] "GET /public/utils/ejam.jar HTTP/1.1" 304 172 "-" "Mozilla/4.0 (Windows 7 6.1) Java/1.7.0_07"', + 'user': 'cdv', + 'bytes_sent': 172, + 'ip': '88.178.233.127', + 'status': 304, + 'referer': '-', + 'user_agent': 'Mozilla/4.0 (Windows 7 6.1) Java/1.7.0_07', + 'request': 'GET /public/utils/ejam.jar HTTP/1.1', + '@timestamp': '2012-10-12T14:23:28.000+0000', + } + ]), + 'http vhost combined with predefined type': filter_helper.create('regex', 'http_vhost_combined', [ + { + 'message': 
'ip-10-62-95-254.eu-west-1.compute.internal:80 88.178.233.127 - cdv [12/Oct/2012:14:23:28 +0000] "GET /public/utils/ejam.jar HTTP/1.1" 304 172 "-" "Mozilla/4.0 (Windows 7 6.1) Java/1.7.0_07"' + }, + { + 'message': 'www.skillstar.com:80 86.221.21.138 - - [13/Oct/2012:09:04:42 +0200] "GET /favicon.ico HTTP/1.1" 304 0 "-" "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:15.0) Gecko/20100101 Firefox/15.0.1"' + }, + ], [ + { + 'message': 'ip-10-62-95-254.eu-west-1.compute.internal:80 88.178.233.127 - cdv [12/Oct/2012:14:23:28 +0000] "GET /public/utils/ejam.jar HTTP/1.1" 304 172 "-" "Mozilla/4.0 (Windows 7 6.1) Java/1.7.0_07"', + 'user': 'cdv', + 'bytes_sent': 172, + 'ip': '88.178.233.127', + 'status': 304, + 'referer': '-', + 'user_agent': 'Mozilla/4.0 (Windows 7 6.1) Java/1.7.0_07', + 'request': 'GET /public/utils/ejam.jar HTTP/1.1', + 'vhost': 'ip-10-62-95-254.eu-west-1.compute.internal:80', + '@timestamp': '2012-10-12T14:23:28.000+0000', + }, + { + 'message': 'www.skillstar.com:80 86.221.21.138 - - [13/Oct/2012:09:04:42 +0200] "GET /favicon.ico HTTP/1.1" 304 0 "-" "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:15.0) Gecko/20100101 Firefox/15.0.1"', + 'user': '-', + 'bytes_sent': 0, + 'ip': '86.221.21.138', + 'status': 304, + 'referer': '-', + 'vhost': 'www.skillstar.com:80', + 'user_agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:15.0) Gecko/20100101 Firefox/15.0.1', + 'request': 'GET /favicon.ico HTTP/1.1', + '@timestamp': '2012-10-13T07:04:42.000+0000', + } + ]), +}).export(module); diff --git a/test/test_203_filter_grep.js b/test/test_203_filter_grep.js new file mode 100644 index 00000000..871a1f77 --- /dev/null +++ b/test/test_203_filter_grep.js @@ -0,0 +1,62 @@ +var vows = require('vows'), + filter_helper = require('./filter_helper'); + +vows.describe('Filter grep ').addBatch({ + 'normal': filter_helper.create('grep', '?regex=abc', [ + { + 'message': 'abcd' + }, + { + 'message': 'abd' + }, + ], [ + { + 'message': 'abcd' + }, + ]), + 'regex': filter_helper.create('grep', 
'?regex=\\d', [ + { + 'message': 'abcd' + }, + { + 'message': 'ABcD' + }, + { + 'message': 'abd5' + }, + ], [ + { + 'message': 'abd5' + }, + ]), + 'invert': filter_helper.create('grep', '?regex=abc&invert=true', [ + { + 'message': 'abcd' + }, + { + 'message': 'abd' + }, + ], [ + { + 'message': 'abd' + }, + ]), + 'flags': filter_helper.create('grep', '?regex=abc®ex_flags=i', [ + { + 'message': 'abcd' + }, + { + 'message': 'ABcD' + }, + { + 'message': 'abd' + }, + ], [ + { + 'message': 'abcd' + }, + { + 'message': 'ABcD' + }, + ]), +}).export(module); diff --git a/test/test_204_filter_mutate_replace.js b/test/test_204_filter_mutate_replace.js new file mode 100644 index 00000000..18a1ca53 --- /dev/null +++ b/test/test_204_filter_mutate_replace.js @@ -0,0 +1,38 @@ +var vows = require('vows'), + filter_helper = require('./filter_helper'); + +vows.describe('Filter replace ').addBatch({ + 'nothing': filter_helper.create('mutate_replace', 'toto?from=\\.&to=-', [{}], [{}]), + 'normal': filter_helper.create('mutate_replace', 'toto?from=\\.&to=-', [{ + 'toto': 'my.domain' + }], [{ + 'toto': 'my-domain' + }]), + 'float': filter_helper.create('mutate_replace', 'toto?from=\\.&to=-', [{ + 'toto': 10.42 + }], [{ + 'toto': '10-42' + }]), + 'multiple': filter_helper.create('mutate_replace', 'toto?from=\\.&to=-', [{ + 'toto': 'my.domain.com' + }], [{ + 'toto': 'my-domain-com' + }]), + 'type_filtering': filter_helper.create('mutate_replace', 'toto?only_type=titi&from=\\.&to=-', [ + { + 'type': 'titi', + 'toto': 'my.domain.com' + }, + { + 'toto': 'my.domain2.com' + } + ], [ + { + 'type': 'titi', + 'toto': 'my-domain-com' + }, + { + 'toto': 'my.domain2.com' + } + ]), +}).export(module); diff --git a/test/test_205_filter_compute_field.js b/test/test_205_filter_compute_field.js new file mode 100644 index 00000000..ec18240f --- /dev/null +++ b/test/test_205_filter_compute_field.js @@ -0,0 +1,134 @@ +var vows = require('vows'), + filter_helper = require('./filter_helper'); + 
+vows.describe('Filter compute field ').addBatch({ + 'normal': filter_helper.create('compute_field', 'titi?value=ab', [ + { + 'message': 'toto' + }, + ], [ + { + 'message': 'toto', + 'titi': 'ab' + }, + ]), + 'edge1': filter_helper.create('compute_field', 'titi?value=#{bouh}', [ + { + 'message': 'toto', + 'bouh': 'a' + }, + ], [ + { + 'message': 'toto', + 'bouh': 'a', + 'titi': 'a' + }, + ]), + 'edge2': filter_helper.create('compute_field', 'titi?value=#{}', [ + { + 'message': 'toto', + 'bouh': 'a' + }, + ], [ + { + 'message': 'toto', + 'bouh': 'a' + }, + ]), + 'edge3': filter_helper.create('compute_field', 'titi?value=#{', [ + { + 'message': 'toto', + 'bouh': 'a' + }, + ], [ + { + 'message': 'toto', + 'bouh': 'a', + 'titi': '#{' + }, + ]), + 'with value': filter_helper.create('compute_field', 'titi?value=ab#{bouh}z', [ + { + 'message': 'toto', + 'bouh': 'tata' + }, + { + 'message': 'toto', + 'bouh': 42 + }, + { + 'message': 'toto', + 'bouh': 42, + 'titi': 'abcdef' + }, + { + 'message': 'toto' + }, + ], [ + { + 'message': 'toto', + 'bouh': 'tata', + 'titi': 'abtataz' + }, + { + 'message': 'toto', + 'bouh': 42, + 'titi': 'ab42z' + }, + { + 'message': 'toto', + 'bouh': 42, + 'titi': 'ab42z' + }, + { + 'message': 'toto' + }, + ]), + 'with multiple values': filter_helper.create('compute_field', 'titi?value=#{bouh1}_#{bouh2}_#{bouh3}', [ + { + 'message': 'toto', + 'bouh1': 'tata', + 'bouh2': '22', + 'bouh3': 34, + }, + { + 'message': 'titi', + 'bouh1': 'tata2', + 'bouh2': '29', + 'bouh3': 9.7, + }, + { + 'message': 'titi', + 'bouh1': 'tata2', + }, + ], [ + { + 'message': 'toto', + 'bouh1': 'tata', + 'bouh2': '22', + 'bouh3': 34, + 'titi': 'tata_22_34', + }, + { + 'message': 'titi', + 'bouh1': 'tata2', + 'bouh2': '29', + 'bouh3': 9.7, + 'titi': 'tata2_29_9.7' + }, + { + 'message': 'titi', + 'bouh1': 'tata2', + }, + ]), + 'date': filter_helper.create('compute_field', 'titi?value=#{now:YYYY/MM}', [ + { + 'message': 'toto', + }, + ], [ + { + 'message': 'toto', + 'titi': 
require('moment')().format('YYYY/MM'), + }, + ]), +}).export(module); diff --git a/test/test_206_message_filtering.js b/test/test_206_message_filtering.js new file mode 100644 index 00000000..0e1c1ec7 --- /dev/null +++ b/test/test_206_message_filtering.js @@ -0,0 +1,213 @@ +var vows = require('vows'), + filter_helper = require('./filter_helper'); + +vows.describe('Message filtering ').addBatch({ + 'nothing': filter_helper.create('compute_field', 'titi?value=a', [ + { + 'message': 'toto' + }, + ], [ + { + 'message': 'toto', + 'titi': 'a' + }, + ]), + 'only type': filter_helper.create('compute_field', 'titi?value=a&only_type=z', [ + { + 'message': 'toto' + }, + { + 'message': 'toto', + 'type': 'tata' + }, + { + 'message': 'toto', + 'type': 'z' + }, + ], [ + { + 'message': 'toto' + }, + { + 'message': 'toto', + 'type': 'tata' + }, + { + 'message': 'toto', + 'type': 'z', + 'titi': 'a' + }, + ]), + 'only field exist': filter_helper.create('compute_field', 'titi?value=a&only_field_exist_titi', [ + { + 'message': 'toto' + }, + { + 'message': 'toto', + 'toto': 'b' + }, + { + 'message': 'toto', + 'titi': 'b' + }, + ], [ + { + 'message': 'toto' + }, + { + 'message': 'toto', + 'toto': 'b' + }, + { + 'message': 'toto', + 'titi': 'a' + }, + ]), + 'multiple only field exist': filter_helper.create('compute_field', 'titi?value=a&only_field_exist_titi&only_field_exist_toto', [ + { + 'message': 'toto' + }, + { + 'message': 'toto', + 'toto': 'b' + }, + { + 'message': 'toto', + 'titi': 'b' + }, + { + 'message': 'toto', + 'titi': 'b', + 'toto': 'b' + }, + ], [ + { + 'message': 'toto' + }, + { + 'message': 'toto', + 'toto': 'b' + }, + { + 'message': 'toto', + 'titi': 'b' + }, + { + 'message': 'toto', + 'titi': 'a', + 'toto': 'b' + }, + ]), + 'only field equal': filter_helper.create('compute_field', 'titi?value=a&only_field_equal_titi=z', [ + { + 'message': 'toto' + }, + { + 'message': 'toto', + 'titi': 'b' + }, + { + 'message': 'toto', + 'titi': 'z' + }, + ], [ + { + 'message': 'toto' + 
}, + { + 'message': 'toto', + 'titi': 'b' + }, + { + 'message': 'toto', + 'titi': 'a' + }, + ]), + 'multiple only field equal': filter_helper.create('compute_field', 'titi?value=aa&only_field_equal_titi=a&only_field_equal_toto=b', [ + { + 'message': 'toto' + }, + { + 'message': 'toto', + 'toto': 'b' + }, + { + 'message': 'toto', + 'titi': 'a' + }, + { + 'message': 'toto', + 'titi': 'a', + 'toto': 'b' + }, + ], [ + { + 'message': 'toto' + }, + { + 'message': 'toto', + 'toto': 'b' + }, + { + 'message': 'toto', + 'titi': 'a' + }, + { + 'message': 'toto', + 'titi': 'aa', + 'toto': 'b' + }, + ]), + 'one field match': filter_helper.create('compute_field', 'titi?value=aa&only_field_match_titi=abc', [ + { + 'message': 'toto' + }, + { + 'message': 'toto', + 'titi': 'acb' + }, + { + 'message': 'toto', + 'titi': 'abc' + }, + { + 'message': 'toto', + 'titi': '1234abcdef', + }, + ], [ + { + 'message': 'toto' + }, + { + 'message': 'toto', + 'titi': 'acb' + }, + { + 'message': 'toto', + 'titi': 'aa' + }, + { + 'message': 'toto', + 'titi': 'aa' + }, + ]), + 'multiple field match': filter_helper.create('compute_field', 'titi?value=aa&only_field_match_titi=abc&only_field_match_message=z$', [ + { + 'message': 'ztoto', + 'titi': 'abc' + }, + { + 'message': 'totoz', + 'titi': 'abc', + }, + ], [ + { + 'message': 'ztoto', + 'titi': 'abc' + }, + { + 'message': 'totoz', + 'titi': 'aa', + }, + ]), +}).export(module); diff --git a/test/test_207_filter_split.js b/test/test_207_filter_split.js new file mode 100644 index 00000000..adad1a47 --- /dev/null +++ b/test/test_207_filter_split.js @@ -0,0 +1,77 @@ +var vows = require('vows'), + filter_helper = require('./filter_helper'); + +vows.describe('Filter split ').addBatch({ + 'normal': filter_helper.create('split', '?delimiter=|', [ + { + 'message': 'toto||tata|titi', + 'host': 'a' + }, + { + 'message': 'tete|bouh|', + 'host': 'b' + }, + ], [ + { + 'message': 'toto', + 'host': 'a' + }, + { + 'message': 'tata', + 'host': 'a' + }, + { + 'message': 
'titi', + 'host': 'a' + }, + { + 'message': 'tete', + 'host': 'b' + }, + { + 'message': 'bouh', + 'host': 'b' + }, + ]), + 'normal with fields and long delimiter': filter_helper.create('split', '?delimiter=|()', [ + { + 'message': 'toto|()tata|()|()titi', + 'host': 'a', + 'z': 2 + }, + ], [ + { + 'message': 'toto', + 'host': 'a', + 'z': 2 + }, + { + 'message': 'tata', + 'host': 'a', + 'z': 2 + }, + { + 'message': 'titi', + 'host': 'a', + 'z': 2 + }, + ]), + 'with carring return in regex': filter_helper.create('split', '?delimiter=toto%0Atiti', [ + { + 'message': 'l1 toto\ntitil2 toto\ntitil3 toto\ntitil4', + }, + ], [ + { + 'message': 'l1 ', + }, + { + 'message': 'l2 ', + }, + { + 'message': 'l3 ', + }, + { + 'message': 'l4', + }, + ]), +}).export(module); diff --git a/test/test_208_filter_multiline.js b/test/test_208_filter_multiline.js new file mode 100644 index 00000000..f779e424 --- /dev/null +++ b/test/test_208_filter_multiline.js @@ -0,0 +1,131 @@ +var vows = require('vows'), + filter_helper = require('./filter_helper'); + +vows.describe('Filter multiline ').addBatch({ + 'normal': filter_helper.create('multiline', '?start_line_regex=^abc', [ + { + 'message': 'abc', + 'host': 'a' + }, + { + 'message': 'ABC', + 'host': 'a' + }, + { + 'message': 'abc', + 'host': 'a' + }, + { + 'message': '123', + 'host': 'a' + }, + ], [ + { + 'message': 'abc\nABC', + 'host': 'a' + }, + { + 'message': 'abc\n123', + 'host': 'a' + }, + ]), + 'regex flags': filter_helper.create('multiline', '?start_line_regex=^abc®ex_flags=i', [ + { + 'message': 'abc', + 'host': 'a' + }, + { + 'message': 'ABC', + 'host': 'a' + }, + { + 'message': 'abc', + 'host': 'a' + }, + { + 'message': '123', + 'host': 'a' + }, + ], [ + { + 'message': 'abc', + 'host': 'a' + }, + { + 'message': 'ABC', + 'host': 'a' + }, + { + 'message': 'abc\n123', + 'host': 'a' + }, + ]), + 'wrong initial content': filter_helper.create('multiline', '?start_line_regex=^abc', [ + { + 'message': 'def', + 'host': 'a' + }, + { + 
'message': 'abc', + 'host': 'a' + }, + { + 'message': '123', + 'host': 'a' + }, + ], [ + { + 'message': 'def', + 'host': 'a' + }, + { + 'message': 'abc\n123', + 'host': 'a' + }, + ]), + 'multiple sources': filter_helper.create('multiline', '?start_line_regex=^abc', [ + { + 'message': 'abc', + 'host': 'a' + }, + { + 'message': 'abc', + 'host': 'b' + }, + { + 'message': 'def', + 'host': 'a' + }, + { + 'message': '123', + 'host': 'b' + }, + ], [ + { + 'message': 'abc\ndef', + 'host': 'a' + }, + { + 'message': 'abc\n123', + 'host': 'b' + }, + ]), + 'with carring return in regex': filter_helper.create('multiline', '?start_line_regex=^titi®ex_flags=m', [ + { + 'message': 'titil1 toto\ntataabc\ntitil2', + }, + { + 'message': 'bouh', + }, + { + 'message': 'abc\ntiti', + }, + ], [ + { + 'message': 'titil1 toto\ntataabc\ntitil2\nbouh', + }, + { + 'message': 'abc\ntiti', + }, + ]), +}).export(module); diff --git a/test/test_209_filter_syslog_pri.js b/test/test_209_filter_syslog_pri.js new file mode 100644 index 00000000..a85018ec --- /dev/null +++ b/test/test_209_filter_syslog_pri.js @@ -0,0 +1,61 @@ +var vows = require('vows'), + filter_helper = require('./filter_helper'); + +vows.describe('Filter syslog pri').addBatch({ + 'normal': filter_helper.create('syslog_pri', '?', [ + { + 'message': 'abcc' + }, + { + 'message': 'abcc', + 'c': 12 + }, + { + 'message': 'abcc', + 'syslog_priority': 'a' + }, + { + 'message': 'abcc', + 'syslog_priority': 158 + }, + { + 'message': 'abcc', + 'syslog_priority': 0 + }, + { + 'message': 'abcc', + 'syslog_priority': 191 + }, + + ], [ + { + 'message': 'abcc' + }, + { + 'message': 'abcc', + 'c': 12 + }, + { + 'message': 'abcc', + 'syslog_priority': 'a' + }, + { + 'message': 'abcc', + 'syslog_priority': 158, + 'syslog_facility': 'local3', + 'syslog_severity': 'informational' + }, + { + 'message': 'abcc', + 'syslog_priority': 0, + 'syslog_facility': 'kernel', + 'syslog_severity': 'emergency' + }, + { + 'message': 'abcc', + 'syslog_priority': 191, + 
'syslog_facility': 'local7', + 'syslog_severity': 'debug' + }, + ]), +}).export(module); diff --git a/test/test_210_compute_date_field.js b/test/test_210_compute_date_field.js new file mode 100644 index 00000000..a5d33af8 --- /dev/null +++ b/test/test_210_compute_date_field.js @@ -0,0 +1,17 @@ +var vows = require('vows'), + filter_helper = require('./filter_helper'); + +vows.describe('Filter compute date field ').addBatch({ + 'normal': filter_helper.create('compute_date_field', 'titi?date_format=DD/MMMM/YYYY HH', [ + {}, + { + '@timestamp': '2012-07-31T18:02:28.123+02:00' + }, + ], [ + {}, + { + '@timestamp': '2012-07-31T18:02:28.123+02:00', + 'titi': '31/July/2012 16' + }, + ]), +}).export(module); diff --git a/test/test_211_reverse_dns.js b/test/test_211_reverse_dns.js new file mode 100644 index 00000000..4603266f --- /dev/null +++ b/test/test_211_reverse_dns.js @@ -0,0 +1,178 @@ +var vows = require('vows'), + mock_helper = require('./mock_helper'), + filter_helper = require('./filter_helper'); + +function mock_dns() { + mock_helper.mock({ + 'dns': { + reverse: function(s, cb) { + if (s === 'www.free.fr') { + return cb({ + code: 'ENOTFOUND', + errno: 'ENOTFOUND', + syscall: 'getHostByAddr' + }); + } + if (s === '212.27.48.10') { + return cb(null, ['www.free.fr']); + } + if (s === '212.27.48.11') { + return cb(null, ['toto']); + } + throw new Error('toto2'); + } + } + }); +} + +function unmock_dns() { + mock_helper.unmock(); +} + +vows.describe('Filter reverse dns').addBatch({ + 'no_only_hostname': filter_helper.create('reverse_dns', 'host?only_hostname=false', [ + { + 'message': 'abcc' + }, + { + 'host': 'www.free.fr' + }, + { + 'host': 'www.free.fr2' + }, + { + 'host': '212.27.48.10' + }, + { + 'host': '212.27.48.11' + }, + ], [ + { + 'message': 'abcc' + }, + { + 'host': 'www.free.fr' + }, + { + 'host': 'www.free.fr2' + }, + { + 'host': 'www.free.fr' + }, + { + 'host': 'toto' + }, + ], function() {}, function(callback) { + mock_dns(); + callback(); + }, 
function() { + unmock_dns(); + }), +}).addBatch({ + 'change target': filter_helper.create('reverse_dns', 'host?only_hostname=false&target_field=toto', [ + { + 'message': 'abcc' + }, + { + 'host': 'www.free.fr' + }, + { + 'host': 'www.free.fr2' + }, + { + 'host': '212.27.48.10' + }, + { + 'host': '212.27.48.11' + }, + ], [ + { + 'message': 'abcc' + }, + { + 'host': 'www.free.fr' + }, + { + 'host': 'www.free.fr2' + }, + { + 'host': '212.27.48.10', + 'toto': 'www.free.fr' + }, + { + 'host': '212.27.48.11', + 'toto': 'toto' + }, + ], function() {}, function(callback) { + mock_dns(); + callback(); + }, function() { + unmock_dns(); + }), +}).addBatch({ + 'change src': filter_helper.create('reverse_dns', 'titi?only_hostname=false', [ + { + 'message': 'abcc' + }, + { + 'titi': '212.27.48.10' + }, + { + 'host': '212.27.48.11' + }, + ], [ + { + 'message': 'abcc' + }, + { + 'titi': 'www.free.fr' + }, + { + 'host': '212.27.48.11' + }, + ], function() {}, function(callback) { + mock_dns(); + callback(); + }, function() { + unmock_dns(); + }), +}).addBatch({ + 'only_hostname': filter_helper.create('reverse_dns', 'host', [ + { + 'message': 'abcc' + }, + { + 'host': 'www.free.fr' + }, + { + 'host': 'www.free.fr2' + }, + { + 'host': '212.27.48.10' + }, + { + 'host': '212.27.48.11' + }, + ], [ + { + 'message': 'abcc' + }, + { + 'host': 'www.free.fr' + }, + { + 'host': 'www.free.fr2' + }, + { + 'host': 'www' + }, + { + 'host': 'toto' + }, + ], function() {}, function(callback) { + mock_dns(); + callback(); + }, function() { + unmock_dns(); + }), +}).export(module); diff --git a/test/test_212_filter_json_fields.js b/test/test_212_filter_json_fields.js new file mode 100644 index 00000000..59863796 --- /dev/null +++ b/test/test_212_filter_json_fields.js @@ -0,0 +1,151 @@ +var vows = require('vows'), + patterns_loader = require('lib/patterns_loader'), + filter_helper = require('./filter_helper'); + +patterns_loader.add('/toto'); +patterns_loader.add('/tata'); 
+patterns_loader.add('../lib/patterns'); + +vows.describe('Filter json_fields ').addBatch({ + 'basic': filter_helper.create('json_fields', '', [ + { + 'message': '{"abcd":"efgh","ijk":["l","m","n"]}' + }, + { + 'message': '{"abcd":"ef\\"\\ngh","ijk":["l","m","n"]}' + }, + ], [ + { + 'message': '{"abcd":"efgh","ijk":["l","m","n"]}', + abcd: 'efgh', + ijk: ['l', 'm', 'n'] + }, + { + 'message': '{"abcd":"ef\\"\\ngh","ijk":["l","m","n"]}', + abcd: 'ef\"\ngh', + ijk: ['l', 'm', 'n'] + }, + ]), + 'empty': filter_helper.create('json_fields', '', [ + { + 'message': '{}' + }, + ], [ + { + 'message': '{}' + }, + ]), + 'merge': filter_helper.create('json_fields', '', [ + { + 'message': '{"abcd":"efgh","ijk":["l","m","n"]}', + fc: 'toto' + }, + ], [ + { + 'message': '{"abcd":"efgh","ijk":["l","m","n"]}', + fc: 'toto', + abcd: 'efgh', + ijk: ['l', 'm', 'n'] + }, + ]), + 'overwrite': filter_helper.create('json_fields', '', [ + { + 'message': '{"abcd":"efgh","ijk":["l","m","n"]}', + fc: 'toto', + abcd: 'toto' + }, + ], [ + { + 'message': '{"abcd":"efgh","ijk":["l","m","n"]}', + fc: 'toto', + abcd: 'efgh', + ijk: ['l', 'm', 'n'] + }, + ]), + 'numeric': filter_helper.create('json_fields', '', [ + { + 'message': '{"abcd":0,"ijk":[1,2,3]}' + }, + ], [ + { + 'message': '{"abcd":0,"ijk":[1,2,3]}', + abcd: 0, + ijk: [1, 2, 3] + }, + ]), + 'object': filter_helper.create('json_fields', '', [ + { + 'message': '{"abcd":{"ef":"g","h":0},"ijk":[1,2,-3.5e-2]}' + }, + ], [ + { + 'message': '{"abcd":{"ef":"g","h":0},"ijk":[1,2,-3.5e-2]}', + abcd: { + ef: 'g', + h: 0 + }, + ijk: [1, 2, -0.035] + }, + ]), + 'boolean': filter_helper.create('json_fields', '', [ + { + 'message': '{"abcd":true,"efg":false}' + }, + ], [ + { + 'message': '{"abcd":true,"efg":false}', + abcd: true, + efg: false + }, + ]), + 'null': filter_helper.create('json_fields', '', [ + { + 'message': '{"abcd":null}' + }, + ], [ + { + 'message': '{"abcd":null}', + abcd: null + }, + ]), + 'corner_cases': 
filter_helper.create('json_fields', '', [ + { + 'message': '' + }, + { + 'message': '{' + }, + { + 'message': '{[]' + }, + { + 'message': '<123>{"abc":"efg"}' + }, + { + 'message': '<123>[}' + }, + { + 'message': '<123>[]' + }, + ], [ + { + 'message': '' + }, + { + 'message': '{' + }, + { + 'message': '{[]' + }, + { + 'message': '<123>{"abc":"efg"}', + abc: 'efg' + }, + { + 'message': '<123>[}' + }, + { + 'message': '<123>[]' + }, + ]), +}).export(module); diff --git a/test/test_213_filter_add_version.js b/test/test_213_filter_add_version.js new file mode 100644 index 00000000..99d24384 --- /dev/null +++ b/test/test_213_filter_add_version.js @@ -0,0 +1,13 @@ +var vows = require('vows'), + filter_helper = require('./filter_helper'); + +vows.describe('Filter add version ').addBatch({ + 'normal': filter_helper.create('add_version', '', [{}], [{ + '@version': '1' + }]), + 'not overwrite': filter_helper.create('add_version', '', [{ + '@version': '2' + }], [{ + '@version': '2' + }]), +}).export(module); diff --git a/test/test_214_filter_geoip.js b/test/test_214_filter_geoip.js new file mode 100644 index 00000000..b83281f0 --- /dev/null +++ b/test/test_214_filter_geoip.js @@ -0,0 +1,270 @@ +var vows = require('vows'), + geoip = require('geoip-lite'), + maxmind = require('maxmind'), + filter_helper = require('./filter_helper'); + +maxmind.init(['test/maxmind/GeoIPCity.dat', 'test/maxmind/GeoIPASNum.dat']); + +var ip1 = '91.121.153.187'; +var ip1_res = geoip.lookup(ip1); +var ip1_res_maxmind = maxmind.getLocation(ip1); + +var ip2 = '82.66.65.173'; +var ip2_res = geoip.lookup(ip2); +var ip2_res_maxmind = maxmind.getLocation(ip2); + +vows.describe('Filter Geoip ').addBatch({ + 'normal': filter_helper.create('geoip', 'ip', [ + { + 'titi': 'tata' + }, + { + 'titi': 'tata', + 'ip': 'toto' + }, + { + 'titi': 'tata', + 'ip': ip1 + }, + { + 'titi': 'tata', + 'ip': ip2 + }, + ], [ + { + 'titi': 'tata' + }, + { + 'titi': 'tata', + 'ip': 'toto' + }, + { + 'titi': 'tata', + 'ip': ip1, 
+ 'ip_geo_country': ip1_res.country, + 'ip_geo_lonlat': [ip1_res.ll[1], ip1_res.ll[0]] + }, + { + 'titi': 'tata', + 'ip': ip2, + 'ip_geo_country': ip2_res.country, + 'ip_geo_region': ip2_res.region, + 'ip_geo_city': ip2_res.city, + 'ip_geo_lonlat': [ip2_res.ll[1], ip2_res.ll[0]] + }, + ]), + 'normal with maxmind plugin': filter_helper.create('geoip', 'ip?maxmind_dir=test/maxmind', [ + { + 'titi': 'tata' + }, + { + 'titi': 'tata', + 'ip': 'toto' + }, + { + 'titi': 'tata', + 'ip': ip1 + }, + { + 'titi': 'tata', + 'ip': ip2 + }, + ], [ + { + 'titi': 'tata' + }, + { + 'titi': 'tata', + 'ip': 'toto' + }, + { + 'titi': 'tata', + 'ip': ip1, + 'ip_geo_country': ip1_res.country, + 'ip_geo_asn': 'AS16276 OVH SAS', + 'ip_geo_lonlat': [Number((ip1_res_maxmind.longitude).toFixed(4)), Number((ip1_res_maxmind.latitude).toFixed(4))], + }, + { + 'titi': 'tata', + 'ip': ip2, + 'ip_geo_country': ip2_res.country, + 'ip_geo_region': ip2_res.region, + 'ip_geo_city': ip2_res.city, + 'ip_geo_asn': 'AS12322 Free SAS', + 'ip_geo_lonlat': [Number((ip2_res_maxmind.longitude).toFixed(4)), Number((ip2_res_maxmind.latitude).toFixed(4))], + }, + ]), + 'hide city': filter_helper.create('geoip', 'ip?city_field=none', [ + { + 'titi': 'tata', + 'ip': ip2 + }, + ], [ + { + 'titi': 'tata', + 'ip': ip2, + 'ip_geo_country': ip2_res.country, + 'ip_geo_region': ip2_res.region, + 'ip_geo_lonlat': [ip2_res.ll[1], ip2_res.ll[0]] + }, + ]), + 'hide country': filter_helper.create('geoip', 'ip?country_field=none', [ + { + 'titi': 'tata', + 'ip': ip2 + }, + ], [ + { + 'titi': 'tata', + 'ip': ip2, + 'ip_geo_region': ip2_res.region, + 'ip_geo_city': ip2_res.city, + 'ip_geo_lonlat': [ip2_res.ll[1], ip2_res.ll[0]] + }, + ]), + 'hide region': filter_helper.create('geoip', 'ip?region_field=none', [ + { + 'titi': 'tata', + 'ip': ip2 + }, + ], [ + { + 'titi': 'tata', + 'ip': ip2, + 'ip_geo_country': ip2_res.country, + 'ip_geo_city': ip2_res.city, + 'ip_geo_lonlat': [ip2_res.ll[1], ip2_res.ll[0]] + }, + ]), + 'hide 
lonlat': filter_helper.create('geoip', 'ip?lonlat_field=none', [ + { + 'titi': 'tata', + 'ip': ip2 + }, + ], [ + { + 'titi': 'tata', + 'ip': ip2, + 'ip_geo_country': ip2_res.country, + 'ip_geo_region': ip2_res.region, + 'ip_geo_city': ip2_res.city + }, + ]), + 'local': filter_helper.create('geoip', 'ip', [ + { + 'ip': '10.0.0.1', + }, + { + 'ip': '192.168.0.1', + }, + { + 'ip': '172.16.0.1', + }, + { + 'ip': '172.17.0.1', + }, + { + 'ip': '172.18.0.1', + }, + { + 'ip': '172.19.0.1', + }, + { + 'ip': '172.20.0.1', + }, + { + 'ip': '172.21.0.1', + }, + { + 'ip': '172.22.0.1', + }, + { + 'ip': '172.23.0.1', + }, + { + 'ip': '172.24.0.1', + }, + { + 'ip': '172.25.0.1', + }, + { + 'ip': '172.26.0.1', + }, + { + 'ip': '172.27.0.1', + }, + { + 'ip': '172.28.0.1', + }, + { + 'ip': '172.29.0.1', + }, + { + 'ip': '172.30.0.1', + }, + { + 'ip': '172.31.0.1', + }, + { + 'ip': '127.0.0.1', + }, + ], [ + { + 'ip': '10.0.0.1', + }, + { + 'ip': '192.168.0.1', + }, + { + 'ip': '172.16.0.1', + }, + { + 'ip': '172.17.0.1', + }, + { + 'ip': '172.18.0.1', + }, + { + 'ip': '172.19.0.1', + }, + { + 'ip': '172.20.0.1', + }, + { + 'ip': '172.21.0.1', + }, + { + 'ip': '172.22.0.1', + }, + { + 'ip': '172.23.0.1', + }, + { + 'ip': '172.24.0.1', + }, + { + 'ip': '172.25.0.1', + }, + { + 'ip': '172.26.0.1', + }, + { + 'ip': '172.27.0.1', + }, + { + 'ip': '172.28.0.1', + }, + { + 'ip': '172.29.0.1', + }, + { + 'ip': '172.30.0.1', + }, + { + 'ip': '172.31.0.1', + }, + { + 'ip': '127.0.0.1', + }, + ]), +}).export(module); diff --git a/test/test_215_filter_eval.js b/test/test_215_filter_eval.js new file mode 100644 index 00000000..8b3a0901 --- /dev/null +++ b/test/test_215_filter_eval.js @@ -0,0 +1,122 @@ +var vows = require('vows'), + filter_helper = require('./filter_helper'); + +vows.describe('Filter eval ').addBatch({ + 'multiplication': filter_helper.create('eval', 'message?operation=x*1000', [ + { + 'message': 'abcd', + 'toto': 'z' + }, + { + 'message': '10' + }, + { + 'message': 4 + }, + ], 
[ + { + 'message': 'abcd', + 'toto': 'z' + }, + { + 'message': 10000 + }, + { + 'message': 4000 + }, + ]), + 'divide by 0': filter_helper.create('eval', 'message?operation=x/0', [ + { + 'message': 'abcd', + }, + { + 'message': 4 + }, + ], [ + { + 'message': 'abcd', + }, + { + 'message': 4 + }, + ]), + 'target_field': filter_helper.create('eval', 'message?operation=x*10&target_field=toto', [ + { + 'message': 4, + }, + ], [ + { + 'message': 4, + 'toto': 40 + }, + ]), + 'not compile': filter_helper.create('eval', 'message?operation=x%20x', [ + { + 'message': 'abcd', + }, + { + 'message': 4 + }, + ], [ + { + 'message': 'abcd', + }, + { + 'message': 4 + }, + ]), + 'not using x, result integer': filter_helper.create('eval', 'message?operation=5', [ + { + 'message': 'abcd', + }, + ], [ + { + 'message': 5 + }, + ]), + 'not using x, result string': filter_helper.create('eval', 'message?operation=%22azert%22', [ + { + 'message': 'abcd', + }, + ], [ + { + 'message': 'azert' + }, + ]), + 'string lowercase': filter_helper.create('eval', 'message?operation=x.toLowerCase()', [ + { + 'message': 'aBCD', + }, + ], [ + { + 'message': 'abcd' + }, + ]), + 'string concat': filter_helper.create('eval', 'message?operation=x+"a"', [ + { + 'message': 'aBCD', + }, + ], [ + { + 'message': 'aBCDa' + }, + ]), + 'null': filter_helper.create('eval', 'message?operation=null', [ + { + 'message': 'aBCD', + }, + ], [ + { + 'message': 'aBCD' + }, + ]), + 'undefined': filter_helper.create('eval', 'message?operation=undefined', [ + { + 'message': 'aBCD', + }, + ], [ + { + 'message': 'aBCD' + }, + ]), +}).export(module); diff --git a/test/test_216_filter_bunyan.js b/test/test_216_filter_bunyan.js new file mode 100644 index 00000000..b6e34f4c --- /dev/null +++ b/test/test_216_filter_bunyan.js @@ -0,0 +1,21 @@ +var vows = require('vows'), + filter_helper = require('./filter_helper'); + +vows.describe('Filter bunyan').addBatch({ + 'normal': filter_helper.create('bunyan', '', [ + { + 'message': 
'{"name":"myapp","hostname":"banquise.local","pid":6442,"level":30,"msg":"hi","time":"2014-05-31T20:32:53.902Z","v":0}' + }, + ], [ + { + message: 'hi', + bunyan_app_name: 'myapp', + bunyan_version: 0, + host: 'banquise.local', + '@timestamp': '2014-05-31T20:32:53.902Z', + pid: 6442, + bunyan_level_name: 'INFO', + bunyan_level: 30, + }, + ]), +}).export(module); diff --git a/test/test_217_filter_http_status_classifier.js b/test/test_217_filter_http_status_classifier.js new file mode 100644 index 00000000..e82d6891 --- /dev/null +++ b/test/test_217_filter_http_status_classifier.js @@ -0,0 +1,156 @@ +var vows = require('vows'), + filter_helper = require('./filter_helper'); + +vows.describe('Filter http status classifier').addBatch({ + 'normal': filter_helper.create('http_status_classifier', 'http_status', [ + { + http_status: 302, + }, + ], [ + { + http_status: 302, + http_class: '3xx', + }, + ]), + 'all': filter_helper.create('http_status_classifier', 'http_status', [ + { + message: 'toto' + }, + { + message: 'toto', + http_status: 99, + }, + { + message: 'toto', + http_status: 100, + }, + { + message: 'toto', + http_status: 200, + }, + { + message: 'toto', + http_status: 299, + }, + { + message: 'toto', + http_status: 302, + }, + { + message: 'toto', + http_status: 404, + }, + { + message: 'toto', + http_status: 499, + }, + { + message: 'toto', + http_status: 500, + }, + { + message: 'toto', + http_status: 504, + }, + { + message: 'toto', + http_status: 612, + }, + ], [ + { + message: 'toto' + }, + { + message: 'toto', + http_status: 99, + }, + { + message: 'toto', + http_status: 100, + http_class: '1xx', + }, + { + message: 'toto', + http_status: 200, + http_class: '2xx', + }, + { + message: 'toto', + http_status: 299, + http_class: '2xx', + }, + { + message: 'toto', + http_status: 302, + http_class: '3xx', + }, + { + message: 'toto', + http_status: 404, + http_class: '4xx', + }, + { + message: 'toto', + http_status: 499, + http_class: '4xx', + }, + { + message: 
'toto', + http_status: 500, + http_class: '5xx', + }, + { + message: 'toto', + http_status: 504, + http_class: '5xx', + }, + { + message: 'toto', + http_status: 612, + }, + ]), + 'special code': filter_helper.create('http_status_classifier', 'http_status?special_codes=499,302', [ + { + http_status: 134, + }, + { + http_status: 499, + }, + { + http_status: 302, + }, + ], [ + { + http_status: 134, + http_class: '1xx', + }, + { + http_status: 499, + http_class: '499', + }, + { + http_status: 302, + http_class: '302', + }, + ]), + 'string as http_status': filter_helper.create('http_status_classifier', 'http_status', [ + { + message: 'toto', + http_status: 'toto', + }, + { + message: 'toto', + http_status: '102', + }, + ], [ + { + message: 'toto', + http_status: 'toto', + }, + { + message: 'toto', + http_status: '102', + http_class: '1xx', + }, + ]), +}).export(module); diff --git a/test/test_218_filter_grok.js b/test/test_218_filter_grok.js new file mode 100644 index 00000000..a27667bc --- /dev/null +++ b/test/test_218_filter_grok.js @@ -0,0 +1,163 @@ +var vows = require('vows'), + assert = require('assert'), + patterns_loader = require('../lib/lib/patterns_loader'), + filter_helper = require('./filter_helper'); + +patterns_loader.add('lib/patterns'); +patterns_loader.add('lib/toto'); + +vows.describe('Filter grok ').addBatch({ + 'normal': filter_helper.create('grok', '?match=%{NUMBER:fnumber} %{WORD:fword} %{GREEDYDATA:fgreedy}', [ + { + 'message': '123 abc def jhi' + }, + ], [ + { + 'message': '123 abc def jhi', + 'fnumber': 123, + 'fword': 'abc', + 'fgreedy': 'def jhi' + }, + ]), + 'same type': filter_helper.create('grok', '?match=%{NUMBER:fn1} %{NUMBER:fn2} %{NUMBER:fn3}', [ + { + 'message': '123 456 789' + }, + ], [ + { + 'message': '123 456 789', + 'fn1': 123, + 'fn2': 456, + 'fn3': 789 + }, + ], function(r) { + assert.equal(typeof(r[0].fn1), 'number'); + assert.equal(typeof(r[0].fn2), 'number'); + assert.equal(typeof(r[0].fn3), 'number'); + }), + 'haproxy': 
filter_helper.create('grok', '?match=%{HAPROXYHTTP}', [ + { + 'message': 'Sep 14 02:01:37 lb haproxy[11223]: 127.0.0.1:12345 [14/Sep/2014:02:01:37.452] public nginx/server1 0/0/0/5/5 200 490 - - ---- 1269/1269/0/1/0 0/0 "GET /my/path HTTP/1.1"' + }, + ], [ + { + 'message': 'Sep 14 02:01:37 lb haproxy[11223]: 127.0.0.1:12345 [14/Sep/2014:02:01:37.452] public nginx/server1 0/0/0/5/5 200 490 - - ---- 1269/1269/0/1/0 0/0 "GET /my/path HTTP/1.1"', + 'syslog_timestamp': 'Sep 14 02:01:37', + 'syslog_server': 'lb', + 'program': 'haproxy', + 'pid': 11223, + 'client_ip': '127.0.0.1', + 'client_port': 12345, + 'accept_date': '14/Sep/2014:02:01:37.452', + 'haproxy_monthday': 14, + 'haproxy_month': 'Sep', + 'haproxy_year': 2014, + 'haproxy_time': '02:01:37', + 'haproxy_hour': 2, + 'haproxy_minute': 1, + 'haproxy_second': 37, + 'haproxy_milliseconds': 452, + 'frontend_name': 'public', + 'backend_name': 'nginx', + 'server_name': 'server1', + 'time_request': 0, + 'time_queue': 0, + 'time_backend_connect': 0, + 'time_backend_response': 5, + 'time_duration': 5, + 'http_status_code': 200, + 'bytes_read': 490, + 'captured_request_cookie': '-', + 'captured_response_cookie': '-', + 'termination_state': '----', + 'actconn': 1269, + 'feconn': 1269, + 'beconn': 0, + 'srvconn': 1, + 'retries': 0, + 'srv_queue': 0, + 'backend_queue': 0, + 'http_verb': 'GET', + 'http_request': '/my/path', + 'http_version': 1.1 + }, + ]), + 'extra patterns': filter_helper.create('grok', '?match=%{GROKTEST}&extra_patterns_file=' + __dirname + '/grok/extra', [ + { + 'message': '123 abc def jhi ABC123' + }, + ], [ + { + 'message': '123 abc def jhi ABC123', + 'fnumber': 123, + 'fword': 'abc', + 'fgreedy': 'def jhi', + 'ftestpattern': 'ABC123' + }, + ]), + 'wrong grok pattern syntax error': filter_helper.create('grok', '?match=%{GROKTEST3}&extra_patterns_file=' + __dirname + '/grok/extra', [ + { + 'message': 'toto' + }, + ], [ + { + 'message': 'toto', + tags: ['_grokparsefailure'], + } + ]), + 'parse ok 1': 
filter_helper.create('grok', '?match=%{IP}&add_tags=x&add_fields=a:#{host}&remove_field=error', [ + { + 'message': '1.2.3.4', + 'host': 'titi', + 'error': 'a', + }, + ], [ + { + 'message': '1.2.3.4', + 'host': 'titi', + 'tags': ['x'], + 'a': 'titi' + } + ]), + 'parse ok 2': filter_helper.create('grok', '?match=%{IP}&add_tags=x,t&add_field=a:#{host},b:2&remove_fields=toto,error', [ + { + 'message': '1.2.3.4', + 'host': 'titi', + 'error': 'a', + }, + ], [ + { + 'message': '1.2.3.4', + 'host': 'titi', + 'tags': ['x', 't'], + 'a': 'titi', + 'b': 2, + } + ]), + 'parse error 1': filter_helper.create('grok', '?match=%{IP}&tag_on_failure=&add_tags=y,t&add_tags=x&add_fields=a:#{host}&remove_field=error', [ + { + 'message': 'toto', + 'error': 'a', + 'tags': ['x'], + }, + ], [ + { + 'message': 'toto', + 'error': 'a', + 'tags': ['x'], + } + ]), + 'parse error 2': filter_helper.create('grok', '?match=%{IP}&tag_on_failure=a,b&remove_tags=y&add_tags=x&add_fields=a:#{host}&remove_field=error', [ + { + 'message': 'toto', + 'error': 'a', + 'tags': ['x', 'y'], + }, + ], [ + { + 'message': 'toto', + 'error': 'a', + 'tags': ['x', 'y', 'a', 'b'], + } + ]), +}).export(module); diff --git a/test/test_219_filter_truncate.js b/test/test_219_filter_truncate.js new file mode 100644 index 00000000..e3838558 --- /dev/null +++ b/test/test_219_filter_truncate.js @@ -0,0 +1,20 @@ +var vows = require('vows'), + filter_helper = require('./filter_helper'); + +vows.describe('Filter truncate ').addBatch({ + 'normal': filter_helper.create('truncate', '?max_size=3', [ + { + 'message': 'toto' + }, + { + 'message': 't' + }, + ], [ + { + 'message': 'tot', + }, + { + 'message': 't', + }, + ]), +}).export(module); diff --git a/test/test_220_remove_field_when_equal.js b/test/test_220_remove_field_when_equal.js new file mode 100644 index 00000000..389372e1 --- /dev/null +++ b/test/test_220_remove_field_when_equal.js @@ -0,0 +1,29 @@ +var vows = require('vows'), + filter_helper = require('./filter_helper'); + 
+vows.describe('Filter remove field when equal').addBatch({ + 'normal': filter_helper.create('remove_field_when_equal', 'request_id?value=-', [ + { + 'message': 'a', + 'request_id': 'b', + }, + { + 'message': 'b', + 'request_id': '-', + }, + { + 'message': 'c', + }, + ], [ + { + 'message': 'a', + 'request_id': 'b', + }, + { + 'message': 'b', + }, + { + 'message': 'c', + }, + ]), +}).export(module); diff --git a/test/test_221_filter_rename.js b/test/test_221_filter_rename.js new file mode 100644 index 00000000..12d5bc5e --- /dev/null +++ b/test/test_221_filter_rename.js @@ -0,0 +1,20 @@ +var vows = require('vows'), + filter_helper = require('./filter_helper'); + +vows.describe('Filter rename ').addBatch({ + 'normal': filter_helper.create('rename', 'message?to=m', [ + { + 'message': 'toto' + }, + { + 'message2': 't' + }, + ], [ + { + 'm': 'toto', + }, + { + 'message2': 't', + }, + ]), +}).export(module); diff --git a/test/test_300_monitor_file.js b/test/test_300_monitor_file.js new file mode 100644 index 00000000..750db38c --- /dev/null +++ b/test/test_300_monitor_file.js @@ -0,0 +1,738 @@ +var vows = require('vows-batch-retry'), + assert = require('assert'), + os = require('os'), + fs = require('fs'), + path = require('path'), + child_process = require('child_process'), + log = require('log4node'), + monitor_file = require('lib/monitor_file'); + +function randomFile(pathname) { + return path.join(pathname || os.tmpDir(), '___node-logstash_test___' + Math.random()); +} + +function TestMonitor(file, options) { + this.file = file; + this.lines = []; + this.errors = []; + this.changed_counter = 0; + this.renamed_counter = 0; + this.closed_counter = 0; + this.monitor = monitor_file.monitor(this.file, options); + this.monitor.on('data', function(data) { + this.lines.push(data); + }.bind(this)); + this.monitor.on('error', function(err) { + log.error(err); + this.errors.push(err); + }.bind(this)); + this.monitor.on('renamed', function() { + this.renamed_counter++; + 
}.bind(this)); + this.monitor.on('changed', function() { + this.changed_counter++; + }.bind(this)); + this.monitor.on('closed', function() { + this.closed_counter++; + }.bind(this)); +} + +function run(command, args, exit_callback) { + var child = child_process.spawn(command, args); + child.on('error', function(err) { + assert.ifError(err); + }); + child.on('exit', exit_callback); +} + +function create_test(start_callback, check_callback, path, options) { + return { + topic: function() { + var m = new TestMonitor(randomFile(path), options); + var callback = this.callback; + start_callback(m, function(err) { + setTimeout(function() { + m.monitor.close(function() { + callback(err, m); + }); + }, 20); + }); + }, + + check: function(err, m) { + assert.ifError(err); + check_callback(m); + } + }; +} + +function no_error(m) { + assert.equal(m.errors.length, 0); +} + +vows.describe('Monitor ').addBatch({ + 'Not existent file': create_test( + function(m, callback) { + m.monitor.start(callback); + }, function(m) { + no_error(m); + assert.equal(m.lines.length, 0); + } + ), +}).addBatch({ + 'Empty file': create_test( + function(m, callback) { + fs.writeFile(m.file, '', function(err) { + assert.ifError(err); + m.monitor.start(callback); + }); + }, function(m) { + fs.unlinkSync(m.file); + no_error(m); + assert.equal(m.lines.length, 0); + } + ), +}).addBatch({ + 'Not empty file start index undefined': create_test( + function(m, callback) { + fs.writeFile(m.file, 'line1\nline2\n', function(err) { + assert.ifError(err); + m.monitor.start(callback); + }); + }, function(m) { + fs.unlinkSync(m.file); + no_error(m); + assert.equal(m.lines.length, 0); + } + ), +}).addBatch({ + 'Not empty file start index 0': create_test( + function(m, callback) { + fs.writeFile(m.file, 'line1\nline2\n', function(err) { + assert.ifError(err); + m.monitor.start(callback, 0); + }); + }, function(m) { + fs.unlinkSync(m.file); + no_error(m); + assert.deepEqual(m.lines, ['line1', 'line2']); + } + ), 
+}).addBatch({ + 'Not empty file start index 3': create_test( + function(m, callback) { + fs.writeFile(m.file, 'line1\nline2\n', function(err) { + assert.ifError(err); + m.monitor.start(callback, 3); + }); + }, function(m) { + fs.unlinkSync(m.file); + no_error(m); + assert.deepEqual(m.lines, ['e1', 'line2']); + } + ), +}).addBatch({ + 'Not empty file start index 0, big buffer, and empty line removal': create_test( + function(m, callback) { + fs.writeFile(m.file, fs.readFileSync(__filename).toString(), function(err) { + assert.ifError(err); + m.monitor.start(callback, 0); + }); + }, function(m) { + setTimeout(function() { + fs.unlinkSync(m.file); + no_error(m); + var test_file_lines = fs.readFileSync(__filename).toString().split('\n'); + var index = 0; + test_file_lines.forEach(function(l) { + if (l.length > 0) { + assert.equal(l, m.lines[index]); + index += 1; + } + }); + assert.equal(m.lines.length, index); + }, 500); + } + ), +}).addBatch({ + 'File filled after start': create_test( + function(m, callback) { + fs.writeFile(m.file, '', function(err) { + assert.ifError(err); + m.monitor.start(function(err) { + assert.ifError(err); + fs.appendFile(m.file, 'line1\nline2\n', function(err) { + assert.ifError(err); + setTimeout(callback, 200); + }); + }); + }); + }, function(m) { + fs.unlinkSync(m.file); + no_error(m); + assert.deepEqual(m.lines, ['line1', 'line2']); + } + ), +}).addBatch({ + 'File created after start': create_test(function(m, callback) { + m.monitor.start(function(err) { + assert.ifError(err); + fs.writeFile(m.file, 'line1\nline2\n', function(err) { + assert.ifError(err); + setTimeout(callback, 200); + }); + }, 0); + }, function check(m) { + fs.unlinkSync(m.file); + no_error(m); + assert.deepEqual(m.lines, ['line1', 'line2']); + }), +}).addBatch({ + 'File created after start, filled with append': create_test( + function(m, callback) { + m.monitor.start(function(err) { + assert.ifError(err); + fs.appendFile(m.file, 'line1\n', function(err) { + 
assert.ifError(err); + setTimeout(function() { + fs.appendFile(m.file, 'line2\n', function(err) { + assert.ifError(err); + setTimeout(callback, 200); + }); + }, 200); + }); + }); + }, function(m) { + fs.unlinkSync(m.file); + no_error(m); + assert.deepEqual(m.lines, ['line1', 'line2']); + } + ), +}).addBatch({ + 'File rewritten from start': create_test( + function(m, callback) { + m.monitor.start(function(err) { + assert.ifError(err); + fs.appendFile(m.file, 'line1\n', function(err) { + assert.ifError(err); + setTimeout(function() { + fs.writeFile(m.file, 'line2\n', function(err) { + assert.ifError(err); + setTimeout(callback, 200); + }); + }, 200); + }); + }); + }, function(m) { + fs.unlinkSync(m.file); + no_error(m); + assert.deepEqual(m.lines, ['line1', 'line2']); + } + ), +}).addBatch({ + 'File removed': create_test(function(m, callback) { + fs.writeFileSync(m.file, 'line1\nline2\n'); + m.monitor.start(function(err) { + assert.ifError(err); + setTimeout(function() { + fs.unlinkSync(m.file); + setTimeout(callback, 200); + }, 200); + }, 0); + }, function check(m) { + assert.equal(m.monitor.fdTailer, undefined); + no_error(m); + assert.deepEqual(m.lines, ['line1', 'line2']); + }), +}).addBatch({ + 'File removed and recreated': create_test(function(m, callback) { + fs.writeFileSync(m.file, 'line1\nline2\n'); + m.monitor.start(function(err) { + assert.ifError(err); + fs.unlink(m.file, function(err) { + assert.ifError(err); + setTimeout(function() { + assert.equal(m.monitor.fdTailer, undefined); + assert.equal(1, m.monitor.oldFdTailers.length); + setTimeout(function() { + assert.equal(0, m.monitor.oldFdTailers.length); + fs.writeFile(m.file, 'line3\n', function(err) { + assert.ifError(err); + setTimeout(callback, 200); + }); + }, 200); + }, 10); + }); + }, 0); + }, function check(m) { + no_error(m); + assert.deepEqual(m.lines, ['line1', 'line2', 'line3']); + }, undefined, { + wait_delay_after_renaming: 100 + }), +}).addBatch({ + 'File renamed, same name': 
create_test(function(m, callback) { + fs.writeFileSync(m.file, 'line1\nline2\n'); + m.monitor.start(function(err) { + assert.ifError(err); + m.monitor.emit('renamed', m.file); + setTimeout(function() { + assert.equal(0, m.monitor.oldFdTailers.length); + fs.appendFile(m.file, 'line3\n', function(err) { + assert.ifError(err); + setTimeout(callback, 200); + }); + }, 200); + }, 0); + }, function check(m) { + no_error(m); + assert.deepEqual(m.lines, ['line1', 'line2', 'line1', 'line2', 'line3']); + }), +}).addBatch({ + 'Incomplete line': create_test(function(m, callback) { + fs.writeFileSync(m.file, 'line1\nline2\nline3'); + m.monitor.start(function(err) { + assert.ifError(err); + setTimeout(function() { + assert.deepEqual(m.lines, ['line1', 'line2']); + setTimeout(function() { + fs.appendFile(m.file, 'line3\nline4\nline5', function(err) { + assert.ifError(err); + setTimeout(callback, 200); + }); + }, 200); + }, 200); + }, 0); + }, function check(m) { + fs.unlinkSync(m.file); + no_error(m); + assert.deepEqual(m.lines, ['line1', 'line2', 'line3line3', 'line4']); + }), +}).addBatch({ + 'Fd filled while monitoring': create_test(function(m, callback) { + m.test_fd = fs.openSync(m.file, 'a'); + var buffer = new Buffer('line1\nline2\n'); + m.monitor.start(function(err) { + assert.ifError(err); + fs.write(m.test_fd, buffer, 0, 6, null, function(err) { + assert.ifError(err); + fs.fsync(m.test_fd, function(err) { + assert.ifError(err); + setTimeout(function() { + assert.deepEqual(m.lines, ['line1']); + fs.write(m.test_fd, buffer, 6, 6, null, function(err) { + assert.ifError(err); + fs.fsync(m.test_fd, function(err) { + assert.ifError(err); + setTimeout(callback, 200); + }); + }); + }, 200); + }); + }); + }, 0); + }, function check(m) { + fs.closeSync(m.test_fd); + fs.unlinkSync(m.file); + no_error(m); + assert.deepEqual(m.lines, ['line1', 'line2']); + }), +}).addBatch({ + 'utf8 encoding': create_test( + function(m, callback) { + fs.writeFile(m.file, 'é\nline2\n', function(err) { 
+ assert.ifError(err); + m.monitor.start(function(err) { + assert.ifError(err); + setTimeout(callback, 200); + }, 0); + }); + }, function(m) { + fs.unlinkSync(m.file); + no_error(m); + assert.deepEqual(m.lines, ['é', 'line2']); + } + ), +}).addBatch({ + 'ascii encoding': create_test(function(m, callback) { + fs.writeFile(m.file, 'é\nline2\n', function(err) { + assert.ifError(err); + m.monitor.start(function(err) { + assert.ifError(err); + setTimeout(callback, 200); + }, 0); + }); + }, function(m) { + fs.unlinkSync(m.file); + no_error(m); + assert.deepEqual(m.lines, ['C)', 'line2']); + }, undefined, { + buffer_encoding: 'ascii' + }), +}).addBatchRetry({ + 'Double monitoring same directory': { + topic: function() { + var callback = this.callback; + var m1 = new TestMonitor(randomFile()); + var m2 = new TestMonitor(randomFile()); + m1.monitor.start(); + m2.monitor.start(); + fs.appendFile(m1.file, 'line1\n', function(err) { + assert.ifError(err); + setTimeout(function() { + fs.appendFile(m2.file, 'line10\n', function(err) { + assert.ifError(err); + setTimeout(function() { + fs.appendFile(m1.file, 'line2\n', function(err) { + assert.ifError(err); + setTimeout(function() { + m1.monitor.close(function() { + m2.monitor.close(function() { + callback(undefined, m1, m2); + }); + }); + }, 200); + }); + }, 200); + }); + }, 200); + }); + }, + + check: function(err, m1, m2) { + assert.ifError(err); + fs.unlinkSync(m1.file); + fs.unlinkSync(m2.file); + no_error(m1); + no_error(m2); + assert.deepEqual(m1.lines, ['line1', 'line2']); + assert.equal(m1.changed_counter, 2); + assert.deepEqual(m2.lines, ['line10']); + assert.equal(m2.changed_counter, 1); + } + } +}, 5, 10000).addBatch({ + 'Wrong file path': create_test(function(m, callback) { + m.monitor.start(function(err) { + assert.isDefined(err); + callback(); + }); + }, function check(m) { + no_error(m); + assert.equal(m.lines.length, 0); + }, '/toto_does_not_exists/toto.log'), +}).addBatchRetry({ + 'Simple logrotate simulation': 
create_test(function(m, callback) { + m.monitor.start(function(err) { + assert.ifError(err); + fs.writeFile(m.file, 'line1\nline2\n', function(err) { + assert.ifError(err); + setTimeout(function() { + assert.deepEqual(m.lines, ['line1', 'line2']); + fs.rename(m.file, m.file + '.1', function(err) { + assert.ifError(err); + fs.writeFile(m.file, 'line3\nline4\n', function(err) { + assert.ifError(err); + setTimeout(callback, 200); + }); + }); + }, 200); + }); + }, 0); + }, function check(m) { + fs.unlinkSync(m.file); + fs.unlinkSync(m.file + '.1'); + no_error(m); + assert.deepEqual(m.lines, ['line1', 'line2', 'line3', 'line4']); + assert.equal(m.closed_counter, 2); + }, undefined, { + wait_delay_after_renaming: 1 + }), +}, 5, 10000).addBatchRetry({ + 'Complex logrotate simulation': create_test(function(m, callback) { + m.monitor.start(function(err) { + assert.ifError(err); + fs.writeFile(m.file, 'line1\nline2\n', function(err) { + assert.ifError(err); + setTimeout(function() { + assert.deepEqual(m.lines, ['line1', 'line2']); + fs.rename(m.file, m.file + '.1', function(err) { + assert.ifError(err); + setTimeout(function() { + fs.appendFile(m.file + '.1', 'line3\nline4\n', function(err) { + assert.ifError(err); + setTimeout(function() { + fs.writeFile(m.file, 'line5\nline6\n', function(err) { + assert.ifError(err); + setTimeout(callback, 500); + }); + }, 100); + }); + }, 100); + }); + }, 200); + }); + }, 0); + }, function check(m) { + fs.unlinkSync(m.file); + fs.unlinkSync(m.file + '.1'); + no_error(m); + assert.deepEqual(m.lines, ['line1', 'line2', 'line3', 'line4', 'line5', 'line6']); + assert.equal(m.closed_counter, 2); + }, undefined, { + wait_delay_after_renaming: 500 + }), +}, 5, 10000).addBatchRetry({ + 'Complex logrotate simulation with permission pb': create_test(function(m, callback) { + m.monitor.start(function(err) { + assert.ifError(err); + fs.writeFile(m.file, 'line1\nline2\n', function(err) { + assert.ifError(err); + setTimeout(function() { + 
assert.deepEqual(m.lines, ['line1', 'line2']); + fs.rename(m.file, m.file + '.1', function(err) { + assert.ifError(err); + setTimeout(function() { + run('/bin/sh', ['-c', 'umask 777 && touch ' + m.file], function(exit_code) { + assert.equal(exit_code, 0); + setTimeout(function() { + run('/bin/sh', ['-c', 'chmod 644 ' + m.file], function(exit_code) { + assert.equal(exit_code, 0); + setTimeout(function() { + fs.writeFile(m.file, 'line3\nline4\n', function(err) { + assert.ifError(err); + setTimeout(callback, 200); + }); + }, 100); + }); + }, 100); + }); + }, 100); + }); + }, 200); + }); + }, 0); + }, function check(m) { + fs.unlinkSync(m.file); + fs.unlinkSync(m.file + '.1'); + assert.greater(m.errors.length, 0); + assert.match(m.errors[0].toString(), /EACCES/); + assert.deepEqual(m.lines, ['line1', 'line2', 'line3', 'line4']); + assert.equal(m.closed_counter, 2); + }, undefined, { + wait_delay_after_renaming: 500 + }), +}, 5, 10000).addBatchRetry({ + 'Monitor restart': { + topic: function() { + var callback = this.callback; + var m1 = new TestMonitor(randomFile()); + m1.monitor.start(function(err) { + assert.ifError(err); + fs.appendFile(m1.file, 'line1\nline2\n', function(err) { + assert.ifError(err); + setTimeout(function() { + m1.monitor.close(function() { + var m2 = new TestMonitor(m1.file); + m2.monitor.start(function(err) { + assert.ifError(err); + fs.appendFile(m1.file, 'line3\nline4\n', function(err) { + assert.ifError(err); + setTimeout(function() { + m2.monitor.close(function() { + callback(undefined, m1, m2); + }); + }); + }); + }); + }); + }, 200); + }); + }); + }, + + check: function(err, m1, m2) { + assert.ifError(err); + fs.unlinkSync(m1.file); + no_error(m1); + no_error(m2); + assert.deepEqual(m1.lines, ['line1', 'line2']); + assert.equal(m1.changed_counter, 1); + assert.deepEqual(m2.lines, ['line3', 'line4']); + assert.equal(m2.changed_counter, 1); + } + } +}, 5, 10000).addBatchRetry({ + 'Monitor restart with write while restart': { + topic: 
function() { + var callback = this.callback; + var m1 = new TestMonitor(randomFile()); + m1.monitor.start(function(err) { + assert.ifError(err); + fs.appendFile(m1.file, 'line1\nline2\n', function(err) { + assert.ifError(err); + setTimeout(function() { + m1.monitor.close(function() { + setTimeout(function() { + fs.appendFile(m1.file, 'line3\nline4\n', function(err) { + assert.ifError(err); + var m2 = new TestMonitor(m1.file); + m2.monitor.start(function(err) { + assert.ifError(err); + fs.appendFile(m1.file, 'line5\nline6\n', function(err) { + assert.ifError(err); + setTimeout(function() { + m2.monitor.close(function() { + callback(undefined, m1, m2); + }); + }, 200); + }); + }); + }); + }, 500); + }); + }, 200); + }); + }); + }, + + check: function(err, m1, m2) { + assert.ifError(err); + fs.unlinkSync(m1.file); + no_error(m1); + no_error(m2); + assert.deepEqual(m1.lines, ['line1', 'line2']); + assert.equal(m1.changed_counter, 1); + assert.deepEqual(m2.lines, ['line3', 'line4', 'line5', 'line6']); + assert.equal(m2.changed_counter, 1); + } + } +}, 5, 10000).addBatchRetry({ + 'Monitor restart with write while restart, in a new file, too short': { + topic: function() { + var callback = this.callback; + var m1 = new TestMonitor(randomFile()); + m1.monitor.start(function(err) { + assert.ifError(err); + fs.appendFile(m1.file, 'line1\nline2\n', function(err) { + assert.ifError(err); + setTimeout(function() { + m1.monitor.close(function() { + setTimeout(function() { + fs.unlink(m1.file, function(err) { + assert.ifError(err); + fs.appendFile(m1.file, 'line3\n', function(err) { + assert.ifError(err); + var m2 = new TestMonitor(m1.file); + m2.monitor.start(function(err) { + assert.ifError(err); + fs.appendFile(m1.file, 'line4\nline5\n', function(err) { + assert.ifError(err); + setTimeout(function() { + m2.monitor.close(function() { + callback(undefined, m1, m2); + }); + }, 200); + }); + }); + }); + }); + }, 500); + }); + }, 200); + }); + }); + }, + + check: function(err, m1, 
m2) { + assert.ifError(err); + fs.unlinkSync(m1.file); + no_error(m1); + no_error(m2); + assert.deepEqual(m1.lines, ['line1', 'line2']); + assert.equal(m1.changed_counter, 1); + assert.deepEqual(m2.lines, ['line4', 'line5']); + assert.equal(m2.changed_counter, 1); + } + } +}, 5, 10000).addBatchRetry({ + 'Monitor restart with write while restart, in a new file, content not correct': { + topic: function() { + var callback = this.callback; + var m1 = new TestMonitor(randomFile()); + m1.monitor.start(function(err) { + assert.ifError(err); + fs.appendFile(m1.file, 'line1\nline2\n', function(err) { + assert.ifError(err); + setTimeout(function() { + m1.monitor.close(function() { + setTimeout(function() { + fs.unlink(m1.file, function(err) { + assert.ifError(err); + fs.appendFile(m1.file, 'line3\nline4\nline5\n', function(err) { + assert.ifError(err); + var m2 = new TestMonitor(m1.file); + m2.monitor.start(function(err) { + assert.ifError(err); + fs.appendFile(m1.file, 'line6\nline7\n', function(err) { + assert.ifError(err); + setTimeout(function() { + m2.monitor.close(function() { + callback(undefined, m1, m2); + }); + }, 200); + }); + }); + }); + }); + }, 500); + }); + }, 200); + }); + }); + }, + + check: function(err, m1, m2) { + assert.ifError(err); + fs.unlinkSync(m1.file); + no_error(m1); + no_error(m2); + assert.deepEqual(m1.lines, ['line1', 'line2']); + assert.equal(m1.changed_counter, 1); + assert.deepEqual(m2.lines, ['line6', 'line7']); + assert.equal(m2.changed_counter, 1); + } + } +}, 5, 10000).addBatchRetry({ + 'Monitor fifo': { + topic: function() { + var callback = this.callback; + run('mkfifo', ['toto_fifo'], function(exitCode) { + assert.equal(0, exitCode); + var m = new TestMonitor('toto_fifo', {}); + m.monitor.start(function(err) { + assert.ifError(err); + run('sh', ['-c', 'echo x1 > toto_fifo'], function(exitCode) { + assert.equal(0, exitCode); + run('sh', ['-c', 'echo x2 > toto_fifo'], function(exitCode) { + assert.equal(0, exitCode); + 
setTimeout(function() { + m.monitor.close(function() { + callback(undefined, m); + }); + }, 200); + }); + }); + }); + }); + }, + + check: function(err, m) { + assert.ifError(err); + fs.unlinkSync('toto_fifo'); + no_error(m); + assert.deepEqual(m.lines, ['x1', 'x2']); + } + } +}, 5, 10000).export(module); +// Do not remove empty line, this file is used during test \ No newline at end of file diff --git a/test/test_301_tail_file.js b/test/test_301_tail_file.js new file mode 100644 index 00000000..33edc860 --- /dev/null +++ b/test/test_301_tail_file.js @@ -0,0 +1,229 @@ +var vows = require('vows-batch-retry'), + assert = require('assert'), + os = require('os'), + fs = require('fs'), + path = require('path'), + log = require('log4node'), + tail = require('lib/tail_file'); + +function randomFile(pathname) { + return path.join(pathname || os.tmpDir(), '___node-logstash_test___' + Math.random()); +} + +function TestMonitor(file, options) { + this.file = file; + this.lines = []; + this.errors = []; + this.changed_counter = 0; + this.renamed_counter = 0; + this.closed_counter = 0; + this.monitor = tail.tail(this.file, options); + this.monitor.on('data', function(data) { + this.lines.push(data); + }.bind(this)); + this.monitor.on('error', function(err) { + log.error(err); + this.errors.push(err); + }.bind(this)); +} + +function create_test(start_callback, check_callback, path, options) { + return { + topic: function() { + var m = new TestMonitor(randomFile(path), options); + var callback = this.callback; + start_callback(m, function(err) { + m.monitor.close(function() { + callback(err, m); + }); + }); + }, + + check: function(err, m) { + assert.ifError(err); + check_callback(m); + } + }; +} + +function no_error(m) { + assert.equal(m.errors.length, 0); +} + +vows.describe('Monitor ').addBatch({ + 'Not existent file': create_test(function(m, callback) { + m.monitor.start(callback); + }, function(m) { + no_error(m); + assert.equal(m.lines.length, 0); + }), +}).addBatch({ + 
'Empty file': create_test(function(m, callback) { + fs.writeFileSync(m.file, ''); + m.monitor.start(callback); + }, function(m) { + fs.unlinkSync(m.file); + no_error(m); + assert.equal(m.lines.length, 0); + }), +}).addBatch({ + 'Not empty file start': create_test(function(m, callback) { + fs.writeFileSync(m.file, 'line1\nline2\n'); + m.monitor.start(callback); + }, function(m) { + fs.unlinkSync(m.file); + no_error(m); + assert.equal(m.lines.length, 0); + }), +}).addBatch({ + 'Not empty file start, but start_index': create_test(function(m, callback) { + fs.writeFileSync(m.file, 'line1\nline2\n'); + m.monitor.start(callback, 0); + }, function(m) { + fs.unlinkSync(m.file); + no_error(m); + assert.equal(m.lines.length, 2); + assert.deepEqual(m.lines, ['line1', 'line2']); + }), +}).addBatch({ + 'File filled after start': create_test(function(m, callback) { + fs.writeFileSync(m.file, ''); + m.monitor.start(function(err) { + assert.ifError(err); + fs.appendFileSync(m.file, 'line1\nline2\n'); + setTimeout(callback, 500); + }); + }, function(m) { + fs.unlinkSync(m.file); + no_error(m); + assert.deepEqual(m.lines, ['line1', 'line2']); + }), +}).addBatch({ + 'File created after start': create_test(function(m, callback) { + m.monitor.start(function(err) { + assert.ifError(err); + fs.writeFileSync(m.file, 'line1\nline2\n'); + setTimeout(callback, 200); + }); + }, function check(m) { + fs.unlinkSync(m.file); + no_error(m); + assert.deepEqual(m.lines, ['line1', 'line2']); + }), +}).addBatch({ + 'File created after start, filled with append': create_test(function(m, callback) { + m.monitor.start(function(err) { + assert.ifError(err); + fs.appendFileSync(m.file, 'line1\n'); + setTimeout(function() { + fs.appendFileSync(m.file, 'line2\n'); + setTimeout(callback, 1000); + }, 200); + }); + }, function(m) { + fs.unlinkSync(m.file); + no_error(m); + assert.deepEqual(m.lines, ['line1', 'line2']); + }), +}).addBatch({ + 'File created after start, filled with append async': 
create_test(function(m, callback) { + m.monitor.start(function(err) { + assert.ifError(err); + fs.appendFile(m.file, 'line1\n', function(err) { + assert.ifError(err); + setTimeout(function() { + fs.appendFile(m.file, 'line2\n', function(err) { + assert.ifError(err); + setTimeout(callback, 1000); + }); + }, 200); + }); + }); + }, function(m) { + fs.unlinkSync(m.file); + no_error(m); + assert.deepEqual(m.lines, ['line1', 'line2']); + }), +}).addBatch({ + 'File removed': create_test(function(m, callback) { + m.monitor.start(function(err) { + assert.ifError(err); + fs.writeFileSync(m.file, 'line1\nline2\n'); + setTimeout(function() { + fs.unlinkSync(m.file); + setTimeout(callback, 200); + }, 200); + }); + }, function check(m) { + assert.equal(m.monitor.fdTailer, undefined); + no_error(m); + assert.deepEqual(m.lines, ['line1', 'line2']); + }), +}).addBatch({ + 'File removed and recreated, second file is not read': create_test(function(m, callback) { + m.monitor.start(function(err) { + assert.ifError(err); + fs.writeFileSync(m.file, 'line1\nline2\n'); + setTimeout(function() { + fs.unlinkSync(m.file); + setTimeout(function() { + fs.writeFileSync(m.file, 'line3\n'); + setTimeout(callback, 200); + }, 200); + }, 200); + }); + }, function check(m) { + no_error(m); + assert.deepEqual(m.lines, ['line1', 'line2']); + }, undefined, { + wait_delay_after_renaming: 100 + }), +}).addBatchRetry({ + 'Double monitoring same directory': { + topic: function() { + var callback = this.callback; + var m1 = new TestMonitor(randomFile()); + var m2 = new TestMonitor(randomFile()); + m1.monitor.start(function(err) { + assert.ifError(err); + m2.monitor.start(function(err) { + assert.ifError(err); + fs.appendFileSync(m1.file, 'line1\n'); + setTimeout(function() { + fs.appendFileSync(m2.file, 'line10\n'); + setTimeout(function() { + fs.appendFileSync(m1.file, 'line2\n'); + setTimeout(function() { + m1.monitor.close(function() { + m2.monitor.close(function() { + callback(undefined, m1, m2); + }); + 
}); + }, 200); + }, 200); + }); + }); + }); + }, + + check: function(err, m1, m2) { + assert.ifError(err); + fs.unlinkSync(m1.file); + fs.unlinkSync(m2.file); + no_error(m1); + no_error(m2); + assert.deepEqual(m1.lines, ['line1', 'line2']); + assert.deepEqual(m2.lines, ['line10']); + } + } +}, 5, 10000).addBatch({ + 'Wrong file path': create_test(function(m, callback) { + m.monitor.start(function(err) { + assert.isDefined(err); + callback(); + }); + }, function check(m) { + no_error(m); + assert.equal(m.lines.length, 0); + }, '/toto_does_not_exists/toto.log'), +}).export(module); diff --git a/test/test_302_directory_detector.js b/test/test_302_directory_detector.js new file mode 100644 index 00000000..3d268a01 --- /dev/null +++ b/test/test_302_directory_detector.js @@ -0,0 +1,267 @@ +var vows = require('vows-batch-retry'), + assert = require('assert'), + fs = require('fs'), + path = require('path'), + spawn = require('child_process').spawn, + mkdirp = require('mkdirp'), + rimraf = require('rimraf'), + not_readable_helper = require('./not_readable_helper'), + directory_detector = require('lib/directory_detector'); + +function TestDirectoryDetector(directory, callback) { + this.exists = []; + this.errors = []; + this.removed = []; + this.detector = new directory_detector.DirectoryDetector(); + this.detector.on('exists', function(d, newly_created) { + this.exists.push(d); + this.exists.push(newly_created); + }.bind(this)); + this.detector.on('removed', function(d) { + this.removed.push(d); + }.bind(this)); + this.detector.on('error', function(err) { + this.errors.push(err); + }.bind(this)); + this.detector.start(directory, callback); +} + +function check(detector, exists, removed) { + assert.equal(detector.errors.length, 0); + assert.deepEqual(detector.exists.sort(), exists.sort()); + assert.deepEqual(detector.removed, removed || []); +} + +function create_test(directory, start_callback, check_callback) { + return { + topic: function() { + 
not_readable_helper.create('root'); + var callback = this.callback; + var already = false; + var detector = new TestDirectoryDetector(directory, function() { + assert.isFalse(already); + already = true; + start_callback(function(detector2) { + detector.detector.close(function(err) { + callback(err, detector, detector2); + }); + }, detector); + }); + }, + + check: function(err, detector, detector2) { + assert.ifError(err); + not_readable_helper.remove('root'); + check_callback(detector, detector2); + } + }; +} + +function create_test_init_failed(directory, pattern) { + return { + topic: function() { + not_readable_helper.create('root'); + var callback = this.callback; + var start_called = false; + var detector = new directory_detector.DirectoryDetector(); + detector.on('error', function(err) { + assert.isDefined(err); + assert.match(err.toString(), new RegExp(pattern)); + callback(null, start_called); + }); + detector.start(directory, function() { + start_called = true; + }); + }, + + check: function(err, start_called) { + assert.ifError(err); + not_readable_helper.remove('root'); + assert.isFalse(start_called); + } + }; +} + +vows.describe('Directory detector ').addBatchRetry({ + 'current directory exists': create_test(path.resolve('.'), function(callback) { + setTimeout(function() { + callback(); + }, 50); + }, function(detector) { + check(detector, [path.resolve('.'), false]); + }), +}, 5, 10000).addBatchRetry({ + 'directory does not exists at startup': create_test(path.resolve('.') + '/toto32', function(callback) { + setTimeout(function() { + callback(); + }, 50); + }, function(detector) { + check(detector, []); + }), +}, 5, 10000).addBatchRetry({ + 'directory does not exists at startup, parent = /': create_test('/toto32', function(callback) { + setTimeout(function() { + callback(); + }, 50); + }, function(detector) { + check(detector, []); + }), +}, 5, 10000).addBatchRetry({ + 'strange directory name': create_test('//////toto56', function(callback) { + 
setTimeout(function() { + callback(); + }, 50); + }, function(detector) { + check(detector, []); + }), +}, 5, 10000).addBatchRetry({ + 'strange directory name 2': create_test('/#&toto56', function(callback) { + setTimeout(function() { + callback(); + }, 50); + }, function(detector) { + check(detector, []); + }), +}, 5, 10000).addBatchRetry({ + 'directory does not exists at startup, parent not readable': create_test_init_failed('root/toto87/uio', 'EACCES'), +}, 5, 10000).addBatchRetry({ + '1 subdirectory': create_test(path.resolve('.') + '/toto44', function(callback, detector) { + setTimeout(function() { + check(detector, []); + mkdirp('toto44', function(err) { + assert.ifError(err); + setTimeout(function() { + callback(); + }, 50); + }); + }, 50); + }, function(detector) { + rimraf.sync('toto44'); + check(detector, [path.resolve('.') + '/toto44', true]); + }), +}, 5, 10000).addBatchRetry({ + '2 subdirectory, file manipulation': create_test(path.resolve('.') + '/toto48/yuo', function(callback, detector) { + setTimeout(function() { + check(detector, []); + mkdirp('toto48', function(err) { + assert.ifError(err); + fs.writeFile('toto48/tito', 'content', function(err) { + assert.ifError(err); + fs.unlink('toto48/tito', function(err) { + assert.ifError(err); + mkdirp('toto48/truc', function(err) { + assert.ifError(err); + rimraf('toto48/truc', function(err) { + assert.ifError(err); + setTimeout(function() { + check(detector, []); + mkdirp('toto48/yuo', function() { + setTimeout(function() { + callback(); + }, 50); + }); + }, 50); + }); + }); + }); + }); + }); + }, 50); + }, function(detector) { + rimraf.sync('toto48'); + check(detector, [path.resolve('.') + '/toto48/yuo', true]); + }), +}, 5, 10000).addBatchRetry({ + '4 subdirectory': create_test(path.resolve('.') + '/toto45/12/45/87', function(callback, detector) { + setTimeout(function() { + check(detector, []); + mkdirp('toto45', function(err) { + assert.ifError(err); + mkdirp('toto45/12', function(err) { + 
assert.ifError(err); + mkdirp('toto45/12/45', function(err) { + assert.ifError(err); + mkdirp('toto45/12/45/87', function(err) { + assert.ifError(err); + setTimeout(function() { + callback(); + }, 50); + }); + }); + }); + }); + }, 50); + }, function(detector) { + rimraf.sync('toto45'); + check(detector, [path.resolve('.') + '/toto45/12/45/87', true]); + }), +}, 5, 10000).addBatchRetry({ + '4 subdirectory mkdir -p': create_test(path.resolve('.') + '/toto49/12/45/87', function(callback, detector) { + setTimeout(function() { + check(detector, []); + var child = spawn('mkdir', ['-p', 'toto49/12/45/87']); + child.on('exit', function(exit_code) { + assert.equal(0, exit_code); + setTimeout(function() { + callback(); + }, 50); + }); + }, 50); + }, function(detector) { + rimraf.sync('toto49'); + check(detector, [path.resolve('.') + '/toto49/12/45/87', true]); + }), +}, 5, 10000).addBatchRetry({ + 'using filter': create_test(path.resolve('.') + '/toto45/1*/45', function(callback, detector) { + setTimeout(function() { + check(detector, []); + mkdirp('toto45/12/45', function(err) { + assert.ifError(err); + mkdirp('toto45/13/45', function(err) { + assert.ifError(err); + mkdirp('toto45/20/45', function(err) { + assert.ifError(err); + mkdirp('toto45/13/46', function(err) { + assert.ifError(err); + setTimeout(function() { + var detector2 = new TestDirectoryDetector(path.resolve('.') + '/toto45/1*/45', function(err) { + assert.ifError(err); + }); + setTimeout(function() { + detector2.detector.close(function(err) { + assert.ifError(err); + callback(detector2); + }); + }, 100); + }, 50); + }); + }); + }); + }); + }, 50); + }, function(detector, detector2) { + rimraf.sync('toto45'); + check(detector, [path.resolve('.') + '/toto45/12/45', true, path.resolve('.') + '/toto45/13/45', true]); + check(detector2, [path.resolve('.') + '/toto45/12/45', true, path.resolve('.') + '/toto45/13/45', true]); + }), +}, 5, 10000).addBatchRetry({ + '2 subdirectory, create and removed': 
create_test(path.resolve('.') + '/toto44/t*', function(callback, detector) { + setTimeout(function() { + check(detector, []); + mkdirp('toto44/titi', function(err) { + assert.ifError(err); + setTimeout(function() { + check(detector, [path.resolve('.') + '/toto44/titi', true]); + rimraf('toto44', function(err) { + assert.ifError(err); + setTimeout(function() { + callback(); + }, 50); + }); + }, 50); + }); + }, 50); + }, function(detector) { + assert.deepEqual(detector.exists, [path.resolve('.') + '/toto44/titi', true], [path.resolve('.') + '/toto44/titi']); + }), +}, 5, 10000).export(module); diff --git a/test/test_303_output_file.js b/test/test_303_output_file.js new file mode 100644 index 00000000..9d90fd7d --- /dev/null +++ b/test/test_303_output_file.js @@ -0,0 +1,214 @@ +var vows = require('vows-batch-retry'), + assert = require('assert'), + mkdirp = require('mkdirp'), + rimraf = require('rimraf'), + moment = require('moment'), + fs = require('fs'), + not_readable_helper = require('./not_readable_helper'), + output_file = require('outputs/output_file'); + +vows.describe('Output file ').addBatchRetry({ + 'standard test': { + topic: function() { + var callback = this.callback; + mkdirp.sync('output'); + var p = output_file.create(); + p.init('output/toto.txt', function(err) { + assert.ifError(err); + p.process({message: 'line1'}); + p.process({message: 'line2'}); + p.process({message: 'line3'}); + setTimeout(function() { + p.close(callback); + }, 200); + }); + }, + check: function(err) { + assert.ifError(err); + + var content = fs.readFileSync('output/toto.txt').toString().split('\n'); + rimraf.sync('output'); + + assert.equal(content.length, 4); + assert.equal(content[0], 'line1'); + assert.equal(content[1], 'line2'); + assert.equal(content[2], 'line3'); + assert.equal(content[3], ''); + } + }, +}, 5, 10000).addBatchRetry({ + 'use variables in file name test': { + topic: function() { + var callback = this.callback; + mkdirp.sync('output'); + var p = 
output_file.create(); + var e; + p.once('error', function(err) { + e = err; + }); + p.init('output/toto_#{type}.txt', function(err) { + assert.ifError(err); + p.process({message: 'line0'}); + p.process({message: 'line1', type: 'a'}); + p.process({message: 'line2', type: 'b'}); + p.process({message: 'line3', type: 'a'}); + setTimeout(function() { + p.close(function() { + callback(undefined, e); + }); + }, 500); + }); + }, + check: function(err, e) { + assert.ifError(err); + assert.isDefined(e); + assert.match(e, /Unable to compute output filename/); + + var content_a = fs.readFileSync('output/toto_a.txt').toString().split('\n'); + var content_b = fs.readFileSync('output/toto_b.txt').toString().split('\n'); + rimraf.sync('output'); + + assert.equal(content_a.length, 3); + assert.equal(content_a[0], 'line1'); + assert.equal(content_a[1], 'line3'); + assert.equal(content_a[2], ''); + + assert.equal(content_b.length, 2); + assert.equal(content_b[0], 'line2'); + assert.equal(content_b[1], ''); + } + } +}, 5, 10000).addBatchRetry({ + 'useless files closing': { + topic: function() { + var callback = this.callback; + mkdirp.sync('output'); + var p = output_file.create(); + p.init('output/toto.txt?idle_timeout=0.2', function(err) { + assert.ifError(err); + p.process({message: 'line1'}); + setTimeout(function() { + assert.equal(Object.keys(p.writers).length, 1); + setTimeout(function() { + assert.equal(Object.keys(p.writers).length, 0); + p.close(callback); + }, 400); + }, 50); + }); + }, + check: function(err) { + assert.ifError(err); + + var content = fs.readFileSync('output/toto.txt').toString().split('\n'); + rimraf.sync('output'); + + assert.equal(content.length, 2); + assert.equal(content[0], 'line1'); + assert.equal(content[1], ''); + } + }, +}, 5, 10000).addBatchRetry({ + 'reopen': { + topic: function() { + var callback = this.callback; + mkdirp.sync('output'); + var p = output_file.create(); + p.init('output/toto.txt?idle_timeout=0.2', function(err) { + 
assert.ifError(err); + p.process({message: 'line1'}); + setTimeout(function() { + p.reopen(function() { + setTimeout(function() { + p.process({message: 'line3'}); + setTimeout(function() { + p.close(callback); + }, 200); + }, 100); + }); + p.process({message: 'line2'}); + }, 100); + }); + }, + check: function(err) { + assert.ifError(err); + + var content = fs.readFileSync('output/toto.txt').toString().split('\n'); + rimraf.sync('output'); + + assert.equal(content.length, 4); + assert.equal(content[0], 'line1'); + assert.equal(content[1], 'line2'); + assert.equal(content[2], 'line3'); + assert.equal(content[3], ''); + } + }, +}, 5, 10000).addBatchRetry({ + 'unable to open file': { + topic: function() { + var callback = this.callback; + var e; + mkdirp.sync('output'); + not_readable_helper.create('root'); + var p = output_file.create(); + p.init('root/toto.txt?retry_delay=0.3', function(err) { + assert.ifError(err); + p.process({message: 'line1'}); + p.process({message: 'line2'}); + p.process({message: 'line3'}); + setTimeout(function() { + p.process({message: 'line4'}); + assert.equal(Object.keys(p.writers).length, 1); + setTimeout(function() { + assert.equal(Object.keys(p.writers).length, 0); + p.close(function() { + callback(undefined, e); + }); + }, 500); + }, 50); + }); + p.once('error', function(err) { + e = err; + }); + }, + check: function(err, e) { + assert.ifError(err); + + not_readable_helper.remove('root'); + + rimraf.sync('output'); + + assert.isDefined(e); + assert.match(e, /EACCES/); + } + }, +}, 5, 10000).addBatchRetry({ + 'directory test': { + topic: function() { + var callback = this.callback; + mkdirp.sync('output'); + var p = output_file.create(); + p.init('output/tata/#{now:YYYY}/toto.txt', function(err) { + assert.ifError(err); + p.process({message: 'line1'}); + p.process({message: 'line2'}); + p.process({message: 'line3'}); + setTimeout(function() { + p.close(callback); + }, 200); + }); + }, + check: function(err) { + assert.ifError(err); + + 
var year = moment().format('YYYY'); + var content = fs.readFileSync('output/tata/' + year + '/toto.txt').toString().split('\n'); + rimraf.sync('output'); + + assert.equal(content.length, 4); + assert.equal(content[0], 'line1'); + assert.equal(content[1], 'line2'); + assert.equal(content[2], 'line3'); + assert.equal(content[3], ''); + } + }, +}, 5, 10000).export(module); \ No newline at end of file diff --git a/test/test_400_error.js b/test/test_400_error.js new file mode 100644 index 00000000..8db8c7f0 --- /dev/null +++ b/test/test_400_error.js @@ -0,0 +1,174 @@ +var agent = require('agent'), + vows = require('vows-batch-retry'), + redis_driver = require('redis_driver'), + assert = require('assert'); + +function check_error_init(urls, expected_message_pattern, start_callback, stop_callback) { + return { + topic: function() { + start_callback = start_callback || function(callback) { + callback(undefined); + }; + stop_callback = stop_callback || function(o, callback) { + callback(); + }; + var callback = this.callback; + start_callback(function(o) { + var a = agent.create(); + a.on('error', function(module_name, error) { + assert.ifError(error); + }); + a.start(urls, function(err) { + if (err) { + a.close(function() { + stop_callback(o, function() { + callback(null, err.toString()); + }); + }); + return; + } + stop_callback(o, function() { + assert.fail('Init success, should not'); + }); + }, 200); + }); + }, + + check: function(error, message) { + assert.ifError(error); + assert.match(message, new RegExp(expected_message_pattern)); + } + }; +} + +vows.describe('Integration error :').addBatch({ + 'non_existent_module': check_error_init([ + 'input://non_existent_module://' + ], 'Cannot find module'), +}).addBatch({ + 'wrong url': check_error_init([ + 'input://non_existent_module' + ], 'Unable to extract plugin name'), +}).addBatch({ + 'wrong url init': check_error_init([ + 'toto://non_existent_module://' + ], 'Unknown protocol'), +}).addBatch({ + 'wrong port in tcp 
module': check_error_init([ + 'input://tcp://0.0.0.0:abcd' + ], 'Unable to extract port'), +}).addBatch({ + 'filter regex : missing pattern file': check_error_init([ + 'filter://regex://toto2' + ], 'Unable to load pattern : toto2'), +}).addBatch({ + 'output statd : missing metric value with metric counter': check_error_init([ + 'output://statsd://localhost:12345?metric_key=a&metric_type=counter' + ], 'ou have to specify metric_value with metric_type counter'), +}).addBatch({ + 'output statd : missing metric value with metric timer': check_error_init([ + 'output://statsd://localhost:12345?metric_key=a&metric_type=timer' + ], 'You have to specify metric_value with metric_type timer'), +}).addBatch({ + 'output statd : missing metric value with metric gauge': check_error_init([ + 'output://statsd://localhost:12345?metric_key=a&metric_type=gauge' + ], 'You have to specify metric_value with metric_type gauge'), +}).addBatch({ + 'output statd : wrong metric type': check_error_init([ + 'output://statsd://localhost:12345?metric_key=a&metric_type=toto' + ], 'Wrong metric_type: toto'), +}).addBatch({ + 'input_file_error : root directory not readable': check_error_init([ + 'input://file:///root/toto/43input1.txt', + 'output://stdout://' + ], 'EACCES'), +}).addBatch({ + 'http unable to open port (access)': check_error_init([ + 'input://http://localhost:80' + ], 'listen EACCES'), +}).addBatch({ + 'http unable to open port (used)': check_error_init([ + 'input://http://localhost:17874' + ], 'listen EADDRINUSE', function(callback) { + var r = new redis_driver.RedisDriver(); + r.start({ + port: 17874 + }, function() { + callback(r); + }); + }, function(r, callback) { + r.stop(callback); + }), +}).addBatch({ + 'tcp unable to open port (access)': check_error_init([ + 'input://tcp://localhost:80' + ], 'listen EACCES'), +}).addBatch({ + 'tcp unable to open port (used)': check_error_init([ + 'input://tcp://localhost:17874' + ], 'listen EADDRINUSE', function(callback) { + var r = new 
redis_driver.RedisDriver(); + r.start({ + port: 17874 + }, function() { + callback(r); + }); + }, function(r, callback) { + r.stop(callback); + }), +}).addBatch({ + 'udp unable to open port (access)': check_error_init([ + 'input://udp://localhost:123' + ], 'bind EACCES'), +}).addBatch({ + 'unix unable to open file': check_error_init([ + 'input://unix:///root/toto' + ], 'listen EACCES'), +}).addBatch({ + 'zeromq unable to open': check_error_init([ + 'input://zeromq://tcp://0.0.0.0:22' + ], 'Permission denied'), +}).addBatch({ + 'wrong serializer': check_error_init([ + 'output://tcp://localhost:12345?serializer=non_existent' + ], 'Unknown serializer non_existent'), +}).addBatch({ + 'wrong redis input config': check_error_init([ + 'input://redis://localhost:6379?method=toto&channel=titi' + ], 'Wrong method'), +}).addBatch({ + 'wrong redis input queue config': check_error_init([ + 'input://redis://localhost:6379?method=queue&channel=titi' + ], 'You have to specify the key parameter in queue mode'), +}).addBatch({ + 'wrong redis input pubsub config': check_error_init([ + 'input://redis://localhost:6379?method=pubsub' + ], 'You have to specify the channel parameter in pubsub mode'), +}).addBatch({ + 'wrong redis output config': check_error_init([ + 'output://redis://localhost:6379?method=toto&channel=titi' + ], 'Wrong method'), +}).addBatch({ + 'wrong redis output queue config': check_error_init([ + 'output://redis://localhost:6379?method=queue&channel=titi' + ], 'You have to specify the key parameter in queue mode'), +}).addBatch({ + 'wrong redis output pubsub config': check_error_init([ + 'output://redis://localhost:6379?method=pubsub&key=toto' + ], 'You have to specify the channel parameter in pubsub mode'), +}).addBatch({ + 'wrong grok pattern': check_error_init([ + 'filter://grok://?match=%{GROKTEST}' + ], 'Unable to find grok pattern GROKTEST'), +}).addBatch({ + 'wrong grok pattern recurse': check_error_init([ + 
'filter://grok://?match=%{GROKTEST2}&extra_patterns_file=' + __dirname + '/grok/extra' + ], 'Unable to find grok pattern NUMBER98'), +}).addBatch({ + 'unexistent grok pattern file': check_error_init([ + 'filter://grok://?match=%{GROKTEST}&extra_patterns_file=/tmp/titi' + ], 'Error'), + 'wrong pattern file': check_error_init([ + 'filter://grok://?match=%{GROKTEST}&extra_patterns_file=' + __dirname + '/grok/wrong' + ], 'Unable to find grok pattern GROKTEST'), +}).export(module); diff --git a/test/test_401_file2file.js b/test/test_401_file2file.js new file mode 100644 index 00000000..3f93a9cc --- /dev/null +++ b/test/test_401_file2file.js @@ -0,0 +1,337 @@ +var vows = require('vows-batch-retry'), + fs = require('fs'), + mkdirp = require('mkdirp'), + rimraf = require('rimraf'), + assert = require('assert'), + path = require('path'), + helper = require('./integration_helper.js'), + monitor_file = require('lib/monitor_file'); + +vows.describe('Integration file 2 file :').addBatchRetry({ + 'file2file': { + topic: function() { + monitor_file.setFileStatus({}); + var callback = this.callback; + helper.createAgent([ + 'input://file://input1.txt', + 'input://file://input2.txt?type=input2', + 'output://file://output1.txt?serializer=json_logstash', + 'output://file://output2.txt?serializer=json_logstash', + 'output://file://output3.txt?serializer=raw&format=_#{message}_', + ], function(agent) { + setTimeout(function() { + fs.appendFile('input1.txt', 'line1\n', function(err) { + assert.ifError(err); + setTimeout(function() { + fs.appendFile('input2.txt', 'line2\n', function(err) { + assert.ifError(err); + setTimeout(function() { + fs.appendFile('input1.txt', 'line3\n', function(err) { + assert.ifError(err); + setTimeout(function() { + agent.close(function() { + callback(null); + }); + }, 200); + }); + }, 200); + }); + }, 200); + }); + }, 200); + }); + }, + + check: function(err) { + assert.ifError(err); + var c1 = fs.readFileSync('output1.txt').toString(); + var c2 = 
fs.readFileSync('output2.txt').toString(); + var c3 = fs.readFileSync('output3.txt').toString(); + fs.unlinkSync('input1.txt'); + fs.unlinkSync('input2.txt'); + fs.unlinkSync('output1.txt'); + fs.unlinkSync('output2.txt'); + fs.unlinkSync('output3.txt'); + + assert.equal(c1, c2); + var splitted = c1.split('\n'); + assert.equal(splitted.length, 4); + assert.equal('', splitted[splitted.length - 1]); + helper.checkResult(splitted[0], { + '@version': '1', + 'path': path.resolve('.') + '/input1.txt', + 'message': 'line1' + }, true); + helper.checkResult(splitted[1], { + '@version': '1', + 'path': path.resolve('.') + '/input2.txt', + 'message': 'line2', + 'type': 'input2' + }, true); + helper.checkResult(splitted[2], { + '@version': '1', + 'path': path.resolve('.') + '/input1.txt', + 'message': 'line3' + }, true); + + assert.equal('_line1_\n_line2_\n_line3_\n', c3); + } + }, +}, 1, 20000).addBatchRetry({ + 'file2file raw unserializer': { + topic: function() { + monitor_file.setFileStatus({}); + var callback = this.callback; + helper.createAgent([ + 'input://file://input1.txt?unserializer=raw', + 'input://file://input2.txt?type=input2&unserializer=raw', + 'output://file://output1.txt?serializer=json_logstash', + 'output://file://output2.txt?serializer=json_logstash', + 'output://file://output3.txt?serializer=raw&format=_#{message}_', + ], function(agent) { + setTimeout(function() { + fs.appendFile('input1.txt', 'line1\n', function(err) { + assert.ifError(err); + setTimeout(function() { + fs.appendFile('input2.txt', 'line2\n', function(err) { + assert.ifError(err); + setTimeout(function() { + fs.appendFile('input1.txt', 'line3\n', function(err) { + assert.ifError(err); + setTimeout(function() { + agent.close(function() { + callback(null); + }); + }, 200); + }); + }, 200); + }); + }, 200); + }); + }, 200); + }); + }, + + check: function(err) { + assert.ifError(err); + var c1 = fs.readFileSync('output1.txt').toString(); + var c2 = fs.readFileSync('output2.txt').toString(); + 
var c3 = fs.readFileSync('output3.txt').toString(); + fs.unlinkSync('input1.txt'); + fs.unlinkSync('input2.txt'); + fs.unlinkSync('output1.txt'); + fs.unlinkSync('output2.txt'); + fs.unlinkSync('output3.txt'); + + assert.equal(c1, c2); + var splitted = c1.split('\n'); + assert.equal(splitted.length, 4); + assert.equal('', splitted[splitted.length - 1]); + helper.checkResult(splitted[0], { + '@version': '1', + 'path': path.resolve('.') + '/input1.txt', + 'message': 'line1' + }, true); + helper.checkResult(splitted[1], { + '@version': '1', + 'path': path.resolve('.') + '/input2.txt', + 'message': 'line2', + 'type': 'input2' + }, true); + helper.checkResult(splitted[2], { + '@version': '1', + 'path': path.resolve('.') + '/input1.txt', + 'message': 'line3' + }, true); + + assert.equal('_line1_\n_line2_\n_line3_\n', c3); + } + }, +}, 5, 20000).addBatchRetry({ + 'file2file not existing dir': { + topic: function() { + monitor_file.setFileStatus({}); + var callback = this.callback; + helper.createAgent([ + 'input://file://toto/56/87/input.txt', + 'output://file://output.txt?serializer=json_logstash', + ], function(agent) { + setTimeout(function() { + mkdirp('toto/56/87', function(err) { + assert.ifError(err); + setTimeout(function() { + fs.appendFile('toto/56/87/input.txt', 'line1\n', function(err) { + assert.ifError(err); + fs.appendFile('toto/56/87/input.txt', 'line2\n', function(err) { + assert.ifError(err); + setTimeout(function() { + agent.close(function() { + callback(null); + }); + }, 200); + }); + }); + }, 200); + }); + }, 500); + }); + }, + + check: function(err) { + assert.ifError(err); + var c = fs.readFileSync('output.txt').toString(); + rimraf.sync('toto'); + fs.unlinkSync('output.txt'); + + var splitted = c.split('\n'); + assert.equal(splitted.length, 3); + assert.equal('', splitted[splitted.length - 1]); + helper.checkResult(splitted[0], { + '@version': '1', + 'path': path.resolve('.') + '/toto/56/87/input.txt', + 'message': 'line1' + }, true); + 
helper.checkResult(splitted[1], { + '@version': '1', + 'path': path.resolve('.') + '/toto/56/87/input.txt', + 'message': 'line2' + }, true); + } + }, +}, 5, 20000).addBatchRetry({ + 'stop start on non existing file': { + topic: function() { + monitor_file.setFileStatus({}); + var callback = this.callback; + helper.createAgent([ + 'input://file://toto/56/87/input.txt', + ], function(agent) { + setTimeout(function() { + agent.close(callback); + }, 200); + }); + }, + + check: function(err) { + assert.ifError(err); + } + }, +}, 5, 20000).addBatchRetry({ + 'start index': { + topic: function() { + monitor_file.setFileStatus({}); + fs.writeFileSync('toto.txt', 'line1\nline2\n'); + var callback = this.callback; + helper.createAgent([ + 'output://file://output.txt', + 'input://file://toto.txt?start_index=3', + ], function(agent) { + setTimeout(function() { + agent.close(function() { + setTimeout(callback, 200); + }); + }, 200); + }); + }, + + check: function(err) { + assert.ifError(err); + var data = fs.readFileSync('output.txt').toString().split('\n'); + fs.unlinkSync('toto.txt'); + fs.unlinkSync('output.txt'); + assert.deepEqual(data, ['e1', 'line2', '']); + } + }, +}, 5, 20000).addBatchRetry({ + 'wildcard': { + topic: function() { + monitor_file.setFileStatus({}); + var callback = this.callback; + helper.createAgent([ + 'output://file://output.txt', + 'input://file://log/*-2005/access.log', + ], function(agent) { + setTimeout(function() { + mkdirp('log/10-02-2005', function(err) { + assert.ifError(err); + fs.appendFile('log/10-02-2005/access.log', 'z1\n', function(err) { + assert.ifError(err); + fs.appendFile('log/10-02-2005/error.log', 'z2\n', function(err) { + assert.ifError(err); + mkdirp('log/10-03-2005', function(err) { + assert.ifError(err); + fs.appendFile('log/10-03-2005/access.log', 'z3\n', function(err) { + assert.ifError(err); + mkdirp('log/10-03-2006', function(err) { + assert.ifError(err); + fs.appendFile('log/10-03-2006/access.log', 'z4\n', function(err) { 
+ assert.ifError(err); + agent.close(function() { + setTimeout(callback, 500); + }); + }); + }); + }); + }); + }); + }); + }); + }, 200); + }); + }, + + check: function(err) { + assert.ifError(err); + var data = fs.readFileSync('output.txt').toString().split('\n'); + rimraf.sync('log'); + fs.unlinkSync('output.txt'); + assert.deepEqual(data, ['z1', 'z3', '']); + } + }, +}, 5, 20000).addBatchRetry({ + 'wildcard create destroy': { + topic: function() { + monitor_file.setFileStatus({}); + var callback = this.callback; + helper.createAgent([ + 'output://file://output.txt', + 'input://file://log/*-2005/access.log', + ], function(agent) { + setTimeout(function() { + mkdirp('log/10-02-2005', function(err) { + assert.ifError(err); + fs.appendFile('log/10-02-2005/access.log', 'z1\n', function(err) { + assert.ifError(err); + setTimeout(function() { + rimraf('log/10-02-2005', function(err) { + assert.ifError(err); + setTimeout(function() { + mkdirp('log/10-02-2005', function(err) { + assert.ifError(err); + fs.appendFile('log/10-02-2005/access.log', 'z2\n', function(err) { + assert.ifError(err); + setTimeout(function() { + agent.close(function() { + setTimeout(callback, 500); + }); + }, 100); + }); + }, 100); + }); + }); + }, 100); + }); + }); + }, 200); + }); + }, + + check: function(err) { + assert.ifError(err); + var data = fs.readFileSync('output.txt').toString().split('\n'); + rimraf.sync('log'); + fs.unlinkSync('output.txt'); + assert.deepEqual(data, ['z1', 'z2', '']); + } + }, +}, 5, 20000).export(module); diff --git a/test/test_402_json_logstash.js b/test/test_402_json_logstash.js new file mode 100644 index 00000000..ff830664 --- /dev/null +++ b/test/test_402_json_logstash.js @@ -0,0 +1,74 @@ +var vows = require('vows-batch-retry'), + fs = require('fs'), + dgram = require('dgram'), + assert = require('assert'), + helper = require('./integration_helper.js'), + monitor_file = require('lib/monitor_file'); + +vows.describe('Integration Json logstash event 
:').addBatchRetry({ + 'json_logstash_event': { + topic: function() { + monitor_file.setFileStatus({}); + var callback = this.callback; + helper.createAgent([ + 'input://udp://0.0.0.0:17854', + 'output://file://output.txt?serializer=json_logstash', + ], function(agent) { + var socket = dgram.createSocket('udp4'); + var udp_send = function(s) { + var buffer = new Buffer(s); + socket.send(buffer, 0, buffer.length, 17854, 'localhost', function(err, bytes) { + if (err || bytes !== buffer.length) { + assert.fail('Unable to send udp packet'); + } + }); + }; + setTimeout(function() { + udp_send('toto'); + setTimeout(function() { + udp_send('{"tata":"toto","type":"titi","message":"oups"}'); + setTimeout(function() { + udp_send('{"tata":"toto","message":"titi", "source": "test42", "type": "pouet", "host": "toto","@timestamp":"abc"}'); + setTimeout(function() { + socket.close(); + agent.close(function() { + callback(null); + }); + }, 200); + }, 50); + }, 50); + }, 50); + }.bind(this)); + }, + + check: function(err) { + assert.ifError(err); + var c = fs.readFileSync('output.txt').toString(); + fs.unlinkSync('output.txt'); + var splitted = c.split('\n'); + assert.equal(splitted.length, 4); + assert.equal('', splitted[splitted.length - 1]); + helper.checkResult(splitted[0], { + '@version': '1', + 'host': '127.0.0.1', + 'udp_port': 17854, + 'message': 'toto' + }); + helper.checkResult(splitted[1], { + '@version': '1', + 'host': '127.0.0.1', + 'udp_port': 17854, + 'message': '{"tata":"toto","type":"titi","message":"oups"}' + }); + helper.checkResult(splitted[2], { + '@version': '1', + 'host': 'toto', + 'source': 'test42', + 'type': 'pouet', + 'tata': 'toto', + 'message': 'titi', + '@timestamp': 'abc' + }, undefined, true); + } + }, +}, 5, 20000).export(module); diff --git a/test/test_403_elasticsearch.js b/test/test_403_elasticsearch.js new file mode 100644 index 00000000..902220d3 --- /dev/null +++ b/test/test_403_elasticsearch.js @@ -0,0 +1,290 @@ +var vows = 
require('vows-batch-retry'), + http = require('http'), + net = require('net'), + assert = require('assert'), + helper = require('./integration_helper.js'); + +function es_server(max, agent, port, callback) { + var reqs = []; + var s = http.createServer(function(req, res) { + var body = ''; + req.on('data', function(chunk) { + body += chunk; + }); + req.on('end', function() { + reqs.push({ + req: req, + body: body + }); + res.writeHead(201); + res.end(); + if (reqs.length === max) { + agent.close(function() { + s.close(function() { + callback(null, reqs); + }); + }); + } + }); + }).listen(port); +} + +function tcp_send(text, port, callback) { + var c = net.createConnection({ + port: port, + }, function() { + c.write(text); + c.end(); + if (callback) { + callback(); + } + }); +} + +function checkIndexBulk(json, index_pattern, type) { + var o = JSON.parse(json); + assert.match(o.index._index, new RegExp(index_pattern)); + assert.equal(o.index._type, type); +} + +vows.describe('Integration Elastic search event :').addBatchRetry({ + 'elastic_search test': { + topic: function() { + var callback = this.callback; + helper.createAgent([ + 'input://tcp://0.0.0.0:17874?type=nginx', + 'input://tcp://0.0.0.0:17875', + 'output://elasticsearch://localhost:17876', + ], function(agent) { + es_server(2, agent, 17876, callback); + tcp_send('toto', 17874); + setTimeout(function() { + tcp_send('titi', 17875); + }, 200); + }); + }, + + check: function(err, reqs) { + assert.ifError(err); + assert.equal(reqs.length, 2); + + assert.equal(reqs[0].req.method, 'POST'); + assert.match(reqs[0].req.url, new RegExp('^\/logstash-' + (new Date()).getUTCFullYear() + '\\.\\d\\d\\.\\d\\d\/logs')); + helper.checkResult(reqs[0].body, { + '@version': '1', + 'message': 'toto', + 'host': '127.0.0.1', + 'type': 'nginx', + 'tcp_port': 17874 + }); + + assert.equal(reqs[1].req.method, 'POST'); + assert.match(reqs[1].req.url, new RegExp('^\/logstash-' + (new Date()).getUTCFullYear() + 
'\\.\\d\\d\\.\\d\\d\/logs')); + helper.checkResult(reqs[1].body, { + '@version': '1', + 'message': 'titi', + 'host': '127.0.0.1', + 'tcp_port': 17875 + }); + } + }, +}, 5, 20000).addBatchRetry({ + 'elastic_search with custom data type and index test': { + topic: function() { + var callback = this.callback; + helper.createAgent([ + 'input://tcp://0.0.0.0:17874?type=nginx', + 'input://tcp://0.0.0.0:17875', + 'output://elasticsearch://localhost:17876?data_type=audits&index_prefix=audit', + ], function(agent) { + es_server(2, agent, 17876, callback); + tcp_send('toto', 17874); + setTimeout(function() { + tcp_send('titi', 17875); + }, 200); + }); + }, + + check: function(err, reqs) { + assert.ifError(err); + assert.equal(reqs.length, 2); + + assert.equal(reqs[0].req.method, 'POST'); + assert.match(reqs[0].req.url, new RegExp('^\/audit-' + (new Date()).getUTCFullYear() + '\\.\\d\\d\\.\\d\\d\/audits')); + helper.checkResult(reqs[0].body, { + '@version': '1', + 'message': 'toto', + 'host': '127.0.0.1', + 'type': 'nginx', + 'tcp_port': 17874 + }); + + assert.equal(reqs[1].req.method, 'POST'); + assert.match(reqs[1].req.url, new RegExp('^\/audit-' + (new Date()).getUTCFullYear() + '\\.\\d\\d\\.\\d\\d\/audits')); + helper.checkResult(reqs[1].body, { + '@version': '1', + 'message': 'titi', + 'host': '127.0.0.1', + 'tcp_port': 17875 + }); + } + }, +}, 5, 20000).addBatchRetry({ + 'elastic_search with index name': { + topic: function() { + var callback = this.callback; + helper.createAgent([ + 'input://tcp://0.0.0.0:17874?type=nginx', + 'input://tcp://0.0.0.0:17875', + 'output://elasticsearch://localhost:17876?data_type=audits&index_name=myindex', + ], function(agent) { + es_server(2, agent, 17876, callback); + tcp_send('toto', 17874); + setTimeout(function() { + tcp_send('titi', 17875); + }, 200); + }); + }, + + check: function(err, reqs) { + assert.ifError(err); + assert.equal(reqs.length, 2); + + assert.equal(reqs[0].req.method, 'POST'); + assert.equal(reqs[0].req.url, 
'/myindex/audits/'); + helper.checkResult(reqs[0].body, { + '@version': '1', + 'message': 'toto', + 'host': '127.0.0.1', + 'type': 'nginx', + 'tcp_port': 17874 + }); + + assert.equal(reqs[1].req.method, 'POST'); + assert.equal(reqs[1].req.url, '/myindex/audits/'); + helper.checkResult(reqs[1].body, { + '@version': '1', + 'message': 'titi', + 'host': '127.0.0.1', + 'tcp_port': 17875 + }); + } + }, +}, 5, 20000).addBatchRetry({ + 'elastic_search bulk timer test': { + topic: function() { + var callback = this.callback; + helper.createAgent([ + 'input://tcp://0.0.0.0:17874?type=nginx', + 'input://tcp://0.0.0.0:17875?type=haproxy', + 'input://tcp://0.0.0.0:17876?type=stud', + 'output://elasticsearch://localhost:17877?bulk_limit=3&bulk_timeout=1000&data_type=audits&index_name=toto', + ], function(agent) { + es_server(1, agent, 17877, callback); + tcp_send('toto', 17874); + setTimeout(function() { + tcp_send('titi', 17875); + }, 100); + setTimeout(function() { + tcp_send('tata', 17876); + }, 200); + }); + }, + + check: function(err, reqs) { + assert.ifError(err); + assert.equal(reqs.length, 1); + + assert.equal(reqs[0].req.method, 'POST'); + assert.equal(reqs[0].req.url, '/_bulk'); + var lines = reqs[0].body.split('\n'); + assert.equal(lines.length, 7); + checkIndexBulk(lines[0], '^toto$', 'audits'); + helper.checkResult(lines[1], { + '@version': '1', + 'message': 'toto', + 'host': '127.0.0.1', + 'type': 'nginx', + 'tcp_port': 17874 + }); + checkIndexBulk(lines[2], '^toto$', 'audits'); + helper.checkResult(lines[3], { + '@version': '1', + 'message': 'titi', + 'host': '127.0.0.1', + 'type': 'haproxy', + 'tcp_port': 17875 + }); + checkIndexBulk(lines[4], '^toto$', 'audits'); + helper.checkResult(lines[5], { + '@version': '1', + 'message': 'tata', + 'host': '127.0.0.1', + 'type': 'stud', + 'tcp_port': 17876 + }); + assert.equal(lines[6], ''); + } + }, +}, 5, 20000).addBatchRetry({ + 'elastic_search bulk limit test': { + topic: function() { + var callback = this.callback; + 
helper.createAgent([ + 'input://tcp://0.0.0.0:17874?type=nginx', + 'input://tcp://0.0.0.0:17875?type=haproxy', + 'input://tcp://0.0.0.0:17876?type=stud', + 'output://elasticsearch://localhost:17877?bulk_timeout=200&bulk_limit=2&data_type=audits&index_prefix=audit', + ], function(agent) { + es_server(2, agent, 17877, callback); + tcp_send('toto', 17874); + setTimeout(function() { + tcp_send('titi', 17875); + }, 50); + setTimeout(function() { + tcp_send('tata', 17876); + }, 100); + }); + }, + + check: function(err, reqs) { + assert.ifError(err); + assert.equal(reqs.length, 2); + + assert.equal(reqs[0].req.method, 'POST'); + assert.equal(reqs[0].req.url, '/_bulk'); + var lines = reqs[0].body.split('\n').filter(function(line) {return line.length > 0;}); + assert.equal(lines.length, 4); + checkIndexBulk(lines[0], '^audit-' + (new Date()).getUTCFullYear() + '\\.\\d\\d\\.\\d\\d$', 'audits'); + helper.checkResult(lines[1], { + '@version': '1', + 'message': 'toto', + 'host': '127.0.0.1', + 'type': 'nginx', + 'tcp_port': 17874 + }); + checkIndexBulk(lines[2], '^audit-' + (new Date()).getUTCFullYear() + '\\.\\d\\d\\.\\d\\d$', 'audits'); + + helper.checkResult(lines[3], { + '@version': '1', + 'message': 'titi', + 'host': '127.0.0.1', + 'type': 'haproxy', + 'tcp_port': 17875 + }); + + assert.equal(reqs[1].req.method, 'POST'); + assert.equal(reqs[1].req.url, '/_bulk'); + lines = reqs[1].body.split('\n').filter(function(line) {return line.length > 0;}); + assert.equal(lines.length, 2); + checkIndexBulk(lines[0], '^audit-' + (new Date()).getUTCFullYear() + '\\.\\d\\d\\.\\d\\d$', 'audits'); + helper.checkResult(lines[1], { + '@version': '1', + 'message': 'tata', + 'host': '127.0.0.1', + 'type': 'stud', + 'tcp_port': 17876 + }); + } + } +}, 5, 20000).export(module); diff --git a/test/test_404_http_post.js b/test/test_404_http_post.js new file mode 100644 index 00000000..a12caf8f --- /dev/null +++ b/test/test_404_http_post.js @@ -0,0 +1,136 @@ +var vows = require('vows-batch-retry'), 
+ http = require('http'), + net = require('net'), + assert = require('assert'), + helper = require('./integration_helper.js'); + +function check_auth(req) { + var auth = req.headers.authorization; + if (!auth) { + return; + } + + var parts = auth.split(' '); + if ('basic' !== parts[0].toLowerCase()) { + return; + } + if (!parts[1]) { + return; + } + auth = parts[1]; + + auth = new Buffer(auth, 'base64').toString(); + auth = auth.match(/^([^:]*):(.*)$/); + if (!auth) { + return; + } + + return { name: auth[1], pass: auth[2] }; +} + +function createHttpTest(output_url, check_callback, req_count, user, password) { + return { + topic: function() { + var callback = this.callback; + var reqs = []; + helper.createAgent([ + 'input://tcp://0.0.0.0:17874?type=pouet', + output_url, + ], function(agent) { + var http_server = http.createServer(function(req, res) { + if (user && password) { + var a = check_auth(req); + if (a === undefined || a.name !== 'john' || a.pass !== 'secret') { + res.writeHead(401, { + 'WWW-Authenticate': 'Basic realm="example"', + 'Connection': 'close', + }); + res.end(); + agent.close(function() { + http_server.close(function() { + callback(null, reqs); + }); + }); + return; + } + } + var body = ''; + req.on('data', function(chunk) { + body += chunk; + }); + req.on('end', function() { + reqs.push({ + req: req, + body: body + }); + res.writeHead(204); + res.end(); + if (reqs.length === 1) { + agent.close(function() { + http_server.close(function() { + callback(null, reqs); + }); + }); + } + }); + }).listen(17875); + var c1 = net.createConnection({ + port: 17874 + }, function() { + c1.write('toto'); + c1.end(); + }); + }); + }, + + check: function(err, reqs) { + assert.ifError(err); + assert.equal(reqs.length, req_count === undefined ? 
1 : req_count); + + check_callback(reqs); + } + }; +} + +vows.describe('Integration Http post :').addBatchRetry({ + 'http_post test raw': createHttpTest('output://http_post://localhost:17875?path=/#{type}', function(reqs) { + assert.equal(reqs[0].req.method, 'POST'); + assert.equal(reqs[0].req.headers['content-type'], 'text/plain'); + assert.equal(reqs[0].req.url, '/pouet'); + assert.equal(reqs[0].body, 'toto'); + }), +}, 5, 20000).addBatchRetry({ + 'http_post test json': createHttpTest('output://http_post://localhost:17875?path=/#{type}&serializer=json_logstash', function(reqs) { + assert.equal(reqs[0].req.method, 'POST'); + assert.equal(reqs[0].req.headers['content-type'], 'application/json'); + assert.equal(reqs[0].req.url, '/pouet'); + helper.checkResult(reqs[0].body, { + message: 'toto', + host: '127.0.0.1', + tcp_port: 17874, + type: 'pouet', + '@version': '1' + }); + }), +}, 5, 20000).addBatchRetry({ + 'http_post auth failed': createHttpTest('output://http_post://localhost:17875?path=/#{type}&serializer=json_logstash', function(reqs) { + assert.equal(reqs.length, 0); + }, 0, 'john', 'secret'), +}, 5, 20000).addBatchRetry({ + 'http_post wrong password': createHttpTest('output://http_post://localhost:17875?path=/#{type}&serializer=json_logstash&basic_auth_user=john&basic_auth_password=secret2', function(reqs) { + assert.equal(reqs.length, 0); + }, 0, 'john', 'secret'), +}, 5, 20000).addBatchRetry({ + 'http_post auth ok': createHttpTest('output://http_post://localhost:17875?path=/#{type}&serializer=json_logstash&basic_auth_user=john&basic_auth_password=secret', function(reqs) { + assert.equal(reqs[0].req.method, 'POST'); + assert.equal(reqs[0].req.headers['content-type'], 'application/json'); + assert.equal(reqs[0].req.url, '/pouet'); + helper.checkResult(reqs[0].body, { + message: 'toto', + host: '127.0.0.1', + tcp_port: 17874, + type: 'pouet', + '@version': '1' + }); + }, 1, 'john', 'secret'), +}, 5, 20000).export(module); diff --git 
a/test/test_405_net2file.js b/test/test_405_net2file.js new file mode 100644 index 00000000..4717f713 --- /dev/null +++ b/test/test_405_net2file.js @@ -0,0 +1,115 @@ +var vows = require('vows-batch-retry'), + fs = require('fs'), + net = require('net'), + assert = require('assert'), + helper = require('./integration_helper.js'), + monitor_file = require('lib/monitor_file'); + +vows.describe('Integration net 2 file :').addBatchRetry({ + 'net2file': { + topic: function() { + var callback = this.callback; + helper.createAgent([ + 'input://tcp://localhost:17874?type=2', + 'output://file://output.txt?serializer=json_logstash', + ], function(agent) { + var c = net.createConnection({ + port: 17874 + }, function() { + c.write('toto'); + c.end(); + }); + c.on('end', function() { + setTimeout(function() { + agent.close(function() { + callback(null); + }); + }, 100); + }); + }); + }, + + check: function(err) { + assert.ifError(err); + var c1 = fs.readFileSync('output.txt').toString(); + fs.unlinkSync('output.txt'); + + var splitted = c1.split('\n'); + assert.equal(splitted.length, 2); + assert.equal('', splitted[splitted.length - 1]); + helper.checkResult(splitted[0], { + '@version': '1', + 'host': '127.0.0.1', + 'tcp_port': 17874, + 'message': 'toto', + 'type': '2' + }); + } + }, +}, 5, 20000).addBatchRetry({ + 'file2net': { + topic: function() { + monitor_file.setFileStatus({}); + var callback = this.callback; + var reqs = []; + var current = []; + var conns = 0; + var connection_callback = function(c) { + conns = conns + 1; + current.push(c); + c.on('data', function(data) { + reqs.push(data.toString()); + }); + }; + var server = net.createServer(connection_callback); + server.listen(17874); + helper.createAgent([ + 'input://file://main_input.txt', + 'output://tcp://localhost:17874?serializer=raw&delimiter=', + ], function(agent) { + setTimeout(function() { + fs.appendFile('main_input.txt', 'line 1\n', function(err) { + assert.ifError(err); + setTimeout(function() { + 
fs.appendFile('main_input.txt', 'line 2\n', function(err) { + assert.ifError(err); + setTimeout(function() { + current.forEach(function(c) { + c.end(); + }); + server.close(function() { + server = net.createServer(connection_callback); + server.listen(17874); + setTimeout(function() { + fs.appendFile('main_input.txt', 'line 3\n', function(err) { + assert.ifError(err); + setTimeout(function() { + agent.close(function() { + server.close(function() { + callback(null, reqs, conns); + }); + }); + }, 200); + }); + }, 200); + }); + }, 200); + }); + }, 200); + }); + }, 200); + }); + }, + + check: function(err, reqs, conns) { + assert.ifError(err); + fs.unlinkSync('main_input.txt'); + assert.equal(2, conns); + assert.deepEqual(reqs, [ + 'line 1', + 'line 2', + 'line 3', + ]); + } + }, +}, 5, 20000).export(module); diff --git a/test/test_406_logio.js b/test/test_406_logio.js new file mode 100644 index 00000000..2fff446d --- /dev/null +++ b/test/test_406_logio.js @@ -0,0 +1,57 @@ +var vows = require('vows-batch-retry'), + fs = require('fs'), + os = require('os'), + net = require('net'), + assert = require('assert'), + helper = require('./integration_helper.js'), + monitor_file = require('lib/monitor_file'); + +vows.describe('Integration log io :').addBatchRetry({ + 'logio': { + topic: function() { + monitor_file.setFileStatus({}); + var callback = this.callback; + var reqs = []; + var server = net.createServer(function(c) { + c.on('data', function(data) { + data.toString().split('\r\n').forEach(function(s) { + if (s !== '') { + reqs.push(s); + } + }); + }); + }); + server.listen(17874); + helper.createAgent([ + 'input://file://main_input.txt', + 'input://file://main_input.txt?type=toto', + 'output://logio://localhost:17874', + 'output://logio://localhost:17874?priority=#{type}', + ], function(agent) { + setTimeout(function() { + fs.appendFile('main_input.txt', 'line 1\n', function(err) { + assert.ifError(err); + setTimeout(function() { + agent.close(function() { + 
server.close(function() { + callback(null, reqs); + }); + }); + }, 200); + }); + }, 200); + }); + }, + + check: function(err, reqs) { + assert.ifError(err); + fs.unlinkSync('main_input.txt'); + assert.deepEqual(reqs.sort(), [ + '+log|' + os.hostname() + '|no_type|info|line 1', + '+log|' + os.hostname() + '|no_type|undefined|line 1', + '+log|' + os.hostname() + '|toto|info|line 1', + '+log|' + os.hostname() + '|toto|toto|line 1', + ]); + } + }, +}, 5, 20000).export(module); diff --git a/test/test_407_statsd.js b/test/test_407_statsd.js new file mode 100644 index 00000000..b59a4dd1 --- /dev/null +++ b/test/test_407_statsd.js @@ -0,0 +1,128 @@ +var vows = require('vows-batch-retry'), + fs = require('fs'), + dgram = require('dgram'), + assert = require('assert'), + helper = require('./integration_helper.js'), + monitor_file = require('lib/monitor_file'); + +vows.describe('Integration statsd :').addBatchRetry({ + 'file2statsd': { + topic: function() { + monitor_file.setFileStatus({}); + var callback = this.callback; + var received = []; + var statsd = dgram.createSocket('udp4'); + statsd.on('message', function(d) { + received.push(d.toString()); + }); + statsd.bind(17874); + helper.createAgent([ + 'input://file://input1.txt', + 'input://file://input2.txt?type=titi', + 'input://file://input3.txt?type=tata', + 'input://file://input4.txt?type=tete', + 'input://file://input5.txt?type=toto', + 'filter://regex://?regex=^45_(.*)$&fields=my_field', + 'output://statsd://127.0.0.1:17874?metric_type=increment&metric_key=toto.bouh', + 'output://statsd://127.0.0.1:17874?metric_type=decrement&metric_key=toto.#{message}&only_type=titi', + 'output://statsd://127.0.0.1:17874?metric_type=counter&metric_key=toto.counter&metric_value=#{message}&only_type=tata', + 'output://statsd://127.0.0.1:17874?metric_type=timer&metric_key=toto.#{my_field}.#{my_field}&metric_value=20&only_type=tete', + 
'output://statsd://127.0.0.1:17874?metric_type=gauge&metric_key=toto.gauge&metric_value=45&only_type=toto', + ], function(agent) { + setTimeout(function() { + fs.appendFile('input1.txt', 'line1\n', function(err) { + assert.ifError(err); + setTimeout(function() { + fs.appendFile('input2.txt', 'line2\n', function(err) { + assert.ifError(err); + setTimeout(function() { + fs.appendFile('input3.txt', '10\n', function(err) { + assert.ifError(err); + setTimeout(function() { + fs.appendFile('input4.txt', '45_123\n', function(err) { + assert.ifError(err); + setTimeout(function() { + fs.appendFile('input5.txt', 'line3\n', function(err) { + assert.ifError(err); + setTimeout(function() { + agent.close(function() { + statsd.close(); + callback(undefined, received); + }); + }, 200); + }); + }, 200); + }); + }, 200); + }); + }, 200); + }); + }, 200); + }); + }, 200); + }); + }, + + check: function(err, data) { + fs.unlinkSync('input1.txt'); + fs.unlinkSync('input2.txt'); + fs.unlinkSync('input3.txt'); + fs.unlinkSync('input4.txt'); + fs.unlinkSync('input5.txt'); + assert.ifError(err); + assert.deepEqual(data.sort(), [ + 'toto.bouh:1|c', + 'toto.line2:-1|c', + 'toto.bouh:1|c', + 'toto.counter:10|c', + 'toto.bouh:1|c', + 'toto.123.123:20|ms', + 'toto.bouh:1|c', + 'toto.bouh:1|c', + 'toto.gauge:45|g', + ].sort()); + } + }, +}, 5, 20000).addBatchRetry({ + 'file2statsd_missing_field': { + topic: function() { + monitor_file.setFileStatus({}); + var callback = this.callback; + var received = []; + var errors = []; + var statsd = dgram.createSocket('udp4'); + statsd.on('message', function(d) { + received.push(d.toString()); + }); + statsd.bind(17874); + helper.createAgent([ + 'input://file://input1.txt', + 'filter://regex://?regex=(line2)&fields=unknown_field', + 'output://statsd://127.0.0.1:17874?metric_type=increment&metric_key=toto.bouh.#{unknown_field}', + ], function(agent) { + setTimeout(function() { + fs.appendFile('input1.txt', 'line1\n', function(err) { + assert.ifError(err); + 
fs.appendFile('input1.txt', 'line2\n', function(err) { + assert.ifError(err); + setTimeout(function() { + agent.close(function() { + statsd.close(); + callback(errors, received); + }); + }, 200); + }); + }); + }, 200); + }, function(error) { + errors.push(error); + }); + }, + + check: function(errors, data) { + fs.unlinkSync('input1.txt'); + assert.deepEqual(data.sort(), ['toto.bouh.line2:1|c'].sort()); + assert.equal(errors.length, 0); + } + }, +}, 5, 20000).export(module); diff --git a/test/test_408_gelf.js b/test/test_408_gelf.js new file mode 100644 index 00000000..1f459916 --- /dev/null +++ b/test/test_408_gelf.js @@ -0,0 +1,79 @@ +var vows = require('vows-batch-retry'), + fs = require('fs'), + os = require('os'), + path = require('path'), + zlib = require('zlib'), + dgram = require('dgram'), + assert = require('assert'), + helper = require('./integration_helper.js'), + monitor_file = require('lib/monitor_file'); + +vows.describe('Integration gelf :').addBatchRetry({ + 'file2gelf': { + topic: function() { + monitor_file.setFileStatus({}); + var callback = this.callback; + var received = []; + var gelf = dgram.createSocket('udp4'); + gelf.on('message', function(d) { + zlib.inflate(d, function(err, data) { + assert.ifError(err); + data = JSON.parse(data); + received.push(data); + }); + }); + gelf.bind(17874); + helper.createAgent([ + 'input://file://input1.txt?type=toto', + 'filter://compute_field://a?only_type=toto&value=b', + 'input://file://input2.txt', + 'filter://regex://?regex=^\\[(.*)\\]&fields=timestamp&date_format=DD/MMMM/YYYY:HH:mm:ss ZZ', + 'output://gelf://localhost:17874' + ], function(agent) { + setTimeout(function() { + fs.appendFile('input1.txt', '[31/Jul/2012:18:02:28 +0200] line1\n', function(err) { + assert.ifError(err); + setTimeout(function() { + fs.appendFile('input2.txt', '[31/Jul/2012:20:02:28 +0200] line2\n', function(err) { + assert.ifError(err); + setTimeout(function() { + agent.close(function() { + gelf.close(); + callback(undefined, 
received); + }); + }, 200); + }); + }, 200); + }); + }, 200); + }); + }, + + check: function(err, data) { + fs.unlinkSync('input1.txt'); + fs.unlinkSync('input2.txt'); + assert.ifError(err); + assert.deepEqual(data.sort(), [{ + version: '1.0', + short_message: '[31/Jul/2012:18:02:28 +0200] line1', + timestamp: (new Date('2012-07-31T16:02:28+00:00')).getTime() / 1000, + host: os.hostname(), + facility: 'toto', + level: '6', + _a: 'b', + _path: path.resolve('.') + '/input1.txt', + _type: 'toto', + }, + { + version: '1.0', + short_message: '[31/Jul/2012:20:02:28 +0200] line2', + timestamp: (new Date('2012-07-31T18:02:28+00:00')).getTime() / 1000, + host: os.hostname(), + facility: 'no_facility', + level: '6', + _path: path.resolve('.') + '/input2.txt', + } + ].sort()); + } + }, +}, 5, 20000).export(module); diff --git a/test/test_409_multiline.js b/test/test_409_multiline.js new file mode 100644 index 00000000..03933978 --- /dev/null +++ b/test/test_409_multiline.js @@ -0,0 +1,44 @@ +var vows = require('vows-batch-retry'), + fs = require('fs'), + assert = require('assert'), + helper = require('./integration_helper.js'), + monitor_file = require('lib/monitor_file'); + +vows.describe('Integration multiline :').addBatchRetry({ + 'multiline simple test': { + topic: function() { + monitor_file.setFileStatus({}); + var callback = this.callback; + helper.createAgent([ + 'input://file://input.txt', + 'filter://multiline://?start_line_regex=^1234', + 'output://file://output.txt?serializer=json_logstash', + ], function(agent) { + setTimeout(function() { + fs.appendFile('input.txt', 'line1\nline2\n1234line3\n1234line4\nline5\n', function(err) { + assert.ifError(err); + setTimeout(function() { + agent.close(function() { + callback(null); + }); + }, 200); + }); + }, 200); + }); + }, + + check: function(err) { + assert.ifError(err); + var c = fs.readFileSync('output.txt').toString(); + fs.unlinkSync('input.txt'); + fs.unlinkSync('output.txt'); + + var splitted = c.split('\n'); + 
assert.equal(splitted.length, 4); + assert.equal('', splitted[splitted.length - 1]); + assert.equal(JSON.parse(splitted[0]).message, 'line1\nline2'); + assert.equal(JSON.parse(splitted[1]).message, '1234line3'); + assert.equal(JSON.parse(splitted[2]).message, '1234line4\nline5'); + } + }, +}, 5, 20000).export(module); diff --git a/test/test_410_file2x2x2file.js b/test/test_410_file2x2x2file.js new file mode 100644 index 00000000..ffe3460f --- /dev/null +++ b/test/test_410_file2x2x2file.js @@ -0,0 +1,210 @@ +var vows = require('vows-batch-retry'), + assert = require('assert'), + fs = require('fs'), + path = require('path'), + helper = require('./integration_helper.js'), + monitor_file = require('lib/monitor_file'), + redis_driver = require('redis_driver'); + +function _file2x2x2file(config1, config2, clean_callback, start_callback, stop_callback, check, wait_delay) { + return { + topic: function() { + start_callback = start_callback || function(callback) { + callback(undefined); + }; + stop_callback = stop_callback || function(o, callback) { + callback(); + }; + if (clean_callback) { + clean_callback(); + } + var callback = this.callback; + start_callback(function(o) { + monitor_file.setFileStatus({}); + helper.createAgent(['input://file://main_input.txt?type=test'].concat(config1), function(a1) { + helper.createAgent(config2.concat(['output://file://main_output.txt?serializer=json_logstash']), function(a2) { + setTimeout(function() { + fs.appendFile('main_input.txt', '234 tgerhe grgh\néè\nline3\n', function(err) { + assert.ifError(err); + setTimeout(function() { + a1.close(function() { + a2.close(function() { + stop_callback(o, function() { + callback(null); + }); + }); + }); + }, wait_delay || 200); + }); + }, 200); + }, 200); + }); + }); + }, + + check: function(err) { + assert.ifError(err); + + if (clean_callback) { + clean_callback(); + } + + var c = fs.readFileSync('main_output.txt').toString(); + fs.unlinkSync('main_input.txt'); + 
fs.unlinkSync('main_output.txt'); + + var splitted = c.split('\n'); + assert.equal(splitted.length, 4); + assert.equal('', splitted[splitted.length - 1]); + + check(splitted.slice(0, 3)); + } + }; +} + +function file2x2x2fileNotOrdered(config1, config2, clean_callback, start_callback, stop_callback, wait_delay) { + return _file2x2x2file(config1, config2, clean_callback, start_callback, stop_callback, function(splitted) { + splitted.sort(); + helper.checkResult(splitted[0], { + 'path': path.resolve('.') + '/main_input.txt', + 'message': '234 tgerhe grgh', + 'type': 'test', + '@version': '1' + }, true); + helper.checkResult(splitted[1], { + 'path': path.resolve('.') + '/main_input.txt', + 'message': 'line3', + 'type': 'test', + '@version': '1' + }, true); + helper.checkResult(splitted[2], { + 'path': path.resolve('.') + '/main_input.txt', + 'message': 'éè', + 'type': 'test', + '@version': '1' + }, true); + }, wait_delay); +} + +function file2x2x2file(config1, config2, clean_callback, start_callback, stop_callback, wait_delay) { + return _file2x2x2file(config1, config2, clean_callback, start_callback, stop_callback, function(splitted) { + helper.checkResult(splitted[0], { + 'path': path.resolve('.') + '/main_input.txt', + 'message': '234 tgerhe grgh', + 'type': 'test', + '@version': '1' + }, true); + helper.checkResult(splitted[1], { + 'path': path.resolve('.') + '/main_input.txt', + 'message': 'éè', + 'type': 'test', + '@version': '1' + }, true); + helper.checkResult(splitted[2], { + 'path': path.resolve('.') + '/main_input.txt', + 'message': 'line3', + 'type': 'test', + '@version': '1' + }, true); + }, wait_delay); +} + +var test = vows.describe('Integration file2x2x2file :').addBatchRetry({ + 'redis queue channel transport': file2x2x2file(['output://redis://localhost:17874?key=toto'], ['input://redis://localhost:17874?key=toto'], undefined, function(callback) { + var r = new redis_driver.RedisDriver(); + r.start({ + port: 17874 + }, function() { + callback(r); + 
}); + }, function(r, callback) { + r.stop(callback); + }), +}, 5, 20000).addBatchRetry({ + 'redis pubsub channel transport': file2x2x2file(['output://redis://localhost:17874?channel=toto&method=pubsub'], ['input://redis://localhost:17874?channel=toto&method=pubsub'], undefined, function(callback) { + var r = new redis_driver.RedisDriver(); + r.start({ + port: 17874 + }, function() { + callback(r); + }); + }, function(r, callback) { + r.stop(callback); + }), +}, 5, 20000).addBatchRetry({ + 'redis pubsub channel transport with auth': file2x2x2file(['output://redis://localhost:17874?channel=toto&auth_pass=pass_toto&method=pubsub'], ['input://redis://localhost:17874?channel=toto&auth_pass=pass_toto&method=pubsub'], undefined, function(callback) { + var r = new redis_driver.RedisDriver(); + r.start({ + port: 17874, + requirepass: 'pass_toto' + }, function() { + callback(r); + }); + }, function(r, callback) { + r.stop(callback); + }), +}, 5, 20000).addBatchRetry({ + 'redis pubsub pattern channel transport': file2x2x2file(['output://redis://localhost:17874?channel=pouet_toto&method=pubsub'], ['input://redis://localhost:17874?channel=*toto&pattern_channel=true&method=pubsub'], undefined, function(callback) { + var r = new redis_driver.RedisDriver(); + r.start({ + port: 17874 + }, function() { + callback(r); + }); + }, function(r, callback) { + r.stop(callback); + }), +}, 5, 20000).addBatchRetry({ + 'file transport': file2x2x2file(['output://file://main_middle.txt?serializer=json_logstash'], ['input://file://main_middle.txt'], function() { + if (fs.existsSync('main_middle.txt')) { + fs.unlinkSync('main_middle.txt'); + } + }), +}, 5, 20000).addBatchRetry({ + 'file transport raw input': _file2x2x2file(['output://file://main_middle.txt?serializer=raw'], ['input://file://main_middle.txt?unserializer=raw'], function() { + if (fs.existsSync('main_middle.txt')) { + fs.unlinkSync('main_middle.txt'); + } + }, undefined, undefined, function(l) { + 
assert.equal(JSON.parse(l[0]).message, '234 tgerhe grgh'); + assert.equal(JSON.parse(l[1]).message, 'éè'); + assert.equal(JSON.parse(l[2]).message, 'line3'); + }), +}, 5, 20000).addBatchRetry({ + 'tcp transport': file2x2x2file(['output://tcp://localhost:17874'], ['input://tcp://0.0.0.0:17874']), +}, 5, 20000).addBatchRetry({ + 'websockets transport': file2x2x2file(['output://ws://localhost:17874?serializer=json_logstash'], ['input://ws://0.0.0.0:17874']), +}, 5, 20000).addBatchRetry({ + 'zeromq transport': file2x2x2file(['output://zeromq://tcp://localhost:17874'], ['input://zeromq://tcp://*:17874']), +}, 5, 20000).addBatchRetry({ + 'zeromq transport using msgpack': file2x2x2file(['output://zeromq://tcp://localhost:17874?serializer=msgpack'], ['input://zeromq://tcp://*:17874?unserializer=msgpack']), +}, 5, 20000).addBatchRetry({ + 'unix socket transport': file2x2x2file(['output://unix:///tmp/test_socket'], ['input://unix:///tmp/test_socket']), +}, 5, 20000).addBatchRetry({ + 'udp transport': file2x2x2file(['output://udp://localhost:17874'], ['input://udp://127.0.0.1:17874']), +}, 5, 20000).addBatchRetry({ + 'http transport': file2x2x2fileNotOrdered(['output://http_post://localhost:17874?serializer=json_logstash'], ['input://http://127.0.0.1:17874']), +}, 5, 20000).addBatchRetry({ + 'https transport': file2x2x2fileNotOrdered(['output://http_post://localhost:17874?serializer=json_logstash&ssl=true&ssl_rejectUnauthorized=false'], ['input://http://127.0.0.1:17874?ssl=true&ssl_key=test/ssl/server.key&ssl_cert=test/ssl/server.crt']), +}, 5, 20000).addBatchRetry({ + 'https transport with ca': file2x2x2fileNotOrdered(['output://http_post://localhost:17874?serializer=json_logstash&ssl=true&ssl_ca=test/ssl/root-ca.crt'], ['input://http://127.0.0.1:17874?ssl=true&ssl_key=test/ssl/server.key&ssl_cert=test/ssl/server.crt']), +}, 5, 20000).addBatchRetry({ + 'https transport with ca and client side certificate': 
file2x2x2fileNotOrdered(['output://http_post://localhost:17874?serializer=json_logstash&ssl=true&ssl_ca=test/ssl/root-ca.crt&ssl_key=test/ssl/client.key&ssl_cert=test/ssl/client.crt'], ['input://http://127.0.0.1:17874?ssl=true&ssl_key=test/ssl/server.key&ssl_cert=test/ssl/server.crt&ssl_requestCert=true&ssl_ca=test/ssl/root-ca.crt&ssl_rejectUnauthorized=true']), +}, 5, 20000).addBatchRetry({ + 'tls': file2x2x2fileNotOrdered(['output://tcp://localhost:17874?serializer=json_logstash&ssl=true&ssl_rejectUnauthorized=false'], ['input://tcp://127.0.0.1:17874?ssl=true&ssl_key=test/ssl/server.key&ssl_cert=test/ssl/server.crt']), +}, 5, 20000).addBatchRetry({ + 'tls with ca': file2x2x2fileNotOrdered(['output://tcp://localhost:17874?serializer=json_logstash&ssl=true&ssl_ca=test/ssl/root-ca.crt&ssl_key=test/ssl/client.key&ssl_cert=test/ssl/client.crt'], ['input://tcp://127.0.0.1:17874?ssl=true&ssl_key=test/ssl/server.key&ssl_cert=test/ssl/server.crt&ssl_requestCert=true&ssl_ca=test/ssl/root-ca.crt&ssl_rejectUnauthorized=true']), +}, 5, 20000).addBatchRetry({ + 'rabbitmq standard': file2x2x2file(['output://amqp://localhost:5672?exchange_name=test_node_logstash'], ['input://amqp://localhost:5672?exchange_name=test_node_logstash']), +}, 5, 20000).addBatchRetry({ + 'rabbitmq topic': file2x2x2file(['output://amqp://localhost:5672?exchange_name=test_node_logstash_topic&topic=23'], ['input://amqp://localhost:5672?exchange_name=test_node_logstash_topic&topic=23']), +}, 5, 20000); + +if (fs.existsSync('.sqs')) { + var sqs = fs.readFileSync('.sqs').toString().trim(); + test = test.addBatchRetry({ + 'sqs': file2x2x2fileNotOrdered(['output://sqs://' + sqs], ['input://sqs://' + sqs], undefined, undefined, undefined, 5000), + }, 1, 20000); +} + +test.export(module); \ No newline at end of file diff --git a/test/test_411_http_proxy.js b/test/test_411_http_proxy.js new file mode 100644 index 00000000..e8fba9b2 --- /dev/null +++ b/test/test_411_http_proxy.js @@ -0,0 +1,146 @@ +var vows = 
require('vows-batch-retry'), + http = require('http'), + net = require('net'), + assert = require('assert'), + helper = require('./integration_helper.js'); + +function createHttpTest(config, check_callback, full_check_callback) { + return { + topic: function() { + var callback = this.callback; + var error; + helper.createAgent([ + 'input://tcp://0.0.0.0:17874?type=pouet', + 'output://' + config, + ], function(agent) { + var http_server = http.createServer(function(req, res) { + var body = ''; + req.on('data', function(chunk) { + body += chunk; + }); + req.on('end', function() { + res.writeHead(204, { + 'Connection': 'close' + }); + res.end(); + agent.close(function() { + http_server.close(function() { + setTimeout(function() { + callback(error, { + req: req, + body: body + }); + }, 100); + }); + }); + }); + }).listen(17875); + var c1 = net.createConnection({ + port: 17874 + }, function() { + c1.write('toto'); + c1.end(); + }); + }, function(err) { + error = err; + }); + }, + + check: function(err, reqs) { + if (full_check_callback) { + full_check_callback(err, reqs); + } + else { + assert.ifError(err); + check_callback(reqs); + } + } + }; +} + +function createConnectTest(config, check_callback) { + return { + topic: function() { + var callback = this.callback; + helper.createAgent([ + 'input://tcp://0.0.0.0:17874?type=pouet', + 'output://' + config, + ], function(agent) { + var http_server = http.createServer(function() { + assert.fail('should not be there'); + }).listen(17875); + http_server.on('connect', function(req, socket) { + socket.write('HTTP/1.0 200 Connection established\r\n\r\n'); + socket.destroy(); + agent.close(function() { + http_server.close(function() { + callback(null, { + req: req + }); + }); + }); + }); + var c1 = net.createConnection({ + port: 17874 + }, function() { + c1.write('toto'); + c1.end(); + }); + }, function(err) { + callback(err); + }); + }, + + check: function(err, reqs) { + assert.ifError(err); + check_callback(reqs); + } + }; +} + 
+vows.describe('Integration Http proxy :').addBatchRetry({ + 'no proxy': createHttpTest('http_post://localhost:17875?path=/#{type}', function(req) { + assert.equal(req.req.method, 'POST'); + assert.equal(req.req.url, '/pouet'); + assert.equal(req.body, 'toto'); + assert.equal(req.req.headers['proxy-authorization'], undefined); + }), +}, 5, 20000).addBatchRetry({ + 'no proxy elastic search': createHttpTest('elasticsearch://localhost:17875', function(req) { + assert.equal(req.req.method, 'POST'); + assert.match(req.req.url, /logstash.*logs/); + assert.equal(req.req.headers['proxy-authorization'], undefined); + }), +}, 5, 20000).addBatchRetry({ + 'http proxy': createHttpTest('http_post://toto.com:1234?path=/#{type}&proxy=http://localhost:17875', function(req) { + assert.equal(req.req.method, 'POST'); + assert.equal(req.req.url, 'http://toto.com:1234/pouet'); + assert.equal(req.body, 'toto'); + assert.equal(req.req.headers['proxy-authorization'], undefined); + }), +}, 5, 20000).addBatchRetry({ + 'http proxy elastic search': createHttpTest('elasticsearch://toto.com:1234?proxy=http://localhost:17875', function(req) { + assert.equal(req.req.method, 'POST'); + assert.match(req.req.url, /http:\/\/toto.com:1234\/logstash.*logs/); + assert.equal(req.req.headers['proxy-authorization'], undefined); + }), +}, 5, 20000).addBatchRetry({ + 'http proxy basic auth': createHttpTest('http_post://toto.com:1234?path=/#{type}&proxy=http://a:bc@localhost:17875', function(req) { + assert.equal(req.req.method, 'POST'); + assert.equal(req.req.url, 'http://toto.com:1234/pouet'); + assert.equal(req.body, 'toto'); + assert.equal(req.req.headers['proxy-authorization'], 'Basic YTpiYw=='); + }), +}, 5, 20000).addBatchRetry({ + 'https proxy': createConnectTest('http_post://toto.com:1234?path=/#{type}&ssl=true&proxy=http://localhost:17875', function(req) { + assert.equal(req.req.method, 'CONNECT'); + assert.equal(req.req.url, 'toto.com:1234'); + assert.equal(req.req.headers['proxy-authorization'], 
undefined); + }), +}, 5, 20000).addBatchRetry({ + 'https proxy basic auth': createConnectTest('http_post://toto.com:1234?path=/#{type}&ssl=true&proxy=http://a:bc@localhost:17875', function(req) { + assert.equal(req.req.method, 'CONNECT'); + assert.equal(req.req.url, 'toto.com:1234'); + assert.equal(req.req.headers['proxy-authorization'], 'Basic YTpiYw=='); + }), +}, 5, 20000).export(module); diff --git a/test/test_412_tls_appendcert.js b/test/test_412_tls_appendcert.js new file mode 100644 index 00000000..b80a4d00 --- /dev/null +++ b/test/test_412_tls_appendcert.js @@ -0,0 +1,122 @@ +var vows = require('vows-batch-retry'), + fs = require('fs'), + net = require('net'), + assert = require('assert'), + helper = require('./integration_helper.js'); + +vows.describe('Integration tls appendpeercert:').addBatchRetry({ + 'tls info': { + topic: function() { + var callback = this.callback; + helper.createAgent([ + 'input://tcp://localhost:17874?ssl=true&ssl_key=test/ssl/server.key&ssl_cert=test/ssl/server.crt&ssl_requestCert=true&ssl_ca=test/ssl/root-ca.crt&ssl_rejectUnauthorized=true', + 'output://file://output.txt?serializer=json_logstash', + ], function(agent) { + helper.createAgent([ + 'input://tcp://localhost:17873', + 'output://tcp://localhost:17874?serializer=raw&ssl=true&ssl_ca=test/ssl/root-ca.crt&ssl_key=test/ssl/client.key&ssl_cert=test/ssl/client.crt', + ], function(agent2) { + var c = net.createConnection({ + port: 17873 + }, function() { + c.write('toto'); + c.end(); + }); + c.on('end', function() { + setTimeout(function() { + agent2.close(function() { + agent.close(function() { + callback(null); + }); + }); + }, 100); + }); + }); + }); + }, + + check: function(err) { + assert.ifError(err); + var c1 = fs.readFileSync('output.txt').toString(); + fs.unlinkSync('output.txt'); + + var splitted = c1.split('\n'); + assert.equal(splitted.length, 2); + assert.equal('', splitted[splitted.length - 1]); + var client_tls_info = { + authorized: true, + peer_cert: { + 
subject: { + C: 'FR', + ST: 'Node-Logstash', + O: 'Node-Logstash', + CN: 'client_name' + }, + issuer: { + C: 'FR', + ST: 'Node-Logstash', + O: 'Node-Logstash', + CN: 'ca.node-logstash.testing' + }, + valid_from: 'Nov 15 10:02:44 2013 GMT', + valid_to: 'Nov 13 10:02:44 2023 GMT', + fingerprint: '9D:39:A4:D8:B3:02:0E:4E:F5:42:1B:63:D9:86:E3:45:3E:51:A1:84' + }, + }; + helper.checkResult(splitted[0], { + '@version': '1', + 'host': '127.0.0.1', + 'tcp_port': 17874, + 'message': 'toto', + 'tls': client_tls_info + }); + } + }, +}, 5, 20000).addBatchRetry({ + 'no appendpeercert': { + topic: function() { + var callback = this.callback; + helper.createAgent([ + 'input://tcp://localhost:17874?ssl=true&ssl_key=test/ssl/server.key&ssl_cert=test/ssl/server.crt&ssl_requestCert=true&ssl_ca=test/ssl/root-ca.crt&ssl_rejectUnauthorized=true&appendPeerCert=false', + 'output://file://output.txt?serializer=json_logstash', + ], function(agent) { + helper.createAgent([ + 'input://tcp://localhost:17873', + 'output://tcp://localhost:17874?serializer=raw&ssl=true&ssl_ca=test/ssl/root-ca.crt&ssl_key=test/ssl/client.key&ssl_cert=test/ssl/client.crt', + ], function(agent2) { + var c = net.createConnection({ + port: 17873 + }, function() { + c.write('toto'); + c.end(); + }); + c.on('end', function() { + setTimeout(function() { + agent2.close(function() { + agent.close(function() { + callback(null); + }); + }); + }, 100); + }); + }); + }); + }, + + check: function(err) { + assert.ifError(err); + var c1 = fs.readFileSync('output.txt').toString(); + fs.unlinkSync('output.txt'); + + var splitted = c1.split('\n'); + assert.equal(splitted.length, 2); + assert.equal('', splitted[splitted.length - 1]); + helper.checkResult(splitted[0], { + '@version': '1', + 'host': '127.0.0.1', + 'tcp_port': 17874, + 'message': 'toto' + }); + } + }, +}, 5, 20000).export(module); diff --git a/test/test_413_closing_inputs.js b/test/test_413_closing_inputs.js new file mode 100644 index 00000000..c84f6f7c --- /dev/null 
+++ b/test/test_413_closing_inputs.js @@ -0,0 +1,158 @@ +var vows = require('vows-batch-retry'), + fs = require('fs'), + dgram = require('dgram'), + assert = require('assert'), + monitor_file = require('lib/monitor_file'), + helper = require('./integration_helper.js'); + +function loop(x, socket, delay, callback) { + if (x === 0) { + return callback(); + } + var line = new Buffer('line ' + x + '\n'); + socket.send(line, 0, line.length, 17874, 'localhost', function(err) { + if (err) { + return callback(err); + } + fs.appendFile('input.txt', line, function(err) { + if (err) { + return callback(err); + } + setTimeout(function() { + loop(x - 1, socket, delay, callback); + }, delay); + }); + }); +} + +vows.describe('Integration closing inputs:').addBatchRetry({ + 'normal': { + topic: function() { + var callback = this.callback; + monitor_file.setFileStatus({}); + helper.createAgent([ + 'input://udp://localhost:17874?type=udp', + 'input://file://input.txt?type=file', + 'output://file://output_file.txt?only_type=file&serializer=json_logstash', + 'output://file://output_udp.txt?only_type=udp&serializer=json_logstash', + ], function(agent) { + var socket = dgram.createSocket('udp4'); + loop(500, socket, 10, function(err) { + assert.ifError(err); + setTimeout(function() { + socket.close(); + agent.close(callback); + }, 100); + }); + }); + }, + + check: function(err) { + assert.ifError(err); + var c1 = fs.readFileSync('output_file.txt').toString(); + var c2 = fs.readFileSync('output_udp.txt').toString(); + fs.unlinkSync('output_file.txt'); + fs.unlinkSync('output_udp.txt'); + fs.unlinkSync('input.txt'); + + var splitted_1 = c1.split('\n'); + assert.equal(splitted_1.length, 500 + 1); + + var splitted_2 = c2.split('\n'); + assert.equal(splitted_2.length, 500 + 1); + } + }, +}, 5, 20000).addBatch({ + 'closing before stop': { + topic: function() { + var callback = this.callback; + monitor_file.setFileStatus({}); + helper.createAgent([ + 'input://udp://localhost:17874?type=udp', + 
'input://file://input.txt?type=file', + 'output://file://output_file.txt?only_type=file&serializer=json_logstash', + 'output://file://output_udp.txt?only_type=udp&serializer=json_logstash', + ], function(agent) { + var socket = dgram.createSocket('udp4'); + loop(500, socket, 10, function(err) { + assert.ifError(err); + setTimeout(function() { + agent.close_inputs(function(err) { + assert.isTrue(agent.closed_inputs); + assert.ifError(err); + setTimeout(function() { + socket.close(); + agent.close(callback); + }, 100); + }); + }, 100); + }); + }); + }, + + check: function(err) { + assert.ifError(err); + var c1 = fs.readFileSync('output_file.txt').toString(); + var c2 = fs.readFileSync('output_udp.txt').toString(); + fs.unlinkSync('output_file.txt'); + fs.unlinkSync('output_udp.txt'); + fs.unlinkSync('input.txt'); + + var splitted_1 = c1.split('\n'); + assert.equal(splitted_1.length, 500 + 1); + + var splitted_2 = c2.split('\n'); + assert.equal(splitted_2.length, 500 + 1); + } + }, +}, 5, 20000).addBatchRetry({ + 'closing': { + topic: function() { + var callback = this.callback; + monitor_file.setFileStatus({}); + helper.createAgent([ + 'input://udp://localhost:17874?type=udp', + 'input://file://input.txt?type=file', + 'output://file://output_file.txt?only_type=file&serializer=json_logstash', + 'output://file://output_udp.txt?only_type=udp&serializer=json_logstash', + ], function(agent) { + var socket = dgram.createSocket('udp4'); + loop(500, socket, 5, function(err) { + assert.ifError(err); + setTimeout(function() { + socket.close(); + agent.close(callback); + }, 100); + }); + setTimeout(function() { + assert.isFalse(agent.closed_inputs); + agent.close_inputs(function(err) { + assert.isTrue(agent.closed_inputs); + assert.ifError(err); + setTimeout(function() { + agent.start_inputs(function(err) { + assert.ifError(err); + assert.isFalse(agent.closed_inputs); + }); + }, 500); + }); + }, 1000); + }); + }, + + check: function(err) { + assert.ifError(err); + var c1 = 
fs.readFileSync('output_file.txt').toString(); + var c2 = fs.readFileSync('output_udp.txt').toString(); + fs.unlinkSync('output_file.txt'); + fs.unlinkSync('output_udp.txt'); + fs.unlinkSync('input.txt'); + + var splitted_1 = c1.split('\n'); + assert.equal(splitted_1.length, 500 + 1); + + var splitted_2 = c2.split('\n'); + assert.lesser(splitted_2.length, 500); + } + }, +}, 5, 20000).export(module); diff --git a/test/test_414_output_zeromq.js b/test/test_414_output_zeromq.js new file mode 100644 index 00000000..9e544e92 --- /dev/null +++ b/test/test_414_output_zeromq.js @@ -0,0 +1,223 @@ +var vows = require('vows-batch-retry'), + fs = require('fs'), + dgram = require('dgram'), + assert = require('assert'), + monitor_file = require('lib/monitor_file'), + helper = require('./integration_helper.js'); + +function loop(x, socket, callback) { + if (x === 0) { + return callback(); + } + var line = new Buffer('line ' + x + '\n'); + socket.send(line, 0, line.length, 17874, 'localhost', function(err) { + if (err) { + return callback(err); + } + fs.appendFile('input.txt', line, function(err) { + if (err) { + return callback(err); + } + loop(x - 1, socket, callback); + }); + }); +} + +vows.describe('Integration zeromq:').addBatch({ + 'load balancing': { + topic: function() { + var callback = this.callback; + monitor_file.setFileStatus({}); + helper.createAgent([ + 'input://zeromq://tcp://0.0.0.0:17875', + 'output://file://output1.txt', + ], function(agent) { + helper.createAgent([ + 'input://zeromq://tcp://0.0.0.0:17876', + 'output://file://output2.txt', + ], function(agent2) { + helper.createAgent([ + 'input://udp://localhost:17874?type=udp', + 'output://zeromq://tcp://localhost:17875,tcp://localhost:17876', + ], function(agent3) { + var socket = dgram.createSocket('udp4'); + socket.send(new Buffer('l1'), 0, 2, 17874, 'localhost', function(err) { + assert.ifError(err); + socket.send(new Buffer('l2'), 0, 2, 17874, 'localhost', function(err) { + assert.ifError(err); + 
setTimeout(function() { + socket.close(); + agent3.close(function() { + agent2.close(function() { + agent.close(callback); + }); + }); + }, 200); + }); + }); + }); + }); + }); + }, + + check: function(err) { + assert.ifError(err); + var c1 = fs.readFileSync('output1.txt').toString(); + var c2 = fs.readFileSync('output2.txt').toString(); + fs.unlinkSync('output1.txt'); + fs.unlinkSync('output2.txt'); + + var splitted1 = c1.split('\n'); + assert.equal(splitted1.length, 2); + + var splitted2 = c2.split('\n'); + assert.equal(splitted2.length, 2); + } + }, +}).addBatchRetry({ + 'no limit': { + topic: function() { + var callback = this.callback; + monitor_file.setFileStatus({}); + helper.createAgent([ + 'input://udp://localhost:17874?type=udp', + 'output://zeromq://tcp://localhost:17875', + ], function(agent) { + var socket = dgram.createSocket('udp4'); + loop(1000, socket, function(err) { + assert.ifError(err); + setTimeout(function() { + helper.createAgent([ + 'input://zeromq://tcp://0.0.0.0:17875', + 'output://file://output.txt', + ], function(agent2) { + setTimeout(function() { + socket.close(); + agent2.close(function() { + agent.close(callback); + }); + }, 1000); + }); + }, 100); + }); + }); + }, + + check: function(err) { + assert.ifError(err); + var c = fs.readFileSync('output.txt').toString(); + fs.unlinkSync('output.txt'); + fs.unlinkSync('input.txt'); + + var splitted = c.split('\n'); + assert.equal(splitted.length, 1000 + 1); + } + }, +}, 5, 20000).addBatchRetry({ + 'high watermark set to 100': { + topic: function() { + var callback = this.callback; + monitor_file.setFileStatus({}); + helper.createAgent([ + 'input://udp://localhost:17874?type=udp', + 'output://zeromq://tcp://localhost:17875?zmq_high_watermark=100&zmq_check_interval=100', + ], function(agent) { + var socket = dgram.createSocket('udp4'); + loop(1000, socket, function(err) { + assert.ifError(err); + setTimeout(function() { + helper.createAgent([ + 'input://zeromq://tcp://0.0.0.0:17875', + 
'output://file://output.txt', + ], function(agent2) { + setTimeout(function() { + assert.ifError(err); + socket.close(); + agent2.close(function() { + agent.close(callback); + }); + }, 1000); + }); + }, 500); + }); + }); + }, + + check: function(err) { + assert.ifError(err); + var c = fs.readFileSync('output.txt').toString(); + fs.unlinkSync('output.txt'); + fs.unlinkSync('input.txt'); + + var splitted = c.split('\n'); + assert.equal(splitted.length, 1000 + 1); + } + }, +}, 5, 20000).addBatchRetry({ + 'closed inputs': { + topic: function() { + var callback = this.callback; + var received = false; + monitor_file.setFileStatus({}); + helper.createAgent([ + 'input://udp://localhost:17874?type=udp', + 'input://file://input.txt?type=file', + 'output://zeromq://tcp://localhost:17875?zmq_high_watermark=100&zmq_check_interval=100&zmq_threshold_up=500&zmq_threshold_down=200', + ], function(agent) { + agent.once('alarm_mode', function(alarm) { + assert.equal(true, alarm); + agent.once('alarm_mode', function(alarm) { + assert.equal(false, alarm); + received = true; + agent.once('alarm_mode', function() { + assert.fail(); + }); + }); + }); + var socket = dgram.createSocket('udp4'); + assert.equal(false, agent.closed_inputs); + loop(500, socket, function(err) { + assert.ifError(err); + setTimeout(function() { + assert.equal(true, agent.closed_inputs); + loop(500, socket, function(err) { + assert.ifError(err); + setTimeout(function() { + helper.createAgent([ + 'input://zeromq://tcp://0.0.0.0:17875', + 'output://file://output_udp.txt?only_type=udp', + 'output://file://output_file.txt?only_type=file', + ], function(agent2) { + setTimeout(function() { + assert.ifError(err); + assert.equal(false, agent.closed_inputs); + assert.equal(true, received); + socket.close(); + agent2.close(function() { + agent.close(callback); + }); + }, 1000); + }); + }, 500); + }); + }, 50); + }); + }); + }, + + check: function(err) { + assert.ifError(err); + var c1 = 
fs.readFileSync('output_file.txt').toString(); + var c2 = fs.readFileSync('output_udp.txt').toString(); + fs.unlinkSync('output_file.txt'); + fs.unlinkSync('output_udp.txt'); + fs.unlinkSync('input.txt'); + + var splitted_1 = c1.split('\n'); + assert.equal(splitted_1.length, 1000 + 1); + + var splitted_2 = c2.split('\n'); + assert.lesser(splitted_2.length, 1000); + } + }, +}, 5, 20000).export(module); diff --git a/test/test_415_output_elasticsearch_zeromq.js b/test/test_415_output_elasticsearch_zeromq.js new file mode 100644 index 00000000..ad787973 --- /dev/null +++ b/test/test_415_output_elasticsearch_zeromq.js @@ -0,0 +1,100 @@ +var vows = require('vows-batch-retry'), + fs = require('fs'), + dgram = require('dgram'), + assert = require('assert'), + monitor_file = require('lib/monitor_file'), + helper = require('./integration_helper.js'); + +vows.describe('Integration elasticsearch zeromq:').addBatchRetry({ + 'elasticsearch test': { + topic: function() { + var callback = this.callback; + monitor_file.setFileStatus({}); + helper.createAgent([ + 'input://udp://localhost:17874?type=udp', + 'output://elasticsearch_zeromq://tcp://localhost:17875', + ], function(agent) { + var socket = dgram.createSocket('udp4'); + helper.createAgent([ + 'input://zeromq://tcp://0.0.0.0:17875', + 'output://file://output.txt', + ], function(agent2) { + var line = new Buffer('message 42\n'); + socket.send(line, 0, line.length, 17874, 'localhost', function(err) { + if (err) { + return callback(err); + } + setTimeout(function() { + socket.close(); + agent2.close(function() { + agent.close(callback); + }); + }, 200); + }); + }); + }); + }, + + check: function(err) { + assert.ifError(err); + var c = fs.readFileSync('output.txt').toString(); + fs.unlinkSync('output.txt'); + + var splitted = c.split('\n'); + assert.equal(splitted.length, 2); + assert.equal(splitted[1], ''); + var l = splitted[0].split('|'); + assert.equal(l.length, 3); + assert.equal(l[0], 'POST'); + assert.match(l[1], 
/\/logstash-20.*\/logs/); + helper.checkResult(l[2], { + message: 'message 42', + host: '127.0.0.1', + udp_port: 17874, + type: 'udp', + '@version': '1' + }); + } + }, +}, 5, 20000).addBatchRetry({ + 'standard test': { + topic: function() { + var callback = this.callback; + monitor_file.setFileStatus({}); + helper.createAgent([ + 'input://udp://localhost:17874?type=udp', + 'output://zeromq://tcp://localhost:17875', + ], function(agent) { + var socket = dgram.createSocket('udp4'); + helper.createAgent([ + 'input://zeromq://tcp://0.0.0.0:17875', + 'output://file://output.txt', + ], function(agent2) { + var line = new Buffer('message 42\n'); + socket.send(line, 0, line.length, 17874, 'localhost', function(err) { + if (err) { + return callback(err); + } + setTimeout(function() { + socket.close(); + agent2.close(function() { + agent.close(callback); + }); + }, 200); + }); + }); + }); + }, + + check: function(err) { + assert.ifError(err); + var c = fs.readFileSync('output.txt').toString(); + fs.unlinkSync('output.txt'); + + var splitted = c.split('\n'); + assert.equal(splitted.length, 2); + assert.equal(splitted[1], ''); + assert.equal(splitted[0], 'message 42'); + } + }, +}, 5, 20000).export(module); diff --git a/test/test_416_wildcard.js b/test/test_416_wildcard.js new file mode 100644 index 00000000..1310ae0c --- /dev/null +++ b/test/test_416_wildcard.js @@ -0,0 +1,168 @@ +var vows = require('vows-batch-retry'), + fs = require('fs'), + path = require('path'), + assert = require('assert'), + helper = require('./integration_helper.js'), + monitor_file = require('lib/monitor_file'); + +function file100(config) { + var timer = 200; + return { + topic: function() { + monitor_file.setFileStatus({}); + var callback = this.callback; + helper.createAgent([ + config, + 'output://file://output.txt?serializer=json_logstash', + ], function(agent) { + setTimeout(function() { + var f = function(k, callback) { + if (k === 110) { + return callback(); + } + fs.appendFile('input' + k + 
'.txt', 'line' + k + '\n', function(err) { + assert.ifError(err); + f(k + 1, callback); + }); + }; + f(0, function() { + setTimeout(function() { + agent.close(function() { + callback(null); + }); + }, timer); + }); + }, 500); + }); + }, + + check: function(err) { + assert.ifError(err); + var c = fs.readFileSync('output.txt').toString(); + fs.unlinkSync('output.txt'); + for (var i = 0; i < 110; i++) { + fs.unlinkSync('input' + i + '.txt'); + } + + var splitted = c.split('\n'); + assert.equal(splitted.length, 91); + assert.equal('', splitted[splitted.length - 1]); + for (var k = 10; k < 100; k++) { + helper.checkResult(splitted[k - 10], { + '@version': '1', + 'path': path.resolve('.') + '/input' + k + '.txt', + 'message': 'line' + k + }, true); + } + } + }; +} + +vows.describe('Integration file wildcard :').addBatchRetry({ + '110 files': file100('input://file://input%3F%3F.txt'), +}, 5, 20000).addBatchRetry({ + '110 files use tail': file100('input://file://input%3F%3F.txt?use_tail=true'), +}, 5, 20000).addBatchRetry({ + 'file already exists and remove': { + topic: function() { + monitor_file.setFileStatus({}); + var callback = this.callback; + fs.appendFile('input01.txt', 'toto\n', function(err) { + assert.ifError(err); + helper.createAgent([ + 'input://file://input%3F%3F.txt', + 'output://file://output.txt?serializer=json_logstash', + ], function(agent) { + setTimeout(function() { + fs.appendFile('input01.txt', 'tata\n', function(err) { + assert.ifError(err); + setTimeout(function() { + fs.unlink('input01.txt', function(err) { + assert.ifError(err); + setTimeout(function() { + fs.appendFile('input01.txt', 'titi\n', function(err) { + assert.ifError(err); + setTimeout(function() { + agent.close(function() { + callback(null); + }); + }, 200); + }); + }, 200); + }); + }, 200); + }); + }, 200); + }); + }); + }, + + check: function(err) { + assert.ifError(err); + var c = fs.readFileSync('output.txt').toString(); + fs.unlinkSync('output.txt'); + var splitted = 
c.split('\n'); + assert.equal(splitted.length, 3); + assert.equal('', splitted[splitted.length - 1]); + helper.checkResult(splitted[0], { + '@version': '1', + 'path': path.resolve('.') + '/input01.txt', + 'message': 'tata' + }, true); + helper.checkResult(splitted[1], { + '@version': '1', + 'path': path.resolve('.') + '/input01.txt', + 'message': 'titi' + }, true); + } + }, +}, 5, 20000).addBatchRetry({ + 'file already exists use tail': { + topic: function() { + monitor_file.setFileStatus({}); + var callback = this.callback; + fs.appendFile('input01.txt', 'toto\n', function(err) { + assert.ifError(err); + helper.createAgent([ + 'input://file://input%3F%3F.txt?use_tail=true', + 'output://file://output.txt?serializer=json_logstash', + ], function(agent) { + setTimeout(function() { + fs.appendFile('input01.txt', 'tata\n', function(err) { + assert.ifError(err); + setTimeout(function() { + fs.unlink('input01.txt', function(err) { + assert.ifError(err); + setTimeout(function() { + fs.appendFile('input01.txt', 'titi\n', function(err) { + assert.ifError(err); + setTimeout(function() { + agent.close(function() { + callback(null); + }); + }, 200); + }); + }, 200); + }); + }, 200); + }); + }, 200); + }); + }); + }, + + check: function(err) { + assert.ifError(err); + var c = fs.readFileSync('output.txt').toString(); + fs.unlinkSync('output.txt'); + var splitted = c.split('\n'); + assert.equal(splitted.length, 2); + assert.equal('', splitted[splitted.length - 1]); + helper.checkResult(splitted[0], { + '@version': '1', + 'path': path.resolve('.') + '/input01.txt', + 'message': 'tata' + }, true); + } + } +}, 5, 20000).export(module); diff --git a/test/test_417_filter_no_params_and_only_type.js b/test/test_417_filter_no_params_and_only_type.js new file mode 100644 index 00000000..f998bf15 --- /dev/null +++ b/test/test_417_filter_no_params_and_only_type.js @@ -0,0 +1,41 @@ +var vows = require('vows-batch-retry'), + fs = require('fs'), + assert = require('assert'), + helper = 
require('./integration_helper.js'), + monitor_file = require('lib/monitor_file'); + +vows.describe('Integration filter without params and only_type (issue #62)').addBatchRetry({ + 'issue 62': { + topic: function() { + monitor_file.setFileStatus({}); + var callback = this.callback; + helper.createAgent([ + 'input://file://input.txt?type=toto', + 'filter://bunyan://?only_type=tata', + 'output://file://output.txt', + ], function(agent) { + setTimeout(function() { + fs.appendFile('input.txt', '{"name":"myapp","hostname":"banquise.local","pid":6442,"level":30,"msg":"hi","time":"2014-05-31T20:32:53.902Z","v":0}\n', function(err) { + assert.ifError(err); + setTimeout(function() { + agent.close(function() { + callback(null); + }); + }, 200); + }); + }, 200); + }); + }, + + check: function(err) { + assert.ifError(err); + var c = fs.readFileSync('output.txt').toString(); + fs.unlinkSync('input.txt'); + fs.unlinkSync('output.txt'); + + var splitted = c.split('\n'); + assert.equal(splitted.length, 2); + assert.equal(splitted[0], '{"name":"myapp","hostname":"banquise.local","pid":6442,"level":30,"msg":"hi","time":"2014-05-31T20:32:53.902Z","v":0}'); + } + }, +}, 5, 20000).export(module); diff --git a/test/test_418_good_tcp_closing.js b/test/test_418_good_tcp_closing.js new file mode 100644 index 00000000..95884d1d --- /dev/null +++ b/test/test_418_good_tcp_closing.js @@ -0,0 +1,35 @@ +var vows = require('vows-batch-retry'), + net = require('net'), + assert = require('assert'), + helper = require('./integration_helper.js'); + +vows.describe('Integration good tcp closing (#75)').addBatchRetry({ + 'issue 75': { + topic: function() { + var callback = this.callback; + helper.createAgent([ + 'input://tcp://127.0.0.1:45567', + ], function(agent) { + var c = net.connect({host: '127.0.0.1', port: 45567}); + c.on('connect', function() { + var closed = false; + c.on('close', function() { + closed = true; + }); + setTimeout(function() { + agent.close(function() { + setTimeout(function() { 
+ callback(null, closed); + }, 100); + }); + }, 200); + }); + }); + }, + + check: function(err, closed) { + assert.ifError(err); + assert.equal(closed, true); + } + }, +}, 5, 20000).export(module); diff --git a/test/test_419_tcp_output_auto_reconnect.js b/test/test_419_tcp_output_auto_reconnect.js new file mode 100644 index 00000000..814715f3 --- /dev/null +++ b/test/test_419_tcp_output_auto_reconnect.js @@ -0,0 +1,64 @@ +var vows = require('vows-batch-retry'), + net = require('net'), + dgram = require('dgram'), + assert = require('assert'), + helper = require('./integration_helper.js'); + +vows.describe('Integration tcp output auto reconnect (#70)').addBatchRetry({ + 'issue 75': { + topic: function() { + var callback = this.callback; + helper.createAgent([ + 'input://udp://127.0.0.1:45567', + 'output://tcp://127.0.0.1:45568', + ], function(agent) { + var socket = dgram.createSocket('udp4'); + var udp_send = function(s) { + var buffer = new Buffer(s); + socket.send(buffer, 0, buffer.length, 45567, 'localhost', function(err, bytes) { + if (err || bytes !== buffer.length) { + assert.fail('Unable to send udp packet'); + } + }); + }; + var current_connection; + var server = net.createServer(function(cc) { + current_connection = cc; + cc.on('data', function(l) { + assert(l.toString().match(/toto1/)); + cc.end(); + server.close(function() { + setTimeout(function() { + udp_send('toto2'); + setTimeout(function() { + server = net.createServer(function(cc) { + cc.on('data', function(l) { + assert(l.toString().match(/toto3/)); + cc.end(); + server.close(function() { + agent.close(callback); + }); + }); + }); + server.on('listening', function() { + udp_send('toto3'); + }); + server.listen(45568); + }, 1000); + }, 500); + }); + }); + }); + server.on('listening', function() { + udp_send('toto1'); + }); + server.listen(45568); + }, function() { + }); + }, + + check: function(err) { + assert.ifError(err); + } + }, +}, 5, 20000).export(module); diff --git 
a/test/test_420_input_gae.js b/test/test_420_input_gae.js new file mode 100644 index 00000000..64425686 --- /dev/null +++ b/test/test_420_input_gae.js @@ -0,0 +1,159 @@ +var vows = require('vows-batch-retry'), + fs = require('fs'), + url = require('url'), + querystring = require('querystring'), + assert = require('assert'), + http = require('http'), + helper = require('./integration_helper.js'); + +var m1 = { + http_remote_ip: '8.8.8.8', + http_path: '/ping?toto=32', + http_status: 200, + http_bytes_sent: 146, + http_referer: '', + http_user_agent: 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2145.4 Safari/537.36', + http_delay: 60, + http_method: 'GET', + http_host: 'test.appspot.com', + cost: 1.6316e-8, + '@timestamp': '2014-09-10T07:59:43.927+0000', + request_id: '541014ef00ff0e26606f6190970001737e707573682d696e746567726174696f6e0001312d776172000100', + message: '8.8.8.8 - - [10/Sep/2014:00:59:43 -0700] "GET /ping?toto=32 HTTP/1.1" 200 146 - "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2145.4 Safari/537.36"', +}; + +var m2 = { + request_id: '541008fd00ff0402e8089aecc60001737e707573682d696e746567726174696f6e0001312d776172000100', + message: 'poeut', + log_level: 'INFO', + '@timestamp': '2014-09-10T08:21:17.323+0000', +}; + +function mock() { + var counter = 0; + var server = http.createServer(function(req, res) { + var u = url.parse(req.url); + var qs = querystring.parse(u.query); + if (qs.log_key !== 'toto') { + res.writeHead(401); + return res.end(); + } + if (counter === 0) { + counter += 1; + var s1 = JSON.stringify(m1) + '\n'; + res.writeHead(200, {'x-log-end-timestamp': 666}); + res.write(s1.substring(0, 200)); + res.write(s1.substring(200, 20000)); + return res.end(); + } + if (counter === 1) { + if (qs.start_timestamp !== '666') { + res.writeHead(500); + return res.end(); + } + counter += 1; + var s2 = JSON.stringify(m2) + '\n'; + 
res.writeHead(200); + return res.end(s2); + } + }); + return server; +} + +vows.describe('Integration input gae:').addBatchRetry({ + 'input gae': { + topic: function() { + var callback = this.callback; + var server = mock(); + server.listen(56534); + server.on('listening', function() { + helper.createAgent([ + 'input://gae://localhost:56534?key=toto&polling=1&type=titi', + 'output://file://output1.txt?only_type=titi', + ], function(agent) { + setTimeout(function() { + agent.close(function() { + server.close(callback); + }); + }, 1500); + }); + }); + }, + + check: function(err) { + assert.ifError(err); + var c1 = fs.readFileSync('output1.txt').toString(); + fs.unlinkSync('output1.txt'); + + var splitted = c1.split('\n'); + assert.equal(splitted.length, 3); + + assert.equal(splitted[0], '8.8.8.8 - - [10/Sep/2014:00:59:43 -0700] "GET /ping?toto=32 HTTP/1.1" 200 146 - "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2145.4 Safari/537.36"'); + assert.equal(splitted[1], 'poeut'); + } + }, +}, 5, 20000).addBatchRetry({ + 'input gae with type': { + topic: function() { + var callback = this.callback; + var server = mock(); + server.listen(56534); + server.on('listening', function() { + helper.createAgent([ + 'input://gae://localhost:56534?key=toto&polling=1&access_logs_type=toto&access_logs_field_name=http_remote_ip', + 'output://file://output1.txt?only_type=toto', + ], function(agent) { + setTimeout(function() { + agent.close(function() { + server.close(callback); + }); + }, 1500); + }); + }); + }, + + check: function(err) { + assert.ifError(err); + var c1 = fs.readFileSync('output1.txt').toString(); + fs.unlinkSync('output1.txt'); + + var splitted = c1.split('\n'); + assert.equal(splitted.length, 2); + + assert.equal(splitted[0], '8.8.8.8 - - [10/Sep/2014:00:59:43 -0700] "GET /ping?toto=32 HTTP/1.1" 200 146 - "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2145.4 
Safari/537.36"'); + } + }, +}, 5, 20000).addBatchRetry({ + 'input gae wrong key': { + topic: function() { + var callback = this.callback; + var server = mock(); + server.listen(56534); + server.on('listening', function() { + helper.createAgent([ + 'input://gae://localhost:56534?key=toto2&polling=1', + 'output://file://output1.txt?only_type=toto', + ], function(agent) { + setTimeout(function() { + agent.close(function() { + if ( http.globalAgent.sockets['localhost:56534']) { + http.globalAgent.sockets['localhost:56534'].forEach(function(x) { + x.end(); + }); + } + server.close(function() { + callback(); + }); + }); + }, 1500); + }, function() { + }); + }); + }, + + check: function(err) { + assert.ifError(err); + assert.equal(false, fs.existsSync('output1.txt')); + } + }, +}, 5, 20000).export(module); diff --git a/test/test_421_logstash.js b/test/test_421_logstash.js new file mode 100644 index 00000000..352068f0 --- /dev/null +++ b/test/test_421_logstash.js @@ -0,0 +1,165 @@ +var vows = require('vows-batch-retry'), + assert = require('assert'), + fs = require('fs'), + logger = require('log4node'), + logstash_config = require('logstash_config'), + config_mapper = require('lib/config_mapper'), + patterns_loader = require('../lib/lib/patterns_loader'), + async = require('async'), + agent = require('agent'); + +patterns_loader.add('lib/patterns'); + +function make_test(config_file, input, output_callback) { + var r = {}; + r[config_file] = { + topic: function() { + var config = fs.readFileSync('test/test_421_logstash/base').toString() + '\n' + fs.readFileSync('test/test_421_logstash/' + config_file).toString(); + var c = logstash_config.parse(config); + var a = agent.create(); + var callback = this.callback; + fs.writeFileSync('input.txt', ''); + a.on('error', function(err) { + logger.error(err); + assert.ifError(err); + }); + a.start(config_mapper.map(c), function(err) { + assert.ifError(err); + setTimeout(function() { + async.eachSeries(input, function(x, callback) { + 
fs.appendFile('input.txt', x + '\n', function(err) { + if (err) { + return callback(err); + } + setTimeout(callback, 50); + }); + }, function() { + setTimeout(function() { + a.close(callback); + }, 200); + }); + }, 200); + }); + }, + check: function(err) { + assert.ifError(err); + fs.unlinkSync('input.txt'); + var output = fs.readFileSync('output.txt'); + fs.unlinkSync('output.txt'); + var lines = output.toString().split('\n'); + lines.pop(); + output_callback(lines.map(function(x) { + return JSON.parse(x); + })); + } + }; + return r; +} + +vows.describe('Logstash integration tests').addBatch( +// make_test('simple', [ +// 'abcd', +// 'defg', +// ], function(l) { +// assert.equal(2, l.length); +// assert.equal('abcd', l[0].message); +// assert.equal('defg', l[1].message); +// }) +// ).addBatch( +// make_test('simple_if', [ +// 'abcd', +// 'defg', +// ], function(l) { +// assert.equal(1, l.length); +// assert.equal('defg', l[0].message); +// }) +// ).addBatch( +// make_test('grep', [ +// 'abcd', +// 'defg', +// ], function(l) { +// assert.equal(1, l.length); +// assert.equal('defg', l[0].message); +// }) +// ).addBatch( +// make_test('if_regex', [ +// 'abcd', +// 'defgab', +// 'hjh', +// ], function(l) { +// assert.equal(l.length, 2); +// assert.equal('abcd', l[0].message); +// assert.equal('defgab', l[1].message); +// }) +// ).addBatch( +// make_test('else_else_if', [ +// 'abcd', +// 'defgab', +// 'hjh', +// ], function(l) { +// assert.equal(l.length, 3); +// assert.equal('tata', l[0].toto); +// assert.equal('titi', l[1].toto); +// assert.equal('tutu', l[2].toto); +// }) +// ).addBatch( +// make_test('upper', [ +// '12', +// '42', +// 'abcd', +// ], function(l) { +// assert.equal(l.length, 3); +// assert.equal(undefined, l[0].toto); +// assert.equal('tata', l[1].toto); +// assert.equal(undefined, l[2].toto); +// }) +// ).addBatch( +// make_test('regex', [ +// 'atitib67c', +// 'Sep 14 02:01:37 lb haproxy[11223]: 127.0.0.1:12345 [14/Sep/2014:02:01:37.452] public 
nginx/server1 0/0/0/5/5 200 490 - - ---- 1269/1269/0/1/0 0/0 "GET /my/path HTTP/1.1"' +// ], function(l) { +// assert.equal(l.length, 2); +// assert.equal('titi', l[0].toto); +// assert.equal(67, l[0].tata); +// assert.equal('haproxy', l[1].syslog_program); +// assert.equal(11223, l[1].syslog_pid); +// }) +// ).addBatch( + make_test('fields_tags_1', [ + 'abcd', + '1.2.3.4', + ], function(l) { + assert.equal(2, l.length); + assert.equal('abcd', l[0].message); + assert.isUndefined(l[0].a); + assert.equal('1.2.3.4', l[1].message); + assert.deepEqual([ 'a', 'b' ], l[1].tags); + assert.equal('b', l[1].a); + }) +).addBatch( + make_test('fields_tags_2', [ + 'abcd', + '1.2.3.4', + ], function(l) { + assert.equal(2, l.length); + assert.equal('abcd', l[0].message); + assert.isUndefined(l[0].a); + assert.equal('1.2.3.4', l[1].message); + assert.deepEqual([ 'a', 'b' ], l[1].tags); + assert.equal('b', l[1].a); + assert.equal('d', l[1].c); + }) +).addBatch( + make_test('fields_tags_3', [ + 'abcd', + '1.2.3.4', + ], function(l) { + assert.equal(2, l.length); + assert.equal('abcd', l[0].message); + assert.isUndefined(l[0].a); + assert.equal('1.2.3.4', l[1].message); + assert.deepEqual([ 'toto' ], l[1].tags); + assert.equal('b', l[1].a); + assert.equal('d', l[1].c); + }) +).export(module); diff --git a/test/test_421_logstash/base b/test/test_421_logstash/base new file mode 100644 index 00000000..a29dcf2a --- /dev/null +++ b/test/test_421_logstash/base @@ -0,0 +1,5 @@ +input { + file { + path => input.txt + } +} diff --git a/test/test_421_logstash/else_else_if b/test/test_421_logstash/else_else_if new file mode 100644 index 00000000..b81e47ae --- /dev/null +++ b/test/test_421_logstash/else_else_if @@ -0,0 +1,27 @@ + +filter { + if [message] =~ 'cd' { + compute_field { + field => toto + value => tata + } + } + else if [message] =~ 'ef' { + compute_field { + field => toto + value => titi + } + } + else { + compute_field { + field => toto + value => tutu + } + } +} +output { + file { + 
path => output.txt + serializer => json_logstash + } +} \ No newline at end of file diff --git a/test/test_421_logstash/fields_tags_1 b/test/test_421_logstash/fields_tags_1 new file mode 100644 index 00000000..dfd91d66 --- /dev/null +++ b/test/test_421_logstash/fields_tags_1 @@ -0,0 +1,15 @@ + +filter { + grok { + match => '%{IP}' + add_tags => [a, b] + add_fields => { a => b } + } +} + +output { + file { + path => output.txt + serializer => json_logstash + } +} \ No newline at end of file diff --git a/test/test_421_logstash/fields_tags_2 b/test/test_421_logstash/fields_tags_2 new file mode 100644 index 00000000..62556ade --- /dev/null +++ b/test/test_421_logstash/fields_tags_2 @@ -0,0 +1,15 @@ + +filter { + grok { + match => '%{IP}' + add_tags => ['a', 'b'] + add_fields => { a => b, c => d } + } +} + +output { + file { + path => output.txt + serializer => json_logstash + } +} \ No newline at end of file diff --git a/test/test_421_logstash/fields_tags_3 b/test/test_421_logstash/fields_tags_3 new file mode 100644 index 00000000..c273b821 --- /dev/null +++ b/test/test_421_logstash/fields_tags_3 @@ -0,0 +1,18 @@ + +filter { + grok { + match => '%{IP}' + add_tags => toto + add_fields => { + a => b + c => d + } + } +} + +output { + file { + path => output.txt + serializer => json_logstash + } +} \ No newline at end of file diff --git a/test/test_421_logstash/grep b/test/test_421_logstash/grep new file mode 100644 index 00000000..bdc0ea12 --- /dev/null +++ b/test/test_421_logstash/grep @@ -0,0 +1,14 @@ + +filter { + grep { + regex => /abcd/ + invert => true + } +} + +output { + file { + path => output.txt + serializer => json_logstash + } +} \ No newline at end of file diff --git a/test/test_421_logstash/if_regex b/test/test_421_logstash/if_regex new file mode 100644 index 00000000..030979f1 --- /dev/null +++ b/test/test_421_logstash/if_regex @@ -0,0 +1,9 @@ + +output { + if [message] =~ ab { + file { + path => output.txt + serializer => json_logstash + } + } +} \ No 
newline at end of file diff --git a/test/test_421_logstash/regex b/test/test_421_logstash/regex new file mode 100644 index 00000000..7ad7b289 --- /dev/null +++ b/test/test_421_logstash/regex @@ -0,0 +1,18 @@ + +filter { + regex { + regex => /a(.*)b(.*)c/ + fields => [toto, tata] + numerical_fields => [tata] + } + regex { + builtin_regex => syslog_no_prio + } +} + +output { + file { + path => output.txt + serializer => json_logstash + } +} \ No newline at end of file diff --git a/test/test_421_logstash/simple b/test/test_421_logstash/simple new file mode 100644 index 00000000..3dab778b --- /dev/null +++ b/test/test_421_logstash/simple @@ -0,0 +1,7 @@ + +output { + file { + path => output.txt + serializer => json_logstash + } +} \ No newline at end of file diff --git a/test/test_421_logstash/simple_if b/test/test_421_logstash/simple_if new file mode 100644 index 00000000..ad2e7d67 --- /dev/null +++ b/test/test_421_logstash/simple_if @@ -0,0 +1,9 @@ + +output { + if [message] != "abcd" { + file { + path => output.txt + serializer => json_logstash + } + } +} \ No newline at end of file diff --git a/test/test_421_logstash/upper b/test/test_421_logstash/upper new file mode 100644 index 00000000..3fe39382 --- /dev/null +++ b/test/test_421_logstash/upper @@ -0,0 +1,16 @@ + +filter { + if [message] > 30 { + compute_field { + field => toto + value => tata + } + } +} + +output { + file { + path => output.txt + serializer => json_logstash + } +} \ No newline at end of file diff --git a/test/test_422_input_tags_fields.js b/test/test_422_input_tags_fields.js new file mode 100644 index 00000000..512995f9 --- /dev/null +++ b/test/test_422_input_tags_fields.js @@ -0,0 +1,89 @@ +var vows = require('vows-batch-retry'), + fs = require('fs'), + assert = require('assert'), + path = require('path'), + helper = require('./integration_helper.js'), + monitor_file = require('lib/monitor_file'); + +vows.describe('Integration file 2 file :').addBatch({ + 'single': { + topic: function() { + 
monitor_file.setFileStatus({}); + var callback = this.callback; + helper.createAgent([ + 'input://file://input.txt?tags=a&add_field=a:b', + 'output://file://output.txt?serializer=json_logstash', + ], function(agent) { + setTimeout(function() { + fs.appendFile('input.txt', 'line1\n', function(err) { + assert.ifError(err); + setTimeout(function() { + agent.close(function() { + callback(null); + }); + }, 200); + }); + }, 200); + }); + }, + + check: function(err) { + assert.ifError(err); + var c = fs.readFileSync('output.txt').toString(); + fs.unlinkSync('input.txt'); + fs.unlinkSync('output.txt'); + + var splitted = c.split('\n'); + assert.equal(splitted.length, 2); + assert.equal('', splitted[splitted.length - 1]); + helper.checkResult(splitted[0], { + '@version': '1', + 'path': path.resolve('.') + '/input.txt', + 'message': 'line1', + 'tags': ['a'], + 'a': 'b', + }, true); + } + }, +}).addBatch({ + 'mutiple': { + topic: function() { + monitor_file.setFileStatus({}); + var callback = this.callback; + helper.createAgent([ + 'input://file://input.txt?tags=a,2,3&add_fields=a:b,xxx:12', + 'output://file://output.txt?serializer=json_logstash', + ], function(agent) { + setTimeout(function() { + fs.appendFile('input.txt', 'line1\n', function(err) { + assert.ifError(err); + setTimeout(function() { + agent.close(function() { + callback(null); + }); + }, 200); + }); + }, 200); + }); + }, + + check: function(err) { + assert.ifError(err); + var c = fs.readFileSync('output.txt').toString(); + fs.unlinkSync('input.txt'); + fs.unlinkSync('output.txt'); + + var splitted = c.split('\n'); + assert.equal(splitted.length, 2); + assert.equal('', splitted[splitted.length - 1]); + helper.checkResult(splitted[0], { + '@version': '1', + 'path': path.resolve('.') + '/input.txt', + 'message': 'line1', + 'tags': ['a', '2', '3'], + 'a': 'b', + 'xxx': '12', + }, true); + } + }, +}).export(module); diff --git a/test/test_500_real_life.js b/test/test_500_real_life.js new file mode 100644 index 
00000000..a84b49a9 --- /dev/null +++ b/test/test_500_real_life.js @@ -0,0 +1,249 @@ +var vows = require('vows-batch-retry'), + assert = require('assert'), + fs = require('fs'), + agent = require('agent'), + spawn = require('child_process').spawn, + dgram = require('dgram'), + log = require('log4node'), + whereis = require('whereis'); + +function createAgent(urls, callback, error_callback) { + var a = agent.create(); + error_callback = error_callback || function(error) { + assert.ifError(error); + }; + a.on('error', function(module_name, error) { + console.log('Error agent detected, ' + module_name + ' : ' + error); + error_callback(error); + }); + a.start(['filter://add_host://', 'filter://add_timestamp://', 'filter://add_version://'].concat(urls), function(error) { + assert.ifError(error); + callback(a); + }, 200); +} + +function run(command, args, pid_file, callback) { + log.info('Starting sub process'); + var child = spawn(command, args); + if (pid_file) { + fs.writeFile(pid_file, child.pid, function(err) { + if (err) { + console.log(err); + } + }); + } + child.stdout.on('data', function(data) { + process.stdout.write('STDOUT ' + data.toString()); + }); + child.stderr.on('data', function(data) { + process.stdout.write('STDERR ' + data.toString()); + }); + child.on('exit', function(exitCode) { + log.info('End of sub process', exitCode); + callback(exitCode); + }); +} + +function input_file_test(args, topic_callback, check_callback) { + return { + topic: function() { + var callback = this.callback; + var socket = dgram.createSocket('udp4'); + socket.bind(17881); + var datas = []; + socket.on('message', function(data) { + datas.push(data); + }); + createAgent([ + 'input://file://*.txt' + args, + 'output://udp://localhost:17881', + ], function(agent) { + run('node', ['test/500_real_life/run.js', '--file=output.txt', '--count=1500', '--period=1'], 'process.pid', function(exitCode) { + setTimeout(function() { + socket.close(); + agent.close(function() { + 
callback(undefined, exitCode, datas); + }); + }, 200); + }); + }); + topic_callback(); + }, + + check: function(err, exitCode, datas) { + assert.ifError(err); + fs.unlinkSync('output.txt'); + fs.unlinkSync('process.pid'); + check_callback(); + assert.equal(exitCode, 0); + assert.equal(datas.length, 1500); + } + }; +} + +function output_file_test(topic_callback, check_callback) { + return { + topic: function() { + var callback = this.callback; + var socket = dgram.createSocket('udp4'); + run('node', ['bin/node-logstash-agent', 'input://udp://localhost:17874', 'output://file://output.txt'], 'process.pid', function(exitCode) { + setTimeout(function() { + socket.close(); + callback(undefined, exitCode); + }, 100); + }); + setTimeout(function() { + var a = function(k) { + if (k === 0) { + setTimeout(function() { + process.kill(fs.readFileSync('process.pid')); + }, 200); + return; + } + setTimeout(function() { + var message = new Buffer('line ' + k); + socket.send(message, 0, message.length, 17874, 'localhost', function(err) { + if (err) { + console.log(err); + } + a(k - 1); + }); + }, 1); + }; + a(500); + }, 500); + topic_callback(); + }, + + check: function(err, exitCode) { + fs.unlinkSync('process.pid'); + assert.ifError(err); + check_callback(exitCode); + } + }; +} + +vows.describe('Real life :').addBatchRetry({ + 'simple test': input_file_test('', + function() {}, function() {}), +}, 5, 20000).addBatchRetry({ + 'logrotate test, short_wait_delay_after_renaming': input_file_test('?wait_delay_after_renaming=100', + function() { + whereis('logrotate', function(err, logrotate) { + if (err) { + return console.log(err); + } + setTimeout(function() { + run(logrotate, ['-f', 'test/500_real_life/std_logrotate.conf', '-s', '/tmp/toto'], undefined, function(exitCode) { + console.log('Logrotate exit code', exitCode); + assert.equal(0, exitCode); + }); + }, 500); + setTimeout(function() { + run(logrotate, ['-f', 'test/500_real_life/std_logrotate.conf', '-s', '/tmp/toto'], 
undefined, function(exitCode) { + console.log('Logrotate exit code', exitCode); + assert.equal(0, exitCode); + }); + }, 1000); + }); + }, function() { + fs.unlinkSync('output.txt.1'); + fs.unlinkSync('output.txt.2'); + } + ), +}, 20, 20000).addBatchRetry({ + 'logrotate test': input_file_test('', + function() { + whereis('logrotate', function(err, logrotate) { + if (err) { + return console.log(err); + } + setTimeout(function() { + run(logrotate, ['-f', 'test/500_real_life/std_logrotate.conf', '-s', '/tmp/toto'], undefined, function(exitCode) { + console.log('Logrotate exit code', exitCode); + assert.equal(0, exitCode); + }); + }, 500); + setTimeout(function() { + run(logrotate, ['-f', 'test/500_real_life/std_logrotate.conf', '-s', '/tmp/toto'], undefined, function(exitCode) { + console.log('Logrotate exit code', exitCode); + assert.equal(exitCode, 0); + }); + }, 1000); + }); + }, function() { + fs.unlinkSync('output.txt.1'); + fs.unlinkSync('output.txt.2'); + } + ), +}, 20, 20000).addBatchRetry({ + 'logrotate copy_truncate test': input_file_test('?use_tail=true', + function() { + whereis('logrotate', function(err, logrotate) { + if (err) { + return console.log(err); + } + setTimeout(function() { + run(logrotate, ['-f', 'test/500_real_life/copytruncate_logrotate.conf', '-s', '/tmp/toto'], undefined, function(exitCode) { + console.log('Logrotate exit code', exitCode); + assert.equal(exitCode, 0); + }); + }, 500); + setTimeout(function() { + run(logrotate, ['-f', 'test/500_real_life/copytruncate_logrotate.conf', '-s', '/tmp/toto'], undefined, function(exitCode) { + console.log('Logrotate exit code', exitCode); + assert.equal(exitCode, 0); + }); + }, 1000); + }); + }, function() { + fs.unlinkSync('output.txt.1'); + fs.unlinkSync('output.txt.2'); + } + ), +}, 5, 20000).addBatchRetry({ + 'file output test': output_file_test( + function() {}, + function(exitCode) { + var output = fs.readFileSync('output.txt').toString().trim().split('\n'); + fs.unlinkSync('output.txt'); + 
assert.equal(exitCode, 1); + assert.equal(output.length, 500); + var i = 500; + output.forEach(function(k) { + assert.equal('line ' + i, k); + i--; + }); + }), +}, 5, 20000).addBatchRetry({ + 'file output test with logrotate': output_file_test( + function() { + setTimeout(function() { + whereis('logrotate', function(err, logrotate) { + if (err) { + return console.log(err); + } + run(logrotate, ['-f', 'test/500_real_life/std_logrotate.conf', '-s', '/tmp/toto'], undefined, function(exitCode) { + console.log('Logrotate exit code', exitCode); + assert.equal(exitCode, 0); + }); + }); + }, 500); + }, + function(exitCode) { + var o1 = fs.readFileSync('output.txt.1').toString(); + var o2 = fs.readFileSync('output.txt').toString(); + var output = (o1 + o2).trim().split('\n'); + fs.unlinkSync('output.txt'); + fs.unlinkSync('output.txt.1'); + assert.equal(exitCode, 1); + assert.greater(o1.length, 0); + assert.greater(o2.length, 0); + assert.equal(output.length, 500); + var i = 500; + output.forEach(function(k) { + assert.equal(k, 'line ' + i); + i--; + }); + }), +}, 5, 20000).export(module); diff --git a/test/test_600_no_directory_watched.js b/test/test_600_no_directory_watched.js new file mode 100644 index 00000000..496dafea --- /dev/null +++ b/test/test_600_no_directory_watched.js @@ -0,0 +1,15 @@ +var vows = require('vows'), + assert = require('assert'), + directory_watcher = require('../lib/lib/directory_watcher'); + +vows.describe('Check directory watcher').addBatch({ + 'is empty': { + topic: function() { + return directory_watcher.current; + }, + + check: function(l) { + assert.equal(Object.keys(l).length, 0); + } + } +}).export(module); diff --git a/test/zmq_injector.js b/test/zmq_injector.js new file mode 100644 index 00000000..038e73ec --- /dev/null +++ b/test/zmq_injector.js @@ -0,0 +1,45 @@ +var events = require('events'), + log = require('log4node'), + zmq = require('zmq'); + +var target = process.argv[2]; +var type = process.argv[3]; +var period = 
process.argv[4]; +var count = process.argv[5]; +var max = process.argv[6]; + +if (!target || !type || !period || !count) { + process.exit(1); +} + +log.info('Target', target, 'period', period, 'count', count); + +var k = 0; +var e = new events.EventEmitter(); + +e.on('msg', function() { + k++; + if (k % 10000 === 0) { + log.info('Send', k); + } +}); + +var kk = 0; + +var socket = zmq.socket('push'); +socket.connect(target); + +setInterval(function() { + kk++; + for (var i = 0; i < count; i++) { + socket.send(JSON.stringify({ + 'type': type, + '@timestamp': (new Date()).toISOString(), + 'message': 'message ' + kk + ' ' + i + })); + e.emit('msg'); + } + if (max === kk) { + process.exit(0); + } +}, period);