From dd03214258e92aab52421b50605bb0b4bcec9265 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring
Date: Sun, 15 Jul 2018 11:49:42 -0400
Subject: [PATCH 1/3] Remove deprecated logstash fields.

---
 .../outputting/logstash_standalone.conf | 21 +------------------
 1 file changed, 1 insertion(+), 20 deletions(-)

diff --git a/docs/source/outputting/logstash_standalone.conf b/docs/source/outputting/logstash_standalone.conf
index fba6645..992b7cd 100644
--- a/docs/source/outputting/logstash_standalone.conf
+++ b/docs/source/outputting/logstash_standalone.conf
@@ -22,29 +22,10 @@ output {
   }
 
   elasticsearch {
-    # Documents in ElasticSearch are identified by tuples of (index, mapping
-    # type, document_id).
-    # References:
-    # - http://logstash.net/docs/1.3.2/outputs/elasticsearch
-    # - http://stackoverflow.com/questions/15025876/what-is-an-index-in-elasticsearch
-
     # We make the document id unique (for a specific index/mapping type pair) by
     # using the relevant Eliot fields. This means replaying messages will not
     # result in duplicates, as long as the replayed messages end up in the same
-    # index (see below).
+    # index.
     document_id => "%{task_uuid}_%{task_level}"
-
-    # By default logstash sets the index to include the current date. When we
-    # get to point of replaying log files on startup for crash recovery we might
-    # want to use the last modified date of the file instead of current date,
-    # otherwise we'll get documents ending up in wrong index.
-
-    #index => "logstash-%{+YYYY.MM.dd}"
-
-    index_type => "Eliot"
-
-    # In a centralized ElasticSearch setup we'd be specifying host/port
-    # or some such. In this setup we run it ourselves:
-    embedded => true
   }
 }

From e48efc8094452733fcfddaabb7a95c2404d814e0 Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring
Date: Sun, 15 Jul 2018 11:51:37 -0400
Subject: [PATCH 2/3] Note this is hand-wavy.

---
 docs/source/outputting/elasticsearch.rst | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/docs/source/outputting/elasticsearch.rst b/docs/source/outputting/elasticsearch.rst
index 7be957d..7293756 100644
--- a/docs/source/outputting/elasticsearch.rst
+++ b/docs/source/outputting/elasticsearch.rst
@@ -1,6 +1,8 @@
 Using Logstash and ElasticSearch to Process Eliot Logs
 ======================================================
 
+.. note:: Logstash, Elasticsearch and Kibana change frequently. These instructions might not be quite accurate.
+
 `ElasticSearch`_ is a search and analytics engine which can be used to store Eliot logging output.
 The logs can then be browsed by humans using the `Kibana`_ web UI, or on the command-line using the `logstash-cli`_ tool.
 Automated systems can access the logs using the ElasticSearch query API.

From 1ab92eb402a61b47770e24e690becad7ac81d86b Mon Sep 17 00:00:00 2001
From: Itamar Turner-Trauring
Date: Sun, 15 Jul 2018 11:52:21 -0400
Subject: [PATCH 3/3] Changelog entry.

---
 docs/source/news.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/source/news.rst b/docs/source/news.rst
index e6dcb9f..7f30c04 100644
--- a/docs/source/news.rst
+++ b/docs/source/news.rst
@@ -7,7 +7,7 @@ What's New
 Documentation:
 
 * Documented how to add log levels, and how to filter Eliot logs.
-
+* Logstash configuration is closer to modern version's options, though still untested.
 
 1.3.0
 ^^^^^
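
After the series is applied, the elasticsearch output block of docs/source/outputting/logstash_standalone.conf should look roughly like the sketch below, reconstructed from the context and added lines of PATCH 1/3. The hosts setting is an assumption: the patch only removes the deprecated embedded => true option and does not say how an external Elasticsearch instance would be addressed, so that line and its address are hypothetical.

  elasticsearch {
    # Assumption, not part of the diff: with the deprecated `embedded => true`
    # removed, a modern setup would point at an external Elasticsearch instance,
    # for example one listening on the default local port.
    hosts => ["localhost:9200"]

    # We make the document id unique (for a specific index/mapping type pair) by
    # using the relevant Eliot fields. This means replaying messages will not
    # result in duplicates, as long as the replayed messages end up in the same
    # index.
    document_id => "%{task_uuid}_%{task_level}"
  }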