Skip to content

Commit

Permalink
Version 1.0.2
Browse files Browse the repository at this point in the history
  • Loading branch information
guilhemmarchand committed Dec 1, 2018
1 parent 1357833 commit bed28d8
Show file tree
Hide file tree
Showing 6 changed files with 91 additions and 41 deletions.
2 changes: 1 addition & 1 deletion TA-kafka-streaming-platform/default/app.conf
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ state = enabled

[launcher]
author = Guilhem Marchand
version = 1.0.1
version = 1.0.2
description = Technology addon for Kafka streaming platform logging

[ui]
Expand Down
80 changes: 44 additions & 36 deletions TA-kafka-streaming-platform/default/props.conf
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,8 @@ TIME_PREFIX=\[
TIME_FORMAT=%Y-%m-%d %H:%M:%S,%3N
LINE_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]
TRUNCATE=0
EVENT_BREAKER_ENABLE=true
EVENT_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]

REPORT-kafka_extractions = kafka_log_level, kafka_extractions

Expand All @@ -41,6 +43,8 @@ TIME_PREFIX=\[
TIME_FORMAT=%Y-%m-%d %H:%M:%S,%3N
LINE_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]
TRUNCATE=0
EVENT_BREAKER_ENABLE=true
EVENT_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]

REPORT-kafka_extractions = kafka_log_level, kafka_extractions

Expand All @@ -52,6 +56,8 @@ TIME_PREFIX=\[
TIME_FORMAT=%Y-%m-%d %H:%M:%S,%3N
LINE_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]
TRUNCATE=0
EVENT_BREAKER_ENABLE=true
EVENT_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]

REPORT-kafka_extractions = kafka_log_level, kafka_extractions

Expand All @@ -63,6 +69,8 @@ TIME_PREFIX=\[
TIME_FORMAT=%Y-%m-%d %H:%M:%S,%3N
LINE_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]
TRUNCATE=0
EVENT_BREAKER_ENABLE=true
EVENT_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]

REPORT-kafka_extractions = kafka_log_level, kafka_extractions

Expand All @@ -74,6 +82,8 @@ TIME_PREFIX=\[
TIME_FORMAT=%Y-%m-%d %H:%M:%S,%3N
LINE_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]
TRUNCATE=0
EVENT_BREAKER_ENABLE=true
EVENT_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]

REPORT-kafka_extractions = kafka_log_level, kafka_extractions

Expand All @@ -85,17 +95,15 @@ TIME_PREFIX=\[
TIME_FORMAT=%Y-%m-%d %H:%M:%S,%3N
LINE_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]
TRUNCATE=0
EVENT_BREAKER_ENABLE=true
EVENT_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]

REPORT-kafka_extractions = kafka_log_level, kafka_extractions

[zookeeper:gc-log]
SHOULD_LINEMERGE=false
NO_BINARY_CHECK=true
CHARSET=UTF-8
TIME_PREFIX=^
TIME_FORMAT=%Y-%m-%dT%H:%M:%S.%l%z
LINE_BREAKER=([\n\r]+)\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}
TRUNCATE=0
BREAK_ONLY_BEFORE = \d\d?:\d\d:\d\d
maxDist = 75

REPORT-gc-extractions = gc_action_type, gc_timetaken_sec

Expand All @@ -114,6 +122,8 @@ TIME_PREFIX=\[
TIME_FORMAT=%Y-%m-%d %H:%M:%S,%3N
LINE_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]
TRUNCATE=0
EVENT_BREAKER_ENABLE=true
EVENT_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]

REPORT-kafka_extractions = kafka_log_level, kafka_extractions, kafka_broker_id_match1, kafka_broker_id_match2

Expand All @@ -125,6 +135,8 @@ TIME_PREFIX=\[
TIME_FORMAT=%Y-%m-%d %H:%M:%S,%3N
LINE_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]
TRUNCATE=0
EVENT_BREAKER_ENABLE=true
EVENT_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]

REPORT-kafka_extractions = kafka_log_level, kafka_extractions

Expand All @@ -136,6 +148,8 @@ TIME_PREFIX=\[
TIME_FORMAT=%Y-%m-%d %H:%M:%S,%3N
LINE_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]
TRUNCATE=0
EVENT_BREAKER_ENABLE=true
EVENT_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]

REPORT-kafka_extractions = kafka_log_level, kafka_extractions

Expand All @@ -147,6 +161,8 @@ TIME_PREFIX=\[
TIME_FORMAT=%Y-%m-%d %H:%M:%S,%3N
LINE_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]
TRUNCATE=0
EVENT_BREAKER_ENABLE=true
EVENT_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]

REPORT-kafka_extractions = kafka_log_level, kafka_extractions

Expand All @@ -158,6 +174,8 @@ TIME_PREFIX=\[
TIME_FORMAT=%Y-%m-%d %H:%M:%S,%3N
LINE_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]
TRUNCATE=0
EVENT_BREAKER_ENABLE=true
EVENT_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]

REPORT-kafka_extractions = kafka_log_level, kafka_extractions

Expand All @@ -169,17 +187,15 @@ TIME_PREFIX=\[
TIME_FORMAT=%Y-%m-%d %H:%M:%S,%3N
LINE_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]
TRUNCATE=0
EVENT_BREAKER_ENABLE=true
EVENT_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]

REPORT-kafka_extractions = kafka_log_level, kafka_extractions, kafka_broker_authorizer_action

[kafka:broker:gc-log]
SHOULD_LINEMERGE=false
NO_BINARY_CHECK=true
CHARSET=UTF-8
TIME_PREFIX=^
TIME_FORMAT=%Y-%m-%dT%H:%M:%S.%l%z
LINE_BREAKER=([\n\r]+)\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}
TRUNCATE=0
BREAK_ONLY_BEFORE = \d\d?:\d\d:\d\d
maxDist = 75

REPORT-gc-extractions = gc_action_type, gc_timetaken_sec

Expand All @@ -198,18 +214,16 @@ TIME_PREFIX=\[
TIME_FORMAT=%Y-%m-%d %H:%M:%S,%3N
LINE_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]
TRUNCATE=0
EVENT_BREAKER_ENABLE=true
EVENT_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]

REPORT-kafka_extractions = kafka_log_level, kafka_connect_worker_discovering, kafka_connect_worker_task_activity, kafka_connect_worker_consumer_activity, kafka_connect_source_connector, kafka_connect_sink_connector, kafka_connect_java_class, kafka_connect_work_commit_success_ms
EVAL-connector_type = case(isnotnull(connector_source), "source_connector", isnotnull(connector_sink), "sink_connector")

[kafka:connect:gc-log]
SHOULD_LINEMERGE=false
NO_BINARY_CHECK=true
CHARSET=UTF-8
TIME_PREFIX=^
TIME_FORMAT=%Y-%m-%dT%H:%M:%S.%l%z
LINE_BREAKER=([\n\r]+)\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}
TRUNCATE=0
BREAK_ONLY_BEFORE = \d\d?:\d\d:\d\d
maxDist = 75

REPORT-gc-extractions = gc_action_type, gc_timetaken_sec

Expand All @@ -234,17 +248,15 @@ TIME_PREFIX=\[
TIME_FORMAT=%Y-%m-%d %H:%M:%S,%3N
LINE_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]
TRUNCATE=0
EVENT_BREAKER_ENABLE=true
EVENT_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]

REPORT-kafka_extractions = kafka_log_level, kafka_extractions

[kafka:schema-registry:gc-log]
SHOULD_LINEMERGE=false
NO_BINARY_CHECK=true
CHARSET=UTF-8
TIME_PREFIX=^
TIME_FORMAT=%Y-%m-%dT%H:%M:%S.%l%z
LINE_BREAKER=([\n\r]+)\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}
TRUNCATE=0
BREAK_ONLY_BEFORE = \d\d?:\d\d:\d\d
maxDist = 75

REPORT-gc-extractions = gc_action_type, gc_timetaken_sec

Expand All @@ -269,17 +281,15 @@ TIME_PREFIX=\[
TIME_FORMAT=%Y-%m-%d %H:%M:%S,%3N
LINE_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]
TRUNCATE=0
EVENT_BREAKER_ENABLE=true
EVENT_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]

REPORT-kafka_extractions = kafka_log_level, kafka_extractions

[kafka:kafka-rest:gc-log]
SHOULD_LINEMERGE=false
NO_BINARY_CHECK=true
CHARSET=UTF-8
TIME_PREFIX=^
TIME_FORMAT=%Y-%m-%dT%H:%M:%S.%l%z
LINE_BREAKER=([\n\r]+)\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}
TRUNCATE=0
BREAK_ONLY_BEFORE = \d\d?:\d\d:\d\d
maxDist = 75

REPORT-gc-extractions = gc_action_type, gc_timetaken_sec

Expand All @@ -304,16 +314,14 @@ TIME_PREFIX=\[
TIME_FORMAT=%Y-%m-%d %H:%M:%S,%3N
LINE_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]
TRUNCATE=0
EVENT_BREAKER_ENABLE=true
EVENT_BREAKER=([\n\r]+)\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\,\d{3}\]

REPORT-kafka_extractions = kafka_log_level, kafka_extractions

[kafka:ksql-server:gc-log]
SHOULD_LINEMERGE=false
NO_BINARY_CHECK=true
CHARSET=UTF-8
TIME_PREFIX=^
TIME_FORMAT=%Y-%m-%dT%H:%M:%S.%l%z
LINE_BREAKER=([\n\r]+)\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}
TRUNCATE=0
BREAK_ONLY_BEFORE = \d\d?:\d\d:\d\d
maxDist = 75

REPORT-gc-extractions = gc_action_type, gc_timetaken_sec
Binary file removed TA-kafka-streaming-platform_101.tgz
Binary file not shown.
Binary file added TA-kafka-streaming-platform_102.tgz
Binary file not shown.
42 changes: 39 additions & 3 deletions docs/installation.rst
Original file line number Diff line number Diff line change
Expand Up @@ -134,12 +134,18 @@ By default, Confluent may use the same logging location for both Zookeeper and K
sudo mkdir /var/log/zookeeper
sudo chown cp-kafka:confluent /var/log/zookeeper

- Restart Zookeeper and verify that logs are properly generated in the directory.
- Restart Zookeeper and verify that logs are properly generated in the directory:

::

sudo systemctl status confluent-zookeeper

Kafka Connect
-------------

By default, Confluent may use the same logging location for both Kafka brokers and Kafka Connect, suggested configuration to avoid this:
**Unlike other components, Kafka Connect does not log to a file by default; it only logs to the console.**

To change this behaviour, you need to edit the log4j configuration:

**Configuring the systemd for Connect:**

Expand Down Expand Up @@ -171,7 +177,37 @@ By default, Confluent may use the same logging location for both Kafka brokers a
::

sudo mkdir /var/log/connect
sudo chown cp-kafka:confluent /var/log/connect
sudo chown cp-kafka-connect:confluent /var/log/connect

**Configuring log4j:**

- Edit: */etc/kafka/connect-log4j.properties*

- Add a file appender:

::

log4j.rootLogger=INFO, stdout, FILE

log4j.appender.FILE=org.apache.log4j.DailyRollingFileAppender
log4j.appender.FILE.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.FILE.File=${kafka.logs.dir}/connect.log
log4j.appender.FILE.layout=org.apache.log4j.PatternLayout
log4j.appender.FILE.layout.ConversionPattern=[%d] %p %m (%c)%n

log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n

log4j.logger.org.apache.zookeeper=ERROR
log4j.logger.org.I0Itec.zkclient=ERROR
log4j.logger.org.reflections=ERROR

- Restart Connect and verify that the log file is being created:

::

sudo systemctl status confluent-kafka-connect

Other components
----------------
Expand Down
8 changes: 7 additions & 1 deletion docs/releasenotes.rst
Original file line number Diff line number Diff line change
@@ -1,10 +1,16 @@
Release notes
#############

Version 1.0.2
=============
- feature: EVENT_BREAKER props improvements
- fix: Garbage Collector indexing time parsing issues
- fix: Instructions corrections for Kafka Connect

Version 1.0.1
=============

- Updated default logging location for Confluent
- fix: Updated default logging location for Confluent

Version 1.0.0
=============
Expand Down

0 comments on commit bed28d8

Please sign in to comment.