This repository has been archived by the owner on Mar 24, 2021. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 232
/
kafka_tools.py
466 lines (389 loc) · 16.5 KB
/
kafka_tools.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
from __future__ import print_function
import argparse
import calendar
import datetime as dt
import sys
import time
from pkg_resources import parse_version
import tabulate
import pykafka
from pykafka.common import OffsetType
from pykafka.protocol import PartitionOffsetCommitRequest, CreateTopicRequest
from pykafka.utils.compat import PY3, iteritems
#
# Helper Functions
#
def fetch_offsets(client, topic, offset):
    """Fetch raw offset data from a topic.

    :param client: KafkaClient connected to the cluster (not used by this
        helper; kept so all helpers share a uniform signature).
    :type client: :class:`pykafka.KafkaClient`
    :param topic: The topic to query.
    :type topic: :class:`pykafka.topic.Topic`
    :param offset: Offset to fetch. Can be 'earliest', 'latest' (case
        insensitive) or a datetime string in YYYY-MM-DDTHH:MM:SS format.
    :type offset: :class:`str`
    :returns: {partition_id: :class:`pykafka.protocol.OffsetPartitionResponse`}
    """
    normalized = offset.lower()
    if normalized == 'earliest':
        return topic.earliest_available_offsets()
    if normalized == 'latest':
        return topic.latest_available_offsets()
    # Anything else is treated as a UTC datetime; convert to epoch millis
    # as expected by fetch_offset_limits.
    when = dt.datetime.strptime(offset, "%Y-%m-%dT%H:%M:%S")
    millis = int(calendar.timegm(when.utctimetuple()) * 1000)
    return topic.fetch_offset_limits(millis)
def fetch_consumer_lag(client, topic, consumer_group):
    """Get raw lag data for a topic/consumer group.

    :param client: KafkaClient connected to the cluster.
    :type client: :class:`pykafka.KafkaClient`
    :param topic: The topic to query.
    :type topic: :class:`pykafka.topic.Topic`
    :param consumer_group: Name of the consumer group to fetch lag for.
    :type consumer_group: :class:`str`
    :returns: dict of {partition_id: (latest_offset, consumer_offset)}
    """
    latest = fetch_offsets(client, topic, 'latest')
    # auto_start=False: we only need the group's committed offsets, not a
    # running consumer.
    consumer = topic.get_simple_consumer(consumer_group=consumer_group,
                                         auto_start=False)
    committed = consumer.fetch_offsets()
    lag = {}
    for partition_id, response in committed:
        lag[partition_id] = (latest[partition_id].offset[0], response.offset)
    return lag
#
# Commands
#
def consume_topic(client, args):
    """Dump messages from a topic to a file or stdout.

    :param client: KafkaClient connected to the cluster.
    :type client: :class:`pykafka.KafkaClient`
    :param args: Parsed CLI arguments (topic, limit, outfile).
    """
    # Don't auto-create topics.
    if args.topic not in client.topics:
        raise ValueError('Topic {} does not exist.'.format(args.topic))
    topic = client.topics[args.topic]
    consumer = topic.get_simple_consumer("pykafka_cli",
                                         consumer_timeout_ms=100,
                                         auto_offset_reset=OffsetType.LATEST,
                                         reset_offset_on_start=True,
                                         auto_commit_enable=True,
                                         auto_commit_interval_ms=3000)
    written = 0
    while written < args.limit:
        message = consumer.consume()
        # Skip consume() timeouts and empty payloads.
        if not message or not message.value:
            continue
        # Coming from Kafka, this is utf-8.
        text = message.value.decode('utf-8', errors='replace') + u'\n'
        args.outfile.write(text if PY3 else text.encode('utf-8'))
        written += 1
    args.outfile.close()
def desc_topic(client, args):
    """Print detailed information about a topic.

    :param client: KafkaClient connected to the cluster.
    :type client: :class:`pykafka.KafkaClient`
    :param args: Parsed CLI arguments (topic).
    """
    # Don't auto-create topics.
    if args.topic not in client.topics:
        raise ValueError('Topic {} does not exist.'.format(args.topic))
    topic = client.topics[args.topic]
    partitions = list(topic.partitions.values())
    print('Topic: {}'.format(topic.name))
    print('Partitions: {}'.format(len(topic.partitions)))
    print('Replicas: {}'.format(len(partitions[0].replicas)))
    rows = []
    for part in topic.partitions.values():
        rows.append((part.id,
                     part.leader.id,
                     [replica.id for replica in part.replicas],
                     [replica.id for replica in part.isr]))
    print(tabulate.tabulate(
        rows,
        headers=['Partition', 'Leader', 'Replicas', 'ISR'],
        numalign='center',
    ))
def print_managed_consumer_groups(client, args):
    """Print Kafka-managed consumer groups that consume a topic.

    :param client: KafkaClient connected to the cluster.
    :type client: :class:`pykafka.KafkaClient`
    :param args: Parsed CLI arguments (topic).
    """
    if args.topic not in client.topics:
        raise ValueError('Topic {} does not exist.'.format(args.topic))
    matching_groups = {}
    for _broker_id, broker in iteritems(client.brokers):
        group_ids = broker.list_groups().groups.keys()
        descriptions = broker.describe_groups(group_ids=group_ids).groups
        for group_id, description in iteritems(descriptions):
            # A group matches if any member subscribes to the topic.
            for _member_id, member in iteritems(description.members):
                if args.topic in member.member_metadata.topic_names:
                    matching_groups[group_id] = description
    print('Topic: {}'.format(args.topic))
    print(tabulate.tabulate(
        [(group_id, desc.state, desc.protocol, desc.protocol_type)
         for group_id, desc in iteritems(matching_groups)],
        headers=['GroupId', 'State', 'Protocol', 'ProtocolType']
    ))
def print_consumer_lag(client, args):
    """Print lag for a topic/consumer group.

    :param client: KafkaClient connected to the cluster.
    :type client: :class:`pykafka.KafkaClient`
    :param args: Parsed CLI arguments (topic, consumer_group).
    """
    # Don't auto-create topics.
    if args.topic not in client.topics:
        raise ValueError('Topic {} does not exist.'.format(args.topic))
    topic = client.topics[args.topic]
    lag_info = fetch_consumer_lag(client, topic, args.consumer_group)
    # Compute the total from the raw offsets. The previous version summed by
    # re-parsing its own comma-formatted strings, a fragile round-trip.
    total = sum(latest - current for latest, current in lag_info.values())
    rows = [(p_id, '{:,}'.format(latest - current), latest, current)
            for p_id, (latest, current) in iteritems(lag_info)]
    print(tabulate.tabulate(
        rows,
        headers=['Partition', 'Lag', 'Latest Offset', 'Current Offset'],
        numalign='center',
    ))
    print('\n Total lag: {:,} messages.'.format(total))
def print_offsets(client, args):
    """Print offsets for a topic.

    NOTE: Time-based offset lookups are not precise, but are based on segment
          boundaries. If there is only one segment, as when Kafka has just
          started, the only offsets found will be [0, <latest_offset>].

    :param client: KafkaClient connected to the cluster.
    :type client: :class:`pykafka.KafkaClient`
    :param args: Parsed CLI arguments (topic, offset). The offset may be
        earliest, latest, or a datetime string; a datetime selects the
        latest offset published *before* that time.
    """
    # Don't auto-create topics.
    if args.topic not in client.topics:
        raise ValueError('Topic {} does not exist.'.format(args.topic))
    topic = client.topics[args.topic]
    offsets = fetch_offsets(client, topic, args.offset)
    rows = [(partition_id, response.offset[0])
            for partition_id, response in iteritems(offsets)]
    print(tabulate.tabulate(
        rows,
        headers=['Partition', 'Offset'],
        numalign='center',
    ))
def print_topics(client, args):
    """Print all topics in the cluster.

    :param client: KafkaClient connected to the cluster.
    :type client: :class:`pykafka.KafkaClient`
    :param args: Parsed CLI arguments (unused).
    """
    rows = []
    for topic in client.topics.values():
        # Replication column shows replicas beyond the leader.
        first_partition = list(topic.partitions.values())[0]
        rows.append((topic.name,
                     len(topic.partitions),
                     len(first_partition.replicas) - 1))
    print(tabulate.tabulate(
        rows,
        headers=['Topic', 'Partitions', 'Replication'],
        numalign='center',
    ))
def reset_offsets(client, args):
    """Reset offsets for a topic/consumer group.

    NOTE: Time-based offset lookups are not precise, but are based on segment
          boundaries. If there is only one segment, as when Kafka has just
          started, the only offsets found will be [0, <latest_offset>].

    :param client: KafkaClient connected to the cluster.
    :type client: :class:`pykafka.KafkaClient`
    :param args: Parsed CLI arguments (topic, consumer_group, offset). The
        offset may be earliest, latest, or a datetime string; a datetime
        resets to the latest message published *before* that time.
    """
    # Don't auto-create topics.
    if args.topic not in client.topics:
        raise ValueError('Topic {} does not exist.'.format(args.topic))
    topic = client.topics[args.topic]
    # Build one offset commit request per partition.
    offsets = fetch_offsets(client, topic, args.offset)
    now_ms = int(time.time() * 1000)
    requests = []
    for partition_id, response in iteritems(offsets):
        requests.append(PartitionOffsetCommitRequest(topic.name,
                                                     partition_id,
                                                     response.offset[0],
                                                     now_ms,
                                                     b'kafka-tools'))
    # Send them to the group's coordinator broker.
    coordinator = client.cluster.get_group_coordinator(args.consumer_group)
    coordinator.commit_consumer_group_offsets(
        args.consumer_group, 1, b'kafka-tools', requests
    )
def create_topic(client, args):
    """Create a topic via the broker admin API.

    :param client: KafkaClient connected to the cluster.
    :type client: :class:`pykafka.KafkaClient`
    :param args: Parsed CLI arguments (topic, num_partitions,
        replication_factor, replica_assignment, config_entries, timeout).
    :raises ValueError: if --broker_version predates the creation API.
    """
    if parse_version(args.broker_version) < parse_version('0.10.0'):
        raise ValueError("The topic creation API is not usable on brokers older than "
                         "0.10.0. Use --broker_version to specify the version")
    request = CreateTopicRequest(args.topic,
                                 args.num_partitions,
                                 args.replication_factor,
                                 args.replica_assignment or [],
                                 args.config_entries or [])
    client.cluster.controller_broker.create_topics([request], args.timeout)
def delete_topic(client, args):
    """Delete a topic via the broker admin API.

    :param client: KafkaClient connected to the cluster.
    :type client: :class:`pykafka.KafkaClient`
    :param args: Parsed CLI arguments (topic, timeout, broker_version).
    :raises ValueError: if --broker_version predates the deletion API.
    """
    if parse_version(args.broker_version) < parse_version('0.10.0'):
        # Fixed typo in the user-facing message: "deletoin" -> "deletion".
        raise ValueError("The topic deletion API is not usable on brokers older than "
                         "0.10.0. Use --broker_version to specify the version")
    client.cluster.controller_broker.delete_topics([args.topic], args.timeout)
def _encode_utf8(string):
"""Converts argument to UTF-8-encoded bytes.
Used to make dict lookups work in Python 3 because topics and things are
bytes.
"""
return string.encode('utf-8', errors='replace')
def _add_consumer_group(parser):
    """Register the positional CONSUMER_GROUP argument on *parser*."""
    parser.add_argument('consumer_group',
                        metavar='CONSUMER_GROUP',
                        type=_encode_utf8,
                        help='Consumer group name.')
def _add_limit(parser):
"""Add limit option to arg parser."""
parser.add_argument('-l', '--limit',
help='Number of messages to consume '
'(default: %(default)s)',
type=int, default=10)
def _add_timeout(parser):
parser.add_argument('-t', '--timeout',
help='Time in ms to wait for the operation to complete'
'(default: %(default)s)',
type=int, default=5000)
def _add_offset(parser):
"""Add offset to arg parser."""
parser.add_argument('offset',
metavar='OFFSET',
type=str,
help='Offset to fetch. Can be EARLIEST, LATEST, or a '
'datetime in the format YYYY-MM-DDTHH:MM:SS.')
def _add_outfile(parser):
""" Add outfile option to arg parser."""
parser.add_argument('-o', '--outfile',
help='output file (defaults to stdout)',
type=argparse.FileType('w'), default=sys.stdout)
def _add_topic(parser):
    """Register the positional TOPIC argument on *parser*."""
    parser.add_argument('topic',
                        metavar='TOPIC',
                        type=_encode_utf8,
                        help='Topic name.')
def _get_arg_parser():
    """Build the top-level argument parser with one subcommand per tool."""
    root = argparse.ArgumentParser(description='Tools for Kafka.')
    # Options shared by every subcommand.
    root.add_argument('-b', '--broker',
                      required=False,
                      default='localhost:9092',
                      dest='host',
                      help='host:port of any Kafka broker. '
                           '[default: localhost:9092]')
    root.add_argument('-o', '--broker_version',
                      required=False,
                      default='0.9.0',
                      dest='broker_version',
                      help="The version string of the broker with which to communicate")
    subparsers = root.add_subparsers(help='Commands', dest='command')

    # consume_topic: dump messages to a file or stdout.
    sub = subparsers.add_parser(
        'consume_topic',
        help='Dump messages for a topic to a file or stdout.')
    sub.set_defaults(func=consume_topic)
    _add_topic(sub)
    _add_limit(sub)
    _add_outfile(sub)

    # desc_topic: detailed per-partition info.
    sub = subparsers.add_parser(
        'desc_topic',
        help='Print detailed info for a topic.')
    sub.set_defaults(func=desc_topic)
    _add_topic(sub)

    # print_managed_consumer_groups: groups consuming a topic.
    sub = subparsers.add_parser(
        'print_managed_consumer_groups',
        help='Get consumer groups for a topic')
    sub.set_defaults(func=print_managed_consumer_groups)
    _add_topic(sub)

    # print_consumer_lag: per-partition lag for a group.
    sub = subparsers.add_parser(
        'print_consumer_lag',
        help='Get consumer lag for a topic.')
    sub.set_defaults(func=print_consumer_lag)
    _add_topic(sub)
    _add_consumer_group(sub)

    # print_offsets: broker offsets for a topic.
    sub = subparsers.add_parser(
        'print_offsets',
        help='Fetch broker offsets for a topic')
    sub.set_defaults(func=print_offsets)
    _add_topic(sub)
    _add_offset(sub)

    # print_topics: cluster-wide topic listing.
    sub = subparsers.add_parser(
        'print_topics',
        help='Print information about all topics in the cluster.')
    sub.set_defaults(func=print_topics)

    # reset_offsets: rewind/advance a group's committed offsets.
    sub = subparsers.add_parser(
        'reset_offsets',
        help='Reset offsets for a topic/consumer group')
    sub.set_defaults(func=reset_offsets)
    _add_topic(sub)
    _add_consumer_group(sub)
    _add_offset(sub)

    # create_topic: admin API (brokers >= 0.10.0).
    sub = subparsers.add_parser(
        'create_topic',
        help='Create a topic')
    sub.set_defaults(func=create_topic)
    _add_topic(sub)
    _add_timeout(sub)
    sub.add_argument('-p', '--num_partitions',
                     help='Number of partitions to be created. -1 indicates unset. '
                          '(default: %(default)s)',
                     type=int, default=1)
    sub.add_argument('-r', '--replication_factor',
                     help='Replication factor for the topic. -1 indicates unset. '
                          '(default: %(default)s)',
                     type=int, default=1)
    sub.add_argument('-a', '--replica_assignment',
                     help='Replica assignment among kafka brokers for this topic '
                          'partitions. If this is set num_partitions and '
                          'replication_factor must be unset. Represent as a JSON '
                          'object with partition IDs as keys as lists of node IDs '
                          'as values',
                     type=_encode_utf8)
    sub.add_argument('-c', '--config_entries',
                     help='Topic level configuration for topic to be set. Represent '
                          'as a JSON object.',
                     type=_encode_utf8)

    # delete_topic: admin API (brokers >= 0.10.0).
    sub = subparsers.add_parser(
        'delete_topic',
        help='Delete a topic')
    sub.set_defaults(func=delete_topic)
    _add_topic(sub)
    _add_timeout(sub)
    return root
def main():
    """CLI entry point: parse arguments, connect, and dispatch the command."""
    arg_parser = _get_arg_parser()
    args = arg_parser.parse_args()
    if not args.command:
        # No subcommand given: show usage instead of connecting.
        arg_parser.print_help()
        return
    client = pykafka.KafkaClient(hosts=args.host,
                                 broker_version=args.broker_version)
    args.func(client, args)
# Run the CLI when executed as a script.
if __name__ == '__main__':
    main()