forked from Altinity/clickhouse-mysql-data-reader
-
Notifications
You must be signed in to change notification settings - Fork 0
/
chwriter.py
125 lines (100 loc) · 4.01 KB
/
chwriter.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import sys
from decimal import Decimal
from clickhouse_mysql.dbclient.chclient import CHClient
from clickhouse_mysql.writer.writer import Writer
from clickhouse_mysql.tableprocessor import TableProcessor
from clickhouse_mysql.event.event import Event
class CHWriter(Writer):
    """ClickHouse writer.

    Converts incoming events and consolidates all their rows into one
    batched INSERT into the destination ClickHouse schema.table.
    """

    client = None          # CHClient used to execute the INSERT
    dst_schema = None      # explicit destination schema, or None to use each event's schema
    dst_table = None       # explicit destination table, or None to use each event's table
    dst_distribute = None  # when True, write through the "<name>_all" distributed objects

    def __init__(
            self,
            connection_settings,
            dst_schema=None,
            dst_table=None,
            dst_table_prefix=None,
            dst_distribute=False,
            next_writer_builder=None,
            converter_builder=None,
    ):
        """Set up the ClickHouse client and destination naming.

        :param connection_settings: dict passed straight to CHClient
            (host/port/user/passwd)
        :param dst_schema: destination schema name; None keeps the source schema
        :param dst_table: destination table name; None keeps the source table
        :param dst_table_prefix: prefix applied to migrated table names
        :param dst_distribute: write via "_all" distributed schema/table names
        :param next_writer_builder: accepted for Writer-interface compatibility
        :param converter_builder: accepted for Writer-interface compatibility
        """
        # Distributed setups are addressed through "<name>_all" objects
        if dst_distribute and dst_schema is not None:
            dst_schema += "_all"
        if dst_distribute and dst_table is not None:
            dst_table += "_all"

        logging.info("CHWriter() connection_settings={} dst_schema={} dst_table={} dst_distribute={}".format(
            connection_settings, dst_schema, dst_table, dst_distribute))

        self.client = CHClient(connection_settings)
        self.dst_schema = dst_schema
        self.dst_table = dst_table
        self.dst_table_prefix = dst_table_prefix
        self.dst_distribute = dst_distribute

    def insert(self, event_or_events=None):
        """Insert rows from one event or a list of events into ClickHouse.

        All rows from all events are consolidated into a single batched
        INSERT. On query failure the process terminates with a nonzero
        exit status.

        :param event_or_events: a single Event or a list of Events; the
            row dicts of all events are assumed to share the same columns
        """
        # event_or_events = [
        #   event: {
        #       row: {'id': 3, 'a': 3}
        #   },
        #   event: {
        #       row: {'id': 3, 'a': 3}
        #   },
        # ]

        events = self.listify(event_or_events)

        if len(events) < 1:
            logging.warning('No events to insert. class: %s', __class__)
            return

        # assume we have at least one Event
        logging.debug('class:%s insert %d event(s)', __class__, len(events))

        # verify and convert events; consolidate converted rows from all
        # events into one batch
        rows = []
        event_converted = None
        for event in events:
            # NOTE(review): if Event.verify is a method (not a property),
            # this tests the truthiness of the bound method and never
            # skips anything — confirm against the Event class.
            if not event.verify:
                logging.warning('Event verification failed. Skip one event. Event: %s Class: %s', event.meta(), __class__)
                continue  # for event

            event_converted = self.convert(event)
            for row in event_converted:
                for key in row:
                    # ClickHouse driver expects str for Decimal-typed
                    # columns; convert in place
                    if isinstance(row[key], Decimal):
                        row[key] = str(row[key])
                rows.append(row)

        # nothing survived verification/conversion — avoid building an
        # INSERT from rows[0] of an empty list (would raise IndexError)
        if not rows:
            logging.warning('No rows to insert. class: %s', __class__)
            return

        logging.debug('class:%s insert %d row(s)', __class__, len(rows))

        # determine target schema.table
        schema = self.dst_schema if self.dst_schema else event_converted.schema
        table = None
        if self.dst_distribute:
            table = TableProcessor.create_distributed_table_name(db=event_converted.schema, table=event_converted.table)
        else:
            table = self.dst_table if self.dst_table else event_converted.table
            if self.dst_schema:
                table = TableProcessor.create_migrated_table_name(prefix=self.dst_table_prefix, table=table)

        logging.debug("schema={} table={} self.dst_schema={} self.dst_table={}".format(schema, table, self.dst_schema, self.dst_table))

        # and INSERT converted rows; column list comes from the first
        # row — all rows are assumed to share the same keys
        sql = ''
        try:
            sql = 'INSERT INTO `{0}`.`{1}` ({2}) VALUES'.format(
                schema,
                table,
                ', '.join(map(lambda column: '`%s`' % column, rows[0].keys()))
            )
            self.client.execute(sql, rows)
        except Exception as ex:
            logging.critical('QUERY FAILED')
            logging.critical('ex={}'.format(ex))
            logging.critical('sql={}'.format(sql))
            # a failed INSERT is a fatal error — exit nonzero so callers
            # and supervisors can detect the failure (was sys.exit(0))
            sys.exit(1)

        # all DONE
if __name__ == '__main__':
    # Ad-hoc smoke test: point the writer at a hard-coded ClickHouse
    # instance and issue an empty insert (logs a warning and returns).
    CHWriter(connection_settings={
        'host': '192.168.74.230',
        'port': 9000,
        'user': 'default',
        'passwd': '',
    }).insert()