/
elasticsearch_index.py
80 lines (61 loc) · 1.92 KB
/
elasticsearch_index.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
#!/usr/bin/env python
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import argparse
import datetime
import json
import sys
import pytablewriter as ptw
from elasticsearch import Elasticsearch
def parse_option(args=None):
    """Parse command-line options for the Elasticsearch connection.

    Args:
        args: Optional list of argument strings to parse instead of
            ``sys.argv`` (useful for testing). ``None`` (the default,
            matching the original behavior) parses ``sys.argv``.

    Returns:
        argparse.Namespace with ``host`` (str, default ``"localhost"``)
        and ``port`` (int, default ``9200``).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--host", default="localhost",
        help="default=%(default)s")
    parser.add_argument(
        "--port", type=int, default=9200,
        help="default=%(default)s")

    # argparse treats args=None as "use sys.argv[1:]", so the no-argument
    # call sites behave exactly as before.
    return parser.parse_args(args)
def main():
    """Index two sample rows into Elasticsearch and print the result.

    Connects to the server selected by --host/--port, deletes any previous
    copy of the example index, writes the sample rows through
    pytablewriter's ElasticsearchWriter, then dumps the generated mapping
    and the stored documents to stdout.

    Returns:
        int: 0 on success (used as the process exit code).
    """
    opts = parse_option()
    client = Elasticsearch(hosts="{:s}:{:d}".format(opts.host, opts.port))

    writer = ptw.ElasticsearchWriter()
    writer.stream = client
    writer.index_name = "es writer example"
    writer.headers = [
        "str", "byte", "short", "int", "long", "float", "date", "bool", "ip",
    ]

    # one row per ES field-type showcase: text, numerics, datetime, bool, ip
    row_a = [
        "abc", 100, 10000, 2000000000, 200000000000, 0.1,
        datetime.datetime(2017, 1, 2, 3, 4, 5), True, "127.0.0.1",
    ]
    row_b = [
        "def", -10, -1000, -200000000, -20000000000, 100.1,
        datetime.datetime(2017, 6, 5, 4, 5, 2), False, "::1",
    ]
    writer.value_matrix = [row_a, row_b]

    # drop any existing index first (404 -> index absent, tolerated)
    client.indices.delete(index=writer.index_name, ignore=404)

    # create the index and put the data
    writer.write_table()

    # refresh so the just-written documents are visible to the queries below
    client.indices.refresh(index=writer.index_name)

    print("----- mappings -----")
    response = client.indices.get_mapping(
        index=writer.index_name, doc_type="table")
    print("{}\n".format(json.dumps(response, indent=4)))

    print("----- documents -----")
    response = client.search(
        index=writer.index_name,
        doc_type="table",
        body={
            "query": {"match_all": {}}
        }
    )
    for hit in response["hits"]["hits"]:
        print(json.dumps(hit["_source"], indent=4))

    return 0
# Script entry point: propagate main()'s return value as the exit status.
if __name__ == "__main__":
    sys.exit(main())