forked from a-pertsev/xsl-graph
-
Notifications
You must be signed in to change notification settings - Fork 0
/
server.py
executable file
·141 lines (94 loc) · 3.73 KB
/
server.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os.path
import simplejson
import logging
import logging.handlers
from functools import partial
from itertools import chain, imap, groupby
from operator import itemgetter
import tornado.ioloop
import tornado.autoreload
import tornado.web
import config
import cache
import pyxsl.analyze as analyze
from pyxsl.parse import get_data_and_index, get_data
from pyxsl.draw import draw_outside, draw_inside, render_graph, create_graph
from pyxsl.pick import pickle_data_and_index, get_data_index_from_pickle
# Warm the in-memory cache at startup: prefer the pickled snapshot on disk,
# fall back to a full parse of the XSL tree when no pickle exists yet.
data_cache = cache.DataCacher()
try:
    data_cache.invalidate(*get_data_index_from_pickle())
except IOError:
    # First run (no pickle on disk): parse everything from scratch.
    data, index = get_data_and_index()
    # Bug fix: the fresh data was pickled but never loaded into data_cache,
    # leaving the cache unpopulated until /cache_invalidate was requested.
    # Mirror InvalidateHandler: load the cache, then persist the snapshot.
    data_cache.invalidate(data, index)
    pickle_data_and_index(data, index)
class MainHandler(tornado.web.RequestHandler):
    """Serve the single-page UI entry point."""
    def get(self):
        # Static template only; all data is fetched via the other handlers.
        self.render('templates/main.html')
class SVGImportsHandler(tornado.web.RequestHandler):
    """Render the import graph around one XSL file as an SVG image.

    Query args:
        file -- path relative to config.ROOT_XSL_DIR (defaults to the
                main page stylesheet).
    """
    def get(self):
        rel_path = self.get_argument('file', 'ambient/blocks/page.xsl')
        target = str(os.path.join(config.ROOT_XSL_DIR, rel_path))
        graph = create_graph()
        # First the edges inside the target file's own import tree, then
        # the edges from the rest of the project that point into it.
        graph = draw_inside(graph=graph, data=data_cache.data, search_files=[target])
        graph = draw_outside(graph=graph, index=data_cache.index, search_files=[target])
        self.set_header('Content-Type', 'image/svg+xml')
        self.finish(render_graph(graph))
def sort_func(text, item):
    """Ranking key for file-name suggestions.

    Splits *item* on dots, adds the full name itself, and returns the
    best (largest) position at which *text* occurs in any variant;
    -1 means *text* does not occur at all.  Larger values sort later,
    which SuggestHandler relies on when grouping candidates.
    """
    variants = item.split('.')
    variants.append(item)
    # Generator expression instead of the Py2-only itertools.imap:
    # identical result, and portable to Python 3.
    return max(x.find(text) for x in variants)
class SuggestHandler(tornado.web.RequestHandler):
    """Return up to 15 file-name suggestions for a search string as JSON."""
    def get(self):
        query = self.get_argument('name')
        rank = partial(sort_func, query)
        # Sort candidates by match position, then bucket files that share
        # the same rank into groups.
        ranked = sorted(data_cache.file_names, key=rank)
        buckets = [list(members) for _, members in groupby(ranked, rank)]
        # NOTE(review): [1:] drops the whole lowest-ranked bucket —
        # presumably the rank == -1 (no match) group; verify that intent.
        suggests = list(chain.from_iterable(buckets[1:]))
        self.set_header('Content-Type', 'application/json')
        self.finish(simplejson.dumps(suggests[:15]))
class InvalidateHandler(tornado.web.RequestHandler):
    """Force a full re-parse of the XSL tree and refresh the cache."""
    def get(self):
        # Re-parse first so the cache always holds the freshest data,
        # then persist that snapshot for the next startup.
        data_cache.invalidate(*get_data_and_index())
        pickle_data_and_index(data_cache.data, data_cache.index)
def get_errors_from_log(handler):
    """Collect ERROR records buffered on *handler*, grouped by message key.

    Each record's .msg is expected to be a sequence whose first element is
    the grouping key; the rest are file names cleaned via
    AnalyzeHandler.clean_records.  Returns {key: [cleaned, ...]}.
    """
    error_msgs = [record.msg for record in handler.buffer
                  if record.levelname == 'ERROR']
    first_field = itemgetter(0)
    grouped = groupby(sorted(error_msgs, key=first_field), key=first_field)
    return dict((key, [AnalyzeHandler.clean_records(msg) for msg in group])
                for key, group in grouped)
class AnalyzeHandler(tornado.web.RequestHandler):
    """Run static analysis over the XSL tree and return the findings as JSON."""

    @staticmethod
    def clean_records(records):
        # records[0] is the group key; the remaining entries are absolute
        # file names — strip the configured root to get repo-relative paths.
        return map(lambda xsl_name: xsl_name.replace(config.ROOT_XSL_DIR, '').lstrip('/'), records[1:])

    def get(self):
        logger = logging.getLogger(name='parsingLogger')
        # Buffer parse-time ERROR records in memory so they can be folded
        # into the response.  Bug fix: capacity must be an int — the original
        # passed the string 'hndlr', which breaks the len(buffer) >= capacity
        # check (TypeError on Python 3).  No target is set, so the buffer is
        # never drained by flush and stays readable below.
        handler = logging.handlers.MemoryHandler(100000)
        logger.addHandler(handler)
        try:
            data = get_data()
            result = {
                'Not used XSLS': analyze.get_not_used_xsls(data_cache.data, data_cache.index),
                'Duplicated imports': analyze.get_duplicated_imports(data)
            }
            result.update(get_errors_from_log(handler))
        finally:
            # Bug fix: the handler used to stay attached forever, so every
            # request stacked another MemoryHandler onto the shared logger
            # (memory leak + duplicated buffering across requests).
            logger.removeHandler(handler)
            handler.close()
        self.finish(simplejson.dumps(result))
if __name__ == "__main__":
application = tornado.web.Application([
(r"/", MainHandler),
(r"/svg", SVGImportsHandler),
(r"/file_suggest", SuggestHandler),
(r"/cache_invalidate", InvalidateHandler),
(r"/analyze", AnalyzeHandler),
(r"/static/(.*)", tornado.web.StaticFileHandler, {"path": "static"}),
])
application.listen(8888)
io_loop = tornado.ioloop.IOLoop.instance()
tornado.autoreload.start(io_loop, 1000)
io_loop.start()