# -*- coding: utf-8 -*-
# <Lettuce - Behaviour Driven Development for python>
# Copyright (C) <2010-2012> Gabriel Falcão <gabriel@nacaolivre.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import re
import time
import unicodedata


def utf8_string(s):
    if isinstance(s, str):
        s = s.decode("utf-8")

    return s


def escape_if_necessary(what):
    what = unicode(what)
    if len(what) == 1:
        what = u"[%s]" % what

    return what
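
# For example (illustrative, assuming Python 2 unicode semantics): a
# single-character separator is wrapped in a regex character class so that
# split_wisely can compile it literally; longer separators pass through:
#     escape_if_necessary(u"|")         -> u"[|]"
#     escape_if_necessary(u"Scenario:") -> u"Scenario:"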


def get_stripped_lines(string, ignore_lines_starting_with=''):
    """Split a string at newline chars, then return the list of stripped, non-blank lines"""
    # used e.g. to separate out all the steps in a scenario
    string = unicode(string)
    lines = [unicode(l.strip()) for l in string.splitlines()]
    if ignore_lines_starting_with:
        filter_func = lambda x: x and not x.startswith(
            ignore_lines_starting_with)
    else:
        # by using an "identity" filter function, blank lines
        # will not be included in the returned list
        filter_func = lambda x: x

    lines = filter(filter_func, lines)

    return lines
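
# For example (illustrative):
#     get_stripped_lines(u"Given a step\n\n  # a comment\n  Then another  ", '#')
#     -> [u'Given a step', u'Then another']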


def split_wisely(string, sep, strip=False):
    string = unicode(string)
    if strip:
        string = string.strip()
    else:
        string = string.strip("\n")
    sep = unicode(sep)

    regex = re.compile(escape_if_necessary(sep), re.UNICODE | re.M | re.I)

    items = filter(lambda x: x, regex.split(string))
    if strip:
        items = [i.strip() for i in items]
    else:
        items = [i.strip("\n") for i in items]

    return [unicode(i) for i in items]
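
# For example (illustrative): empty pieces are discarded and, with strip=True,
# each remaining piece is stripped of surrounding whitespace:
#     split_wisely(u"| name | age |", u"|", strip=True) -> [u'name', u'age']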


def wise_startswith(string, seed):
    string = unicode(string).strip()
    seed = unicode(seed)
    regex = u"^%s" % re.escape(seed)
    return bool(re.search(regex, string, re.I))
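
# For example (illustrative): the check ignores leading whitespace and case:
#     wise_startswith(u"   Scenario Outline: totals", u"scenario") -> True
#     wise_startswith(u"Given a step", u"scenario")                -> False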


def remove_it(string, what):
    return unicode(re.sub(unicode(what), "", unicode(string)).strip())


def column_width(string):
    width = 0
    for c in unicode(string):
        if unicodedata.east_asian_width(c) in "WF":
            width += 2
        else:
            width += 1
    return width
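
# For example (illustrative): wide/fullwidth East Asian characters occupy two
# terminal columns, everything else one:
#     column_width(u"lettuce") -> 7
#     column_width(u"日本語")   -> 6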


def rfill(string, times, char=u" ", append=u""):
    string = unicode(string)
    missing = times - column_width(string)
    for x in range(missing):
        string += char

    return unicode(string) + unicode(append)
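
# For example (illustrative): pad to a display width of 8 columns, then append:
#     rfill(u"name", 8, append=u"|") -> u"name    |"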


def getlen(string):
    return column_width(unicode(string)) + 1


def dicts_to_string(dicts, order):
    '''
    Renders a list of dicts as a pipe-delimited table string, so it can be
    compared against the table text of a feature file.
    '''
    escape = "#{%s}" % unicode(time.time())

    def enline(line):
        return unicode(line).replace("|", escape)

    def deline(line):
        return line.replace(escape, '\\|')

    keys_and_sizes = dict([(k, getlen(k)) for k in dicts[0].keys()])
    for key in keys_and_sizes:
        for data in dicts:
            current_size = keys_and_sizes[key]
            value = unicode(data.get(key, ''))
            size = getlen(value)
            if size > current_size:
                keys_and_sizes[key] = size

    names = []
    for key in order:
        size = keys_and_sizes[key]
        name = u" %s" % rfill(key, size)
        names.append(enline(name))

    table = [u"|%s|" % "|".join(names)]
    for data in dicts:
        names = []
        for key in order:
            value = data.get(key, '')
            size = keys_and_sizes[key]
            names.append(enline(u" %s" % rfill(value, size)))

        table.append(u"|%s|" % "|".join(names))

    return deline(u"\n".join(table) + u"\n")
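
# For example (illustrative): each column is padded to the width of its widest
# cell, and literal pipes inside values come out escaped as "\|":
#     dicts_to_string([{'name': u'John', 'age': u'22'}], ['name', 'age'])
#     -> u"| name | age |\n| John | 22  |\n"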


def parse_hashes(lines, json_format=None):
    escape = "#{%s}" % unicode(time.time())

    def enline(line):
        return unicode(line.replace("\\|", escape)).strip()

    def deline(line):
        return line.replace(escape, '|')

    def discard_comments(lines):
        return [line for line in lines if not line.startswith('#')]

    lines = discard_comments(lines)
    lines = map(enline, lines)

    keys = []
    hashes = []
    if lines:
        first_line = lines.pop(0)
        keys = split_wisely(first_line, u"|", True)
        keys = map(deline, keys)

        for line in lines:
            values = split_wisely(line, u"|", True)
            values = map(deline, values)
            hashes.append(dict(zip(keys, values)))

    return keys, hashes
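
# For example (illustrative), the row-oriented inverse of dicts_to_string: the
# first row supplies the keys and every following row becomes one dict:
#     parse_hashes([u'| name | age |', u'| John | 22 |'])
#     -> ([u'name', u'age'], [{u'name': u'John', u'age': u'22'}])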


def json_to_string(json_list, order):
    '''
    Renders the column-oriented structure built by parse_as_json as a
    pipe-delimited table; purely for aesthetic reasons, each column is
    padded (rfill) with spaces to the width of its largest cell.
    '''
    escape = "#{%s}" % unicode(time.time())

    def enline(line):
        return unicode(line).replace("|", escape)

    def deline(line):
        return line.replace(escape, '\\|')

    nu_keys_and_sizes = list([[k.keys()[0], getlen(k.keys()[0])] for k in json_list])
    maxlen = 0
    counter = 0
    for key_list in nu_keys_and_sizes:
        current_size = key_list[1]
        temp_list = json_list[counter].values()[0]
        temp_maxlen = len(temp_list)
        if temp_maxlen > maxlen:
            maxlen = temp_maxlen
        for data in temp_list:
            value = unicode(data)
            size = getlen(value)
            if size > current_size:
                current_size = size
                key_list[1] = size
        counter += 1

    names = []
    for key in nu_keys_and_sizes:
        size = key[1]
        name = u" %s" % rfill(key[0], size)
        names.append(enline(name))

    table = [u"|%s|" % "|".join(names)]

    for idx in xrange(maxlen):
        names = []
        for data, key in zip(json_list, nu_keys_and_sizes):
            try:
                value = data.values()[0][idx]
            except IndexError:
                value = ''
            size = key[1]
            names.append(enline(u" %s" % rfill(value, size)))
        table.append(u"|%s|" % "|".join(names))

    return deline(u"\n".join(table) + u"\n")
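
# For example (illustrative), given the column-oriented structure that
# parse_as_json produces:
#     json_to_string([{u'name': [u'John', u'Mary']},
#                     {u'pets': [u'cats', u'dogs']}], [u'name', u'pets'])
#     -> u"| name | pets |\n| John | cats |\n| Mary | dogs |\n"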


def parse_as_json(lines):
    '''
    Parse table lines into a list of one-key dicts, each mapping a column
    name to the list of values found in that column.
    '''
    escape = "#{%s}" % unicode(time.time())

    def enline(line):
        return unicode(line.replace("\\|", escape)).strip()

    def deline(line):
        return line.replace(escape, '|')

    def discard_comments(lines):
        return [line for line in lines if not line.startswith('#')]

    lines = discard_comments(lines)
    lines = map(enline, lines)
    non_unique_keys = []
    json_map = []
    if lines:
        first_line = lines.pop(0)
        non_unique_keys = split_wisely(first_line, u"|", True)
        non_unique_keys = map(deline, non_unique_keys)
        rng_idx = len(non_unique_keys)
        json_map = list(non_unique_keys)
        for idx in xrange(rng_idx):
            json_map[idx] = dict([(non_unique_keys[idx], [])])
        for line in lines:
            values = split_wisely(line, u"|", True)
            values = map(deline, values)

            for idx in xrange(rng_idx):
                json_map[idx].values()[0].append(values[idx])
    return non_unique_keys, json_map
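
# For example (illustrative), the column-oriented counterpart of parse_hashes:
#     parse_as_json([u'| name | pets |', u'| John | cats |', u'| Mary | dogs |'])
#     -> ([u'name', u'pets'],
#         [{u'name': [u'John', u'Mary']}, {u'pets': [u'cats', u'dogs']}])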


def parse_multiline(lines):
    multilines = []
    in_multiline = False
    for line in lines:
        if line == '"""':
            in_multiline = not in_multiline
        elif in_multiline:
            if line.startswith('"'):
                line = line[1:]
            if line.endswith('"'):
                line = line[:-1]
            multilines.append(line)
    return u'\n'.join(multilines)
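
# For example (illustrative): the lines between a pair of '"""' markers are
# joined back into a single block of text:
#     parse_multiline([u'"""', u'Some text', u'spanning two lines', u'"""'])
#     -> u'Some text\nspanning two lines'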