@@ -130,7 +130,7 @@ def get_tokens(code: str) -> Tuple[list, int, list]:
                 tokens.append(token.string)
             else:
                 stopwords_count += 1
-    except tokenize.TokenError:
+    except:
         is_tokenizable = False
         return None, None, comments, docstring, stopwords_count, is_tokenizable
     return code, tokens, comments, docstring, stopwords_count, is_tokenizable
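Note on the hunk above: a bare except also swallows KeyboardInterrupt and SystemExit. A minimal sketch of a narrower alternative, assuming the tokenizer is driven via the standard tokenize module (the IndentationError case is an assumption about what tokenization can raise here, not part of this patch):

import io
import tokenize

code = "def f():\n    return 1\n"  # placeholder; get_tokens receives `code` as a parameter
tokens, is_tokenizable = [], True
try:
    for token in tokenize.generate_tokens(io.StringIO(code).readline):
        tokens.append(token.string)
except (tokenize.TokenError, IndentationError):
    # Narrower than a bare except: KeyboardInterrupt/SystemExit still propagate.
    is_tokenizable = False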
@@ -178,7 +178,7 @@ def collect_data(filename: str,
 
     # Convert Python 2 to Python 3
     # os.system(f"~/anaconda3/envs/scs/bin/2to3 {filename} -w -n")
-    run(["/home/masaidov/.conda/envs/scs/bin/2to3", filename, "-w", "-n"],
+    run(["/home/marat/.pyenv/shims/2to3", filename, "-w", "-n"],
         stdout=DEVNULL, stderr=STDOUT)
     print("Building AST tree from a filename:", filename)
 
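The hunk above swaps one machine-specific absolute path to 2to3 for another. A sketch of resolving the tool from PATH instead, so the script is not tied to a single user's environment (not part of this change; the placeholder filename is an assumption):

import shutil
from subprocess import run, DEVNULL, STDOUT

filename = "legacy_module.py"  # placeholder; collect_data receives this as an argument
two_to_three = shutil.which("2to3")  # resolve from PATH instead of a hardcoded per-user path
if two_to_three is None:
    raise FileNotFoundError("2to3 was not found on PATH")
run([two_to_three, filename, "-w", "-n"], stdout=DEVNULL, stderr=STDOUT)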
@@ -330,12 +330,8 @@ def convert_tokens_to_ast(functions):
         try:
             ast_fun_tokens = json.loads(parse_python3.parse_file(function, "code"))
             ast_fun_sequential = get_dfs(convert(ast_fun_tokens))
-        except SyntaxError:
-            print("Met syntax problem.")
-            error_counter += 1
-            continue
-        except TypeError:
-            print("JSON is not serializable.")
+        except:
+            print("Met syntax or type error.")
             error_counter += 1
             continue
         ast_tokens.append(ast_fun_sequential)
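The two removed handlers could also be merged without going fully bare. A sketch of the same loop body with one combined clause, assuming the parse_python3, convert, and get_dfs helpers from the hunk above (this is an alternative, not what the patch does):

for function in functions:
    try:
        ast_fun_tokens = json.loads(parse_python3.parse_file(function, "code"))
        ast_fun_sequential = get_dfs(convert(ast_fun_tokens))
    except (SyntaxError, TypeError) as err:
        # One clause covers both removed handlers; unrelated errors still surface.
        print("Met syntax or type error:", err)
        error_counter += 1
        continue
    ast_tokens.append(ast_fun_sequential)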