import inspect
import json
import os
import re
import tempfile
from functools import reduce
from logging import getLogger
from typing import Optional, Sequence, Union, Tuple, List, Callable, Dict, Any

from pathlib2 import Path
from six.moves.urllib.parse import urlparse

from .repo import ScriptInfo
from ...task import Task


class CreateAndPopulate(object):
    _VCS_SSH_REGEX = \
        "^" \
        "(?:(?P<user>{regular}*?)@)?" \
        "(?P<host>{regular}*?)" \
        ":" \
        "(?P<path>{regular}.*)?" \
        "$" \
        .format(
            regular=r"[^/@:#]"
        )
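
    # Illustrative note (not from the original source): the SCP-like SSH form the
    # regex above is meant to match looks like
    #     git@github.com:allegroai/clearml.git
    # (user='git', host='github.com', path='allegroai/clearml.git'), while a URL
    # such as 'https://github.com/allegroai/clearml.git' carries a scheme and is
    # recognized via urlparse() in __init__ below.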
    def __init__(
            self,
            project_name=None,  # type: Optional[str]
            task_name=None,  # type: Optional[str]
            task_type=None,  # type: Optional[str]
            repo=None,  # type: Optional[str]
            branch=None,  # type: Optional[str]
            commit=None,  # type: Optional[str]
            script=None,  # type: Optional[str]
            working_directory=None,  # type: Optional[str]
            packages=None,  # type: Optional[Union[bool, Sequence[str]]]
            requirements_file=None,  # type: Optional[Union[str, Path]]
            docker=None,  # type: Optional[str]
            docker_args=None,  # type: Optional[str]
            docker_bash_setup_script=None,  # type: Optional[str]
            output_uri=None,  # type: Optional[str]
            base_task_id=None,  # type: Optional[str]
            add_task_init_call=True,  # type: bool
            raise_on_missing_entries=False,  # type: bool
            verbose=False,  # type: bool
    ):
        # type: (...) -> None
        """
        Create a new Task from an existing code base.
        If the code does not already contain a call to Task.init, pass add_task_init_call=True,
        and the code will be patched in remote execution (i.e. when executed by `clearml-agent`).

        :param project_name: Set the project name for the task. Required if base_task_id is None.
        :param task_name: Set the name of the remote task. Required if base_task_id is None.
        :param task_type: Optional, the task type to be created. Supported values: 'training', 'testing', 'inference',
            'data_processing', 'application', 'monitor', 'controller', 'optimizer', 'service', 'qc', 'custom'
        :param repo: Remote URL for the repository to use, OR path to a local copy of the git repository.
            Example: 'https://github.com/allegroai/clearml.git' or '~/project/repo'
        :param branch: Select a specific repository branch/tag (implies the latest commit from the branch)
        :param commit: Select a specific commit id to use (default: latest commit,
            or, when used with a local repository, the matching local commit id)
        :param script: Specify the entry point script for the remote execution. When used in tandem with a
            remote git repository, the script should be a relative path inside the repository,
            for example: './source/train.py'. When used with a local repository path, it supports a
            direct path to a file inside the local repository itself, for example: '~/project/source/train.py'
        :param working_directory: Working directory to launch the script from. Default: repository root folder.
            Relative to the repo root or the local folder.
        :param packages: Manually specify a list of required packages. Example: ["tqdm>=2.1", "scikit-learn"],
            or `True` to automatically create requirements
            based on locally installed packages (repository must be local).
        :param requirements_file: Specify a requirements.txt file to install when setting up the session.
            If not provided, the requirements.txt from the repository will be used.
        :param docker: Select the docker image to be executed in by the remote session
        :param docker_args: Add docker arguments, pass a single string
        :param docker_bash_setup_script: Add a bash script to be executed
            inside the docker before setting up the Task's environment
        :param output_uri: Optional, set the Task's output_uri (storage destination).
            Examples: 's3://bucket/folder', 'https://server/', 'gs://bucket/folder', 'azure://bucket', '/folder/'
        :param base_task_id: Use a pre-existing task in the system, instead of a local repo/script.
            Essentially clones an existing task and overrides arguments/requirements.
        :param add_task_init_call: If True, a 'Task.init()' call is added to the script entry point in remote execution.
        :param raise_on_missing_entries: If True, raise ValueError on missing entries when populating
        :param verbose: If True, print verbose logging
        """
        if repo and len(urlparse(repo).scheme) <= 1 and not re.compile(self._VCS_SSH_REGEX).match(repo):
            folder = repo
            repo = None
        else:
            folder = None

        if raise_on_missing_entries and not base_task_id:
            if not script:
                raise ValueError("Entry point script not provided")
            if not repo and not folder and not Path(script).is_file():
                raise ValueError("Script file '{}' could not be found".format(script))
        if raise_on_missing_entries and commit and branch:
            raise ValueError(
                "Specify either a branch/tag or a specific commit id, not both (either --commit or --branch)")
        if raise_on_missing_entries and not folder and working_directory and working_directory.startswith('/'):
            raise ValueError("Working directory '{}' must be relative to the repository root".format(
                working_directory))

        if requirements_file and not Path(requirements_file).is_file():
            raise ValueError("Requirements file '{}' could not be found".format(requirements_file))

        self.folder = folder
        self.commit = commit
        self.branch = branch
        self.repo = repo
        self.script = script
        self.cwd = working_directory
        assert not packages or isinstance(packages, (tuple, list, bool))
        self.packages = list(packages) if packages is not None and not isinstance(packages, bool) \
            else (packages or None)
        self.requirements_file = Path(requirements_file) if requirements_file else None
        self.base_task_id = base_task_id
        self.docker = dict(image=docker, args=docker_args, bash_script=docker_bash_setup_script)
        self.add_task_init_call = add_task_init_call
        self.project_name = project_name
        self.task_name = task_name
        self.task_type = task_type
        self.output_uri = output_uri
        self.task = None
        self.raise_on_missing_entries = raise_on_missing_entries
        self.verbose = verbose
    def create_task(self, dry_run=False):
        # type: (bool) -> Union[Task, Dict]
        """
        Create the new populated Task

        :param dry_run: Optional. If True, do not create an actual Task; instead return the Task definition as a dict
        :return: newly created Task object
        """
        local_entry_file = None
        repo_info = None
        if self.folder or (self.script and Path(self.script).is_file() and not self.repo):
            self.folder = os.path.expandvars(os.path.expanduser(self.folder)) if self.folder else None
            self.script = os.path.expandvars(os.path.expanduser(self.script)) if self.script else None
            self.cwd = os.path.expandvars(os.path.expanduser(self.cwd)) if self.cwd else None
            if Path(self.script).is_file():
                entry_point = self.script
            else:
                entry_point = (Path(self.folder) / self.script).as_posix()
            entry_point = os.path.abspath(entry_point)
            if not os.path.isfile(entry_point):
                raise ValueError("Script entrypoint file '{}' could not be found".format(entry_point))

            local_entry_file = entry_point
            repo_info, requirements = ScriptInfo.get(
                filepaths=[entry_point],
                log=getLogger(),
                create_requirements=self.packages is True,
                uncommitted_from_remote=True,
                detect_jupyter_notebook=False,
                add_missing_installed_packages=True,
                detailed_req_report=False,
            )

        # if we have no repository and no requirements, raise an error
        if self.raise_on_missing_entries and (not self.requirements_file and not self.packages) \
                and not self.repo and (
                not repo_info or not repo_info.script or not repo_info.script.get('repository')):
            raise ValueError("Standalone script detected '{}', but no requirements provided".format(self.script))

        if dry_run:
            task = None
            task_state = dict(
                name=self.task_name,
                project=Task.get_project_id(self.project_name),
                type=str(self.task_type or Task.TaskTypes.training),
            )
            if self.output_uri:
                task_state['output'] = dict(destination=self.output_uri)
        else:
            task_state = dict(script={})

            if self.base_task_id:
                if self.verbose:
                    print('Cloning task {}'.format(self.base_task_id))
                task = Task.clone(source_task=self.base_task_id, project=Task.get_project_id(self.project_name))

                self._set_output_uri(task)
            else:
                # noinspection PyProtectedMember
                task = Task._create(
                    task_name=self.task_name, project_name=self.project_name,
                    task_type=self.task_type or Task.TaskTypes.training)

                self._set_output_uri(task)

                # if there is nothing to populate, return
                if not any([
                    self.folder, self.commit, self.branch, self.repo, self.script, self.cwd,
                    self.packages, self.requirements_file, self.base_task_id] + (list(self.docker.values()))
                ):
                    return task

        # clear the script section
        task_state['script'] = {}

        if repo_info:
            task_state['script']['repository'] = repo_info.script['repository']
            task_state['script']['version_num'] = repo_info.script['version_num']
            task_state['script']['branch'] = repo_info.script['branch']
            task_state['script']['diff'] = repo_info.script['diff'] or ''
            task_state['script']['working_dir'] = repo_info.script['working_dir']
            task_state['script']['entry_point'] = repo_info.script['entry_point']
            task_state['script']['binary'] = repo_info.script['binary']
            task_state['script']['requirements'] = repo_info.script.get('requirements') or {}
            if self.cwd:
                cwd = self.cwd if Path(self.cwd).is_dir() else (
                    Path(repo_info.script['repo_root']) / self.cwd).as_posix()
                if not Path(cwd).is_dir():
                    raise ValueError("Working directory '{}' could not be found".format(cwd))
                cwd = Path(cwd).relative_to(repo_info.script['repo_root']).as_posix()
                entry_point = \
                    Path(repo_info.script['repo_root']) / repo_info.script['working_dir'] / repo_info.script[
                        'entry_point']
                entry_point = entry_point.relative_to(cwd).as_posix()
                task_state['script']['entry_point'] = entry_point or ""
                task_state['script']['working_dir'] = cwd or "."
        elif self.repo:
            # normalize path separators and strip empty or './' components
            entry_point = '/'.join([p for p in self.script.split('/') if p and p != '.'])
            cwd = '/'.join([p for p in (self.cwd or '.').split('/') if p and p != '.'])
            if cwd and entry_point.startswith(cwd + '/'):
                entry_point = entry_point[len(cwd) + 1:]
            task_state['script']['repository'] = self.repo
            task_state['script']['version_num'] = self.commit or None
            task_state['script']['branch'] = self.branch or None
            task_state['script']['diff'] = ''
            task_state['script']['working_dir'] = cwd or '.'
            task_state['script']['entry_point'] = entry_point or ""
        else:
            # standalone task
            task_state['script']['entry_point'] = self.script or ""
            task_state['script']['working_dir'] = '.'

        # update requirements
        reqs = []
        if self.requirements_file:
            with open(self.requirements_file.as_posix(), 'rt') as f:
                reqs = [line.strip() for line in f.readlines()]
        if self.packages and self.packages is not True:
            reqs += self.packages
        if reqs:
            # make sure we have clearml
            clearml_found = False
            for line in reqs:
                if line.strip().startswith('#'):
                    continue
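                # Descriptive note (added): successively split the line on each of
                # '#', ';', '@', '=', '~', '<', '>' and keep the left-most piece,
                # stripping version specifiers and markers to leave the bare package name.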
                package = reduce(lambda a, b: a.split(b)[0], "#;@=~<>", line).strip()
                if package == 'clearml':
                    clearml_found = True
                    break
            if not clearml_found:
                reqs.append('clearml')
            task_state['script']['requirements'] = {'pip': '\n'.join(reqs)}
        elif not self.repo and repo_info and not repo_info.script.get('requirements'):
            # we are in local mode; make sure we have a "requirements.txt", it is required
            reqs_txt_file = Path(repo_info.script['repo_root']) / "requirements.txt"
            poetry_toml_file = Path(repo_info.script['repo_root']) / "pyproject.toml"
            if self.raise_on_missing_entries and not reqs_txt_file.is_file() and not poetry_toml_file.is_file():
                raise ValueError(
                    "requirements.txt not found [{}] "
                    "Use --requirements or --packages".format(reqs_txt_file.as_posix()))

        if self.add_task_init_call:
            script_entry = os.path.abspath('/' + task_state['script'].get('working_dir', '.') +
                                           '/' + task_state['script']['entry_point'])
            idx_a = 0
            # find the right entry point for the patch if we have a local file (basically after __future__ imports)
            if local_entry_file:
                with open(local_entry_file, 'rt') as f:
                    lines = f.readlines()
                future_found = self._locate_future_import(lines)
                if future_found >= 0:
                    idx_a = future_found + 1

            task_init_patch = ''
            if self.repo or task_state.get('script', {}).get('repository'):
                # if we do not have requirements, add clearml to the requirements.txt
                if not reqs:
                    task_init_patch += \
                        "diff --git a/requirements.txt b/requirements.txt\n" \
                        "--- a/requirements.txt\n" \
                        "+++ b/requirements.txt\n" \
                        "@@ -0,0 +1,1 @@\n" \
                        "+clearml\n"

                # Add Task.init call
                task_init_patch += \
                    "diff --git a{script_entry} b{script_entry}\n" \
                    "--- a{script_entry}\n" \
                    "+++ b{script_entry}\n" \
                    "@@ -{idx_a},0 +{idx_b},3 @@\n" \
                    "+from clearml import Task\n" \
                    "+Task.init()\n" \
                    "+\n".format(
                        script_entry=script_entry, idx_a=idx_a, idx_b=idx_a + 1)
            else:
                # Add Task.init call
                task_init_patch += \
                    "from clearml import Task\n" \
                    "Task.init()\n\n"

            # make sure we add the diff at the end of the current diff
            task_state['script']['diff'] = task_state['script'].get('diff', '')
            if task_state['script']['diff'] and not task_state['script']['diff'].endswith('\n'):
                task_state['script']['diff'] += '\n'
            task_state['script']['diff'] += task_init_patch

        # set base docker image if provided
        if self.docker:
            if dry_run:
                task_state['container'] = dict(
                    image=self.docker.get('image') or '',
                    arguments=self.docker.get('args') or '',
                    setup_shell_script=self.docker.get('bash_script') or '',
                )
            else:
                task.set_base_docker(
                    docker_image=self.docker.get('image'),
                    docker_arguments=self.docker.get('args'),
                    docker_setup_bash_script=self.docker.get('bash_script'),
                )

        if self.verbose:
            if task_state['script']['repository']:
                repo_details = {k: v for k, v in task_state['script'].items()
                                if v and k not in ('diff', 'requirements', 'binary')}
                print('Repository Detected\n{}'.format(json.dumps(repo_details, indent=2)))
            else:
                print('Standalone script detected\n Script: {}'.format(self.script))

            if task_state['script'].get('requirements') and \
                    task_state['script']['requirements'].get('pip'):
                print('Requirements:{}{}'.format(
                    '\n Using requirements.txt: {}'.format(
                        self.requirements_file.as_posix()) if self.requirements_file else '',
                    '\n {}Packages: {}'.format('Additional ' if self.requirements_file else '', self.packages)
                    if self.packages else ''
                ))
            if self.docker:
                print('Base docker image: {}'.format(self.docker))

        if dry_run:
            return task_state

        # update the Task
        task.update_task(task_state)
        self.task = task
        return task

    def _set_output_uri(self, task):
        if self.output_uri:
            try:
                task.output_uri = self.output_uri
            except ValueError:
                getLogger().warning('Could not verify permission for output_uri: "{}"'.format(self.output_uri))
                # do not verify the output uri (it might not be valid when we are creating the Task)
                task.storage_uri = self.output_uri

    def update_task_args(self, args=None):
        # type: (Optional[Union[Sequence[str], Sequence[Tuple[str, str]]]]) -> ()
        """
        Update the newly created Task argparse arguments.
        If called before the Task is created, it is used for argument verification.

        :param args: Arguments to pass to the remote execution, list of string pairs (argument, value) or
            list of strings '<argument>=<value>'. Example: ['lr=0.003', ('batch_size', 64)]
        """
        if not args:
            return

        # check args are in format <key>=<value>
        args_list = []
        for a in args:
            if isinstance(a, (list, tuple)):
                assert len(a) == 2
                args_list.append(a)
                continue
            try:
                parts = a.split('=', 1)
                assert len(parts) == 2
                args_list.append(parts)
            except Exception:
                raise ValueError(
                    "Failed parsing argument '{}', arguments must be in '<key>=<value>' format".format(a))

        if not self.task:
            return

        task_params = self.task.get_parameters()
        args_list = {'Args/{}'.format(k): v for k, v in args_list}
        task_params.update(args_list)
        self.task.set_parameters(task_params)

    def get_id(self):
        # type: () -> Optional[str]
        """
        :return: Return the created Task id (str)
        """
        return self.task.id if self.task else None

    @staticmethod
    def _locate_future_import(lines):
        # type: (List[str]) -> int
        """
        :param lines: string lines of a python file
        :return: line index of the last __future__ import. Return -1 if no __future__ import was found
        """
        # skip over the first two lines, they are ours
        # then skip over empty or comment lines
        lines = [(i, line.split('#', 1)[0].rstrip()) for i, line in enumerate(lines)
                 if line.strip('\r\n\t ') and not line.strip().startswith('#')]

        # remove triple quotes ' """ '
        nested_c = -1
        skip_lines = []
        for i, line_pair in enumerate(lines):
            for _ in line_pair[1].split('"""')[1:]:
                if nested_c >= 0:
                    skip_lines.extend(list(range(nested_c, i + 1)))
                    nested_c = -1
                else:
                    nested_c = i

        # now keep only the lines that are not part of a triple-quoted block
        lines = [pair for i, pair in enumerate(lines) if i not in skip_lines]

        from_future = re.compile(r"^from[\s]*__future__[\s]*")
        import_future = re.compile(r"^import[\s]*__future__[\s]*")

        # test if we have __future__ import
        found_index = -1
        for a_i, (_, a_line) in enumerate(lines):
            if found_index >= a_i:
                continue
            if from_future.match(a_line) or import_future.match(a_line):
                found_index = a_i
                # check the last import block
                i, line = lines[found_index]
                # either we have a '\' character at the end of the line, or the import is parenthesized
                parenthesized_lines = '(' in line and ')' not in line
                while line.endswith('\\') or parenthesized_lines:
                    found_index += 1
                    i, line = lines[found_index]
                    if ')' in line:
                        break
            else:
                break

        return found_index if found_index < 0 else lines[found_index][0]


class CreateFromFunction(object):
    kwargs_section = 'kwargs'
    input_artifact_section = 'kwargs_artifacts'
    task_template = """from clearml import Task
from clearml.automation.controller import PipelineDecorator

{function_source}

if __name__ == '__main__':
    task = Task.init()
    kwargs = {function_kwargs}
    task.connect(kwargs, name='{kwargs_section}')
    function_input_artifacts = {function_input_artifacts}
    params = task.get_parameters() or dict()
    for k, v in params.items():
        if not v or not k.startswith('{input_artifact_section}/'):
            continue
        k = k.replace('{input_artifact_section}/', '', 1)
        task_id, artifact_name = v.split('.', 1)
        kwargs[k] = Task.get_task(task_id=task_id).artifacts[artifact_name].get()
    results = {function_name}(**kwargs)
    result_names = {function_return}
    if result_names:
        if not isinstance(results, (tuple, list)) or (len(result_names) == 1 and len(results) != 1):
            results = [results]
        for name, artifact in zip(result_names, results):
            task.upload_artifact(name=name, artifact_object=artifact)
"""
    @classmethod
    def create_task_from_function(
            cls,
            a_function,  # type: Callable
            function_kwargs=None,  # type: Optional[Dict[str, Any]]
            function_input_artifacts=None,  # type: Optional[Dict[str, str]]
            function_return=None,  # type: Optional[List[str]]
            project_name=None,  # type: Optional[str]
            task_name=None,  # type: Optional[str]
            task_type=None,  # type: Optional[str]
            repo=None,  # type: Optional[str]
            branch=None,  # type: Optional[str]
            commit=None,  # type: Optional[str]
            packages=None,  # type: Optional[Union[str, Sequence[str]]]
            docker=None,  # type: Optional[str]
            docker_args=None,  # type: Optional[str]
            docker_bash_setup_script=None,  # type: Optional[str]
            output_uri=None,  # type: Optional[str]
            helper_functions=None,  # type: Optional[Sequence[Callable]]
            dry_run=False,  # type: bool
            _sanitize_function=None,  # type: Optional[Callable[[str], str]]
            _sanitize_helper_functions=None,  # type: Optional[Callable[[str], str]]
    ):
        # type: (...) -> Union[Dict, Task]
        """
        Create a Task from a function, including wrapping the function input arguments
        into the hyper-parameter section as kwargs, and storing function results as named artifacts

        Example:

            def mock_func(a=6, b=9):
                c = a*b
                print(a, b, c)
                return c, c**2

            create_task_from_function(mock_func, function_return=['mul', 'square'])

        Example arguments from other Tasks (artifact):

            def mock_func(matrix_np):
                c = matrix_np*matrix_np
                print(matrix_np, c)
                return c

            create_task_from_function(
                mock_func,
                function_input_artifacts={'matrix_np': 'aabb1122.previous_matrix'},
                function_return=['square_matrix']
            )

        :param a_function: A global function to convert into a standalone Task
        :param function_kwargs: Optional, provide a subset of function arguments and default values to expose.
            If not provided, automatically take all function arguments & defaults
        :param function_input_artifacts: Optional, pass input arguments to the function from other Tasks' output
            artifacts. Example, for an argument named `numpy_matrix` from Task ID `aabbcc`, artifact name `answer`:
            {'numpy_matrix': 'aabbcc.answer'}
        :param function_return: Provide a list of names for all the results.
            If not provided, no results will be stored as artifacts.
        :param project_name: Set the project name for the task. Required if base_task_id is None.
        :param task_name: Set the name of the remote task. Required if base_task_id is None.
        :param task_type: Optional, the task type to be created. Supported values: 'training', 'testing', 'inference',
            'data_processing', 'application', 'monitor', 'controller', 'optimizer', 'service', 'qc', 'custom'
        :param repo: Remote URL for the repository to use, OR path to a local copy of the git repository.
            Example: 'https://github.com/allegroai/clearml.git' or '~/project/repo'
        :param branch: Select a specific repository branch/tag (implies the latest commit from the branch)
        :param commit: Select a specific commit id to use (default: latest commit,
            or, when used with a local repository, the matching local commit id)
        :param packages: Manually specify a list of required packages or a local requirements.txt file.
            Example: ["tqdm>=2.1", "scikit-learn"] or "./requirements.txt"
            If not provided, packages are automatically added based on the imports used in the function.
        :param docker: Select the docker image to be executed in by the remote session
        :param docker_args: Add docker arguments, pass a single string
        :param docker_bash_setup_script: Add a bash script to be executed
            inside the docker before setting up the Task's environment
        :param output_uri: Optional, set the Task's output_uri (storage destination).
            Examples: 's3://bucket/folder', 'https://server/', 'gs://bucket/folder', 'azure://bucket', '/folder/'
        :param helper_functions: Optional, a list of helper functions to make available
            for the standalone function Task.
        :param dry_run: If True, do not create the Task, but return a dict of the Task's definitions
        :param _sanitize_function: Sanitization function for the function string.
        :param _sanitize_helper_functions: Sanitization function for the helper function string.
        :return: Newly created Task object
        """
        function_name = str(a_function.__name__)
        function_source = inspect.getsource(a_function)
        if _sanitize_function:
            function_source = _sanitize_function(function_source)

        function_source = cls.__sanitize_remove_type_hints(function_source)

        # add helper functions on top
        for f in (helper_functions or []):
            f_source = inspect.getsource(f)
            if _sanitize_helper_functions:
                f_source = _sanitize_helper_functions(f_source)
            function_source = cls.__sanitize_remove_type_hints(f_source) + '\n\n' + function_source

        function_input_artifacts = function_input_artifacts or dict()
        # verify artifact kwargs:
        if not all(len(v.split('.', 1)) == 2 for v in function_input_artifacts.values()):
            raise ValueError(
                'function_input_artifacts={}, it must be in the format: '
                '{{"argument": "task_id.artifact_name"}}'.format(function_input_artifacts)
            )

        if function_kwargs is None:
            function_kwargs = dict()
            inspect_args = inspect.getfullargspec(a_function)
            if inspect_args and inspect_args.args:
                inspect_defaults_vals = inspect_args.defaults
                inspect_defaults_args = inspect_args.args

                # adjust the defaults so they match the args (match from the end)
                if inspect_defaults_vals and len(inspect_defaults_vals) != len(inspect_defaults_args):
                    inspect_defaults_args = inspect_defaults_args[-len(inspect_defaults_vals):]

                if inspect_defaults_vals and len(inspect_defaults_vals) != len(inspect_defaults_args):
                    getLogger().warning(
                        'Ignoring default argument values: '
                        'could not find all default values for: \'{}\''.format(function_name))
                    inspect_defaults_vals = []

                function_kwargs = {str(k): v for k, v in zip(inspect_defaults_args, inspect_defaults_vals)} \
                    if inspect_defaults_vals else {str(k): None for k in inspect_defaults_args}

        task_template = cls.task_template.format(
            kwargs_section=cls.kwargs_section,
            input_artifact_section=cls.input_artifact_section,
            function_source=function_source,
            function_kwargs=function_kwargs,
            function_input_artifacts=function_input_artifacts,
            function_name=function_name,
            function_return=function_return)

        temp_dir = repo if repo and os.path.isdir(repo) else None
        with tempfile.NamedTemporaryFile('w', suffix='.py', dir=temp_dir) as temp_file:
            temp_file.write(task_template)
            temp_file.flush()

            requirements_file = None
            if packages and not isinstance(packages, (list, tuple)) and Path(packages).is_file():
                requirements_file = packages
                packages = False

            populate = CreateAndPopulate(
                project_name=project_name,
                task_name=task_name or str(function_name),
                task_type=task_type,
                script=temp_file.name,
                packages=packages if packages is not None else True,
                requirements_file=requirements_file,
                repo=repo,
                branch=branch,
                commit=commit,
                docker=docker,
                docker_args=docker_args,
                docker_bash_setup_script=docker_bash_setup_script,
                output_uri=output_uri,
                add_task_init_call=False,
            )
            entry_point = '{}.py'.format(function_name)
            task = populate.create_task(dry_run=dry_run)

            if dry_run:
                task['script']['diff'] = task_template
                task['script']['entry_point'] = entry_point
                task['script']['working_dir'] = '.'
                task['hyperparams'] = {
                    cls.kwargs_section: {
                        k: dict(section=cls.kwargs_section, name=k, value=str(v))
                        for k, v in (function_kwargs or {}).items()
                    },
                    cls.input_artifact_section: {
                        k: dict(section=cls.input_artifact_section, name=k, value=str(v))
                        for k, v in (function_input_artifacts or {}).items()
                    }
                }
            else:
                # dict.update() returns None, so build the updated script dict first
                script = task.data.script.to_dict()
                script.update({'entry_point': entry_point, 'working_dir': '.', 'diff': task_template})
                task.update_task(task_data={'script': script})
                hyper_parameters = {'{}/{}'.format(cls.kwargs_section, k): str(v)
                                    for k, v in function_kwargs.items()} \
                    if function_kwargs else {}
                hyper_parameters.update(
                    {'{}/{}'.format(cls.input_artifact_section, k): str(v)
                     for k, v in function_input_artifacts.items()}
                    if function_input_artifacts else {}
                )
                task.set_parameters(hyper_parameters)

            return task

    @staticmethod
    def __sanitize_remove_type_hints(function_source):
        # type: (str) -> str
        try:
            import ast
            from ...utilities.lowlevel.astor_unparse import unparse
        except ImportError:
            return function_source

        # noinspection PyBroadException
        try:
            class TypeHintRemover(ast.NodeTransformer):
                def visit_FunctionDef(self, node):
                    # remove the return type definition
                    node.returns = None
                    # remove all argument annotations
                    if node.args.args:
                        for arg in node.args.args:
                            arg.annotation = None
                    return node

            # parse the source code into an AST
            parsed_source = ast.parse(function_source)
            # remove all type annotations, function return type definitions
            # and import statements from 'typing'
            transformed = TypeHintRemover().visit(parsed_source)
            # convert the AST back to source code
            return unparse(transformed).lstrip('\n')
        except Exception:
            # just in case we failed parsing
            return function_source
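
# A minimal end-to-end usage sketch (illustrative assumption, not part of the
# original module):
#
#     def mock_func(a=6, b=9):
#         return a * b
#
#     task = CreateFromFunction.create_task_from_function(
#         mock_func, function_return=['mul'],
#         project_name='examples', task_name='function task')
#     print(task.id)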