From 105380e62fd7651e6b5c17bd66f0b6a701549ef2 Mon Sep 17 00:00:00 2001 From: easythrees Date: Sun, 22 Apr 2018 20:35:29 -0700 Subject: [PATCH 1/3] Addressing the multiprocessing issue on Windows, this involves ensuring files have their proper extensions and re-adding module paths --- bin/conductor | 932 +++++++++++++------------ installers/windows/ConductorClient.nsi | 2 + installers/windows/conductor.bat | 4 +- 3 files changed, 474 insertions(+), 464 deletions(-) diff --git a/bin/conductor b/bin/conductor index ac2fb7d5..0dabd118 100755 --- a/bin/conductor +++ b/bin/conductor @@ -1,462 +1,470 @@ -#!/usr/bin/env python - -import argparse -import imp -import json -import os -import re -import sys - -try: - imp.find_module('conductor') - -except: - sys.path.append(os.path.dirname(os.path.dirname(__file__))) - -import conductor -from conductor.lib import conductor_submit, downloader, downloader2, uploader, uploader_v2, loggeria - - -def parse_args(): - - # Create a parent parser. Arguments that are common across all subparsers can be added to this parser - parent_parser = argparse.ArgumentParser(add_help=False) - - # create the main parser. Not sure why this parser is required, but got parsing tracebacks when excluding it (it gets confused about the arguments provided) - parser = argparse.ArgumentParser(description="description") - subparsers = parser.add_subparsers(title="actions") - - ############################# - # SUBMIT PARSER - ############################# - submit_parser_desciption = "parse submitter arguments" - submit_parser_help = "" - submit_parser = subparsers.add_parser("submit", - parents=[parent_parser], - help=submit_parser_help, - description=submit_parser_desciption, - formatter_class=argparse.RawTextHelpFormatter) - - submit_parser.add_argument("--autoretry_policy", - type=json.loads, - help=('Enables tasks to be automatically retried if they have been preempted and/or ' - 'failed.\n' - 'A policy can be defined for more than one status, e.g. 
"preempted", "failed"\n' - 'A policy for a status must define a "max_retries" key with an integer from 0 ' - 'through 5\n\n' - 'Example 1: --autoretry_policy "{\\"preempted\\": {\\"max_retries\\": 2 } }"\n\n' - 'Example 2: --autoretry_policy "{\\"failed\\": {\\"max_retries\\": 1 },' - '\\"preempted\\": {\\"max_retries\\": 5 }}"\n\n')) - - submit_parser.add_argument("--cmd", - help=('[DEPRECATED. Use --tasks_data instead]\n' - 'The command to execute. This works in tandem with the --frames ' - 'argument to construct a command per task\n' - 'Example: --cmd "Render /tmp/mayafile.ma"')) - - submit_parser.add_argument("--cores", - help="Number of cores that this job should run on (highmem and highcpu machines have a minimum of 2 cores)", - type=int) - - submit_parser.add_argument("--database_filepath", - help=("The filepath to the local md5 caching database. If no filepath " - "is specified, the database will be created in a temp directory. " - "Note that this flag is only active when --local_upload is True.")) - - submit_parser.add_argument("--docker_image", - help="docker image to run render in") - - submit_parser.add_argument("--environment", - help=(r"Specify environment variable statements to be executed on Conductor's " - "render instance.\n\n" - "Example:\n" - "\t--environment PYTHONPATH=/home/user/steve/python:/home/user/marry/python\n\n" - - "Multipe environment variable statements can be made by separating them with a space:\n" - "\t--environment PYTHONPATH=/home/user/marry PATH=/home/usr/bin\n\n" - - "Appending to Conductor's existing environment variables is also possible. " - "Note that the referenced environment variable is encapsulated by single quotes " - "so that it does not get evaluated until it is executed on Conductor's render instance:\n" - "\t--environment PYTHONPATH=/tmp:/home/user:'$PYTHONPATH'\n\n" - "However, it may be desirable to both evaluate your local environment variable " - "AND also append it to Conductor's own. 
Note that one PYTHONPATH is" - " contained with single quotes while the other is not:\n" - "\t--environment PYTHONPATH=/tmp:/home/user:$PYTHONPATH:'$PYTHONPATH'\n\n" - "Note that the syntax for Windows clients will look different in some " - "places, but it's important to remember that ultimately these environment " - "variable statements will be executed on a linux platform on Conductor, " - "which means that when referencing Conductor\'s environment variable, " - "A dollar sign syntax must be used:\n" - "\t--environment PYTHONPATH=\"c:\\My Documents\";%%PYTHONPATH%%;$PYTHONPATH"), - nargs="*", - action=ValidateArgEnv) - - submit_parser.add_argument("--metadata", - help=("Specify metadata dictionary. Note that it's best to wrap this dictionary" - " in single quotes,\n e.g. --metadata '{\"project\":\"Scrooge\"}'"), - type=json.loads) # use the json loads function to parse dict - - submit_parser.add_argument("--force", - help="Do not check for existing uploads, force a new upload", - action='store_true') - - submit_parser.add_argument("--chunk_size", - help="number of frames to execute on a single instance", - default=1) - - submit_parser.add_argument("--frames", - help=('[DEPRECATED. Use --tasks_data instead]\n' - 'The frame range to render. This works in tandem with the --command ' - 'argument to construct a command per task.\n' - 'Example: --frames "10-20" or "100-200x2", or "1, 10-20, 100-200x2"')) - - submit_parser.add_argument("--scout_frames", - help="If specified, these frames will run first and the rest of the" - "job will wait until verification has been given before continuing.") - - submit_parser.add_argument("--local_upload", - help="Trigger files to be uploaded localy", - choices=[False, True], - type=cast_to_bool, - default=None) - - submit_parser.add_argument("--location", - help=('An optional string to indicate which location this submitted ' - 'job should be registered as. 
This option is only relevant ' - 'for conductor accounts which submits jobs from different ' - 'locations (e.g. differing geographic locations or office ' - 'locations that have differing file systems). Typically each ' - 'location would have its own conductor downloader running (for ' - 'downloading completed renders). This location string allows ' - 'each conductor downloader to target only files that match the ' - 'appropriate location. This is potentially useful when you have ' - 'multiple downloaders running but each have differing file ' - 'systems available to them (e.g. downloader1 has /filesystem1 ' - 'available to it, but downloader2 only has /filesystem2 ' - 'available to it). In this case downloader1 should only ' - 'download files that exist on /filesystem1 and downloader2 ' - 'should only download files that exist on /filesystem2. ' - 'This is achieved by including a location string (such as ' - '"location1" or "location2") when submitting jobs (and when ' - 'running a downloader or uploader command).')) - - # TODO: We should remove this arg - submit_parser.add_argument("--machine_type", - help='Type of machine to run on ("standard", "highmem", or "highcpu")') - - submit_parser.add_argument("--preemptible", - choices=[False, True], - type=cast_to_bool, - help='Run job in preemptible instances') - - submit_parser.add_argument("--md5_caching", - help=("Use cached md5s. This can dramatically improve the uploading " - "times, as md5 checking can be very time consuming. Caching md5s " - "allows subsequent uploads (of the same files) to skip the " - "md5 generation process (if the files appear to not have been " - "modified since the last time they were submitted). The cache is " - "stored locally and uses a file's modification time and file size " - "to intelligently guess whether the file has changed. Set this " - "flag to False if there is concern that files may not be getting " - "re-uploaded properly. 
" - "Note that this flag is only active when --local_upload is True."), - choices=[False, True], - type=cast_to_bool, - default=None) - - submit_parser.add_argument("--output_path", - help="path to copy renders to") - - submit_parser.add_argument("--postcmd", - help="Run this command once the entire job is complete and downloaded") - - submit_parser.add_argument("--priority", - help="Set the priority of the submitted job. Default is 5") - - submit_parser.add_argument("--project", - help='The name of the project to submit the job. This will default to "default"') - - submit_parser.add_argument("--software_package_ids", - help=('The ids for the software packages to use for the submitted ' - 'job, e.g. maya or nuke. Packages are identified ' - 'by their ID number, which can be queried from ' - 'Conductor\'s "web api"'), - nargs="*") - - submit_parser.add_argument("--tasks_data", - help=('Specify a task\'s command to execute and it\'s corresponding frames ' - '(if applicable) in a json dictionary format. e.g.\n' - '--tasks_data \"{\\"command\\": \\"Render -s 1 -e 1 /tmp/file.ma\\", ' - '\\"frames\\": \\"1\\"}" \n' - '\nSpecify multiple tasks via multiple json dictionaries, separated by spaces, e.g.\n' - '--tasks_data "{\\"command\\": \\"Render -s 10 -e 12 /tmp/file.ma\\", \\"frames\\": \\"10-12\\"}" ' - '"{\\"command\\": \\"Render -s 10 -e 12 /tmp/file.ma\\", \\"frames\\": \\"10-12\\"}"\n' - '\nNote that that format of this argument must be valid json. This makes for ' - 'piping in more verbose/complex arguments more bearable,\n' - 'Example of reading two task\'s data from an array within a json file:\n' - 'conductor submit --tasks_data "$(cat /tmp/task_cmds.json | jq -c \'.[1]\')"'), - type=json.loads, - nargs="*") - - submit_parser.add_argument("--job_title", - help=("The title to name the job. 
This is the title " - "that will be displayed in the web ui")) - - submit_parser.add_argument("--upload_file", - help=("A path to a text file whose contents list paths to upload to " - "conductor. There should be one path per line in the text file. " - "Valid paths are to files, directories (symlinked or not), as " - "well as glob expressions. e.g * wildcards accepted")) - - submit_parser.add_argument("--upload_only", - help="Only upload the files, don't start the render", - action='store_true') - - submit_parser.add_argument("--upload_paths", - help="Paths to upload", - nargs="*") - - submit_parser.add_argument("--user", - help="Username to submit as") - - submit_parser.add_argument("--notify", - help="Who to notify when job is complete", - nargs="+") - - submit_parser.add_argument("--slack_notify", - help="Slack recipients to notify when job is complete", - nargs="+") - - submit_parser.add_argument("--log_level", - choices=loggeria.LEVELS, - help="The logging level to display") - - submit_parser.add_argument("--log_dir", - help=("When provided, will write a log file to " - "the provided directory. This will be a " - "rotating log, creating a new log file " - "everyday, while storing the last 7 days " - "of logs")) - - submit_parser.add_argument("--max_instances", - help="Maximum number of instances for this job") - - submit_parser.set_defaults(func=run_submit) - - ############################# - # UPLOADER PARSER - ############################# - uploader_parser_desciption = "parse uploader arguments" - uploader_parser_help = "" - - uploader_parser = subparsers.add_parser("uploader", parents=[parent_parser], - help=uploader_parser_help, - description=uploader_parser_desciption, - formatter_class=argparse.RawTextHelpFormatter) - - uploader_parser.add_argument("--location", - help=('An optional string to indicate which location this uploader ' - 'executable should register as. 
This option is only relevant ' - 'for conductor accounts which submits jobs from different locations ' - '(e.g. differing geographic locations or office locations that have differing file systems).' - ' Typically each location would have its own conductor uploader process running. This location ' - 'string allows each uploader to target specific upload jobs (files to upload) that are appropriate ' - 'for it. This is potentially useful as each location may have differing file systems ' - 'available to it (e.g. uploader1 has /filesystem1 available to it, but uploader2 only ' - 'has /filesystem2 available to it). In this case uploader1 should only upload files ' - 'that exist on /filesystem1 and uploader2 should only upload files that exist on /filesystem2. ' - 'This is achieved by including a location argument (such as "location1" or "location2") ' - 'when submitting jobs, as well as when launching this uploader command.')) - - uploader_parser.add_argument("--log_level", - choices=loggeria.LEVELS, - help="The logging level to display") - - uploader_parser.add_argument("--log_dir", - help=("When provided, will write a log file to " - "the provided directory. 
This will be a " - "rotating log, creating a new log file " - "everyday, while storing the last 7 days " - "of logs")) - - uploader_parser.add_argument("--thread_count", - type=int, - default=conductor.CONFIG.get("thread_count"), - help=('The number of threads that should download simultaneously')) - - uploader_parser.add_argument("--alt", - help=('Run an alternative version of the downloader'), - action='store_true') - - uploader_parser.set_defaults(func=run_uploader) - - ############################# - # DOWNLOADER PARSER - ############################# - - downloader_parser_desciption = "parse downloader arguments" - downloader_parser_help = "" - - downloader_parser = subparsers.add_parser("downloader", parents=[parent_parser], - help=downloader_parser_help, - description=downloader_parser_desciption, - formatter_class=argparse.RawTextHelpFormatter) - - downloader_parser.add_argument("--job_id", - help=("The job id(s) to download. When specified " - "will only download those jobs and terminate " - "afterwards"), - action='append') - - downloader_parser.add_argument("--task_id", - help="Manually download output for this task") - - downloader_parser.add_argument("--output", - help="Override for the output directory") - - downloader_parser.add_argument("--location", - default=conductor.CONFIG.get("location"), - help=('An optional string to indicate which location this downloader ' - 'executable should register as. This option is only relevant for ' - 'conductor accounts which submits jobs from different locations ' - '(e.g. differing geographic locations or office locations that ' - 'have differing file systems). Typically each location would ' - 'have its own conductor downloader process running. This location ' - 'argument allows each downloader to target specific jobs (to ' - 'download upon job-completion) that match its appropriate location. 
' - 'Essentially this allows the location of which a job was submitted ' - 'from to also be the destination in which to deliver completed ' - 'renders to (which would typically be the desired behavior).')) - - downloader_parser.add_argument("--project", - default=conductor.CONFIG.get("project"), - help=('An optional string to indicate which project that this downloader executable should register as.')) - - downloader_parser.add_argument("--log_level", - choices=loggeria.LEVELS, - default=conductor.CONFIG.get("log_level"), - help="The logging level to display") - - downloader_parser.add_argument("--log_dir", - default=conductor.CONFIG.get("log_dir"), - help=("When provided, will write a log file to " - "the provided directory. This will be a " - "rotating log, creating a new log file " - "everyday, while storing the last 7 days " - "of logs")) - - downloader_parser.add_argument("--thread_count", - type=int, - default=conductor.CONFIG.get("thread_count"), - help=('The number of threads that should download simultaneously')) - - downloader_parser.add_argument("--alt", - help=('Run an alternative version of the downloader'), - action='store_true') - - downloader_parser.set_defaults(func=run_downloader) - - return parser.parse_args() - - -def cast_to_bool(string): - ''' - Ensure that the argument provided is either "True" or "False (or "true" or - "false") and convert that argument to an actual bool value (True or False). - ''' - string_lower = string.lower() - if string_lower == "true": - return True - elif string_lower == "false": - return False - raise argparse.ArgumentTypeError('Argument must be True or False') - - -class ValidateArgEnv(argparse.Action): - ''' - Validate the "env" argument that a user has provided, ensuring that it - adheres to proper syntax, and ultimately produces a dictionary object - consisting of desired environment variable names and their corresponding - values. 
- - Example: - {"PYTHONPATH": "/tmp/files1:$PYTHONPATH", - "PATH": "/usr/bin":$PATH"} - - ''' - - # Regular expression to validate argument construction = - # Example: PYTHONPATH=/tmp:$PYTHONPATH - rx_env_var = r'\s*(?P[^\s=]+)\s*=(?P[^=]+)$' - - def __call__(self, parser, namespace, values, option_string): - ''' - Read each variable definition and validate that it's constructed - properly. Populate a dictionary where they keys are the variable names - and the values are the variable values - ''' - env_variables = {} - for variable_definition in values: - re_result = re.match(self.rx_env_var, variable_definition) - if not re_result: - msg = ("Invalid variable assignment: '%s'\n" - "\tUse construct: = e.g " - "PYTHONPATH=/tmp:$PYTHONPATH" % variable_definition) - raise argparse.ArgumentError(self, msg) - - var_name = re_result.group("var_name").strip() - var_value = re_result.group("var_value").strip() - env_variables[var_name] = var_value - - setattr(namespace, self.dest, env_variables) - - -class CastLogLevelName(argparse.Action): - ''' - Cast a log level name to it's contstant value, e.g. - "INFO" --> logging.INFO - ''' - - def __call__(self, parser, namespace, values, option_string): - setattr(namespace, self.dest, loggeria.LEVEL_MAP[values]) - - -def run_submit(args): - conductor_submit.run_submit(args) - - -def run_uploader(args): - args_dict = vars(args) - if sys.platform == "win32": - uploader.run_uploader(args) - if args_dict.get("alt"): - uploader_v2.run_uploader(args) - else: - uploader.run_uploader(args) - - -def run_downloader(args): - ''' - Convert the argparse Namespace object to a dictionary and run the downloader - with the given args. 
- ''' - # Convert Namespace args object to args dict - args_dict = vars(args) - - # Code switcher between new downloader and old downloader - # HARD set windows users to old downloader - if sys.platform == "win32": - return downloader2.run_downloader(args_dict) - if args_dict.get("task_id") and not args_dict.get("job_id"): - raise argparse.ArgumentTypeError('Must supply a job_id with task_id.') - - if args_dict.get("job_id") or args_dict.get("alt"): - return downloader2.run_downloader(args_dict) - else: - return downloader.run_downloader(args_dict) - - -if __name__ == '__main__': - args = parse_args() - args.func(args) +#!/usr/bin/env python + +import argparse +import imp +import json +import os +import re +import sys + +try: + imp.find_module('conductor') + +except: + sys.path.append(os.path.dirname(os.path.dirname(__file__))) + +# On Windows, due to the quirks of the multiprocessing module +# this file is installed as 'conductor.py' and to ensure the +# environment gets passed properly, add the module to the path +# list again +if os.name == 'nt': + sys.path.insert(0, os.path.dirname(os.path.dirname(__file__))) + +import conductor +from conductor.lib import conductor_submit, downloader, downloader2, uploader, uploader_v2, loggeria + + +def parse_args(): + + # Create a parent parser. Arguments that are common across all subparsers can be added to this parser + parent_parser = argparse.ArgumentParser(add_help=False) + + # create the main parser. 
Not sure why this parser is required, but got parsing tracebacks when excluding it (it gets confused about the arguments provided) + parser = argparse.ArgumentParser(description="description") + subparsers = parser.add_subparsers(title="actions") + + ############################# + # SUBMIT PARSER + ############################# + submit_parser_desciption = "parse submitter arguments" + submit_parser_help = "" + submit_parser = subparsers.add_parser("submit", + parents=[parent_parser], + help=submit_parser_help, + description=submit_parser_desciption, + formatter_class=argparse.RawTextHelpFormatter) + + submit_parser.add_argument("--autoretry_policy", + type=json.loads, + help=('Enables tasks to be automatically retried if they have been preempted and/or ' + 'failed.\n' + 'A policy can be defined for more than one status, e.g. "preempted", "failed"\n' + 'A policy for a status must define a "max_retries" key with an integer from 0 ' + 'through 5\n\n' + 'Example 1: --autoretry_policy "{\\"preempted\\": {\\"max_retries\\": 2 } }"\n\n' + 'Example 2: --autoretry_policy "{\\"failed\\": {\\"max_retries\\": 1 },' + '\\"preempted\\": {\\"max_retries\\": 5 }}"\n\n')) + + submit_parser.add_argument("--cmd", + help=('[DEPRECATED. Use --tasks_data instead]\n' + 'The command to execute. This works in tandem with the --frames ' + 'argument to construct a command per task\n' + 'Example: --cmd "Render /tmp/mayafile.ma"')) + + submit_parser.add_argument("--cores", + help="Number of cores that this job should run on (highmem and highcpu machines have a minimum of 2 cores)", + type=int) + + submit_parser.add_argument("--database_filepath", + help=("The filepath to the local md5 caching database. If no filepath " + "is specified, the database will be created in a temp directory. 
" + "Note that this flag is only active when --local_upload is True.")) + + submit_parser.add_argument("--docker_image", + help="docker image to run render in") + + submit_parser.add_argument("--environment", + help=(r"Specify environment variable statements to be executed on Conductor's " + "render instance.\n\n" + "Example:\n" + "\t--environment PYTHONPATH=/home/user/steve/python:/home/user/marry/python\n\n" + + "Multipe environment variable statements can be made by separating them with a space:\n" + "\t--environment PYTHONPATH=/home/user/marry PATH=/home/usr/bin\n\n" + + "Appending to Conductor's existing environment variables is also possible. " + "Note that the referenced environment variable is encapsulated by single quotes " + "so that it does not get evaluated until it is executed on Conductor's render instance:\n" + "\t--environment PYTHONPATH=/tmp:/home/user:'$PYTHONPATH'\n\n" + "However, it may be desirable to both evaluate your local environment variable " + "AND also append it to Conductor's own. Note that one PYTHONPATH is" + " contained with single quotes while the other is not:\n" + "\t--environment PYTHONPATH=/tmp:/home/user:$PYTHONPATH:'$PYTHONPATH'\n\n" + "Note that the syntax for Windows clients will look different in some " + "places, but it's important to remember that ultimately these environment " + "variable statements will be executed on a linux platform on Conductor, " + "which means that when referencing Conductor\'s environment variable, " + "A dollar sign syntax must be used:\n" + "\t--environment PYTHONPATH=\"c:\\My Documents\";%%PYTHONPATH%%;$PYTHONPATH"), + nargs="*", + action=ValidateArgEnv) + + submit_parser.add_argument("--metadata", + help=("Specify metadata dictionary. Note that it's best to wrap this dictionary" + " in single quotes,\n e.g. 
--metadata '{\"project\":\"Scrooge\"}'"), + type=json.loads) # use the json loads function to parse dict + + submit_parser.add_argument("--force", + help="Do not check for existing uploads, force a new upload", + action='store_true') + + submit_parser.add_argument("--chunk_size", + help="number of frames to execute on a single instance", + default=1) + + submit_parser.add_argument("--frames", + help=('[DEPRECATED. Use --tasks_data instead]\n' + 'The frame range to render. This works in tandem with the --command ' + 'argument to construct a command per task.\n' + 'Example: --frames "10-20" or "100-200x2", or "1, 10-20, 100-200x2"')) + + submit_parser.add_argument("--scout_frames", + help="If specified, these frames will run first and the rest of the" + "job will wait until verification has been given before continuing.") + + submit_parser.add_argument("--local_upload", + help="Trigger files to be uploaded localy", + choices=[False, True], + type=cast_to_bool, + default=None) + + submit_parser.add_argument("--location", + help=('An optional string to indicate which location this submitted ' + 'job should be registered as. This option is only relevant ' + 'for conductor accounts which submits jobs from different ' + 'locations (e.g. differing geographic locations or office ' + 'locations that have differing file systems). Typically each ' + 'location would have its own conductor downloader running (for ' + 'downloading completed renders). This location string allows ' + 'each conductor downloader to target only files that match the ' + 'appropriate location. This is potentially useful when you have ' + 'multiple downloaders running but each have differing file ' + 'systems available to them (e.g. downloader1 has /filesystem1 ' + 'available to it, but downloader2 only has /filesystem2 ' + 'available to it). In this case downloader1 should only ' + 'download files that exist on /filesystem1 and downloader2 ' + 'should only download files that exist on /filesystem2. 
' + 'This is achieved by including a location string (such as ' + '"location1" or "location2") when submitting jobs (and when ' + 'running a downloader or uploader command).')) + + # TODO: We should remove this arg + submit_parser.add_argument("--machine_type", + help='Type of machine to run on ("standard", "highmem", or "highcpu")') + + submit_parser.add_argument("--preemptible", + choices=[False, True], + type=cast_to_bool, + help='Run job in preemptible instances') + + submit_parser.add_argument("--md5_caching", + help=("Use cached md5s. This can dramatically improve the uploading " + "times, as md5 checking can be very time consuming. Caching md5s " + "allows subsequent uploads (of the same files) to skip the " + "md5 generation process (if the files appear to not have been " + "modified since the last time they were submitted). The cache is " + "stored locally and uses a file's modification time and file size " + "to intelligently guess whether the file has changed. Set this " + "flag to False if there is concern that files may not be getting " + "re-uploaded properly. " + "Note that this flag is only active when --local_upload is True."), + choices=[False, True], + type=cast_to_bool, + default=None) + + submit_parser.add_argument("--output_path", + help="path to copy renders to") + + submit_parser.add_argument("--postcmd", + help="Run this command once the entire job is complete and downloaded") + + submit_parser.add_argument("--priority", + help="Set the priority of the submitted job. Default is 5") + + submit_parser.add_argument("--project", + help='The name of the project to submit the job. This will default to "default"') + + submit_parser.add_argument("--software_package_ids", + help=('The ids for the software packages to use for the submitted ' + 'job, e.g. maya or nuke. 
Packages are identified ' + 'by their ID number, which can be queried from ' + 'Conductor\'s "web api"'), + nargs="*") + + submit_parser.add_argument("--tasks_data", + help=('Specify a task\'s command to execute and it\'s corresponding frames ' + '(if applicable) in a json dictionary format. e.g.\n' + '--tasks_data \"{\\"command\\": \\"Render -s 1 -e 1 /tmp/file.ma\\", ' + '\\"frames\\": \\"1\\"}" \n' + '\nSpecify multiple tasks via multiple json dictionaries, separated by spaces, e.g.\n' + '--tasks_data "{\\"command\\": \\"Render -s 10 -e 12 /tmp/file.ma\\", \\"frames\\": \\"10-12\\"}" ' + '"{\\"command\\": \\"Render -s 10 -e 12 /tmp/file.ma\\", \\"frames\\": \\"10-12\\"}"\n' + '\nNote that that format of this argument must be valid json. This makes for ' + 'piping in more verbose/complex arguments more bearable,\n' + 'Example of reading two task\'s data from an array within a json file:\n' + 'conductor submit --tasks_data "$(cat /tmp/task_cmds.json | jq -c \'.[1]\')"'), + type=json.loads, + nargs="*") + + submit_parser.add_argument("--job_title", + help=("The title to name the job. This is the title " + "that will be displayed in the web ui")) + + submit_parser.add_argument("--upload_file", + help=("A path to a text file whose contents list paths to upload to " + "conductor. There should be one path per line in the text file. " + "Valid paths are to files, directories (symlinked or not), as " + "well as glob expressions. 
e.g * wildcards accepted")) + + submit_parser.add_argument("--upload_only", + help="Only upload the files, don't start the render", + action='store_true') + + submit_parser.add_argument("--upload_paths", + help="Paths to upload", + nargs="*") + + submit_parser.add_argument("--user", + help="Username to submit as") + + submit_parser.add_argument("--notify", + help="Who to notify when job is complete", + nargs="+") + + submit_parser.add_argument("--slack_notify", + help="Slack recipients to notify when job is complete", + nargs="+") + + submit_parser.add_argument("--log_level", + choices=loggeria.LEVELS, + help="The logging level to display") + + submit_parser.add_argument("--log_dir", + help=("When provided, will write a log file to " + "the provided directory. This will be a " + "rotating log, creating a new log file " + "everyday, while storing the last 7 days " + "of logs")) + + submit_parser.add_argument("--max_instances", + help="Maximum number of instances for this job") + + submit_parser.set_defaults(func=run_submit) + + ############################# + # UPLOADER PARSER + ############################# + uploader_parser_desciption = "parse uploader arguments" + uploader_parser_help = "" + + uploader_parser = subparsers.add_parser("uploader", parents=[parent_parser], + help=uploader_parser_help, + description=uploader_parser_desciption, + formatter_class=argparse.RawTextHelpFormatter) + + uploader_parser.add_argument("--location", + help=('An optional string to indicate which location this uploader ' + 'executable should register as. This option is only relevant ' + 'for conductor accounts which submits jobs from different locations ' + '(e.g. differing geographic locations or office locations that have differing file systems).' + ' Typically each location would have its own conductor uploader process running. This location ' + 'string allows each uploader to target specific upload jobs (files to upload) that are appropriate ' + 'for it. 
This is potentially useful as each location may have differing file systems ' + 'available to it (e.g. uploader1 has /filesystem1 available to it, but uploader2 only ' + 'has /filesystem2 available to it). In this case uploader1 should only upload files ' + 'that exist on /filesystem1 and uploader2 should only upload files that exist on /filesystem2. ' + 'This is achieved by including a location argument (such as "location1" or "location2") ' + 'when submitting jobs, as well as when launching this uploader command.')) + + uploader_parser.add_argument("--log_level", + choices=loggeria.LEVELS, + help="The logging level to display") + + uploader_parser.add_argument("--log_dir", + help=("When provided, will write a log file to " + "the provided directory. This will be a " + "rotating log, creating a new log file " + "everyday, while storing the last 7 days " + "of logs")) + + uploader_parser.add_argument("--thread_count", + type=int, + default=conductor.CONFIG.get("thread_count"), + help=('The number of threads that should download simultaneously')) + + uploader_parser.add_argument("--alt", + help=('Run an alternative version of the downloader'), + action='store_true') + + uploader_parser.set_defaults(func=run_uploader) + + ############################# + # DOWNLOADER PARSER + ############################# + + downloader_parser_desciption = "parse downloader arguments" + downloader_parser_help = "" + + downloader_parser = subparsers.add_parser("downloader", parents=[parent_parser], + help=downloader_parser_help, + description=downloader_parser_desciption, + formatter_class=argparse.RawTextHelpFormatter) + + downloader_parser.add_argument("--job_id", + help=("The job id(s) to download. 
When specified " + "will only download those jobs and terminate " + "afterwards"), + action='append') + + downloader_parser.add_argument("--task_id", + help="Manually download output for this task") + + downloader_parser.add_argument("--output", + help="Override for the output directory") + + downloader_parser.add_argument("--location", + default=conductor.CONFIG.get("location"), + help=('An optional string to indicate which location this downloader ' + 'executable should register as. This option is only relevant for ' + 'conductor accounts which submits jobs from different locations ' + '(e.g. differing geographic locations or office locations that ' + 'have differing file systems). Typically each location would ' + 'have its own conductor downloader process running. This location ' + 'argument allows each downloader to target specific jobs (to ' + 'download upon job-completion) that match its appropriate location. ' + 'Essentially this allows the location of which a job was submitted ' + 'from to also be the destination in which to deliver completed ' + 'renders to (which would typically be the desired behavior).')) + + downloader_parser.add_argument("--project", + default=conductor.CONFIG.get("project"), + help=('An optional string to indicate which project that this downloader executable should register as.')) + + downloader_parser.add_argument("--log_level", + choices=loggeria.LEVELS, + default=conductor.CONFIG.get("log_level"), + help="The logging level to display") + + downloader_parser.add_argument("--log_dir", + default=conductor.CONFIG.get("log_dir"), + help=("When provided, will write a log file to " + "the provided directory. 
This will be a " + "rotating log, creating a new log file " + "everyday, while storing the last 7 days " + "of logs")) + + downloader_parser.add_argument("--thread_count", + type=int, + default=conductor.CONFIG.get("thread_count"), + help=('The number of threads that should download simultaneously')) + + downloader_parser.add_argument("--alt", + help=('Run an alternative version of the downloader'), + action='store_true') + + downloader_parser.set_defaults(func=run_downloader) + + return parser.parse_args() + + +def cast_to_bool(string): + ''' + Ensure that the argument provided is either "True" or "False (or "true" or + "false") and convert that argument to an actual bool value (True or False). + ''' + string_lower = string.lower() + if string_lower == "true": + return True + elif string_lower == "false": + return False + raise argparse.ArgumentTypeError('Argument must be True or False') + + +class ValidateArgEnv(argparse.Action): + ''' + Validate the "env" argument that a user has provided, ensuring that it + adheres to proper syntax, and ultimately produces a dictionary object + consisting of desired environment variable names and their corresponding + values. + + Example: + {"PYTHONPATH": "/tmp/files1:$PYTHONPATH", + "PATH": "/usr/bin":$PATH"} + + ''' + + # Regular expression to validate argument construction = + # Example: PYTHONPATH=/tmp:$PYTHONPATH + rx_env_var = r'\s*(?P[^\s=]+)\s*=(?P[^=]+)$' + + def __call__(self, parser, namespace, values, option_string): + ''' + Read each variable definition and validate that it's constructed + properly. 
Populate a dictionary where they keys are the variable names + and the values are the variable values + ''' + env_variables = {} + for variable_definition in values: + re_result = re.match(self.rx_env_var, variable_definition) + if not re_result: + msg = ("Invalid variable assignment: '%s'\n" + "\tUse construct: = e.g " + "PYTHONPATH=/tmp:$PYTHONPATH" % variable_definition) + raise argparse.ArgumentError(self, msg) + + var_name = re_result.group("var_name").strip() + var_value = re_result.group("var_value").strip() + env_variables[var_name] = var_value + + setattr(namespace, self.dest, env_variables) + + +class CastLogLevelName(argparse.Action): + ''' + Cast a log level name to it's contstant value, e.g. + "INFO" --> logging.INFO + ''' + + def __call__(self, parser, namespace, values, option_string): + setattr(namespace, self.dest, loggeria.LEVEL_MAP[values]) + + +def run_submit(args): + conductor_submit.run_submit(args) + + +def run_uploader(args): + args_dict = vars(args) + if sys.platform == "win32": + uploader.run_uploader(args) + if args_dict.get("alt"): + uploader_v2.run_uploader(args) + else: + uploader.run_uploader(args) + + +def run_downloader(args): + ''' + Convert the argparse Namespace object to a dictionary and run the downloader + with the given args. 
+ ''' + # Convert Namespace args object to args dict + args_dict = vars(args) + + # Code switcher between new downloader and old downloader + # HARD set windows users to old downloader + if sys.platform == "win32": + return downloader.run_downloader(args_dict) + # return downloader2.run_downloader(args_dict) + if args_dict.get("task_id") and not args_dict.get("job_id"): + raise argparse.ArgumentTypeError('Must supply a job_id with task_id.') + + if args_dict.get("job_id") or args_dict.get("alt"): + return downloader2.run_downloader(args_dict) + else: + return downloader.run_downloader(args_dict) + + +if __name__ == '__main__': + args = parse_args() + args.func(args) diff --git a/installers/windows/ConductorClient.nsi b/installers/windows/ConductorClient.nsi index cf60eff1..0dcb8309 100644 --- a/installers/windows/ConductorClient.nsi +++ b/installers/windows/ConductorClient.nsi @@ -74,6 +74,8 @@ ${INSTALL_TYPE} SetOverwrite ifnewer SetOutPath "$INSTDIR" File /r /x ".git" "Conductor" +File /oname=Conductor\bin\conductor.py "Conductor\bin\conductor" +Delete "Conductor\bin\conductor" ${EnvVarUpdate} $0 "PATH" "A" "HKLM" "$INSTDIR\Conductor" ${EnvVarUpdate} $0 "PYTHONPATH" "A" "HKLM" "$INSTDIR\Conductor" diff --git a/installers/windows/conductor.bat b/installers/windows/conductor.bat index cd2ece01..78ba8c80 100644 --- a/installers/windows/conductor.bat +++ b/installers/windows/conductor.bat @@ -1,2 +1,2 @@ -@echo off -"%~dp0\python\python.exe" "%~dp0\bin\conductor" %* +@echo off +"%~dp0\python\python.exe" "%~dp0\bin\conductor.py" %* From 9264453ccc42c1d3cc63e55acb645480a9087abd Mon Sep 17 00:00:00 2001 From: easythrees Date: Sun, 22 Apr 2018 20:41:31 -0700 Subject: [PATCH 2/3] Attemoted fix at line endings --- bin/conductor | 471 +------------------------------------------------- 1 file changed, 1 insertion(+), 470 deletions(-) diff --git a/bin/conductor b/bin/conductor index 0dabd118..09c4c2e4 100755 --- a/bin/conductor +++ b/bin/conductor @@ -1,470 +1 @@ 
-#!/usr/bin/env python - -import argparse -import imp -import json -import os -import re -import sys - -try: - imp.find_module('conductor') - -except: - sys.path.append(os.path.dirname(os.path.dirname(__file__))) - -# On Windows, due to the quirks of the multiprocessing module -# this file is installed as 'conductor.py' and to ensure the -# environment gets passed properly, add the module to the path -# list again -if os.name == 'nt': - sys.path.insert(0, os.path.dirname(os.path.dirname(__file__))) - -import conductor -from conductor.lib import conductor_submit, downloader, downloader2, uploader, uploader_v2, loggeria - - -def parse_args(): - - # Create a parent parser. Arguments that are common across all subparsers can be added to this parser - parent_parser = argparse.ArgumentParser(add_help=False) - - # create the main parser. Not sure why this parser is required, but got parsing tracebacks when excluding it (it gets confused about the arguments provided) - parser = argparse.ArgumentParser(description="description") - subparsers = parser.add_subparsers(title="actions") - - ############################# - # SUBMIT PARSER - ############################# - submit_parser_desciption = "parse submitter arguments" - submit_parser_help = "" - submit_parser = subparsers.add_parser("submit", - parents=[parent_parser], - help=submit_parser_help, - description=submit_parser_desciption, - formatter_class=argparse.RawTextHelpFormatter) - - submit_parser.add_argument("--autoretry_policy", - type=json.loads, - help=('Enables tasks to be automatically retried if they have been preempted and/or ' - 'failed.\n' - 'A policy can be defined for more than one status, e.g. 
"preempted", "failed"\n' - 'A policy for a status must define a "max_retries" key with an integer from 0 ' - 'through 5\n\n' - 'Example 1: --autoretry_policy "{\\"preempted\\": {\\"max_retries\\": 2 } }"\n\n' - 'Example 2: --autoretry_policy "{\\"failed\\": {\\"max_retries\\": 1 },' - '\\"preempted\\": {\\"max_retries\\": 5 }}"\n\n')) - - submit_parser.add_argument("--cmd", - help=('[DEPRECATED. Use --tasks_data instead]\n' - 'The command to execute. This works in tandem with the --frames ' - 'argument to construct a command per task\n' - 'Example: --cmd "Render /tmp/mayafile.ma"')) - - submit_parser.add_argument("--cores", - help="Number of cores that this job should run on (highmem and highcpu machines have a minimum of 2 cores)", - type=int) - - submit_parser.add_argument("--database_filepath", - help=("The filepath to the local md5 caching database. If no filepath " - "is specified, the database will be created in a temp directory. " - "Note that this flag is only active when --local_upload is True.")) - - submit_parser.add_argument("--docker_image", - help="docker image to run render in") - - submit_parser.add_argument("--environment", - help=(r"Specify environment variable statements to be executed on Conductor's " - "render instance.\n\n" - "Example:\n" - "\t--environment PYTHONPATH=/home/user/steve/python:/home/user/marry/python\n\n" - - "Multipe environment variable statements can be made by separating them with a space:\n" - "\t--environment PYTHONPATH=/home/user/marry PATH=/home/usr/bin\n\n" - - "Appending to Conductor's existing environment variables is also possible. " - "Note that the referenced environment variable is encapsulated by single quotes " - "so that it does not get evaluated until it is executed on Conductor's render instance:\n" - "\t--environment PYTHONPATH=/tmp:/home/user:'$PYTHONPATH'\n\n" - "However, it may be desirable to both evaluate your local environment variable " - "AND also append it to Conductor's own. 
Note that one PYTHONPATH is" - " contained with single quotes while the other is not:\n" - "\t--environment PYTHONPATH=/tmp:/home/user:$PYTHONPATH:'$PYTHONPATH'\n\n" - "Note that the syntax for Windows clients will look different in some " - "places, but it's important to remember that ultimately these environment " - "variable statements will be executed on a linux platform on Conductor, " - "which means that when referencing Conductor\'s environment variable, " - "A dollar sign syntax must be used:\n" - "\t--environment PYTHONPATH=\"c:\\My Documents\";%%PYTHONPATH%%;$PYTHONPATH"), - nargs="*", - action=ValidateArgEnv) - - submit_parser.add_argument("--metadata", - help=("Specify metadata dictionary. Note that it's best to wrap this dictionary" - " in single quotes,\n e.g. --metadata '{\"project\":\"Scrooge\"}'"), - type=json.loads) # use the json loads function to parse dict - - submit_parser.add_argument("--force", - help="Do not check for existing uploads, force a new upload", - action='store_true') - - submit_parser.add_argument("--chunk_size", - help="number of frames to execute on a single instance", - default=1) - - submit_parser.add_argument("--frames", - help=('[DEPRECATED. Use --tasks_data instead]\n' - 'The frame range to render. This works in tandem with the --command ' - 'argument to construct a command per task.\n' - 'Example: --frames "10-20" or "100-200x2", or "1, 10-20, 100-200x2"')) - - submit_parser.add_argument("--scout_frames", - help="If specified, these frames will run first and the rest of the" - "job will wait until verification has been given before continuing.") - - submit_parser.add_argument("--local_upload", - help="Trigger files to be uploaded localy", - choices=[False, True], - type=cast_to_bool, - default=None) - - submit_parser.add_argument("--location", - help=('An optional string to indicate which location this submitted ' - 'job should be registered as. 
This option is only relevant ' - 'for conductor accounts which submits jobs from different ' - 'locations (e.g. differing geographic locations or office ' - 'locations that have differing file systems). Typically each ' - 'location would have its own conductor downloader running (for ' - 'downloading completed renders). This location string allows ' - 'each conductor downloader to target only files that match the ' - 'appropriate location. This is potentially useful when you have ' - 'multiple downloaders running but each have differing file ' - 'systems available to them (e.g. downloader1 has /filesystem1 ' - 'available to it, but downloader2 only has /filesystem2 ' - 'available to it). In this case downloader1 should only ' - 'download files that exist on /filesystem1 and downloader2 ' - 'should only download files that exist on /filesystem2. ' - 'This is achieved by including a location string (such as ' - '"location1" or "location2") when submitting jobs (and when ' - 'running a downloader or uploader command).')) - - # TODO: We should remove this arg - submit_parser.add_argument("--machine_type", - help='Type of machine to run on ("standard", "highmem", or "highcpu")') - - submit_parser.add_argument("--preemptible", - choices=[False, True], - type=cast_to_bool, - help='Run job in preemptible instances') - - submit_parser.add_argument("--md5_caching", - help=("Use cached md5s. This can dramatically improve the uploading " - "times, as md5 checking can be very time consuming. Caching md5s " - "allows subsequent uploads (of the same files) to skip the " - "md5 generation process (if the files appear to not have been " - "modified since the last time they were submitted). The cache is " - "stored locally and uses a file's modification time and file size " - "to intelligently guess whether the file has changed. Set this " - "flag to False if there is concern that files may not be getting " - "re-uploaded properly. 
" - "Note that this flag is only active when --local_upload is True."), - choices=[False, True], - type=cast_to_bool, - default=None) - - submit_parser.add_argument("--output_path", - help="path to copy renders to") - - submit_parser.add_argument("--postcmd", - help="Run this command once the entire job is complete and downloaded") - - submit_parser.add_argument("--priority", - help="Set the priority of the submitted job. Default is 5") - - submit_parser.add_argument("--project", - help='The name of the project to submit the job. This will default to "default"') - - submit_parser.add_argument("--software_package_ids", - help=('The ids for the software packages to use for the submitted ' - 'job, e.g. maya or nuke. Packages are identified ' - 'by their ID number, which can be queried from ' - 'Conductor\'s "web api"'), - nargs="*") - - submit_parser.add_argument("--tasks_data", - help=('Specify a task\'s command to execute and it\'s corresponding frames ' - '(if applicable) in a json dictionary format. e.g.\n' - '--tasks_data \"{\\"command\\": \\"Render -s 1 -e 1 /tmp/file.ma\\", ' - '\\"frames\\": \\"1\\"}" \n' - '\nSpecify multiple tasks via multiple json dictionaries, separated by spaces, e.g.\n' - '--tasks_data "{\\"command\\": \\"Render -s 10 -e 12 /tmp/file.ma\\", \\"frames\\": \\"10-12\\"}" ' - '"{\\"command\\": \\"Render -s 10 -e 12 /tmp/file.ma\\", \\"frames\\": \\"10-12\\"}"\n' - '\nNote that that format of this argument must be valid json. This makes for ' - 'piping in more verbose/complex arguments more bearable,\n' - 'Example of reading two task\'s data from an array within a json file:\n' - 'conductor submit --tasks_data "$(cat /tmp/task_cmds.json | jq -c \'.[1]\')"'), - type=json.loads, - nargs="*") - - submit_parser.add_argument("--job_title", - help=("The title to name the job. 
This is the title " - "that will be displayed in the web ui")) - - submit_parser.add_argument("--upload_file", - help=("A path to a text file whose contents list paths to upload to " - "conductor. There should be one path per line in the text file. " - "Valid paths are to files, directories (symlinked or not), as " - "well as glob expressions. e.g * wildcards accepted")) - - submit_parser.add_argument("--upload_only", - help="Only upload the files, don't start the render", - action='store_true') - - submit_parser.add_argument("--upload_paths", - help="Paths to upload", - nargs="*") - - submit_parser.add_argument("--user", - help="Username to submit as") - - submit_parser.add_argument("--notify", - help="Who to notify when job is complete", - nargs="+") - - submit_parser.add_argument("--slack_notify", - help="Slack recipients to notify when job is complete", - nargs="+") - - submit_parser.add_argument("--log_level", - choices=loggeria.LEVELS, - help="The logging level to display") - - submit_parser.add_argument("--log_dir", - help=("When provided, will write a log file to " - "the provided directory. This will be a " - "rotating log, creating a new log file " - "everyday, while storing the last 7 days " - "of logs")) - - submit_parser.add_argument("--max_instances", - help="Maximum number of instances for this job") - - submit_parser.set_defaults(func=run_submit) - - ############################# - # UPLOADER PARSER - ############################# - uploader_parser_desciption = "parse uploader arguments" - uploader_parser_help = "" - - uploader_parser = subparsers.add_parser("uploader", parents=[parent_parser], - help=uploader_parser_help, - description=uploader_parser_desciption, - formatter_class=argparse.RawTextHelpFormatter) - - uploader_parser.add_argument("--location", - help=('An optional string to indicate which location this uploader ' - 'executable should register as. 
This option is only relevant ' - 'for conductor accounts which submits jobs from different locations ' - '(e.g. differing geographic locations or office locations that have differing file systems).' - ' Typically each location would have its own conductor uploader process running. This location ' - 'string allows each uploader to target specific upload jobs (files to upload) that are appropriate ' - 'for it. This is potentially useful as each location may have differing file systems ' - 'available to it (e.g. uploader1 has /filesystem1 available to it, but uploader2 only ' - 'has /filesystem2 available to it). In this case uploader1 should only upload files ' - 'that exist on /filesystem1 and uploader2 should only upload files that exist on /filesystem2. ' - 'This is achieved by including a location argument (such as "location1" or "location2") ' - 'when submitting jobs, as well as when launching this uploader command.')) - - uploader_parser.add_argument("--log_level", - choices=loggeria.LEVELS, - help="The logging level to display") - - uploader_parser.add_argument("--log_dir", - help=("When provided, will write a log file to " - "the provided directory. 
This will be a " - "rotating log, creating a new log file " - "everyday, while storing the last 7 days " - "of logs")) - - uploader_parser.add_argument("--thread_count", - type=int, - default=conductor.CONFIG.get("thread_count"), - help=('The number of threads that should download simultaneously')) - - uploader_parser.add_argument("--alt", - help=('Run an alternative version of the downloader'), - action='store_true') - - uploader_parser.set_defaults(func=run_uploader) - - ############################# - # DOWNLOADER PARSER - ############################# - - downloader_parser_desciption = "parse downloader arguments" - downloader_parser_help = "" - - downloader_parser = subparsers.add_parser("downloader", parents=[parent_parser], - help=downloader_parser_help, - description=downloader_parser_desciption, - formatter_class=argparse.RawTextHelpFormatter) - - downloader_parser.add_argument("--job_id", - help=("The job id(s) to download. When specified " - "will only download those jobs and terminate " - "afterwards"), - action='append') - - downloader_parser.add_argument("--task_id", - help="Manually download output for this task") - - downloader_parser.add_argument("--output", - help="Override for the output directory") - - downloader_parser.add_argument("--location", - default=conductor.CONFIG.get("location"), - help=('An optional string to indicate which location this downloader ' - 'executable should register as. This option is only relevant for ' - 'conductor accounts which submits jobs from different locations ' - '(e.g. differing geographic locations or office locations that ' - 'have differing file systems). Typically each location would ' - 'have its own conductor downloader process running. This location ' - 'argument allows each downloader to target specific jobs (to ' - 'download upon job-completion) that match its appropriate location. 
' - 'Essentially this allows the location of which a job was submitted ' - 'from to also be the destination in which to deliver completed ' - 'renders to (which would typically be the desired behavior).')) - - downloader_parser.add_argument("--project", - default=conductor.CONFIG.get("project"), - help=('An optional string to indicate which project that this downloader executable should register as.')) - - downloader_parser.add_argument("--log_level", - choices=loggeria.LEVELS, - default=conductor.CONFIG.get("log_level"), - help="The logging level to display") - - downloader_parser.add_argument("--log_dir", - default=conductor.CONFIG.get("log_dir"), - help=("When provided, will write a log file to " - "the provided directory. This will be a " - "rotating log, creating a new log file " - "everyday, while storing the last 7 days " - "of logs")) - - downloader_parser.add_argument("--thread_count", - type=int, - default=conductor.CONFIG.get("thread_count"), - help=('The number of threads that should download simultaneously')) - - downloader_parser.add_argument("--alt", - help=('Run an alternative version of the downloader'), - action='store_true') - - downloader_parser.set_defaults(func=run_downloader) - - return parser.parse_args() - - -def cast_to_bool(string): - ''' - Ensure that the argument provided is either "True" or "False (or "true" or - "false") and convert that argument to an actual bool value (True or False). - ''' - string_lower = string.lower() - if string_lower == "true": - return True - elif string_lower == "false": - return False - raise argparse.ArgumentTypeError('Argument must be True or False') - - -class ValidateArgEnv(argparse.Action): - ''' - Validate the "env" argument that a user has provided, ensuring that it - adheres to proper syntax, and ultimately produces a dictionary object - consisting of desired environment variable names and their corresponding - values. 
- - Example: - {"PYTHONPATH": "/tmp/files1:$PYTHONPATH", - "PATH": "/usr/bin":$PATH"} - - ''' - - # Regular expression to validate argument construction = - # Example: PYTHONPATH=/tmp:$PYTHONPATH - rx_env_var = r'\s*(?P[^\s=]+)\s*=(?P[^=]+)$' - - def __call__(self, parser, namespace, values, option_string): - ''' - Read each variable definition and validate that it's constructed - properly. Populate a dictionary where they keys are the variable names - and the values are the variable values - ''' - env_variables = {} - for variable_definition in values: - re_result = re.match(self.rx_env_var, variable_definition) - if not re_result: - msg = ("Invalid variable assignment: '%s'\n" - "\tUse construct: = e.g " - "PYTHONPATH=/tmp:$PYTHONPATH" % variable_definition) - raise argparse.ArgumentError(self, msg) - - var_name = re_result.group("var_name").strip() - var_value = re_result.group("var_value").strip() - env_variables[var_name] = var_value - - setattr(namespace, self.dest, env_variables) - - -class CastLogLevelName(argparse.Action): - ''' - Cast a log level name to it's contstant value, e.g. - "INFO" --> logging.INFO - ''' - - def __call__(self, parser, namespace, values, option_string): - setattr(namespace, self.dest, loggeria.LEVEL_MAP[values]) - - -def run_submit(args): - conductor_submit.run_submit(args) - - -def run_uploader(args): - args_dict = vars(args) - if sys.platform == "win32": - uploader.run_uploader(args) - if args_dict.get("alt"): - uploader_v2.run_uploader(args) - else: - uploader.run_uploader(args) - - -def run_downloader(args): - ''' - Convert the argparse Namespace object to a dictionary and run the downloader - with the given args. 
- ''' - # Convert Namespace args object to args dict - args_dict = vars(args) - - # Code switcher between new downloader and old downloader - # HARD set windows users to old downloader - if sys.platform == "win32": - return downloader.run_downloader(args_dict) - # return downloader2.run_downloader(args_dict) - if args_dict.get("task_id") and not args_dict.get("job_id"): - raise argparse.ArgumentTypeError('Must supply a job_id with task_id.') - - if args_dict.get("job_id") or args_dict.get("alt"): - return downloader2.run_downloader(args_dict) - else: - return downloader.run_downloader(args_dict) - - -if __name__ == '__main__': - args = parse_args() - args.func(args) +#!/usr/bin/env python import argparse import imp import json import os import re import sys try: imp.find_module('conductor') except: sys.path.append(os.path.dirname(os.path.dirname(__file__))) # On Windows, due to the quirks of the multiprocessing module # this file is installed as 'conductor.py' and to ensure the # environment gets passed properly, add the module to the path # list again if os.name == 'nt': sys.path.insert(0, os.path.dirname(os.path.dirname(__file__))) import conductor from conductor.lib import conductor_submit, downloader, downloader2, uploader, uploader_v2, loggeria def parse_args(): # Create a parent parser. Arguments that are common across all subparsers can be added to this parser parent_parser = argparse.ArgumentParser(add_help=False) # create the main parser. 
Not sure why this parser is required, but got parsing tracebacks when excluding it (it gets confused about the arguments provided) parser = argparse.ArgumentParser(description="description") subparsers = parser.add_subparsers(title="actions") ############################# # SUBMIT PARSER ############################# submit_parser_desciption = "parse submitter arguments" submit_parser_help = "" submit_parser = subparsers.add_parser("submit", parents=[parent_parser], help=submit_parser_help, description=submit_parser_desciption, formatter_class=argparse.RawTextHelpFormatter) submit_parser.add_argument("--autoretry_policy", type=json.loads, help=('Enables tasks to be automatically retried if they have been preempted and/or ' 'failed.\n' 'A policy can be defined for more than one status, e.g. "preempted", "failed"\n' 'A policy for a status must define a "max_retries" key with an integer from 0 ' 'through 5\n\n' 'Example 1: --autoretry_policy "{\\"preempted\\": {\\"max_retries\\": 2 } }"\n\n' 'Example 2: --autoretry_policy "{\\"failed\\": {\\"max_retries\\": 1 },' '\\"preempted\\": {\\"max_retries\\": 5 }}"\n\n')) submit_parser.add_argument("--cmd", help=('[DEPRECATED. Use --tasks_data instead]\n' 'The command to execute. This works in tandem with the --frames ' 'argument to construct a command per task\n' 'Example: --cmd "Render /tmp/mayafile.ma"')) submit_parser.add_argument("--cores", help="Number of cores that this job should run on (highmem and highcpu machines have a minimum of 2 cores)", type=int) submit_parser.add_argument("--database_filepath", help=("The filepath to the local md5 caching database. If no filepath " "is specified, the database will be created in a temp directory. 
" "Note that this flag is only active when --local_upload is True.")) submit_parser.add_argument("--docker_image", help="docker image to run render in") submit_parser.add_argument("--environment", help=(r"Specify environment variable statements to be executed on Conductor's " "render instance.\n\n" "Example:\n" "\t--environment PYTHONPATH=/home/user/steve/python:/home/user/marry/python\n\n" "Multipe environment variable statements can be made by separating them with a space:\n" "\t--environment PYTHONPATH=/home/user/marry PATH=/home/usr/bin\n\n" "Appending to Conductor's existing environment variables is also possible. " "Note that the referenced environment variable is encapsulated by single quotes " "so that it does not get evaluated until it is executed on Conductor's render instance:\n" "\t--environment PYTHONPATH=/tmp:/home/user:'$PYTHONPATH'\n\n" "However, it may be desirable to both evaluate your local environment variable " "AND also append it to Conductor's own. Note that one PYTHONPATH is" " contained with single quotes while the other is not:\n" "\t--environment PYTHONPATH=/tmp:/home/user:$PYTHONPATH:'$PYTHONPATH'\n\n" "Note that the syntax for Windows clients will look different in some " "places, but it's important to remember that ultimately these environment " "variable statements will be executed on a linux platform on Conductor, " "which means that when referencing Conductor\'s environment variable, " "A dollar sign syntax must be used:\n" "\t--environment PYTHONPATH=\"c:\\My Documents\";%%PYTHONPATH%%;$PYTHONPATH"), nargs="*", action=ValidateArgEnv) submit_parser.add_argument("--metadata", help=("Specify metadata dictionary. Note that it's best to wrap this dictionary" " in single quotes,\n e.g. 
--metadata '{\"project\":\"Scrooge\"}'"), type=json.loads) # use the json loads function to parse dict submit_parser.add_argument("--force", help="Do not check for existing uploads, force a new upload", action='store_true') submit_parser.add_argument("--chunk_size", help="number of frames to execute on a single instance", default=1) submit_parser.add_argument("--frames", help=('[DEPRECATED. Use --tasks_data instead]\n' 'The frame range to render. This works in tandem with the --command ' 'argument to construct a command per task.\n' 'Example: --frames "10-20" or "100-200x2", or "1, 10-20, 100-200x2"')) submit_parser.add_argument("--scout_frames", help="If specified, these frames will run first and the rest of the" "job will wait until verification has been given before continuing.") submit_parser.add_argument("--local_upload", help="Trigger files to be uploaded localy", choices=[False, True], type=cast_to_bool, default=None) submit_parser.add_argument("--location", help=('An optional string to indicate which location this submitted ' 'job should be registered as. This option is only relevant ' 'for conductor accounts which submits jobs from different ' 'locations (e.g. differing geographic locations or office ' 'locations that have differing file systems). Typically each ' 'location would have its own conductor downloader running (for ' 'downloading completed renders). This location string allows ' 'each conductor downloader to target only files that match the ' 'appropriate location. This is potentially useful when you have ' 'multiple downloaders running but each have differing file ' 'systems available to them (e.g. downloader1 has /filesystem1 ' 'available to it, but downloader2 only has /filesystem2 ' 'available to it). In this case downloader1 should only ' 'download files that exist on /filesystem1 and downloader2 ' 'should only download files that exist on /filesystem2. 
' 'This is achieved by including a location string (such as ' '"location1" or "location2") when submitting jobs (and when ' 'running a downloader or uploader command).')) # TODO: We should remove this arg submit_parser.add_argument("--machine_type", help='Type of machine to run on ("standard", "highmem", or "highcpu")') submit_parser.add_argument("--preemptible", choices=[False, True], type=cast_to_bool, help='Run job in preemptible instances') submit_parser.add_argument("--md5_caching", help=("Use cached md5s. This can dramatically improve the uploading " "times, as md5 checking can be very time consuming. Caching md5s " "allows subsequent uploads (of the same files) to skip the " "md5 generation process (if the files appear to not have been " "modified since the last time they were submitted). The cache is " "stored locally and uses a file's modification time and file size " "to intelligently guess whether the file has changed. Set this " "flag to False if there is concern that files may not be getting " "re-uploaded properly. " "Note that this flag is only active when --local_upload is True."), choices=[False, True], type=cast_to_bool, default=None) submit_parser.add_argument("--output_path", help="path to copy renders to") submit_parser.add_argument("--postcmd", help="Run this command once the entire job is complete and downloaded") submit_parser.add_argument("--priority", help="Set the priority of the submitted job. Default is 5") submit_parser.add_argument("--project", help='The name of the project to submit the job. This will default to "default"') submit_parser.add_argument("--software_package_ids", help=('The ids for the software packages to use for the submitted ' 'job, e.g. maya or nuke. 
Packages are identified ' 'by their ID number, which can be queried from ' 'Conductor\'s "web api"'), nargs="*") submit_parser.add_argument("--tasks_data", help=('Specify a task\'s command to execute and it\'s corresponding frames ' '(if applicable) in a json dictionary format. e.g.\n' '--tasks_data \"{\\"command\\": \\"Render -s 1 -e 1 /tmp/file.ma\\", ' '\\"frames\\": \\"1\\"}" \n' '\nSpecify multiple tasks via multiple json dictionaries, separated by spaces, e.g.\n' '--tasks_data "{\\"command\\": \\"Render -s 10 -e 12 /tmp/file.ma\\", \\"frames\\": \\"10-12\\"}" ' '"{\\"command\\": \\"Render -s 10 -e 12 /tmp/file.ma\\", \\"frames\\": \\"10-12\\"}"\n' '\nNote that that format of this argument must be valid json. This makes for ' 'piping in more verbose/complex arguments more bearable,\n' 'Example of reading two task\'s data from an array within a json file:\n' 'conductor submit --tasks_data "$(cat /tmp/task_cmds.json | jq -c \'.[1]\')"'), type=json.loads, nargs="*") submit_parser.add_argument("--job_title", help=("The title to name the job. This is the title " "that will be displayed in the web ui")) submit_parser.add_argument("--upload_file", help=("A path to a text file whose contents list paths to upload to " "conductor. There should be one path per line in the text file. " "Valid paths are to files, directories (symlinked or not), as " "well as glob expressions. 
e.g * wildcards accepted")) submit_parser.add_argument("--upload_only", help="Only upload the files, don't start the render", action='store_true') submit_parser.add_argument("--upload_paths", help="Paths to upload", nargs="*") submit_parser.add_argument("--user", help="Username to submit as") submit_parser.add_argument("--notify", help="Who to notify when job is complete", nargs="+") submit_parser.add_argument("--slack_notify", help="Slack recipients to notify when job is complete", nargs="+") submit_parser.add_argument("--log_level", choices=loggeria.LEVELS, help="The logging level to display") submit_parser.add_argument("--log_dir", help=("When provided, will write a log file to " "the provided directory. This will be a " "rotating log, creating a new log file " "everyday, while storing the last 7 days " "of logs")) submit_parser.add_argument("--max_instances", help="Maximum number of instances for this job") submit_parser.set_defaults(func=run_submit) ############################# # UPLOADER PARSER ############################# uploader_parser_desciption = "parse uploader arguments" uploader_parser_help = "" uploader_parser = subparsers.add_parser("uploader", parents=[parent_parser], help=uploader_parser_help, description=uploader_parser_desciption, formatter_class=argparse.RawTextHelpFormatter) uploader_parser.add_argument("--location", help=('An optional string to indicate which location this uploader ' 'executable should register as. This option is only relevant ' 'for conductor accounts which submits jobs from different locations ' '(e.g. differing geographic locations or office locations that have differing file systems).' ' Typically each location would have its own conductor uploader process running. This location ' 'string allows each uploader to target specific upload jobs (files to upload) that are appropriate ' 'for it. This is potentially useful as each location may have differing file systems ' 'available to it (e.g. 
uploader1 has /filesystem1 available to it, but uploader2 only ' 'has /filesystem2 available to it). In this case uploader1 should only upload files ' 'that exist on /filesystem1 and uploader2 should only upload files that exist on /filesystem2. ' 'This is achieved by including a location argument (such as "location1" or "location2") ' 'when submitting jobs, as well as when launching this uploader command.')) uploader_parser.add_argument("--log_level", choices=loggeria.LEVELS, help="The logging level to display") uploader_parser.add_argument("--log_dir", help=("When provided, will write a log file to " "the provided directory. This will be a " "rotating log, creating a new log file " "everyday, while storing the last 7 days " "of logs")) uploader_parser.add_argument("--thread_count", type=int, default=conductor.CONFIG.get("thread_count"), help=('The number of threads that should download simultaneously')) uploader_parser.add_argument("--alt", help=('Run an alternative version of the downloader'), action='store_true') uploader_parser.set_defaults(func=run_uploader) ############################# # DOWNLOADER PARSER ############################# downloader_parser_desciption = "parse downloader arguments" downloader_parser_help = "" downloader_parser = subparsers.add_parser("downloader", parents=[parent_parser], help=downloader_parser_help, description=downloader_parser_desciption, formatter_class=argparse.RawTextHelpFormatter) downloader_parser.add_argument("--job_id", help=("The job id(s) to download. When specified " "will only download those jobs and terminate " "afterwards"), action='append') downloader_parser.add_argument("--task_id", help="Manually download output for this task") downloader_parser.add_argument("--output", help="Override for the output directory") downloader_parser.add_argument("--location", default=conductor.CONFIG.get("location"), help=('An optional string to indicate which location this downloader ' 'executable should register as. 
This option is only relevant for ' 'conductor accounts which submits jobs from different locations ' '(e.g. differing geographic locations or office locations that ' 'have differing file systems). Typically each location would ' 'have its own conductor downloader process running. This location ' 'argument allows each downloader to target specific jobs (to ' 'download upon job-completion) that match its appropriate location. ' 'Essentially this allows the location of which a job was submitted ' 'from to also be the destination in which to deliver completed ' 'renders to (which would typically be the desired behavior).')) downloader_parser.add_argument("--project", default=conductor.CONFIG.get("project"), help=('An optional string to indicate which project that this downloader executable should register as.')) downloader_parser.add_argument("--log_level", choices=loggeria.LEVELS, default=conductor.CONFIG.get("log_level"), help="The logging level to display") downloader_parser.add_argument("--log_dir", default=conductor.CONFIG.get("log_dir"), help=("When provided, will write a log file to " "the provided directory. This will be a " "rotating log, creating a new log file " "everyday, while storing the last 7 days " "of logs")) downloader_parser.add_argument("--thread_count", type=int, default=conductor.CONFIG.get("thread_count"), help=('The number of threads that should download simultaneously')) downloader_parser.add_argument("--alt", help=('Run an alternative version of the downloader'), action='store_true') downloader_parser.set_defaults(func=run_downloader) return parser.parse_args() def cast_to_bool(string): ''' Ensure that the argument provided is either "True" or "False (or "true" or "false") and convert that argument to an actual bool value (True or False). 
class ValidateArgEnv(argparse.Action):
    '''
    Validate the "env" argument that a user has provided, ensuring that it
    adheres to proper syntax, and ultimately produce a dictionary object
    consisting of desired environment variable names and their corresponding
    values.

    Example result:
        {"PYTHONPATH": "/tmp/files1:$PYTHONPATH", "PATH": "/usr/bin:$PATH"}
    '''

    # Regular expression to validate argument construction.
    # Example: PYTHONPATH=/tmp:$PYTHONPATH
    # BUGFIX: restored the named groups "var_name"/"var_value" -- __call__
    # reads re_result.group("var_name")/group("var_value"), but the pattern
    # had lost its group names (invalid regex, raising re.error at match time).
    rx_env_var = r'\s*(?P<var_name>[^\s=]+)\s*=(?P<var_value>[^=]+)$'

    def __call__(self, parser, namespace, values, option_string):
        '''
        Read each variable definition and validate that it's constructed
        properly. Populate a dictionary where the keys are the variable names
        and the values are the variable values.
        '''
        env_variables = {}
        for variable_definition in values:
            re_result = re.match(self.rx_env_var, variable_definition)
            if not re_result:
                # BUGFIX: restored the placeholder text that had been garbled
                # out of the error message ("Use construct: = e.g").
                msg = ("Invalid variable assignment: '%s'\n"
                       "\tUse construct: <VAR_NAME>=<var_value> e.g "
                       "PYTHONPATH=/tmp:$PYTHONPATH" % variable_definition)
                raise argparse.ArgumentError(self, msg)
            var_name = re_result.group("var_name").strip()
            var_value = re_result.group("var_value").strip()
            env_variables[var_name] = var_value
        setattr(namespace, self.dest, env_variables)


class CastLogLevelName(argparse.Action):
    '''
    Cast a log level name to its constant value, e.g. "INFO" --> logging.INFO
    '''

    def __call__(self, parser, namespace, values, option_string):
        setattr(namespace, self.dest, loggeria.LEVEL_MAP[values])


def run_submit(args):
    '''Run a job submission with the parsed argparse Namespace.'''
    conductor_submit.run_submit(args)


def run_uploader(args):
    '''
    Run the uploader with the parsed argparse Namespace.

    On Windows only the original (v1) uploader is supported; elsewhere the
    --alt flag selects the v2 uploader.
    '''
    args_dict = vars(args)
    if sys.platform == "win32":
        # BUGFIX: return here. Previously this fell through and invoked an
        # uploader a second time (v1 again, or v2 if --alt was given).
        return uploader.run_uploader(args)
    if args_dict.get("alt"):
        uploader_v2.run_uploader(args)
    else:
        uploader.run_uploader(args)


def run_downloader(args):
    '''
    Convert the argparse Namespace object to a dictionary and run the
    downloader with the given args.
    '''
    # Convert Namespace args object to args dict
    args_dict = vars(args)

    # Code switcher between new downloader and old downloader.
    # HARD set windows users to the old downloader.
    if sys.platform == "win32":
        return downloader.run_downloader(args_dict)

    # A task can only be targeted in the context of a specific job.
    if args_dict.get("task_id") and not args_dict.get("job_id"):
        raise argparse.ArgumentTypeError('Must supply a job_id with task_id.')

    if args_dict.get("job_id") or args_dict.get("alt"):
        return downloader2.run_downloader(args_dict)
    else:
        return downloader.run_downloader(args_dict)


if __name__ == '__main__':
    args = parse_args()
    args.func(args)
the path # list again if os.name == 'nt': sys.path.insert(0, os.path.dirname(os.path.dirname(__file__))) import conductor from conductor.lib import conductor_submit, downloader, downloader2, uploader, uploader_v2, loggeria def parse_args(): # Create a parent parser. Arguments that are common across all subparsers can be added to this parser parent_parser = argparse.ArgumentParser(add_help=False) # create the main parser. Not sure why this parser is required, but got parsing tracebacks when excluding it (it gets confused about the arguments provided) parser = argparse.ArgumentParser(description="description") subparsers = parser.add_subparsers(title="actions") ############################# # SUBMIT PARSER ############################# submit_parser_desciption = "parse submitter arguments" submit_parser_help = "" submit_parser = subparsers.add_parser("submit", parents=[parent_parser], help=submit_parser_help, description=submit_parser_desciption, formatter_class=argparse.RawTextHelpFormatter) submit_parser.add_argument("--autoretry_policy", type=json.loads, help=('Enables tasks to be automatically retried if they have been preempted and/or ' 'failed.\n' 'A policy can be defined for more than one status, e.g. "preempted", "failed"\n' 'A policy for a status must define a "max_retries" key with an integer from 0 ' 'through 5\n\n' 'Example 1: --autoretry_policy "{\\"preempted\\": {\\"max_retries\\": 2 } }"\n\n' 'Example 2: --autoretry_policy "{\\"failed\\": {\\"max_retries\\": 1 },' '\\"preempted\\": {\\"max_retries\\": 5 }}"\n\n')) submit_parser.add_argument("--cmd", help=('[DEPRECATED. Use --tasks_data instead]\n' 'The command to execute. 
This works in tandem with the --frames ' 'argument to construct a command per task\n' 'Example: --cmd "Render /tmp/mayafile.ma"')) submit_parser.add_argument("--cores", help="Number of cores that this job should run on (highmem and highcpu machines have a minimum of 2 cores)", type=int) submit_parser.add_argument("--database_filepath", help=("The filepath to the local md5 caching database. If no filepath " "is specified, the database will be created in a temp directory. " "Note that this flag is only active when --local_upload is True.")) submit_parser.add_argument("--docker_image", help="docker image to run render in") submit_parser.add_argument("--environment", help=(r"Specify environment variable statements to be executed on Conductor's " "render instance.\n\n" "Example:\n" "\t--environment PYTHONPATH=/home/user/steve/python:/home/user/marry/python\n\n" "Multipe environment variable statements can be made by separating them with a space:\n" "\t--environment PYTHONPATH=/home/user/marry PATH=/home/usr/bin\n\n" "Appending to Conductor's existing environment variables is also possible. " "Note that the referenced environment variable is encapsulated by single quotes " "so that it does not get evaluated until it is executed on Conductor's render instance:\n" "\t--environment PYTHONPATH=/tmp:/home/user:'$PYTHONPATH'\n\n" "However, it may be desirable to both evaluate your local environment variable " "AND also append it to Conductor's own. 
Note that one PYTHONPATH is" " contained with single quotes while the other is not:\n" "\t--environment PYTHONPATH=/tmp:/home/user:$PYTHONPATH:'$PYTHONPATH'\n\n" "Note that the syntax for Windows clients will look different in some " "places, but it's important to remember that ultimately these environment " "variable statements will be executed on a linux platform on Conductor, " "which means that when referencing Conductor\'s environment variable, " "A dollar sign syntax must be used:\n" "\t--environment PYTHONPATH=\"c:\\My Documents\";%%PYTHONPATH%%;$PYTHONPATH"), nargs="*", action=ValidateArgEnv) submit_parser.add_argument("--metadata", help=("Specify metadata dictionary. Note that it's best to wrap this dictionary" " in single quotes,\n e.g. --metadata '{\"project\":\"Scrooge\"}'"), type=json.loads) # use the json loads function to parse dict submit_parser.add_argument("--force", help="Do not check for existing uploads, force a new upload", action='store_true') submit_parser.add_argument("--chunk_size", help="number of frames to execute on a single instance", default=1) submit_parser.add_argument("--frames", help=('[DEPRECATED. Use --tasks_data instead]\n' 'The frame range to render. This works in tandem with the --command ' 'argument to construct a command per task.\n' 'Example: --frames "10-20" or "100-200x2", or "1, 10-20, 100-200x2"')) submit_parser.add_argument("--scout_frames", help="If specified, these frames will run first and the rest of the" "job will wait until verification has been given before continuing.") submit_parser.add_argument("--local_upload", help="Trigger files to be uploaded localy", choices=[False, True], type=cast_to_bool, default=None) submit_parser.add_argument("--location", help=('An optional string to indicate which location this submitted ' 'job should be registered as. This option is only relevant ' 'for conductor accounts which submits jobs from different ' 'locations (e.g. 
differing geographic locations or office ' 'locations that have differing file systems). Typically each ' 'location would have its own conductor downloader running (for ' 'downloading completed renders). This location string allows ' 'each conductor downloader to target only files that match the ' 'appropriate location. This is potentially useful when you have ' 'multiple downloaders running but each have differing file ' 'systems available to them (e.g. downloader1 has /filesystem1 ' 'available to it, but downloader2 only has /filesystem2 ' 'available to it). In this case downloader1 should only ' 'download files that exist on /filesystem1 and downloader2 ' 'should only download files that exist on /filesystem2. ' 'This is achieved by including a location string (such as ' '"location1" or "location2") when submitting jobs (and when ' 'running a downloader or uploader command).')) # TODO: We should remove this arg submit_parser.add_argument("--machine_type", help='Type of machine to run on ("standard", "highmem", or "highcpu")') submit_parser.add_argument("--preemptible", choices=[False, True], type=cast_to_bool, help='Run job in preemptible instances') submit_parser.add_argument("--md5_caching", help=("Use cached md5s. This can dramatically improve the uploading " "times, as md5 checking can be very time consuming. Caching md5s " "allows subsequent uploads (of the same files) to skip the " "md5 generation process (if the files appear to not have been " "modified since the last time they were submitted). The cache is " "stored locally and uses a file's modification time and file size " "to intelligently guess whether the file has changed. Set this " "flag to False if there is concern that files may not be getting " "re-uploaded properly. 
" "Note that this flag is only active when --local_upload is True."), choices=[False, True], type=cast_to_bool, default=None) submit_parser.add_argument("--output_path", help="path to copy renders to") submit_parser.add_argument("--postcmd", help="Run this command once the entire job is complete and downloaded") submit_parser.add_argument("--priority", help="Set the priority of the submitted job. Default is 5") submit_parser.add_argument("--project", help='The name of the project to submit the job. This will default to "default"') submit_parser.add_argument("--software_package_ids", help=('The ids for the software packages to use for the submitted ' 'job, e.g. maya or nuke. Packages are identified ' 'by their ID number, which can be queried from ' 'Conductor\'s "web api"'), nargs="*") submit_parser.add_argument("--tasks_data", help=('Specify a task\'s command to execute and it\'s corresponding frames ' '(if applicable) in a json dictionary format. e.g.\n' '--tasks_data \"{\\"command\\": \\"Render -s 1 -e 1 /tmp/file.ma\\", ' '\\"frames\\": \\"1\\"}" \n' '\nSpecify multiple tasks via multiple json dictionaries, separated by spaces, e.g.\n' '--tasks_data "{\\"command\\": \\"Render -s 10 -e 12 /tmp/file.ma\\", \\"frames\\": \\"10-12\\"}" ' '"{\\"command\\": \\"Render -s 10 -e 12 /tmp/file.ma\\", \\"frames\\": \\"10-12\\"}"\n' '\nNote that that format of this argument must be valid json. This makes for ' 'piping in more verbose/complex arguments more bearable,\n' 'Example of reading two task\'s data from an array within a json file:\n' 'conductor submit --tasks_data "$(cat /tmp/task_cmds.json | jq -c \'.[1]\')"'), type=json.loads, nargs="*") submit_parser.add_argument("--job_title", help=("The title to name the job. This is the title " "that will be displayed in the web ui")) submit_parser.add_argument("--upload_file", help=("A path to a text file whose contents list paths to upload to " "conductor. There should be one path per line in the text file. 
" "Valid paths are to files, directories (symlinked or not), as " "well as glob expressions. e.g * wildcards accepted")) submit_parser.add_argument("--upload_only", help="Only upload the files, don't start the render", action='store_true') submit_parser.add_argument("--upload_paths", help="Paths to upload", nargs="*") submit_parser.add_argument("--user", help="Username to submit as") submit_parser.add_argument("--notify", help="Who to notify when job is complete", nargs="+") submit_parser.add_argument("--slack_notify", help="Slack recipients to notify when job is complete", nargs="+") submit_parser.add_argument("--log_level", choices=loggeria.LEVELS, help="The logging level to display") submit_parser.add_argument("--log_dir", help=("When provided, will write a log file to " "the provided directory. This will be a " "rotating log, creating a new log file " "everyday, while storing the last 7 days " "of logs")) submit_parser.add_argument("--max_instances", help="Maximum number of instances for this job") submit_parser.set_defaults(func=run_submit) ############################# # UPLOADER PARSER ############################# uploader_parser_desciption = "parse uploader arguments" uploader_parser_help = "" uploader_parser = subparsers.add_parser("uploader", parents=[parent_parser], help=uploader_parser_help, description=uploader_parser_desciption, formatter_class=argparse.RawTextHelpFormatter) uploader_parser.add_argument("--location", help=('An optional string to indicate which location this uploader ' 'executable should register as. This option is only relevant ' 'for conductor accounts which submits jobs from different locations ' '(e.g. differing geographic locations or office locations that have differing file systems).' ' Typically each location would have its own conductor uploader process running. This location ' 'string allows each uploader to target specific upload jobs (files to upload) that are appropriate ' 'for it. 
This is potentially useful as each location may have differing file systems ' 'available to it (e.g. uploader1 has /filesystem1 available to it, but uploader2 only ' 'has /filesystem2 available to it). In this case uploader1 should only upload files ' 'that exist on /filesystem1 and uploader2 should only upload files that exist on /filesystem2. ' 'This is achieved by including a location argument (such as "location1" or "location2") ' 'when submitting jobs, as well as when launching this uploader command.')) uploader_parser.add_argument("--log_level", choices=loggeria.LEVELS, help="The logging level to display") uploader_parser.add_argument("--log_dir", help=("When provided, will write a log file to " "the provided directory. This will be a " "rotating log, creating a new log file " "everyday, while storing the last 7 days " "of logs")) uploader_parser.add_argument("--thread_count", type=int, default=conductor.CONFIG.get("thread_count"), help=('The number of threads that should download simultaneously')) uploader_parser.add_argument("--alt", help=('Run an alternative version of the downloader'), action='store_true') uploader_parser.set_defaults(func=run_uploader) ############################# # DOWNLOADER PARSER ############################# downloader_parser_desciption = "parse downloader arguments" downloader_parser_help = "" downloader_parser = subparsers.add_parser("downloader", parents=[parent_parser], help=downloader_parser_help, description=downloader_parser_desciption, formatter_class=argparse.RawTextHelpFormatter) downloader_parser.add_argument("--job_id", help=("The job id(s) to download. 
When specified " "will only download those jobs and terminate " "afterwards"), action='append') downloader_parser.add_argument("--task_id", help="Manually download output for this task") downloader_parser.add_argument("--output", help="Override for the output directory") downloader_parser.add_argument("--location", default=conductor.CONFIG.get("location"), help=('An optional string to indicate which location this downloader ' 'executable should register as. This option is only relevant for ' 'conductor accounts which submits jobs from different locations ' '(e.g. differing geographic locations or office locations that ' 'have differing file systems). Typically each location would ' 'have its own conductor downloader process running. This location ' 'argument allows each downloader to target specific jobs (to ' 'download upon job-completion) that match its appropriate location. ' 'Essentially this allows the location of which a job was submitted ' 'from to also be the destination in which to deliver completed ' 'renders to (which would typically be the desired behavior).')) downloader_parser.add_argument("--project", default=conductor.CONFIG.get("project"), help=('An optional string to indicate which project that this downloader executable should register as.')) downloader_parser.add_argument("--log_level", choices=loggeria.LEVELS, default=conductor.CONFIG.get("log_level"), help="The logging level to display") downloader_parser.add_argument("--log_dir", default=conductor.CONFIG.get("log_dir"), help=("When provided, will write a log file to " "the provided directory. 
def cast_to_bool(string):
    '''
    Convert the string "True" or "False" (case-insensitive) to the
    corresponding bool value.

    Raises argparse.ArgumentTypeError for any other value.
    '''
    string_lower = string.lower()
    if string_lower == "true":
        return True
    elif string_lower == "false":
        return False
    raise argparse.ArgumentTypeError('Argument must be True or False')


class ValidateArgEnv(argparse.Action):
    '''
    Validate the "env" argument that a user has provided, ensuring that it
    adheres to proper syntax, and ultimately produce a dictionary object
    consisting of desired environment variable names and their corresponding
    values.

    Example result:
        {"PYTHONPATH": "/tmp/files1:$PYTHONPATH", "PATH": "/usr/bin:$PATH"}
    '''

    # Regular expression to validate argument construction.
    # Example: PYTHONPATH=/tmp:$PYTHONPATH
    # BUGFIX: restored the named groups "var_name"/"var_value" -- __call__
    # reads re_result.group("var_name")/group("var_value"), but the pattern
    # had lost its group names (invalid regex, raising re.error at match time).
    rx_env_var = r'\s*(?P<var_name>[^\s=]+)\s*=(?P<var_value>[^=]+)$'

    def __call__(self, parser, namespace, values, option_string):
        '''
        Read each variable definition and validate that it's constructed
        properly. Populate a dictionary where the keys are the variable names
        and the values are the variable values.
        '''
        env_variables = {}
        for variable_definition in values:
            re_result = re.match(self.rx_env_var, variable_definition)
            if not re_result:
                # BUGFIX: restored the placeholder text that had been garbled
                # out of the error message ("Use construct: = e.g").
                msg = ("Invalid variable assignment: '%s'\n"
                       "\tUse construct: <VAR_NAME>=<var_value> e.g "
                       "PYTHONPATH=/tmp:$PYTHONPATH" % variable_definition)
                raise argparse.ArgumentError(self, msg)
            var_name = re_result.group("var_name").strip()
            var_value = re_result.group("var_value").strip()
            env_variables[var_name] = var_value
        setattr(namespace, self.dest, env_variables)


class CastLogLevelName(argparse.Action):
    '''
    Cast a log level name to its constant value, e.g. "INFO" --> logging.INFO
    '''

    def __call__(self, parser, namespace, values, option_string):
        setattr(namespace, self.dest, loggeria.LEVEL_MAP[values])


def run_submit(args):
    '''Run a job submission with the parsed argparse Namespace.'''
    conductor_submit.run_submit(args)


def run_uploader(args):
    '''
    Run the uploader with the parsed argparse Namespace.

    On Windows only the original (v1) uploader is supported; elsewhere the
    --alt flag selects the v2 uploader.
    '''
    args_dict = vars(args)
    if sys.platform == "win32":
        # BUGFIX: return here. Previously this fell through and invoked an
        # uploader a second time (v1 again, or v2 if --alt was given).
        return uploader.run_uploader(args)
    if args_dict.get("alt"):
        uploader_v2.run_uploader(args)
    else:
        uploader.run_uploader(args)


def run_downloader(args):
    '''
    Convert the argparse Namespace object to a dictionary and run the
    downloader with the given args.
    '''
    # Convert Namespace args object to args dict
    args_dict = vars(args)

    # Code switcher between new downloader and old downloader.
    # HARD set windows users to the old downloader.
    if sys.platform == "win32":
        return downloader.run_downloader(args_dict)

    # A task can only be targeted in the context of a specific job.
    if args_dict.get("task_id") and not args_dict.get("job_id"):
        raise argparse.ArgumentTypeError('Must supply a job_id with task_id.')

    if args_dict.get("job_id") or args_dict.get("alt"):
        return downloader2.run_downloader(args_dict)
    else:
        return downloader.run_downloader(args_dict)


if __name__ == '__main__':
    args = parse_args()
    args.func(args)
Not sure why this parser is required, but got parsing tracebacks when excluding it (it gets confused about the arguments provided) + parser = argparse.ArgumentParser(description="description") + subparsers = parser.add_subparsers(title="actions") + + ############################# + # SUBMIT PARSER + ############################# + submit_parser_desciption = "parse submitter arguments" + submit_parser_help = "" + submit_parser = subparsers.add_parser("submit", + parents=[parent_parser], + help=submit_parser_help, + description=submit_parser_desciption, + formatter_class=argparse.RawTextHelpFormatter) + + submit_parser.add_argument("--autoretry_policy", + type=json.loads, + help=('Enables tasks to be automatically retried if they have been preempted and/or ' + 'failed.\n' + 'A policy can be defined for more than one status, e.g. "preempted", "failed"\n' + 'A policy for a status must define a "max_retries" key with an integer from 0 ' + 'through 5\n\n' + 'Example 1: --autoretry_policy "{\\"preempted\\": {\\"max_retries\\": 2 } }"\n\n' + 'Example 2: --autoretry_policy "{\\"failed\\": {\\"max_retries\\": 1 },' + '\\"preempted\\": {\\"max_retries\\": 5 }}"\n\n')) + + submit_parser.add_argument("--cmd", + help=('[DEPRECATED. Use --tasks_data instead]\n' + 'The command to execute. This works in tandem with the --frames ' + 'argument to construct a command per task\n' + 'Example: --cmd "Render /tmp/mayafile.ma"')) + + submit_parser.add_argument("--cores", + help="Number of cores that this job should run on (highmem and highcpu machines have a minimum of 2 cores)", + type=int) + + submit_parser.add_argument("--database_filepath", + help=("The filepath to the local md5 caching database. If no filepath " + "is specified, the database will be created in a temp directory. 
" + "Note that this flag is only active when --local_upload is True.")) + + submit_parser.add_argument("--docker_image", + help="docker image to run render in") + + submit_parser.add_argument("--environment", + help=(r"Specify environment variable statements to be executed on Conductor's " + "render instance.\n\n" + "Example:\n" + "\t--environment PYTHONPATH=/home/user/steve/python:/home/user/marry/python\n\n" + + "Multipe environment variable statements can be made by separating them with a space:\n" + "\t--environment PYTHONPATH=/home/user/marry PATH=/home/usr/bin\n\n" + + "Appending to Conductor's existing environment variables is also possible. " + "Note that the referenced environment variable is encapsulated by single quotes " + "so that it does not get evaluated until it is executed on Conductor's render instance:\n" + "\t--environment PYTHONPATH=/tmp:/home/user:'$PYTHONPATH'\n\n" + "However, it may be desirable to both evaluate your local environment variable " + "AND also append it to Conductor's own. Note that one PYTHONPATH is" + " contained with single quotes while the other is not:\n" + "\t--environment PYTHONPATH=/tmp:/home/user:$PYTHONPATH:'$PYTHONPATH'\n\n" + "Note that the syntax for Windows clients will look different in some " + "places, but it's important to remember that ultimately these environment " + "variable statements will be executed on a linux platform on Conductor, " + "which means that when referencing Conductor\'s environment variable, " + "A dollar sign syntax must be used:\n" + "\t--environment PYTHONPATH=\"c:\\My Documents\";%%PYTHONPATH%%;$PYTHONPATH"), + nargs="*", + action=ValidateArgEnv) + + submit_parser.add_argument("--metadata", + help=("Specify metadata dictionary. Note that it's best to wrap this dictionary" + " in single quotes,\n e.g. 
--metadata '{\"project\":\"Scrooge\"}'"), + type=json.loads) # use the json loads function to parse dict + + submit_parser.add_argument("--force", + help="Do not check for existing uploads, force a new upload", + action='store_true') + + submit_parser.add_argument("--chunk_size", + help="number of frames to execute on a single instance", + default=1) + + submit_parser.add_argument("--frames", + help=('[DEPRECATED. Use --tasks_data instead]\n' + 'The frame range to render. This works in tandem with the --command ' + 'argument to construct a command per task.\n' + 'Example: --frames "10-20" or "100-200x2", or "1, 10-20, 100-200x2"')) + + submit_parser.add_argument("--scout_frames", + help="If specified, these frames will run first and the rest of the " + "job will wait until verification has been given before continuing.") + + submit_parser.add_argument("--local_upload", + help="Trigger files to be uploaded locally", + choices=[False, True], + type=cast_to_bool, + default=None) + + submit_parser.add_argument("--location", + help=('An optional string to indicate which location this submitted ' + 'job should be registered as. This option is only relevant ' + 'for conductor accounts which submits jobs from different ' + 'locations (e.g. differing geographic locations or office ' + 'locations that have differing file systems). Typically each ' + 'location would have its own conductor downloader running (for ' + 'downloading completed renders). This location string allows ' + 'each conductor downloader to target only files that match the ' + 'appropriate location. This is potentially useful when you have ' + 'multiple downloaders running but each have differing file ' + 'systems available to them (e.g. downloader1 has /filesystem1 ' + 'available to it, but downloader2 only has /filesystem2 ' + 'available to it). In this case downloader1 should only ' + 'download files that exist on /filesystem1 and downloader2 ' + 'should only download files that exist on /filesystem2. 
' + 'This is achieved by including a location string (such as ' + '"location1" or "location2") when submitting jobs (and when ' + 'running a downloader or uploader command).')) + + # TODO: We should remove this arg + submit_parser.add_argument("--machine_type", + help='Type of machine to run on ("standard", "highmem", or "highcpu")') + + submit_parser.add_argument("--preemptible", + choices=[False, True], + type=cast_to_bool, + help='Run job in preemptible instances') + + submit_parser.add_argument("--md5_caching", + help=("Use cached md5s. This can dramatically improve the uploading " + "times, as md5 checking can be very time consuming. Caching md5s " + "allows subsequent uploads (of the same files) to skip the " + "md5 generation process (if the files appear to not have been " + "modified since the last time they were submitted). The cache is " + "stored locally and uses a file's modification time and file size " + "to intelligently guess whether the file has changed. Set this " + "flag to False if there is concern that files may not be getting " + "re-uploaded properly. " + "Note that this flag is only active when --local_upload is True."), + choices=[False, True], + type=cast_to_bool, + default=None) + + submit_parser.add_argument("--output_path", + help="path to copy renders to") + + submit_parser.add_argument("--postcmd", + help="Run this command once the entire job is complete and downloaded") + + submit_parser.add_argument("--priority", + help="Set the priority of the submitted job. Default is 5") + + submit_parser.add_argument("--project", + help='The name of the project to submit the job. This will default to "default"') + + submit_parser.add_argument("--software_package_ids", + help=('The ids for the software packages to use for the submitted ' + 'job, e.g. maya or nuke. 
Packages are identified ' + 'by their ID number, which can be queried from ' + 'Conductor\'s "web api"'), + nargs="*") + + submit_parser.add_argument("--tasks_data", + help=('Specify a task\'s command to execute and its corresponding frames ' + '(if applicable) in a json dictionary format. e.g.\n' + '--tasks_data \"{\\"command\\": \\"Render -s 1 -e 1 /tmp/file.ma\\", ' + '\\"frames\\": \\"1\\"}" \n' + '\nSpecify multiple tasks via multiple json dictionaries, separated by spaces, e.g.\n' + '--tasks_data "{\\"command\\": \\"Render -s 10 -e 12 /tmp/file.ma\\", \\"frames\\": \\"10-12\\"}" ' + '"{\\"command\\": \\"Render -s 10 -e 12 /tmp/file.ma\\", \\"frames\\": \\"10-12\\"}"\n' + '\nNote that the format of this argument must be valid json. This makes for ' + 'piping in more verbose/complex arguments more bearable,\n' + 'Example of reading two tasks\' data from an array within a json file:\n' + 'conductor submit --tasks_data "$(cat /tmp/task_cmds.json | jq -c \'.[1]\')"'), + type=json.loads, + nargs="*") + + submit_parser.add_argument("--job_title", + help=("The title to name the job. This is the title " + "that will be displayed in the web ui")) + + submit_parser.add_argument("--upload_file", + help=("A path to a text file whose contents list paths to upload to " + "conductor. There should be one path per line in the text file. " + "Valid paths are to files, directories (symlinked or not), as " + "well as glob expressions. 
e.g * wildcards accepted")) + + submit_parser.add_argument("--upload_only", + help="Only upload the files, don't start the render", + action='store_true') + + submit_parser.add_argument("--upload_paths", + help="Paths to upload", + nargs="*") + + submit_parser.add_argument("--user", + help="Username to submit as") + + submit_parser.add_argument("--notify", + help="Who to notify when job is complete", + nargs="+") + + submit_parser.add_argument("--slack_notify", + help="Slack recipients to notify when job is complete", + nargs="+") + + submit_parser.add_argument("--log_level", + choices=loggeria.LEVELS, + help="The logging level to display") + + submit_parser.add_argument("--log_dir", + help=("When provided, will write a log file to " + "the provided directory. This will be a " + "rotating log, creating a new log file " + "everyday, while storing the last 7 days " + "of logs")) + + submit_parser.add_argument("--max_instances", + help="Maximum number of instances for this job") + + submit_parser.set_defaults(func=run_submit) + + ############################# + # UPLOADER PARSER + ############################# + uploader_parser_desciption = "parse uploader arguments" + uploader_parser_help = "" + + uploader_parser = subparsers.add_parser("uploader", parents=[parent_parser], + help=uploader_parser_help, + description=uploader_parser_desciption, + formatter_class=argparse.RawTextHelpFormatter) + + uploader_parser.add_argument("--location", + help=('An optional string to indicate which location this uploader ' + 'executable should register as. This option is only relevant ' + 'for conductor accounts which submits jobs from different locations ' + '(e.g. differing geographic locations or office locations that have differing file systems).' + ' Typically each location would have its own conductor uploader process running. This location ' + 'string allows each uploader to target specific upload jobs (files to upload) that are appropriate ' + 'for it. 
This is potentially useful as each location may have differing file systems ' + 'available to it (e.g. uploader1 has /filesystem1 available to it, but uploader2 only ' + 'has /filesystem2 available to it). In this case uploader1 should only upload files ' + 'that exist on /filesystem1 and uploader2 should only upload files that exist on /filesystem2. ' + 'This is achieved by including a location argument (such as "location1" or "location2") ' + 'when submitting jobs, as well as when launching this uploader command.')) + + uploader_parser.add_argument("--log_level", + choices=loggeria.LEVELS, + help="The logging level to display") + + uploader_parser.add_argument("--log_dir", + help=("When provided, will write a log file to " + "the provided directory. This will be a " + "rotating log, creating a new log file " + "everyday, while storing the last 7 days " + "of logs")) + + uploader_parser.add_argument("--thread_count", + type=int, + default=conductor.CONFIG.get("thread_count"), + help=('The number of threads that should upload simultaneously')) + + uploader_parser.add_argument("--alt", + help=('Run an alternative version of the uploader'), + action='store_true') + + uploader_parser.set_defaults(func=run_uploader) + + ############################# + # DOWNLOADER PARSER + ############################# + + downloader_parser_desciption = "parse downloader arguments" + downloader_parser_help = "" + + downloader_parser = subparsers.add_parser("downloader", parents=[parent_parser], + help=downloader_parser_help, + description=downloader_parser_desciption, + formatter_class=argparse.RawTextHelpFormatter) + + downloader_parser.add_argument("--job_id", + help=("The job id(s) to download. 
When specified " + "will only download those jobs and terminate " + "afterwards"), + action='append') + + downloader_parser.add_argument("--task_id", + help="Manually download output for this task") + + downloader_parser.add_argument("--output", + help="Override for the output directory") + + downloader_parser.add_argument("--location", + default=conductor.CONFIG.get("location"), + help=('An optional string to indicate which location this downloader ' + 'executable should register as. This option is only relevant for ' + 'conductor accounts which submits jobs from different locations ' + '(e.g. differing geographic locations or office locations that ' + 'have differing file systems). Typically each location would ' + 'have its own conductor downloader process running. This location ' + 'argument allows each downloader to target specific jobs (to ' + 'download upon job-completion) that match its appropriate location. ' + 'Essentially this allows the location of which a job was submitted ' + 'from to also be the destination in which to deliver completed ' + 'renders to (which would typically be the desired behavior).')) + + downloader_parser.add_argument("--project", + default=conductor.CONFIG.get("project"), + help=('An optional string to indicate which project that this downloader executable should register as.')) + + downloader_parser.add_argument("--log_level", + choices=loggeria.LEVELS, + default=conductor.CONFIG.get("log_level"), + help="The logging level to display") + + downloader_parser.add_argument("--log_dir", + default=conductor.CONFIG.get("log_dir"), + help=("When provided, will write a log file to " + "the provided directory. 
This will be a " + "rotating log, creating a new log file " + "everyday, while storing the last 7 days " + "of logs")) + + downloader_parser.add_argument("--thread_count", + type=int, + default=conductor.CONFIG.get("thread_count"), + help=('The number of threads that should download simultaneously')) + + downloader_parser.add_argument("--alt", + help=('Run an alternative version of the downloader'), + action='store_true') + + downloader_parser.set_defaults(func=run_downloader) + + return parser.parse_args() + + +def cast_to_bool(string): + ''' + Ensure that the argument provided is either "True" or "False" (or "true" or + "false") and convert that argument to an actual bool value (True or False). + ''' + string_lower = string.lower() + if string_lower == "true": + return True + elif string_lower == "false": + return False + raise argparse.ArgumentTypeError('Argument must be True or False') + + +class ValidateArgEnv(argparse.Action): + ''' + Validate the "env" argument that a user has provided, ensuring that it + adheres to proper syntax, and ultimately produces a dictionary object + consisting of desired environment variable names and their corresponding + values. + + Example: + {"PYTHONPATH": "/tmp/files1:$PYTHONPATH", + "PATH": "/usr/bin:$PATH"} + + ''' + + # Regular expression to validate argument construction <var_name>=<var_value> + # Example: PYTHONPATH=/tmp:$PYTHONPATH + rx_env_var = r'\s*(?P<var_name>[^\s=]+)\s*=(?P<var_value>[^=]+)$' + + def __call__(self, parser, namespace, values, option_string): + ''' + Read each variable definition and validate that it's constructed + properly. 
Populate a dictionary where the keys are the variable names + and the values are the variable values + ''' + env_variables = {} + for variable_definition in values: + re_result = re.match(self.rx_env_var, variable_definition) + if not re_result: + msg = ("Invalid variable assignment: '%s'\n" + "\tUse construct: <var_name>=<var_value> e.g " + "PYTHONPATH=/tmp:$PYTHONPATH" % variable_definition) + raise argparse.ArgumentError(self, msg) + + var_name = re_result.group("var_name").strip() + var_value = re_result.group("var_value").strip() + env_variables[var_name] = var_value + + setattr(namespace, self.dest, env_variables) + + +class CastLogLevelName(argparse.Action): + ''' + Cast a log level name to its constant value, e.g. + "INFO" --> logging.INFO + ''' + + def __call__(self, parser, namespace, values, option_string): + setattr(namespace, self.dest, loggeria.LEVEL_MAP[values]) + + +def run_submit(args): + conductor_submit.run_submit(args) + + +def run_uploader(args): + args_dict = vars(args) + if sys.platform == "win32": + return uploader.run_uploader(args) + if args_dict.get("alt"): + uploader_v2.run_uploader(args) + else: + uploader.run_uploader(args) + + +def run_downloader(args): + ''' + Convert the argparse Namespace object to a dictionary and run the downloader + with the given args. + ''' + # Convert Namespace args object to args dict + args_dict = vars(args) + + # Code switcher between new downloader and old downloader + # HARD set windows users to old downloader + if sys.platform == "win32": + return downloader.run_downloader(args_dict) + # return downloader2.run_downloader(args_dict) + if args_dict.get("task_id") and not args_dict.get("job_id"): + raise argparse.ArgumentTypeError('Must supply a job_id with task_id.') + + if args_dict.get("job_id") or args_dict.get("alt"): + return downloader2.run_downloader(args_dict) + else: + return downloader.run_downloader(args_dict) + + +if __name__ == '__main__': + args = parse_args() + args.func(args)