From 7bb92d0d05093b18695b192da7ce860ad80d623e Mon Sep 17 00:00:00 2001
From: kaiyaointel
Date: Tue, 8 Nov 2022 14:33:35 +0800
Subject: [PATCH] Neural Coder enable Python Launcher usage (#1443)

* Create __main__.py

* Update __main__.py

* Create PythonLauncher.MD

* Update and rename PythonLauncher.MD to PythonLauncher.md

* Update PythonLauncher.md

* Update PythonLauncher.md

* Update PythonLauncher.md

* Update PythonLauncher.md

* Update PythonLauncher.md

* Update PythonLauncher.md

* Update PythonLauncher.md

* Update PythonLauncher.md

* Update PythonLauncher.md

* Update PythonLauncher.md

* Update PythonLauncher.md

* Update PythonLauncher.md

* Update PythonAPI.md

* Update PythonLauncher.md

* Update PythonLauncher.md

* Update PythonLauncher.md

* Update __main__.py

* Update PythonLauncher.md
---
 neural_coder/__main__.py            | 81 +++++++++++++++++++++++++++++
 neural_coder/docs/PythonAPI.md      |  2 +-
 neural_coder/docs/PythonLauncher.md | 33 ++++++++++++
 3 files changed, 115 insertions(+), 1 deletion(-)
 create mode 100644 neural_coder/__main__.py
 create mode 100644 neural_coder/docs/PythonLauncher.md

diff --git a/neural_coder/__main__.py b/neural_coder/__main__.py
new file mode 100644
index 00000000000..f9011e91f8b
--- /dev/null
+++ b/neural_coder/__main__.py
@@ -0,0 +1,81 @@
+# Copyright (c) 2022 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import subprocess
+import sys
+
+from argparse import ArgumentParser, REMAINDER
+
+def parse_args():
+    """
+    Helper function parsing the command line options
+    @retval ArgumentParser
+    """
+    parser = ArgumentParser(description="command-launch a Python script with quantization auto-enabled")
+
+    parser.add_argument("--opt", type=str, default="",
+                        help="optimization feature to enable")
+
+    parser.add_argument("--strategy", type=str, default="static",
+                        help="quantization strategy")
+
+    parser.add_argument('--config', type=str, default="",
+                        help='quantization configuration file path')
+
+    # positional
+    parser.add_argument("script", type=str,
+                        help="The full path to the script to be launched, "
+                             "followed by all the arguments for the script")
+
+    # script args
+    parser.add_argument('script_args', nargs=REMAINDER)
+    return parser.parse_args()
+
+args = parse_args()
+
+# copy user entry script (main.py -> main_optimized.py)
+import shutil
+script_copied = args.script[:-3] + "_optimized.py"
+shutil.copy(args.script, script_copied)
+
+# optimize on copied script with Neural Coder
+from neural_coder import enable
+if args.opt == "":
+    if args.strategy == "static":
+        features = ["pytorch_inc_static_quant_fx"]
+    elif args.strategy == "static_ipex":
+        features = ["pytorch_inc_static_quant_ipex"]
+    elif args.strategy == "dynamic":
+        features = ["pytorch_inc_dynamic_quant"]
+else:
+    features = [args.opt]
+enable(
+    code=script_copied,
+    features=features,
+    overwrite=True,
+)
+
+# execute on copied script, which has already been optimized
+cmd = []
+
+cmd.append(sys.executable)  # "/xxx/xxx/python"
+cmd.append("-u")
+cmd.append(script_copied)
+cmd.extend(args.script_args)
+
+cmd = " ".join(cmd)  # convert the list to a string
+
+process = subprocess.Popen(cmd, env=os.environ, shell=True)  # nosec
+process.wait()
diff --git a/neural_coder/docs/PythonAPI.md b/neural_coder/docs/PythonAPI.md
index ebeb31e29a6..dfc7567fe43 100644
--- a/neural_coder/docs/PythonAPI.md
+++ b/neural_coder/docs/PythonAPI.md
@@ -1,7 +1,7 @@
 Neural Coder as Python API
 ===========================
 
-We currently provide 3 main user-facing APIs for Neural Coder: enable, bench and superbench.
+Neural Coder can be used as a Python API. We currently provide 3 main user-facing APIs for Neural Coder: enable, bench and superbench.
 
 #### Enable
 Users can use ```enable()``` to enable specific features into DL scripts:
diff --git a/neural_coder/docs/PythonLauncher.md b/neural_coder/docs/PythonLauncher.md
new file mode 100644
index 00000000000..38e3bd5fae7
--- /dev/null
+++ b/neural_coder/docs/PythonLauncher.md
@@ -0,0 +1,33 @@
+Python Launcher
+===========================
+
+Neural Coder can be used as a Python **Launcher**. Users can run their Python model code as it is and have Deep Learning optimizations automatically enabled by using Neural Coder's inline Python **Launcher** design.
+
+## Quick-Start
+
+Example: Let's say you are running an NLP model using ```run_glue.py``` from the HuggingFace transformers [examples](https://github.com/huggingface/transformers/blob/v4.21-release/examples/pytorch/text-classification/run_glue.py). You would generally run it with a Python command line like this:
+```bash
+python run_glue.py --model_name_or_path bert-base-cased --task_name mrpc --do_eval --output_dir result
+```
+
+With Neural Coder's **Launcher**, users can easily apply Deep Learning optimizations (default: INT8 static quantization by Intel® Neural Compressor) by simply adding an inline prefix
+```bash
+-m neural_coder
+```
+to the Python command line, and everything else remains the same:
+```bash
+python -m neural_coder run_glue.py --model_name_or_path bert-base-cased --task_name mrpc --do_eval --output_dir result
+```
+
+This will run ```run_glue.py``` with the Deep Learning optimization automatically enabled, while everything else (e.g. your input arguments for the code itself) remains the same as in the original command. You can also check out the optimized code ```run_glue_optimized.py```, auto-generated by the **Launcher** in the same folder, if you want to see how the optimization is enabled in the code.
+
+Note: Any modification to the optimized code ```run_glue_optimized.py``` will be overwritten every time you run the Neural Coder **Launcher** on ```run_glue.py```, so please make modifications to the original code ```run_glue.py``` instead of the optimized one. The optimized code is saved only for your reference.
+
+## Launcher Arguments (Optional)
+
+Users can specify which Deep Learning optimization they want to conduct using the ```--opt``` argument. The list of supported Deep Learning optimization features can be found [here](SupportMatrix.md).
+
+Note that when optimizing specifically with INT8 quantization by Intel® Neural Compressor, the ```--strategy``` argument can be set to ```static``` (default), ```static_ipex```, or ```dynamic```. For example, to run INT8 dynamic quantization by Intel® Neural Compressor instead of the default static quantization:
+```bash
+python -m neural_coder --strategy dynamic run_glue.py --model_name_or_path bert-base-cased --task_name mrpc --do_eval --output_dir result
+```
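
For readers who want to see what the Launcher described in `PythonLauncher.md` does under the hood, below is a minimal sketch of the flow implemented by the `__main__.py` added in this patch. It is illustrative only: the script name and its arguments are taken from the Quick-Start example above, and it uses `subprocess.run` with an argument list for brevity, whereas the patch itself builds a shell command string and launches it with `subprocess.Popen`.

```python
# Minimal sketch of what `python -m neural_coder run_glue.py ...` does,
# mirroring neural_coder/__main__.py from this patch (illustrative only).
import shutil
import subprocess
import sys

from neural_coder import enable

# 1. Copy the user script so the original is never modified
#    (run_glue.py -> run_glue_optimized.py).
shutil.copy("run_glue.py", "run_glue_optimized.py")

# 2. Patch the copy in place. The default "--strategy static" maps to
#    "pytorch_inc_static_quant_fx"; "--strategy dynamic" would map to
#    "pytorch_inc_dynamic_quant" instead.
enable(
    code="run_glue_optimized.py",
    features=["pytorch_inc_static_quant_fx"],
    overwrite=True,
)

# 3. Run the optimized copy with the original script arguments.
subprocess.run(
    [sys.executable, "-u", "run_glue_optimized.py",
     "--model_name_or_path", "bert-base-cased",
     "--task_name", "mrpc", "--do_eval", "--output_dir", "result"],
    check=True,
)
```

Running the Launcher a second time simply repeats these steps, which is why any edits to ```run_glue_optimized.py``` are overwritten, as noted in `PythonLauncher.md`.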