diff --git a/tensorflow_toolkit/lpr/README.md b/tensorflow_toolkit/lpr/README.md
index 6d41634d9f..3476d0db7c 100644
--- a/tensorflow_toolkit/lpr/README.md
+++ b/tensorflow_toolkit/lpr/README.md
@@ -129,9 +129,10 @@
 To run the model via OpenVINO one has to freeze TensorFlow graph and
 then convert it to OpenVINO Internal Representation (IR) using Model Optimizer:
 
 ```Bash
-python3 tools/export.py --data_type FP32 chinese_lp/config.py
+python3 tools/export.py --data_type FP32 --output_dir <output_dir> chinese_lp/config.py
 ```
 
+**Default export path:**
 `lpr/model/export_/frozen_graph` - path to frozen graph
 `lpr/model/export_/IR/` - path to converted model in IR format
diff --git a/tensorflow_toolkit/lpr/tools/export.py b/tensorflow_toolkit/lpr/tools/export.py
index bc7edf1304..daf931f6b1 100644
--- a/tensorflow_toolkit/lpr/tools/export.py
+++ b/tensorflow_toolkit/lpr/tools/export.py
@@ -29,6 +29,7 @@ def parse_args():
     parser = argparse.ArgumentParser(description='Export model in IE format')
     parser.add_argument('--data_type', default='FP32', choices=['FP32', 'FP16'],
                         help='Data type of IR')
+    parser.add_argument('--output_dir', default=None, help='Output directory')
     parser.add_argument('--checkpoint', default=None, help='Default: latest')
     parser.add_argument('path_to_config', help='Path to a config.py')
     return parser.parse_args()
@@ -71,7 +72,7 @@ def main(_):
         raise FileNotFoundError(str(checkpoint))
 
     step = checkpoint.split('.')[-2].split('-')[-1]
-    output_dir = os.path.join(config.model_dir, 'export_{}'.format(step))
+    output_dir = args.output_dir if args.output_dir else os.path.join(config.model_dir, 'export_{}'.format(step))
 
     # Freezing graph
     frozen_dir = os.path.join(output_dir, 'frozen_graph')
diff --git a/tensorflow_toolkit/ssd_detector/README.md b/tensorflow_toolkit/ssd_detector/README.md
index cc4ab0eede..f8cb604334 100644
--- a/tensorflow_toolkit/ssd_detector/README.md
+++ b/tensorflow_toolkit/ssd_detector/README.md
@@ -148,10 +148,11 @@
 To run the model via OpenVINO one has to freeze TensorFlow graph and
 then convert it to OpenVINO Internal Representation (IR) using Model Optimizer:
 
 ```
-python3 tools/export.py --data_type FP32 vlp/config.py
+python3 tools/export.py --data_type FP32 --output_dir <output_dir> vlp/config.py
 ```
 
-As a result, you'll find three new artifacts:
+As a result, you'll find three new artifacts at the default export path:
+
 - `vlp/model/export_/frozen_graph/` - path to frozen graph
 - `vlp/model/export_/IR//` - path to converted model in IR format
diff --git a/tensorflow_toolkit/ssd_detector/tools/export.py b/tensorflow_toolkit/ssd_detector/tools/export.py
index d8cf5d571e..a8418d2d6d 100644
--- a/tensorflow_toolkit/ssd_detector/tools/export.py
+++ b/tensorflow_toolkit/ssd_detector/tools/export.py
@@ -28,6 +28,7 @@ def parse_args():
     parser = argparse.ArgumentParser(description='Export model in IE format')
     parser.add_argument('--model_name', default='vlp')
     parser.add_argument('--data_type', default='FP32', choices=['FP32', 'FP16'], help='Data type of IR')
+    parser.add_argument('--output_dir', default=None, help='Output directory')
     parser.add_argument('--checkpoint', default=None, help='Default: latest')
     parser.add_argument('path_to_config', help='Path to a config.py')
     return parser.parse_args()
@@ -88,7 +89,7 @@ def main(_):
         raise FileNotFoundError(str(checkpoint))
 
     step = checkpoint.split('-')[-1]
-    output_dir = os.path.join(config.MODEL_DIR, 'export_{}'.format(step))
+    output_dir = args.output_dir if args.output_dir else os.path.join(config.MODEL_DIR, 'export_{}'.format(step))
 
     # Freezing graph
     frozen_dir = os.path.join(output_dir, 'frozen_graph')
diff --git a/tensorflow_toolkit/vehicle_attributes/README.md b/tensorflow_toolkit/vehicle_attributes/README.md
index 24c9cb5c1e..6712ca8e6b 100644
--- a/tensorflow_toolkit/vehicle_attributes/README.md
+++ b/tensorflow_toolkit/vehicle_attributes/README.md
@@ -122,10 +122,10 @@
 To run the model via OpenVINO one has to freeze TensorFlow graph and
 then convert it to OpenVINO Internal Representation (IR) using Model Optimizer:
 
 ```Bash
-python3 tools/export.py --data_type FP32 cars_100/config.py
+python3 tools/export.py --data_type FP32 --output_dir <output_dir> cars_100/config.py
 ```
 
-As a result, you'll find three new artifacts:
+As a result, you'll find three new artifacts at the default export path:
 - `lpr/model/export_/frozen_graph/` - path to frozen graph
 - `lpr/model/export_/IR//` - path to converted model in IR format
diff --git a/tensorflow_toolkit/vehicle_attributes/tools/export.py b/tensorflow_toolkit/vehicle_attributes/tools/export.py
index c4b8d4a10f..9535cb1723 100644
--- a/tensorflow_toolkit/vehicle_attributes/tools/export.py
+++ b/tensorflow_toolkit/vehicle_attributes/tools/export.py
@@ -28,6 +28,7 @@ def parse_args():
     parser.add_argument('--mo', default='mo.py', help="Path to model optimizer 'mo.py' script")
     parser.add_argument('--mo_config', default='cars_100/mo.yaml', help="Path config for model optimizer")
     parser.add_argument('--data_type', default='FP32', choices=['FP32', 'FP16'], help='Data type of IR')
+    parser.add_argument('--output_dir', default=None, help='Output directory')
     parser.add_argument('--checkpoint', default=None, help='Default: latest')
     parser.add_argument('path_to_config', help='Path to a config.py')
     return parser.parse_args()
@@ -59,7 +60,7 @@ def main(_):
         raise FileNotFoundError(str(checkpoint))
 
     step = checkpoint.split('.')[-1].split('-')[-1]
-    output_dir = os.path.join(config.model_dir, 'export_{}'.format(step))
+    output_dir = args.output_dir if args.output_dir else os.path.join(config.model_dir, 'export_{}'.format(step))
 
     # Freezing graph
     frozen_dir = os.path.join(output_dir, 'frozen_graph')
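For reference, a minimal runnable sketch of the fallback pattern this patch adds to all three `export.py` scripts: an explicit `--output_dir` overrides the step-derived default, otherwise the pre-existing `export_<step>` layout is kept. The `resolve_output_dir` helper and the demo values below are illustrative only, not part of the real scripts:

```python
import argparse
import os


def parse_args():
    parser = argparse.ArgumentParser(description='Export model in IE format')
    parser.add_argument('--data_type', default='FP32', choices=['FP32', 'FP16'],
                        help='Data type of IR')
    parser.add_argument('--output_dir', default=None,
                        help='Optional override for the export directory')
    return parser.parse_args()


def resolve_output_dir(output_dir, model_dir, step):
    # An explicit --output_dir wins; otherwise fall back to the per-step
    # default the scripts used before this change: <model_dir>/export_<step>.
    return output_dir if output_dir else os.path.join(model_dir, 'export_{}'.format(step))


if __name__ == '__main__':
    args = parse_args()
    # 'model' and '42' stand in for config.model_dir and the
    # checkpoint-derived step number in the real scripts.
    print(resolve_output_dir(args.output_dir, 'model', '42'))
```

Run without the flag this prints `model/export_42`; with `--output_dir /tmp/ir` it prints `/tmp/ir` — the same precedence the patched scripts apply before freezing the graph and invoking Model Optimizer.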