
Commit

Added output_dir argument in export.py (ssd, lpr, vehicle_attribute) (#91)

* Added --output_dir argument in export.py for ssd, lpr, vehicle_attribute

* Updated with output_dir argument

* Removed whitespace
mahinlma authored and AlexanderDokuchaev committed Jun 24, 2019
1 parent 552085e commit 650c253
Showing 6 changed files with 13 additions and 8 deletions.
3 changes: 2 additions & 1 deletion tensorflow_toolkit/lpr/README.md
@@ -129,9 +129,10 @@ To run the model via OpenVINO one has to freeze TensorFlow graph and
then convert it to OpenVINO Internal Representation (IR) using Model Optimizer:

```Bash
python3 tools/export.py --data_type FP32 chinese_lp/config.py
python3 tools/export.py --data_type FP32 --output_dir <export_path> chinese_lp/config.py
```

**default export path**:
`lpr/model/export_<step>/frozen_graph` - path to frozen graph
`lpr/model/export_<step>/IR/<data_type>` - path to converted model in IR format

3 changes: 2 additions & 1 deletion tensorflow_toolkit/lpr/tools/export.py
@@ -29,6 +29,7 @@
def parse_args():
parser = argparse.ArgumentParser(description='Export model in IE format')
parser.add_argument('--data_type', default='FP32', choices=['FP32', 'FP16'], help='Data type of IR')
parser.add_argument('--output_dir', default=None, help='Output Directory')
parser.add_argument('--checkpoint', default=None, help='Default: latest')
parser.add_argument('path_to_config', help='Path to a config.py')
return parser.parse_args()
@@ -71,7 +72,7 @@ def main(_):
raise FileNotFoundError(str(checkpoint))

step = checkpoint.split('.')[-2].split('-')[-1]
output_dir = os.path.join(config.model_dir, 'export_{}'.format(step))
output_dir = args.output_dir if args.output_dir else os.path.join(config.model_dir, 'export_{}'.format(step))

# Freezing graph
frozen_dir = os.path.join(output_dir, 'frozen_graph')
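
For reference, a minimal standalone sketch of the `--output_dir` fallback introduced in this hunk. The `MODEL_DIR` and `STEP` values below are hypothetical stand-ins for `config.model_dir` and the step parsed from the checkpoint name, and the positional `path_to_config` argument is omitted for brevity:

```python
import argparse
import os


def parse_args(argv=None):
    # Same flags as in the diff; path_to_config is left out of this sketch.
    parser = argparse.ArgumentParser(description='Export model in IE format')
    parser.add_argument('--data_type', default='FP32', choices=['FP32', 'FP16'], help='Data type of IR')
    parser.add_argument('--output_dir', default=None, help='Output Directory')
    parser.add_argument('--checkpoint', default=None, help='Default: latest')
    return parser.parse_args(argv)


MODEL_DIR = 'lpr/model'  # hypothetical stand-in for config.model_dir
STEP = '25000'           # hypothetical stand-in for the parsed checkpoint step

# With --output_dir, the user-supplied path wins.
args = parse_args(['--output_dir', '/tmp/lpr_export'])
print(args.output_dir if args.output_dir else os.path.join(MODEL_DIR, 'export_{}'.format(STEP)))
# -> /tmp/lpr_export

# Without it, the pre-existing default <model_dir>/export_<step> is kept.
args = parse_args([])
print(args.output_dir if args.output_dir else os.path.join(MODEL_DIR, 'export_{}'.format(STEP)))
# -> lpr/model/export_25000
```
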
5 changes: 3 additions & 2 deletions tensorflow_toolkit/ssd_detector/README.md
@@ -148,10 +148,11 @@ To run the model via OpenVINO one has to freeze TensorFlow graph and
then convert it to OpenVINO Internal Representation (IR) using Model Optimizer:

```
python3 tools/export.py --data_type FP32 vlp/config.py
python3 tools/export.py --data_type FP32 --output_dir <export_path> vlp/config.py
```

As a result, you'll find three new artifacts:
As a result, you'll find three new artifacts:
**default export path**
- `vlp/model/export_<step>/frozen_graph/` - path to frozen graph
- `vlp/model/export_<step>/IR/<data_type>/` - path to converted model in IR format

3 changes: 2 additions & 1 deletion tensorflow_toolkit/ssd_detector/tools/export.py
@@ -28,6 +28,7 @@ def parse_args():
parser = argparse.ArgumentParser(description='Export model in IE format')
parser.add_argument('--model_name', default='vlp')
parser.add_argument('--data_type', default='FP32', choices=['FP32', 'FP16'], help='Data type of IR')
parser.add_argument('--output_dir', default=None, help='Output Directory')
parser.add_argument('--checkpoint', default=None, help='Default: latest')
parser.add_argument('path_to_config', help='Path to a config.py')
return parser.parse_args()
@@ -88,7 +89,7 @@ def main(_):
raise FileNotFoundError(str(checkpoint))

step = checkpoint.split('-')[-1]
output_dir = os.path.join(config.MODEL_DIR, 'export_{}'.format(step))
output_dir = args.output_dir if args.output_dir else os.path.join(config.MODEL_DIR, 'export_{}'.format(step))

# Freezing graph
frozen_dir = os.path.join(output_dir, 'frozen_graph')
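
One detail of the `args.output_dir if args.output_dir else ...` form used in all three scripts: the fallback applies not only when the flag is omitted (`None`) but also when an empty string is passed, because the test is truthiness rather than `is not None`. A small illustration with hypothetical values:

```python
import os

MODEL_DIR = 'vlp/model'  # hypothetical stand-in for config.MODEL_DIR
STEP = '110000'          # hypothetical step value

for supplied in (None, '', '/data/exports/vlp'):
    output_dir = supplied if supplied else os.path.join(MODEL_DIR, 'export_{}'.format(STEP))
    print(repr(supplied), '->', output_dir)
# None -> vlp/model/export_110000
# ''   -> vlp/model/export_110000   (empty string is falsy, so the default wins)
# '/data/exports/vlp' -> /data/exports/vlp
```
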
4 changes: 2 additions & 2 deletions tensorflow_toolkit/vehicle_attributes/README.md
@@ -122,10 +122,10 @@ To run the model via OpenVINO one has to freeze TensorFlow graph and
then convert it to OpenVINO Internal Representation (IR) using Model Optimizer:

```Bash
python3 tools/export.py --data_type FP32 cars_100/config.py
python3 tools/export.py --data_type FP32 --output_dir <export_path> cars_100/config.py
```

As a result, you'll find three new artifacts:
**default export path**
- `lpr/model/export_<step>/frozen_graph/` - path to frozen graph
- `lpr/model/export_<step>/IR/<data_type>/` - path to converted model in IR format

3 changes: 2 additions & 1 deletion tensorflow_toolkit/vehicle_attributes/tools/export.py
@@ -28,6 +28,7 @@ def parse_args():
parser.add_argument('--mo', default='mo.py', help="Path to model optimizer 'mo.py' script")
parser.add_argument('--mo_config', default='cars_100/mo.yaml', help="Path config for model optimizer")
parser.add_argument('--data_type', default='FP32', choices=['FP32', 'FP16'], help='Data type of IR')
parser.add_argument('--output_dir', default=None, help='Output Directory')
parser.add_argument('--checkpoint', default=None, help='Default: latest')
parser.add_argument('path_to_config', help='Path to a config.py')
return parser.parse_args()
@@ -59,7 +60,7 @@ def main(_):
raise FileNotFoundError(str(checkpoint))

step = checkpoint.split('.')[-1].split('-')[-1]
output_dir = os.path.join(config.model_dir, 'export_{}'.format(step))
output_dir = args.output_dir if args.output_dir else os.path.join(config.model_dir, 'export_{}'.format(step))

# Freezing graph
frozen_dir = os.path.join(output_dir, 'frozen_graph')
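
Taken together with the `frozen_graph` join above and the `IR/<data_type>` layout mentioned in the READMEs, the resolved `output_dir` is the root for both exported artifacts. A small sketch of that layout; the `artifact_paths` helper is not part of the repository and the path values are illustrative:

```python
import os


def artifact_paths(output_dir, data_type='FP32'):
    # Mirrors the layout described in the READMEs: frozen graph plus IR per data type.
    frozen_dir = os.path.join(output_dir, 'frozen_graph')
    ir_dir = os.path.join(output_dir, 'IR', data_type)
    return frozen_dir, ir_dir


# Default destination (no --output_dir): <model_dir>/export_<step>/...
print(artifact_paths(os.path.join('vehicle_attributes/model', 'export_25000')))

# Custom destination supplied via --output_dir:
print(artifact_paths('/tmp/vehicle_attributes_export', data_type='FP16'))
```
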
