From a24b1ad195a4417ca20bc6e4d80c35fb13804750 Mon Sep 17 00:00:00 2001 From: "Vafin, Maxim" Date: Wed, 17 Nov 2021 14:09:09 +0300 Subject: [PATCH] Temporarily disable new FP16 generation --- model-optimizer/mo/main.py | 3 ++- model-optimizer/mo/pipeline/common.py | 4 +++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/model-optimizer/mo/main.py b/model-optimizer/mo/main.py index e8831184125c50..7b1ba2d47a0fa1 100644 --- a/model-optimizer/mo/main.py +++ b/model-optimizer/mo/main.py @@ -205,7 +205,8 @@ def raise_ie_not_found(): except Exception as e: raise_ie_not_found() - if 'data_type' in argv and argv.data_type in ['FP16', 'half']: + # temporarily disable new FP16 generation + if False and 'data_type' in argv and argv.data_type in ['FP16', 'half']: argv.data_type = 'FP32' argv.compress_fp16 = True else: diff --git a/model-optimizer/mo/pipeline/common.py b/model-optimizer/mo/pipeline/common.py index 319b99e9d57032..c8c946d96b77e8 100644 --- a/model-optimizer/mo/pipeline/common.py +++ b/model-optimizer/mo/pipeline/common.py @@ -180,7 +180,9 @@ def prepare_emit_ir(graph: Graph, data_type: str, output_dir: str, output_model_ meta_info = {} graph.strict_mode = False - if not used_by_ir_reader: + # temporarily disable new FP16 generation + # if not used_by_ir_reader: + if True: # convert Parameter data types convert_data_type.convert_parameters_data_type(graph, data_type) # convert blobs (usually weights and biases)