Skip to content

Commit

Permalink
Update Neural Solution example & fix grpc port issue (#1232)
Browse files Browse the repository at this point in the history
Signed-off-by: Kaihui-intel <kaihui.tang@intel.com>
  • Loading branch information
Kaihui-intel committed Sep 7, 2023
1 parent 6733681 commit 5288684
Show file tree
Hide file tree
Showing 6 changed files with 22 additions and 9 deletions.
@@ -1,11 +1,11 @@
{
"script_url": "tf_example1",
"script_url": "custom_models_optimized/tf_example1",
"optimized": "True",
"arguments": [
"--dataset_location=dataset --model_path=model"
],
"approach": "static",
"requirements": [
"requirements": ["tensorflow"
],
"workers": 1
}
}
@@ -1,11 +1,11 @@
{
"script_url": "tf_example1_dis",
"script_url": "custom_models_optimized/tf_example1",
"optimized": "True",
"arguments": [
"--dataset_location=dataset --model_path=model"
],
"approach": "static",
"requirements": [
"requirements": ["tensorflow"
],
"workers": 3
}
}
Expand Up @@ -15,7 +15,7 @@
import tensorflow as tf

from neural_compressor import Metric
from neural_compressor.config import PostTrainingQuantConfig
from neural_compressor.config import PostTrainingQuantConfig, TuningCriterion
from neural_compressor.data import BilinearImagenetTransform, ComposeTransform, DefaultDataLoader, TensorflowImageRecord
from neural_compressor.quantization import fit

Expand All @@ -42,7 +42,8 @@
def main():
"""Implement running function."""
top1 = Metric(name="topk", k=1)
config = PostTrainingQuantConfig(calibration_sampling_size=[20])
tuning_criterion = TuningCriterion(strategy="basic")
config = PostTrainingQuantConfig(calibration_sampling_size=[20], quant_level=1, tuning_criterion=tuning_criterion)
model_path = FLAGS.model_path + "/mobilenet_v1_1.0_224_frozen.pb"
q_model = fit(
model=model_path,
Expand Down
2 changes: 1 addition & 1 deletion neural_solution/examples/hf_models_grpc/README.md
Expand Up @@ -96,7 +96,7 @@ optional arguments:
- Query the task status and result according to the `task_id`.

``` shell
[user@server hf_models_grpc]$ python client.py query --task_id="d3e10a49326449fb9d0d62f2bfc1cb43"
[user@server hf_models_grpc]$ python client.py --task_monitor_port=22222 --result_monitor_port=33333 --grpc_api_port=8001 query --task_id="d3e10a49326449fb9d0d62f2bfc1cb43"


```
Expand Down
7 changes: 7 additions & 0 deletions neural_solution/frontend/gRPC/client.py
Expand Up @@ -126,7 +126,14 @@ def run_query_task_status(args):
query_action_parser.set_defaults(func=run_query_task_result)
query_action_parser.add_argument("--task_id", type=str, default=None, help="Query task by task id.")

parser.add_argument("--grpc_api_port", type=str, default=None, help="grpc server port.")
parser.add_argument("--result_monitor_port", type=str, default=None, help="result monitor port.")
parser.add_argument("--task_monitor_port", type=str, default=None, help="task monitor port.")

args = parser.parse_args()
config.grpc_api_port = args.grpc_api_port
config.result_monitor_port = args.result_monitor_port
config.task_monitor_port = args.task_monitor_port
args.func(args)

# for test:
Expand Down
5 changes: 5 additions & 0 deletions neural_solution/frontend/gRPC/server.py
Expand Up @@ -145,4 +145,9 @@ def parse_arguments():
logger.info(args.workspace)
config.workspace = args.workspace
config.grpc_api_port = args.grpc_api_port
config.result_monitor_port = args.result_monitor_port
config.task_monitor_port = args.task_monitor_port
# initialize the task submitter
task_submitter.task_monitor_port = config.task_monitor_port
task_submitter.result_monitor_port = config.result_monitor_port
serve()

0 comments on commit 5288684

Please sign in to comment.