INC Bench update (#1366)
bmyrcha committed Oct 20, 2022
1 parent 23c585e commit fe9923d
Showing 106 changed files with 1,986 additions and 648 deletions.
1 change: 1 addition & 0 deletions conda_meta/full/meta.yaml
@@ -41,6 +41,7 @@ requirements:
- sqlalchemy ==1.4.27
- alembic ==1.7.7
- cython
- pywin32 # [win]
test:
imports:
- neural_compressor
@@ -15,10 +15,12 @@
"""Configuration type parser."""
import json
from collections.abc import Iterable
from copy import deepcopy
from typing import Any, Dict, List, Type, Union

from neural_compressor.ux.utils.exceptions import ClientErrorException
from neural_compressor.ux.utils.hw_info import HWInfo
from neural_compressor.ux.utils.logger import log
from neural_compressor.ux.utils.utils import parse_bool_value


@@ -92,8 +94,9 @@ def __init__(self) -> None:
"bool": bool,
}

def parse(self, data: dict) -> dict:
def parse(self, input_data: dict) -> dict:
"""Parse configuration."""
data = deepcopy(input_data)
transforms_data = data.get("transform", None)
if transforms_data is not None:
data.update({"transform": self.parse_transforms(transforms_data)})
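
Working on a `deepcopy` of `input_data` means `parse()` no longer mutates the caller's configuration dict. A minimal stand-in sketch of that pattern (the surrounding parser class is not shown in this hunk, so a plain function and a simplified bool parser are used):

```python
from copy import deepcopy

def parse(input_data: dict) -> dict:
    """Stand-in parser: work on a copy so the caller's dict is never mutated."""
    data = deepcopy(input_data)
    if "tuning" in data:
        # Simplified stand-in for parse_bool_value.
        data["tuning"] = str(data["tuning"]).lower() in ("true", "1")
    return data

raw = {"tuning": "true"}
parsed = parse(raw)
assert raw["tuning"] == "true"   # caller's dict is left unchanged
assert parsed["tuning"] is True
```
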
@@ -110,7 +113,9 @@ def parse(self, data: dict) -> dict:

metric_params = data.get("metric_param", None)
if metric_params and isinstance(metric_params, dict):
data["metric_param"] = self.parse_metric(metric_params)
parsed_metric_params = self.parse_metric(metric_params)

data.update({"metric_param": parsed_metric_params})

if "tuning" in data.keys():
data["tuning"] = parse_bool_value(data["tuning"])
@@ -227,13 +232,15 @@ def parse_metric(self, metric_data: dict) -> dict:
for param_name, param_value in metric_data.items():
if isinstance(param_value, dict):
parsed_data.update({param_name: self.parse_metric(param_value)})
elif isinstance(param_value, str):
elif isinstance(param_value, str) or isinstance(param_value, int):
if param_value == "":
continue
param_type = self.get_param_type("metric", param_name)
if param_type is None:
log.debug("Could not find param type.")
continue
parsed_data.update({param_name: self.parse_value(param_value, param_type)})
parsed_value = self.parse_value(param_value, param_type)
parsed_data.update({param_name: parsed_value})
return parsed_data
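
The widened `str`/`int` check lets numeric metric parameters (for example a top-k value sent as a number rather than a string) reach the type-coercion step instead of being passed through untouched. A rough standalone sketch of that branch, with a hypothetical parameter-type map standing in for `get_param_type`:

```python
from typing import Any, Optional

METRIC_PARAM_TYPES = {"k": int}  # hypothetical map, not the project's definitions

def parse_metric_param(name: str, value: Any) -> Optional[Any]:
    """Mirror the str/int branch above: skip empty strings and unknown params, else coerce."""
    if isinstance(value, (str, int)):
        if value == "":
            return None                      # empty strings are skipped
        param_type = METRIC_PARAM_TYPES.get(name)
        if param_type is None:
            return None                      # unknown params are skipped
        return param_type(value)
    return value

assert parse_metric_param("k", 1) == 1       # int values are now handled
assert parse_metric_param("k", "5") == 5
```
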

def get_param_type(
@@ -313,6 +320,8 @@ def normalize_string_list(string_list: str, required_type: Union[Type, List[Type
if not isinstance(string_list, str):
return string_list
if isinstance(required_type, list):
string_list = string_list.replace("(", "[")
string_list = string_list.replace(")", "]")
while not string_list.startswith("[["):
string_list = "[" + string_list
while not string_list.endswith("]]"):
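
With the added replacements, tuple-style strings such as `"(224, 224, 3)"` are normalized the same way as bracketed ones before the double-bracket wrapping. A standalone sketch of just the visible normalization steps (the truncated remainder of the function is not reproduced):

```python
def normalize_brackets(string_list: str) -> str:
    """Mirror the visible normalization: parens become brackets, then wrap to [[...]]."""
    string_list = string_list.replace("(", "[").replace(")", "]")
    while not string_list.startswith("[["):
        string_list = "[" + string_list
    while not string_list.endswith("]]"):
        string_list = string_list + "]"
    return string_list

print(normalize_brackets("(224, 224, 3)"))   # -> [[224, 224, 3]]
print(normalize_brackets("[224, 224, 3]"))   # -> [[224, 224, 3]]
```
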
@@ -48,6 +48,7 @@ def get_boundary_nodes(data: Dict[str, Any]) -> None:
try:
model = model_repository.get_model(model_path)
except NotFoundException:
log.debug(f"Could not get model instance for {model_path}")
supported_frameworks = model_repository.get_frameworks()
raise ClientErrorException(
f"Framework for specified model is not yet supported. "
33 changes: 32 additions & 1 deletion neural_compressor/ux/components/db_manager/db_models/model.py
@@ -14,7 +14,7 @@
# limitations under the License.
"""The Model class."""
import json
from typing import Any, List
from typing import Any, List, Optional

from sqlalchemy import Boolean, Column, DateTime, Float, ForeignKey, Integer, String
from sqlalchemy.orm import relationship, session
@@ -54,6 +54,17 @@ class Model(Base):
"Optimization",
back_populates="optimized_model",
primaryjoin="Optimization.optimized_model_id == Model.id",
)

benchmarks: Any = relationship(
"Benchmark",
back_populates="model",
cascade="all, delete",
)

profilings: Any = relationship(
"Profiling",
back_populates="model",
cascade="all, delete",
)
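
The new `benchmarks` and `profilings` relationships are declared with `cascade="all, delete"`, so deleting a `Model` through the ORM also removes its dependent benchmark and profiling rows. A minimal, self-contained illustration of that cascade behaviour using simplified stand-in models (not the project's real schema):

```python
from sqlalchemy import Column, ForeignKey, Integer, create_engine
from sqlalchemy.orm import declarative_base, relationship, sessionmaker

Base = declarative_base()

class Model(Base):
    __tablename__ = "model"
    id = Column(Integer, primary_key=True)
    benchmarks = relationship("Benchmark", back_populates="model", cascade="all, delete")

class Benchmark(Base):
    __tablename__ = "benchmark"
    id = Column(Integer, primary_key=True)
    model_id = Column(Integer, ForeignKey("model.id"))
    model = relationship("Model", back_populates="benchmarks")

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with sessionmaker(bind=engine).begin() as session:
    session.add(Model(benchmarks=[Benchmark(), Benchmark()]))

with sessionmaker(bind=engine).begin() as session:
    session.delete(session.query(Model).one())    # cascades to both Benchmark rows
    assert session.query(Benchmark).count() == 0
```
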

@@ -147,6 +158,26 @@ def list(db_session: session.Session, project_id: int) -> dict:
)
return {"models": models}

@staticmethod
def delete_model(
db_session: session.Session,
model_id: int,
model_name: str,
) -> Optional[int]:
"""Remove model from database."""
model = (
db_session.query(Model)
.filter(Model.id == model_id)
.filter(Model.name == model_name)
.one_or_none()
)
if model is None:
return None
db_session.delete(model)
db_session.flush()

return int(model.id)

@staticmethod
def build_info(model: Any) -> dict:
"""Get model info."""
@@ -15,9 +15,10 @@
# pylint: disable=no-member
"""The Optimization class."""
import json
from typing import Any, Dict, List, Optional
from typing import Any, Dict, List, Optional, Union

from sqlalchemy import DDL, Column, DateTime, ForeignKey, Integer, String, event
from sqlalchemy.engine import Connection
from sqlalchemy.orm import relationship, session
from sqlalchemy.sql import func

@@ -554,6 +555,21 @@ def list(db_session: session.Session, project_id: int) -> dict:
optimizations.append(optimization_info)
return {"optimizations": optimizations}

@staticmethod
def unpin_benchmark(
db_connection: Union[session.Session, Connection],
benchmark_id: int,
) -> None:
"""Unpin benchmark from optimization."""
update_queries = [
f"UPDATE optimization SET performance_benchmark_id=null "
f"WHERE performance_benchmark_id={benchmark_id}",
f"UPDATE optimization SET accuracy_benchmark_id=null "
f"WHERE accuracy_benchmark_id={benchmark_id}",
]
for update_query in update_queries:
db_connection.execute(update_query)

@staticmethod
def build_info(
optimization: Any,
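
`unpin_benchmark` accepts either an ORM `Session` or a raw `Connection` because both expose `execute()`; in this commit it is called with a session from `delete_benchmark` and with the mapper connection from the `before_delete` listener. The f-strings only interpolate an integer id, but an equivalent parameterized form (a sketch using SQLAlchemy's `text()` construct, not the project's code) would be:

```python
from sqlalchemy import text

def unpin_benchmark(db_connection, benchmark_id: int) -> None:
    """Parameterized variant of the raw UPDATE statements in unpin_benchmark above (sketch)."""
    for column in ("performance_benchmark_id", "accuracy_benchmark_id"):
        db_connection.execute(
            text(f"UPDATE optimization SET {column}=NULL WHERE {column}=:benchmark_id"),
            {"benchmark_id": benchmark_id},
        )
```
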
@@ -16,9 +16,11 @@
"""INC Bench Benchmark API interface."""
import os
import shutil
from sqlite3 import Connection
from typing import List, Optional, Union

from sqlalchemy.orm import sessionmaker
from sqlalchemy import event
from sqlalchemy.orm import Mapper, sessionmaker

from neural_compressor.ux.components.benchmark import Benchmarks
from neural_compressor.ux.components.configuration_wizard.configuration_parser import (
@@ -27,6 +29,7 @@
from neural_compressor.ux.components.db_manager.db_manager import DBManager
from neural_compressor.ux.components.db_manager.db_models.benchmark import Benchmark
from neural_compressor.ux.components.db_manager.db_models.benchmark_result import BenchmarkResult
from neural_compressor.ux.components.db_manager.db_models.optimization import Optimization
from neural_compressor.ux.components.db_manager.db_operations.project_api_interface import (
ProjectAPIInterface,
)
@@ -63,6 +66,10 @@ def delete_benchmark(data: dict) -> dict:
benchmark_details = Benchmark.details(db_session, benchmark_id)
project_id = benchmark_details["project_id"]
project_details = ProjectAPIInterface.get_project_details({"id": project_id})
Optimization.unpin_benchmark(
db_connection=db_session,
benchmark_id=benchmark_id,
)
removed_benchmark_id = Benchmark.delete_benchmark(
db_session=db_session,
benchmark_id=benchmark_id,
@@ -455,3 +462,16 @@ def clean_status(status_to_clean: ExecutionStatus) -> dict:
status_to_clean=status_to_clean,
)
return response


@event.listens_for(Benchmark, "before_delete")
def before_delete_benchmark_entry(
mapper: Mapper,
connection: Connection,
benchmark: Benchmark,
) -> None:
"""Clean up benchmark data before remove."""
Optimization.unpin_benchmark(
db_connection=connection,
benchmark_id=benchmark.id,
)
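
The `before_delete` mapper event fires once per `Benchmark` instance the session is about to delete and hands the listener the connection of the ongoing transaction, so the unpin `UPDATE` runs atomically with the `DELETE`. A generic, self-contained illustration of the hook with a toy model (not the project's schema):

```python
from sqlalchemy import Column, Integer, create_engine, event
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class Benchmark(Base):
    __tablename__ = "benchmark"
    id = Column(Integer, primary_key=True)

@event.listens_for(Benchmark, "before_delete")
def before_delete_benchmark(mapper, connection, benchmark):
    # Runs inside the same transaction, just before the DELETE is emitted.
    print(f"cleaning up benchmark {benchmark.id}")

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with sessionmaker(bind=engine).begin() as session:
    session.add(Benchmark(id=1))

with sessionmaker(bind=engine).begin() as session:
    session.delete(session.get(Benchmark, 1))   # listener fires when the delete is flushed
```
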
@@ -96,6 +96,25 @@ def list_models(data: dict) -> dict:

return models_list

@staticmethod
def delete_model(data: dict) -> dict:
"""Delete model from database."""
try:
model_id: int = int(data.get("id", None))
model_name: str = str(data.get("name", None))
except ValueError:
raise ClientErrorException("Could not parse value.")
except TypeError:
raise ClientErrorException("Missing model id or model name.")
with Session.begin() as db_session:
removed_model_id = Model.delete_model(
db_session=db_session,
model_id=model_id,
model_name=model_name,
)

return {"id": removed_model_id}

@staticmethod
def parse_model_data(data: dict) -> ModelAddParamsInterface:
"""Parse input data for model."""
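
A hedged usage sketch of the `delete_model` handler added above; the module path and the id/name values are assumptions, not taken from this diff:

```python
# Hypothetical usage; the import path mirrors the other *_api_interface modules and may differ.
from neural_compressor.ux.components.db_manager.db_operations.model_api_interface import (
    ModelAPIInterface,
)

response = ModelAPIInterface.delete_model({"id": 7, "name": "resnet50_optimized"})
print(response)  # {"id": 7} on success, {"id": None} when no matching model row exists
```
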
2 changes: 1 addition & 1 deletion neural_compressor/ux/components/model/onnxrt/model.py
@@ -217,7 +217,7 @@ def guard_requirements_installed(self) -> None:
"""Ensure all requirements are installed."""
check_module("onnx")
check_module("onnxruntime")
if sys.version_info < (3,10): # pragma: no cover
if sys.version_info < (3, 10): # pragma: no cover
check_module("onnxruntime_extensions")

@property
3 changes: 3 additions & 0 deletions neural_compressor/ux/components/model/repository.py
@@ -24,6 +24,7 @@
from neural_compressor.ux.components.model.tensorflow.meta_graph import MetaGraphModel
from neural_compressor.ux.components.model.tensorflow.saved_model import SavedModelModel
from neural_compressor.ux.utils.exceptions import NotFoundException
from neural_compressor.ux.utils.logger import log


class ModelRepository:
@@ -43,6 +44,8 @@ def __init__(self) -> None:
def get_model(self, path: str) -> Model:
"""Get Model for given path."""
for model_type in self.model_types:
supports_path = model_type.supports_path(path)
log.debug(f"{model_type.__name__}: {supports_path}")
if model_type.supports_path(path):
return model_type(path)

@@ -233,6 +233,20 @@ def execute_optimization(data: Dict[str, Any]) -> dict:
if is_pytorch_script:
optimized_model_data["model_path"] = logs[0]

optimization_data = OptimizationAPIInterface.get_optimization_details(
{
"id": optimization_id,
},
)
if optimization_data["optimized_model"] is not None:
existing_model_id = optimization_data["optimized_model"]["id"]
existing_model_name = optimization_data["optimized_model"]["name"]
ModelAPIInterface.delete_model(
{
"id": existing_model_id,
"name": existing_model_name,
},
)
optimized_model_id = ModelAPIInterface.add_model(optimized_model_data)
OptimizationAPIInterface.update_optimized_model(
{
13 changes: 12 additions & 1 deletion neural_compressor/ux/gui/src/app/app.component.html
@@ -14,6 +14,15 @@
System info
</button>
<app-menu></app-menu>

<mat-toolbar class="theme-container">
<div class="flex-stretch"></div>
<img class="theme-img" src="./../../assets/221a-sunny-day-solid.svg">
<mat-slide-toggle [formControl]="toggleControl">
</mat-slide-toggle>
<img class="theme-img" src="./../../assets/222-night.svg">
</mat-toolbar>

</mat-sidenav>
<mat-sidenav-content>
<div *ngIf="!sidenav.opened" class="pointer title-hidden">
@@ -27,7 +36,9 @@
</div>
<div class="footer-wrapper">
<footer class="footer">
© Intel Corporation
<p>
© Intel Corporation
</p>
</footer>
</div>
</mat-sidenav-content>
33 changes: 26 additions & 7 deletions neural_compressor/ux/gui/src/app/app.component.scss
@@ -46,7 +46,7 @@
.sidenav {
font-family: IntelOneRg;
font-size: 150%;
background-color: $dark-gray;
background-color: $dark-gray !important;
color: white;
}

@@ -64,10 +64,11 @@
}

.footer {
position: fixed;
bottom: 0px;
color: $gray;
text-align: center;
color: $dark-gray;
margin-top: 5px;
padding-top: 5px;
z-index: 10;
}

.footer-wrapper {
@@ -89,8 +90,8 @@

.hw-info-button {
float: right;
color: $darker-gray;
background-color: $light-gray;
color: $darker-gray !important;
background-color: $light-gray !important;
margin-top: 15px;
margin-right: 5px;
}
@@ -102,6 +103,23 @@
position: relative;
}

.theme-img {
width: 25px;
height: 25px;
margin: 5px;
}

.theme-container {
position: fixed;
bottom: 0px;
width: 370px;
background-color: #005B85 !important;
}

.flex-stretch {
flex: 1 0 auto;
}

@media all and (max-width:1500px) {

.logo {
Expand All @@ -117,7 +135,8 @@
margin-right: 5px;
}

.sidenav {
.sidenav,
.bottom {
width: 300px;
}

