Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .flake8
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
[flake8]
exclude = venv, __init__.py, doc/_build, .venv
exclude = venv, __init__.py, doc/_build, .venv, ansys/rep/client/rms/models.py
select = W191, W291, W293, W391, E115, E117, E122, E124, E125, E225, E231, E301, E303, E501, F401, F403, N801, N802, N803, N804, N805, N806, N807, N815, N816
count = True
max-complexity = 10
Expand Down
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ repos:
rev: 22.3.0
hooks:
- id: black
exclude: ^(ansys/rep/client/jms/resource/|ansys/rep/client/auth/resource/)
exclude: ^(ansys/rep/client/jms/resource/|ansys/rep/client/auth/resource/|ansys/rep/client/rms/models.py)

- repo: https://github.com/pycqa/isort
rev: 5.11.5
Expand Down
10 changes: 10 additions & 0 deletions README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -166,6 +166,16 @@ the building requirements and then executing the build module:
python -m build
python -m twine check dist/*

How to generate/update RMS models
---------------------------------


To generate the RMS Pydantic models, first download the RMS OpenAPI specification and save it as `rms_openapi.json` at the root of the repository.
Then, run the datamodel generator:

.. code:: bash

datamodel-codegen --input ./rms_openapi.json --input-file-type openapi --output ansys/rep/client/rms/models.py --output-model-type pydantic_v2.BaseModel

.. LINKS AND REFERENCES
.. _black: https://github.com/psf/black
Expand Down
1 change: 1 addition & 0 deletions ansys/rep/client/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,4 +18,5 @@
from .client import Client
from .exceptions import APIError, ClientError, REPError
from .jms import JmsApi, ProjectApi
from .rms import RmsApi
from .warnings import UnverifiedHTTPSRequestsWarning
2 changes: 1 addition & 1 deletion ansys/rep/client/connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ def create_session(

# Set basic content type to json
session.headers = {
"content-type": "text/json",
"content-type": "application/json",
}

if access_token:
Expand Down
52 changes: 2 additions & 50 deletions ansys/rep/client/jms/api/jms_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,13 +10,7 @@
from ansys.rep.client.client import Client
from ansys.rep.client.common import Object
from ansys.rep.client.exceptions import REPError
from ansys.rep.client.jms.resource import (
Evaluator,
Operation,
Permission,
Project,
TaskDefinitionTemplate,
)
from ansys.rep.client.jms.resource import Operation, Permission, Project, TaskDefinitionTemplate
from ansys.rep.client.jms.schema.project import ProjectSchema

from .base import copy_objects as base_copy_objects
Expand Down Expand Up @@ -115,48 +109,6 @@ def restore_project(self, path: str) -> Project:
"""
return restore_project(self, path)

################################################################
# Evaluators
def get_evaluators(self, as_objects=True, **query_params) -> List[Evaluator]:
"""Return a list of evaluators, optionally filtered by given query parameters"""
return get_objects(self.client.session, self.url, Evaluator, as_objects, **query_params)

def update_evaluators(
self, evaluators: List[Evaluator], as_objects=True, **query_params
) -> List[Evaluator]:
"""Update evaluators

Examples
--------

You can request multiple evaluators configuration updates at once.
This example shows how to set a custom resource property
on all Linux evaluators that were active in the past 60 seconds.

>>> import datetime
>>> from ansys.rep.client import Client
>>> from ansys.rep.client.jms import JmsApi, EvaluatorConfigurationUpdate
>>> cl = Client(
... rep_url="https://localhost:8443/rep", username="repuser", password="repuser"
... )
>>> jms_api = JmsApi(cl)
>>> query_params = {
... "platform" : "linux",
... "update_time.gt" : datetime.datetime.utcnow() - datetime.timedelta(seconds=60)
... }
>>> evaluators = jms_api.get_evaluators(fields=["id", "host_id"], **query_params)
>>> config_update = EvaluatorConfigurationUpdate(
... custom_resource_properties={"disk_type" : "SSD"}
... )
>>> for ev in evaluators:
... ev.configuration_updates = config_update
>>> evaluators = jms_api.update_evaluators(evaluators)

"""
return update_objects(
self.client.session, self.url, evaluators, Evaluator, as_objects, **query_params
)

################################################################
# Task Definition Templates
def get_task_definition_templates(
Expand Down Expand Up @@ -423,7 +375,7 @@ def restore_project(jms_api, archive_path):

bucket = f"rep-client-restore-{uuid.uuid4()}"
fs_file_url = f"{jms_api.client.rep_url}/fs/api/v1/{bucket}/{archive_name}"
ansfs_file_url = f"ansfs://{bucket}/{archive_name}"
ansfs_file_url = f"ansfs://{bucket}/{archive_name}" # noqa: E231

fs_headers = {"content-type": "application/octet-stream"}

Expand Down
5 changes: 2 additions & 3 deletions ansys/rep/client/jms/schema/job.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,11 +42,10 @@ class Meta(ObjectSchemaWithModificationInfo.Meta):

priority = fields.Integer(
allow_none=True,
default=0,
metadata={
"description": "Priority with which jobs are evaluated. The default is 0, "
"which is the highest priority. Assigning a higher value to a design "
"point makes it a lower priority."
"which is the highest priority. Assigning a higher value to a job "
"makes it a lower priority."
},
)
values = fields.Dict(
Expand Down
2 changes: 1 addition & 1 deletion ansys/rep/client/jms/schema/task.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,5 +114,5 @@ class Meta(ObjectSchemaWithModificationInfo.Meta):

custom_data = fields.Dict(
allow_none=True,
dmetadata={"description": "Dictionary type field to store custom data."},
metadata={"description": "Dictionary type field to store custom data."},
)
8 changes: 8 additions & 0 deletions ansys/rep/client/rms/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
from .api.rms_api import RmsApi
from .models import (
ComputeResourceSet,
EvaluatorConfiguration,
EvaluatorConfigurationUpdate,
EvaluatorRegistration,
ScalerRegistration,
)
181 changes: 181 additions & 0 deletions ansys/rep/client/rms/api/base.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,181 @@
import json
import logging
from typing import List, Type

from pydantic import BaseModel
from pydantic import __version__ as pydantic_version
from pydantic import create_model
from requests import Session

from ansys.rep.client.exceptions import ClientError

from ..models import (
Cluster,
ComputeResourceSet,
EvaluatorConfigurationUpdate,
EvaluatorRegistration,
ScalerRegistration,
)

# Maps each RMS pydantic model class to its REST collection name.
# The collection name is used both as the URL path segment
# (e.g. ``{url}/evaluators``) and as the JSON key wrapping lists of
# these objects in request/response payloads.
OBJECT_TYPE_TO_ENDPOINT = {
    Cluster: "clusters",
    EvaluatorRegistration: "evaluators",
    EvaluatorConfigurationUpdate: "configuration_updates",
    ScalerRegistration: "scalers",
    ComputeResourceSet: "compute_resource_sets",
}

# Module-level logger, following the standard per-module logging convention.
log = logging.getLogger(__name__)


def _create_dynamic_list_model(name, field_name, field_type) -> BaseModel:
    """Create, at runtime, a pydantic model class named *name* with a single
    required field *field_name* holding a list of *field_type* items."""
    return create_model(name, **{field_name: (List[field_type], ...)})


def objects_to_json(
    objects: List[BaseModel],
    rest_name: str,
    exclude_unset: bool = True,
    exclude_defaults: bool = False,
):
    """Serialize a homogeneous list of pydantic objects to a JSON string
    of the form ``{"<rest_name>": [...]}``.

    Parameters
    ----------
    objects : List[BaseModel]
        Non-empty list of objects, all of the same model class.
    rest_name : str
        REST collection name used as the wrapping JSON key.
    exclude_unset : bool
        Omit fields that were never explicitly set.
    exclude_defaults : bool
        Omit fields equal to their default value.

    Raises
    ------
    ClientError
        If ``objects`` is empty.
    RuntimeError
        If the installed pydantic major version is unsupported.
    """
    # Fail fast with a clear message instead of an opaque IndexError
    # on the objects[0] access below.
    if not objects:
        raise ClientError("objects can't be empty")

    obj_class = objects[0].__class__
    ListOfObjects = _create_dynamic_list_model(
        name=f"List{obj_class.__name__}",
        field_name=rest_name,
        field_type=obj_class,
    )
    objects_list = ListOfObjects(**{rest_name: objects})

    # pydantic v1 and v2 expose different serialization entry points.
    if pydantic_version.startswith("1."):
        return objects_list.json(exclude_unset=exclude_unset, exclude_defaults=exclude_defaults)
    elif pydantic_version.startswith("2."):
        return objects_list.model_dump_json(
            exclude_unset=exclude_unset, exclude_defaults=exclude_defaults
        )
    else:
        raise RuntimeError(f"Unsupported Pydantic version {pydantic_version}")


def json_to_objects(data, obj_type):
    """Deserialize an iterable of dicts into a list of *obj_type* instances,
    passing each dict's entries as keyword arguments."""
    return [obj_type(**item) for item in data]


def get_objects(
    session: Session, url: str, obj_type: Type[BaseModel], as_objects=True, **query_params
):
    """GET the full *obj_type* collection, optionally filtered by query params.

    Returns raw dicts when ``as_objects`` is False, model instances otherwise.
    """
    rest_name = OBJECT_TYPE_TO_ENDPOINT[obj_type]
    response = session.get(f"{url}/{rest_name}", params=query_params)
    data = response.json()[rest_name]
    return json_to_objects(data, obj_type) if as_objects else data


def get_objects_count(session: Session, url: str, obj_type: Type[BaseModel], **query_params):
    """GET the number of objects in the *obj_type* collection.

    Uses the server's ``:count`` endpoint; the count is returned under the
    ``num_<collection>`` key of the JSON response.
    """
    rest_name = OBJECT_TYPE_TO_ENDPOINT[obj_type]
    response = session.get(f"{url}/{rest_name}:count", params=query_params)  # noqa: E231
    return response.json()[f"num_{rest_name}"]


def get_object(
session: Session,
url: str,
obj_type: Type[BaseModel],
as_object=True,
from_collection=False,
**query_params,
):

r = session.get(url, params=query_params)
data = r.json()
if from_collection:
rest_name = OBJECT_TYPE_TO_ENDPOINT[obj_type]
data = data[rest_name][0]
if not as_object:
return data
return obj_type(**data)


def create_objects(
session: Session, url: str, objects: List[BaseModel], as_objects=True, **query_params
):
if not objects:
return []

are_same = [o.__class__ == objects[0].__class__ for o in objects[1:]]
if not all(are_same):
raise ClientError("Mixed object types")

obj_type = objects[0].__class__
rest_name = OBJECT_TYPE_TO_ENDPOINT[obj_type]

url = f"{url}/{rest_name}"

r = session.post(f"{url}", data=objects_to_json(objects, rest_name), params=query_params)

data = r.json()[rest_name]
if not as_objects:
return data

return json_to_objects(data, obj_type)


def update_objects(
session: Session,
url: str,
objects: List[BaseModel],
obj_type: Type[BaseModel],
as_objects=True,
**query_params,
):

if objects is None:
raise ClientError("objects can't be None")

are_same = [o.__class__ == obj_type for o in objects]
if not all(are_same):
raise ClientError("Mixed object types")

rest_name = OBJECT_TYPE_TO_ENDPOINT[obj_type]

url = f"{url}/{rest_name}"

r = session.put(f"{url}", data=objects_to_json(objects, rest_name), params=query_params)

data = r.json()[rest_name]
if not as_objects:
return data

return json_to_objects(data, obj_type)


def delete_objects(session: Session, url: str, objects: List[BaseModel]):
if not objects:
return

are_same = [o.__class__ == objects[0].__class__ for o in objects[1:]]
if not all(are_same):
raise ClientError("Mixed object types")

obj_type = objects[0].__class__
rest_name = OBJECT_TYPE_TO_ENDPOINT[obj_type]

url = f"{url}/{rest_name}"
data = json.dumps({"source_ids": [obj.id for obj in objects]})

r = session.delete(url, data=data)
Loading