From 0b0316d0224f96ea90de66fd0f5f40a4aaab56c2 Mon Sep 17 00:00:00 2001
From: Asgeir Melling
Date: Wed, 11 Sep 2024 12:12:08 +0200
Subject: [PATCH 1/2] feat!: update code to work with updated pi backend

---
 tagreader/web_handlers.py   | 65 +++++++++++++++++++++++++------------
 tests/test_PIHandlerREST.py | 23 +++++++------
 2 files changed, 55 insertions(+), 33 deletions(-)

diff --git a/tagreader/web_handlers.py b/tagreader/web_handlers.py
index f6bf5d4..6b4d20a 100644
--- a/tagreader/web_handlers.py
+++ b/tagreader/web_handlers.py
@@ -19,7 +19,7 @@
 
 from tagreader.cache import BucketCache, SmartCache
 from tagreader.logger import logger
-from tagreader.utils import ReaderType, is_mac, is_windows, urljoin
+from tagreader.utils import IMSType, ReaderType, is_mac, is_windows, urljoin
 
 
 class MD4:
@@ -60,7 +60,7 @@ def get_auth_pi() -> HTTPKerberosAuth:
 
 
 def get_url_pi() -> str:
-    return r"https://piwebapi.equinor.com/piwebapi"
+    return r"https://piwebapi-qa.equinor.com/piwebapi"
 
 
 def get_auth_aspen(use_internal: bool = True):
@@ -144,6 +144,33 @@ def list_piwebapi_sources(
         logger.error(f"Could not decode JSON response: {e}")
 
 
+def get_piwebapi_source_to_webid_dict(
+    url: Optional[str] = None,
+    auth: Optional[Any] = None,
+    verify_ssl: Optional[bool] = True,
+) -> Dict[str, str]:
+    if url is None:
+        url = get_url_pi()
+
+    if auth is None:
+        auth = get_auth_pi()
+
+    if verify_ssl is None:
+        verify_ssl = get_verify_ssl()
+
+    if verify_ssl is False:
+        urllib3.disable_warnings(InsecureRequestWarning)
+
+    url_ = urljoin(url, "dataservers")
+    res = requests.get(url_, auth=auth, verify=verify_ssl)
+
+    res.raise_for_status()
+    try:
+        return {item["Name"]: item["WebId"] for item in res.json()["Items"]}
+    except JSONDecodeError as e:
+        logger.error(f"Could not decode JSON response: {e}")
+
+
 class BaseHandlerWeb(ABC):
     def __init__(
         self,
@@ -741,11 +768,10 @@ def escape(s: str) -> str:
         )
 
     @staticmethod
-    def generate_search_query(
+    def generate_search_params(
         tag: Optional[str],
         desc: Optional[str],
         datasource: Optional[str],
-        max: Optional[int] = 100,
     ) -> Dict[str, str]:
         q = []
         if tag is not None:
@@ -753,10 +779,12 @@ def generate_search_query(
         if desc is not None:
             q.extend([f"description:{PIHandlerWeb.escape(desc)}"])
         query = " AND ".join(q)
-        params = {"q": f"{query}"}
+        params = {"query": f"{query}"}
 
         if datasource is not None:
-            params["scope"] = f"pi:{datasource}"
+            params["dataserverwebid"] = (
+                f"{get_piwebapi_source_to_webid_dict()[datasource]}"
+            )
 
         return params
 
@@ -866,23 +894,23 @@ def search(
         timeout: Optional[int] = None,
         return_desc: bool = True,
     ) -> Union[List[Tuple[str, str]], List[str]]:
-        params = self.generate_search_query(
+        params = self.generate_search_params(
             tag=tag, desc=desc, datasource=self.datasource
         )
-        url = urljoin(self.base_url, "search", "query")
+        url = urljoin(self.base_url, "points", "search")
         done = False
         ret = []
         while not done:
             data = self.fetch(url, params=params, timeout=timeout)
             for item in data["Items"]:
-                description = item["Description"] if "Description" in item else ""
+                description = item["Descriptor"] if "Descriptor" in item else ""
                 ret.append((item["Name"], description))
-            next_start = int(data["Links"]["Next"].split("=")[-1])
-            if int(data["Links"]["Last"].split("=")[-1]) >= next_start:
-                params["start"] = next_start  # noqa
-            else:
-                done = True
+            # next_start = int(data["Links"]["Next"].split("=")[-1])
+            # if int(data["Links"]["Last"].split("=")[-1]) >= next_start:
+            #     params["start"] = next_start  # noqa
+            # else:
+            done = True
         if not return_desc:
             ret = [x[0] for x in ret]
 
         return ret
@@ -922,17 +950,12 @@ def tag_to_web_id(self, tag: str) -> Optional[str]:
         if self.web_id_cache and tag in self.web_id_cache:
             return self.web_id_cache[tag]
 
-        params = self.generate_search_query(
+        params = self.generate_search_params(
             tag=tag, datasource=self.datasource, desc=None
         )
-        params["fields"] = "name;webid"
-        url = urljoin(self.base_url, "search", "query")
+        url = urljoin(self.base_url, "points", "search")
 
         data = self.fetch(url, params=params)
-        if len(data["Errors"]) > 0:
-            msg = f"Received error from server when searching for WebId for {tag}: {data['Errors']}"
-            raise ValueError(msg)
-
         if len(data["Items"]) > 1:
             # Compare elements and if same, return the first
             first = data["Items"][0]
diff --git a/tests/test_PIHandlerREST.py b/tests/test_PIHandlerREST.py
index 15d8b5f..9fbb126 100644
--- a/tests/test_PIHandlerREST.py
+++ b/tests/test_PIHandlerREST.py
@@ -35,28 +35,27 @@ def test_escape_chars() -> None:
 
 
 def test_generate_search_query() -> None:
-    assert PIHandlerWeb.generate_search_query(
+    assert PIHandlerWeb.generate_search_params(
         tag="SINUSOID", desc=None, datasource=None
-    ) == {"q": "name:SINUSOID"}
-    assert PIHandlerWeb.generate_search_query(
-        tag=r"BA:*.1", desc=None, datasource="sourcename"
+    ) == {"query": "name:SINUSOID"}
+    assert PIHandlerWeb.generate_search_params(
+        tag=r"BA:*.1", desc=None, datasource=None
     ) == {
-        "q": r"name:BA\:*.1",
-        "scope": "pi:sourcename",
+        "query": r"name:BA\:*.1",
     }
-    assert PIHandlerWeb.generate_search_query(
+    assert PIHandlerWeb.generate_search_params(
         tag="BA:*.1", datasource=None, desc=None
     ) == {
-        "q": r"name:BA\:*.1",
+        "query": r"name:BA\:*.1",
     }
-    assert PIHandlerWeb.generate_search_query(
+    assert PIHandlerWeb.generate_search_params(
         desc="Concentration Reactor 1", datasource=None, tag=None
     ) == {
-        "q": r"description:Concentration\ Reactor\ 1",
+        "query": r"description:Concentration\ Reactor\ 1",
     }
-    assert PIHandlerWeb.generate_search_query(
+    assert PIHandlerWeb.generate_search_params(
         tag="BA:*.1", desc="Concentration Reactor 1", datasource=None
-    ) == {"q": r"name:BA\:*.1 AND description:Concentration\ Reactor\ 1"}
+    ) == {"query": r"name:BA\:*.1 AND description:Concentration\ Reactor\ 1"}
 
 
 def test_is_summary(pi_handler: PIHandlerWeb) -> None:

From a94b00f421b497c7db46c17876f6fa0c33b80156 Mon Sep 17 00:00:00 2001
From: Asgeir Melling
Date: Wed, 11 Sep 2024 12:18:44 +0200
Subject: [PATCH 2/2] chore: add snyk ignore

---
 .snyk | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/.snyk b/.snyk
index d8c267c..12475e3 100644
--- a/.snyk
+++ b/.snyk
@@ -6,5 +6,20 @@ ignore:
       reason: 'No patch available'
      expires: 2024-11-01T00:00:00.000Z
       created: 2023-12-04T09:05:00.000Z
+  SNYK-PYTHON-NOTEBOOK-7850171:
+    - '*':
+        reason: 'No patch available'
+        expires: 2024-11-01T00:00:00.000Z
+        created: 2024-09-11T12:17:00.000Z
+  SNYK-PYTHON-CRYPTOGRAPHY-7886970:
+    - '*':
+        reason: 'No patch available'
+        expires: 2024-11-01T00:00:00.000Z
+        created: 2024-09-11T12:17:00.000Z
+  SNYK-PYTHON-JUPYTERLAB-7850170:
+    - '*':
+        reason: 'No patch available'
+        expires: 2024-11-01T00:00:00.000Z
+        created: 2024-09-11T12:17:00.000Z
 patch: {}
 version: v1.25.0
\ No newline at end of file
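
Usage sketch (illustrative only, not part of the patch): the snippet below shows
how the renamed generate_search_params and the new
get_piwebapi_source_to_webid_dict might be exercised once these changes land. It
assumes the patched tagreader is installed, the QA endpoint above is reachable,
and Kerberos auth is configured; "sourcename" is a hypothetical datasource name.

    from tagreader.web_handlers import (
        PIHandlerWeb,
        get_piwebapi_source_to_webid_dict,
    )

    # Pure parameter construction, no network access. Note the new
    # "query" key replacing the old "q" key.
    params = PIHandlerWeb.generate_search_params(
        tag="BA:*.1", desc=None, datasource=None
    )
    assert params == {"query": r"name:BA\:*.1"}

    # Requires a reachable PI Web API server and working auth: maps
    # dataserver names to WebIds via the /dataservers endpoint.
    webids = get_piwebapi_source_to_webid_dict()
    print(webids.get("sourcename"))  # hypothetical datasource name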