From a7d0b00a84b231c90880091a60d4045bf8cec6dd Mon Sep 17 00:00:00 2001
From: Sachin Agarwal
Date: Fri, 3 Nov 2023 19:22:01 +0000
Subject: [PATCH 1/3] Add finetuned models

---
 roboflow/models/inference.py | 42 ++++++++++++++++++++----------------
 1 file changed, 23 insertions(+), 19 deletions(-)

diff --git a/roboflow/models/inference.py b/roboflow/models/inference.py
index 64fce042..2fa9f82a 100644
--- a/roboflow/models/inference.py
+++ b/roboflow/models/inference.py
@@ -239,7 +239,6 @@ def predict_video(
             signed_url = video_path

         url = urljoin(API_URL, "/videoinfer/?api_key=" + self.__api_key)
-
         if model_class in ("CLIPModel", "GazeModel"):
             if model_class == "CLIPModel":
                 model = "clip"
@@ -257,6 +256,14 @@ def predict_video(
                 ],
             }
         ]
+        else:
+            models = [
+                {
+                    "model_id": self.dataset_id,
+                    "model_version":self.version,
+                    "inference_type": self.type
+                }
+            ]

         for model in additional_models:
             models.append(SUPPORTED_ADDITIONAL_MODELS[model])
@@ -308,7 +315,6 @@ def poll_for_video_results(self, job_id: str = None) -> dict:
         url = urljoin(
             API_URL, "/videoinfer/?api_key=" + self.__api_key + "&job_id=" + self.job_id
         )
-
         try:
             response = requests.get(url, headers={"Content-Type": "application/json"})
         except Exception as e:
@@ -316,20 +322,21 @@ def poll_for_video_results(self, job_id: str = None) -> dict:
         if not response.ok:
             raise Exception(f"Error getting video inference results: {response.text}")
-
         data = response.json()
+        if "status" not in data:
+            return {} # No status available
+        if data.get("status") > 1:
+            return(data) # Error
+        elif data.get("status") == 1:
+            return {} # Still running
+        else: # done
+            output_signed_url = data["output_signed_url"]
+            inference_data = requests.get(
+                output_signed_url, headers={"Content-Type": "application/json"}
+            )
-
-        if data.get("status") != 0:
-            return {}
-
-        output_signed_url = data["output_signed_url"]
-
-        inference_data = requests.get(
-            output_signed_url, headers={"Content-Type": "application/json"}
-        )

-        # frame_offset and model name are top-level keys
-        return inference_data.json()
+            # frame_offset and model name are top-level keys
+            return inference_data.json()

     def poll_until_video_results(self, job_id) -> dict:
         """
         ...
         """
         job_id = self.job_id

         attempts = 0
-
+        print(f"Checking for video inference results for job {job_id} every 60s")
         while True:
+            time.sleep(60)
             print(f"({attempts * 60}s): Checking for inference results")
-
             response = self.poll_for_video_results()
-
-            time.sleep(60)
-
             attempts += 1

             if response != {}:

From d345a9b38b630699519440e827fffe3c58c0d21a Mon Sep 17 00:00:00 2001
From: Sachin Agarwal
Date: Fri, 3 Nov 2023 19:28:56 +0000
Subject: [PATCH 2/3] Bumped version

---
 roboflow/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/roboflow/__init__.py b/roboflow/__init__.py
index 38da02d2..3b12b5b5 100644
--- a/roboflow/__init__.py
+++ b/roboflow/__init__.py
@@ -13,7 +13,7 @@
 from roboflow.models import CLIPModel, GazeModel
 from roboflow.util.general import write_line

-__version__ = "1.1.8"
+__version__ = "1.1.9"


 def check_key(api_key, model, notebook, num_retries=0):

From 3eb1e43738092369c2a69662537a737c05db9b2d Mon Sep 17 00:00:00 2001
From: Sachin Agarwal
Date: Fri, 3 Nov 2023 19:31:34 +0000
Subject: [PATCH 3/3] linting

---
 roboflow/models/inference.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/roboflow/models/inference.py b/roboflow/models/inference.py
index 2fa9f82a..c7fc946a 100644
--- a/roboflow/models/inference.py
+++ b/roboflow/models/inference.py
@@ -260,8 +260,8 @@ def predict_video(
             models = [
                 {
                     "model_id": self.dataset_id,
-                    "model_version":self.version,
-                    "inference_type": self.type
+                    "model_version": self.version,
+                    "inference_type": self.type,
                 }
             ]

@@ -324,12 +324,12 @@ def poll_for_video_results(self, job_id: str = None) -> dict:
             raise Exception(f"Error getting video inference results: {response.text}")
         data = response.json()
         if "status" not in data:
-            return {} # No status available
+            return {}  # No status available
         if data.get("status") > 1:
-            return(data) # Error
+            return data  # Error
         elif data.get("status") == 1:
             return {} # Still running
-        else: # done
+        else:  # done
             output_signed_url = data["output_signed_url"]
             inference_data = requests.get(
                 output_signed_url, headers={"Content-Type": "application/json"}
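
For reference, the status convention PATCH 1/3 introduces in poll_for_video_results is: "status" > 1 means the video job errored, == 1 means it is still running, and any other value means the job finished and "output_signed_url" points at the results JSON. The sketch below exercises that same polling loop standalone; it is illustrative only, and the wait_for_video_job helper name and the hard-coded API_URL value are assumptions, not code from this diff.

    import time

    import requests

    API_URL = "https://api.roboflow.com"  # assumed value; the SDK imports its own API_URL


    def wait_for_video_job(api_key: str, job_id: str, poll_seconds: int = 60) -> dict:
        # Poll the /videoinfer endpoint until the job leaves the "still running" state.
        url = f"{API_URL}/videoinfer/?api_key={api_key}&job_id={job_id}"
        while True:
            time.sleep(poll_seconds)
            data = requests.get(url, headers={"Content-Type": "application/json"}).json()
            status = data.get("status")
            if status is None or status == 1:
                continue  # no status reported yet, or still running
            if status > 1:
                return data  # the API returned an error payload
            # done: the actual predictions live behind a signed URL
            return requests.get(data["output_signed_url"]).json()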