From 8a313c064f4416f99f2eebe4de64f68553b92cd9 Mon Sep 17 00:00:00 2001 From: Heiru Wu Date: Thu, 7 Dec 2023 19:10:34 +0800 Subject: [PATCH] feat(ray): add io helpers for llm tasks --- instill/helpers/const.py | 34 ++++- instill/helpers/ray_io.py | 290 +++++++++++++++++++++++++++++++++++++- poetry.lock | 79 +++++++++-- pyproject.toml | 1 + 4 files changed, 389 insertions(+), 15 deletions(-) diff --git a/instill/helpers/const.py b/instill/helpers/const.py index f89f4dc..967e20d 100644 --- a/instill/helpers/const.py +++ b/instill/helpers/const.py @@ -1,5 +1,7 @@ from enum import Enum -from typing import Any, Dict +from typing import Any, Dict, Union + +import numpy as np class DataType(Enum): @@ -29,6 +31,7 @@ class TextGenerationInput: class TextToImageInput: + prompt_image: Union[np.ndarray, None] = None prompt = "" negative_prompt = "" steps = 5 @@ -36,3 +39,32 @@ class TextToImageInput: seed = 0 samples = 1 extra_params: Dict[str, str] = {} + + +class ImageToImageInput: + prompt_image: Union[np.ndarray, None] = None + prompt = "" + steps = 5 + guidance_scale = 7.5 + seed = 0 + samples = 1 + extra_params: Dict[str, str] = {} + + +class TextGenerationChatInput: + conversation = "" + max_new_tokens = 100 + top_k = 1 + temperature = 0.8 + random_seed = 0 + extra_params: Dict[str, str] = {} + + +class VisualQuestionAnsweringInput: + prompt_image: Union[np.ndarray, None] = None + prompt = "" + max_new_tokens = 100 + top_k = 1 + temperature = 0.8 + random_seed = 0 + extra_params: Dict[str, str] = {} diff --git a/instill/helpers/ray_io.py b/instill/helpers/ray_io.py index bf074b6..d8f35dc 100644 --- a/instill/helpers/ray_io.py +++ b/instill/helpers/ray_io.py @@ -1,10 +1,18 @@ +import io import json import struct from typing import List import numpy as np +from PIL import Image -from instill.helpers.const import TextGenerationInput, TextToImageInput +from instill.helpers.const import ( + ImageToImageInput, + TextGenerationChatInput, + TextGenerationInput, + 
TextToImageInput, + VisualQuestionAnsweringInput, +) def serialize_byte_tensor(input_tensor): @@ -101,12 +109,12 @@ def parse_task_text_generation_input(request) -> TextGenerationInput: ) if input_name == "max_new_tokens": - text_generation_inputmax_new_tokens = int.from_bytes( + text_generation_input.max_new_tokens = int.from_bytes( b_input_tensor, "little" ) print( f"[DEBUG] input `max_new_tokens` type\ - ({type(text_generation_inputmax_new_tokens)}): {text_generation_inputmax_new_tokens}" + ({type(text_generation_input.max_new_tokens)}): {text_generation_input.max_new_tokens}" ) if input_name == "top_k": @@ -260,6 +268,282 @@ def parse_task_text_to_image_input(request) -> TextToImageInput: def parse_task_text_to_image_output(image): return np.asarray(image).tobytes() + @staticmethod + def parse_task_image_to_image_input(request) -> ImageToImageInput: + image_to_image_input = ImageToImageInput() + + for i, b_input_tensor in zip(request.inputs, request.raw_input_contents): + input_name = i.name + + if input_name == "prompt_image": + input_tensors = deserialize_bytes_tensor(b_input_tensor) + images = [] + for enc in input_tensors: + pil_img = Image.open(io.BytesIO(enc.astype(bytes))) # RGB + image = np.array(pil_img) + if len(image.shape) == 2: # gray image + raise ValueError( + f"The image shape with {image.shape} is " + f"not in acceptable" + ) + images.append(image) + image_to_image_input.prompt_image = images[0] + print( + f"[DEBUG] input `prompt_image` type\ + ({type(image_to_image_input.prompt_image)}): {image_to_image_input.prompt_image}" + ) + + if input_name == "prompt": + input_tensor = deserialize_bytes_tensor(b_input_tensor) + image_to_image_input.prompt = str(input_tensor[0].decode("utf-8")) + print( + f"[DEBUG] input `prompt` type\ + ({type(image_to_image_input.prompt)}): {image_to_image_input.prompt}" + ) + + if input_name == "steps": + image_to_image_input.steps = int.from_bytes(b_input_tensor, "little") + print( + f"[DEBUG] input `steps` type\ + 
({type(image_to_image_input.steps)}): {image_to_image_input.steps}"
+ )
+
+ if input_name == "seed":
+ image_to_image_input.seed = int.from_bytes(b_input_tensor, "little")
+ print(
+ f"[DEBUG] input `seed` type\
+ ({type(image_to_image_input.seed)}): {image_to_image_input.seed}"
+ )
+
+ if input_name == "guidance_scale":
+ image_to_image_input.guidance_scale = struct.unpack(
+ "f", b_input_tensor
+ )[0]
+ print(
+ f"[DEBUG] input `guidance_scale` type\
+ ({type(image_to_image_input.guidance_scale)}): {image_to_image_input.guidance_scale}"
+ )
+ image_to_image_input.guidance_scale = round(
+ image_to_image_input.guidance_scale, 2
+ )
+
+ if input_name == "samples":
+ image_to_image_input.samples = int.from_bytes(b_input_tensor, "little")
+ print(
+ f"[DEBUG] input `samples` type\
+ ({type(image_to_image_input.samples)}): {image_to_image_input.samples}"
+ )
+
+ if input_name == "extra_params":
+ input_tensor = deserialize_bytes_tensor(b_input_tensor)
+ extra_params_str = str(input_tensor[0].decode("utf-8"))
+ print(
+ f"[DEBUG] input `extra_params` type\
+ ({type(extra_params_str)}): {extra_params_str}"
+ )
+
+ try:
+ image_to_image_input.extra_params = json.loads(extra_params_str)
+ except json.decoder.JSONDecodeError:
+ print("[DEBUG] WARNING `extra_params` parsing failed!")
+ continue
+
+ return image_to_image_input
+
+ @staticmethod
+ def parse_task_image_to_image_output(image):
+ return np.asarray(image).tobytes()
+
+ @staticmethod
+ def parse_task_text_generation_chat_input(request) -> TextGenerationChatInput:
+ text_generation_chat_input = TextGenerationChatInput()
+
+ for i, b_input_tensor in zip(request.inputs, request.raw_input_contents):
+ input_name = i.name
+
+ if input_name == "conversation":
+ input_tensor = deserialize_bytes_tensor(b_input_tensor)
+ text_generation_chat_input.conversation = str(
+ input_tensor[0].decode("utf-8")
+ )
+ print(
+ f"[DEBUG] input `conversation` type\
+ ({type(text_generation_chat_input.conversation)}): 
{text_generation_chat_input.conversation}"
+ )
+
+ if input_name == "max_new_tokens":
+ text_generation_chat_input.max_new_tokens = int.from_bytes(
+ b_input_tensor, "little"
+ )
+ print(
+ f"[DEBUG] input `max_new_tokens` type\
+ ({type(text_generation_chat_input.max_new_tokens)}):\
+ {text_generation_chat_input.max_new_tokens}"
+ )
+
+ if input_name == "top_k":
+ text_generation_chat_input.top_k = int.from_bytes(
+ b_input_tensor, "little"
+ )
+ print(
+ f"[DEBUG] input `top_k` type\
+ ({type(text_generation_chat_input.top_k)}):\
+ {text_generation_chat_input.top_k}"
+ )
+
+ if input_name == "temperature":
+ text_generation_chat_input.temperature = struct.unpack(
+ "f", b_input_tensor
+ )[0]
+ print(
+ f"[DEBUG] input `temperature` type\
+ ({type(text_generation_chat_input.temperature)}):\
+ {text_generation_chat_input.temperature}"
+ )
+ text_generation_chat_input.temperature = round(
+ text_generation_chat_input.temperature, 2
+ )
+
+ if input_name == "random_seed":
+ text_generation_chat_input.random_seed = int.from_bytes(
+ b_input_tensor, "little"
+ )
+ print(
+ f"[DEBUG] input `random_seed` type\
+ ({type(text_generation_chat_input.random_seed)}):\
+ {text_generation_chat_input.random_seed}"
+ )
+
+ if input_name == "extra_params":
+ input_tensor = deserialize_bytes_tensor(b_input_tensor)
+ extra_params_str = str(input_tensor[0].decode("utf-8"))
+ print(
+ f"[DEBUG] input `extra_params` type\
+ ({type(extra_params_str)}): {extra_params_str}"
+ )
+
+ try:
+ text_generation_chat_input.extra_params = json.loads(
+ extra_params_str
+ )
+ except json.decoder.JSONDecodeError:
+ print("[DEBUG] WARNING `extra_params` parsing failed!")
+ continue
+
+ return text_generation_chat_input
+
+ @staticmethod
+ def parse_task_text_generation_chat_output(sequences: list):
+ text_outputs = [seq["generated_text"].encode("utf-8") for seq in sequences]
+
+ return serialize_byte_tensor(np.asarray(text_outputs))
+
+ @staticmethod
+ def parse_task_visual_question_answering_input(
+ 
request, + ) -> VisualQuestionAnsweringInput: + text_visual_question_answering_input = VisualQuestionAnsweringInput() + + for i, b_input_tensor in zip(request.inputs, request.raw_input_contents): + input_name = i.name + + if input_name == "prompt_image": + input_tensors = deserialize_bytes_tensor(b_input_tensor) + images = [] + for enc in input_tensors: + pil_img = Image.open(io.BytesIO(enc.astype(bytes))) # RGB + image = np.array(pil_img) + if len(image.shape) == 2: # gray image + raise ValueError( + f"The image shape with {image.shape} is " + f"not in acceptable" + ) + images.append(image) + text_visual_question_answering_input.prompt_image = images[0] + print( + f"[DEBUG] input `prompt_image` type\ + ({type(text_visual_question_answering_input.prompt_image)}): \ + {text_visual_question_answering_input.prompt_image}" + ) + + if input_name == "prompt": + input_tensor = deserialize_bytes_tensor(b_input_tensor) + text_visual_question_answering_input.prompt = str( + input_tensor[0].decode("utf-8") + ) + print( + f"[DEBUG] input `prompt` type\ + ({type(text_visual_question_answering_input.prompt)}):\ + {text_visual_question_answering_input.prompt}" + ) + + if input_name == "max_new_tokens": + text_visual_question_answering_input.max_new_tokens = int.from_bytes( + b_input_tensor, "little" + ) + print( + f"[DEBUG] input `max_new_tokens` type\ + ({type(text_visual_question_answering_input.max_new_tokens)}):\ + {text_visual_question_answering_input.max_new_tokens}" + ) + + if input_name == "top_k": + text_visual_question_answering_input.top_k = int.from_bytes( + b_input_tensor, "little" + ) + print( + f"[DEBUG] input `top_k` type\ + ({type(text_visual_question_answering_input.top_k)}):\ + {text_visual_question_answering_input.top_k}" + ) + + if input_name == "temperature": + text_visual_question_answering_input.temperature = struct.unpack( + "f", b_input_tensor + )[0] + print( + f"[DEBUG] input `temperature` type\ + 
({type(text_visual_question_answering_input.temperature)}):\
+ {text_visual_question_answering_input.temperature}"
+ )
+ text_visual_question_answering_input.temperature = round(
+ text_visual_question_answering_input.temperature, 2
+ )
+
+ if input_name == "random_seed":
+ text_visual_question_answering_input.random_seed = int.from_bytes(
+ b_input_tensor, "little"
+ )
+ print(
+ f"[DEBUG] input `random_seed` type\
+ ({type(text_visual_question_answering_input.random_seed)}):\
+ {text_visual_question_answering_input.random_seed}"
+ )
+
+ if input_name == "extra_params":
+ input_tensor = deserialize_bytes_tensor(b_input_tensor)
+ extra_params_str = str(input_tensor[0].decode("utf-8"))
+ print(
+ f"[DEBUG] input `extra_params` type\
+ ({type(extra_params_str)}): {extra_params_str}"
+ )
+
+ try:
+ text_visual_question_answering_input.extra_params = json.loads(
+ extra_params_str
+ )
+ except json.decoder.JSONDecodeError:
+ print("[DEBUG] WARNING `extra_params` parsing failed!")
+ continue
+
+ return text_visual_question_answering_input
+
+ @staticmethod
+ def parse_task_visual_question_answering_output(sequences: list):
+ text_outputs = [seq["generated_text"].encode("utf-8") for seq in sequences]
+
+ return serialize_byte_tensor(np.asarray(text_outputs))
+
 class RawIO:
 @staticmethod
diff --git a/poetry.lock b/poetry.lock
index 27f91d8..0c13f6f 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1119,16 +1119,6 @@ files = [
 {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"},
 {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"},
 {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -1450,6 
+1440,73 @@ files = [ {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, ] +[[package]] +name = "pillow" +version = "10.1.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "Pillow-10.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1ab05f3db77e98f93964697c8efc49c7954b08dd61cff526b7f2531a22410106"}, + {file = "Pillow-10.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6932a7652464746fcb484f7fc3618e6503d2066d853f68a4bd97193a3996e273"}, + {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f63b5a68daedc54c7c3464508d8c12075e56dcfbd42f8c1bf40169061ae666"}, + {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0949b55eb607898e28eaccb525ab104b2d86542a85c74baf3a6dc24002edec2"}, + {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ae88931f93214777c7a3aa0a8f92a683f83ecde27f65a45f95f22d289a69e593"}, + {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b0eb01ca85b2361b09480784a7931fc648ed8b7836f01fb9241141b968feb1db"}, + {file = "Pillow-10.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d27b5997bdd2eb9fb199982bb7eb6164db0426904020dc38c10203187ae2ff2f"}, + {file = "Pillow-10.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7df5608bc38bd37ef585ae9c38c9cd46d7c81498f086915b0f97255ea60c2818"}, + {file = "Pillow-10.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:41f67248d92a5e0a2076d3517d8d4b1e41a97e2df10eb8f93106c89107f38b57"}, + {file = "Pillow-10.1.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1fb29c07478e6c06a46b867e43b0bcdb241b44cc52be9bc25ce5944eed4648e7"}, + {file = "Pillow-10.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2cdc65a46e74514ce742c2013cd4a2d12e8553e3a2563c64879f7c7e4d28bce7"}, + {file = 
"Pillow-10.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50d08cd0a2ecd2a8657bd3d82c71efd5a58edb04d9308185d66c3a5a5bed9610"}, + {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:062a1610e3bc258bff2328ec43f34244fcec972ee0717200cb1425214fe5b839"}, + {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:61f1a9d247317fa08a308daaa8ee7b3f760ab1809ca2da14ecc88ae4257d6172"}, + {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a646e48de237d860c36e0db37ecaecaa3619e6f3e9d5319e527ccbc8151df061"}, + {file = "Pillow-10.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:47e5bf85b80abc03be7455c95b6d6e4896a62f6541c1f2ce77a7d2bb832af262"}, + {file = "Pillow-10.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a92386125e9ee90381c3369f57a2a50fa9e6aa8b1cf1d9c4b200d41a7dd8e992"}, + {file = "Pillow-10.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f7c276c05a9767e877a0b4c5050c8bee6a6d960d7f0c11ebda6b99746068c2a"}, + {file = "Pillow-10.1.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:a89b8312d51715b510a4fe9fc13686283f376cfd5abca8cd1c65e4c76e21081b"}, + {file = "Pillow-10.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:00f438bb841382b15d7deb9a05cc946ee0f2c352653c7aa659e75e592f6fa17d"}, + {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d929a19f5469b3f4df33a3df2983db070ebb2088a1e145e18facbc28cae5b27"}, + {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a92109192b360634a4489c0c756364c0c3a2992906752165ecb50544c251312"}, + {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:0248f86b3ea061e67817c47ecbe82c23f9dd5d5226200eb9090b3873d3ca32de"}, + {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9882a7451c680c12f232a422730f986a1fcd808da0fd428f08b671237237d651"}, + {file = 
"Pillow-10.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1c3ac5423c8c1da5928aa12c6e258921956757d976405e9467c5f39d1d577a4b"}, + {file = "Pillow-10.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:806abdd8249ba3953c33742506fe414880bad78ac25cc9a9b1c6ae97bedd573f"}, + {file = "Pillow-10.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:eaed6977fa73408b7b8a24e8b14e59e1668cfc0f4c40193ea7ced8e210adf996"}, + {file = "Pillow-10.1.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:fe1e26e1ffc38be097f0ba1d0d07fcade2bcfd1d023cda5b29935ae8052bd793"}, + {file = "Pillow-10.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7a7e3daa202beb61821c06d2517428e8e7c1aab08943e92ec9e5755c2fc9ba5e"}, + {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24fadc71218ad2b8ffe437b54876c9382b4a29e030a05a9879f615091f42ffc2"}, + {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa1d323703cfdac2036af05191b969b910d8f115cf53093125e4058f62012c9a"}, + {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:912e3812a1dbbc834da2b32299b124b5ddcb664ed354916fd1ed6f193f0e2d01"}, + {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7dbaa3c7de82ef37e7708521be41db5565004258ca76945ad74a8e998c30af8d"}, + {file = "Pillow-10.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9d7bc666bd8c5a4225e7ac71f2f9d12466ec555e89092728ea0f5c0c2422ea80"}, + {file = "Pillow-10.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baada14941c83079bf84c037e2d8b7506ce201e92e3d2fa0d1303507a8538212"}, + {file = "Pillow-10.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:2ef6721c97894a7aa77723740a09547197533146fba8355e86d6d9a4a1056b14"}, + {file = "Pillow-10.1.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0a026c188be3b443916179f5d04548092e253beb0c3e2ee0a4e2cdad72f66099"}, + {file = "Pillow-10.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:04f6f6149f266a100374ca3cc368b67fb27c4af9f1cc8cb6306d849dcdf12616"}, + {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb40c011447712d2e19cc261c82655f75f32cb724788df315ed992a4d65696bb"}, + {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a8413794b4ad9719346cd9306118450b7b00d9a15846451549314a58ac42219"}, + {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c9aeea7b63edb7884b031a35305629a7593272b54f429a9869a4f63a1bf04c34"}, + {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b4005fee46ed9be0b8fb42be0c20e79411533d1fd58edabebc0dd24626882cfd"}, + {file = "Pillow-10.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4d0152565c6aa6ebbfb1e5d8624140a440f2b99bf7afaafbdbf6430426497f28"}, + {file = "Pillow-10.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d921bc90b1defa55c9917ca6b6b71430e4286fc9e44c55ead78ca1a9f9eba5f2"}, + {file = "Pillow-10.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfe96560c6ce2f4c07d6647af2d0f3c54cc33289894ebd88cfbb3bcd5391e256"}, + {file = "Pillow-10.1.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:937bdc5a7f5343d1c97dc98149a0be7eb9704e937fe3dc7140e229ae4fc572a7"}, + {file = "Pillow-10.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1c25762197144e211efb5f4e8ad656f36c8d214d390585d1d21281f46d556ba"}, + {file = "Pillow-10.1.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:afc8eef765d948543a4775f00b7b8c079b3321d6b675dde0d02afa2ee23000b4"}, + {file = "Pillow-10.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:883f216eac8712b83a63f41b76ddfb7b2afab1b74abbb413c5df6680f071a6b9"}, + {file = "Pillow-10.1.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b920e4d028f6442bea9a75b7491c063f0b9a3972520731ed26c83e254302eb1e"}, + {file = "Pillow-10.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:1c41d960babf951e01a49c9746f92c5a7e0d939d1652d7ba30f6b3090f27e412"}, + {file = "Pillow-10.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1fafabe50a6977ac70dfe829b2d5735fd54e190ab55259ec8aea4aaea412fa0b"}, + {file = "Pillow-10.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3b834f4b16173e5b92ab6566f0473bfb09f939ba14b23b8da1f54fa63e4b623f"}, + {file = "Pillow-10.1.0.tar.gz", hash = "sha256:e6bf8de6c36ed96c86ea3b6e1d5273c53f46ef518a062464cd7ef5dd2cf92e38"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] + [[package]] name = "pkginfo" version = "1.9.6" @@ -2512,4 +2569,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "43ee3d51fbc460b29700a4862ea3c7c8b514f91b758954375b402c3f9f07f719" +content-hash = "4e7d806148553c700970b9eb6ea761acfac5c373fe336ff117b9e8bcdcb06336" diff --git a/pyproject.toml b/pyproject.toml index 2ee405d..14e7ca5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,6 +36,7 @@ google-api-core = "^2.11.1" googleapis-common-protos = "^1.60.0" protoc-gen-openapiv2 = "^0.0.1" pydantic = "*" +pillow = "^10.1.0" [tool.poetry.dev-dependencies]