diff --git a/.gitignore b/.gitignore index 6453239ea..f93a0c567 100644 --- a/.gitignore +++ b/.gitignore @@ -39,4 +39,6 @@ docusaurus/static/docs */lib64/* */include/* */share/* -pyvenv.cfg \ No newline at end of file +pyvenv.cfg +mlruns +mlartifacts \ No newline at end of file diff --git a/docs/integrations/assets/exception_path_landing_page.png b/docs/integrations/assets/exception_path_landing_page.png new file mode 100644 index 000000000..ad179c706 Binary files /dev/null and b/docs/integrations/assets/exception_path_landing_page.png differ diff --git a/docs/integrations/assets/exception_path_trace.png b/docs/integrations/assets/exception_path_trace.png new file mode 100644 index 000000000..064fc0410 Binary files /dev/null and b/docs/integrations/assets/exception_path_trace.png differ diff --git a/docs/integrations/assets/happy_path_traces_landing_page.png b/docs/integrations/assets/happy_path_traces_landing_page.png new file mode 100644 index 000000000..e436bfbf5 Binary files /dev/null and b/docs/integrations/assets/happy_path_traces_landing_page.png differ diff --git a/docs/integrations/assets/llm_span.png b/docs/integrations/assets/llm_span.png new file mode 100644 index 000000000..e009c1d6b Binary files /dev/null and b/docs/integrations/assets/llm_span.png differ diff --git a/docs/integrations/telemetry/mlflow-tracing.ipynb b/docs/integrations/telemetry/mlflow-tracing.ipynb new file mode 100644 index 000000000..27ec3cb16 --- /dev/null +++ b/docs/integrations/telemetry/mlflow-tracing.ipynb @@ -0,0 +1,475 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# MLflow Tracing" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Overview\n", + "In this document, we explain how to set up Guardrails with MLflow Tracing. 
With this functionality enabled, you can collect additional insights on how your Guard, LLM, and each validator are performing directly in your own Databricks workspace.\n", + "\n", + "In this notebook, we'll be using a local MLflow Tracking Server, but you can just as easily switch over to a [hosted Tracking Server](https://mlflow.org/docs/latest/getting-started/tracking-server-overview/index.html#method-3-use-production-hosted-tracking-server).\n", + "\n", + "For additional background information on MLflow Tracing, see the [MLflow documentation](https://mlflow.org/docs/latest/llms/index.html#id1)." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Installing Dependencies\n", + "\n", + "Let's start by installing the dependencies we'll use in this exercise.\n", + "\n", + "First we'll install Guardrails with the `databricks` extra. This will include the [mlflow](https://pypi.org/project/mlflow/) library and any other pip packages we'll need." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# ! pip install \"guardrails-ai[databricks]\" -q\n", + "! pip install \"git+https://github.com/guardrails-ai/guardrails.git@mlflow-integration\" mlflow -q" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next, we'll ensure the Guardrails CLI is properly configured. Specifically we want to use remote inferencing for one of the ML-backed validators we will be using." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "SUCCESS:guardrails-cli:\n", + " Login successful.\n", + "\n", + " Get started by installing our RegexMatch validator:\n", + " https://hub.guardrailsai.com/validator/guardrails_ai/regex_match\n", + "\n", + " You can install it by running:\n", + " guardrails hub install hub://guardrails/regex_match\n", + "\n", + " Find more validators at https://hub.guardrailsai.com\n", + " \n" + ] + } + ], + "source": [ + "! guardrails configure --enable-metrics --token $GUARDRAILS_TOKEN --enable-remote-inferencing" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Finally, we'll install some validators from the Guardrails Hub." + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Installing hub:\u001b[35m/\u001b[0m\u001b[35m/tryolabs/\u001b[0m\u001b[95mrestricttotopic...\u001b[0m\n", + "✅Successfully installed tryolabs/restricttotopic!\n", + "\n", + "\n", + "Installing hub:\u001b[35m/\u001b[0m\u001b[35m/guardrails/\u001b[0m\u001b[95mvalid_length...\u001b[0m\n", + "✅Successfully installed guardrails/valid_length!\n", + "\n", + "\n" + ] + } + ], + "source": [ + "! guardrails hub install hub://tryolabs/restricttotopic --no-install-local-models --quiet\n", + "! guardrails hub install hub://guardrails/valid_length --quiet" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Starting the MLflow Tracking Server\n", + "\n", + "Our next step is to start the MLflow Tracking server. This stands up both the telemetry sink we will send traces to, as well as the web interface we can use to examine them. You'll need to run this next step is a separate terminal since, otherwise, the server's processes will block execution of the conesecutive cells in this notebook (which is normal)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Run this in the terminal or this cell will block the rest of the notebook\n", + "# ! mlflow server --host localhost --port 8080" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Creating and Instrumenting our Guard\n", + "\n", + "Next up, we'll instrument the Guardrails package to send traces to the MLflow Tracking Server as well as setup our LLM and Guard. \n", + "\n", + "As of `guardrails-ai` version 0.5.8, we offer a builtin instrumentor for MLflow." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import mlflow\n", + "from guardrails.integrations.databricks import MlFlowInstrumentor\n", + "\n", + "mlflow.set_tracking_uri(uri=\"http://localhost:8080\")\n", + "\n", + "MlFlowInstrumentor(experiment_name=\"My First Experiment\").instrument()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This instrumentor wraps some of the key functions and flows within Guardrails and automatically captures trace data when the Guard is run.\n", + "\n", + "Now that the Guardrails package is instrumented, we can create our Guard." + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "from guardrails import Guard\n", + "from guardrails.hub import RestrictToTopic, ValidLength\n", + "\n", + "guard = Guard(name='content-guard').use_many(\n", + " RestrictToTopic(valid_topics=[\"computer programming\", \"computer science\", \"algorithms\"], disable_llm=True, on_fail=\"exception\"),\n", + " ValidLength(min=1, max=150, on_fail=\"exception\")\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In this example, we have created a Guard that uses two Validators: RestrictToTopic and ValidLength. 
The RestrictToTopic Validator ensures that the text is related to the topics we specify, while the ValidLength Guardrail ensures that the text stays within our character limit." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Testing and Tracking our Guard\n", + "Next we'll test our our Guard by calling an LLM and letting the Guard validate the output. After each execution, we'll look at the trace data collected by MLflow Tracking Server." + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "\n", + "# Setup some environment variables for the LLM\n", + "os.environ[\"DATABRICKS_API_KEY\"] = os.environ.get(\"DATABRICKS_TOKEN\", \"your-databricks-key\")\n", + "os.environ[\"DATABRICKS_API_BASE\"] = os.environ.get(\"DATABRICKS_HOST\", \"https://abc-123ab12a-1234.cloud.databricks.com\")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "First, we'll give the LLM an easy prompt that should result in an output that passes validation. Consider this our happy path test." + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
================== Validated LLM output ================== \n", + "\n" + ], + "text/plain": [ + " ================== Validated LLM output ================== \n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\"Recursion: A method solving problems by solving smaller instances, calling itself with reduced input until \n", + "reaching a base case.\"\n", + "\n" + ], + "text/plain": [ + "\u001b[32m\"Recursion: A method solving problems by solving smaller instances, calling itself with reduced input until \u001b[0m\n", + "\u001b[32mreaching a base case.\"\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "from rich import print\n", + "\n", + "instructions = { \"role\": \"system\", \"content\": \"You are a helpful assistant that gives advice about writing clean code and other programming practices.\" }\n", + "prompt = \"Write a short summary about recursion in less than 100 characters.\"\n", + "\n", + "try:\n", + " result = guard(\n", + " model=\"databricks/databricks-dbrx-instruct\",\n", + " messages=[instructions, { \"role\":\"user\", \"content\": prompt }],\n", + " )\n", + "\n", + " print(\" ================== Validated LLM output ================== \")\n", + " print(result.validated_output)\n", + "except Exception as e:\n", + " print(\"Oops! That didn't go as planned...\")\n", + " print(e)\n", + " \n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "If we navigate to http://localhost:8080 in our browser we can see our experiemnt, `My First Experiment`, in the list on the left hand side. If we select our experiment, and then select the `Traces` tab, we should see one trace from the cell we just ran.\n", + "\n", + "\n", + "If we select this trace, we see a breakdown of the various steps taken within the Guard on the left, including a timeline, and a details view for the selected span on the right. If you click on the different spans within the trace, you can see different attributes specific to that span. 
For example, if you click on `guardrails/guard/step/call`, the span that tracked the call to the LLM, you can see all of the parameters that were used to call the LLM, as well as all of the outputs from the LLM including token counts.\n", + "" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next, let's give the LLM a prompt that instructs it to output something that should fail. Consider this our exception path test." + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
================== LLM output ================== \n", + "\n" + ], + "text/plain": [ + " ================== LLM output ================== \n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
In the realm where the green field doth lie,\n", + "Where the sun shines bright and the sky's azure high,\n", + "A game of skill, of strategy and might,\n", + "Unfolds in innings, under the sun's warm light.\n", + "\n", + "Batter up, the crowd cheers with delight,\n", + "As the pitcher winds up, with all his might,\n", + "The ball whizzes fast, a blur of white,\n", + "A dance of power, in the afternoon light.\n", + "\n", + "The bat meets ball, a crack, a sight,\n", + "A thrill runs through, like an electric spike,\n", + "The fielders scatter, in a frantic hike,\n", + "To catch or miss, it's all in the strike.\n", + "\n", + "The bases loaded, the tension's tight,\n", + "A single run could end the night,\n", + "The crowd holds breath, in anticipation's height,\n", + "For the game's outcome, in this baseball fight.\n", + "\n", + "The outfielder leaps, with all his height,\n", + "A catch or miss, could decide the plight,\n", + "The ball falls short, in the glove's tight knit,\n", + "A collective sigh, as the inning's writ.\n", + "\n", + "The game goes on, through day and night,\n", + "A battle of wills, in the stadium's light,\n", + "A symphony of plays, in the diamond's sight,\n", + "A poem of baseball, in black and white.\n", + "\n" + ], + "text/plain": [ + "In the realm where the green field doth lie,\n", + "Where the sun shines bright and the sky's azure high,\n", + "A game of skill, of strategy and might,\n", + "Unfolds in innings, under the sun's warm light.\n", + "\n", + "Batter up, the crowd cheers with delight,\n", + "As the pitcher winds up, with all his might,\n", + "The ball whizzes fast, a blur of white,\n", + "A dance of power, in the afternoon light.\n", + "\n", + "The bat meets ball, a crack, a sight,\n", + "A thrill runs through, like an electric spike,\n", + "The fielders scatter, in a frantic hike,\n", + "To catch or miss, it's all in the strike.\n", + "\n", + "The bases loaded, the tension's tight,\n", + "A single run could end the night,\n", + "The 
crowd holds breath, in anticipation's height,\n", + "For the game's outcome, in this baseball fight.\n", + "\n", + "The outfielder leaps, with all his height,\n", + "A catch or miss, could decide the plight,\n", + "The ball falls short, in the glove's tight knit,\n", + "A collective sigh, as the inning's writ.\n", + "\n", + "The game goes on, through day and night,\n", + "A battle of wills, in the stadium's light,\n", + "A symphony of plays, in the diamond's sight,\n", + "A poem of baseball, in black and white.\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + " ================== Validation Errors ================== \n", + "\n" + ], + "text/plain": [ + "\n", + "\n", + " ================== Validation Errors ================== \n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n", + "RestrictToTopic: No valid topic was found.\n", + "\n" + ], + "text/plain": [ + "\n", + "RestrictToTopic: No valid topic was found.\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "prompt = \"Write a really long poem about baseball.\"\n", + "\n", + "try:\n", + " result = guard(\n", + " model=\"databricks/databricks-dbrx-instruct\",\n", + " messages=[instructions, { \"role\":\"user\", \"content\": prompt }],\n", + " )\n", + "\n", + " print(\"This success was unexpected. Let's look at the output to see why it passed.\")\n", + " print(result.validated_output)\n", + "except Exception as e:\n", + " # Great! It failed just like we expected it to!\n", + " # First, let's look at what the LLM generated.\n", + " print(\" ================== LLM output ================== \")\n", + " print(guard.history.last.raw_outputs.last)\n", + "\n", + " # Next, let's examine the validation errors\n", + " print(\"\\n\\n ================== Validation Errors ================== \")\n", + " for failed_validation in guard.history.last.failed_validations:\n", + " print(f\"\\n{failed_validation.validator_name}: {failed_validation.validation_result.error_message}\")\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "First note that there is only one failed validator in the logs: `RestrictToTopic`. This is because since we set `on_fail=\"exception\"`, the first failure to occur will raise an exception and interrupt the process. If we set our OnFail action to a different value, like `noop`, we would also see a log for `ValidLength` since the LLM's output is clearly longer than the max length we specified.\n", + "\n", + "If navigate back to the MLflow UI in our browser, we see another trace. Since this last cell raised an exception, we see that the status is listed as `Error`.\n", + "\n", + "\n", + "If we open this new trace we see, just like in the history logs, only `RestrictToTopic` has a recorded span. 
This is, again, because it raised an exception on failure, exiting the validation loop early.\n", + "\n", + "If we click on the validator's span, and scroll down to the bottom of its details panel, we can see the reason why validation failed: `\"No valid topic was found.\"`\n", + "" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Conclusion\n", + "\n", + "With Guardrails, MLflow, and the Guardrails MlFlowInstrumentor, we can easily monitor both our LLMs and the validations we're guarding them with. To learn more, check out [Guardrails AI](https://www.guardrailsai.com/) and [MLflow](https://mlflow.org/docs/latest/index.html)." + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.4" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/docusaurus/sidebars.js b/docusaurus/sidebars.js index b5fccfb22..1cce74653 100644 --- a/docusaurus/sidebars.js +++ b/docusaurus/sidebars.js @@ -115,13 +115,14 @@ const sidebars = { { type: "link", label: "Iudex", - href: "https://docs.iudex.ai/integrations/guardrails-integration", + href: "https://docs.iudex.ai/integrations/guardrails", }, { type: "link", label: "OpenLIT", href: "https://docs.openlit.io/latest/integrations/guardrails", }, + "integrations/telemetry/mlflow-tracing", ], }, // "integrations/openai_functions", diff --git a/guardrails/integrations/__init__.py b/guardrails/integrations/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/guardrails/integrations/databricks/__init__.py b/guardrails/integrations/databricks/__init__.py new file mode 100644 index 000000000..b2468bcae --- /dev/null +++ b/guardrails/integrations/databricks/__init__.py @@ -0,0 
+1,3 @@ +from guardrails.integrations.databricks.ml_flow_instrumentor import MlFlowInstrumentor + +__all__ = ["MlFlowInstrumentor"] diff --git a/guardrails/integrations/databricks/ml_flow_instrumentor.py b/guardrails/integrations/databricks/ml_flow_instrumentor.py new file mode 100644 index 000000000..38d458f46 --- /dev/null +++ b/guardrails/integrations/databricks/ml_flow_instrumentor.py @@ -0,0 +1,429 @@ +from functools import wraps +import inspect +import sys +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Coroutine, + Generator, + Iterable, + Union, +) + +from guardrails import Guard, AsyncGuard, settings +from guardrails.classes.validation.validation_result import ValidationResult +from guardrails.run import Runner, StreamRunner, AsyncRunner, AsyncStreamRunner +from guardrails.validator_base import Validator +from guardrails.version import GUARDRAILS_VERSION +from guardrails.telemetry.guard_tracing import ( + add_guard_attributes, + trace_stream_guard, + trace_async_stream_guard, +) +from guardrails.telemetry.runner_tracing import add_step_attributes, add_call_attributes +from guardrails.telemetry.validator_tracing import add_validator_attributes +from guardrails.classes.generic.stack import Stack +from guardrails.classes.llm.llm_response import LLMResponse +from guardrails.classes.history.iteration import Iteration +from guardrails.classes.output_type import OT +from guardrails.classes.validation_outcome import ValidationOutcome +from guardrails.utils.safe_get import safe_get + +try: + import mlflow + import mlflow.tracing + import mlflow.tracing.provider + from mlflow.entities.span_status import SpanStatusCode +except ImportError: + raise ImportError("Please install mlflow to use this instrumentor") + + +if sys.version_info.minor < 10: + from guardrails.utils.polyfills import anext + + +# TODO: Abstract these methods and common logic into a base class +# that can be extended by other instrumentors +class MlFlowInstrumentor: + 
"""Instruments Guardrails to send traces to MLFlow.""" + + def __init__(self, experiment_name: str): + self.experiment_name = experiment_name + # Disable legacy OTEL tracing to avoid duplicate spans + settings.disable_tracing = True + + def instrument(self): + if not mlflow.tracing.provider._is_enabled(): + mlflow.tracing.enable() + mlflow.set_experiment(self.experiment_name) + + wrapped_guard_execute = self._instrument_guard(Guard._execute) + setattr(Guard, "_execute", wrapped_guard_execute) + + wrapped_async_guard_execute = self._instrument_async_guard(AsyncGuard._execute) + setattr(AsyncGuard, "_execute", wrapped_async_guard_execute) + + wrapped_runner_step = self._instrument_runner_step(Runner.step) + setattr(Runner, "step", wrapped_runner_step) + + wrapped_stream_runner_step = self._instrument_stream_runner_step( + StreamRunner.step + ) + setattr(StreamRunner, "step", wrapped_stream_runner_step) + + wrapped_async_runner_step = self._instrument_async_runner_step( + AsyncRunner.async_step + ) + setattr(AsyncRunner, "async_step", wrapped_async_runner_step) + + wrapped_async_stream_runner_step = self._instrument_async_stream_runner_step( + AsyncStreamRunner.async_step # type: ignore + ) + setattr(AsyncStreamRunner, "async_step", wrapped_async_stream_runner_step) + + wrapped_runner_call = self._instrument_runner_call(Runner.call) + setattr(Runner, "call", wrapped_runner_call) + + wrapped_async_runner_call = self._instrument_async_runner_call( + AsyncRunner.async_call + ) + setattr(AsyncRunner, "async_call", wrapped_async_runner_call) + + import guardrails + + validators = guardrails.hub.__dir__() # type: ignore + + for validator_name in validators: + export = getattr(guardrails.hub, validator_name) # type: ignore + if isinstance(export, type) and issubclass(export, Validator): + wrapped_validator_validate = self._instrument_validator_validate( + export.validate + ) + setattr(export, "validate", wrapped_validator_validate) + setattr(guardrails.hub, validator_name, 
export) # type: ignore + + def _instrument_guard( + self, + guard_execute: Callable[ + ..., Union[ValidationOutcome[OT], Iterable[ValidationOutcome[OT]]] + ], + ): + @wraps(guard_execute) + def _guard_execute_wrapper( + *args, **kwargs + ) -> Union[ValidationOutcome[OT], Iterable[ValidationOutcome[OT]]]: + with mlflow.start_span( + name="guardrails/guard", + span_type="guard", + attributes={ + "guardrails.version": GUARDRAILS_VERSION, + "type": "guardrails/guard", + }, + ) as guard_span: + guard_self = args[0] + history = Stack() + + if guard_self is not None and isinstance(guard_self, Guard): + guard_span.set_attribute("guard.name", guard_self.name) + history = guard_self.history + + try: + result = guard_execute(*args, **kwargs) + if isinstance(result, Iterable) and not isinstance( + result, ValidationOutcome + ): + return trace_stream_guard(guard_span, result, history) # type: ignore + add_guard_attributes(guard_span, history, result) # type: ignore + return result + except Exception as e: + guard_span.set_status(status=SpanStatusCode.ERROR) + raise e + + return _guard_execute_wrapper + + def _instrument_async_guard( + self, + guard_execute: Callable[ + ..., + Coroutine[ + Any, + Any, + Union[ + ValidationOutcome[OT], + Awaitable[ValidationOutcome[OT]], + AsyncIterable[ValidationOutcome[OT]], + ], + ], + ], + ): + @wraps(guard_execute) + async def _async_guard_execute_wrapper( + *args, **kwargs + ) -> Union[ + ValidationOutcome[OT], + Awaitable[ValidationOutcome[OT]], + AsyncIterable[ValidationOutcome[OT]], + ]: + with mlflow.start_span( + name="guardrails/guard", + span_type="guard", + attributes={ + "guardrails.version": GUARDRAILS_VERSION, + "type": "guardrails/guard", + "async": True, + }, + ) as guard_span: + guard_self = args[0] + history = Stack() + + if guard_self is not None and isinstance(guard_self, Guard): + guard_span.set_attribute("guard.name", guard_self.name) + history = guard_self.history + + try: + result = await guard_execute(*args, **kwargs) 
+ if isinstance(result, AsyncIterable): + return trace_async_stream_guard(guard_span, result, history) # type: ignore + res = result + if inspect.isawaitable(result): + res = await result + add_guard_attributes(guard_span, history, res) # type: ignore + return res + except Exception as e: + guard_span.set_status(status=SpanStatusCode.ERROR) + raise e + + return _async_guard_execute_wrapper + + def _instrument_runner_step(self, runner_step: Callable[..., Iteration]): + @wraps(runner_step) + def trace_step_wrapper(*args, **kwargs) -> Iteration: + with mlflow.start_span( + name="guardrails/guard/step", + span_type="step", + attributes={ + "guardrails.version": GUARDRAILS_VERSION, + "type": "guardrails/guard/step", + }, + ) as step_span: + try: + response = runner_step(*args, **kwargs) + add_step_attributes(step_span, response, *args, **kwargs) # type: ignore + return response + except Exception as e: + step_span.set_status(status=SpanStatusCode.ERROR) + add_step_attributes(step_span, None, *args, **kwargs) # type: ignore + raise e + + return trace_step_wrapper + + def _instrument_stream_runner_step( + self, runner_step: Callable[..., Generator[ValidationOutcome[OT], None, None]] + ): + @wraps(runner_step) + def trace_stream_step_wrapper( + *args, **kwargs + ) -> Generator[ValidationOutcome[OT], None, None]: + with mlflow.start_span( + name="guardrails/guard/step", + span_type="step", + attributes={ + "guardrails.version": GUARDRAILS_VERSION, + "type": "guardrails/guard/step", + "stream": True, + }, + ) as step_span: + exception = None + try: + gen = runner_step(*args, **kwargs) + next_exists = True + while next_exists: + try: + res = next(gen) + yield res + except StopIteration: + next_exists = False + except Exception as e: + step_span.set_status(status=SpanStatusCode.ERROR) + exception = e + finally: + call = safe_get(args, 8, kwargs.get("call_log", None)) + iteration = call.iterations.last if call else None + add_step_attributes(step_span, iteration, *args, 
**kwargs) # type: ignore + if exception: + raise exception + + return trace_stream_step_wrapper + + def _instrument_async_runner_step( + self, runner_step: Callable[..., Awaitable[Iteration]] + ): + @wraps(runner_step) + async def trace_async_step_wrapper(*args, **kwargs) -> Iteration: + with mlflow.start_span( + name="guardrails/guard/step", + span_type="step", + attributes={ + "guardrails.version": GUARDRAILS_VERSION, + "type": "guardrails/guard/step", + "async": True, + }, + ) as step_span: + try: + response = await runner_step(*args, **kwargs) + add_step_attributes(step_span, response, *args, **kwargs) # type: ignore + return response + except Exception as e: + step_span.set_status(status=SpanStatusCode.ERROR) + add_step_attributes(step_span, None, *args, **kwargs) # type: ignore + raise e + + return trace_async_step_wrapper + + def _instrument_async_stream_runner_step( + self, runner_step: Callable[..., AsyncIterable[ValidationOutcome[OT]]] + ) -> Callable[..., AsyncIterable[ValidationOutcome[OT]]]: + @wraps(runner_step) + async def trace_async_stream_step_wrapper( + *args, **kwargs + ) -> AsyncIterable[ValidationOutcome[OT]]: + with mlflow.start_span( + name="guardrails/guard/step", + span_type="step", + attributes={ + "guardrails.version": GUARDRAILS_VERSION, + "type": "guardrails/guard/step", + "async": True, + "stream": True, + }, + ) as step_span: + exception = None + try: + gen = runner_step(*args, **kwargs) + next_exists = True + while next_exists: + try: + res = await anext(gen) + yield res + except StopIteration: + next_exists = False + except StopAsyncIteration: + next_exists = False + except Exception as e: + step_span.set_status(status=SpanStatusCode.ERROR) + exception = e + finally: + call = safe_get(args, 3, kwargs.get("call_log", None)) + iteration = call.iterations.last if call else None + add_step_attributes(step_span, iteration, *args, **kwargs) # type: ignore + if exception: + raise exception + + return trace_async_stream_step_wrapper + + 
def _instrument_runner_call(self, runner_call: Callable[..., LLMResponse]): + @wraps(runner_call) + def trace_call_wrapper(*args, **kwargs): + with mlflow.start_span( + name="guardrails/guard/step/call", + span_type="LLM", + attributes={ + "guardrails.version": GUARDRAILS_VERSION, + "type": "guardrails/guard/step/call", + }, + ) as call_span: + try: + response = runner_call(*args, **kwargs) + add_call_attributes(call_span, response, *args, **kwargs) # type: ignore + return response + except Exception as e: + call_span.set_status(status=SpanStatusCode.ERROR) + add_call_attributes(call_span, None, *args, **kwargs) # type: ignore + raise e + + return trace_call_wrapper + + def _instrument_async_runner_call( + self, runner_call: Callable[..., Awaitable[LLMResponse]] + ): + @wraps(runner_call) + async def trace_async_call_wrapper(*args, **kwargs): + with mlflow.start_span( + name="guardrails/guard/step/call", + span_type="LLM", + attributes={ + "guardrails.version": GUARDRAILS_VERSION, + "type": "guardrails/guard/step/call", + "async": True, + }, + ) as call_span: + try: + response = await runner_call(*args, **kwargs) + add_call_attributes(call_span, response, *args, **kwargs) # type: ignore + return response + except Exception as e: + call_span.set_status(status=SpanStatusCode.ERROR) + add_call_attributes(call_span, None, *args, **kwargs) # type: ignore + raise e + + return trace_async_call_wrapper + + def _instrument_validator_validate( + self, validator_validate: Callable[..., ValidationResult] + ): + @wraps(validator_validate) + def trace_validator_wrapper(*args, **kwargs): + validator_name = "validator" + obj_id = id(validator_validate) + on_fail_descriptor = "unknown" + init_kwargs = {} + validation_session_id = "unknown" + + validator_self = args[0] + if validator_self is not None and isinstance(validator_self, Validator): + validator_name = validator_self.rail_alias + obj_id = id(validator_self) + on_fail_descriptor = validator_self.on_fail_descriptor + 
init_kwargs = validator_self._kwargs + + validator_span_name = f"{validator_name}.validate" + with mlflow.start_span( + name=validator_span_name, + span_type="validator", + attributes={ + "guardrails.version": GUARDRAILS_VERSION, + "type": "guardrails/guard/step/validator", + }, + ) as validator_span: + try: + resp = validator_validate(*args, **kwargs) + add_validator_attributes( + *args, + validator_span=validator_span, # type: ignore + validator_name=validator_name, + obj_id=obj_id, + on_fail_descriptor=on_fail_descriptor, + result=resp, + init_kwargs=init_kwargs, + validation_session_id=validation_session_id, + **kwargs, + ) + return resp + except Exception as e: + validator_span.set_status(status=SpanStatusCode.ERROR) + add_validator_attributes( + *args, + validator_span=validator_span, # type: ignore + validator_name=validator_name, + obj_id=obj_id, + on_fail_descriptor=on_fail_descriptor, + result=None, + init_kwargs=init_kwargs, + validation_session_id=validation_session_id, + **kwargs, + ) + raise e + + return trace_validator_wrapper diff --git a/guardrails/integrations/langchain/__init__.py b/guardrails/integrations/langchain/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/guardrails/telemetry/runner_tracing.py b/guardrails/telemetry/runner_tracing.py index 87b4396f5..9bcba353f 100644 --- a/guardrails/telemetry/runner_tracing.py +++ b/guardrails/telemetry/runner_tracing.py @@ -178,6 +178,8 @@ async def trace_async_stream_step_generator( yield res except StopIteration: next_exists = False + except StopAsyncIteration: + next_exists = False except Exception as e: step_span.set_status(status=StatusCode.ERROR, description=str(e)) exception = e diff --git a/guardrails/utils/polyfills.py b/guardrails/utils/polyfills.py new file mode 100644 index 000000000..f9d585adf --- /dev/null +++ b/guardrails/utils/polyfills.py @@ -0,0 +1,2 @@ +def anext(aiter): + return aiter.__anext__() diff --git a/package.json b/package.json index 
47f21975e..04206888b 100644 --- a/package.json +++ b/package.json @@ -11,7 +11,7 @@ "swizzle": "docusaurus swizzle", "deploy": "docusaurus deploy", "clear": "docusaurus clear", - "serve": "docusaurus serve", + "serve": "docusaurus serve --config docusaurus/docusaurus.config.js", "write-translations": "docusaurus write-translations", "write-heading-ids": "docusaurus write-heading-ids", "restart": "rm -rf docs/api_reference_markdown; rm -rf docs-build; npm run start" diff --git a/poetry.lock b/poetry.lock index 3694d1450..2cdbf4fbb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -133,6 +133,39 @@ files = [ {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, ] +[[package]] +name = "alembic" +version = "1.13.2" +description = "A database migration tool for SQLAlchemy." +optional = true +python-versions = ">=3.8" +files = [ + {file = "alembic-1.13.2-py3-none-any.whl", hash = "sha256:6b8733129a6224a9a711e17c99b08462dbf7cc9670ba8f2e2ae9af860ceb1953"}, + {file = "alembic-1.13.2.tar.gz", hash = "sha256:1ff0ae32975f4fd96028c39ed9bb3c867fe3af956bd7bb37343b54c9fe7445ef"}, +] + +[package.dependencies] +Mako = "*" +SQLAlchemy = ">=1.3.0" +typing-extensions = ">=4" + +[package.extras] +tz = ["backports.zoneinfo"] + +[[package]] +name = "aniso8601" +version = "9.0.1" +description = "A library for parsing ISO 8601 strings." 
+optional = true +python-versions = "*" +files = [ + {file = "aniso8601-9.0.1-py2.py3-none-any.whl", hash = "sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f"}, + {file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"}, +] + +[package.extras] +dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] + [[package]] name = "annotated-types" version = "0.7.0" @@ -525,6 +558,17 @@ files = [ {file = "cachelib-0.9.0.tar.gz", hash = "sha256:38222cc7c1b79a23606de5c2607f4925779e37cdcea1c2ad21b8bae94b5425a5"}, ] +[[package]] +name = "cachetools" +version = "5.5.0" +description = "Extensible memoizing collections and decorators" +optional = true +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, + {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, +] + [[package]] name = "cairocffi" version = "1.7.0" @@ -765,6 +809,17 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} +[[package]] +name = "cloudpickle" +version = "3.0.0" +description = "Pickler class to extend the standard pickle.Pickler functionality" +optional = true +python-versions = ">=3.8" +files = [ + {file = "cloudpickle-3.0.0-py3-none-any.whl", hash = "sha256:246ee7d0c295602a036e86369c77fecda4ab17b506496730f2f576d9016fd9c7"}, + {file = "cloudpickle-3.0.0.tar.gz", hash = "sha256:996d9a482c6fb4f33c1a35335cf8afd065d2a56e973270364840712d9131a882"}, +] + [[package]] name = "colorama" version = "0.4.6" @@ -810,6 +865,90 @@ traitlets = ">=4" [package.extras] test = ["pytest"] +[[package]] +name = "contourpy" +version = "1.3.0" +description = "Python library for calculating contours of 2D quadrilateral grids" +optional = true +python-versions = ">=3.9" +files = [ + {file = 
"contourpy-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:880ea32e5c774634f9fcd46504bf9f080a41ad855f4fef54f5380f5133d343c7"}, + {file = "contourpy-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:76c905ef940a4474a6289c71d53122a4f77766eef23c03cd57016ce19d0f7b42"}, + {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92f8557cbb07415a4d6fa191f20fd9d2d9eb9c0b61d1b2f52a8926e43c6e9af7"}, + {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36f965570cff02b874773c49bfe85562b47030805d7d8360748f3eca570f4cab"}, + {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cacd81e2d4b6f89c9f8a5b69b86490152ff39afc58a95af002a398273e5ce589"}, + {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69375194457ad0fad3a839b9e29aa0b0ed53bb54db1bfb6c3ae43d111c31ce41"}, + {file = "contourpy-1.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a52040312b1a858b5e31ef28c2e865376a386c60c0e248370bbea2d3f3b760d"}, + {file = "contourpy-1.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3faeb2998e4fcb256542e8a926d08da08977f7f5e62cf733f3c211c2a5586223"}, + {file = "contourpy-1.3.0-cp310-cp310-win32.whl", hash = "sha256:36e0cff201bcb17a0a8ecc7f454fe078437fa6bda730e695a92f2d9932bd507f"}, + {file = "contourpy-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:87ddffef1dbe5e669b5c2440b643d3fdd8622a348fe1983fad7a0f0ccb1cd67b"}, + {file = "contourpy-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fa4c02abe6c446ba70d96ece336e621efa4aecae43eaa9b030ae5fb92b309ad"}, + {file = "contourpy-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:834e0cfe17ba12f79963861e0f908556b2cedd52e1f75e6578801febcc6a9f49"}, + {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:dbc4c3217eee163fa3984fd1567632b48d6dfd29216da3ded3d7b844a8014a66"}, + {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4865cd1d419e0c7a7bf6de1777b185eebdc51470800a9f42b9e9decf17762081"}, + {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:303c252947ab4b14c08afeb52375b26781ccd6a5ccd81abcdfc1fafd14cf93c1"}, + {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637f674226be46f6ba372fd29d9523dd977a291f66ab2a74fbeb5530bb3f445d"}, + {file = "contourpy-1.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:76a896b2f195b57db25d6b44e7e03f221d32fe318d03ede41f8b4d9ba1bff53c"}, + {file = "contourpy-1.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e1fd23e9d01591bab45546c089ae89d926917a66dceb3abcf01f6105d927e2cb"}, + {file = "contourpy-1.3.0-cp311-cp311-win32.whl", hash = "sha256:d402880b84df3bec6eab53cd0cf802cae6a2ef9537e70cf75e91618a3801c20c"}, + {file = "contourpy-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:6cb6cc968059db9c62cb35fbf70248f40994dfcd7aa10444bbf8b3faeb7c2d67"}, + {file = "contourpy-1.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:570ef7cf892f0afbe5b2ee410c507ce12e15a5fa91017a0009f79f7d93a1268f"}, + {file = "contourpy-1.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:da84c537cb8b97d153e9fb208c221c45605f73147bd4cadd23bdae915042aad6"}, + {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0be4d8425bfa755e0fd76ee1e019636ccc7c29f77a7c86b4328a9eb6a26d0639"}, + {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c0da700bf58f6e0b65312d0a5e695179a71d0163957fa381bb3c1f72972537c"}, + {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb8b141bb00fa977d9122636b16aa67d37fd40a3d8b52dd837e536d64b9a4d06"}, + {file = 
"contourpy-1.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3634b5385c6716c258d0419c46d05c8aa7dc8cb70326c9a4fb66b69ad2b52e09"}, + {file = "contourpy-1.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0dce35502151b6bd35027ac39ba6e5a44be13a68f55735c3612c568cac3805fd"}, + {file = "contourpy-1.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea348f053c645100612b333adc5983d87be69acdc6d77d3169c090d3b01dc35"}, + {file = "contourpy-1.3.0-cp312-cp312-win32.whl", hash = "sha256:90f73a5116ad1ba7174341ef3ea5c3150ddf20b024b98fb0c3b29034752c8aeb"}, + {file = "contourpy-1.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:b11b39aea6be6764f84360fce6c82211a9db32a7c7de8fa6dd5397cf1d079c3b"}, + {file = "contourpy-1.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3e1c7fa44aaae40a2247e2e8e0627f4bea3dd257014764aa644f319a5f8600e3"}, + {file = "contourpy-1.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:364174c2a76057feef647c802652f00953b575723062560498dc7930fc9b1cb7"}, + {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32b238b3b3b649e09ce9aaf51f0c261d38644bdfa35cbaf7b263457850957a84"}, + {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d51fca85f9f7ad0b65b4b9fe800406d0d77017d7270d31ec3fb1cc07358fdea0"}, + {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:732896af21716b29ab3e988d4ce14bc5133733b85956316fb0c56355f398099b"}, + {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d73f659398a0904e125280836ae6f88ba9b178b2fed6884f3b1f95b989d2c8da"}, + {file = "contourpy-1.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c6c7c2408b7048082932cf4e641fa3b8ca848259212f51c8c59c45aa7ac18f14"}, + {file = "contourpy-1.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:f317576606de89da6b7e0861cf6061f6146ead3528acabff9236458a6ba467f8"}, + {file = "contourpy-1.3.0-cp313-cp313-win32.whl", hash = "sha256:31cd3a85dbdf1fc002280c65caa7e2b5f65e4a973fcdf70dd2fdcb9868069294"}, + {file = "contourpy-1.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:4553c421929ec95fb07b3aaca0fae668b2eb5a5203d1217ca7c34c063c53d087"}, + {file = "contourpy-1.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:345af746d7766821d05d72cb8f3845dfd08dd137101a2cb9b24de277d716def8"}, + {file = "contourpy-1.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3bb3808858a9dc68f6f03d319acd5f1b8a337e6cdda197f02f4b8ff67ad2057b"}, + {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:420d39daa61aab1221567b42eecb01112908b2cab7f1b4106a52caaec8d36973"}, + {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4d63ee447261e963af02642ffcb864e5a2ee4cbfd78080657a9880b8b1868e18"}, + {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:167d6c890815e1dac9536dca00828b445d5d0df4d6a8c6adb4a7ec3166812fa8"}, + {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:710a26b3dc80c0e4febf04555de66f5fd17e9cf7170a7b08000601a10570bda6"}, + {file = "contourpy-1.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:75ee7cb1a14c617f34a51d11fa7524173e56551646828353c4af859c56b766e2"}, + {file = "contourpy-1.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:33c92cdae89ec5135d036e7218e69b0bb2851206077251f04a6c4e0e21f03927"}, + {file = "contourpy-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a11077e395f67ffc2c44ec2418cfebed032cd6da3022a94fc227b6faf8e2acb8"}, + {file = "contourpy-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e8134301d7e204c88ed7ab50028ba06c683000040ede1d617298611f9dc6240c"}, + {file = 
"contourpy-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e12968fdfd5bb45ffdf6192a590bd8ddd3ba9e58360b29683c6bb71a7b41edca"}, + {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fd2a0fc506eccaaa7595b7e1418951f213cf8255be2600f1ea1b61e46a60c55f"}, + {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4cfb5c62ce023dfc410d6059c936dcf96442ba40814aefbfa575425a3a7f19dc"}, + {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68a32389b06b82c2fdd68276148d7b9275b5f5cf13e5417e4252f6d1a34f72a2"}, + {file = "contourpy-1.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:94e848a6b83da10898cbf1311a815f770acc9b6a3f2d646f330d57eb4e87592e"}, + {file = "contourpy-1.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d78ab28a03c854a873787a0a42254a0ccb3cb133c672f645c9f9c8f3ae9d0800"}, + {file = "contourpy-1.3.0-cp39-cp39-win32.whl", hash = "sha256:81cb5ed4952aae6014bc9d0421dec7c5835c9c8c31cdf51910b708f548cf58e5"}, + {file = "contourpy-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:14e262f67bd7e6eb6880bc564dcda30b15e351a594657e55b7eec94b6ef72843"}, + {file = "contourpy-1.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:fe41b41505a5a33aeaed2a613dccaeaa74e0e3ead6dd6fd3a118fb471644fd6c"}, + {file = "contourpy-1.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eca7e17a65f72a5133bdbec9ecf22401c62bcf4821361ef7811faee695799779"}, + {file = "contourpy-1.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1ec4dc6bf570f5b22ed0d7efba0dfa9c5b9e0431aeea7581aa217542d9e809a4"}, + {file = "contourpy-1.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:00ccd0dbaad6d804ab259820fa7cb0b8036bda0686ef844d24125d8287178ce0"}, + {file = "contourpy-1.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8ca947601224119117f7c19c9cdf6b3ab54c5726ef1d906aa4a69dfb6dd58102"}, + {file = "contourpy-1.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6ec93afeb848a0845a18989da3beca3eec2c0f852322efe21af1931147d12cb"}, + {file = "contourpy-1.3.0.tar.gz", hash = "sha256:7ffa0db17717a8ffb127efd0c95a4362d996b892c2904db72428d5b52e1938a4"}, +] + +[package.dependencies] +numpy = ">=1.23" + +[package.extras] +bokeh = ["bokeh", "selenium"] +docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] +mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.11.1)", "types-Pillow"] +test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] +test-no-images = ["pytest", "pytest-cov", "pytest-rerunfailures", "pytest-xdist", "wurlitzer"] + [[package]] name = "coverage" version = "7.5.3" @@ -950,6 +1089,21 @@ webencodings = "*" doc = ["sphinx", "sphinx_rtd_theme"] test = ["flake8", "isort", "pytest"] +[[package]] +name = "cycler" +version = "0.12.1" +description = "Composable style cycles" +optional = true +python-versions = ">=3.8" +files = [ + {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, + {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, +] + +[package.extras] +docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] +tests = ["pytest", "pytest-cov", "pytest-xdist"] + [[package]] name = "databind" version = "4.5.2" @@ -997,6 +1151,25 @@ files = [ [package.dependencies] databind = ">=4.5.2,<5.0.0" +[[package]] +name = "databricks-sdk" +version = "0.31.1" +description = "Databricks SDK for Python (Beta)" +optional = true +python-versions = ">=3.7" +files = [ + {file = "databricks_sdk-0.31.1-py3-none-any.whl", hash = "sha256:9ab286f87ae1cc98a00ef7d207e40661f4d14a464071425ad169d235919b35f6"}, + {file = "databricks_sdk-0.31.1.tar.gz", hash = "sha256:8609e655d0e5ecb15c2a8a6468e737f8dcb4f28c33239388de3ab386b921d790"}, +] + 
+[package.dependencies] +google-auth = ">=2.0,<3.0" +requests = ">=2.28.1,<3" + +[package.extras] +dev = ["autoflake", "databricks-connect", "ipython", "ipywidgets", "isort", "pycodestyle", "pyfakefs", "pytest", "pytest-cov", "pytest-mock", "pytest-rerunfailures", "pytest-xdist", "requests-mock", "wheel", "yapf"] +notebook = ["ipython (>=8,<9)", "ipywidgets (>=8,<9)"] + [[package]] name = "debugpy" version = "1.8.1" @@ -1121,6 +1294,28 @@ untokenize = ">=0.1.1,<0.2.0" [package.extras] tomli = ["tomli (>=2.0.0,<3.0.0)"] +[[package]] +name = "docker" +version = "7.1.0" +description = "A Python library for the Docker Engine API." +optional = true +python-versions = ">=3.8" +files = [ + {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"}, + {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"}, +] + +[package.dependencies] +pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} +requests = ">=2.26.0" +urllib3 = ">=1.26.0" + +[package.extras] +dev = ["coverage (==7.2.7)", "pytest (==7.4.2)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.1.0)", "ruff (==0.1.8)"] +docs = ["myst-parser (==0.18.0)", "sphinx (==5.1.1)"] +ssh = ["paramiko (>=2.4.3)"] +websockets = ["websocket-client (>=1.3.0)"] + [[package]] name = "docspec" version = "2.2.1" @@ -1401,6 +1596,71 @@ files = [ flask = ">=2.2.5" sqlalchemy = ">=2.0.16" +[[package]] +name = "fonttools" +version = "4.53.1" +description = "Tools to manipulate font files" +optional = true +python-versions = ">=3.8" +files = [ + {file = "fonttools-4.53.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0679a30b59d74b6242909945429dbddb08496935b82f91ea9bf6ad240ec23397"}, + {file = "fonttools-4.53.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8bf06b94694251861ba7fdeea15c8ec0967f84c3d4143ae9daf42bbc7717fe3"}, + {file = 
"fonttools-4.53.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b96cd370a61f4d083c9c0053bf634279b094308d52fdc2dd9a22d8372fdd590d"}, + {file = "fonttools-4.53.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1c7c5aa18dd3b17995898b4a9b5929d69ef6ae2af5b96d585ff4005033d82f0"}, + {file = "fonttools-4.53.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e013aae589c1c12505da64a7d8d023e584987e51e62006e1bb30d72f26522c41"}, + {file = "fonttools-4.53.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9efd176f874cb6402e607e4cc9b4a9cd584d82fc34a4b0c811970b32ba62501f"}, + {file = "fonttools-4.53.1-cp310-cp310-win32.whl", hash = "sha256:c8696544c964500aa9439efb6761947393b70b17ef4e82d73277413f291260a4"}, + {file = "fonttools-4.53.1-cp310-cp310-win_amd64.whl", hash = "sha256:8959a59de5af6d2bec27489e98ef25a397cfa1774b375d5787509c06659b3671"}, + {file = "fonttools-4.53.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:da33440b1413bad53a8674393c5d29ce64d8c1a15ef8a77c642ffd900d07bfe1"}, + {file = "fonttools-4.53.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ff7e5e9bad94e3a70c5cd2fa27f20b9bb9385e10cddab567b85ce5d306ea923"}, + {file = "fonttools-4.53.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6e7170d675d12eac12ad1a981d90f118c06cf680b42a2d74c6c931e54b50719"}, + {file = "fonttools-4.53.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bee32ea8765e859670c4447b0817514ca79054463b6b79784b08a8df3a4d78e3"}, + {file = "fonttools-4.53.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6e08f572625a1ee682115223eabebc4c6a2035a6917eac6f60350aba297ccadb"}, + {file = "fonttools-4.53.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b21952c092ffd827504de7e66b62aba26fdb5f9d1e435c52477e6486e9d128b2"}, + {file = "fonttools-4.53.1-cp311-cp311-win32.whl", hash = "sha256:9dfdae43b7996af46ff9da520998a32b105c7f098aeea06b2226b30e74fbba88"}, + {file 
= "fonttools-4.53.1-cp311-cp311-win_amd64.whl", hash = "sha256:d4d0096cb1ac7a77b3b41cd78c9b6bc4a400550e21dc7a92f2b5ab53ed74eb02"}, + {file = "fonttools-4.53.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d92d3c2a1b39631a6131c2fa25b5406855f97969b068e7e08413325bc0afba58"}, + {file = "fonttools-4.53.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3b3c8ebafbee8d9002bd8f1195d09ed2bd9ff134ddec37ee8f6a6375e6a4f0e8"}, + {file = "fonttools-4.53.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32f029c095ad66c425b0ee85553d0dc326d45d7059dbc227330fc29b43e8ba60"}, + {file = "fonttools-4.53.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10f5e6c3510b79ea27bb1ebfcc67048cde9ec67afa87c7dd7efa5c700491ac7f"}, + {file = "fonttools-4.53.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f677ce218976496a587ab17140da141557beb91d2a5c1a14212c994093f2eae2"}, + {file = "fonttools-4.53.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9e6ceba2a01b448e36754983d376064730690401da1dd104ddb543519470a15f"}, + {file = "fonttools-4.53.1-cp312-cp312-win32.whl", hash = "sha256:791b31ebbc05197d7aa096bbc7bd76d591f05905d2fd908bf103af4488e60670"}, + {file = "fonttools-4.53.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ed170b5e17da0264b9f6fae86073be3db15fa1bd74061c8331022bca6d09bab"}, + {file = "fonttools-4.53.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c818c058404eb2bba05e728d38049438afd649e3c409796723dfc17cd3f08749"}, + {file = "fonttools-4.53.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:651390c3b26b0c7d1f4407cad281ee7a5a85a31a110cbac5269de72a51551ba2"}, + {file = "fonttools-4.53.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e54f1bba2f655924c1138bbc7fa91abd61f45c68bd65ab5ed985942712864bbb"}, + {file = "fonttools-4.53.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c9cd19cf4fe0595ebdd1d4915882b9440c3a6d30b008f3cc7587c1da7b95be5f"}, + {file = "fonttools-4.53.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2af40ae9cdcb204fc1d8f26b190aa16534fcd4f0df756268df674a270eab575d"}, + {file = "fonttools-4.53.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:35250099b0cfb32d799fb5d6c651220a642fe2e3c7d2560490e6f1d3f9ae9169"}, + {file = "fonttools-4.53.1-cp38-cp38-win32.whl", hash = "sha256:f08df60fbd8d289152079a65da4e66a447efc1d5d5a4d3f299cdd39e3b2e4a7d"}, + {file = "fonttools-4.53.1-cp38-cp38-win_amd64.whl", hash = "sha256:7b6b35e52ddc8fb0db562133894e6ef5b4e54e1283dff606fda3eed938c36fc8"}, + {file = "fonttools-4.53.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75a157d8d26c06e64ace9df037ee93a4938a4606a38cb7ffaf6635e60e253b7a"}, + {file = "fonttools-4.53.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4824c198f714ab5559c5be10fd1adf876712aa7989882a4ec887bf1ef3e00e31"}, + {file = "fonttools-4.53.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:becc5d7cb89c7b7afa8321b6bb3dbee0eec2b57855c90b3e9bf5fb816671fa7c"}, + {file = "fonttools-4.53.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84ec3fb43befb54be490147b4a922b5314e16372a643004f182babee9f9c3407"}, + {file = "fonttools-4.53.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:73379d3ffdeecb376640cd8ed03e9d2d0e568c9d1a4e9b16504a834ebadc2dfb"}, + {file = "fonttools-4.53.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:02569e9a810f9d11f4ae82c391ebc6fb5730d95a0657d24d754ed7763fb2d122"}, + {file = "fonttools-4.53.1-cp39-cp39-win32.whl", hash = "sha256:aae7bd54187e8bf7fd69f8ab87b2885253d3575163ad4d669a262fe97f0136cb"}, + {file = "fonttools-4.53.1-cp39-cp39-win_amd64.whl", hash = "sha256:e5b708073ea3d684235648786f5f6153a48dc8762cdfe5563c57e80787c29fbb"}, + {file = "fonttools-4.53.1-py3-none-any.whl", hash = "sha256:f1f8758a2ad110bd6432203a344269f445a2907dc24ef6bccfd0ac4e14e0d71d"}, + {file = 
"fonttools-4.53.1.tar.gz", hash = "sha256:e128778a8e9bc11159ce5447f76766cefbd876f44bd79aff030287254e4752c4"}, +] + +[package.extras] +all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] +graphite = ["lz4 (>=1.7.4.2)"] +interpolatable = ["munkres", "pycairo", "scipy"] +lxml = ["lxml (>=4.0)"] +pathops = ["skia-pathops (>=0.5.0)"] +plot = ["matplotlib"] +repacker = ["uharfbuzz (>=0.23.0)"] +symfont = ["sympy"] +type1 = ["xattr"] +ufo = ["fs (>=2.2.0,<3)"] +unicode = ["unicodedata2 (>=15.1.0)"] +woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] + [[package]] name = "fqdn" version = "1.5.1" @@ -1605,6 +1865,29 @@ gitdb = ">=4.0.1,<5" doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"] test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"] +[[package]] +name = "google-auth" +version = "2.34.0" +description = "Google Authentication Library" +optional = true +python-versions = ">=3.7" +files = [ + {file = "google_auth-2.34.0-py2.py3-none-any.whl", hash = "sha256:72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65"}, + {file = "google_auth-2.34.0.tar.gz", hash = "sha256:8eb87396435c19b20d32abd2f984e31c191a15284af72eb922f10e5bde9c04cc"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] +enterprise-cert = ["cryptography", 
"pyopenssl"] +pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0.dev0)"] + [[package]] name = "googleapis-common-protos" version = "1.63.1" @@ -1622,6 +1905,51 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4 [package.extras] grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] +[[package]] +name = "graphene" +version = "3.3" +description = "GraphQL Framework for Python" +optional = true +python-versions = "*" +files = [ + {file = "graphene-3.3-py2.py3-none-any.whl", hash = "sha256:bb3810be33b54cb3e6969506671eb72319e8d7ba0d5ca9c8066472f75bf35a38"}, + {file = "graphene-3.3.tar.gz", hash = "sha256:529bf40c2a698954217d3713c6041d69d3f719ad0080857d7ee31327112446b0"}, +] + +[package.dependencies] +aniso8601 = ">=8,<10" +graphql-core = ">=3.1,<3.3" +graphql-relay = ">=3.1,<3.3" + +[package.extras] +dev = ["black (==22.3.0)", "coveralls (>=3.3,<4)", "flake8 (>=4,<5)", "iso8601 (>=1,<2)", "mock (>=4,<5)", "pytest (>=6,<7)", "pytest-asyncio (>=0.16,<2)", "pytest-benchmark (>=3.4,<4)", "pytest-cov (>=3,<4)", "pytest-mock (>=3,<4)", "pytz (==2022.1)", "snapshottest (>=0.6,<1)"] +test = ["coveralls (>=3.3,<4)", "iso8601 (>=1,<2)", "mock (>=4,<5)", "pytest (>=6,<7)", "pytest-asyncio (>=0.16,<2)", "pytest-benchmark (>=3.4,<4)", "pytest-cov (>=3,<4)", "pytest-mock (>=3,<4)", "pytz (==2022.1)", "snapshottest (>=0.6,<1)"] + +[[package]] +name = "graphql-core" +version = "3.2.3" +description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." 
+optional = true +python-versions = ">=3.6,<4" +files = [ + {file = "graphql-core-3.2.3.tar.gz", hash = "sha256:06d2aad0ac723e35b1cb47885d3e5c45e956a53bc1b209a9fc5369007fe46676"}, + {file = "graphql_core-3.2.3-py3-none-any.whl", hash = "sha256:5766780452bd5ec8ba133f8bf287dc92713e3868ddd83aee4faab9fc3e303dc3"}, +] + +[[package]] +name = "graphql-relay" +version = "3.2.0" +description = "Relay library for graphql-core" +optional = true +python-versions = ">=3.6,<4" +files = [ + {file = "graphql-relay-3.2.0.tar.gz", hash = "sha256:1ff1c51298356e481a0be009ccdff249832ce53f30559c1338f22a0e0d17250c"}, + {file = "graphql_relay-3.2.0-py3-none-any.whl", hash = "sha256:c9b22bd28b170ba1fe674c74384a8ff30a76c8e26f88ac3aa1584dd3179953e5"}, +] + +[package.dependencies] +graphql-core = ">=3.2,<3.3" + [[package]] name = "greenlet" version = "3.0.3" @@ -1826,6 +2154,27 @@ files = [ [package.extras] dev = ["pyright", "pytest", "pytest-cov", "ruff"] +[[package]] +name = "gunicorn" +version = "23.0.0" +description = "WSGI HTTP Server for UNIX" +optional = true +python-versions = ">=3.7" +files = [ + {file = "gunicorn-23.0.0-py3-none-any.whl", hash = "sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d"}, + {file = "gunicorn-23.0.0.tar.gz", hash = "sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec"}, +] + +[package.dependencies] +packaging = "*" + +[package.extras] +eventlet = ["eventlet (>=0.24.1,!=0.36.0)"] +gevent = ["gevent (>=1.4.0)"] +setproctitle = ["setproctitle"] +testing = ["coverage", "eventlet", "gevent", "pytest", "pytest-cov"] +tornado = ["tornado (>=0.2)"] + [[package]] name = "h11" version = "0.14.0" @@ -2087,6 +2436,28 @@ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.link perf = ["ipython"] testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy 
(>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +[[package]] +name = "importlib-resources" +version = "6.4.4" +description = "Read resources from Python packages" +optional = true +python-versions = ">=3.8" +files = [ + {file = "importlib_resources-6.4.4-py3-none-any.whl", hash = "sha256:dda242603d1c9cd836c3368b1174ed74cb4049ecd209e7a1a0104620c18c5c11"}, + {file = "importlib_resources-6.4.4.tar.gz", hash = "sha256:20600c8b7361938dc0bb2d5ec0297802e575df486f5a544fa414da65e13721f7"}, +] + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["jaraco.test (>=5.4)", "pytest (>=6,!=8.1.*)", "zipp (>=3.17)"] +type = ["pytest-mypy"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -2781,6 +3152,108 @@ completion = ["shtab (>=1.1.0)"] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +[[package]] +name = "kiwisolver" +version = "1.4.6" +description = "A fast implementation of the Cassowary constraint solver" +optional = true +python-versions = ">=3.8" +files = [ + {file = "kiwisolver-1.4.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9739f60317af3ebb15372a61907a71ba71e9cc3c21239d4e39051ecf51928d98"}, + {file = "kiwisolver-1.4.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7802ac87e8efd05f4ed6b82dfe4749cd4f38140c198a7d392ebbb3ab5fb38bd6"}, + {file = "kiwisolver-1.4.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0632248f5a06a2e4134637628de7300b923d242a30926a1bbf7cc4e487dc0bb8"}, + {file = 
"kiwisolver-1.4.6-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b747105ddb84ce77a41fbc9485df366519526d1f7f4a096ca02570bf082a70c3"}, + {file = "kiwisolver-1.4.6-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9f338d9724cc2b2ea49e8f3af3a6733f5191cf85801db5b137350dc021e16dad"}, + {file = "kiwisolver-1.4.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdeb0c875a8df911cf026f2ee7043d63d59768e58864835d5c5c27020f251fd2"}, + {file = "kiwisolver-1.4.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:182b3eed63f8f79623bba26f1ac75e6c94463c98b70828029db8fe2d230b7ba0"}, + {file = "kiwisolver-1.4.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0b17c30a50ce5345469f206708adb5946917d59c900e53af7108da2a0c4b56f"}, + {file = "kiwisolver-1.4.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cc09aff78d1eb3b4c63d31eba1db6da5b4d580cf65596562038b6c8ec5806a17"}, + {file = "kiwisolver-1.4.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:300443d53ed971a0dd35249f5012a3c3c95004da2e3f5877ed3cb784228d67bd"}, + {file = "kiwisolver-1.4.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:7e3012902606eba35014f725dbd2aab3a28a276cb6872fb21bb27c0ee384a554"}, + {file = "kiwisolver-1.4.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:4cf699500d5d88a5424a4a26dfdcada6aa3a1917431e459c88c38dadd6a300d7"}, + {file = "kiwisolver-1.4.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:212a903a4f90aa6bdbd0709b28df4a337687839dd7cf7030bb288ef756f338e4"}, + {file = "kiwisolver-1.4.6-cp310-cp310-win32.whl", hash = "sha256:7de63234cf06d3a0d218d5c6e907f6ceed72a9d369a8c561d1a161ffafd2fa95"}, + {file = "kiwisolver-1.4.6-cp310-cp310-win_amd64.whl", hash = "sha256:ad4410b6aca71bcfba185d92a3094114914b4ddd9d61d5b7b91047cb273a077b"}, + {file = "kiwisolver-1.4.6-cp310-cp310-win_arm64.whl", hash = 
"sha256:bc523ab49257fd7bbe00e23aff6924624a5da1ce924e4b3e39530049298779da"}, + {file = "kiwisolver-1.4.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45a5cb5abad1ad9c265eed7e058fefafeb7964565b93b397ba2f480faec8d674"}, + {file = "kiwisolver-1.4.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7e52b2568c47fb4f54d17576954e02b1de156c85152f87283a99db9670fd18c0"}, + {file = "kiwisolver-1.4.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:00af95204100bc1d0f26e1ed52ec77d6e3da5c9b845c88d31875c164e4ba6c0c"}, + {file = "kiwisolver-1.4.6-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50ab1fedf86f3951a9e90a64edd15f598860ed60cd3664259756f097d527b5ae"}, + {file = "kiwisolver-1.4.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbc985766bf20141ce64baecc39fb9fedbce094b2b8de1bb62676b79328988e4"}, + {file = "kiwisolver-1.4.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1942a155c737a7c3835a957897f0cc9ebc0085b7a75d934d86aecb1b27b8873"}, + {file = "kiwisolver-1.4.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f464403e391724f8e7dff188d3fb77a85bd1273b3fdba182e6671abcc44434f8"}, + {file = "kiwisolver-1.4.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce5efe545eea86f52ec5a1185e5052815ea86778e8268bad71fa46433f7c0bef"}, + {file = "kiwisolver-1.4.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cb30165f71b7b3378668346e220c81d590593a3a1ff76428a53780310df03f35"}, + {file = "kiwisolver-1.4.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f5a987f740e1c9964e614acb87ba1f014b4be760a341effc8dc789913d1840e6"}, + {file = "kiwisolver-1.4.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f2ceaa6d0450623d108956647ef19a1a28c7e07880f1171c932477308d44d80b"}, + {file = "kiwisolver-1.4.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:788cbf99738f18ae8a27b9d4d7314502b4b917005cfdacd1d6a59038332ae24d"}, + {file = "kiwisolver-1.4.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2f6668678a6b9488a7f8a6320e1b1c6396d179a976472dbc08d1600d04119511"}, + {file = "kiwisolver-1.4.6-cp311-cp311-win32.whl", hash = "sha256:10a09a3e4213c2806bcfd2eb4edb756c557973d2cacf06873b18a247fce897da"}, + {file = "kiwisolver-1.4.6-cp311-cp311-win_amd64.whl", hash = "sha256:683ffef2c51fdc54112dc610d06b59b88c21e23fb669b905da6d5bec80da1bde"}, + {file = "kiwisolver-1.4.6-cp311-cp311-win_arm64.whl", hash = "sha256:3b852c7f0ed9a2fd339c228829bca0964233ed45de50aae3e87b72ca37d177f8"}, + {file = "kiwisolver-1.4.6-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:979df7e9334f6a3694ee9be8d42817e519ef6d155a16499714d082cf41296852"}, + {file = "kiwisolver-1.4.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:50c9c6c42bb6ca231626d1182b9128e89c5ce3c64456f811ff0280deb42d7bfe"}, + {file = "kiwisolver-1.4.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ace86489e7951bd26329a589198d3875c3d48380f889c69d3eb254b506a80101"}, + {file = "kiwisolver-1.4.6-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f94771988da902b475f78e85cf63c5c94392773b4a6494234d87c1b363b2fbc5"}, + {file = "kiwisolver-1.4.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a62379eee430b1c477bb0a0bf6858a57c7c0dad9cee8b3144a5cb5d366c66a54"}, + {file = "kiwisolver-1.4.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e99b97d69499a7414572c906fbc7ca312519f2e17999730129f6c4492786e953"}, + {file = "kiwisolver-1.4.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab93f58afe3a02922a343189404f24ed885564e6316649790240124b95ef1d6e"}, + {file = "kiwisolver-1.4.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34105f4460ba50fc18a16a8e77a5122f7affe075628763fda748ad0ec534c3ee"}, + {file = 
"kiwisolver-1.4.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0081f85f0222620563409d4804c6567a930a45dafbe9674c7913fde131653992"}, + {file = "kiwisolver-1.4.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:df2a4a7cc2e01991e039a792457751b601bdf30143ab5f23f9a1e58f20c875f4"}, + {file = "kiwisolver-1.4.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1f401784df70ea2870e4e10adade66b5b06cb2c151bc2a8a414a1d10554e9a81"}, + {file = "kiwisolver-1.4.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:b19761c8c613b6d04c44f1a4797a144b44136f17ec009ccfb025e17b5698140c"}, + {file = "kiwisolver-1.4.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ee7289430ded484cc2eff9d8ffcce58ed7fe2c26919321dbc0580322a49e0120"}, + {file = "kiwisolver-1.4.6-cp312-cp312-win32.whl", hash = "sha256:331b9d9f408e874ecf34bd79b79df8e099f0b1b351b8844609c1bfdc8d2d45b2"}, + {file = "kiwisolver-1.4.6-cp312-cp312-win_amd64.whl", hash = "sha256:a9be95d086578b3ada61a4621c0e7ee5f456820bfdccc3329061fdeae1e31179"}, + {file = "kiwisolver-1.4.6-cp312-cp312-win_arm64.whl", hash = "sha256:773f2d87825779ab69196dfcf63e9d91043273421c6128c8d4ed82bc6316068f"}, + {file = "kiwisolver-1.4.6-cp313-cp313-win_arm64.whl", hash = "sha256:140f376c22b5148453acff768cff19c34ebbd593126617018732ea1d9ce65547"}, + {file = "kiwisolver-1.4.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:160b983a1bca62d2274c886ddffc3168e0d6a1ae54d54556229f5bd57a4295e4"}, + {file = "kiwisolver-1.4.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f51a061d280300d33d37ebcfd02d5b480004e5bb5092e80ccabcdec8b7b1be9c"}, + {file = "kiwisolver-1.4.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2e33395cca1a27102beed4baf4e97490fcbb2c245626bddb940eafcfe697bf4a"}, + {file = "kiwisolver-1.4.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7d04968b6015583968e62eca184c5104cbdc02666fd5cc7a4b535f9846968fd"}, + {file = "kiwisolver-1.4.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:2cfbcd3a4b6193dd89dd005fbc5db8115a9f204727446562992f9f7fed217b3a"}, + {file = "kiwisolver-1.4.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a69366fb349c2be904ac13063e3b6bcae76ed1c826fcbc646f43135b45abb68"}, + {file = "kiwisolver-1.4.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3420b5179fb732a899a0dfbfdcbc221712d850b5772b082415658466e887e55"}, + {file = "kiwisolver-1.4.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4ccbc596114d32bb5d2ff74eb1785ab1b2d5bc56e7e54662ef335b333f427548"}, + {file = "kiwisolver-1.4.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fa61478e1356df92566ca46fe4165d0a36b9e336ee7fe7e71b923267fc5283aa"}, + {file = "kiwisolver-1.4.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:204039c59e6160f1227c2a33153d0738c93c171dbcc5b632c653f7a7abd08dc9"}, + {file = "kiwisolver-1.4.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:827425185329b813b40bbc176e0757282c558d6efab3c9f681f629c737e08a6e"}, + {file = "kiwisolver-1.4.6-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:ccff4e5ec806db412aceec89b8e7a83a56ff93c5c615c725e7784d90c5a556c4"}, + {file = "kiwisolver-1.4.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0d048002e15b9583ddff6ef4a27bd7f94fff830473856e82f311071b5cca9ade"}, + {file = "kiwisolver-1.4.6-cp38-cp38-win32.whl", hash = "sha256:11b0fdacd87bfe02c4f293ac38b2caf736591253687dce4d489a780a4bf2c39e"}, + {file = "kiwisolver-1.4.6-cp38-cp38-win_amd64.whl", hash = "sha256:ab480d087f10270ff24b06247e41eff901a452b890bfd708d8b7eb58bb01b212"}, + {file = "kiwisolver-1.4.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8ec27e296790903e2a3484a1d93a8324d0cd660394842e0cf2a3657060ad8edc"}, + {file = "kiwisolver-1.4.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9a59519a485ef60d17af17d93f70679a9e41372f3b777c27103b4ce13ece4e40"}, + {file = "kiwisolver-1.4.6-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:d047def01426d15d5dde1fb9ba4e1d8ed7218069e73f00e0994d050913b2c3f4"}, + {file = "kiwisolver-1.4.6-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b9dbf4091b04e1037c9c75ca67e71a348d145c4fac7e1bb3de2e3fe6f13df150"}, + {file = "kiwisolver-1.4.6-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:872c1323f29f0822000e47acac9a0b6ed2af843a20b27c85fa0fdc906f98140f"}, + {file = "kiwisolver-1.4.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbfa70f983f8a2ea69a3f72c4f04aaa1a152a246c4933e9d5d9c30da95815a9b"}, + {file = "kiwisolver-1.4.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb55ba22ebebc537c2f13ffe3ad83ff1529be360ee36192bb61f330af3a785a5"}, + {file = "kiwisolver-1.4.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8188c27be2e590c519e747d885511204c3e01f2ec77006843a204af6d22ab9c"}, + {file = "kiwisolver-1.4.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:19fa65a9e422eeb3b1d50073eb54e2e8c83821632b735d9d6af0ce1fcf42adea"}, + {file = "kiwisolver-1.4.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:89748381d0251d829cffeec03a5c2710812dc133a085a4f52be0996c291e721a"}, + {file = "kiwisolver-1.4.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:78a708e8371675e73208fa61b0985031e911584ad377593226c5974eaf0c2e2e"}, + {file = "kiwisolver-1.4.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:159a2ed7a89b51fcb9766562626f7d9fc411ed5f8b365413bc5ea2d4a8b81a2c"}, + {file = "kiwisolver-1.4.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7d42dbf8229d4c09632e46c83aeaf1dce6125925088704421c57c483dc9db304"}, + {file = "kiwisolver-1.4.6-cp39-cp39-win32.whl", hash = "sha256:a05655320567b9c83b95c1b45339d01ce6373ff2e2d64f643fee2ba2432f035e"}, + {file = "kiwisolver-1.4.6-cp39-cp39-win_amd64.whl", hash = "sha256:67b72c9cbd78ec8666af40747b80bf309f160701084e7cf492a02464e470ee29"}, + {file = "kiwisolver-1.4.6-cp39-cp39-win_arm64.whl", hash = 
"sha256:ef452cf166271827939e907b23a1bda423329663a93a644d4a7be8f7bbb431ed"}, + {file = "kiwisolver-1.4.6-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c0d4811a031ff5194d9b45c15090d674cbf9890461a5028c4475f7b3202a5b1d"}, + {file = "kiwisolver-1.4.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:3aa8e43fbc847c26e17e50befac4de2336e223093263aa5b66c9c2030697b911"}, + {file = "kiwisolver-1.4.6-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d9a5af0c3cad547b59a2605d1af95c79c69c6a3aaf908be9677094ca6ba6dfa"}, + {file = "kiwisolver-1.4.6-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:43e9bc95d7e9e6f7975f2f481db40738796ea718bf55e22c32eb8e242ed418fc"}, + {file = "kiwisolver-1.4.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b474a369ebe8c2cd02df20997b94cd566edc708f38dce18e66385766dcef5f3c"}, + {file = "kiwisolver-1.4.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:33422cbf4ea20cd42945a7ad6b04bc50da9630a5b42854e139944ffde3ba926f"}, + {file = "kiwisolver-1.4.6-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e033139b0a5981e30c1518b97ae4b20b4172e82ed49f09180d02640bde0ae831"}, + {file = "kiwisolver-1.4.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:261ca5e3a0b3fd3f6bf794122e0f80c76f5b5bb8055508a9d8a8869b5e7e8bef"}, + {file = "kiwisolver-1.4.6-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:acdb63f64219a374f7f9bb6c560a435545511364b24757819332f86da03894b9"}, + {file = "kiwisolver-1.4.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c14338ac087b9a8db1db1b7d74ff91c0a2b1c93f6f1ab4942af15f1938449acf"}, + {file = "kiwisolver-1.4.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a40af4800335cab9dfc3b8cb300384ef14e7740f21142c66d7b3f57228c4a290"}, + {file = "kiwisolver-1.4.6-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:dcb6a2bade6292f2b5b19225a4330af49f855edeed6e3c17240df905696a1494"}, + {file = "kiwisolver-1.4.6.tar.gz", hash = "sha256:3cda29d601445e6aa11f80d90a9b8c2ae501650c55d7ad29829bd44499c9e7e0"}, +] + [[package]] name = "langchain-core" version = "0.1.52" @@ -2969,6 +3442,25 @@ files = [ [package.extras] test = ["coverage[toml] (==5.2)", "pytest (>=6.0.0)", "pytest-mypy-plugins (==1.9.3)"] +[[package]] +name = "mako" +version = "1.3.5" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +optional = true +python-versions = ">=3.8" +files = [ + {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, + {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, +] + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + [[package]] name = "manifest-ml" version = "0.1.8" @@ -3110,6 +3602,70 @@ files = [ {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] +[[package]] +name = "matplotlib" +version = "3.9.2" +description = "Python plotting package" +optional = true +python-versions = ">=3.9" +files = [ + {file = "matplotlib-3.9.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:9d78bbc0cbc891ad55b4f39a48c22182e9bdaea7fc0e5dbd364f49f729ca1bbb"}, + {file = "matplotlib-3.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c375cc72229614632c87355366bdf2570c2dac01ac66b8ad048d2dabadf2d0d4"}, + {file = "matplotlib-3.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d94ff717eb2bd0b58fe66380bd8b14ac35f48a98e7c6765117fe67fb7684e64"}, + {file = "matplotlib-3.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab68d50c06938ef28681073327795c5db99bb4666214d2d5f880ed11aeaded66"}, + {file = 
"matplotlib-3.9.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:65aacf95b62272d568044531e41de26285d54aec8cb859031f511f84bd8b495a"}, + {file = "matplotlib-3.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:3fd595f34aa8a55b7fc8bf9ebea8aa665a84c82d275190a61118d33fbc82ccae"}, + {file = "matplotlib-3.9.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d8dd059447824eec055e829258ab092b56bb0579fc3164fa09c64f3acd478772"}, + {file = "matplotlib-3.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c797dac8bb9c7a3fd3382b16fe8f215b4cf0f22adccea36f1545a6d7be310b41"}, + {file = "matplotlib-3.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d719465db13267bcef19ea8954a971db03b9f48b4647e3860e4bc8e6ed86610f"}, + {file = "matplotlib-3.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8912ef7c2362f7193b5819d17dae8629b34a95c58603d781329712ada83f9447"}, + {file = "matplotlib-3.9.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7741f26a58a240f43bee74965c4882b6c93df3e7eb3de160126d8c8f53a6ae6e"}, + {file = "matplotlib-3.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:ae82a14dab96fbfad7965403c643cafe6515e386de723e498cf3eeb1e0b70cc7"}, + {file = "matplotlib-3.9.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ac43031375a65c3196bee99f6001e7fa5bdfb00ddf43379d3c0609bdca042df9"}, + {file = "matplotlib-3.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be0fc24a5e4531ae4d8e858a1a548c1fe33b176bb13eff7f9d0d38ce5112a27d"}, + {file = "matplotlib-3.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf81de2926c2db243c9b2cbc3917619a0fc85796c6ba4e58f541df814bbf83c7"}, + {file = "matplotlib-3.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6ee45bc4245533111ced13f1f2cace1e7f89d1c793390392a80c139d6cf0e6c"}, + {file = "matplotlib-3.9.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:306c8dfc73239f0e72ac50e5a9cf19cc4e8e331dd0c54f5e69ca8758550f1e1e"}, + {file = "matplotlib-3.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:5413401594cfaff0052f9d8b1aafc6d305b4bd7c4331dccd18f561ff7e1d3bd3"}, + {file = "matplotlib-3.9.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:18128cc08f0d3cfff10b76baa2f296fc28c4607368a8402de61bb3f2eb33c7d9"}, + {file = "matplotlib-3.9.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4876d7d40219e8ae8bb70f9263bcbe5714415acfdf781086601211335e24f8aa"}, + {file = "matplotlib-3.9.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d9f07a80deab4bb0b82858a9e9ad53d1382fd122be8cde11080f4e7dfedb38b"}, + {file = "matplotlib-3.9.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7c0410f181a531ec4e93bbc27692f2c71a15c2da16766f5ba9761e7ae518413"}, + {file = "matplotlib-3.9.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:909645cce2dc28b735674ce0931a4ac94e12f5b13f6bb0b5a5e65e7cea2c192b"}, + {file = "matplotlib-3.9.2-cp313-cp313-win_amd64.whl", hash = "sha256:f32c7410c7f246838a77d6d1eff0c0f87f3cb0e7c4247aebea71a6d5a68cab49"}, + {file = "matplotlib-3.9.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:37e51dd1c2db16ede9cfd7b5cabdfc818b2c6397c83f8b10e0e797501c963a03"}, + {file = "matplotlib-3.9.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b82c5045cebcecd8496a4d694d43f9cc84aeeb49fe2133e036b207abe73f4d30"}, + {file = "matplotlib-3.9.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f053c40f94bc51bc03832a41b4f153d83f2062d88c72b5e79997072594e97e51"}, + {file = "matplotlib-3.9.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbe196377a8248972f5cede786d4c5508ed5f5ca4a1e09b44bda889958b33f8c"}, + {file = "matplotlib-3.9.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5816b1e1fe8c192cbc013f8f3e3368ac56fbecf02fb41b8f8559303f24c5015e"}, + {file = 
"matplotlib-3.9.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:cef2a73d06601437be399908cf13aee74e86932a5ccc6ccdf173408ebc5f6bb2"}, + {file = "matplotlib-3.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e0830e188029c14e891fadd99702fd90d317df294c3298aad682739c5533721a"}, + {file = "matplotlib-3.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ba9c1299c920964e8d3857ba27173b4dbb51ca4bab47ffc2c2ba0eb5e2cbc5"}, + {file = "matplotlib-3.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cd93b91ab47a3616b4d3c42b52f8363b88ca021e340804c6ab2536344fad9ca"}, + {file = "matplotlib-3.9.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6d1ce5ed2aefcdce11904fc5bbea7d9c21fff3d5f543841edf3dea84451a09ea"}, + {file = "matplotlib-3.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:b2696efdc08648536efd4e1601b5fd491fd47f4db97a5fbfd175549a7365c1b2"}, + {file = "matplotlib-3.9.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d52a3b618cb1cbb769ce2ee1dcdb333c3ab6e823944e9a2d36e37253815f9556"}, + {file = "matplotlib-3.9.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:039082812cacd6c6bec8e17a9c1e6baca230d4116d522e81e1f63a74d01d2e21"}, + {file = "matplotlib-3.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6758baae2ed64f2331d4fd19be38b7b4eae3ecec210049a26b6a4f3ae1c85dcc"}, + {file = "matplotlib-3.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:050598c2b29e0b9832cde72bcf97627bf00262adbc4a54e2b856426bb2ef0697"}, + {file = "matplotlib-3.9.2.tar.gz", hash = "sha256:96ab43906269ca64a6366934106fa01534454a69e471b7bf3d79083981aaab92"}, +] + +[package.dependencies] +contourpy = ">=1.0.1" +cycler = ">=0.10" +fonttools = ">=4.22.0" +importlib-resources = {version = ">=3.2.0", markers = "python_version < \"3.10\""} +kiwisolver = ">=1.3.1" +numpy = ">=1.23" +packaging = ">=20.0" +pillow = ">=8" +pyparsing = ">=2.3.1" +python-dateutil = ">=2.7" + +[package.extras] +dev = 
["meson-python (>=0.13.1)", "numpy (>=1.25)", "pybind11 (>=2.6)", "setuptools (>=64)", "setuptools_scm (>=7)"] + [[package]] name = "matplotlib-inline" version = "0.1.7" @@ -3386,6 +3942,86 @@ markdown = ">=3.3.3" mkdocs = ">=1.5.0" nbconvert = ">=6.0.0" +[[package]] +name = "mlflow" +version = "2.16.0" +description = "MLflow is an open source platform for the complete machine learning lifecycle" +optional = true +python-versions = ">=3.8" +files = [ + {file = "mlflow-2.16.0-py3-none-any.whl", hash = "sha256:9f27ef6ae7a82d7ecd67b6b4a4d50637a5e8160639115570fbc689758f9c0b54"}, + {file = "mlflow-2.16.0.tar.gz", hash = "sha256:82ea1a2e800f404f1586783b7636091c0a5754cf9ff45afeadf3a5e467f5168f"}, +] + +[package.dependencies] +alembic = "<1.10.0 || >1.10.0,<2" +docker = ">=4.0.0,<8" +Flask = "<4" +graphene = "<4" +gunicorn = {version = "<24", markers = "platform_system != \"Windows\""} +Jinja2 = [ + {version = ">=2.11,<4", markers = "platform_system != \"Windows\""}, + {version = ">=3.0,<4", markers = "platform_system == \"Windows\""}, +] +markdown = ">=3.3,<4" +matplotlib = "<4" +mlflow-skinny = "2.16.0" +numpy = "<3" +pandas = "<3" +pyarrow = ">=4.0.0,<18" +scikit-learn = "<2" +scipy = "<2" +sqlalchemy = ">=1.4.0,<3" +waitress = {version = "<4", markers = "platform_system == \"Windows\""} + +[package.extras] +aliyun-oss = ["aliyunstoreplugin"] +databricks = ["azure-storage-file-datalake (>12)", "boto3 (>1)", "botocore", "google-cloud-storage (>=1.30.0)"] +extras = ["azureml-core (>=1.2.0)", "boto3", "botocore", "google-cloud-storage (>=1.30.0)", "kubernetes", "mlserver (>=1.2.0,!=1.3.1,<1.4.0)", "mlserver-mlflow (>=1.2.0,!=1.3.1,<1.4.0)", "prometheus-flask-exporter", "pyarrow", "pysftp", "requests-auth-aws-sigv4", "virtualenv"] +gateway = ["aiohttp (<4)", "boto3 (>=1.28.56,<2)", "fastapi (<1)", "pydantic (>=1.0,<3)", "slowapi (>=0.1.9,<1)", "tiktoken (<1)", "uvicorn[standard] (<1)", "watchfiles (<1)"] +genai = ["aiohttp (<4)", "boto3 (>=1.28.56,<2)", "fastapi (<1)", 
"pydantic (>=1.0,<3)", "slowapi (>=0.1.9,<1)", "tiktoken (<1)", "uvicorn[standard] (<1)", "watchfiles (<1)"] +jfrog = ["mlflow-jfrog-plugin"] +langchain = ["langchain (>=0.1.0,<=0.2.15)"] +sqlserver = ["mlflow-dbstore"] +xethub = ["mlflow-xethub"] + +[[package]] +name = "mlflow-skinny" +version = "2.16.0" +description = "MLflow is an open source platform for the complete machine learning lifecycle" +optional = true +python-versions = ">=3.8" +files = [ + {file = "mlflow_skinny-2.16.0-py3-none-any.whl", hash = "sha256:c55541f50efd0f6637377b10e8a654847a3fcd815b8680a95f02e0ca6bd7700c"}, + {file = "mlflow_skinny-2.16.0.tar.gz", hash = "sha256:9b823173063743783b4e7b6c52bdadcc7d9dab48eb883ac454c0d56609df6b2d"}, +] + +[package.dependencies] +cachetools = ">=5.0.0,<6" +click = ">=7.0,<9" +cloudpickle = "<4" +databricks-sdk = ">=0.20.0,<1" +gitpython = ">=3.1.9,<4" +importlib-metadata = ">=3.7.0,<4.7.0 || >4.7.0,<9" +opentelemetry-api = ">=1.9.0,<3" +opentelemetry-sdk = ">=1.9.0,<3" +packaging = "<25" +protobuf = ">=3.12.0,<6" +pyyaml = ">=5.1,<7" +requests = ">=2.17.3,<3" +sqlparse = ">=0.4.0,<1" + +[package.extras] +aliyun-oss = ["aliyunstoreplugin"] +databricks = ["azure-storage-file-datalake (>12)", "boto3 (>1)", "botocore", "google-cloud-storage (>=1.30.0)"] +extras = ["azureml-core (>=1.2.0)", "boto3", "botocore", "google-cloud-storage (>=1.30.0)", "kubernetes", "mlserver (>=1.2.0,!=1.3.1,<1.4.0)", "mlserver-mlflow (>=1.2.0,!=1.3.1,<1.4.0)", "prometheus-flask-exporter", "pyarrow", "pysftp", "requests-auth-aws-sigv4", "virtualenv"] +gateway = ["aiohttp (<4)", "boto3 (>=1.28.56,<2)", "fastapi (<1)", "pydantic (>=1.0,<3)", "slowapi (>=0.1.9,<1)", "tiktoken (<1)", "uvicorn[standard] (<1)", "watchfiles (<1)"] +genai = ["aiohttp (<4)", "boto3 (>=1.28.56,<2)", "fastapi (<1)", "pydantic (>=1.0,<3)", "slowapi (>=0.1.9,<1)", "tiktoken (<1)", "uvicorn[standard] (<1)", "watchfiles (<1)"] +jfrog = ["mlflow-jfrog-plugin"] +langchain = ["langchain (>=0.1.0,<=0.2.15)"] +sqlserver = 
["mlflow-dbstore"] +xethub = ["mlflow-xethub"] + [[package]] name = "more-itertools" version = "10.2.0" @@ -4312,6 +4948,79 @@ files = [ {file = "paginate-0.5.6.tar.gz", hash = "sha256:5e6007b6a9398177a7e1648d04fdd9f8c9766a1a945bceac82f1929e8c78af2d"}, ] +[[package]] +name = "pandas" +version = "2.2.2" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = true +python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, + {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, + {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, + {file = 
"pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, + {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, + {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, + {file = 
"pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, + {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, + {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", 
"xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + [[package]] name = "pandocfilters" version = "1.5.1" @@ -4716,6 +5425,82 @@ files = [ [package.extras] tests = ["pytest"] +[[package]] +name = "pyarrow" +version = "17.0.0" +description = "Python library for Apache Arrow" +optional = true +python-versions = ">=3.8" +files = [ + {file = "pyarrow-17.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a5c8b238d47e48812ee577ee20c9a2779e6a5904f1708ae240f53ecbee7c9f07"}, + {file = "pyarrow-17.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db023dc4c6cae1015de9e198d41250688383c3f9af8f565370ab2b4cb5f62655"}, + {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:da1e060b3876faa11cee287839f9cc7cdc00649f475714b8680a05fd9071d545"}, + {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c06d4624c0ad6674364bb46ef38c3132768139ddec1c56582dbac54f2663e2"}, + {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:fa3c246cc58cb5a4a5cb407a18f193354ea47dd0648194e6265bd24177982fe8"}, + {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:f7ae2de664e0b158d1607699a16a488de3d008ba99b3a7aa5de1cbc13574d047"}, + {file = "pyarrow-17.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5984f416552eea15fd9cee03da53542bf4cddaef5afecefb9aa8d1010c335087"}, + {file = "pyarrow-17.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:1c8856e2ef09eb87ecf937104aacfa0708f22dfeb039c363ec99735190ffb977"}, + {file = "pyarrow-17.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e19f569567efcbbd42084e87f948778eb371d308e137a0f97afe19bb860ccb3"}, + {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b244dc8e08a23b3e352899a006a26ae7b4d0da7bb636872fa8f5884e70acf15"}, + {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b72e87fe3e1db343995562f7fff8aee354b55ee83d13afba65400c178ab2597"}, + {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:dc5c31c37409dfbc5d014047817cb4ccd8c1ea25d19576acf1a001fe07f5b420"}, + {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e3343cb1e88bc2ea605986d4b94948716edc7a8d14afd4e2c097232f729758b4"}, + {file = "pyarrow-17.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:a27532c38f3de9eb3e90ecab63dfda948a8ca859a66e3a47f5f42d1e403c4d03"}, + {file = "pyarrow-17.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9b8a823cea605221e61f34859dcc03207e52e409ccf6354634143e23af7c8d22"}, + {file = "pyarrow-17.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:f1e70de6cb5790a50b01d2b686d54aaf73da01266850b05e3af2a1bc89e16053"}, + {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0071ce35788c6f9077ff9ecba4858108eebe2ea5a3f7cf2cf55ebc1dbc6ee24a"}, + {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:757074882f844411fcca735e39aae74248a1531367a7c80799b4266390ae51cc"}, + {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ba11c4f16976e89146781a83833df7f82077cdab7dc6232c897789343f7891a"}, + {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b0c6ac301093b42d34410b187bba560b17c0330f64907bfa4f7f7f2444b0cf9b"}, + {file = "pyarrow-17.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:392bc9feabc647338e6c89267635e111d71edad5fcffba204425a7c8d13610d7"}, + {file = "pyarrow-17.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:af5ff82a04b2171415f1410cff7ebb79861afc5dae50be73ce06d6e870615204"}, + {file = "pyarrow-17.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:edca18eaca89cd6382dfbcff3dd2d87633433043650c07375d095cd3517561d8"}, + {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c7916bff914ac5d4a8fe25b7a25e432ff921e72f6f2b7547d1e325c1ad9d155"}, + {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f553ca691b9e94b202ff741bdd40f6ccb70cdd5fbf65c187af132f1317de6145"}, + {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0cdb0e627c86c373205a2f94a510ac4376fdc523f8bb36beab2e7f204416163c"}, + {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:d7d192305d9d8bc9082d10f361fc70a73590a4c65cf31c3e6926cd72b76bc35c"}, + {file = "pyarrow-17.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:02dae06ce212d8b3244dd3e7d12d9c4d3046945a5933d28026598e9dbbda1fca"}, + {file = "pyarrow-17.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = 
"sha256:13d7a460b412f31e4c0efa1148e1d29bdf18ad1411eb6757d38f8fbdcc8645fb"}, + {file = "pyarrow-17.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9b564a51fbccfab5a04a80453e5ac6c9954a9c5ef2890d1bcf63741909c3f8df"}, + {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32503827abbc5aadedfa235f5ece8c4f8f8b0a3cf01066bc8d29de7539532687"}, + {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a155acc7f154b9ffcc85497509bcd0d43efb80d6f733b0dc3bb14e281f131c8b"}, + {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:dec8d129254d0188a49f8a1fc99e0560dc1b85f60af729f47de4046015f9b0a5"}, + {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:a48ddf5c3c6a6c505904545c25a4ae13646ae1f8ba703c4df4a1bfe4f4006bda"}, + {file = "pyarrow-17.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:42bf93249a083aca230ba7e2786c5f673507fa97bbd9725a1e2754715151a204"}, + {file = "pyarrow-17.0.0.tar.gz", hash = "sha256:4beca9521ed2c0921c1023e68d097d0299b62c362639ea315572a58f3f50fd28"}, +] + +[package.dependencies] +numpy = ">=1.16.6" + +[package.extras] +test = ["cffi", "hypothesis", "pandas", "pytest", "pytz"] + +[[package]] +name = "pyasn1" +version = "0.6.0" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = true +python-versions = ">=3.8" +files = [ + {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, + {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.0" +description = "A collection of ASN.1-based protocols modules" +optional = true +python-versions = ">=3.8" +files = [ + {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, + {file = 
"pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, +] + +[package.dependencies] +pyasn1 = ">=0.4.6,<0.7.0" + [[package]] name = "pycparser" version = "2.22" @@ -4930,6 +5715,20 @@ pyyaml = "*" [package.extras] extra = ["pygments (>=2.12)"] +[[package]] +name = "pyparsing" +version = "3.1.4" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = true +python-versions = ">=3.6.8" +files = [ + {file = "pyparsing-3.1.4-py3-none-any.whl", hash = "sha256:a6a7ee4235a3f944aa1fa2249307708f893fe5717dc603503c6c7969c070fb7c"}, + {file = "pyparsing-3.1.4.tar.gz", hash = "sha256:f86ec8d1a83f11977c9a6ea7598e8c27fc5cddfa5b07ea2241edbbde1d7bc032"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + [[package]] name = "pypdfium2" version = "4.30.0" @@ -5095,6 +5894,17 @@ files = [ {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, ] +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = true +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + [[package]] name = "pywin32" version = "306" @@ -5725,6 +6535,20 @@ files = [ {file = "rpds_py-0.18.1.tar.gz", hash = "sha256:dc48b479d540770c811fbd1eb9ba2bb66951863e448efec2e2c102625328e92f"}, ] +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +optional = true +python-versions = ">=3.6,<4" +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = 
"sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + [[package]] name = "rstr" version = "3.2.2" @@ -5901,6 +6725,93 @@ tensorflow = ["safetensors[numpy]", "tensorflow (>=2.11.0)"] testing = ["h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "hypothesis (>=6.70.2)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "safetensors[numpy]", "setuptools-rust (>=1.5.2)"] torch = ["safetensors[numpy]", "torch (>=1.10)"] +[[package]] +name = "scikit-learn" +version = "1.5.1" +description = "A set of python modules for machine learning and data mining" +optional = true +python-versions = ">=3.9" +files = [ + {file = "scikit_learn-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:781586c414f8cc58e71da4f3d7af311e0505a683e112f2f62919e3019abd3745"}, + {file = "scikit_learn-1.5.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:f5b213bc29cc30a89a3130393b0e39c847a15d769d6e59539cd86b75d276b1a7"}, + {file = "scikit_learn-1.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ff4ba34c2abff5ec59c803ed1d97d61b036f659a17f55be102679e88f926fac"}, + {file = "scikit_learn-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:161808750c267b77b4a9603cf9c93579c7a74ba8486b1336034c2f1579546d21"}, + {file = "scikit_learn-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:10e49170691514a94bb2e03787aa921b82dbc507a4ea1f20fd95557862c98dc1"}, + {file = "scikit_learn-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:154297ee43c0b83af12464adeab378dee2d0a700ccd03979e2b821e7dd7cc1c2"}, + {file = "scikit_learn-1.5.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:b5e865e9bd59396220de49cb4a57b17016256637c61b4c5cc81aaf16bc123bbe"}, + {file = "scikit_learn-1.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:909144d50f367a513cee6090873ae582dba019cb3fca063b38054fa42704c3a4"}, + {file = 
"scikit_learn-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689b6f74b2c880276e365fe84fe4f1befd6a774f016339c65655eaff12e10cbf"}, + {file = "scikit_learn-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:9a07f90846313a7639af6a019d849ff72baadfa4c74c778821ae0fad07b7275b"}, + {file = "scikit_learn-1.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5944ce1faada31c55fb2ba20a5346b88e36811aab504ccafb9f0339e9f780395"}, + {file = "scikit_learn-1.5.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:0828673c5b520e879f2af6a9e99eee0eefea69a2188be1ca68a6121b809055c1"}, + {file = "scikit_learn-1.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:508907e5f81390e16d754e8815f7497e52139162fd69c4fdbd2dfa5d6cc88915"}, + {file = "scikit_learn-1.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97625f217c5c0c5d0505fa2af28ae424bd37949bb2f16ace3ff5f2f81fb4498b"}, + {file = "scikit_learn-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:da3f404e9e284d2b0a157e1b56b6566a34eb2798205cba35a211df3296ab7a74"}, + {file = "scikit_learn-1.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:88e0672c7ac21eb149d409c74cc29f1d611d5158175846e7a9c2427bd12b3956"}, + {file = "scikit_learn-1.5.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:7b073a27797a283187a4ef4ee149959defc350b46cbf63a84d8514fe16b69855"}, + {file = "scikit_learn-1.5.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b59e3e62d2be870e5c74af4e793293753565c7383ae82943b83383fdcf5cc5c1"}, + {file = "scikit_learn-1.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bd8d3a19d4bd6dc5a7d4f358c8c3a60934dc058f363c34c0ac1e9e12a31421d"}, + {file = "scikit_learn-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:5f57428de0c900a98389c4a433d4a3cf89de979b3aa24d1c1d251802aa15e44d"}, + {file = "scikit_learn-1.5.1.tar.gz", hash = "sha256:0ea5d40c0e3951df445721927448755d3fe1d80833b0b7308ebff5d2a45e6414"}, +] 
+ +[package.dependencies] +joblib = ">=1.2.0" +numpy = ">=1.19.5" +scipy = ">=1.6.0" +threadpoolctl = ">=3.1.0" + +[package.extras] +benchmark = ["matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "pandas (>=1.1.5)"] +build = ["cython (>=3.0.10)", "meson-python (>=0.16.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)"] +docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "polars (>=0.20.23)", "pooch (>=1.6.0)", "pydata-sphinx-theme (>=0.15.3)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)", "sphinx (>=7.3.7)", "sphinx-copybutton (>=0.5.2)", "sphinx-design (>=0.5.0)", "sphinx-gallery (>=0.16.0)", "sphinx-prompt (>=1.4.0)", "sphinx-remove-toctrees (>=1.0.0.post1)", "sphinxcontrib-sass (>=0.3.4)", "sphinxext-opengraph (>=0.9.1)"] +examples = ["matplotlib (>=3.3.4)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)"] +install = ["joblib (>=1.2.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)", "threadpoolctl (>=3.1.0)"] +maintenance = ["conda-lock (==2.5.6)"] +tests = ["black (>=24.3.0)", "matplotlib (>=3.3.4)", "mypy (>=1.9)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "polars (>=0.20.23)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.2.1)", "scikit-image (>=0.17.2)"] + +[[package]] +name = "scipy" +version = "1.13.1" +description = "Fundamental algorithms for scientific computing in Python" +optional = true +python-versions = ">=3.9" +files = [ + {file = "scipy-1.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:20335853b85e9a49ff7572ab453794298bcf0354d8068c5f6775a0eabf350aca"}, + {file = "scipy-1.13.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:d605e9c23906d1994f55ace80e0125c587f96c020037ea6aa98d01b4bd2e222f"}, + {file = "scipy-1.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:cfa31f1def5c819b19ecc3a8b52d28ffdcc7ed52bb20c9a7589669dd3c250989"}, + {file = "scipy-1.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26264b282b9da0952a024ae34710c2aff7d27480ee91a2e82b7b7073c24722f"}, + {file = "scipy-1.13.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:eccfa1906eacc02de42d70ef4aecea45415f5be17e72b61bafcfd329bdc52e94"}, + {file = "scipy-1.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:2831f0dc9c5ea9edd6e51e6e769b655f08ec6db6e2e10f86ef39bd32eb11da54"}, + {file = "scipy-1.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:27e52b09c0d3a1d5b63e1105f24177e544a222b43611aaf5bc44d4a0979e32f9"}, + {file = "scipy-1.13.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:54f430b00f0133e2224c3ba42b805bfd0086fe488835effa33fa291561932326"}, + {file = "scipy-1.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e89369d27f9e7b0884ae559a3a956e77c02114cc60a6058b4e5011572eea9299"}, + {file = "scipy-1.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a78b4b3345f1b6f68a763c6e25c0c9a23a9fd0f39f5f3d200efe8feda560a5fa"}, + {file = "scipy-1.13.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45484bee6d65633752c490404513b9ef02475b4284c4cfab0ef946def50b3f59"}, + {file = "scipy-1.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:5713f62f781eebd8d597eb3f88b8bf9274e79eeabf63afb4a737abc6c84ad37b"}, + {file = "scipy-1.13.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5d72782f39716b2b3509cd7c33cdc08c96f2f4d2b06d51e52fb45a19ca0c86a1"}, + {file = "scipy-1.13.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:017367484ce5498445aade74b1d5ab377acdc65e27095155e448c88497755a5d"}, + {file = "scipy-1.13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:949ae67db5fa78a86e8fa644b9a6b07252f449dcf74247108c50e1d20d2b4627"}, + {file = "scipy-1.13.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:de3ade0e53bc1f21358aa74ff4830235d716211d7d077e340c7349bc3542e884"}, + {file = "scipy-1.13.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2ac65fb503dad64218c228e2dc2d0a0193f7904747db43014645ae139c8fad16"}, + {file = "scipy-1.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:cdd7dacfb95fea358916410ec61bbc20440f7860333aee6d882bb8046264e949"}, + {file = "scipy-1.13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:436bbb42a94a8aeef855d755ce5a465479c721e9d684de76bf61a62e7c2b81d5"}, + {file = "scipy-1.13.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:8335549ebbca860c52bf3d02f80784e91a004b71b059e3eea9678ba994796a24"}, + {file = "scipy-1.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d533654b7d221a6a97304ab63c41c96473ff04459e404b83275b60aa8f4b7004"}, + {file = "scipy-1.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637e98dcf185ba7f8e663e122ebf908c4702420477ae52a04f9908707456ba4d"}, + {file = "scipy-1.13.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a014c2b3697bde71724244f63de2476925596c24285c7a637364761f8710891c"}, + {file = "scipy-1.13.1-cp39-cp39-win_amd64.whl", hash = "sha256:392e4ec766654852c25ebad4f64e4e584cf19820b980bc04960bca0b0cd6eaa2"}, + {file = "scipy-1.13.1.tar.gz", hash = "sha256:095a87a0312b08dfd6a6155cbbd310a8c51800fc931b8c0b84003014b874ed3c"}, +] + +[package.dependencies] +numpy = ">=1.22.4,<2.3" + +[package.extras] +dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"] +doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.12.0)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0)", "sphinx-design (>=0.4.0)"] +test = ["array-api-strict", "asv", "gmpy2", "hypothesis (>=6.30)", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] + 
[[package]] name = "secretstorage" version = "3.3.3" @@ -6248,6 +7159,21 @@ files = [ {file = "sqlitedict-2.1.0.tar.gz", hash = "sha256:03d9cfb96d602996f1d4c2db2856f1224b96a9c431bdd16e78032a72940f9e8c"}, ] +[[package]] +name = "sqlparse" +version = "0.5.1" +description = "A non-validating SQL parser." +optional = true +python-versions = ">=3.8" +files = [ + {file = "sqlparse-0.5.1-py3-none-any.whl", hash = "sha256:773dcbf9a5ab44a090f3441e2180efe2560220203dc2f8c0b0fa141e18b505e4"}, + {file = "sqlparse-0.5.1.tar.gz", hash = "sha256:bb6b4df465655ef332548e24f08e205afc81b9ab86cb1c45657a7ff173a3a00e"}, +] + +[package.extras] +dev = ["build", "hatch"] +doc = ["sphinx"] + [[package]] name = "sqlvalidator" version = "0.0.20" @@ -6358,6 +7284,17 @@ docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"] +[[package]] +name = "threadpoolctl" +version = "3.5.0" +description = "threadpoolctl" +optional = true +python-versions = ">=3.8" +files = [ + {file = "threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467"}, + {file = "threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"}, +] + [[package]] name = "tiktoken" version = "0.7.0" @@ -6870,6 +7807,17 @@ files = [ {file = "typing_extensions-4.12.1.tar.gz", hash = "sha256:915f5e35ff76f56588223f15fdd5938f9a1cf9195c0de25130c627e4d597f6d1"}, ] +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = true +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + [[package]] name = "untokenize" version = "0.1.1" @@ -6947,6 
+7895,21 @@ platformdirs = ">=3.9.1,<5" docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] +[[package]] +name = "waitress" +version = "3.0.0" +description = "Waitress WSGI server" +optional = true +python-versions = ">=3.8.0" +files = [ + {file = "waitress-3.0.0-py3-none-any.whl", hash = "sha256:2a06f242f4ba0cc563444ca3d1998959447477363a2d7e9b8b4d75d35cfd1669"}, + {file = "waitress-3.0.0.tar.gz", hash = "sha256:005da479b04134cdd9dd602d1ee7c49d79de0537610d653674cc6cbde222b8a1"}, +] + +[package.extras] +docs = ["Sphinx (>=1.8.1)", "docutils", "pylons-sphinx-themes (>=1.0.9)"] +testing = ["coverage (>=5.0)", "pytest", "pytest-cov"] + [[package]] name = "watchdog" version = "4.0.1" @@ -7405,6 +8368,7 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [extras] anthropic = ["anthropic"] api = ["guardrails-api"] +databricks = ["mlflow"] docs-build = ["docspec_python", "nbdoc", "pydoc-markdown"] huggingface = ["jsonformer", "torch", "transformers"] manifest = ["manifest-ml"] @@ -7414,4 +8378,4 @@ vectordb = ["faiss-cpu", "numpy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "43737ebc6c4edc87251bcbfd6f3e8859dc373042313b68f861ac03eb1fcdeac9" +content-hash = "aec41326aef66af046ce16d49c036fec48698032995f3f49df634b9da411caf7" diff --git a/pyproject.toml b/pyproject.toml index 13155c6aa..2f4071ddf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -59,6 +59,7 @@ guardrails-hub-types = "^0.0.4" guardrails-api-client = ">=0.3.8" diff-match-patch = "^20230430" guardrails-api = ">=0.0.1" 
+mlflow = {version = ">=2.0.1", optional = true} [tool.poetry.extras] sql = ["sqlvalidator", "sqlalchemy", "sqlglot"] @@ -68,6 +69,7 @@ anthropic = ["anthropic"] docs-build = ["nbdoc", "docspec_python", "pydoc-markdown"] huggingface = ["transformers", "torch", "jsonformer"] api = ["guardrails-api"] +databricks = ["mlflow"] [tool.poetry.group.dev.dependencies] diff --git a/tests/unit_tests/integrations/databricks/test_ml_flow_instrumentor.py b/tests/unit_tests/integrations/databricks/test_ml_flow_instrumentor.py new file mode 100644 index 000000000..0e0593383 --- /dev/null +++ b/tests/unit_tests/integrations/databricks/test_ml_flow_instrumentor.py @@ -0,0 +1,637 @@ +from asyncio import Future +import pytest +from unittest.mock import MagicMock + +from guardrails.guard import Guard +from guardrails.async_guard import AsyncGuard +from guardrails.classes.history.call import Call +from guardrails.classes.history.iteration import Iteration +from guardrails.classes.llm.llm_response import LLMResponse +from guardrails.classes.validation_outcome import ValidationOutcome +from guardrails.run.async_runner import AsyncRunner +from guardrails.run.async_stream_runner import AsyncStreamRunner +from guardrails.run.runner import Runner +from guardrails.run.stream_runner import StreamRunner +from guardrails.version import GUARDRAILS_VERSION +from tests.unit_tests.mocks.mock_span import MockSpan + +try: + import mlflow +except ImportError: + mlflow = None + + +@pytest.mark.skipif( + mlflow is None, + reason="mlflow not installed.", +) +class TestMlFlowInstrumentor: + def test__init__(self): + from guardrails.integrations.databricks import MlFlowInstrumentor + + m = MlFlowInstrumentor("mock experiment") + + from guardrails import settings + + assert m.experiment_name == "mock experiment" + assert settings.disable_tracing is True + + def test_instrument(self, mocker): + mock_is_enabled = mocker.patch( + 
"guardrails.integrations.databricks.ml_flow_instrumentor.mlflow.tracing.provider._is_enabled", + return_value=False, + ) + mock_enable = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.mlflow.tracing.enable" + ) + mock_set_experiment = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.mlflow.set_experiment" + ) + + from tests.unit_tests.mocks import mock_hub + + mocker.patch("guardrails.hub", return_value=mock_hub) + + from guardrails.integrations.databricks import MlFlowInstrumentor + + m = MlFlowInstrumentor("mock experiment") + + # Prevent real methods from being wrapped and persistint into other tests + mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.Guard._execute" + ) + guard_execute = Guard._execute + mock_instrument_guard = mocker.patch.object(m, "_instrument_guard") + + mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.AsyncGuard._execute" + ) + async_guard_execute = AsyncGuard._execute + mock_instrument_async_guard = mocker.patch.object(m, "_instrument_async_guard") + + mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.Runner.step" + ) + runner_step = Runner.step + mock_instrument_runner_step = mocker.patch.object(m, "_instrument_runner_step") + + mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.StreamRunner.step" + ) + stream_runner_step = StreamRunner.step + mock_instrument_stream_runner_step = mocker.patch.object( + m, "_instrument_stream_runner_step" + ) + + mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.AsyncRunner.async_step" + ) + async_runner_step = AsyncRunner.async_step + mock_instrument_async_runner_step = mocker.patch.object( + m, "_instrument_async_runner_step" + ) + + mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.AsyncStreamRunner.async_step" + ) + async_stream_runner_step = AsyncStreamRunner.async_step + mock_instrument_async_stream_runner_step = 
mocker.patch.object( + m, "_instrument_async_stream_runner_step" + ) + + mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.Runner.call" + ) + runner_call = Runner.call + mock_instrument_runner_call = mocker.patch.object(m, "_instrument_runner_call") + + mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.AsyncRunner.async_call" + ) + async_runner_call = AsyncRunner.async_call + mock_instrument_async_runner_call = mocker.patch.object( + m, "_instrument_async_runner_call" + ) + + m.instrument() + + mock_is_enabled.assert_called_once() + mock_enable.assert_called_once() + mock_set_experiment.assert_called_once_with("mock experiment") + + mock_instrument_guard.assert_called_once_with(guard_execute) + mock_instrument_async_guard.assert_called_once_with(async_guard_execute) + mock_instrument_runner_step.assert_called_once_with(runner_step) + mock_instrument_stream_runner_step.assert_called_once_with(stream_runner_step) + mock_instrument_async_runner_step.assert_called_once_with(async_runner_step) + mock_instrument_async_stream_runner_step.assert_called_once_with( + async_stream_runner_step + ) + mock_instrument_runner_call.assert_called_once_with(runner_call) + mock_instrument_async_runner_call.assert_called_once_with(async_runner_call) + + def test__instrument_guard(self, mocker): + mock_span = MockSpan() + mock_start_span = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.mlflow.start_span", + return_value=mock_span, + ) + + mock_add_guard_attributes = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.add_guard_attributes" + ) + mock_trace_stream_guard = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.trace_stream_guard" + ) + + from guardrails.integrations.databricks import MlFlowInstrumentor + + m = MlFlowInstrumentor("mock experiment") + + mock_result = ValidationOutcome(call_id="mock call id", validation_passed=True) + mock_execute = MagicMock() + 
mock_execute.return_value = mock_result + mock_guard = MagicMock(spec=Guard) + mock_guard._execute = mock_execute + mock_guard.name = "mock guard" + mock_guard.history = [] + + wrapped_execute = m._instrument_guard(mock_guard._execute) + + wrapped_execute(mock_guard) + + mock_start_span.assert_called_once_with( + name="guardrails/guard", + span_type="guard", + attributes={ + "guardrails.version": GUARDRAILS_VERSION, + "type": "guardrails/guard", + }, + ) + + # Internally called, not the wrapped call above + mock_guard._execute.assert_called_once() + mock_span.set_attribute.assert_called_once_with("guard.name", "mock guard") + mock_add_guard_attributes.assert_called_once_with(mock_span, [], mock_result) + + mock_trace_stream_guard.assert_not_called() + + def test__instrument_guard_stream(self, mocker): + mock_span = MockSpan() + mock_start_span = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.mlflow.start_span", + return_value=mock_span, + ) + + mock_add_guard_attributes = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.add_guard_attributes" + ) + mock_trace_stream_guard = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.trace_stream_guard" + ) + + from guardrails.integrations.databricks import MlFlowInstrumentor + + m = MlFlowInstrumentor("mock experiment") + + mock_result = [ + ValidationOutcome(call_id="mock call id", validation_passed=True) + ] + mock_execute = MagicMock() + mock_execute.return_value = mock_result + mock_guard = MagicMock(spec=Guard) + mock_guard._execute = mock_execute + mock_guard.name = "mock guard" + mock_guard.history = [] + + wrapped_execute = m._instrument_guard(mock_guard._execute) + + wrapped_execute(mock_guard, stream=True) + + mock_start_span.assert_called_once_with( + name="guardrails/guard", + span_type="guard", + attributes={ + "guardrails.version": GUARDRAILS_VERSION, + "type": "guardrails/guard", + }, + ) + + # Internally called, not the wrapped call above 
+ mock_guard._execute.assert_called_once() + mock_span.set_attribute.assert_called_once_with("guard.name", "mock guard") + mock_trace_stream_guard.assert_called_once_with(mock_span, mock_result, []) + + mock_add_guard_attributes.assert_not_called() + + @pytest.mark.asyncio + async def test__instrument_async_guard(self, mocker): + mock_span = MockSpan() + mock_start_span = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.mlflow.start_span", + return_value=mock_span, + ) + + mock_add_guard_attributes = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.add_guard_attributes" + ) + mock_trace_async_stream_guard = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.trace_async_stream_guard" + ) + + from guardrails.integrations.databricks import MlFlowInstrumentor + + m = MlFlowInstrumentor("mock experiment") + + validation_outcome = ValidationOutcome( + call_id="mock call id", validation_passed=True + ) + mock_result = Future() + mock_result.set_result(validation_outcome) + mock_execute = MagicMock() + mock_execute.return_value = mock_result + mock_guard = MagicMock(spec=AsyncGuard) + mock_guard._execute = mock_execute + mock_guard.name = "mock guard" + mock_guard.history = [] + + wrapped_execute = m._instrument_async_guard(mock_guard._execute) + + await wrapped_execute(mock_guard) + + mock_start_span.assert_called_once_with( + name="guardrails/guard", + span_type="guard", + attributes={ + "guardrails.version": GUARDRAILS_VERSION, + "type": "guardrails/guard", + "async": True, + }, + ) + + # Internally called, not the wrapped call above + mock_guard._execute.assert_called_once() + mock_span.set_attribute.assert_called_once_with("guard.name", "mock guard") + mock_add_guard_attributes.assert_called_once_with( + mock_span, [], validation_outcome + ) + + mock_trace_async_stream_guard.assert_not_called() + + @pytest.mark.asyncio + async def test__instrument_async_guard_stream(self, mocker): + mock_span = 
MockSpan() + mock_start_span = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.mlflow.start_span", + return_value=mock_span, + ) + + mock_add_guard_attributes = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.add_guard_attributes" + ) + mock_trace_async_stream_guard = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.trace_async_stream_guard" + ) + + from guardrails.integrations.databricks import MlFlowInstrumentor + + m = MlFlowInstrumentor("mock experiment") + + async def async_iterable(): + yield ValidationOutcome(call_id="mock call id", validation_passed=True) + + async_gen = async_iterable() + + async def mock_execute(*args, **kwargs): + return async_gen + + mock_guard = MagicMock(spec=AsyncGuard) + mock_guard._execute = mock_execute + mock_guard.name = "mock guard" + mock_guard.history = [] + + wrapped_execute = m._instrument_async_guard(mock_guard._execute) + + await wrapped_execute(mock_guard) + + mock_start_span.assert_called_once_with( + name="guardrails/guard", + span_type="guard", + attributes={ + "guardrails.version": GUARDRAILS_VERSION, + "type": "guardrails/guard", + "async": True, + }, + ) + + mock_span.set_attribute.assert_called_once_with("guard.name", "mock guard") + mock_trace_async_stream_guard.assert_called_once_with(mock_span, async_gen, []) + + mock_add_guard_attributes.assert_not_called() + + def test__instrument_runner_step(self, mocker): + mock_span = MockSpan() + mock_start_span = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.mlflow.start_span", + return_value=mock_span, + ) + + mock_add_step_attributes = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.add_step_attributes" + ) + + from guardrails.integrations.databricks import MlFlowInstrumentor + + m = MlFlowInstrumentor("mock experiment") + + iteration = Iteration(call_id="mock call id", index=0) + mock_step = MagicMock(return_value=iteration) + mock_runner = 
MagicMock(spec=Runner) + mock_runner.step = mock_step + + wrapped_step = m._instrument_runner_step(mock_runner.step) + + wrapped_step(mock_runner) + + mock_start_span.assert_called_once_with( + name="guardrails/guard/step", + span_type="step", + attributes={ + "guardrails.version": GUARDRAILS_VERSION, + "type": "guardrails/guard/step", + }, + ) + + # Internally called, not the wrapped call above + mock_runner.step.assert_called_once() + mock_add_step_attributes.assert_called_once_with( + mock_span, iteration, mock_runner + ) + + def test__instrument_stream_runner_step(self, mocker): + mock_span = MockSpan() + mock_start_span = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.mlflow.start_span", + return_value=mock_span, + ) + + mock_add_step_attributes = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.add_step_attributes" + ) + + from guardrails.integrations.databricks import MlFlowInstrumentor + + m = MlFlowInstrumentor("mock experiment") + + iteration = Iteration(call_id="mock call id", index=0) + call = Call() + call.iterations.push(iteration) + + def step_iterable(): + yield ValidationOutcome(call_id="mock call id", validation_passed=True) + + step_gen = step_iterable() + mock_runner = MagicMock(spec=StreamRunner) + mock_runner.step = MagicMock(return_value=step_gen) + + wrapped_step = m._instrument_stream_runner_step(mock_runner.step) + + wrapped_gen = wrapped_step(mock_runner, call_log=call) + for gen in wrapped_gen: + pass + + mock_start_span.assert_called_once_with( + name="guardrails/guard/step", + span_type="step", + attributes={ + "guardrails.version": GUARDRAILS_VERSION, + "type": "guardrails/guard/step", + "stream": True, + }, + ) + + # Internally called, not the wrapped call above + mock_runner.step.assert_called_once() + mock_add_step_attributes.assert_called_once_with( + mock_span, iteration, mock_runner, call_log=call + ) + + @pytest.mark.asyncio + async def test__instrument_async_runner_step(self, 
mocker): + mock_span = MockSpan() + mock_start_span = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.mlflow.start_span", + return_value=mock_span, + ) + + mock_add_step_attributes = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.add_step_attributes" + ) + + from guardrails.integrations.databricks import MlFlowInstrumentor + + m = MlFlowInstrumentor("mock experiment") + + iteration = Iteration(call_id="mock call id", index=0) + response = Future() + response.set_result(iteration) + mock_step = MagicMock(return_value=response) + mock_runner = MagicMock(spec=AsyncRunner) + mock_runner.async_step = mock_step + + wrapped_step = m._instrument_async_runner_step(mock_runner.async_step) + + await wrapped_step(mock_runner) + + mock_start_span.assert_called_once_with( + name="guardrails/guard/step", + span_type="step", + attributes={ + "guardrails.version": GUARDRAILS_VERSION, + "type": "guardrails/guard/step", + "async": True, + }, + ) + + # Internally called, not the wrapped call above + mock_runner.async_step.assert_called_once() + mock_add_step_attributes.assert_called_once_with( + mock_span, iteration, mock_runner + ) + + @pytest.mark.asyncio + async def test__instrument_async_stream_runner_step(self, mocker): + mock_span = MockSpan() + mock_start_span = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.mlflow.start_span", + return_value=mock_span, + ) + + mock_add_step_attributes = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.add_step_attributes" + ) + + from guardrails.integrations.databricks import MlFlowInstrumentor + + m = MlFlowInstrumentor("mock experiment") + + iteration = Iteration(call_id="mock call id", index=0) + call = Call() + call.iterations.push(iteration) + + async def step_iterable(): + yield ValidationOutcome(call_id="mock call id", validation_passed=True) + + step_gen = step_iterable() + mock_runner = MagicMock(spec=AsyncStreamRunner) + 
mock_runner.async_step = MagicMock(return_value=step_gen) + + wrapped_step = m._instrument_async_stream_runner_step(mock_runner.async_step) + + wrapped_gen = wrapped_step(mock_runner, call_log=call) + async for gen in wrapped_gen: + pass + + mock_start_span.assert_called_once_with( + name="guardrails/guard/step", + span_type="step", + attributes={ + "guardrails.version": GUARDRAILS_VERSION, + "type": "guardrails/guard/step", + "async": True, + "stream": True, + }, + ) + + # Internally called, not the wrapped call above + mock_runner.async_step.assert_called_once() + mock_add_step_attributes.assert_called_once_with( + mock_span, iteration, mock_runner, call_log=call + ) + + def test__instrument_runner_call(self, mocker): + mock_span = MockSpan() + mock_start_span = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.mlflow.start_span", + return_value=mock_span, + ) + + mock_add_call_attributes = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.add_call_attributes" + ) + + from guardrails.integrations.databricks import MlFlowInstrumentor + + m = MlFlowInstrumentor("mock experiment") + + llmResponse = LLMResponse(output="mock output") + mock_call = MagicMock(return_value=llmResponse) + mock_runner = MagicMock(spec=Runner) + mock_runner.call = mock_call + + wrapped_call = m._instrument_runner_call(mock_runner.call) + + wrapped_call(mock_runner) + + mock_start_span.assert_called_once_with( + name="guardrails/guard/step/call", + span_type="LLM", + attributes={ + "guardrails.version": GUARDRAILS_VERSION, + "type": "guardrails/guard/step/call", + }, + ) + + # Internally called, not the wrapped call above + mock_runner.call.assert_called_once() + mock_add_call_attributes.assert_called_once_with( + mock_span, llmResponse, mock_runner + ) + + @pytest.mark.asyncio + async def test__instrument_async_runner_call(self, mocker): + mock_span = MockSpan() + mock_start_span = mocker.patch( + 
"guardrails.integrations.databricks.ml_flow_instrumentor.mlflow.start_span", + return_value=mock_span, + ) + + mock_add_call_attributes = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.add_call_attributes" + ) + + from guardrails.integrations.databricks import MlFlowInstrumentor + + m = MlFlowInstrumentor("mock experiment") + + llmResponse = LLMResponse(output="mock output") + response = Future() + response.set_result(llmResponse) + mock_call = MagicMock(return_value=response) + mock_runner = MagicMock(spec=AsyncRunner) + mock_runner.async_call = mock_call + + wrapped_call = m._instrument_async_runner_call(mock_runner.async_call) + + await wrapped_call(mock_runner) + + mock_start_span.assert_called_once_with( + name="guardrails/guard/step/call", + span_type="LLM", + attributes={ + "guardrails.version": GUARDRAILS_VERSION, + "type": "guardrails/guard/step/call", + "async": True, + }, + ) + + # Internally called, not the wrapped call above + mock_runner.async_call.assert_called_once() + mock_add_call_attributes.assert_called_once_with( + mock_span, llmResponse, mock_runner + ) + + def test__instrument_validator_validate(self, mocker): + mock_span = MockSpan() + mock_start_span = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.mlflow.start_span", + return_value=mock_span, + ) + + mock_add_validator_attributes = mocker.patch( + "guardrails.integrations.databricks.ml_flow_instrumentor.add_validator_attributes" + ) + + from guardrails.integrations.databricks import MlFlowInstrumentor + from tests.unit_tests.mocks.mock_hub import MockValidator + + m = MlFlowInstrumentor("mock experiment") + + wrapped_validate = m._instrument_validator_validate(MockValidator.validate) + + mock_validator = MockValidator() + + resp = wrapped_validate(mock_validator, True, {}) + + mock_start_span.assert_called_once_with( + name="mock-validator.validate", + span_type="validator", + attributes={ + "guardrails.version": GUARDRAILS_VERSION, + 
"type": "guardrails/guard/step/validator", + }, + ) + + # Internally called, not the wrapped call above + mock_add_validator_attributes.assert_called_once_with( + mock_validator, + True, + {}, + validator_span=mock_span, # type: ignore + validator_name="mock-validator", + obj_id=id(mock_validator), + on_fail_descriptor="noop", + result=resp, + init_kwargs={}, + validation_session_id="unknown", + ) diff --git a/tests/unit_tests/mocks/mock_hub.py b/tests/unit_tests/mocks/mock_hub.py new file mode 100644 index 000000000..85aaee737 --- /dev/null +++ b/tests/unit_tests/mocks/mock_hub.py @@ -0,0 +1,14 @@ +from typing import Any, Dict + +from guardrails.validator_base import ( + PassResult, + ValidationResult, + Validator, + register_validator, +) + + +@register_validator(name="mock-validator", data_type="string") +class MockValidator(Validator): + def validate(self, value: Any, metadata: Dict) -> ValidationResult: + return PassResult() diff --git a/tests/unit_tests/mocks/mock_span.py b/tests/unit_tests/mocks/mock_span.py new file mode 100644 index 000000000..90ddbee77 --- /dev/null +++ b/tests/unit_tests/mocks/mock_span.py @@ -0,0 +1,18 @@ +from contextlib import AbstractContextManager +from types import TracebackType +from typing import Optional, Type +from unittest.mock import MagicMock + + +class MockSpan(AbstractContextManager): + def __exit__( + self, + __exc_type: Optional[Type[BaseException]], + __exc_value: Optional[BaseException], + __traceback: Optional[TracebackType], + ) -> Optional[bool]: + return super().__exit__(__exc_type, __exc_value, __traceback) + + def __init__(self): + super().__init__() + self.set_attribute = MagicMock()