
Commit

Merge branch 'master' into antonpirker/django-caching-module
antonpirker committed May 3, 2024
2 parents fa47da2 + 41aa99b commit 94421d2
Showing 29 changed files with 1,876 additions and 135 deletions.
24 changes: 24 additions & 0 deletions .github/workflows/test-integrations-data-processing.yml
@@ -42,6 +42,10 @@ jobs:
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test anthropic latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-anthropic-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test arq latest
        run: |
          set -x # print commands that are executed
@@ -58,10 +62,18 @@ jobs:
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-huey-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test langchain latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-langchain-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test openai latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-openai-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test huggingface_hub latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-huggingface_hub-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test rq latest
        run: |
          set -x # print commands that are executed
@@ -98,6 +110,10 @@ jobs:
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test anthropic pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-anthropic" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test arq pinned
        run: |
          set -x # print commands that are executed
@@ -114,10 +130,18 @@ jobs:
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-huey" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test langchain pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-langchain" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test openai pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-openai" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test huggingface_hub pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-huggingface_hub" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test rq pinned
        run: |
          set -x # print commands that are executed
2 changes: 1 addition & 1 deletion LICENSE
@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2018 Functional Software, Inc. dba Sentry
Copyright (c) 2018-2024 Functional Software, Inc. dba Sentry

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
2 changes: 1 addition & 1 deletion checkouts/data-schemas
6 changes: 6 additions & 0 deletions mypy.ini
@@ -36,6 +36,8 @@ ignore_missing_imports = True
ignore_missing_imports = True
[mypy-aiohttp.*]
ignore_missing_imports = True
[mypy-anthropic.*]
ignore_missing_imports = True
[mypy-sanic.*]
ignore_missing_imports = True
[mypy-tornado.*]
@@ -48,6 +50,8 @@ ignore_missing_imports = True
ignore_missing_imports = True
[mypy-asgiref.*]
ignore_missing_imports = True
[mypy-langchain_core.*]
ignore_missing_imports = True
[mypy-executing.*]
ignore_missing_imports = True
[mypy-asttokens.*]
@@ -69,6 +73,8 @@ ignore_missing_imports = True
ignore_missing_imports = True
[mypy-openai.*]
ignore_missing_imports = True
[mypy-huggingface_hub.*]
ignore_missing_imports = True
[mypy-arq.*]
ignore_missing_imports = True
[mypy-grpc.*]
2 changes: 1 addition & 1 deletion scripts/runtox.sh
@@ -40,4 +40,4 @@ if [ -z "${ENV}" ]; then
    exit 0
fi

exec $TOXPATH -e "$ENV" -- "${@:2}"
exec $TOXPATH -p auto -o -e "$ENV" -- "${@:2}"
3 changes: 3 additions & 0 deletions scripts/split-tox-gh-actions/split-tox-gh-actions.py
@@ -66,11 +66,14 @@
        "gcp",
    ],
    "Data Processing": [
        "anthropic",
        "arq",
        "beam",
        "celery",
        "huey",
        "langchain",
        "openai",
        "huggingface_hub",
        "rq",
    ],
    "Databases": [
72 changes: 40 additions & 32 deletions sentry_sdk/_types.py
@@ -28,6 +28,45 @@
# "critical" is an alias of "fatal" recognized by Relay
LogLevelStr = Literal["fatal", "critical", "error", "warning", "info", "debug"]

DurationUnit = Literal[
    "nanosecond",
    "microsecond",
    "millisecond",
    "second",
    "minute",
    "hour",
    "day",
    "week",
]

InformationUnit = Literal[
    "bit",
    "byte",
    "kilobyte",
    "kibibyte",
    "megabyte",
    "mebibyte",
    "gigabyte",
    "gibibyte",
    "terabyte",
    "tebibyte",
    "petabyte",
    "pebibyte",
    "exabyte",
    "exbibyte",
]

FractionUnit = Literal["ratio", "percent"]
MeasurementUnit = Union[DurationUnit, InformationUnit, FractionUnit, str]

MeasurementValue = TypedDict(
    "MeasurementValue",
    {
        "value": float,
        "unit": Optional[MeasurementUnit],
    },
)

Event = TypedDict(
    "Event",
    {
@@ -49,7 +88,7 @@
        "level": LogLevelStr,
        "logentry": Mapping[str, object],
        "logger": str,
        "measurements": dict[str, object],
        "measurements": dict[str, MeasurementValue],
        "message": str,
        "modules": dict[str, str],
        "monitor_config": Mapping[str, object],
@@ -118,37 +157,6 @@
]
SessionStatus = Literal["ok", "exited", "crashed", "abnormal"]

DurationUnit = Literal[
    "nanosecond",
    "microsecond",
    "millisecond",
    "second",
    "minute",
    "hour",
    "day",
    "week",
]

InformationUnit = Literal[
    "bit",
    "byte",
    "kilobyte",
    "kibibyte",
    "megabyte",
    "mebibyte",
    "gigabyte",
    "gibibyte",
    "terabyte",
    "tebibyte",
    "petabyte",
    "pebibyte",
    "exabyte",
    "exbibyte",
]

FractionUnit = Literal["ratio", "percent"]
MeasurementUnit = Union[DurationUnit, InformationUnit, FractionUnit, str]

ProfilerMode = Literal["sleep", "thread", "gevent", "unknown"]

# Type of the metric.
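Note (not part of the diff): with the MeasurementValue type above, an event's "measurements" field maps a name to a dict holding a float "value" and an optional "unit". A minimal, made-up sketch:

# Hypothetical event fragment; the measurement names, values, and units are invented for illustration.
event = {
    "type": "transaction",
    "measurements": {
        "ai_total_tokens_used": {"value": 4200.0, "unit": "none"},
        "frames_frozen": {"value": 2.0, "unit": None},
    },
}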
Empty file added sentry_sdk/ai/__init__.py
Empty file.
77 changes: 77 additions & 0 deletions sentry_sdk/ai/monitoring.py
@@ -0,0 +1,77 @@
from functools import wraps

import sentry_sdk.utils
from sentry_sdk import start_span
from sentry_sdk.tracing import Span
from sentry_sdk.utils import ContextVar
from sentry_sdk._types import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Optional, Callable, Any

_ai_pipeline_name = ContextVar("ai_pipeline_name", default=None)


def set_ai_pipeline_name(name):
    # type: (Optional[str]) -> None
    _ai_pipeline_name.set(name)


def get_ai_pipeline_name():
    # type: () -> Optional[str]
    return _ai_pipeline_name.get()


def ai_track(description, **span_kwargs):
    # type: (str, Any) -> Callable[..., Any]
    def decorator(f):
        # type: (Callable[..., Any]) -> Callable[..., Any]
        @wraps(f)
        def wrapped(*args, **kwargs):
            # type: (Any, Any) -> Any
            curr_pipeline = _ai_pipeline_name.get()
            op = span_kwargs.get("op", "ai.run" if curr_pipeline else "ai.pipeline")
            with start_span(description=description, op=op, **span_kwargs) as span:
                if curr_pipeline:
                    span.set_data("ai.pipeline.name", curr_pipeline)
                    return f(*args, **kwargs)
                else:
                    _ai_pipeline_name.set(description)
                    try:
                        res = f(*args, **kwargs)
                    except Exception as e:
                        event, hint = sentry_sdk.utils.event_from_exception(
                            e,
                            client_options=sentry_sdk.get_client().options,
                            mechanism={"type": "ai_monitoring", "handled": False},
                        )
                        sentry_sdk.capture_event(event, hint=hint)
                        raise e from None
                    finally:
                        _ai_pipeline_name.set(None)
                    return res

        return wrapped

    return decorator


def record_token_usage(
    span, prompt_tokens=None, completion_tokens=None, total_tokens=None
):
    # type: (Span, Optional[int], Optional[int], Optional[int]) -> None
    ai_pipeline_name = get_ai_pipeline_name()
    if ai_pipeline_name:
        span.set_data("ai.pipeline.name", ai_pipeline_name)
    if prompt_tokens is not None:
        span.set_measurement("ai_prompt_tokens_used", value=prompt_tokens)
    if completion_tokens is not None:
        span.set_measurement("ai_completion_tokens_used", value=completion_tokens)
    if (
        total_tokens is None
        and prompt_tokens is not None
        and completion_tokens is not None
    ):
        total_tokens = prompt_tokens + completion_tokens
    if total_tokens is not None:
        span.set_measurement("ai_total_tokens_used", total_tokens)
32 changes: 32 additions & 0 deletions sentry_sdk/ai/utils.py
@@ -0,0 +1,32 @@
from sentry_sdk._types import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Any

from sentry_sdk.tracing import Span
from sentry_sdk.utils import logger


def _normalize_data(data):
    # type: (Any) -> Any

    # convert pydantic data (e.g. OpenAI v1+) to json compatible format
    if hasattr(data, "model_dump"):
        try:
            return data.model_dump()
        except Exception as e:
            logger.warning("Could not convert pydantic data to JSON: %s", e)
            return data
    if isinstance(data, list):
        if len(data) == 1:
            return _normalize_data(data[0])  # remove empty dimensions
        return list(_normalize_data(x) for x in data)
    if isinstance(data, dict):
        return {k: _normalize_data(v) for (k, v) in data.items()}
    return data


def set_data_normalized(span, key, value):
    # type: (Span, str, Any) -> None
    normalized = _normalize_data(value)
    span.set_data(key, normalized)
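As an illustration only (also not part of the diff), an integration might use set_data_normalized roughly like this; the span op and data key are invented for the example:

import sentry_sdk
from sentry_sdk.ai.utils import set_data_normalized

with sentry_sdk.start_span(op="ai.chat_completions.create", description="demo") as span:
    # Plain dicts and lists pass through unchanged; objects with .model_dump()
    # (e.g. pydantic models returned by OpenAI v1+) are converted to plain data,
    # and single-element lists are unwrapped.
    response = {"choices": [{"message": {"role": "assistant", "content": "hi"}}]}
    set_data_normalized(span, "ai.responses", response)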
