Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion scope3ai/lib.py
Original file line number Diff line number Diff line change
Expand Up @@ -226,7 +226,7 @@ async def submit_impact(
tracer._link_trace(ctx)

if self.sync_mode:
await submit_impact(impact_row)
await submit_impact(impact_row, ctx=ctx)
return ctx

self._ensure_worker()
Expand Down
426 changes: 426 additions & 0 deletions tests/cassettes/test_tracer_context.yaml

Large diffs are not rendered by default.

201 changes: 201 additions & 0 deletions tests/cassettes/test_tracer_context_nested.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,201 @@
interactions:
- request:
body: '{"messages":[{"role":"user","content":"Hello World!"}],"model":"gpt-4"}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
authorization:
- DUMMY
connection:
- keep-alive
content-length:
- '71'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.57.1
x-stainless-arch:
- x64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- Linux
x-stainless-package-version:
- 1.57.1
x-stainless-retry-count:
- '0'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.12.6
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: !!binary |
H4sIAAAAAAAAAwAAAP//jFJNTwIxFLzvr3j2DAZEELkYD36djNGTxmxK+1gq3b6mfRtdCf/ddPlY
iJh46WHmzXTmtcsMQBgtJiDUXLIqve1eL2R9N3i6sdXFy+Xj7No+L3x8fL2tv+/GRnSSgqYfqHir
OlVUeotsyK1pFVAyJtf+xWA4Go2Ho3FDlKTRJlnhuXve7Y36g41iTkZhFBN4ywAAls2ZsjmNX2IC
vc4WKTFGWaCY7IYARCCbECFjNJGlY9FpSUWO0TVx79FaOoF7+gQlHTzAWgA1VcCkZX21Lww4q6JM
gV1l7QZf7ZJYKnygadzwO3xmnInzPKCM5NKtkcmLhl1lAO9N4+qghPCBSs850wJdMuxvCot2t0dI
Jpa2xc+2+IFbrpGlsXFvY0JJNUfdKtv1ykob2iOyvc6/wxzzXvc2rviPfUsohZ5R5z6gNuqwcDsW
MP28v8Z2O24Ci1hHxjKfGVdg8ME0f6B5ylX2AwAA//8DAATOvT36AgAA
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8fac38897e616dc5-MIA
Connection:
- keep-alive
Content-Encoding:
- gzip
Content-Type:
- application/json
Date:
- Tue, 31 Dec 2024 18:09:29 GMT
Server:
- cloudflare
Set-Cookie:
- __cf_bm=E71sj8BgPlfHStPndCfIdMbuz2I7IILfT6MYMR6O05c-1735668569-1.0.1.1-3QctP69wJWwTM76kRblKg8m5S16SGixo_52RMFU2__IS.TchyW.eM3sl2IeYaSnH.IcdrqDnytcozX4H_0WXgQ;
path=/; expires=Tue, 31-Dec-24 18:39:29 GMT; domain=.api.openai.com; HttpOnly;
Secure; SameSite=None
- _cfuvid=QWRWX1JfB5JCTGRoMGJQ2MxwzDT2PuSTW1JIp5Ne3u0-1735668569446-0.0.1.1-604800000;
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
access-control-expose-headers:
- X-Request-ID
alt-svc:
- h3=":443"; ma=86400
openai-organization:
- user-sxsjo8cvghsvsqprrtasrxyq
openai-processing-ms:
- '780'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '5000'
x-ratelimit-limit-tokens:
- '80000'
x-ratelimit-remaining-requests:
- '4999'
x-ratelimit-remaining-tokens:
- '79980'
x-ratelimit-reset-requests:
- 12ms
x-ratelimit-reset-tokens:
- 15ms
x-request-id:
- req_4620b0e0e7c7b2f54ba1f290bb10019f
status:
code: 200
message: OK
- request:
body: '{"messages":[{"role":"user","content":"Hello World!"}],"model":"gpt-4"}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
authorization:
- DUMMY
connection:
- keep-alive
content-length:
- '71'
content-type:
- application/json
cookie:
- __cf_bm=E71sj8BgPlfHStPndCfIdMbuz2I7IILfT6MYMR6O05c-1735668569-1.0.1.1-3QctP69wJWwTM76kRblKg8m5S16SGixo_52RMFU2__IS.TchyW.eM3sl2IeYaSnH.IcdrqDnytcozX4H_0WXgQ;
_cfuvid=QWRWX1JfB5JCTGRoMGJQ2MxwzDT2PuSTW1JIp5Ne3u0-1735668569446-0.0.1.1-604800000
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.57.1
x-stainless-arch:
- x64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- Linux
x-stainless-package-version:
- 1.57.1
x-stainless-retry-count:
- '0'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.12.6
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: !!binary |
H4sIAAAAAAAAAwAAAP//jFLLTsMwELznKxafW0R5BOgFoV7KS3AAcUAoMvY2NXW8xt4AEeq/I6eP
tAIkLj7M7Ixn1v7KAITRYghCTSWrytv++Uw24/zhdET1zfuoefMfk9ur6/uTx3DZ3IleUtDLKype
qXYVVd4iG3ILWgWUjMl1cHxwlOcnR/lpS1Sk0SZZ6bl/2N/LBwdLxZSMwiiG8JQBAHy1Z8rmNH6K
Iez1VkiFMcoSxXA9BCAC2YQIGaOJLB2LXkcqcoyujTtGa2kHxvQBSjq4gIUAGqqBScvmbFMYcFJH
mQK72tolPl8nsVT6QC9xya/xiXEmTouAMpJLt0YmL1p2ngE8t43rrRLCB6o8F0wzdMlwsCwsut3+
QjKxtB2+v8K33AqNLI2NGxsTSqop6k7ZrVfW2tAGkW10/hnmN+9Fb+PK/9h3hFLoGXXhA2qjtgt3
YwHTz/trbL3jNrCITWSsiolxJQYfTPsH2qecZ98AAAD//wMANuZV4/oCAAA=
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8fac388f7c4f6dc5-MIA
Connection:
- keep-alive
Content-Encoding:
- gzip
Content-Type:
- application/json
Date:
- Tue, 31 Dec 2024 18:09:30 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
access-control-expose-headers:
- X-Request-ID
alt-svc:
- h3=":443"; ma=86400
openai-organization:
- user-sxsjo8cvghsvsqprrtasrxyq
openai-processing-ms:
- '695'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '5000'
x-ratelimit-limit-tokens:
- '80000'
x-ratelimit-remaining-requests:
- '4999'
x-ratelimit-remaining-tokens:
- '79980'
x-ratelimit-reset-requests:
- 12ms
x-ratelimit-reset-tokens:
- 15ms
x-request-id:
- req_6028961ec3fdcb9d881820c1cfaab25a
status:
code: 200
message: OK
version: 1
2 changes: 1 addition & 1 deletion tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ def tracer_init(docker_api_info):


@pytest.fixture
def tracer_with_response_init(docker_api_info):
def tracer_with_sync_init(docker_api_info):
from scope3ai import Scope3AI

scope3 = Scope3AI.init(
Expand Down
2 changes: 1 addition & 1 deletion tests/test_openai_tracer.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ def test_openai_chat(tracer_init):


@pytest.mark.vcr
def test_openai_chat_with_response(tracer_with_response_init):
def test_openai_chat_with_response(tracer_with_sync_init):
client = OpenAI()
response = client.chat.completions.create(
model="gpt-4", messages=[{"role": "user", "content": "Hello World!"}]
Expand Down
110 changes: 110 additions & 0 deletions tests/test_tracer.py
Original file line number Diff line number Diff line change
Expand Up @@ -138,3 +138,113 @@ async def test_tracer_openai_simple_asynchronisation(tracer_init):
tracer_init._worker.resume()
await response.scope3ai.await_impact()
assert response.scope3ai.impact is not None


@pytest.mark.vcr
def test_tracer_context(tracer_init):
    """A single traced chat completion yields a non-empty aggregated impact."""
    from openai import OpenAI

    openai_client = OpenAI()
    with tracer_init.trace() as trace_ctx:
        completion = openai_client.chat.completions.create(
            model="gpt-4", messages=[{"role": "user", "content": "Hello World!"}]
        )
        assert len(completion.choices) > 0
        # The trace context aggregates the impact of every request issued
        # inside the `with` block.
        aggregated = trace_ctx.impact()
        assert aggregated is not None
        assert aggregated.total_energy_wh > 0
        assert aggregated.total_gco2e > 0
        assert aggregated.total_mlh2o > 0


@pytest.mark.vcr
def test_tracer_context_nested(tracer_init):
    """Nested trace contexts: the outer trace accumulates the inner one's requests."""
    from openai import OpenAI

    openai_client = OpenAI()
    with tracer_init.trace() as outer:
        first = openai_client.chat.completions.create(
            model="gpt-4", messages=[{"role": "user", "content": "Hello World!"}]
        )
        assert len(first.choices) > 0

        with tracer_init.trace() as inner:
            second = openai_client.chat.completions.create(
                model="gpt-4", messages=[{"role": "user", "content": "Hello World!"}]
            )
            assert len(second.choices) > 0
            inner_impact = inner.impact()
            assert inner_impact is not None
            assert inner_impact.total_energy_wh > 0
            assert inner_impact.total_gco2e > 0
            assert inner_impact.total_mlh2o > 0

        # The outer trace saw both requests, so each of its totals must be
        # strictly greater than the inner trace's single-request totals.
        outer_impact = outer.impact()
        assert outer_impact is not None
        assert outer_impact.total_energy_wh > inner_impact.total_energy_wh
        assert outer_impact.total_gco2e > inner_impact.total_gco2e
        assert outer_impact.total_mlh2o > inner_impact.total_mlh2o


def test_tracer_submit_impact(tracer_init):
    """submit_impact() returns a context whose impact the background worker fills in."""
    from scope3ai.api.types import ImpactRow, Model

    # Pause the background worker so the submission cannot resolve immediately.
    tracer_init._ensure_worker()
    tracer_init._worker.pause()

    row = ImpactRow(model=Model(id="gpt_4o"), input_tokens=100, output_tokens=100)
    ctx = tracer_init.submit_impact(row)
    assert ctx is not None
    # Worker is paused: no impact has been computed yet.
    assert ctx.impact is None

    # Let the worker drain its queue, then block until the impact arrives.
    tracer_init._worker.resume()
    ctx.wait_impact()
    assert ctx.impact is not None


def test_tracer_submit_impact_sync(tracer_with_sync_init):
    """In sync mode the impact is already resolved when submit_impact() returns."""
    from scope3ai.api.types import ImpactRow, Model

    row = ImpactRow(model=Model(id="gpt_4o"), input_tokens=100, output_tokens=100)
    result = tracer_with_sync_init.submit_impact(row)
    assert result is not None
    assert result.impact is not None


@pytest.mark.asyncio
async def test_tracer_submit_impact_async(tracer_init):
    """Async submission returns a context with no impact while the worker is paused.

    XXX non-functional test: awaiting the impact currently blocks forever, so
    the final resolution step is left commented out below.
    """
    from scope3ai.api.types import ImpactRow, Model

    # Pause the background worker so the impact cannot resolve yet.
    tracer_init._ensure_worker()
    tracer_init._worker.pause()

    impact = ImpactRow(model=Model(id="gpt_4o"), input_tokens=100, output_tokens=100)
    ctx = await tracer_init.asubmit_impact(impact)

    assert ctx is not None
    # Worker is paused: nothing computed yet.
    assert ctx.impact is None

    # Resume the background worker.
    tracer_init._worker.resume()

    # Fully blocks at the moment — awaiting the impact never returns:
    # await ctx.await_impact()
    # assert ctx.impact is not None


@pytest.mark.asyncio
async def test_tracer_submit_impact_sync_async(tracer_with_sync_init):
    """asubmit_impact() in sync mode resolves the impact before returning."""
    from scope3ai.api.types import ImpactRow, Model

    row = ImpactRow(model=Model(id="gpt_4o"), input_tokens=100, output_tokens=100)
    result = await tracer_with_sync_init.asubmit_impact(row)
    assert result is not None
    assert result.impact is not None
Loading