Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ repos:
hooks:
- id: debug-statements
- id: trailing-whitespace
exclude: tests/api-mocks/aiapi.yaml

- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.8.2
Expand Down
2 changes: 1 addition & 1 deletion examples/api-sync.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ def send_impact():
from scope3ai.api.types import ImpactRow

print("Sending impact")
impact = ImpactRow(model="gpt_4o", input_tokens=100, output_tokens=100)
impact = ImpactRow(model_id="gpt_4o", input_tokens=100, output_tokens=100)
response = client.impact(rows=[impact])
print(response)

Expand Down
20 changes: 16 additions & 4 deletions scope3ai/api/client.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from os import getenv
from typing import List, Optional, Union
from typing import Any, List, Optional, TypeVar

import httpx
from pydantic import BaseModel
Expand All @@ -15,13 +15,19 @@
NodeResponse,
)

ClientType = TypeVar("ClientType", httpx.Client, httpx.AsyncClient)


class Scope3AIError(Exception):
    """Exception type raised by the Scope3AI API client."""


class ClientBase:
def __init__(self, api_key: str = None, api_url: str = None) -> None:
def __init__(
self,
api_key: Optional[str] = None,
api_url: Optional[str] = None,
) -> None:
self.api_key = api_key or getenv("SCOPE3AI_API_KEY")
self.api_url = api_url or getenv("SCOPE3AI_API_URL") or DEFAULT_API_URL
if not self.api_key:
Expand All @@ -38,7 +44,7 @@ def __init__(self, api_key: str = None, api_url: str = None) -> None:
)

@property
def client(self) -> Union[httpx.Client, httpx.AsyncClient]:
def client(self) -> ClientType:
"""
Obtain an httpx client for synchronous or asynchronous operation
with the necessary authentication headers included.
Expand All @@ -47,8 +53,14 @@ def client(self) -> Union[httpx.Client, httpx.AsyncClient]:
self._client = self.create_client()
return self._client

def create_client(self) -> ClientType:
    """Build the underlying httpx client.

    Abstract hook: subclasses provide the concrete (sync or async)
    httpx client; the base class only defines the contract.
    """
    raise NotImplementedError


class ClientCommands:
def execute_request(self, *args, **kwargs) -> Any:
    """Issue an HTTP request against the Scope3AI API.

    Abstract hook: a concrete client mixin target must override this.
    The command methods defined on this class funnel through it.
    """
    raise NotImplementedError

def model(
self,
family: Optional[Family] = None,
Expand Down Expand Up @@ -144,7 +156,7 @@ class Client(ClientBase, ClientCommands):
Synchronous Client to the Scope3AI HTTP API
"""

def create_client(self):
def create_client(self) -> httpx.Client:
return httpx.Client(headers={"Authorization": f"Bearer {self.api_key}"})

def execute_request(
Expand Down
145 changes: 138 additions & 7 deletions scope3ai/api/typesgen.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# generated by datamodel-codegen:
# filename: aiapi.yaml
# timestamp: 2025-01-21T22:57:07+00:00
# timestamp: 2025-01-24T16:50:00+00:00

from __future__ import annotations

Expand Down Expand Up @@ -102,6 +102,105 @@ class NodeUpdateRequest(BaseModel):
)


class ModelCreateRequest(BaseModel):
    """
    Request body for creating a new model.

    Only ``id`` is required; every other attribute is optional metadata
    covering lineage (family, fine-tuning source, benchmark model),
    sizing (parameter counts, experts), usage estimates, and training
    footprint (energy, emissions, water).

    NOTE(review): this module is generated by datamodel-codegen from
    aiapi.yaml — manual edits may be overwritten on regeneration.
    """

    id: Annotated[str, Field(examples=["gpt-4-turbo"])]
    name: Annotated[Optional[str], Field(examples=["GPT-4 Turbo"])] = None
    family: Annotated[Optional[str], Field(examples=["gpt"])] = None
    hugging_face_path: Annotated[
        Optional[str], Field(examples=["EleutherAI/gpt-neo-2.7B"])
    ] = None
    benchmark_model_id: Annotated[Optional[str], Field(examples=["gpt-4-turbo"])] = None
    estimated_use_life_days: Annotated[Optional[float], Field(examples=[365])] = None
    estimated_requests_per_day: Annotated[Optional[float], Field(examples=[1000])] = (
        None
    )
    fine_tuned_from_model_id: Annotated[
        Optional[str], Field(examples=["gpt-4-turbo"])
    ] = None
    aliases: Annotated[
        Optional[List[str]], Field(examples=[["claude-latest"]], max_length=100)
    ] = None
    total_params_billions: Annotated[Optional[float], Field(examples=[175])] = None
    number_of_experts: Annotated[Optional[int], Field(examples=[7])] = None
    params_per_expert_billions: Annotated[Optional[float], Field(examples=[8])] = None
    training_usage_energy_kwh: Annotated[Optional[float], Field(examples=[1013.1])] = (
        None
    )
    training_usage_emissions_kgco2e: Annotated[
        Optional[float], Field(examples=[1013.1])
    ] = None
    training_usage_water_l: Annotated[Optional[float], Field(examples=[1013.1])] = None
    training_embodied_emissions_kgco2e: Annotated[
        Optional[float], Field(examples=[11013.1])
    ] = None
    training_embodied_water_l: Annotated[Optional[float], Field(examples=[11013.1])] = (
        None
    )


class ModelUpdateRequest(BaseModel):
    """
    Request body for updating an existing model.

    All attributes are optional; mirrors ``ModelCreateRequest`` except
    that ``id`` and ``aliases`` are absent (not updatable here —
    presumably immutable / managed separately; verify against the API
    spec).

    NOTE(review): this module is generated by datamodel-codegen from
    aiapi.yaml — manual edits may be overwritten on regeneration.
    """

    name: Annotated[Optional[str], Field(examples=["GPT-4 Turbo"])] = None
    family: Annotated[Optional[str], Field(examples=["gpt"])] = None
    hugging_face_path: Annotated[
        Optional[str], Field(examples=["EleutherAI/gpt-neo-2.7B"])
    ] = None
    benchmark_model_id: Annotated[Optional[str], Field(examples=["gpt-4-turbo"])] = None
    estimated_use_life_days: Annotated[Optional[float], Field(examples=[365])] = None
    estimated_requests_per_day: Annotated[Optional[float], Field(examples=[1000])] = (
        None
    )
    fine_tuned_from_model_id: Annotated[
        Optional[str], Field(examples=["gpt-4-turbo"])
    ] = None
    total_params_billions: Annotated[Optional[float], Field(examples=[175])] = None
    number_of_experts: Annotated[Optional[int], Field(examples=[7])] = None
    params_per_expert_billions: Annotated[Optional[float], Field(examples=[8])] = None
    training_usage_energy_kwh: Annotated[Optional[float], Field(examples=[1013.1])] = (
        None
    )
    training_usage_emissions_kgco2e: Annotated[
        Optional[float], Field(examples=[1013.1])
    ] = None
    training_usage_water_l: Annotated[Optional[float], Field(examples=[1013.1])] = None
    training_embodied_emissions_kgco2e: Annotated[
        Optional[float], Field(examples=[11013.1])
    ] = None
    training_embodied_water_l: Annotated[Optional[float], Field(examples=[11013.1])] = (
        None
    )


class GPUCreateRequest(BaseModel):
    """
    Request body for registering a new GPU type.

    All fields are required: identity (``id``, ``name``), power and
    embodied-resource figures, plus performance/regression coefficients
    relative to an H200 baseline.

    NOTE(review): this module is generated by datamodel-codegen from
    aiapi.yaml — manual edits may be overwritten on regeneration.
    """

    name: Annotated[str, Field(examples=["NVIDIA A100 40GB"])]
    id: Annotated[str, Field(examples=["a100_40gb"])]
    max_power_w: Annotated[float, Field(examples=[700])]
    embodied_emissions_kgco2e: Annotated[float, Field(examples=[282.1])]
    embodied_water_mlh2o: Annotated[float, Field(examples=[181.1])]
    performance_ratio_to_h200: Annotated[float, Field(examples=[1.5])]
    ols_coefficient_gpu_count: Annotated[float, Field(examples=[11.4])]
    ols_intercept: Annotated[float, Field(examples=[11.4])]


class GPUUpdateRequest(BaseModel):
    """
    Request body for updating an existing GPU type.

    All attributes are optional; mirrors ``GPUCreateRequest`` except
    that ``id`` is absent (presumably immutable once created — verify
    against the API spec).

    NOTE(review): this module is generated by datamodel-codegen from
    aiapi.yaml — manual edits may be overwritten on regeneration.
    """

    name: Annotated[Optional[str], Field(examples=["NVIDIA A100 40GB"])] = None
    max_power_w: Annotated[Optional[float], Field(examples=[700])] = None
    embodied_emissions_kgco2e: Annotated[Optional[float], Field(examples=[282.1])] = (
        None
    )
    embodied_water_mlh2o: Annotated[Optional[float], Field(examples=[181.1])] = None
    performance_ratio_to_h200: Annotated[Optional[float], Field(examples=[1.5])] = None
    ols_coefficient_gpu_count: Annotated[Optional[float], Field(examples=[11.4])] = None
    ols_intercept: Annotated[Optional[float], Field(examples=[11.4])] = None


class Call(RootModel[List[Union[str, int]]]):
root: Annotated[
List[Union[str, int]],
Expand Down Expand Up @@ -151,7 +250,7 @@ class ImpactBigQueryRequest(BaseModel):
Optional[Dict[str, Any]],
Field(description="User-defined context from BigQuery"),
] = None
calls: List[Call]
calls: Annotated[List[Call], Field(max_length=1000)]


class ImpactBigQueryResponse(BaseModel):
Expand Down Expand Up @@ -210,7 +309,7 @@ class Error(BaseModel):

class Node(NodeCreateRequest):
customer_id: Annotated[
Optional[Any],
Optional[int],
Field(
description="ID of the customer who owns this node (visible to admins only)"
),
Expand All @@ -227,14 +326,26 @@ class GPU(BaseModel):
model_config = ConfigDict(
extra="forbid",
)
name: Annotated[Optional[str], Field(examples=["NVIDIA A100 40GB"])] = None
name: Annotated[str, Field(examples=["NVIDIA A100 40GB"])]
id: Annotated[str, Field(examples=["a100_40gb"])]
max_power_w: Annotated[float, Field(examples=[700])]
embodied_emissions_kgco2e: Annotated[float, Field(examples=[282.1])]
embodied_water_mlh2o: Annotated[float, Field(examples=[181.1])]
performance_ratio_to_h200: Annotated[float, Field(examples=[1.5])]
ols_coefficient_gpu_count: Annotated[float, Field(examples=[11.4])]
ols_intercept: Annotated[float, Field(examples=[11.4])]
customer_id: Annotated[
int,
Field(
description="ID of the customer who owns this node (visible to admins only)"
),
]
created_at: datetime
updated_at: datetime
created_by: Annotated[
str,
Field(description="ID of the user who created the node (admin or owner only)"),
]


class Image(RootModel[str]):
Expand Down Expand Up @@ -517,12 +628,12 @@ class ImpactRow(BaseModel):
),
] = None
input_audio_seconds: Annotated[
Optional[int],
Optional[float],
Field(
description="the duration of audio input in seconds",
examples=[60],
ge=0,
le=100000,
ge=0.0,
le=100000.0,
),
] = None
output_tokens: Annotated[
Expand Down Expand Up @@ -589,6 +700,14 @@ class Model(BaseModel):
extra="forbid",
)
id: Annotated[str, Field(examples=["gpt-4-turbo"])]
aliases: Annotated[
List[str],
Field(
description="List of aliases for this model; must be globally-unique with id",
examples=[["claude-latest", "claude-3-sonnet-current"]],
max_length=100,
),
]
name: Annotated[Optional[str], Field(examples=["GPT-4 Turbo"])] = None
family: Annotated[Optional[str], Field(examples=["gpt"])] = None
hugging_face_path: Annotated[
Expand Down Expand Up @@ -621,6 +740,18 @@ class Model(BaseModel):
fine_tuned_from_model_id: Annotated[
Optional[str], Field(examples=["llama_31_8b"])
] = None
customer_id: Annotated[
int,
Field(
description="ID of the customer who owns this node (visible to admins only)"
),
]
created_at: datetime
updated_at: datetime
created_by: Annotated[
str,
Field(description="ID of the user who created the node (admin or owner only)"),
]


class GridMix(BaseModel):
Expand Down
Loading
Loading