Remove logfire_api_session parameter from logfire.configure #272

Merged 3 commits on Jun 21, 2024
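For callers, the change is simply that `logfire.configure` no longer accepts a `logfire_api_session` keyword; the SDK now constructs its own `requests.Session` wherever it needs to talk to the Logfire backend. A minimal sketch of the user-facing difference (nothing here beyond the removed keyword is prescribed by this PR):

```python
import logfire

# Before this PR, an HTTP session could be injected for Logfire API calls:
#   import requests
#   logfire.configure(logfire_api_session=requests.Session())

# After this PR the keyword is gone; passing it raises a TypeError,
# and configuration is simply:
logfire.configure()
```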
18 changes: 2 additions & 16 deletions logfire/_internal/config.py
@@ -154,7 +154,6 @@ def configure(
default_span_processor: Callable[[SpanExporter], SpanProcessor] | None = None,
metric_readers: None = None,
additional_metric_readers: Sequence[MetricReader] | None = None,
- logfire_api_session: requests.Session | None = None,
pydantic_plugin: PydanticPlugin | None = None,
fast_shutdown: bool = False,
scrubbing_patterns: Sequence[str] | None = None,
@@ -203,7 +202,6 @@ def configure(
which exports metrics to Logfire's API.
Ensure that `preferred_temporality=logfire.METRICS_PREFERRED_TEMPORALITY`
is passed to the constructor of metric readers/exporters that accept the `preferred_temporality` argument.
- logfire_api_session: HTTP client session used to communicate with the Logfire API.
pydantic_plugin: Configuration for the Pydantic plugin. If `None` uses the `LOGFIRE_PYDANTIC_PLUGIN_*` environment
variables, otherwise defaults to `PydanticPlugin(record='off')`.
fast_shutdown: Whether to shut down exporters and providers quickly, mostly used for tests. Defaults to `False`.
@@ -250,7 +248,6 @@ def configure(
additional_span_processors=additional_span_processors,
default_span_processor=default_span_processor,
additional_metric_readers=additional_metric_readers,
- logfire_api_session=logfire_api_session,
pydantic_plugin=pydantic_plugin,
fast_shutdown=fast_shutdown,
scrubbing_patterns=scrubbing_patterns,
@@ -312,9 +309,6 @@ class _LogfireConfigData:
id_generator: IdGenerator
"""The ID generator to use"""

- logfire_api_session: requests.Session
- """The session to use when checking the Logfire backend"""
-
ns_timestamp_generator: Callable[[], int]
"""The nanosecond timestamp generator to use"""

@@ -364,7 +358,6 @@ def _load_configuration(
additional_span_processors: Sequence[SpanProcessor] | None,
default_span_processor: Callable[[SpanExporter], SpanProcessor] | None,
additional_metric_readers: Sequence[MetricReader] | None,
- logfire_api_session: requests.Session | None,
pydantic_plugin: PydanticPlugin | None,
fast_shutdown: bool,
scrubbing_patterns: Sequence[str] | None,
@@ -438,7 +431,6 @@ def _load_configuration(
self.additional_span_processors = additional_span_processors
self.default_span_processor = default_span_processor or _get_default_span_processor
self.additional_metric_readers = additional_metric_readers
- self.logfire_api_session = logfire_api_session or requests.Session()
if self.service_version is None:
try:
self.service_version = get_git_revision_hash()
@@ -468,7 +460,6 @@ def __init__(
additional_span_processors: Sequence[SpanProcessor] | None = None,
default_span_processor: Callable[[SpanExporter], SpanProcessor] | None = None,
additional_metric_readers: Sequence[MetricReader] | None = None,
- logfire_api_session: requests.Session | None = None,
pydantic_plugin: PydanticPlugin | None = None,
fast_shutdown: bool = False,
scrubbing_patterns: Sequence[str] | None = None,
@@ -502,7 +493,6 @@ def __init__(
additional_span_processors=additional_span_processors,
default_span_processor=default_span_processor,
additional_metric_readers=additional_metric_readers,
- logfire_api_session=logfire_api_session,
pydantic_plugin=pydantic_plugin,
fast_shutdown=fast_shutdown,
scrubbing_patterns=scrubbing_patterns,
@@ -540,7 +530,6 @@ def configure(
additional_span_processors: Sequence[SpanProcessor] | None,
default_span_processor: Callable[[SpanExporter], SpanProcessor] | None,
additional_metric_readers: Sequence[MetricReader] | None,
- logfire_api_session: requests.Session | None,
pydantic_plugin: PydanticPlugin | None,
fast_shutdown: bool,
scrubbing_patterns: Sequence[str] | None,
@@ -568,7 +557,6 @@ def configure(
additional_span_processors,
default_span_processor,
additional_metric_readers,
- logfire_api_session,
pydantic_plugin,
fast_shutdown,
scrubbing_patterns,
@@ -679,7 +667,7 @@ def add_span_processor(span_processor: SpanProcessor) -> None:
credentials = LogfireCredentials.initialize_project(
logfire_api_url=self.base_url,
project_name=self.project_name,
- session=self.logfire_api_session,
+ session=requests.Session(),
)
credentials.write_creds_file(self.data_dir)
self.token = credentials.token
@@ -698,8 +686,6 @@ def check_token():
thread.start()

headers = {'User-Agent': f'logfire/{VERSION}', 'Authorization': self.token}
- self.logfire_api_session.headers.update(headers)
-
session = OTLPExporterHttpSession(max_body_size=OTLP_MAX_BODY_SIZE)
session.headers.update(headers)
otel_traces_exporter_env = os.getenv(OTEL_TRACES_EXPORTER)
@@ -803,7 +789,7 @@ def meter(self) -> metrics.Meter:
return self.get_meter_provider().get_meter('logfire', VERSION)

def _initialize_credentials_from_token(self, token: str) -> LogfireCredentials | None:
- return LogfireCredentials.from_token(token, self.logfire_api_session, self.base_url)
+ return LogfireCredentials.from_token(token, requests.Session(), self.base_url)


def _get_default_span_processor(exporter: SpanExporter) -> SpanProcessor:
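Internally, the pattern in the diff above is that the shared `self.logfire_api_session` attribute is dropped and a fresh `requests.Session()` is built at each call site that talks to the Logfire API (project initialization and token validation). A rough, hedged sketch of that shape, with a hypothetical helper and endpoint rather than the library's actual code:

```python
import requests

# Old shape (removed): one long-lived session stored on the config object.
#   self.logfire_api_session = logfire_api_session or requests.Session()
#   LogfireCredentials.from_token(token, self.logfire_api_session, self.base_url)
#
# New shape (this PR): a throwaway session per backend call.
#   LogfireCredentials.from_token(token, requests.Session(), self.base_url)

def fetch_project_info(base_url: str, token: str) -> dict:
    """Hypothetical backend call illustrating the per-call session pattern."""
    with requests.Session() as session:
        session.headers.update({"Authorization": token})
        response = session.get(f"{base_url}/v1/info")  # illustrative endpoint, not from this PR
        response.raise_for_status()
        return response.json()
```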
2 changes: 1 addition & 1 deletion tests/test_configure.py
@@ -794,7 +794,7 @@ def normalize(s: dict[str, Any]) -> dict[str, Any]:
for value in s.values():
assert not dataclasses.is_dataclass(value)
# These values get deepcopied by dataclasses.asdict, so we can't compare them directly
- return {k: v for k, v in s.items() if k not in ['logfire_api_session', 'id_generator']}
+ return {k: v for k, v in s.items() if k not in ['id_generator']}

assert normalize(serialized) == normalize(serialized2)

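The test change mirrors the removal: the serialized config no longer contains a `logfire_api_session` key, so only `id_generator` still needs to be excluded before comparison. As a quick sanity check (not part of this PR's test suite), one could confirm the parameter is gone from the public signature:

```python
import inspect

import logfire

# Not part of this PR's tests; just a local check that the keyword was removed.
assert "logfire_api_session" not in inspect.signature(logfire.configure).parameters
```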