From 8c9c0845abd02a0c82ad7e6ddbff128bb1343795 Mon Sep 17 00:00:00 2001 From: cjumel Date: Thu, 13 Nov 2025 14:44:03 +0100 Subject: [PATCH 1/3] chore: enable all lint rules --- .github/pull_request_template.md | 7 +- Makefile | 6 +- examples/1_search_results_search.py | 4 +- examples/2_sourced_answer_search.py | 4 +- examples/3_structured_search.py | 4 +- examples/4_asynchronous_search.py | 4 +- examples/5_fetch.py | 4 +- pyproject.toml | 9 ++- src/linkup/_client.py | 104 ++++++++++++++-------------- src/linkup/_types.py | 2 + tests/unit/client_test.py | 58 ++++++++++------ 11 files changed, 116 insertions(+), 90 deletions(-) diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index f827401..c94a92f 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -7,6 +7,7 @@ - [ ] I have installed pre-commit on this project (for instance with the `make install-dev` command) - **before** creating any commit, or I have run successfully the `make lint` command on my changes -- [ ] I have run successfully the `make test` command on my changes -- [ ] I have updated the `README.md` if my changes affected it + **before** creating any commit, or I have run successfully the `make format-lint` command on my + changes. +- [ ] I have run successfully the `make test` command on my changes. +- [ ] I have updated the `README.md` if my changes affected it. diff --git a/Makefile b/Makefile index e01d9c0..9c389d7 100644 --- a/Makefile +++ b/Makefile @@ -4,8 +4,12 @@ install-dev: @$(MAKE) install uv run pre-commit install -lint: +format-lint: SKIP=no-commit-to-branch uv run pre-commit run --all-files +format-lint-unsafe: + uv run --with ruff ruff check --fix --unsafe-fixes . + @echo + @$(MAKE) format-lint test-mypy: @# Avoid running mypy on the whole directory ("./") to avoid potential conflicts with files with the same name (e.g. between different types of tests) diff --git a/examples/1_search_results_search.py b/examples/1_search_results_search.py index bf912dc..de5b270 100644 --- a/examples/1_search_results_search.py +++ b/examples/1_search_results_search.py @@ -7,8 +7,8 @@ fill the missing values, or pass a Linkup API key to the `LinkupClient` initialization. """ +import rich from dotenv import load_dotenv -from rich import print from linkup import LinkupClient @@ -20,4 +20,4 @@ depth="standard", # or "deep" output_type="searchResults", ) -print(response) +rich.print(response) diff --git a/examples/2_sourced_answer_search.py b/examples/2_sourced_answer_search.py index e1ee3b8..e70dbdf 100644 --- a/examples/2_sourced_answer_search.py +++ b/examples/2_sourced_answer_search.py @@ -8,8 +8,8 @@ fill the missing values, or pass a Linkup API key to the `LinkupClient` initialization. """ +import rich from dotenv import load_dotenv -from rich import print from linkup import LinkupClient @@ -22,4 +22,4 @@ output_type="sourcedAnswer", include_inline_citations=False, ) -print(response) +rich.print(response) diff --git a/examples/3_structured_search.py b/examples/3_structured_search.py index 3ca9f23..cc69c93 100644 --- a/examples/3_structured_search.py +++ b/examples/3_structured_search.py @@ -8,9 +8,9 @@ fill the missing values, or pass a Linkup API key to the `LinkupClient` initialization. 
""" +import rich from dotenv import load_dotenv from pydantic import BaseModel, Field -from rich import print from linkup import LinkupClient @@ -34,4 +34,4 @@ class Events(BaseModel): structured_output_schema=Events, # or json.dumps(Events.model_json_schema()) include_sources=False, ) -print(response) +rich.print(response) diff --git a/examples/4_asynchronous_search.py b/examples/4_asynchronous_search.py index 32b909e..88412b0 100644 --- a/examples/4_asynchronous_search.py +++ b/examples/4_asynchronous_search.py @@ -11,8 +11,8 @@ import asyncio import time +import rich from dotenv import load_dotenv -from rich import print from linkup import LinkupClient @@ -35,7 +35,7 @@ async def search(idx: int, query: str) -> None: output_type="searchResults", # or "sourcedAnswer" or "structured" ) print(f"{idx + 1}: {time.time() - t0:.3f}s") - print(response) + rich.print(response) print("-" * 100) diff --git a/examples/5_fetch.py b/examples/5_fetch.py index 52fbecf..e35fba1 100644 --- a/examples/5_fetch.py +++ b/examples/5_fetch.py @@ -6,8 +6,8 @@ fill the missing values, or pass a Linkup API key to the `LinkupClient` initialization. """ +import rich from dotenv import load_dotenv -from rich import print from linkup import LinkupClient @@ -17,4 +17,4 @@ response = client.fetch( url="https://docs.linkup.so", ) -print(response) +rich.print(response) diff --git a/pyproject.toml b/pyproject.toml index 8a7da00..94535b2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,23 +57,22 @@ target-version = "py39" [tool.ruff.lint] extend-ignore = ["D107"] pydocstyle = { convention = "google" } -# TODO: enable commented out rules and fix errors select = [ - # "A", # flake8-builtins: avoid shadowing built-in names - # "ANN", # flake8-annotations: check for missing type annotations + "A", # flake8-builtins: avoid shadowing built-in names + "ANN", # flake8-annotations: check for missing type annotations "ASYNC", # flake8-async: enforce best practices for async code "B", # flake8-bugbear: find likely bugs and design problems in your program "C4", # flake8-comprehensions: enforce best practices for list/set/dict comprehensions "D", # pydocstyle: check compliance with docstring conventions "E", # pycodestyle errors: check for PEP 8 style convention errors "F", # pyflakes: check for Python source file errors - # "FA", # flake8-future-annotations: enforce usage of future annotations when relevant + "FA", # flake8-future-annotations: enforce usage of future annotations when relevant "I", # isort: enforce import sorting "ICN", # flake8-import-conventions: enforce general import conventions "ISC", # flake8-implicit-str-concat: check for invalid implicit or explicit string concatenation "N", # pep8-naming: check for naming convention violations "PERF", # perflint: check for performance anti-patterns - # "PT", # flake8-pytest-style: check common style issues and inconsistencies in pytest-based tests + "PT", # flake8-pytest-style: check common style issues and inconsistencies in pytest-based tests "PTH", # flake8-use-pathlib: enforce usage of pathlib for path manipulations instead of os.path "Q", # flake8-quotes: enforce consistent string quote usage "RET", # flake8-return: enforce best practices for return statements diff --git a/src/linkup/_client.py b/src/linkup/_client.py index 638c691..d6af42e 100644 --- a/src/linkup/_client.py +++ b/src/linkup/_client.py @@ -1,9 +1,11 @@ """Linkup client, the entrypoint for Linkup functions.""" +from __future__ import annotations + import json import os -from datetime import date 
-from typing import Any, Literal, Optional, Union +from datetime import date # noqa: TC003 (`date` is used in test mocks) +from typing import Any, Literal import httpx from pydantic import BaseModel, SecretStr @@ -42,7 +44,7 @@ class LinkupClient: def __init__( self, - api_key: Union[str, SecretStr, None] = None, + api_key: str | SecretStr | None = None, base_url: str = "https://api.linkup.so/v1", ) -> None: if api_key is None: @@ -60,15 +62,15 @@ def search( query: str, depth: Literal["standard", "deep"], output_type: Literal["searchResults", "sourcedAnswer", "structured"], - structured_output_schema: Union[type[BaseModel], str, None] = None, - include_images: Optional[bool] = None, - from_date: Optional[date] = None, - to_date: Optional[date] = None, - exclude_domains: Optional[list[str]] = None, - include_domains: Optional[list[str]] = None, - include_inline_citations: Optional[bool] = None, - include_sources: Optional[bool] = None, - ) -> Any: + structured_output_schema: type[BaseModel] | str | None = None, + include_images: bool | None = None, + from_date: date | None = None, + to_date: date | None = None, + exclude_domains: list[str] | None = None, + include_domains: list[str] | None = None, + include_inline_citations: bool | None = None, + include_sources: bool | None = None, + ) -> Any: # noqa: ANN401 """Perform a web search using the Linkup API `search` endpoint. All optional parameters will default to the Linkup API defaults when not provided. The @@ -117,7 +119,7 @@ def search( LinkupInsufficientCreditError: If you have run out of credit. LinkupNoResultError: If the search query did not yield any result. """ - params: dict[str, Union[str, bool, list[str]]] = self._get_search_params( + params: dict[str, str | bool | list[str]] = self._get_search_params( query=query, depth=depth, output_type=output_type, @@ -152,15 +154,15 @@ async def async_search( query: str, depth: Literal["standard", "deep"], output_type: Literal["searchResults", "sourcedAnswer", "structured"], - structured_output_schema: Union[type[BaseModel], str, None] = None, - include_images: Optional[bool] = None, - from_date: Optional[date] = None, - to_date: Optional[date] = None, - exclude_domains: Optional[list[str]] = None, - include_domains: Optional[list[str]] = None, - include_inline_citations: Optional[bool] = None, - include_sources: Optional[bool] = None, - ) -> Any: + structured_output_schema: type[BaseModel] | str | None = None, + include_images: bool | None = None, + from_date: date | None = None, + to_date: date | None = None, + exclude_domains: list[str] | None = None, + include_domains: list[str] | None = None, + include_inline_citations: bool | None = None, + include_sources: bool | None = None, + ) -> Any: # noqa: ANN401 """Asynchronously perform a web search using the Linkup API `search` endpoint. All optional parameters will default to the Linkup API defaults when not provided. The @@ -209,7 +211,7 @@ async def async_search( LinkupInsufficientCreditError: If you have run out of credit. LinkupNoResultError: If the search query did not yield any result. 
""" - params: dict[str, Union[str, bool, list[str]]] = self._get_search_params( + params: dict[str, str | bool | list[str]] = self._get_search_params( query=query, depth=depth, output_type=output_type, @@ -242,9 +244,9 @@ async def async_search( def fetch( self, url: str, - include_raw_html: Optional[bool] = None, - render_js: Optional[bool] = None, - extract_images: Optional[bool] = None, + include_raw_html: bool | None = None, + render_js: bool | None = None, + extract_images: bool | None = None, ) -> LinkupFetchResponse: """Fetch the content of a web page using the Linkup API `fetch` endpoint. @@ -266,7 +268,7 @@ def fetch( LinkupInvalidRequestError: If the provided URL is not valid. LinkupFailedFetchError: If the provided URL is not found or can't be fetched. """ - params: dict[str, Union[str, bool]] = self._get_fetch_params( + params: dict[str, str | bool] = self._get_fetch_params( url=url, include_raw_html=include_raw_html, render_js=render_js, @@ -287,9 +289,9 @@ def fetch( async def async_fetch( self, url: str, - include_raw_html: Optional[bool] = None, - render_js: Optional[bool] = None, - extract_images: Optional[bool] = None, + include_raw_html: bool | None = None, + render_js: bool | None = None, + extract_images: bool | None = None, ) -> LinkupFetchResponse: """Asynchronously fetch the content of a web page using the Linkup API `fetch` endpoint. @@ -311,7 +313,7 @@ async def async_fetch( LinkupInvalidRequestError: If the provided URL is not valid. LinkupFailedFetchError: If the provided URL is not found or can't be fetched. """ - params: dict[str, Union[str, bool]] = self._get_fetch_params( + params: dict[str, str | bool] = self._get_fetch_params( url=url, include_raw_html=include_raw_html, render_js=render_js, @@ -342,7 +344,7 @@ def _request( self, method: str, url: str, - **kwargs: Any, + **kwargs: Any, # noqa: ANN401 ) -> httpx.Response: # pragma: no cover with httpx.Client(base_url=self._base_url, headers=self._headers()) as client: return client.request( @@ -355,7 +357,7 @@ async def _async_request( self, method: str, url: str, - **kwargs: Any, + **kwargs: Any, # noqa: ANN401 ) -> httpx.Response: # pragma: no cover async with httpx.AsyncClient(base_url=self._base_url, headers=self._headers()) as client: return await client.request( @@ -442,16 +444,16 @@ def _get_search_params( query: str, depth: Literal["standard", "deep"], output_type: Literal["searchResults", "sourcedAnswer", "structured"], - structured_output_schema: Union[type[BaseModel], str, None], - include_images: Optional[bool], - from_date: Optional[date], - to_date: Optional[date], - exclude_domains: Optional[list[str]], - include_domains: Optional[list[str]], - include_inline_citations: Optional[bool], - include_sources: Optional[bool], - ) -> dict[str, Union[str, bool, list[str]]]: - params: dict[str, Union[str, bool, list[str]]] = { + structured_output_schema: type[BaseModel] | str | None, + include_images: bool | None, + from_date: date | None, + to_date: date | None, + exclude_domains: list[str] | None, + include_domains: list[str] | None, + include_inline_citations: bool | None, + include_sources: bool | None, + ) -> dict[str, str | bool | list[str]]: + params: dict[str, str | bool | list[str]] = { "q": query, "depth": depth, "outputType": output_type, @@ -487,11 +489,11 @@ def _get_search_params( def _get_fetch_params( self, url: str, - include_raw_html: Optional[bool], - render_js: Optional[bool], - extract_images: Optional[bool], - ) -> dict[str, Union[str, bool]]: - params: dict[str, Union[str, 
bool]] = { + include_raw_html: bool | None, + render_js: bool | None, + extract_images: bool | None, + ) -> dict[str, str | bool]: + params: dict[str, str | bool] = { "url": url, } if include_raw_html is not None: @@ -506,9 +508,9 @@ def _parse_search_response( self, response: httpx.Response, output_type: Literal["searchResults", "sourcedAnswer", "structured"], - structured_output_schema: Union[type[BaseModel], str, None], - include_sources: Optional[bool], - ) -> Any: + structured_output_schema: type[BaseModel] | str | None, + include_sources: bool | None, + ) -> Any: # noqa: ANN401 response_data: Any = response.json() if output_type == "searchResults": return LinkupSearchResults.model_validate(response_data) diff --git a/src/linkup/_types.py b/src/linkup/_types.py index c4a8621..ee8911a 100644 --- a/src/linkup/_types.py +++ b/src/linkup/_types.py @@ -1,5 +1,7 @@ """Input and output types for Linkup functions.""" +# ruff: noqa: FA100 (pydantic models don't play well with future annotations) + from typing import Any, Literal, Optional, Union from pydantic import BaseModel, ConfigDict, Field diff --git a/tests/unit/client_test.py b/tests/unit/client_test.py index 98c19eb..331f28f 100644 --- a/tests/unit/client_test.py +++ b/tests/unit/client_test.py @@ -290,8 +290,12 @@ class Company(BaseModel): @pytest.mark.parametrize( - "search_kwargs, expected_request_params, mock_request_response_content, " - "expected_search_response", + ( + "search_kwargs", + "expected_request_params", + "mock_request_response_content", + "expected_search_response", + ), test_search_parameters, ) def test_search( @@ -300,7 +304,7 @@ def test_search( search_kwargs: dict[str, Any], expected_request_params: dict[str, Any], mock_request_response_content: bytes, - expected_search_response: Any, + expected_search_response: Any, # noqa: ANN401 ) -> None: mocker.patch("linkup._client.date").today.return_value = date(2000, 1, 1) request_mock = mocker.patch( @@ -323,8 +327,12 @@ def test_search( @pytest.mark.asyncio @pytest.mark.parametrize( - "search_kwargs, expected_request_params, mock_request_response_content, " - "expected_search_response", + ( + "search_kwargs", + "expected_request_params", + "mock_request_response_content", + "expected_search_response", + ), test_search_parameters, ) async def test_async_search( @@ -333,7 +341,7 @@ async def test_async_search( search_kwargs: dict[str, Any], expected_request_params: dict[str, Any], mock_request_response_content: bytes, - expected_search_response: Any, + expected_search_response: Any, # noqa: ANN401 ) -> None: mocker.patch("linkup._client.date").today.return_value = date(2000, 1, 1) request_mock = mocker.patch( @@ -468,7 +476,7 @@ async def test_async_search( @pytest.mark.parametrize( - "mock_request_response_status_code, mock_request_response_content, expected_exception", + ("mock_request_response_status_code", "mock_request_response_content", "expected_exception"), test_search_error_parameters, ) def test_search_error( @@ -476,7 +484,7 @@ def test_search_error( client: LinkupClient, mock_request_response_status_code: int, mock_request_response_content: bytes, - expected_exception: Any, + expected_exception: type[Exception], ) -> None: request_mock = mocker.patch( "linkup._client.LinkupClient._request", @@ -493,7 +501,7 @@ def test_search_error( @pytest.mark.asyncio @pytest.mark.parametrize( - "mock_request_response_status_code, mock_request_response_content, expected_exception", + ("mock_request_response_status_code", "mock_request_response_content", 
"expected_exception"), test_search_error_parameters, ) async def test_async_search_error( @@ -501,7 +509,7 @@ async def test_async_search_error( client: LinkupClient, mock_request_response_status_code: int, mock_request_response_content: bytes, - expected_exception: Any, + expected_exception: type[Exception], ) -> None: request_mock = mocker.patch( "linkup._client.LinkupClient._async_request", @@ -533,7 +541,12 @@ async def test_async_search_error( @pytest.mark.parametrize( - "fetch_kwargs, expected_request_params, mock_request_response_content, expected_fetch_response", + ( + "fetch_kwargs", + "expected_request_params", + "mock_request_response_content", + "expected_fetch_response", + ), test_fetch_parameters, ) def test_fetch( @@ -542,7 +555,7 @@ def test_fetch( fetch_kwargs: dict[str, Any], expected_request_params: dict[str, Any], mock_request_response_content: bytes, - expected_fetch_response: Any, + expected_fetch_response: LinkupFetchResponse, ) -> None: request_mock = mocker.patch( "linkup._client.LinkupClient._request", @@ -552,7 +565,7 @@ def test_fetch( ), ) - fetch_response: Any = client.fetch(**fetch_kwargs) + fetch_response: LinkupFetchResponse = client.fetch(**fetch_kwargs) request_mock.assert_called_once_with( method="POST", url="/fetch", @@ -564,7 +577,12 @@ def test_fetch( @pytest.mark.asyncio @pytest.mark.parametrize( - "fetch_kwargs, expected_request_params, mock_request_response_content, expected_fetch_response", + ( + "fetch_kwargs", + "expected_request_params", + "mock_request_response_content", + "expected_fetch_response", + ), test_fetch_parameters, ) async def test_async_fetch( @@ -573,7 +591,7 @@ async def test_async_fetch( fetch_kwargs: dict[str, Any], expected_request_params: dict[str, Any], mock_request_response_content: bytes, - expected_fetch_response: Any, + expected_fetch_response: LinkupFetchResponse, ) -> None: request_mock = mocker.patch( "linkup._client.LinkupClient._async_request", @@ -583,7 +601,7 @@ async def test_async_fetch( ), ) - fetch_response: Any = await client.async_fetch(**fetch_kwargs) + fetch_response: LinkupFetchResponse = await client.async_fetch(**fetch_kwargs) request_mock.assert_called_once_with( method="POST", url="/fetch", @@ -629,7 +647,7 @@ async def test_async_fetch( @pytest.mark.parametrize( - "mock_request_response_status_code, mock_request_response_content, expected_exception", + ("mock_request_response_status_code", "mock_request_response_content", "expected_exception"), test_fetch_error_parameters, ) def test_fetch_error( @@ -637,7 +655,7 @@ def test_fetch_error( client: LinkupClient, mock_request_response_status_code: int, mock_request_response_content: bytes, - expected_exception: Any, + expected_exception: type[Exception], ) -> None: request_mock = mocker.patch( "linkup._client.LinkupClient._request", @@ -654,7 +672,7 @@ def test_fetch_error( @pytest.mark.asyncio @pytest.mark.parametrize( - "mock_request_response_status_code, mock_request_response_content, expected_exception", + ("mock_request_response_status_code", "mock_request_response_content", "expected_exception"), test_fetch_error_parameters, ) async def test_async_fetch_error( @@ -662,7 +680,7 @@ async def test_async_fetch_error( client: LinkupClient, mock_request_response_status_code: int, mock_request_response_content: bytes, - expected_exception: Any, + expected_exception: type[Exception], ) -> None: request_mock = mocker.patch( "linkup._client.LinkupClient._async_request", From 1bd307597df06372a0b20c3f08def0ba3fa74a53 Mon Sep 17 00:00:00 2001 From: cjumel 
Date: Thu, 13 Nov 2025 15:08:11 +0100 Subject: [PATCH 2/3] chore: complete test coverage --- tests/unit/client_test.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/tests/unit/client_test.py b/tests/unit/client_test.py index 331f28f..81c3890 100644 --- a/tests/unit/client_test.py +++ b/tests/unit/client_test.py @@ -532,8 +532,18 @@ async def test_async_search_error( LinkupFetchResponse(markdown="Some web page content", raw_html=None), ), ( - {"url": "https://example.com", "include_raw_html": True, "render_js": True}, - {"url": "https://example.com", "includeRawHtml": True, "renderJs": True}, + { + "url": "https://example.com", + "include_raw_html": True, + "render_js": True, + "extract_images": True, + }, + { + "url": "https://example.com", + "includeRawHtml": True, + "renderJs": True, + "extractImages": True, + }, b'{"markdown": "#Some web page content", "rawHtml": "..."}', LinkupFetchResponse(markdown="#Some web page content", raw_html="..."), ), From 797c7e63164b1c7bb0cf2273fa540ad6f053b3f8 Mon Sep 17 00:00:00 2001 From: cjumel Date: Thu, 13 Nov 2025 15:21:40 +0100 Subject: [PATCH 3/3] feat: max_results parameter --- src/linkup/_client.py | 17 +++++++++--- tests/unit/client_test.py | 56 +++------------------------------------ 2 files changed, 17 insertions(+), 56 deletions(-) diff --git a/src/linkup/_client.py b/src/linkup/_client.py index d6af42e..3c89640 100644 --- a/src/linkup/_client.py +++ b/src/linkup/_client.py @@ -68,6 +68,7 @@ def search( to_date: date | None = None, exclude_domains: list[str] | None = None, include_domains: list[str] | None = None, + max_results: int | None = None, include_inline_citations: bool | None = None, include_sources: bool | None = None, ) -> Any: # noqa: ANN401 @@ -95,6 +96,7 @@ def search( search results will not be filtered by date. exclude_domains: If you want to exclude specific domains from your search. include_domains: If you want the search to only return results from certain domains. + max_results: The maximum number of results to return. include_inline_citations: If output_type is "sourcedAnswer", indicate whether the answer should include inline citations. include_sources: If output_type is "structured", indicate whether the answer should @@ -119,7 +121,7 @@ def search( LinkupInsufficientCreditError: If you have run out of credit. LinkupNoResultError: If the search query did not yield any result. """ - params: dict[str, str | bool | list[str]] = self._get_search_params( + params: dict[str, str | bool | int | list[str]] = self._get_search_params( query=query, depth=depth, output_type=output_type, @@ -129,6 +131,7 @@ def search( to_date=to_date, exclude_domains=exclude_domains, include_domains=include_domains, + max_results=max_results, include_inline_citations=include_inline_citations, include_sources=include_sources, ) @@ -160,6 +163,7 @@ async def async_search( to_date: date | None = None, exclude_domains: list[str] | None = None, include_domains: list[str] | None = None, + max_results: int | None = None, include_inline_citations: bool | None = None, include_sources: bool | None = None, ) -> Any: # noqa: ANN401 @@ -187,6 +191,7 @@ async def async_search( search results will not be filtered by date. exclude_domains: If you want to exclude specific domains from your search. include_domains: If you want the search to only return results from certain domains. + max_results: The maximum number of results to return. 
include_inline_citations: If output_type is "sourcedAnswer", indicate whether the answer should include inline citations. include_sources: If output_type is "structured", indicate whether the answer should @@ -211,7 +216,7 @@ async def async_search( LinkupInsufficientCreditError: If you have run out of credit. LinkupNoResultError: If the search query did not yield any result. """ - params: dict[str, str | bool | list[str]] = self._get_search_params( + params: dict[str, str | bool | int | list[str]] = self._get_search_params( query=query, depth=depth, output_type=output_type, @@ -221,6 +226,7 @@ async def async_search( to_date=to_date, exclude_domains=exclude_domains, include_domains=include_domains, + max_results=max_results, include_inline_citations=include_inline_citations, include_sources=include_sources, ) @@ -450,10 +456,11 @@ def _get_search_params( to_date: date | None, exclude_domains: list[str] | None, include_domains: list[str] | None, + max_results: int | None, include_inline_citations: bool | None, include_sources: bool | None, - ) -> dict[str, str | bool | list[str]]: - params: dict[str, str | bool | list[str]] = { + ) -> dict[str, str | bool | int | list[str]]: + params: dict[str, str | bool | int | list[str]] = { "q": query, "depth": depth, "outputType": output_type, @@ -479,6 +486,8 @@ def _get_search_params( params["excludeDomains"] = exclude_domains if include_domains is not None: params["includeDomains"] = include_domains + if max_results is not None: + params["maxResults"] = max_results if include_inline_citations is not None: params["includeInlineCitations"] = include_inline_citations if include_sources is not None: diff --git a/tests/unit/client_test.py b/tests/unit/client_test.py index 81c3890..f8c3c54 100644 --- a/tests/unit/client_test.py +++ b/tests/unit/client_test.py @@ -76,7 +76,9 @@ class Company(BaseModel): "to_date": date(2023, 12, 31), "exclude_domains": ["excluded.com"], "include_domains": ["example.com", "example.org"], + "max_results": 10, "include_inline_citations": True, + "include_sources": True, }, { "q": "A long query.", @@ -87,7 +89,9 @@ class Company(BaseModel): "toDate": "2023-12-31", "excludeDomains": ["excluded.com"], "includeDomains": ["example.com", "example.org"], + "maxResults": 10, "includeInlineCitations": True, + "includeSources": True, }, b'{"results": []}', LinkupSearchResults(results=[]), @@ -234,58 +238,6 @@ class Company(BaseModel): ], ), ), - ( - { - "query": "query", - "depth": "standard", - "output_type": "structured", - "structured_output_schema": json.dumps(Company.model_json_schema()), - "include_sources": True, - }, - { - "q": "query", - "depth": "standard", - "outputType": "structured", - "structuredOutputSchema": json.dumps(Company.model_json_schema()), - "includeSources": True, - }, - b""" - { - "data": { - "name": "Linkup", - "founders_names": ["Philippe Mizrahi", "Denis Charrier", "Boris Toledano"], - "creation_date": "2024", - "website_url": "https://www.linkup.so/" - }, - "sources": [ - { - "type": "text", - "name": "foo", - "url": "https://foo.com", - "content": "lorem ipsum dolor sit amet" - }, - {"type": "image", "name": "bar", "url": "https://bar.com"} - ] - } - """, - LinkupSearchStructuredResponse( - data={ - "name": "Linkup", - "founders_names": ["Philippe Mizrahi", "Denis Charrier", "Boris Toledano"], - "creation_date": "2024", - "website_url": "https://www.linkup.so/", - }, - sources=[ - LinkupSearchTextResult( - type="text", - name="foo", - url="https://foo.com", - content="lorem ipsum dolor sit amet", - ), - 
LinkupSearchImageResult(type="image", name="bar", url="https://bar.com"), - ], - ), - ), ]