Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add support for Meilisearch v0.30.0 #325

Merged
merged 17 commits into from
Dec 5, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/testing.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ jobs:
- name: Install Dependencies
run: poetry install
- name: MeiliSearch (latest version) setup with Docker
run: docker run --rm -d -p 7700:7700 getmeili/meilisearch:v0.29.1 meilisearch --no-analytics --master-key=masterKey
run: docker run --rm -d -p 7700:7700 getmeili/meilisearch:v0.30.0 meilisearch --no-analytics --master-key=masterKey
- name: Test with pytest
run: |
poetry run pytest --cov=meilisearch_python_async --cov-report=xml
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,7 @@ See our [docs](https://meilisearch-python-async.paulsanders.dev) for the full do

## Compatibility with Meilisearch

This package only guarantees the compatibility with [version v0.29 of Meilisearch](https://github.com/meilisearch/MeiliSearch/releases/tag/v0.29.0).
This package only guarantees the compatibility with [version v0.30 of Meilisearch](https://github.com/meilisearch/MeiliSearch/releases/tag/v0.30.0).

## Contributing

Expand Down
2 changes: 1 addition & 1 deletion docs/index.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,4 +7,4 @@ Meilisearch itself and how to use it can be found in [here](https://docs.meilise

## Compatibility with Meilisearch

This package only guarantees the compatibility with [version v0.29 of Meilisearch](https://github.com/meilisearch/meilisearch/releases/tag/v0.29.0).
This package only guarantees the compatibility with [version v0.30 of Meilisearch](https://github.com/meilisearch/meilisearch/releases/tag/v0.30.0).
27 changes: 27 additions & 0 deletions meilisearch_python_async/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -570,3 +570,30 @@ async def health(self) -> Health:
"""
response = await self._http_requests.get("health")
return Health(**response.json())

async def swap_indexes(self, indexes: list[tuple[str, str]]) -> TaskInfo:
    """Swap the indexes in each provided pair.

    Args:

        indexes: A list of tuples, each tuple should contain the indexes to swap.

    Returns:

        The details of the task.

    Raises:

        MeilisearchCommunicationError: If there was an error communicating with the server.
        MeilisearchApiError: If the MeiliSearch API returned an error.

    Examples:

        >>> from meilisearch_python_async import Client
        >>> async with Client("http://localhost.com", "masterKey") as client:
        >>>     index = await client.swap_indexes([("index_a", "index_b")])
    """
    # The API expects each swap pair wrapped in an {"indexes": [...]} object.
    payload = [{"indexes": pair} for pair in indexes]
    response = await self._http_requests.post("swap-indexes", payload)
    return TaskInfo(**response.json())
6 changes: 6 additions & 0 deletions meilisearch_python_async/index.py
Original file line number Diff line number Diff line change
Expand Up @@ -296,6 +296,8 @@ async def search(
highlight_post_tag: str = "</em>",
crop_marker: str = "...",
matching_strategy: str = "all",
hits_per_page: int | None = None,
page: int | None = None,
) -> SearchResults:
"""Search the index.

Expand All @@ -320,6 +322,8 @@ async def search(
crop_marker: Marker to display when the number of words exceeds the `crop_length`.
Defaults to ...
matching_strategy: Specifies the matching strategy Meilisearch should use. Defaults to `all`.
hits_per_page: Sets the number of results returned per page.
page: Sets the specific results page to fetch.

Returns:

Expand Down Expand Up @@ -353,6 +357,8 @@ async def search(
"highlightPostTag": highlight_post_tag,
"cropMarker": crop_marker,
"matchingStrategy": matching_strategy,
"hitsPerPage": hits_per_page,
"page": page,
}
url = f"{self._base_url_with_uid}/search"
response = await self._http_requests.post(url, body=body)
Expand Down
10 changes: 7 additions & 3 deletions meilisearch_python_async/models/search.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,13 @@

class SearchResults(CamelBase):
hits: List[Dict[str, Any]]
offset: int
limit: int
estimated_total_hits: int
offset: Optional[int]
limit: Optional[int]
estimated_total_hits: Optional[int]
processing_time_ms: float
query: str
facet_distribution: Optional[Dict[str, Any]] = None
total_pages: Optional[int]
total_hits: Optional[int]
page: Optional[int]
hits_per_page: Optional[int]
8 changes: 5 additions & 3 deletions meilisearch_python_async/models/task.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from datetime import datetime
from typing import Any, Dict, Optional, Union
from typing import Any, Dict, List, Optional, Union

from camel_converter.pydantic_base import CamelBase
from pydantic import Field
Expand All @@ -10,10 +10,12 @@ class TaskId(CamelBase):


class TaskStatus(TaskId):
index_uid: Optional[str] = None
index_uids: Optional[List[str]] = None
status: str
task_type: Union[str, Dict[str, Any]] = Field(..., alias="type")
details: Optional[Dict[str, Any]]
error: Optional[Dict[str, Any]]
canceled_by: Optional[int]
duration: Optional[str]
enqueued_at: datetime
started_at: Optional[datetime]
Expand All @@ -22,7 +24,7 @@ class TaskStatus(TaskId):

class TaskInfo(CamelBase):
task_uid: int
index_uid: Optional[str] = None
index_uids: Optional[List[str]] = None
status: str
task_type: Union[str, Dict[str, Any]] = Field(..., alias="type")
enqueued_at: datetime
175 changes: 167 additions & 8 deletions meilisearch_python_async/task.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,25 +3,182 @@
from asyncio import sleep
from datetime import datetime
from typing import TYPE_CHECKING
from urllib.parse import urlencode

from httpx import AsyncClient

from meilisearch_python_async._http_requests import HttpRequests
from meilisearch_python_async.errors import MeiliSearchTimeoutError
from meilisearch_python_async.models.task import TaskStatus
from meilisearch_python_async.models.task import TaskInfo, TaskStatus

if TYPE_CHECKING:
from meilisearch_python_async.client import Client # pragma: no cover


async def get_tasks(client: AsyncClient | Client, index_id: str | None = None) -> list[TaskStatus]:
"""Get all tasks.
async def cancel_tasks(
    client: AsyncClient | Client,
    *,
    uids: list[str] | None = None,
    index_uids: list[str] | None = None,
    statuses: list[str] | None = None,
    types: list[str] | None = None,
    before_enqueued_at: datetime | None = None,
    after_enqueueda_at: datetime | None = None,
    before_started_at: datetime | None = None,
    after_finished_at: datetime | None = None,
    after_enqueued_at: datetime | None = None,
) -> TaskInfo:
    """Cancel a list of enqueued or processing tasks.

    Defaults to cancelling all enqueued and processing tasks.

    Args:

        client: An httpx AsyncClient or meilisearch_python_async Client instance.
        uids: A list of task UIDs to cancel.
        index_uids: A list of index UIDs for which to cancel tasks.
        statuses: A list of statuses to cancel.
        types: A list of types to cancel.
        before_enqueued_at: Cancel tasks that were enqueued before the specified date time.
        after_enqueueda_at: Deprecated misspelling kept for backward compatibility;
            prefer `after_enqueued_at`.
        before_started_at: Cancel tasks that were started before the specified date time.
        after_finished_at: Cancel tasks that were finished after the specified date time.
        after_enqueued_at: Cancel tasks that were enqueued after the specified date time.

    Returns:

        The details of the task.

    Raises:

        MeilisearchCommunicationError: If there was an error communicating with the server.
        MeilisearchApiError: If the MeiliSearch API returned an error.
        MeiliSearchTimeoutError: If the connection times out.

    Examples:

        >>> from meilisearch_python_async import Client
        >>> from meilisearch_python_async.task import cancel_tasks
        >>>
        >>> async with Client("http://localhost.com", "masterKey") as client:
        >>>     await cancel_tasks(client, uids=[1, 2])
    """
    # Prefer the correctly spelled keyword when both are supplied.
    enqueued_after = after_enqueued_at if after_enqueued_at is not None else after_enqueueda_at

    parameters = {}
    if uids:
        parameters["uids"] = ",".join([str(x) for x in uids])
    if index_uids:
        parameters["indexUids"] = ",".join([str(x) for x in index_uids])
    if statuses:
        parameters["statuses"] = ",".join(statuses)
    if types:
        parameters["types"] = ",".join(types)
    if before_enqueued_at:
        parameters["beforeEnqueuedAt"] = str(before_enqueued_at)
    if enqueued_after:
        parameters["afterEnqueuedAt"] = str(enqueued_after)
    if before_started_at:
        parameters["beforeStartedAt"] = str(before_started_at)
    if after_finished_at:
        parameters["afterFinishedAt"] = str(after_finished_at)

    if not parameters:
        # Cancel all tasks if no parameters provided
        parameters["statuses"] = "enqueued,processing"

    url = f"tasks/cancel?{urlencode(parameters)}"
    client_ = _get_client(client)
    response = await client_.post(url)

    return TaskInfo(**response.json())


async def delete_tasks(
    client: AsyncClient | Client,
    *,
    uids: list[str] | None = None,
    index_uids: list[str] | None = None,
    statuses: list[str] | None = None,
    types: list[str] | None = None,
    before_enqueued_at: datetime | None = None,
    after_enqueueda_at: datetime | None = None,
    before_started_at: datetime | None = None,
    after_finished_at: datetime | None = None,
    after_enqueued_at: datetime | None = None,
) -> TaskInfo:
    """Delete a list of tasks.

    Defaults to deleting all tasks.

    Args:

        client: An httpx AsyncClient or meilisearch_python_async Client instance.
        uids: A list of task UIDs to delete.
        index_uids: A list of index UIDs for which to delete tasks.
        statuses: A list of statuses to delete.
        types: A list of types to delete.
        before_enqueued_at: Delete tasks that were enqueued before the specified date time.
        after_enqueueda_at: Deprecated misspelling kept for backward compatibility;
            prefer `after_enqueued_at`.
        before_started_at: Delete tasks that were started before the specified date time.
        after_finished_at: Delete tasks that were finished after the specified date time.
        after_enqueued_at: Delete tasks that were enqueued after the specified date time.

    Returns:

        The details of the task.

    Raises:

        MeilisearchCommunicationError: If there was an error communicating with the server.
        MeilisearchApiError: If the MeiliSearch API returned an error.
        MeiliSearchTimeoutError: If the connection times out.

    Examples:

        >>> from meilisearch_python_async import Client
        >>> from meilisearch_python_async.task import delete_tasks
        >>>
        >>> async with Client("http://localhost.com", "masterKey") as client:
        >>>     await delete_tasks(client, uids=[1, 2])
    """
    # Prefer the correctly spelled keyword when both are supplied.
    enqueued_after = after_enqueued_at if after_enqueued_at is not None else after_enqueueda_at

    parameters = {}
    if uids:
        parameters["uids"] = ",".join([str(x) for x in uids])
    if index_uids:
        parameters["indexUids"] = ",".join([str(x) for x in index_uids])
    if statuses:
        parameters["statuses"] = ",".join(statuses)
    if types:
        parameters["types"] = ",".join(types)
    if before_enqueued_at:
        parameters["beforeEnqueuedAt"] = str(before_enqueued_at)
    if enqueued_after:
        parameters["afterEnqueuedAt"] = str(enqueued_after)
    if before_started_at:
        parameters["beforeStartedAt"] = str(before_started_at)
    if after_finished_at:
        parameters["afterFinishedAt"] = str(after_finished_at)

    if not parameters:
        # Delete all tasks if no parameters provided
        parameters["statuses"] = "canceled,enqueued,failed,processing,succeeded"

    url = f"tasks?{urlencode(parameters)}"
    client_ = _get_client(client)
    response = await client_.delete(url)

    return TaskInfo(**response.json())


async def get_tasks(
client: AsyncClient | Client,
*,
index_ids: list[str] | None = None,
types: str | list[str] | None = None,
) -> list[TaskStatus]:
"""Get multiple tasks.

Args:

client: An httpx AsyncClient or meilisearch_python_async Client instance.
index_id: The id of the index for which to get the tasks. If provided this will get the
tasks only for the specified index, if not all tasks will be returned. Default = None
index_ids: A list of index UIDs for which to get the tasks. If provided this will get the
tasks only for the specified indexes, if not all tasks will be returned. Default = None
types: Specify specific task types to retrieve. Default = None

Returns:

Expand All @@ -41,12 +198,14 @@ async def get_tasks(client: AsyncClient | Client, index_id: str | None = None) -
>>> async with Client("http://localhost.com", "masterKey") as client:
>>> await get_tasks(client)
"""
url = f"tasks?indexUid={index_id}" if index_id else "tasks"
url = f"tasks?indexUids={','.join(index_ids)}" if index_ids else "tasks"
if types:
formatted_types = ",".join(types) if isinstance(types, list) else types
url = f"{url}&types={formatted_types}" if "?" in url else f"{url}?types={formatted_types}"
client_ = _get_client(client)
response = await client_.get(url)
tasks = [TaskStatus(**x) for x in response.json()["results"]]

return tasks
return [TaskStatus(**x) for x in response.json()["results"]]


async def get_task(client: AsyncClient | Client, task_id: int) -> TaskStatus:
Expand Down
17 changes: 17 additions & 0 deletions tests/test_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -375,3 +375,20 @@ def mock_error(*args, **kwargs):
monkeypatch.setattr(AsyncClient, "post", mock_error)
with pytest.raises(MeiliSearchCommunicationError):
await test_client.create_index("some_index")


async def test_swap_indexes(test_client, empty_index):
    """Swapping a pair of indexes exchanges their documents and reports an indexSwap task."""
    index_a = await empty_index("index_a")
    index_b = await empty_index("index_b")
    task_a = await index_a.add_documents([{"id": 1, "title": "index_a"}])
    task_b = await index_b.add_documents([{"id": 1, "title": "index_b"}])
    await wait_for_task(index_a.http_client, task_a.task_uid)
    await wait_for_task(index_b.http_client, task_b.task_uid)
    # snake_case rename (was camelCase swapTask) for PEP 8 consistency with the rest of the tests
    swap_task = await test_client.swap_indexes([(index_a.uid, index_b.uid)])
    task = await wait_for_task(index_a.http_client, swap_task.task_uid)
    doc_a = await test_client.index(index_a.uid).get_document(1)
    doc_b = await test_client.index(index_b.uid).get_document(1)

    # After the swap, each index should hold the document originally added to the other.
    assert doc_a["title"] == index_b.uid
    assert doc_b["title"] == index_a.uid
    assert task.task_type == "indexSwap"
23 changes: 23 additions & 0 deletions tests/test_search.py
Original file line number Diff line number Diff line change
Expand Up @@ -208,6 +208,29 @@ async def test_custom_search_facet_filters_with_space(test_client):
assert response.hits[0]["title"] == "The Hobbit"


async def test_custom_search_params_with_pagination_parameters(index_with_documents):
    """Searching with hits_per_page/page returns the finite-pagination result fields."""
    index = await index_with_documents()
    result = await index.search("", hits_per_page=1, page=1)

    assert result.hits_per_page == 1
    assert result.page == 1
    assert len(result.hits) == 1
    assert result.total_pages is not None
    assert result.total_hits is not None


async def test_custom_search_params_with_pagination_parameters_at_zero(index_with_documents):
    """hits_per_page=0/page=0 yields no hits and switches off estimated_total_hits."""
    index = await index_with_documents()
    result = await index.search("", hits_per_page=0, page=0)

    assert result.hits_per_page == 0
    assert result.page == 0
    assert len(result.hits) == 0
    assert result.total_pages is not None
    assert result.total_hits is not None
    assert result.estimated_total_hits is None


async def test_custom_search_params_with_many_params(index_with_documents):
index = await index_with_documents()
update = await index.update_filterable_attributes(["genre"])
Expand Down
Loading