
Return task instances from task methods instead of dicts #750


Merged
2 commits merged on May 10, 2023
17 changes: 8 additions & 9 deletions meilisearch/client.py
@@ -16,7 +16,7 @@
from meilisearch.errors import MeilisearchError
from meilisearch.index import Index
from meilisearch.models.key import Key, KeysResults
from meilisearch.models.task import TaskInfo
from meilisearch.models.task import Task, TaskInfo, TaskResults
from meilisearch.task import TaskHandler


@@ -466,9 +466,7 @@ def swap_indexes(self, parameters: List[Dict[str, List[str]]]) -> TaskInfo:
"""
return TaskInfo(**self.http.post(self.config.paths.swap, parameters))

def get_tasks(
self, parameters: Optional[Dict[str, Any]] = None
) -> Dict[str, List[Dict[str, Any]]]:
def get_tasks(self, parameters: Optional[Dict[str, Any]] = None) -> TaskResults:
"""Get all tasks.

Parameters
@@ -479,7 +477,8 @@ def get_tasks(
Returns
-------
task:
Dictionary with limit, from, next and results containing a list of all enqueued, processing, succeeded or failed tasks.
TaskResults instance containing limit, from, next and results containing a list of all
enqueued, processing, succeeded or failed tasks.

Raises
------
@@ -488,7 +487,7 @@
"""
return self.task_handler.get_tasks(parameters=parameters)

def get_task(self, uid: int) -> Dict[str, Any]:
def get_task(self, uid: int) -> Task:
"""Get one task.

Parameters
@@ -499,7 +498,7 @@ def get_task(self, uid: int) -> Dict[str, Any]:
Returns
-------
task:
Dictionary containing information about the processed asynchronous task.
Task instance containing information about the processed asynchronous task.

Raises
------
@@ -553,7 +552,7 @@ def wait_for_task(
uid: int,
timeout_in_ms: int = 5000,
interval_in_ms: int = 50,
) -> Dict[str, Any]:
) -> Task:
"""Wait until Meilisearch processes a task until it fails or succeeds.

Parameters
@@ -568,7 +567,7 @@
Returns
-------
task:
Dictionary containing information about the processed asynchronous task.
Task instance containing information about the processed asynchronous task.

Raises
------
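As orientation for callers, here is a minimal sketch of the Client-level API after this change. The connection details, the "movies" index uid, and the printed values are assumptions for illustration, not part of the diff.

from meilisearch import Client

# Placeholder connection details; adjust for your own deployment.
client = Client("http://127.0.0.1:7700", "masterKey")

# create_index still returns a TaskInfo carrying the enqueued task's uid.
task_info = client.create_index(uid="movies")

# wait_for_task and get_task now return Task instances, so fields are read
# as attributes rather than dictionary keys.
task = client.wait_for_task(task_info.task_uid)
print(task.status)  # e.g. "succeeded"
print(task.type)    # e.g. "indexCreation"

# get_tasks now returns a TaskResults instance; its results attribute holds
# a list of Task objects.
tasks = client.get_tasks({"limit": 10})
for item in tasks.results:
    print(item.uid, item.status)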
9 changes: 3 additions & 6 deletions meilisearch/index.py
@@ -170,8 +170,7 @@ def get_tasks(self, parameters: Optional[Dict[str, Any]] = None) -> TaskResults:
else:
parameters = {"indexUids": [self.uid]}

tasks = self.task_handler.get_tasks(parameters=parameters)
return TaskResults(tasks)
return self.task_handler.get_tasks(parameters=parameters)

def get_task(self, uid: int) -> Task:
"""Get one task through the route of a specific index.
@@ -191,8 +190,7 @@ def get_task(self, uid: int) -> Task:
MeilisearchApiError
An error containing details about why Meilisearch can't process your request. Meilisearch error codes are described here: https://docs.meilisearch.com/errors/#meilisearch-errors
"""
task = self.task_handler.get_task(uid)
return Task(**task)
return self.task_handler.get_task(uid)

def wait_for_task(
self,
@@ -221,8 +219,7 @@ def wait_for_task(
MeilisearchTimeoutError
An error containing details about why Meilisearch can't process your request. Meilisearch error codes are described here: https://docs.meilisearch.com/errors/#meilisearch-errors
"""
task = self.task_handler.wait_for_task(uid, timeout_in_ms, interval_in_ms)
return Task(**task)
return self.task_handler.wait_for_task(uid, timeout_in_ms, interval_in_ms)

def get_stats(self) -> IndexStats:
"""Get stats of the index.
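From the caller's side, the Index-level effect of this refactor looks roughly like the sketch below; it reuses the client from the previous sketch, and the "movies" uid and sample document are assumptions.

# Index methods now pass the TaskHandler's Task / TaskResults objects
# straight through instead of re-wrapping raw dicts.
index = client.index("movies")

# add_documents returns a TaskInfo for the enqueued task.
task_info = index.add_documents([{"id": 1, "title": "Carol"}])

task = index.wait_for_task(task_info.task_uid)
assert task.status == "succeeded"

# get_tasks is scoped to this index's uid and returns TaskResults directly.
index_tasks = index.get_tasks()
print(len(index_tasks.results))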
27 changes: 14 additions & 13 deletions meilisearch/task.py
@@ -2,13 +2,13 @@

from datetime import datetime
from time import sleep
from typing import Any, Dict, List, Optional
from typing import Any, Dict, Optional
from urllib import parse

from meilisearch._httprequests import HttpRequests
from meilisearch.config import Config
from meilisearch.errors import MeilisearchTimeoutError
from meilisearch.models.task import TaskInfo
from meilisearch.models.task import Task, TaskInfo, TaskResults


class TaskHandler:
@@ -27,9 +27,7 @@ def __init__(self, config: Config):
self.config = config
self.http = HttpRequests(config)

def get_tasks(
self, parameters: Optional[Dict[str, Any]] = None
) -> Dict[str, List[Dict[str, Any]]]:
def get_tasks(self, parameters: Optional[Dict[str, Any]] = None) -> TaskResults:
"""Get all tasks.

Parameters
@@ -40,7 +38,8 @@ def get_tasks(
Returns
-------
task:
Dictionary with limit, from, next and results containing a list of all enqueued, processing, succeeded or failed tasks.
TaskResults instance containing limit, from, next and results containing a list of all
enqueued, processing, succeeded or failed tasks.

Raises
------
@@ -52,9 +51,10 @@ def get_tasks(
for param in parameters:
if isinstance(parameters[param], list):
parameters[param] = ",".join(parameters[param])
return self.http.get(f"{self.config.paths.task}?{parse.urlencode(parameters)}")
tasks = self.http.get(f"{self.config.paths.task}?{parse.urlencode(parameters)}")
return TaskResults(tasks)

def get_task(self, uid: int) -> Dict[str, Any]:
def get_task(self, uid: int) -> Task:
"""Get one task.

Parameters
@@ -65,14 +65,15 @@
Returns
-------
task:
Dictionary containing information about the status of the asynchronous task.
Task instance containing information about the processed asynchronous task.

Raises
------
MeilisearchApiError
An error containing details about why Meilisearch can't process your request. Meilisearch error codes are described here: https://docs.meilisearch.com/errors/#meilisearch-errors
"""
return self.http.get(f"{self.config.paths.task}/{uid}")
task = self.http.get(f"{self.config.paths.task}/{uid}")
return Task(**task)

def cancel_tasks(self, parameters: Optional[Dict[str, Any]] = None) -> TaskInfo:
"""Cancel a list of enqueued or processing tasks.
@@ -132,7 +133,7 @@ def wait_for_task(
uid: int,
timeout_in_ms: int = 5000,
interval_in_ms: int = 50,
) -> Dict[str, Any]:
) -> Task:
"""Wait until the task fails or succeeds in Meilisearch.

Parameters
@@ -147,7 +148,7 @@
Returns
-------
task:
Dictionary containing information about the processed asynchronous task.
Task instance containing information about the processed asynchronous task.

Raises
------
@@ -158,7 +159,7 @@
elapsed_time = 0.0
while elapsed_time < timeout_in_ms:
task = self.get_task(uid)
if task["status"] not in ("enqueued", "processing"):
if task.status not in ("enqueued", "processing"):
return task
sleep(interval_in_ms / 1000)
time_delta = datetime.now() - start_time
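The handler can also be used on its own; a short sketch under the same placeholder connection settings, showing that the conversion to Task and TaskResults now happens inside TaskHandler itself:

from meilisearch.config import Config
from meilisearch.task import TaskHandler

# Placeholder URL and API key.
handler = TaskHandler(Config("http://127.0.0.1:7700", "masterKey"))

# get_tasks wraps the raw response in TaskResults and get_task returns a Task,
# which is what lets wait_for_task poll task.status as an attribute.
tasks = handler.get_tasks({"statuses": ["succeeded"]})
if tasks.results:
    first = handler.get_task(tasks.results[0].uid)
    print(first.status, first.type)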
4 changes: 2 additions & 2 deletions tests/client/test_client_dumps.py
@@ -7,5 +7,5 @@ def test_dump_creation(client, index_with_documents):
dump = client.create_dump()
client.wait_for_task(dump.task_uid)
dump_status = client.get_task(dump.task_uid)
assert dump_status["status"] == "succeeded"
assert dump_status["type"] == "dumpCreation"
assert dump_status.status == "succeeded"
assert dump_status.type == "dumpCreation"
6 changes: 3 additions & 3 deletions tests/client/test_client_swap_meilisearch.py
@@ -26,8 +26,8 @@ def test_swap_indexes(client, empty_index):

assert docA.title == indexB.uid
assert docB.title == indexA.uid
assert task["type"] == "indexSwap"
assert "swaps" in task["details"]
assert task.type == "indexSwap"
assert "swaps" in task.details


def test_swap_indexes_with_one_that_does_not_exist(client, empty_index):
@@ -43,7 +43,7 @@ def test_swap_indexes_with_one_that_does_not_exist(client, empty_index):
task = client.wait_for_task(swapTask.task_uid)

assert swapTask.type == "indexSwap"
assert task["error"]["code"] == "index_not_found"
assert task.error["code"] == "index_not_found"


def test_swap_indexes_with_itself(client, empty_index):
59 changes: 25 additions & 34 deletions tests/client/test_client_task_meilisearch.py
@@ -9,33 +9,29 @@
def test_get_tasks_default(client):
"""Tests getting the global tasks list."""
tasks = client.get_tasks()
assert isinstance(tasks, dict)
assert "results" in tasks
assert len(tasks.results) >= 1


def test_get_tasks(client, empty_index):
"""Tests getting the global tasks list after populating an index."""
current_tasks = client.get_tasks()
pre_count = current_tasks["from"]
pre_count = current_tasks.from_
empty_index()
tasks = client.get_tasks()
assert isinstance(tasks, dict)
assert tasks["from"] == pre_count + 1
assert tasks.from_ == pre_count + 1


def test_get_tasks_empty_parameters(client):
"""Tests getting the global tasks list after populating an index."""
tasks = client.get_tasks({})
assert isinstance(tasks, dict)
assert isinstance(tasks["results"], list)
assert isinstance(tasks.results, list)


def test_get_tasks_with_parameters(client, empty_index):
"""Tests getting the global tasks list after populating an index."""
empty_index()
tasks = client.get_tasks({"limit": 1})
assert isinstance(tasks, dict)
assert len(tasks["results"]) == 1
assert len(tasks.results) == 1


def test_get_tasks_with_all_plural_parameters(client, empty_index):
@@ -44,8 +40,7 @@ def test_get_tasks_with_all_plural_parameters(client, empty_index):
tasks = client.get_tasks(
{"indexUids": [common.INDEX_UID], "statuses": ["succeeded"], "types": ["indexCreation"]}
)
assert isinstance(tasks, dict)
assert len(tasks["results"]) >= 1
assert len(tasks.results) >= 1


def test_get_tasks_with_date_parameters(client, empty_index):
Expand All @@ -58,34 +53,31 @@ def test_get_tasks_with_date_parameters(client, empty_index):
"beforeFinishedAt": "2042-04-02T00:42:42Z",
}
)
assert isinstance(tasks, dict)
assert len(tasks["results"]) > 1
assert len(tasks.results) > 1


def test_get_tasks_with_index_uid(client, empty_index):
"""Tests getting the global tasks list after populating an index."""
empty_index()
tasks = client.get_tasks({"limit": 1, "indexUids": [common.INDEX_UID]})
assert isinstance(tasks, dict)
assert len(tasks["results"]) == 1
assert len(tasks.results) == 1


def test_get_task(client):
"""Tests getting the tasks list of an empty index."""
response = client.create_index(uid=common.INDEX_UID)
client.wait_for_task(response.task_uid)
task = client.get_task(response.task_uid)
assert isinstance(task, dict)
assert len(task) == 11
assert "uid" in task
assert "indexUid" in task
assert "status" in task
assert "type" in task
assert "duration" in task
assert "enqueuedAt" in task
assert "finishedAt" in task
assert "details" in task
assert "startedAt" in task
task_dict = task.__dict__
assert "uid" in task_dict
assert "index_uid" in task_dict
assert "status" in task_dict
assert "type" in task_dict
assert "duration" in task_dict
assert "enqueued_at" in task_dict
assert "finished_at" in task_dict
assert "details" in task_dict
assert "started_at" in task_dict


def test_get_task_inexistent(client):
@@ -115,8 +107,8 @@ def test_cancel_tasks(client):
assert task.task_uid is not None
assert task.index_uid is None
assert task.type == "taskCancelation"
assert "uids" in tasks["results"][0]["details"]["originalFilter"]
assert "uids=1%2C2" in tasks["results"][0]["details"]["originalFilter"]
assert "uids" in tasks.results[0].details["originalFilter"]
assert "uids=1%2C2" in tasks.results[0].details["originalFilter"]


@pytest.mark.usefixtures("create_tasks")
@@ -130,7 +122,7 @@ def test_cancel_every_task(client):
assert task.task_uid is not None
assert task.index_uid is None
assert task.type == "taskCancelation"
assert "statuses=enqueued%2Cprocessing" in tasks["results"][0]["details"]["originalFilter"]
assert "statuses=enqueued%2Cprocessing" in tasks.results[0].details["originalFilter"]


def test_delete_tasks_by_uid(client, empty_index, small_movies):
@@ -147,8 +139,8 @@ def test_delete_tasks_by_uid(client, empty_index, small_movies):
assert task_deleted.task_uid is not None
assert task_deleted.index_uid is None
assert task_deleted.type == "taskDeletion"
assert "uids" in task["details"]["originalFilter"]
assert f"uids={task_addition.task_uid}" in task["details"]["originalFilter"]
assert "uids" in task.details["originalFilter"]
assert f"uids={task_addition.task_uid}" in task.details["originalFilter"]


def test_delete_tasks_by_filter(client):
@@ -160,8 +152,7 @@ def test_delete_tasks_by_filter(client):
assert task.task_uid is not None
assert task.index_uid is None
assert task.type == "taskDeletion"
assert len(tasks_after["results"]) >= 1
assert len(tasks_after.results) >= 1
assert (
"statuses=succeeded%2Cfailed%2Ccanceled"
in tasks_after["results"][0]["details"]["originalFilter"]
"statuses=succeeded%2Cfailed%2Ccanceled" in tasks_after.results[0].details["originalFilter"]
)
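For downstream code, the migration these tests illustrate is a switch from key access on dicts to attribute access on Task and TaskResults; a before-and-after sketch, assuming the client from the earlier sketches and the uid of a previously enqueued task:

# Before this change: task methods returned plain dicts.
# task = client.get_task(task_uid)
# if task["status"] == "succeeded":
#     print(task["details"])

# After this change: task methods return Task instances.
task = client.get_task(task_uid)
if task.status == "succeeded":
    print(task.details)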