DBClient has a get_items method that pages through results from api
niquerio committed Oct 31, 2024
1 parent b47cf3e commit 565f71e
Showing 3 changed files with 130 additions and 0 deletions.
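For context, a minimal usage sketch of the new method (the bare DBClient() construction follows the tests below; the client's base URL is assumed to come from configuration via aim.services.S, and the in_zephir value here is only illustrative):

from aim.digifeeds.db_client import DBClient

client = DBClient()

# Fetch every item, 50 per request by default, following the offset-based paging.
all_items = client.get_items()

# The in_zephir filter is only sent to the API when it is not None;
# here we ask for items that are not yet in Zephir.
not_in_zephir = client.get_items(limit=100, in_zephir=False)
print(len(all_items), len(not_in_zephir))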
29 changes: 29 additions & 0 deletions aim/digifeeds/db_client.py
@@ -35,5 +35,34 @@ def add_item_status(self, barcode: str, status: str):
        response.raise_for_status()
        return response.json()

    def get_items(self, limit: int = 50, in_zephir: bool | None = None):
        """Fetch all items from the digifeeds API, paging through results limit at a time."""
        items = []
        url = self._url("items")
        params = {
            "limit": limit,
            "offset": 0,
        }
        if in_zephir is not None:
            params["in_zephir"] = in_zephir

        response = requests.get(url, params=params)
        if response.status_code != 200:
            response.raise_for_status()

        first_page = response.json()
        total = first_page["total"]
        for item in first_page["items"]:
            items.append(item)

        # Fetch the remaining pages at offsets limit, 2*limit, ... up to total.
        for offset in range(limit, total, limit):
            params["offset"] = offset
            response = requests.get(url, params=params)
            if response.status_code != 200:
                response.raise_for_status()
            for item in response.json()["items"]:
                items.append(item)

        return items

    def _url(self, path) -> str:
        return f"{self.base_url}/{path}"
83 changes: 83 additions & 0 deletions tests/digifeeds/test_db_client.py
@@ -1,8 +1,18 @@
import responses
from responses import matchers
import pytest
from aim.services import S
from aim.digifeeds.db_client import DBClient
from requests.exceptions import HTTPError
import json
import copy


@pytest.fixture
def item_list():
    with open("tests/fixtures/digifeeds/item_list.json") as f:
        output = json.load(f)
    return output


@responses.activate
@@ -79,3 +89,76 @@ def test_add_item_status_failure():
    with pytest.raises(Exception) as exc_info:
        DBClient().add_item_status(barcode="my_barcode", status="in_zephir")
    assert exc_info.type is HTTPError


@responses.activate
def test_get_items_multiple_pages(item_list):
    # deepcopy so mutating page_2 does not also change the shared item_list fixture
    page_2 = copy.deepcopy(item_list)
    page_2["offset"] = 1
    page_2["items"][0]["barcode"] = "some_other_barcode"
    url = f"{S.digifeeds_api_url}/items"
    responses.get(
        url=url,
        match=[matchers.query_param_matcher({"limit": 1, "offset": 0})],
        json=item_list,
    )
    responses.get(
        url=url,
        match=[matchers.query_param_matcher({"limit": 1, "offset": 1})],
        json=page_2,
    )

    items = DBClient().get_items(limit=1)
    assert len(items) == 2


@responses.activate
def test_get_items_in_zephir_value(item_list):
    item_list["total"] = 1
    url = f"{S.digifeeds_api_url}/items"
    responses.get(
        url=url,
        match=[
            matchers.query_param_matcher({"limit": 1, "offset": 0, "in_zephir": False})
        ],
        json=item_list,
    )
    items = DBClient().get_items(limit=1, in_zephir=False)
    assert len(items) == 1


@responses.activate
def test_get_items_fail_first_page():
    url = f"{S.digifeeds_api_url}/items"
    responses.get(
        url=url,
        status=500,
        match=[matchers.query_param_matcher({"limit": 1, "offset": 0})],
        json={},
    )

    with pytest.raises(Exception) as exc_info:
        DBClient().get_items(limit=1)

    assert exc_info.type is HTTPError


@responses.activate
def test_get_items_fail_later_page(item_list):
    url = f"{S.digifeeds_api_url}/items"
    responses.get(
        url=url,
        match=[matchers.query_param_matcher({"limit": 1, "offset": 0})],
        json=item_list,
    )
    responses.get(
        url=url,
        status=500,
        match=[matchers.query_param_matcher({"limit": 1, "offset": 1})],
        json={},
    )

    with pytest.raises(Exception) as exc_info:
        DBClient().get_items(limit=1)

    assert exc_info.type is HTTPError
18 changes: 18 additions & 0 deletions tests/fixtures/digifeeds/item_list.json
@@ -0,0 +1,18 @@
{
  "limit": 1,
  "offset": 0,
  "total": 2,
  "items": [
    {
      "barcode": "some_barcode",
      "created_at": "2024-09-25T17:12:39",
      "statuses": [
        {
          "name": "added_to_digifeeds_set",
          "description": "Item has been added to the digifeeds set",
          "created_at": "2024-09-25T17:13:28"
        }
      ]
    }
  ]
}
