
implement data api search commands #583

Merged: 5 commits, merged Jun 7, 2022
63 changes: 51 additions & 12 deletions planet/clients/data.py
@@ -40,6 +40,12 @@ class Items(Paged):
ITEMS_KEY = 'features'


class Searches(Paged):
"""Asynchronous iterator over searches from a paged response."""
NEXT_KEY = '_next'
ITEMS_KEY = 'searches'
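
A sketch of one page of a saved-searches listing (page shape mirrors the mocks in the tests below; key names come from this class): Paged follows the "_next" link under "_links" until it is absent and yields each dict under "searches".

# Hypothetical page payload; the URL and field values are placeholders.
page = {
    "_links": {"_next": "<url of the next page>"},
    "searches": [{"id": "search_id", "name": "test"}]
}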


class DataClient:
"""Low-level asynchronous access to Planet's data API.

@@ -212,12 +218,25 @@ async def update_search(self,
Returns:
Description of the saved search.
"""
url = f'{self._searches_url()}/{search_id}'

request_json = {
'name': name,
'filter': search_filter,
'item_types': item_types,
'__daily_email_enabled': enable_email
}

request = self._request(url, method='PUT', json=request_json)
response = await self._do_request(request)
return response.json()
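
A usage sketch, not part of the diff: assuming planet.Session works as an async context manager and DataClient is importable from this module, update_search could be called as below. The search id and filter are hypothetical placeholders; the positional argument order mirrors the tests below.

import asyncio

from planet import Session
from planet.clients.data import DataClient

async def main():
    async with Session() as session:
        cl = DataClient(session)
        # Rename the saved search and re-point it at PSScene items.
        updated = await cl.update_search(
            'my-search-id',
            'test',
            ['PSScene'],
            {'type': 'AndFilter', 'config': []})
        print(updated['name'])

asyncio.run(main())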

async def list_searches(
self,
sort: str = 'created desc',
search_type: str = 'any',
limit: typing.Union[int,
None] = 100) -> typing.AsyncIterator[dict]:
"""List all saved searches available to the authenticated user.

NOTE: the term 'saved' is overloaded here. We want to list saved
@@ -228,32 +247,43 @@ async def list_searches(
Parameters:
sort: Field and direction to order results by.
search_type: Search type filter.
limit: Maximum number of items to return.

Returns:
An iterator over all searches that match filter.

Raises:
planet.exceptions.APIError: On API error.
planet.exceptions.ClientError: If sort or search_type are not
valid.
"""
sort = sort.lower()
if sort not in LIST_SORT_ORDER:
raise exceptions.ClientError(
f'{sort} must be one of {LIST_SORT_ORDER}')

search_type = search_type.lower()
if search_type not in LIST_SEARCH_TYPE:
raise exceptions.ClientError(
f'{search_type} must be one of {LIST_SEARCH_TYPE}')

url = f'{self._searches_url()}'
request = self._request(url, method='GET')
return Searches(request, self._do_request, limit=limit)
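
A follow-on sketch, in the same async context as the update_search example above: list the ten most recently created searches. Invalid sort or search_type values raise ClientError before any request is sent.

searches = await cl.list_searches(sort='created desc', limit=10)
async for search in searches:
    print(search['id'], search['name'])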

async def delete_search(self, search_id: str):
"""Delete an existing saved search.

Parameters:
search_id: Saved search identifier.

Returns:
Nothing.

Raises:
planet.exceptions.APIError: On API error.
"""
url = f'{self._searches_url()}/{search_id}'

request = self._request(url, method='DELETE')
await self._do_request(request)
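
Sketch, same context: deletion returns nothing on success; an unknown id surfaces as planet.exceptions.APIError when the request is made.

await cl.delete_search('my-search-id')  # hypothetical id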

async def get_search(self, search_id: str) -> dict:
"""Get a saved search by id.
@@ -269,19 +299,27 @@ async def get_search(self, search_id: str) -> dict:
"""
raise NotImplementedError

async def run_search(
self,
search_id: str,
limit: typing.Union[int,
None] = 100) -> typing.AsyncIterator[dict]:
"""Execute a saved search.

Parameters:
search_id: Stored search identifier.
limit: Maximum number of items to return.

Returns:
Returns an iterator over all items matching the search.

Raises:
planet.exceptions.APIError: On API error.
"""
url = f'{self._searches_url()}/{search_id}/results'

request = self._request(url, method='GET')
return Items(request, self._do_request, limit=limit)
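
Sketch, same context: run_search hands back the iterator immediately, so an APIError for a bad id is raised while iterating, not at the call itself (see the issue 476 note in the tests below).

items = await cl.run_search('my-search-id', limit=50)
item_list = [item async for item in items]  # errors surface here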

async def get_stats(self,
item_types: typing.List[str],
@@ -301,6 +339,7 @@ async def get_stats(self,
planet.exceptions.APIError: On API error.
planet.exceptions.ClientError: If interval is not valid.
"""
interval = interval.lower()
if interval not in STATS_INTERVAL:
raise exceptions.ClientError(
f'{interval} must be one of {STATS_INTERVAL}')
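# With interval.lower() above, the check is case-insensitive: 'day' and
# 'DAY' both pass, while an unsupported value such as 'fortnight' raises
# ClientError before any request is sent.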
144 changes: 144 additions & 0 deletions tests/integration/test_data_api.py
@@ -11,6 +11,7 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
from contextlib import nullcontext as does_not_raise
from http import HTTPStatus
import json
import logging
@@ -224,6 +225,149 @@ async def test_create_search_email(search_filter, session):
assert search == page_response


@respx.mock
@pytest.mark.asyncio
async def test_update_search_basic(search_filter, session):
sid = 'search_id'

page_response = {
"__daily_email_enabled": False,
"_links": {
"_self": "string", "thumbnail": "string"
},
"created": "2019-08-24T14:15:22Z",
"filter": search_filter,
"id": sid,
"last_executed": "2019-08-24T14:15:22Z",
"name": "test",
"updated": "2019-08-24T14:15:22Z"
}
mock_resp = httpx.Response(HTTPStatus.OK, json=page_response)
respx.put(f'{TEST_SEARCHES_URL}/{sid}').return_value = mock_resp

cl = DataClient(session, base_url=TEST_URL)
search = await cl.update_search(sid, 'test', ['PSScene'], search_filter)

# check that request is correct
expected_request = {
"item_types": ["PSScene"],
"filter": search_filter,
"name": "test",
"__daily_email_enabled": False
}
actual_body = json.loads(respx.calls[0].request.content)
assert actual_body == expected_request

# check the response is returned unaltered
assert search == page_response


@respx.mock
@pytest.mark.asyncio
@pytest.mark.parametrize("limit, expected_list_length", [(None, 4), (3, 3)])
async def test_list_searches_success(limit,
expected_list_length,
search_result,
session):
page1_response = {"_links": {}, "searches": [search_result] * 4}
route = respx.get(TEST_SEARCHES_URL)
route.return_value = httpx.Response(200, json=page1_response)

cl = DataClient(session, base_url=TEST_URL)

searches = await cl.list_searches(limit=limit)
searches_list_length = len([s async for s in searches])
assert searches_list_length == expected_list_length

assert route.called


@respx.mock
@pytest.mark.asyncio
@pytest.mark.parametrize(
"sort, search_type, expectation",
[('DOESNOTEXIST', 'ANY', pytest.raises(exceptions.ClientError)),
('CREATED DESC', 'DOESNOTEXIST', pytest.raises(exceptions.ClientError))])
async def test_list_searches_args_do_not_match(sort,
search_type,
expectation,
session):
route = respx.get(TEST_SEARCHES_URL)
route.return_value = httpx.Response(200, json={})

cl = DataClient(session, base_url=TEST_URL)

with expectation:
await cl.list_searches(sort=sort, search_type=search_type)

assert not route.called


@respx.mock
@pytest.mark.asyncio
@pytest.mark.parametrize("retcode, expectation",
[(204, does_not_raise()),
(404, pytest.raises(exceptions.APIError))])
async def test_delete_search(retcode, expectation, session):
sid = 'search_id'
mock_resp = httpx.Response(retcode)
route = respx.delete(f'{TEST_SEARCHES_URL}/{sid}')
route.return_value = mock_resp
cl = DataClient(session, base_url=TEST_URL)

with expectation:
await cl.delete_search(sid)

assert route.called


@respx.mock
@pytest.mark.asyncio
async def test_run_search_success(item_descriptions, session):
sid = 'search_id'
route = respx.get(f'{TEST_SEARCHES_URL}/{sid}/results')

next_page_url = f'{TEST_URL}/blob/?page_marker=IAmATest'
item1, item2, item3 = item_descriptions
page1_response = {
"_links": {
"_next": next_page_url
}, "features": [item1, item2]
}

route.return_value = httpx.Response(HTTPStatus.OK, json=page1_response)

page2_response = {"_links": {"_self": next_page_url}, "features": [item3]}
mock_resp2 = httpx.Response(HTTPStatus.OK, json=page2_response)
respx.get(next_page_url).return_value = mock_resp2

cl = DataClient(session, base_url=TEST_URL)
items = await cl.run_search(sid)
items_list = [i async for i in items]

assert route.called

# check that all of the items were returned unchanged
assert items_list == item_descriptions


@respx.mock
@pytest.mark.asyncio
async def test_run_search_doesnotexist(session):
sid = 'search_id'
route = respx.get(f'{TEST_SEARCHES_URL}/{sid}/results')
route.return_value = httpx.Response(404)

cl = DataClient(session, base_url=TEST_URL)
with pytest.raises(exceptions.APIError):
items = await cl.run_search(sid)
# this won't throw the error until the iterator is processed
# issue 476
[i async for i in items]

assert route.called


@respx.mock
@pytest.mark.asyncio
async def test_get_stats_success(search_filter, session):