Skip to content

Commit 54fa08b

Browse files
committed
Issue #404/#481 test_vectorcube streamlining
- use `dummy_backend` for more compact setup
- cover more combinations of server-side support, `auto_validate` and explicit `validate`
- leverage fixture parameterization more to avoid custom fixtures
1 parent d7925d7 commit 54fa08b

File tree

7 files changed

+326
-213
lines changed

7 files changed

+326
-213
lines changed

openeo/rest/connection.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1568,7 +1568,7 @@ def execute(
15681568
json=pg_with_metadata,
15691569
expected_status=200,
15701570
timeout=timeout or DEFAULT_TIMEOUT_SYNCHRONOUS_EXECUTE,
1571-
).json()
1571+
).json() # TODO: only do JSON decoding when mimetype is actually JSON?
15721572

15731573
def create_job(
15741574
self,

tests/rest/conftest.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -72,7 +72,10 @@ def assert_oidc_device_code_flow(url: str = "https://oidc.test/dc", elapsed: flo
7272

7373
@pytest.fixture
7474
def api_capabilities() -> dict:
75-
"""Fixture to be overridden for customizing the capabilities doc used by connection fixtures."""
75+
"""
76+
Fixture to be overridden for customizing the capabilities doc used by connection fixtures.
77+
To be used as kwargs for `build_capabilities`
78+
"""
7679
return {}
7780

7881

tests/rest/datacube/conftest.py

Lines changed: 4 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -67,29 +67,19 @@ def setup_collection_metadata(requests_mock, cid: str, bands: List[str]):
6767
})
6868

6969

70-
@pytest.fixture
71-
def support_udp() -> bool:
72-
"""Per-test overridable `build_capabilities_kwargs(udp=...)` value for connection fixtures"""
73-
return False
74-
7570

7671
@pytest.fixture
77-
def connection(api_version, requests_mock) -> Connection:
72+
def connection(api_version, requests_mock, api_capabilities) -> Connection:
7873
"""Connection fixture to a backend of given version with some image collections."""
79-
return _setup_connection(api_version, requests_mock)
74+
return _setup_connection(api_version, requests_mock, build_capabilities_kwargs=api_capabilities)
8075

8176

8277
@pytest.fixture
83-
def con100(requests_mock, support_udp) -> Connection:
78+
def con100(requests_mock, api_capabilities) -> Connection:
8479
"""Connection fixture to a 1.0.0 backend with some image collections."""
85-
return _setup_connection("1.0.0", requests_mock, build_capabilities_kwargs={"udp": support_udp})
80+
return _setup_connection("1.0.0", requests_mock, build_capabilities_kwargs=api_capabilities)
8681

8782

88-
@pytest.fixture
89-
def connection_with_pgvalidation_datacube(api_version, requests_mock) -> Connection:
90-
"""Connection fixture to a backend that supports validation of the process graph."""
91-
return _setup_connection("1.0.0", requests_mock, build_capabilities_kwargs={"udp": support_udp, "validation": True})
92-
9383

9484
@pytest.fixture
9585
def s2cube(connection, api_version) -> DataCube:

tests/rest/datacube/test_datacube.py

Lines changed: 150 additions & 68 deletions
Original file line numberDiff line numberDiff line change
@@ -4,20 +4,20 @@
44
- 1.0.0-style DataCube
55
66
"""
7-
import json
87
import pathlib
98
from datetime import date, datetime
109
from unittest import mock
1110

1211
import numpy as np
1312
import pytest
14-
import requests
1513
import shapely
1614
import shapely.geometry
1715

1816
from openeo.rest import BandMathException
17+
from openeo.rest._testing import build_capabilities
1918
from openeo.rest.connection import Connection
2019
from openeo.rest.datacube import DataCube
20+
from openeo.util import dict_no_none
2121

2222
from ... import load_json_resource
2323
from .. import get_download_graph
@@ -812,94 +812,176 @@ def test_save_result_format_options_vs_execute_batch(elf, s2cube, get_create_job
812812
}
813813

814814

815-
class TestProcessGraphValidation:
816-
JOB_ID = "j-123"
817-
PROCESS_GRAPH_DICT = {"add1": {"process_id": "add", "arguments": {"x": 3, "y": 5}, "result": True}}
818-
PROCESS_GRAPH_STRING = json.dumps(PROCESS_GRAPH_DICT)
815+
class TestDataCubeValidation:
816+
"""
817+
Test (auto) validation of datacube execution with `download`, `execute`, ...
818+
"""
819819

820-
@pytest.fixture
821-
def cube_add(self, requests_mock, connection_with_pgvalidation_datacube: Connection) -> DataCube:
822-
requests_mock.post(API_URL + "/result", content=self._post_result_handler_json)
823-
return connection_with_pgvalidation_datacube.datacube_from_json(self.PROCESS_GRAPH_STRING)
820+
_PG_S2 = {
821+
"loadcollection1": {
822+
"process_id": "load_collection",
823+
"arguments": {"id": "S2", "spatial_extent": None, "temporal_extent": None},
824+
"result": True,
825+
},
826+
}
827+
_PG_S2_SAVE = {
828+
"loadcollection1": {
829+
"process_id": "load_collection",
830+
"arguments": {"id": "S2", "spatial_extent": None, "temporal_extent": None},
831+
},
832+
"saveresult1": {
833+
"process_id": "save_result",
834+
"arguments": {"data": {"from_node": "loadcollection1"}, "format": "GTiff", "options": {}},
835+
"result": True,
836+
},
837+
}
824838

825-
def _post_jobs_handler_json(self, response: requests.Request, context):
826-
context.headers["OpenEO-Identifier"] = self.JOB_ID
827-
return b""
839+
@pytest.fixture(params=[False, True])
840+
def auto_validate(self, request) -> bool:
841+
"""Fixture to parametrize auto_validate setting."""
842+
return request.param
828843

829-
def _post_result_handler_json(self, response: requests.Request, context):
830-
pg = response.json()["process"]["process_graph"]
831-
assert pg == self.PROCESS_GRAPH_DICT
832-
return b'{"answer": 8}'
844+
@pytest.fixture
845+
def connection(self, api_version, requests_mock, api_capabilities, auto_validate) -> Connection:
846+
requests_mock.get(API_URL, json=build_capabilities(api_version=api_version, **api_capabilities))
847+
con = Connection(API_URL, **dict_no_none(auto_validate=auto_validate))
848+
return con
849+
850+
@pytest.fixture(autouse=True)
851+
def dummy_backend_setup(self, dummy_backend):
852+
dummy_backend.next_validation_errors = [{"code": "NoAdd", "message": "Don't add numbers"}]
853+
854+
# Reusable list of (fixture) parameterization
855+
# of ["api_capabilities", "auto_validate", "validate", "validation_expected"]
856+
_VALIDATION_PARAMETER_SETS = [
857+
# No validation supported by backend: don't attempt to validate
858+
({}, None, None, False),
859+
({}, True, True, False),
860+
# Validation supported by backend, default behavior -> validate
861+
({"validation": True}, None, None, True),
862+
# (Validation supported by backend) no explicit validation enabled: follow auto_validate setting
863+
({"validation": True}, True, None, True),
864+
({"validation": True}, False, None, False),
865+
# (Validation supported by backend) follow explicit `validate` toggle regardless of auto_validate
866+
({"validation": True}, False, True, True),
867+
({"validation": True}, True, False, False),
868+
]
833869

834-
@pytest.mark.parametrize("validate", [True, False])
835-
def test_create_job_with_pg_validation(
836-
self,
837-
requests_mock,
838-
connection_with_pgvalidation_datacube: Connection,
839-
validate,
840-
):
870+
@pytest.mark.parametrize(
871+
["api_capabilities", "auto_validate", "validate", "validation_expected"],
872+
_VALIDATION_PARAMETER_SETS,
873+
)
874+
def test_cube_download_validation(self, dummy_backend, connection, validate, validation_expected, caplog, tmp_path):
841875
"""The DataCube should pass through request for the validation to the
842876
connection and the validation endpoint should only be called when
843877
validation was requested.
844878
"""
845-
m = requests_mock.post(API_URL + "/validation", json={"errors": []})
879+
cube = connection.load_collection("S2")
846880

847-
requests_mock.post(API_URL + "/jobs", status_code=201, content=self._post_jobs_handler_json)
848-
cube: DataCube = connection_with_pgvalidation_datacube.load_collection("S2")
849-
cube.create_job(validate=validate)
881+
output = tmp_path / "result.tiff"
882+
cube.download(outputfile=output, **dict_no_none(validate=validate))
883+
assert output.read_bytes() == b'{"what?": "Result data"}'
884+
assert dummy_backend.get_sync_pg() == self._PG_S2_SAVE
850885

851-
# Validation should be called if and only if it was requested
852-
expected_call_count = 1 if validate else 0
853-
assert m.call_count == expected_call_count
886+
if validation_expected:
887+
assert dummy_backend.validation_requests == [self._PG_S2_SAVE]
888+
assert caplog.messages == ["Preflight process graph validation raised: [NoAdd] Don't add numbers"]
889+
else:
890+
assert dummy_backend.validation_requests == []
891+
assert caplog.messages == []
854892

855-
@pytest.mark.parametrize("validate", [True, False])
856-
def test_execute_with_pg_validation(
857-
self,
858-
requests_mock,
859-
cube_add: DataCube,
860-
validate,
861-
):
893+
@pytest.mark.parametrize("api_capabilities", [{"validation": True}])
894+
def test_cube_download_validation_broken(self, dummy_backend, connection, requests_mock, caplog, tmp_path):
895+
"""Test resilience against broken validation response."""
896+
requests_mock.post(
897+
connection.build_url("/validation"), status_code=500, json={"code": "Internal", "message": "nope!"}
898+
)
899+
900+
cube = connection.load_collection("S2")
901+
902+
output = tmp_path / "result.tiff"
903+
cube.download(outputfile=output, validate=True)
904+
assert output.read_bytes() == b'{"what?": "Result data"}'
905+
assert dummy_backend.get_sync_pg() == self._PG_S2_SAVE
906+
907+
assert caplog.messages == ["Preflight process graph validation failed: [500] Internal: nope!"]
908+
909+
@pytest.mark.parametrize(
910+
["api_capabilities", "auto_validate", "validate", "validation_expected"],
911+
_VALIDATION_PARAMETER_SETS,
912+
)
913+
def test_cube_execute_validation(self, dummy_backend, connection, validate, validation_expected, caplog):
862914
"""The DataCube should pass through request for the validation to the
863915
connection and the validation endpoint should only be called when
864916
validation was requested.
865917
"""
866-
m = requests_mock.post(API_URL + "/validation", json={"errors": []})
867-
requests_mock.post(API_URL + "/jobs", status_code=201, content=self._post_jobs_handler_json)
868-
requests_mock.post(API_URL + "/result", content=self._post_result_handler_json)
918+
cube = connection.load_collection("S2")
869919

870-
cube_add.execute(validate=validate)
920+
res = cube.execute(**dict_no_none(validate=validate))
921+
assert res == {"what?": "Result data"}
922+
assert dummy_backend.get_sync_pg() == self._PG_S2
871923

872-
# Validation should be called if and only if it was requested
873-
expected_call_count = 1 if validate else 0
874-
assert m.call_count == expected_call_count
924+
if validation_expected:
925+
assert dummy_backend.validation_requests == [self._PG_S2]
926+
assert caplog.messages == ["Preflight process graph validation raised: [NoAdd] Don't add numbers"]
927+
else:
928+
assert dummy_backend.validation_requests == []
929+
assert caplog.messages == []
875930

876-
@pytest.mark.parametrize("validate", [True, False])
877-
def test_execute_batch_with_pg_validation(
878-
self,
879-
requests_mock,
880-
cube_add: DataCube,
881-
validate,
931+
@pytest.mark.parametrize(
932+
["api_capabilities", "auto_validate", "validate", "validation_expected"],
933+
_VALIDATION_PARAMETER_SETS,
934+
)
935+
def test_cube_create_job_validation(
936+
self, dummy_backend, connection: Connection, validate, validation_expected, caplog
882937
):
883938
"""The DataCube should pass through request for the validation to the
884939
connection and the validation endpoint should only be called when
885940
validation was requested.
886941
"""
887-
m = requests_mock.post(API_URL + "/validation", json={"errors": []})
888-
requests_mock.post(API_URL + "/jobs", status_code=201, content=self._post_jobs_handler_json)
889-
requests_mock.post(API_URL + f"/jobs/{self.JOB_ID}/results", status_code=202)
890-
job_metadata = {
891-
"id": self.JOB_ID,
892-
"title": f"Job {self.JOB_ID,}",
893-
"description": f"Job {self.JOB_ID,}",
894-
"process": self.PROCESS_GRAPH_DICT,
895-
"status": "finished",
896-
"created": "2017-01-01T09:32:12Z",
897-
"links": [],
898-
}
899-
requests_mock.get(API_URL + f"/jobs/{self.JOB_ID}", status_code=200, json=job_metadata)
942+
cube = connection.load_collection("S2")
943+
job = cube.create_job(**dict_no_none(validate=validate))
944+
assert job.job_id == "job-000"
945+
assert dummy_backend.get_batch_pg() == self._PG_S2_SAVE
946+
947+
if validation_expected:
948+
assert dummy_backend.validation_requests == [self._PG_S2_SAVE]
949+
assert caplog.messages == ["Preflight process graph validation raised: [NoAdd] Don't add numbers"]
950+
else:
951+
assert dummy_backend.validation_requests == []
952+
assert caplog.messages == []
900953

901-
cube_add.execute_batch(validate=validate)
954+
@pytest.mark.parametrize("api_capabilities", [{"validation": True}])
955+
def test_cube_create_job_validation_broken(self, dummy_backend, connection, requests_mock, caplog, tmp_path):
956+
"""Test resilience against broken validation response."""
957+
requests_mock.post(
958+
connection.build_url("/validation"), status_code=500, json={"code": "Internal", "message": "nope!"}
959+
)
902960

903-
# Validation should be called if and only if it was requested
904-
expected_call_count = 1 if validate else 0
905-
assert m.call_count == expected_call_count
961+
cube = connection.load_collection("S2")
962+
job = cube.create_job(validate=True)
963+
assert job.job_id == "job-000"
964+
assert dummy_backend.get_batch_pg() == self._PG_S2_SAVE
965+
966+
assert caplog.messages == ["Preflight process graph validation failed: [500] Internal: nope!"]
967+
968+
@pytest.mark.parametrize(
969+
["api_capabilities", "auto_validate", "validate", "validation_expected"],
970+
_VALIDATION_PARAMETER_SETS,
971+
)
972+
def test_cube_execute_batch_validation(self, dummy_backend, connection, validate, validation_expected, caplog):
973+
"""The DataCube should pass through request for the validation to the
974+
connection and the validation endpoint should only be called when
975+
validation was requested.
976+
"""
977+
cube = connection.load_collection("S2")
978+
job = cube.execute_batch(**dict_no_none(validate=validate))
979+
assert job.job_id == "job-000"
980+
assert dummy_backend.get_batch_pg() == self._PG_S2_SAVE
981+
982+
if validation_expected:
983+
assert dummy_backend.validation_requests == [self._PG_S2_SAVE]
984+
assert caplog.messages == ["Preflight process graph validation raised: [NoAdd] Don't add numbers"]
985+
else:
986+
assert dummy_backend.validation_requests == []
987+
assert caplog.messages == []

tests/rest/datacube/test_datacube100.py

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,6 @@
2727
from openeo.internal.warnings import UserDeprecationWarning
2828
from openeo.processes import ProcessBuilder
2929
from openeo.rest import OpenEoClientException
30-
from openeo.rest._testing import build_capabilities
3130
from openeo.rest.connection import Connection
3231
from openeo.rest.datacube import THIS, UDF, DataCube
3332

@@ -1966,9 +1965,9 @@ def test_custom_process_arguments_namespacd(con100: Connection):
19661965
assert res.flat_graph() == expected
19671966

19681967

1969-
@pytest.mark.parametrize("support_udp", [True])
1968+
1969+
@pytest.mark.parametrize("api_capabilities", [{"udp": True}])
19701970
def test_save_user_defined_process(con100, requests_mock):
1971-
requests_mock.get(API_URL + "/", json=build_capabilities(udp=True))
19721971
requests_mock.get(API_URL + "/processes", json={"processes": [{"id": "add"}]})
19731972

19741973
expected_body = load_json_resource("data/1.0.0/save_user_defined_process.json")
@@ -1990,9 +1989,8 @@ def check_body(request):
19901989
assert adapter.called
19911990

19921991

1993-
@pytest.mark.parametrize("support_udp", [True])
1992+
@pytest.mark.parametrize("api_capabilities", [{"udp": True}])
19941993
def test_save_user_defined_process_public(con100, requests_mock):
1995-
requests_mock.get(API_URL + "/", json=build_capabilities(udp=True))
19961994
requests_mock.get(API_URL + "/processes", json={"processes": [{"id": "add"}]})
19971995

19981996
expected_body = load_json_resource("data/1.0.0/save_user_defined_process.json")

0 commit comments

Comments (0)