
Commit fb41c03

Merge branch 'main' into extend_datetime_search
2 parents 17818d3 + f4618a9

18 files changed: +215 -71 lines changed

.github/workflows/cicd.yml

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-name: stac-fastapi-elasticsearch
+name: sfeos
 
 on:
   push:

CHANGELOG.md

Lines changed: 6 additions & 1 deletion
@@ -7,17 +7,21 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
 
 ## [Unreleased]
 
+## [v3.0.0a2]
+
 ### Added
 
 - Queryables landing page and collection links when the Filter Extension is enabled [#267](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/267)
 
 ### Changed
 
 - Updated stac-fastapi libraries to v3.0.0a1 [#265](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/265)
+- Updated stac-fastapi libraries to v3.0.0a3 [#269](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/269)
 
 ### Fixed
 
 - API sort extension tests [#264](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/264)
 - Basic auth permission fix for checking route path instead of absolute path [#266](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/266)
+- Remove deprecated filter_fields property, return all properties as default [#269](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/269)
 
 ## [v3.0.0a1]
 
@@ -217,7 +221,8 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
 - Added db_to_stac serializer to item_collection method in core.py.
 
 
-[Unreleased]: <https://github.com/stac-utils/stac-fastapi-elasticsearch/tree/v3.0.0a1...main>
+[Unreleased]: <https://github.com/stac-utils/stac-fastapi-elasticsearch/tree/v3.0.0a2...main>
+[v3.0.0a2]: <https://github.com/stac-utils/stac-fastapi-elasticsearch/tree/v3.0.0a1...v3.0.0a2>
 [v3.0.0a1]: <https://github.com/stac-utils/stac-fastapi-elasticsearch/tree/v3.0.0a0...v3.0.0a1>
 [v3.0.0a0]: <https://github.com/stac-utils/stac-fastapi-elasticsearch/tree/v2.4.1...v3.0.0a0>
 [v2.4.1]: <https://github.com/stac-utils/stac-fastapi-elasticsearch/tree/v2.4.0...v2.4.1>

docker-compose.basic_auth_protected.yml

Lines changed: 1 addition & 1 deletion
@@ -11,7 +11,7 @@ services:
     environment:
       - STAC_FASTAPI_TITLE=stac-fastapi-elasticsearch
       - STAC_FASTAPI_DESCRIPTION=A STAC FastAPI with an Elasticsearch backend
-      - STAC_FASTAPI_VERSION=3.0.0a1
+      - STAC_FASTAPI_VERSION=3.0.0a2
       - APP_HOST=0.0.0.0
       - APP_PORT=8080
       - RELOAD=true

docker-compose.basic_auth_public.yml

Lines changed: 1 addition & 1 deletion
@@ -11,7 +11,7 @@ services:
     environment:
       - STAC_FASTAPI_TITLE=stac-fastapi-elasticsearch
       - STAC_FASTAPI_DESCRIPTION=A STAC FastAPI with an Elasticsearch backend
-      - STAC_FASTAPI_VERSION=3.0.0a1
+      - STAC_FASTAPI_VERSION=3.0.0a2
      - APP_HOST=0.0.0.0
      - APP_PORT=8080
      - RELOAD=true

docker-compose.yml

Lines changed: 1 addition & 1 deletion
@@ -43,7 +43,7 @@ services:
     environment:
       - STAC_FASTAPI_TITLE=stac-fastapi-opensearch
       - STAC_FASTAPI_DESCRIPTION=A STAC FastAPI with an Opensearch backend
-      - STAC_FASTAPI_VERSION=3.0.0a1
+      - STAC_FASTAPI_VERSION=3.0.0a2
       - APP_HOST=0.0.0.0
       - APP_PORT=8082
       - RELOAD=true

stac_fastapi/core/setup.py

Lines changed: 3 additions & 3 deletions
@@ -10,9 +10,9 @@
     "attrs>=23.2.0",
     "pydantic[dotenv]",
     "stac_pydantic>=3",
-    "stac-fastapi.types==3.0.0a1",
-    "stac-fastapi.api==3.0.0a1",
-    "stac-fastapi.extensions==3.0.0a1",
+    "stac-fastapi.types==3.0.0a3",
+    "stac-fastapi.api==3.0.0a3",
+    "stac-fastapi.extensions==3.0.0a3",
     "orjson",
     "overrides",
     "geojson-pydantic",

stac_fastapi/core/stac_fastapi/core/core.py

Lines changed: 29 additions & 51 deletions
@@ -10,7 +10,6 @@
 
 import attr
 import orjson
-import stac_pydantic
 from fastapi import HTTPException, Request
 from overrides import overrides
 from pydantic import ValidationError
@@ -26,19 +25,16 @@
 from stac_fastapi.core.models.links import PagingLinks
 from stac_fastapi.core.serializers import CollectionSerializer, ItemSerializer
 from stac_fastapi.core.session import Session
+from stac_fastapi.core.utilities import filter_fields
+from stac_fastapi.extensions.core.filter.client import AsyncBaseFiltersClient
 from stac_fastapi.extensions.third_party.bulk_transactions import (
     BaseBulkTransactionsClient,
     BulkTransactionMethod,
     Items,
 )
 from stac_fastapi.types import stac as stac_types
-from stac_fastapi.types.config import Settings
 from stac_fastapi.types.conformance import BASE_CONFORMANCE_CLASSES
-from stac_fastapi.types.core import (
-    AsyncBaseCoreClient,
-    AsyncBaseFiltersClient,
-    AsyncBaseTransactionsClient,
-)
+from stac_fastapi.types.core import AsyncBaseCoreClient, AsyncBaseTransactionsClient
 from stac_fastapi.types.extension import ApiExtension
 from stac_fastapi.types.requests import get_base_url
 from stac_fastapi.types.rfc3339 import DateTimeType
@@ -492,34 +488,26 @@ async def get_search(
             base_args["intersects"] = orjson.loads(unquote_plus(intersects))
 
         if sortby:
-            sort_param = []
-            for sort in sortby:
-                sort_param.append(
-                    {
-                        "field": sort[1:],
-                        "direction": "desc" if sort[0] == "-" else "asc",
-                    }
-                )
-            base_args["sortby"] = sort_param
+            base_args["sortby"] = [
+                {"field": sort[1:], "direction": "desc" if sort[0] == "-" else "asc"}
+                for sort in sortby
+            ]
 
         if filter:
-            if filter_lang == "cql2-json":
-                base_args["filter-lang"] = "cql2-json"
-                base_args["filter"] = orjson.loads(unquote_plus(filter))
-            else:
-                base_args["filter-lang"] = "cql2-json"
-                base_args["filter"] = orjson.loads(to_cql2(parse_cql2_text(filter)))
+            base_args["filter-lang"] = "cql2-json"
+            base_args["filter"] = orjson.loads(
+                unquote_plus(filter)
+                if filter_lang == "cql2-json"
+                else to_cql2(parse_cql2_text(filter))
+            )
 
         if fields:
-            includes = set()
-            excludes = set()
+            includes, excludes = set(), set()
             for field in fields:
                 if field[0] == "-":
                     excludes.add(field[1:])
-                elif field[0] == "+":
-                    includes.add(field[1:])
                 else:
-                    includes.add(field)
+                    includes.add(field[1:] if field[0] in "+ " else field)
             base_args["fields"] = {"include": includes, "exclude": excludes}
 
         # Do the request
@@ -615,32 +603,22 @@ async def post_search(
             collection_ids=search_request.collections,
         )
 
+        fields = (
+            getattr(search_request, "fields", None)
+            if self.extension_is_enabled("FieldsExtension")
+            else None
+        )
+        include: Set[str] = fields.include if fields and fields.include else set()
+        exclude: Set[str] = fields.exclude if fields and fields.exclude else set()
+
         items = [
-            self.item_serializer.db_to_stac(item, base_url=base_url) for item in items
+            filter_fields(
+                self.item_serializer.db_to_stac(item, base_url=base_url),
+                include,
+                exclude,
+            )
+            for item in items
         ]
-
-        if self.extension_is_enabled("FieldsExtension"):
-            if search_request.query is not None:
-                query_include: Set[str] = set(
-                    [
-                        k if k in Settings.get().indexed_fields else f"properties.{k}"
-                        for k in search_request.query.keys()
-                    ]
-                )
-                if not search_request.fields.include:
-                    search_request.fields.include = query_include
-                else:
-                    search_request.fields.include.union(query_include)
-
-        filter_kwargs = search_request.fields.filter_fields
-
-        items = [
-            orjson.loads(
-                stac_pydantic.Item(**feat).json(**filter_kwargs, exclude_unset=True)
-            )
-            for feat in items
-        ]
-
         links = await PagingLinks(request=request, next=next_token).get_links()
 
         return stac_types.ItemCollection(
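
The get_search changes above collapse the sortby, filter, and fields parsing into more compact expressions while producing the same POST-style body. A minimal sketch of that translation, run on hypothetical GET parameter values outside the client class (only the logic visible in the diff is reproduced here):

# Sketch of the GET-to-POST parameter translation from the refactored
# get_search; the sortby/fields values below are hypothetical examples.
sortby = ["-properties.datetime", "+id"]
fields = ["+properties.datetime", "-assets"]
base_args = {}

# A leading "-" sorts descending; anything else sorts ascending. The prefix is stripped.
base_args["sortby"] = [
    {"field": sort[1:], "direction": "desc" if sort[0] == "-" else "asc"}
    for sort in sortby
]

# A leading "-" excludes a field; "+" (or the space it decodes to in a URL)
# includes it.
includes, excludes = set(), set()
for field in fields:
    if field[0] == "-":
        excludes.add(field[1:])
    else:
        includes.add(field[1:] if field[0] in "+ " else field)
base_args["fields"] = {"include": includes, "exclude": excludes}

# base_args == {
#     "sortby": [
#         {"field": "properties.datetime", "direction": "desc"},
#         {"field": "id", "direction": "asc"},
#     ],
#     "fields": {"include": {"properties.datetime"}, "exclude": {"assets"}},
# }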
Lines changed: 41 additions & 0 deletions
@@ -0,0 +1,41 @@
+"""Fields extension."""
+
+from typing import Optional, Set
+
+from pydantic import BaseModel, Field
+
+from stac_fastapi.extensions.core import FieldsExtension as FieldsExtensionBase
+from stac_fastapi.extensions.core.fields import request
+
+
+class PostFieldsExtension(request.PostFieldsExtension):
+    """PostFieldsExtension."""
+
+    # Set defaults if needed
+    # include : Optional[Set[str]] = Field(
+    #     default_factory=lambda: {
+    #         "id",
+    #         "type",
+    #         "stac_version",
+    #         "geometry",
+    #         "bbox",
+    #         "links",
+    #         "assets",
+    #         "properties.datetime",
+    #         "collection",
+    #     }
+    # )
+    include: Optional[Set[str]] = set()
+    exclude: Optional[Set[str]] = set()
+
+
+class FieldsExtensionPostRequest(BaseModel):
+    """Additional fields and schema for the POST request."""
+
+    fields: Optional[PostFieldsExtension] = Field(PostFieldsExtension())
+
+
+class FieldsExtension(FieldsExtensionBase):
+    """Override the POST model."""
+
+    POST = FieldsExtensionPostRequest
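
The overridden POST model above defaults both include and exclude to empty sets, which post_search now treats as "return the full item". A small usage sketch, assuming the classes from this new module are importable (the module path is not visible in this diff, so the import is shown only as a commented-out illustration):

# Hypothetical import; the new module's location is not shown in this diff.
# from <new fields module> import FieldsExtensionPostRequest, PostFieldsExtension

# Assuming PostFieldsExtension and FieldsExtensionPostRequest are in scope,
# a POST /search body with a fields block validates into explicit sets:
body = FieldsExtensionPostRequest(
    fields=PostFieldsExtension(
        include={"id", "collection", "properties.datetime"},
        exclude={"assets"},
    )
)
assert body.fields.include == {"id", "collection", "properties.datetime"}
assert body.fields.exclude == {"assets"}

# With no fields block at all, both sets default to empty, which downstream
# code treats as "no filtering".
assert FieldsExtensionPostRequest().fields.include == set()
assert FieldsExtensionPostRequest().fields.exclude == set()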

stac_fastapi/core/stac_fastapi/core/utilities.py

Lines changed: 113 additions & 1 deletion
@@ -3,7 +3,9 @@
 This module contains functions for transforming geospatial coordinates,
 such as converting bounding boxes to polygon representations.
 """
-from typing import List
+from typing import Any, Dict, List, Optional, Set, Union
+
+from stac_fastapi.types.stac import Item
 
 MAX_LIMIT = 10000
 
@@ -21,3 +23,113 @@ def bbox2polygon(b0: float, b1: float, b2: float, b3: float) -> List[List[List[f
         List[List[List[float]]]: A polygon represented as a list of lists of coordinates.
     """
     return [[[b0, b1], [b2, b1], [b2, b3], [b0, b3], [b0, b1]]]
+
+
+# copied from stac-fastapi-pgstac
+# https://github.com/stac-utils/stac-fastapi-pgstac/blob/26f6d918eb933a90833f30e69e21ba3b4e8a7151/stac_fastapi/pgstac/utils.py#L10-L116
+def filter_fields(  # noqa: C901
+    item: Union[Item, Dict[str, Any]],
+    include: Optional[Set[str]] = None,
+    exclude: Optional[Set[str]] = None,
+) -> Item:
+    """Preserve and remove fields as indicated by the fields extension include/exclude sets.
+
+    Returns a shallow copy of the Item with the fields filtered.
+
+    This will not perform a deep copy; values of the original item will be referenced
+    in the return item.
+    """
+    if not include and not exclude:
+        return item
+
+    # Build a shallow copy of included fields on an item, or a sub-tree of an item
+    def include_fields(
+        source: Dict[str, Any], fields: Optional[Set[str]]
+    ) -> Dict[str, Any]:
+        if not fields:
+            return source
+
+        clean_item: Dict[str, Any] = {}
+        for key_path in fields or []:
+            key_path_parts = key_path.split(".")
+            key_root = key_path_parts[0]
+            if key_root in source:
+                if isinstance(source[key_root], dict) and len(key_path_parts) > 1:
+                    # The root of this key path on the item is a dict, and the
+                    # key path indicates a sub-key to be included. Walk the dict
+                    # from the root key and get the full nested value to include.
+                    value = include_fields(
+                        source[key_root], fields={".".join(key_path_parts[1:])}
+                    )
+
+                    if isinstance(clean_item.get(key_root), dict):
+                        # A previously specified key and sub-keys may have been included
+                        # already, so do a deep merge update if the root key already exists.
+                        dict_deep_update(clean_item[key_root], value)
+                    else:
+                        # The root key does not exist, so add it. Fields
+                        # extension only allows nested referencing on dicts, so
+                        # this won't overwrite anything.
+                        clean_item[key_root] = value
+                else:
+                    # The item value to include is not a dict, or, it is a dict but the
+                    # key path is for the whole value, not a sub-key. Include the entire
+                    # value in the cleaned item.
+                    clean_item[key_root] = source[key_root]
+            else:
+                # The key, or root key of a multi-part key, is not present in the item,
+                # so it is ignored
+                pass
+        return clean_item
+
+    # For an item built up for included fields, remove excluded fields. This
+    # modifies `source` in place.
+    def exclude_fields(source: Dict[str, Any], fields: Optional[Set[str]]) -> None:
+        for key_path in fields or []:
+            key_path_part = key_path.split(".")
+            key_root = key_path_part[0]
+            if key_root in source:
+                if isinstance(source[key_root], dict) and len(key_path_part) > 1:
+                    # Walk the nested path of this key to remove the leaf-key
+                    exclude_fields(
+                        source[key_root], fields={".".join(key_path_part[1:])}
+                    )
+                    # If, after removing the leaf-key, the root is now an empty
+                    # dict, remove it entirely
+                    if not source[key_root]:
+                        del source[key_root]
+                else:
+                    # The key's value is not a dict, or there is no sub-key to remove. The
+                    # entire key can be removed from the source.
+                    source.pop(key_root, None)
+
+    # Coalesce incoming type to a dict
+    item = dict(item)
+
+    clean_item = include_fields(item, include)
+
+    # If, after including all the specified fields, there are no included properties,
+    # return just id and collection.
+    if not clean_item:
+        return Item({"id": item["id"], "collection": item["collection"]})
+
+    exclude_fields(clean_item, exclude)
+
+    return Item(**clean_item)
+
+
+def dict_deep_update(merge_to: Dict[str, Any], merge_from: Dict[str, Any]) -> None:
+    """Perform a deep update of two dicts.
+
+    merge_to is updated in-place with the values from merge_from.
+    merge_from values take precedence over existing values in merge_to.
+    """
+    for k, v in merge_from.items():
+        if (
+            k in merge_to
+            and isinstance(merge_to[k], dict)
+            and isinstance(merge_from[k], dict)
+        ):
+            dict_deep_update(merge_to[k], merge_from[k])
+        else:
+            merge_to[k] = v
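
A quick illustration of the include/exclude semantics of filter_fields above, using a hypothetical stripped-down item dict (assumes filter_fields and dict_deep_update from this module are in scope):

# Hypothetical minimal item; real items carry many more STAC fields.
item = {
    "id": "item-1",
    "collection": "collection-1",
    "geometry": {"type": "Point", "coordinates": [0.0, 0.0]},
    "properties": {"datetime": "2020-01-01T00:00:00Z", "gsd": 10},
    "assets": {"thumbnail": {"href": "https://example.com/thumb.png"}},
}

# With neither include nor exclude, the item is returned untouched, which is
# what post_search now relies on to return all properties by default.
assert filter_fields(item) is item

# Keep only id, collection, and the nested properties.datetime key.
subset = filter_fields(item, include={"id", "collection", "properties.datetime"})
# subset == {
#     "id": "item-1",
#     "collection": "collection-1",
#     "properties": {"datetime": "2020-01-01T00:00:00Z"},
# }

# Drop a nested key. Note the shallow copy: the nested "properties" dict is
# shared with the original item, so the original loses "gsd" as well.
pruned = filter_fields(item, exclude={"properties.gsd"})
assert "gsd" not in pruned["properties"]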
Lines changed: 1 addition & 1 deletion
@@ -1,2 +1,2 @@
 """library version."""
-__version__ = "3.0.0a1"
+__version__ = "3.0.0a2"
