25 changes: 25 additions & 0 deletions Dockerfile
@@ -0,0 +1,25 @@
FROM python:3.13-slim

RUN apt-get update && apt-get install -y \
build-essential \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*

WORKDIR /app

COPY README.md .
COPY stac_fastapi/core/pyproject.toml stac_fastapi/core/
COPY stac_fastapi/sfeos_helpers/pyproject.toml stac_fastapi/sfeos_helpers/
COPY stac_fastapi/opensearch/pyproject.toml stac_fastapi/opensearch/

RUN pip install --no-cache-dir --upgrade pip setuptools wheel

COPY stac_fastapi/ stac_fastapi/

RUN pip install --no-cache-dir ./stac_fastapi/core[redis]
RUN pip install --no-cache-dir ./stac_fastapi/sfeos_helpers
RUN pip install --no-cache-dir ./stac_fastapi/opensearch[server,redis]

EXPOSE 8080

CMD ["uvicorn", "stac_fastapi.opensearch.app:app", "--host", "0.0.0.0", "--port", "8080"]
20 changes: 11 additions & 9 deletions stac_fastapi/core/stac_fastapi/core/core.py
@@ -546,18 +546,20 @@ async def post_all_collections(
return await self.all_collections(
limit=search_request.limit if hasattr(search_request, "limit") else None,
bbox=search_request.bbox if hasattr(search_request, "bbox") else None,
datetime=search_request.datetime
if hasattr(search_request, "datetime")
else None,
datetime=(
search_request.datetime if hasattr(search_request, "datetime") else None
),
token=search_request.token if hasattr(search_request, "token") else None,
fields=fields,
sortby=sortby,
filter_expr=search_request.filter
if hasattr(search_request, "filter")
else None,
filter_lang=search_request.filter_lang
if hasattr(search_request, "filter_lang")
else None,
filter_expr=(
search_request.filter if hasattr(search_request, "filter") else None
),
filter_lang=(
search_request.filter_lang
if hasattr(search_request, "filter_lang")
else None
),
query=search_request.query if hasattr(search_request, "query") else None,
q=search_request.q if hasattr(search_request, "q") else None,
request=request,
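Note: the reflowed conditionals above are cosmetic (formatter output). Each `hasattr(...) ... else None` expression is equivalent to `getattr` with a default; a minimal sketch with a stand-in request object:

class _Req:  # stand-in for the POST collections-search request model
    datetime = "2020-01-01T00:00:00Z/.."

search_request = _Req()

# getattr with a default collapses each hasattr/else-None conditional.
dt = getattr(search_request, "datetime", None)  # "2020-01-01T00:00:00Z/.."
filter_expr = getattr(search_request, "filter", None)  # attribute absent -> None
print(dt, filter_expr)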
@@ -656,7 +656,7 @@ def apply_datetime_filter(
),
],
)
return search.query(filter_query), datetime_search
return search.query(filter_query), datetime_search

@staticmethod
def apply_bbox_filter(search: Search, bbox: List):
@@ -811,7 +811,7 @@ async def execute_search(
token: Optional[str],
sort: Optional[Dict[str, Dict[str, str]]],
collection_ids: Optional[List[str]],
datetime_search: Dict[str, Optional[str]],
datetime_search: str,
ignore_unavailable: bool = True,
) -> Tuple[Iterable[Dict[str, Any]], Optional[int], Optional[str]]:
"""Execute a search query with limit and other optional parameters.
@@ -822,7 +822,7 @@ async def execute_search(
token (Optional[str]): The token used to return the next set of results.
sort (Optional[Dict[str, Dict[str, str]]]): Specifies how the results should be sorted.
collection_ids (Optional[List[str]]): The collection ids to search.
datetime_search (Dict[str, Optional[str]]): Datetime range used for index selection.
datetime_search (str): Datetime used for index selection.
ignore_unavailable (bool, optional): Whether to ignore unavailable collections. Defaults to True.

Returns:
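With `datetime_search` now annotated as `str`, index selection receives the raw STAC datetime string instead of a parsed gte/lte dict. Per the STAC API spec, that string is a single instant or an interval with either end open; illustrative values:

# Shapes the datetime_search string can take (standard STAC datetime syntax):
instant = "2020-06-01T00:00:00Z"
closed = "2020-01-01T00:00:00Z/2021-01-01T00:00:00Z"
open_start = "../2021-01-01T00:00:00Z"
open_end = "2020-01-01T00:00:00Z/.."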
@@ -912,7 +912,7 @@ async def aggregate(
geometry_geohash_grid_precision: int,
geometry_geotile_grid_precision: int,
datetime_frequency_interval: str,
datetime_search,
datetime_search: str,
ignore_unavailable: Optional[bool] = True,
):
"""Return aggregations of STAC Items."""
@@ -1218,6 +1218,10 @@ async def json_patch_item(
Returns:
patched item.
"""
for operation in operations:
if operation.op in ["add", "replace", "remove"]:
self.async_index_inserter.validate_datetime_field_update(operation.path)

new_item_id = None
new_collection_id = None
script_operations = []
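The new guard runs before any patching: every mutating operation's path goes through `validate_datetime_field_update`, which by its name presumably rejects edits to the datetime field used for index partitioning (behavior inferred, not shown in this diff). A sketch of operations that would reach the check, using a hypothetical stand-in operation type:

from dataclasses import dataclass

@dataclass
class Op:  # hypothetical stand-in for the patch-operation model
    op: str
    path: str
    value: object = None

operations = [
    Op("replace", "/properties/eo:cloud_cover", 12),  # checked
    Op("remove", "/properties/obsolete_field"),       # checked
    Op("test", "/id", "item-1"),                      # not add/replace/remove -> skipped
]
print([o.path for o in operations if o.op in ["add", "replace", "remove"]])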
@@ -1238,8 +1242,6 @@
else:
script_operations.append(operation)

script = operations_to_script(script_operations, create_nest=create_nest)

try:
search_response = await self.client.search(
index=index_alias_by_collection_id(collection_id),
@@ -1252,13 +1254,18 @@
raise NotFoundError(
f"Item {item_id} does not exist inside Collection {collection_id}"
)
document_index = search_response["hits"]["hits"][0]["_index"]
await self.client.update(
index=document_index,
id=mk_item_id(item_id, collection_id),
script=script,
refresh=True,
)

if script_operations:
script = operations_to_script(
script_operations, create_nest=create_nest
)
document_index = search_response["hits"]["hits"][0]["_index"]
await self.client.update(
index=document_index,
id=mk_item_id(item_id, collection_id),
script=script,
refresh=True,
)
except ESNotFoundError:
raise NotFoundError(
f"Item {item_id} does not exist inside Collection {collection_id}"
@@ -1271,34 +1278,16 @@
item = await self.get_one_item(collection_id, item_id)

if new_collection_id:
await self.client.reindex(
body={
"dest": {
"index": f"{ITEMS_INDEX_PREFIX}{new_collection_id}"
}, # # noqa
"source": {
"index": f"{ITEMS_INDEX_PREFIX}{collection_id}",
"query": {"term": {"id": {"value": item_id}}},
},
"script": {
"lang": "painless",
"source": (
f"""ctx._id = ctx._id.replace('{collection_id}', '{new_collection_id}');""" # noqa
f"""ctx._source.collection = '{new_collection_id}';""" # noqa
),
},
},
wait_for_completion=True,
refresh=True,
)
item["collection"] = new_collection_id
item = await self.async_prep_create_item(item=item, base_url=base_url)
await self.create_item(item=item, refresh=True)

await self.delete_item(
item_id=item_id,
collection_id=collection_id,
refresh=refresh,
)

item["collection"] = new_collection_id
collection_id = new_collection_id

if new_item_id:
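Moving an item between collections no longer uses a painless `reindex`; the item is re-created under the target collection and the source document is deleted, presumably so the async index inserter can choose the correct datetime-partitioned target index. A condensed sketch of the new flow (`db` stands in for the database-logic instance):

async def move_item(db, item, old_collection_id, new_collection_id, base_url, refresh=True):
    # Re-create under the target collection, then remove the source doc.
    item["collection"] = new_collection_id
    item = await db.async_prep_create_item(item=item, base_url=base_url)
    await db.create_item(item=item, refresh=True)
    await db.delete_item(
        item_id=item["id"], collection_id=old_collection_id, refresh=refresh
    )
    return item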
@@ -1684,6 +1673,7 @@ async def delete_collection(self, collection_id: str, **kwargs: Any):
index=COLLECTIONS_INDEX, id=collection_id, refresh=refresh
)
await delete_item_index(collection_id)
await self.async_index_inserter.refresh_cache()

async def bulk_async(
self,
54 changes: 23 additions & 31 deletions stac_fastapi/opensearch/stac_fastapi/opensearch/database_logic.py
@@ -811,7 +811,7 @@ async def execute_search(
token: Optional[str],
sort: Optional[Dict[str, Dict[str, str]]],
collection_ids: Optional[List[str]],
datetime_search: Dict[str, Optional[str]],
datetime_search: str,
ignore_unavailable: bool = True,
) -> Tuple[Iterable[Dict[str, Any]], Optional[int], Optional[str]]:
"""Execute a search query with limit and other optional parameters.
@@ -822,7 +822,7 @@ async def execute_search(
token (Optional[str]): The token used to return the next set of results.
sort (Optional[Dict[str, Dict[str, str]]]): Specifies how the results should be sorted.
collection_ids (Optional[List[str]]): The collection ids to search.
datetime_search (Dict[str, Optional[str]]): Datetime range used for index selection.
datetime_search (str): Datetime used for index selection.
ignore_unavailable (bool, optional): Whether to ignore unavailable collections. Defaults to True.

Returns:
@@ -918,7 +918,7 @@ async def aggregate(
geometry_geohash_grid_precision: int,
geometry_geotile_grid_precision: int,
datetime_frequency_interval: str,
datetime_search,
datetime_search: str,
ignore_unavailable: Optional[bool] = True,
):
"""Return aggregations of STAC Items."""
@@ -1219,6 +1219,10 @@ async def json_patch_item(
Returns:
patched item.
"""
for operation in operations:
if operation.op in ["add", "replace", "remove"]:
self.async_index_inserter.validate_datetime_field_update(operation.path)

new_item_id = None
new_collection_id = None
script_operations = []
@@ -1238,8 +1242,6 @@
else:
script_operations.append(operation)

script = operations_to_script(script_operations, create_nest=create_nest)

try:
search_response = await self.client.search(
index=index_alias_by_collection_id(collection_id),
@@ -1252,13 +1254,18 @@
raise NotFoundError(
f"Item {item_id} does not exist inside Collection {collection_id}"
)
document_index = search_response["hits"]["hits"][0]["_index"]
await self.client.update(
index=document_index,
id=mk_item_id(item_id, collection_id),
body={"script": script},
refresh=True,
)

if script_operations:
script = operations_to_script(
script_operations, create_nest=create_nest
)
document_index = search_response["hits"]["hits"][0]["_index"]
await self.client.update(
index=document_index,
id=mk_item_id(item_id, collection_id),
body={"script": script},
refresh=True,
)
except exceptions.NotFoundError:
raise NotFoundError(
f"Item {item_id} does not exist inside Collection {collection_id}"
@@ -1271,32 +1278,16 @@
item = await self.get_one_item(collection_id, item_id)

if new_collection_id:
await self.client.reindex(
body={
"dest": {"index": f"{ITEMS_INDEX_PREFIX}{new_collection_id}"},
"source": {
"index": f"{ITEMS_INDEX_PREFIX}{collection_id}",
"query": {"term": {"id": {"value": item_id}}},
},
"script": {
"lang": "painless",
"source": (
f"""ctx._id = ctx._id.replace('{collection_id}', '{new_collection_id}');"""
f"""ctx._source.collection = '{new_collection_id}';"""
),
},
},
wait_for_completion=True,
refresh=True,
)
item["collection"] = new_collection_id
item = await self.async_prep_create_item(item=item, base_url=base_url)
await self.create_item(item=item, refresh=True)

await self.delete_item(
item_id=item_id,
collection_id=collection_id,
refresh=refresh,
)

item["collection"] = new_collection_id
collection_id = new_collection_id

if new_item_id:
@@ -1657,6 +1648,7 @@ async def delete_collection(self, collection_id: str, **kwargs: Any):
)
# Delete the item index for the collection
await delete_item_index(collection_id)
await self.async_index_inserter.refresh_cache()

async def bulk_async(
self,
@@ -316,8 +316,6 @@ async def aggregate(
search, datetime_search = self.database.apply_datetime_filter(
search=search, datetime=aggregate_request.datetime
)
else:
datetime_search = {"gte": None, "lte": None}

if aggregate_request.bbox:
bbox = aggregate_request.bbox
@@ -416,7 +414,7 @@ async def aggregate(
geometry_geohash_grid_precision,
geometry_geotile_grid_precision,
datetime_frequency_interval,
datetime_search,
aggregate_request.datetime,
)
except Exception as error:
if not isinstance(error, IndexError):
@@ -30,7 +30,13 @@
"""

# Re-export all functions for backward compatibility
from .datetime import extract_date, extract_first_date_from_index, return_date
from .datetime import (
extract_date,
extract_first_date_from_index,
extract_last_date_from_index,
is_index_closed,
return_date,
)
from .document import mk_actions, mk_item_id
from .index import (
create_index_templates_shared,
@@ -77,4 +83,6 @@
"return_date",
"extract_date",
"extract_first_date_from_index",
"extract_last_date_from_index",
"is_index_closed",
]
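The widened re-export list keeps backward compatibility while surfacing the new datetime-index helpers. A usage sketch; the import path is an assumption, since this file's name is not shown in the view:

# Hypothetical import path; the helper names come from the __all__ above.
from stac_fastapi.sfeos_helpers.database import (
    extract_first_date_from_index,
    extract_last_date_from_index,
    is_index_closed,
)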