diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml
index 154b5f26..bf9ad019 100644
--- a/.github/workflows/cicd.yml
+++ b/.github/workflows/cicd.yml
@@ -46,6 +46,22 @@ jobs:
           ES_JAVA_OPTS: -Xms512m -Xmx1g
         ports:
           - 9400:9400
+
+      opensearch_2_11:
+        image: opensearchproject/opensearch:2.11.1
+        env:
+          cluster.name: stac-cluster
+          node.name: os01
+          network.host: 0.0.0.0
+          transport.host: 0.0.0.0
+          discovery.type: single-node
+          http.port: 9202
+          http.cors.enabled: true
+          plugins.security.disabled: true
+          plugins.security.ssl.http.enabled: true
+          OPENSEARCH_JAVA_OPTS: -Xms512m -Xmx512m
+        ports:
+          - 9202:9202
     strategy:
       matrix:
         python-version: [ "3.8", "3.9", "3.10", "3.11"]
@@ -94,4 +110,15 @@ jobs:
           ES_PORT: 9400
           ES_HOST: 172.17.0.1
           ES_USE_SSL: false
-          ES_VERIFY_CERTS: false
\ No newline at end of file
+          ES_VERIFY_CERTS: false
+
+      - name: Run test suite against OpenSearch 2.11.1
+        run: |
+          cd stac_fastapi/elasticsearch && pipenv run pytest -svvv
+        env:
+          ENVIRONMENT: testing
+          ES_PORT: 9202
+          ES_HOST: 172.17.0.1
+          ES_USE_SSL: false
+          ES_VERIFY_CERTS: false
+          BACKEND: opensearch
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index b552a17f..e76fecef 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -9,6 +9,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
 
 ### Added
 
+- OpenSearch 2.11.1 support [#187](https://github.com/stac-utils/stac-fastapi-elasticsearch/pull/187)
 - Advanced comparison (LIKE, IN, BETWEEN) operators to the Filter extension [#178](https://github.com/stac-utils/stac-fastapi-elasticsearch/pull/178)
 
 ### Changed
diff --git a/Makefile b/Makefile
index ce2609bc..068c86d1 100644
--- a/Makefile
+++ b/Makefile
@@ -1,20 +1,32 @@
 #!make
 APP_HOST ?= 0.0.0.0
-APP_PORT ?= 8080
+ES_APP_PORT ?= 8080
 EXTERNAL_APP_PORT ?= ${APP_PORT}
-APP_PORT ?= 8080
+ES_APP_PORT ?= 8080
 ES_HOST ?= docker.for.mac.localhost
 ES_PORT ?= 9200
 
+OS_APP_PORT ?= 8082
+OS_HOST ?= docker.for.mac.localhost
+OS_PORT ?= 9202
+
 run_es = docker-compose \
 	run \
-	-p ${EXTERNAL_APP_PORT}:${APP_PORT} \
+	-p ${EXTERNAL_APP_PORT}:${ES_APP_PORT} \
 	-e PY_IGNORE_IMPORTMISMATCH=1 \
 	-e APP_HOST=${APP_HOST} \
-	-e APP_PORT=${APP_PORT} \
+	-e APP_PORT=${ES_APP_PORT} \
 	app-elasticsearch
 
+run_os = docker-compose \
+	run \
+	-p ${EXTERNAL_APP_PORT}:${OS_APP_PORT} \
+	-e PY_IGNORE_IMPORTMISMATCH=1 \
+	-e APP_HOST=${APP_HOST} \
+	-e APP_PORT=${OS_APP_PORT} \
+	app-opensearch
+
 .PHONY: image-deploy
 image-deploy:
 	docker build -f Dockerfile.deploy -t stac-fastapi-elasticsearch:latest .
@@ -40,15 +52,32 @@ docker-run: image-dev
 docker-shell:
 	$(run_es) /bin/bash
 
+.PHONY: test-elasticsearch
+test-elasticsearch:
+	-$(run_es) /bin/bash -c 'export && ./scripts/wait-for-it-es.sh elasticsearch:9200 && cd /app/stac_fastapi/elasticsearch/tests/ && pytest'
+	docker-compose down
+
+.PHONY: test-opensearch
+test-opensearch:
+	-$(run_os) /bin/bash -c 'export && ./scripts/wait-for-it-es.sh opensearch:9202 && cd /app/stac_fastapi/elasticsearch/tests/ && pytest'
+	docker-compose down
+
 .PHONY: test
 test:
 	-$(run_es) /bin/bash -c 'export && ./scripts/wait-for-it-es.sh elasticsearch:9200 && cd /app/stac_fastapi/elasticsearch/tests/ && pytest'
 	docker-compose down
-.PHONY: run-database
-run-database:
+	-$(run_os) /bin/bash -c 'export && ./scripts/wait-for-it-es.sh opensearch:9202 && cd /app/stac_fastapi/elasticsearch/tests/ && pytest'
+	docker-compose down
+
+.PHONY: run-database-es
+run-database-es:
 	docker-compose run --rm elasticsearch
 
+.PHONY: run-database-os
+run-database-os:
+	docker-compose run --rm opensearch
+
 .PHONY: pybase-install
 pybase-install:
 	pip install wheel && \
diff --git a/README.md b/README.md
index 5b4b7685..9ae86aae 100644
--- a/README.md
+++ b/README.md
@@ -42,13 +42,15 @@ docker-compose build
 docker-compose up
 ```
 
-By default, docker-compose uses Elasticsearch 8.x. However, most recent 7.x versions should also work.
+By default, docker-compose uses Elasticsearch 8.x and OpenSearch 2.11.1.
 
 If you wish to use a different version, put the following in a file named `.env` in the same directory you run docker-compose from:
 
 ```shell
 ELASTICSEARCH_VERSION=7.17.1
+OPENSEARCH_VERSION=2.11.0
 ```
+The most recent Elasticsearch 7.x versions should also work. See the [opensearch-py docs](https://github.com/opensearch-project/opensearch-py/blob/main/COMPATIBILITY.md) for compatibility information.
 
 To create a new Collection:
 
@@ -78,7 +80,18 @@ curl -X "GET" "http://localhost:8080/collections?limit=1&token=example_token"
 ```shell
 make test
 ```
-
+Test against OpenSearch only
+
+```shell
+make test-opensearch
+```
+
+Test against Elasticsearch only
+
+```shell
+make test-elasticsearch
+```
+
 ## Ingest sample data
 
 ```shell
diff --git a/data_loader/data_loader.py b/data_loader/data_loader.py
index b8d9f93e..c438811d 100644
--- a/data_loader/data_loader.py
+++ b/data_loader/data_loader.py
@@ -1,12 +1,24 @@
 """Database ingestion script."""
 import json
 import os
+import sys
 
 import click
 import requests
 
+if len(sys.argv) != 2:
+    print("Usage: python data_loader.py <backend>")
+    sys.exit(1)
+
 DATA_DIR = os.path.join(os.path.dirname(__file__), "setup_data/")
-STAC_API_BASE_URL = "http://localhost:8080"
+
+backend = sys.argv[1].lower()
+if backend == "opensearch":
+    STAC_API_BASE_URL = "http://localhost:8082"
+elif backend == "elasticsearch":
+    STAC_API_BASE_URL = "http://localhost:8080"
+else:
+    print("Invalid backend tag. Enter either 'opensearch' or 'elasticsearch'.")
 
 
 def load_data(filename):
diff --git a/docker-compose.yml b/docker-compose.yml
index 03698654..2010cd08 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -3,7 +3,7 @@ version: '3.9'
 services:
   app-elasticsearch:
     container_name: stac-fastapi-es
-    image: stac-utils/stac-fastapi
+    image: stac-utils/stac-fastapi-es
     restart: always
     build:
       context: .
@@ -18,6 +18,7 @@ services:
       - ES_PORT=9200
       - ES_USE_SSL=false
       - ES_VERIFY_CERTS=false
+      - BACKEND=elasticsearch
     ports:
       - "8080:8080"
     volumes:
@@ -29,6 +30,35 @@ services:
     command:
       bash -c "./scripts/wait-for-it-es.sh es-container:9200 && python -m stac_fastapi.elasticsearch.app"
 
+  app-opensearch:
+    container_name: stac-fastapi-os
+    image: stac-utils/stac-fastapi-os
+    restart: always
+    build:
+      context: .
+      dockerfile: Dockerfile.dev
+    environment:
+      - APP_HOST=0.0.0.0
+      - APP_PORT=8082
+      - RELOAD=true
+      - ENVIRONMENT=local
+      - WEB_CONCURRENCY=10
+      - ES_HOST=172.17.0.1
+      - ES_PORT=9202
+      - ES_USE_SSL=false
+      - ES_VERIFY_CERTS=false
+      - BACKEND=opensearch
+    ports:
+      - "8082:8082"
+    volumes:
+      - ./stac_fastapi:/app/stac_fastapi
+      - ./scripts:/app/scripts
+      - ./osdata:/usr/share/opensearch/data
+    depends_on:
+      - opensearch
+    command:
+      bash -c "./scripts/wait-for-it-es.sh os-container:9202 && python -m stac_fastapi.elasticsearch.app"
+
   elasticsearch:
     container_name: es-container
     image: docker.elastic.co/elasticsearch/elasticsearch:${ELASTICSEARCH_VERSION:-8.11.0}
@@ -40,3 +70,16 @@ services:
       - ./elasticsearch/snapshots:/usr/share/elasticsearch/snapshots
     ports:
       - "9200:9200"
+
+  opensearch:
+    container_name: os-container
+    image: opensearchproject/opensearch:${OPENSEARCH_VERSION:-2.11.1}
+    environment:
+      - discovery.type=single-node
+      - plugins.security.disabled=true
+      - OPENSEARCH_JAVA_OPTS=-Xms512m -Xmx512m
+    volumes:
+      - ./opensearch/config/opensearch.yml:/usr/share/opensearch/config/opensearch.yml
+      - ./opensearch/snapshots:/usr/share/opensearch/snapshots
+    ports:
+      - "9202:9202"
diff --git a/opensearch/config/opensearch.yml b/opensearch/config/opensearch.yml
new file mode 100644
index 00000000..5e44b259
--- /dev/null
+++ b/opensearch/config/opensearch.yml
@@ -0,0 +1,19 @@
+## Cluster Settings
+cluster.name: stac-cluster
+node.name: os01
+network.host: 0.0.0.0
+transport.host: 0.0.0.0
+discovery.type: single-node
+http.port: 9202
+http.cors.enabled: true
+http.cors.allow-headers: X-Requested-With,Content-Type,Content-Length,Accept,Authorization
+
+path:
+  repo:
+    - /usr/share/opensearch/snapshots
+
+# Security
+plugins.security.disabled: true
+plugins.security.ssl.http.enabled: true
+
+node.max_local_storage_nodes: 3
diff --git a/stac_fastapi/core/stac_fastapi/core/core.py b/stac_fastapi/core/stac_fastapi/core/core.py
index 4c503883..643779b3 100644
--- a/stac_fastapi/core/stac_fastapi/core/core.py
+++ b/stac_fastapi/core/stac_fastapi/core/core.py
@@ -218,6 +218,7 @@ async def all_collections(self, **kwargs) -> Collections:
         next_link = None
         if len(hits) == limit:
             last_hit = hits[-1]
+            logger.info(last_hit)
             next_search_after = last_hit["sort"]
             next_token = urlsafe_b64encode(
                 ",".join(map(str, next_search_after)).encode()
@@ -848,6 +849,8 @@ def __attrs_post_init__(self):
         """Create es engine."""
         # settings = BaseSettings()
         self.client = self.settings.create_client
+        # settings = SearchSettings()
+        # self.client = settings.create_client
 
     def preprocess_item(
         self, item: stac_types.Item, base_url, method: BulkTransactionMethod
diff --git a/stac_fastapi/core/stac_fastapi/core/utilities.py b/stac_fastapi/core/stac_fastapi/core/utilities.py
new file mode 100644
index 00000000..b5dac390
--- /dev/null
+++ b/stac_fastapi/core/stac_fastapi/core/utilities.py
@@ -0,0 +1,21 @@
+"""Module for geospatial processing functions.
+
+This module contains functions for transforming geospatial coordinates,
+such as converting bounding boxes to polygon representations.
+""" +from typing import List + + +def bbox2polygon(b0: float, b1: float, b2: float, b3: float) -> List[List[List[float]]]: + """Transform a bounding box represented by its four coordinates `b0`, `b1`, `b2`, and `b3` into a polygon. + + Args: + b0 (float): The x-coordinate of the lower-left corner of the bounding box. + b1 (float): The y-coordinate of the lower-left corner of the bounding box. + b2 (float): The x-coordinate of the upper-right corner of the bounding box. + b3 (float): The y-coordinate of the upper-right corner of the bounding box. + + Returns: + List[List[List[float]]]: A polygon represented as a list of lists of coordinates. + """ + return [[[b0, b1], [b2, b1], [b2, b3], [b0, b3], [b0, b1]]] diff --git a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/app.py b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/app.py index c1bc2d3d..75d60684 100644 --- a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/app.py +++ b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/app.py @@ -1,4 +1,5 @@ """FastAPI application.""" + from stac_fastapi.api.app import StacApi from stac_fastapi.api.models import create_get_request_model, create_post_request_model from stac_fastapi.core.core import ( diff --git a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/session.py b/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/session.py deleted file mode 100644 index d5a7aa3c..00000000 --- a/stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/session.py +++ /dev/null @@ -1,25 +0,0 @@ -"""database session management.""" -import logging - -import attr - -logger = logging.getLogger(__name__) - - -@attr.s -class Session: - """Database session management.""" - - @classmethod - def create_from_env(cls): - """Create from environment.""" - ... - - @classmethod - def create_from_settings(cls, settings): - """Create a Session object from settings.""" - ... - - def __attrs_post_init__(self): - """Post init handler.""" - ... 
diff --git a/stac_fastapi/opensearch/setup.py b/stac_fastapi/opensearch/setup.py
index 4046a1c2..07c143df 100644
--- a/stac_fastapi/opensearch/setup.py
+++ b/stac_fastapi/opensearch/setup.py
@@ -1,4 +1,4 @@
-"""stac_fastapi: elasticsearch module."""
+"""stac_fastapi: opensearch module."""
 
 from setuptools import find_namespace_packages, setup
 
@@ -13,34 +13,34 @@
     "stac-fastapi.types==2.4.9",
     "stac-fastapi.api==2.4.9",
     "stac-fastapi.extensions==2.4.9",
-    # "elasticsearch[async]==8.11.0",
-    # "elasticsearch-dsl==8.11.0",
+    "opensearch-py==2.4.2",
+    "opensearch-py[async]==2.4.2",
     "pystac[validation]",
-    # "uvicorn",
+    "uvicorn",
     "orjson",
    "overrides",
-    # "starlette",
+    "starlette",
     "geojson-pydantic",
     "pygeofilter==0.2.1",
 ]
 
-# extra_reqs = {
-#     "dev": [
-#         "pytest",
-#         "pytest-cov",
-#         "pytest-asyncio",
-#         "pre-commit",
-#         "requests",
-#         "ciso8601",
-#         "httpx",
-#     ],
-#     "docs": ["mkdocs", "mkdocs-material", "pdocs"],
-#     "server": ["uvicorn[standard]==0.19.0"],
-# }
+extra_reqs = {
+    "dev": [
+        "pytest",
+        "pytest-cov",
+        "pytest-asyncio",
+        "pre-commit",
+        "requests",
+        "ciso8601",
+        "httpx",
+    ],
+    "docs": ["mkdocs", "mkdocs-material", "pdocs"],
+    "server": ["uvicorn[standard]==0.19.0"],
+}
 
 setup(
-    name="stac-fastapi.core",
-    description="Core library for the Elasticsearch and Opensearch stac-fastapi backends.",
+    name="stac-fastapi.opensearch",
+    description="Opensearch stac-fastapi backend.",
     long_description=desc,
     long_description_content_type="text/markdown",
     python_requires=">=3.8",
diff --git a/stac_fastapi/opensearch/stac_fastapi/opensearch/__init__.py b/stac_fastapi/opensearch/stac_fastapi/opensearch/__init__.py
new file mode 100644
index 00000000..342b8919
--- /dev/null
+++ b/stac_fastapi/opensearch/stac_fastapi/opensearch/__init__.py
@@ -0,0 +1 @@
+"""opensearch submodule."""
diff --git a/stac_fastapi/opensearch/stac_fastapi/opensearch/app.py b/stac_fastapi/opensearch/stac_fastapi/opensearch/app.py
new file mode 100644
index 00000000..75d60684
--- /dev/null
+++ b/stac_fastapi/opensearch/stac_fastapi/opensearch/app.py
@@ -0,0 +1,109 @@
+"""FastAPI application."""
+
+from stac_fastapi.api.app import StacApi
+from stac_fastapi.api.models import create_get_request_model, create_post_request_model
+from stac_fastapi.core.core import (
+    BulkTransactionsClient,
+    CoreClient,
+    EsAsyncBaseFiltersClient,
+    TransactionsClient,
+)
+from stac_fastapi.core.extensions import QueryExtension
+from stac_fastapi.elasticsearch.config import ElasticsearchSettings
+from stac_fastapi.elasticsearch.database_logic import (
+    DatabaseLogic,
+    create_collection_index,
+)
+from stac_fastapi.elasticsearch.session import Session
+from stac_fastapi.extensions.core import (
+    ContextExtension,
+    FieldsExtension,
+    FilterExtension,
+    SortExtension,
+    TokenPaginationExtension,
+    TransactionExtension,
+)
+from stac_fastapi.extensions.third_party import BulkTransactionExtension
+
+settings = ElasticsearchSettings()
+session = Session.create_from_settings(settings)
+
+filter_extension = FilterExtension(client=EsAsyncBaseFiltersClient())
+filter_extension.conformance_classes.append(
+    "http://www.opengis.net/spec/cql2/1.0/conf/advanced-comparison-operators"
+)
+
+database_logic = DatabaseLogic()
+
+extensions = [
+    TransactionExtension(
+        client=TransactionsClient(
+            database=database_logic, session=session, settings=settings
+        ),
+        settings=settings,
+    ),
+    BulkTransactionExtension(
+        client=BulkTransactionsClient(
+            database=database_logic,
+            session=session,
+            settings=settings,
+        )
+    ),
+    FieldsExtension(),
+    QueryExtension(),
+    SortExtension(),
+    TokenPaginationExtension(),
+    ContextExtension(),
+    filter_extension,
+]
+
+post_request_model = create_post_request_model(extensions)
+
+api = StacApi(
+    settings=settings,
+    extensions=extensions,
+    client=CoreClient(
+        database=database_logic, session=session, post_request_model=post_request_model
+    ),
+    search_get_request_model=create_get_request_model(extensions),
+    search_post_request_model=post_request_model,
+)
+app = api.app
+
+
+@app.on_event("startup")
+async def _startup_event() -> None:
+    await create_collection_index()
+
+
+def run() -> None:
+    """Run app from command line using uvicorn if available."""
+    try:
+        import uvicorn
+
+        uvicorn.run(
+            "stac_fastapi.elasticsearch.app:app",
+            host=settings.app_host,
+            port=settings.app_port,
+            log_level="info",
+            reload=settings.reload,
+        )
+    except ImportError:
+        raise RuntimeError("Uvicorn must be installed in order to use command")
+
+
+if __name__ == "__main__":
+    run()
+
+
+def create_handler(app):
+    """Create a handler to use with AWS Lambda if mangum available."""
+    try:
+        from mangum import Mangum
+
+        return Mangum(app)
+    except ImportError:
+        return None
+
+
+handler = create_handler(app)
diff --git a/stac_fastapi/opensearch/stac_fastapi/opensearch/config.py b/stac_fastapi/opensearch/stac_fastapi/opensearch/config.py
new file mode 100644
index 00000000..643b81ce
--- /dev/null
+++ b/stac_fastapi/opensearch/stac_fastapi/opensearch/config.py
@@ -0,0 +1,72 @@
+"""API configuration."""
+import os
+import ssl
+from typing import Any, Dict, Set
+
+from opensearchpy import AsyncOpenSearch, OpenSearch
+
+from stac_fastapi.types.config import ApiSettings
+
+
+def _es_config() -> Dict[str, Any]:
+    # Determine the scheme (http or https)
+    use_ssl = os.getenv("ES_USE_SSL", "true").lower() == "true"
+    scheme = "https" if use_ssl else "http"
+
+    # Configure the hosts parameter with the correct scheme
+    hosts = [f"{scheme}://{os.getenv('ES_HOST')}:{os.getenv('ES_PORT')}"]
+
+    # Initialize the configuration dictionary
+    config = {
+        "hosts": hosts,
+        "headers": {"accept": "application/json", "Content-Type": "application/json"},
+    }
+
+    # Explicitly exclude SSL settings when not using SSL
+    if not use_ssl:
+        return config
+
+    # Include SSL settings if using https
+    config["ssl_version"] = ssl.TLSVersion.TLSv1_3  # type: ignore
+    config["verify_certs"] = os.getenv("ES_VERIFY_CERTS", "true").lower() != "false"  # type: ignore
+
+    # Include CA Certificates if verifying certs
+    if config["verify_certs"]:
+        config["ca_certs"] = os.getenv(
+            "CURL_CA_BUNDLE", "/etc/ssl/certs/ca-certificates.crt"
+        )
+
+    # Handle authentication
+    if (u := os.getenv("ES_USER")) and (p := os.getenv("ES_PASS")):
+        config["http_auth"] = (u, p)
+
+    return config
+
+
+_forbidden_fields: Set[str] = {"type"}
+
+
+class SearchSettings(ApiSettings):
+    """API settings."""
+
+    # Fields which are defined by STAC but not included in the database model
+    forbidden_fields: Set[str] = _forbidden_fields
+    indexed_fields: Set[str] = {"datetime"}
+
+    @property
+    def create_client(self):
+        """Create opensearch client."""
+        return OpenSearch(**_es_config())
+
+
+class AsyncSearchSettings(ApiSettings):
+    """API settings."""
+
+    # Fields which are defined by STAC but not included in the database model
+    forbidden_fields: Set[str] = _forbidden_fields
+    indexed_fields: Set[str] = {"datetime"}
+
+    @property
+    def create_client(self):
+        """Create async opensearch client."""
+        return AsyncOpenSearch(**_es_config())
diff --git a/stac_fastapi/opensearch/stac_fastapi/opensearch/database_logic.py b/stac_fastapi/opensearch/stac_fastapi/opensearch/database_logic.py
index 933312f2..3f8ec4a3 100644
--- a/stac_fastapi/opensearch/stac_fastapi/opensearch/database_logic.py
+++ b/stac_fastapi/opensearch/stac_fastapi/opensearch/database_logic.py
@@ -6,15 +6,18 @@
 from typing import Any, Dict, Iterable, List, Optional, Protocol, Tuple, Type, Union
 
 import attr
 
-from elasticsearch_dsl import Q, Search
-
-from elasticsearch import exceptions, helpers  # type: ignore
-from stac_fastapi.core.extensions import filter
-from stac_fastapi.core.serializers import CollectionSerializer, ItemSerializer
-from stac_fastapi.elasticsearch.config import AsyncElasticsearchSettings
-from stac_fastapi.elasticsearch.config import (
-    ElasticsearchSettings as SyncElasticsearchSettings,
+from opensearchpy import exceptions, helpers
+from opensearchpy.exceptions import TransportError
+from opensearchpy.helpers.query import Q
+from opensearchpy.helpers.search import Search
+
+from stac_fastapi.core import serializers
+from stac_fastapi.elasticsearch.config.config_opensearch import AsyncSearchSettings
+from stac_fastapi.elasticsearch.config.config_opensearch import (
+    SearchSettings as SyncSearchSettings,
 )
+from stac_fastapi.elasticsearch.extensions import filter
+from stac_fastapi.elasticsearch.utilities import bbox2polygon
 from stac_fastapi.types.errors import ConflictError, NotFoundError
 from stac_fastapi.types.stac import Collection, Item
@@ -177,13 +180,23 @@ async def create_collection_index() -> None:
         None
 
     """
-    client = AsyncElasticsearchSettings().create_client
+    client = AsyncSearchSettings().create_client
+
+    search_body = {
+        "mappings": ES_COLLECTIONS_MAPPINGS,
+        "aliases": {COLLECTIONS_INDEX: {}},
+    }
+
+    index = f"{COLLECTIONS_INDEX}-000001"
+
+    try:
+        await client.indices.create(index=index, body=search_body)
+    except TransportError as e:
+        if e.status_code == 400:
+            pass  # Ignore 400 status codes
+        else:
+            raise e
 
-    await client.options(ignore_status=400).indices.create(
-        index=f"{COLLECTIONS_INDEX}-000001",
-        aliases={COLLECTIONS_INDEX: {}},
-        mappings=ES_COLLECTIONS_MAPPINGS,
-    )
     await client.close()
 
 
@@ -198,15 +211,22 @@ async def create_item_index(collection_id: str):
         None
 
     """
-    client = AsyncElasticsearchSettings().create_client
+    client = AsyncSearchSettings().create_client
     index_name = index_by_collection_id(collection_id)
 
+    search_body = {
+        "aliases": {index_name: {}},
+        "mappings": ES_ITEMS_MAPPINGS,
+        "settings": ES_ITEMS_SETTINGS,
+    }
+
+    try:
+        await client.indices.create(index=f"{index_name}-000001", body=search_body)
+    except TransportError as e:
+        if e.status_code == 400:
+            pass  # Ignore 400 status codes
+        else:
+            raise e
 
-    await client.options(ignore_status=400).indices.create(
-        index=f"{index_by_collection_id(collection_id)}-000001",
-        aliases={index_name: {}},
-        mappings=ES_ITEMS_MAPPINGS,
-        settings=ES_ITEMS_SETTINGS,
-    )
     await client.close()
 
 
@@ -216,7 +236,7 @@ async def delete_item_index(collection_id: str):
     Args:
         collection_id (str): The ID of the collection whose items index will be deleted.
""" - client = AsyncElasticsearchSettings().create_client + client = AsyncSearchSettings().create_client name = index_by_collection_id(collection_id) resolved = await client.indices.resolve_index(name=name) @@ -229,21 +249,6 @@ async def delete_item_index(collection_id: str): await client.close() -def bbox2polygon(b0: float, b1: float, b2: float, b3: float) -> List[List[List[float]]]: - """Transform a bounding box represented by its four coordinates `b0`, `b1`, `b2`, and `b3` into a polygon. - - Args: - b0 (float): The x-coordinate of the lower-left corner of the bounding box. - b1 (float): The y-coordinate of the lower-left corner of the bounding box. - b2 (float): The x-coordinate of the upper-right corner of the bounding box. - b3 (float): The y-coordinate of the upper-right corner of the bounding box. - - Returns: - List[List[List[float]]]: A polygon represented as a list of lists of coordinates. - """ - return [[[b0, b1], [b2, b1], [b2, b3], [b0, b3], [b0, b1]]] - - def mk_item_id(item_id: str, collection_id: str): """Create the document id for an Item in Elasticsearch. @@ -293,18 +298,22 @@ class Geometry(Protocol): # noqa class DatabaseLogic: """Database logic.""" - client = AsyncElasticsearchSettings().create_client - sync_client = SyncElasticsearchSettings().create_client + client = AsyncSearchSettings().create_client + sync_client = SyncSearchSettings().create_client - item_serializer: Type[ItemSerializer] = attr.ib(default=ItemSerializer) - collection_serializer: Type[CollectionSerializer] = attr.ib( - default=CollectionSerializer + item_serializer: Type[serializers.ItemSerializer] = attr.ib( + default=serializers.ItemSerializer + ) + collection_serializer: Type[serializers.CollectionSerializer] = attr.ib( + default=serializers.CollectionSerializer ) """CORE LOGIC""" async def get_all_collections( - self, token: Optional[str], limit: int + self, + token: Optional[str], + limit: int, ) -> Iterable[Dict[str, Any]]: """Retrieve a list of all collections from the database. @@ -320,14 +329,15 @@ async def get_all_collections( with the `COLLECTIONS_INDEX` as the target index and `size=limit` to retrieve records. The result is a generator of dictionaries containing the source data for each collection. """ - search_after = None + search_body: Dict[str, Any] = {} if token: search_after = urlsafe_b64decode(token.encode()).decode().split(",") + search_body["search_after"] = search_after + + search_body["sort"] = {"id": {"order": "asc"}} + collections = await self.client.search( - index=COLLECTIONS_INDEX, - search_after=search_after, - size=limit, - sort={"id": {"order": "asc"}}, + index=COLLECTIONS_INDEX, body=search_body, size=limit ) hits = collections["hits"]["hits"] return hits @@ -530,11 +540,14 @@ async def execute_search( Raises: NotFoundError: If the collections specified in `collection_ids` do not exist. 
""" - search_after = None + search_body: Dict[str, Any] = {} + query = search.query.to_dict() if search.query else None + if query: + search_body["query"] = query if token: search_after = urlsafe_b64decode(token.encode()).decode().split(",") - - query = search.query.to_dict() if search.query else None + search_body["search_after"] = search_after + search_body["sort"] = sort if sort else DEFAULT_SORT index_param = indices(collection_ids) @@ -542,9 +555,7 @@ async def execute_search( self.client.search( index=index_param, ignore_unavailable=ignore_unavailable, - query=query, - sort=sort or DEFAULT_SORT, - search_after=search_after, + body=search_body, size=limit, ) ) @@ -675,7 +686,7 @@ async def create_item(self, item: Item, refresh: bool = False): es_resp = await self.client.index( index=index_by_collection_id(collection_id), id=mk_item_id(item_id, collection_id), - document=item, + body=item, refresh=refresh, ) @@ -729,7 +740,7 @@ async def create_collection(self, collection: Collection, refresh: bool = False) await self.client.index( index=COLLECTIONS_INDEX, id=collection_id, - document=collection, + body=collection, refresh=refresh, ) diff --git a/stac_fastapi/opensearch/stac_fastapi/opensearch/version.py b/stac_fastapi/opensearch/stac_fastapi/opensearch/version.py new file mode 100644 index 00000000..1eeef171 --- /dev/null +++ b/stac_fastapi/opensearch/stac_fastapi/opensearch/version.py @@ -0,0 +1,2 @@ +"""library version.""" +__version__ = "1.0.0"