Skip to content

Commit

Permalink
Return existing links in pgstac (#282)
Browse files Browse the repository at this point in the history
* Add venv to ignore files

* Add run-joplin-pgstac make command

* Modify ingest logic to update if exists

* Add license to collections; test license link

* Fix bug where existing links were not returned

* Update CHANGES

* Use get_links instead of new _extended_links

Explain why we resolve the href for extra_links in get_links

* Test item extra_links with relative href
  • Loading branch information
lossyrob authored Nov 2, 2021
1 parent 8222b40 commit 31778fe
Show file tree
Hide file tree
Showing 12 changed files with 107 additions and 43 deletions.
2 changes: 2 additions & 0 deletions .dockerignore
Original file line number Diff line number Diff line change
Expand Up @@ -10,3 +10,5 @@ coverage.xml
*.log
.git
.envrc

venv
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -126,3 +126,6 @@ docs/api/*

# Direnv
.envrc

# Virtualenv
venv
2 changes: 2 additions & 0 deletions CHANGES.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,8 @@

### Fixed

* Links stored with Collections and Items (e.g. license links) are now returned with those STAC objects ([#282](https://github.com/stac-utils/stac-fastapi/pull/282))

## [2.2.0]

### Added
Expand Down
4 changes: 4 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,10 @@ run-database:
run-joplin-sqlalchemy:
docker-compose run --rm loadjoplin-sqlalchemy

.PHONY: run-joplin-pgstac
run-joplin-pgstac:
docker-compose run --rm loadjoplin-pgstac

.PHONY: test
test: test-sqlalchemy test-pgstac

Expand Down
24 changes: 15 additions & 9 deletions scripts/ingest_joplin.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,27 +15,33 @@
raise Exception("You must include full path/port to stac instance")


def post_or_put(url: str, data: dict):
    """POST ``data`` to ``url``; fall back to PUT when it already exists.

    A 409 response to the POST indicates the resource exists, so the
    payload is retried as an update via PUT.  A 404 on that PUT may occur
    for an unchanged resource, so it is tolerated; any other error status
    raises.

    Args:
        url: Target endpoint of the STAC API.
        data: JSON-serializable request body.

    Raises:
        requests.HTTPError: For any failure status other than the
            tolerated 409-on-POST / 404-on-PUT cases.
    """
    response = requests.post(url, json=data)
    if response.status_code == 409:
        # Exists, so update instead.
        response = requests.put(url, json=data)
        # An unchanged resource may surface as a 404 — tolerate it.
        if response.status_code != 404:
            response.raise_for_status()
    else:
        response.raise_for_status()


def ingest_joplin_data(app_host: str = app_host, data_dir: Path = joplindata):
    """Load the joplin test collection and its items into a STAC API.

    Creates (or updates, if already present) the collection described by
    ``collection.json``, then every feature in ``index.geojson``.

    Args:
        app_host: Base URL of the running stac-fastapi instance.
        data_dir: Directory containing ``collection.json`` and
            ``index.geojson``.
    """
    with open(data_dir / "collection.json") as f:
        collection = json.load(f)

    post_or_put(urljoin(app_host, "/collections"), collection)

    with open(data_dir / "index.geojson") as f:
        index = json.load(f)

    for feat in index["features"]:
        # NOTE(review): extensions are stripped before ingest — presumably
        # they are not supported by the target API version; confirm.
        del feat["stac_extensions"]
        post_or_put(urljoin(app_host, f"collections/{collection['id']}/items"), feat)


if __name__ == "__main__":
Expand Down
17 changes: 11 additions & 6 deletions stac_fastapi/pgstac/stac_fastapi/pgstac/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,8 +47,10 @@ async def all_collections(self, **kwargs) -> Collections:
coll = Collection(**c)
coll["links"] = await CollectionLinks(
collection_id=coll["id"], request=request
).get_links()
).get_links(extra_links=coll.get("links"))

linked_collections.append(coll)

links = [
{
"rel": Relations.root.value,
Expand Down Expand Up @@ -94,8 +96,11 @@ async def get_collection(self, id: str, **kwargs) -> Collection:
collection = await conn.fetchval(q, *p)
if collection is None:
raise NotFoundError(f"Collection {id} does not exist.")
links = await CollectionLinks(collection_id=id, request=request).get_links()
collection["links"] = links

collection["links"] = await CollectionLinks(
collection_id=id, request=request
).get_links(extra_links=collection.get("links"))

return Collection(**collection)

async def _search_base(
Expand Down Expand Up @@ -147,12 +152,12 @@ async def _search_base(
# TODO: feature.collection is not always included
# This code fails if it's left outside of the fields expression
# I've fields extension updated test cases to always include feature.collection
links = await ItemLinks(
feature["links"] = await ItemLinks(
collection_id=feature["collection"],
item_id=feature["id"],
request=request,
).get_links()
feature["links"] = links
).get_links(extra_links=feature.get("links"))

exclude = search_request.fields.exclude
if exclude and len(exclude) == 0:
exclude = None
Expand Down
25 changes: 18 additions & 7 deletions stac_fastapi/pgstac/stac_fastapi/pgstac/models/links.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ def link_root(self) -> Dict:
rel=Relations.root.value, type=MimeTypes.json.value, href=self.base_url
)

def create_links(self) -> List[Dict]:
def create_links(self) -> List[Dict[str, Any]]:
"""Return all inferred links."""
links = []
for name in dir(self):
Expand All @@ -77,7 +77,7 @@ def create_links(self) -> List[Dict]:
return links

async def get_links(
self, extra_links: List[Dict[str, Any]] = []
self, extra_links: Optional[List[Dict[str, Any]]] = None
) -> List[Dict[str, Any]]:
"""
Generate all the links.
Expand All @@ -91,11 +91,22 @@ async def get_links(
# join passed in links with generated links
# and update relative paths
links = self.create_links()
if extra_links is not None and len(extra_links) >= 1:
for link in extra_links:
if link["rel"] not in INFERRED_LINK_RELS:
link["href"] = self.resolve(link["href"])
links.append(link)

if extra_links:
# For extra links passed in,
# add links modified with a resolved href.
# Drop any links that are dynamically
# determined by the server (e.g. self, parent, etc.)
# Resolving the href allows for relative paths
# to be stored in pgstac and for the hrefs in the
# links of response STAC objects to be resolved
# to the request url.
links += [
{**link, "href": self.resolve(link["href"])}
for link in extra_links
if link["rel"] not in INFERRED_LINK_RELS
]

return links


Expand Down
21 changes: 3 additions & 18 deletions stac_fastapi/pgstac/tests/data/test_collection.json
Original file line number Diff line number Diff line change
Expand Up @@ -100,24 +100,9 @@
},
"links": [
{
"href": "http://localhost:8081/collections/landsat-8-l1",
"rel": "self",
"type": "application/json"
},
{
"href": "http://localhost:8081/",
"rel": "parent",
"type": "application/json"
},
{
"href": "http://localhost:8081/collections/landsat-8-l1/items",
"rel": "item",
"type": "application/geo+json"
},
{
"href": "http://localhost:8081/",
"rel": "root",
"type": "application/json"
"rel": "license",
"href": "https://creativecommons.org/licenses/publicdomain/",
"title": "public domain"
}
],
"title": "Landsat 8 L1",
Expand Down
5 changes: 5 additions & 0 deletions stac_fastapi/pgstac/tests/data/test_item.json
Original file line number Diff line number Diff line change
Expand Up @@ -500,6 +500,11 @@
"href": "http://localhost:8081/",
"rel": "root",
"type": "application/json"
},
{
"href": "preview.html",
"rel": "preview",
"type": "application/html"
}
]
}
11 changes: 11 additions & 0 deletions stac_fastapi/pgstac/tests/resources/test_collection.py
Original file line number Diff line number Diff line change
Expand Up @@ -164,3 +164,14 @@ async def test_returns_valid_links_in_collections(app_client, load_test_data):
for i in collection.to_dict()["links"]
if i not in single_coll_mocked_link.to_dict()["links"]
] == []


@pytest.mark.asyncio
async def test_returns_license_link(app_client, load_test_collection):
    """A license link stored with the collection appears in the response."""
    collection = load_test_collection

    response = await app_client.get(f"/collections/{collection.id}")
    assert response.status_code == 200

    rels = {link["rel"] for link in response.json()["links"]}
    assert "license" in rels
28 changes: 26 additions & 2 deletions stac_fastapi/pgstac/tests/resources/test_item.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,11 @@
import uuid
from datetime import datetime, timedelta
from typing import Callable
from urllib.parse import parse_qs, urlparse
from urllib.parse import parse_qs, urljoin, urlparse

import pystac
import pytest
from httpx import AsyncClient
from shapely.geometry import Polygon
from stac_pydantic import Collection, Item
from stac_pydantic.shared import DATETIME_RFC339
Expand Down Expand Up @@ -67,7 +68,7 @@ async def test_create_item(app_client, load_test_data: Callable, load_test_colle
in_json = load_test_data("test_item.json")
in_item = Item.parse_obj(in_json)
resp = await app_client.post(
"/collections/{coll.id}/items",
f"/collections/{coll.id}/items",
json=in_json,
)
assert resp.status_code == 200
Expand Down Expand Up @@ -1005,3 +1006,26 @@ async def test_search_bbox_errors(app_client):
params = {"bbox": "100.0,0.0,0.0,105.0"}
resp = await app_client.get("/search", params=params)
assert resp.status_code == 400


@pytest.mark.asyncio
async def test_preserves_extra_link(
    app_client: AsyncClient, load_test_data, load_test_collection
):
    """A non-inferred link stored on an item survives a round trip."""
    collection = load_test_collection
    item = load_test_data("test_item.json")
    expected_href = urljoin(str(app_client.base_url), "preview.html")

    create_resp = await app_client.post(
        f"/collections/{collection.id}/items", json=item
    )
    assert create_resp.status_code == 200

    fetch_resp = await app_client.get(
        f"/collections/{collection.id}/items/{item['id']}",
        params={"limit": 1},
    )
    assert fetch_resp.status_code == 200

    preview_links = [
        link for link in fetch_resp.json()["links"] if link["rel"] == "preview"
    ]
    assert preview_links
    assert preview_links[0]["href"] == expected_href
8 changes: 7 additions & 1 deletion stac_fastapi/testdata/joplin/collection.json
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,13 @@
"description": "This imagery was acquired by the NOAA Remote Sensing Division to support NOAA national security and emergency response requirements. In addition, it will be used for ongoing research efforts for testing and developing standards for airborne digital imagery. Individual images have been combined into a larger mosaic and tiled for distribution. The approximate ground sample distance (GSD) for each pixel is 35 cm (1.14 feet).",
"stac_version": "1.0.0",
"license": "public-domain",
"links": [],
"links": [
{
"rel": "license",
"href": "https://creativecommons.org/licenses/publicdomain/",
"title": "public domain"
}
],
"type": "collection",
"extent": {
"spatial": {
Expand Down

0 comments on commit 31778fe

Please sign in to comment.