Skip to content

Commit

Permalink
Merge pull request #152 from sat-utils/develop
Browse files Browse the repository at this point in the history
publish 0.2.3
  • Loading branch information
matthewhanson authored Jan 29, 2019
2 parents 72ac9f7 + 1e3f7c9 commit cb44931
Show file tree
Hide file tree
Showing 14 changed files with 169 additions and 113 deletions.
13 changes: 12 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,16 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.

## [Unreleased]

## [v0.2.3] - 2019-01-29

### Fixed
- Proper handling of bounding box passed as string

### Changed
- De-normalize Item properties to include all properties from collection
- Flattened Elasticsearch documents to simplify query logic
- Items returned will now include all 'Common' properties that are in the Items Collection

## [v0.2.2] - 2019-01-21

### Fixed
Expand Down Expand Up @@ -74,7 +84,8 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
- Refactor and improve splitting

[Unreleased]: https://github.com/sat-utils/sat-api/compare/master...develop
[v0.2.2]: https://github.com/sat-utils/sat-api/compare/v0.2.0...v0.2.2
[v0.2.3]: https://github.com/sat-utils/sat-api/compare/v0.2.2...v0.2.3
[v0.2.2]: https://github.com/sat-utils/sat-api/compare/v0.2.1...v0.2.2
[v0.2.1]: https://github.com/sat-utils/sat-api/compare/v0.2.0...v0.2.1
[v0.2.0]: https://github.com/sat-utils/sat-api/compare/v0.1.0...v0.2.0
[v0.1.0]: https://github.com/sat-utils/sat-api/compare/v0.0.2...v0.1.0
Expand Down
2 changes: 1 addition & 1 deletion lerna.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"lerna": "2.11.0",
"version": "0.2.2",
"version": "0.2.3",
"npmClient": "yarn",
"packages": [
"packages/*"
Expand Down
17 changes: 8 additions & 9 deletions packages/api-lib/libs/api.js
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,13 @@ const extractBbox = function (params) {
let intersectsGeometry
const { bbox } = params
if (bbox) {
const boundingBox = extent(bbox)
let bboxArray
if (typeof bbox === 'string') {
bboxArray = JSON.parse(bbox)
} else {
bboxArray = bbox
}
const boundingBox = extent(bboxArray)
const geojson = feature(boundingBox.polygon())
intersectsGeometry = geojson
}
Expand Down Expand Up @@ -236,15 +242,8 @@ const buildPageLinks = function (meta, parameters, endpoint) {
}

const searchItems = async function (parameters, page, limit, backend, endpoint) {
const arbitraryLimit = 5000
const { results: collectionResults } =
await backend.search(parameters, 'collections', 1, arbitraryLimit)
const collectionList = collectionResults.map((result) => result.id)
const collectionsQuery = Object.assign(
{}, parameters, { parentCollections: collectionList }
)
const { results: itemsResults, meta: itemsMeta } =
await backend.search(collectionsQuery, 'items', page, limit)
await backend.search(parameters, 'items', page, limit)
const pageLinks = buildPageLinks(itemsMeta, parameters, endpoint)
const items = addItemLinks(itemsResults, endpoint)
const response = wrapResponseInFeatureCollection(itemsMeta, items, pageLinks)
Expand Down
106 changes: 58 additions & 48 deletions packages/api-lib/libs/es.js
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ const ElasticsearchWritableStream = require('./ElasticSearchWriteableStream')
const logger = require('./logger')

let _esClient

/*
This module is used for connecting to an Elasticsearch instance, writing records,
searching records, and managing the indexes. It looks for the ES_HOST environment
Expand Down Expand Up @@ -77,7 +76,6 @@ async function esClient() {
// Create STAC mappings
async function prepare(index) {
// TODO - different mappings for collection and item
let ready
const props = {
'type': 'object',
properties: {
Expand Down Expand Up @@ -106,7 +104,6 @@ async function prepare(index) {
}]
const client = await esClient()
const indexExists = await client.indices.exists({ index })

if (!indexExists) {
const payload = {
index,
Expand All @@ -133,49 +130,71 @@ async function prepare(index) {
try {
await client.indices.create(payload)
logger.info(`Created index: ${JSON.stringify(payload)}`)
ready = 0
} catch (error) {
const debugMessage = `Error creating index, already created: ${error}`
logger.debug(debugMessage)
}
}
return ready
}

// Given an input stream and a transform, write records to an elasticsearch instance
async function _stream() {
const toEs = through2.obj({ objectMode: true }, (data, encoding, next) => {
let index = ''
if (data && data.hasOwnProperty('extent')) {
index = 'collections'
} else if (data && data.hasOwnProperty('geometry')) {
index = 'items'
} else {
next()
return
}
// remove any hierarchy links in a non-mutating way
const hlinks = ['self', 'root', 'parent', 'child', 'collection', 'item']
const links = data.links.filter((link) => hlinks.includes(link))
const dataNoLinks = Object.assign({}, data, { links })

// create ES record
const record = {
index,
type: 'doc',
id: dataNoLinks.id,
action: 'update',
_retry_on_conflict: 3,
body: {
doc: dataNoLinks,
doc_as_upsert: true
}
}
next(null, record)
})
let esStreams
try {
let collections = []
const client = await esClient()
const indexExists = await client.indices.exists({ index: 'collections' })
if (indexExists) {
const body = { query: { match_all: {} } }
const searchParams = {
index: 'collections',
body
}
const resultBody = await client.search(searchParams)
collections = resultBody.hits.hits.map((r) => (r._source))
}

const toEs = through2.obj({ objectMode: true }, (data, encoding, next) => {
let index = ''
if (data && data.hasOwnProperty('extent')) {
index = 'collections'
} else if (data && data.hasOwnProperty('geometry')) {
index = 'items'
} else {
next()
return
}
// remove any hierarchy links in a non-mutating way
const hlinks = ['self', 'root', 'parent', 'child', 'collection', 'item']
const links = data.links.filter((link) => hlinks.includes(link))
let esDataObject = Object.assign({}, data, { links })
if (index === 'items') {
const collectionId = data.properties.collection
const itemCollection =
collections.find((collection) => (collectionId === collection.id))
if (itemCollection) {
const flatProperties =
Object.assign({}, itemCollection.properties, data.properties)
esDataObject = Object.assign({}, esDataObject, { properties: flatProperties })
} else {
logger.error(`${data.id} has no collection`)
}
}

// create ES record
const record = {
index,
type: 'doc',
id: esDataObject.id,
action: 'update',
_retry_on_conflict: 3,
body: {
doc: esDataObject,
doc_as_upsert: true
}
}
next(null, record)
})
const esStream = new ElasticsearchWritableStream({ client: client }, {
objectMode: true,
highWaterMark: process.env.ES_BATCH_SIZE || 500
Expand Down Expand Up @@ -243,9 +262,11 @@ function buildDatetimeQuery(parameters) {

function buildQuery(parameters) {
const eq = 'eq'
const { query, parentCollections, intersects } = parameters
const { query, intersects } = parameters
let must = []
if (query) {
// Using reduce rather than map as we don't currently support all
// stac query operators.
must = Object.keys(query).reduce((accumulator, property) => {
const operatorsObject = query[property]
const operators = Object.keys(operatorsObject)
Expand All @@ -265,6 +286,7 @@ function buildQuery(parameters) {
return accumulator
}, must)
}

if (intersects) {
const { geometry } = intersects
must.push({
Expand All @@ -279,19 +301,7 @@ function buildQuery(parameters) {
must.push(datetimeQuery)
}

let filter
if (parentCollections && parentCollections.length !== 0) {
filter = {
bool: {
should: [
{ terms: { 'properties.collection': parentCollections } },
{ bool: { must } }
]
}
}
} else {
filter = { bool: { must } }
}
const filter = { bool: { must } }
const queryBody = {
constant_score: { filter }
}
Expand Down
2 changes: 1 addition & 1 deletion packages/api-lib/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@sat-utils/api-lib",
"version": "0.2.2",
"version": "0.2.3",
"description": "A library for creating a search API of public Satellites metadata using Elasticsearch",
"main": "index.js",
"scripts": {
Expand Down
2 changes: 1 addition & 1 deletion packages/api-lib/tests/fixtures/stac/collection2.json
Original file line number Diff line number Diff line change
Expand Up @@ -220,7 +220,7 @@
"eo:gsd": 15,
"eo:instrument": "OLI_TIRS",
"eo:off_nadir": 0,
"eo:platform": "landsat-8"
"eo:platform": "platform2"
},
"providers": [
{
Expand Down
9 changes: 9 additions & 0 deletions packages/api-lib/tests/integration/ingestCollections.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
// Point the backend at the local (dockerized) Elasticsearch used by the
// integration tests before the es module is loaded.
process.env.ES_HOST = `http://${process.env.DOCKER_NAME}:4571`
const ingest = require('../../libs/ingest').ingest
const backend = require('../../libs/es')

// Ingest the test STAC catalog into Elasticsearch.
// NOTE(review): the two trailing boolean flags presumably restrict this run
// to collections (this script is paired with ingestData.js, which ingests
// items) — confirm against the ingest() signature in libs/ingest.
async function doIngest() {
  await ingest('../fixtures/stac/catalog.json', backend, true, true)
  console.log('Collections done')
}

// Surface ingest failures instead of leaving an unhandled promise rejection,
// and make the process exit non-zero so the CI script (runIntegration.sh)
// stops before running the dependent steps.
doIngest().catch((error) => {
  console.error(error)
  process.exitCode = 1
})
9 changes: 6 additions & 3 deletions packages/api-lib/tests/integration/ingestData.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,9 @@ process.env.ES_HOST = `http://${process.env.DOCKER_NAME}:4571`
const ingest = require('../../libs/ingest').ingest
const backend = require('../../libs/es')

//ingest('../fixtures/stac/catalog.json', backend)
ingest('https://landsat-stac.s3.amazonaws.com/landsat-8-l1/catalog.json', backend)

// Ingest the test STAC catalog's items into Elasticsearch.
async function doIngest() {
  await ingest('../fixtures/stac/catalog.json', backend)
  console.log('Items done')
}
// Alternative live data source, kept for manual runs:
//ingest('https://landsat-stac.s3.amazonaws.com/landsat-8-l1/catalog.json', backend)
// Surface ingest failures instead of leaving an unhandled promise rejection,
// and exit non-zero so the CI chain in runIntegration.sh short-circuits.
doIngest().catch((error) => {
  console.error(error)
  process.exitCode = 1
})
2 changes: 1 addition & 1 deletion packages/api-lib/tests/integration/runIntegration.sh
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
#!/bin/bash
docker-compose up & while ! nc -z $DOCKER_NAME 4571; do sleep 1; done;
sleep 20;
node ./ingestData.js && yarn ava ./tests/integration/test_api.js
node ./ingestCollections.js && node ./ingestData.js && yarn ava ./tests/integration/test_api.js
24 changes: 24 additions & 0 deletions packages/api-lib/tests/integration/test_api.js
Original file line number Diff line number Diff line change
Expand Up @@ -151,3 +151,27 @@ test('stac/search sort', async (t) => {
}, backend, endpoint)
t.is(response.features[0].id, 'LC80100102015082LGN00')
})

// Items should carry 'Common' properties copied down from their parent
// collection at ingest time (presumably the denormalization added in this
// release — confirm against libs/es.js).
test('stac/search flattened collection properties', async (t) => {
  // 'platform2' is set only on collection2 itself, so finding the item by
  // this value proves the collection property was flattened onto the item.
  const flattenedResponse = await search('/stac/search', {
    query: {
      'eo:platform': {
        eq: 'platform2'
      }
    }
  }, backend, endpoint)
  t.is(flattenedResponse.features[0].id, 'collection2_item')

  // Every feature matched by the query must expose the queried value on its
  // own properties.
  const platformResponse = await search('/stac/search', {
    query: {
      'eo:platform': {
        eq: 'landsat-8'
      }
    }
  }, backend, endpoint)
  const matching = platformResponse.features.filter(
    (feature) => (feature.properties['eo:platform'] === 'landsat-8')
  )
  t.is(matching.length, platformResponse.features.length)
})
47 changes: 44 additions & 3 deletions packages/api-lib/tests/test_api_search.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,11 @@ const sinon = require('sinon')
const proxquire = require('proxyquire')
const api = require('../libs/api')
const item = require('./fixtures/item.json')
const itemLinks = require('./fixtures/itemLinks.json')

// Return a shallow copy of the shared `item` fixture with its own copy of
// the `links` array, so tests that add links cannot pollute the fixture for
// later tests.
function cloneMutatedItem() {
  const copiedLinks = item.links.slice(0)
  const copiedItem = Object.assign({}, item)
  copiedItem.links = copiedLinks
  return copiedItem
}

test('search es error', async (t) => {
const error = sinon.spy()
Expand Down Expand Up @@ -65,6 +70,36 @@ test('search /stac', async (t) => {
'Returns STAC catalog with links to collections')
})

// api.search should wrap backend item results in a GeoJSON FeatureCollection,
// attaching relative STAC links to each item and echoing pagination metadata.
test('search /stac/search wraps results', async (t) => {
  const page = 1
  const limit = 10
  const meta = {
    limit,
    page,
    found: 1,
    returned: 1
  }
  const stubbedBackend = {
    search: sinon.stub().resolves({ meta, results: [cloneMutatedItem()] })
  }

  const actual = await api.search('/stac/search', {}, stubbedBackend, 'endpoint')

  t.deepEqual(actual.features[0].links, itemLinks.links,
    'Adds correct relative STAC links')
  // Compare against a fresh literal (not `meta`) so the assertion still
  // fails if api.search mutates the metadata object it was handed.
  t.deepEqual(actual.meta, {
    limit,
    page,
    found: 1,
    returned: 1
  }, 'Adds correct response metadata fields')
  t.is(actual.type, 'FeatureCollection', 'Wraps response as FeatureCollection')
})

test('search /stac/search query parameters', async (t) => {
const search = sinon.stub().resolves({ results: [], meta: {} })
const backend = { search }
Expand Down Expand Up @@ -105,8 +140,9 @@ test('search /stac/search bbox parameter', async (t) => {
const s = -10
const e = 10
const n = 10
const bbox = [w, s, e, n]
const queryParams = {
bbox: [w, s, e, n],
bbox,
page: 1,
limit: 1
}
Expand All @@ -128,6 +164,10 @@ test('search /stac/search bbox parameter', async (t) => {
t.deepEqual(search.firstCall.args[0].intersects, expected,
'Converts a [w,s,e,n] bbox to an intersects search parameter')
search.resetHistory()
queryParams.bbox = `[${bbox.toString()}]`
await api.search('/stac/search', queryParams, backend, 'endpoint')
t.deepEqual(search.firstCall.args[0].intersects, expected,
'Converts stringified [w,s,e,n] bbox to an intersects search parameter')
})

test('search /stac/search time parameter', async (t) => {
Expand Down Expand Up @@ -237,10 +277,11 @@ test('search /collections/collectionId/items/itemId', async (t) => {
found: 1,
returned: 1
}

const clonedItem = cloneMutatedItem()
const results = [clonedItem]
const search = sinon.stub().resolves({
meta,
results: [item]
results
})
const backend = { search }
const itemId = 'itemId'
Expand Down
Loading

0 comments on commit cb44931

Please sign in to comment.