Merge branch 'release/1.1.0'
mpu-creare committed Jul 31, 2019
2 parents 9e7bc30 + 60fce91 commit dbb3b37
Showing 134 changed files with 10,519 additions and 9,170 deletions.
7 changes: 7 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,7 @@
repos:
- repo: https://github.com/ambv/black
rev: stable
hooks:
- id: black
language_version: python3
files: (^podpac|^dist)
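
This hook runs black on anything under podpac/ or dist/ at commit time. As a rough sketch of enabling it locally (assuming the pre-commit tool is not already installed), the standard pre-commit CLI is:

# install the tool and register the git hook defined in .pre-commit-config.yaml
pip install pre-commit
pre-commit install
# run every configured hook (here, black) against the whole repository once
pre-commit run --all-files

Once registered, the hook reformats matching files on each git commit and fails the commit if anything changed, so the reformatted files can be re-staged.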
8 changes: 7 additions & 1 deletion .travis.yml
@@ -6,10 +6,13 @@
# https://conda.io/docs/user-guide/tasks/use-conda-with-travis-ci.html

language: python
sudo: required
dist: xenial
python:
- "2.7"
- "3.5"
- "3.6"
- "3.7"

# addons:
# # https://docs.travis-ci.com/user/uploading-artifacts/
@@ -44,6 +47,9 @@ script:
# deploy docs to `podpac-docs` repository. This script only pushes the docs on pushes to develop and master.
jobs:
include:
- stage: formatting
python: "3.7"
script: black --check podpac
- stage: docs deploy
python: "3.6"
python: "3.7"
script: cd doc && ./ci-deploy.sh && cd ..
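
The new formatting stage fails the build if black would change anything under podpac. A minimal way to reproduce that check locally (assuming black is installed in the active Python 3 environment):

# report, without modifying, any files black would reformat (the same check Travis runs)
black --check podpac
# apply the formatting in place before committing
black podpac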
8 changes: 4 additions & 4 deletions dist/aws/Dockerfile
@@ -21,7 +21,7 @@ ADD . /podpac/

RUN mkdir /tmp/vendored/ && \
cp /podpac/settings.json /tmp/vendored/settings.json && \
cd /podpac/ && git clean -xdf && \
cd /podpac/ && rm -rf .git/ doc/ .github/ && \
pip3 install -r dist/aws/aws_requirements.txt -t /tmp/vendored/ --upgrade

RUN cd /tmp/vendored/ && touch pydap/__init__.py && \
@@ -31,15 +31,15 @@ RUN cd /tmp/vendored/ && touch pydap/__init__.py && \

RUN cp -r /podpac/ /tmp/vendored/ && \
mv /tmp/vendored/podpac/dist/aws/handler.py /tmp/vendored/handler.py && \
cp /tmp/vendored/podpac/dist/aws/mk_dist.py /tmp/vendored/mk_dist.py && \
rm -rf /tmp/vendored/podpac/dist/ && \
cp -r /tmp/vendored/podpac/podpac/* /tmp/vendored/podpac/ && \
rm -rf /tmp/vendored/podpac/podpac/*

RUN pip3 install pyproj==2.1.3 -t /tmp/vendored/ --upgrade

RUN cd /tmp/vendored && \
find * -maxdepth 0 -type f | grep ".zip" -v | grep -v ".pyc" | xargs zip -9 -rqy podpac_dist_latest.zip
RUN cd /tmp/vendored && \
find * -maxdepth 0 -type d -exec zip -9 -rqy {}.zip {} \;
RUN cd /tmp/vendored && du -s *.zip > zip_package_sizes.txt
RUN cd /tmp/vendored && du -s * | grep .zip -v > package_sizes.txt
RUN cd /tmp/vendored && cp podpac/dist/aws/mk_dist.py . && python3 mk_dist.py
RUN cd /tmp/vendored && python3 mk_dist.py
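
The Dockerfile now copies mk_dist.py into /tmp/vendored up front and runs it there, after zipping everything into podpac_dist_latest.zip plus per-package zips. A hedged sketch of building the image and copying the artifacts out by hand, in case build_lambda.sh is not used (the image tag and container name are illustrative, and it is assumed mk_dist.py also writes podpac_deps_latest.zip, which build_lambda.sh uploads):

# build the Lambda packaging image from the repository root (tag is illustrative)
docker build -f dist/aws/Dockerfile -t podpac:lambda .
# create a stopped container just to copy files out; 'true' avoids depending on the image's CMD
docker create --name podpac-lambda-build podpac:lambda true
docker cp podpac-lambda-build:/tmp/vendored/podpac_dist_latest.zip .
docker cp podpac-lambda-build:/tmp/vendored/podpac_deps_latest.zip .
docker rm podpac-lambda-build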
6 changes: 4 additions & 2 deletions dist/aws/aws_requirements.txt
@@ -6,12 +6,14 @@ traitlets>=4.3
xarray>=0.10
requests>=2.18
beautifulsoup4>=4.6
h5py>=2.7
h5py>=2.9
lxml>=4.2
pydap>=3.2
rasterio>=0.36
pyproj>=2.1
pyproj>=2.2
requests>=2.18
numexpr>=2.6
lazy-import>=0.2.2
psutil
zarr>=2.3
s3fs>=0.2
7 changes: 4 additions & 3 deletions dist/aws/build_lambda.sh
@@ -37,8 +37,9 @@ if [ -z $TAG ]
aws s3 cp podpac_deps_latest.zip s3://podpac-s3/podpac/podpac_deps_$TAG.zip
aws s3 cp podpac_dist_latest.zip s3://podpac-s3/podpac/podpac_dist_$TAG.zip
fi
aws s3 cp podpac_deps_latest.zip s3://podpac-s3/podpac/podpac_deps_latest.zip
aws s3 cp podpac_dist_latest.zip s3://podpac-s3/podpac/podpac_dist_latest.zip
aws s3 cp podpac_deps_latest.zip s3://podpac-s3/podpac/podpac_deps_ESIP3.zip
aws s3 cp podpac_dist_latest.zip s3://podpac-s3/podpac/podpac_dist_ESIP3.zip
rm podpac_deps_latest.zip podpac_dist_latest.zip

# Update lambda function to use the zips from S3 (uploaded above).
aws lambda update-function-code --function-name podpac_lambda --s3-bucket podpac-s3 --s3-key podpac/podpac_dist_latest.zip
aws lambda update-function-code --function-name podpac_lambda_ESIP3 --s3-bucket podpac-s3 --s3-key podpac/podpac_dist_ESIP3.zip
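
With this release, the script uploads ESIP3-suffixed packages and repoints the podpac_lambda_ESIP3 function at them. A quick, non-authoritative way to confirm the update and smoke-test the function afterwards (assuming the AWS CLI is configured for the right account and region):

# CodeSha256 / LastModified should reflect the newly uploaded package
aws lambda get-function-configuration --function-name podpac_lambda_ESIP3
# invoke with a saved test event; event.json is a placeholder, see the handler event sketch further below
aws lambda invoke --function-name podpac_lambda_ESIP3 --payload file://event.json response.json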
123 changes: 93 additions & 30 deletions dist/aws/handler.py
@@ -7,60 +7,123 @@
import urllib.parse as urllib
from collections import OrderedDict

import boto3

import _pickle as cPickle

import boto3
import botocore

# sys.path.insert(0, '/tmp/podpac/')
sys.path.append('/tmp/')
# sys.path.append(os.getcwd() + '/podpac/')

s3 = boto3.client('s3')
deps = 'podpac_deps_latest.zip'
deps = 'podpac_deps_ESIP3.zip'


def handler(event, context, get_deps=True, ret_pipeline=False):
bucket_name = event['Records'][0]['s3']['bucket']['name']
print(event)
bucket_name = 'podpac-s3'
if get_deps:
s3.download_file(bucket_name, 'podpac/' + deps, '/tmp/' + deps)
subprocess.call(['unzip', '/tmp/' + deps, '-d', '/tmp'])
sys.path.append('/tmp/')
subprocess.call(['rm', '/tmp/' + deps])
file_key = urllib.unquote_plus(
event['Records'][0]['s3']['object']['key'])
_json = ''
# get the object
obj = s3.get_object(Bucket=bucket_name, Key=file_key)
# get lines
lines = obj['Body'].read().split(b'\n')
for r in lines:
if len(_json) > 0:
_json += '\n'
_json += r.decode()
_json = json.loads(
_json, object_pairs_hook=OrderedDict)
pipeline_json = _json['pipeline']

if 'Records' in event and event['Records'][0]['eventSource'] == 'aws:s3':
# <start S3 trigger specific>
file_key = urllib.unquote_plus(
event['Records'][0]['s3']['object']['key'])
_json = ''
# get the object
obj = s3.get_object(Bucket=bucket_name, Key=file_key)
# get lines
lines = obj['Body'].read().split(b'\n')
for r in lines:
if len(_json) > 0:
_json += '\n'
_json += r.decode()
_json = json.loads(
_json, object_pairs_hook=OrderedDict)
pipeline_json = _json['pipeline']
else:
# elif ('pathParameters' in event and event['pathParameters'] is not None and 'proxy' in event['pathParameters']) or ('authorizationToken' in event and event['authorizationToken'] == "incoming-client-token"):
# TODO: Need to get the pipeline_json from the event...
print("DSullivan: we have an API Gateway event")
pipeline_json = None

# Need to set matplotlib backend to 'Agg' before importing it elsewhere
import matplotlib
matplotlib.use('agg')
from podpac import settings
from podpac.core.node import Node
from podpac.core.pipeline import Pipeline
from podpac.core.coordinates import Coordinates
from podpac.core.utils import JSONEncoder
pipeline = Pipeline(definition=pipeline_json, do_write_output=False)
coords = Coordinates.from_json(
json.dumps(_json['coordinates'], indent=4, cls=JSONEncoder))
pipeline.eval(coords)
from podpac.core.utils import JSONEncoder, _get_query_params_from_url
import podpac.datalib

# check if file exists
if pipeline_json is not None:
pipeline = Pipeline(definition=pipeline_json, do_write_output=False)
filename = file_key.replace('.json', '.' + pipeline.output.format)
filename = filename.replace(
settings['S3_JSON_FOLDER'], settings['S3_OUTPUT_FOLDER'])
try:
s3.head_object(Bucket=bucket_name, Key=filename)
# Object exists, so we don't have to recompute
if not _json.get('force_compute', False):
return
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] == "404":
# It does not exist, so we should proceed
pass
else:
# Something else has gone wrong... not handling this case.
pass

args = []
kwargs = {}
# if from S3 trigger
if pipeline_json is not None:
# if 'Records' in event and event['Records'][0]['eventSource'] == 'aws:s3':
pipeline = Pipeline(definition=pipeline_json, do_write_output=False)
coords = Coordinates.from_json(
json.dumps(_json['coordinates'], indent=4, cls=JSONEncoder))
format = pipeline.output.format

# else from api gateway and it's a WMS/WCS request
else:
# elif ('pathParameters' in event and event['pathParameters'] is not None and 'proxy' in event['pathParameters']) or ('authorizationToken' in event and event['authorizationToken'] == "incoming-client-token"):
print(_get_query_params_from_url(event['queryStringParameters']))
coords = Coordinates.from_url(event['queryStringParameters'])
pipeline = Node.from_url(event['queryStringParameters'])
pipeline.do_write_output = False
format = _get_query_params_from_url(event['queryStringParameters'])[
'FORMAT'].split('/')[-1]
if format in ['png', 'jpg', 'jpeg']:
kwargs['return_base64'] = True

output = pipeline.eval(coords)
if ret_pipeline:
return pipeline

filename = file_key.replace('.json', '.' + pipeline.output.format)
filename = filename.replace(settings['S3_JSON_FOLDER'], settings['S3_OUTPUT_FOLDER'])

body = cPickle.dumps(pipeline._output)
s3.put_object(Bucket=bucket_name,
Key=filename, Body=body)
return
body = output.to_format(format, *args, **kwargs)
if pipeline_json is not None:
s3.put_object(Bucket=bucket_name,
Key=filename, Body=body)
else:
try:
json.dumps(body)
except Exception as e:
print("AWS: body is not serializable, attempting to decode.")
body = body.decode()
return {
"statusCode": 200,
"headers": {
"Content-Type": "image/png"
},
"isBase64Encoded": True,
"body": body
}


if __name__ == '__main__':
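
The reworked handler now accepts two event shapes: an S3 trigger (it reads Records[0].s3.bucket.name and object.key, evaluates the pipeline JSON, and writes the result back to S3) and an API Gateway request (it builds Coordinates and a Node from queryStringParameters and returns a base64-encoded image response). A hedged sketch of what minimal test events might look like, with all values as placeholders; the exact WMS/WCS query parameters expected by Coordinates.from_url and Node.from_url are not spelled out in this diff:

# S3-trigger shape consumed by handler(): eventSource, bucket name, and object key
cat > s3_event.json <<'EOF'
{"Records": [{"eventSource": "aws:s3",
              "s3": {"bucket": {"name": "podpac-s3"},
                     "object": {"key": "path/to/pipeline.json"}}}]}
EOF
# API Gateway shape: only queryStringParameters is used; FORMAT drives the output encoding
cat > api_event.json <<'EOF'
{"queryStringParameters": {"FORMAT": "image/png"}}
EOF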
@@ -2,4 +2,4 @@
ECHO "Activating PODPAC environment."
REM This assumes that set_local_conda_path.bat has been called
SET CURL_CA_BUNDLE=%mypath%miniconda\envs\podpac\Library\ssl\cacert.pem
activate podpac
conda activate podpac
Binary file removed doc/source/_static/img/class-structure.png
Binary file added doc/source/_static/img/complex-pipeline.png
Binary file added doc/source/_static/img/coordinates.png
Binary file added doc/source/_static/img/node.png
Binary file added doc/source/_static/img/simple-pipeline.png
Binary file added doc/source/_static/img/units-data-array.png