diff --git a/.circleci/config.yml b/.circleci/config.yml new file mode 100644 index 00000000..a8ab54e2 --- /dev/null +++ b/.circleci/config.yml @@ -0,0 +1,216 @@ +# Python CircleCI 2.0 configuration file +# +# Check https://circleci.com/docs/2.0/language-python/ for more details +# +version: 2 +jobs: + build: + docker: + - image: docker/compose:1.19.0 + + working_directory: ~/repo + + steps: + - checkout + + - setup_remote_docker: + docker_layer_caching: true + + - run: + name: Build application Docker image + command: | + docker-compose -f compose/circleci.yml up -d + + - run: + name: PEP8 Lint check + command: | + docker-compose -f compose/circleci.yml exec -T runserver prospector -o pylint + + - run: + name: Run unit tests + command: | + docker-compose -f compose/circleci.yml exec -T runserver coverage run manage.py test --noinput tests --settings conf.test_settings + + - run: + name: Unit test coverage report + command: | + docker-compose -f compose/circleci.yml exec -T runserver coverage xml -o test-results/coverage/results.xml + + - run: + name: Copy artifacts from Docker + command: | + docker cp $(docker-compose -f compose/circleci.yml ps -q runserver):/app/test-results test-results + + - store_test_results: + path: test-results + + - run: + name: Save docker image + command: | + mkdir -p docker-cache + docker save -o docker-cache/built-image.tar transmission-django + + - save_cache: + key: docker-cache-{{ .Branch }}-{{ .Revision }} + paths: + - docker-cache + + push-to-ecr: + docker: + - image: docker:stable-git + + steps: + - restore_cache: + keys: + - docker-cache-{{ .Branch }}-{{ .Revision }} + + - setup_remote_docker: + docker_layer_caching: true + + - run: + name: Set dynamic ENV variables + command: | + echo 'export SHORT_GIT_HASH=$(echo $CIRCLE_SHA1 | cut -c -7)' >> $BASH_ENV + + - run: + name: Install dependencies + command: | + apk add --no-cache py-pip + pip install awscli + + - run: + name: Load docker image + command: | + docker load < 
/root/repo/docker-cache/built-image.tar + + - run: + name: Push image to ECR + command: | + source $BASH_ENV + docker tag transmission-django $ECR_ENDPOINT/transmission-django:$SHORT_GIT_HASH + docker tag transmission-django $ECR_ENDPOINT/transmission-django:latest + $(aws ecr get-login --no-include-email) + docker push $ECR_ENDPOINT/transmission-django:$SHORT_GIT_HASH + docker push $ECR_ENDPOINT/transmission-django:latest + + deploy-to-dev: + docker: + - image: docker:stable-git + + steps: + - run: + name: Set dynamic ENV variables + command: | + echo 'export SHORT_GIT_HASH=$(echo $CIRCLE_SHA1 | cut -c -7)' >> $BASH_ENV + + - run: + name: Install dependencies + command: | + apk add --no-cache py-pip + pip install awscli + + - run: + name: Deploy new Task Revision to ECS + command: | + source $BASH_ENV + aws lambda invoke --function-name DeployImageToECS --payload "{ + \"app\": \"transmission\", + \"env\": \"DEV\", + \"image\": \"$ECR_ENDPOINT/transmission-django:$SHORT_GIT_HASH\" + }" outputfile.txt && cat outputfile.txt + + deploy-to-stage: + docker: + - image: docker:stable-git + + steps: + - run: + name: Set dynamic ENV variables + command: | + echo 'export SHORT_GIT_HASH=$(echo $CIRCLE_SHA1 | cut -c -7)' >> $BASH_ENV + + - run: + name: Install dependencies + command: | + apk add --no-cache py-pip + pip install awscli + + - run: + name: Deploy new Task Revision to ECS + command: | + source $BASH_ENV + aws lambda invoke --function-name DeployImageToECS --payload "{ + \"app\": \"transmission\", + \"env\": \"STAGE\", + \"image\": \"$ECR_ENDPOINT/transmission-django:$SHORT_GIT_HASH\" + }" outputfile.txt && cat outputfile.txt + + deploy-to-prod: + docker: + - image: docker:stable-git + + steps: + - run: + name: Set dynamic ENV variables + command: | + echo 'export SHORT_GIT_HASH=$(echo $CIRCLE_SHA1 | cut -c -7)' >> $BASH_ENV + + - run: + name: Install dependencies + command: | + apk add --no-cache py-pip + pip install awscli + + - run: + name: Deploy new Task 
Revision to ECS + command: | + source $BASH_ENV + aws lambda invoke --function-name DeployImageToECS --payload "{ + \"app\": \"transmission\", + \"env\": \"PROD\", + \"image\": \"$ECR_ENDPOINT/transmission-django:$SHORT_GIT_HASH\" + }" outputfile.txt && cat outputfile.txt + +workflows: + version: 2 + build-and-deploy: + jobs: + - build + - push-to-ecr: + filters: + branches: + only: master + requires: + - build + - deploy-to-dev: + filters: + branches: + only: master + requires: + - push-to-ecr + - hold-deploy-stage: + filters: + branches: + only: master + type: approval + requires: + - push-to-ecr + - deploy-to-stage: + filters: + branches: + only: master + requires: + - hold-deploy-stage + - hold-deploy-prod: + filters: + branches: + only: master + type: approval + requires: + - deploy-to-stage + - deploy-to-prod: + filters: + branches: + only: master + requires: + - hold-deploy-prod diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 00000000..30fff9d7 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,25 @@ +[run] +include = apps/* +omit = + apps/*/migrations/* + apps/wsgi.py + apps/celery.py + +[report] +exclude_lines = + pragma: no cover + + # Don't complain about missing debug-only code: + def __unicode__ + def __repr__ + if self\.debug + + # Don't complain if tests don't hit defensive assertion code: + raise AssertionError + raise NotImplementedError + + # Don't complain if non-runnable code isn't run: + if 0: + if __name__ == .__main__.: + +show_missing = True diff --git a/.envrc b/.envrc new file mode 100644 index 00000000..045d1172 --- /dev/null +++ b/.envrc @@ -0,0 +1,8 @@ + +# Export all variables from .env if it exists + +if [[ -e ".env" ]] ; then + set -a + source .env + set +a +fi diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..321976c2 --- /dev/null +++ b/.gitignore @@ -0,0 +1,114 @@ +.idea +bin/dev-tools/node_modules +test-results + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C 
extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +compose/*/pip.cache/*.whl +compose/*/pip.cache/*.zip +compose/*/pip.cache/*.tar.gz +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +.static_storage/ +.media/ +staticfiles/ +local_settings.py + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +docker-compose.yml diff --git a/.prospector.yaml b/.prospector.yaml new file mode 100644 index 00000000..0fe9e47d --- /dev/null +++ b/.prospector.yaml @@ -0,0 +1,32 @@ +output-format: text + +strictness: high + +uses: + - django + +pylint: + disable: + - unused-argument + - redefined-builtin + - no-self-use + - function-redefined + - abstract-method + - wildcard-import + - unused-wildcard-import + - too-many-ancestors + options: + good-names: + - pk + max-line-length: 120 + +pep8: + full: true + options: + max-line-length: 120 + +pyflakes: + disable: + - F811 + - F401 + - F403 diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 
00000000..a93f7175 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,37 @@ +FROM python:3.6.5 + +LABEL maintainer="Adam Hodges " + +ENV LANG C.UTF-8 +ENV PYTHONUNBUFFERED 1 + +RUN mkdir /build +WORKDIR /build + +ADD ./compose/django/requirements.txt /build/ +ADD ./compose/django/pip.cache/ /build/ + +# SUPPORT SSH FOR IAM USERS # +RUN apt-get update && apt-get -y install openssh-server +RUN mkdir /var/run/sshd /etc/cron.d +RUN pip install keymaker +RUN keymaker install + +# Configure public key SSH +RUN echo "AllowAgentForwarding yes" >> /etc/ssh/sshd_config +RUN echo "PasswordAuthentication no" >> /etc/ssh/sshd_config +# ------------------------- # + +RUN pip install -r /build/requirements.txt --find-links /build/ + +RUN mkdir /app +WORKDIR /app + +COPY ./compose/django/*.sh / +RUN chmod +x /*.sh +ENTRYPOINT ["/entrypoint.sh"] + +ADD . /app/ + +# Generate static assets +RUN python manage.py collectstatic -c --noinput diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..2c908f3c --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018 ShipChain, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 00000000..02da404b --- /dev/null +++ b/README.md @@ -0,0 +1,234 @@ +

+ ShipChain +

+ +[![CircleCI](https://circleci.com/gh/ShipChain/transmission/tree/master.svg?style=svg)](https://circleci.com/gh/ShipChain/transmission/tree/master) +[![License](http://img.shields.io/:license-apache-blue.svg)](http://www.apache.org/licenses/LICENSE-2.0.html) +[![Code style: prospector](https://img.shields.io/badge/code_style-prospector-ff69b4.svg?style=svg)](https://github.com/PyCQA/prospector) +[![Chat](https://img.shields.io/badge/gitter-ShipChain/lobby-green.svg)](https://gitter.im/ShipChain/Lobby) + +# ShipChain Transmission Project + +* A service assisting in binding REST-to-Eth API calls, to simplify data access. +* Interacts with the ShipChain Engine project via RPC, implementing business logic for the ShipChain +portal and maintaining state for asynchronous transactions on the Ethereum network. + +ShipChain Transmission is deployed for public consumption at [transmission.shipchain.io](https://transmission.shipchain.io) and detailed +API documentation (OpenAPI 3.0) is available at the landing page **(API DOCS NOT CURRENTLY AVAILABLE)**. + +## Getting Started + +These instructions should provide everything needed to get a copy of Transmission up and running in your local environment. + +### Prerequisites + +The development environment for Transmission uses Docker to assist in the setup of local databases and supporting infrastructure. +Deployment of these containers is handled through the use of Docker Compose with the provided files in the `compose` directory. + +See the official Docker documentation for installation information: + + * [Install Docker](https://docs.docker.com/engine/installation/) + * [Install Docker Compose](https://docs.docker.com/compose/install/) version > 1.21.0 + +Transmission also relies heavily on the [ShipChain Engine](https://github.com/shipchain/engine) project for interaction +with the Ethereum blockchain as well as wallet/vault management. 
Please refer to the readme in the Engine repository for +instructions on running your own Engine instance. + +### Docker Compose + +It is suggested to develop using Docker Compose; configuration files live in +[compose](compose/) folder, with [compose/dev.yml](compose/dev.yml) hosting several services (Redis, Postgres, Celery) +necessary for running everything locally. + +You must first run `docker network create portal` to create a local network for +other ShipChain services to communicate on (like the Profiles and Engine services). + +The dev environment uses a seperate Dockerfile located at +[compose/django/Dockerfile](compose/django/Dockerfile); please note, this file *doesn't* use the docker +`CP` directive to copy the project code into the container, instead the code is +mounted as a volume (so that as you save files, they update inside the container). + +#### Scripts + +There are several scripts in the `bin/` folder dedicated to making life simple: + +* [bin/check_style](bin/check_style) will run the prospector lint tool against the repo, flagging any violations +of common Python style guides (such as PEP-8). + +* [bin/docker_tests](bin/docker_tests) will run the full suite of lint checks and unit tests, as they are run +during a CircleCi build. This is useful to ensure a build is valid before pushing code. + +* [bin/dc](bin/dc) is an alias for `docker-compose -f compose/dev.yml` (you could use + e.g `compose/my_settings.yml` by setting environment variable `ROLE=my_settings`) + +* [bin/dcleanup](bin/dcleanup) is quick way to "kill, remove, restart, tail logs", so if you need +to restart the `runserver` service and follow the logs you can `dcleanup runserver` + +* [bin/ddo](bin/ddo) is an alias for `bin/dc run django $*`, so `ddo bash` will get you + a shell inside a django container. 
+ +* [bin/dmg](bin/dmg) is an alias for `bin/ddo manage.py $*`, so you can quickly run + management commands like `dmg migrate` or `dmg dbshell` + +If you plan on doing a lot of development work, you might consider adding `PATH=$PATH:./bin` +to your `.bashrc` file so you can skip typing `bin/`. Ensure you first understand the security +risks of doing so. + +The scripts provided in the [bin](bin) directory allow for easier interaction with the Docker compose services and containers. + By default, these scripts use the [dev.yml](compose/dev.yml) compose file. This can be changed to any configuration file by setting + the `ROLE` environment variable. For example if you want to use `my_settings.yml` with the scripts provided, + you would only need to set `ROLE=my_settings` in your environment. + +### Configuration + +Before you can begin using Transmission, you may need to do some configuration depending on your specific requirements. + +#### Environment Variables + +When utilizing the provided scripts in the [bin](bin) directory to manage the Docker containers, a file in the base folder +named `.env` is sourced. This allows you to inject environment variables in to the launched containers. + +##### Service URLs +The URLs that Transmission uses when communicating with other services are defined in the following environment variables: + +* `ENGINE_RPC_URL` - URL of the Engine RPC server +* `INTERNAL_URL` - URL of this Transmission deployment - used for Engine callbacks +* `PROFILES_URL` - URL of ShipChain Profiles, to be used for authentication. The value can also be set to `DISABLED` if +running outside of ShipChain's infrastructure. + +##### Database + The Docker Compose files provided include a PostgreSQL container that is linked to Transmission with default connection + string `psql://transmission:transmission@psql:5432/transmission`. 
This can be modified by setting the environment + variable to your preferred database: + +* `DATABASE_URL` + +##### AWS + +If you intend to utilize any AWS services (such as Secrets Manager and RDS as we do in-house) you may want to include +the following variables: +* `AWS_ACCESS_KEY_ID` +* `AWS_SECRET_ACCESS_KEY` + +##### Logging + +The default Python console logging level is configurable by way of environment variable. The following variable accepts any +[valid Python logging level](https://docs.python.org/3.6/library/logging.html#logging-levels), and defaults to `DEBUG`: +* `LOG_LEVEL` + +If you want to also log messages to ElasticSearch, the URL of your ElasticSearch instance is configurable with the following environment variable: +* `ELASTICSEARCH_URL` + +##### Metrics + +Transmission supports the reporting of application metrics to an InfluxDB instance. We use this internally in combination with +Graphana to make a spiffy real-time dashboard of our application use. In order to use this, set: +* `INFLUXDB_URL` - With the format `http://{host}:{port}/{database}` + +#### Pycharm Bindings (optional) +Integration with PyCharm for local development is straightforward -- you can debug the whole project +using PyCharm runners with minimal configuration. + +1. Add a Remote Project Interpreter, with the following settings: + + * Type: Docker-Compose + + * Configuration File: compose/dev.yml + + * Service: runserver + + * Environment Variables: COMPOSE_PROJECT_NAME=transmission + + * Be sure to set one Path Mapping: Local Directory -> "/app/" on Remote + +2. Add a Run Configuration named "runserver": + + * Type: Django Runserver + + * EnvFile (using EnvFile plugin): .env + + * Host: 0.0.0.0 + + * Interpreter: Docker Compose Runserver Interpreter you setup above + +#### Deployed Environment + +While [compose/dev.yml](compose/dev.yml) is very useful for development, it's not appropriate for production use. 
+The service runs as a Django server (uwsgi) and is designed to be deployed behind an +nginx reverse proxy container, along with an additional Celery worker container. +We currently use Amazon ECS (FARGATE) for deployment by way of CircleCi and AWS Lambda. + +The Dockerfile to build for deployment is the root Dockerfile; `docker build` +should generate the image as expected. + +## Running the tests + +Testing is handled through the Docker containers as well and can be invoked via the [bin/docker_tests](bin/docker_tests) script. +This is the recommended way to run the tests. This script first runs a `prospector` lint check, followed by the unit +tests, and finally a coverage report. + +## Usage + +See the public OpenAPI 3.0 documentation **(API DOCS NOT CURRENTLY AVAILABLE)** for a full enumeration of API endpoints with example requests/responses. + +### Starting Transmission +Once the dependencies are resolved, starting Transmission should be as easy as: + +* `docker-compose -p transmission -f compose/dev.yml up` or +* `./bin/dc up` + +By default, the [dev.yml](bin/dev.yml) compose file uses Django runserver, which is mapped to the host port 8000. + +### Authentication +All endpoints require a valid JWT from OIDC Auth with the ShipChain Profiles service. The JWT shall +be provided to Transmission as a bearer token in the `Authorization` request header, in the format +`JWT {token}`. ShipChain Profiles is a full-featured OIDC provider and all JWTs will be validated using +the Profiles JWK. + +Tracking updates from ShipChain AXLE devices are authenticated via AWS IoT, and all messages are signed +by the device itself and validated using the device's AWS IoT certificate. + +Transmission's JWT authentication mechanism can be disabled by setting the `PROFILES_URL` environment variable to `DISABLED`. +This is required for the use of Transmission and Engine outside of ShipChain's infrastructure; all authentication and authorization +in this case is left up to you. 
+ +### Asynchronous Requests +Transmission interacts with ShipChain Engine asynchronously; any long-running Engine RPC calls are passed a callback +to a Transmission endpoint. When Transmission receives an update about a job, the relevant listeners +are notified, updating the data model and pushing notifications to all relevant channels. + +### Postman +There is a Postman collection available for import at [tests/postman.collection.Transmission.json](tests/postman.collection.Transmission.json). +This can be imported into Postman to provide a collection of all available Transmission endpoints for ease of testing. +Please note that this collection is designed to be used with ShipChain Profiles - if you are using it for your own internal +testing, you will need to disable authentication on all of the requests in the collection. + +## Built With + +* [Django](https://www.djangoproject.com/) - Python MVC web framework +* [Django Rest Framework](http://www.django-rest-framework.org/) - REST API toolkit +* [Celery](http://www.celeryproject.org/) - Distributed task queue + + + +## Versioning + +We use [SemVer](http://semver.org/) for versioning. For the versions available, see the +[tags on this repository](https://github.com/shipchain/transmission/tags). + +## Authors + +* **Adam Hodges** - [ajhodges](https://github.com/ajhodges) +* **Lucas Clay** - [mlclay](https://github.com/mlclay) +* **Leeward Bound** - [leewardbound](https://github.com/leewardbound) +* **James Neyer** - [jamesfneyer](https://github.com/jamesfneyer) + + + +## License + +This project is licensed under the Apache 2.0 License - see the [LICENSE](LICENSE) file for details diff --git a/apps/__init__.py b/apps/__init__.py new file mode 100644 index 00000000..3b91b070 --- /dev/null +++ b/apps/__init__.py @@ -0,0 +1,7 @@ +from __future__ import absolute_import, unicode_literals + +# This will make sure the app is always imported when +# Django starts so that shared_task will use this app. 
+from .celery import app as celery_app + +__all__ = ['celery_app'] diff --git a/apps/celery.py b/apps/celery.py new file mode 100644 index 00000000..915a3068 --- /dev/null +++ b/apps/celery.py @@ -0,0 +1,30 @@ +import os +from celery import Celery + +# set the default Django settings module for the 'celery' program. +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings') + +# pylint:disable=invalid-name +REDIS_URL = os.environ.get('REDIS_URL', default='redis://:redis_pass@redis_db:6379/1') +app = Celery('apps', broker=REDIS_URL) +app.conf.ONCE = { + 'backend': 'celery_once.backends.Redis', + 'settings': { + 'url': REDIS_URL, + 'default_timeout': 60 * 60 + } +} + +# Using a string here means the worker doesn't have to serialize +# the configuration object to child processes. +# - namespace='CELERY' means all celery-related configuration keys +# should have a `CELERY_` prefix. +app.config_from_object('django.conf:settings', namespace='CELERY') + +# Load task modules from all registered Django app configs. 
+app.autodiscover_tasks() + + +@app.task(bind=True) +def debug_task(self): + print('Request: {0!r}'.format(self.request)) diff --git a/apps/eth/__init__.py b/apps/eth/__init__.py new file mode 100644 index 00000000..207c3527 --- /dev/null +++ b/apps/eth/__init__.py @@ -0,0 +1,20 @@ +from django.apps import AppConfig +from django.conf import settings +from .tasks import engine_subscribe + + +class EthConfig(AppConfig): + name = 'apps.eth' + label = 'eth' + verbose_name = 'Eth' + + def ready(self): + # pylint:disable=unused-variable + import apps.eth.signals + + if settings.SUBSCRIBE_EVENTS: + engine_subscribe.delay() # Handle subscription via Celery task + + +# pylint:disable=invalid-name +default_app_config = 'apps.eth.EthConfig' diff --git a/apps/eth/migrations/0001_initial.py b/apps/eth/migrations/0001_initial.py new file mode 100644 index 00000000..1407e020 --- /dev/null +++ b/apps/eth/migrations/0001_initial.py @@ -0,0 +1,69 @@ +# Generated by Django 2.0.7 on 2018-08-15 14:42 + +import apps.utils +import django.core.validators +from django.db import migrations, models +import django.db.models.deletion +import django_extensions.db.fields.json + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('contenttypes', '0002_remove_content_type_name'), + ] + + operations = [ + migrations.CreateModel( + name='ContractReceipt', + fields=[ + ('block_hash', models.CharField(max_length=66, validators=[django.core.validators.RegexValidator(message='Invalid hash.', regex='^0x([A-Fa-f0-9]{64})$')])), + ('block_number', models.BigIntegerField()), + ('contract_address', models.CharField(blank=True, max_length=42, null=True, validators=[django.core.validators.RegexValidator(message='Invalid address.', regex='^0x([A-Fa-f0-9]{40})$')])), + ('cumulative_gas_used', models.IntegerField()), + ('from_address', models.CharField(blank=True, max_length=42, null=True, validators=[django.core.validators.RegexValidator(message='Invalid address.', 
regex='^0x([A-Fa-f0-9]{40})$')])), + ('gas_used', models.IntegerField()), + ('logs', django_extensions.db.fields.json.JSONField(default=dict)), + ('logs_bloom', models.CharField(max_length=514)), + ('status', models.BooleanField()), + ('to_address', models.CharField(blank=True, max_length=42, null=True, validators=[django.core.validators.RegexValidator(message='Invalid address.', regex='^0x([A-Fa-f0-9]{40})$')])), + ('transaction_hash', models.CharField(max_length=66, primary_key=True, serialize=False, validators=[django.core.validators.RegexValidator(message='Invalid hash.', regex='^0x([A-Fa-f0-9]{64})$')])), + ('transaction_index', models.IntegerField()), + ], + ), + migrations.CreateModel( + name='Event', + fields=[ + ('id', models.CharField(default=apps.utils.random_id, max_length=36, primary_key=True, serialize=False)), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('address', models.CharField(blank=True, max_length=42, null=True, validators=[django.core.validators.RegexValidator(message='Invalid address.', regex='^0x([A-Fa-f0-9]{40})$')])), + ('block_number', models.BigIntegerField()), + ('transaction_hash', models.CharField(max_length=66, validators=[django.core.validators.RegexValidator(message='Invalid hash.', regex='^0x([A-Fa-f0-9]{64})$')])), + ('transaction_index', models.IntegerField()), + ('block_hash', models.CharField(max_length=66, validators=[django.core.validators.RegexValidator(message='Invalid hash.', regex='^0x([A-Fa-f0-9]{64})$')])), + ('log_index', models.IntegerField()), + ('removed', models.BooleanField()), + ('event_id', models.CharField(max_length=514)), + ('return_values', django_extensions.db.fields.json.JSONField(default=dict)), + ('event_name', models.CharField(max_length=514)), + ('signature', models.CharField(max_length=66, validators=[django.core.validators.RegexValidator(message='Invalid hash.', regex='^0x([A-Fa-f0-9]{64})$')])), + ('raw', django_extensions.db.fields.json.JSONField(default=dict)), + 
('contract_receipt', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='eth.ContractReceipt')), + ], + ), + migrations.CreateModel( + name='EventListener', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('listener_id', models.CharField(max_length=36)), + ('contract_receipt', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='eth.ContractReceipt')), + ('listener_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')), + ], + ), + migrations.AlterUniqueTogether( + name='contractreceipt', + unique_together={('transaction_hash', 'transaction_index')}, + ), + ] diff --git a/apps/eth/migrations/0002_optional_receipt.py b/apps/eth/migrations/0002_optional_receipt.py new file mode 100644 index 00000000..371ba9ba --- /dev/null +++ b/apps/eth/migrations/0002_optional_receipt.py @@ -0,0 +1,19 @@ +# Generated by Django 2.0.7 on 2018-08-15 15:26 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('eth', '0001_initial'), + ] + + operations = [ + migrations.AlterField( + model_name='event', + name='contract_receipt', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='eth.ContractReceipt'), + ), + ] diff --git a/apps/eth/migrations/0003_auto_20180829_2114.py b/apps/eth/migrations/0003_auto_20180829_2114.py new file mode 100644 index 00000000..dd71edf9 --- /dev/null +++ b/apps/eth/migrations/0003_auto_20180829_2114.py @@ -0,0 +1,104 @@ +# Generated by Django 2.0.7 on 2018-08-29 21:14 + +import django.core.validators +from django.db import migrations, models +import django.db.models.deletion +import django_extensions.db.fields.json + + +class Migration(migrations.Migration): + + dependencies = [ + ('jobs', '0003_auto_20180801_1626'), + ('contenttypes', '0002_remove_content_type_name'), + ('eth', 
'0002_optional_receipt'), + ] + + operations = [ + migrations.RemoveField( + model_name='event', + name='contract_receipt', + ), + migrations.CreateModel( + name='EthAction', + fields=[ + ('transaction_hash', models.CharField(max_length=66, primary_key=True, serialize=False, validators=[django.core.validators.RegexValidator(message='Invalid hash.', regex='^0x([A-Fa-f0-9]{64})$')])), + ('updated_at', models.DateTimeField(auto_now=True)), + ('created_at', models.DateTimeField(auto_now_add=True)), + ], + ), + migrations.CreateModel( + name='EthListener', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('listener_id', models.CharField(max_length=36)), + ], + ), + migrations.DeleteModel( + name='ContractReceipt', + ), + migrations.RemoveField( + model_name='eventlistener', + name='contract_receipt', + ), + migrations.RemoveField( + model_name='eventlistener', + name='listener_type', + ), + migrations.CreateModel( + name='Transaction', + fields=[ + ('eth_action', models.OneToOneField(db_column='hash', on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to='eth.EthAction')), + ('nonce', models.IntegerField()), + ('block_hash', models.CharField(max_length=66, validators=[django.core.validators.RegexValidator(message='Invalid hash.', regex='^0x([A-Fa-f0-9]{64})$')])), + ('block_number', models.IntegerField()), + ('transaction_index', models.IntegerField(null=True)), + ('from_address', models.CharField(max_length=42, validators=[django.core.validators.RegexValidator(message='Invalid address.', regex='^0x([A-Fa-f0-9]{40})$')])), + ('to_address', models.CharField(max_length=42, validators=[django.core.validators.RegexValidator(message='Invalid address.', regex='^0x([A-Fa-f0-9]{40})$')])), + ('value', models.IntegerField()), + ('gas', models.IntegerField()), + ('gas_price', models.IntegerField()), + ('input', models.TextField()), + ], + ), + migrations.CreateModel( + 
name='TransactionReceipt', + fields=[ + ('block_hash', models.CharField(max_length=66, validators=[django.core.validators.RegexValidator(message='Invalid hash.', regex='^0x([A-Fa-f0-9]{64})$')])), + ('block_number', models.BigIntegerField()), + ('contract_address', models.CharField(blank=True, max_length=42, null=True, validators=[django.core.validators.RegexValidator(message='Invalid address.', regex='^0x([A-Fa-f0-9]{40})$')])), + ('cumulative_gas_used', models.IntegerField()), + ('from_address', models.CharField(blank=True, max_length=42, null=True, validators=[django.core.validators.RegexValidator(message='Invalid address.', regex='^0x([A-Fa-f0-9]{40})$')])), + ('gas_used', models.IntegerField()), + ('logs', django_extensions.db.fields.json.JSONField(default=dict)), + ('logs_bloom', models.CharField(max_length=514)), + ('status', models.BooleanField()), + ('to_address', models.CharField(blank=True, max_length=42, null=True, validators=[django.core.validators.RegexValidator(message='Invalid address.', regex='^0x([A-Fa-f0-9]{40})$')])), + ('eth_action', models.OneToOneField(db_column='transaction_hash', on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to='eth.EthAction')), + ('transaction_index', models.IntegerField(null=True)), + ], + ), + migrations.DeleteModel( + name='EventListener', + ), + migrations.AddField( + model_name='ethlistener', + name='eth_action', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='eth.EthAction'), + ), + migrations.AddField( + model_name='ethlistener', + name='listener_type', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType'), + ), + migrations.AddField( + model_name='ethaction', + name='async_job', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='jobs.AsyncJob'), + ), + migrations.AddField( + model_name='event', + name='eth_action', + field=models.ForeignKey(blank=True, null=True, 
on_delete=django.db.models.deletion.CASCADE, to='eth.EthAction'), + ), + ] diff --git a/apps/eth/migrations/0004_ethaction_listeners.py b/apps/eth/migrations/0004_ethaction_listeners.py new file mode 100644 index 00000000..e5008d8f --- /dev/null +++ b/apps/eth/migrations/0004_ethaction_listeners.py @@ -0,0 +1,20 @@ +# Generated by Django 2.0.7 on 2018-08-30 13:52 + +from django.db import migrations +import gm2m.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('contenttypes', '0002_remove_content_type_name'), + ('eth', '0003_auto_20180829_2114'), + ] + + operations = [ + migrations.AddField( + model_name='ethaction', + name='listeners', + field=gm2m.fields.GM2MField('shipments.Shipment', through='eth.EthListener', through_fields=['eth_action', 'listener', 'listener_type', 'listener_id']), + ), + ] diff --git a/apps/eth/migrations/0005_auto_20180830_2231.py b/apps/eth/migrations/0005_auto_20180830_2231.py new file mode 100644 index 00000000..3dc562d1 --- /dev/null +++ b/apps/eth/migrations/0005_auto_20180830_2231.py @@ -0,0 +1,60 @@ +# Generated by Django 2.0.7 on 2018-08-30 22:31 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('eth', '0004_ethaction_listeners'), + ] + + operations = [ + migrations.RenameField( + model_name='transaction', + old_name='input', + new_name='data', + ), + migrations.RemoveField( + model_name='transaction', + name='block_hash', + ), + migrations.RemoveField( + model_name='transaction', + name='block_number', + ), + migrations.RemoveField( + model_name='transaction', + name='from_address', + ), + migrations.RemoveField( + model_name='transaction', + name='transaction_index', + ), + migrations.AddField( + model_name='transaction', + name='chain_id', + field=models.IntegerField(default=1337), + preserve_default=False, + ), + migrations.AlterField( + model_name='transaction', + name='gas', + field=models.CharField(max_length=32), + ), + 
migrations.AlterField( + model_name='transaction', + name='gas_price', + field=models.CharField(max_length=32), + ), + migrations.AlterField( + model_name='transaction', + name='nonce', + field=models.CharField(max_length=32), + ), + migrations.AlterField( + model_name='transaction', + name='value', + field=models.CharField(max_length=32), + ), + ] diff --git a/apps/eth/migrations/0006_auto_20180830_2239.py b/apps/eth/migrations/0006_auto_20180830_2239.py new file mode 100644 index 00000000..2d753aca --- /dev/null +++ b/apps/eth/migrations/0006_auto_20180830_2239.py @@ -0,0 +1,18 @@ +# Generated by Django 2.0.7 on 2018-08-30 22:39 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('eth', '0005_auto_20180830_2231'), + ] + + operations = [ + migrations.RenameField( + model_name='transaction', + old_name='gas', + new_name='gas_limit', + ), + ] diff --git a/apps/eth/migrations/__init__.py b/apps/eth/migrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/apps/eth/models.py b/apps/eth/models.py new file mode 100644 index 00000000..e82017cd --- /dev/null +++ b/apps/eth/models.py @@ -0,0 +1,197 @@ +from django.conf import settings +from django.contrib.contenttypes.fields import GenericForeignKey +from django.contrib.contenttypes.models import ContentType +from django.db import models +from django.core.validators import RegexValidator +from django_extensions.db.fields.json import JSONField +from rest_framework.reverse import reverse + +from gm2m import GM2MField + +from apps.utils import random_id +from apps.jobs.models import AsyncJob + +HASH_REGEX = RegexValidator(regex=r'^0x([A-Fa-f0-9]{64})$', message="Invalid hash.") +ADDRESS_REGEX = RegexValidator(regex=r'^0x([A-Fa-f0-9]{40})$', message="Invalid address.") + + +class EthAction(models.Model): + transaction_hash = models.CharField(validators=[HASH_REGEX], max_length=66, primary_key=True) + + async_job = models.ForeignKey(AsyncJob, 
on_delete=models.CASCADE) + + updated_at = models.DateTimeField(auto_now=True) + created_at = models.DateTimeField(auto_now_add=True) + + listeners = GM2MField('shipments.Shipment', through='EthListener') + + +class Transaction(models.Model): + """ + "hash": "0x9fc76417374aa880d4449a1f7f31ec597f00b1f6f3dd2d66f4c9c6c445836d8b", + "nonce": 2, + "chainId": 1337, + "to": "0x6295ee1b4f6dd65047762f924ecd367c17eabf8f", + "value": '123450000000000000', + "gas": 314159, + "gasPrice": '2000000000000', + "data": "0x57cb2fc4" + """ + eth_action = models.OneToOneField(EthAction, db_column="hash", primary_key=True, on_delete=models.CASCADE) + + nonce = models.CharField(max_length=32) + chain_id = models.IntegerField() + to_address = models.CharField(validators=[ADDRESS_REGEX], max_length=42) + value = models.CharField(max_length=32) + gas_limit = models.CharField(max_length=32) + gas_price = models.CharField(max_length=32) + data = models.TextField() + + @staticmethod + def from_unsigned_tx(camel_tx): + return Transaction( + nonce=camel_tx['nonce'], + chain_id=camel_tx['chainId'], + to_address=camel_tx['to'], + value=camel_tx['value'], + gas_limit=camel_tx['gasLimit'], + gas_price=camel_tx['gasPrice'], + data=camel_tx['data'] + ) + + +class TransactionReceipt(models.Model): + """ + "blockHash": "0x38823cb26b528867c8dbea4146292908f55e1ee7f293685db1df0851d1b93b24", + "blockNumber": 14, + "contractAddress": null, + "cumulativeGasUsed": 270710, + "from": "0x13b1eebb31a1aa2ecaa2ad9e7455df2f717f2143", + "gasUsed": 270710, + "logs": [ + { + "address": "0x25Ff5dc79A7c4e34254ff0f4a19d69E491201DD3", + "topics": [ + "0x485397dbe4d658daac8124e3080f66a255b9207fa36d7e757ba4d52fe6c21f54" + ], + "data": "0x0000000000000000000000000000...00000000000000000002", + "blockNumber": 14, + "transactionHash": "0x7ff1a69326d64507a306a836128aa67503972cb38e22fa6db217ec553c560d76", + "transactionIndex": 0, + "blockHash": "0x38823cb26b528867c8dbea4146292908f55e1ee7f293685db1df0851d1b93b24", + "logIndex": 0, 
+ "removed": false, + "id": "log_025179b7" + } + ], + "logsBloom": "0x0000000000000000000000000000000...00000000000000000000", + "status": true, + "to": "0x25ff5dc79a7c4e34254ff0f4a19d69e491201dd3", + "transactionHash": "0x7ff1a69326d64507a306a836128aa67503972cb38e22fa6db217ec553c560d76", + "transactionIndex": 0 + """ + + block_hash = models.CharField(validators=[HASH_REGEX], max_length=66) + block_number = models.BigIntegerField() + contract_address = models.CharField(validators=[ADDRESS_REGEX], max_length=42, null=True, blank=True) + cumulative_gas_used = models.IntegerField() + from_address = models.CharField(validators=[ADDRESS_REGEX], max_length=42, null=True, blank=True) + gas_used = models.IntegerField() + logs = JSONField() + logs_bloom = models.CharField(max_length=514) + status = models.BooleanField() + to_address = models.CharField(validators=[ADDRESS_REGEX], max_length=42, null=True, blank=True) + eth_action = models.OneToOneField(EthAction, db_column="transaction_hash", + primary_key=True, on_delete=models.CASCADE) + transaction_index = models.IntegerField(null=True) + + @staticmethod + def convert_receipt(receipt): + return { + 'block_hash': receipt['blockHash'], + 'block_number': receipt['blockNumber'], + 'contract_address': receipt['contractAddress'], + 'cumulative_gas_used': receipt['cumulativeGasUsed'], + 'from_address': receipt['from'], + 'gas_used': receipt['gasUsed'], + 'logs': receipt['logs'], + 'logs_bloom': receipt['logsBloom'], + 'status': receipt['status'], + 'to_address': receipt['to'], + 'eth_action_id': receipt['transactionHash'], + 'transaction_index': receipt['transactionIndex'], + } + + @staticmethod + def from_eth_receipt(receipt): + return TransactionReceipt( + block_hash=receipt['blockHash'], + block_number=receipt['blockNumber'], + contract_address=receipt['contractAddress'], + cumulative_gas_used=receipt['cumulativeGasUsed'], + from_address=receipt['from'], + gas_used=receipt['gasUsed'], + logs=receipt['logs'], + 
logs_bloom=receipt['logsBloom'], + status=receipt['status'], + to_address=receipt['to'], + eth_action_id=receipt['transactionHash'], + transaction_index=receipt['transactionIndex'], + ) + + +class Event(models.Model): + """ + { + "address": "0x25Ff5dc79A7c4e34254ff0f4a19d69E491201DD3", + "blockNumber": 3, + "transactionHash": "0xc18a24a35052a5a3375ee6c2c5ddd6b0587cfa950b59468b67f63f284e2cc382", + "transactionIndex": 0, + "blockHash": "0x62469a8d113b27180c139d88a25f0348bb4939600011d33382b98e10842c85d9", + "logIndex": 0, + "removed": false, + "id": "log_25652065", + "returnValues": { + "0": "0xFCaf25bF38E7C86612a25ff18CB8e09aB07c9885", + "shipTokenContractAddress": "0xFCaf25bF38E7C86612a25ff18CB8e09aB07c9885" + }, + "event": "SetTokenContractAddressEvent", + "signature": "0xbbbf32f08c8c0621e580dcf0a8e0024525ec357db61bb4faa1a639d4f958a824", + "raw": { + "data": "0x000000000000000000000000fcaf25bf38e7c86612a25ff18cb8e09ab07c9885", + "topics": [ + "0xbbbf32f08c8c0621e580dcf0a8e0024525ec357db61bb4faa1a639d4f958a824" + ] + } + } + """ + + id = models.CharField(primary_key=True, default=random_id, max_length=36) + eth_action = models.ForeignKey(EthAction, on_delete=models.CASCADE, blank=True, null=True) + created_at = models.DateTimeField(auto_now_add=True) + + address = models.CharField(validators=[ADDRESS_REGEX], max_length=42, null=True, blank=True) + block_number = models.BigIntegerField() + transaction_hash = models.CharField(validators=[HASH_REGEX], max_length=66) + transaction_index = models.IntegerField() + block_hash = models.CharField(validators=[HASH_REGEX], max_length=66) + log_index = models.IntegerField() + removed = models.BooleanField() + event_id = models.CharField(max_length=514) + return_values = JSONField() + event_name = models.CharField(max_length=514) + signature = models.CharField(validators=[HASH_REGEX], max_length=66) + raw = JSONField() + + @staticmethod + def get_event_subscription_url(): + return settings.INTERNAL_URL + reverse('event-list', 
kwargs={'version': 'v1'})
+
+
+class EthListener(models.Model):
+    eth_action = models.ForeignKey(EthAction, on_delete=models.CASCADE)
+
+    # Polymorphic listener
+    listener_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
+    listener_id = models.CharField(max_length=36)
+    listener = GenericForeignKey('listener_type', 'listener_id')
diff --git a/apps/eth/permissions.py b/apps/eth/permissions.py
new file mode 100644
index 00000000..f9b191fb
--- /dev/null
+++ b/apps/eth/permissions.py
@@ -0,0 +1,15 @@
+from rest_framework import permissions
+
+
+class IsOwner(permissions.BasePermission):
+    """
+    Custom permission to only allow owners of an object to edit it
+    """
+
+    def has_object_permission(self, request, view, obj):
+
+        # Permissions are only allowed to the owner of the shipment.
+        for shipment in obj.listeners.filter(Model='shipments.Shipment'):
+            if shipment.owner_id == request.user.id:
+                return True
+        return False
diff --git a/apps/eth/rpc.py b/apps/eth/rpc.py
new file mode 100644
index 00000000..ea607336
--- /dev/null
+++ b/apps/eth/rpc.py
@@ -0,0 +1,25 @@
+from apps.eth.models import Event
+from apps.rpc_client import RPCClient, RPCError
+
+
+class EventRPCClient(RPCClient):
+
+    def subscribe(self, url=None, project="LOAD", interval=5000, events=None):
+        # Resolve the default URL lazily. Evaluating
+        # Event.get_event_subscription_url() in the signature would run it once
+        # at import time, before Django settings/URLconf may be ready.
+        if url is None:
+            url = Event.get_event_subscription_url()
+
+        result = self.call('event.subscribe', {
+            "url": url,
+            "project": project,
+            "interval": interval,
+            "eventNames": events or ["allEvents"],
+        })
+
+        if 'success' in result and result['success']:
+            if 'subscription' in result:
+                return
+
+        raise RPCError("Invalid response from Engine")
diff --git a/apps/eth/serializers.py b/apps/eth/serializers.py
new file mode 100644
index 00000000..23669da7
--- /dev/null
+++ b/apps/eth/serializers.py
@@ -0,0 +1,142 @@
+from rest_framework_json_api import serializers
+
+from apps.eth.models import Event, EthAction, Transaction, TransactionReceipt, HASH_REGEX
+from apps.jobs.serializers import AsyncJobSerializer
+
+
+class 
TransactionSerializer(serializers.ModelSerializer): + """ + "hash": "0x9fc76417374aa880d4449a1f7f31ec597f00b1f6f3dd2d66f4c9c6c445836d8b", + "nonce": 2, + "blockHash": "0xef95f2f1ed3ca60b048b4bf67cde2195961e0bba6f70bcbea9a2c4e133e34b46", + "blockNumber": 3, + "transactionIndex": 0, + "from": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", + "to": "0x6295ee1b4f6dd65047762f924ecd367c17eabf8f", + "value": '123450000000000000', + "gas": 314159, + "gasPrice": '2000000000000', + "input": "0x57cb2fc4" + """ + hash = serializers.RegexField(HASH_REGEX.regex, source='eth_action_id', max_length=66) + + class Meta: + model = Transaction + exclude = ('eth_action',) + + +class TransactionReceiptSerializer(serializers.ModelSerializer): + """ + "blockHash": "0x38823cb26b528867c8dbea4146292908f55e1ee7f293685db1df0851d1b93b24", + "blockNumber": 14, + "contractAddress": null, + "cumulativeGasUsed": 270710, + "from": "0x13b1eebb31a1aa2ecaa2ad9e7455df2f717f2143", + "gasUsed": 270710, + "logs": [ + { + "address": "0x25Ff5dc79A7c4e34254ff0f4a19d69E491201DD3", + "topics": [ + "0x485397dbe4d658daac8124e3080f66a255b9207fa36d7e757ba4d52fe6c21f54" + ], + "data": "0x0000000000000000000000000000...00000000000000000002", + "blockNumber": 14, + "transactionHash": "0x7ff1a69326d64507a306a836128aa67503972cb38e22fa6db217ec553c560d76", + "transactionIndex": 0, + "blockHash": "0x38823cb26b528867c8dbea4146292908f55e1ee7f293685db1df0851d1b93b24", + "logIndex": 0, + "removed": false, + "id": "log_025179b7" + } + ], + "logsBloom": "0x0000000000000000000000000000000...00000000000000000000", + "status": true, + "to": "0x25ff5dc79a7c4e34254ff0f4a19d69e491201dd3", + "transactionHash": "0x7ff1a69326d64507a306a836128aa67503972cb38e22fa6db217ec553c560d76", + "transactionIndex": 0 + """ + transaction_hash = serializers.RegexField(HASH_REGEX.regex, source='eth_action_id', max_length=66) + + class Meta: + model = TransactionReceipt + exclude = ('eth_action',) + + +class EventSerializer(serializers.ModelSerializer): + 
""" + { + "address": "0x25Ff5dc79A7c4e34254ff0f4a19d69E491201DD3", + "blockNumber": 3, + "transactionHash": "0xc18a24a35052a5a3375ee6c2c5ddd6b0587cfa950b59468b67f63f284e2cc382", + "transactionIndex": 0, + "blockHash": "0x62469a8d113b27180c139d88a25f0348bb4939600011d33382b98e10842c85d9", + "logIndex": 0, + "removed": false, + "id": "log_25652065", + "returnValues": { + "0": "0xFCaf25bF38E7C86612a25ff18CB8e09aB07c9885", + "shipTokenContractAddress": "0xFCaf25bF38E7C86612a25ff18CB8e09aB07c9885" + }, + "event": "SetTokenContractAddressEvent", + "signature": "0xbbbf32f08c8c0621e580dcf0a8e0024525ec357db61bb4faa1a639d4f958a824", + "raw": { + "data": "0x000000000000000000000000fcaf25bf38e7c86612a25ff18cb8e09ab07c9885", + "topics": [ + "0xbbbf32f08c8c0621e580dcf0a8e0024525ec357db61bb4faa1a639d4f958a824" + ] + } + } + """ + block_hash = serializers.CharField(source='blockHash') + block_number = serializers.IntegerField(source='blockNumber') + log_index = serializers.IntegerField(source='logIndex') + event_id = serializers.CharField(source='id') + return_values = serializers.JSONField(source='returnValues') + event_name = serializers.CharField(source='event') + raw = serializers.JSONField() + + transaction_hash = serializers.CharField(source='transactionHash') + transaction_index = serializers.IntegerField(source='transactionIndex') + + class Meta: + model = Event + fields = ['block_hash', 'block_number', 'log_index', 'event_id', 'return_values', 'event_name', + 'transaction_hash', 'transaction_index', 'removed', 'signature', 'raw', 'address'] + + def to_internal_value(self, data): + if 'blockHash' in data: + data['block_hash'] = data['blockHash'] + if 'blockNumber' in data: + data['block_number'] = data['blockNumber'] + if 'logIndex' in data: + data['log_index'] = data['logIndex'] + if 'id' in data: + data['event_id'] = data['id'] + if 'returnValues' in data: + data['return_values'] = data['returnValues'] + if 'event' in data: + data['event_name'] = data['event'] + if 
'transactionHash' in data: + data['transaction_hash'] = data['transactionHash'] + if 'transactionIndex' in data: + data['transaction_index'] = data['transactionIndex'] + return super(EventSerializer, self).to_internal_value(data) + + +class EthActionSerializer(serializers.ModelSerializer): + transaction = serializers.ResourceRelatedField(queryset=Transaction.objects.all()) + transaction_receipt = serializers.ResourceRelatedField(source='transactionreceipt', + queryset=TransactionReceipt.objects.all()) + + class Meta: + model = EthAction + exclude = ('listeners',) + + included_serializers = { + 'transaction': TransactionSerializer, + 'transaction_receipt': TransactionReceiptSerializer, + 'async_job': AsyncJobSerializer + } + + class JSONAPIMeta: + included_resources = ['transaction', 'transaction_receipt', 'async_job'] diff --git a/apps/eth/signals.py b/apps/eth/signals.py new file mode 100644 index 00000000..7c1a603b --- /dev/null +++ b/apps/eth/signals.py @@ -0,0 +1,17 @@ +from django.db.models.signals import post_save +from django.dispatch import Signal, receiver + +# pylint:disable=invalid-name +from apps.eth.models import Event + +event_update = Signal(providing_args=["event", "listener"]) + + +@receiver(post_save, sender=Event, dispatch_uid='event_post_save') +def event_post_save(sender, instance, **kwargs): + + # Update has been received, send signal to listener class + if instance.eth_action: + for listener in instance.eth_action.ethlistener_set.all(): + event_update.send(sender=listener.listener_type.model_class(), + event=instance, listener=listener.listener) diff --git a/apps/eth/tasks.py b/apps/eth/tasks.py new file mode 100644 index 00000000..ba62a435 --- /dev/null +++ b/apps/eth/tasks.py @@ -0,0 +1,19 @@ +from celery import shared_task +from celery_once import QueueOnce + + +@shared_task(base=QueueOnce, once={'graceful': True}, bind=True, autoretry_for=(Exception,), + retry_backoff=3, retry_backoff_max=60, max_retries=None) +def engine_subscribe(self): 
+ from apps.eth.rpc import EventRPCClient, RPCError + + try: + rpc_client = EventRPCClient() + rpc_client.subscribe() + # TODO: Metrics/Logs for subscribe successful + print("Subscribed to Events") + + except RPCError as rpc_error: + # TODO: Metrics/Logs for subscribe failure + print(f"Unable to subscribe to Events: {rpc_error}") + raise self.retry(exc=rpc_error) diff --git a/apps/eth/views.py b/apps/eth/views.py new file mode 100644 index 00000000..44473332 --- /dev/null +++ b/apps/eth/views.py @@ -0,0 +1,78 @@ +import logging + +from django.conf import settings +from django.core.exceptions import ObjectDoesNotExist +from rest_framework import mixins, viewsets, parsers, status, renderers, permissions +from rest_framework.response import Response +from rest_framework_json_api import renderers as jsapi_renderers + +from apps.eth.models import EthAction, Event +from apps.eth.serializers import EventSerializer, EthActionSerializer + +from .permissions import IsOwner + +LOG = logging.getLogger('transmission') + + +class EventViewSet(mixins.CreateModelMixin, + viewsets.GenericViewSet): + """ + Handles Event callbacks from Engine + """ + queryset = Event.objects.all() + serializer_class = EventSerializer + parser_classes = (parsers.JSONParser,) + renderer_classes = (renderers.JSONRenderer, jsapi_renderers.JSONRenderer) + # TODO: Restrict for Engine + permission_classes = (permissions.AllowAny,) + + def create(self, request, *args, **kwargs): + + is_many = isinstance(request.data, list) + + if not is_many: + serializer = EventSerializer(data=request.data) + serializer.is_valid(raise_exception=True) + + try: + action = EthAction.objects.get(transaction_hash=serializer.data['transaction_hash']) + except ObjectDoesNotExist: + action = None + # TODO: Events without Receipt metric reporting + LOG.info(f"Non-EthAction Event processed " + f"Tx: {serializer.data['transaction_hash']}") + + Event.objects.create(**serializer.data, eth_action=action) + + else: + serializer = 
EventSerializer(data=request.data, many=True) + serializer.is_valid(raise_exception=True) + + for event in serializer.data: + try: + action = EthAction.objects.get(transaction_hash=event['transaction_hash']) + except ObjectDoesNotExist: + action = None + # TODO: Events without Receipt metric reporting + LOG.info(f"Non-EthAction Event processed " + f"Tx: {event['transaction_hash']}") + + Event.objects.create(**event, eth_action=action) + + return Response(status=status.HTTP_204_NO_CONTENT) + + +class TransactionViewSet(mixins.RetrieveModelMixin, + viewsets.GenericViewSet): + """ + Get tx details for a transaction hash + """ + queryset = EthAction.objects.all() + serializer_class = EthActionSerializer + permission_classes = (permissions.IsAuthenticated, IsOwner) if settings.PROFILES_URL else (permissions.AllowAny,) + + def get_queryset(self): + queryset = self.queryset + if settings.PROFILES_URL: + queryset = queryset.filter(ethlistener__shipments__owner_id=self.request.user.id) + return queryset diff --git a/apps/jobs/__init__.py b/apps/jobs/__init__.py new file mode 100644 index 00000000..850ca965 --- /dev/null +++ b/apps/jobs/__init__.py @@ -0,0 +1,15 @@ +from django.apps import AppConfig + + +class JobsConfig(AppConfig): + name = 'apps.jobs' + label = 'jobs' + verbose_name = 'Jobs' + + def ready(self): + # pylint:disable=unused-variable + import apps.jobs.signals + + +# pylint:disable=invalid-name +default_app_config = 'apps.jobs.JobsConfig' diff --git a/apps/jobs/migrations/0001_initial.py b/apps/jobs/migrations/0001_initial.py new file mode 100644 index 00000000..ad593797 --- /dev/null +++ b/apps/jobs/migrations/0001_initial.py @@ -0,0 +1,48 @@ +# Generated by Django 2.0.7 on 2018-07-27 20:26 + +import apps.utils +import django.contrib.postgres.fields.jsonb +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('contenttypes', 
'0002_remove_content_type_name'), + ] + + operations = [ + migrations.CreateModel( + name='AsyncJob', + fields=[ + ('id', models.CharField(default=apps.utils.random_id, max_length=36, primary_key=True, serialize=False)), + ('parameters', django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True)), + ('updated_at', models.DateTimeField(auto_now=True)), + ('created_at', models.DateTimeField(auto_now_add=True)), + ], + options={ + 'ordering': ('created_at',), + }, + ), + migrations.CreateModel( + name='JobListener', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('listener_id', models.CharField(max_length=36)), + ('async_job', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='jobs.AsyncJob')), + ('listener_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')), + ], + ), + migrations.CreateModel( + name='JobMessage', + fields=[ + ('id', models.CharField(default=apps.utils.random_id, max_length=36, primary_key=True, serialize=False)), + ('body', django.contrib.postgres.fields.jsonb.JSONField()), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('async_job', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='jobs.AsyncJob')), + ], + ), + ] diff --git a/apps/jobs/migrations/0002_auto_20180730_1547.py b/apps/jobs/migrations/0002_auto_20180730_1547.py new file mode 100644 index 00000000..be3b979f --- /dev/null +++ b/apps/jobs/migrations/0002_auto_20180730_1547.py @@ -0,0 +1,22 @@ +# Generated by Django 2.0.7 on 2018-07-30 15:47 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('contenttypes', '0002_remove_content_type_name'), + ('jobs', '0001_initial'), + ] + + operations = [ + migrations.RenameModel( + old_name='JobListener', + new_name='Listener', + ), + migrations.RenameModel( + old_name='JobMessage', + new_name='Message', + ), + ] diff --git 
a/apps/jobs/migrations/0003_auto_20180801_1626.py b/apps/jobs/migrations/0003_auto_20180801_1626.py new file mode 100644 index 00000000..780f0f99 --- /dev/null +++ b/apps/jobs/migrations/0003_auto_20180801_1626.py @@ -0,0 +1,26 @@ +# Generated by Django 2.0.7 on 2018-08-01 16:26 + +import apps.jobs.models +from django.db import migrations +import enumfields.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('jobs', '0002_auto_20180730_1547'), + ] + + operations = [ + migrations.AddField( + model_name='asyncjob', + name='state', + field=enumfields.fields.EnumIntegerField(default=0, enum=apps.jobs.models.JobState), + ), + migrations.AddField( + model_name='message', + name='type', + field=enumfields.fields.EnumIntegerField(default=0, enum=apps.jobs.models.MessageType), + preserve_default=False, + ), + ] diff --git a/apps/jobs/migrations/0004_auto_20180829_2114.py b/apps/jobs/migrations/0004_auto_20180829_2114.py new file mode 100644 index 00000000..b34ed075 --- /dev/null +++ b/apps/jobs/migrations/0004_auto_20180829_2114.py @@ -0,0 +1,35 @@ +# Generated by Django 2.0.7 on 2018-08-29 21:14 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('contenttypes', '0002_remove_content_type_name'), + ('jobs', '0003_auto_20180801_1626'), + ] + + operations = [ + migrations.CreateModel( + name='JobListener', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('listener_id', models.CharField(max_length=36)), + ('async_job', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='jobs.AsyncJob')), + ('listener_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')), + ], + ), + migrations.RemoveField( + model_name='listener', + name='async_job', + ), + migrations.RemoveField( + model_name='listener', + name='listener_type', + ), + 
migrations.DeleteModel( + name='Listener', + ), + ] diff --git a/apps/jobs/migrations/0005_asyncjob_listeners.py b/apps/jobs/migrations/0005_asyncjob_listeners.py new file mode 100644 index 00000000..0b70e9b6 --- /dev/null +++ b/apps/jobs/migrations/0005_asyncjob_listeners.py @@ -0,0 +1,20 @@ +# Generated by Django 2.0.7 on 2018-08-30 13:30 + +from django.db import migrations +import gm2m.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('contenttypes', '0002_remove_content_type_name'), + ('jobs', '0004_auto_20180829_2114'), + ] + + operations = [ + migrations.AddField( + model_name='asyncjob', + name='listeners', + field=gm2m.fields.GM2MField(through='jobs.JobListener', through_fields=['async_job', 'listener', 'listener_type', 'listener_id']), + ), + ] diff --git a/apps/jobs/migrations/0006_auto_20180830_1352.py b/apps/jobs/migrations/0006_auto_20180830_1352.py new file mode 100644 index 00000000..1996c861 --- /dev/null +++ b/apps/jobs/migrations/0006_auto_20180830_1352.py @@ -0,0 +1,19 @@ +# Generated by Django 2.0.7 on 2018-08-30 13:52 + +from django.db import migrations +import gm2m.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('jobs', '0005_asyncjob_listeners'), + ] + + operations = [ + migrations.AlterField( + model_name='asyncjob', + name='listeners', + field=gm2m.fields.GM2MField('shipments.Shipment', through='jobs.JobListener', through_fields=['async_job', 'listener', 'listener_type', 'listener_id']), + ), + ] diff --git a/apps/jobs/migrations/__init__.py b/apps/jobs/migrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/apps/jobs/models.py b/apps/jobs/models.py new file mode 100644 index 00000000..ba06ecf3 --- /dev/null +++ b/apps/jobs/models.py @@ -0,0 +1,78 @@ +import celery +from django.db import models +from django.conf import settings +from django.urls import reverse +# TODO: Should this be a Postgres field? 
+from django.contrib.postgres.fields import JSONField + +from django.contrib.contenttypes.fields import GenericForeignKey +from django.contrib.contenttypes.models import ContentType + +from gm2m import GM2MField +from enumfields import Enum, EnumIntegerField + +from apps.utils import random_id + + +class JobState(Enum): + PENDING = 0 + RUNNING = 1 + FAILED = 2 + COMPLETE = 3 + + +class AsyncJob(models.Model): + id = models.CharField(primary_key=True, default=random_id, max_length=36) + state = EnumIntegerField(JobState, default=JobState.PENDING) + parameters = JSONField(blank=True, null=True) + updated_at = models.DateTimeField(auto_now=True) + created_at = models.DateTimeField(auto_now_add=True) + + class Meta: + ordering = ('created_at',) + + def get_callback_url(self): + return settings.INTERNAL_URL + reverse('job-message', kwargs={'version': 'v1', 'pk': self.id}) + + listeners = GM2MField('shipments.Shipment', through='JobListener') + + def fire(self): + # Use send_task to avoid cyclic import + celery.current_app.send_task('apps.jobs.tasks.async_job_fire', (self.id,)) + + @staticmethod + def rpc_job_for_listener(rpc_class, rpc_method, rpc_parameters, signing_wallet_id, listener): + job = AsyncJob.objects.create(parameters={ + 'rpc_class': f'{rpc_class.__module__}.{rpc_class.__name__}', + 'rpc_method': f'{rpc_method.__name__}', + 'rpc_parameters': rpc_parameters, + 'signing_wallet_id': signing_wallet_id, + }) + job.joblistener_set.create(listener=listener) + job.save() + job.fire() + return job + + +class MessageType(Enum): + ERROR = 0 + ETH_TRANSACTION = 1 + + +class Message(models.Model): + id = models.CharField(primary_key=True, default=random_id, max_length=36) + + async_job = models.ForeignKey(AsyncJob, on_delete=models.CASCADE) + + type = EnumIntegerField(MessageType) + body = JSONField() + created_at = models.DateTimeField(auto_now_add=True) + + +class JobListener(models.Model): + async_job = models.ForeignKey(AsyncJob, on_delete=models.CASCADE) + + # 
Polymorphic listener + listener_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) + listener_id = models.CharField(max_length=36) + listener = GenericForeignKey('listener_type', 'listener_id') diff --git a/apps/jobs/serializers.py b/apps/jobs/serializers.py new file mode 100644 index 00000000..197ef0ed --- /dev/null +++ b/apps/jobs/serializers.py @@ -0,0 +1,24 @@ +from rest_framework_json_api import serializers +from drf_enum_field.serializers import EnumFieldSerializerMixin +from .models import AsyncJob, Message + + +class MessageSerializer(EnumFieldSerializerMixin, serializers.ModelSerializer): + class Meta: + model = Message + fields = ('type', 'body', 'created_at') + + +class AsyncJobSerializer(EnumFieldSerializerMixin, serializers.ModelSerializer): + message_set = serializers.ResourceRelatedField(queryset=Message.objects, many=True) + + class Meta: + model = AsyncJob + exclude = ('listeners',) + + included_serializers = { + 'message_set': MessageSerializer + } + + class JSONAPIMeta: + included_resources = ['message_set'] diff --git a/apps/jobs/signals.py b/apps/jobs/signals.py new file mode 100644 index 00000000..5ac3de4c --- /dev/null +++ b/apps/jobs/signals.py @@ -0,0 +1,20 @@ +from django.db.models.signals import post_save +from django.dispatch import Signal, receiver + +from .models import Message, MessageType, JobState + +# pylint:disable=invalid-name +job_update = Signal(providing_args=["message", "listener"]) + + +@receiver(post_save, sender=Message, dispatch_uid='message_post_save') +def message_post_save(sender, instance, **kwargs): + if instance.type == MessageType.ERROR: + # Generic error handling + instance.async_job.state = JobState.FAILED + instance.async_job.save() # TODO: Log error message (instance.body), trigger a retry? 
+ + # Update has been received, send signal to listener class + for listener in instance.async_job.joblistener_set.all(): + job_update.send(sender=listener.listener_type.model_class(), + message=instance, listener=listener.listener) diff --git a/apps/jobs/tasks.py b/apps/jobs/tasks.py new file mode 100644 index 00000000..c93afa3a --- /dev/null +++ b/apps/jobs/tasks.py @@ -0,0 +1,88 @@ +# pylint:disable=invalid-name +import importlib + +from django.db import transaction +from django.core.cache import cache +from celery import shared_task + +from apps.rpc_client import RPCError + + +@shared_task(bind=True, autoretry_for=(Exception,), + retry_backoff=3, retry_backoff_max=60, max_retries=0) # TODO: enable retries +def async_job_fire(self, async_job_id): + from .models import AsyncJob, JobState + + print(f'AsyncJob {async_job_id} firing!') + async_job = AsyncJob.objects.get(id=async_job_id) + + if async_job.state == JobState.PENDING: + try: + with cache.lock(async_job.parameters['signing_wallet_id']): # Only one concurrent tx per wallet + # Find which RPC module/class to import + module_name, rpc_class_name = async_job.parameters['rpc_class'].rsplit('.', 1) + module = importlib.import_module(module_name) + rpc_client = getattr(module, rpc_class_name)() + + # Generate transaction via RPC + unsigned_tx = _generic_get_transaction(rpc_client, async_job) + + # Sign tx via RPC + signed_tx, eth_action = _sign_transaction(rpc_client, async_job, unsigned_tx) + + # Send tx via RPC + _send_transaction(rpc_client, async_job, signed_tx, eth_action) + + # TODO: Metrics/Logs for success + print("Eth TX Submitted via AsyncJob") + except RPCError as rpc_error: + # TODO: Metrics/Logs for failure + print(f"Unexpected RPC error: {rpc_error}") + raise self.retry(exc=rpc_error) + + +def _generic_get_transaction(rpc_client, async_job): + if async_job.parameters['rpc_method'] == 'create_shipment_transaction': + contract_version, unsigned_tx = getattr(rpc_client, 
async_job.parameters['rpc_method'])( + *async_job.parameters['rpc_parameters']) + for shipment in async_job.listeners.filter(Model='shipments.Shipment'): + shipment.contract_version = contract_version + shipment.save() + else: + unsigned_tx = getattr(rpc_client, async_job.parameters['rpc_method'])( + *async_job.parameters['rpc_parameters']) + return unsigned_tx + + +def _sign_transaction(rpc_client, async_job, unsigned_tx): + from apps.eth.models import EthAction, Transaction + + signed_tx, hash_tx = getattr(rpc_client, 'sign_transaction')(async_job.parameters['signing_wallet_id'], + unsigned_tx) + async_job.parameters['signed_tx'] = signed_tx + + # Create EthAction so this Job's Listeners can also listen to Events posted for the TransactionHash + with transaction.atomic(): + eth_action = EthAction(async_job=async_job, transaction_hash=hash_tx) + for job_listener in async_job.joblistener_set.all(): + eth_action.ethlistener_set.create(listener=job_listener.listener) + + eth_action.transaction = Transaction.from_unsigned_tx(unsigned_tx) + eth_action.transaction.hash = hash_tx + eth_action.transaction.save() + eth_action.save() + + return signed_tx, eth_action + + +def _send_transaction(rpc_client, async_job, signed_tx, eth_action): + from .models import JobState + from apps.eth.models import TransactionReceipt + + receipt = getattr(rpc_client, 'send_transaction')(signed_tx, async_job.get_callback_url()) + with transaction.atomic(): + eth_action.transactionreceipt = TransactionReceipt.from_eth_receipt(receipt) + eth_action.transactionreceipt.save() + + async_job.state = JobState.RUNNING + async_job.save() diff --git a/apps/jobs/views.py b/apps/jobs/views.py new file mode 100644 index 00000000..236a8ef5 --- /dev/null +++ b/apps/jobs/views.py @@ -0,0 +1,29 @@ +from rest_framework import viewsets, mixins, parsers, permissions, status, renderers +from rest_framework.decorators import action +from rest_framework.response import Response +from rest_framework_json_api import 
parsers as jsapi_parsers + +from .models import AsyncJob +from .serializers import AsyncJobSerializer, MessageSerializer + + +class JobsViewSet(mixins.ListModelMixin, + mixins.RetrieveModelMixin, + viewsets.GenericViewSet): + """ + Manages the state of an AsyncJob + """ + queryset = AsyncJob.objects.all() + serializer_class = AsyncJobSerializer + parser_classes = (parsers.JSONParser, jsapi_parsers.JSONParser) + + @action(detail=True, methods=['post'], + permission_classes=[permissions.AllowAny], + renderer_classes=[renderers.JSONRenderer]) + def message(self, request, version, pk): + serializer = MessageSerializer(data=request.data) + serializer.is_valid(raise_exception=True) + + serializer.save(async_job_id=pk) + + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apps/rpc_client.py b/apps/rpc_client.py new file mode 100644 index 00000000..1edba6f4 --- /dev/null +++ b/apps/rpc_client.py @@ -0,0 +1,88 @@ +import json +import logging +import requests +from rest_framework import status + +from rest_framework.exceptions import APIException + +from django.conf import settings + +from influxdb_metrics.loader import log_metric, TimingMetric + +LOG = logging.getLogger('transmission') + + +class RPCError(APIException): + status_code = status.HTTP_500_INTERNAL_SERVER_ERROR + default_detail = 'Internal Service Error.' 
+ default_code = 'server_error' + + def __init__(self, detail, status_code=None, code=None): + super(RPCError, self).__init__(detail, code) + self.detail = detail + + if status_code: + self.status_code = status_code + + +class RPCClient(object): + def __init__(self): + self.url = settings.ENGINE_RPC_URL + self.headers = {'content-type': 'application/json'} + self.payload = {"jsonrpc": "2.0", "id": 0, "params": {}} + + def call(self, method, args=None): + + if args and not isinstance(args, object): + raise RPCError("Invalid parameter type for Engine RPC call") + + self.payload['method'] = method + self.payload['params'] = args or {} + + try: + with TimingMetric('engine_rpc.call', tags={'method': method}) as timer: + response_json = requests.post(self.url, data=json.dumps(self.payload), headers=self.headers).json() + LOG.info('rpc_client(%s) duration: %.3f', method, timer.elapsed) + + if 'error' in response_json: + log_metric('engine_rpc.error', tags={'method': method, 'code': response_json['error']['code']}) + LOG.error('rpc_client(%s) error: %s', method, response_json['error']) + raise RPCError(response_json['error']['message']) + + response_json = response_json['result'] + + except requests.exceptions.ConnectionError: + # Don't return the true ConnectionError as it can contain internal URLs + log_metric('engine_rpc.error', tags={'method': method, 'code': 'ConnectionError'}) + raise RPCError("Service temporarily unavailable, try again later", status.HTTP_503_SERVICE_UNAVAILABLE, + 'service_unavailable') + + except Exception as exception: + log_metric('engine_rpc.error', tags={'method': method, 'code': 'Exception'}) + raise RPCError(str(exception)) + + return response_json + + def sign_transaction(self, wallet_id, transaction): + + result = self.call('transaction.sign', { + "signerWallet": wallet_id, + "txUnsigned": transaction + }) + + if 'success' in result and result['success']: + if 'transaction' in result: + return result['transaction'], result['hash'] + raise 
RPCError("Invalid response from Engine") + + def send_transaction(self, signed_transaction, callback_url): + + result = self.call('transaction.send', { + "callbackUrl": callback_url, + "txSigned": signed_transaction + }) + + if 'success' in result and result['success']: + if 'receipt' in result: + return result['receipt'] + raise RPCError("Invalid response from Engine") diff --git a/apps/schema/static/schema/shipchain-logo.png b/apps/schema/static/schema/shipchain-logo.png new file mode 100644 index 00000000..81a4f1ca Binary files /dev/null and b/apps/schema/static/schema/shipchain-logo.png differ diff --git a/apps/schema/static/schema/swagger.yaml b/apps/schema/static/schema/swagger.yaml new file mode 100644 index 00000000..565a2a90 --- /dev/null +++ b/apps/schema/static/schema/swagger.yaml @@ -0,0 +1,393 @@ +openapi: 3.0.0 +servers: +- url: https://transmission.shipchain.io/ + description: ShipChain Transmission API +info: + version: 0.8.0-beta + title: ShipChain Transmission API + description: | + **THIS DOCUMENTATION IS UNDER CONSTRUCTION** + + The ShipChain Transmission API is a microservice that handles interaction with the ShipChain smart contracts via the Engine project. + + # Errors + The API uses standard HTTP status codes and the [JSON API](http://jsonapi.org/examples/#error-objects) specification to indicate the success or failure of the API call. The body of the response will be JSON in the following format: + + ``` + { + "errors": [ + { + "status": "422", + "source": { "pointer": "/data/attributes/volume" }, + "detail": "Volume does not, in fact, go to 11." 
+ } + ] + } + ``` + + x-logo: + url: '/static/schema/shipchain-logo.png' + +strings: +- &response_200 "Success" +- &response_202 "Accepted" +- &response_204 "Success" +- &response_401 "Unauthorized" +- &response_404 "Resource not found" +- &response_500 "Internal server error" +- &response_503 "Service temporarily unavailable" + +paths: + /api/v1/shipments: + get: + summary: List shipments + description: > + Retrieve an array of `Shipment` objects associated with the account ordered by the `Shipment.created_at` date. + parameters: + - $ref: '#/components/parameters/page' + - $ref: '#/components/parameters/ordering' + tags: + - Shipments + responses: + '200': + description: *response_200 + content: + application/vnd.api+json: + schema: + $ref: '#/components/schemas/shipment/listResponse' + '401': + description: *response_401 + content: + application/vnd.api+json: + schema: + $ref: '#/components/schemas/errors/401' + +components: + parameters: + page: + name: page + in: query + description: Page number used for pagination + required: false + schema: + type: integer + format: int32 + + ordering: + name: search + in: query + description: ordering of the objects + required: false + schema: + type: string + enum: [created_at, modified_at, -created_at, -modified_at] + + wallet: + path: + required: true + name: wallet_id + in: path + description: ID of the Wallet + schema: + $ref: '#/components/schemas/dataTypes/uuid' + + device: + path: + required: true + name: device_id + in: path + description: ID of the Device + schema: + $ref: '#/components/schemas/dataTypes/uuid' + + storageCredentials: + path: + name: storage_credentials_id + in: path + description: ID of the Storage Credentials + required: true + schema: + $ref: '#/components/schemas/dataTypes/uuid' + + schemas: + dataTypes: + # General resource parameters + uuid: &uuid + type: string + format: uuid + description: Unique UUID for this resource + example: '1243d23b-e2fc-475a-8290-0e4f53479553' + + resourceId: + 
properties: + id: + <<: *uuid + title: id + + createdAt: + properties: + created_at: + title: created_at + description: Date/time of resource creation + type: string + format: date-time + example: '2018-06-18T14:54:56.652732Z' + + modifiedAt: + properties: + modified_at: + title: modified_at + description: Date/time of last resource modification + type: string + format: date-time + example: '2018-06-18T14:54:56.652732Z' + + # General Ethereum parameters + ethereum: + address: + properties: + address: + title: address + description: Ethereum address + type: string + example: '0x369974bFb76b656C0c45a84D00b1877340b45fA2' + + publicKey: + properties: + public_key: + title: public_key + description: Public key of the wallet + type: string + example: '8afaded2fc0f7b4566c303fccb9d3b445aa88a1d213d972f38c1c2fe173f7adc8e67596ddab0dd342ec9cd4f907884b5b7ae7b873437fb8a1cb2e58ece473f81' + + privateKey: + properties: + private_key: + title: private_key + description: Private key of the wallet + type: string + example: '2146c3dfeab8621830f5aa4d22cb3ee8ba13c9f9513988ec3fb154c0a43a596a' + + # Models + jsonApi: + resource: + allOf: + - $ref: '#/components/schemas/dataTypes/resourceId' + - $ref: '#/components/requestBodies/jsonApi/createResource' + - type: object + description: Primary data related to the resource. + + dataArray: + properties: + data: + type: array + items: + $ref: '#/components/schemas/jsonApi/resource' + + linksAndMeta: + properties: + links: + $ref: '#/components/schemas/links' + meta: + $ref: '#/components/schemas/meta' + + data: + properties: + data: + $ref: '#/components/schemas/jsonApi/resource' + + error: + properties: + errors: + type: array + items: + type: object + properties: + status: + description: The HTTP status code applicable to this problem, expressed as a string value. + type: string + detail: + description: A human-readable explanation specific to this occurrence of the problem. 
+ type: string + + errorPointer: + properties: + errors: + type: array + description: An array of error objects + items: + type: object + description: Error objects provide additional information about problems encountered while performing an operation. + properties: + source: # Not present on all errors + description: An object containing references to the source of the error, optionally including any of the following members. + type: object + properties: + pointer: + description: A JSON Pointer [RFC6901] to the associated entity in the request document [e.g. "/data" for a primary data object, or "/data/attributes/title" for a specific attribute]. + type: string + + errorParameter: + properties: + errors: + type: array + items: + type: object + properties: + source: # Not present on all errors + description: An object containing references to the source of the error, optionally including any of the following members. + type: object + properties: + parameter: + description: A string indicating which URI query parameter caused the error. 
+ type: string + + shipment: + resource: + properties: + type: + example: Shipment + + attributes: + allOf: + - $ref: '#/components/schemas/dataTypes/createdAt' + - $ref: '#/components/schemas/dataTypes/modifiedAt' + + getResource: + allOf: + - $ref: '#/components/schemas/shipment/resource' + - properties: + attributes: + $ref: '#/components/schemas/shipment/attributes' + + getResponse: + allOf: + - $ref: '#/components/schemas/jsonApi/data' + - properties: + data: + $ref: '#/components/schemas/shipment/getResource' + + listResponse: + allOf: + - $ref: '#/components/schemas/jsonApi/dataArray' + - $ref: '#/components/schemas/jsonApi/linksAndMeta' + - properties: + data: + items: + $ref: '#/components/schemas/shipment/getResource' + + errors: + 401: + allOf: + - $ref: '#/components/schemas/jsonApi/error' + - $ref: '#/components/schemas/jsonApi/errorPointer' + - properties: + errors: + items: + properties: + detail: + example: *response_401 + source: + properties: + pointer: + example: "/data" + status: + example: "401" + 500: + allOf: + - $ref: '#/components/schemas/jsonApi/error' + - properties: + errors: + items: + properties: + detail: + example: *response_500 + status: + example: "500" + + links: + type: object + description: Links related to the primary data. + properties: + first: + title: first + description: Link to first page of the list + type: string + example: '/api/v1/{object-type}/?page=1' + last: + title: last + description: Link to last page of the list + type: string + example: '/api/v1/{object-type}/?page=4' + next: + title: next + description: Link to next page of the list + type: string + example: '/api/v1/{object-type}/?page=3' + previous: + title: previous + description: Link to previous page of the list + type: string + example: '/api/v1/{object-type}/?page=2' + + meta: + type: object + description: Non-standard meta-information. 
+ properties: + pagination: + type: object + properties: + count: + title: count + description: Total number of resources associated with user + type: integer + format: int32 + example: '1' + page: + title: page + description: Current page + type: integer + format: int32 + example: '1' + pages: + title: pages + description: Number of pages of resources + type: integer + format: int32 + example: '1' + + requestBodies: + jsonApi: + createResource: + type: object + description: Primary data related to the resource. + properties: + type: + type: string + title: type + description: Type of resource + attributes: + type: object + title: attributes + description: Properties of the resource + + createData: + properties: + data: + type: object + $ref: '#/components/requestBodies/jsonApi/createResource' + + securitySchemes: + JWT: + description: > + All authenticated service calls must contain a valid JWT id_token obtained from the Profiles OIDC provider and + passed via the Authentication header. + type: http + scheme: bearer + bearerFormat: JWT {id_token} +security: +- JWT: [] +tags: +- name: Shipments + description: > + Shipments are the core entity in the ShipChain Transmission service. This is a collection of endpoints related to managing shipments. 
\ No newline at end of file diff --git a/apps/schema/templates/apidoc.html b/apps/schema/templates/apidoc.html new file mode 100644 index 00000000..4c110d93 --- /dev/null +++ b/apps/schema/templates/apidoc.html @@ -0,0 +1,23 @@ +{% load staticfiles %} + + + + + + + + + ShipChain ReDoc + + + + + + \ No newline at end of file diff --git a/apps/shipments/__init__.py b/apps/shipments/__init__.py new file mode 100644 index 00000000..1502e2ff --- /dev/null +++ b/apps/shipments/__init__.py @@ -0,0 +1,15 @@ +from django.apps import AppConfig + + +class ShipmentsConfig(AppConfig): + name = 'apps.shipments' + label = 'shipments' + verbose_name = 'shipments' + + def ready(self): + # pylint:disable=unused-variable + import apps.shipments.signals + + +# pylint:disable=invalid-name +default_app_config = 'apps.shipments.ShipmentsConfig' diff --git a/apps/shipments/geojson.py b/apps/shipments/geojson.py new file mode 100644 index 00000000..4c08d043 --- /dev/null +++ b/apps/shipments/geojson.py @@ -0,0 +1,142 @@ +import logging +from datetime import datetime + +from rest_framework.exceptions import APIException +from geojson import Feature, FeatureCollection, LineString, Point + +LOG = logging.getLogger('transmission') + + +def build_line_string_feature(shipment, tracking_data): + """ + :param shipment: Shipment to be used for datetime filtering + :param tracking_data: List of tracking data points returned from Engine + :return: All tracking coordinates in a single GeoJSON LineString Feature + """ + begin = (shipment.pickup_actual or datetime.min).replace(tzinfo=None) + end = (shipment.delivery_actual or datetime.max).replace(tzinfo=None) + tracking_points = [] + for point in tracking_data: + try: + dtp = DeviceTrackingPoint(point) + if begin <= dtp.timestamp <= end: + tracking_points.append(dtp) + except InvalidTrackingPointError as err: + LOG.warning(f'Error parsing tracking data for shipment {shipment.id}: {err}') + + return 
[DeviceTrackingPoint.get_linestring_feature(tracking_points)] + + +def build_point_features(shipment, tracking_data): + """ + :param shipment: Shipment to be used for datetime filtering + :param tracking_data: List of tracking data points returned from Engine + :return: All tracking coordinates each in their own GeoJSON Point Feature + """ + begin = (shipment.pickup_actual or datetime.min).replace(tzinfo=None) + end = (shipment.delivery_actual or datetime.max).replace(tzinfo=None) + point_features = [] + for point in tracking_data: + try: + dtp = DeviceTrackingPoint(point) + if begin <= dtp.timestamp <= end: + point_features.append(dtp.as_point_feature()) + except InvalidTrackingPointError as err: + LOG.warning(f'Error parsing tracking data for shipment {shipment.id}: {err}') + return point_features + + +def build_feature_collection(features): + """ + :param features: List of Features, or single Feature to be returned in a FeatureCollection + :return: All provided Features in a single FeatureCollection + """ + feature_list = features + + if not isinstance(feature_list, list): + feature_list = [feature_list] + + return FeatureCollection(feature_list) + + +class InvalidTrackingPointError(APIException): + """ + Extend DRF APIException so these are automatically transformed via the exception handler + """ + pass + + +class DeviceTrackingPoint(object): + """ + Serialize the returned tracking data in to this class to catch formatting errors and assist in + generating the appropriate GeoJSON Features correctly + """ + + def __init__(self, point): + try: + self.lat = point['coordinates'][0] + self.lon = point['coordinates'][1] + self.timestamp = DeviceTrackingPoint.__build_timestamp(point['fix_date'], point['fix_time']) + + self.uncertainty = point['uncertainty'] if 'uncertainty' in point else None + self.has_gps = point['has_gps'] if 'has_gps' in point else None + self.source = point['source'] if 'source' in point else None + + except IndexError: + raise 
InvalidTrackingPointError(f"Invalid Coordinates format from device") + + except KeyError as key_error: + raise InvalidTrackingPointError(f"Missing field {key_error} in tracking data from device") + + def as_point(self): + try: + return Point((self.lon, self.lat)) + except Exception: + raise InvalidTrackingPointError("Unable to build GeoJSON Point from tracking data") + + def as_point_feature(self): + try: + return Feature(geometry=self.as_point(), properties={ + "time": self.timestamp, + "uncertainty": self.uncertainty, + "has_gps": self.has_gps, + "source": self.source, + }) + except Exception: + raise InvalidTrackingPointError("Unable to build GeoJSON Point Feature from tracking data") + + @staticmethod + def get_linestring_list(tracking_points): + linestring = LineString([(point.lon, point.lat) for point in tracking_points]) + linestring_timestamps = [point.timestamp for point in tracking_points] + + return linestring, linestring_timestamps + + @staticmethod + def get_linestring_feature(tracking_points): + try: + linestring, linestring_timestamps = DeviceTrackingPoint.get_linestring_list(tracking_points) + return Feature(geometry=linestring, properties={"linestringTimestamps": linestring_timestamps}) + + except Exception: + raise InvalidTrackingPointError("Unable to build GeoJSON LineString Feature from tracking data") + + @staticmethod + def __extract_date_fields(date): + day, month, year = int(date[:2]), int(date[2:4]), int("20" + date[4:6]) + return day, month, year + + @staticmethod + def __extract_time_fields(time): + hour, minute, second = int(time[:2]), int(time[2:4]), int(time[4:6]) + return hour, minute, second + + @staticmethod + def __build_timestamp(date, time): + try: + day, month, year = DeviceTrackingPoint.__extract_date_fields(date) + hour, minute, second = DeviceTrackingPoint.__extract_time_fields(time) + return datetime(year, month, day, hour, minute, second) + + except Exception as exception: + raise InvalidTrackingPointError(f"Error building 
timestamp from device tracking data: '{exception}'") diff --git a/apps/shipments/migrations/0001_initial.py b/apps/shipments/migrations/0001_initial.py new file mode 100644 index 00000000..1b720243 --- /dev/null +++ b/apps/shipments/migrations/0001_initial.py @@ -0,0 +1,104 @@ +# Generated by Django 2.0.7 on 2018-07-16 17:40 + +import apps.utils +import django.contrib.postgres.fields.jsonb +import django.core.validators +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ] + + operations = [ + migrations.CreateModel( + name='Location', + fields=[ + ('id', models.CharField(default=apps.utils.random_id, max_length=36, primary_key=True, serialize=False)), + ('name', models.CharField(max_length=255)), + ('address_1', models.CharField(max_length=255)), + ('address_2', models.CharField(max_length=255)), + ('city', models.CharField(max_length=255)), + ('state', models.CharField(max_length=255)), + ('country', models.CharField(max_length=255)), + ('postal_code', models.CharField(max_length=255)), + ('phone_number', models.CharField(max_length=255, validators=[django.core.validators.RegexValidator(message='Invalid phone number.', regex='^(\\+\\d{1,2}\\s)?\\(?\\d{3}\\)?[\\s.-]?\\d{3}[\\s.-]?\\d{4}')])), + ('fax_number', models.CharField(max_length=255, validators=[django.core.validators.RegexValidator(message='Invalid phone number.', regex='^(\\+\\d{1,2}\\s)?\\(?\\d{3}\\)?[\\s.-]?\\d{3}[\\s.-]?\\d{4}')])), + ('updated_at', models.DateTimeField(auto_now=True)), + ('created_at', models.DateTimeField(auto_now_add=True)), + ], + options={ + 'ordering': ('created_at',), + }, + ), + migrations.CreateModel( + name='Shipment', + fields=[ + ('id', models.CharField(default=apps.utils.random_id, max_length=36, primary_key=True, serialize=False)), + ('owner', models.CharField(max_length=36)), + ('carrier_scac', models.CharField(max_length=255)), + ('forwarder_scac', 
models.CharField(max_length=255)), + ('nvocc_scac', models.CharField(max_length=255)), + ('shipper_reference', models.CharField(max_length=255)), + ('forwarder_reference', models.CharField(max_length=255)), + ('forwarders_shipper_id', models.CharField(max_length=255)), + ('carrier_instructions', models.CharField(max_length=255)), + ('pro_number', models.CharField(max_length=255)), + ('master_bill', models.CharField(max_length=255)), + ('house_bill', models.CharField(max_length=255)), + ('subhouse_bill', models.CharField(max_length=255)), + ('freight_payment_terms', models.CharField(max_length=255)), + ('vessel_name', models.CharField(max_length=255)), + ('voyage_number', models.CharField(max_length=255)), + ('mode', models.CharField(max_length=255)), + ('number_of_packages', models.IntegerField(default=0)), + ('gross_weight_kgs', models.IntegerField(default=0)), + ('volume_cbms', models.IntegerField(default=0)), + ('container_count', models.IntegerField(default=0)), + ('dimensional_weight', models.IntegerField(default=0)), + ('chargeable_weight', models.IntegerField(default=0)), + ('docs_received_actual', models.DateTimeField()), + ('docs_approved_actual', models.DateTimeField()), + ('pickup_appointment_actual', models.DateTimeField()), + ('pickup_estimated', models.DateTimeField()), + ('pickup_actual', models.DateTimeField()), + ('loading_estimated', models.DateTimeField()), + ('loading_actual', models.DateTimeField()), + ('departure_estimated', models.DateTimeField()), + ('departure_actual', models.DateTimeField()), + ('delivery_appointment_actual', models.DateTimeField()), + ('arrival_port_estimated', models.DateTimeField()), + ('arrival_port_actual', models.DateTimeField()), + ('delivery_estimated', models.DateTimeField()), + ('delivery_actual', models.DateTimeField()), + ('last_attempted_delivery_actual', models.DateTimeField()), + ('cancel_requested_date_actual', models.DateTimeField()), + ('cancel_confirmed_date_actual', models.DateTimeField()), + 
('customs_filed_date_actual', models.DateTimeField()), + ('customs_hold_date_actual', models.DateTimeField()), + ('customs_release_date_actual', models.DateTimeField()), + ('containerization_type', models.CharField(max_length=255)), + ('arrival_unlocode', models.CharField(max_length=255)), + ('final_port_unlocode', models.CharField(max_length=255)), + ('import_unlocode', models.CharField(max_length=255)), + ('lading_unlocode', models.CharField(max_length=255)), + ('origin_unlocode', models.CharField(max_length=255)), + ('us_routed_export', models.CharField(max_length=255)), + ('import_customs_mode', models.CharField(max_length=255)), + ('us_customs_export_port', models.CharField(max_length=255)), + ('updated_at', models.DateTimeField(auto_now=True)), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('customer_fields', django.contrib.postgres.fields.jsonb.JSONField()), + ('final_destination_location', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='shipment_shipments_dest', to='shipments.Location')), + ('ship_from_location', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='shipment_shipments_from', to='shipments.Location')), + ('ship_to_location', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='shipment_shipments_to', to='shipments.Location')), + ], + options={ + 'ordering': ('created_at',), + }, + ), + ] diff --git a/apps/shipments/migrations/0002_auto_20180716_1748.py b/apps/shipments/migrations/0002_auto_20180716_1748.py new file mode 100644 index 00000000..a2cfbdd0 --- /dev/null +++ b/apps/shipments/migrations/0002_auto_20180716_1748.py @@ -0,0 +1,285 @@ +# Generated by Django 2.0.7 on 2018-07-16 17:48 + +import django.contrib.postgres.fields.jsonb +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('shipments', '0001_initial'), + ] + + operations = [ + migrations.AlterField( 
+ model_name='shipment', + name='arrival_port_actual', + field=models.DateTimeField(blank=True, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='arrival_port_estimated', + field=models.DateTimeField(blank=True, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='arrival_unlocode', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='cancel_confirmed_date_actual', + field=models.DateTimeField(blank=True, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='cancel_requested_date_actual', + field=models.DateTimeField(blank=True, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='carrier_instructions', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='carrier_scac', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='chargeable_weight', + field=models.IntegerField(blank=True, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='container_count', + field=models.IntegerField(blank=True, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='containerization_type', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='customer_fields', + field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='customs_filed_date_actual', + field=models.DateTimeField(blank=True, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='customs_hold_date_actual', + field=models.DateTimeField(blank=True, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='customs_release_date_actual', + field=models.DateTimeField(blank=True, null=True), + 
), + migrations.AlterField( + model_name='shipment', + name='delivery_actual', + field=models.DateTimeField(blank=True, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='delivery_appointment_actual', + field=models.DateTimeField(blank=True, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='delivery_estimated', + field=models.DateTimeField(blank=True, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='departure_actual', + field=models.DateTimeField(blank=True, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='departure_estimated', + field=models.DateTimeField(blank=True, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='dimensional_weight', + field=models.IntegerField(blank=True, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='docs_approved_actual', + field=models.DateTimeField(blank=True, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='docs_received_actual', + field=models.DateTimeField(blank=True, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='final_destination_location', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='shipment_shipments_dest', to='shipments.Location'), + ), + migrations.AlterField( + model_name='shipment', + name='final_port_unlocode', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='forwarder_reference', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='forwarder_scac', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='forwarders_shipper_id', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='shipment', + 
name='freight_payment_terms', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='gross_weight_kgs', + field=models.IntegerField(blank=True, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='house_bill', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='import_customs_mode', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='import_unlocode', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='lading_unlocode', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='last_attempted_delivery_actual', + field=models.DateTimeField(blank=True, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='loading_actual', + field=models.DateTimeField(blank=True, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='loading_estimated', + field=models.DateTimeField(blank=True, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='master_bill', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='mode', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='number_of_packages', + field=models.IntegerField(blank=True, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='nvocc_scac', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='origin_unlocode', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='shipment', + 
name='pickup_actual', + field=models.DateTimeField(blank=True, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='pickup_appointment_actual', + field=models.DateTimeField(blank=True, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='pickup_estimated', + field=models.DateTimeField(blank=True, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='pro_number', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='ship_from_location', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='shipment_shipments_from', to='shipments.Location'), + ), + migrations.AlterField( + model_name='shipment', + name='ship_to_location', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='shipment_shipments_to', to='shipments.Location'), + ), + migrations.AlterField( + model_name='shipment', + name='shipper_reference', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='subhouse_bill', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='us_customs_export_port', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='us_routed_export', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='vessel_name', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='volume_cbms', + field=models.IntegerField(blank=True, null=True), + ), + migrations.AlterField( + model_name='shipment', + name='voyage_number', + field=models.CharField(blank=True, max_length=255, null=True), + ), + ] diff --git 
a/apps/shipments/migrations/0003_auto_20180716_1848.py b/apps/shipments/migrations/0003_auto_20180716_1848.py new file mode 100644 index 00000000..a3afd8a0 --- /dev/null +++ b/apps/shipments/migrations/0003_auto_20180716_1848.py @@ -0,0 +1,18 @@ +# Generated by Django 2.0.7 on 2018-07-16 18:48 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('shipments', '0002_auto_20180716_1748'), + ] + + operations = [ + migrations.RenameField( + model_name='shipment', + old_name='owner', + new_name='owner_id', + ), + ] diff --git a/apps/shipments/migrations/0004_auto_20180727_1854.py b/apps/shipments/migrations/0004_auto_20180727_1854.py new file mode 100644 index 00000000..cee780d3 --- /dev/null +++ b/apps/shipments/migrations/0004_auto_20180727_1854.py @@ -0,0 +1,72 @@ +# Generated by Django 2.0.7 on 2018-07-27 18:54 + +import apps.utils +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('shipments', '0003_auto_20180716_1848'), + ] + + operations = [ + migrations.CreateModel( + name='LoadShipment', + fields=[ + ('id', models.CharField(default=apps.utils.random_id, max_length=36, primary_key=True, serialize=False)), + ('shipment_id', models.IntegerField(db_index=True)), + ('shipment_amount', models.IntegerField()), + ('paid_amount', models.IntegerField(default=0)), + ('paid_tokens', models.DecimalField(decimal_places=18, max_digits=40)), + ('shipper', models.CharField(max_length=42)), + ('carrier', models.CharField(max_length=42)), + ('moderator', models.CharField(max_length=42, null=True)), + ('contract_funded', models.BooleanField()), + ('shipment_created', models.BooleanField()), + ('valid_until', models.IntegerField()), + ('start_block', models.IntegerField()), + ('end_block', models.IntegerField(null=True)), + ('escrow_funded', models.BooleanField()), + ('shipment_committed_by_carrier', models.BooleanField()), + 
('commitment_confirmed_date', models.IntegerField()), + ('shipment_completed_by_carrier', models.BooleanField()), + ('shipment_accepted_by_shipper', models.BooleanField()), + ('shipment_canceled_by_shipper', models.BooleanField()), + ('escrow_paid', models.BooleanField()), + ], + ), + migrations.AlterModelOptions( + name='location', + options={}, + ), + migrations.AddField( + model_name='shipment', + name='carrier_wallet_id', + field=models.CharField(default=1234, max_length=36), + preserve_default=False, + ), + migrations.AddField( + model_name='shipment', + name='shipper_wallet_id', + field=models.CharField(default=1234, max_length=36), + preserve_default=False, + ), + migrations.AddField( + model_name='shipment', + name='storage_credentials_id', + field=models.CharField(default=1234, max_length=36), + preserve_default=False, + ), + migrations.AddField( + model_name='shipment', + name='vault_id', + field=models.CharField(max_length=36, null=True), + ), + migrations.AddField( + model_name='shipment', + name='load_data', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='shipments.LoadShipment'), + ), + ] diff --git a/apps/shipments/migrations/0005_auto_20180808_1643.py b/apps/shipments/migrations/0005_auto_20180808_1643.py new file mode 100644 index 00000000..7ff6b1d3 --- /dev/null +++ b/apps/shipments/migrations/0005_auto_20180808_1643.py @@ -0,0 +1,54 @@ +# Generated by Django 2.0.7 on 2018-08-08 16:43 + +import django.core.validators +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('shipments', '0004_auto_20180727_1854'), + ] + + operations = [ + migrations.AlterField( + model_name='location', + name='address_1', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='location', + name='address_2', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + 
model_name='location', + name='city', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='location', + name='country', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='location', + name='fax_number', + field=models.CharField(blank=True, max_length=255, null=True, validators=[django.core.validators.RegexValidator(message='Invalid phone number.', regex='^(\\+\\d{1,2}\\s)?\\(?\\d{3}\\)?[\\s.-]?\\d{3}[\\s.-]?\\d{4}')]), + ), + migrations.AlterField( + model_name='location', + name='phone_number', + field=models.CharField(blank=True, max_length=255, null=True, validators=[django.core.validators.RegexValidator(message='Invalid phone number.', regex='^(\\+\\d{1,2}\\s)?\\(?\\d{3}\\)?[\\s.-]?\\d{3}[\\s.-]?\\d{4}')]), + ), + migrations.AlterField( + model_name='location', + name='postal_code', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='location', + name='state', + field=models.CharField(blank=True, max_length=255, null=True), + ), + ] diff --git a/apps/shipments/migrations/0006_shipment_required_fields.py b/apps/shipments/migrations/0006_shipment_required_fields.py new file mode 100644 index 00000000..62612fda --- /dev/null +++ b/apps/shipments/migrations/0006_shipment_required_fields.py @@ -0,0 +1,78 @@ +# Generated by Django 2.0.7 on 2018-08-15 14:45 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('shipments', '0005_auto_20180808_1643'), + ] + + operations = [ + migrations.AlterField( + model_name='loadshipment', + name='contract_funded', + field=models.BooleanField(default=False), + ), + migrations.AlterField( + model_name='loadshipment', + name='end_block', + field=models.IntegerField(blank=True, null=True), + ), + migrations.AlterField( + model_name='loadshipment', + name='escrow_funded', + 
field=models.BooleanField(default=False), + ), + migrations.AlterField( + model_name='loadshipment', + name='escrow_paid', + field=models.BooleanField(default=False), + ), + migrations.AlterField( + model_name='loadshipment', + name='moderator', + field=models.CharField(blank=True, max_length=42, null=True), + ), + migrations.AlterField( + model_name='loadshipment', + name='paid_tokens', + field=models.DecimalField(decimal_places=18, default=0, max_digits=40), + ), + migrations.AlterField( + model_name='loadshipment', + name='shipment_accepted_by_shipper', + field=models.BooleanField(default=False), + ), + migrations.AlterField( + model_name='loadshipment', + name='shipment_canceled_by_shipper', + field=models.BooleanField(default=False), + ), + migrations.AlterField( + model_name='loadshipment', + name='shipment_committed_by_carrier', + field=models.BooleanField(default=False), + ), + migrations.AlterField( + model_name='loadshipment', + name='shipment_completed_by_carrier', + field=models.BooleanField(default=False), + ), + migrations.AlterField( + model_name='loadshipment', + name='shipment_created', + field=models.BooleanField(default=False), + ), + migrations.AlterField( + model_name='loadshipment', + name='shipment_id', + field=models.IntegerField(blank=True, null=True), + ), + migrations.AlterField( + model_name='loadshipment', + name='start_block', + field=models.IntegerField(blank=True, null=True), + ), + ] diff --git a/apps/shipments/migrations/0007_more_required_fields.py b/apps/shipments/migrations/0007_more_required_fields.py new file mode 100644 index 00000000..9cf5780a --- /dev/null +++ b/apps/shipments/migrations/0007_more_required_fields.py @@ -0,0 +1,18 @@ +# Generated by Django 2.0.7 on 2018-08-15 14:48 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('shipments', '0006_shipment_required_fields'), + ] + + operations = [ + migrations.AlterField( + model_name='loadshipment', + 
name='commitment_confirmed_date', + field=models.IntegerField(blank=True, null=True), + ), + ] diff --git a/apps/shipments/migrations/0008_shipment_contract_version.py b/apps/shipments/migrations/0008_shipment_contract_version.py new file mode 100644 index 00000000..4a471686 --- /dev/null +++ b/apps/shipments/migrations/0008_shipment_contract_version.py @@ -0,0 +1,19 @@ +# Generated by Django 2.0.7 on 2018-08-30 12:40 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('shipments', '0007_more_required_fields'), + ] + + operations = [ + migrations.AddField( + model_name='shipment', + name='contract_version', + field=models.CharField(default='1.0.2', max_length=36), + preserve_default=False, + ), + ] diff --git a/apps/shipments/migrations/0009_auto_20180830_2021.py b/apps/shipments/migrations/0009_auto_20180830_2021.py new file mode 100644 index 00000000..8f5973bd --- /dev/null +++ b/apps/shipments/migrations/0009_auto_20180830_2021.py @@ -0,0 +1,19 @@ +# Generated by Django 2.0.7 on 2018-08-30 20:21 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('shipments', '0008_shipment_contract_version'), + ] + + operations = [ + migrations.AlterField( + model_name='shipment', + name='load_data', + field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='shipments.LoadShipment'), + ), + ] diff --git a/apps/shipments/migrations/0010_auto_20180830_2137.py b/apps/shipments/migrations/0010_auto_20180830_2137.py new file mode 100644 index 00000000..8583ff7d --- /dev/null +++ b/apps/shipments/migrations/0010_auto_20180830_2137.py @@ -0,0 +1,31 @@ +# Generated by Django 2.0.7 on 2018-08-30 21:37 + +import apps.shipments.models +from django.db import migrations +import enumfields.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('shipments', '0009_auto_20180830_2021'), + ] + + 
operations = [ + migrations.AddField( + model_name='loadshipment', + name='escrow_status', + field=enumfields.fields.EnumField(default=1, enum=apps.shipments.models.EscrowStatus, max_length=10), + ), + migrations.AddField( + model_name='loadshipment', + name='funding_type', + field=enumfields.fields.EnumField(default=2, enum=apps.shipments.models.FundingType, max_length=10), + preserve_default=False, + ), + migrations.AddField( + model_name='loadshipment', + name='shipment_status', + field=enumfields.fields.EnumField(default=0, enum=apps.shipments.models.ShipmentStatus, max_length=10), + ), + ] diff --git a/apps/shipments/migrations/__init__.py b/apps/shipments/migrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/apps/shipments/models.py b/apps/shipments/models.py new file mode 100644 index 00000000..81626d6f --- /dev/null +++ b/apps/shipments/models.py @@ -0,0 +1,203 @@ +import logging + +from django.conf import settings +from django.db import models +from django.core.validators import RegexValidator +from django.contrib.postgres.fields import JSONField +from django.contrib.contenttypes.fields import GenericRelation + +from enumfields import Enum +from enumfields import EnumField + +from apps.utils import random_id +from apps.jobs.models import JobListener, AsyncJob +from apps.eth.models import EthListener +from .rpc import ShipmentRPCClient + +LOG = logging.getLogger('transmission') + + +class Location(models.Model): + id = models.CharField(primary_key=True, default=random_id, max_length=36) + + name = models.CharField(max_length=255) + address_1 = models.CharField(max_length=255, blank=True, null=True) + address_2 = models.CharField(max_length=255, blank=True, null=True) + city = models.CharField(max_length=255, blank=True, null=True) + state = models.CharField(max_length=255, blank=True, null=True) + country = models.CharField(max_length=255, blank=True, null=True) + postal_code = models.CharField(max_length=255, blank=True, 
null=True) + + phone_regex = RegexValidator(regex=r'^(\+\d{1,2}\s)?\(?\d{3}\)?[\s.-]?\d{3}[\s.-]?\d{4}', + message="Invalid phone number.") + phone_number = models.CharField(validators=[phone_regex], max_length=255, blank=True, null=True) + fax_number = models.CharField(validators=[phone_regex], max_length=255, blank=True, null=True) + + updated_at = models.DateTimeField(auto_now=True) + created_at = models.DateTimeField(auto_now_add=True) + + +class FundingType(Enum): + SHIP = 0 + CASH = 1 + ETH = 2 + + +class EscrowStatus(Enum): + CONTRACT_INITIATED = 1 + CONTRACT_SETUP = 2 + CONTRACT_COMMITTED = 3 + CONTRACT_COMPLETED = 4 + CONTRACT_ACCEPTED = 5 + CONTRACT_CANCELED = 6 + + +class ShipmentStatus(Enum): + PENDING = 0 + INITIATED = 1 + COMPLETED = 2 + CANCELED = 3 + + +class LoadShipment(models.Model): + id = models.CharField(primary_key=True, default=random_id, max_length=36) + + shipment_id = models.IntegerField(blank=True, null=True) + funding_type = EnumField(enum=FundingType) + + shipment_amount = models.IntegerField() + paid_amount = models.IntegerField(default=0) + paid_tokens = models.DecimalField(max_digits=40, decimal_places=18, default=0) + + shipper = models.CharField(max_length=42) + carrier = models.CharField(max_length=42) + moderator = models.CharField(max_length=42, blank=True, null=True) + + escrow_status = EnumField(enum=EscrowStatus, default=EscrowStatus.CONTRACT_INITIATED) + shipment_status = EnumField(enum=ShipmentStatus, default=ShipmentStatus.PENDING) + + contract_funded = models.BooleanField(default=False) + shipment_created = models.BooleanField(default=False) + valid_until = models.IntegerField() + start_block = models.IntegerField(blank=True, null=True) + end_block = models.IntegerField(blank=True, null=True) + + escrow_funded = models.BooleanField(default=False) + shipment_committed_by_carrier = models.BooleanField(default=False) + commitment_confirmed_date = models.IntegerField(blank=True, null=True) + + shipment_completed_by_carrier = 
models.BooleanField(default=False) + shipment_accepted_by_shipper = models.BooleanField(default=False) + shipment_canceled_by_shipper = models.BooleanField(default=False) + escrow_paid = models.BooleanField(default=False) + + +class Shipment(models.Model): + id = models.CharField(primary_key=True, default=random_id, max_length=36) + owner_id = models.CharField(null=False, max_length=36) + load_data = models.OneToOneField(LoadShipment, on_delete=models.CASCADE, null=True) + + storage_credentials_id = models.CharField(null=False, max_length=36) + vault_id = models.CharField(null=True, max_length=36) + shipper_wallet_id = models.CharField(null=False, max_length=36) + carrier_wallet_id = models.CharField(null=False, max_length=36) + updated_at = models.DateTimeField(auto_now=True) + created_at = models.DateTimeField(auto_now_add=True) + + job_listeners = GenericRelation(JobListener, related_query_name='shipments', + content_type_field='listener_type', object_id_field='listener_id') + eth_listeners = GenericRelation(EthListener, related_query_name='shipments', + content_type_field='listener_type', object_id_field='listener_id') + contract_version = models.CharField(null=False, max_length=36) + + class Meta: + ordering = ('created_at',) + + # Shipment Schema fields + carrier_scac = models.CharField(max_length=255, blank=True, null=True) + forwarder_scac = models.CharField(max_length=255, blank=True, null=True) + nvocc_scac = models.CharField(max_length=255, blank=True, null=True) + shipper_reference = models.CharField(max_length=255, blank=True, null=True) + forwarder_reference = models.CharField(max_length=255, blank=True, null=True) + forwarders_shipper_id = models.CharField(max_length=255, blank=True, null=True) + + ship_from_location = models.ForeignKey(Location, on_delete=models.PROTECT, + related_name='%(class)s_shipments_from', null=True) + ship_to_location = models.ForeignKey(Location, on_delete=models.PROTECT, + related_name='%(class)s_shipments_to', null=True) 
+ final_destination_location = models.ForeignKey(Location, on_delete=models.PROTECT, + related_name='%(class)s_shipments_dest', null=True) + + carrier_instructions = models.CharField(max_length=255, blank=True, null=True) + pro_number = models.CharField(max_length=255, blank=True, null=True) + master_bill = models.CharField(max_length=255, blank=True, null=True) + house_bill = models.CharField(max_length=255, blank=True, null=True) + subhouse_bill = models.CharField(max_length=255, blank=True, null=True) + freight_payment_terms = models.CharField(max_length=255, blank=True, null=True) + vessel_name = models.CharField(max_length=255, blank=True, null=True) + voyage_number = models.CharField(max_length=255, blank=True, null=True) + mode = models.CharField(max_length=255, blank=True, null=True) + + number_of_packages = models.IntegerField(blank=True, null=True) + gross_weight_kgs = models.IntegerField(blank=True, null=True) + volume_cbms = models.IntegerField(blank=True, null=True) + container_count = models.IntegerField(blank=True, null=True) + dimensional_weight = models.IntegerField(blank=True, null=True) + chargeable_weight = models.IntegerField(blank=True, null=True) + + docs_received_actual = models.DateTimeField(blank=True, null=True) + docs_approved_actual = models.DateTimeField(blank=True, null=True) + pickup_appointment_actual = models.DateTimeField(blank=True, null=True) + pickup_estimated = models.DateTimeField(blank=True, null=True) + pickup_actual = models.DateTimeField(blank=True, null=True) + loading_estimated = models.DateTimeField(blank=True, null=True) + loading_actual = models.DateTimeField(blank=True, null=True) + departure_estimated = models.DateTimeField(blank=True, null=True) + departure_actual = models.DateTimeField(blank=True, null=True) + delivery_appointment_actual = models.DateTimeField(blank=True, null=True) + arrival_port_estimated = models.DateTimeField(blank=True, null=True) + arrival_port_actual = models.DateTimeField(blank=True, 
null=True) + delivery_estimated = models.DateTimeField(blank=True, null=True) + delivery_actual = models.DateTimeField(blank=True, null=True) + last_attempted_delivery_actual = models.DateTimeField(blank=True, null=True) + cancel_requested_date_actual = models.DateTimeField(blank=True, null=True) + cancel_confirmed_date_actual = models.DateTimeField(blank=True, null=True) + customs_filed_date_actual = models.DateTimeField(blank=True, null=True) + customs_hold_date_actual = models.DateTimeField(blank=True, null=True) + customs_release_date_actual = models.DateTimeField(blank=True, null=True) + + containerization_type = models.CharField(max_length=255, blank=True, null=True) + arrival_unlocode = models.CharField(max_length=255, blank=True, null=True) + final_port_unlocode = models.CharField(max_length=255, blank=True, null=True) + import_unlocode = models.CharField(max_length=255, blank=True, null=True) + lading_unlocode = models.CharField(max_length=255, blank=True, null=True) + origin_unlocode = models.CharField(max_length=255, blank=True, null=True) + us_routed_export = models.CharField(max_length=255, blank=True, null=True) + import_customs_mode = models.CharField(max_length=255, blank=True, null=True) + us_customs_export_port = models.CharField(max_length=255, blank=True, null=True) + + customer_fields = JSONField(blank=True, null=True) + + def get_device_request_url(self): + return f"{settings.PROFILES_URL}/api/v1/device/?on_shipment={self.vault_id}" + + def update_vault_hash(self, vault_hash): + async_job = None + if self.load_data and self.load_data.shipment_id: + async_job = AsyncJob.rpc_job_for_listener( + rpc_class=ShipmentRPCClient, + rpc_method=ShipmentRPCClient.update_vault_hash_transaction, + rpc_parameters=[self.shipper_wallet_id, + self.load_data.shipment_id, + '', + vault_hash], + signing_wallet_id=self.shipper_wallet_id, + listener=self) + else: + LOG.error(f'Shipment {self.id} tried to update_vault_hash before load_data.shipment_id was set!') + 
return async_job + + # Defaults + VALID_UNTIL = 24 + FUNDING_TYPE = FundingType.SHIP.value + SHIPMENT_AMOUNT = 1 diff --git a/apps/shipments/permissions.py b/apps/shipments/permissions.py new file mode 100644 index 00000000..d38174d4 --- /dev/null +++ b/apps/shipments/permissions.py @@ -0,0 +1,12 @@ +from rest_framework import permissions + + +class IsOwner(permissions.BasePermission): + """ + Custom permission to only allow owners of an object to edit it + """ + + def has_object_permission(self, request, view, obj): + + # Permissions are only allowed to the owner of the shipment. + return obj.owner_id == request.user.id diff --git a/apps/shipments/rpc.py b/apps/shipments/rpc.py new file mode 100644 index 00000000..a03bacda --- /dev/null +++ b/apps/shipments/rpc.py @@ -0,0 +1,74 @@ +from apps.rpc_client import RPCClient, RPCError + + +class ShipmentRPCClient(RPCClient): + def create_vault(self, storage_credentials_id, shipper_wallet_id, carrier_wallet_id): + + result = self.call('load.create_vault', { + "storageCredentials": storage_credentials_id, + "shipperWallet": shipper_wallet_id, + "carrierWallet": carrier_wallet_id + }) + + if 'success' in result and result['success']: + if 'vault_id' in result: + return result['vault_id'] + raise RPCError("Invalid response from Engine") + + def add_shipment_data(self, storage_credentials_id, wallet_id, vault_id, shipment_data): + + result = self.call('load.add_shipment_data', { + "storageCredentials": storage_credentials_id, + "vaultWallet": wallet_id, + "vault": vault_id, + "shipment": shipment_data + }) + + if 'success' in result and result['success']: + return result['vault_signed'] + raise RPCError("Invalid response from Engine") + + def create_shipment_transaction(self, shipper_wallet_id, carrier_wallet_id, # pylint: disable=too-many-arguments + valid_until, funding_type, shipment_amount): + + result = self.call('load.create_shipment_transaction', { + "shipperWallet": shipper_wallet_id, + "carrierWallet": 
carrier_wallet_id, + "validUntil": valid_until, + "fundingType": funding_type, + "shipmentAmount": shipment_amount + }) + + if 'success' in result and result['success']: + if 'transaction' in result and 'contractVersion' in result: + return result['contractVersion'], result['transaction'] + + raise RPCError("Invalid response from Engine") + + def get_tracking_data(self, storage_credentials_id, wallet_id, vault_id): + + result = self.call('load.get_tracking_data', { + "storageCredentials": storage_credentials_id, + "vaultWallet": wallet_id, + "vault": vault_id + }) + + if 'success' in result and result['success']: + if 'contents' in result: + return result['contents'] + + raise RPCError("Invalid response from Engine") + + def update_vault_hash_transaction(self, wallet_id, current_shipment_id, url, vault_hash): + result = self.call('load.update_vault_hash_transaction', { + "shipperWallet": wallet_id, + "shipmentId": current_shipment_id, + "url": url, + "hash": vault_hash + }) + + if 'success' in result and result['success']: + if 'transaction' in result: + return result['transaction'] + + raise RPCError("Invalid response from Engine") diff --git a/apps/shipments/serializers.py b/apps/shipments/serializers.py new file mode 100644 index 00000000..4081949d --- /dev/null +++ b/apps/shipments/serializers.py @@ -0,0 +1,133 @@ +from collections import OrderedDict + +from django.conf import settings +from rest_framework.fields import SkipField +from enumfields.drf import EnumField +from enumfields.drf.serializers import EnumSupportSerializerMixin +from rest_framework_json_api import serializers +from apps.shipments.models import Shipment, Location, LoadShipment, FundingType, EscrowStatus, ShipmentStatus + + +class NullableFieldsMixin: + def to_representation(self, instance): + # Remove null fields from serialized object + ret = OrderedDict() + fields = [field for field in self.fields.values() if not field.write_only] + + for field in fields: + try: + attribute = 
field.get_attribute(instance) + except SkipField: + continue + + if attribute is not None: + representation = field.to_representation(attribute) + if representation is None: + # Do not serialize empty objects + continue + if isinstance(representation, list) and not representation: + # Do not serialize empty lists + continue + ret[field.field_name] = representation + + return ret + + +class LocationSerializer(NullableFieldsMixin, serializers.ModelSerializer): + """ + Serializer for a location, used nested in a Shipment + """ + class Meta: + model = Location + fields = '__all__' + + +class LoadShipmentSerializer(NullableFieldsMixin, serializers.ModelSerializer): + """ + Serializer for a location, used nested in a Shipment + """ + funding_type = EnumField(FundingType, ints_as_names=True) + escrow_status = EnumField(EscrowStatus, ints_as_names=True) + shipment_status = EnumField(ShipmentStatus, ints_as_names=True) + + class Meta: + model = LoadShipment + fields = '__all__' + + +class ShipmentSerializer(serializers.ModelSerializer, EnumSupportSerializerMixin): + """ + Serializer for a shipment object + """ + load_data = LoadShipmentSerializer(required=False) + ship_from_location = LocationSerializer(required=False) + ship_to_location = LocationSerializer(required=False) + + transactions = serializers.SerializerMethodField() + + def get_transactions(self, obj): + return [eth_action.transaction_hash for eth_action in obj.ethaction_set.all()] + + class Meta: + model = Shipment + fields = '__all__' + read_only_fields = ('owner_id', 'contract_version') if settings.PROFILES_URL else ('contract_version',) + + class JSONAPIMeta: + included_resources = ['ship_from_location', 'ship_to_location', 'final_destination_location', 'load_data'] + + +class ShipmentCreateSerializer(ShipmentSerializer): + def create(self, validated_data): + location_args = {} + + for location_field in ['ship_from_location', 'ship_to_location']: + if location_field in validated_data: + data = 
validated_data.pop(location_field) + + location_args[location_field], _ = Location.objects.get_or_create(**data) + + return Shipment.objects.create(**validated_data, **location_args) + + +class ShipmentUpdateSerializer(ShipmentSerializer): + class Meta: + model = Shipment + fields = '__all__' + if settings.PROFILES_URL: + read_only_fields = ('owner_id', 'vault_id', 'shipper_wallet_id', + 'carrier_wallet_id', 'storage_credentials_id') + else: + read_only_fields = ('vault_id', 'shipper_wallet_id', 'carrier_wallet_id', 'storage_credentials_id') + + +class ShipmentTxSerializer(serializers.ModelSerializer): + async_job_id = serializers.CharField(max_length=36) + + load_data = LoadShipmentSerializer(required=False) + ship_from_location = LocationSerializer(required=False) + ship_to_location = LocationSerializer(required=False) + + class Meta: + model = Shipment + fields = '__all__' + read_only_fields = ('owner_id',) + meta_fields = ('async_job_id',) + + class JSONAPIMeta: + included_resources = ['ship_from_location', 'ship_to_location', 'final_destination_location', 'load_data'] + + +class ShipmentVaultSerializer(NullableFieldsMixin, serializers.ModelSerializer): + """ + Serializer for a shipment vault object + """ + + ship_from_location = LocationSerializer(required=False) + ship_to_location = LocationSerializer(required=False) + + class Meta: + model = Shipment + exclude = ('owner_id', 'load_data', 'storage_credentials_id', + 'vault_id', 'shipper_wallet_id', 'carrier_wallet_id', + 'contract_version') diff --git a/apps/shipments/signals.py b/apps/shipments/signals.py new file mode 100644 index 00000000..863b2233 --- /dev/null +++ b/apps/shipments/signals.py @@ -0,0 +1,88 @@ +from django.dispatch import receiver +from django.db.models.signals import post_save + +from apps.eth.signals import event_update +from apps.eth.models import TransactionReceipt +from apps.jobs.models import JobState, MessageType, AsyncJob +from apps.jobs.signals import job_update +from .models 
import Shipment, LoadShipment +from .rpc import ShipmentRPCClient +from .serializers import ShipmentVaultSerializer + + +@receiver(job_update, sender=Shipment, dispatch_uid='shipment_job_update') +def shipment_job_update(sender, message, listener, **kwargs): + if message.type == MessageType.ETH_TRANSACTION: + TransactionReceipt.objects.filter(eth_action_id=message.body['transactionHash'] + ).update(**TransactionReceipt.convert_receipt(message.body)) + + message.async_job.state = JobState.COMPLETE + message.async_job.save() + + +@receiver(event_update, sender=Shipment, dispatch_uid='shipment_event_update') +def shipment_event_update(sender, event, listener, **kwargs): + + # TODO: Metrics for processed Events + + if event.event_name == "CreateNewShipmentEvent": + listener.load_data.shipment_id = event.return_values['shipmentID'] + listener.load_data.start_block = event.block_number + listener.load_data.shipment_created = True + listener.load_data.save() + + # Add vault data to new Shipment + rpc_client = ShipmentRPCClient() + signature = rpc_client.add_shipment_data(listener.storage_credentials_id, listener.shipper_wallet_id, + listener.vault_id, ShipmentVaultSerializer(listener).data) + + # Update LOAD contract with vault uri/hash + listener.update_vault_hash(signature['hash']) + + +@receiver(post_save, sender=Shipment, dispatch_uid='shipment_post_save') +def shipment_post_save(sender, **kwargs): + instance, created = kwargs["instance"], kwargs["created"] + if created: + # Create vault + rpc_client = ShipmentRPCClient() + instance.vault_id = rpc_client.create_vault(instance.storage_credentials_id, instance.shipper_wallet_id, + instance.carrier_wallet_id) + + instance.save() + + # Create LoadShipment entity + # TODO: Get FundingType,ShipmentAmount,ValidUntil for use in LOAD Contract/LoadShipment + instance.load_data = LoadShipment.objects.create(shipment=instance, + shipper=instance.shipper_wallet_id, + carrier=instance.carrier_wallet_id, + 
valid_until=Shipment.VALID_UNTIL, + funding_type=Shipment.FUNDING_TYPE, + shipment_amount=Shipment.SHIPMENT_AMOUNT) + instance.save() + else: + # Update Shipment vault data + rpc_client = ShipmentRPCClient() + signature = rpc_client.add_shipment_data(instance.storage_credentials_id, instance.shipper_wallet_id, + instance.vault_id, ShipmentVaultSerializer(instance).data) + + # Update LOAD contract with vault uri/hash + instance.update_vault_hash(signature['hash']) + + +@receiver(post_save, sender=LoadShipment, dispatch_uid='loadshipment_post_save') +def loadshipment_post_save(sender, **kwargs): + instance, created = kwargs["instance"], kwargs["created"] + if created: + # Create shipment on the LOAD contract + AsyncJob.rpc_job_for_listener( + rpc_class=ShipmentRPCClient, + rpc_method=ShipmentRPCClient.create_shipment_transaction, + rpc_parameters=[instance.shipment.shipper_wallet_id, + instance.shipment.carrier_wallet_id, + instance.valid_until, + instance.funding_type.value, + instance.shipment_amount], + signing_wallet_id=instance.shipment.shipper_wallet_id, + listener=instance.shipment + ) diff --git a/apps/shipments/views.py b/apps/shipments/views.py new file mode 100644 index 00000000..fb318ca3 --- /dev/null +++ b/apps/shipments/views.py @@ -0,0 +1,98 @@ +from django.conf import settings +from rest_framework import viewsets, permissions, status +from rest_framework.decorators import action +from rest_framework.response import Response + +from .geojson import build_line_string_feature, build_point_features, build_feature_collection +from .models import Shipment +from .permissions import IsOwner +from .rpc import ShipmentRPCClient +from .serializers import ShipmentSerializer, ShipmentCreateSerializer, \ + ShipmentUpdateSerializer, ShipmentTxSerializer + + +class ShipmentViewSet(viewsets.ModelViewSet): + queryset = Shipment.objects.all() + serializer_class = ShipmentSerializer + permission_classes = (permissions.IsAuthenticated, IsOwner) if settings.PROFILES_URL 
else (permissions.AllowAny,) + + def get_queryset(self): + queryset = self.queryset + if settings.PROFILES_URL: + queryset = queryset.filter(owner_id=self.request.user.id) + return queryset + + def perform_create(self, serializer): + if settings.PROFILES_URL: + created = serializer.save(owner_id=self.request.user.id) + else: + created = serializer.save() + return created + + def perform_update(self, serializer): + return serializer.save() + + def create(self, request, *args, **kwargs): + """ + Create a Shipment object and make Async Request to Engine + """ + # Create Shipment + serializer = ShipmentCreateSerializer(data=request.data) + serializer.is_valid(raise_exception=True) + + shipment = self.perform_create(serializer) + async_job = shipment.asyncjob_set.all()[:1] + + response = ShipmentTxSerializer(shipment) + if async_job: + response.instance.async_job_id = async_job[0].id + + return Response(response.data, status=status.HTTP_202_ACCEPTED) + + @action(detail=True, methods=['get']) + def tracking(self, request, version, pk=None): + """ + Retrieve tracking data for this Shipment after checking permissions with Profiles + """ + shipment = Shipment.objects.get(pk=pk) + + # TODO: re-implement device/shipment authorization for tracking data + + rpc_client = ShipmentRPCClient() + tracking_data = rpc_client.get_tracking_data(shipment.storage_credentials_id, + shipment.shipper_wallet_id, + shipment.vault_id) + + if 'as_line' in request.query_params: + all_features = build_line_string_feature(shipment, tracking_data) + + elif 'as_point' in request.query_params: + all_features = build_point_features(shipment, tracking_data) + + else: + all_features = [] + all_features += build_line_string_feature(shipment, tracking_data) + all_features += build_point_features(shipment, tracking_data) + + feature_collection = build_feature_collection(all_features) + + return Response(data=feature_collection, status=status.HTTP_200_OK) + + def update(self, request, *args, **kwargs): + """ 
+ Update the shipment with new details, overwriting the built-in method + """ + partial = kwargs.pop('partial', False) + instance = self.get_object() + + serializer = ShipmentUpdateSerializer(instance, data=request.data, partial=partial) + serializer.is_valid(raise_exception=True) + + shipment = self.perform_update(serializer) + async_job = shipment.asyncjob_set.all()[:1] # TODO: be sure to filter to the latest one, handle race cond. + + response = ShipmentTxSerializer(shipment) + if async_job: + response.instance.async_job_id = async_job[0].id + + return Response(response.data, status=status.HTTP_202_ACCEPTED) diff --git a/apps/urls.py b/apps/urls.py new file mode 100644 index 00000000..42bdbb66 --- /dev/null +++ b/apps/urls.py @@ -0,0 +1,43 @@ +# pylint: disable=C0103 +"""transmission URL Configuration + +The `urlpatterns` list routes URLs to views. For more information please see: + https://docs.djangoproject.com/en/2.0/topics/http/urls/ +Examples: +Function views + 1. Add an import: from my_app import views + 2. Add a URL to urlpatterns: path('', views.home, name='home') +Class-based views + 1. Add an import: from other_app.views import Home + 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') +Including another URLconf + 1. Import the include() function: from django.urls import include, path + 2. 
Add a URL to urlpatterns: path('blog/', include('blog.urls')) +""" +from django.conf.urls import url +from django.views.generic import TemplateView +from rest_framework.urlpatterns import format_suffix_patterns +from rest_framework import routers +from apps.jobs import views as jobs +from apps.shipments import views as shipments +from apps.eth import views as eth + + +API_PREFIX = r'^api/(?P(v1|v2))' + +router = routers.SimpleRouter() +router.register(f'{API_PREFIX[1:]}/shipments', shipments.ShipmentViewSet) +router.register(f'{API_PREFIX[1:]}/jobs', jobs.JobsViewSet, base_name='job') +router.register(f'{API_PREFIX[1:]}/events', eth.EventViewSet, base_name='event') +router.register(f'{API_PREFIX[1:]}/transactions', eth.TransactionViewSet, base_name='transaction') + +urlpatterns = [ + url( + r'(^(api/v1/schema)|^$)', + TemplateView.as_view(template_name='apidoc.html'), + name='api_schema' + ), +] +urlpatterns += router.urls + +urlpatterns = format_suffix_patterns(urlpatterns) diff --git a/apps/utils.py b/apps/utils.py new file mode 100644 index 00000000..ef702bef --- /dev/null +++ b/apps/utils.py @@ -0,0 +1,96 @@ +from collections import namedtuple +from rest_framework import exceptions +from rest_framework_jwt.settings import api_settings +from rest_framework_jwt.authentication import JSONWebTokenAuthentication +from drf_enum_field.fields import EnumField + + +def random_id(): + """ + Cast the UUID to a string + """ + from uuid import uuid4 + return str(uuid4()) + + +def assertDeepAlmostEqual(test_case, expected, actual, *args, **kwargs): # nopep8 pylint: disable=invalid-name + """ + Assert that two complex structures have almost equal contents. + + Compares lists, dicts and tuples recursively. Checks numeric values + using test_case's :py:meth:`unittest.TestCase.assertAlmostEqual` and + checks all other values with :py:meth:`unittest.TestCase.assertEqual`. 
+ Accepts additional positional and keyword arguments and pass those + intact to assertAlmostEqual() (that's how you specify comparison + precision). + + :param test_case: TestCase object on which we can call all of the basic + 'assert' methods. + :type test_case: :py:class:`unittest.TestCase` object + """ + is_root = '__trace' not in kwargs + trace = kwargs.pop('__trace', 'ROOT') + try: + if isinstance(expected, (int, float, int, complex)): + test_case.assertAlmostEqual(expected, actual, *args, **kwargs) + elif isinstance(expected, dict): + test_case.assertEqual(set(expected), set(actual)) + for key in expected: + assertDeepAlmostEqual(test_case, expected[key], actual[key], + __trace=repr(key), *args, **kwargs) + else: + test_case.assertEqual(expected, actual) + except AssertionError as exc: + exc.__dict__.setdefault('traces', []).append(trace) + if is_root: + trace = ' -> '.join(reversed(exc.traces)) + exc = AssertionError("%s\nTRACE: %s" % (str(exc), trace)) + raise exc + + +class EnumToNameField(EnumField): + def to_internal_value(self, data): + return super(EnumToNameField, self).to_internal_value(data).name + + +class AuthenticatedUser: + def __init__(self, payload): + self.id = payload.get('user_id') # pylint:disable=invalid-name + self.username = payload.get('username') + self.email = payload.get('email') + + def is_authenticated(self): + return True + + def is_staff(self): + return False + + def is_superuser(self): + return False + + +class PassiveJSONWebTokenAuthentication(JSONWebTokenAuthentication): + + def authenticate_credentials(self, payload): + if 'sub' not in payload: + raise exceptions.AuthenticationFailed('Invalid payload.') + + payload['pk'] = payload['sub'] + payload = namedtuple("User", payload.keys())(*payload.values()) + payload = api_settings.JWT_PAYLOAD_HANDLER(payload) + + user = AuthenticatedUser(payload) + + return user + + +def snake_to_sentence(word): + return ' '.join(x.capitalize() or '_' for x in word.split('_')) + + +def 
build_auth_headers_from_request(request): + if not request.auth or not isinstance(request.auth, bytes): + raise Exception("No auth in request") + + token = request.auth.decode('utf-8') + return {'Authorization': f"JWT {token}"} diff --git a/apps/wsgi.py b/apps/wsgi.py new file mode 100644 index 00000000..f22a0fc0 --- /dev/null +++ b/apps/wsgi.py @@ -0,0 +1,20 @@ +""" +WSGI config for transmission project. + +It exposes the WSGI callable as a module-level variable named ``application``. + +For more information on this file, see +https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/ +""" + +import os + +import dotenv +from django.core.wsgi import get_wsgi_application + +dotenv.read_dotenv(os.path.join(os.path.dirname(os.path.dirname(__file__)), '.env')) + +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings") + +# pylint:disable=invalid-name +application = get_wsgi_application() diff --git a/bin/check_style b/bin/check_style new file mode 100755 index 00000000..2874f09b --- /dev/null +++ b/bin/check_style @@ -0,0 +1,2 @@ +#!/bin/bash +docker-compose -f compose/dev.yml run django_shell prospector -o pylint diff --git a/bin/dc b/bin/dc new file mode 100755 index 00000000..af284213 --- /dev/null +++ b/bin/dc @@ -0,0 +1,10 @@ +#!/bin/bash +BIN="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +PROJECT_DIR="$( cd $BIN/.. 
&& pwd )" + +[[ -f ../.envrc ]] && source ../.envrc +[[ -f ../.env ]] && source ../.env +[[ -z "$COMPOSE_PROJECT" ]] && COMPOSE_PROJECT=$(basename $PROJECT_DIR) +[[ -z "$ROLE" ]] && ROLE=dev + +docker-compose -p $COMPOSE_PROJECT -f compose/$ROLE.yml $* diff --git a/bin/dcleanup b/bin/dcleanup new file mode 100755 index 00000000..6ff47628 --- /dev/null +++ b/bin/dcleanup @@ -0,0 +1,5 @@ +#!/bin/bash +bin/dc kill $* +bin/dc rm -f $* +bin/dc up -d $* +bin/dc logs -f $* diff --git a/bin/ddo b/bin/ddo new file mode 100755 index 00000000..61f1b2f4 --- /dev/null +++ b/bin/ddo @@ -0,0 +1,2 @@ +#!/bin/bash +bin/dc run --rm django_shell $* diff --git a/bin/dev-tools/cache_pip_wheels.sh b/bin/dev-tools/cache_pip_wheels.sh new file mode 100755 index 00000000..40ba4d09 --- /dev/null +++ b/bin/dev-tools/cache_pip_wheels.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +# Before installing + +BIN="$( cd "$( dirname "${BASH_SOURCE[0]}" )"/.. && pwd )" +PROJECT_DIR="$( cd $BIN/.. && pwd )" + +CACHE_STUB="compose/django/pip.cache" +CACHE_DIR="$PROJECT_DIR/$CACHE_STUB" + +echo "Generating pip.cache directory" +mkdir -p $CACHE_DIR + +echo "Downloading dependencies" +pushd $PROJECT_DIR +bin/ddo pip download -r compose/django/requirements.txt -d $CACHE_STUB diff --git a/bin/dev-tools/install_hooks.sh b/bin/dev-tools/install_hooks.sh new file mode 100644 index 00000000..df99029d --- /dev/null +++ b/bin/dev-tools/install_hooks.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +GIT_DIR=$(git rev-parse --git-dir) + +echo "Installing hooks..." +# this command creates symlink to our pre-push script +ln -s ../../bin/dev-tools/pre_push.sh $GIT_DIR/hooks/pre-push +echo "Done!" 
\ No newline at end of file diff --git a/bin/dev-tools/pre_push.sh b/bin/dev-tools/pre_push.sh new file mode 100644 index 00000000..20d602bd --- /dev/null +++ b/bin/dev-tools/pre_push.sh @@ -0,0 +1,2 @@ +#!/usr/bin/env bash + diff --git a/bin/dev-tools/setup_dev_environment.sh b/bin/dev-tools/setup_dev_environment.sh new file mode 100755 index 00000000..36c1a92f --- /dev/null +++ b/bin/dev-tools/setup_dev_environment.sh @@ -0,0 +1,29 @@ +#!/bin/bash +set -e +BIN="$( cd "$( dirname "${BASH_SOURCE[0]}" )"/.. && pwd )" +PROJECT_DIR="$( cd $BIN/.. && pwd )" + +source $BIN/dev-tools/utils.sh + +pushd $PROJECT_DIR + +color_header "Creating portal network" +docker network create portal || color_header "Already exists!" $COLOR_YELLOW + +color_header "Building containers" +bin/dc build + +color_header "Starting services" +bin/dc up -d + +color_header "Caching build files" +bin/dev-tools/cache_pip_wheels.sh + +color_header "Five second pause to warm up" $COLOR_YELLOW +sleep 5 + +color_header "Running migrations" +bin/dmg migrate + +color_header "Restarting dev server and following logs..." 
$COLOR_GREEN +bin/dcleanup runserver diff --git a/bin/dev-tools/utils.sh b/bin/dev-tools/utils.sh new file mode 100644 index 00000000..08f1ef19 --- /dev/null +++ b/bin/dev-tools/utils.sh @@ -0,0 +1,57 @@ +#!/bin/bash + +# Not everyone uses the same color variables :-( +export COLOR_NC='\e[0m' # No Color +export COLOR_WHITE='\e[1;37m' +export COLOR_BLACK='\e[0;30m' +export COLOR_BLUE='\e[0;34m' +export COLOR_LIGHT_BLUE='\e[1;34m' +export COLOR_GREEN='\e[0;32m' +export COLOR_LIGHT_GREEN='\e[1;32m' +export COLOR_CYAN='\e[0;36m' +export COLOR_LIGHT_CYAN='\e[1;36m' +export COLOR_RED='\e[0;31m' +export COLOR_LIGHT_RED='\e[1;31m' +export COLOR_PURPLE='\e[0;35m' +export COLOR_LIGHT_PURPLE='\e[1;35m' +export COLOR_BROWN='\e[0;33m' +export COLOR_YELLOW='\e[1;33m' +export COLOR_GRAY='\e[0;30m' +export COLOR_LIGHT_GRAY='\e[0;37m' + +function color_header() { + if [[ -z "$2" ]] ; then C="$COLOR_CYAN"; else C="$2"; fi + echo + echo -e "$C----------------------" + echo "$1" + echo -e "----------------------$COLOR_NC" +} + +function git_status { + DIRTY=0 + UNTRACKED=0 + ADDED=0 + OUT=$(git status 2> /dev/null) + [[ "$( echo $OUT | grep 'Changes not staged for commit:')" != "" ]] && DIRTY=1 + [[ "$( echo $OUT | grep 'Untracked files:')" != "" ]] && UNTRACKED=1 + [[ "$( echo $OUT | grep 'Changes to be committed:')" != "" ]] && ADDED=1 + if [[ ADDED -ne 0 || DIRTY -ne 0 || UNTRACKED -ne 0 ]]; then + echo "DIRTY" + else + echo "CLEAN" + fi +} + +function slack_text { + PAYLOAD="payload={\"text\": \"$*\"}" + URL="https://hooks.slack.com/services/**/**" + curl -X POST $URL --data-urlencode "$PAYLOAD" +} + +function ec2json() { + aws ec2 describe-instances | jq -r '.Reservations[].Instances[] | {type: .InstanceType, state: .State.Name, ip: .PublicIpAddress, name: (.Tags[] | select(.Key == "Name") | .Value), id: .InstanceId}' +} + +function ec2info() { + ec2json | jq -r 'reduce . as $i (""; . 
+ "["+$i.type + "|"+$i.state+"]\t"+$i.name[:15]+"\t"+ $i.id +"\t"+ $i.ip)' | sort +} diff --git a/bin/dmg b/bin/dmg new file mode 100755 index 00000000..38bd6dcb --- /dev/null +++ b/bin/dmg @@ -0,0 +1,2 @@ +#!/bin/bash +bin/ddo python ./manage.py $* diff --git a/bin/docker_tests b/bin/docker_tests new file mode 100755 index 00000000..391ea8dd --- /dev/null +++ b/bin/docker_tests @@ -0,0 +1,22 @@ +#!/bin/bash + +set -e + +# magic line to ensure that we're always inside the root of our application, +# no matter from which directory we'll run script +# thanks to it we can just enter `./bin/docker_tests` +cd "${0%/*}/.." + +echo "Running tests" + +bin/dc build + +bin/dc up -d + +bin/dc exec -T runserver prospector -o pylint + +bin/dc exec -T runserver coverage run manage.py test --noinput tests --settings conf.test_settings + +bin/dc exec -T runserver coverage report + +bin/dc down diff --git a/compose/build.yml b/compose/build.yml new file mode 100644 index 00000000..d40ea96f --- /dev/null +++ b/compose/build.yml @@ -0,0 +1,11 @@ +version: "2.1" +services: + transmission-django-dev: + build: + context: ./django + image: transmission-django-dev + transmission-django: + build: + context: ../ + image: transmission-django + diff --git a/compose/circleci.yml b/compose/circleci.yml new file mode 100644 index 00000000..b170f040 --- /dev/null +++ b/compose/circleci.yml @@ -0,0 +1,52 @@ +version: '2.1' +services: + + ganache: + image: trufflesuite/ganache-cli + networks: + - portal + expose: + - '8545' + + redis_db: + image: redis + expose: + - '6379' + volumes: + - /data/shipchain/transmission/redis:/data + command: > + --requirepass redis_pass + + psql: + image: sameersbn/postgresql:9.6-2 + expose: + - '5432' + networks: + - portal + environment: + DB_NAME: transmission + DB_PASS: transmission + DB_USER: transmission + + runserver: + extends: + service: transmission-django + file: build.yml + command: sleep infinity + ports: + - "8000:8000" + networks: + - portal + links: + 
- ganache + - psql + - redis_db + environment: + - ENV + - SECRET_KEY + - SERVICE=circleci + - REDIS_URL=redis://:redis_pass@redis_db:6379/1 + +networks: + portal: + driver: bridge diff --git a/compose/dev.yml b/compose/dev.yml new file mode 100644 index 00000000..b6d06f93 --- /dev/null +++ b/compose/dev.yml @@ -0,0 +1,101 @@ +version: '2.1' +services: + + redis_db: + image: redis + expose: + - '6379' + volumes: + - /data/shipchain/transmission/redis:/data + command: > + --requirepass redis_pass + + psql: + image: sameersbn/postgresql:9.6-2 + ports: + - "5432:5432" + environment: + DB_NAME: transmission + DB_PASS: transmission + DB_USER: transmission + volumes: + - /data/shipchain/transmission/postgresql:/var/lib/postgresql + + django_shell: # This is a one-shot command runner service, for manage.py or bash, it dies immediately + extends: + service: transmission-django-dev + file: build.yml + volumes: + - ../:/app + links: + - psql + - redis_db + environment: + - ENV + - SECRET_KEY + - SERVICE=django_shell + - REDIS_URL + - ENGINE_RPC_URL=http://engine-rpc:2000 + - INTERNAL_URL=http://transmission-runserver:8000 + - PROFILES_URL #http://profiles-runserver:8000 + command: "bash" + entrypoint: [] + + runserver: + extends: + service: transmission-django-dev + file: build.yml + command: ["python", "manage.py", "runserver", "0.0.0.0:8000"] + ports: + - "8000:8000" + networks: + default: + aliases: + - transmission-runserver + portal: + aliases: + - transmission-runserver + links: + - psql + - redis_db + volumes: + - ../:/app + environment: + - ENV + - SECRET_KEY + - SERVICE=runserver + - REDIS_URL + - ENGINE_RPC_URL=http://engine-rpc:2000 + - INTERNAL_URL=http://transmission-runserver:8000 + - PROFILES_URL #http://profiles-runserver:8000 + + celery: + extends: + service: transmission-django-dev + file: build.yml + command: ["celery", "-A", "apps", "worker"] + networks: + default: + aliases: + - transmission-celery + portal: + aliases: + - transmission-celery + links: + 
- psql + - redis_db + volumes: + - ../:/app + environment: + - ENV + - SECRET_KEY + - SERVICE=celery + - REDIS_URL + - ENGINE_RPC_URL=http://engine-rpc:2000 + - INTERNAL_URL=http://transmission-runserver:8000 + - PROFILES_URL #http://profiles-runserver:8000 + entrypoint: [] + +networks: + portal: + external: true diff --git a/compose/django/Dockerfile b/compose/django/Dockerfile new file mode 100644 index 00000000..b466b40a --- /dev/null +++ b/compose/django/Dockerfile @@ -0,0 +1,21 @@ +FROM python:3.6.5 + +LABEL maintainer="Adam Hodges " + +ENV LANG C.UTF-8 +ENV PYTHONUNBUFFERED 1 + +RUN mkdir /build +WORKDIR /build + +ADD ./requirements.txt /build/ +ADD ./pip.cache/ /build/ + +RUN pip install -r /build/requirements.txt --find-links /build/ + +RUN mkdir /app +WORKDIR /app + +COPY ./*.sh / +RUN chmod +x /*.sh +ENTRYPOINT ["/entrypoint.sh"] diff --git a/compose/django/entrypoint.sh b/compose/django/entrypoint.sh new file mode 100644 index 00000000..5f272713 --- /dev/null +++ b/compose/django/entrypoint.sh @@ -0,0 +1,13 @@ +#!/bin/sh + +if [ "$ENV" = "PROD" ] || [ "$ENV" = "STAGE" ] || [ "$ENV" = "DEV" ]; +then + echo "Not running in a docker-compose environment, skipping wait-for-it" +else + echo "Waiting for dependencies to come up in the stack" + /wait-for-it.sh ${REDIS_NAME:-redis_db}:6379 + /wait-for-it.sh ${PSQL_NAME:-psql}:5432 +fi + +python manage.py migrate +exec "$@" \ No newline at end of file diff --git a/compose/django/pip.cache/__empty__ b/compose/django/pip.cache/__empty__ new file mode 100644 index 00000000..e69de29b diff --git a/compose/django/requirements.txt b/compose/django/requirements.txt new file mode 100644 index 00000000..20f9cbf8 --- /dev/null +++ b/compose/django/requirements.txt @@ -0,0 +1,104 @@ +amqp==2.2.2 +asn1crypto==0.24.0 +astroid==1.6.1 +billiard==3.5.0.3 +boto3==1.7.38 +botocore==1.10.38 +celery==4.2.1 +celery-once==2.0.0 +certifi==2018.1.18 +cffi==1.11.5 +chardet==3.0.4 +click==6.7 +-e 
git://github.com/kusha/python-elasticsearch-logger.git@91b458a58e12ab1e9a8fe16a28d225f610e29183#egg=CMRESHandler +colorama==0.3.7 +coreapi==2.3.3 +coreschema==0.0.4 +coverage==4.5.1 +cryptography==2.3.1 +cytoolz==0.9.0 +Django==2.0.7 +django-cors-headers==2.2.0 +django-dotenv==1.4.2 +django-enumfields==0.10.0 +django-environ==0.4.4 +django-extensions==2.0.6 +django-filter==1.1.0 +django-gm2m==0.6.1 +-e git://github.com/lwbco/django-influxdb-tagged-metrics.git@add895558ed6ab59c621b19beae026f8933d4848#egg=django_influxdb_metrics +django-redis==4.9.0 +djangorestframework==3.8.2 +djangorestframework-jsonapi==2.4.0 +djangorestframework-jwt==1.11.0 +docutils==0.14 +dodgy==0.1.9 +drf-enum-field==0.9.2 +elasticsearch==6.2.0 +eth-abi==0.5.0 +eth-keys==0.1.0b4 +eth-tester==0.1.0b11 +eth-utils==0.8.1 +flake8==3.5.0 +flake8-polyfill==1.0.2 +geojson==2.4.0 +gunicorn==19.7.1 +idna==2.6 +inflection==0.3.1 +influxdb==5.0.0 +isort==4.3.4 +itypes==1.1.0 +Jinja2==2.10 +jmespath==0.9.3 +josepy==1.0.1 +kombu==4.2.1 +lazy-object-proxy==1.3.1 +lru-dict==1.1.6 +Markdown==2.6.11 +MarkupSafe==1.0 +mccabe==0.6.1 +openapi-codec==1.3.2 +pep8-naming==0.5.0 +prospector==0.12.7 +psutil==5.4.5 +psycopg2==2.7.4 +pyasn1==0.4.2 +pycodestyle==2.0.0 +pycparser==2.18 +pydocstyle==2.1.1 +pyflakes==1.6.0 +PyJWT==1.6.4 +pylint==1.8.2 +pylint-celery==0.3 +pylint-common==0.2.5 +pylint-django==0.9.3 +pylint-flask==0.5 +pylint-plugin-utils==0.2.6 +pyOpenSSL==17.5.0 +pysha3==1.0.2 +python-dateutil==2.7.2 +python-server-metrics==0.2.1 +pytz==2018.5 +PyYAML==3.12 +redis==2.10.6 +requests==2.18.4 +requirements-detector==0.5.2 +retrying==1.3.3 +rlp==0.6.0 +rsa==3.4.2 +s3transfer==0.1.13 +semantic-version==2.6.0 +setoptconf==0.2.0 +simplejson==3.13.2 +six==1.11.0 +snowballstemmer==1.2.1 +termstyle==0.1.11 +tld==0.7.10 +toolz==0.9.0 +Unidecode==1.0.22 +unittest-xml-reporting==2.1.1 +uritemplate==3.0.0 +urllib3==1.22 +uWSGI==2.0.17.1 +vine==1.1.4 +whitenoise==3.3.1 +wrapt==1.10.11 diff --git 
a/compose/django/ssh-entrypoint.sh b/compose/django/ssh-entrypoint.sh new file mode 100644 index 00000000..6c9493c7 --- /dev/null +++ b/compose/django/ssh-entrypoint.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +# Copy ECS env vars into bash profile so they're available to SSH'd users +echo "export ENV=$ENV" >> /etc/profile +echo "export OIDC_RP_CLIENT_ID=$OIDC_RP_CLIENT_ID" >> /etc/profile +echo "export OIDC_PUBLIC_KEY_PEM_BASE64=$OIDC_PUBLIC_KEY_PEM_BASE64" >> /etc/profile +echo "export SERVICE=$SERVICE" >> /etc/profile +echo "export REDIS_URL=$REDIS_URL" >> /etc/profile +echo "export AWS_CONTAINER_CREDENTIALS_RELATIVE_URI=$AWS_CONTAINER_CREDENTIALS_RELATIVE_URI" >> /etc/profile +sed -i -e "2iexport AWS_CONTAINER_CREDENTIALS_RELATIVE_URI=$AWS_CONTAINER_CREDENTIALS_RELATIVE_URI\\" /usr/sbin/keymaker-get-public-keys +sed -i -e "2iexport AWS_CONTAINER_CREDENTIALS_RELATIVE_URI=$AWS_CONTAINER_CREDENTIALS_RELATIVE_URI\\" /usr/local/bin/keymaker-create-account-for-iam-user + +exec "$@" \ No newline at end of file diff --git a/compose/django/wait-for-it.sh b/compose/django/wait-for-it.sh new file mode 100644 index 00000000..5b3a6f9a --- /dev/null +++ b/compose/django/wait-for-it.sh @@ -0,0 +1,177 @@ +#!/usr/bin/env bash +# Use this script to test if a given TCP host/port are available + +cmdname=$(basename $0) + +echoerr() { if [[ $QUIET -ne 1 ]]; then echo "$@" 1>&2; fi } + +usage() +{ + cat << USAGE >&2 +Usage: + $cmdname host:port [-s] [-t timeout] [-- command args] + -h HOST | --host=HOST Host or IP under test + -p PORT | --port=PORT TCP port under test + Alternatively, you specify the host and port as host:port + -s | --strict Only execute subcommand if the test succeeds + -q | --quiet Don't output any status messages + -t TIMEOUT | --timeout=TIMEOUT + Timeout in seconds, zero for no timeout + -- COMMAND ARGS Execute command with args after the test finishes +USAGE + exit 1 +} + +wait_for() +{ + if [[ $TIMEOUT -gt 0 ]]; then + echoerr "$cmdname: waiting $TIMEOUT seconds for 
$HOST:$PORT" + else + echoerr "$cmdname: waiting for $HOST:$PORT without a timeout" + fi + start_ts=$(date +%s) + while : + do + if [[ $ISBUSY -eq 1 ]]; then + nc -z $HOST $PORT + result=$? + else + (echo > /dev/tcp/$HOST/$PORT) >/dev/null 2>&1 + result=$? + fi + if [[ $result -eq 0 ]]; then + end_ts=$(date +%s) + echoerr "$cmdname: $HOST:$PORT is available after $((end_ts - start_ts)) seconds" + break + fi + sleep 1 + done + return $result +} + +wait_for_wrapper() +{ + # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692 + if [[ $QUIET -eq 1 ]]; then + timeout $BUSYTIMEFLAG $TIMEOUT $0 --quiet --child --host=$HOST --port=$PORT --timeout=$TIMEOUT & + else + timeout $BUSYTIMEFLAG $TIMEOUT $0 --child --host=$HOST --port=$PORT --timeout=$TIMEOUT & + fi + PID=$! + trap "kill -INT -$PID" INT + wait $PID + RESULT=$? + if [[ $RESULT -ne 0 ]]; then + echoerr "$cmdname: timeout occurred after waiting $TIMEOUT seconds for $HOST:$PORT" + fi + return $RESULT +} + +# process arguments +while [[ $# -gt 0 ]] +do + case "$1" in + *:* ) + hostport=(${1//:/ }) + HOST=${hostport[0]} + PORT=${hostport[1]} + shift 1 + ;; + --child) + CHILD=1 + shift 1 + ;; + -q | --quiet) + QUIET=1 + shift 1 + ;; + -s | --strict) + STRICT=1 + shift 1 + ;; + -h) + HOST="$2" + if [[ $HOST == "" ]]; then break; fi + shift 2 + ;; + --host=*) + HOST="${1#*=}" + shift 1 + ;; + -p) + PORT="$2" + if [[ $PORT == "" ]]; then break; fi + shift 2 + ;; + --port=*) + PORT="${1#*=}" + shift 1 + ;; + -t) + TIMEOUT="$2" + if [[ $TIMEOUT == "" ]]; then break; fi + shift 2 + ;; + --timeout=*) + TIMEOUT="${1#*=}" + shift 1 + ;; + --) + shift + CLI=("$@") + break + ;; + --help) + usage + ;; + *) + echoerr "Unknown argument: $1" + usage + ;; + esac +done + +if [[ "$HOST" == "" || "$PORT" == "" ]]; then + echoerr "Error: you need to provide a host and port to test." 
+ usage +fi + +TIMEOUT=${TIMEOUT:-15} +STRICT=${STRICT:-0} +CHILD=${CHILD:-0} +QUIET=${QUIET:-0} + +# check to see if timeout is from busybox? +# check to see if timeout is from busybox? +TIMEOUT_PATH=$(realpath $(which timeout)) +if [[ $TIMEOUT_PATH =~ "busybox" ]]; then + ISBUSY=1 + BUSYTIMEFLAG="-t" +else + ISBUSY=0 + BUSYTIMEFLAG="" +fi + +if [[ $CHILD -gt 0 ]]; then + wait_for + RESULT=$? + exit $RESULT +else + if [[ $TIMEOUT -gt 0 ]]; then + wait_for_wrapper + RESULT=$? + else + wait_for + RESULT=$? + fi +fi + +if [[ $CLI != "" ]]; then + if [[ $RESULT -ne 0 && $STRICT -eq 1 ]]; then + echoerr "$cmdname: strict mode, refusing to execute subprocess" + exit $RESULT + fi + exec "${CLI[@]}" +else + exit $RESULT +fi \ No newline at end of file diff --git a/compose/nginx/Dockerfile b/compose/nginx/Dockerfile new file mode 100644 index 00000000..0c51a425 --- /dev/null +++ b/compose/nginx/Dockerfile @@ -0,0 +1,14 @@ +FROM nginx + +LABEL maintainer="Adam Hodges " + +RUN apt-get update +RUN apt-get install -y python-pip jq +RUN pip install awscli + +RUN mkdir -p /etc/nginx/certs +ADD ./nginx.conf /etc/nginx/conf.d/default.conf + +COPY ./*.sh / +RUN chmod +x /*.sh +ENTRYPOINT ["/entrypoint.sh"] diff --git a/compose/nginx/entrypoint.sh b/compose/nginx/entrypoint.sh new file mode 100644 index 00000000..3c262daf --- /dev/null +++ b/compose/nginx/entrypoint.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +SUBDOMAIN=$APP-${ENV,,} + +echo "Looking for existing certificate in ACM" +CERT_ARN=$(aws acm list-certificates | jq -r "[.CertificateSummaryList[] | select(.DomainName == \"$SUBDOMAIN.internal\")][0].CertificateArn") + +# Create private certificate in AWS ACM +if [ $CERT_ARN = "null" ]; +then + echo "Creating new certificate" + CERT_ARN=$(aws --region us-east-1 acm request-certificate \ + --domain-name $SUBDOMAIN.internal \ + --idempotency-token $APP$ENV \ + --options CertificateTransparencyLoggingPreference=DISABLED \ + --certificate-authority-arn $CERT_AUTHORITY_ARN | jq -r 
'.CertificateArn') + + echo "Waiting for certificate to be ready" + STATUS=$(aws acm describe-certificate --certificate-arn $CERT_ARN | jq -r '.Certificate.Status') + while [ $STATUS != "ISSUED" ] && [ $STATUS != "FAILED" ] + do + sleep 2 + STATUS=$(aws acm describe-certificate --certificate-arn $CERT_ARN | jq -r '.Certificate.Status') + done + + echo "Tagging certificate" + aws acm add-tags-to-certificate --certificate-arn $CERT_ARN --tags Key=Name,Value=$SUBDOMAIN +fi + +# Export certificate as JSON +echo "Exporting certificate" +CERT_PASS=$(openssl rand --base64 12) +CERT_JSON=$(aws --region us-east-1 acm export-certificate --certificate-arn $CERT_ARN --passphrase $CERT_PASS) + +# Copy certificate to nginx +echo "Copying certificate to nginx" +echo $CERT_JSON | jq -r '.Certificate' > /etc/nginx/certs/$SUBDOMAIN.internal.crt +echo $CERT_JSON | jq -r '.PrivateKey' > /etc/nginx/certs/$SUBDOMAIN.internal.encrypted.key + +# Decrypt key +openssl rsa -passin pass:$CERT_PASS -in /etc/nginx/certs/$SUBDOMAIN.internal.encrypted.key -out /etc/nginx/certs/$SUBDOMAIN.internal.key + +# Update nginx.conf with app name and environment +sed -i "s/#{DOMAIN}/$SUBDOMAIN.internal/g" /etc/nginx/conf.d/default.conf + +exec "$@" \ No newline at end of file diff --git a/compose/nginx/nginx.conf b/compose/nginx/nginx.conf new file mode 100644 index 00000000..fe354fcf --- /dev/null +++ b/compose/nginx/nginx.conf @@ -0,0 +1,20 @@ +upstream django { + server 127.0.0.1:8000; +} + +server { + listen 443 ssl; + server_name ~^(.+)$; + charset utf-8; + + ssl_certificate certs/#{DOMAIN}.crt; + ssl_certificate_key certs/#{DOMAIN}.key; + + client_max_body_size 25m; + + location / { + uwsgi_pass django; + include /etc/nginx/uwsgi_params; + uwsgi_param HTTP_X_FORWARDED_PROTO https; + } +} \ No newline at end of file diff --git a/conf/__init__.py b/conf/__init__.py new file mode 100644 index 00000000..82c3123f --- /dev/null +++ b/conf/__init__.py @@ -0,0 +1,3 @@ +from .base import * + +from .auth 
import * diff --git a/conf/auth.py b/conf/auth.py new file mode 100644 index 00000000..d8438126 --- /dev/null +++ b/conf/auth.py @@ -0,0 +1,23 @@ +import os +import base64 +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.serialization import load_pem_public_key + + +SESSION_COOKIE_NAME = os.environ.get( + 'SESSION_COOKIE_NAME', 'txm-sessionid' +) + +OIDC_PUBLIC_KEY_PEM_BASE64 = os.environ.get('OIDC_PUBLIC_KEY_PEM_BASE64', 'LS0tLS1CRUdJTiBQVUJMSUMgS0VZLS0tLS0NCk1JR2ZN\ +QTBHQ1NxR1NJYjNEUUVCQVFVQUE0R05BRENCaVFLQmdRQ2FmTDBXVVRObFdteTJJdlRPQ2xpNHdqZFMNClk1cWJNaXNQcHlrNVFkamRNMEFuY2gvbm5qTGJ\ +aVzAwakw0V0lXM0YzOHZjNThQSzExNzB3OG9maGF1TEJSMEgNCjBsRTZoTTlsV2l3TjZOODFNVWZ5cG1HME9ReG1vYW5XN2Y1ano2Z2tCRkNzc21pQWZxSF\ +Z1TTJtSmlJdGJZTVUNCm8vcmtxcm9zQnVadmFKSnJEUUlEQVFBQg0KLS0tLS1FTkQgUFVCTElDIEtFWS0tLS0t') + +JWT_AUTH = { + 'JWT_PUBLIC_KEY': load_pem_public_key( + data=base64.b64decode(OIDC_PUBLIC_KEY_PEM_BASE64.strip()), + backend=default_backend() + ), + 'JWT_ALGORITHM': 'RS256', + 'JWT_AUDIENCE': os.environ.get('OIDC_RP_CLIENT_ID', '892633'), +} diff --git a/conf/base.py b/conf/base.py new file mode 100644 index 00000000..cac3976e --- /dev/null +++ b/conf/base.py @@ -0,0 +1,294 @@ +""" +Django settings for transmission project. + +Generated by 'django-admin startproject' using Django 2.0.2. + +For more information on this file, see +https://docs.djangoproject.com/en/2.0/topics/settings/ + +For the full list of settings and their values, see +https://docs.djangoproject.com/en/2.0/ref/settings/ +""" + +import json +import os +import sys +from urllib.parse import urlparse + +import environ +from cmreslogging.handlers import CMRESHandler + +ENV = environ.Env() + +# Build paths inside the project like this: os.path.join(BASE_DIR, ...) 
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + +ENGINE_RPC_URL = os.environ.get('ENGINE_RPC_URL', "http://engine-rpc:2000/") +INTERNAL_URL = os.environ.get('INTERNAL_URL', 'http://transmission-runserver:8000') +PROFILES_URL = os.environ.get('PROFILES_URL') +if PROFILES_URL != 'DISABLED': + PROFILES_URL = ('http://profiles-runserver:8000' if ('runserver' in INTERNAL_URL) + else INTERNAL_URL.replace("transmission", "profiles")) +ELASTICSEARCH_URL = os.environ.get('ELASTICSEARCH_URL', None) + +ENVIRONMENT = os.environ.get('ENV', 'LOCAL') +SERVICE = os.environ.get('SERVICE', 'NONE') +PROJECT_MODULE = 'transmission' + +MANAGE_PY_COMMAND = None + +RELOAD_SERVER = False +if len(sys.argv) > 1 and sys.argv[0] == 'manage.py': + MANAGE_PY_COMMAND = sys.argv[1] + RELOAD_SERVER = (os.environ.get('RUN_MAIN') != 'true') + +SUBSCRIBE_EVENTS = (ENVIRONMENT != 'TEST' and + MANAGE_PY_COMMAND != 'migrate' and + MANAGE_PY_COMMAND != 'test' and + not RELOAD_SERVER) + +ALLOWED_HOSTS = ['*'] + +CSRF_USE_SESSIONS = True + +CORS_ORIGIN_ALLOW_ALL = True + +if ENVIRONMENT in ('PROD', 'STAGE', 'DEV'): + if ENVIRONMENT == 'PROD': + DEBUG = False + LOG_LEVEL = 'INFO' + else: + DEBUG = os.environ.get('FORCE_DEBUG', False) + LOG_LEVEL = os.environ.get('LOG_LEVEL', 'DEBUG') + + import boto3 + SECRETS_MANAGER = boto3.client('secretsmanager', region_name='us-east-1') + + SECRET_KEY = json.loads(SECRETS_MANAGER.get_secret_value( + SecretId=f'TRANSMISSION_SECRET_KEY_{ENVIRONMENT}' + )['SecretString'])['SECRET_KEY'] + + RDS_CREDS = json.loads(SECRETS_MANAGER.get_secret_value( + SecretId=f'TRANSMISSION_RDS_{ENVIRONMENT}' + )['SecretString']) + + os.environ['DATABASE_URL'] = (f'psql://{RDS_CREDS["username"]}:{RDS_CREDS["password"]}@' + f'{RDS_CREDS["host"]}:{RDS_CREDS["port"]}/{RDS_CREDS["dbname"]}') + + SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') + SECURE_SSL_REDIRECT = True + SESSION_COOKIE_SECURE = True + SECURE_CONTENT_TYPE_NOSNIFF = True + 
SECURE_BROWSER_XSS_FILTER = True + X_FRAME_OPTIONS = 'DENY' +else: + DEBUG = True + LOG_LEVEL = 'DEBUG' + DEV_SECRET_KEY = 'devsecretkey' * 19 # noqa + SECRET_KEY = os.environ.get('SECRET_KEY', DEV_SECRET_KEY) + +# Application definition + +INSTALLED_APPS = [ + 'django.contrib.admin', + 'django.contrib.auth', + 'django.contrib.contenttypes', + 'django.contrib.sessions', + 'django.contrib.messages', + 'django.contrib.staticfiles', + 'django_extensions', + 'rest_framework', + 'influxdb_metrics', + 'corsheaders', + 'gm2m', + 'apps.jobs', + 'apps.eth', + 'apps.shipments', + 'apps.schema', +] + +REST_FRAMEWORK = { + 'PAGE_SIZE': 10, + 'EXCEPTION_HANDLER': 'rest_framework_json_api.exceptions.exception_handler', + 'DEFAULT_PAGINATION_CLASS': + 'rest_framework_json_api.pagination.PageNumberPagination', + 'DEFAULT_PARSER_CLASSES': ( + 'rest_framework_json_api.parsers.JSONParser', + 'rest_framework.parsers.FormParser', + 'rest_framework.parsers.MultiPartParser' + ), + 'DEFAULT_RENDERER_CLASSES': ( + 'rest_framework_json_api.renderers.JSONRenderer', + # If you're performance testing, you will want to use the browseable API + # without forms, as the forms can generate their own queries. 
+ # If performance testing, enable: + # 'example.utils.BrowsableAPIRendererWithoutForms', + # Otherwise, to play around with the browseable API, enable: + 'rest_framework.renderers.BrowsableAPIRenderer' + ), + 'DEFAULT_METADATA_CLASS': 'rest_framework_json_api.metadata.JSONAPIMetadata', + 'TEST_REQUEST_RENDERER_CLASSES': ( + 'rest_framework_json_api.renderers.JSONRenderer', + ), + 'TEST_REQUEST_DEFAULT_FORMAT': 'vnd.api+json', + 'DEFAULT_VERSIONING_CLASS': 'rest_framework.versioning.URLPathVersioning', +} + +if PROFILES_URL: + REST_FRAMEWORK['DEFAULT_AUTHENTICATION_CLASSES'] = ( + 'apps.utils.PassiveJSONWebTokenAuthentication' + ), + REST_FRAMEWORK['DEFAULT_PERMISSION_CLASSES'] = ( + 'rest_framework.permissions.IsAuthenticated' + ), + +MIDDLEWARE = [ + 'influxdb_metrics.middleware.InfluxDBRequestMiddleware', + 'django.middleware.security.SecurityMiddleware', + 'corsheaders.middleware.CorsMiddleware', + 'whitenoise.middleware.WhiteNoiseMiddleware', + # 'django.middleware.cache.UpdateCacheMiddleware', + 'django.contrib.sessions.middleware.SessionMiddleware', + 'django.middleware.common.CommonMiddleware', + # 'django.middleware.cache.FetchFromCacheMiddleware', + 'django.middleware.csrf.CsrfViewMiddleware', + 'django.contrib.auth.middleware.AuthenticationMiddleware', + 'django.contrib.messages.middleware.MessageMiddleware', + 'django.middleware.clickjacking.XFrameOptionsMiddleware', +] + +ROOT_URLCONF = 'apps.urls' + +TEMPLATES = [ + { + 'BACKEND': 'django.template.backends.django.DjangoTemplates', + 'DIRS': [], + 'APP_DIRS': True, + 'OPTIONS': { + 'context_processors': [ + 'django.template.context_processors.debug', + 'django.template.context_processors.request', + 'django.contrib.auth.context_processors.auth', + 'django.contrib.messages.context_processors.messages', + ], + }, + }, +] + +WSGI_APPLICATION = 'apps.wsgi.application' + +TEST_RUNNER = 'xmlrunner.extra.djangotestrunner.XMLTestRunner' +TEST_OUTPUT_DIR = 'test-results/unittest/results.xml' + +# Database +# 
https://docs.djangoproject.com/en/2.0/ref/settings/#databases + +DATABASES = { + 'default': ENV.db(default='psql://transmission:transmission@psql:5432/transmission'), +} + +# Caching +CACHES = { + 'default': ENV.cache('REDIS_URL', default='redis://:redis_pass@redis_db:6379/1') +} +SESSION_ENGINE = "django.contrib.sessions.backends.cache" +SESSION_CACHE_ALIAS = "default" + +STATICFILES_DIRS = [ + os.path.join(BASE_DIR, "apps/schema/static"), +] +STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles') + +# Password validation +# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators + +AUTH_PASSWORD_VALIDATORS = [ + { + 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', + }, +] + + +# Internationalization +# https://docs.djangoproject.com/en/2.0/topics/i18n/ + +LANGUAGE_CODE = 'en-us' + +TIME_ZONE = 'UTC' + +USE_I18N = True + +USE_L10N = True + +USE_TZ = True + + +# Static files (CSS, JavaScript, Images) +# https://docs.djangoproject.com/en/2.0/howto/static-files/ + +STATIC_URL = '/static/' + +LOGGING = { + 'version': 1, + 'disable_existing_loggers': False, + 'handlers': { + 'console': { + 'level': LOG_LEVEL, + 'class': 'logging.StreamHandler', + } + }, + 'loggers': { + 'oidc_provider': { + 'handlers': ['console'], + 'level': 'DEBUG', + }, + 'django': { + 'handlers': ['console'], + 'level': 'DEBUG', + }, + }, +} + +if ELASTICSEARCH_URL: + ELASTICSEARCH_HOST = urlparse(ELASTICSEARCH_URL).netloc + LOGGING['handlers']['elasticsearch'] = { + 'level': LOG_LEVEL, + 'class': 'cmreslogging.handlers.NonBlockingCMRESHandler', + 'hosts': [{ + 'host': ELASTICSEARCH_HOST, + 'port': 443 + }], + 'es_index_name': 'django-logs', + 'es_additional_fields': { + 'Service': SERVICE, + 
'Environment': ENVIRONMENT, + 'Project': PROJECT_MODULE + }, + 'auth_type': CMRESHandler.AuthType.NO_AUTH, + 'use_ssl': True, + } + LOGGING['loggers']['oidc_provider']['handlers'].append('elasticsearch') + LOGGING['loggers']['django']['handlers'].append('elasticsearch') + +INFLUXDB_DISABLED = True +INFLUXDB_URL = os.environ.get('INFLUXDB_URL') +if INFLUXDB_URL: + INFLUXDB_DISABLED = False + INFLUXDB_URL = urlparse(INFLUXDB_URL) + INFLUXDB_HOST = INFLUXDB_URL.hostname + INFLUXDB_PORT = str(INFLUXDB_URL.port) if INFLUXDB_URL.port else '80' + INFLUXDB_USER = None + INFLUXDB_PASSWORD = None + INFLUXDB_DATABASE = INFLUXDB_URL.path[1:] + INFLUXDB_TIMEOUT = 1 + + EMAIL_BACKEND = 'influxdb_metrics.email.InfluxDbEmailBackend' diff --git a/conf/test_settings.py b/conf/test_settings.py new file mode 100644 index 00000000..8b308efb --- /dev/null +++ b/conf/test_settings.py @@ -0,0 +1,19 @@ +from . import * + +ENVIRONMENT = 'TEST' +INFLUXDB_DISABLED = True + +# Disable auth for unit tests +REST_FRAMEWORK['DEFAULT_AUTHENTICATION_CLASSES'] = [] +REST_FRAMEWORK['DEFAULT_PERMISSION_CLASSES'] = [] + +ENGINE_RPC_URL = "http://INTENTIONALLY_DISCONNECTED:9999" +INTERNAL_URL = "http://INTENTIONALLY_DISCONNECTED:9999" +PROFILES_URL = "http://INTENTIONALLY_DISCONNECTED:9999" + +SUBSCRIBE_EVENTS = False + +for name, logger in LOGGING['loggers'].items(): + logger['handlers'] = [h for h in logger.get('handlers', []) if h != 'elasticsearch'] + if logger.get('level') == 'DEBUG': + logger['level'] = 'INFO' diff --git a/manage.py b/manage.py new file mode 100755 index 00000000..f53ab052 --- /dev/null +++ b/manage.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python +import os +import sys + +import dotenv + +if __name__ == "__main__": + dotenv.read_dotenv() + + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings") + try: + from django.core.management import execute_from_command_line + except ImportError as exc: + raise ImportError( + "Couldn't import Django. 
Are you sure it's installed and " + "available on your PYTHONPATH environment variable? Did you " + "forget to activate a virtual environment?" + ) from exc + execute_from_command_line(sys.argv) diff --git a/settings.py b/settings.py new file mode 100644 index 00000000..b9d792c3 --- /dev/null +++ b/settings.py @@ -0,0 +1 @@ +from conf import * diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/postman.collection.Transmission.json b/tests/postman.collection.Transmission.json new file mode 100644 index 00000000..70096b32 --- /dev/null +++ b/tests/postman.collection.Transmission.json @@ -0,0 +1,519 @@ +{ + "info": { + "_postman_id": "a9009015-2439-49d6-9d48-46823180aaeb", + "name": "Transmission", + "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json" + }, + "item": [ + { + "name": "Shipments", + "description": null, + "item": [ + { + "name": "Create Shipment", + "request": { + "method": "POST", + "header": [ + { + "key": "Authorization", + "value": "JWT {{token}}" + } + ], + "body": {}, + "url": { + "raw": "{{transmission_url}}/api/v1/shipments/", + "host": [ + "{{transmission_url}}" + ], + "path": [ + "api", + "v1", + "shipments", + "" + ] + } + }, + "response": [] + }, + { + "name": "List Shipments", + "request": { + "method": "GET", + "header": [ + { + "key": "Authorization", + "value": "JWT {{token}}" + } + ], + "body": {}, + "url": { + "raw": "{{transmission_url}}/api/v1/shipments/", + "host": [ + "{{transmission_url}}" + ], + "path": [ + "api", + "v1", + "shipments", + "" + ] + } + }, + "response": [] + }, + { + "name": "Get Shipment", + "request": { + "method": "GET", + "header": [ + { + "key": "Authorization", + "value": "JWT {{token}}" + } + ], + "body": {}, + "url": { + "raw": "{{transmission_url}}/api/v1/shipments/{{shipment_id}}/", + "host": [ + "{{transmission_url}}" + ], + "path": [ + "api", + "v1", + "shipments", + "{{shipment_id}}", + "" + ] + } + }, + 
"response": [] + }, + { + "name": "Update Shipment", + "request": { + "method": "PATCH", + "header": [ + { + "key": "Authorization", + "value": "JWT {{token}}" + } + ], + "body": { + "mode": "formdata", + "formdata": [ + { + "key": "carrier_scac", + "value": "wat", + "type": "text", + "disabled": true + }, + { + "key": "forwarder_scac", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "nvocc_scac", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "shipper_reference", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "forwarder_reference", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "forwarders_shipper_id", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "carrier_instructions", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "pro_number", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "master_bill", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "house_bill", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "subhouse_bill", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "freight_payment_terms", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "vessel_name", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "voyage_number", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "mode", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "number_of_packages", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "gross_weight_kgs", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "volume_cbms", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "container_count", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "dimensional_weight", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": 
"chargeable_weight", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "docs_received_actual", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "docs_approved_actual", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "pickup_appointment_actual", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "pickup_estimated", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "pickup_actual", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "loading_estimated", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "loading_actual", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "departure_estimated", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "departure_actual", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "delivery_appointment_actual", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "arrival_port_estimated", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "arrival_port_actual", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "delivery_estimated", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "delivery_actual", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "last_attempted_delivery_actual", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "cancel_requested_date_actual", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "cancel_confirmed_date_actual", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "customs_filed_date_actual", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "customs_hold_date_actual", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "customs_release_date_actual", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": 
"containerization_type", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "arrival_unlocode", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "final_port_unlocode", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "import_unlocode", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "lading_unlocode", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "origin_unlocode", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "us_routed_export", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "import_customs_mode", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "us_customs_export_port", + "value": "", + "type": "text", + "disabled": true + }, + { + "key": "customer_fields", + "value": "{}", + "type": "text", + "disabled": true + } + ] + }, + "url": { + "raw": "{{transmission_url}}/api/v1/shipments/{{shipment_id}}/", + "host": [ + "{{transmission_url}}" + ], + "path": [ + "api", + "v1", + "shipments", + "{{shipment_id}}", + "" + ] + } + }, + "response": [] + }, + { + "name": "Delete Shipment", + "request": { + "method": "DELETE", + "header": [ + { + "key": "Authorization", + "value": "JWT {{token}}" + } + ], + "body": {}, + "url": { + "raw": "{{transmission_url}}/api/v1/shipments/{{shipment_id}}/", + "host": [ + "{{transmission_url}}" + ], + "path": [ + "api", + "v1", + "shipments", + "{{shipment_id}}", + "" + ] + } + }, + "response": [] + } + ] + } + ], + "event": [ + { + "listen": "prerequest", + "script": { + "id": "a2160307-fe58-4f1b-a9a3-60fa35705e24", + "type": "text/javascript", + "exec": [ + "var token_time = pm.environment.get(\"token_time\");", + "var now_time = new Date().getTime();", + "", + "var token_age = now_time - token_time;", + "", + "if(token_age > 60000){", + "", + " var url = pm.environment.get(\"profiles_url\");", + " var clientId = pm.environment.get(\"oidc_client_id\");", + " var usr 
= pm.environment.get(\"jwt_username\");", + " var pwd = pm.environment.get(\"jwt_password\");", + " ", + " pm.sendRequest(", + " {", + " url: url+\"/openid/token/\",", + " method: 'POST',", + " header: {", + " 'Content-Type': 'multipart/form-data',", + " },", + " body: {", + " mode: 'formdata',", + " formdata: [", + " {key: \"client_id\", value: clientId, disabled: false, description: {content:\"\", type:\"text/plain\"}},", + " {key: \"username\", value: usr, disabled: false, description: {content:\"\", type:\"text/plain\"}},", + " {key: \"password\", value: pwd, disabled: false, description: {content:\"\", type:\"text/plain\"}},", + " {key: \"grant_type\", value: 'password', disabled: false, description: {content:\"\", type:\"text/plain\"}},", + " {key: \"scope\", value: 'openid email', disabled: false, description: {content:\"\", type:\"text/plain\"}},", + " ]", + " }", + " }", + " , function (err, response) {", + " var data = response.json();", + " pm.environment.set(\"token\", data.id_token);", + " pm.environment.set(\"token_time\", now_time);", + " });", + "}", + "", + "else {", + " console.log(\"Existing token reused. 
[\"+(token_age/1000)+\"]\")", + "}" + ] + } + }, + { + "listen": "test", + "script": { + "id": "20351884-f8e6-421c-98d6-2bdd29347164", + "type": "text/javascript", + "exec": [ + "" + ] + } + } + ] +} \ No newline at end of file diff --git a/tests/postman.environment.user1.json b/tests/postman.environment.user1.json new file mode 100644 index 00000000..9bc6ea8a --- /dev/null +++ b/tests/postman.environment.user1.json @@ -0,0 +1,51 @@ +{ + "id": "9440180d-f528-4ac7-8a5c-6181528d4e6f", + "name": "Profile [User1]", + "values": [ + { + "key": "profiles_url", + "value": "http://localhost:9000", + "enabled": true, + "type": "text" + }, + { + "key": "jwt_username", + "value": "user1", + "enabled": true, + "type": "text" + }, + { + "key": "jwt_password", + "value": "testpassword", + "enabled": true, + "type": "text" + }, + { + "key": "token_time", + "value": 1531765278759, + "enabled": true, + "type": "text" + }, + { + "key": "oidc_client_id", + "value": "892633", + "enabled": true, + "type": "text" + }, + { + "key": "token", + "value": "eyJhbGciOiJSUzI1NiIsImtpZCI6ImQ1OWJjMGRlYzIzMzlmZDliN2VhMmZjZDNhMjgwNTMxIn0.eyJpc3MiOiJodHRwOi8vbG9jYWxob3N0OjkwMDAvb3BlbmlkIiwic3ViIjoiMzY4NmM2MTYtZmFkZi00MjYxLWEyYzItNmFhYTUwNGExYWUzIiwiYXVkIjoiODkyNjMzIiwiZXhwIjoxNTMxNzY1ODc4LCJpYXQiOjE1MzE3NjUyNzgsImF1dGhfdGltZSI6MTUyODk5MTMxNiwibm9uY2UiOiJzZWxmLmNvZGUubm9uY2UiLCJhdF9oYXNoIjoiNllCVHNuUk4yZFRwS3ZVZXBWMGpnUSIsInVzZXJuYW1lIjoidXNlcjEiLCJlbWFpbCI6InVzZXIxQHNoaXBjaGFpbi5pbyJ9.gTQR55PQiBpu5-lMA94mZl-gBD2DK4hJhQ-9O0H-jV7vxKrlC3pLVYGk3N0AOo8jf640yxHuoSQw1rooTnpxisPr0yrxoKh2Xd1T6K3qEFPUrbB3l8B1Dy_HofV80nG7C4ej5FUfYGHFkHHlbZpilU5VOIShc-K5Peynzz5Ming", + "enabled": true, + "type": "text" + }, + { + "key": "transmission_url", + "value": "http://localhost:8000", + "enabled": true, + "type": "text" + } + ], + "_postman_variable_scope": "environment", + "_postman_exported_at": "2018-07-16T18:22:40.681Z", + "_postman_exported_using": "Postman/6.1.3" +} \ No newline at end of file diff --git 
a/tests/postman.environment.user2.json b/tests/postman.environment.user2.json new file mode 100644 index 00000000..168b7b92 --- /dev/null +++ b/tests/postman.environment.user2.json @@ -0,0 +1,52 @@ +{ + "id": "85e447c1-db32-4dd2-b98f-d88059481835", + "name": "Profile [User2]", + "values": [ + { + "key": "profiles_url", + "value": "http://localhost:9000", + "enabled": true, + "type": "text" + }, + { + "key": "jwt_username", + "value": "user2", + "enabled": true, + "type": "text" + }, + { + "key": "jwt_password", + "value": "testpassword", + "enabled": true, + "type": "text" + }, + { + "key": "token_time", + "value": "1", + "enabled": true, + "type": "text" + }, + { + "key": "oidc_client_id", + "value": "892633", + "enabled": true, + "type": "text" + }, + { + "key": "token", + "value": "", + "enabled": true, + "type": "text" + }, + { + "key": "transmission_url", + "value": "http://localhost:8000", + "description": "", + "type": "text", + "enabled": true + } + ], + "_postman_variable_scope": "environment", + "_postman_exported_at": "2018-07-16T18:23:08.156Z", + "_postman_exported_using": "Postman/6.1.3" +} \ No newline at end of file diff --git a/tests/test_shipments.py b/tests/test_shipments.py new file mode 100644 index 00000000..26697d0f --- /dev/null +++ b/tests/test_shipments.py @@ -0,0 +1,370 @@ +import copy +from unittest import mock + +import requests +from rest_framework import status +from rest_framework.reverse import reverse +from rest_framework.test import APITestCase, APIClient + +from apps.shipments.models import Shipment, LoadShipment, FundingType, EscrowStatus, ShipmentStatus +from apps.shipments.rpc import ShipmentRPCClient +from apps.utils import AuthenticatedUser +from tests.utils import replace_variables_in_string + +VAULT_ID = 'b715a8ff-9299-4c87-96de-a4b0a4a54509' +CARRIER_WALLET_ID = '3716ff65-3d03-4b65-9fd5-43d15380cff9' +SHIPPER_WALLET_ID = '48381c16-432b-493f-9f8b-54e88a84ec0a' +STORAGE_CRED_ID = '77b72202-5bcd-49f4-9860-bc4ec4fee07b' + + 
class ShipmentAPITests(APITestCase):
    """API tests for the Shipment endpoints (list / detail / tracking)."""

    def setUp(self):
        """Create a DRF test client and a JWT-style authenticated user fixture."""
        self.client = APIClient()

        self.user_1 = AuthenticatedUser({
            'user_id': '5e8f1d76-162d-4f21-9b71-2ca97306ef7b',
            'username': 'user1@shipchain.io',
            'email': 'user1@shipchain.io',
        })

    def set_user(self, user, token=None):
        """Force-authenticate the test client as *user* (optional *token* is
        exposed to views via request.auth)."""
        self.client.force_authenticate(user=user, token=token)

    def create_load_data(self):
        """Create a single LoadShipment fixture and store it in self.load_datas."""
        self.load_datas = []
        self.load_datas.append(LoadShipment.objects.create(shipment_id=1,
                                                           shipment_amount=0,
                                                           paid_amount=0,
                                                           paid_tokens="0.000000000000000000",
                                                           shipper=SHIPPER_WALLET_ID,
                                                           carrier=CARRIER_WALLET_ID,
                                                           contract_funded=False,
                                                           shipment_created=True,
                                                           valid_until=24,
                                                           funding_type=FundingType.SHIP,
                                                           escrow_status=EscrowStatus.CONTRACT_INITIATED,
                                                           shipment_status=ShipmentStatus.PENDING,
                                                           start_block=6))

    def create_shipment(self):
        """Create a single Shipment fixture owned by user_1.

        The shipment needs a LoadShipment to reference; previously this
        helper raised AttributeError unless create_load_data() had been
        called first, so the load-data fixture is now created on demand.
        """
        if not getattr(self, 'load_datas', None):
            self.create_load_data()
        self.shipments = []
        self.shipments.append(Shipment.objects.create(vault_id=VAULT_ID,
                                                      carrier_wallet_id=CARRIER_WALLET_ID,
                                                      shipper_wallet_id=SHIPPER_WALLET_ID,
                                                      storage_credentials_id=STORAGE_CRED_ID,
                                                      owner_id=self.user_1.id,
                                                      load_data=self.load_datas[0]))

    def test_list_empty(self):
        """
        Listing shipments requires authentication and starts out empty.
        """

        url = reverse('shipment-list', kwargs={'version': 'v1'})

        # Unauthenticated request should fail with 403
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        # Authenticated request should succeed
        self.set_user(self.user_1)

        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        response_data = response.json()

        # No shipments created yet, so the collection must be empty
        self.assertEqual(len(response_data['data']), 0)

    # NOTE(review): several hundred lines of commented-out tests
    # (test_create, test_get_device_request_url, test_get_tracking,
    # test_shipment_update) were removed here. Dead commented-out code
    # belongs in VCS history, not in the file; recover and re-enable
    # these once the ShipmentRPCClient / requests mocking they rely on
    # is stable.
response = self.client.put(url, post_data, content_type='application/vnd.api+json') + # + # response_data = response.json() + # print(response_data) + # self.assertEqual(response_data['data']['attributes']['carrier_wallet_id'], parameters['_carrier_wallet_id']) + # self.assertEqual(response_data['data']['attributes']['shipper_wallet_id'], parameters['_shipper_wallet_id']) + # self.assertEqual(response_data['data']['attributes']['storage_credentials_id'], + # parameters['_storage_credentials_id']) + # self.assertEqual(response_data['data']['attributes']['carrier_scac'], parameters['_carrier_scac']) + # self.assertEqual(response_data['data']['attributes']['vault_id'], parameters['_vault_id']) + # self.assertEqual(response_data['data']['meta']['transaction_id'], parameters['_async_hash']) diff --git a/tests/test_utils.py b/tests/test_utils.py new file mode 100644 index 00000000..7907fb44 --- /dev/null +++ b/tests/test_utils.py @@ -0,0 +1,36 @@ +from rest_framework import exceptions +from django.test import TestCase + +from apps.utils import assertDeepAlmostEqual, PassiveJSONWebTokenAuthentication + + +class UtilsTests(TestCase): + def test_assert_almost_equal(self): + assertDeepAlmostEqual(self, + expected={ + 'data': { + 'testing': 123.4567890123456789 + } + }, + actual={ + 'data': { + 'testing': 123.4567890123456788 + } + }) + self.assertRaises(AssertionError, assertDeepAlmostEqual, + test_case=self, + expected={ + 'testing': 123 + }, + actual={ + 'testing': 124 + }) + + def test_passive_jwt_auth(self): + auth = PassiveJSONWebTokenAuthentication() + self.assertRaises(exceptions.AuthenticationFailed, auth.authenticate_credentials, {}) + user = auth.authenticate_credentials({'sub': '000-000-0000', 'username': 'wat@wat.com', 'email': 'wat@wat.com'}) + self.assertEqual(user.is_authenticated(), True) + self.assertEqual(user.is_staff(), False) + self.assertEqual(user.is_superuser(), False) + self.assertEqual(user.username, 'wat@wat.com') diff --git a/tests/utils.py 
b/tests/utils.py new file mode 100644 index 00000000..11f6a079 --- /dev/null +++ b/tests/utils.py @@ -0,0 +1,8 @@ +import re + + +def replace_variables_in_string(string, parameters): + matches = re.findall("<<(\w+?)>>", string) + for match in matches: + string = string.replace(f"<<{match}>>", parameters[match]) + return string