Commit

Merge branch 'develop' into better_query_error
Ariana-B authored Jul 8, 2024
2 parents 8630ce1 + 0ed0a01 commit 062617f
Showing 32 changed files with 392 additions and 326 deletions.
7 changes: 3 additions & 4 deletions .github/workflows/deployment_test.yaml
@@ -29,10 +29,9 @@ jobs:
           make build-prod
           make up-prod
-      - name: Sleep for 10 seconds
-        uses: whatnick/wait-action@master
-        with:
-          time: '5s'
+      - name: Sleep for 30 seconds
+        run: sleep 30s
+        shell: bash

       - name: Prepare explorer schema
         run: |
2 changes: 1 addition & 1 deletion Dockerfile
@@ -88,4 +88,4 @@ CMD ["gunicorn", \
      "90", \
      "--config", \
      "python:cubedash.gunicorn_config", \
-     "cubedash:app"]
+     "cubedash:create_app()"]
30 changes: 15 additions & 15 deletions Makefile
@@ -88,55 +88,55 @@ clean: ## Clean all working/temporary files

 # DOCKER STUFF
 up: ## Start server using Docker
-    docker-compose up --quiet-pull
+    docker compose up --quiet-pull

 up-d: ## Start server using Docker in background
-    docker-compose up -d --quiet-pull
+    docker compose up -d --quiet-pull

 build: ## Build the dev Docker image
-    docker-compose build
+    docker compose build

 docker-clean: ## Get rid of the local docker env and DB
-    docker-compose down
+    docker compose down

 build-prod: ## Build the prod Docker image
-    docker-compose \
+    docker compose \
         --file docker-compose.yml \
         build

 up-prod: ## Start using the prod Docker image
-    docker-compose \
+    docker compose \
         --file docker-compose.yml \
-        up -d --quiet-pull
+        up -d --wait --quiet-pull

 init-odc: ## Initialise ODC Database
-    docker-compose exec -T explorer \
+    docker compose exec -T explorer \
         datacube system init

 docker-shell: ## Get a shell into local Docker environ
-    docker-compose exec -T explorer \
+    docker compose exec -T explorer \
         bash

 schema: ## Initialise Explorer DB using Docker
-    docker-compose exec -T explorer \
+    docker compose exec -T explorer \
         cubedash-gen -v --init

 index: ## Update Explorer DB using Docker
-    docker-compose exec -T explorer \
+    docker compose exec -T explorer \
         cubedash-gen --all

 force-refresh: ## Entirely refresh the Explorer tables in Docker
-    docker-compose exec -T explorer \
+    docker compose exec -T explorer \
         cubedash-gen --force-refresh --refresh-stats --all

 create-test-db-docker: ## Create a test database inside Docker
-    docker-compose run --rm -T explorer \
+    docker compose run --rm -T explorer \
         bash /code/.docker/create_db.sh

 lint-docker: ## Run linting inside Docker
-    docker-compose run --rm explorer \
+    docker compose run --rm explorer \
         make lint

 test-docker: ## Run tests inside Docker
-    docker-compose run --rm explorer \
+    docker compose run --rm explorer \
         pytest --cov=cubedash --cov-report=xml -r sx --durations=5
5 changes: 2 additions & 3 deletions README.md
@@ -68,7 +68,7 @@ A `cubedash-run` command is available to run Explorer locally:
 But Explorer can be run using any typical Python WSGI server, for example [gunicorn](https://gunicorn.org/):

     pip install gunicorn
-    gunicorn -b '127.0.0.1:8080' -w 4 cubedash:app
+    gunicorn -b '127.0.0.1:8080' -w 4 cubedash:create_app()

 Products will begin appearing one-by-one as the summaries are generated in the
 background. If impatient, you can manually navigate to a product using
@@ -130,8 +130,7 @@ Datacube-explorer default timezone is configured to: `Australia/Darwin`.

 To configure the instance to a different timezone, the following configuration needs to be applied:

-- `os.environment` variable `CUBEDASH_DEFAULT_TIMEZONE`
-- `app.config` variable `CUBEDASH_DEFAULT_TIMEZONE`
+- `app.config` variable `CUBEDASH_DEFAULT_TIMEZONE` (via environment variable `CUBEDASH_SETTINGS`, which points to a `.env.py` file)

 ### Can I add custom scripts or text to the page (such as analytics)?
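To make the new configuration path concrete, here is a sketch of such a settings file. The filename and the `from_envvar` loading step are assumptions based on the README wording; `CUBEDASH_DEFAULT_TIMEZONE` and `CUBEDASH_SETTINGS` come from the diff above:

```python
# settings.env.py -- hypothetical settings file pointed to by CUBEDASH_SETTINGS.
# Flask-style settings files are plain Python, executed by app.config.from_envvar().
CUBEDASH_DEFAULT_TIMEZONE = "Europe/Berlin"  # any valid IANA timezone name
```

Exporting `CUBEDASH_SETTINGS=/path/to/settings.env.py` before starting Explorer would then override the `Australia/Darwin` default.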
4 changes: 2 additions & 2 deletions cubedash/__init__.py
@@ -3,6 +3,6 @@
 except ImportError:
     __version__ = "Unknown/Not Installed"

-from ._pages import app
+from ._model import create_app

-__all__ = ("app", "__version__")
+__all__ = ("create_app", "__version__")
5 changes: 4 additions & 1 deletion cubedash/_api.py
@@ -2,6 +2,7 @@
 from datetime import date, datetime

 import flask
+from dateutil import tz
 from flask import Blueprint, abort, request

 from cubedash import _utils
@@ -30,7 +31,9 @@ def datasets_geojson(
     if limit > hard_limit:
         limit = hard_limit

-    time = _utils.as_time_range(year, month, day, tzinfo=_model.STORE.grouping_timezone)
+    time = _utils.as_time_range(
+        year, month, day, tzinfo=tz.gettz(_model.DEFAULT_GROUPING_TIMEZONE)
+    )

     return as_geojson(
         dict(
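For reference, `tz.gettz` resolves an IANA name to a `tzinfo` instance, returning `None` for unknown names, so the configured value needs to be valid. A small standalone sketch using the project's documented default timezone:

```python
from datetime import datetime

from dateutil import tz

darwin = tz.gettz("Australia/Darwin")  # tzinfo instance, or None if unknown
start = datetime(2024, 7, 1, tzinfo=darwin)
print(start.isoformat())  # 2024-07-01T00:00:00+09:30 (Darwin has no DST)
```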
13 changes: 7 additions & 6 deletions cubedash/_dataset.py
@@ -2,7 +2,7 @@
 from uuid import UUID

 import flask
-from flask import Blueprint, abort, url_for
+from flask import Blueprint, abort, current_app, url_for

 from . import _model
 from . import _utils as utils
@@ -13,9 +13,7 @@
     __name__,
 )

-PROVENANCE_DISPLAY_LIMIT = _model.app.config.get(
-    "CUBEDASH_PROVENANCE_DISPLAY_LIMIT", 25
-)
+PROVENANCE_DISPLAY_LIMIT = 25


 @bp.route("/dataset/<uuid:id_>")
@@ -54,8 +52,11 @@ def dataset_full_page(product_name: str, id_: UUID):
         f"Perhaps you meant to visit {actual_url!r}",
     )

+    provenance_display_limit = current_app.config.get(
+        "CUBEDASH_PROVENANCE_DISPLAY_LIMIT", PROVENANCE_DISPLAY_LIMIT
+    )
     source_datasets, source_dataset_overflow = utils.get_dataset_sources(
-        index, id_, limit=PROVENANCE_DISPLAY_LIMIT
+        index, id_, limit=provenance_display_limit
     )

     archived_location_times = index.datasets.get_archived_location_times(id_)
@@ -64,7 +65,7 @@
     ordered_metadata = utils.prepare_dataset_formatting(dataset)

     derived_datasets, derived_dataset_overflow = utils.get_datasets_derived(
-        index, id_, limit=PROVENANCE_DISPLAY_LIMIT
+        index, id_, limit=provenance_display_limit
     )
     derived_datasets.sort(key=utils.dataset_label)
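The pattern behind this change: the config lookup moves from import time (which required a ready-made `_model.app`) to request time via Flask's `current_app` proxy, keeping the module importable before any app exists. A self-contained sketch of the same pattern, with illustrative names:

```python
from flask import Flask, current_app

DEFAULT_LIMIT = 25  # module-level fallback; safe to evaluate with no app around


def provenance_limit() -> int:
    # current_app only resolves inside an app/request context, so this
    # lookup runs after create_app() has finished configuring the app.
    return current_app.config.get("CUBEDASH_PROVENANCE_DISPLAY_LIMIT", DEFAULT_LIMIT)


app = Flask(__name__)
app.config["CUBEDASH_PROVENANCE_DISPLAY_LIMIT"] = 50

with app.app_context():
    print(provenance_limit())  # 50
```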
4 changes: 2 additions & 2 deletions cubedash/_filters.py
@@ -108,7 +108,7 @@ def _dataset_geojson(dataset):

 @bp.app_template_filter("product_link")
 def _product_link(product_name):
-    url = flask.url_for("product_page", product_name=product_name)
+    url = flask.url_for("pages.product_page", product_name=product_name)
     return Markup(f"<a href='{url}' class='product-name'>{product_name}</a>")


@@ -147,7 +147,7 @@ def _dataset_day_link(dataset: Dataset, timezone=None):
     if timezone:
         t = utils.default_utc(t).astimezone(timezone)
     url = flask.url_for(
-        "product_page",
+        "pages.product_page",
         product_name=dataset.type.name,
         year=t.year,
         month=t.month,
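These renames follow from the app-factory refactor: once views live on a blueprint, their endpoint names gain the blueprint's name as a prefix. A minimal sketch (the blueprint name is taken from the diff; the route itself is illustrative):

```python
from flask import Blueprint, Flask, url_for

pages = Blueprint("pages", __name__)


@pages.route("/products/<product_name>")
def product_page(product_name):
    return f"Product: {product_name}"


app = Flask(__name__)
app.register_blueprint(pages)

with app.test_request_context():
    # Blueprint endpoints are namespaced as "<blueprint>.<view_function>".
    print(url_for("pages.product_page", product_name="ls8_ard"))  # /products/ls8_ard
```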
(Diffs for the remaining changed files are not shown here.)