Merge pull request #457 from cevi/dev
Version 6.0.0 - Route Storing

- Exports are now stored in a database. This allows exports to be re-generated later and enables:
  - QR code generation for import into the swisstopo app (a usage sketch follows below)
- Add height profile again (revert removal in 5.1.0)
- Improve Drawing: you can now delete the last point of the path
- Improve Drawing: you can now draw off-road paths
- Bug fixes
- Dependency updates
wp99cp authored Oct 3, 2024
2 parents 23894aa + e7d1c89 commit 33fe5b7
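As a usage sketch (not part of the commit itself): once a route has been stored, the endpoints added in backend/app.py below let a client re-trigger an export and fetch the route as a GPX file. The base URL and UUID are placeholders, and the exact status strings depend on GeneratorStatus:

import requests

BACKEND = "https://backend.map.cevi.tools"  # from .env.prod-latest; adjust per deployment
uuid = "00000000-0000-0000-0000-000000000000"  # placeholder UUID of a stored export

# Re-create the export files from the stored route data
r = requests.get(f"{BACKEND}/retrieve/{uuid}")
print(r.json())  # e.g. {"status": ..., "uuid": ...} while the generator runs

# Download the stored route as GPX -- the same URL the printed QR code
# points to (wrapped in a swisstopo.app deep link)
gpx = requests.get(f"{BACKEND}/gpx/{uuid}.gpx")
with open("route.gpx", "wb") as f:
    f.write(gpx.content)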
Showing 57 changed files with 3,230 additions and 1,958 deletions.
5 changes: 5 additions & 0 deletions .env.ci-testing
@@ -6,7 +6,12 @@ FRONTEND_DOMAIN=http://awt-frontend
PRINT_API_BASE_URL=awt-mapfish-print-server
CYPRESS_BASE_URL=http://awt-frontend
VALHALLA_DOMAIN=http://awt-valhalla:8002
STORE_API_URL=http://awt-store-api:6000

# Configures the logging level for the backend
# valid values are DEBUG, INFO, WARNING, ERROR or CRITICAL
LOG_LEVEL=INFO

# MongoDB settings for route storing
MONGO_INITDB_ROOT_USERNAME=admin
MONGO_INITDB_ROOT_PASSWORD=pass
6 changes: 6 additions & 0 deletions .env.local-dev
@@ -5,8 +5,14 @@ DOCS_DOMAIN=http://localhost:4000
FRONTEND_DOMAIN=http://localhost
PRINT_API_BASE_URL=awt-mapfish-print-server
VALHALLA_DOMAIN=http://localhost:8002
STORE_API_URL=http://awt-store-api:6000

# Configures the logging level for the backend
# valid values are DEBUG, INFO, WARNING, ERROR or CRITICAL
LOG_LEVEL=DEBUG
FAULTHANDLER=true

# MongoDB settings for route storing
MONGO_INITDB_ROOT_USERNAME=admin
MONGO_INITDB_ROOT_PASSWORD=pass
ME_CONFIG_MONGODB_URL=mongodb://admin:pass@awt-mongodb:27017/
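These variables configure both the MongoDB container and mongo-express. A minimal sketch of how the new store API could connect with them (the awt-store-api service itself is not part of this diff, so the use of pymongo and the database/collection names are assumptions):

import os
from pymongo import MongoClient

client = MongoClient(
    host="awt-mongodb",  # container name, as in ME_CONFIG_MONGODB_URL above
    port=27017,
    username=os.environ["MONGO_INITDB_ROOT_USERNAME"],
    password=os.environ["MONGO_INITDB_ROOT_PASSWORD"],
)
routes = client["awt"]["routes"]  # assumed database and collection names
routes.insert_one({"uuid": "...", "options": {}})  # shape as in get_store_dict() below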
8 changes: 7 additions & 1 deletion .env.prod-dev
@@ -5,7 +5,13 @@ DOCS_DOMAIN=http://docs.dev.map.cevi.tools
FRONTEND_DOMAIN=https://dev.map.cevi.tools
PRINT_API_BASE_URL=awt-mapfish-print-server
VALHALLA_DOMAIN=https://valhalla.dev.map.cevi.tools
STORE_API_URL=http://awt-store-api:6000

# Configures the logging level for the backend
# valid values are DEBUG, INFO, WARNING, ERROR or CRITICAL
LOG_LEVEL=INFO

# MongoDB settings for route storing
MONGO_INITDB_ROOT_USERNAME=admin
# TODO: Change to the more secure password in production
MONGO_INITDB_ROOT_PASSWORD=pass
10 changes: 8 additions & 2 deletions .env.prod-latest
@@ -1,11 +1,17 @@
# Configuration for the angular application: development or production
ANGULAR_BUILD_MODE=production
BACKEND_DOMAIN=https://backend.map.cevi.tools
DOCS_DOMAIN=http://docs.map.cevi.tools
DOCS_DOMAIN=https://docs.map.cevi.tools
FRONTEND_DOMAIN=https://map.cevi.tools
PRINT_API_BASE_URL=awt-mapfish-print-server
VALHALLA_DOMAIN=https://valhalla.map.cevi.tools
STORE_API_URL=http://awt-store-api:6000

# Configures the logging level for the backend
# valid values are DEBUG, INFO, WARNING, ERROR or CRITICAL
LOG_LEVEL=INFO

# MongoDB settings for route storing
MONGO_INITDB_ROOT_USERNAME=admin
# TODO: Change to the more secure password in production
MONGO_INITDB_ROOT_PASSWORD=pass
2 changes: 1 addition & 1 deletion .github/workflows/create_release.yml
@@ -15,7 +15,7 @@ jobs:
- name: Checkout repository
uses: actions/checkout@v4

- uses: actions/setup-node@v4.0.3
- uses: actions/setup-node@v4.0.4
with:
node-version: '16'

6 changes: 6 additions & 0 deletions .github/workflows/cypress.yml
@@ -15,6 +15,12 @@ jobs:
- name: 🛎️ Checkout
uses: actions/checkout@v4

- name: Free Disk Space (Ubuntu)
uses: jlumbroso/free-disk-space@main
with:
tool-cache: false
docker-images: false

- name: create dummy files for .git/HEAD and .git/refs/heads/
run: |
mkdir -p .git/refs/heads
2 changes: 1 addition & 1 deletion Readme.md
@@ -35,7 +35,7 @@ adjust (see also the J+S-Broschüre Berg).
GPX file) serves as the basis.
- [x] As a next step, it should be possible to visualize the selected points on an interactive map and to
move them if needed.
- [ ] Drawing new routes should be possible directly in our web interface (making the GPX export unnecessary). For this,
- [x] Drawing new routes should be possible directly in our web interface (making the GPX export unnecessary). For this,
magnetic paths are used, i.e. the route is automatically snapped to the nearest path.

Further functionality can also be requested via an enhancement issue.
133 changes: 127 additions & 6 deletions backend/app.py
@@ -13,6 +13,7 @@
from threading import Thread

import polyline
import requests
from flask import Flask, request, send_file, redirect
from flask_cors import CORS

Expand All @@ -27,7 +28,8 @@
)
from automatic_walk_time_tables.path_transformers.pois_transfomer import POIsTransformer
from automatic_walk_time_tables.utils.error import UserException
from automatic_walk_time_tables.utils.path import Path
from automatic_walk_time_tables.utils.gpx_creator import create_gpx_file
from automatic_walk_time_tables.utils.path import Path, path_from_json
from automatic_walk_time_tables.utils.point import Point_LV95
from server_logging.log_helper import setup_recursive_logger
from server_logging.status_handler import ExportStateHandler, ExportStateLogger
@@ -297,10 +299,24 @@ def create_export(options, uuid):
if "name_points_in_export" not in options["settings"]:
options["settings"]["name_points_in_export"] = True

logger.info("OPTIONS:" + str(options))

generator = AutomatedWalkTableGenerator(uuid, options)
generator.set_data(path, way_points, pois)

store_dict = generator.get_store_dict()
r = requests.post(os.environ["STORE_API_URL"] + "/store", json=store_dict)
if r.status_code == 200:
logger.log(
ExportStateLogger.REQUESTABLE,
"Daten abgespeichert.",
{"uuid": uuid, "status": GeneratorStatus.RUNNING},
)
else:
logger.log(
ExportStateLogger.REQUESTABLE,
"Daten nicht abgespeichert.",
{"uuid": uuid, "status": GeneratorStatus.ERROR},
)

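# Note: the export below still runs even if storing the route failed;
# only the requestable status message logged above differs.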
generator.run()

# Create a new thread and start it
@@ -373,9 +389,15 @@ def download(uuid):
base_path = pathlib.Path("./output/" + uuid + "/")
state = stateHandler.get_status(uuid)

if (state and state["status"] != GeneratorStatus.SUCCESS) or not os.path.exists(
base_path
):
status_ok = state and state["status"] == GeneratorStatus.SUCCESS
files_exists = os.path.exists(base_path)

storage = None
if not status_ok and not files_exists:
storage = fetch_data_for_uuid(uuid)

if not status_ok and not files_exists and storage is None:

# check if content type is HTML
if "text/html" in request.headers.get("Accept", ""):
frontend_url = os.environ["FRONTEND_DOMAIN"]
@@ -392,6 +414,35 @@
mimetype="application/json",
)

if not files_exists and storage is not None:

logger.debug("Files not found, creating them now.")
logger.debug("Storage options: %s" % storage["options"])

# Create the folder and files
thread = Thread(
target=create_export, kwargs={"options": storage["options"], "uuid": uuid}
)
thread.start()

return app.response_class(
response=json.dumps({"status": GeneratorStatus.RUNNING, "uuid": str(uuid)}),
status=200,
mimetype="application/json",
)

if not files_exists and storage is None:
return app.response_class(
response=json.dumps(
{
"status": GeneratorStatus.ERROR,
"message": "Die angeforderten Daten sind nicht verfügbar.",
}
),
status=404,
mimetype="application/json",
)

# Return Zip with data
data = io.BytesIO()
with zipfile.ZipFile(data, mode="w") as z:
@@ -409,6 +460,76 @@
)


def fetch_data_for_uuid(uuid):
"""
Fetches the data for the given UUID from the store API.
:param uuid: The UUID of the data to fetch
:return: The data for the given UUID or None if the data is not available
"""

r = requests.post(os.environ["STORE_API_URL"] + "/retrieve", json={"uuid": uuid})
if r.status_code == 200:
return r.json()

return None


@app.route("/retrieve/<uuid>")
def retrieve_route(uuid):
data = fetch_data_for_uuid(uuid)
if data is not None:
options = data["options"]
# TODO: check if the export folder still exists.
# if yes: do not export again, but rather just serve the folder
# if no: do as is now.

thread = Thread(
target=create_export, kwargs={"options": options, "uuid": str(uuid)}
)
thread.start()

return app.response_class(
response=json.dumps({"status": GeneratorStatus.RUNNING, "uuid": str(uuid)}),
status=200,
mimetype="application/json",
)
else:
return app.response_class(
response=json.dumps(
{
"status": GeneratorStatus.ERROR,
"message": "Die angeforderte GPX Datei ist nicht verfügbar.",
}
),
status=404,
mimetype="application/json",
)


@app.route("/gpx/<uuid>.gpx")
def generate_gpx(uuid):
data = fetch_data_for_uuid(uuid)

if data is None:
return app.response_class(
response=json.dumps(
{
"status": GeneratorStatus.ERROR,
"message": "Die angeforderte GPX Datei ist nicht verfügbar.",
}
),
status=404,
mimetype="application/json",
)

path: Path = path_from_json(data["path"])
way_points: Path = path_from_json(data["way_points"])
gpx_string = create_gpx_file(path, way_points)
return app.response_class(
response=gpx_string, status=200, mimetype="application/gpx+xml"
)


if __name__ == "__main__":
app.run(
debug=(os.environ.get("DEBUG", "False").lower() in ("true", "1", "t")),
34 changes: 33 additions & 1 deletion backend/automatic_walk_time_tables/generator.py
@@ -19,7 +19,10 @@
from automatic_walk_time_tables.path_transformers.pois_transfomer import POIsTransformer
from automatic_walk_time_tables.utils import path
from automatic_walk_time_tables.utils.file_parser import GeoFileParser
from automatic_walk_time_tables.walk_time_table.walk_table import create_walk_table
from automatic_walk_time_tables.walk_time_table.walk_table import (
create_walk_table,
plot_elevation_profile,
)
from server_logging.status_handler import ExportStateLogger
from automatic_walk_time_tables.utils.error import UserException

@@ -93,6 +96,21 @@ def __create_files(self):
naming_fetcher = NamingTransformer()
self.__way_points = naming_fetcher.transform(self.__way_points)

self.__log_runtime(
plot_elevation_profile,
"Benötigte Zeit zum erstellen des Höhenprofils",
self.__path,
self.__way_points,
self.__pois,
file_name=name,
legend_position=self.options["settings"]["legend_position"],
)
self.__logger.log(
ExportStateLogger.REQUESTABLE,
"Höhenprofil wurde erstellt.",
{"uuid": self.uuid, "status": GeneratorStatus.RUNNING},
)

# We fetch map numbers only for the selected way points;
# this is much faster than for every point in the original path. As the swiss_TML_api uses a tolerance
# of 2_000m anyway, the chance of missing a map number is very small.
@@ -173,3 +191,17 @@ def set_data(self, path_data: path.Path, way_points: path.Path, pois: path.Path)
self.__path = path_data
self.__way_points = way_points
self.__pois = pois

def get_store_dict(self):

# name way_points
naming_fetcher = NamingTransformer(use_default_name=True)
__way_points = naming_fetcher.transform(self.__way_points)

return {
"uuid": self.uuid,
"options": self.options,
"path": self.__path.to_json(),
"pois": self.__pois.to_json(),
"way_points": __way_points.to_json(),
}
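For illustration, the dictionary posted to the store API has roughly this shape (values are placeholders; the exact output of Path.to_json() is not shown in this diff):

{
    "uuid": "00000000-0000-0000-0000-000000000000",
    "options": {"settings": {"name_points_in_export": True}},
    "path": {},        # full route geometry, serialized via Path.to_json()
    "pois": {},        # points of interest along the route
    "way_points": {},  # selected way points, named by the NamingTransformer
}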
27 changes: 27 additions & 0 deletions backend/automatic_walk_time_tables/map_downloader/create_map.py
@@ -2,6 +2,7 @@
import logging
import os
import time
import base64
from pathlib import Path
from typing import List

@@ -321,11 +322,14 @@ def create_mapfish_query(
point_layer = self.create_point_json(lv95, point, "#00BFFF", pointRadius=7)
point_layers.append(point_layer)

qr_code_string = self.__build_qr_code_string()

query_json = {
"layout": "A4 landscape",
"outputFormat": "pdf",
"attributes": {
"scale": "Massstab: 1:" + f"{map_scaling:,}".replace(",", "'"),
"qr_code": qr_code_string,
"map": {
"center": center,
"scale": map_scaling,
Expand All @@ -340,6 +344,29 @@ def create_mapfish_query(

return query_json

def __build_qr_code_string(self):
backend_domain = os.environ["BACKEND_DOMAIN"]
clear_url = f"{backend_domain}/gpx/{self.uuid}.gpx"
b64_url = base64.b64encode(clear_url.encode("ascii")).decode("ascii")
final_url = "https://swisstopo.app/u/" + b64_url
r = requests.post(
"https://backend.qr.cevi.tools/png",
json={"text": final_url},
)
if r.status_code == 200:
qr_code_bytes = r.content

# Convert the byte string to a base64-encoded string
base64_encoded = base64.b64encode(qr_code_bytes).decode("utf-8")

# Add the appropriate prefix for embedding in a webpage as a data URL
data_url = f"data:image/png;base64,{base64_encoded}"

# Print or return the data URL
return data_url
else:
return "" # TODO: does this work?

def create_point_json(
self, lv95, point, color="#FF0000", pointRadius=5, label=False
):