Merge pull request #142 from m4dm4rtig4n/0.8.2

0.8.2

m4dm4rtig4n authored Nov 25, 2022
2 parents 94c5de3 + 98ccbfb commit e9deb4e

Showing 16 changed files with 96 additions and 78 deletions.
31 changes: 0 additions & 31 deletions 3bfb136b0221_add_enable_progress_progress_status_to_.py

This file was deleted.

14 changes: 10 additions & 4 deletions Makefile
@@ -1,4 +1,4 @@
COMPOSE=docker compose -f docker-compose.dev.yml
COMPOSE=docker compose -f dev/docker-compose.dev.yaml

.DEFAULT_GOAL := wizard
## Run wizard
@@ -17,6 +17,12 @@ run:
$(COMPOSE) rm -f myelectricaldata_import
python3 main.py action=run

## Connect to EnedisGateway container : DEV
run-production:
$(COMPOSE) stop myelectricaldata_import
$(COMPOSE) rm -f myelectricaldata_import
python3 main.py action=run env=production

## Start docker conatiners for dev
up:
@echo "Start docker container for dev"
@@ -64,9 +70,9 @@ git_push:
generate-dependencies:
cd app; pip-compile -o requirements.txt pyproject.toml; cd -

## Create github pre release (dev)
create-release-dev:
python3 main.py action=create_pre_release
clean: generate-dependencies
docker image rm -f myelectricaldata_import_myelectricaldata_import
# docker build ./

## Create github release (prod)
create-release:
4 changes: 2 additions & 2 deletions README.md
@@ -8,7 +8,7 @@

**Vous recherchez un Discord Francais autour de la "Domotique & Diy" ?**

[![https://discord.gg/DfVJZme](discord.png 'Vous recherchez un Discord Francais autour de la "Domotique & Diy" ?')](https://discord.gg/DfVJZme)
[![https://discord.gg/DfVJZme](ressources/discord.png 'Vous recherchez un Discord Francais autour de la "Domotique & Diy" ?')](https://discord.gg/DfVJZme)

****

@@ -216,7 +216,7 @@ Voir [F.A.Q](https://www.myelectricaldata.fr/faq) pour plus de détail.
> Actuellement la dashboard est uniquement compatible avec les version <= 0.7.8
Une fois les données exporté dans Grafana, vous pouvez utiliser la dashboard [ICI](grafana_dashboard.json)
Une fois les données exporté dans Grafana, vous pouvez utiliser la dashboard [ICI](ressources/grafana_dashboard.json)
> Ne fonctionne qu'avec InfluxDB <= V1.8
Binary file removed __pycache__/dependencies.cpython-310.pyc
39 changes: 24 additions & 15 deletions app/main.py
@@ -1,22 +1,26 @@
import sys

from flask import Flask, request, send_file
from flask_apscheduler import APScheduler
from waitress import serve

from config import cycle_minimun
from dependencies import *
from models.config import Config, get_version
from models.jobs import Job
from models.ajax import Ajax
from models.config import Config, get_version
from models.database import Database
from models.influxdb import InfluxDB
from models.jobs import Job
from models.log import Log
from models.mqtt import Mqtt
from templates.index import Index
from templates.usage_point_id import UsagePointId

LOG = Log()

if "DEV" in os.environ and os.getenv("DEV"):
LOG.title_warning("Run in Development mode")
else:
LOG.title("Run in production mode")

if "APPLICATION_PATH_DATA" in os.environ:
APPLICATION_PATH_DATA = os.getenv("APPLICATION_PATH_DATA")
else:
@@ -83,17 +87,18 @@


class FetchAllDataScheduler(object):
JOBS = [
{
JOBS = []
if "DEV" not in os.environ or not os.getenv("DEV"):
JOBS.append({
"id": f"fetch_data_boot",
"func": Job().job_import_data
}, {
"id": f"fetch_data",
"func": Job().job_import_data,
"trigger": "interval",
"seconds": CYCLE,
}
]
})
JOBS.append({
"id": f"fetch_data",
"func": Job().job_import_data,
"trigger": "interval",
"seconds": CYCLE,
})
SCHEDULER_API_ENABLED = True


@@ -143,6 +148,7 @@ def usage_point_id(usage_point_id):
def lock():
return str(DB.lock_status())


@APP.route("/gateway_status", methods=['GET'])
@APP.route("/gateway_status/", methods=['GET'])
def gateway_status():
@@ -172,6 +178,7 @@ def account_status(usage_point_id):
def import_all_data(usage_point_id):
return Ajax(usage_point_id).import_data()


@APP.route("/import/<usage_point_id>/<target>", methods=['GET'])
@APP.route("/import/<usage_point_id>/<target>", methods=['GET'])
def import_data(usage_point_id, target):
@@ -207,5 +214,7 @@ def whitelist_data(usage_point_id, target, date):
def fetch_data(usage_point_id, target, date):
return Ajax(usage_point_id).fetch(target, date)


APP.run(host="0.0.0.0", port=5000, debug=False, use_reloader=True)
if "DEV" in os.environ and os.getenv("DEV"):
APP.run(host="0.0.0.0", port=5000, debug=False, use_reloader=True)
else:
serve(APP, host="0.0.0.0", port=5000)
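
A note on the app/main.py change above: the web server now depends on a DEV environment variable. With DEV set, the app keeps using Flask's built-in development server; otherwise it is served through waitress, which this release adds as a dependency. A minimal standalone sketch of that pattern (hypothetical app, not the project's actual module layout):

import os

from flask import Flask
from waitress import serve

APP = Flask(__name__)


@APP.route("/ping")
def ping():
    return "ok"


if __name__ == "__main__":
    if "DEV" in os.environ and os.getenv("DEV"):
        # Development: Flask's built-in server with auto-reload.
        APP.run(host="0.0.0.0", port=5000, debug=False, use_reloader=True)
    else:
        # Production: the waitress WSGI server.
        serve(APP, host="0.0.0.0", port=5000)
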
29 changes: 21 additions & 8 deletions app/models/jobs.py
@@ -1,12 +1,12 @@
import __main__ as app
import sys
import time
import traceback

from models.config import get_version
from models.database import Database
from models.export_mqtt import ExportMqtt
from models.export_home_assistant import HomeAssistant
from models.export_influxdb import ExportInfluxDB
from models.export_mqtt import ExportMqtt
from models.log import Log
from models.query_address import Address
from models.query_contract import Contract
@@ -26,8 +26,15 @@ def __init__(self, usage_point_id=None):
self.mqtt_config = self.config.mqtt_config()
self.home_assistant_config = self.config.home_assistant_config()
self.influxdb_config = self.config.influxdb_config()
self.wait_job_start = 10

def job_import_data(self, target=None):
app.LOG.title("Démarrage du job d'importation dans 10s")
i = self.wait_job_start
while i > 0:
app.LOG.log(f" => {i}s")
time.sleep(1)
i = i - 1
if app.DB.lock_status():
return {
"status": False,
@@ -99,14 +106,16 @@ def job_import_data(self, target=None):
if target == "mqtt" or target is None:
ExportMqtt(self.usage_point_id, "consumption").contract()
ExportMqtt(self.usage_point_id, "consumption").address()
if hasattr(self.usage_point_config, "consumption") and self.usage_point_config.consumption:
if hasattr(self.usage_point_config,
"consumption") and self.usage_point_config.consumption:
ExportMqtt(self.usage_point_id, "consumption").daily_annual(
self.usage_point_config.consumption_price_base
)
ExportMqtt(self.usage_point_id, "consumption").daily_linear(
self.usage_point_config.consumption_price_base
)
if hasattr(self.usage_point_config, "production") and self.usage_point_config.production:
if hasattr(self.usage_point_config,
"production") and self.usage_point_config.production:
ExportMqtt(self.usage_point_id, "production").daily_annual(
self.usage_point_config.production_price
)
@@ -155,21 +164,25 @@ def job_import_data(self, target=None):
if "enable" in self.influxdb_config and self.influxdb_config["enable"]:
# app.INFLUXDB.purge_influxdb()
if target == "influxdb" or target is None:
if hasattr(self.usage_point_config, "consumption") and self.usage_point_config.consumption:
if hasattr(self.usage_point_config,
"consumption") and self.usage_point_config.consumption:
ExportInfluxDB(self.usage_point_id).daily(
self.usage_point_config.consumption_price_base,
)
if hasattr(self.usage_point_config, "production") and self.usage_point_config.production:
if hasattr(self.usage_point_config,
"production") and self.usage_point_config.production:
ExportInfluxDB(self.usage_point_id).daily(
self.usage_point_config.production_price,
"production"
)
if hasattr(self.usage_point_config, "consumption_detail") and self.usage_point_config.consumption_detail:
if hasattr(self.usage_point_config,
"consumption_detail") and self.usage_point_config.consumption_detail:
ExportInfluxDB(self.usage_point_id).detail(
self.usage_point_config.consumption_price_hp,
self.usage_point_config.consumption_price_hc
)
if hasattr(self.usage_point_config, "production_detail") and self.usage_point_config.production_detail:
if hasattr(self.usage_point_config,
"production_detail") and self.usage_point_config.production_detail:
ExportInfluxDB(self.usage_point_id).detail(
self.usage_point_config.production_price,
"production_detail"
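Besides the import reordering and line wrapping, the app/models/jobs.py hunk adds a short countdown (self.wait_job_start = 10) before each import run. The loop reduces to this standalone sketch (the real method logs through app.LOG and then checks app.DB.lock_status() before importing):

import time

def countdown(log, delay=10):
    # Announce the start, then log the remaining seconds once per second.
    log(f"Démarrage du job d'importation dans {delay}s")
    for remaining in range(delay, 0, -1):
        log(f" => {remaining}s")
        time.sleep(1)

countdown(print)
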
1 change: 1 addition & 0 deletions app/models/stat.py
@@ -38,6 +38,7 @@ def __init__(self, usage_point_id):
self.value_yesterday_evolution = 0
self.value_current_month_evolution = 0
self.value_peak_offpeak_percent_hp_vs_hc = 0
self.value_monthly_evolution = 0

def daily(self, index=0, measurement_direction="consumption"):
begin = datetime.combine(self.yesterday_date - timedelta(days=index), datetime.min.time())
3 changes: 2 additions & 1 deletion app/pyproject.toml
@@ -18,7 +18,8 @@ dependencies = [
"jinja2",
"art",
"SQLAlchemy",
"alembic"
"alembic",
"waitress"
]

[project.optional-dependencies]
2 changes: 2 additions & 0 deletions app/requirements.txt
@@ -83,6 +83,8 @@ urllib3==1.26.12
# via
# influxdb-client
# requests
waitress==2.1.2
# via myelectricaldata_import (pyproject.toml)
werkzeug==2.2.2
# via flask

3 changes: 1 addition & 2 deletions config.exemple.yaml
@@ -1,4 +1,4 @@
cycle: 14400 # 2H
cycle: 14400 # 4H
debug: true
wipe_influxdb: false
home_assistant:
@@ -34,7 +34,6 @@ myelectricaldata:
consumption_price_base: '0.145907'
consumption_price_hc: '0.124364'
consumption_price_hp: '0.164915'
enable: 'true'
offpeak_hours_0: 22H00-6H00 # LUNDI
offpeak_hours_1: 22H00-6H00 # MARDI
offpeak_hours_2: 22H00-6H00 # MERCREDI
11 changes: 7 additions & 4 deletions dependencies.py
@@ -6,7 +6,7 @@
from InquirerPy.base import Choice
from packaging.version import parse as parse_version

docker_compose = "docker-compose -f docker-compose.dev.yml"
docker_compose = "docker-compose -f dev/docker-compose.dev.yaml"


def cmd(cmd, path="./"):
@@ -56,17 +56,20 @@ def wizard():
app.LOG.error("Good bye!!")


def run(debug=False):
def run(dev=False, debug=False):
if debug:
app.LOG.title(["Boot DynAPI in debug mode", "CTRL + C to exit"])
else:
app.LOG.title(["Boot DynAPI", "CTRL + C to exit"])
mode_debug = ""
mode_dev = ""
if debug:
mode_debug = "-e DEBUG=true"
if dev:
mode_dev = "-e DEV=true"
command = (
f"{docker_compose} run -p 5000:5000 "
f"{mode_debug} myelectricaldata_import"
f"{docker_compose} run -p 5000:5000"
f"{mode_debug} {mode_dev} myelectricaldata_import"
)
app.LOG.log(command)
os.system(command)
14 changes: 7 additions & 7 deletions docker-compose.dev.yml → dev/docker-compose.dev.yaml
@@ -1,7 +1,7 @@
version: "3.9"
services:
myelectricaldata_import:
build: .
build: ./../
environment:
TZ: Europe/Paris
ports:
@@ -10,15 +10,15 @@ services:
- influxdb
- mosquitto
volumes:
- ./data:/data
- ./app:/app
- ./../data:/data
- ./../app:/app

influxdb:
image: influxdb:2.4
ports:
- '8086:8086'
volumes:
- ./data/influxdb:/var/lib/influxdb2
- ./../data/influxdb:/var/lib/influxdb2
healthcheck:
test: ["CMD", "curl", "-f", "http://influxdb:8086"]
interval: 25s
@@ -40,8 +40,8 @@ services:
volumes:
- /etc/localtime:/etc/localtime:ro
- /etc/timezone:/etc/timezone:ro
- ./data/mosquitto:/data
- ./mosquitto:/mosquitto/config/
- ./../data/mosquitto:/data
- ./../mosquitto:/mosquitto/config/
ports:
- 1883:1883
- 9001:9001
@@ -53,7 +53,7 @@
links:
- mosquitto
volumes:
- ./data/:/mqtt-explorer/config
- ./../data/:/mqtt-explorer/config
ports:
- 4000:4000

11 changes: 11 additions & 0 deletions docker-compose.yaml
@@ -0,0 +1,11 @@
version: "3.9"
services:
myelectricaldata:
image: m4dm4rtig4n/myelectricaldata:latest
restart: unless-stopped
volumes:
- ./data:/data
environment:
TZ: Europe/Paris
ports:
- '5000:5000'