From 56d498816fc524a0f541773bcd11fc4fa39ff522 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?= Date: Fri, 2 Dec 2022 22:57:28 +0100 Subject: [PATCH 01/25] Fix CD --- .github/workflows/build_push_docker.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build_push_docker.yaml b/.github/workflows/build_push_docker.yaml index 6eed64c4..f7849cd8 100755 --- a/.github/workflows/build_push_docker.yaml +++ b/.github/workflows/build_push_docker.yaml @@ -85,7 +85,7 @@ jobs: # LATEST-DEV - name: Build and push uses: docker/build-push-action@v2 - if: ${{ github.event.release.prerelease }} + if: "github.event.release.prerelease" with: context: . platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7 @@ -103,7 +103,7 @@ jobs: # # LATEST - name: Build and push uses: docker/build-push-action@v2 - if: ${{ !github.event.release.prerelease }} + if: "!github.event.release.prerelease" with: context: . platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7 From 1d70daf8e06db1f193df87b7dd20c0803bd4dc79 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?= Date: Fri, 2 Dec 2022 23:38:14 +0100 Subject: [PATCH 02/25] Fix CD --- .github/workflows/build_push_docker.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build_push_docker.yaml b/.github/workflows/build_push_docker.yaml index f7849cd8..105581a3 100755 --- a/.github/workflows/build_push_docker.yaml +++ b/.github/workflows/build_push_docker.yaml @@ -85,7 +85,7 @@ jobs: # LATEST-DEV - name: Build and push uses: docker/build-push-action@v2 - if: "github.event.release.prerelease" + if: github.ref == 'refs/heads/pre-release' with: context: . platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7 @@ -103,7 +103,7 @@ jobs: # # LATEST - name: Build and push uses: docker/build-push-action@v2 - if: "!github.event.release.prerelease" + if: github.ref != 'refs/heads/pre-release' with: context: . platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7 From ca20d5316efb22b6b8abf4cea2e36f1f1079c124 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?= Date: Fri, 2 Dec 2022 23:52:41 +0100 Subject: [PATCH 03/25] Fix CI --- .github/workflows/build_push_docker.yaml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build_push_docker.yaml b/.github/workflows/build_push_docker.yaml index 105581a3..28e13fa8 100755 --- a/.github/workflows/build_push_docker.yaml +++ b/.github/workflows/build_push_docker.yaml @@ -85,7 +85,7 @@ jobs: # LATEST-DEV - name: Build and push uses: docker/build-push-action@v2 - if: github.ref == 'refs/heads/pre-release' + if: ${{ github.event.release.prerelease }} with: context: . platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7 @@ -97,13 +97,14 @@ jobs: env: DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }} uses: Ilshidur/action-discord@master + if: ${{ github.event.release.prerelease }} with: args: '**MyElectricalData** : **latest-dev** image version is up to date' # ################################################################################################################## # # LATEST - name: Build and push uses: docker/build-push-action@v2 - if: github.ref != 'refs/heads/pre-release' + if: ${{ !github.event.release.prerelease }} with: context: . 
platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7 @@ -112,10 +113,10 @@ jobs: m4dm4rtig4n/enedisgateway2mqtt:latest m4dm4rtig4n/myelectricaldata:latest - name: Discord notification + if: ${{ !github.event.release.prerelease }} env: DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }} uses: Ilshidur/action-discord@master - if: github.event.release.prerelease == 'false' with: args: '**MyElectricalData** : **latest** image version is up to date' ################################################################################################################## From dca361ddc84e25c7d5683d5c3f4010cca6434f2e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?= Date: Sat, 3 Dec 2022 01:36:44 +0100 Subject: [PATCH 04/25] Add other method to import data in influxdb --- app/models/influxdb.py | 58 +++++++++++++++++++++++++++++++++++++----- config.exemple.yaml | 16 +++++++++--- 2 files changed, 64 insertions(+), 10 deletions(-) diff --git a/app/models/influxdb.py b/app/models/influxdb.py index a15241cb..c4190475 100644 --- a/app/models/influxdb.py +++ b/app/models/influxdb.py @@ -1,20 +1,22 @@ import datetime +import __main__ as app + import influxdb_client -from dateutil.relativedelta import relativedelta from dateutil.tz import tzlocal from influxdb_client.client.util import date_utils from influxdb_client.client.util.date_utils import DateHelper from influxdb_client.client.write_api import ASYNCHRONOUS, SYNCHRONOUS -import __main__ as app from dependencies import * -from models.log import Log class InfluxDB: - def __init__(self, hostname, port, token, org="myelectricaldata.fr", bucket="myelectricaldata", write_options="SYNCHRONOUS"): + def __init__(self, hostname, port, token, org="myelectricaldata.fr", bucket="myelectricaldata", method="batching", + write_options=None): + if write_options is None: + write_options = {} self.hostname = hostname self.port = port self.token = token @@ -23,7 +25,40 @@ def __init__(self, hostname, port, token, org="myelectricaldata.fr", bucket="mye self.influxdb = {} self.write_api = {} self.delete_api = {} - self.write_options = write_options + self.method = method + self.write_options = {} + if "batch_size" in write_options: + self.write_options["batch_size"] = write_options["batch_size"] + else: + self.write_options["batch_size"] = 1000 + if "flush_interval" in write_options: + self.write_options["flush_interval"] = write_options["flush_interval"] + else: + self.write_options["flush_interval"] = 1000 + if "jitter_interval" in write_options: + self.write_options["jitter_interval"] = write_options["jitter_interval"] + else: + self.write_options["jitter_interval"] = 0 + if "retry_interval" in write_options: + self.write_options["retry_interval"] = write_options["retry_interval"] + else: + self.write_options["retry_interval"] = 5000 + if "max_retry_time" in write_options: + self.write_options["max_retry_time"] = write_options["max_retry_time"] + else: + self.write_options["max_retry_time"] = "180_000" + if "max_retries" in write_options: + self.write_options["max_retries"] = write_options["max_retries"] + else: + self.write_options["max_retries"] = 5 + if "max_retry_delay" in write_options: + self.write_options["max_retry_delay"] = write_options["max_retry_delay"] + else: + self.write_options["max_retry_delay"] = 125_000 + if "exponential_base" in write_options: + self.write_options["exponential_base"] = write_options["exponential_base"] + else: + self.write_options["exponential_base"] = 2 self.connect() def connect(self): @@ -47,10 +82,19 @@ def 
connect(self): "https://github.com/m4dm4rtig4n/enedisgateway2mqtt#configuration-file" ]) - if self.write_options == "ASYNCHRONOUS": + if self.method.upper() == "ASYNCHRONOUS": self.write_api = self.influxdb.write_api(write_options=ASYNCHRONOUS) - else: + elif self.method.upper() == "SYNCHRONOUS": self.write_api = self.influxdb.write_api(write_options=SYNCHRONOUS) + else: + self.write_api = self.influxdb.write_api(write_options=influxdb_client.WriteOptions( + batch_size=self.write_options["batch_size"], + flush_interval=self.write_options["flush_interval"], + jitter_interval=self.write_options["jitter_interval"], + retry_interval=self.write_options["retry_interval"], + max_retries=self.write_options["max_retries"], + max_retry_delay=self.write_options["max_retry_delay"], + exponential_base=self.write_options["exponential_base"])) self.delete_api = self.influxdb.delete_api() def purge_influxdb(self): diff --git a/config.exemple.yaml b/config.exemple.yaml index 74e09e40..84e0ba86 100755 --- a/config.exemple.yaml +++ b/config.exemple.yaml @@ -13,9 +13,19 @@ influxdb: org: myelectricaldata bucket: myelectricaldata # ATTENTION, L'activation de l'importation asynchrone va réduire fortement le temps d'importation dans InfluxDB - # mais va augmenter drastiquement la consommation mémoire & CPU. - # À activer uniquement sur un hardware robuste (et pas sur un Raspberry ou autre par exemple). - asynchronous: 'false' + # mais va augmenter la consommation mémoire & CPU et donc à activer uniquement sur un hardware robuste. + method: batching # Mode disponible : synchronous / asynchronous / batching + # batching_options permet uniquement de configurer la methode `batching`. + # Pour plus d'information : https://github.com/influxdata/influxdb-client-python#batching + batching_options: + batch_size: 1000 + flush_interval: 1000 + jitter_interval: 0 + retry_interval: 5000 + max_retry_time: 180_000 + max_retries: 5 + max_retry_delay: 125_000 + exponential_base: 2 mqtt: enable: true hostname: mosquitto From c2e4d860352b7016d8d10056321f3209f5cea1da Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?= Date: Sat, 3 Dec 2022 10:50:36 +0100 Subject: [PATCH 05/25] add switch version in CD --- .github/workflows/build_push_docker.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/build_push_docker.yaml b/.github/workflows/build_push_docker.yaml index 28e13fa8..31e85a2f 100755 --- a/.github/workflows/build_push_docker.yaml +++ b/.github/workflows/build_push_docker.yaml @@ -60,6 +60,9 @@ jobs: uses: Ilshidur/action-discord@master with: args: '**MyElectricalData** : Version **${{ needs.informations.outputs.version }}** is in building state...' 
+ - name: Switch app/VERSION + run: | + echo "${{ needs.informations.outputs.version }}" > app/VERSION ################################################################################################################## # VERSION - name: Build and push ${{ needs.informations.outputs.version }} From 0080b3105d05f9a1644ee201fd7436258cd7346d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?= Date: Sat, 3 Dec 2022 11:19:19 +0100 Subject: [PATCH 06/25] Add version in gateway status head --- app/models/ajax.py | 5 ++++- app/templates/js/gateway_status.js | 8 ++++++-- dependencies.py | 6 ++++++ 3 files changed, 16 insertions(+), 3 deletions(-) diff --git a/app/models/ajax.py b/app/models/ajax.py index 1e78b054..516573b8 100755 --- a/app/models/ajax.py +++ b/app/models/ajax.py @@ -32,7 +32,10 @@ def __init__(self, usage_point_id=None): def gateway_status(self): app.LOG.title(f"[{self.usage_point_id}] Check de l'état de la passerelle.") - return Status().ping() + return { + "gateway": Status().ping(), + "version": get_version() + } def account_status(self): app.LOG.title(f"[{self.usage_point_id}] Check du statut du compte.") diff --git a/app/templates/js/gateway_status.js b/app/templates/js/gateway_status.js index 8d4546c8..8e312dce 100644 --- a/app/templates/js/gateway_status.js +++ b/app/templates/js/gateway_status.js @@ -67,9 +67,10 @@ if (document.URL.indexOf("/usage_point_id/") >= 0) { let gateway_state = "success.png" let information = "" let information_class = "stat_value" - if (data["status"] === false) { + version = data["version"] + if (data["gateway"]["status"] === false) { gateway_state = "error.png"; - information = data["information"] + information = data["gateway"]["information"] information_class = "stat_value_warning"; } content = "" + @@ -77,6 +78,9 @@ if (document.URL.indexOf("/usage_point_id/") >= 0) { "" + "" + "" + + "" + + "" + + "" + "" + "" + "
Statut de la passerelle
Version" + version + "
" + information + "
" diff --git a/dependencies.py b/dependencies.py index d7b70a90..55c0aac0 100644 --- a/dependencies.py +++ b/dependencies.py @@ -18,6 +18,10 @@ def cmd(cmd, path="./"): ) +def switch_version(version): + open("/app/VERSION", "w").write(version) + + def wizard(): app.LOG.title("Wizard Mode") skip = ["help"] @@ -171,4 +175,6 @@ def create_release(prerelease=False): os.system(f"gh release create -t {version} --generate-notes {prerelease_txt} {version}") app.LOG.log(" => Success") + switch_version() + app.LOG.log(f"Release {version} is online!!!!") From 0a578eaa8dbca5e9b78f56e729d60e29eb33e5e4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?= Date: Sat, 3 Dec 2022 11:20:14 +0100 Subject: [PATCH 07/25] minor fiw --- dependencies.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dependencies.py b/dependencies.py index 55c0aac0..fc010c3a 100644 --- a/dependencies.py +++ b/dependencies.py @@ -175,6 +175,6 @@ def create_release(prerelease=False): os.system(f"gh release create -t {version} --generate-notes {prerelease_txt} {version}") app.LOG.log(" => Success") - switch_version() + switch_version(version) app.LOG.log(f"Release {version} is online!!!!") From c49d19039a2b34df924404449a5fa94a6a525e41 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?= Date: Sat, 3 Dec 2022 11:20:37 +0100 Subject: [PATCH 08/25] Minor fix --- dependencies.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dependencies.py b/dependencies.py index fc010c3a..e174b06d 100644 --- a/dependencies.py +++ b/dependencies.py @@ -151,6 +151,8 @@ def create_release(prerelease=False): app.LOG.title_error("No problem!") return False + switch_version(version) + if rebuild_confirm: app.LOG.log(f"Delete release {version} on remote") os.system(f"gh release delete {version} -y") @@ -175,6 +177,5 @@ def create_release(prerelease=False): os.system(f"gh release create -t {version} --generate-notes {prerelease_txt} {version}") app.LOG.log(" => Success") - switch_version(version) app.LOG.log(f"Release {version} is online!!!!") From b30181c5acefbfd4539dc010c64013834b5192ba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?= Date: Sat, 3 Dec 2022 11:21:00 +0100 Subject: [PATCH 09/25] Minor fix --- dependencies.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dependencies.py b/dependencies.py index e174b06d..d59796a9 100644 --- a/dependencies.py +++ b/dependencies.py @@ -19,7 +19,7 @@ def cmd(cmd, path="./"): def switch_version(version): - open("/app/VERSION", "w").write(version) + open("app/VERSION", "w").write(version) def wizard(): From 4c34684ef38795e78b0537a4342c261b2283dc48 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?= Date: Sat, 3 Dec 2022 16:51:03 +0100 Subject: [PATCH 10/25] fix Invalid header value b'0.8.8-dev\n' --- app/VERSION | 2 +- app/models/ajax.py | 5 +---- app/models/query_status.py | 2 ++ app/templates/js/gateway_status.js | 4 ++-- 4 files changed, 6 insertions(+), 7 deletions(-) diff --git a/app/VERSION b/app/VERSION index ffca19c8..fac109b2 100755 --- a/app/VERSION +++ b/app/VERSION @@ -1 +1 @@ -0.8.0-dev \ No newline at end of file +0.8.8-dev \ No newline at end of file diff --git a/app/models/ajax.py b/app/models/ajax.py index 516573b8..1e78b054 100755 --- a/app/models/ajax.py +++ b/app/models/ajax.py @@ -32,10 +32,7 @@ def __init__(self, usage_point_id=None): def gateway_status(self): app.LOG.title(f"[{self.usage_point_id}] Check de l'état de la passerelle.") - return { - "gateway": Status().ping(), - 
"version": get_version() - } + return Status().ping() def account_status(self): app.LOG.title(f"[{self.usage_point_id}] Check du statut du compte.") diff --git a/app/models/query_status.py b/app/models/query_status.py index daaef5d6..30317260 100755 --- a/app/models/query_status.py +++ b/app/models/query_status.py @@ -5,6 +5,7 @@ from config import URL from dependencies import * +from models.config import get_version from models.log import Log from models.query import Query @@ -25,6 +26,7 @@ def ping(self): status = json.loads(response.text) for key, value in status.items(): self.log.log(f"{key}: {value}") + status["version"] = get_version() return status except LookupError: return { diff --git a/app/templates/js/gateway_status.js b/app/templates/js/gateway_status.js index 8e312dce..0c6e2238 100644 --- a/app/templates/js/gateway_status.js +++ b/app/templates/js/gateway_status.js @@ -68,9 +68,9 @@ if (document.URL.indexOf("/usage_point_id/") >= 0) { let information = "" let information_class = "stat_value" version = data["version"] - if (data["gateway"]["status"] === false) { + if (data["status"] === false) { gateway_state = "error.png"; - information = data["gateway"]["information"] + information = data["information"] information_class = "stat_value_warning"; } content = "" + From 2a01eb209c7f8123764c2f27b72b0a4a87ae0acd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?= Date: Sat, 3 Dec 2022 17:29:58 +0100 Subject: [PATCH 11/25] fix Invalid header value b'0.8.8-dev\n' --- app/models/config.py | 2 +- app/models/query.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/app/models/config.py b/app/models/config.py index fd8b7eff..d57e23f5 100755 --- a/app/models/config.py +++ b/app/models/config.py @@ -10,7 +10,7 @@ def get_version(): f = open("/app/VERSION", "r") version = f.read() f.close() - return version + return version.strip() class Config: diff --git a/app/models/query.py b/app/models/query.py index ca1b7862..95b9877f 100755 --- a/app/models/query.py +++ b/app/models/query.py @@ -3,7 +3,6 @@ import __main__ as app from dependencies import * -# TODO : Arrété les call quand le quota est atteint class Query(object): From 786fffc9209cbf4493ac5aead12038b36705dc2b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?= Date: Sun, 4 Dec 2022 23:54:26 +0100 Subject: [PATCH 12/25] fix unknow value in config.yaml, force float to export value in influxdb --- app/models/database.py | 262 +++++++++++++++++++++++----------- app/models/export_influxdb.py | 16 +-- app/models/query_detail.py | 2 +- 3 files changed, 187 insertions(+), 93 deletions(-) diff --git a/app/models/database.py b/app/models/database.py index dd4504b0..fa80eec3 100644 --- a/app/models/database.py +++ b/app/models/database.py @@ -203,104 +203,194 @@ def set_usage_point(self, usage_point_id, data): .where(UsagePoints.usage_point_id == usage_point_id) ) usage_points = self.session.scalars(query).one_or_none() + if "enable" in data: + enable = data["enable"] + else: + enable = True + if "name" in data: + name = data["name"] + else: + name = "" + if "cache" in data: + cache = data["cache"] + else: + cache = True + if "consumption" in data: + consumption = data["consumption"] + else: + consumption = True + if "consumption_detail" in data: + consumption_detail = data["consumption_detail"] + else: + consumption_detail = True + if "production" in data: + production = data["production"] + else: + production = False + if "production_detail" in data: + production_detail = 
data["production_detail"] + else: + production_detail = False + if "production_price" in data: + production_price = data["production_price"] + else: + production_price = 0 + if "consumption_price_base" in data: + consumption_price_base = data["consumption_price_base"] + else: + consumption_price_base = 0 + if "consumption_price_hc" in data: + consumption_price_hc = data["consumption_price_hc"] + else: + consumption_price_hc = 0 + if "consumption_price_hp" in data: + consumption_price_hp = data["consumption_price_hp"] + else: + consumption_price_hp = 0 + if "offpeak_hours_0" in data: + offpeak_hours_0 = data["offpeak_hours_0"] + else: + offpeak_hours_0 = "" + if "offpeak_hours_1" in data: + offpeak_hours_1 = data["offpeak_hours_1"] + else: + offpeak_hours_1 = "" + if "offpeak_hours_2" in data: + offpeak_hours_2 = data["offpeak_hours_2"] + else: + offpeak_hours_2 = "" + if "offpeak_hours_3" in data: + offpeak_hours_3 = data["offpeak_hours_3"] + else: + offpeak_hours_3 = "" + if "offpeak_hours_4" in data: + offpeak_hours_4 = data["offpeak_hours_4"] + else: + offpeak_hours_4 = "" + if "offpeak_hours_5" in data: + offpeak_hours_5 = data["offpeak_hours_5"] + else: + offpeak_hours_5 = "" + if "offpeak_hours_6" in data: + offpeak_hours_6 = data["offpeak_hours_6"] + else: + offpeak_hours_6 = "" + if "plan" in data: + plan = data["plan"] + else: + plan = "BASE" + if "refresh_addresse" in data: + refresh_addresse = data["refresh_addresse"] + else: + refresh_addresse = False + if "refresh_contract" in data: + refresh_contract = data["refresh_contract"] + else: + refresh_contract = False + if "token" in data: + token = data["token"] + else: + token = "" progress = 0 if "progress" in data: progress = data["progress"] progress_status = "" if "progress_status" in data: - progress = data["progress_status"] - # progress_status = data["progress_status"] if "progress_status" in data else progress_status = 0 - if usage_points is not None: - usage_points.name = data["name"] - usage_points.cache = str2bool(data["cache"]) - usage_points.consumption = str2bool(data["consumption"]) - usage_points.consumption_detail = str2bool(data["consumption_detail"]) - usage_points.production = str2bool(data["production"]) - usage_points.production_detail = str2bool(data["production_detail"]) - usage_points.production_price = data["production_price"] - usage_points.consumption_price_base = data["consumption_price_base"] - usage_points.consumption_price_hc = data["consumption_price_hc"] - usage_points.consumption_price_hp = data["consumption_price_hp"] - usage_points.offpeak_hours_0 = data["offpeak_hours_0"] - usage_points.offpeak_hours_1 = data["offpeak_hours_1"] - usage_points.offpeak_hours_2 = data["offpeak_hours_2"] - usage_points.offpeak_hours_3 = data["offpeak_hours_3"] - usage_points.offpeak_hours_4 = data["offpeak_hours_4"] - usage_points.offpeak_hours_5 = data["offpeak_hours_5"] - usage_points.offpeak_hours_6 = data["offpeak_hours_6"] - usage_points.plan = data["plan"] - usage_points.refresh_addresse = str2bool(data["refresh_addresse"]) - usage_points.refresh_contract = str2bool(data["refresh_contract"]) - usage_points.token = data["token"] - usage_points.progress = progress - usage_points.progress_status = progress_status - usage_points.enable = str2bool(data["enable"]) - if "consumption_max_date" in data: - if not data["consumption_max_date"] or data["consumption_max_date"] is None: - usage_points.consumption_max_date = None - else: - consumption_max_date = data["consumption_max_date"] - if 
isinstance(consumption_max_date, datetime): - usage_points.consumption_max_date = consumption_max_date - else: - usage_points.consumption_max_date = datetime.strptime(consumption_max_date, "%Y-%m-%d") + progress_status = data["progress_status"] + consumption_max_date = None + if "consumption_max_date" in data: + if not data["consumption_max_date"] or data["consumption_max_date"] is None: + consumption_max_date = None + else: + consumption_max_date = data["consumption_max_date"] + if not isinstance(consumption_max_date, datetime): + consumption_max_date = datetime.strptime(consumption_max_date, "%Y-%m-%d") + consumption_detail_max_date = None + if "consumption_detail_max_date" in data: if "consumption_detail_max_date" in data: - if "consumption_detail_max_date" in data: - if not data["consumption_detail_max_date"] or data["consumption_detail_max_date"] is None: - usage_points.consumption_detail_max_date = None - else: - consumption_detail_max_date = data["consumption_detail_max_date"] - if isinstance(consumption_detail_max_date, datetime): - usage_points.consumption_detail_max_date = consumption_detail_max_date - else: - usage_points.consumption_detail_max_date = datetime.strptime(consumption_detail_max_date, "%Y-%m-%d") - if "production_max_date" in data: - if not data["production_max_date"] or data["production_max_date"] is None: - usage_points.production_max_date = None + if not data["consumption_detail_max_date"] or data["consumption_detail_max_date"] is None: + consumption_detail_max_date = None else: - production_max_date = data["production_max_date"] - if isinstance(production_max_date, datetime): - usage_points.production_max_date = production_max_date - else: - usage_points.production_max_date = datetime.strptime(production_max_date, "%Y-%m-%d") - if "production_detail_max_date" in data: - if not data["production_detail_max_date"] or data["production_detail_max_date"] is None: - usage_points.production_detail_max_date = None + consumption_detail_max_date = data["consumption_detail_max_date"] + if not isinstance(consumption_detail_max_date, datetime): + consumption_detail_max_date = datetime.strptime(consumption_detail_max_date,"%Y-%m-%d") + production_max_date = None + if "production_max_date" in data: + if not data["production_max_date"] or data["production_max_date"] is None: + production_max_date = None + else: + production_max_date = data["production_max_date"] + if not isinstance(production_max_date, datetime): + production_max_date = datetime.strptime(production_max_date, "%Y-%m-%d") + production_detail_max_date = None + if "production_detail_max_date" in data: + if not data["production_detail_max_date"] or data["production_detail_max_date"] is None: + production_detail_max_date = None + else: + production_detail_max_date = data["production_detail_max_date"] + if isinstance(production_detail_max_date, datetime): + production_detail_max_date = production_detail_max_date else: - production_detail_max_date = data["production_detail_max_date"] - if isinstance(production_detail_max_date, datetime): - usage_points.production_detail_max_date = production_detail_max_date - else: - usage_points.production_detail_max_date = datetime.strptime(production_detail_max_date, "%Y-%m-%d") + production_detail_max_date = datetime.strptime(production_detail_max_date, "%Y-%m-%d") + if usage_points is not None: + usage_points.enable = str2bool(enable) + usage_points.name = name + usage_points.cache = str2bool(cache) + usage_points.consumption = str2bool(consumption) + 
usage_points.consumption_detail = str2bool(consumption_detail) + usage_points.production = str2bool(production) + usage_points.production_detail = str2bool(production_detail) + usage_points.production_price = production_price + usage_points.consumption_price_base = consumption_price_base + usage_points.consumption_price_hc = consumption_price_hc + usage_points.consumption_price_hp = consumption_price_hp + usage_points.offpeak_hours_0 = offpeak_hours_0 + usage_points.offpeak_hours_1 = offpeak_hours_1 + usage_points.offpeak_hours_2 = offpeak_hours_2 + usage_points.offpeak_hours_3 = offpeak_hours_3 + usage_points.offpeak_hours_4 = offpeak_hours_4 + usage_points.offpeak_hours_5 = offpeak_hours_5 + usage_points.offpeak_hours_6 = offpeak_hours_6 + usage_points.offpeak_hours_6 = offpeak_hours_6 + usage_points.plan = plan + usage_points.refresh_addresse = str2bool(refresh_addresse) + usage_points.refresh_contract = str2bool(refresh_contract) + usage_points.token = token + usage_points.progress = progress + usage_points.progress_status = progress_status + usage_points.consumption_max_date = consumption_max_date + usage_points.consumption_detail_max_date = consumption_detail_max_date + usage_points.production_max_date = production_max_date + usage_points.production_detail_max_date = production_detail_max_date else: self.session.add( UsagePoints( usage_point_id=usage_point_id, # TODO : Erreur si name est vide - name=data["name"], - cache=str2bool(data["cache"]), - consumption=str2bool(data["consumption"]), - consumption_detail=str2bool(data["consumption_detail"]), - production=str2bool(data["production"]), - production_detail=str2bool(data["production_detail"]), - production_price=data["production_price"], - consumption_price_base=data["consumption_price_base"], - consumption_price_hc=data["consumption_price_hc"], - consumption_price_hp=data["consumption_price_hp"], - offpeak_hours_0=data["offpeak_hours_0"], - offpeak_hours_1=data["offpeak_hours_1"], - offpeak_hours_2=data["offpeak_hours_2"], - offpeak_hours_3=data["offpeak_hours_3"], - offpeak_hours_4=data["offpeak_hours_4"], - offpeak_hours_5=data["offpeak_hours_5"], - offpeak_hours_6=data["offpeak_hours_6"], - plan=data["plan"], - refresh_addresse=str2bool(data["refresh_addresse"]), - refresh_contract=str2bool(data["refresh_contract"]), - token=data["token"], + name=name, + cache=str2bool(cache), + consumption=str2bool(consumption), + consumption_detail=str2bool(consumption_detail), + production=str2bool(production), + production_detail=str2bool(production_detail), + production_price=production_price, + consumption_price_base=consumption_price_base, + consumption_price_hc=consumption_price_hc, + consumption_price_hp=consumption_price_hp, + offpeak_hours_0=offpeak_hours_0, + offpeak_hours_1=offpeak_hours_1, + offpeak_hours_2=offpeak_hours_2, + offpeak_hours_3=offpeak_hours_3, + offpeak_hours_4=offpeak_hours_4, + offpeak_hours_5=offpeak_hours_5, + offpeak_hours_6=offpeak_hours_6, + plan=plan, + refresh_addresse=str2bool(refresh_addresse), + refresh_contract=str2bool(refresh_contract), + token=token, progress=progress, progress_status=progress_status, - enable=str2bool(data["enable"]), + enable=enable, consumption_max_date=None, consumption_detail_max_date=None, production_max_date=None, @@ -804,8 +894,11 @@ def get_detail(self, usage_point_id, begin, end, measurement_direction="consumpt time_delta = abs(int((begin - end).total_seconds() / 60)) total_internal = 0 for query in query_result: + # print(query) total_internal = total_internal + 
query.interval - if abs(total_internal - time_delta) > 60: + total_time = abs(total_internal - time_delta) + if total_time > 300: + LOG.log(f" - {total_time}m absente du relevé.") result["missing_data"] = True else: for query in query_result: @@ -849,6 +942,7 @@ def insert_detail_bulk(self, data, mesure_type="consumption"): self.session.execute( table.__table__.delete().filter(ConsumptionDetail.date.between(begin, end)) ) + print(data) self.session.add_all(data) def insert_detail(self, usage_point_id, date, value, interval, measure_type, blacklist=0, fail_count=0, diff --git a/app/models/export_influxdb.py b/app/models/export_influxdb.py index 7cace6b0..9dca5395 100755 --- a/app/models/export_influxdb.py +++ b/app/models/export_influxdb.py @@ -40,9 +40,9 @@ def daily(self, price, measurement_direction="consumption"): "month": daily.date.strftime("%m"), }, fields={ - "Wh": watt, - "kWh": forceRound(kwatt, 2), - "price": forceRound(euro, 2) + "Wh": float(watt), + "kWh": float(forceRound(kwatt, 2)), + "price": float(forceRound(euro, 2)) }, ) current_month = date.strftime("%m") @@ -73,11 +73,11 @@ def detail(self, price_hp, price_hc=0, measurement_direction="consumption_detail "measure_type": detail.measure_type, }, fields={ - "W": watt, - "kW": forceRound(kwatt, 2), - "Wh": watth, - "kWh": forceRound(kwatth, 2), - "price": forceRound(euro, 2) + "W": float(watt), + "kW": float(forceRound(kwatt, 2)), + "Wh": float(watth), + "kWh": float(forceRound(kwatth, 2)), + "price": float(forceRound(euro, 2)) }, ) current_month = date.strftime("%m") diff --git a/app/models/query_detail.py b/app/models/query_detail.py index 0e801d38..aa671e10 100644 --- a/app/models/query_detail.py +++ b/app/models/query_detail.py @@ -67,7 +67,7 @@ def run(self, begin, end): endpoint += "/cache" try: current_data = self.db.get_detail(self.usage_point_id, begin, end, self.measure_type) - current_week = datetime.datetime.now() - datetime.timedelta(days=self.max_detail + 1) + # current_week = datetime.datetime.now() - datetime.timedelta(days=self.max_detail + 1) # last_week = False # if current_week <= begin: # last_week = True From f970c8f8274b08970cca7a64423e8d5b2b979737 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?= Date: Mon, 5 Dec 2022 22:36:47 +0100 Subject: [PATCH 13/25] doc --- README.md | 117 +++++++++++++++++++++------------- app/models/database.py | 1 - app/models/export_influxdb.py | 10 +-- app/models/influxdb.py | 48 ++++++++++---- config.exemple.yaml | 2 +- 5 files changed, 112 insertions(+), 66 deletions(-) diff --git a/README.md b/README.md index 1e6d3816..a5050f01 100755 --- a/README.md +++ b/README.md @@ -100,33 +100,51 @@ Un template est disponible sur le repo [config.yaml](https://github.com/m4dm4rti | discovery_prefix | Préfixe configuré dans Home Assistant pour l'auto-discovery | homeassistant | ### influxdb -| Champs | Information | Défaut | -|----------|----------------------------------------------------|------------------| -| enable | Activation ou non des exports vers InfluxDB | False | -| hostname | Addresse IP ou domaine vers votre serveur InfluxDB | influxdb | -| port | Port du serveur InfluxDB | 8086 | -| token | Token en V2 & USERNAME:PASSWORD pour la V1 | myelectricaldata | -| org | Nom de l'organisation V2, en V1 mettre "-" | myelectricaldata | -| bucket | Nom du bucket en V2 et "DATABASE/RETENTION" en V1 | myelectricaldata | - -#### v1.X : +| Champs | Information | Défaut | 
+|------------------|---------------------------------------------------------------|------------------| +| enable | Activation ou non des exports vers InfluxDB | False | +| hostname | Adresse IP ou domaine vers votre serveur InfluxDB | influxdb | +| port | Port du serveur InfluxDB | 8086 | +| token | Token en V2 & USERNAME:PASSWORD pour la V1 | myelectricaldata | +| org | Nom de l'organisation V2, en V1 mettre "-" | myelectricaldata | +| bucket | Nom du bucket en V2 et "DATABASE/RETENTION" en V1 | myelectricaldata | +| method | synchronous / asynchronous / batching | synchronous | +| batching_options | https://github.com/influxdata/influxdb-client-python#batching | | + +#### **method & batching_options :** + +Ces 2 propriétés vont vous permettre de jouer sur la rapidité d'importation dans InfluxDB. + +> ATTENTION, en fonction de la configuration, vous risquez de surcharger votre serveur. + +- **synchronous** : Mode classique assez lent sur l'importation, mais évite de surcharger le CPU & la mémoire. +- **asynchronous** : Mode "bourrin", la totalité des valeurs sera envoyée en même temps et donc consommera énormément de ressources le temps du traitement. +- **batching** : Mode custom qui va vous permettre de jouer sur divers paramètres. À utiliser si le mode synchronous est encore trop gourmand. Plus d'informations disponibles [ici](https://github.com/influxdata/influxdb-client-python#batching). + +#### Configuration par version : + +##### v1.X : ```yaml influxdb: - host: influxdb - port: 8086 - token: USERNAME:PASSWORD - org: "-" - bucket: "DATABASE/RETENTION" + enable: 'true' + hostname: influxdb + port: 8086 + token: USERNAME:PASSWORD + org: "-" + bucket: "DATABASE/RETENTION" + method: asynchronous ``` -#### v2.X : +##### v2.X : ```yaml influxdb: - host: influxdb - port: 8086 - token: MY_TOKEN - org: MY_ORG - bucket: MY_BUCKET + enable: 'true' + hostname: influxdb + port: 8086 + token: MY_TOKEN + org: MY_ORG + bucket: MY_BUCKET + method: batching ``` ### mqtt @@ -147,31 +165,38 @@ influxdb: Dictionnaire avec comme clef votre Point de Livraison (entre double quote) contenant toute sa configuration.
-| Champs | Information | Défaut | -|------------------------|--------------------------------------------------------------------------------|--------| -| token | Activation ou non des exports vers MQTT | "" | -| name | Alias de votre point livraison pour faciliter la navigation | "" | -| addresses | Récupération des coordonnées du point de livraison | False | -| cache | Activation du cache sur la passerelle | True | -| consumption | Activation de la collecte de consommation journalière | True | -| consumption_detail | Activation de la collecte de consommation horaire | True | -| consumption_price_base | Prix d'achat du kW sans forfait HP/HC | 0 | -| consumption_price_hc | Prix d'achat du kW en Heure Creuse | 0 | -| consumption_price_hp | Prix d'achat du kW en Heure Pleine | 0 | -| enable | Activation du PDL | True | -| offpeak_hours_0 | Heure creuse du Lundi | "" | -| offpeak_hours_1 | Heure creuse du Mardi | "" | -| offpeak_hours_2 | Heure creuse du Mercredi | "" | -| offpeak_hours_3 | Heure creuse du Jeudi | "" | -| offpeak_hours_4 | Heure creuse du Vendredi | "" | -| offpeak_hours_5 | Heure creuse du Samedi | "" | -| offpeak_hours_6 | Heure creuse du Dimanche | "" | -| plan | Votre type de plan BASE ou HP/HC | BASE | -| production | Activation de la collecte de production journalière | False | -| production_detail | Activation de la collecte de production horaire | False | -| production_price | Prix de revente à Enedis (Inutile pour l'instant) | 0 | -| refresh_addresse | Permet de forcer un rafraichissement des informations "postale" dans le cache | False | -| refresh_contract | Permet de forcer un rafraichissement des informations du contrat dans le cache | False | +| Champs | Information | Défaut | +|-----------------------------|--------------------------------------------------------------------------------|--------| +| token | Activation ou non des exports vers MQTT | "" | +| name | Alias de votre point livraison pour faciliter la navigation | "" | +| addresses | Récupération des coordonnées du point de livraison | False | +| cache | Activation du cache sur la passerelle | True | +| consumption | Activation de la collecte de consommation journalière | True | +| consumption_detail | Activation de la collecte de consommation horaire | True | +| consumption_max_date | Permet de la date boutoir de récupération de la consommation journalière | "" | +| consumption_detail_max_date | Permet de la date boutoir de récupération de la consommation détaillée | "" | +| consumption_price_base | Prix d'achat du kW sans forfait HP/HC | 0 | +| consumption_price_hc | Prix d'achat du kW en Heure Creuse | 0 | +| consumption_price_hp | Prix d'achat du kW en Heure Pleine | 0 | +| enable | Activation du PDL | True | +| offpeak_hours_0 | Heure creuse du Lundi | "" | +| offpeak_hours_1 | Heure creuse du Mardi | "" | +| offpeak_hours_2 | Heure creuse du Mercredi | "" | +| offpeak_hours_3 | Heure creuse du Jeudi | "" | +| offpeak_hours_4 | Heure creuse du Vendredi | "" | +| offpeak_hours_5 | Heure creuse du Samedi | "" | +| offpeak_hours_6 | Heure creuse du Dimanche | "" | +| plan | Votre type de plan BASE ou HP/HC | BASE | +| production | Activation de la collecte de production journalière | False | +| production_detail | Activation de la collecte de production horaire | False | +| production_price | Prix de revente à Enedis (Inutile pour l'instant) | 0 | +| production_max_date | Permet de la date boutoir de récupération de la production journalière | "" | +| production_detail_max_date | Permet de la date 
boutoir de récupération de la production détaillée | "" | +| refresh_addresse | Permet de forcer un rafraichissement des informations "postale" dans le cache | False | +| refresh_contract | Permet de forcer un rafraichissement des informations du contrat dans le cache | False | + +> Si les valeurs **consumption_max_date**, **consumption_max_detail_date**, **production_max_date**, **production_detail_max_date** +> ne sont pas défini, ce sera la date de début de contrat remonté par Enedis qui sera prise en compte. #### offpeak_hours diff --git a/app/models/database.py b/app/models/database.py index fa80eec3..d587fb70 100644 --- a/app/models/database.py +++ b/app/models/database.py @@ -942,7 +942,6 @@ def insert_detail_bulk(self, data, mesure_type="consumption"): self.session.execute( table.__table__.delete().filter(ConsumptionDetail.date.between(begin, end)) ) - print(data) self.session.add_all(data) def insert_detail(self, usage_point_id, date, value, interval, measure_type, blacklist=0, fail_count=0, diff --git a/app/models/export_influxdb.py b/app/models/export_influxdb.py index 9dca5395..32f5fc96 100755 --- a/app/models/export_influxdb.py +++ b/app/models/export_influxdb.py @@ -41,8 +41,8 @@ def daily(self, price, measurement_direction="consumption"): }, fields={ "Wh": float(watt), - "kWh": float(forceRound(kwatt, 2)), - "price": float(forceRound(euro, 2)) + "kWh": float(forceRound(kwatt, 5)), + "price": float(forceRound(euro, 5)) }, ) current_month = date.strftime("%m") @@ -74,10 +74,10 @@ def detail(self, price_hp, price_hc=0, measurement_direction="consumption_detail }, fields={ "W": float(watt), - "kW": float(forceRound(kwatt, 2)), + "kW": float(forceRound(kwatt, 5)), "Wh": float(watth), - "kWh": float(forceRound(kwatth, 2)), - "price": float(forceRound(euro, 2)) + "kWh": float(forceRound(kwatth, 5)), + "price": float(forceRound(euro, 5)) }, ) current_month = date.strftime("%m") diff --git a/app/models/influxdb.py b/app/models/influxdb.py index c4190475..89bcf4a1 100644 --- a/app/models/influxdb.py +++ b/app/models/influxdb.py @@ -25,6 +25,7 @@ def __init__(self, hostname, port, token, org="myelectricaldata.fr", bucket="mye self.influxdb = {} self.write_api = {} self.delete_api = {} + self.buckets_api = {} self.method = method self.write_options = {} if "batch_size" in write_options: @@ -60,6 +61,16 @@ def __init__(self, hostname, port, token, org="myelectricaldata.fr", bucket="mye else: self.write_options["exponential_base"] = 2 self.connect() + self.retention = 0 + self.max_retention = None + self.get_list_retention_policies() + if self.retention < 94608000: + day = int(self.retention / 60 / 60 / 24) + app.LOG.log([ + f" => ATTENTION, l'InfluxDB est configuré avec une durée de retention de {day} jours.", + f" Toutes les données supérieurs à {day} jours ne seront jamais inséré dans celui-ci." 
+ ]) + def connect(self): app.LOG.separator() @@ -96,6 +107,8 @@ def connect(self): max_retry_delay=self.write_options["max_retry_delay"], exponential_base=self.write_options["exponential_base"])) self.delete_api = self.influxdb.delete_api() + self.buckets_api = self.influxdb.buckets_api() + self.get_list_retention_policies() def purge_influxdb(self): app.LOG.separator_warning() @@ -114,21 +127,30 @@ def purge_influxdb(self): # app.CONFIG.set("wipe_influxdb", False) app.LOG.warning(f" => Data reset") + def get_list_retention_policies(self): + buckets = self.buckets_api.find_buckets().buckets + for bucket in buckets: + if bucket.name == self.bucket: + self.retention = bucket.retention_rules[0].every_seconds + self.max_retention = datetime.datetime.now() - datetime.timedelta(seconds=self.retention) + def write(self, tags, date=None, fields=None, measurement="log"): + date_max = datetime.datetime.now() - datetime.timedelta(seconds=self.retention) if date is None: date_object = datetime.datetime.now() else: date_object = date - record = { - "measurement": measurement, - "time": date_object, - "tags": {}, - "fields": {} - } - if tags: - for key, value in tags.items(): - record["tags"][key] = value - if fields is not None: - for key, value in fields.items(): - record["fields"][key] = value - self.write_api.write(bucket=self.bucket, org=self.org, record=record) + if date.replace(tzinfo=None) > date_max.replace(tzinfo=None): + record = { + "measurement": measurement, + "time": date_object, + "tags": {}, + "fields": {} + } + if tags: + for key, value in tags.items(): + record["tags"][key] = value + if fields is not None: + for key, value in fields.items(): + record["fields"][key] = value + self.write_api.write(bucket=self.bucket, org=self.org, record=record) \ No newline at end of file diff --git a/config.exemple.yaml b/config.exemple.yaml index 84e0ba86..bb5770d4 100755 --- a/config.exemple.yaml +++ b/config.exemple.yaml @@ -14,7 +14,7 @@ influxdb: bucket: myelectricaldata # ATTENTION, L'activation de l'importation asynchrone va réduire fortement le temps d'importation dans InfluxDB # mais va augmenter la consommation mémoire & CPU et donc à activer uniquement sur un hardware robuste. - method: batching # Mode disponible : synchronous / asynchronous / batching + method: synchronous # Mode disponible : synchronous / asynchronous / batching # batching_options permet uniquement de configurer la methode `batching`. 
# Pour plus d'information : https://github.com/influxdata/influxdb-client-python#batching batching_options: batch_size: 1000 flush_interval: 1000 jitter_interval: 0 retry_interval: 5000 max_retry_time: 180_000 max_retries: 5 max_retry_delay: 125_000 exponential_base: 2 mqtt: enable: true hostname: mosquitto From 5b538e9f2629d86432a7ab8e63b6e73bd5036a9c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?= Date: Mon, 5 Dec 2022 23:15:06 +0100 Subject: [PATCH 14/25] remove 1 day delta --- app/models/query_daily.py | 5 ++++- app/models/query_detail.py | 3 ++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/app/models/query_daily.py b/app/models/query_daily.py index 6a5fd58d..6c33d330 100644 --- a/app/models/query_daily.py +++ b/app/models/query_daily.py @@ -61,6 +61,8 @@ def run(self, begin, end): app.LOG.log(f" => Chargement des données depuis MyElectricalData {begin_str} => {end_str}") data = Query(endpoint=f"{self.url}/{endpoint}/", headers=self.headers).get() blacklist = 0 + from pprint import pprint + pprint(data.text) if hasattr(data, "status_code"): if data.status_code == 200: meter_reading = json.loads(data.text)['meter_reading'] @@ -105,7 +107,8 @@ def run(self, begin, end): def get(self): # REMOVE TODAY - end = datetime.datetime.combine((datetime.datetime.now() - datetime.timedelta(days=1)), datetime.datetime.max.time()) + # end = datetime.datetime.combine((datetime.datetime.now() - datetime.timedelta(days=1)), datetime.datetime.max.time()) + end = datetime.datetime.combine((datetime.datetime.now()), datetime.datetime.max.time()) begin = datetime.datetime.combine(end - datetime.timedelta(days=self.max_daily), datetime.datetime.min.time()) finish = True diff --git a/app/models/query_detail.py b/app/models/query_detail.py index aa671e10..6750b2c0 100644 --- a/app/models/query_detail.py +++ b/app/models/query_detail.py @@ -141,7 +141,8 @@ def run(self, begin, end): def get(self): # REMOVE TODAY - end = datetime.datetime.combine((datetime.datetime.now() - datetime.timedelta(days=1)), datetime.datetime.max.time()) + # end = datetime.datetime.combine((datetime.datetime.now() - datetime.timedelta(days=1)), datetime.datetime.max.time()) + end = datetime.datetime.combine((datetime.datetime.now()), datetime.datetime.max.time()) begin = datetime.datetime.combine(end - datetime.timedelta(days=self.max_detail), datetime.datetime.min.time()) finish = True result = [] From f6e28e76f2b7ec16a73e626e9318ff976b25af02 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?= Date: Tue, 6 Dec 2022 00:28:44 +0100 Subject: [PATCH 15/25] fix influxdb method --- README.md | 5 ++++- app/main.py | 11 ++++++++--- app/models/database.py | 1 - app/models/influxdb.py | 8 +++++--- app/models/query_daily.py | 2 -- app/models/query_detail.py | 16 ++++++++++++---- 6 files changed, 29 insertions(+), 14 deletions(-) diff --git a/README.md b/README.md index a5050f01..2abed0ed 100755 --- a/README.md +++ b/README.md @@ -100,6 +100,9 @@ Un template est disponible sur le repo [config.yaml](https://github.com/m4dm4rti | discovery_prefix | Préfixe configuré dans Home Assistant pour l'auto-discovery | homeassistant | ### influxdb + +> Version supportée minimum 1.8 + | Champs | Information | Défaut | diff --git a/app/main.py b/app/main.py index 2b435617..4b845917 100755 --- a/app/main.py +++ b/app/main.py @@ -67,10 +67,14 @@
INFLUXB_ENABLE = False INFLUXDB = None -if "asynchronous" in INFLUXDB_CONFIG and str2bool(INFLUXDB_CONFIG["asynchronous"]): - write_options = "ASYNCHRONOUS" +if "method" in INFLUXDB_CONFIG: + method = INFLUXDB_CONFIG["method"] else: - write_options = "SYNCHRONOUS" + method = "SYNCHRONOUS" + +write_options = [] +if "batching_options" in INFLUXDB_CONFIG: + write_options = INFLUXDB_CONFIG["batching_options"] if INFLUXDB_CONFIG and "enable" in INFLUXDB_CONFIG and INFLUXDB_CONFIG["enable"]: INFLUXB_ENABLE = True @@ -80,6 +84,7 @@ token=INFLUXDB_CONFIG["token"], org=INFLUXDB_CONFIG["org"], bucket=INFLUXDB_CONFIG["bucket"], + method=method, write_options=write_options ) if CONFIG.get("wipe_influxdb"): diff --git a/app/models/database.py b/app/models/database.py index d587fb70..15247fa1 100644 --- a/app/models/database.py +++ b/app/models/database.py @@ -366,7 +366,6 @@ def set_usage_point(self, usage_point_id, data): self.session.add( UsagePoints( usage_point_id=usage_point_id, - # TODO : Erreur si name est vide name=name, cache=str2bool(cache), consumption=str2bool(consumption), diff --git a/app/models/influxdb.py b/app/models/influxdb.py index 89bcf4a1..893abcc1 100644 --- a/app/models/influxdb.py +++ b/app/models/influxdb.py @@ -13,7 +13,7 @@ class InfluxDB: - def __init__(self, hostname, port, token, org="myelectricaldata.fr", bucket="myelectricaldata", method="batching", + def __init__(self, hostname, port, token, org="myelectricaldata.fr", bucket="myelectricaldata", method="SYNCHRONOUS", write_options=None): if write_options is None: write_options = {} @@ -66,8 +66,8 @@ def __init__(self, hostname, port, token, org="myelectricaldata.fr", bucket="mye self.get_list_retention_policies() if self.retention < 94608000: day = int(self.retention / 60 / 60 / 24) - app.LOG.log([ - f" => ATTENTION, l'InfluxDB est configuré avec une durée de retention de {day} jours.", + app.LOG.warning([ + f" ATTENTION, l'InfluxDB est configuré avec une durée de retention de {day} jours.", f" Toutes les données supérieurs à {day} jours ne seront jamais inséré dans celui-ci." 
]) @@ -93,7 +93,9 @@ def connect(self): "https://github.com/m4dm4rtig4n/enedisgateway2mqtt#configuration-file" ]) + app.LOG.log(f" => Methode d'importation : {self.method.upper()}") if self.method.upper() == "ASYNCHRONOUS": + app.LOG.warning(" ATTENTION, le mode d'importation \"ASYNCHRONOUS\" est très consommateur de ressources système.") self.write_api = self.influxdb.write_api(write_options=ASYNCHRONOUS) elif self.method.upper() == "SYNCHRONOUS": self.write_api = self.influxdb.write_api(write_options=SYNCHRONOUS) diff --git a/app/models/query_daily.py b/app/models/query_daily.py index 6c33d330..daeb5f2f 100644 --- a/app/models/query_daily.py +++ b/app/models/query_daily.py @@ -61,8 +61,6 @@ def run(self, begin, end): app.LOG.log(f" => Chargement des données depuis MyElectricalData {begin_str} => {end_str}") data = Query(endpoint=f"{self.url}/{endpoint}/", headers=self.headers).get() blacklist = 0 - from pprint import pprint - pprint(data.text) if hasattr(data, "status_code"): if data.status_code == 200: meter_reading = json.loads(data.text)['meter_reading'] diff --git a/app/models/query_detail.py b/app/models/query_detail.py index 6750b2c0..e30d8ca7 100644 --- a/app/models/query_detail.py +++ b/app/models/query_detail.py @@ -112,15 +112,23 @@ def run(self, begin, end): result = is_between(dateHourMinute, (offpeak_begin, offpeak_stop)) if result: measure_type = "HC" - bulk_insert.append(self.detail_table( + self.db.insert_detail( usage_point_id=self.usage_point_id, date=date, value=value, interval=interval, measure_type=measure_type, - blacklist=0 - )) - self.db.insert_detail_bulk(bulk_insert, self.measure_type) + blacklist=0, + ) + # bulk_insert.append(self.detail_table( + # usage_point_id=self.usage_point_id, + # date=date, + # value=value, + # interval=interval, + # measure_type=measure_type, + # blacklist=0 + # )) + # self.db.insert_detail_bulk(bulk_insert, self.measure_type) return meter_reading["interval_reading"] else: return { From 8e7126dab3cdb12f3cf559993b7654fbfcf548b4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?= Date: Tue, 6 Dec 2022 12:54:07 +0100 Subject: [PATCH 16/25] fix influxdb infinite retention --- README.md | 2 +- app/models/influxdb.py | 10 +++++++--- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 2abed0ed..c0099a22 100755 --- a/README.md +++ b/README.md @@ -263,7 +263,7 @@ services: image: m4dm4rtig4n/myelectricaldata:latest restart: unless-stopped volumes: - -./:/data + - ./:/data environment: TZ: Europe/Paris ports: diff --git a/app/models/influxdb.py b/app/models/influxdb.py index 893abcc1..976b6fdd 100644 --- a/app/models/influxdb.py +++ b/app/models/influxdb.py @@ -64,12 +64,16 @@ def __init__(self, hostname, port, token, org="myelectricaldata.fr", bucket="mye self.retention = 0 self.max_retention = None self.get_list_retention_policies() - if self.retention < 94608000: + if self.retention != 0: day = int(self.retention / 60 / 60 / 24) app.LOG.warning([ f" ATTENTION, l'InfluxDB est configuré avec une durée de retention de {day} jours.", f" Toutes les données supérieurs à {day} jours ne seront jamais inséré dans celui-ci." ]) + else: + app.LOG.log([ + f" => Aucune retention de données détecté." 
+ ]) def connect(self): @@ -95,7 +99,7 @@ def connect(self): app.LOG.log(f" => Methode d'importation : {self.method.upper()}") if self.method.upper() == "ASYNCHRONOUS": - app.LOG.warning(" ATTENTION, le mode d'importation \"ASYNCHRONOUS\" est très consommateur de ressources système.") + app.LOG.warning(" ATTENTION, le mode d'importation \"ASYNCHRONOUS\" est très consommateur de ressources système.") self.write_api = self.influxdb.write_api(write_options=ASYNCHRONOUS) elif self.method.upper() == "SYNCHRONOUS": self.write_api = self.influxdb.write_api(write_options=SYNCHRONOUS) @@ -142,7 +146,7 @@ def write(self, tags, date=None, fields=None, measurement="log"): date_object = datetime.datetime.now() else: date_object = date - if date.replace(tzinfo=None) > date_max.replace(tzinfo=None): + if self.retention == 0 or (date.replace(tzinfo=None) > date_max.replace(tzinfo=None)): record = { "measurement": measurement, "time": date_object, From 69c540abbd4cab003a9ee30ac505e2fbf275bdd4 Mon Sep 17 00:00:00 2001 From: Philippe Date: Tue, 6 Dec 2022 13:00:17 +0100 Subject: [PATCH 17/25] Typo update --- app/models/influxdb.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app/models/influxdb.py b/app/models/influxdb.py index 976b6fdd..8e09ad35 100644 --- a/app/models/influxdb.py +++ b/app/models/influxdb.py @@ -68,7 +68,7 @@ def __init__(self, hostname, port, token, org="myelectricaldata.fr", bucket="mye day = int(self.retention / 60 / 60 / 24) app.LOG.warning([ f" ATTENTION, l'InfluxDB est configuré avec une durée de retention de {day} jours.", - f" Toutes les données supérieurs à {day} jours ne seront jamais inséré dans celui-ci." + f" Toutes les données supérieures à {day} jours ne seront jamais insérées dans celui-ci." ]) else: app.LOG.log([ @@ -159,4 +159,4 @@ def write(self, tags, date=None, fields=None, measurement="log"): if fields is not None: for key, value in fields.items(): record["fields"][key] = value - self.write_api.write(bucket=self.bucket, org=self.org, record=record) \ No newline at end of file + self.write_api.write(bucket=self.bucket, org=self.org, record=record) From a5ece879e1b38e75569d84c15c4b34617705ddfb Mon Sep 17 00:00:00 2001 From: Philippe Date: Tue, 6 Dec 2022 13:09:03 +0100 Subject: [PATCH 18/25] Typo corrections --- app/models/jobs.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/app/models/jobs.py b/app/models/jobs.py index c06d0135..0b693bc0 100644 --- a/app/models/jobs.py +++ b/app/models/jobs.py @@ -56,7 +56,7 @@ def job_import_data(self, wait=True, target=None): self.get_gateway_status() except Exception as e: traceback.print_exc() - LOG.error([f"Erreur lors de la récupération des information de la passerelle", e]) + LOG.error([f"Erreur lors de la récupération des informations de la passerelle", e]) try: if target == "account_status" or target is None: self.get_account_status() @@ -68,7 +68,7 @@ def job_import_data(self, wait=True, target=None): self.get_contract() except Exception as e: traceback.print_exc() - LOG.critical([f"Erreur lors de la récupération des information du contract", e]) + LOG.critical([f"Erreur lors de la récupération des informations du contract", e]) try: if target == "addresses" or target is None: self.get_addresses() @@ -86,7 +86,7 @@ def job_import_data(self, wait=True, target=None): self.get_consumption_detail() except Exception as e: traceback.print_exc() - LOG.error([f"Erreur lors de la récupération de votre consommation détaillé", e]) + LOG.error([f"Erreur lors de la récupération de 
votre consommation détaillée", e]) try: if target == "production" or target is None: self.get_production() @@ -98,7 +98,7 @@ def job_import_data(self, wait=True, target=None): self.get_production_detail() except Exception as e: traceback.print_exc() - LOG.error([f"Erreur lors de la récupération de votre production détaillé", e]) + LOG.error([f"Erreur lors de la récupération de votre production détaillée", e]) try: # ####################################################################################################### @@ -258,7 +258,7 @@ def get_consumption(self): def get_consumption_detail(self): result = {} if hasattr(self.usage_point_config, "consumption_detail") and self.usage_point_config.consumption_detail: - LOG.title(f"[{self.usage_point_config.usage_point_id}] Récupération de la consommation détaillé :") + LOG.title(f"[{self.usage_point_config.usage_point_id}] Récupération de la consommation détaillée :") result = Detail( headers=self.header_generate(), usage_point_id=self.usage_point_config.usage_point_id, @@ -279,7 +279,7 @@ def get_production(self): def get_production_detail(self): result = {} if hasattr(self.usage_point_config, "production_detail") and self.usage_point_config.production_detail: - LOG.title(f"[{self.usage_point_config.usage_point_id}] Récupération de la production détaillé :") + LOG.title(f"[{self.usage_point_config.usage_point_id}] Récupération de la production détaillée :") result = Detail( headers=self.header_generate(), usage_point_id=self.usage_point_config.usage_point_id, From f10a800418c109ec10e386e36a318cead5464fd5 Mon Sep 17 00:00:00 2001 From: Philippe Date: Tue, 6 Dec 2022 13:15:10 +0100 Subject: [PATCH 19/25] Typo correction --- app/templates/md/usage_point_id.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/app/templates/md/usage_point_id.md b/app/templates/md/usage_point_id.md index 1b32ec0f..853b4040 100755 --- a/app/templates/md/usage_point_id.md +++ b/app/templates/md/usage_point_id.md @@ -2,8 +2,8 @@ ## Mon contrat -* Les données ont était récupéré via les API d'Enedis et ne sont pas toujours correctement misent à jours par Enedis. -Vérifié bien vos horaires HC/HP et votre date d'activation. Vous pouvez surcharger ces valeurs dans la configuration du point de livraison. +* Les données ont était récupérées via les API d'Enedis et ne sont pas toujours correctement mises à jour par Enedis. +Vérifiez bien vos horaires HC/HP et votre date d'activation. Vous pouvez surcharger ces valeurs dans la configuration du point de livraison. 
 Adresse postale : **{{ address | default("Pas de données.") }}**
@@ -23,4 +23,4 @@ Statut du contrat : **{{ contract_data['contract_status'] | default("Pas de donn
 Dernière date de changement du tarif : **{{ contract_data['last_distribution_tariff_change_date'] | default("Pas de données.") }}**
-Seuil heures creuses / pleines :
\ No newline at end of file
+Seuil heures creuses / pleines :

From c5f476a671f471bd879afd0e5d0f0760c38c1432 Mon Sep 17 00:00:00 2001
From: Philippe
Date: Tue, 6 Dec 2022 13:19:53 +0100
Subject: [PATCH 20/25] Typo correction

---
 app/templates/models/configuration.py | 26 +++++++++++++-------------
 1 file changed, 13 insertions(+), 13 deletions(-)

diff --git a/app/templates/models/configuration.py b/app/templates/models/configuration.py
index 304cb4a9..5c360857 100644
--- a/app/templates/models/configuration.py
+++ b/app/templates/models/configuration.py
@@ -25,7 +25,7 @@ def __init__(self, title="", usage_point_id=0, display_usage_point_id=False):
             "enable": {
                 "title": "Actif ?",
                 "help": "Active ou non le lancement du job de récupération des données tous X secondes<br>"
-                        "L'interval de récupération est défini par le \"cycle\" dans le config.yaml",
+                        "L'intervalle de récupération est définie par le \"cycle\" dans le config.yaml",
                 "type": True,
                 "default": True
             },
@@ -73,16 +73,16 @@ def __init__(self, title="", usage_point_id=0, display_usage_point_id=False):
             "consumption_max_date": {
                 "title": "Date max journalière",
                 "help": "Permet de définir la date de fin de récupération des données en mode journalier. "
-                        "<br><br> Si aucune valeur n'est défini ce sera la date 'last_activation_date' remontée "
+                        "<br><br> Si aucune valeur n'est définie ce sera la date 'last_activation_date' remontée "
                         "par les API d'Enedis."
-                        "<br><br> ATTENTION, si cette valeur n'est pas correctement défini vous risquez de ne pas "
+                        "<br><br> ATTENTION, si cette valeur n'est pas correctement définie vous risquez de ne pas "
                         "récupérer la totalité de vos données ou encore d'avoir un dépassement quota",
                 "type": datetime.datetime.now(),
                 "default": ""
             },
             "consumption_detail": {
                 "title": "Consommation détaillée",
-                "help": "Active/Désactive la récupération de la consommation détaillé.<br><br> ATTENTION, pour "
+                "help": "Active/Désactive la récupération de la consommation détaillée.<br><br> ATTENTION, pour "
                         "fonctionner il vous faut activer le relevé de consommation horaire sur le site d'Enedis"
                         "Plus d'informations sont disponible ici",
                 "type": True,
@@ -90,10 +90,10 @@ def __init__(self, title="", usage_point_id=0, display_usage_point_id=False):
             "consumption_detail_max_date": {
                 "title": "Date max détaillée",
-                "help": "Permet de définir la date de fin de récupération des données en mode détaillée. "
-                        "<br><br> Si aucune valeur n'est défini ce sera la date 'last_activation_date' remontée "
+                "help": "Permet de définir la date de fin de récupération des données en mode détaillé. "
+                        "<br><br> Si aucune valeur n'est définie ce sera la date 'last_activation_date' remontée "
                         "par les API d'Enedis."
-                        "<br><br> ATTENTION, si cette valeur n'est pas correctement défini vous risquez de ne pas "
+                        "<br><br> ATTENTION, si cette valeur n'est pas correctement définie vous risquez de ne pas "
                         "récupérer la totalité de vos données ou encore d'avoir un dépassement quota",
                 "type": datetime.datetime.now(),
                 "default": ""
@@ -177,16 +177,16 @@ def __init__(self, title="", usage_point_id=0, display_usage_point_id=False):
             "production_max_date": {
                 "title": "Date max journalière",
                 "help": "Permet de définir la date de fin de récupération des données en mode journalier. "
-                        "<br><br> Si aucune valeur n'est défini ce sera la date 'last_activation_date' remontée "
+                        "<br><br> Si aucune valeur n'est définie ce sera la date 'last_activation_date' remontée "
                         "par les API d'Enedis."
-                        "<br><br> ATTENTION, si cette valeur n'est pas correctement défini vous risquez de ne pas "
+                        "<br><br> ATTENTION, si cette valeur n'est pas correctement définie vous risquez de ne pas "
                         "récupérer la totalité de vos données ou encore d'avoir un dépassement quota",
                 "type": datetime.datetime.now(),
                 "default": ""
             },
             "production_detail": {
                 "title": "Production détaillée",
-                "help": "Active/Désactive la récupération de la production détaillé via vos panneaux solaires."
+                "help": "Active/Désactive la récupération de la production détaillée via vos panneaux solaires."
                         "<br><br> ATTENTION<br>, pour fonctionner il vous faut activer le relevé de consommation horaire"
                         "sur le site d'Enedis<br>Plus d'informations sont disponible "
                         "ici",
@@ -195,10 +195,10 @@ def __init__(self, title="", usage_point_id=0, display_usage_point_id=False):
             "production_detail_max_date": {
                 "title": "Date max détaillée",
-                "help": "Permet de définir la date de fin de récupération des données en mode détaillée. "
-                        "<br><br> Si aucune valeur n'est défini ce sera la date 'last_activation_date' remontée "
+                "help": "Permet de définir la date de fin de récupération des données en mode détaillé. "
+                        "<br><br> Si aucune valeur n'est définie ce sera la date 'last_activation_date' remontée "
                         "par les API d'Enedis."
-                        "<br><br> ATTENTION, si cette valeur n'est pas correctement défini vous risquez de ne pas "
+                        "<br><br> ATTENTION, si cette valeur n'est pas correctement définie vous risquez de ne pas "
                         "récupérer la totalité de vos données ou encore d'avoir un dépassement quota",
                 "type": datetime.datetime.now(),
                 "default": ""

From e147a046518261db3fd45fc2ebf786f1b876de16 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?=
Date: Wed, 7 Dec 2022 00:18:15 +0100
Subject: [PATCH 21/25] fix https://github.com/m4dm4rtig4n/myelectricaldata/issues/143

---
 app/alembic/versions/0c07baa8d7b2_base.py     |  16 +--
 app/db_schema.py                              |  18 +--
 app/models/database.py                        | 132 ++++++++++++------
 .../__pycache__/configuration.cpython-39.pyc  | Bin 10610 -> 0 bytes
 .../__pycache__/datatable.cpython-39.pyc      | Bin 5219 -> 0 bytes
 .../__pycache__/dependencies.cpython-39.pyc   | Bin 169 -> 0 bytes
 .../models/__pycache__/menu.cpython-39.pyc    | Bin 2074 -> 0 bytes
 .../__pycache__/sidemenu.cpython-39.pyc       | Bin 1065 -> 0 bytes
 .../usage_point_select.cpython-39.pyc         | Bin 2105 -> 0 bytes
 9 files changed, 108 insertions(+), 58 deletions(-)
 delete mode 100644 app/templates/models/__pycache__/configuration.cpython-39.pyc
 delete mode 100644 app/templates/models/__pycache__/datatable.cpython-39.pyc
 delete mode 100644 app/templates/models/__pycache__/dependencies.cpython-39.pyc
 delete mode 100644 app/templates/models/__pycache__/menu.cpython-39.pyc
 delete mode 100644 app/templates/models/__pycache__/sidemenu.cpython-39.pyc
 delete mode 100644 app/templates/models/__pycache__/usage_point_select.cpython-39.pyc

diff --git a/app/alembic/versions/0c07baa8d7b2_base.py b/app/alembic/versions/0c07baa8d7b2_base.py
index 29774d21..d0b872ce 100644
--- a/app/alembic/versions/0c07baa8d7b2_base.py
+++ b/app/alembic/versions/0c07baa8d7b2_base.py
@@ -68,7 +68,7 @@ def upgrade() -> None:
     op.create_index(op.f('ix_addresses_id'), 'addresses', ['id'], unique=True)
     op.create_index(op.f('ix_addresses_usage_point_id'), 'addresses', ['usage_point_id'], unique=False)
     op.create_table('consumption_daily',
-    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('id', sa.String(), nullable=False),
     sa.Column('usage_point_id', sa.Text(), nullable=False),
     sa.Column('date', sa.DateTime(), nullable=False),
     sa.Column('value', sa.Integer(), nullable=False),
@@ -81,7 +81,7 @@ def upgrade() -> None:
     op.create_index(op.f('ix_consumption_daily_id'), 'consumption_daily', ['id'], unique=True)
     op.create_index(op.f('ix_consumption_daily_usage_point_id'), 'consumption_daily', ['usage_point_id'], unique=False)
    op.create_table('consumption_detail',
-    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('id', sa.String(), nullable=False),
     sa.Column('usage_point_id', sa.Text(), nullable=False),
     sa.Column('date', sa.DateTime(), nullable=False),
     sa.Column('value', sa.Integer(), nullable=False),
@@ -91,7 +91,7 @@ def upgrade() -> None:
     sa.Column('fail_count', sa.Integer(), nullable=False),
     sa.ForeignKeyConstraint(['usage_point_id'], ['usage_points.usage_point_id'], ),
     sa.PrimaryKeyConstraint('id'),
-    sqlite_autoincrement=True
+    # sqlite_autoincrement=True
     )
     op.create_index(op.f('ix_consumption_detail_id'), 'consumption_detail', ['id'], unique=True)
     op.create_index(op.f('ix_consumption_detail_usage_point_id'), 'consumption_detail', ['usage_point_id'], unique=False)
@@ -116,12 +116,12 @@ def upgrade() -> None:
     sa.Column('count', sa.Integer(), nullable=False),
     sa.ForeignKeyConstraint(['usage_point_id'], ['usage_points.usage_point_id'], ),
     sa.PrimaryKeyConstraint('id'),
-    sqlite_autoincrement=True
+    # sqlite_autoincrement=True
     )
    op.create_index(op.f('ix_contracts_id'), 'contracts', 
['id'], unique=True) op.create_index(op.f('ix_contracts_usage_point_id'), 'contracts', ['usage_point_id'], unique=False) op.create_table('production_daily', - sa.Column('id', sa.Integer(), nullable=False), + sa.Column('id', sa.String(), nullable=False), sa.Column('usage_point_id', sa.Text(), nullable=False), sa.Column('date', sa.DateTime(), nullable=False), sa.Column('value', sa.Integer(), nullable=False), @@ -129,12 +129,12 @@ def upgrade() -> None: sa.Column('fail_count', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['usage_point_id'], ['usage_points.usage_point_id'], ), sa.PrimaryKeyConstraint('id'), - sqlite_autoincrement=True + # sqlite_autoincrement=True ) op.create_index(op.f('ix_production_daily_id'), 'production_daily', ['id'], unique=True) op.create_index(op.f('ix_production_daily_usage_point_id'), 'production_daily', ['usage_point_id'], unique=False) op.create_table('production_detail', - sa.Column('id', sa.Integer(), nullable=False), + sa.Column('id', sa.String(), nullable=False), sa.Column('usage_point_id', sa.Text(), nullable=False), sa.Column('date', sa.DateTime(), nullable=False), sa.Column('value', sa.Integer(), nullable=False), @@ -144,7 +144,7 @@ def upgrade() -> None: sa.Column('fail_count', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['usage_point_id'], ['usage_points.usage_point_id'], ), sa.PrimaryKeyConstraint('id'), - sqlite_autoincrement=True + # sqlite_autoincrement=True ) op.create_index(op.f('ix_production_detail_id'), 'production_detail', ['id'], unique=True) op.create_index(op.f('ix_production_detail_usage_point_id'), 'production_detail', ['usage_point_id'], unique=False) diff --git a/app/db_schema.py b/app/db_schema.py index 64d8c59b..625da091 100644 --- a/app/db_schema.py +++ b/app/db_schema.py @@ -1,4 +1,4 @@ -from sqlalchemy import (Column, ForeignKey, Float, Integer, Text, Boolean, DateTime) +from sqlalchemy import (Column, ForeignKey, Float, Integer, Text, Boolean, DateTime, String) from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import relationship @@ -351,9 +351,9 @@ def __repr__(self): class ConsumptionDaily(Base): __tablename__ = 'consumption_daily' - __table_args__ = {'sqlite_autoincrement': True} + # __table_args__ = {'sqlite_autoincrement': True} - id = Column(Integer, + id = Column(String, primary_key=True, index=True, unique=True, @@ -393,9 +393,9 @@ def __repr__(self): class ConsumptionDetail(Base): __tablename__ = 'consumption_detail' - __table_args__ = {'sqlite_autoincrement': True} + # __table_args__ = {'sqlite_autoincrement': True} - id = Column(Integer, + id = Column(String, primary_key=True, index=True, unique=True, @@ -443,9 +443,9 @@ def __repr__(self): class ProductionDaily(Base): __tablename__ = 'production_daily' - __table_args__ = {'sqlite_autoincrement': True} + # __table_args__ = {'sqlite_autoincrement': True} - id = Column(Integer, + id = Column(String, primary_key=True, index=True, unique=True, @@ -485,9 +485,9 @@ def __repr__(self): class ProductionDetail(Base): __tablename__ = 'production_detail' - __table_args__ = {'sqlite_autoincrement': True} + # __table_args__ = {'sqlite_autoincrement': True} - id = Column(Integer, + id = Column(String, primary_key=True, index=True, unique=True, diff --git a/app/models/database.py b/app/models/database.py index 15247fa1..8c6bec8c 100644 --- a/app/models/database.py +++ b/app/models/database.py @@ -1,6 +1,6 @@ -import datetime import json import os +import hashlib from datetime import datetime, timedelta from sqlalchemy import (create_engine, 
delete, inspect, select) @@ -332,6 +332,36 @@ def set_usage_point(self, usage_point_id, data): production_detail_max_date = production_detail_max_date else: production_detail_max_date = datetime.strptime(production_detail_max_date, "%Y-%m-%d") + + if "call_number" in data: + call_number = data["call_number"] + else: + call_number = 0 + if "quota_reached" in data: + quota_reached = str2bool(data["quota_reached"]) + else: + quota_reached = False + if "quota_limit" in data: + quota_limit = data["quota_limit"] + else: + quota_limit = 0 + if "quota_reset_at" in data: + quota_reset_at = data["quota_reset_at"] + else: + quota_reset_at = None + if "last_call" in data: + last_call = data["last_call"] + else: + last_call = None + if "ban" in data: + ban = str2bool(data["ban"]) + else: + ban = False + if "consentement_expiration" in data: + consentement_expiration = data["consentement_expiration"] + else: + consentement_expiration = None + if usage_points is not None: usage_points.enable = str2bool(enable) usage_points.name = name @@ -362,6 +392,13 @@ def set_usage_point(self, usage_point_id, data): usage_points.consumption_detail_max_date = consumption_detail_max_date usage_points.production_max_date = production_max_date usage_points.production_detail_max_date = production_detail_max_date + usage_points.call_number = call_number + usage_points.quota_reached = str2bool(quota_reached) + usage_points.quota_limit = quota_limit + usage_points.quota_reset_at = quota_reset_at + usage_points.last_call = last_call + usage_points.ban = str2bool(ban) + usage_points.consentement_expiration = consentement_expiration else: self.session.add( UsagePoints( @@ -389,11 +426,18 @@ def set_usage_point(self, usage_point_id, data): token=token, progress=progress, progress_status=progress_status, - enable=enable, - consumption_max_date=None, - consumption_detail_max_date=None, - production_max_date=None, - production_detail_max_date=None + enable=str2bool(enable), + consumption_max_date=consumption_max_date, + consumption_detail_max_date=consumption_detail_max_date, + production_max_date=production_max_date, + production_detail_max_date=production_detail_max_date, + call_number=call_number, + quota_reached=str2bool(quota_reached), + quota_limit=quota_limit, + quota_reset_at=quota_reset_at, + last_call=last_call, + ban=str2bool(ban), + consentement_expiration=consentement_expiration ) ) @@ -428,8 +472,7 @@ def usage_point_update(self, usage_points.last_call = last_call usage_points.ban = ban - ## ---------------------------------------------------------------------------------------------------------------- - + ## ---------------------------------------------------------------------------------------------------------------- ## ADDRESSES ## ---------------------------------------------------------------------------------------------------------------- def get_addresse(self, usage_point_id): @@ -551,6 +594,7 @@ def get_daily_all(self, usage_point_id, measurement_direction="consumption"): ).all() def get_daily_date(self, usage_point_id, date, measurement_direction="consumption"): + unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode('utf-8')).hexdigest() if measurement_direction == "consumption": table = ConsumptionDaily relation = UsagePoints.relation_consumption_daily @@ -560,8 +604,7 @@ def get_daily_date(self, usage_point_id, date, measurement_direction="consumptio return self.session.scalars( select(table) .join(relation) - .where(table.usage_point_id == usage_point_id) - .where(table.date == date) + 
.where(table.id == unique_id) ).first() def get_daily_state(self, usage_point_id, date, measurement_direction="consumption"): @@ -635,6 +678,7 @@ def get_daily_fail_count(self, usage_point_id, date, measurement_direction="cons return 0 def daily_fail_increment(self, usage_point_id, date, measurement_direction="consumption"): + unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode('utf-8')).hexdigest() if measurement_direction == "consumption": table = ConsumptionDaily relation = UsagePoints.relation_consumption_daily @@ -643,8 +687,7 @@ def daily_fail_increment(self, usage_point_id, date, measurement_direction="cons relation = UsagePoints.relation_production_daily query = (select(table) .join(relation) - .where(table.usage_point_id == usage_point_id) - .where(table.date == date)) + .where(table.id == unique_id)) LOG.debug(query.compile(compile_kwargs={"literal_binds": True})) daily = self.session.scalars(query).one_or_none() if daily is not None: @@ -736,6 +779,7 @@ def get_daily(self, usage_point_id, begin, end, measurement_direction="consumpti def insert_daily(self, usage_point_id, date, value, blacklist=0, fail_count=0, measurement_direction="consumption"): + unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode('utf-8')).hexdigest() if measurement_direction == "consumption": table = ConsumptionDaily relation = UsagePoints.relation_consumption_daily @@ -744,10 +788,10 @@ def insert_daily(self, usage_point_id, date, value, blacklist=0, fail_count=0, relation = UsagePoints.relation_production_daily query = (select(table) .join(relation) - .where(table.usage_point_id == usage_point_id) - .where(table.date == date)) + .where(table.id == unique_id)) daily = self.session.scalars(query).one_or_none() if daily is not None: + daily.id = unique_id daily.usage_point_id = usage_point_id daily.date = date daily.value = value @@ -756,6 +800,7 @@ def insert_daily(self, usage_point_id, date, value, blacklist=0, fail_count=0, else: self.session.add( table( + id=unique_id, usage_point_id=usage_point_id, date=date, value=value, @@ -770,16 +815,17 @@ def delete_daily(self, usage_point_id, date=None, measurement_direction="consump else: table = ProductionDaily if date is not None: + unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode('utf-8')).hexdigest() self.session.execute( delete(table) - .where(table.usage_point_id == usage_point_id) - .where(table.date == date) + .where(table.id == unique_id) ) else: self.session.execute(delete(table).where(table.usage_point_id == usage_point_id)) return True def blacklist_daily(self, usage_point_id, date, action=True, measurement_direction="consumption"): + unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode('utf-8')).hexdigest() if measurement_direction == "consumption": table = ConsumptionDaily relation = UsagePoints.relation_consumption_daily @@ -788,8 +834,7 @@ def blacklist_daily(self, usage_point_id, date, action=True, measurement_directi relation = UsagePoints.relation_production_daily query = (select(table) .join(relation) - .where(table.usage_point_id == usage_point_id) - .where(table.date == date) + .where(table.id == unique_id) ) daily = self.session.scalars(query).one_or_none() if daily is not None: @@ -797,6 +842,7 @@ def blacklist_daily(self, usage_point_id, date, action=True, measurement_directi else: self.session.add( table( + id=unique_id, usage_point_id=usage_point_id, date=date, value=0, @@ -840,6 +886,7 @@ def get_detail_all(self, usage_point_id, begin=None, end=None, measurement_direc ).all() def get_detail_date(self, 
usage_point_id, date, measurement_direction="consumption"): + unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode('utf-8')).hexdigest() if measurement_direction == "consumption": table = ConsumptionDetail relation = UsagePoints.relation_consumption_detail @@ -849,8 +896,7 @@ def get_detail_date(self, usage_point_id, date, measurement_direction="consumpti return self.session.scalars( select(table) .join(relation) - .where(table.usage_point_id == usage_point_id) - .where(table.date == date) + .where(table.id == unique_id) ).first() def get_detail_range(self, usage_point_id, begin, end, measurement_direction="consumption"): @@ -910,6 +956,7 @@ def get_detail(self, usage_point_id, begin, end, measurement_direction="consumpt return result def get_detail_state(self, usage_point_id, date, measurement_direction="consumption"): + unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode('utf-8')).hexdigest() if measurement_direction == "consumption": table = ConsumptionDetail relation = UsagePoints.relation_consumption_detail @@ -919,38 +966,39 @@ def get_detail_state(self, usage_point_id, date, measurement_direction="consumpt current_data = self.session.scalars( select(table) .join(relation) - .where(table.usage_point_id == usage_point_id) - .where(table.date == date) + .where(table.id == unique_id) ).one_or_none() if current_data is None: return False else: return True - def insert_detail_bulk(self, data, mesure_type="consumption"): - if mesure_type == "consumption": - table = ConsumptionDetail - else: - table = ProductionDetail - begin = "" - end = "" - for scalar in data: - if begin == "": - begin = scalar.date - end = scalar.date - self.session.execute( - table.__table__.delete().filter(ConsumptionDetail.date.between(begin, end)) - ) - self.session.add_all(data) + # def insert_detail_bulk(self, data, mesure_type="consumption"): + # if mesure_type == "consumption": + # table = ConsumptionDetail + # else: + # table = ProductionDetail + # begin = "" + # end = "" + # for scalar in data: + # if begin == "": + # begin = scalar.date + # end = scalar.date + # self.session.execute( + # table.__table__.delete().filter(ConsumptionDetail.date.between(begin, end)) + # ) + # self.session.add_all(data) def insert_detail(self, usage_point_id, date, value, interval, measure_type, blacklist=0, fail_count=0, mesure_type="consumption"): + unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode('utf-8')).hexdigest() if mesure_type == "consumption": table = ConsumptionDetail else: table = ProductionDetail detail = self.get_detail_date(usage_point_id, date, mesure_type) if detail is not None: + detail.id = unique_id detail.usage_point_id = usage_point_id detail.date = date detail.value = value @@ -961,6 +1009,7 @@ def insert_detail(self, usage_point_id, date, value, interval, measure_type, bla else: self.session.add( table( + id=unique_id, usage_point_id=usage_point_id, date=date, value=value, @@ -977,10 +1026,10 @@ def delete_detail(self, usage_point_id, date=None, mesure_type="consumption"): else: table = ProductionDetail if date is not None: + unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode('utf-8')).hexdigest() self.session.execute( delete(table) - .where(table.usage_point_id == usage_point_id) - .where(table.date == date) + .where(table.id == unique_id) ) else: self.session.execute(delete(table).where(table.usage_point_id == usage_point_id)) @@ -1000,6 +1049,7 @@ def get_detail_fail_count(self, usage_point_id, date, mesure_type="consumption") return self.get_detail_date(usage_point_id, date, 
mesure_type).fail_count def detail_fail_increment(self, usage_point_id, date, mesure_type="consumption"): + unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode('utf-8')).hexdigest() if mesure_type == "consumption": table = ConsumptionDetail relation = UsagePoints.relation_consumption_detail @@ -1008,8 +1058,7 @@ def detail_fail_increment(self, usage_point_id, date, mesure_type="consumption") relation = UsagePoints.relation_production_detail query = (select(table) .join(relation) - .where(table.usage_point_id == usage_point_id) - .where(table.date == date)) + .where(table.id == unique_id)) detail = self.session.scalars(query).one_or_none() if detail is not None: fail_count = int(detail.fail_count) + 1 @@ -1029,6 +1078,7 @@ def detail_fail_increment(self, usage_point_id, date, mesure_type="consumption") fail_count = 0 self.session.add( table( + id=unique_id, usage_point_id=usage_point_id, date=date, value=0, diff --git a/app/templates/models/__pycache__/configuration.cpython-39.pyc b/app/templates/models/__pycache__/configuration.cpython-39.pyc deleted file mode 100644 index 7ea26d6fe3c88875b66875c68a8971ccfa8100b1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 10610 zcmeHNU2GdycIFIc$Pq>TSXTU#c)hbs9GQ+yTDJ4EwoI=gIk8jQQESzks0oM>?@%0R zI78i;ku8Gi7I09sC>AyPw!8Mi-j`PU(1)TeTC{H}@>;Yg`Y_n17Fe{eEn2i`8}~bR zhJX6A-A&#~f|qyhy>rgF=YHp$d(Vt9G?Yo;^ZB3scIB_<6N!JMi{4KX7vI6p`v3(? zFv(6VNa9~!knu0u$-1(jNYt0K)p}|nRZlOZ#kFE<^?`)}De-=SsVwzbf~D@u3mGQe zPUO@740?%Mc`4N9&3eN&d^0bH+HIeswzM9qb(1fd)b08F+@kB+6}qAK(-G};w3r}0!0DLUh2@_nx-S1VD9?Dc7!fysY z?@!V1CzhpFqAa(lOtxf{N-K#{WfGHF5?|#Z*b+;8sSc){_6&q-w`bW98)hR*G8+-& zM%e-Mjfwg}Hr}bf^f1j1vBRDFjC_6HTCr0*+o?@H)Yv(8zB7}Z zl3!qNc6xV8{uX;1622oO`u0PWU3{oO(^@OBtUi$VANd395}RU|SxIn&t}V&Jx-4sc zW_gfZ`97_S#u8ZU@7xO2Y1=ZqW?lU0EYKUS<@h=?b=z9whUK{qngXt`xq+uQOkOvA zpPOQUsdLlkZm?!@yckRU@$dW_r5fTUGr^@EBvMzGmufpgWMqhH!>ci5NlTWp|O};ai)P zps}?X0T(Tv&RoaY+C;@C=|9pvv*J4Fm|En;(v1ns)Z}Z1z02LE2PJH6RxQWU8wMwy zxypLQHgh^N9FML+3U*RhH|q9$sLs19rW4$OuyqP|q*QL0>a=S!9UL0t@r0y4;U6Zj(OYlAvCp^oHwrSP_?t2HV(oAs}c|zv`IG z@^sVJw>FoI7%>gQwg4m}&^q1VBy=29aNVc3-bXcBZM03oa1T_bnvFZ@ENygPmSYE!*7Q@l18yaKAf zVpL&fm6}B+<^fbLZ=*Jt?&FFTs6atL9&g{{d9e|g@y(rkVYHqP)p~{Znn(J05I6x{ ziX*|z7hqsH5FnZl;BN_?Le*;m?J>bZJ{>BKQ8&Ys=@^T!ZB^Jos0xUN3Mn%jg$;1i ztCbDL@b8)3mWWQp4~{~z834snzXSX&zVGZOIx=SAGT{ME| z?sN*lOJD;NGxfT0Pb__hg$Fy^eZ^s@VzuELXwf?;YqRWuBs@T!u+bLY^!)tQTl3dHymealERgpxjXur|gYd^WJW>p+*Z~UP&n<5?fbn8F5ShkyW=^Oy z;PC*swwV%^+c+k_Z91R~wM`gnE}$Mjbz7St5(9WOz|B*|#Zqu&`@&{21pGQmA?Bx}P*-&wxL1S@%*(PPr--2B>k-Z(6NKYli#$>vylO72851RU z4@>nd!b-dta}*=VtMwQX;wzRs_HwvkM~Jw4_ikagFi|yDCrVbuqTmpW_rkb4Q#0_t zN)-1H{(l(n598+G9JlW2^Yb(MwV7ZJ%d?v=ff>;#MDUgG)?gw!;yWB7b{g<;9<~}{ z=Vk92NtS;21N!{=m8Giqe14^u=Unj1FUqrFo0emG`!a0~b*4peEV%umOe%0tWAf3cJBPLq2fpN~Ca1%Xg_ z4*1tK3)>NLA0lc-{>m!7C&I6OQ9pOCcK+;{vu~liG&xE2$qOi_idHH3{XT>5Y#F*d zBMB)2m^O(7UoL!*aOmw=;z0yNo(b%;`sT9rQy;egsL&2=Aap{3m_-A=Mn^YXA9mH) z^3rsog9N7K#k(7!9o-O-vl9tSQo;6}5(y~O2k&n|dp$h6Dvp911=6od(+C1R5wlIW z|M(KGqL{Ye>jl-S1uqWOkVZWUY#a+n=LTvNM=DY&UkYfz$xC^PHVf*F02|zVk`Dk> ze2|JPiUswi!I`z7-84BI0cz?8W5=*_{NI#W(W3Nf(Prq)3li{&rQ!?3rEpxBa1-cvfMEITI5sS zAn^~;_R==i%l*>`H9A1R@tR@MaqosH_;lnmI3@Qur4B=FdUp2u{PjCmp+v-I*qzgNyvKfcjDswT<*p$y_WiQgB>0LpG# zcii^c-ze|kjub}!n=*`2lK)4cs~7qIJLQ77W2Y?LnQWKkXUcCRey)8l@jm1xZO}K; z%2WBNJg7*@zcvp6#ohF5p{-I%3w1sRP@_m9^0U5*ytK4I89-#CSCS88{x1wUz-8%2 z$ZAq6{?$k7hV0AB$;YDQiL{XvS1R@6N{K0m{OWAtf&A$YKan@ojZ~}K@|$Q$Z)nH? 
zt1C(VTP>NTS}JI#A0^*TY{)GYGH6UYn%Ee?{{a3ozJ@Gu`o7Hn&>x6u8-vcvXjMe( zU!hgh(5kS+#$YSM5e-_-tKpb4a>*pY>Y|@% zW$sJ<;ED_yYnW5k&$b5dOQ1Z6meXu>BMS-#1m&?-8Z-`ya=evA{iR0(L?_~f@%D3Z z8zOGr@5Q`_1@E7AxE$)?a=0}JE=RU;`P=8>G6K3X_c8XUkpAT!y01J+3%R9@p;l%# z@#%$);nr}Av>;=Laty69`$lURyvJKZD37;>ACEpE4SqAa^MSlNj4^u8>}9_E47m=p zGXB`|7*==DKhPSC>frcesy#l~8f4#kB5#afWu=V~(2BI)uaA)(`jGiQ?9X>~?t$#A zbr7C=lmzw?(USpz+=U&;o$W>LgCw5?hi77!y-J9~=qm8`D$qO{qch2Wiu+&NiB63D zF?vqyh0h1c)nh#fRrnYov^9$RK97<4x}ZAIv$ne-Q{k!SoFW@($#}Z*#x3!jLu_@Lr!#ubXNp;Cy2UQ%`eI~G z7tcd3=bE49bkAS6&C9tsc3-3iA7-x9M}-JZQ7%c4qDo(E$ccQF>s-$DhvH)M*K#@@ z;~;B&IXCy=?fG0u-$7T<_9=y{i!=M>oVY0W3tSR&>btple#%FF(`Ps%+8(y+mviSk zvuPmXYdK42e7A8?pFG>RcPUqzs+}t}N2ZFk$5&^iNK!4j_d3f|Gp(hXk3<8lPz47`mtTxdu_nKi+X>uqQ_Ja;_J<5vZPFeX*75J*wYng*TtcCkF8IS>VDm$vxA+_7J<&{GDf`d6#?JX#0u{ z4i8HJ`De~HR$kk8?Iz5gCN|nJv9^J|1f~=cfOLvB_(L2k<;OXlGb5LM2DMP0eK#Cg zGX35YXPBi1xJ={?@x=Q)jZ^V-r0#|3n&BCEs~}8c2vtNbEL7;F7LLEHRG~-yZP4cziEAoy24b`+3`m@eqvCBE zq161k{d3ejCeFQzl*Q>zd8)|Dmzp{(4NHnNE{)+lJc^ozZw4dqlNFSTEPt6vYUoKl z&Bz%snoiC*5u!&);(QwaG73h=qmJ#{J1n7J@KL1K;{S+x7W7BKcW>IGIF%|-HI*dJ z^hiunN3yo1AX<=TUrDnh$v*SgTT;;Oxvx9-QOu_~Hq&=frUmFbB~#W_!%v)&vse}E zw#6y=vGgQ?Q_>&fR3L98TgmwPC%9HN)RxL*?DkW>+)CjrnOu?i9L~H7cE?FkDvuQG z0j1UB@d#x__G)om#pqw}8?9~|ox+I6dyGi8QogpV;Y4+O_YteZI58)dGms_qNWL%q z?y-%uKiEn?&OS-tl&gIgZ!#(P-cL#iXLK^*{rlO3B)%V`Y-!6wEs3SSPw$^;@$Qv! zHO=vh`fDe1ul89_xp^>`FCgF6I2r9AOqR=^ZVsRk=5Z3$Q$0DMWv|$qe(IxCSVop> zfga~{6C2lQf07>-Cd#ishA2P&F4btkIK7d?~546nkihQAI> zdB;H|0r-!>Of;DEA?XNy^q-ZE$Yatesd;3kcjkpIfm?VsK(9(gui)iVltH6yR=FHz z%4MY4@HheWY`Kirt?a0Wze6|BgE{^Y6_latDaaY|6beGWN1IeyEU$p;m7NNh2kG+oqAMXQ&O(-a$*h7|h6AyQ=g*(I|SbQs< z(oroHBZeMoR_TX`s#JT5sP=?dWr{yQjnbE(0o`kmM5hjC-0iSL7iHA-Spz2(!3`XL zowY^By$^wp_kqIKpMLcu;{l0aAceT6(6$rMS{Wk1lcTc_~(* zhr!FwbzXTWdXL2h=y{VHu@R3wA$QSdiPXG!fUJtnFxnX*wx6Q<5_sqlaVA@|ygbku9^b zer8|G+|JYh`oyKjfHwFgjIXwU===g2%)~RavPppZ(~P?S+}W&t7V2jNHs}rfcys5zJenvc)W zZ%gzleRDA9i*lYWO4%jphwto@y%^7dw;Hnjl5GF(Q2RWzzqKMp%)iWz!etO?Pt%zE2% z!Vq|F2-A4mY!w1RuE{r;L-+GiesGYP3FI2N%ROecV6L;~GT%69|7fLt@x#VxB>IaO zq58$m(@FjL)Aj~ypT^D){3U-a?7i1^K=L$#cTOwtV>jfi?swf_gdMOx6aazeK&YC0 ztxLCeJs;_dA6;JTTwbINCD9%`UY{kEHP2~pdSEdz9yqRNx4V6xM`ZCMpiTg;MUARO zR#t(o0=*6N4$w8A>p(Yv-UYgukku87tU|xls|^y2fncI_x<21X@Jdu^QundUKC5KA z?XGV}=ZoZ%-@-VfW^1Ln3|(8%g?*VQl1n_hYINJ}^tu7JqncUwoesNgW{d>e(XR{T z#eQV3^*QhQ^hIdp}h0zIT#mTy_; zo*WUn(LpK{zf!PrEeZNkOx0-i&{T=M<0F%A1o`ppkm-`I6bGXOnJG3DGo9gMcCo0~ z(R7~Zdw9gGN56YD*L*(*bL7O!-6NNc<43r);c_O{!eSr=XU{-y_iz8N&-11S==>HLk=;_OXJm~2WOUV(-ckYGgCnNoT!UQYkK<4}3 z96hXW#O)`9R%CSsEqZxqyIK8mzK(7l>gVS0p(l@vcsDt1HP>3vrRMS)%q93bR7ch| zU8gE=*zv=zy=6zlY(+~a6f-ih%`Mxi+PjA)J2H>e(2A}d+b2H(()2;=q?er3LgLk|}HBn)^g=>b82M@$m!=61}Z25`O=j}xI zx^2gEBNm8%>xtrbw*uJvQJ{)K{Qqy94a%ZPlPCN{;m&$uP&mD9+hRLUYVv?=!~Y5{ z7%VBlKH2xUr8}M{y96^rUZ8O-dHrDnq{Ilh1IM`(BlBrm8wKw`-Y z5}UJ6xK+-gXz6KXbITe_F@Q#}4_<&H*Y2^#suj}^3_}lVDmz*2?pwR8VqT=DA#wE~ z?>~ZysLetloEP2~+b<@V{8!b;zts}?O0TI{#U-rcDiJmH>#AnpDsU#T0Y9A(JR$g) ze67APbVJssQyoGNR)OHHK)WPWzV))uBB+YzaUJv-_ykfVVadQHz#>uZwQz_PVbRbJ z^c=!sln>E1)JHF?guK`t1&l@||0zg#D<|-pS|8?|5lTATFF_bkblAg-u*H305ewS|_>^2u?#!)ZR9a yWa`VTPpXvlAQze6c3CLrK%)HY{+(bRx=WeE)F*l~Mb#2X5eh>(nS?Ybk^cd-pF0Tv diff --git a/app/templates/models/__pycache__/dependencies.cpython-39.pyc b/app/templates/models/__pycache__/dependencies.cpython-39.pyc deleted file mode 100644 index 545507770395e26e755b20f89240ca29863ec72e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 169 zcmYe~<>g`kf`6Rx$(lg=F^Gcl5a0V_`+P~-5EZ0W{5h_X;zNmzkN~N1tVTi5pyEVoUr33Lt#=(?IQ!h) 
zwMh>9iGB+d2_E|i_$WyC$`fCqh``LA?Klpw($3E8?99&0{&ttHthfZ)*O#N#uSG(B zM`6BMAUuRF?*UyAI&3^8Ba)H{{ec^Y$2U*CI}Cq%S|90Y#Bid zX1*jx7PFWQqsSTpT@vaQa%$7WE3n&43 zMT7&x>vw9fx(4cgOcj^4Zo+sB0|Zs=pJz@OM!iUdA=*eA2r(QZUBRHMQ(eoz`ANey zW#~HkN5C*+3~`^5pUE%eGjhE7jX5^Q)(EOIGJk^Vj-Wnw32+7Aikd53ls+Y6X)18Z*HAMLoZ@G{_{ksq#I(OzSoMWlskSqI^%4MwlMRp=c?y*5)q|IPtn-- zq&koJ)b4@z(TDxBZBIle9Rre^hNcsNn7VNmH(y#}$?Yj6aOSB8dBV2cu* zab|Z^H_pt6=~)e}oFEj$Eg*OUKwRRZq^eOS0)9)v+v@NP2Xdx*t~k0=~+KgicsA19On zcn*eK#Pq!4%TDsWzv&$<1FBE~7xjdv?*-eF^%zAZ-w01Yo%i_zZzC&qx#}b=ePd&D zhM0kfQDwnM_u(pTXw5j^8I7wf>G8P?7*%wd1?sV_--xgiF$h18Ctu@vXzA^EUp_f# zR{K;)UeT;n0#bYw?7%R#LC<7a-Z@Q<%=6Scj{)MUqN z(^&Gwc+vB$0w|2(+2U6P!zpZWl0SrSI9Tb*LPF!252)8T*v^e|>WUS>+Z5sMDIHST z5>a1?+c5vHK)(Zux1sBg1b}X=!E0$7=HCc&yWsw57v0y(chcJ$>{lLdKproZhI|8< z1;~H!KadBY<4bx`n1gk(DOSNtX0@bDP4|9U&mO)t)(iju diff --git a/app/templates/models/__pycache__/sidemenu.cpython-39.pyc b/app/templates/models/__pycache__/sidemenu.cpython-39.pyc deleted file mode 100644 index 13216c5abc05a244e2f1aef095d708c77573c1e4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1065 zcmbtTO>fgc5Zw>kaZ3Uz<$?qUBrf65(4G;ZDiRbGQ_&#QTvjXZhT5_nv)&>#N>Aw( z@dFT2d+cA@D<}Q}2gHoiChma~YwhgE?Cg8;vUbu+#mXsydug|{uxpJj12=t4@fWg1#u+mQO7^L--96>=)BCN z)iFAn4dCZck71%~m?2YCu`@E{iYpIBxLyS?`lPjseZYasVbNh^DS^TwqvObf^uq~|=NrE-DMh0?}gUYFb; zY3GlUObfSyDhQOJ)@p9>c{vYZun<)MhS0n837vEnlA!nBa=e6y|C8g~yTHP;XmY0ib2R<~ADuwGeJq z2zWThAewM(AwD0=Z0<3)5M>?By#?M)WaEJYt5QRTEaahI@AX=UkjtbH!uW8&1@X8T lC7y3X$3hu@bV3XOi{ewYM{~ zQDS)|r1Tx$t4Qr*{{ip)6KI|)@zfWdQJ*-+PRb@AJetpQIsWF{d?si%YXqLxzyBQW zRSEe6CyS2>ldqu5RRBg9jmeM}Z)0e{+lbAHHMA)ChA@*^PYAOPjG@iw1Jbcy!#8BJ zL$&*mPo{C8c!(>D&s+WvV3il#z*{Z2=+I;~bDof)#VX8&cgb7Rtw%E0;ah2xs0Tde zq1ya9M5f~h#|HE}(B&lnMaDEIzL6uExdCV~0y@&oj;+mLI@PW9t*!g_?yNt&_wA;? zwf^v~ZozUKg@KCF#GeLgq|eLZG|v1PD&S911MP&PGzxjg)E4Y-iwYblT!0rupm*PV z+ykbbI`V<+O;W~V*<0Z7OXAh(zIJ^-N+RX^5Dp?8076~r&}`+j(W}RjAw{X9X9?a7 z=!Na~VTaKjVAc@vq?&R?mxn>X3JIETH< zhbBli^CslWmCfv4zYLa_1QUL%T|`lqsUZD`wY`YlYL_C5X;N&3aUkWbwup8{sy)co z`{@+3%i9g&8E_?O!rOyxx9i=3l-l#y%qvpNW890QT@geQgkitmD?c1)3$@VogGihmeKZ3L~|zD`7fIl-SQquj^&TC*B+CX`&sT1V6?+ zYldl@inXh^uVb5GhF#lu`9cPvigsVV=y=*K?d2>xbIPQw+V-T{kHNakXan&etG?A9 zhvk=p?0$cA^H?&D65btg^wOI5+2_-}FTL$FQC%5je9gOo3+=)GxhS*yz0u7-MS#jTN=#CU-0 z0}yIK?Q!W6V1w3Rj6?kE2OYi{vEO^4W=$J(?w7nh0+}BkZG44-zGCS?F+Bnz^ zWGJGk>H*79)n29_(d_cSeDIGf==pWW!HSPRvG41e@59|NgTs8?@cqZLATD Date: Wed, 7 Dec 2022 07:40:21 +0100 Subject: [PATCH 22/25] typo --- app/templates/md/usage_point_id.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/templates/md/usage_point_id.md b/app/templates/md/usage_point_id.md index 853b4040..644b81b0 100755 --- a/app/templates/md/usage_point_id.md +++ b/app/templates/md/usage_point_id.md @@ -2,7 +2,7 @@ ## Mon contrat -* Les données ont était récupérées via les API d'Enedis et ne sont pas toujours correctement mises à jour par Enedis. +* Les données ont été récupérées via les API d'Enedis et ne sont pas toujours correctement mises à jour par Enedis. Vérifiez bien vos horaires HC/HP et votre date d'activation. Vous pouvez surcharger ces valeurs dans la configuration du point de livraison. 
 Adresse postale : **{{ address | default("Pas de données.") }}**

From a90aae8841db86bf76699b072ed4b73cf471a96e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?=
Date: Wed, 7 Dec 2022 13:24:18 +0100
Subject: [PATCH 23/25] maj doc

---
 README.md       | 36 +++++++++++++++++++++++++++++++++---
 dependencies.py |  1 -
 2 files changed, 33 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index c0099a22..e0228902 100755
--- a/README.md
+++ b/README.md
@@ -307,11 +307,41 @@ Pour ce connecter au docker en bash :
 make bash
 ````
+## F.A.Q
+
+### Que faire si vous rencontrez des erreurs SQL au démarrage ?
+
+Le plus simple est de supprimer le fichier cache.db et de relancer l'intégration, mais attention, vous allez perdre tout l'historique dans le cache.
+Il est cependant possible de le récupérer via la procédure ci-dessous, en renommant votre fichier de cache actuel en enedisgateway.db.
+
+### Comment migrer de EnedisGateway2MQTT vers MyElectricalData ?
+
+Pour migrer proprement depuis EnedisGateway2MQTT et avant de lancer la migration vers une version >= 0.8.0, merci de respecter cette procédure :
+- Arrêter l'intégration
+- Sauvegarder le fichier enedisgateway.db (au cas où)
+- Renommer l'actuel en enedisgateway.db.wait
+- Migrer en 0.8.X (Attention, le fichier de config a changé, vous pouvez reprendre l'exemple [ici](https://github.com/m4dm4rtig4n/myelectricaldata/blob/master/config.exemple.yaml))
+- Démarrer en 0.8.X pour initialiser le nouveau cache.
+- Arrêter l'intégration.
+- Renommer le enedisgateway.db.wait en enedisgateway.db
+- Relancer l'intégration : elle va migrer les anciennes données de enedisgateway.db vers le cache.db (visible dans les logs)
+
+Pour ceux qui auraient eu des soucis lors de la migration et souhaitent récupérer leurs anciennes données en cache :
+- Arrêter l'intégration
+- Supprimer le cache.db
+- Démarrer l'intégration pour initialiser correctement le cache.db.
+- Arrêter l'intégration
+- Reprendre le backup (ou le enedisgateway.db.migrate) et le positionner au même endroit que le cache.db avec le nom enedisgateway.db
+- Lancer l'intégration en v0.8.X
+- L'import de enedisgateway.db vers cache.db se fera au lancement
+- Le fichier enedisgateway.db sera renommé en enedisgateway.db.migrate.
+
 ## Roadmap
-- Gestion du **DJU18** pour une meilleur estimation de l'évolution de la votre consommation.
-- Ajout d'un connecteur PostgreSQL / MariaDB
-- [Remonter la puissance max](https://github.com/m4dm4rtig4n/enedisgateway2mqtt/issues/66)
+- Intégrer Tempo.
+- Gestion du **DJU18** pour une meilleure estimation de l'évolution de votre consommation.
+- Ajout d'un connecteur PostgreSQL / MariaDB.
+- [Remonter la puissance max](https://github.com/m4dm4rtig4n/enedisgateway2mqtt/issues/66).
## Change log: diff --git a/dependencies.py b/dependencies.py index d59796a9..75f5c7f9 100644 --- a/dependencies.py +++ b/dependencies.py @@ -160,7 +160,6 @@ def create_release(prerelease=False): os.system(f"git tag -d {version}") app.LOG.log(" => Success") app.LOG.log(f"Delete tag {version} on remote") - print(f"git push --delete origin {version}") os.system(f"git push --delete origin {version}") app.LOG.log(" => Success") From 37e2e5a37c7a2087e30a42e097eedeb3853b20fb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?= Date: Wed, 7 Dec 2022 23:26:31 +0100 Subject: [PATCH 24/25] fix https://github.com/m4dm4rtig4n/myelectricaldata/issues/180 --- app/models/database.py | 3 +++ app/models/query_daily.py | 2 ++ 2 files changed, 5 insertions(+) diff --git a/app/models/database.py b/app/models/database.py index 8c6bec8c..8b89e3ae 100644 --- a/app/models/database.py +++ b/app/models/database.py @@ -697,6 +697,7 @@ def daily_fail_increment(self, usage_point_id, date, measurement_direction="cons fail_count = 0 else: blacklist = 0 + daily.id = unique_id daily.usage_point_id = usage_point_id daily.date = date daily.value = 0 @@ -706,6 +707,7 @@ def daily_fail_increment(self, usage_point_id, date, measurement_direction="cons fail_count = 0 self.session.add( table( + id=unique_id, usage_point_id=usage_point_id, date=date, value=0, @@ -790,6 +792,7 @@ def insert_daily(self, usage_point_id, date, value, blacklist=0, fail_count=0, .join(relation) .where(table.id == unique_id)) daily = self.session.scalars(query).one_or_none() + LOG.debug(query.compile(compile_kwargs={"literal_binds": True})) if daily is not None: daily.id = unique_id daily.usage_point_id = usage_point_id diff --git a/app/models/query_daily.py b/app/models/query_daily.py index daeb5f2f..831bab41 100644 --- a/app/models/query_daily.py +++ b/app/models/query_daily.py @@ -60,6 +60,8 @@ def run(self, begin, end): else: app.LOG.log(f" => Chargement des données depuis MyElectricalData {begin_str} => {end_str}") data = Query(endpoint=f"{self.url}/{endpoint}/", headers=self.headers).get() + from pprint import pprint + pprint(data.text) blacklist = 0 if hasattr(data, "status_code"): if data.status_code == 200: From cd334613b1ecfa321cbde4a1409b81fbc5a7ab23 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?= Date: Wed, 7 Dec 2022 23:42:44 +0100 Subject: [PATCH 25/25] fix https://github.com/m4dm4rtig4n/myelectricaldata/issues/180 --- app/models/database.py | 1 - app/models/query_daily.py | 2 -- 2 files changed, 3 deletions(-) diff --git a/app/models/database.py b/app/models/database.py index 8b89e3ae..ededd6ea 100644 --- a/app/models/database.py +++ b/app/models/database.py @@ -942,7 +942,6 @@ def get_detail(self, usage_point_id, begin, end, measurement_direction="consumpt time_delta = abs(int((begin - end).total_seconds() / 60)) total_internal = 0 for query in query_result: - # print(query) total_internal = total_internal + query.interval total_time = abs(total_internal - time_delta) if total_time > 300: diff --git a/app/models/query_daily.py b/app/models/query_daily.py index 831bab41..daeb5f2f 100644 --- a/app/models/query_daily.py +++ b/app/models/query_daily.py @@ -60,8 +60,6 @@ def run(self, begin, end): else: app.LOG.log(f" => Chargement des données depuis MyElectricalData {begin_str} => {end_str}") data = Query(endpoint=f"{self.url}/{endpoint}/", headers=self.headers).get() - from pprint import pprint - pprint(data.text) blacklist = 0 if hasattr(data, "status_code"): if data.status_code == 200:
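
A closing note on the schema change in patch 21 (issue #143): the measurement tables (consumption_daily, consumption_detail, production_daily, production_detail) switch from auto-incremented integer primary keys to a deterministic string id derived from the usage point and the measurement date. A minimal sketch of the derivation; the project computes the digest inline in app/models/database.py, the standalone helper and the usage point id below are purely illustrative:

```python
import hashlib

def unique_id(usage_point_id: str, date: str) -> str:
    # Same derivation as in app/models/database.py: the MD5 of
    # "<usage_point_id>/<date>" yields a stable string key, so a given
    # (usage point, date) pair always maps to the same primary-key value.
    return hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest()

# Deterministic by construction: re-importing the same day can only
# overwrite the existing row, never create a duplicate.
assert unique_id("00000000000000", "2022-12-07") == unique_id("00000000000000", "2022-12-07")
```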
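
With a deterministic id, the lookup, insert, blacklist and fail-count methods touched by patch 21 all collapse into the same select-then-update-or-add pattern on `table.id`, instead of filtering on `usage_point_id` and `date` separately. A condensed sketch of that pattern, assuming a SQLAlchemy 1.4+ session and a mapped class shaped like `ConsumptionDaily`; the real methods also join the usage-point relation and carry blacklist/fail_count fields, which are omitted here:

```python
from sqlalchemy import select

def upsert_daily(session, table, usage_point_id, date, value):
    uid = unique_id(usage_point_id, date)  # helper sketched above
    row = session.scalars(select(table).where(table.id == uid)).one_or_none()
    if row is not None:
        # A measurement for that day already exists: update it in place.
        row.value = value
    else:
        # First import for that day: create the row under its stable id.
        session.add(table(id=uid, usage_point_id=usage_point_id,
                          date=date, value=value))
```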
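
Earlier in the series, the hunk touching `write()` in app/models/influxdb.py adds `self.retention == 0` as an escape hatch: a bucket with no retention keeps data forever, so every point is written, while points older than the retention window are skipped since InfluxDB would expire them immediately. A sketch of that predicate, with `retention` expressed in seconds as the surrounding code implies (it divides by 60 / 60 / 24 to log a day count); the function name is an assumption for illustration:

```python
from datetime import datetime, timedelta

def should_write(retention_seconds: int, point_date: datetime) -> bool:
    # retention == 0 means the bucket keeps data forever: always write.
    if retention_seconds == 0:
        return True
    # Otherwise only keep points newer than (now - retention); anything
    # older would be dropped by the InfluxDB retention policy anyway.
    date_max = datetime.now() - timedelta(seconds=retention_seconds)
    return point_date.replace(tzinfo=None) > date_max.replace(tzinfo=None)
```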