Commit 0.5.6
m4dm4rtig4n committed Nov 1, 2021
1 parent 5dd708e commit 691b88c
Showing 6 changed files with 83 additions and 13 deletions.
49 changes: 44 additions & 5 deletions .github/workflows/build_push_docker.yml
@@ -55,29 +55,68 @@ jobs:
with:
args: '**EnedisGateway2MQTT** : Version **${{ steps.vars.outputs.version }}** is in building state...'

##################################################################################################################
# VERSION
- name: Build and push
uses: docker/build-push-action@v2
if: steps.check-tag.outputs.dev == 'false'
# if: steps.check-tag.outputs.dev == 'false'
with:
context: .
platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7
push: true
tags: |
m4dm4rtig4n/enedisgateway2mqtt:latest
m4dm4rtig4n/enedisgateway2mqtt:${{ steps.vars.outputs.version }}
m4dm4rtig4n/enedisgateway2mqtt:latest-dev
- name: Discord notification
# if: steps.check-tag.outputs.dev == 'false'
env:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }}
uses: Ilshidur/action-discord@master
with:
args: '**EnedisGateway2MQTT** : **${{ steps.vars.outputs.version }}** image version is up to date'

##################################################################################################################
# LATEST-DEV
- name: Build and push
uses: docker/build-push-action@v2
if: steps.check-tag.outputs.dev == 'true'
# if: steps.check-tag.outputs.dev == 'true'
with:
context: .
platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7
push: true
tags: |
m4dm4rtig4n/enedisgateway2mqtt:${{ steps.vars.outputs.version }}
m4dm4rtig4n/enedisgateway2mqtt:latest-dev
- name: Discord notification
# if: steps.check-tag.outputs.dev == 'true'
env:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }}
uses: Ilshidur/action-discord@master
with:
args: '**EnedisGateway2MQTT** : **latest-dev** image version is up to date'

##################################################################################################################
# LATEST
- name: Build and push
uses: docker/build-push-action@v2
if: steps.check-tag.outputs.dev == 'false'
with:
context: .
platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7
push: true
tags: |
m4dm4rtig4n/enedisgateway2mqtt:latest
- name: Discord notification
if: steps.check-tag.outputs.dev == 'false'
env:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }}
uses: Ilshidur/action-discord@master
with:
args: '**EnedisGateway2MQTT** : **Latest** image version is up to date'

##################################################################################################################
# FINISH
- name: Discord notification
# if: steps.check-tag.outputs.dev == 'false'
env:
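The workflow section above splits the single build-and-push step into three blocks (VERSION, LATEST-DEV, LATEST), each meant to be gated on `steps.check-tag.outputs.dev`. A minimal Python sketch of the intended tag-selection rule, assuming a dev build is identified by a `-dev` suffix; the helper name and suffix convention are illustrative, not part of the workflow:

```python
def docker_tags(version, repo="m4dm4rtig4n/enedisgateway2mqtt"):
    """Return the Docker tags a build of `version` should push.

    Sketch of the gating in build_push_docker.yml: a dev build refreshes the
    versioned tag and latest-dev, a release build refreshes the versioned tag
    and latest. The "-dev" suffix check stands in for steps.check-tag.outputs.dev.
    """
    tags = [f"{repo}:{version}"]
    if version.endswith("-dev"):
        tags.append(f"{repo}:latest-dev")
    else:
        tags.append(f"{repo}:latest")
    return tags


print(docker_tags("0.5.6"))      # [...:0.5.6, ...:latest]
print(docker_tags("0.5.6-dev"))  # [...:0.5.6-dev, ...:latest-dev]
```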
6 changes: 6 additions & 0 deletions README.md
@@ -266,6 +266,12 @@ make start

## Change log:

### [0.5.6] - 2021-11-01

- Reduce API calls
- Add more logging
- Fix bugs

### [0.5.5] - 2021-11-01

- Fix log when MQTT connection fails:
2 changes: 1 addition & 1 deletion app/VERSION
@@ -1 +1 @@
0.5.5
0.5.6
4 changes: 3 additions & 1 deletion app/detail.py
@@ -284,6 +284,7 @@ def is_between(time, time_range):
elif detail['error_code'] == 2:
f.log(f"Fetch data error detected beetween {dateBegin} / {dateEnded}", "ERROR")
f.log(f" => {detail['description']}", "ERROR")
# cur.execute(f"UPDATE {mode}_detail SET fail = {date_data['fail'] + 1} WHERE pdl = '{pdl}' and date = '{date}'")
else:
f.log(f"API return error beetween {dateBegin} / {dateEnded}", "ERROR")
f.log(f" => {detail['description']}", "ERROR")
@@ -312,7 +313,8 @@ def checkHistoryDetail(cur, con, mode, dateBegin, dateEnded):
query = f"SELECT * FROM {mode}_detail WHERE pdl = '{pdl}' AND date BETWEEN '{dateBegin}' AND '{dateEnded}' ORDER BY date"
cur.execute(query)
query_result = cur.fetchall()
if len(query_result) < 160:
# if len(query_result) < 160:
if not query_result:
result = {
"missing_data": True
}
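The checkHistoryDetail change above relaxes the missing-data test: instead of re-fetching any day with fewer than 160 half-hour points, a day now only counts as missing when it has no rows at all, which is part of how this release reduces API calls. A minimal sketch of the new check, assuming the same {mode}_detail layout; the function name, parameterized query and return shape are illustrative:

```python
def has_missing_detail(cur, mode, pdl, date_begin, date_end):
    """Report whether any detail rows exist for the period.

    0.5.6 behaviour: only an empty result set triggers a new API fetch;
    partially filled days are left alone. The {mode}_detail table and its
    pdl/date columns come from the diff, everything else is an assumption.
    """
    cur.execute(
        f"SELECT date FROM {mode}_detail "
        "WHERE pdl = ? AND date BETWEEN ? AND ? ORDER BY date",
        (pdl, date_begin, date_end),
    )
    rows = cur.fetchall()
    if not rows:
        return {"missing_data": True}
    return {"missing_data": False, "count": len(rows)}
```

Using query parameters instead of string interpolation is a side benefit of the sketch, not something the diff itself does.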
2 changes: 2 additions & 0 deletions app/function.py
@@ -44,6 +44,8 @@ def on_message(client, userdata, msg):

def logLine():
log("####################################################################################")
def logLine1():
log("------------------------------------------------------------------------------------")


def log(msg, level="INFO "):
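function.py gains logLine1(), a lighter dash separator next to the existing hash separator logLine(); main.py (below) wraps the new "SUCCESS : ... imported" messages with it. A short usage sketch, with log() reduced to a plain print for illustration:

```python
def log(msg, level="INFO "):
    # Simplified stand-in for app/function.py's log(); the real one does more.
    print(f"{level}: {msg}")

def logLine():
    log("####################################################################################")

def logLine1():
    log("------------------------------------------------------------------------------------")

# How main.py frames an import step in 0.5.6:
logLine1()
log(" SUCCESS : Consumption daily imported")
logLine1()
```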
33 changes: 27 additions & 6 deletions app/main.py
@@ -281,6 +281,12 @@ def init_database(cur):
fail INTEGER)''')
cur.execute('''CREATE UNIQUE INDEX idx_date_consumption_detail
ON consumption_detail (date)''')
# cur.execute('''CREATE TABLE consumption_detail_try (
# pdl TEXT NOT NULL,
# date TEXT NOT NULL,
# try INTEGER)''')
# cur.execute('''CREATE UNIQUE INDEX idx_date_consumption_detail_try
# ON consumption_detail_try (date)''')
## PRODUCTION
# DAILY
cur.execute('''CREATE TABLE production_daily (
@@ -420,7 +426,9 @@ def run():
f.logLine()
f.log("Get Consumption :")
ha_discovery_consumption = day.getDaily(cur, con, client, "consumption", last_activation_date)
# pprint(ha_discovery_consumption)
f.logLine1()
f.log(" SUCCESS : Consumption daily imported")
f.logLine1()
if ha_autodiscovery == True:
f.logLine()
f.log("Home Assistant auto-discovery (Consumption) :")
@@ -446,12 +454,15 @@
ha.haAutodiscovery(client=client, type="sensor", pdl=pdl, name=name, value=sensor_data['value'],
attributes=attributes, unit_of_meas=unit_of_meas,
device_class=device_class, state_class=state_class)

f.log(" => HA Sensor updated")
# f.logLine()

if get_consumption_detail == True:
f.log("Get Consumption Detail:")
ha_discovery_consumption = detail.getDetail(cur, con, client, "consumption", last_activation_date, offpeak_hours)
f.logLine1()
f.log(" SUCCESS : Consumption detail imported")
f.logLine1()
if ha_autodiscovery == True:
f.logLine()
f.log("Home Assistant auto-discovery (Consumption Detail) :")
@@ -477,13 +488,16 @@
ha.haAutodiscovery(client=client, type="sensor", pdl=pdl, name=name, value=sensor_data['value'],
attributes=attributes, unit_of_meas=unit_of_meas,
device_class=device_class, state_class=state_class)

f.log(" => HA Sensor updated")
# f.logLine()

if get_production == True:
f.logLine()
f.log("Get production :")
ha_discovery_production = day.getDaily(cur, con, client, "production", last_activation_date)
f.logLine1()
f.log(" SUCCESS : Production daily imported")
f.logLine1()
if ha_autodiscovery == True:
f.logLine()
f.log("Home Assistant auto-discovery (Production) :")
@@ -508,12 +522,15 @@
ha.haAutodiscovery(client=client, type="sensor", pdl=pdl, name=name, value=sensor_data['value'],
attributes=attributes, unit_of_meas=unit_of_meas,
device_class=device_class, state_class=state_class)
# f.logLine()
f.log(" => HA Sensor updated")

if get_production_detail == True:
f.logLine()
f.log("Get production Detail:")
ha_discovery_consumption = detail.getDetail(cur, con, client, "production", last_activation_date, offpeak_hours)
f.logLine1()
f.log(" SUCCESS : Production detail imported")
f.logLine1()
if ha_autodiscovery == True:
f.logLine()
f.log("Home Assistant auto-discovery (Production Detail) :")
@@ -539,6 +556,7 @@
ha.haAutodiscovery(client=client, type="sensor", pdl=pdl, name=name, value=sensor_data['value'],
attributes=attributes, unit_of_meas=unit_of_meas,
device_class=device_class, state_class=state_class)
f.log(" => HA Sensor updated")

if card_myenedis == True:
f.logLine()
@@ -567,13 +585,16 @@
value=sensor_data['value'],
attributes=attributes, unit_of_meas=unit_of_meas,
device_class=device_class, state_class=state_class)
f.log(" => Sensor generated")


if influxdb_enable == True:
f.logLine()
f.log("Push data in influxdb")
influx.influxdb_insert(cur, con, influxdb_api)
f.log(" => Data exported")

query = f"SELECT * FROM consumption_daily WHERE pdl == '{pdl}' AND fail > {fail_count} ORDER BY date"
query = f"SELECT - FROM consumption_daily WHERE pdl == '{pdl}' AND fail > {fail_count} ORDER BY date"
rows = con.execute(query)
if rows.fetchone() is not None:
f.logLine()
@@ -582,7 +603,7 @@
for row in rows:
f.log(f"{row[0]} => {row[1]}")

query = f"SELECT * FROM production_daily WHERE pdl == '{pdl}' AND fail > {fail_count} ORDER BY date"
query = f"SELECT - FROM production_daily WHERE pdl == '{pdl}' AND fail > {fail_count} ORDER BY date"
rows = con.execute(query)
if rows.fetchone() is not None:
f.logLine()
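The end of run() (last two hunks above) re-reads consumption_daily and production_daily and logs every date whose fail counter exceeds fail_count. A compact sketch of that reporting step, assuming the same pdl/date/fail columns; the helper name and plain print output are illustrative:

```python
def report_failed_dates(con, pdl, fail_count):
    """Print the dates whose daily import failed more than fail_count times.

    Mirrors the checks at the end of main.run(); the table names and the
    pdl/date/fail columns come from the diff, the helper itself is an assumption.
    """
    for table in ("consumption_daily", "production_daily"):
        rows = con.execute(
            f"SELECT date, fail FROM {table} "
            "WHERE pdl = ? AND fail > ? ORDER BY date",
            (pdl, fail_count),
        ).fetchall()
        if rows:
            print(f"Import failures in {table}:")
            for date, fail in rows:
                print(f"  {date} => {fail} attempts")
```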
