cache addresses & contracts
m4dm4rtig4n committed Oct 6, 2021
1 parent 34d4005 commit 3fab84b
Showing 5 changed files with 81 additions and 18 deletions.
36 changes: 30 additions & 6 deletions README.md
@@ -56,7 +56,6 @@ If you reach this limit, you will be banned for 24 hours!
See chapter [persistance](#persistance), to reduce API call number.



## Environment variable

| Variable | Information | Mandatory/Default |
@@ -76,20 +75,41 @@ See chapter [persistance](#persistance), to reduce API call number.
| HA_AUTODISCOVERY | Enable auto-discovery | False |
| HA_AUTODISCOVERY_PREFIX | Home Assistant auto discovery prefix | homeassistant |
| BASE_PRICE | Price of kWh in base plan | 0 |
-| CYCLE | Data refresh cycle (3600s minimum) | 3600 |
+| CYCLE | Data refresh cycle (12h minimum) | 43200 |
| ADDRESSES | Get all addresses information | False |
| FORCE_REFRESH | Force refresh all data (wipe all cached data) | False |

*Why is there no calculation for the HC / HP ?*

-The HC / HP calculations require a lot of API calls and the limit will be reached very quickly
-
-> Need database => Roadmap
+The HC / HP calculations require a lot of API calls and the limit will be reached very quickly.
+> This feature will be added soon.
-## Persistance
+## Cache

Since v0.3, Enedis Gateway uses a SQLite database to store all data and reduce the number of API calls.
Don't forget to mount /data to keep database persistence!!
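
For reference, a minimal sketch of what opening this cache can look like. The database path and column names below are assumptions; only the three-value layout (PDL, JSON payload, query counter) is taken from the queries in this commit:

```python
import sqlite3

# Sketch only: "/data/cache.db" and the column names are assumptions.
# The commit itself only shows rows holding a PDL, a JSON payload and
# a query counter used to decide when to refresh.
con = sqlite3.connect("/data/cache.db")
cur = con.cursor()
for table in ("contracts", "addresses"):
    cur.execute(
        f"CREATE TABLE IF NOT EXISTS {table} ("
        "pdl TEXT PRIMARY KEY, json TEXT, count INTEGER)"
    )
con.commit()
```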

### Lifecycle

| Data type | Information | Refresh after |
|:---------------:|:---------------|:-----:|
| contracts | All contract information | 7 runs |
| addresses | All contact details | 7 runs |
| consumption | Daily consumption | never |
| production | Daily production | never |

If you want to force a refresh of all data, you can set the environment variable "**FORCE_REFRESH**" to "**True**".

**WARNING: this parameter wipes all data (addresses, contracts, consumption, production) and generates a lot of API calls (don't forget the [Enedis Gateway limit](#Enedis Gateway limit))**

> Don't forget that it takes several days to recover consumption/production in detail mode.
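
The lifecycle above boils down to a read counter stored next to the cached payload. Below is a sketch of the pattern visible in app/contract.py and app/addresses.py in this commit; the helper name and the generic `table` / `fetch_from_api` parameters are illustrative, not part of the project:

```python
import json

def get_cached(con, cur, table, pdl, fetch_from_api, force_refresh_count=7):
    # Serve `table` from the SQLite cache until it has been read
    # `force_refresh_count` times (the "7 runs" above), then re-query
    # the API and reset the counter.
    cur.execute(f"SELECT * FROM {table} WHERE pdl = ?", [pdl])
    row = cur.fetchone()
    if row is None:
        payload = fetch_from_api()        # first run: nothing cached yet
        count = 0
    else:
        count = row[2]
        if count >= force_refresh_count:
            payload = fetch_from_api()    # counter exhausted: refresh cache
            count = 0
        else:
            payload = json.loads(row[1])  # cache hit
            count += 1
    cur.execute(f"INSERT OR REPLACE INTO {table} VALUES (?,?,?)",
                [pdl, json.dumps(payload), count])
    con.commit()
    return payload
```
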
### Blacklist

Sometimes there are holes in the Enedis consumption records. So I set up a blacklist system for certain dates.

If a date still returns no information after 7 tries (7 x CYCLE), this date is blacklisted and will no longer generate an API call.
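
As an illustration only (the blacklist table and column names below are hypothetical, not taken from the actual schema), the logic amounts to counting failures per date and skipping the API call once the threshold is reached:

```python
MAX_TRIES = 7  # matches the "7 tries (7 x CYCLE)" rule described above

def should_query(cur, pdl, date):
    # Hypothetical schema: blacklist(pdl TEXT, date TEXT, fail_count INTEGER,
    # UNIQUE(pdl, date)). Skip the API call once a date failed MAX_TRIES times.
    cur.execute("SELECT fail_count FROM blacklist WHERE pdl = ? AND date = ?",
                [pdl, date])
    row = cur.fetchone()
    return row is None or row[0] < MAX_TRIES

def record_failure(con, cur, pdl, date):
    # Increment the failure counter for this date.
    cur.execute("SELECT fail_count FROM blacklist WHERE pdl = ? AND date = ?",
                [pdl, date])
    row = cur.fetchone()
    fails = 1 if row is None else row[0] + 1
    cur.execute("INSERT OR REPLACE INTO blacklist VALUES (?,?,?)",
                [pdl, date, fails])
    con.commit()
```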

## Usage :

```
@@ -170,6 +190,10 @@ volumes:

## Change log:

### [0.4.1] - 2021-10-06

- Cache addresses & contracts data.

### [0.4.0] - 2021-10-05

- Switch locale to fr_FR.UTF8 (french date format)
2 changes: 1 addition & 1 deletion VERSION
@@ -1 +1 @@
-0.4.1-dev
+0.4.1
11 changes: 7 additions & 4 deletions app/addresses.py
@@ -26,7 +26,6 @@ def queryApi(url, headers, data, count=0):
        "usage_point_id": str(pdl),
    }

-    # cur.execute(f"DELETE FROM addresses WHERE pdl = '{pdl}'")
    query = f"SELECT * FROM addresses WHERE pdl = '{pdl}'"
    cur.execute(query)
    query_result = cur.fetchone()
@@ -35,12 +34,16 @@ def queryApi(url, headers, data, count=0):
        addresses = queryApi(url, headers, data)
    else:
        count = query_result[2]
-        if count > main.force_refresh_count:
+        if count >= main.force_refresh_count:
            f.log(" => Query API (Refresh Cache)")
-            addresses = queryApi(url, headers, data, count)
+            addresses = queryApi(url, headers, data, 0)
        else:
-            f.log(f" => Query Cache (refresh in {count} try)")
+            f.log(f" => Query Cache (refresh in {main.force_refresh_count-count} try)")
            addresses = json.loads(query_result[1])
+            new_count = count + 1
+            query = f"INSERT OR REPLACE INTO addresses VALUES (?,?,?)"
+            cur.execute(query, [pdl, json.dumps(addresses), new_count])
+            con.commit()

    if not "customer" in addresses:
        f.publish(client, f"{pdl}/consumption/current_year/error", str(1))
33 changes: 31 additions & 2 deletions app/contract.py
@@ -1,12 +1,21 @@
import requests
import json
from dateutil.relativedelta import *
+from pprint import pprint

from importlib import import_module
main = import_module("main")
f = import_module("function")

-def getContract(client):
+def getContract(client, con, cur):
+
+    def queryApi(url, headers, data, count=0):
+        contract = requests.request("POST", url=f"{url}", headers=headers, data=json.dumps(data)).json()
+        query = f"INSERT OR REPLACE INTO contracts VALUES (?,?,?)"
+        cur.execute(query, [pdl, json.dumps(contract), count])
+        con.commit()
+        return contract
+
    pdl = main.pdl
    headers = main.headers
    url = main.url
@@ -19,7 +28,27 @@ def getContract(client):
        "type": "contracts",
        "usage_point_id": str(pdl),
    }
-    contract = requests.request("POST", url=f"{url}", headers=headers, data=json.dumps(data)).json()
+
+
+    query = f"SELECT * FROM contracts WHERE pdl = '{pdl}'"
+    cur.execute(query)
+    query_result = cur.fetchone()
+    if query_result is None:
+        f.log(" => Query API")
+        contract = queryApi(url, headers, data)
+    else:
+        count = query_result[2]
+        if count >= main.force_refresh_count:
+            f.log(" => Query API (Refresh Cache)")
+            contract = queryApi(url, headers, data, 0)
+        else:
+            f.log(f" => Query Cache (refresh in {main.force_refresh_count-count} try)")
+            contract = json.loads(query_result[1])
+            new_count = count + 1
+            query = f"INSERT OR REPLACE INTO contracts VALUES (?,?,?)"
+            cur.execute(query, [pdl, json.dumps(contract), new_count])
+            con.commit()

    if "customer" in contract:
        customer = contract["customer"]
        f.publish(client, f"{pdl}/customer_id", str(customer["customer_id"]))
17 changes: 12 additions & 5 deletions app/main.py
@@ -130,10 +130,17 @@
# CYCLE
if "CYCLE" in os.environ:
    cycle = int(os.environ['CYCLE'])
-    if cycle < 3600:
-        cycle = 3600
+    if cycle < 43200:
+        cycle = 43200
else:
-    cycle = 86400
+    cycle = 43200
+
+########################################################################################################################
+# FORCE_REFRESH
+if "FORCE_REFRESH" in os.environ:
+    force_refresh = bool(strtobool(os.environ['FORCE_REFRESH']))
+else:
+    force_refresh = True

api_no_result = []

@@ -198,7 +205,7 @@ def run():

    # Check database structure
    try:
-        cur.execute("INSERT OR REPLACE INTO addresses VALUES (?,?)", [0, 0, 0])
+        cur.execute("INSERT OR REPLACE INTO addresses VALUES (?,?,?)", [0, 0, 0])
        cur.execute("INSERT OR REPLACE INTO contracts VALUES (?,?,?)", [0, 0, 0])
        cur.execute("INSERT OR REPLACE INTO consumption_daily VALUES (?,?,?,?)", [0, '1970-01-01', 0, 0])
        cur.execute("INSERT OR REPLACE INTO production_daily VALUES (?,?,?,?)", [0, '1970-01-01', 0, 0])
@@ -218,7 +225,7 @@ def run():

    f.log("####################################################################################")
    f.log("Get contract :")
-    contract = cont.getContract(client)
+    contract = cont.getContract(client, con, cur)
    if "error" in contract:
        f.publish(client, f"error", str(1))
        for key, data in contract["errorMsg"].items():