Merge pull request #6 from robhammond/solar-updates
Merge in Solar updates
robhammond committed Sep 14, 2023
2 parents fc67140 + 0562cb1 commit 4c58aba
Showing 27 changed files with 1,137 additions and 109 deletions.
4 changes: 2 additions & 2 deletions app/tasks/fetch-n3rgy.py
@@ -7,7 +7,7 @@
import sqlite3
import os
from datetime import datetime, timedelta
from update_rates import refresh_db
from update_rates import update_import

HA_DB_URL = os.getenv('HA_DB_URL')

@@ -86,7 +86,7 @@ def fetch_usage(start_date=None, end_date=None):
conn.commit()

conn.close()
refresh_db()
update_import()


if __name__ == "__main__":
88 changes: 88 additions & 0 deletions app/tasks/n3rgy-production.py
@@ -0,0 +1,88 @@
"""
Fetch the past 7 days of data from n3rgy.
Requires the Consumer API endpoint to be enabled at https://data.n3rgy.com/consumer/home
"""
import requests
import sqlite3
import os
import json
from datetime import datetime, timedelta
from update_rates import update_export

HA_DB_URL = os.getenv('HA_DB_URL')


def fetch_production(start_date=None, end_date=None):
"fetches electricity production from n3rgy endpoint"
if not start_date:
today = datetime.now()
last_week = today - timedelta(days=7)
start_date = last_week.strftime("%Y%m%d")

if not end_date:
today = datetime.now()
tomorrow = today + timedelta(days=1)
end_date = tomorrow.strftime("%Y%m%d")

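    # production endpoint; the Consumer API must already be enabled at data.n3rgy.com (see module docstring)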
endpoint_url = "https://consumer-api.data.n3rgy.com/electricity/production/1"
params = {
"start": start_date,
"end": end_date,
"output": "json",
}
conn = sqlite3.connect(HA_DB_URL)
c = conn.cursor()
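    # look up the stored n3rgy Authorization header in the Credentials table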
sql = """
SELECT
value
FROM Credentials c
JOIN Entity e ON
e.id = c.entityId
WHERE key='auth_header'
AND e.entity_name = 'n3rgy'
"""
try:
res = c.execute(sql).fetchone()
auth_header = res[0]
except Exception as e:
raise Exception("Authorization header not found in DB")

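    # '12345' is the unset placeholder value, so refuse to run until a real header is configured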
if auth_header == '12345':
raise Exception("Please define the n3rgy Authorization header - see README.md for more information")

with requests.get(endpoint_url, params=params, headers={"Authorization": auth_header}) as res:
res_json = res.json()
print(json.dumps(res_json))

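        # n3rgy returns one reading per half-hour window; write each back to the matching electricity row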
for value in res_json["values"]:
ts = value["timestamp"]

            # timestamps are returned in UTC and carry no timezone information
            dt = datetime.strptime(ts, "%Y-%m-%d %H:%M")
            dt_start = dt - timedelta(minutes=30)

# print(dt)

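            # UPDATE rather than INSERT: a row for this half-hour (source = 'n3rgy') is expected to exist already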
sql = f"""
UPDATE electricity
SET kwh_exported = {value["value"]}
WHERE
datetime = '{dt}'
AND datetime_start = '{dt_start}'
AND granularity = '{res_json["granularity"]}'
and source = 'n3rgy'
"""
try:
c.execute(sql)
except Exception as e:
print(f"Error inserting: {e}")
pass
conn.commit()

conn.close()
update_export()


if __name__ == "__main__":
fetch_production()
4 changes: 2 additions & 2 deletions app/tasks/octopus-energy.py
@@ -5,7 +5,7 @@
import sqlite3
import os

from update_rates import refresh_db
from update_rates import update_import

HA_DB_URL = os.getenv("HA_DB_URL")
API_ROOT = "https://api.octopus.energy"
@@ -105,7 +105,7 @@ def fetch_usage(start_date=None, end_date=None):
pass

conn.close()
refresh_db()
update_import()
else:
print(f"Error fetching: {res.status_code} HTTP response")
print(request_url)
85 changes: 85 additions & 0 deletions app/tasks/solcast.py
@@ -0,0 +1,85 @@
# Fetch solar generation forecast data from the Solcast API
import base64
import argparse
import hashlib
import hmac
import json
import logging
import os
import sqlite3
from datetime import datetime, timedelta, timezone
from time import sleep
import requests

logging.basicConfig(level=logging.DEBUG)

HA_DB_URL = os.getenv("HA_DB_URL")

API_BASE = "https://api.solcast.com.au/rooftop_sites"


def _get_creds():
conn = sqlite3.connect(HA_DB_URL)
c = conn.cursor()
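    # the Solcast resource_id and api_key are stored against a 'solcast*' entity in the Credentials table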
sql = """
SELECT
key,
value
FROM Credentials c
JOIN Entity e ON
e.id = c.entityId
WHERE
LOWER(e.entity_name) LIKE 'solcast%'
"""
creds = {}
try:
res = c.execute(sql).fetchall()
for r in res:
if r[0] == "resource_id":
creds["resource_id"] = r[1]
if r[0] == "api_key":
creds["api_key"] = r[1]

except Exception as e:
raise Exception("Authorization details not found in DB")

return creds


def get_forecast():
creds = _get_creds()
endpoint_url = f"{API_BASE}/{creds['resource_id']}/forecasts?format=json&api_key={creds['api_key']}"
print(endpoint_url)
with requests.get(endpoint_url) as res:
if res.status_code == 200:
forecast_data = res.json()["forecasts"]
save_forecasts(forecast_data)

def save_forecasts(forecasts):
conn = sqlite3.connect(HA_DB_URL)
c = conn.cursor()

for forecast in forecasts:
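        # tag each row with the UTC fetch time as an integer (YYYYMMDDHHMM)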
update_timestamp = int(datetime.utcnow().strftime('%Y%m%d%H%M'))
query = """
INSERT INTO solcast_forecasts
(pv_estimate, pv_estimate10, pv_estimate90, period_end, period, update_timestamp)
VALUES (?, ?, ?, ?, ?, ?)
"""
c.execute(query, (
forecast['pv_estimate'],
forecast['pv_estimate10'],
forecast['pv_estimate90'],
forecast['period_end'],
forecast['period'],
update_timestamp)
)

conn.commit()
conn.close()

def main():
get_forecast()

if __name__ == "__main__":
main()
153 changes: 151 additions & 2 deletions app/tasks/update_rates.py
@@ -8,11 +8,12 @@
HA_DB_URL = os.getenv("HA_DB_URL")


def refresh_db():
def update_import():
conn = sqlite3.connect(HA_DB_URL)
    conn.row_factory = sqlite3.Row  # enables dict-style access to rows
c = conn.cursor()

# set rates for consumption
sql = """
SELECT *
FROM Electricity
@@ -58,6 +59,7 @@ def refresh_db():
)
AND s.supplier_end IS NULL
)
AND tariff_type = 'import'
"""
suppl = c.execute(sql, (date_str, date_str)).fetchall()

@@ -112,7 +114,7 @@ def refresh_db():
# Use <= for both ends of the range
if dt_start <= actual_dt <= dt_end:
print(f"Matching rate: {rate['rate_type']}") # Debug print statement

sql = f"""
UPDATE
electricity
@@ -135,8 +137,155 @@ def refresh_db():
c.execute(sql, (rate["id"], el["id"]))
conn.commit()
pass

conn.close()


def update_export():
conn = sqlite3.connect(HA_DB_URL)
    conn.row_factory = sqlite3.Row  # enables dict-style access to rows
c = conn.cursor()
# set rates for production
sql = """
SELECT *
FROM Electricity
WHERE
exportRateId IS NULL
AND
datetime_start >= DATE(
strftime('%s', 'now', '-7 days'),
'unixepoch',
'localtime'
)
"""

usage = c.execute(sql).fetchall()

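    # for each half-hour of production that has no export rate yet, find the supplier active on that date and its rates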
for el in usage:
        utc = pytz.timezone("UTC")
        # naive timestamps are treated as UTC; timezone-aware strings already carry their offset
        try:
            dt = utc.localize(datetime.strptime(el["datetime_start"], "%Y-%m-%d %H:%M:%S"))
        except ValueError:
            dt = datetime.strptime(el["datetime_start"], "%Y-%m-%d %H:%M:%S%z")

hours = dt.strftime("%H")
mins = dt.strftime("%M")
el_time = f"{hours}:{mins}"
date_str = dt.strftime("%Y-%m-%d")

print(f"el_time {el_time} - {date_str}")

        sql = """
            SELECT
                id
            FROM
                Supplier s
            WHERE
                (
                    (
                        DATE(?) BETWEEN DATE(
                            s.supplier_start / 1000,
                            'unixepoch',
                            'localtime'
                        ) AND DATE(s.supplier_end / 1000, 'unixepoch', 'localtime')
                    )
                    OR (
                        DATE(?) >= DATE(
                            s.supplier_start / 1000,
                            'unixepoch',
                            'localtime'
                        )
                        AND s.supplier_end IS NULL
                    )
                )
                AND tariff_type = 'export'
        """
try:
suppl = c.execute(sql, (date_str, date_str)).fetchall()

if suppl[0]:
sql = f"""
SELECT *
FROM rates
WHERE
supplierId = {suppl[0]["id"]}
"""
rates = c.execute(sql).fetchall()

# Define time zones
utc = pytz.timezone("UTC")
bst = pytz.timezone("Europe/London")

for rate in rates:
if rate["rate_type"] != "fixed":
# Get the actual datetime in UTC
actual_dt_utc = datetime.strptime(el["datetime_start"], "%Y-%m-%d %H:%M:%S")
actual_dt_utc = utc.localize(actual_dt_utc)

# Convert to local time (BST)
actual_dt = actual_dt_utc.astimezone(bst)

# extract hour and minute of rate window
regex = r"^(\d\d):(\d\d)$"
start = re.search(regex, rate["start_time"])
end = re.search(regex, rate["end_time"])
start_hour = int(start.group(1))
start_min = int(start.group(2))
end_hour = int(end.group(1))
end_min = int(end.group(2))

# build dt_start and dt_end based on actual_dt (in BST)
dt_start = actual_dt.replace(hour=start_hour, minute=start_min, second=0, microsecond=0)
dt_end = actual_dt.replace(hour=end_hour, minute=end_min, second=0, microsecond=0)

# If end time is earlier than start time, it means the range crosses midnight
if dt_end.time() < dt_start.time():
# Shift dt_start to previous day and dt_end to the next day
dt_start = dt_start - timedelta(days=1)
dt_end = dt_end + timedelta(days=1)
else:
# This might be needed for cases where the end time is exactly at midnight
dt_end = dt_end - timedelta(seconds=1)

# Debug print statements
# print(f"rate: {rate['rate_type']}, start_time: {rate['start_time']}, end_time: {rate['end_time']}")
# print(f"actual_dt: {actual_dt}, dt_start: {dt_start}, dt_end: {dt_end}")

# Use <= for both ends of the range
if dt_start <= actual_dt <= dt_end:
print(f"Matching rate: {rate['rate_type']}") # Debug print statement

sql = f"""
UPDATE
electricity
SET
exportRateId = ?
WHERE
id = ?
"""
c.execute(sql, (rate["id"], el["id"]))
conn.commit()
else:
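                        # fixed-rate tariffs have no time window, so the rate applies to every half-hour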
sql = f"""
UPDATE
electricity
SET
exportRateId = ?
WHERE
id = ?
"""
c.execute(sql, (rate["id"], el["id"]))
conn.commit()
pass
except Exception as e:
print(f"Error: {e}")
pass
conn.close()


def refresh_db():
update_import()
update_export()


if __name__ == "__main__":
refresh_db()