This repository has been archived by the owner on Sep 27, 2023. It is now read-only.

Add support for accessing ISO New England hourly market data (#1307)
dchassin authored Jul 3, 2023
1 parent 7983323 commit a2371af
Showing 6 changed files with 343 additions and 12 deletions.
18 changes: 6 additions & 12 deletions cloud/websites/docs.gridlabd.us/index.html
@@ -1,15 +1,9 @@
<!DOCTYPE HTML />
<HTML>
<SCRIPT LANGUAGE="Javascript" SRC="_defaults.js"></SCRIPT>
<SCRIPT LANGUAGE="JavaScript">
document.writeln('<FRAMESET ROWS="' + (top_panel_height+20) + ',*" BORDER=2>');
console.info(window.location.search)
document.writeln('<FRAME SRC="_topbar.html" NORESIZE="noresize"></FRAME>');
document.writeln('<FRAMESET COLS="320,*" BORDER=3>');
document.writeln('<FRAME NAME="contents" SRC="_contents.html' + window.location.search + '"></FRAME>');
document.writeln('<FRAME NAME="page" SRC="_page.html' + window.location.search + '"></FRAME>');
document.writeln('</FRAMESET>');
document.writeln('</FRAMESET>');
</SCRIPT>

<HEAD>
<META HTTP-EQUIV="refresh" CONTENT="0; url=http://docs.gridlabd.us/index.html?owner=arras-energy&project=gridlabd" />
</HEAD>
<BODY>
Redirecting to <A HREF="http://docs.gridlabd.us/index.html?owner=arras-energy&project=gridlabd">http://docs.gridlabd.us/index.html?owner=arras-energy&project=gridlabd</A>...
</BODY>
</HTML>
1 change: 1 addition & 0 deletions tools/Makefile.mk
@@ -13,6 +13,7 @@ dist_pkgdata_DATA += tools/gridlabd-editor.png
dist_pkgdata_DATA += tools/gridlabd-editor.py
dist_pkgdata_DATA += tools/insights.py
dist_pkgdata_DATA += tools/install.py
dist_pkgdata_DATA += tools/isone.py
dist_pkgdata_DATA += tools/market_data.py
dist_pkgdata_DATA += tools/mdb_info.py
dist_pkgdata_DATA += tools/market_model.py
2 changes: 2 additions & 0 deletions tools/autotest/.gitignore
@@ -0,0 +1,2 @@
test_isone.csv
test_isone_opt.glm
21 changes: 21 additions & 0 deletions tools/autotest/test_isone.glm
@@ -0,0 +1,21 @@
#python -m isone -d=smd/ME -s=2020-03-01 -e=2020-03-01 -c=test_isone.csv -g=test_isone_opt.glm
module tape
{
csv_header_type NAME;
}
#include "test_isone_opt.glm"
object recorder
{
parent isone;
file "test_isone_record.csv";
property "DA_Demand,RT_Demand,DA_LMP,DA_EC,DA_CC,DA_MLC,RT_LMP,RT_EC,RT_CC,RT_MLC,Dry_Bulb,Dew_Point";
interval 1h;
}
clock {
timezone ${ISONE_TIMEZONE};
starttime ${ISONE_STARTDATE};
stoptime ${ISONE_STOPDATE};
}
#ifexists "../test_isone_record.csv"
#on_exit 0 diff ../test_isone_record.csv test_isone_record.csv > gridlabd.diff
#endif
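
For reference, the `#python -m isone ...` comment on the first line of this test shows how the fixture files are regenerated, and the `#on_exit` directive diffs the recorder output against the expected CSV that follows. Below is a minimal sketch of the equivalent data pull done directly with the new `isone` module added in tools/isone.py; it assumes tools/ is importable, that the 2020 ISO-NE workbook can be downloaded or is already cached, and it is an illustration only, not part of the commit.

import isone

# Pull one day of the "smd/ME" sheet, matching the command in test_isone.glm
data = isone.get_data(startdate="2020-03-01", stopdate="2020-03-01", dataset="smd/ME")

# Collapse the (date, hour-ending) index into hourly timestamps and write a
# headerless CSV suitable for the tape player, as the CLI does when -g is given
isone.fix_timestamps(data).to_csv("test_isone.csv", float_format="%.2f", header=False)
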
25 changes: 25 additions & 0 deletions tools/autotest/test_isone_record.csv
@@ -0,0 +1,25 @@
timestamp,DA_Demand,RT_Demand,DA_LMP,DA_EC,DA_CC,DA_MLC,RT_LMP,RT_EC,RT_CC,RT_MLC,Dry_Bulb,Dew_Point
2020-03-01 00:00:00 EST,+847,+1177.36,+20.28,+20.34,-0.17,+0.11,+16.21,+16.07,+0,+0.14,+19,+5
2020-03-01 01:00:00 EST,+840.8,+1152.96,+19.16,+19.22,-0.15,+0.09,+16.33,+16.2,+0,+0.13,+18,+6
2020-03-01 02:00:00 EST,+855.1,+1138.74,+17.52,+17.62,-0.13,+0.03,+15.64,+15.56,+0,+0.08,+18,+5
2020-03-01 03:00:00 EST,+838.6,+1144.65,+17.73,+17.8,-0.14,+0.07,+15.21,+15.14,+0,+0.07,+18,+5
2020-03-01 04:00:00 EST,+852.2,+1167.67,+18.3,+18.38,-0.13,+0.05,+15.26,+15.19,+0,+0.07,+17,+5
2020-03-01 05:00:00 EST,+914.7,+1209.67,+19.12,+19.16,-0.13,+0.09,+13.21,+13.15,+0,+0.06,+17,+5
2020-03-01 06:00:00 EST,+912.1,+1277.46,+18.72,+18.72,-0.07,+0.07,+16.09,+15.94,+0,+0.15,+17,+5
2020-03-01 07:00:00 EST,+1017.2,+1337.71,+18.34,+18.11,-0.04,+0.27,+14.3,+14.1,+0,+0.2,+19,+6
2020-03-01 08:00:00 EST,+1122,+1401.55,+17.95,+17.59,+0,+0.36,+13.48,+13.28,+0,+0.2,+23,+7
2020-03-01 09:00:00 EST,+1027,+1411.46,+16.49,+16.31,-0.02,+0.2,+13.94,+13.74,+0,+0.2,+27,+8
2020-03-01 10:00:00 EST,+999.9,+1438.49,+14.8,+14.83,+0,-0.03,+15.96,+15.78,+0,+0.18,+28,+6
2020-03-01 11:00:00 EST,+956.6,+1425.79,+13.67,+13.79,-0.02,-0.1,+11.46,+11.35,+0,+0.11,+30,+7
2020-03-01 12:00:00 EST,+902.7,+1398.41,+12.82,+13.08,-0.13,-0.13,+12.5,+12.41,+0,+0.09,+32,+8
2020-03-01 13:00:00 EST,+896.3,+1370.73,+12.75,+13.02,-0.12,-0.15,+11.81,+11.75,+0,+0.06,+32,+7
2020-03-01 14:00:00 EST,+883.2,+1348.15,+12.66,+12.97,-0.12,-0.19,+12.61,+12.56,+0,+0.05,+34,+7
2020-03-01 15:00:00 EST,+1013.1,+1383.17,+13.49,+13.68,-0.12,-0.07,+14.08,+13.96,+0,+0.12,+35,+8
2020-03-01 16:00:00 EST,+1117.8,+1460.34,+16.51,+16.48,-0.11,+0.14,+21.35,+21.15,+0,+0.2,+34,+7
2020-03-01 17:00:00 EST,+1206.7,+1531.06,+19.58,+19.61,-0.13,+0.1,+21.93,+21.74,+0,+0.19,+32,+7
2020-03-01 18:00:00 EST,+1239,+1563.54,+25.89,+25.92,-0.18,+0.15,+30.69,+30.34,+0,+0.35,+29,+8
2020-03-01 19:00:00 EST,+1160,+1516.67,+20.82,+20.95,-0.18,+0.05,+24.05,+23.85,+0,+0.2,+27,+8
2020-03-01 20:00:00 EST,+1120.6,+1467.12,+17.55,+17.75,-0.13,-0.07,+21.78,+21.63,+0,+0.15,+26,+8
2020-03-01 21:00:00 EST,+984.8,+1361.46,+16.71,+16.87,-0.14,-0.02,+16.71,+16.57,+0,+0.14,+25,+8
2020-03-01 22:00:00 EST,+942.1,+1264.5,+16.54,+16.69,-0.16,+0.01,+18.14,+17.99,+0,+0.15,+24,+8
2020-03-01 23:00:00 EST,+872.2,+1218.99,+16.79,+16.94,-0.14,-0.01,+25.65,+25.27,+0,+0.38,+22,+8
288 changes: 288 additions & 0 deletions tools/isone.py
@@ -0,0 +1,288 @@
"""ISO New England data access module
Syntax: gridlabd isone OPTIONS ...
Options:
-d|--dataset=DATASET/SUBSET Download data from the specified dataset
(and subset, default is `smd/ISO NE CA`)
-c|--csv=FILENAME Save CSV data to the specified file
(default is /dev/stdout)
-C|--CLASS=CLASSNAME Specify the GLM class name (default is
`isone`)
-e|--end=YEAR|STOPDATE Download data ending on the specified year
or date (last month by default)
-f|--freshen Freshen the data cache
-g|--glm=FILENAME Save GLM data to the specified file
(default is /dev/null)
-l|--list Output a list of available sheets in the dataset
-N|--NAME=OBJECTNAME Specify the GLM object name (default is
`isone`)
-s|--start=YEAR|STARTDATE Download data starting on the specified
year or date (last month by default)
-y|--year[=YEAR] Download data for the specified year
(current year by default)
Currently the only supported dataset is `smd`. The valid subsets depend on the
dataset and year. See https://www.iso-ne.com/isoexpress/web/reports/load-and-demand/-/tree/zone-info
for more information.
"""

import sys, os
import datetime
import pandas
import requests
import traceback

EXECNAME = os.path.splitext(os.path.basename(sys.argv[0]))[0]
DATASET = "smd"
CSVFILE = "/dev/stdout"
GLMFILE = None
CLASSNAME = "isone"
OBJNAME = "isone"
STARTDATE = None
STOPDATE = None
CACHEDIR = "/usr/local/opt/gridlabd/current/share/gridlabd/isone.d/" \
if not "GLD_ETC" in os.environ else os.path.join(os.environ['GLD_ETC'],"isone")
DATEFORMAT = "%Y-%m-%d"
FLOATFORMAT = "%.2f"
FRESHEN = False
VERBOSE = False
QUIET = False
DEBUG = False

E_OK = 0
E_SYNTAX = 1
E_INVALID = 2
E_FAILED = 3

def error(msg,code=None):
if not QUIET:
print(f"ERROR [{EXECNAME}]: {msg}",flush=True,file=sys.stderr)
if not code is None:
exit(code)

def verbose(msg):
if VERBOSE:
print(f"VERBOSE [{EXECNAME}]: {msg}",flush=True,file=sys.stderr)

def get_year(year=None,dataset=None):
"""Get data from a year
:param year: int - the starting date in DATEFORMAT
:return: pandas.DataFrame - the requested dataset
:return: list - list of available subsets (if dataset is None)
"""
if year == None:
year = datetime.datetime.now().year
verbose(f"setting to default year '{year}'")
assert(type(year)==int)
if dataset == None:
dataset = DATASET
verbose(f"setting to default dataset '{dataset}'")
specs = dataset.split("/")
cachefile = os.path.join(CACHEDIR,f"{specs[0]}_{year}.xlsx")
if FRESHEN or not os.path.exists(cachefile):
verbose(f"creating cache directory '{CACHEDIR}'")
os.makedirs(CACHEDIR,exist_ok=True)
try:
url = f"https://www.iso-ne.com/static-assets/documents/{year}/02/{year}_{specs[0]}_hourly.xlsx"
verbose(f"downloading data from '{url}'")
req = requests.get(url)
if req.status_code == 200:
verbose(f"saving data to '{cachefile}'")
with open(cachefile,"wb") as xls:
xls.write(req.content)
else:
error(f"unable to download data from '{url}' (HTTP code {req.status_code})",E_FAILED)
except Exception as err:
error(f"unable to get data from '{url}' ({err})",E_FAILED)
if len(specs) > 1:
verbose(f"loading '{specs[1]}' from '{cachefile}'")
return pandas.read_excel(cachefile,sheet_name=specs[1],index_col=[0,1],parse_dates=[0])
else:
verbose(f"loading data from '{cachefile}'")
return pandas.read_excel(cachefile,sheet_name=None)

def get_data(startdate=None,stopdate=None,dataset=None):
"""Get data from a date range
:param startdate: str - the starting date in DATEFORMAT
:param stopdate: str - the stopping date in DATEFORMAT
:param dataset: str - the dataset/subset specification
:return: pandas.DataFrame - the requested data
"""
if startdate == None:
startdate = datetime.datetime(year=datetime.datetime.now().year,month=1,day=1)
else:
startdate = datetime.datetime.strptime(startdate,DATEFORMAT)
if stopdate == None:
stopdate = datetime.datetime.now()
else:
stopdate = datetime.datetime.strptime(stopdate,DATEFORMAT)
if startdate > stopdate:
error("startdate is after stopdate",E_INVALID)
data = []
for year in range(startdate.year,stopdate.year+1):
data.append(get_year(year,dataset))
data = pandas.concat(data)
maxdate = min(data.index.get_level_values(0).max(),stopdate)
return data.loc[pandas.date_range(startdate,maxdate)]

def fix_timestamps(df):
"""Fix timestamp in dataframe
:param data: pandas.DataFrame - data to fix
:return: pandas.DataFrame - fixed data
"""
df['timestamp'] = [x[0] + datetime.timedelta(hours = int(x[1])-1) for x in list(df.index)]
return df.set_index('timestamp')

if __name__ == "__main__":
if len(sys.argv) == 1:
for line in __doc__.split("\n"):
if line.startswith("Syntax: "):
print(line,file=sys.stderr)
exit(E_SYNTAX)
elif sys.argv[1] in ["-h","--help","help"]:
print(__doc__)
exit(E_OK)
for arg in sys.argv[1:]:
spec = arg.split("=")
if len(spec) == 1:
token = arg
value = None
elif len(spec) == 2:
token = spec[0]
value = spec[1]
else:
token = spec[0]
value = "=".join(spec[1:])

if token in ["-v","--verbose"]:
VERBOSE = True
elif token in ["-q","--quiet"]:
QUIET = True
elif token in ["--debug"]:
DEBUG = True

# -d|--dataset=DATASET/SUBSET Download data from the specified dataset
# (and subset, default is `smd/ISO NE CA`)
elif token in ["-d","--dataset"]:
if value:
DATASET = value
else:
error("dataset not specified",E_SYNTAX)

# -c|--csv=FILENAME Save CSV data to the specified file
# (default is /dev/stdout)
elif token in ["-c","--csv"]:
if value:
CSVFILE = value
else:
error("csvfile not specified",E_SYNTAX)

# -C|--CLASS=CLASSNAME Specify the GLM class name (default is
# `isone`)
elif token in ["-C","--CLASS"]:
if value:
CLASSNAME = value
else:
error("class name not specified",E_SYNTAX)

# -e|--end=YEAR|STOPDATE Download data ending on the specified year
# or date (last month default)
elif token in ["-e","--end"]:
if value:
try:
STOPDATE = f"{int(value)}-12-31"
except:
STOPDATE = value

# -f|--freshen Freshen the data cache
elif token in ["-f","--freshen"]:
FRESHEN = True

# -g|--glm=FILENAME save GLM data to the specified file
# (default is /dev/null)
elif token in ["-g","--glm"]:
if value:
GLMFILE = value
else:
error("GLM name not specified",E_SYNTAX)

# -l|--list output a list of available sheets in the dataset
elif token in ["-l","--list"]:
print("\n".join(get_year(value)),file=sys.stdout)
exit(E_OK)

# -N|--NAME=OBJECTNAME specify the GLM object name (default is
# `isone`)
elif token in ["-N","--NAME"]:
if value:
OBJNAME = value
else:
error("object name not specified",E_SYNTAX)

# -s|--start=YEAR|STARTDATE download data starting on the specified
# year or date (last month by default)
elif token in ["-s","--start"]:
if value:
try:
STARTDATE = f"{int(value)}-01-01"
except:
STARTDATE = value

# -y|--year[=YEAR] download data for the specified year
# (current year by default)
elif token in ["-y","--year"]:
if value:
try:
STARTDATE = value + "-01-01"
STOPDATE = value + "-12-31"
except:
error(f"year '{value}' is invalid",E_INVALID)
else:
error("year not specified",E_SYNTAX)

# invalid argument
else:
error(f"argument '{arg}' is not recognized",E_SYNTAX)

try:
data = get_data(STARTDATE,STOPDATE)

if GLMFILE:
if CSVFILE.startswith("/dev/"):
error(f"CSV file '{CSVFILE}' must be a regular file",E_INVALID)

# classinfo = CLASSNAME.split(".")

properties = "\n ".join([f'double {x};' for x in data.columns])
with open(GLMFILE,"w") as glm:
glm.write(f"""// created by '{' '.join(sys.argv)}' at {datetime.datetime.now()}
class {CLASSNAME}
{{
{properties}
}}
module tape;
object {CLASSNAME}
{{
name {OBJNAME};
object player
{{
file "{CSVFILE}";
property "{','.join(data.columns)}";
}};
}}
#define ISONE_TIMEZONE=EST+5EDT
#define ISONE_STARTDATE={data.index.get_level_values(0).min()}
#define ISONE_STOPDATE={data.index.get_level_values(0).max()+datetime.timedelta(days=1)}
""")

fix_timestamps(data).to_csv(CSVFILE,float_format=FLOATFORMAT,header=(GLMFILE is None))

exit(E_OK)
except SystemExit as err:
exit(err.code)
except:
e_type,e_value,e_trace = sys.exc_info()
if DEBUG:
traceback.print_tb(e_trace)
error(f"{e_value} ({e_type.__name__} exception)",E_FAILED)
