Merge pull request #8 from heigeo/nws2
nws hydrograph & cnrfc ensemble forecasts
Showing 10 changed files with 478 additions and 4 deletions.
@@ -1,5 +1,7 @@
language: python
python:
- "2.7"
install:
- pip install git+https://github.com/wq/wq.io
script:
- python setup.py test
@@ -0,0 +1,168 @@
from wq.io import (
    CsvParser, XmlNetIO, XmlParser, BaseIO,
    TupleMapper, TimeSeriesMapper
)
from wq.io.parsers.base import BaseParser
from climata.base import (
    WebserviceLoader, ZipWebserviceLoader,
    FilterOpt, DateOpt, ChoiceOpt
)
from .parsers import EnsembleCsvParser


class HydroForecastIO(WebserviceLoader, XmlParser, TimeSeriesMapper, BaseIO):
    """
    Loads hydrograph forecast data (next 3 days) from weather.gov
    """

    ###########################################
    # valid is the time of the forecast in UTC
    # primary is the stage in ft
    # secondary is the flow in kcfs
    # It appears to return 3 days forecast in the future
    ###########################################

    start_date = DateOpt(ignored=True)
    end_date = DateOpt(ignored=True)
    state = FilterOpt(ignored=True)
    county = FilterOpt(ignored=True)
    station = FilterOpt(url_param='gage')
    parameter = FilterOpt(ignored=True)
    basin = FilterOpt(ignored=True)

    root_tag = "forecast"
    date_formats = [
        '%Y-%m-%dT%H:%M:%S'
    ]
    url = 'http://water.weather.gov/ahps2/hydrograph_to_xml.php'

    def parse_item(self, elem):
        valid = elem.find('valid')
        primary = elem.find('primary')
        secondary = elem.find('secondary')
        return {
            'date': valid.text.replace('-00:00', ''),
            primary.attrib['name']: primary.text,
            secondary.attrib['name']: secondary.text,
        }


class EnsembleForecastIO(ZipWebserviceLoader, EnsembleCsvParser,
                         TupleMapper, BaseIO):

    """
    Load ensemble forecast zip files from the CNRFC website.
    - start_date and basin are required to specify the zip file;
    - station and end_date can be used to filter the downloaded data.
    """

    nested = True

    start_date = DateOpt(required=True)
    end_date = DateOpt()

    # Region filters
    state = FilterOpt(ignored=True)
    county = FilterOpt(ignored=True)

    # FIXME: this isn't actually a HUC8 basin
    basin = FilterOpt(required=True)

    station = FilterOpt(multi=True)
    parameter = FilterOpt(ignored=True)

    region = ChoiceOpt(
        default="cnrfc",
        choices=["cnrfc"]
    )

    urls = {
        "cnrfc": (
            "http://www.cnrfc.noaa.gov/csv/" +
            "{date}12_{basin}_hefs_csv_daily.zip"
        )
    }

    @property
    def params(self):
        # Don't actually need params, but ensure validation logic is called
        params = super(EnsembleForecastIO, self).params
        return None

    @property
    def url(self):
        url = self.urls[self.getvalue("region")]
        return url.format(
            date=self.getvalue("start_date").strftime("%Y%m%d"),
            basin=self.getvalue("basin"),
        )

    def parse(self):
        super(EnsembleForecastIO, self).parse()

        # Optionally filter by station id
        site_filter = self.getvalue('station')
        date_filter = self.getvalue('end_date')
        if not site_filter:
            return
        self.data = [
            item for item in self.data
            if item['site'] in site_filter
        ]
        if not date_filter:
            return
        date_filter = date_filter.strftime('%Y-%m-%d') + " 23:59:59"
        for item in self.data:
            item['data'] = [
                row for row in item['data']
                if row['date'] <= date_filter
            ]

    def usable_item(self, item):
        item = item.copy()
        item['data'] = TimeSeriesIO(data=item['data'])
        return super(EnsembleForecastIO, self).usable_item(item)


class TimeSeriesIO(TimeSeriesMapper, BaseIO):
    date_formats = ["%Y-%m-%d %H:%M:%S"]

    def usable_item(self, item):
        uitem = super(TimeSeriesIO, self).usable_item(item)
        # Convert KCFS to CFS
        return uitem._replace(value=uitem.value * 1000)


class SiteIO(XmlNetIO):
    """
    Base class for CNRFC site layers. Use ForecastSiteIO or EnsembleSiteIO.
    """
    layer = None
    key_field = "id"
    region = "cnrfc"
    urls = {
        "cnrfc": "http://www.cnrfc.noaa.gov/data/kml/%s.xml"
    }

    @property
    def url(self):
        if self.region not in self.urls:
            raise Exception("Region %s not currently supported!" % self.region)
        return self.urls[self.region] % self.layer

    def parse_item(self, item):
        return item.attrib


class ForecastSiteIO(SiteIO):
    """
    CNRFC sites with deterministic forecasts.
    """
    layer = "riverFcst"


class EnsembleSiteIO(SiteIO):
    """
    CNRFC sites with ensemble forecasts.
    """
    layer = "ensPoints"
@@ -0,0 +1,47 @@
from wq.io.parsers.base import TableParser
from csv import reader


class EnsembleCsvParser(TableParser):
    header_row = 0
    max_header_row = 0
    start_row = 2

    def parse(self):
        csvdata = reader(self.file)
        sitedata = {}

        # Extract metadata from first two rows
        sites = next(csvdata)
        params = next(csvdata)
        years = []
        for site, param in zip(sites, params):
            if site not in sitedata:
                sitedata[site] = {}
            if param not in sitedata[site]:
                sitedata[site][param] = []
                year = 1950
            else:
                year += 1
            years.append(year)

        # Extract data from remaining rows
        for row in csvdata:
            date = row[0]
            for site, param, year, val in zip(sites, params, years, row[1:]):
                data = {
                    'date': date,
                    'year': year,
                    'value': val,
                }
                sitedata[site][param].append(data)

        # Repackage into IO-friendly arrays
        self.data = []
        for site in sitedata:
            for param in sitedata[site]:
                self.data.append({
                    'site': site,
                    'parameter': param,
                    'data': sitedata[site][param],
                })
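
To make the reshaping above easier to follow, here is a self-contained sketch of the same idea: a wide CSV whose first two rows carry repeated site IDs and parameter codes is unpivoted into per-site, per-parameter lists of {date, year, value} rows, with the ensemble traces distinguished by a counter starting at 1950, as the parser does. This is not a call into EnsembleCsvParser itself; the header layout, site ID (ABCD1), parameter code, and values are made up for illustration and are not taken from an actual CNRFC file:

import csv

# Made-up miniature of the wide CSV layout assumed here; not real CNRFC data.
raw = [
    "GMT,ABCD1,ABCD1,ABCD1",
    ",QINE,QINE,QINE",
    "2014-03-01 12:00:00,1.10,1.20,1.30",
    "2014-03-02 12:00:00,1.05,1.15,1.25",
]

rows = csv.reader(raw)
sites = next(rows)[1:]    # drop the date-column header (layout assumed)
params = next(rows)[1:]

# Tag each repeated (site, parameter) column as a separate trace,
# counting up from 1950 in the same way as the parser above.
years, seen = [], {}
for site, param in zip(sites, params):
    seen[(site, param)] = seen.get((site, param), 1949) + 1
    years.append(seen[(site, param)])

# Unpivot the data rows into one list of records per (site, parameter)
series = {}
for row in rows:
    date, values = row[0], row[1:]
    for site, param, year, value in zip(sites, params, years, values):
        series.setdefault((site, param), []).append(
            {'date': date, 'year': year, 'value': value}
        )

for (site, param), data in sorted(series.items()):
    print("%s %s: %d rows, traces %s to %s"
          % (site, param, len(data), data[0]['year'], data[-1]['year']))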
Binary file not shown.
Large diffs are not rendered by default.