Skip to content

Commit

Permalink
pre-commit autoupdate
Browse files Browse the repository at this point in the history
  • Loading branch information
douglatornell committed Jul 20, 2021
1 parent 176d4eb commit 4d24e31
Show file tree
Hide file tree
Showing 3 changed files with 18 additions and 15 deletions.
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -7,14 +7,14 @@
repos:
# Out-of-the-box hooks from the pre-commit org
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v3.4.0
rev: v4.0.1
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- id: check-added-large-files
# Code formatting with black
- repo: https://github.com/psf/black
rev: 20.8b1
rev: 21.7b0
hooks:
- id: black
2 changes: 1 addition & 1 deletion docs/worker_failures.rst
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ If this page is not accessible then the :mod:`collect_NeahBay_ssh` worker may fa

:mod:`make_ssh_files` can take a date so older files can be run. Files from the observation site can also be run with the appropriate flag.

.. _NOAA forecast: https://nomads.ncep.noaa.gov/pub/data/nccf/com/etss/prod/
.. _NOAA forecast: https://nomads.ncep.noaa.gov/pub/data/nccf/com/etss/prod/

.. _NOAA tides and water levels: https://tidesandcurrents.noaa.gov/waterlevels.html?id=9443090

Expand Down
27 changes: 15 additions & 12 deletions nowcast/residuals.py
Original file line number Diff line number Diff line change
Expand Up @@ -716,18 +716,19 @@ def NeahBay_forcing_anom(textfile, run_date, tide_file, archive=False, fromtar=F

if fromtar:
data = pd.read_csv(textfile, parse_dates=[0], index_col=0).rename(
columns={" OB": "obs", " TWL": 'fcst'})
data['Date'] = pd.to_datetime(data.index)
columns={" OB": "obs", " TWL": "fcst"}
)
data["Date"] = pd.to_datetime(data.index)
# clean up 00:00 obs
datesat00 = data.resample('1d').nearest().index.array
data['offset'] = data.obs - data[' TIDE']
data.loc[data.Date.isin(datesat00), 'obs'] = (
datesat00 = data.resample("1d").nearest().index.array
data["offset"] = data.obs - data[" TIDE"]
data.loc[data.Date.isin(datesat00), "obs"] = (
data[(data.Date.shift(1).isin(datesat00))].offset.values
+ data[data.Date.isin(datesat00)][' TIDE'].values
+ data[data.Date.isin(datesat00)][" TIDE"].values
)
data.loc[data.obs > 9000, 'obs'] = 9999
data = data.resample('1h').nearest()
data['Date'] = pd.to_datetime(data.index, utc=True)
data.loc[data.obs > 9000, "obs"] = 9999
data = data.resample("1h").nearest()
data["Date"] = pd.to_datetime(data.index, utc=True)
dates = data.Date.array
else:
data = _load_surge_data(textfile, archive)
Expand All @@ -744,7 +745,9 @@ def NeahBay_forcing_anom(textfile, run_date, tide_file, archive=False, fromtar=F
isDec = True
for i in range(dates.shape[0]):
dates[i] = _to_datetime(dates[i], run_date.year, isDec, isJan)
surge, forecast_flag = _calculate_forcing_surge(data, dates, tide_file, archive, fromtar)
surge, forecast_flag = _calculate_forcing_surge(
data, dates, tide_file, archive, fromtar
)
return dates, surge, forecast_flag


Expand Down Expand Up @@ -776,7 +779,7 @@ def _calculate_forcing_surge(data, dates, tide_file, archive=False, fromtar=Fals
surge = []
# Load tides
ttide, _ = stormtools.load_tidal_predictions(tide_file)
sealevel_correction = 0.
sealevel_correction = 0.0
for d in dates:
tide = ttide.pred_all[ttide.time == d].item()
if archive:
Expand All @@ -786,7 +789,7 @@ def _calculate_forcing_surge(data, dates, tide_file, archive=False, fromtar=Fals
fcst = data.fcst[data.Date == d].item()
sealevel_correction = MSL_in_feet
else:
# Convert datetime to string for comparing with times in data
# Convert datetime to string for comparing with times in data
daystr = d.strftime("%m/%d %HZ")
obs = data.obs[data.date == daystr].item()
fcst = data.fcst[data.date == daystr].item()
Expand Down

0 comments on commit 4d24e31

Please sign in to comment.