
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Sep 2, 2024
1 parent 03ca984 commit e12c0ba
Showing 41 changed files with 216 additions and 103 deletions.
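
The changes in this commit are mechanical: an import-sorting hook moves the first-party `isudatateam.cscap_utils` import into its own block below the third-party imports, and a code formatter rewrites the notebook cells with double quotes and wrapped call arguments. The exact hook configuration is not shown on this page, so as a hedged sketch, the import layout the sorting hook enforces looks like this:

    # Hypothetical illustration of isort-style import grouping; the
    # repository's actual .pre-commit-config.yaml is not part of this page.
    import copy  # 1. standard library first

    import pandas as pd  # 2. third-party packages second
    from pyiem.util import get_dbconn

    import isudatateam.cscap_utils as util  # 3. first-party (local) package last
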
3 changes: 2 additions & 1 deletion scripts/auth/drive2webaccess.py
@@ -2,9 +2,10 @@
 Synchronize the ACL on the Google Drive to the local DB
 """
 
-import isudatateam.cscap_utils as utils
 from pyiem.util import get_dbconn
 
+import isudatateam.cscap_utils as utils
+
 
 def main():
     """Go Main Go."""
1 change: 1 addition & 0 deletions scripts/cscap/add_agronomic_column.py
@@ -1,4 +1,5 @@
 import gdata.docs.client
+
 import isudatateam.cscap_utils as util
 
 config = util.get_config()
3 changes: 2 additions & 1 deletion scripts/cscap/check_2012_on.py
@@ -3,10 +3,11 @@
 2011 and the rest of the years
 """
 
-import isudatateam.cscap_utils as util
 from gspread_pandas import Spread
 from pyiem.util import logger
 
+import isudatateam.cscap_utils as util
+
 LOG = logger()
 
 
3 changes: 2 additions & 1 deletion scripts/cscap/check_agronomic_header.py
@@ -1,8 +1,9 @@
"""Gio discovered a mismatch between AG codes and headers"""

import isudatateam.cscap_utils as util
import pandas as pd

import isudatateam.cscap_utils as util

config = util.get_config()
drive = util.get_driveclient(config, "cscap")
sheets = util.get_sheetsclient(config, "cscap")
109 changes: 75 additions & 34 deletions scripts/cscap/chicago delta yield biomass.ipynb
@@ -14,9 +14,12 @@
 }
 ],
 "source": [
-"import psycopg2\n",
 "import pandas as pd\n",
-"pgconn = psycopg2.connect(database='sustainablecorn', host='iemdb', user='nobody')\n",
+"import psycopg2\n",
+"\n",
+"pgconn = psycopg2.connect(\n",
+"    database=\"sustainablecorn\", host=\"iemdb\", user=\"nobody\"\n",
+")\n",
 "cursor = pgconn.cursor()\n",
 "pd.set_printoptions(max_rows=400, max_columns=10)\n",
 "cursor.execute(\"\"\"\n",
@@ -27,26 +30,31 @@
"\"\"\")\n",
"data = {}\n",
"for row in cursor:\n",
" key = '%s|%s|%s' % (row[0], row[1], row[3])\n",
" key = \"%s|%s|%s\" % (row[0], row[1], row[3])\n",
" if not data.has_key(key):\n",
" data[key] = {'rotation': row[5], 'tillage': row[6]}\n",
" data[key][ row[2] ] = float(row[4])\n",
" data[key] = {\"rotation\": row[5], \"tillage\": row[6]}\n",
" data[key][row[2]] = float(row[4])\n",
"\n",
"rows = []\n",
"for key in data.keys():\n",
" tokens = key.split(\"|\")\n",
" rows.append( dict(siteid=tokens[0], plotid=row[1], year=tokens[2], \n",
" agr7=data[key].get('AGR7'), \n",
" agr17=data[key].get('AGR17'), \n",
" agr19=data[key].get('AGR19'), \n",
" agr39=data[key].get('AGR39'),\n",
" rotation=data[key]['rotation'],\n",
" tillage=data[key]['tillage']\n",
" ) )\n",
" \n",
" rows.append(\n",
" dict(\n",
" siteid=tokens[0],\n",
" plotid=row[1],\n",
" year=tokens[2],\n",
" agr7=data[key].get(\"AGR7\"),\n",
" agr17=data[key].get(\"AGR17\"),\n",
" agr19=data[key].get(\"AGR19\"),\n",
" agr39=data[key].get(\"AGR39\"),\n",
" rotation=data[key][\"rotation\"],\n",
" tillage=data[key][\"tillage\"],\n",
" )\n",
" )\n",
"\n",
"df = pd.DataFrame(rows)\n",
"df.fillna(np.nan)\n",
"print('Loaded %s rows from the database!' % (len(df),))"
"print(\"Loaded %s rows from the database!\" % (len(df),))"
]
},
{
@@ -71,27 +79,56 @@
" return None\n",
" return np.ma.average(ar[ar.notnull()])\n",
"\n",
"\n",
"rows = []\n",
"for year in [\"2011\", \"2012\", \"2013\"]:\n",
" for sid in df.siteid.unique():\n",
" agr7 = df[(df.siteid==sid)&(df.year==year)].agr7\n",
" agr7 = df[(df.siteid == sid) & (df.year == year)].agr7\n",
" if len(agr7[agr7.notnull()]) == 0:\n",
" continue\n",
" agr7 = np.average(agr7[agr7.notnull()])\n",
" \n",
" cyield_nocc = a( df[(df.siteid==sid)&(df.year==year)&\n",
" ((df.rotation=='ROT4')|(df.rotation=='ROT5'))].agr17)\n",
" syield_nocc = a( df[(df.siteid==sid)&(df.year==year)&\n",
" ((df.rotation=='ROT4')|(df.rotation=='ROT5'))].agr19)\n",
" cyield_cc = a( df[(df.siteid==sid)&(df.year==year)&\n",
" ((df.rotation=='ROT36')|(df.rotation=='ROT37'))].agr17)\n",
" syield_cc = a( df[(df.siteid==sid)&(df.year==year)&\n",
" ((df.rotation=='ROT36')|(df.rotation=='ROT37'))].agr19)\n",
"\n",
" rows.append( dict(siteid=sid, year=year, ryebio=agr7, \n",
" cyield_nocc=cyield_nocc, syield_nocc=syield_nocc, \n",
" cyield_cc=cyield_cc, syield_cc=syield_cc) )\n",
" \n",
" cyield_nocc = a(\n",
" df[\n",
" (df.siteid == sid)\n",
" & (df.year == year)\n",
" & ((df.rotation == \"ROT4\") | (df.rotation == \"ROT5\"))\n",
" ].agr17\n",
" )\n",
" syield_nocc = a(\n",
" df[\n",
" (df.siteid == sid)\n",
" & (df.year == year)\n",
" & ((df.rotation == \"ROT4\") | (df.rotation == \"ROT5\"))\n",
" ].agr19\n",
" )\n",
" cyield_cc = a(\n",
" df[\n",
" (df.siteid == sid)\n",
" & (df.year == year)\n",
" & ((df.rotation == \"ROT36\") | (df.rotation == \"ROT37\"))\n",
" ].agr17\n",
" )\n",
" syield_cc = a(\n",
" df[\n",
" (df.siteid == sid)\n",
" & (df.year == year)\n",
" & ((df.rotation == \"ROT36\") | (df.rotation == \"ROT37\"))\n",
" ].agr19\n",
" )\n",
"\n",
" rows.append(\n",
" dict(\n",
" siteid=sid,\n",
" year=year,\n",
" ryebio=agr7,\n",
" cyield_nocc=cyield_nocc,\n",
" syield_nocc=syield_nocc,\n",
" cyield_cc=cyield_cc,\n",
" syield_cc=syield_cc,\n",
" )\n",
" )\n",
"\n",
"df2 = pd.DataFrame(rows)"
]
},
@@ -396,7 +433,7 @@
 }
 ],
 "source": [
-"df2.sort('ryebio')"
+"df2.sort(\"ryebio\")"
 ]
 },
 {
@@ -416,11 +453,15 @@
 }
 ],
 "source": [
-"df3 = df2[(df2.siteid!='WOOSTER.COV')]\n",
-"(fig, ax) = plt.subplots(1,1)\n",
+"df3 = df2[(df2.siteid != \"WOOSTER.COV\")]\n",
+"(fig, ax) = plt.subplots(1, 1)\n",
 "\n",
-"ax.scatter(df3.ryebio, df3.cyield_cc - df3.cyield_nocc, marker='+', s=50, label='Corn')\n",
-"ax.scatter(df3.ryebio, df3.syield_cc - df3.syield_nocc, marker='s', s=50, label='Soy')\n",
+"ax.scatter(\n",
+"    df3.ryebio, df3.cyield_cc - df3.cyield_nocc, marker=\"+\", s=50, label=\"Corn\"\n",
+")\n",
+"ax.scatter(\n",
+"    df3.ryebio, df3.syield_cc - df3.syield_nocc, marker=\"s\", s=50, label=\"Soy\"\n",
+")\n",
 "ax.legend()\n",
 "ax.set_ylabel(\"$\\Delta$ Yield (CoverCrop minus Non) [kg/ha]\")\n",
 "ax.set_xlabel(\"Rye Spring Biomass [kg/ha]\")\n",
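
Worth noting: the formatter only restyles chicago delta yield biomass.ipynb; it does not modernize it. `data.has_key(key)` in the first cell is Python 2-only and raises AttributeError on Python 3, where a membership test is the equivalent. A minimal sketch of the Python 3 form, with made-up row values:

    # Hypothetical row shaped like the cursor rows in the notebook:
    # (siteid, plotid, varname, year, value, rotation, tillage)
    row = ("SITE", "P1", "AGR7", 2012, "1.0", "ROT4", "TIL1")
    data = {}

    key = "%s|%s|%s" % (row[0], row[1], row[3])
    if key not in data:  # Python 3 replacement for data.has_key(key)
        data[key] = {"rotation": row[5], "tillage": row[6]}
    data[key][row[2]] = float(row[4])
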
43 changes: 28 additions & 15 deletions scripts/cscap/chicago.ipynb
@@ -14,20 +14,31 @@
 }
 ],
 "source": [
-"import psycopg2\n",
 "import pandas as pd\n",
-"pgconn = psycopg2.connect(database='sustainablecorn', host='iemdb', user='nobody')\n",
+"import psycopg2\n",
+"\n",
+"pgconn = psycopg2.connect(\n",
+"    database=\"sustainablecorn\", host=\"iemdb\", user=\"nobody\"\n",
+")\n",
 "cursor = pgconn.cursor()\n",
 "pd.set_printoptions(max_rows=400, max_columns=10)\n",
 "cursor.execute(\"\"\"\n",
 "    SELECT uniqueid, operation, to_char(valid, 'Mon dd,YYYY'), cropyear, valid from operations ORDER by valid ASC\n",
 "\"\"\")\n",
 "rows = []\n",
 "for row in cursor:\n",
-"    rows.append( dict(siteid=row[0], year=row[3], operation=row[1], date=row[4], sdate=row[2]) )\n",
-"    \n",
+"    rows.append(\n",
+"        dict(\n",
+"            siteid=row[0],\n",
+"            year=row[3],\n",
+"            operation=row[1],\n",
+"            date=row[4],\n",
+"            sdate=row[2],\n",
+"        )\n",
+"    )\n",
+"\n",
 "df = pd.DataFrame(rows)\n",
-"print('Loaded %s rows from the database!' % (len(df),))"
+"print(\"Loaded %s rows from the database!\" % (len(df),))"
 ]
 },
 {
@@ -241,12 +252,14 @@
 }
 ],
 "source": [
-"df2 = df[(\n",
-"    (df.operation == 'termination_rye_corn')|\n",
-"    (df.operation == 'termination_rye_soy'))\n",
-"    &(df.year==2013)\n",
-"    ].sort(['date'])\n",
-"df2[['sdate', 'operation', 'siteid', 'year']]"
+"df2 = df[\n",
+"    (\n",
+"        (df.operation == \"termination_rye_corn\")\n",
+"        | (df.operation == \"termination_rye_soy\")\n",
+"    )\n",
+"    & (df.year == 2013)\n",
+"].sort([\"date\"])\n",
+"df2[[\"sdate\", \"operation\", \"siteid\", \"year\"]]"
 ]
 },
 {
@@ -779,8 +792,8 @@
 }
 ],
 "source": [
-"df2 = df[df.operation == 'plant_corn']\n",
-"df2[['date', 'operation', 'siteid', 'year']]"
+"df2 = df[df.operation == \"plant_corn\"]\n",
+"df2[[\"date\", \"operation\", \"siteid\", \"year\"]]"
 ]
 },
 {
@@ -889,8 +902,8 @@
 }
 ],
 "source": [
-"df2 = df[df.operation == 'sample_soilnitrate']\n",
-"df2[['date', 'operation', 'siteid', 'year']]"
+"df2 = df[df.operation == \"sample_soilnitrate\"]\n",
+"df2[[\"date\", \"operation\", \"siteid\", \"year\"]]"
 ]
 },
 {
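
The same caveat applies to chicago.ipynb: the restyled `.sort([...])` calls use `DataFrame.sort`, which pandas deprecated in 0.17 and removed in 0.20, so these cells fail on any modern pandas. A hedged sketch of the current equivalent, with invented data:

    import pandas as pd

    # Toy stand-in for the operations table; the values are made up.
    df = pd.DataFrame(
        {
            "operation": ["termination_rye_corn", "plant_corn"],
            "year": [2013, 2013],
            "date": ["2013-05-01", "2013-05-10"],
        }
    )

    # Modern replacement for the removed df[...].sort(["date"]):
    df2 = df[df["operation"] == "termination_rye_corn"].sort_values(["date"])
    print(df2[["date", "operation", "year"]])

(`pd.set_printoptions`, also left untouched above, was likewise removed long ago in favor of `pd.set_option`.)
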
1 change: 1 addition & 0 deletions scripts/cscap/create_agronomic_datasheet.py
@@ -5,6 +5,7 @@
 import copy
 
 import gdata.spreadsheets.data
+
 import isudatateam.cscap_utils as util
 
 config = util.get_config()
1 change: 1 addition & 0 deletions scripts/cscap/create_soil_bd.py
@@ -1,4 +1,5 @@
 import gdata.spreadsheets.data
+
 import isudatateam.cscap_utils as util
 
 config = util.get_config()
1 change: 1 addition & 0 deletions scripts/cscap/create_soil_nitrate_sheets.py
@@ -1,4 +1,5 @@
 import gdata.spreadsheets.data
+
 import isudatateam.cscap_utils as util
 
 config = util.get_config()
1 change: 1 addition & 0 deletions scripts/cscap/create_soil_texture_sheets.py
@@ -3,6 +3,7 @@
"""

import gdata.spreadsheets.data

import isudatateam.cscap_utils as util

config = util.get_config()
32 changes: 20 additions & 12 deletions scripts/cscap/dwm1.ipynb
@@ -9,8 +9,8 @@
"outputs": [],
"source": [
"import psycopg2\n",
"import numpy as np\n",
"from pandas.io.sql import read_sql\n",
"\n",
"%matplotlib inline\n",
"import matplotlib.pyplot as plt"
]
@@ -29,17 +29,23 @@
 }
 ],
 "source": [
-"pgconn = psycopg2.connect(database='sustainablecorn', host='iemdb', user='nobody')\n",
-"site = 'STJOHNS'\n",
-"df = read_sql(\"\"\"\n",
+"pgconn = psycopg2.connect(\n",
+"    database=\"sustainablecorn\", host=\"iemdb\", user=\"nobody\"\n",
+")\n",
+"site = \"STJOHNS\"\n",
+"df = read_sql(\n",
+"    \"\"\"\n",
 "    SELECT o.valid, o.plotid, o.discharge_mm as discharge_mm,\n",
 "    lag(o.discharge_mm) OVER (PARTITION by o.plotid ORDER by o.valid ASC) as discharge_mm_lag,\n",
 "    t.depth_mm as depth_mm, lag(t.depth_mm) OVER (PARTITION by o.plotid ORDER by o.valid ASC)\n",
 "    as depth_mm_lag from tileflow_data o JOIN watertable_data t\n",
 "    on (date_trunc('minute', o.valid) = date_trunc('minute', t.valid)\n",
 "    and t.plotid = o.plotid ) WHERE o.uniqueid = %s and\n",
 "    discharge_mm is not null and depth_mm is not null ORDER by plotid ASC, valid ASC\n",
-"\"\"\", pgconn, params=(site,))\n",
+"\"\"\",\n",
+"    pgconn,\n",
+"    params=(site,),\n",
+")\n",
 "print(len(df.index))"
 ]
 },
@@ -81,11 +87,11 @@
"source": [
"(fig, ax) = plt.subplots(2, 1, figsize=(12, 24))\n",
"\n",
"plots = df['plotid'].unique()\n",
"plots = df[\"plotid\"].unique()\n",
"plots.sort()\n",
"for i, plotid in enumerate(plots):\n",
" df2 = df[(df['plotid']==plotid) & (df['depth_mm'] > df['depth_mm_lag'])]\n",
" ax[i].scatter( df2['depth_mm'], df2['discharge_mm'])\n",
" df2 = df[(df[\"plotid\"] == plotid) & (df[\"depth_mm\"] > df[\"depth_mm_lag\"])]\n",
" ax[i].scatter(df2[\"depth_mm\"], df2[\"discharge_mm\"])\n",
" ax[i].set_ylabel(\"Discharge [mm]\")\n",
" ax[i].grid(True)\n",
" ax[i].set_ylim(0, 0.21)\n",
@@ -124,12 +130,14 @@
"source": [
"(fig, ax) = plt.subplots(6, 1, figsize=(12, 24))\n",
"\n",
"plots = df['plotid'].unique()\n",
"plots = df[\"plotid\"].unique()\n",
"plots.sort()\n",
"for i, plotid in enumerate(plots):\n",
" df2 = df[(df['plotid']==plotid)]\n",
" ax[i].scatter( df2['depth_mm'] - df2['depth_mm_lag'],\n",
" df2['discharge_mm'] - df2['discharge_mm_lag'])\n",
" df2 = df[(df[\"plotid\"] == plotid)]\n",
" ax[i].scatter(\n",
" df2[\"depth_mm\"] - df2[\"depth_mm_lag\"],\n",
" df2[\"discharge_mm\"] - df2[\"discharge_mm_lag\"],\n",
" )\n",
" ax[i].set_ylabel(\"Discharge [mm]\")\n",
" ax[i].grid(True)\n",
" ax[i].set_ylim(-0.2, 0.2)\n",
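
For context on the query in dwm1.ipynb above: the SQL pairs each reading with its predecessor via `lag(...) OVER (PARTITION by plotid ORDER by valid)`, which is what feeds the `depth_mm > depth_mm_lag` filter in the plotting cells. The same per-plot lag can be computed client-side with a grouped shift; a small sketch with invented values:

    import pandas as pd

    # Stand-in for the tileflow/watertable join; the numbers are made up.
    df = pd.DataFrame(
        {
            "plotid": ["A", "A", "B", "B"],
            "valid": pd.to_datetime(
                ["2013-05-01", "2013-05-02", "2013-05-01", "2013-05-02"]
            ),
            "depth_mm": [100.0, 104.0, 90.0, 89.0],
        }
    ).sort_values(["plotid", "valid"])

    # Client-side equivalent of lag(depth_mm) OVER (PARTITION by plotid ORDER by valid):
    df["depth_mm_lag"] = df.groupby("plotid")["depth_mm"].shift(1)

    # Keep only rows where the water table rose, as the first plot does.
    print(df[df["depth_mm"] > df["depth_mm_lag"]])
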