From ad89e0727c42190cc59a14eee44b117d76fbab21 Mon Sep 17 00:00:00 2001 From: akrherz Date: Sat, 10 Feb 2024 17:15:41 -0600 Subject: [PATCH 1/3] mnt: sundry updates per review --- htdocs/geojson/sbw.py | 2 + htdocs/json/climodat_dd.py | 24 ++--- htdocs/json/dcp_vars.php | 2 +- htdocs/json/network.php | 2 +- htdocs/json/nwstext_center_date.py | 14 ++- htdocs/json/stage4.py | 6 +- htdocs/json/state_ugc.php | 2 +- htdocs/json/webcams.php | 4 +- htdocs/plotting/auto/scripts100/p102.py | 2 +- htdocs/plotting/auto/scripts100/p162.py | 3 +- htdocs/plotting/auto/scripts100/p166.py | 13 ++- htdocs/plotting/auto/scripts100/p168.py | 25 +++-- htdocs/plotting/auto/scripts100/p170.py | 5 +- htdocs/plotting/auto/scripts100/p173.py | 23 +++-- htdocs/plotting/auto/scripts100/p175.py | 23 ++--- htdocs/plotting/auto/scripts100/p180.py | 33 ++----- htdocs/plotting/auto/scripts100/p187.py | 12 ++- htdocs/plotting/auto/scripts100/p188.py | 39 ++++---- htdocs/plotting/auto/scripts100/p189.py | 33 ++++--- htdocs/plotting/auto/scripts100/p190.py | 8 +- htdocs/plotting/auto/scripts100/p191.py | 13 +-- htdocs/plotting/auto/scripts100/p193.py | 37 ++++--- htdocs/plotting/auto/scripts100/p196.py | 5 +- htdocs/plotting/auto/scripts100/p199.py | 5 +- htdocs/raccoon/index.phtml | 1 - htdocs/raccoon/wait.phtml | 30 +++--- htdocs/request/asos/1min.phtml | 16 ++-- htdocs/request/asos/csv.php | 3 +- htdocs/request/asos/hourlyprecip.phtml | 39 ++++---- scripts/GIS/24h_lsr.py | 7 +- scripts/GIS/ffg2geotiff.py | 32 ------- scripts/RUN_50_AFTER.sh | 2 +- scripts/RUN_NOON.sh | 2 +- scripts/climodat/hrrr_solarrad.py | 122 ++++++++++++------------ scripts/climodat/narr_solarrad.py | 58 +++++------ scripts/coop/day_precip.py | 2 +- scripts/coop/email_rr3_to_harry.py | 2 +- scripts/coop/use_acis.py | 3 +- scripts/iemre/daily_analysis.py | 7 +- scripts/iemre/init_daily_mrms.py | 2 +- scripts/iemre/init_narr.py | 15 ++- scripts/iemre/merge_narr.py | 21 ++-- scripts/usdm/process_usdm.py | 3 +- scripts/year/plot_stage4.py | 9 +- 44 files changed, 340 insertions(+), 371 deletions(-) delete mode 100644 scripts/GIS/ffg2geotiff.py diff --git a/htdocs/geojson/sbw.py b/htdocs/geojson/sbw.py index 4b29a08b67..ae738c84e7 100644 --- a/htdocs/geojson/sbw.py +++ b/htdocs/geojson/sbw.py @@ -77,6 +77,8 @@ def validate_ts(val): year = int(val[:4]) if year < 1986 or year > (utc().year + 1): raise ValueError("year invalid") + if len(val) == 10: + val += "T00:00:00Z" # YYYY-mm-ddTHH:MI if len(val) == 16: val += ":00Z" diff --git a/htdocs/json/climodat_dd.py b/htdocs/json/climodat_dd.py index 226e16e76f..c69981a44f 100644 --- a/htdocs/json/climodat_dd.py +++ b/htdocs/json/climodat_dd.py @@ -4,10 +4,11 @@ import numpy as np from metpy.units import units +from pyiem.database import get_dbconn from pyiem.exceptions import IncompleteWebRequest from pyiem.iemre import find_ij from pyiem.meteorology import gdd as calc_gdd -from pyiem.util import c2f, get_dbconn, ncopen +from pyiem.util import c2f, ncopen from pyiem.webutil import iemapp from pymemcache.client import Client @@ -64,16 +65,17 @@ def run(station, sdate, edate, gddbase, gddceil): "accum": accum, } idx, jdx = find_ij(lon, lat) - for model in ["gfs", "ndfd"]: - with ncopen(f"/mesonet/data/iemre/{model}_current.nc") as nc: - highs = c2f(nc.variables["high_tmpk"][:, jdx, idx] - 273.15) - lows = c2f(nc.variables["low_tmpk"][:, jdx, idx] - 273.15) - taxis = compute_taxis(nc.variables["time"]) - gdds, total = compute(taxis, highs, lows, gddbase, gddceil) - res[model] = gdds - res[f"{model}_accum"] = total 
- res[f"{model}_sdate"] = f"{gdds[0]['date']}" - res[f"{model}_edate"] = f"{gdds[-1]['date']}" + if idx is not None: + for model in ["gfs", "ndfd"]: + with ncopen(f"/mesonet/data/iemre/{model}_current.nc") as nc: + highs = c2f(nc.variables["high_tmpk"][:, jdx, idx] - 273.15) + lows = c2f(nc.variables["low_tmpk"][:, jdx, idx] - 273.15) + taxis = compute_taxis(nc.variables["time"]) + gdds, total = compute(taxis, highs, lows, gddbase, gddceil) + res[model] = gdds + res[f"{model}_accum"] = total + res[f"{model}_sdate"] = f"{gdds[0]['date']}" + res[f"{model}_edate"] = f"{gdds[-1]['date']}" return json.dumps(res) diff --git a/htdocs/json/dcp_vars.php b/htdocs/json/dcp_vars.php index 4178a65cdc..6b578b3a8e 100644 --- a/htdocs/json/dcp_vars.php +++ b/htdocs/json/dcp_vars.php @@ -17,7 +17,7 @@ "SELECT distinct key from $table WHERE station = $1" ); -$station = isset($_REQUEST["station"]) ? strtoupper($_REQUEST["station"]) : +$station = isset($_REQUEST["station"]) ? strtoupper(xssafe($_REQUEST["station"])) : die(json_encode('Please provide a station variable (NWSLI)')); $rs = pg_execute($hads, "SELECT", array($station)); diff --git a/htdocs/json/network.php b/htdocs/json/network.php index ff12a93e22..281f579ecc 100644 --- a/htdocs/json/network.php +++ b/htdocs/json/network.php @@ -8,7 +8,7 @@ require_once "../../include/network.php"; require_once "../../include/forms.php"; -$network = isset($_REQUEST["network"]) ? $_REQUEST["network"] : "KCCI"; +$network = isset($_REQUEST["network"]) ? xssafe($_REQUEST["network"]) : "KCCI"; $nt = new NetworkTable($network); $ar = array("stations" => array()); diff --git a/htdocs/json/nwstext_center_date.py b/htdocs/json/nwstext_center_date.py index f869e79c1a..56e160cac5 100644 --- a/htdocs/json/nwstext_center_date.py +++ b/htdocs/json/nwstext_center_date.py @@ -7,15 +7,13 @@ # extras from pyiem.exceptions import IncompleteWebRequest -from pyiem.util import get_dbconn, html_escape, utc +from pyiem.util import html_escape, utc from pyiem.webutil import iemapp -@iemapp(default_tz="UTC") +@iemapp(default_tz="UTC", iemdb="afos") def application(environ, start_response): """Answer request.""" - pgconn = get_dbconn("afos") - acursor = pgconn.cursor() center = environ.get("center", "KOKX")[:4] cb = environ.get("callback") if environ.get("date") is not None: @@ -38,15 +36,15 @@ def application(environ, start_response): 'SRF', 'SQW', 'SVR', 'SVS', 'TCV', 'TOR', 'TSU', 'WCN', 'WSW') """ - acursor.execute( + environ["iemdb.afos.cursor"].execute( "SELECT data, to_char(entered at time zone 'UTC', " - "'YYYY-MM-DDThh24:MI:00Z') from products " + "'YYYY-MM-DDThh24:MI:00Z') as ee from products " "where source = %s and entered >= %s and " f"entered < %s {pil_limiter} ORDER by entered ASC", (center, environ["sts"], environ["ets"]), ) - for row in acursor: - root["products"].append({"data": row[0], "entered": row[1]}) + for row in environ["iemdb.afos.cursor"]: + root["products"].append({"data": row["data"], "entered": row["ee"]}) data = json.dumps(root) if cb is not None: diff --git a/htdocs/json/stage4.py b/htdocs/json/stage4.py index 2c4860b3e0..326620b154 100644 --- a/htdocs/json/stage4.py +++ b/htdocs/json/stage4.py @@ -32,7 +32,7 @@ def dowork(environ): sidx = iemre.hourly_offset(sts) eidx = iemre.hourly_offset(ets) - ncfn = "/mesonet/data/stage4/%s_stage4_hourly.nc" % (date.year,) + ncfn = f"/mesonet/data/stage4/{date.year}_stage4_hourly.nc" res = {"gridi": -1, "gridj": -1, "data": []} if not os.path.isfile(ncfn): return json.dumps(res) @@ -69,7 +69,7 @@ def application(environ, 
start_response): valid = environ.get("valid") cb = environ.get("callback", None) - mckey = "/json/stage4/%.2f/%.2f/%s?callback=%s" % (lon, lat, valid, cb) + mckey = f"/json/stage4/{lon:.2f}/{lat:.2f}/{valid}?callback={cb}" mc = Client("iem-memcached:11211") res = mc.get(mckey) if not res: @@ -80,7 +80,7 @@ def application(environ, start_response): mc.close() if cb is not None: - res = "%s(%s)" % (html_escape(cb), res) + res = f"{html_escape(cb)}({res})" headers = [("Content-type", "application/json")] start_response("200 OK", headers) diff --git a/htdocs/json/state_ugc.php b/htdocs/json/state_ugc.php index 253c065a1c..2b42c0ddf8 100644 --- a/htdocs/json/state_ugc.php +++ b/htdocs/json/state_ugc.php @@ -16,7 +16,7 @@ "ORDER by name ASC" ); -$st = isset($_REQUEST["state"]) ? $_REQUEST["state"] : 'IA'; +$st = isset($_REQUEST["state"]) ? xssafe($_REQUEST["state"]) : 'IA'; $rs = pg_execute($dbconn, "SELECT", array($st)); diff --git a/htdocs/json/webcams.php b/htdocs/json/webcams.php index 9007204131..52e52cb6b0 100644 --- a/htdocs/json/webcams.php +++ b/htdocs/json/webcams.php @@ -8,8 +8,8 @@ require_once "../../include/forms.php"; // This should be a UTC timestamp, gasp! -$ts = isset($_REQUEST["ts"]) ? strtotime($_REQUEST["ts"]) : 0; -$network = isset($_REQUEST["network"]) ? substr($_REQUEST["network"], 0, 4) : "KCCI"; +$ts = isset($_REQUEST["ts"]) ? strtotime(xssafe($_REQUEST["ts"])) : 0; +$network = isset($_REQUEST["network"]) ? substr(xssafe($_REQUEST["network"]), 0, 4) : "KCCI"; $connect = iemdb("mesosite"); pg_exec($connect, "SET TIME ZONE 'UTC'"); diff --git a/htdocs/plotting/auto/scripts100/p102.py b/htdocs/plotting/auto/scripts100/p102.py index aa4b23976d..5d6fd2fa5b 100644 --- a/htdocs/plotting/auto/scripts100/p102.py +++ b/htdocs/plotting/auto/scripts100/p102.py @@ -190,4 +190,4 @@ def plotter(fdict): if __name__ == "__main__": - plotter(dict(ltype=["SNOW"], station="PBZ")) + plotter({}) diff --git a/htdocs/plotting/auto/scripts100/p162.py b/htdocs/plotting/auto/scripts100/p162.py index 9bd2465045..2fe0d22c9d 100644 --- a/htdocs/plotting/auto/scripts100/p162.py +++ b/htdocs/plotting/auto/scripts100/p162.py @@ -9,9 +9,10 @@ import numpy as np import numpy.ma as ma import pandas as pd +from pyiem.database import get_sqlalchemy_conn from pyiem.exceptions import NoDataFound from pyiem.plot import figure_axes, get_cmap -from pyiem.util import get_autoplot_context, get_sqlalchemy_conn +from pyiem.util import get_autoplot_context def get_description(): diff --git a/htdocs/plotting/auto/scripts100/p166.py b/htdocs/plotting/auto/scripts100/p166.py index c7b4864a64..8d23f75b39 100644 --- a/htdocs/plotting/auto/scripts100/p166.py +++ b/htdocs/plotting/auto/scripts100/p166.py @@ -1,4 +1,8 @@ -"""Watches""" +""" +This plot presents a summary of the number of year +to date watches issued by the Storm Prediction Center and the percentage +of those watches that at least touched the given state. +""" import datetime import matplotlib.ticker as ticker @@ -16,12 +20,7 @@ def get_description(): """Return a dict describing how to call this plotter""" - desc = {} - desc["data"] = True - desc["description"] = """This plot presents a summary of the number of year - to date watches issued by the Storm Prediction Center and the percentage - of those watches that at least touched the given state. 
- """ + desc = {"data": True, "description": __doc__} desc["arguments"] = [ dict(type="state", name="state", default="IA", label="Select State:"), dict( diff --git a/htdocs/plotting/auto/scripts100/p168.py b/htdocs/plotting/auto/scripts100/p168.py index 21218ed459..0fb19ec235 100644 --- a/htdocs/plotting/auto/scripts100/p168.py +++ b/htdocs/plotting/auto/scripts100/p168.py @@ -1,4 +1,13 @@ -"""First fall or last spring""" +""" +This chart presents the last date of fall or first +date of spring that a given temperature threshold was last or first +reached. Note that leap day creates some ambiguity with an analysis like +this, so for example, the 15th of November is considered equal for each +year. The plot truncates once you reach the 20th of December. If you use +the downloaded file, please note that you need to consider the levels +above the given threshold as the latest date. The downloaded file simply +provides the latest date at a given temperature. +""" import calendar import datetime @@ -11,17 +20,7 @@ def get_description(): """Return a dict describing how to call this plotter""" - desc = {} - desc["data"] = True - desc["description"] = """This chart presents the last date of fall or first - date of spring that a given temperature threshold was last or first - reached. Note that leap day creates some ambiguity with an analysis like - this, so for example, the 15th of November is considered equal for each - year. The plot truncates once you reach the 20th of December. If you use - the downloaded file, please note that you need to consider the levels - above the given threshold as the latest date. The downloaded file simply - provides the latest date at a given temperature. - """ + desc = {"data": True, "description": __doc__} desc["arguments"] = [ dict( type="station", @@ -106,4 +105,4 @@ def plotter(fdict): if __name__ == "__main__": - plotter(dict(station="IA7594", network="IACLIMATE")) + plotter({}) diff --git a/htdocs/plotting/auto/scripts100/p170.py b/htdocs/plotting/auto/scripts100/p170.py index 690befefd6..97c6ecc81e 100644 --- a/htdocs/plotting/auto/scripts100/p170.py +++ b/htdocs/plotting/auto/scripts100/p170.py @@ -24,9 +24,10 @@ import datetime import pandas as pd +from pyiem.database import get_sqlalchemy_conn from pyiem.exceptions import NoDataFound from pyiem.plot import figure_axes -from pyiem.util import get_autoplot_context, get_sqlalchemy_conn +from pyiem.util import get_autoplot_context PDICT = { "TS": "All Thunder Reports (TS)", @@ -173,4 +174,4 @@ def plotter(fdict): if __name__ == "__main__": - plotter(dict(zstation="ALO", year=2017, var="TSFZRA", network="IA_ASOS")) + plotter({}) diff --git a/htdocs/plotting/auto/scripts100/p173.py b/htdocs/plotting/auto/scripts100/p173.py index e336483fca..673ebf19fe 100644 --- a/htdocs/plotting/auto/scripts100/p173.py +++ b/htdocs/plotting/auto/scripts100/p173.py @@ -1,4 +1,13 @@ -"""autoplot ya'll""" +""" +This chart presents the hourly average wind speeds +by month of the year or by custom periods. +The hours presented are valid in the local time zone +of the reporting station. For example in Iowa, 3 PM would represent +3 PM CDT in the summer and 3 PM CST in the winter. Please complain to us +if this logic causes you heartburn! The format of the date periods is +two digit month followed by two digit day for both the start and end +date. 
+""" import calendar import datetime @@ -13,17 +22,7 @@ def get_description(): """Return a dict describing how to call this plotter""" - desc = {} - desc["data"] = True - desc["cache"] = 86400 - desc["description"] = """This chart presents the hourly average wind speeds - by month of the year or by custom periods. - The hours presented are valid in the local time zone - of the reporting station. For example in Iowa, 3 PM would represent - 3 PM CDT in the summer and 3 PM CST in the winter. Please complain to us - if this logic causes you heartburn! The format of the date periods is - two digit month followed by two digit day for both the start and end - date.""" + desc = {"description": __doc__, "data": True, "cache": 86400} today = datetime.date.today() desc["arguments"] = [ dict( diff --git a/htdocs/plotting/auto/scripts100/p175.py b/htdocs/plotting/auto/scripts100/p175.py index 00eb9f9e2b..97e777d3ac 100644 --- a/htdocs/plotting/auto/scripts100/p175.py +++ b/htdocs/plotting/auto/scripts100/p175.py @@ -1,4 +1,9 @@ -"""snow cover coverage.""" +""" +This chart displays estimated areal coverage of +snow cover for a single state. This estimate is based on a 0.125x0.125 +degree analysis of NWS COOP observations. The date shown would represent +snow depth reported approximately at 7 AM. +""" import datetime import os @@ -7,28 +12,18 @@ import numpy as np import pandas as pd from pyiem import iemre, reference +from pyiem.database import get_sqlalchemy_conn from pyiem.exceptions import NoDataFound from pyiem.grid.zs import CachingZonalStats from pyiem.plot import figure_axes -from pyiem.util import ( - convert_value, - get_autoplot_context, - get_sqlalchemy_conn, - ncopen, -) +from pyiem.util import convert_value, get_autoplot_context, ncopen def get_description(): """Return a dict describing how to call this plotter""" - desc = {} + desc = {"description": __doc__, "data": True, "cache": 86400} today = datetime.date.today() year = today.year if today.month > 9 else today.year - 1 - desc["description"] = """This chart displays estimated areal coverage of - snow cover for a single state. This estimate is based on a 0.125x0.125 - degree analysis of NWS COOP observations. The date shown would represent - snow depth reported approximately at 7 AM. - """ - desc["data"] = True desc["arguments"] = [ dict( type="year", diff --git a/htdocs/plotting/auto/scripts100/p180.py b/htdocs/plotting/auto/scripts100/p180.py index 1a0e39864c..9252fe5c43 100644 --- a/htdocs/plotting/auto/scripts100/p180.py +++ b/htdocs/plotting/auto/scripts100/p180.py @@ -1,17 +1,17 @@ -"""This application plots daily climatology for - a location or two of your choice. You can pick which climatology to use - and effectively build a difference plot when picking the same station, - but using a different climatology.""" -# stdlib +""" +This application plots daily climatology for +a location or two of your choice. You can pick which climatology to use +and effectively build a difference plot when picking the same station, +but using a different climatology. 
+""" import calendar import matplotlib.dates as mdates - -# third party import pandas as pd +from pyiem.database import get_sqlalchemy_conn from pyiem.exceptions import NoDataFound from pyiem.plot import figure, fitbox -from pyiem.util import get_autoplot_context, get_sqlalchemy_conn +from pyiem.util import get_autoplot_context PDICT = { "por": "Period of Record (por) Climatology", @@ -347,19 +347,4 @@ def plotter(fdict): if __name__ == "__main__": - plotter( - dict( - v="snow", - network1="IACLIMATE", - station1="IA0112", - c1="1951", - network2="IACLIMATE", - station2="IA0112", - c2="ncei81", - s="0", - sy1=1981, - ey1=2010, - sy2=1991, - ey2=2020, - ) - ) + plotter({}) diff --git a/htdocs/plotting/auto/scripts100/p187.py b/htdocs/plotting/auto/scripts100/p187.py index bbc4475b0b..f2d4abd4db 100644 --- a/htdocs/plotting/auto/scripts100/p187.py +++ b/htdocs/plotting/auto/scripts100/p187.py @@ -1,13 +1,15 @@ -"""This chart presents the rank a station's yearly - summary value has against an unweighted population of available - observations in the state. The green line is a simple average of the - plot. +""" +This chart presents the rank a station's yearly +summary value has against an unweighted population of available +observations in the state. The green line is a simple average of the +plot. """ import pandas as pd +from pyiem.database import get_sqlalchemy_conn from pyiem.exceptions import NoDataFound from pyiem.plot import figure -from pyiem.util import get_autoplot_context, get_sqlalchemy_conn +from pyiem.util import get_autoplot_context PDICT = { "precip": "Total Precipitation", diff --git a/htdocs/plotting/auto/scripts100/p188.py b/htdocs/plotting/auto/scripts100/p188.py index ec39b4a9ba..e785ec3682 100644 --- a/htdocs/plotting/auto/scripts100/p188.py +++ b/htdocs/plotting/auto/scripts100/p188.py @@ -1,11 +1,21 @@ -"""Min temp after, max temp after, count of days""" +""" +This plot presents the yearly minimum temperature +after the first spring temperature above a given value or the maximum +temperature after the first fall temperature below a given value. The +terms spring and fall are simply representing the first half and second +half of the year respectively. So for example using the default plot +options, this chart displays the maximum high temperature after the first +fall season sub 32 low temperature and then the number of days that the +high reached 60+ degrees until the end of each year. +""" import calendar import datetime import pandas as pd +from pyiem.database import get_sqlalchemy_conn from pyiem.exceptions import NoDataFound from pyiem.plot import figure -from pyiem.util import get_autoplot_context, get_sqlalchemy_conn +from pyiem.util import get_autoplot_context PDICT = { "spring": "Min Temp after first Spring Temp above", @@ -15,17 +25,7 @@ def get_description(): """Return a dict describing how to call this plotter""" - desc = {} - desc["data"] = True - desc["description"] = """This plot presents the yearly minimum temperature - after the first spring temperature above a given value or the maximum - temperature after the first fall temperature below a given value. The - terms spring and fall are simply representing the first half and second - half of the year respectively. So for example using the default plot - options, this chart displays the maximum high temperature after the first - fall season sub 32 low temperature and then the number of days that the - high reached 60+ degrees until the end of each year. 
- """ + desc = {"data": True, "description": __doc__} today = datetime.date.today() desc["arguments"] = [ dict( @@ -71,16 +71,15 @@ def plotter(fdict): varname = ctx["var"] thres = ctx["thres"] thres2 = ctx["thres2"] - table = f"alldata_{station[:2]}" if varname == "fall": - sql = f""" + sql = """ WITH doy as ( - SELECT year, min(day) from {table} WHERE station = %s + SELECT year, min(day) from alldata WHERE station = %s and low < %s and month > 6 GROUP by year), agg as ( SELECT a.year, max(high) as peak_value, sum(case when high >= %s then 1 else 0 end) as count_days - from {table} a, doy d + from alldata a, doy d WHERE a.station = %s and a.year = d.year and a.day > d.min GROUP by a.year) @@ -94,14 +93,14 @@ def plotter(fdict): ctx["ax1_ylabel"] = r"Max High Temperature $^\circ$F" ctx["ax2_xlabel"] = "Date of First Sub %.0f Low" % (thres,) else: - sql = f""" + sql = """ WITH doy as ( - SELECT year, min(day) from {table} WHERE station = %s + SELECT year, min(day) from alldata WHERE station = %s and high >= %s and month < 6 GROUP by year), agg as ( SELECT a.year, min(low) as peak_value, sum(case when low < %s then 1 else 0 end) as count_days - from {table} a, doy d + from alldata a, doy d WHERE a.station = %s and a.year = d.year and a.day > d.min and a.month < 6 GROUP by a.year) diff --git a/htdocs/plotting/auto/scripts100/p189.py b/htdocs/plotting/auto/scripts100/p189.py index a6eff1ff21..921a7e2310 100644 --- a/htdocs/plotting/auto/scripts100/p189.py +++ b/htdocs/plotting/auto/scripts100/p189.py @@ -1,20 +1,23 @@ -"""Create plots of yearly totals and optionally fit - a linear trendline. Here is a brief description of some of the - available metrics. - +""" +Create plots of yearly totals and optionally fit +a linear trendline. Here is a brief description of some of the +available metrics. + -

If you plot the DJF period, the year shown is the year of the - December within the three year period.""" +

If you plot the DJF period, the year shown is the year of the +December within the three year period. +""" import datetime import pandas as pd +from pyiem.database import get_sqlalchemy_conn from pyiem.exceptions import NoDataFound from pyiem.plot import figure_axes -from pyiem.util import get_autoplot_context, get_sqlalchemy_conn +from pyiem.util import get_autoplot_context from scipy.stats import linregress BOOLS = { @@ -102,6 +105,14 @@ "month_bounds": "and month in (6,7,8)", "valid_offset": " ", }, + "summer_avg_era5land_soilm1m_avg": { + "title": "Summer [JJA] Average ERA5-Land 0-1m Soil Moisture", + "ylabel": "Soil Moisture [m3/m3]", + "xlabel": "Year", + "func": "avg(era5land_soilm1m_avg)", + "month_bounds": "and month in (6,7,8)", + "valid_offset": " ", + }, "spring_avg_temp": { "title": "Spring [MAM] Average Temperature", "ylabel": "Temperature [F]", diff --git a/htdocs/plotting/auto/scripts100/p190.py b/htdocs/plotting/auto/scripts100/p190.py index 3ba995118e..00a9ef3621 100644 --- a/htdocs/plotting/auto/scripts100/p190.py +++ b/htdocs/plotting/auto/scripts100/p190.py @@ -1,13 +1,15 @@ -"""This chart presents the year that the present - day climatology record resides.""" +""" +This chart presents the year that the present day climatology record resides. +""" import calendar import matplotlib.colors as mpcolors import numpy as np import pandas as pd +from pyiem.database import get_sqlalchemy_conn from pyiem.exceptions import NoDataFound from pyiem.plot import figure, get_cmap -from pyiem.util import get_autoplot_context, get_sqlalchemy_conn +from pyiem.util import get_autoplot_context def get_description(): diff --git a/htdocs/plotting/auto/scripts100/p191.py b/htdocs/plotting/auto/scripts100/p191.py index f45cdeb07e..1f345382bc 100644 --- a/htdocs/plotting/auto/scripts100/p191.py +++ b/htdocs/plotting/auto/scripts100/p191.py @@ -14,11 +14,12 @@ import datetime import pandas as pd +from pyiem.database import get_dbconn, get_sqlalchemy_conn from pyiem.exceptions import NoDataFound from pyiem.nws import vtec from pyiem.plot import calendar_plot from pyiem.reference import state_names -from pyiem.util import get_autoplot_context, get_dbconn, get_sqlalchemy_conn +from pyiem.util import get_autoplot_context from sqlalchemy import text PDICT = {"yes": "Colorize Cells in Chart", "no": "Just plot values please"} @@ -236,12 +237,4 @@ def plotter(fdict): if __name__ == "__main__": - plotter( - dict( - phenomenav1="FG", - significancev1="Y", - sdate="2017-01-01", - edate="2017-12-31", - wfo="OUN", - ) - ) + plotter({}) diff --git a/htdocs/plotting/auto/scripts100/p193.py b/htdocs/plotting/auto/scripts100/p193.py index e725c7c82f..575fd11344 100644 --- a/htdocs/plotting/auto/scripts100/p193.py +++ b/htdocs/plotting/auto/scripts100/p193.py @@ -1,4 +1,8 @@ -"""Generalized mapper of AZOS data""" +""" +Generates a map of WPC Quantitative Precipitation +Forecast (QPF) and most recent US Drought Monitor to the date choosen to +plot the WPC forecast +""" import datetime import os @@ -6,7 +10,7 @@ import pygrib from pyiem.exceptions import NoDataFound from pyiem.plot import MapPlot, get_cmap, pretty_bins -from pyiem.util import get_autoplot_context, mm2inch, utc +from pyiem.util import archive_fetch, get_autoplot_context, mm2inch, utc PDICT = {"120": "Five Day", "168": "Seven Day"} PDICT2 = {"0": "0z (7 PM CDT)", "12": "12z (7 AM CDT)"} @@ -24,12 +28,7 @@ def get_description(): """Return a dict describing how to call this plotter""" - desc = {} - desc["data"] = False - desc["cache"] = 600 - 
desc["description"] = """Generates a map of WPC Quantitative Precipitation - Forecast (QPF) and most recent US Drought Monitor to the date choosen to - plot the WPC forecast.""" + desc = {"data": False, "cache": 600, "description": __doc__} utcnow = datetime.datetime.utcnow() desc["arguments"] = [ dict( @@ -89,19 +88,15 @@ def plotter(fdict): period = ctx["f"] scale = ctx["scale"] valid = utc(date.year, date.month, date.day, int(z)) - gribfn = valid.strftime( - ( - "/mesonet/ARCHIVE/data/%Y/%m/%d/model/wpc/" - "p" + period + "m_%Y%m%d%Hf" + period + ".grb" - ) - ) - if not os.path.isfile(gribfn): - raise NoDataFound(f"gribfn {gribfn} missing") - - grbs = pygrib.open(gribfn) - grb = grbs[1] - precip = mm2inch(grb.values) - lats, lons = grb.latlons() + with archive_fetch( + valid.strftime(f"%Y/%m/%d/model/wpc/p{period}m_%Y%m%d%Hf{period}.grb") + ) as gribfn: + if not os.path.isfile(gribfn): + raise NoDataFound(f"gribfn {gribfn} missing") + grbs = pygrib.open(gribfn) + grb = grbs[1] + precip = mm2inch(grb.values) + lats, lons = grb.latlons() title = ( f"Weather Prediction Center {PDICT[period]} " diff --git a/htdocs/plotting/auto/scripts100/p196.py b/htdocs/plotting/auto/scripts100/p196.py index 57918cb374..d759807412 100644 --- a/htdocs/plotting/auto/scripts100/p196.py +++ b/htdocs/plotting/auto/scripts100/p196.py @@ -16,10 +16,11 @@ """ import pandas as pd +from pyiem.database import get_sqlalchemy_conn from pyiem.exceptions import NoDataFound from pyiem.nws.vtec import NWS_COLORS, get_ps_string from pyiem.plot import figure_axes -from pyiem.util import get_autoplot_context, get_sqlalchemy_conn +from pyiem.util import get_autoplot_context PDICT = { "no": "Consider all Heat Index / Wind Chill Values", @@ -179,4 +180,4 @@ def plotter(fdict): if __name__ == "__main__": - plotter(dict(opt="no", var="chill")) + plotter({}) diff --git a/htdocs/plotting/auto/scripts100/p199.py b/htdocs/plotting/auto/scripts100/p199.py index 6453888899..0eef5e78ad 100644 --- a/htdocs/plotting/auto/scripts100/p199.py +++ b/htdocs/plotting/auto/scripts100/p199.py @@ -6,11 +6,12 @@ import pandas as pd from metpy.units import units +from pyiem.database import get_sqlalchemy_conn from pyiem.exceptions import NoDataFound from pyiem.network import Table as NetworkTable # This is needed. from pyiem.plot.geoplot import MapPlot from pyiem.tracker import loadqc -from pyiem.util import get_autoplot_context, get_sqlalchemy_conn, mm2inch +from pyiem.util import get_autoplot_context, mm2inch PLOTTYPES = { "1": "Max/Min 4 Inch Soil Temps", @@ -415,4 +416,4 @@ def plotter(fdict): if __name__ == "__main__": - plotter(dict(opt="5", date="2019-04-21")) + plotter({}) diff --git a/htdocs/raccoon/index.phtml b/htdocs/raccoon/index.phtml index e65a66e326..8d640c1d5a 100644 --- a/htdocs/raccoon/index.phtml +++ b/htdocs/raccoon/index.phtml @@ -21,7 +21,6 @@ $sts = gmmktime($hour1, 0, 0, $month1, $day1, $year); $ets = gmmktime($hour2, 0, 0, $month2, $day2, $year); $wtype = isset($_GET["wtype"]) ? $_GET["wtype"] : array('TO', 'SV'); - if (isset($_REQUEST["wfo"])) { /* Check to make sure we have warnings first! 
*/ $postgis = iemdb("postgis"); diff --git a/htdocs/raccoon/wait.phtml b/htdocs/raccoon/wait.phtml index 7f99c659c5..529a07b039 100644 --- a/htdocs/raccoon/wait.phtml +++ b/htdocs/raccoon/wait.phtml @@ -8,35 +8,41 @@ require_once "../../include/database.inc.php"; $jobid = xssafe($_REQUEST["jobid"]); $status = "PROCESSING"; -if (isset($_REQUEST["jobid"])){ +if (isset($_REQUEST["jobid"])) { $mesosite = iemdb("mesosite"); pg_prepare($mesosite, "SELECT", "SELECT * from racoon_jobs where jobid = $1"); - $rs = pg_execute($mesosite, "SELECT", Array($jobid)); + $rs = pg_execute($mesosite, "SELECT", array($jobid)); if (pg_num_rows($rs) < 1) die(); - $row = pg_fetch_assoc($rs,0); + $row = pg_fetch_assoc($rs, 0); $sts = strtotime($row["sts"]); $ets = strtotime($row["ets"]); - $basefn = sprintf("%s-%s-%s-%s-%s.ppt", $row["wfo"], - str_replace(",", "_", $row["wtype"]), - $row["radar"], gmdate("YmdH", $sts), gmdate("YmdH", $ets)); + $basefn = sprintf( + "%s-%s-%s-%s-%s.ppt", + $row["wfo"], + str_replace(",", "_", $row["wtype"]), + $row["radar"], + gmdate("YmdH", $sts), + gmdate("YmdH", $ets) + ); $fn = sprintf("/mesonet/share/pickup/raccoon/%s", $basefn); - if (is_file($fn)){ + if (is_file($fn)) { $status = "DONE!"; } - + pg_close($mesosite); } $t->title = "Raccoon - Warnings overview in PowerPoint"; -if ($status != "DONE!"){ - $t->headextra = ""; +if ($status != "DONE!") { + $t->headextra = ""; } $c = ""; -if ($status == "DONE!"){ +if ($status == "DONE!") { $c = sprintf( '

Download your powerpoint here', - $basefn); + $basefn + ); } $t->content = <<IEM Raccoon diff --git a/htdocs/request/asos/1min.phtml b/htdocs/request/asos/1min.phtml index 2fbe164e3e..bba8c00090 100644 --- a/htdocs/request/asos/1min.phtml +++ b/htdocs/request/asos/1min.phtml @@ -1,4 +1,4 @@ -title = "ASOS One Minute Data Download"; $aend = get_iemprop("asos.1min.end"); $archive_end = "N/A"; -if (! is_null($aend)){ +if (!is_null($aend)) { $asos_archive_end = new DateTime($aend); $archive_end = $asos_archive_end->format('d M Y'); } $bogus = 0; $y1select = yearSelect2(2000, date("Y"), "year1"); -$m1select = monthSelect($bogus, "month1"); -$d1select = daySelect2("1", "day1"); -$h1select = hourSelect($bogus, "hour1"); -$min1select = minuteSelect($bogus, "minute1"); +$m1select = monthSelect($bogus, "month1"); +$d1select = daySelect2("1", "day1"); +$h1select = hourSelect($bogus, "hour1"); +$min1select = minuteSelect($bogus, "minute1"); $y2select = yearSelect2(2000, date("Y"), "year2"); $m2select = monthSelect(date("m"), "month2"); @@ -40,7 +40,7 @@ $t->headextra = << EOM; -$t->content = <<content = <<

  • ASOS Mainpage
  • Download ASOS 1 minute interval data
  • @@ -179,5 +179,5 @@ How shall the output values be seperated? -EOF; +EOM; $t->render('full.phtml'); diff --git a/htdocs/request/asos/csv.php b/htdocs/request/asos/csv.php index b81ab29e2c..53b40ad97f 100644 --- a/htdocs/request/asos/csv.php +++ b/htdocs/request/asos/csv.php @@ -2,6 +2,7 @@ /* Generate a CSV file based on a request */ require_once "../../../config/settings.inc.php"; require_once "../../../include/database.inc.php"; +require_once "../../../include/forms.php"; $mesosite = iemdb("mesosite"); $access = iemdb("iem"); $asos = iemdb("asos"); @@ -15,7 +16,7 @@ $sql = sprintf("SELECT id, ST_DistanceSphere(geom, ST_geometryfromtext('POINT(%.4f %.4f)',4326)) as dist from stations WHERE network ~* 'ASOS' ORDER by dist ASC - LIMIT 5", $_GET["lon"], $_GET["lat"]); + LIMIT 5", xssafe($_GET["lon"]), xssafe($_GET["lat"])); $rs = pg_exec($mesosite, $sql); for ($i = 0; $row = pg_fetch_array($rs); $i++) { $stations[$i] = $row["id"]; diff --git a/htdocs/request/asos/hourlyprecip.phtml b/htdocs/request/asos/hourlyprecip.phtml index e896c42046..51c56e1154 100644 --- a/htdocs/request/asos/hourlyprecip.phtml +++ b/htdocs/request/asos/hourlyprecip.phtml @@ -1,4 +1,4 @@ -iemss = True; $t->title = "Download Hourly Precipitation Data"; -$network = isset($_REQUEST["network"]) ? xssafe($_REQUEST["network"]): "IA_ASOS"; +$network = isset($_REQUEST["network"]) ? xssafe($_REQUEST["network"]) : "IA_ASOS"; $nselect = ""; @@ -36,7 +41,7 @@ $y2 = yearSelect2(1941, date("Y"), "year2"); $m2 = monthSelect(date("m"), "month2"); $d2 = daySelect2(date("d"), "day2"); -$ar = Array( +$ar = array( "Etc/UTC" => "Coordinated Universal Time (UTC)", "America/New_York" => "America/New_York (EST/EDT)", "America/Chicago" => "America/Chicago (CST/CDT)", @@ -49,10 +54,10 @@ $tzselect = make_select("tz", "America/Chicago", $ar); $t->content = << -
  • ASOS Mainpage
  • -
  • IEM Computed Hourly Precipitation Totals
  • +
  • ASOS Mainpage
  • +
  • IEM Computed Hourly Precipitation Totals
  • - +

    The IEM attempts to take the METAR reports of precipitation and then provide just the hourly precipitation totals. These totals are not for the true hour (00 to 59 after), but for the hour between the standard METAR @@ -93,7 +98,7 @@ data prior to 2010 for missing hours. Start: {$y1}{$m1}{$d1} - + End: @@ -109,7 +114,7 @@ data prior to 2010 for missing hours.

    4) Timezone of Observation Times:

    The following options are available for how the observation time is - presented.

    + presented.

    {$tzselect} diff --git a/scripts/GIS/24h_lsr.py b/scripts/GIS/24h_lsr.py index cb6523f5a0..9b951b3a6a 100644 --- a/scripts/GIS/24h_lsr.py +++ b/scripts/GIS/24h_lsr.py @@ -8,8 +8,9 @@ import tempfile import zipfile -from geopandas import read_postgis -from pyiem.util import get_sqlalchemy_conn, logger, utc +import geopandas as gpd +from pyiem.database import get_sqlalchemy_conn +from pyiem.util import logger, utc LOG = logger() FIELDS = { @@ -37,7 +38,7 @@ def main(): # out of the shapefile ets = utc() + datetime.timedelta(minutes=+1) with get_sqlalchemy_conn("postgis") as conn: - df = read_postgis( + df = gpd.read_postgis( """ SELECT distinct l.geom, to_char(valid at time zone 'UTC', 'YYYYMMDDHH24MI') as VALID, diff --git a/scripts/GIS/ffg2geotiff.py b/scripts/GIS/ffg2geotiff.py deleted file mode 100644 index 75a442ab86..0000000000 --- a/scripts/GIS/ffg2geotiff.py +++ /dev/null @@ -1,32 +0,0 @@ -"""Generate a GeoTIFF from the FFG product.""" -import os -import sys - -import pygrib -import pyproj -from pyiem.util import logger, utc - -LOG = logger() - - -def main(argv): - """Go Main.""" - valid = utc(*[int(i) for i in argv[1:]]) - grbfn = ( - "/mesonet/ARCHIVE/data/" - f"{valid:%Y/%m/%d}/model/ffg/5kmffg_{valid:%Y%m%d%H}.grib2" - ) - if not os.path.isfile(grbfn): - LOG.warning("%s is missing", grbfn) - return - grbs = pygrib.open(grbfn) - pj = None - for grb in grbs: - if pj is None: - pj = pyproj.Proj(grb.projparams) - # grb.stepRange to get the hour - print(grb.projparams) - - -if __name__ == "__main__": - main(sys.argv) diff --git a/scripts/RUN_50_AFTER.sh b/scripts/RUN_50_AFTER.sh index 0fd82842c7..ef751092ba 100644 --- a/scripts/RUN_50_AFTER.sh +++ b/scripts/RUN_50_AFTER.sh @@ -18,7 +18,7 @@ HH=$(date +%H) if [ "$HH" -eq "22" ] then cd ../../climodat - python hrrr_solarrad.py $(date +'%Y %m %d') + python hrrr_solarrad.py --date=$(date +'%Y-%m-%d') fi # END \ No newline at end of file diff --git a/scripts/RUN_NOON.sh b/scripts/RUN_NOON.sh index be2d2326ed..490344173d 100644 --- a/scripts/RUN_NOON.sh +++ b/scripts/RUN_NOON.sh @@ -27,7 +27,7 @@ python daily_estimator.py --date=$(date --date '1 days ago' +'%Y-%m-%d') python daily_estimator.py --date=$(date --date '7 days ago' +'%Y-%m-%d') python compute4regions.py $(date +'%Y %m %d') python compute4regions.py $(date --date '1 days ago' +'%Y %m %d') -python hrrr_solarrad.py $(date --date '1 days ago' +'%Y %m %d') +python hrrr_solarrad.py --date=$(date --date '1 days ago' +'%Y-%m-%d') # Sync any coop data that may have updated over the past 24 hours python sync_coop_updates.py diff --git a/scripts/climodat/hrrr_solarrad.py b/scripts/climodat/hrrr_solarrad.py index 462056acf5..181e7412f0 100644 --- a/scripts/climodat/hrrr_solarrad.py +++ b/scripts/climodat/hrrr_solarrad.py @@ -6,16 +6,17 @@ # pylint: disable=unpacking-non-sequence import datetime import os -import sys +import click import geopandas as gpd import numpy as np import pandas as pd import pygrib import pyproj from affine import Affine +from pyiem.database import get_dbconnc, get_sqlalchemy_conn from pyiem.grid.zs import CachingZonalStats -from pyiem.util import get_dbconn, get_sqlalchemy_conn, logger, utc +from pyiem.util import archive_fetch, logger, utc LOG = logger() LCC = ( @@ -69,6 +70,7 @@ def build_stations(dt) -> pd.DataFrame: df[COL] = np.nan df["i"] = np.nan df["j"] = np.nan + df["day"] = dt LOG.info("Found %s database entries", len(df.index)) return df @@ -100,54 +102,57 @@ def compute(df, sids, dt, do_regions=False): sts, sts + datetime.timedelta(hours=23), 
freq="1h" ): # Try the newer f01 files, which have better data! - fn = now.strftime( - "/mesonet/ARCHIVE/data/%Y/%m/%d/model/hrrr/%H/" - "hrrr.t%Hz.3kmf01.grib2" - ) - if os.path.isfile(fn): + with archive_fetch( + now.strftime("%Y/%m/%d/model/hrrr/%H/hrrr.t%Hz.3kmf01.grib2") + ) as fn: + if os.path.isfile(fn): + grbs = pygrib.open(fn) + selgrbs = grbs.select(name=GRBNAME) + if len(selgrbs) == 4: + # Goodie + for g in selgrbs: + if total is None: + xaxis, yaxis = get_grid(g) + affine = Affine( + g["DxInMetres"], + 0, + xaxis[0], + 0, + 0 - g["DyInMetres"], + yaxis[-1], + ) + total = g.values * 900.0 # 15 min data + else: + total += g.values * 900.0 + continue + with archive_fetch( + now.strftime("%Y/%m/%d/model/hrrr/%H/hrrr.t%Hz.3kmf00.grib2") + ) as fn: + if not os.path.isfile(fn): + LOG.info("Missing %s", fn) + continue grbs = pygrib.open(fn) - selgrbs = grbs.select(name=GRBNAME) - if len(selgrbs) == 4: - # Goodie - for g in selgrbs: - if total is None: - xaxis, yaxis = get_grid(g) - affine = Affine( - g["DxInMetres"], - 0, - xaxis[0], - 0, - 0 - g["DyInMetres"], - yaxis[-1], - ) - total = g.values * 900.0 # 15 min data - else: - total += g.values * 900.0 + try: + if now >= SWITCH_DATE: + grb = grbs.select(name=GRBNAME) + else: + grb = grbs.select(parameterNumber=192) + except ValueError: continue - fn = now.strftime( - "/mesonet/ARCHIVE/data/%Y/%m/%d/model/hrrr/%H/" - "hrrr.t%Hz.3kmf00.grib2" - ) - if not os.path.isfile(fn): - LOG.info("Missing %s", fn) - continue - grbs = pygrib.open(fn) - try: - if now >= SWITCH_DATE: - grb = grbs.select(name=GRBNAME) + g = grb[0] + if total is None: + xaxis, yaxis = get_grid(g) + affine = Affine( + g["DxInMetres"], + 0, + xaxis[0], + 0, + 0 - g["DyInMetres"], + yaxis[-1], + ) + total = g.values * 3600.0 else: - grb = grbs.select(parameterNumber=192) - except ValueError: - continue - g = grb[0] - if total is None: - xaxis, yaxis = get_grid(g) - affine = Affine( - g["DxInMetres"], 0, xaxis[0], 0, 0 - g["DyInMetres"], yaxis[-1] - ) - total = g.values * 3600.0 - else: - total += g.values * 3600.0 + total += g.values * 3600.0 if total is None: LOG.warning("No HRRR data for %s", dt) @@ -168,9 +173,11 @@ def compute(df, sids, dt, do_regions=False): LOG.info("IA0200 %s", df.at["IA0200", COL]) -def main(argv): +@click.command() +@click.option("--date", "dt", type=click.DateTime(), help="UTC Valid Time") +def main(dt): """Do Something""" - dt = datetime.date(int(argv[1]), int(argv[2]), int(argv[3])) + dt = dt.date() df = build_stations(dt) # We currently do two options # 1. 
For morning sites 1-11 AM, they get yesterday's radiation @@ -180,18 +187,15 @@ def main(argv): sids = df[df[COL].isna()].index.values compute(df, sids, dt) - pgconn = get_dbconn("coop") - cursor = pgconn.cursor() - for sid, row in df[df[COL].notna()].iterrows(): - cursor.execute( - f"UPDATE alldata set {COL} = %s where station = %s and " - "day = %s", - (row[COL], sid, dt), - ) + pgconn, cursor = get_dbconnc("coop") + cursor.executemany( + f"UPDATE alldata set {COL} = %({COL})s where station = %(station)s " + "and day = %(day)s", + df[df[COL].notna()].reset_index().to_dict("records"), + ) cursor.close() pgconn.commit() if __name__ == "__main__": - # run main() run - main(sys.argv) + main() diff --git a/scripts/climodat/narr_solarrad.py b/scripts/climodat/narr_solarrad.py index 53ff31ffb7..601e77b115 100644 --- a/scripts/climodat/narr_solarrad.py +++ b/scripts/climodat/narr_solarrad.py @@ -35,8 +35,9 @@ import pygrib import pyproj from affine import Affine +from pyiem.database import get_dbconn, get_sqlalchemy_conn from pyiem.grid.zs import CachingZonalStats -from pyiem.util import get_dbconn, get_sqlalchemy_conn, logger, utc +from pyiem.util import archive_fetch, logger, utc LOG = logger() LCC = ( @@ -89,29 +90,29 @@ def compute(df, sids, dt, do_regions=False): yaxis = None while now < ets: # See if we have Grib data first - fn = now.strftime( - "/mesonet/ARCHIVE/data/%Y/%m/%d/model/NARR/rad_%Y%m%d%H00.grib" - ) - if not os.path.isfile(fn): - LOG.warning("Missing %s", fn) - else: - grb = pygrib.open(fn)[1] - if total is None: - xaxis, yaxis = get_grid(grb) - affine = Affine( - grb["DxInMetres"], - 0, - xaxis[0], - 0, - 0 - grb["DyInMetres"], - yaxis[-1], - ) - - # W/m2 over 3 hours J/m2 to MJ/m2 - total = grb["values"] * 10800.0 / 1_000_000.0 + with archive_fetch( + now.strftime("%Y/%m/%d/model/NARR/rad_%Y%m%d%H00.grib") + ) as fn: + if not os.path.isfile(fn): + LOG.warning("Missing %s", fn) else: - total += grb["values"] * 10800.0 / 1_000_000.0 - now += datetime.timedelta(hours=3) + grb = pygrib.open(fn)[1] + if total is None: + xaxis, yaxis = get_grid(grb) + affine = Affine( + grb["DxInMetres"], + 0, + xaxis[0], + 0, + 0 - grb["DyInMetres"], + yaxis[-1], + ) + + # W/m2 over 3 hours J/m2 to MJ/m2 + total = grb["values"] * 10800.0 / 1_000_000.0 + else: + total += grb["values"] * 10800.0 / 1_000_000.0 + now += datetime.timedelta(hours=3) df["i"] = np.digitize(df["projx"].values, xaxis) df["j"] = np.digitize(df["projy"].values, yaxis) @@ -143,6 +144,7 @@ def build_stations(dt) -> pd.DataFrame: df[COL] = np.nan df["i"] = np.nan df["j"] = np.nan + df["day"] = dt LOG.info("Found %s database entries", len(df.index)) return df @@ -163,11 +165,11 @@ def do(dt): pgconn = get_dbconn("coop") cursor = pgconn.cursor() - for sid, row in df[df[COL].notna()].iterrows(): - cursor.execute( - f"UPDATE alldata set {COL} = %s where station = %s and day = %s", - (row["narr_srad"], sid, dt), - ) + cursor.executemany( + f"UPDATE alldata set {COL} = %({COL})s where station = %(station)s " + "and day = %(day)s", + df[df[COL].notna()].reset_index().to_dict(orient="records"), + ) cursor.close() pgconn.commit() diff --git a/scripts/coop/day_precip.py b/scripts/coop/day_precip.py index 456b791860..4e2abeb39c 100644 --- a/scripts/coop/day_precip.py +++ b/scripts/coop/day_precip.py @@ -6,8 +6,8 @@ import subprocess import tempfile +from pyiem.database import get_dbconn from pyiem.network import Table as NetworkTable -from pyiem.util import get_dbconn def main(): diff --git a/scripts/coop/email_rr3_to_harry.py 
b/scripts/coop/email_rr3_to_harry.py index 4f11c905b6..39c100039c 100644 --- a/scripts/coop/email_rr3_to_harry.py +++ b/scripts/coop/email_rr3_to_harry.py @@ -9,7 +9,7 @@ from email.mime.base import MIMEBase from email.mime.multipart import MIMEMultipart -from pyiem.util import get_dbconn +from pyiem.database import get_dbconn WFOS = ["KDMX", "KARX", "KDVN", "KFSD", "KOAX"] diff --git a/scripts/coop/use_acis.py b/scripts/coop/use_acis.py index eecd4a82a5..5d76f65ffe 100644 --- a/scripts/coop/use_acis.py +++ b/scripts/coop/use_acis.py @@ -6,10 +6,11 @@ import click import pandas as pd import requests +from pyiem.database import get_dbconnc, get_sqlalchemy_conn from pyiem.network import Table as NetworkTable from pyiem.observation import Observation from pyiem.reference import TRACE_VALUE -from pyiem.util import get_dbconnc, get_sqlalchemy_conn, logger +from pyiem.util import logger from tqdm import tqdm LOG = logger() diff --git a/scripts/iemre/daily_analysis.py b/scripts/iemre/daily_analysis.py index aaf67561cb..aafd380489 100644 --- a/scripts/iemre/daily_analysis.py +++ b/scripts/iemre/daily_analysis.py @@ -324,7 +324,7 @@ def use_asos_daily(ts, ds): ds["max_rh"].values = res -def use_climodat_daily(ts, ds): +def use_climodat_daily(ts: datetime.date, ds): """Do our gridding""" mybuf = 2.0 giswkt = "SRID=4326;POLYGON((%s %s, %s %s, %s %s, %s %s, %s %s))" % ( @@ -372,7 +372,8 @@ def use_climodat_daily(ts, ds): ), ) if len(df.index) < 4: - LOG.warning("Failed quorum") + if ts != datetime.date.today(): + LOG.warning("Failed quorum") return suffix = "_all" if ts.year < 1951 else "" res = generic_gridder(df, f"highdata{suffix}") @@ -386,7 +387,7 @@ def use_climodat_daily(ts, ds): ds["p01d"].values = convert_value(res, "inch", "mm") -def workflow(ts): +def workflow(ts: datetime.date): """Do Work""" # load up our current data ds = iemre.get_grids(ts) diff --git a/scripts/iemre/init_daily_mrms.py b/scripts/iemre/init_daily_mrms.py index c0fd529776..941062e0c8 100644 --- a/scripts/iemre/init_daily_mrms.py +++ b/scripts/iemre/init_daily_mrms.py @@ -20,7 +20,7 @@ def init_year(ts): LOG.warn("Cowardly refusing to overwrite %s", fn) return nc = ncopen(fn, "w") - nc.title = "MRMS Daily Precipitation %s" % (ts.year,) + nc.title = f"MRMS Daily Precipitation {ts.year}" nc.platform = "Grided Estimates" nc.description = "MRMS 0.01 degree grid" nc.institution = "Iowa State University, Ames, IA, USA" diff --git a/scripts/iemre/init_narr.py b/scripts/iemre/init_narr.py index f17566f81f..1ed507f322 100644 --- a/scripts/iemre/init_narr.py +++ b/scripts/iemre/init_narr.py @@ -5,12 +5,10 @@ import numpy as np import pygrib -from pyiem.util import logger, ncopen +from pyiem.util import archive_fetch, logger, ncopen # This exists on dev laptop :/ -TEMPLATE_FN = ( - "/mesonet/ARCHIVE/data/1980/01/01/model/NARR/apcp_198001010000.grib" -) +TEMPLATE_FN = "1980/01/01/model/NARR/apcp_198001010000.grib" BASEDIR = "/mesonet/data/iemre" LOG = logger() @@ -20,10 +18,11 @@ def init_year(ts): Create a new NetCDF file for a year of our specification! 
""" # Load up the example grib file to base our file on - grbs = pygrib.open(TEMPLATE_FN) - grb = grbs[1] - # grid shape is y, x - lats, lons = grb.latlons() + with archive_fetch(TEMPLATE_FN) as fn: + grbs = pygrib.open(fn) + grb = grbs[1] + # grid shape is y, x + lats, lons = grb.latlons() fp = f"{BASEDIR}/{ts.year}_narr.nc" if os.path.isfile(fp): diff --git a/scripts/iemre/merge_narr.py b/scripts/iemre/merge_narr.py index 7a77af64f2..5e2df75e81 100644 --- a/scripts/iemre/merge_narr.py +++ b/scripts/iemre/merge_narr.py @@ -11,7 +11,7 @@ import numpy as np import pygrib from pyiem import iemre -from pyiem.util import logger, ncopen +from pyiem.util import archive_fetch, logger, ncopen LOG = logger() @@ -20,16 +20,15 @@ def to_netcdf(valid): """Persist this 1 hour precip information to the netcdf storage Recall that this timestep has data for the previous hour""" - fn = ( - f"/mesonet/ARCHIVE/data/{valid:%Y/%m/%d}/model/NARR/" - f"apcp_{valid:%Y%m%d%H%M}.grib" - ) - if not os.path.isfile(fn): - LOG.warning("Missing file %s", fn) - return False - gribs = pygrib.open(fn) - grb = gribs[1] - val = grb.values + with archive_fetch( + f"{valid:%Y/%m/%d}/model/NARR/apcp_{valid:%Y%m%d%H%M}.grib" + ) as fn: + if not os.path.isfile(fn): + LOG.warning("Missing file %s", fn) + return False + gribs = pygrib.open(fn) + grb = gribs[1] + val = grb.values tidx = int((iemre.hourly_offset(valid) + 1) / 3) LOG.info("%s np.min: %s np.max: %s", tidx, np.min(val), np.max(val)) diff --git a/scripts/usdm/process_usdm.py b/scripts/usdm/process_usdm.py index ee161626fd..9014e06494 100644 --- a/scripts/usdm/process_usdm.py +++ b/scripts/usdm/process_usdm.py @@ -9,7 +9,8 @@ import fiona import requests -from pyiem.util import exponential_backoff, get_dbconnc, logger +from pyiem.database import get_dbconnc +from pyiem.util import exponential_backoff, logger from shapely.geometry import MultiPolygon, shape LOG = logger() diff --git a/scripts/year/plot_stage4.py b/scripts/year/plot_stage4.py index a73eb734cb..541a2e738d 100644 --- a/scripts/year/plot_stage4.py +++ b/scripts/year/plot_stage4.py @@ -31,8 +31,7 @@ def main(): # Plot departure from normal mp = MapPlot( sector="midwest", - title=("Precipitation Departure %s - %s") - % (sts.strftime("%b %d %Y"), ets.strftime("%b %d %Y")), + title=f"Precipitation Departure {sts:%b %d %Y} - {ets:%b %d %Y}", subtitle="based on IEM Estimates", ) @@ -47,8 +46,7 @@ def main(): # Plot normals mp = MapPlot( sector="midwest", - title=("Normal Precipitation:: %s - %s") - % (sts.strftime("%b %d %Y"), ets.strftime("%b %d %Y")), + title=f"Normal Precipitation:: {sts:%b %d %Y} - {ets:%b %d %Y}", subtitle="based on IEM Estimates", ) @@ -61,8 +59,7 @@ def main(): # Plot Obs mp = MapPlot( sector="midwest", - title=("Estimated Precipitation:: %s - %s") - % (sts.strftime("%b %d %Y"), ets.strftime("%b %d %Y")), + title=f"Estimated Precipitation:: {sts:%b %d %Y} - {ets:%b %d %Y}", subtitle="based on IEM Estimates", ) From e304977da9345ec4489a48fb86bcc04cfb8f8c9d Mon Sep 17 00:00:00 2001 From: akrherz Date: Sun, 11 Feb 2024 13:07:03 -0600 Subject: [PATCH 2/3] fix: implement min func for IEMRE daily, sigh --- scripts/iemre/daily_analysis.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/iemre/daily_analysis.py b/scripts/iemre/daily_analysis.py index aafd380489..77e212970d 100644 --- a/scripts/iemre/daily_analysis.py +++ b/scripts/iemre/daily_analysis.py @@ -163,7 +163,9 @@ def copy_iemre_hourly(ts, ds): aggfunc = np.ma.max if vname.startswith("p01d"): aggfunc = np.ma.sum 
# was np.nansum, better check this - if vname == "avg_dwpk": + elif vname in ["low_tmpk", "low_tmpk_12z"]: + aggfunc = np.ma.min + elif vname == "avg_dwpk": aggfunc = np.ma.mean ncvarname = ( vname.replace("high_", "") From 3aba0134c6bfdae346d10d9491f112eec8b48304 Mon Sep 17 00:00:00 2001 From: akrherz Date: Sun, 11 Feb 2024 13:13:37 -0600 Subject: [PATCH 3/3] mnt: address lint --- scripts/climodat/narr_solarrad.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/climodat/narr_solarrad.py b/scripts/climodat/narr_solarrad.py index 601e77b115..425c82d128 100644 --- a/scripts/climodat/narr_solarrad.py +++ b/scripts/climodat/narr_solarrad.py @@ -166,8 +166,8 @@ def do(dt): cursor = pgconn.cursor() cursor.executemany( - f"UPDATE alldata set {COL} = %({COL})s where station = %(station)s " - "and day = %(day)s", + "UPDATE alldata set narr_srad = %(narr_srad)s where " + "station = %(station)s and day = %(day)s", df[df[COL].notna()].reset_index().to_dict(orient="records"), )
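
Note on the executemany pattern these patches adopt in scripts/climodat/hrrr_solarrad.py and scripts/climodat/narr_solarrad.py: the per-row UPDATE loops are replaced by a single cursor.executemany() call fed from DataFrame.to_dict(orient="records"), so each row dict supplies the %(name)s placeholders. A minimal sketch follows; the station ids, values, and date are made-up placeholders, and it assumes the same pyiem.database.get_dbconnc helper the first patch already imports.

    import pandas as pd
    from pyiem.database import get_dbconnc

    # Hypothetical stand-in for the frame built by build_stations(): only the
    # columns the UPDATE statement needs are shown here.
    df = pd.DataFrame(
        {"narr_srad": [12.3, 8.9], "day": ["2024-02-09", "2024-02-09"]},
        index=pd.Index(["IA0200", "IA0112"], name="station"),
    )

    pgconn, cursor = get_dbconnc("coop")
    # reset_index() promotes the station index to a column, so every record
    # dict carries station, day, and narr_srad for the named placeholders.
    cursor.executemany(
        "UPDATE alldata set narr_srad = %(narr_srad)s "
        "where station = %(station)s and day = %(day)s",
        df[df["narr_srad"].notna()].reset_index().to_dict(orient="records"),
    )
    cursor.close()
    pgconn.commit()

The hrrr_solarrad.py hunk uses the same shape, with its COL name interpolated into both the SET clause and the placeholder; binding one statement template against a list of record dicts moves the iteration into the driver instead of hand-written per-row cursor.execute() calls.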