From c27c01216e0b82596f2fd66bd2b0d49b39329b05 Mon Sep 17 00:00:00 2001
From: akrherz
Date: Sat, 18 Nov 2023 06:16:44 -0600
Subject: [PATCH 1/3] mnt: use faster reprojection

---
 scripts/iemre/grid_rsds.py | 34 ++++++++++++----------------------
 1 file changed, 12 insertions(+), 22 deletions(-)

diff --git a/scripts/iemre/grid_rsds.py b/scripts/iemre/grid_rsds.py
index fe6a00f63b..521c27817c 100644
--- a/scripts/iemre/grid_rsds.py
+++ b/scripts/iemre/grid_rsds.py
@@ -12,6 +12,8 @@
 import numpy as np
 import pygrib
 import pyproj
+import xarray as xr
+from affine import Affine
 from pyiem import iemre
 from pyiem.util import get_dbconn, logger, ncopen, utc
 from scipy.interpolate import NearestNDInterpolator
@@ -105,8 +107,6 @@ def do_hrrr(ts):
         "+units=m +lat_2=38.5 +lat_1=38.5 +lat_0=38.5"
     )
     total = None
-    xaxis = None
-    yaxis = None
     # So IEMRE is storing data from coast to coast, so we should be
     # aggressive about running for an entire calendar date
     for hr in range(24):
@@ -136,12 +136,8 @@ def do_hrrr(ts):
                 lat1 = g["latitudeOfFirstGridPointInDegrees"]
                 lon1 = g["longitudeOfFirstGridPointInDegrees"]
                 llcrnrx, llcrnry = LCC(lon1, lat1)
-                nx = g["Nx"]
-                ny = g["Ny"]
                 dx = g["DxInMetres"]
                 dy = g["DyInMetres"]
-                xaxis = llcrnrx + dx * np.arange(nx)
-                yaxis = llcrnry + dy * np.arange(ny)
             if subtotal is None:
                 subtotal = g.values
             else:
@@ -179,12 +175,8 @@ def do_hrrr(ts):
             lat1 = g["latitudeOfFirstGridPointInDegrees"]
             lon1 = g["longitudeOfFirstGridPointInDegrees"]
             llcrnrx, llcrnry = LCC(lon1, lat1)
-            nx = g["Nx"]
-            ny = g["Ny"]
             dx = g["DxInMetres"]
             dy = g["DyInMetres"]
-            xaxis = llcrnrx + dx * np.arange(nx)
-            yaxis = llcrnry + dy * np.arange(ny)
         else:
             total += g.values
 
@@ -194,18 +186,16 @@ def do_hrrr(ts):
     # We wanna store as W m-2, so we just average out the data by hour
     total = total / 24.0
-
-    ds = iemre.get_grids(ts.date(), varnames="rsds")
-    for i, lon in enumerate(iemre.XAXIS):
-        for j, lat in enumerate(iemre.YAXIS):
-            (x, y) = LCC(lon, lat)
-            i2 = np.digitize([x], xaxis)[0]
-            j2 = np.digitize([y], yaxis)[0]
-            try:
-                ds["rsds"].values[j, i] = total[j2, i2]
-            except IndexError:
-                continue
-
+    affine_in = Affine(dx, 0.0, llcrnrx, 0.0, dy, llcrnry)
+
+    ds = xr.Dataset(
+        {
+            "rsds": xr.DataArray(
+                iemre.reproject2iemre(total, affine_in, LCC.crs),
+                dims=("y", "x"),
+            )
+        }
+    )
     iemre.set_grids(ts.date(), ds)
     subprocess.call(
         ["python", "db_to_netcdf.py", f"{ts:%Y}", f"{ts:%m}", f"{ts:%d}"]
     )

From 55840db62342a94aa74068e71133d045724ac297 Mon Sep 17 00:00:00 2001
From: akrherz
Date: Sat, 18 Nov 2023 06:31:48 -0600
Subject: [PATCH 2/3] feat: add auto compute max/min month comp

https://mesonet.agron.iastate.edu/plotting/auto/?q=39
---
 htdocs/plotting/auto/scripts/p39.py | 59 +++++++++++++++++++++++------
 1 file changed, 47 insertions(+), 12 deletions(-)

diff --git a/htdocs/plotting/auto/scripts/p39.py b/htdocs/plotting/auto/scripts/p39.py
index 6624c4f6df..9363ac67f7 100644
--- a/htdocs/plotting/auto/scripts/p39.py
+++ b/htdocs/plotting/auto/scripts/p39.py
@@ -12,7 +12,13 @@
 import numpy as np
 from pyiem.exceptions import NoDataFound
 from pyiem.plot import figure_axes
-from pyiem.util import get_autoplot_context, get_dbconn
+from pyiem.util import get_autoplot_context, get_dbconnc
+
+PDICT = {
+    "manual": "Select comparison month manually",
+    "high": "Based on effective date, find warmest same month on record",
+    "low": "Based on effective date, find coldest same month on record",
+}
 
 
 def get_description():
@@ -28,6 +34,13 @@
             network="IACLIMATE",
             label="Select Station:",
         ),
+        {
+            "type": "select",
+            "name": "compare",
+            "default": "manual",
+            "label": "How to compare?",
+            "options": PDICT,
+        },
         dict(
             type="year",
             name="year",
@@ -52,16 +65,35 @@
     return desc
 
 
+def compute_compare_month(ctx, cursor):
+    """Figure out what the user wants."""
+    year = ctx["year"]
+    month = ctx["month"]
+    compare = ctx["compare"]
+    if compare == "manual":
+        return year, month
+    station = ctx["station"]
+    effective_date = ctx["date"]
+    cursor.execute(
+        f"""
+    select year, avg((high+low)/2) from alldata
+    where station = %s and month = %s and year != %s
+    GROUP by year
+    ORDER by avg {'desc' if compare == 'high' else 'asc'} LIMIT 1
+    """,
+        (station, effective_date.month, effective_date.year),
+    )
+    return cursor.fetchone()["year"], effective_date.month
+
+
 def plotter(fdict):
     """Go"""
-    pgconn = get_dbconn("coop")
-    cursor = pgconn.cursor()
+    pgconn, cursor = get_dbconnc("coop")
     ctx = get_autoplot_context(fdict, get_description())
     station = ctx["station"]
-    year = ctx["year"]
-    month = ctx["month"]
     effective_date = ctx["date"]
+    year, month = compute_compare_month(ctx, cursor)
 
     oldmonth = datetime.date(year, month, 1)
     sts = datetime.date(effective_date.year, effective_date.month, 1)
 
@@ -70,21 +102,22 @@
     # beat month
     cursor.execute(
-        "SELECT extract(day from day), (high+low)/2. from "
+        "SELECT extract(day from day), (high+low)/2. as t from "
         "alldata WHERE station = %s and year = %s and month = %s "
         "ORDER by day ASC",
         (station, year, month),
     )
     if cursor.rowcount == 0:
+        pgconn.close()
         raise NoDataFound("No Data Found.")
     prevmonth = []
     for row in cursor:
-        prevmonth.append(float(row[1]))
+        prevmonth.append(float(row["t"]))
 
     # build history
     cursor.execute(
-        "SELECT year, day, (high+low)/2. from alldata "
+        "SELECT year, day, (high+low)/2. as t from alldata "
         "WHERE station = %s and month = %s and extract(day from day) <= %s "
         "and day < %s ORDER by day ASC",
         (station, effective_date.month, days, ets),
@@ -92,10 +125,12 @@
     for i, row in enumerate(cursor):
         if i == 0:
-            baseyear = row[0]
-            data = np.ma.ones((effective_date.year - row[0] + 1, days)) * -99
-        data[row[0] - baseyear, row[1].day - 1] = row[2]
-
+            baseyear = row["year"]
+            data = (
+                np.ma.ones((effective_date.year - row["year"] + 1, days)) * -99
+            )
+        data[row["year"] - baseyear, row["day"].day - 1] = row["t"]
+    pgconn.close()
     # Do we have data for the effective_date ?
     pos = (
         effective_date.day

From e2d31d9f928bddab345831d2717986244e21d38e Mon Sep 17 00:00:00 2001
From: akrherz
Date: Sat, 18 Nov 2023 06:32:13 -0600
Subject: [PATCH 3/3] sundry updates

---
 htdocs/plotting/auto/scripts100/p192.py | 4 ++--
 htdocs/plotting/auto/scripts200/p206.py | 2 +-
 htdocs/vtec/search.php                  | 2 +-
 scripts/00z/generate_rtp.py             | 2 +-
 scripts/12z/generate_rtp.py             | 2 +-
 5 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/htdocs/plotting/auto/scripts100/p192.py b/htdocs/plotting/auto/scripts100/p192.py
index 5e70366fac..6e723f94bf 100644
--- a/htdocs/plotting/auto/scripts100/p192.py
+++ b/htdocs/plotting/auto/scripts100/p192.py
@@ -97,7 +97,7 @@ def get_df(ctx, bnds, buf=2.25):
         select id, st_x(geom) as lon, st_y(geom) as lat, state, wfo
         from stations where network ~* 'ASOS' and
-        ST_contains(ST_geomfromtext(%s), geom)
+        ST_contains(ST_GeomFromEWKT(%s), geom)
     )
     SELECT station, vsby, tmpf, dwpf, sknt, state, wfo, lat, lon, relh,
     abs(extract(epoch from (%s - valid))) as tdiff from
@@ -125,7 +125,7 @@ def get_df(ctx, bnds, buf=2.25):
         WHERE s.network ~* 'ASOS' and s.country = 'US' and
         valid + '80 minutes'::interval > now() and
         vsby >= 0 and vsby <= 10 and
-        ST_contains(ST_geomfromtext(%s), geom)
+        ST_contains(ST_GeomFromEWKT(%s), geom)
     """,
         conn,
         params=(giswkt,),
diff --git a/htdocs/plotting/auto/scripts200/p206.py b/htdocs/plotting/auto/scripts200/p206.py
index 52fe0508de..c8595ef6a7 100644
--- a/htdocs/plotting/auto/scripts200/p206.py
+++ b/htdocs/plotting/auto/scripts200/p206.py
@@ -141,7 +141,7 @@ def get_df(ctx, buf=2.25):
         select id, st_x(geom) as lon, st_y(geom) as lat, state, wfo,
         iemid, country, geom from stations where network ~* 'ASOS' and
-        ST_contains(ST_geomfromtext(%s), geom)
+        ST_contains(ST_GeomFromEWKT(%s), geom)
     )
     SELECT s.day, s.max_tmpf, s.min_tmpf, s.max_dwpf, s.min_dwpf,
     s.min_rh, s.max_rh, s.min_feel, s.max_feel,
diff --git a/htdocs/vtec/search.php b/htdocs/vtec/search.php
index 7646ddad04..f08a1fca84 100644
--- a/htdocs/vtec/search.php
+++ b/htdocs/vtec/search.php
@@ -33,7 +33,7 @@
 $t->content = <<<EOM
 <p>This application allows you to search for National Weather Service Watch,
-Warning, and Advisories. There are currently two options:
+Warning, and Advisories. There are currently three options:
 1. Search for Storm Based Warnings by Point
 2. Search of Watch/Warning/Advisories by County/Zone or by Point
diff --git a/scripts/00z/generate_rtp.py b/scripts/00z/generate_rtp.py
index cbf847eaf2..8939ec94b4 100644
--- a/scripts/00z/generate_rtp.py
+++ b/scripts/00z/generate_rtp.py
@@ -26,7 +26,7 @@
     },
     {
         "filename": "awos_rtp_00z_dvn.shef",
-        "networks": "IA_ASOS ISUSM IA_DCP IA_RWIS IL_RWIS IL_ASOS".split(),
+        "networks": "IL_ASOS IA_ASOS ISUSM IA_DCP IL_RWIS IA_RWIS".split(),
         "precip_works": ["IA_ASOS", "IL_ASOS", "ISUSM"],
         "limiter": "wfo = :wfo",
         "filter": ["wfo", "DVN"],
diff --git a/scripts/12z/generate_rtp.py b/scripts/12z/generate_rtp.py
index 18318f8134..4ea76b17f0 100644
--- a/scripts/12z/generate_rtp.py
+++ b/scripts/12z/generate_rtp.py
@@ -24,7 +24,7 @@
     },
     {
         "filename": "awos_rtp_dvn.shef",
-        "networks": "IA_ASOS ISUSM IA_DCP IA_RWIS IL_RWIS IL_ASOS".split(),
+        "networks": "IL_ASOS IA_ASOS ISUSM IA_DCP IL_RWIS IA_RWIS".split(),
        "precip_works": ["IA_ASOS", "IL_ASOS", "ISUSM"],
         "limiter": "wfo = :wfo",
         "filter": ["wfo", "DVN"],
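For context on PATCH 1/3: the per-cell np.digitize lookup is replaced by a single
iemre.reproject2iemre(total, affine_in, LCC.crs) call, driven by an Affine transform
that describes the HRRR grid. Below is a minimal sketch of what that transform
encodes; the corner and spacing values are invented for illustration, not read from
the GRIB metadata as the patch does.

from affine import Affine

# Hypothetical HRRR-like grid: 3 km spacing, lower-left corner in LCC metres.
dx, dy = 3000.0, 3000.0
llcrnrx, llcrnry = -2697520.0, -1587306.0

# Same construction as the patch: x = llcrnrx + dx * col, y = llcrnry + dy * row,
# with no rotation terms.
affine_in = Affine(dx, 0.0, llcrnrx, 0.0, dy, llcrnry)

# Column 10, row 5 of the radiation grid maps to these projected coordinates.
x, y = affine_in * (10, 5)
print(x, y)  # -2667520.0 -1572306.0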