Merge pull request #1035 from akrherz/241218-3
Omnibus
akrherz authored Dec 19, 2024
2 parents f984970 + 0f2b069 commit b36281b
Showing 31 changed files with 308 additions and 256 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -6,7 +6,7 @@ repos:
hooks:
- id: eslint
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: "v0.8.3"
rev: "v0.8.4"
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
2 changes: 1 addition & 1 deletion pylib/iemweb/autoplot/scripts/p4.py
@@ -74,7 +74,7 @@ def plotter(fdict):
raise NoDataFound("Data not available for year")
with ncopen(ncfn) as nc:
precip = nc.variables["p01d"]
- czs = CachingZonalStats(iemre.AFFINE)
+ czs = CachingZonalStats(iemre.DOMAINS[""]["affine"])
hasdata = np.zeros(
(nc.dimensions["lat"].size, nc.dimensions["lon"].size)
)
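The one-line substitution above swaps the old module-level iemre.AFFINE constant for a lookup in the per-domain registry; the same change recurs in p89.py, p175.py, and compute4regions.py below. A minimal sketch of the new pattern, assuming only the pyiem names this diff itself exercises (the commented gen_stats call mirrors compute4regions.py):

    from pyiem import iemre
    from pyiem.grid.zs import CachingZonalStats

    # "" is the default (CONUS) domain key used throughout these hunks
    czs = CachingZonalStats(iemre.DOMAINS[""]["affine"])
    # Zonal stats over a north-up grid then follow, as in compute4regions.py:
    #   stats = czs.gen_stats(np.flipud(precip_grid), gdf["geom"])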
2 changes: 1 addition & 1 deletion pylib/iemweb/autoplot/scripts/p89.py
@@ -99,7 +99,7 @@ def get_data(ctx):
raise NoDataFound(f"Missing {ncfn}")
with ncopen(ncfn) as nc:
precip = nc.variables["p01d"]
- czs = CachingZonalStats(iemre.AFFINE)
+ czs = CachingZonalStats(iemre.DOMAINS[""]["affine"])
hasdata = np.zeros(
(nc.dimensions["lat"].size, nc.dimensions["lon"].size)
)
2 changes: 1 addition & 1 deletion pylib/iemweb/autoplot/scripts100/p175.py
@@ -81,7 +81,7 @@ def plotter(fdict):
if not os.path.isfile(ncfn):
raise NoDataFound(f"Data for year {sts.year} not found")
with ncopen(ncfn) as nc:
- czs = CachingZonalStats(iemre.AFFINE)
+ czs = CachingZonalStats(iemre.DOMAINS[""]["affine"])
hasdata = np.zeros(
(nc.dimensions["lat"].size, nc.dimensions["lon"].size)
)
2 changes: 1 addition & 1 deletion pylib/iemweb/autoplot/scripts100/p182.py
@@ -17,7 +17,7 @@
from pyiem import iemre, util
from pyiem.exceptions import NoDataFound
from pyiem.grid.zs import CachingZonalStats
- from pyiem.iemre import MRMS4IEMRE_AFFINE
+ from pyiem.mrms import MRMS4IEMRE_AFFINE
from pyiem.plot import figure_axes
from pyiem.reference import state_names

2 changes: 1 addition & 1 deletion pylib/iemweb/autoplot/scripts100/p185.py
@@ -14,7 +14,7 @@
from pyiem import iemre, util
from pyiem.exceptions import NoDataFound
from pyiem.grid.zs import CachingZonalStats
- from pyiem.iemre import MRMS4IEMRE_AFFINE
+ from pyiem.mrms import MRMS4IEMRE_AFFINE
from pyiem.plot import get_cmap
from pyiem.plot.geoplot import MapPlot

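p182.py and p185.py change only where MRMS4IEMRE_AFFINE is imported from; the constant now lives in pyiem.mrms. For clarity, the updated import on its own:

    # Only the home module changed; the constant itself is untouched by this PR.
    from pyiem.mrms import MRMS4IEMRE_AFFINE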
5 changes: 2 additions & 3 deletions pylib/iemweb/iemre/daily.py
@@ -26,7 +26,7 @@

import numpy as np
import pyiem.prism as prismutil
- from pyiem import iemre
+ from pyiem import iemre, mrms
from pyiem.util import convert_value, ncopen
from pyiem.webutil import iemapp

@@ -85,8 +85,7 @@ def application(environ, start_response):
if not os.path.isfile(ncfn):
mrms_precip = None
else:
- j2 = int((lat - iemre.SOUTH) * 100.0)
- i2 = int((lon - iemre.WEST) * 100.0)
+ i2, j2 = mrms.find_ij(lon, lat)
with ncopen(ncfn) as nc:
mrms_precip = nc.variables["p01d"][offset, j2, i2] / 25.4
else:
6 changes: 2 additions & 4 deletions pylib/iemweb/iemre/multiday.py
@@ -25,7 +25,7 @@
import numpy as np
import pyiem.prism as prismutil
from pydantic import Field
- from pyiem import iemre
+ from pyiem import iemre, mrms
from pyiem.util import convert_value, ncopen
from pyiem.webutil import CGIModel, iemapp

@@ -98,7 +98,6 @@ def application(environ, start_response):
"Point not within any domain",
)
]
- dom = iemre.DOMAINS[domain]
i, j = iemre.find_ij(lon, lat, domain=domain)
if i is None or j is None:
return [
@@ -166,8 +165,7 @@ def application(environ, start_response):
prism_precip = [None] * (offset2 - offset1)

if ts1.year > 2000 and domain == "":
- j2 = int((lat - dom["south"]) * 100.0)
- i2 = int((lon - dom["west"]) * 100.0)
+ i2, j2 = mrms.find_ij(lon, lat)
with ncopen(iemre.get_daily_mrms_ncname(ts1.year)) as nc:
mrms_precip = nc.variables["p01d"][tslice, j2, i2] / 25.4
else:
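daily.py and multiday.py above both replace hand-rolled 0.01-degree index arithmetic with mrms.find_ij, so the MRMS grid definition lives in one place inside pyiem. A hedged sketch of the new lookup, with an invented point; out-of-grid behavior is not shown because this diff does not exercise it:

    from pyiem import mrms

    lon, lat = -93.62, 41.99  # hypothetical point in central Iowa
    i2, j2 = mrms.find_ij(lon, lat)
    # Replaces the removed arithmetic, roughly int((lon - WEST) * 100) and
    # int((lat - SOUTH) * 100) against the grid's southwest corner.
    print(i2, j2)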
42 changes: 20 additions & 22 deletions pylib/iemweb/json/stage4.py
@@ -35,6 +35,7 @@
from pydantic import Field, field_validator
from pyiem import iemre
from pyiem.reference import ISO8601
+ from pyiem.stage4 import find_ij
from pyiem.util import mm2inch, ncopen, utc
from pyiem.webutil import CGIModel, iemapp

@@ -43,8 +44,8 @@ class Schema(CGIModel):
"""See how we are called."""

callback: str = Field(None, description="JSONP callback function name")
- lat: float = Field(..., description="Latitude of point")
- lon: float = Field(..., description="Longitude of point")
+ lat: float = Field(..., description="Latitude of point", ge=-90, le=90)
+ lon: float = Field(..., description="Longitude of point", ge=-180, le=180)
valid: date = Field(..., description="Valid date of data")
tz: str = Field("UTC", description="Timezone of valid date")

@@ -88,26 +89,23 @@ def dowork(environ):
}
if not os.path.isfile(ncfn):
return json.dumps(res)
- with ncopen(ncfn) as nc:
- dist = (
- (nc.variables["lon"][:] - environ["lon"]) ** 2
- + (nc.variables["lat"][:] - environ["lat"]) ** 2
- ) ** 0.5
- (j, i) = np.unravel_index(dist.argmin(), dist.shape) # skipcq
- res["gridi"] = int(i)
- res["gridj"] = int(j)
-
- ppt = nc.variables["p01m"][sidx:eidx, j, i]
-
- for tx, pt in enumerate(ppt):
- valid = sts + timedelta(hours=tx)
- utcnow = valid.astimezone(ZoneInfo("UTC"))
- res["data"].append(
- {
- "end_valid": utcnow.strftime("%Y-%m-%dT%H:00:00Z"),
- "precip_in": myrounder(mm2inch(pt), 2),
- }
- )
+ i, j = find_ij(environ["lon"], environ["lat"])
+ if i is not None:
+ with ncopen(ncfn) as nc:
+ res["gridi"] = i
+ res["gridj"] = j
+
+ ppt = nc.variables["p01m"][sidx:eidx, j, i]
+
+ for tx, pt in enumerate(ppt):
+ valid = sts + timedelta(hours=tx)
+ utcnow = valid.astimezone(ZoneInfo("UTC"))
+ res["data"].append(
+ {
+ "end_valid": utcnow.strftime("%Y-%m-%dT%H:00:00Z"),
+ "precip_in": myrounder(mm2inch(pt), 2),
+ }
+ )

return json.dumps(res)

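The stage4.py rewrite above drops the brute-force nearest-cell search (a full 2-D distance field plus argmin) in favor of pyiem.stage4.find_ij, and the new "if i is not None" guard shows the helper signals off-grid points by returning None. A short sketch of that guard with a made-up point:

    from pyiem.stage4 import find_ij

    lon, lat = -93.62, 41.99  # hypothetical query point
    i, j = find_ij(lon, lat)
    if i is None:
        # mirrors the new guard in stage4.py: off-grid points produce no data rows
        print("point is outside the Stage IV grid")
    else:
        print(f"grid cell i={i}, j={j}")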
2 changes: 1 addition & 1 deletion scripts/climodat/compute4regions.py
@@ -83,7 +83,7 @@ def do_day(cursor, valid):
index_col="id",
geom_col="geom",
)
- czs = CachingZonalStats(iemre.AFFINE)
+ czs = CachingZonalStats(iemre.DOMAINS[""]["affine"])
sthigh = czs.gen_stats(np.flipud(high), gdf["geom"])
stlow = czs.gen_stats(np.flipud(low), gdf["geom"])
stprecip = czs.gen_stats(np.flipud(precip), gdf["geom"])
26 changes: 9 additions & 17 deletions scripts/climodat/era5land_extract.py
@@ -10,10 +10,10 @@
import geopandas as gpd
import numpy as np
import pandas as pd
- from affine import Affine
from pyiem.database import get_dbconn, get_sqlalchemy_conn
+ from pyiem.era5land import DOMAINS, find_ij
from pyiem.grid.zs import CachingZonalStats
- from pyiem.iemre import NORTH, WEST, hourly_offset
+ from pyiem.iemre import hourly_offset
from pyiem.util import convert_value, logger, ncopen, utc

LOG = logger()
@@ -31,9 +31,8 @@ def compute_regions(data, varname, df):
index_col="id",
geom_col="geom",
)
- affine = Affine(0.1, 0, WEST, 0, -0.1, NORTH)
- czs = CachingZonalStats(affine)
- data = czs.gen_stats(np.flipud(data), gdf["geom"])
+ czs = CachingZonalStats(DOMAINS[""]["AFFINE_NC"])
+ data = czs.gen_stats(data, gdf["geom"])
for i, sid in enumerate(gdf.index.values):
df.at[sid, varname] = data[i]

@@ -61,8 +60,6 @@ def build_stations(dt) -> pd.DataFrame:
"era5land_soilm1m_avg",
]:
df[col] = np.nan
df["i"] = np.nan
df["j"] = np.nan
LOG.info("Found %s database entries", len(df.index))
return df

@@ -79,8 +76,6 @@ def compute(df, sids, dt, do_regions=False):
# Wm-2 to MJ
factor = 3600.0 / 1_000_000.0
with ncopen(ncfn) as nc:
- lons = nc.variables["lon"][:]
- lats = nc.variables["lat"][:]
if f"{dt:%m%d}" == "1231":
rsds = np.sum(nc.variables["rsds"][idx0:], 0) * factor
# Close enough
@@ -104,20 +99,17 @@ def compute(df, sids, dt, do_regions=False):
) / 100.0
soilt = np.mean(nc.variables["soilt"][idx0:idx1, 0], 0)

df["i"] = np.digitize(df["lon"].values, lons)
df["j"] = np.digitize(df["lat"].values, lats)
rsds = rsds.filled(np.nan)
soilm = soilm.filled(np.nan)
soilm1m = soilm1m.filled(np.nan)
soilt = soilt.filled(np.nan)

for sid, row in df.loc[sids].iterrows():
df.at[sid, "era5land_srad"] = rsds[int(row["j"]), int(row["i"])]
df.at[sid, "era5land_soilt4_avg"] = soilt[int(row["j"]), int(row["i"])]
df.at[sid, "era5land_soilm4_avg"] = soilm[int(row["j"]), int(row["i"])]
df.at[sid, "era5land_soilm1m_avg"] = soilm1m[
int(row["j"]), int(row["i"])
]
i, j = find_ij(row["lon"], row["lat"])
df.at[sid, "era5land_srad"] = rsds[j, i]
df.at[sid, "era5land_soilt4_avg"] = soilt[j, i]
df.at[sid, "era5land_soilm4_avg"] = soilm[j, i]
df.at[sid, "era5land_soilm1m_avg"] = soilm1m[j, i]

if do_regions:
compute_regions(rsds, "era5land_srad", df)
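era5land_extract.py above now takes both grid definitions from pyiem.era5land instead of rebuilding them locally: DOMAINS[""]["AFFINE_NC"] for the zonal stats (the dropped np.flipud suggests that affine matches the storage order of the netCDF arrays) and find_ij for per-station lookups, which makes the np.digitize bookkeeping columns unnecessary. A brief sketch of the per-station part, with invented station locations:

    from pyiem.era5land import find_ij

    stations = {"AMES": (-93.62, 42.02), "CEDAR RAPIDS": (-91.67, 41.98)}  # made up
    for sid, (lon, lat) in stations.items():
        i, j = find_ij(lon, lat)
        # i, j index the rsds/soilt/soilm grids read from the ERA5-Land netCDF
        print(sid, i, j)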
7 changes: 4 additions & 3 deletions scripts/climodat/merra_solarrad.py
@@ -38,7 +38,8 @@ def compute_regions(rsds, df):
index_col="id",
geom_col="geom",
)
- affine = Affine(0.625, 0, -180.0, 0, -0.5, 90)
+ # This may not be exactly right, but alas
+ affine = Affine(0.625, 0, -180.3125, 0, -0.5, 90.5)
czs = CachingZonalStats(affine)
data = czs.gen_stats(np.flipud(rsds), gdf["geom"])
for i, sid in enumerate(gdf.index.values):
@@ -154,8 +155,8 @@ def do(dt: datetime):
cursor = pgconn.cursor()
for sid, row in df[df[COL].notna()].iterrows():
cursor.execute(
f"UPDATE alldata set {COL} = %s where station = %s and "
"day = %s",
f"UPDATE alldata set {COL} = %s where station = %s " # skipcq
"and day = %s",
(row[COL], sid, dt),
)

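The merra_solarrad.py affine tweak above reads as a center-to-edge correction in longitude, moving the origin half a grid cell west, while the latitude origin moves a full 0.5-degree cell to 90.5; the in-code comment already flags that this may still be approximate. A quick check of the arithmetic, using only the spacings hard-coded in this script:

    dx, dy = 0.625, 0.5      # grid spacings from the script's Affine
    print(-180.0 - dx / 2)   # -180.3125, the new x origin: half a cell west of the old -180.0
    print(90.0 + dy)         # 90.5, the new y origin: a full cell north of the old 90.0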
2 changes: 1 addition & 1 deletion scripts/current/ifc_today_total.py
@@ -13,7 +13,7 @@

def doday(ts, realtime):
"""
- Create a plot of precipitation stage4 estimates for some day
+ Create a plot of precipitation IFC estimates for some day
"""
idx = daily_offset(ts)
with ncopen(
2 changes: 1 addition & 1 deletion scripts/current/mrms_today_total.py
@@ -19,7 +19,7 @@

def doday(ts, realtime):
"""
- Create a plot of precipitation stage4 estimates for some day
+ Create a plot of precipitation MRMS estimates for some day
"""
lts = utc(ts.year, ts.month, ts.day, 12)
lts = lts.astimezone(ZoneInfo("America/Chicago"))
2 changes: 1 addition & 1 deletion scripts/current/q3_xhour.py
@@ -22,7 +22,7 @@

def doit(ts, hours):
"""
- Create a plot of precipitation stage4 estimates for some day
+ Create a plot of precipitation MRMS estimates for some day
"""
# Start at 1 AM
ts = ts.replace(minute=0, second=0, microsecond=0)