Skip to content
Merged
Show file tree
Hide file tree
Changes from 6 commits
Commits
Show all changes
20 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 12 additions & 0 deletions nlmod/dims/grid.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,18 @@ def xy_to_icell2d(xy, ds):
return icell2d


def get_icell2d_from_xy(x, y, ds, gi=None, rotated=True):
    """Get the icell2d-index of the cell containing a point defined by x and y.

    Parameters
    ----------
    x : float
        The x-coordinate of the point.
    y : float
        The y-coordinate of the point.
    ds : xarray.Dataset
        Dataset containing model grid information; used to build a modelgrid
        when gi is not supplied.
    gi : flopy.utils.GridIntersect, optional
        A GridIntersect instance for the model grid. When None, it is created
        from ds. The default is None.
    rotated : bool, optional
        If the model grid has a rotation and rotated is False, x and y are in
        model coordinates. Otherwise x and y are in real-world coordinates.
        Only used when gi is None. The default is True.

    Returns
    -------
    icell2d : int
        The icell2d-index of the cell containing the point.

    Raises
    ------
    ValueError
        When the point is outside of the model grid.
    """
    if gi is None:
        gi = flopy.utils.GridIntersect(
            modelgrid_from_ds(ds, rotated=rotated), method="vertex"
        )
    cellids = gi.intersects(Point(x, y))["cellids"]
    if len(cellids) < 1:
        raise ValueError(f"Point ({x}, {y}) is outside of the model grid")
    return cellids[0]


def xy_to_row_col(xy, ds):
"""Get the row and column values of a point defined by its x and y coordinates.

Expand Down
137 changes: 114 additions & 23 deletions nlmod/gwf/lake.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@
import numpy as np
import pandas as pd

from ..dims.layers import get_first_active_layer

logger = logging.getLogger(__name__)

LAKE_KWDS = [
Expand Down Expand Up @@ -37,11 +39,16 @@ def lake_from_gdf(
gdf,
ds,
recharge=True,
evaporation=True,
claktype="VERTICAL",
boundname_column="identificatie",
obs_type="STAGE",
surfdep=0.05,
pname="lak",
gwt=None,
obs_type_gwt="CONCENTRATION",
rainfall_concentration=0.0,
evaporation_concentration=0.0,
**kwargs,
):
"""Add a lake from a geodataframe.
Expand All @@ -59,7 +66,7 @@ def lake_from_gdf(
'RUNOFF', 'INFLOW', 'WITHDRAWAL', 'AUXILIARY', 'RATE', 'INVERT',
'WIDTH', 'SLOPE', 'ROUGH'. These columns should contain the name
of a dataarray in ds with the dimension time.
if the lake has any outlets they should be specified in the columns
if the lake has any outlets they should be specified in the column
lakeout : the lake number of the outlet, if this is -1 the water
is removed from the model.
optional columns are 'couttype', 'outlet_invert', 'outlet_width',
Expand All @@ -68,17 +75,19 @@ def lake_from_gdf(
ds : xr.DataSet
dataset containing relevant model grid and time information
recharge : bool, optional
if True recharge will be added to the lake and removed from the
recharge package. The recharge
if True recharge will be added to the lake as rainfall and removed from the rch
package.
evaporation : bool, optional
if True evaporation will be added to the lake and removed from the evt package.
claktype : str, optional
defines the lake-GWF connection type. For now only VERTICAL is
supported. The default is 'VERTICAL'.
defines the lake-GWF connection type. For now only VERTICAL is supported. The
default is 'VERTICAL'.
boundname_column : str, optional
THe name of the column in gdf to use for the boundnames. The default is
The name of the column in gdf to use for the boundnames. The default is
"identificatie", which is a unique identifier in the BGT.
surfdep : float, optional
Defines the surface depression depth for VERTICAL lake-GWF connections.
The default is 0.05.
Defines the surface depression depth for VERTICAL lake-GWF connections. The
default is 0.05.
pname : str, optional
name of the lake package. The default is 'lak'.
**kwargs :
Expand Down Expand Up @@ -111,6 +120,10 @@ def lake_from_gdf(
for iper in range(ds.sizes["time"]):
perioddata[iper] = []

if gwt is not None:
packagedata_gwt = []
perioddata_gwt = {0: []}

lake_settings = [setting for setting in LAKE_KWDS if setting in gdf.columns]

if "lakeout" in gdf.columns:
Expand All @@ -123,11 +136,19 @@ def lake_from_gdf(
noutlets = None
outlets = None

fal = get_first_active_layer(ds).data

for lakeno, lake_gdf in gdf.groupby("lakeno"):
nlakeconn = lake_gdf.shape[0]
strt = lake_gdf["strt"].iloc[0]
assert (lake_gdf["strt"] == strt).all(), "a single lake should have single strt"

if "strt" in lake_gdf:
strt = lake_gdf["strt"].iloc[0]
msg = "a single lake should have single strt"
assert (lake_gdf["strt"] == strt).all(), msg
else:
# take the area-weighted mean of the starting heads of the connected cells
head = ds["starting_head"].data[fal[lake_gdf.index], lake_gdf.index]
area = ds["area"].data[lake_gdf.index]
strt = (head * area).sum() / area.sum()
if boundname_column is not None:
boundname = lake_gdf[boundname_column].iloc[0]
assert (
Expand All @@ -139,7 +160,7 @@ def lake_from_gdf(

iconn = 0
for icell2d, row in lake_gdf.iterrows():
cellid = (0, icell2d) # assuming lake in the top layer
cellid = (fal[icell2d], icell2d)  # connect the lake to the first active layer

# If BEDLEAK is specified to be NONE, the lake-GWF connection
# conductance is solely a function of aquifer properties in the
Expand Down Expand Up @@ -204,19 +225,50 @@ def lake_from_gdf(
outsettings.append(setval)
outlets.append([outlet_no, lakeno, lakeout] + outsettings)
outlet_no += 1

if recharge and "recharge" not in ds:
logger.info("No recharge in Dataset. Setting recharge to False")
recharge = False
if evaporation and "evaporation" not in ds:
logger.info("No evaporation in Dataset. Setting evaporation to False")
evaporation = False
if recharge or evaporation:
cellids = [row[2][1] for row in connectiondata]
if recharge:
if "time" not in ds["recharge"].dims:
rech = ds["recharge"][cellids].values.mean()
# set recharge to zero in dataset
ds["recharge"][cellids] = 0
if evaporation:
if "time" not in ds["evaporation"].dims:
evap = ds["evaporation"][cellids].values.mean()
# set evaporation to zero in dataset
ds["evaporation"][cellids] = 0

for iper in range(ds.sizes["time"]):
if recharge:
# add recharge to lake
cellids = [row[2][1] for row in connectiondata]
rech = ds["recharge"][iper, cellids].values.mean()
if "time" in ds["recharge"].dims:
rech = ds["recharge"][iper, cellids].values.mean()
# set recharge to zero in dataset
ds["recharge"][iper, cellids] = 0

if rech >= 0:
perioddata[iper].append([lakeno, "RAINFALL", rech])
perioddata[iper].append([lakeno, "EVAPORATION", 0])
if not evaporation:
perioddata[iper].append([lakeno, "EVAPORATION", 0])
else:
perioddata[iper].append([lakeno, "RAINFALL", 0])
perioddata[iper].append([lakeno, "EVAPORATION", -rech])
# set recharge to zero in dataset
ds["recharge"][iper, cellids] = 0
if evaporation:
logger.warning("Ignoring negative recharge-values for lakes")
else:
perioddata[iper].append([lakeno, "EVAPORATION", -rech])
if evaporation:
if "time" in ds["evaporation"].dims:
evap = ds["evaporation"][iper, cellids].values.mean()
# set evaporation to zero in dataset
ds["evaporation"][iper, cellids] = 0
perioddata[iper].append([lakeno, "EVAPORATION", evap])

# add other time variant settings to lake
for lake_setting in lake_settings:
Expand All @@ -231,18 +283,37 @@ def lake_from_gdf(
perioddata[iper].append(
[lakeno, lake_setting, ds[datavar].values[iper]]
)
if gwt is not None:
if "strt_concentration" in lake_gdf.columns:
strt = lake_gdf["strt_concentration"].iloc[0]
msg = "a single lake should have single strt_concentration"
assert (lake_gdf["strt_concentration"] == strt).all(), msg
else:
# take the mean of the starting concentrations of the connected cells
conc = ds["chloride"].data[fal[lake_gdf.index], lake_gdf.index]
area = ds["area"].data[lake_gdf.index]
strt = (conc * area).sum() / area.sum()
if boundname_column is not None:
packagedata_gwt.append([lakeno, strt, boundname])
else:
packagedata_gwt.append([lakeno, strt])
if recharge:
perioddata_gwt[0].append([lakeno, "rainfall", rainfall_concentration])
if evaporation:
perioddata_gwt[0].append(
[lakeno, "evaporation", evaporation_concentration]
)

if use_outlets:
noutlets = len(outlets)

if boundname_column is not None:
observations = []
for boundname in np.unique(gdf[boundname_column]):
observations.append((boundname, obs_type, boundname))
observations = {f"{pname}_{obs_type}.csv": observations}
obs_list = [(x, obs_type, x) for x in np.unique(gdf[boundname_column])]
observations = {f"{pname}_{obs_type}.csv": obs_list}
else:
observations = None

boundnames = boundname_column is not None
lak = flopy.mf6.ModflowGwflak(
gwf,
surfdep=surfdep,
Expand All @@ -252,13 +323,33 @@ def lake_from_gdf(
packagedata=packagedata,
connectiondata=connectiondata,
perioddata=perioddata,
boundnames=boundname_column is not None,
boundnames=boundnames,
observations=observations,
budget_filerecord=f"{pname}.bgt",
stage_filerecord=f"{pname}.hds",
noutlets=noutlets,
outlets=outlets,
pname=pname,
**kwargs,
)

if gwt is not None:
if boundname_column is not None:
obs_list = [(x, obs_type_gwt, x) for x in np.unique(gdf[boundname_column])]
observations_gwt = {f"{pname}_{obs_type_gwt}.csv": obs_list}
else:
observations_gwt = None

lkt = flopy.mf6.ModflowGwtlkt(
gwt,
pname=lak.package_name,
packagedata=packagedata_gwt,
lakeperioddata=perioddata_gwt,
boundnames=boundnames,
observations=observations_gwt,
budget_filerecord=f"{pname}_gwt.bgt",
concentration_filerecord=f"{pname}_gwt.unc",
)
return lak, lkt

return lak
33 changes: 10 additions & 23 deletions nlmod/gwf/output.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,8 @@
import numpy as np
import pandas as pd
import xarray as xr
from shapely.geometry import Point

from ..dims.grid import get_affine_world_to_mod, modelgrid_from_ds
from ..dims.grid import get_icell2d_from_xy
from ..mfoutput.mfoutput import (
_get_budget_da,
_get_flopy_data_object,
Expand Down Expand Up @@ -389,9 +388,7 @@ def get_flow_lower_face(
return da


def get_head_at_point(
head, x, y, ds=None, gi=None, drop_nan_layers=True, rotated=False
):
def get_head_at_point(head, x, y, ds=None, gi=None, drop_nan_layers=True, rotated=True):
"""Get the head at a certain point from a head DataArray for all cells.

Parameters
Expand All @@ -404,8 +401,8 @@ def get_head_at_point(
y : float
The y-coordinate of the requested head.
ds : xarray.Dataset, optional
Xarray dataset with model data. Only used when a Vertex grid is used, and gi is
not supplied. The default is None.
Xarray dataset with model data. Only used when a Vertex grid is used. The
default is None.
gi : flopy.utils.GridIntersect, optional
A GridIntersect class, to determine the cell at point x,y. Only used when a
Vertex grid is used, and it is determined from ds when None. The default is
Expand All @@ -414,29 +411,19 @@ def get_head_at_point(
Drop layers that are NaN at all timesteps. The default is True.
rotated : bool, optional
If the model grid has a rotation, and rotated is False, x and y are in model
coordinates. Otherwise x and y are in real world coordinates. The defaults is
False.
coordinates. Otherwise x and y are in real world coordinates. The default is
True.

Returns
-------
head_point : xarray.DataArray
A DataArray with dimensions (time, layer).
"""
if rotated and "angrot" in ds.attrs and ds.attrs["angrot"] != 0.0:
# calculate model coordinates from the specified real-world coordinates
x, y = get_affine_world_to_mod(ds) * (x, y)

if "icell2d" in head.dims:
if gi is None:
if ds is None:
raise (
ValueError(
"Please supply either gi (GridIntersect) or ds for a vertex grid"
)
)
gi = flopy.utils.GridIntersect(modelgrid_from_ds(ds), method="vertex")
icelld2 = gi.intersect(Point(x, y))["cellids"][0]
head_point = head[:, :, icelld2]
if ds is None:
raise (ValueError("Please supply ds for a vertex grid"))
icell2d = get_icell2d_from_xy(x, y, ds=ds, gi=gi, rotated=rotated)
head_point = head.sel(icell2d=icell2d)
else:
head_point = head.interp(x=x, y=y, method="nearest")
if drop_nan_layers:
Expand Down
16 changes: 8 additions & 8 deletions nlmod/mfoutput/binaryfile.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,15 +48,15 @@ def _get_binary_head_data(kstpkper, fobj):
return arr


def __create3D(data, fobj, column="q"):
def __create3D(data, fobj, column="q", node="node"):
"""Adapted from CellBudgetFile.__create3D.

See flopy.utils.binaryfile.CellBudgetFile.__create3D.
"""
out = np.ma.zeros(fobj.nnodes, dtype=np.float32)
out.mask = True
for [node, q] in zip(data["node"], data[column]):
idx = node - 1
for n, q in zip(data[node], data[column]):
idx = n - 1
out.data[idx] += q
out.mask[idx] = False
return np.ma.reshape(out, fobj.shape)
Expand Down Expand Up @@ -105,7 +105,7 @@ def _select_data_indices_budget(fobj, text, kstpkper):
return select_indices


def _get_binary_budget_data(kstpkper, fobj, text, column="q"):
def _get_binary_budget_data(kstpkper, fobj, text, column="q", node="node"):
"""Get budget data from binary CellBudgetFile.

Code copied from flopy.utils.binaryfile.CellBudgetFile and adapted to
Expand Down Expand Up @@ -145,15 +145,15 @@ def _get_binary_budget_data(kstpkper, fobj, text, column="q"):

data = []
for ipos in fobj.iposarray[idx]:
data.append(_get_binary_budget_record(fobj, ipos, header, column))
data.append(_get_binary_budget_record(fobj, ipos, header, column, node))

if len(data) == 1:
return data[0]
else:
return np.ma.sum(data, axis=0)


def _get_binary_budget_record(fobj, ipos, header, column):
def _get_binary_budget_record(fobj, ipos, header, column, node):
"""Get a single data record from the budget file."""
imeth = header["imeth"][0]
nlay = abs(header["nlay"][0])
Expand Down Expand Up @@ -218,14 +218,14 @@ def _get_binary_budget_record(fobj, ipos, header, column):
naux = nauxp1 - 1
dtype_list = [("node", np.int32), ("node2", np.int32), ("q", fobj.realtype)]
for _ in range(naux):
auxname = binaryread(f, str, charlen=16)
auxname = binaryread(f, bytes, charlen=16)
if not isinstance(auxname, str):
auxname = auxname.decode()
dtype_list.append((auxname.strip(), fobj.realtype))
dtype = np.dtype(dtype_list)
nlist = binaryread(f, np.int32)[0]
data = binaryread(f, dtype, shape=(nlist,))
data = __create3D(data, fobj, column=column)
data = __create3D(data, fobj, column=column, node=node)
if fobj.modelgrid is not None:
return np.reshape(data, fobj.shape)
else:
Expand Down
1 change: 1 addition & 0 deletions nlmod/mfoutput/mfoutput.py
Original file line number Diff line number Diff line change
Expand Up @@ -240,6 +240,7 @@ def _get_budget_da(
fobj=cbcobj,
text=text,
column=column,
**kwargs,
)

# create data array
Expand Down
Loading