Skip to content
Merged
6 changes: 3 additions & 3 deletions nlmod/dims/attributes_encodings.py
Original file line number Diff line number Diff line change
Expand Up @@ -167,9 +167,9 @@ def get_encodings(
if np.issubdtype(da.dtype, np.character):
continue

assert "_FillValue" not in da.attrs, (
f"Custom fillvalues are not supported. {varname} has a fillvalue set."
)
assert (
"_FillValue" not in da.attrs
), f"Custom fillvalues are not supported. {varname} has a fillvalue set."

encoding = {
"zlib": True,
Expand Down
62 changes: 58 additions & 4 deletions nlmod/dims/grid.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,11 +36,65 @@
remove_inactive_layers,
)
from .rdp import rdp
from .shared import get_area, get_delc, get_delr # noqa: F401
from .shared import GridTypeDims, get_area, get_delc, get_delr # noqa: F401

logger = logging.getLogger(__name__)


def is_structured(ds):
    """Check if a dataset is structured.

    Parameters
    ----------
    ds : xr.Dataset or xr.Dataarray
        dataset or dataarray

    Returns
    -------
    bool
        True if the dataset is structured.
    """
    # both layered and non-layered structured grids count as structured
    structured_types = (
        GridTypeDims.STRUCTURED,
        GridTypeDims.STRUCTURED_LAYERED,
    )
    return GridTypeDims.parse_dims(ds) in structured_types


def is_vertex(ds):
    """Check if a dataset is vertex.

    Parameters
    ----------
    ds : xr.Dataset or xr.Dataarray
        dataset or dataarray

    Returns
    -------
    bool
        True if the dataset is vertex.
    """
    # fixed docstring: previously claimed "True if the dataset is structured",
    # which was a copy-paste error from is_structured
    return GridTypeDims.parse_dims(ds) in (
        GridTypeDims.VERTEX,
        GridTypeDims.VERTEX_LAYERED,
    )


def is_layered(ds):
    """Check if a dataset is layered.

    Parameters
    ----------
    ds : xr.Dataset or xr.Dataarray
        dataset or dataarray

    Returns
    -------
    bool
        True if the dataset is layered.
    """
    # a dataset/dataarray is layered when any of its dimensions is "layer"
    return any(dim == "layer" for dim in ds.dims)


def snap_extent(extent, delr, delc):
"""Snap the extent in such a way that an integer number of columns and rows fit in
the extent. The new extent is always equal to, or bigger than the original extent.
Expand Down Expand Up @@ -1513,9 +1567,9 @@ def aggregate_vector_per_cell(gdf, fields_methods, modelgrid=None):
else:
raise TypeError("cannot aggregate geometries of different types")
if bool({"length_weighted", "max_length"} & set(fields_methods.values())):
assert geom_types[0] == "LineString", (
"can only use length methods with line geometries"
)
assert (
geom_types[0] == "LineString"
), "can only use length methods with line geometries"
if bool({"area_weighted", "max_area"} & set(fields_methods.values())):
if ("Polygon" in geom_types) or ("MultiPolygon" in geom_types):
pass
Expand Down
134 changes: 71 additions & 63 deletions nlmod/dims/layers.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@

from ..util import LayerError, _get_value_from_ds_datavar
from .resample import fillnan_da
from .shared import GridTypeDims

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -269,9 +270,9 @@ def split_layers_ds(
split_dict[lay0] = [1 / split_dict[lay0]] * split_dict[lay0]
elif hasattr(split_dict[lay0], "__iter__"):
# make sure the fractions add up to 1
assert np.isclose(np.sum(split_dict[lay0]), 1), (
f"Fractions for splitting layer '{lay0}' do not add up to 1."
)
assert np.isclose(
np.sum(split_dict[lay0]), 1
), f"Fractions for splitting layer '{lay0}' do not add up to 1."
split_dict[lay0] = split_dict[lay0] / np.sum(split_dict[lay0])
else:
raise ValueError(
Expand Down Expand Up @@ -375,18 +376,9 @@ def layer_combine_top_bot(ds, combine_layers, layer="layer", top="top", bot="bot
new_nlay = (
ds[layer].size - sum((len(c) for c in combine_layers)) + len(combine_layers)
)

# create new DataArrays for storing new top/bot
new_bot = xr.DataArray(
data=np.nan,
dims=["layer", "y", "x"],
coords={"layer": np.arange(new_nlay), "y": ds.y.data, "x": ds.x.data},
)
new_top = xr.DataArray(
data=np.nan,
dims=["layer", "y", "x"],
coords={"layer": np.arange(new_nlay), "y": ds.y.data, "x": ds.x.data},
)
new_bot = _get_empty_layered_da(ds[bot], nlay=new_nlay)
new_top = _get_empty_layered_da(ds[top], nlay=new_nlay)

# dict to keep track of old and new layer indices
reindexer = {}
Expand All @@ -411,7 +403,7 @@ def layer_combine_top_bot(ds, combine_layers, layer="layer", top="top", bot="bot
"calculate new top/bot."
)
tops = ds[top].data[c, ...]
bots = ds[bot].data[c, :, :]
bots = ds[bot].data[c, ...]
new_top.data[j] = np.nanmax(tops, axis=0)
new_bot.data[j] = np.nanmin(bots, axis=0)

Expand Down Expand Up @@ -453,25 +445,49 @@ def sum_param_combined_layers(da, reindexer):
data array containing new parameters for combined layers and old
parameters for unmodified layers.
"""
da_new = xr.DataArray(
data=np.nan,
dims=["layer", "y", "x"],
coords={
"layer": np.arange(list(reindexer.keys())[-1] + 1),
"y": da["y"],
"x": da["x"],
},
)

da_new = _get_empty_layered_da(da, nlay=list(reindexer.keys())[-1] + 1)
for k, v in reindexer.items():
if isinstance(v, tuple):
psum = np.sum(da.data[v, :, :], axis=0)
psum = np.sum(da.data[v, ...], axis=0)
else:
psum = da.data[v]
da_new.data[k] = psum
return da_new


def _get_empty_layered_da(da, nlay):
    """Get empty DataArray with number of layer specified by nlay.

    Parameters
    ----------
    da : xarray.DataArray
        original data array
    nlay : int
        number of layers

    Returns
    -------
    da_new : xarray.DataArray
        new empty data array with updated number of layers
    """
    da_dims = set(da.dims)
    # structured grids are checked first, matching GridTypeDims member order
    if set(GridTypeDims.STRUCTURED_LAYERED.value) <= da_dims:
        new_dims = GridTypeDims.STRUCTURED_LAYERED.value
        new_coords = {
            "layer": np.arange(nlay),
            "y": da["y"],
            "x": da["x"],
        }
    elif set(GridTypeDims.VERTEX_LAYERED.value) <= da_dims:
        new_dims = GridTypeDims.VERTEX_LAYERED.value
        new_coords = {
            "layer": np.arange(nlay),
            "icell2d": da["icell2d"],
        }
    else:
        raise TypeError("Cannot determine grid type of data array.")
    return xr.DataArray(data=np.nan, dims=new_dims, coords=new_coords)


def kheq_combined_layers(kh, thickness, reindexer):
"""Calculate equivalent horizontal hydraulic conductivity.

Expand All @@ -491,21 +507,12 @@ def kheq_combined_layers(kh, thickness, reindexer):
for combined layers and original hydraulic conductivity in unmodified
layers
"""
da_kh = xr.DataArray(
data=np.nan,
dims=["layer", "y", "x"],
coords={
"layer": np.arange(list(reindexer.keys())[-1] + 1),
"y": kh["y"],
"x": kh["x"],
},
)

da_kh = _get_empty_layered_da(kh, nlay=list(reindexer.keys())[-1] + 1)
for k, v in reindexer.items():
if isinstance(v, tuple):
kheq = np.nansum(
thickness.data[v, :, :] * kh.data[v, :, :], axis=0
) / np.nansum(thickness.data[v, :, :], axis=0)
thickness.data[v, ...] * kh.data[v, ...], axis=0
) / np.nansum(thickness.data[v, ...], axis=0)
kheq[np.isinf(kheq)] = np.nan
else:
kheq = kh.data[v]
Expand All @@ -532,20 +539,11 @@ def kveq_combined_layers(kv, thickness, reindexer):
for combined layers and original hydraulic conductivity in unmodified
layers
"""
da_kv = xr.DataArray(
data=np.nan,
dims=["layer", "y", "x"],
coords={
"layer": np.arange(list(reindexer.keys())[-1] + 1),
"y": kv["y"],
"x": kv["x"],
},
)

da_kv = _get_empty_layered_da(kv, nlay=list(reindexer.keys())[-1] + 1)
for k, v in reindexer.items():
if isinstance(v, tuple):
kveq = np.nansum(thickness.data[v, :, :], axis=0) / np.nansum(
thickness.data[v, :, :] / kv.data[v, :, :], axis=0
kveq = np.nansum(thickness.data[v, ...], axis=0) / np.nansum(
thickness.data[v, ...] / kv.data[v, ...], axis=0
)
kveq[np.isinf(kveq)] = np.nan
else:
Expand All @@ -564,6 +562,7 @@ def combine_layers_ds(
kv="kv",
kD="kD",
c="c",
return_reindexer=False,
):
"""Combine layers in Dataset.

Expand Down Expand Up @@ -595,6 +594,9 @@ def combine_layers_ds(
c : str, optional
name of data variable containg resistance or c,
by default 'c'. Not parsed if set to None.
return_reindexer : bool, optional
Return a dictionary that can be used to reindex variables from the original
layer-dimension to the new layer-dimension when True. The default is False.

Returns
-------
Expand Down Expand Up @@ -625,7 +627,7 @@ def combine_layers_ds(
----
When passing integers to combine_layers, these are always intepreted as the
layer index (i.e. starting at 0 and numbered consecutively), and not the
layer "name". If the dataset layer index is integer, only the layer index
layer "name". If the dataset layer names are integers, only the layer index
can be used to specify which layers to merge.
"""
data_vars = []
Expand All @@ -634,10 +636,14 @@ def combine_layers_ds(
data_vars.append(dv)
parsed_dv = set([top, bot] + data_vars)

dropped_dv = set(ds.data_vars.keys()) - parsed_dv
if len(dropped_dv) > 0:
msg = f"Following data variables will be dropped: {dropped_dv}"
logger.warning(msg)
check_remaining_dv = set(ds.data_vars.keys()) - parsed_dv
keep_dv = []
for dv in check_remaining_dv:
if "layer" in ds[dv].dims:
msg = f"Data variable has 'layer' dimension and will be dropped: {dv}"
logger.warning(msg)
else:
keep_dv.append(dv)

# calculate new tops/bots
logger.info("Calculating new layer tops and bottoms...")
Expand Down Expand Up @@ -700,16 +706,16 @@ def combine_layers_ds(

# calculate equivalent kh/kv
if kh is not None:
logger.info(f"Calculate equivalent '{kh}' for combined layers.")
logger.info("Calculate equivalent '%s' for combined layers.", kh)
da_dict[kh] = kheq_combined_layers(ds[kh], thickness, reindexer)
if kv is not None:
logger.info(f"Calculate equivalent '{kv}' for combined layers.")
logger.info("Calculate equivalent '%s' for combined layers.", kv)
da_dict[kv] = kveq_combined_layers(ds[kv], thickness, reindexer)
if kD is not None and kD in ds:
logger.info(f"Calculate value '{kD}' for combined layers with sum.")
logger.info("Calculate value '%s' for combined layers with sum.", kD)
da_dict[kD] = sum_param_combined_layers(ds[kD], reindexer)
if c is not None and c in ds:
logger.info(f"Calculate value '{c}' for combined layers with sum.")
logger.info("Calculate value '%s' for combined layers with sum.", c)
da_dict[c] = sum_param_combined_layers(ds[c], reindexer)

# get new layer names, based on first sub-layer from each combined layer
Expand All @@ -725,17 +731,19 @@ def combine_layers_ds(
for k, da in da_dict.items():
da_dict[k] = da.assign_coords(layer=layer_names)

# add reindexer to attributes
attrs = ds.attrs.copy()
attrs["combine_reindexer"] = reindexer
# add original data variables to new dataset
for dv in keep_dv:
da_dict[dv] = ds[dv]

# create new dataset
logger.info("Done! Created new dataset with combined layers!")
ds_combine = xr.Dataset(da_dict, attrs=attrs)
ds_combine = xr.Dataset(da_dict, attrs=ds.attrs)

# remove layer dimension from top
ds_combine = remove_layer_dim_from_top(ds_combine, inconsistency_threshold=1e-5)

if return_reindexer:
return ds_combine, reindexer
return ds_combine


Expand Down
39 changes: 37 additions & 2 deletions nlmod/dims/shared.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,46 @@
from enum import Enum

import numpy as np
import xarray as xr


class GridTypeDims(Enum):
    """Enum for grid dimensions."""

    STRUCTURED_LAYERED = ("layer", "y", "x")
    VERTEX_LAYERED = ("layer", "icell2d")
    STRUCTURED = ("y", "x")
    VERTEX = ("icell2d",)

    @classmethod
    def parse_dims(cls, ds):
        """Get GridTypeDim from dataset or dataarray.

        Parameters
        ----------
        ds : xr.Dataset or xr.DataArray
            Dataset or DataArray to parse.

        Returns
        -------
        gridtype : GridTypeDims
            type of grid

        Raises
        ------
        ValueError
            If no partially matching gridtype is found.
        """
        present_dims = set(ds.dims)
        # members are checked in definition order, most specific first
        for member in cls:
            if present_dims.issuperset(member.value):
                return member
        # fall back to exact enum lookup; raises ValueError for unknown dims
        return cls(ds.dims)


def get_delr(ds):
"""
Get the distance along rows (delr) from the x-coordinate of a structured model
dataset.
Get the distance along rows (delr) from the x-coordinate of a structured model ds.

Parameters
----------
Expand Down
Loading