Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion Manifest.toml
Original file line number Diff line number Diff line change
Expand Up @@ -1502,7 +1502,7 @@ weakdeps = ["Distributed"]
DistributedExt = "Distributed"

[[deps.Ribasim]]
deps = ["ADTypes", "Accessors", "Arrow", "BasicModelInterface", "CodecZstd", "Configurations", "DBInterface", "DataInterpolations", "DataStructures", "Dates", "DelimitedFiles", "DiffEqCallbacks", "DifferentiationInterface", "EnumX", "FiniteDiff", "ForwardDiff", "Graphs", "HiGHS", "IterTools", "JuMP", "Legolas", "LinearAlgebra", "LinearSolve", "Logging", "LoggingExtras", "MetaGraphsNext", "OrdinaryDiffEqBDF", "OrdinaryDiffEqCore", "OrdinaryDiffEqLowOrderRK", "OrdinaryDiffEqNonlinearSolve", "OrdinaryDiffEqRosenbrock", "OrdinaryDiffEqSDIRK", "OrdinaryDiffEqTsit5", "PiecewiseLinearOpt", "PrecompileTools", "Preferences", "SQLite", "SciMLBase", "SparseArrays", "SparseConnectivityTracer", "SparseMatrixColorings", "StructArrays", "Tables", "TerminalLoggers", "TranscodingStreams"]
deps = ["ADTypes", "Accessors", "Arrow", "BasicModelInterface", "CodecZstd", "Configurations", "DBInterface", "DataInterpolations", "DataStructures", "Dates", "DelimitedFiles", "DiffEqCallbacks", "DifferentiationInterface", "EnumX", "FiniteDiff", "ForwardDiff", "Graphs", "HiGHS", "IterTools", "JuMP", "Legolas", "LinearAlgebra", "LinearSolve", "Logging", "LoggingExtras", "MetaGraphsNext", "OrdinaryDiffEqBDF", "OrdinaryDiffEqCore", "OrdinaryDiffEqLowOrderRK", "OrdinaryDiffEqNonlinearSolve", "OrdinaryDiffEqRosenbrock", "OrdinaryDiffEqSDIRK", "OrdinaryDiffEqTsit5", "PiecewiseLinearOpt", "PrecompileTools", "Preferences", "Printf", "SQLite", "SciMLBase", "SparseArrays", "SparseConnectivityTracer", "SparseMatrixColorings", "StructArrays", "Tables", "TerminalLoggers", "TranscodingStreams"]
path = "core"
uuid = "aac5e3d9-0b8f-4d4f-8241-b1a7a9632635"
version = "2025.4.0"
Expand Down
2 changes: 2 additions & 0 deletions core/Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ OrdinaryDiffEqTsit5 = "b1df2697-797e-41e3-8120-5422d3b24e4a"
PiecewiseLinearOpt = "0f51c51e-adfa-5141-8a04-d40246b8977c"
PrecompileTools = "aea7be01-6a6a-4083-8856-8a6e6704d82a"
Preferences = "21216c6a-2e73-6563-6e65-726566657250"
Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
SQLite = "0aa819cd-b072-5ff4-a722-6bc24af294d9"
SciMLBase = "0bca4576-84f4-4d90-8ffe-ffa030f20462"
SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
Expand Down Expand Up @@ -92,6 +93,7 @@ OrdinaryDiffEqTsit5 = "1.1"
PiecewiseLinearOpt = "0.4.2"
PrecompileTools = "1.2.1"
Preferences = "1.4.3"
Printf = "1.11.0"
SQLite = "1.5.1"
SciMLBase = "2.36"
SparseArrays = "1"
Expand Down
2 changes: 2 additions & 0 deletions core/src/Ribasim.jl
Original file line number Diff line number Diff line change
Expand Up @@ -151,6 +151,8 @@ using DataStructures: OrderedSet, OrderedDict, counter, inc!

using Dates: Second

using Printf: @sprintf

export libribasim

include("carrays.jl")
Expand Down
29 changes: 23 additions & 6 deletions core/src/callback.jl
Original file line number Diff line number Diff line change
Expand Up @@ -122,13 +122,21 @@ Specifically, we first use all the inflows to update the mass of the Basins, rec
the Basin concentration(s) and then remove the mass that is being lost to the outflows.
"""
function update_cumulative_flows!(u, t, integrator)::Nothing
(; p) = integrator
(; cache, p) = integrator
(; p_independent, p_mutable, time_dependent_cache) = p
(; basin, flow_boundary, allocation) = p_independent
(; basin, flow_boundary, allocation, temp_convergence, convergence, ncalls) =
p_independent

# Update tprev
p_mutable.tprev = t

# Update convergence measure
if hasproperty(cache, :nlsolver)
@. temp_convergence = abs(cache.nlsolver.cache.atmp / u)
convergence .+= temp_convergence / finitemaximum(temp_convergence)
ncalls[1] += 1
end

# Update cumulative forcings which are integrated exactly
@. basin.cumulative_drainage_saveat +=
time_dependent_cache.basin.current_cumulative_drainage - basin.cumulative_drainage
Expand Down Expand Up @@ -367,8 +375,16 @@ inflow and outflow per Basin.
"""
function save_flow(u, t, integrator)
(; cache, p) = integrator
(; basin, state_inflow_link, state_outflow_link, flow_boundary, u_prev_saveat) =
p.p_independent
(;
basin,
state_inflow_link,
state_outflow_link,
flow_boundary,
u_prev_saveat,
convergence,
ncalls,
node_id,
) = p.p_independent
Δt = get_Δt(integrator)
flow_mean = (u - u_prev_saveat) / Δt

Expand Down Expand Up @@ -423,8 +439,7 @@ function save_flow(u, t, integrator)
@. basin.cumulative_drainage_saveat = 0.0

if hasproperty(cache, :nlsolver)
@. flow_convergence = abs(cache.nlsolver.cache.atmp / u)
flow_convergence = CVector(flow_convergence, getaxes(u))
flow_convergence = convergence ./ ncalls[1]
for (i, (evap, infil)) in
enumerate(zip(flow_convergence.evaporation, flow_convergence.infiltration))
if isnan(evap)
Expand All @@ -435,6 +450,8 @@ function save_flow(u, t, integrator)
basin_convergence[i] = max(evap, infil)
end
end
fill!(convergence, 0)
ncalls[1] = 0
end

concentration = copy(basin.concentration_data.concentration_state)
Expand Down
21 changes: 13 additions & 8 deletions core/src/logging.jl
Original file line number Diff line number Diff line change
Expand Up @@ -51,16 +51,22 @@
end

"Log the convergence bottlenecks."
function log_bottlenecks(model; converged::Bool)
function log_bottlenecks(model; interrupt::Bool)
(; cache, p, u) = model.integrator
(; p_independent) = p

level = converged ? LoggingExtras.Info : LoggingExtras.Warn
level = LoggingExtras.Warn

# Indicate convergence bottlenecks if possible with the current algorithm
if hasproperty(cache, :nlsolver)
flow_error = @. abs(cache.nlsolver.cache.atmp / u)
errors = Pair{Symbol, Float64}[]
flow_error = if interrupt && p.p_independent.ncalls[1] > 0
flow_error = p.p_independent.convergence ./ p.p_independent.ncalls[1]

Check warning on line 63 in core/src/logging.jl

View check run for this annotation

Codecov / codecov/patch

core/src/logging.jl#L63

Added line #L63 was not covered by tests
else
temp_convergence = @. abs(cache.nlsolver.cache.atmp / u)
temp_convergence / finitemaximum(temp_convergence)
end

errors = Pair{Symbol, String}[]
error_count = 0
max_errors = 5
# Iterate over the errors in descending order
Expand All @@ -69,10 +75,10 @@
error = flow_error[i]
isnan(error) && continue # NaN are sorted as largest
# Stop reporting errors if they are too small or too many
if error < model.config.solver.reltol || error_count >= max_errors
if error < 1 / length(flow_error) || error_count >= max_errors
break
end
push!(errors, node_id => error)
push!(errors, node_id => @sprintf("%.2f", error * 100) * "%")
error_count += 1
end
if !isempty(errors)
Expand All @@ -87,13 +93,12 @@
"Log messages after the computation."
function log_finalize(model)::Cint
if success(model)
log_bottlenecks(model; converged = true)
@info "The model finished successfully."
return 0
else
# OrdinaryDiffEq doesn't error on e.g. convergence failure,
# but we want a non-zero exit code in that case.
log_bottlenecks(model; converged = false)
log_bottlenecks(model; interrupt = false)
t = datetime_since(model.integrator.t, model.config.starttime)
(; retcode) = model.integrator.sol
@error """The model exited at model time $t with return code $retcode.
Expand Down
8 changes: 5 additions & 3 deletions core/src/main.jl
Original file line number Diff line number Diff line change
Expand Up @@ -36,10 +36,12 @@
model = Model(config)
try
solve!(model)
catch
catch e
# Catch errors thrown during simulation.
@warn "Simulation crashed or interrupted."
log_bottlenecks(model; converged = false)
t = datetime_since(model.integrator.t, model.config.starttime)
@warn "Simulation crashed or interrupted at $t."
interrupt = e isa InterruptException
log_bottlenecks(model; interrupt)

Check warning on line 44 in core/src/main.jl

View check run for this annotation

Codecov / codecov/patch

core/src/main.jl#L41-L44

Added lines #L41 - L44 were not covered by tests
write_results(model)
display_error(io)
return 1
Expand Down
5 changes: 5 additions & 0 deletions core/src/parameter.jl
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,8 @@ const state_components = (
)
const n_components = length(state_components)
const StateTuple{V} = NamedTuple{state_components, NTuple{n_components, V}}
const RibasimCVectorType =
Ribasim.CArrays.CArray{Float64, 1, Vector{Float64}, StateTuple{UnitRange{Int}}}

# LinkType.flow and NodeType.FlowBoundary
@enumx LinkType flow control none
Expand Down Expand Up @@ -1096,6 +1098,9 @@ the object itself is not.
# Callback configurations
do_concentration::Bool
do_subgrid::Bool
temp_convergence::RibasimCVectorType
convergence::RibasimCVectorType
ncalls::Vector{Int} = [0]
end

function StateTimeDependentCache(
Expand Down
2 changes: 2 additions & 0 deletions core/src/read.jl
Original file line number Diff line number Diff line change
Expand Up @@ -1489,6 +1489,8 @@ function Parameters(db::DB, config::Config)::Parameters
node_id,
do_concentration = config.experimental.concentration,
do_subgrid = config.results.subgrid,
temp_convergence = CVector(zeros(n_states), state_ranges),
convergence = CVector(zeros(n_states), state_ranges),
)

collect_control_mappings!(p_independent)
Expand Down
11 changes: 11 additions & 0 deletions core/src/util.jl
Original file line number Diff line number Diff line change
Expand Up @@ -1153,6 +1153,17 @@ function eval_time_interp(
end
end

"""
    finitemaximum(u::AbstractVector; init = 0)

Return the largest *finite* value in `u`, ignoring `NaN` and `±Inf` entries.
If no finite element of `u` exceeds `init`, return `init` itself
(so an empty or all-non-finite vector yields `init`).
"""
function finitemaximum(u::AbstractVector; init = 0)
    best = init
    for x in u
        # NaN/Inf are skipped so a single non-finite entry cannot poison the result
        isfinite(x) || continue
        if x > best
            best = x
        end
    end
    return best
end

function initialize_concentration_itp(
n_substance,
substance_idx_node_type;
Expand Down
4 changes: 3 additions & 1 deletion python/ribasim/ribasim/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -293,7 +293,9 @@ def node_table(self) -> NodeTable:
node_table = NodeTable(df=df)
node_table.sort()
assert node_table.df is not None
assert node_table.df.index.is_unique, "node_id must be unique"
assert node_table.df.index.is_unique, (
f"node_id must be unique, the following ids are not {node_table.df.index[node_table.df.index.duplicated(keep=False)].unique()}"
)
return node_table

def _nodes(self) -> Generator[MultiNodeModel, Any, None]:
Expand Down
Loading