Skip to content

Commit

Permalink
SBMLImporter update and fix bug in tests
Browse files Browse the repository at this point in the history
  • Loading branch information
sebapersson committed Feb 16, 2024
1 parent d43b9a3 commit 0a25f81
Show file tree
Hide file tree
Showing 4 changed files with 22 additions and 21 deletions.
2 changes: 1 addition & 1 deletion src/Process_input/Observables/h_sigma_derivatives.jl
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ function create_derivative_σ_h_file(model_name::String,
parameter_map, state_map, experimental_conditions)

# Dummy variables to keep the PEtab importer happy even though we are not providing any PEtab files
model_SBML = SBMLImporter.ModelSBML()
model_SBML = SBMLImporter.ModelSBML("")

∂h∂u_str, ∂h∂p_str = PEtab.create∂h∂_function(model_name, @__DIR__, model_state_names,
parameter_info, p_ode_problem_names,
Expand Down
6 changes: 3 additions & 3 deletions src/Process_input/Observables/u0_h_sigma.jl
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ function create_σ_h_u0_file(model_name::String,
parameter_map, state_map, experimental_conditions)

# Dummy variables to keep the PEtab importer happy even though we are not providing any PEtab files
model_SBML = SBMLImporter.ModelSBML()
model_SBML = SBMLImporter.ModelSBML("")

h_str = PEtab.create_h_function(model_name, @__DIR__, model_state_names, parameter_info,
p_ode_problem_names,
Expand Down Expand Up @@ -227,7 +227,7 @@ end
dir_model::String,
parameter_info::ParametersInfo,
p_ode_problem_names::Vector{String},
state_map,
state_map,
model_SBML;
inplace::Bool=true)
Expand Down Expand Up @@ -270,7 +270,7 @@ function create_u0_function(model_name::String,
# Write the formula for each initial condition to file
_model_state_names = [replace.(string.(state_map[i].first), "(t)" => "")
for i in eachindex(state_map)]
# If we create from Julia model (e.g.) Catalyst this is not applicable and step is skipped
# If we create from Julia model (e.g.) Catalyst this is not applicable and step is skipped
if !isempty(model_SBML.parameters)
model_state_names1 = filter(x -> x ∈ keys(model_SBML.species) &&
model_SBML.species[x].assignment_rule == false,
Expand Down
33 changes: 17 additions & 16 deletions test/Julia_import/Priors.jl
Original file line number Diff line number Diff line change
Expand Up @@ -63,29 +63,30 @@ petab_model_rn = PEtabModel(rn, simulation_conditions, observables, measurements
petab_problem_rn = PEtabODEProblem(petab_model_rn, verbose=false)

# Compute gradient + hessian for nllh and prior
prior_val = _compute_prior(petab_problem_sys.θ_nominalT)
prior_grad = ForwardDiff.gradient(_compute_prior, petab_problem_sys.θ_nominalT)
prior_hess = ForwardDiff.hessian(_compute_prior, petab_problem_sys.θ_nominalT)
nllh = petab_problem_rn.compute_nllh(petab_problem_sys.θ_nominalT)
nllh_grad = ForwardDiff.gradient(petab_problem_rn.compute_nllh, petab_problem_sys.θ_nominalT)
nllh_hess = ForwardDiff.hessian(petab_problem_rn.compute_nllh, petab_problem_sys.θ_nominalT)
prior_val = _compute_prior(petab_problem_rn.θ_nominalT)
prior_grad = ForwardDiff.gradient(_compute_prior, petab_problem_rn.θ_nominalT)
prior_hess = ForwardDiff.hessian(_compute_prior, petab_problem_rn.θ_nominalT)
nllh = petab_problem_rn.compute_nllh(petab_problem_rn.θ_nominalT)
nllh_grad = ForwardDiff.gradient(petab_problem_rn.compute_nllh, petab_problem_rn.θ_nominalT)
nllh_hess = ForwardDiff.hessian(petab_problem_rn.compute_nllh, petab_problem_rn.θ_nominalT)
# Compute petab objective gradient and hessian
obj = petab_problem_rn.compute_cost(petab_problem_sys.θ_nominalT)
grad = petab_problem_rn.compute_gradient(petab_problem_sys.θ_nominalT)
hess = petab_problem_rn.compute_hessian(petab_problem_sys.θ_nominalT)
obj = petab_problem_rn.compute_cost(petab_problem_rn.θ_nominalT)
grad = petab_problem_rn.compute_gradient(petab_problem_rn.θ_nominalT)
hess = petab_problem_rn.compute_hessian(petab_problem_rn.θ_nominalT)
# Test it all adds up
@test obj ≈ prior_val + nllh
@test norm(grad - (nllh_grad + prior_grad)) < 1e-8
@test norm(hess - (nllh_hess + prior_hess)) < 1e-8

# The same test but for the Blockhessian approximation, and another gradient method
petab_problem_rn = PEtabODEProblem(petab_model_rn, verbose=false, hessian_method=:BlockForwardDiff, gradient_method=:ForwardEquations)
nllh = petab_problem_rn.compute_nllh(petab_problem_sys.θ_nominalT)
nllh_grad = ForwardDiff.gradient(petab_problem_rn.compute_nllh, petab_problem_sys.θ_nominalT)
nllh_hess = ForwardDiff.hessian(petab_problem_rn.compute_nllh, petab_problem_sys.θ_nominalT)
obj = petab_problem_rn.compute_cost(petab_problem_sys.θ_nominalT)
grad = petab_problem_rn.compute_gradient(petab_problem_sys.θ_nominalT)
hess = petab_problem_rn.compute_hessian(petab_problem_sys.θ_nominalT)
nllh = petab_problem_rn.compute_nllh(petab_problem_rn.θ_nominalT)
nllh_grad = ForwardDiff.gradient(petab_problem_rn.compute_nllh, petab_problem_rn.θ_nominalT)
nllh_hess = ForwardDiff.hessian(petab_problem_rn.compute_nllh, petab_problem_rn.θ_nominalT)
obj = petab_problem_rn.compute_cost(petab_problem_rn.θ_nominalT)
grad = petab_problem_rn.compute_gradient(petab_problem_rn.θ_nominalT)
hess = petab_problem_rn.compute_hessian(petab_problem_rn.θ_nominalT)
@test obj ≈ prior_val + nllh
@test norm(grad - (nllh_grad + prior_grad)) < 1e-8
@test norm(hess - (nllh_hess + prior_hess)) < 1e-8
@test norm(hess[1:4, 1:4] - (nllh_hess[1:4, 1:4] + prior_hess[1:4, 1:4])) < 1e-8
@test norm(hess[5, 5] - (nllh_hess[5, 5] + prior_hess[5, 5])) < 1e-8
2 changes: 1 addition & 1 deletion test/Test_model2.jl
Original file line number Diff line number Diff line change
Expand Up @@ -153,7 +153,7 @@ function test_cost_gradient_hessian_test_model2(petab_model::PEtabModel, ode_sol
H1 = prob1.compute_hessian(p)
H2 = prob2.compute_hessian(p)
@test norm(H1[1:2] - H2[1:2]) < 1e-2
@test norm(H1[3:4] - H2[3:4]) < 1e-2
@test norm(normalize(diag(H1, 0)[3:end]) - normalize(diag(H2, 0)[3:end])) < 1e-2
end
end

Expand Down

0 comments on commit 0a25f81

Please sign in to comment.