From d31171c46718ceedb1eb2eaa5f888ffb3a7f673a Mon Sep 17 00:00:00 2001 From: tmigot Date: Tue, 18 Jun 2024 23:34:31 +0200 Subject: [PATCH 1/4] Add error for evaluation of constraints of an unconstrained model --- src/nlp/api.jl | 23 +++++++++++++++++++++++ src/nlp/utils.jl | 24 ++++++++++++++++++++++++ 2 files changed, 47 insertions(+) diff --git a/src/nlp/api.jl b/src/nlp/api.jl index 82d9bb08..51015ab4 100644 --- a/src/nlp/api.jl +++ b/src/nlp/api.jl @@ -45,6 +45,7 @@ Evaluate ``c(x)``, the constraints at `x`. """ function cons(nlp::AbstractNLPModel{T, S}, x::AbstractVector) where {T, S} @lencheck nlp.meta.nvar x + check_unconstrained(nlp) c = S(undef, nlp.meta.ncon) return cons!(nlp, x, c) end @@ -70,6 +71,7 @@ Evaluate the linear constraints at `x`. """ function cons_lin(nlp::AbstractNLPModel{T, S}, x::AbstractVector) where {T, S} @lencheck nlp.meta.nvar x + check_linear_constraints(nlp) c = S(undef, nlp.meta.nlin) return cons_lin!(nlp, x, c) end @@ -88,6 +90,7 @@ Evaluate the nonlinear constraints at `x`. """ function cons_nln(nlp::AbstractNLPModel{T, S}, x::AbstractVector) where {T, S} @lencheck nlp.meta.nvar x + check_nonlinear_constraints(nlp) c = S(undef, nlp.meta.nnln) return cons_nln!(nlp, x, c) end @@ -118,6 +121,7 @@ Evaluate ``f(x)`` and ``c(x)`` at `x`. """ function objcons(nlp::AbstractNLPModel{T, S}, x::AbstractVector) where {T, S} @lencheck nlp.meta.nvar x + check_unconstrained(nlp) f = obj(nlp, x) c = cons(nlp, x) return f, c @@ -131,6 +135,7 @@ Evaluate ``f(x)`` and ``c(x)`` at `x`. `c` is overwritten with the value of ``c( function objcons!(nlp::AbstractNLPModel, x::AbstractVector, c::AbstractVector) @lencheck nlp.meta.nvar x @lencheck nlp.meta.ncon c + check_unconstrained(nlp) f = obj(nlp, x) cons!(nlp, x, c) return f, c @@ -166,6 +171,7 @@ end Return the structure of the constraints Jacobian in sparse coordinate format. """ function jac_structure(nlp::AbstractNLPModel) + check_unconstrained(nlp) rows = Vector{Int}(undef, nlp.meta.nnzj) cols = Vector{Int}(undef, nlp.meta.nnzj) jac_structure!(nlp, rows, cols) @@ -181,6 +187,7 @@ function jac_structure!( rows::AbstractVector{T}, cols::AbstractVector{T}, ) where {T} + check_unconstrained(nlp) @lencheck nlp.meta.nnzj rows cols lin_ind = 1:(nlp.meta.lin_nnzj) nlp.meta.nlin > 0 && jac_lin_structure!(nlp, view(rows, lin_ind), view(cols, lin_ind)) @@ -203,6 +210,7 @@ end Return the structure of the linear constraints Jacobian in sparse coordinate format. """ function jac_lin_structure(nlp::AbstractNLPModel) + check_linear_constraints(nlp) rows = Vector{Int}(undef, nlp.meta.lin_nnzj) cols = Vector{Int}(undef, nlp.meta.lin_nnzj) jac_lin_structure!(nlp, rows, cols) @@ -221,6 +229,7 @@ function jac_lin_structure! end Return the structure of the nonlinear constraints Jacobian in sparse coordinate format. """ function jac_nln_structure(nlp::AbstractNLPModel) + check_nonlinear_constraints(nlp) rows = Vector{Int}(undef, nlp.meta.nln_nnzj) cols = Vector{Int}(undef, nlp.meta.nln_nnzj) jac_nln_structure!(nlp, rows, cols) @@ -242,6 +251,7 @@ rewriting `vals`. function jac_coord!(nlp::AbstractNLPModel, x::AbstractVector, vals::AbstractVector) @lencheck nlp.meta.nvar x @lencheck nlp.meta.nnzj vals + check_unconstrained(nlp) increment!(nlp, :neval_jac) lin_ind = 1:(nlp.meta.lin_nnzj) nlp.meta.nlin > 0 && jac_lin_coord!(nlp, x, view(vals, lin_ind)) @@ -257,6 +267,7 @@ Evaluate ``J(x)``, the constraints Jacobian at `x` in sparse coordinate format. 
""" function jac_coord(nlp::AbstractNLPModel{T, S}, x::AbstractVector) where {T, S} @lencheck nlp.meta.nvar x + check_unconstrained(nlp) vals = S(undef, nlp.meta.nnzj) return jac_coord!(nlp, x, vals) end @@ -268,6 +279,7 @@ Evaluate ``J(x)``, the constraints Jacobian at `x` as a sparse matrix. """ function jac(nlp::AbstractNLPModel, x::AbstractVector) @lencheck nlp.meta.nvar x + check_unconstrained(nlp) rows, cols = jac_structure(nlp) vals = jac_coord(nlp, x) sparse(rows, cols, vals, nlp.meta.ncon, nlp.meta.nvar) @@ -288,6 +300,7 @@ Evaluate ``J(x)``, the linear constraints Jacobian at `x` in sparse coordinate f """ function jac_lin_coord(nlp::AbstractNLPModel{T, S}, x::AbstractVector) where {T, S} @lencheck nlp.meta.nvar x + check_linear_constraints(nlp) vals = S(undef, nlp.meta.lin_nnzj) return jac_lin_coord!(nlp, x, vals) end @@ -299,6 +312,7 @@ Evaluate ``J(x)``, the linear constraints Jacobian at `x` as a sparse matrix. """ function jac_lin(nlp::AbstractNLPModel, x::AbstractVector) @lencheck nlp.meta.nvar x + check_linear_constraints(nlp) rows, cols = jac_lin_structure(nlp) vals = jac_lin_coord(nlp, x) sparse(rows, cols, vals, nlp.meta.nlin, nlp.meta.nvar) @@ -319,6 +333,7 @@ Evaluate ``J(x)``, the nonlinear constraints Jacobian at `x` in sparse coordinat """ function jac_nln_coord(nlp::AbstractNLPModel{T, S}, x::AbstractVector) where {T, S} @lencheck nlp.meta.nvar x + check_nonlinear_constraints(nlp) vals = S(undef, nlp.meta.nln_nnzj) return jac_nln_coord!(nlp, x, vals) end @@ -330,6 +345,7 @@ Evaluate ``J(x)``, the nonlinear constraints Jacobian at `x` as a sparse matrix. """ function jac_nln(nlp::AbstractNLPModel, x::AbstractVector) @lencheck nlp.meta.nvar x + check_nonlinear_constraints(nlp) rows, cols = jac_nln_structure(nlp) vals = jac_nln_coord(nlp, x) sparse(rows, cols, vals, nlp.meta.nnln, nlp.meta.nvar) @@ -342,6 +358,7 @@ Evaluate ``J(x)v``, the Jacobian-vector product at `x`. """ function jprod(nlp::AbstractNLPModel{T, S}, x::AbstractVector, v::AbstractVector) where {T, S} @lencheck nlp.meta.nvar x v + check_unconstrained(nlp) Jv = S(undef, nlp.meta.ncon) return jprod!(nlp, x, v, Jv) end @@ -354,6 +371,7 @@ Evaluate ``J(x)v``, the Jacobian-vector product at `x` in place. function jprod!(nlp::AbstractNLPModel, x::AbstractVector, v::AbstractVector, Jv::AbstractVector) @lencheck nlp.meta.nvar x v @lencheck nlp.meta.ncon Jv + check_unconstrained(nlp) increment!(nlp, :neval_jprod) nlp.meta.nlin > 0 && jprod_lin!(nlp, x, v, view(Jv, nlp.meta.lin)) nlp.meta.nnln > 0 && jprod_nln!(nlp, x, v, view(Jv, nlp.meta.nln)) @@ -377,6 +395,7 @@ function jprod!( @lencheck nlp.meta.nnzj rows cols vals @lencheck nlp.meta.nvar v @lencheck nlp.meta.ncon Jv + check_unconstrained(nlp) increment!(nlp, :neval_jprod) coo_prod!(rows, cols, vals, v, Jv) end @@ -388,6 +407,7 @@ Evaluate ``J(x)v``, the linear Jacobian-vector product at `x`. """ function jprod_lin(nlp::AbstractNLPModel{T, S}, x::AbstractVector, v::AbstractVector) where {T, S} @lencheck nlp.meta.nvar x v + check_linear_constraints(nlp) Jv = S(undef, nlp.meta.nlin) return jprod_lin!(nlp, x, v, Jv) end @@ -416,6 +436,7 @@ function jprod_lin!( @lencheck nlp.meta.lin_nnzj rows cols vals @lencheck nlp.meta.nvar v @lencheck nlp.meta.nlin Jv + check_linear_constraints(nlp) increment!(nlp, :neval_jprod_lin) coo_prod!(rows, cols, vals, v, Jv) end @@ -427,6 +448,7 @@ Evaluate ``J(x)v``, the nonlinear Jacobian-vector product at `x`. 
""" function jprod_nln(nlp::AbstractNLPModel{T, S}, x::AbstractVector, v::AbstractVector) where {T, S} @lencheck nlp.meta.nvar x v + check_nonlinear_constraints(nlp) Jv = S(undef, nlp.meta.nnln) return jprod_nln!(nlp, x, v, Jv) end @@ -455,6 +477,7 @@ function jprod_nln!( @lencheck nlp.meta.nln_nnzj rows cols vals @lencheck nlp.meta.nvar v @lencheck nlp.meta.nnln Jv + check_nonlinear_constraints(nlp) increment!(nlp, :neval_jprod_nln) coo_prod!(rows, cols, vals, v, Jv) end diff --git a/src/nlp/utils.jl b/src/nlp/utils.jl index 9c58dfd6..be1771da 100644 --- a/src/nlp/utils.jl +++ b/src/nlp/utils.jl @@ -67,6 +67,30 @@ macro rangecheck(lo, hi, vars...) Expr(:block, exprs...) end +const UnconstrainedErrorMessage = "Try to evaluate constraints, but the problem is unconstrained." + +function check_unconstrained(nlp) + if unconstrained(nlp) + throw(error(UnconstrainedErrorMessage)) + end +end + +const NonlinearUnconstrainedErrorMessage = "Try to evaluate nonlinear constraints, but the problem has none." + +function check_nonlinear_constraints(nlp) + if nlp.meta.nnln == 0 + throw(error(NonlinearUnconstrainedErrorMessage)) + end +end + +const LinearUnconstrainedErrorMessage = "Try to evaluate linear constraints, but the problem has none." + +function check_linear_constraints(nlp) + if nlp.meta.nlin == 0 + throw(error(LinearUnconstrainedErrorMessage)) + end +end + """ coo_prod!(rows, cols, vals, v, Av) From 890130079a56f3138b4988e3ff0be99de0094d52 Mon Sep 17 00:00:00 2001 From: tmigot Date: Sat, 22 Jun 2024 16:51:07 +0200 Subject: [PATCH 2/4] Fix unconstrained error --- src/nlp/api.jl | 72 ++++++++++++++++++++++++++++------------- src/nlp/utils.jl | 12 +++---- test/nlp/dummy-model.jl | 6 ++-- 3 files changed, 58 insertions(+), 32 deletions(-) diff --git a/src/nlp/api.jl b/src/nlp/api.jl index 51015ab4..e2c400b5 100644 --- a/src/nlp/api.jl +++ b/src/nlp/api.jl @@ -45,7 +45,7 @@ Evaluate ``c(x)``, the constraints at `x`. """ function cons(nlp::AbstractNLPModel{T, S}, x::AbstractVector) where {T, S} @lencheck nlp.meta.nvar x - check_unconstrained(nlp) + check_constrained(nlp) c = S(undef, nlp.meta.ncon) return cons!(nlp, x, c) end @@ -58,6 +58,7 @@ Evaluate ``c(x)``, the constraints at `x` in place. function cons!(nlp::AbstractNLPModel, x::AbstractVector, cx::AbstractVector) @lencheck nlp.meta.nvar x @lencheck nlp.meta.ncon cx + check_constrained(nlp) increment!(nlp, :neval_cons) nlp.meta.nlin > 0 && cons_lin!(nlp, x, view(cx, nlp.meta.lin)) nlp.meta.nnln > 0 && cons_nln!(nlp, x, view(cx, nlp.meta.nln)) @@ -71,7 +72,7 @@ Evaluate the linear constraints at `x`. """ function cons_lin(nlp::AbstractNLPModel{T, S}, x::AbstractVector) where {T, S} @lencheck nlp.meta.nvar x - check_linear_constraints(nlp) + check_linearly_constrained(nlp) c = S(undef, nlp.meta.nlin) return cons_lin!(nlp, x, c) end @@ -90,7 +91,7 @@ Evaluate the nonlinear constraints at `x`. """ function cons_nln(nlp::AbstractNLPModel{T, S}, x::AbstractVector) where {T, S} @lencheck nlp.meta.nvar x - check_nonlinear_constraints(nlp) + check_nonlinearly_constrained(nlp) c = S(undef, nlp.meta.nnln) return cons_nln!(nlp, x, c) end @@ -121,7 +122,7 @@ Evaluate ``f(x)`` and ``c(x)`` at `x`. """ function objcons(nlp::AbstractNLPModel{T, S}, x::AbstractVector) where {T, S} @lencheck nlp.meta.nvar x - check_unconstrained(nlp) + check_constrained(nlp) f = obj(nlp, x) c = cons(nlp, x) return f, c @@ -135,7 +136,7 @@ Evaluate ``f(x)`` and ``c(x)`` at `x`. 
`c` is overwritten with the value of ``c( function objcons!(nlp::AbstractNLPModel, x::AbstractVector, c::AbstractVector) @lencheck nlp.meta.nvar x @lencheck nlp.meta.ncon c - check_unconstrained(nlp) + check_constrained(nlp) f = obj(nlp, x) cons!(nlp, x, c) return f, c @@ -171,7 +172,7 @@ end Return the structure of the constraints Jacobian in sparse coordinate format. """ function jac_structure(nlp::AbstractNLPModel) - check_unconstrained(nlp) + check_constrained(nlp) rows = Vector{Int}(undef, nlp.meta.nnzj) cols = Vector{Int}(undef, nlp.meta.nnzj) jac_structure!(nlp, rows, cols) @@ -187,7 +188,7 @@ function jac_structure!( rows::AbstractVector{T}, cols::AbstractVector{T}, ) where {T} - check_unconstrained(nlp) + check_constrained(nlp) @lencheck nlp.meta.nnzj rows cols lin_ind = 1:(nlp.meta.lin_nnzj) nlp.meta.nlin > 0 && jac_lin_structure!(nlp, view(rows, lin_ind), view(cols, lin_ind)) @@ -210,7 +211,7 @@ end Return the structure of the linear constraints Jacobian in sparse coordinate format. """ function jac_lin_structure(nlp::AbstractNLPModel) - check_linear_constraints(nlp) + check_linearly_constrained(nlp) rows = Vector{Int}(undef, nlp.meta.lin_nnzj) cols = Vector{Int}(undef, nlp.meta.lin_nnzj) jac_lin_structure!(nlp, rows, cols) @@ -229,7 +230,7 @@ function jac_lin_structure! end Return the structure of the nonlinear constraints Jacobian in sparse coordinate format. """ function jac_nln_structure(nlp::AbstractNLPModel) - check_nonlinear_constraints(nlp) + check_nonlinearly_constrained(nlp) rows = Vector{Int}(undef, nlp.meta.nln_nnzj) cols = Vector{Int}(undef, nlp.meta.nln_nnzj) jac_nln_structure!(nlp, rows, cols) @@ -251,7 +252,7 @@ rewriting `vals`. function jac_coord!(nlp::AbstractNLPModel, x::AbstractVector, vals::AbstractVector) @lencheck nlp.meta.nvar x @lencheck nlp.meta.nnzj vals - check_unconstrained(nlp) + check_constrained(nlp) increment!(nlp, :neval_jac) lin_ind = 1:(nlp.meta.lin_nnzj) nlp.meta.nlin > 0 && jac_lin_coord!(nlp, x, view(vals, lin_ind)) @@ -267,7 +268,7 @@ Evaluate ``J(x)``, the constraints Jacobian at `x` in sparse coordinate format. """ function jac_coord(nlp::AbstractNLPModel{T, S}, x::AbstractVector) where {T, S} @lencheck nlp.meta.nvar x - check_unconstrained(nlp) + check_constrained(nlp) vals = S(undef, nlp.meta.nnzj) return jac_coord!(nlp, x, vals) end @@ -279,7 +280,7 @@ Evaluate ``J(x)``, the constraints Jacobian at `x` as a sparse matrix. """ function jac(nlp::AbstractNLPModel, x::AbstractVector) @lencheck nlp.meta.nvar x - check_unconstrained(nlp) + check_constrained(nlp) rows, cols = jac_structure(nlp) vals = jac_coord(nlp, x) sparse(rows, cols, vals, nlp.meta.ncon, nlp.meta.nvar) @@ -300,7 +301,7 @@ Evaluate ``J(x)``, the linear constraints Jacobian at `x` in sparse coordinate f """ function jac_lin_coord(nlp::AbstractNLPModel{T, S}, x::AbstractVector) where {T, S} @lencheck nlp.meta.nvar x - check_linear_constraints(nlp) + check_linearly_constrained(nlp) vals = S(undef, nlp.meta.lin_nnzj) return jac_lin_coord!(nlp, x, vals) end @@ -312,7 +313,7 @@ Evaluate ``J(x)``, the linear constraints Jacobian at `x` as a sparse matrix. 
""" function jac_lin(nlp::AbstractNLPModel, x::AbstractVector) @lencheck nlp.meta.nvar x - check_linear_constraints(nlp) + check_linearly_constrained(nlp) rows, cols = jac_lin_structure(nlp) vals = jac_lin_coord(nlp, x) sparse(rows, cols, vals, nlp.meta.nlin, nlp.meta.nvar) @@ -333,7 +334,7 @@ Evaluate ``J(x)``, the nonlinear constraints Jacobian at `x` in sparse coordinat """ function jac_nln_coord(nlp::AbstractNLPModel{T, S}, x::AbstractVector) where {T, S} @lencheck nlp.meta.nvar x - check_nonlinear_constraints(nlp) + check_nonlinearly_constrained(nlp) vals = S(undef, nlp.meta.nln_nnzj) return jac_nln_coord!(nlp, x, vals) end @@ -345,7 +346,7 @@ Evaluate ``J(x)``, the nonlinear constraints Jacobian at `x` as a sparse matrix. """ function jac_nln(nlp::AbstractNLPModel, x::AbstractVector) @lencheck nlp.meta.nvar x - check_nonlinear_constraints(nlp) + check_nonlinearly_constrained(nlp) rows, cols = jac_nln_structure(nlp) vals = jac_nln_coord(nlp, x) sparse(rows, cols, vals, nlp.meta.nnln, nlp.meta.nvar) @@ -358,7 +359,7 @@ Evaluate ``J(x)v``, the Jacobian-vector product at `x`. """ function jprod(nlp::AbstractNLPModel{T, S}, x::AbstractVector, v::AbstractVector) where {T, S} @lencheck nlp.meta.nvar x v - check_unconstrained(nlp) + check_constrained(nlp) Jv = S(undef, nlp.meta.ncon) return jprod!(nlp, x, v, Jv) end @@ -371,7 +372,7 @@ Evaluate ``J(x)v``, the Jacobian-vector product at `x` in place. function jprod!(nlp::AbstractNLPModel, x::AbstractVector, v::AbstractVector, Jv::AbstractVector) @lencheck nlp.meta.nvar x v @lencheck nlp.meta.ncon Jv - check_unconstrained(nlp) + check_constrained(nlp) increment!(nlp, :neval_jprod) nlp.meta.nlin > 0 && jprod_lin!(nlp, x, v, view(Jv, nlp.meta.lin)) nlp.meta.nnln > 0 && jprod_nln!(nlp, x, v, view(Jv, nlp.meta.nln)) @@ -395,7 +396,7 @@ function jprod!( @lencheck nlp.meta.nnzj rows cols vals @lencheck nlp.meta.nvar v @lencheck nlp.meta.ncon Jv - check_unconstrained(nlp) + check_constrained(nlp) increment!(nlp, :neval_jprod) coo_prod!(rows, cols, vals, v, Jv) end @@ -407,7 +408,7 @@ Evaluate ``J(x)v``, the linear Jacobian-vector product at `x`. """ function jprod_lin(nlp::AbstractNLPModel{T, S}, x::AbstractVector, v::AbstractVector) where {T, S} @lencheck nlp.meta.nvar x v - check_linear_constraints(nlp) + check_linearly_constrained(nlp) Jv = S(undef, nlp.meta.nlin) return jprod_lin!(nlp, x, v, Jv) end @@ -436,7 +437,7 @@ function jprod_lin!( @lencheck nlp.meta.lin_nnzj rows cols vals @lencheck nlp.meta.nvar v @lencheck nlp.meta.nlin Jv - check_linear_constraints(nlp) + check_linearly_constrained(nlp) increment!(nlp, :neval_jprod_lin) coo_prod!(rows, cols, vals, v, Jv) end @@ -448,7 +449,7 @@ Evaluate ``J(x)v``, the nonlinear Jacobian-vector product at `x`. """ function jprod_nln(nlp::AbstractNLPModel{T, S}, x::AbstractVector, v::AbstractVector) where {T, S} @lencheck nlp.meta.nvar x v - check_nonlinear_constraints(nlp) + check_nonlinearly_constrained(nlp) Jv = S(undef, nlp.meta.nnln) return jprod_nln!(nlp, x, v, Jv) end @@ -477,7 +478,7 @@ function jprod_nln!( @lencheck nlp.meta.nln_nnzj rows cols vals @lencheck nlp.meta.nvar v @lencheck nlp.meta.nnln Jv - check_nonlinear_constraints(nlp) + check_nonlinearly_constrained(nlp) increment!(nlp, :neval_jprod_nln) coo_prod!(rows, cols, vals, v, Jv) end @@ -490,6 +491,7 @@ Evaluate ``J(x)^Tv``, the transposed-Jacobian-vector product at `x`. 
function jtprod(nlp::AbstractNLPModel{T, S}, x::AbstractVector, v::AbstractVector) where {T, S} @lencheck nlp.meta.nvar x @lencheck nlp.meta.ncon v + check_constrained(nlp) Jtv = S(undef, nlp.meta.nvar) return jtprod!(nlp, x, v, Jtv) end @@ -503,6 +505,7 @@ If the problem has linear and nonlinear constraints, this function allocates. function jtprod!(nlp::AbstractNLPModel, x::AbstractVector, v::AbstractVector, Jtv::AbstractVector) @lencheck nlp.meta.nvar x Jtv @lencheck nlp.meta.ncon v + check_constrained(nlp) increment!(nlp, :neval_jtprod) if nlp.meta.nnln == 0 jtprod_lin!(nlp, x, v, Jtv) @@ -539,6 +542,7 @@ function jtprod!( @lencheck nlp.meta.nnzj rows cols vals @lencheck nlp.meta.ncon v @lencheck nlp.meta.nvar Jtv + check_constrained(nlp) increment!(nlp, :neval_jtprod) coo_prod!(cols, rows, vals, v, Jtv) end @@ -551,6 +555,7 @@ Evaluate ``J(x)^Tv``, the linear transposed-Jacobian-vector product at `x`. function jtprod_lin(nlp::AbstractNLPModel{T, S}, x::AbstractVector, v::AbstractVector) where {T, S} @lencheck nlp.meta.nvar x @lencheck nlp.meta.nlin v + check_linearly_constrained(nlp) Jtv = S(undef, nlp.meta.nvar) return jtprod_lin!(nlp, x, v, Jtv) end @@ -579,6 +584,7 @@ function jtprod_lin!( @lencheck nlp.meta.lin_nnzj rows cols vals @lencheck nlp.meta.nlin v @lencheck nlp.meta.nvar Jtv + check_linearly_constrained(nlp) increment!(nlp, :neval_jtprod_lin) coo_prod!(cols, rows, vals, v, Jtv) end @@ -591,6 +597,7 @@ Evaluate ``J(x)^Tv``, the nonlinear transposed-Jacobian-vector product at `x`. function jtprod_nln(nlp::AbstractNLPModel{T, S}, x::AbstractVector, v::AbstractVector) where {T, S} @lencheck nlp.meta.nvar x @lencheck nlp.meta.nnln v + check_nonlinearly_constrained(nlp) Jtv = S(undef, nlp.meta.nvar) return jtprod_nln!(nlp, x, v, Jtv) end @@ -619,6 +626,7 @@ function jtprod_nln!( @lencheck nlp.meta.nln_nnzj rows cols vals @lencheck nlp.meta.nnln v @lencheck nlp.meta.nvar Jtv + check_nonlinearly_constrained(nlp) increment!(nlp, :neval_jtprod_nln) coo_prod!(cols, rows, vals, v, Jtv) end @@ -632,6 +640,7 @@ The resulting object may be used as if it were a matrix, e.g., `J * v` or """ function jac_op(nlp::AbstractNLPModel{T, S}, x::AbstractVector) where {T, S} @lencheck nlp.meta.nvar x + check_constrained(nlp) Jv = S(undef, nlp.meta.ncon) Jtv = S(undef, nlp.meta.nvar) return jac_op!(nlp, x, Jv, Jtv) @@ -653,6 +662,7 @@ function jac_op!( ) where {T, S} @lencheck nlp.meta.nvar x Jtv @lencheck nlp.meta.ncon Jv + check_constrained(nlp) prod! = @closure (res, v, α, β) -> begin # res = α * J * v + β * res jprod!(nlp, x, v, Jv) if β == 0 @@ -692,6 +702,7 @@ function jac_op!( @lencheck nlp.meta.nnzj rows cols vals @lencheck nlp.meta.ncon Jv @lencheck nlp.meta.nvar Jtv + check_constrained(nlp) prod! = @closure (res, v, α, β) -> begin # res = α * J * v + β * res jprod!(nlp, rows, cols, vals, v, Jv) if β == 0 @@ -722,6 +733,7 @@ The resulting object may be used as if it were a matrix, e.g., `J * v` or """ function jac_lin_op(nlp::AbstractNLPModel{T, S}, x::AbstractVector) where {T, S} @lencheck nlp.meta.nvar x + check_linearly_constrained(nlp) Jv = S(undef, nlp.meta.nlin) Jtv = S(undef, nlp.meta.nvar) return jac_lin_op!(nlp, x, Jv, Jtv) @@ -743,6 +755,7 @@ function jac_lin_op!( ) where {T, S} @lencheck nlp.meta.nvar x Jtv @lencheck nlp.meta.nlin Jv + check_linearly_constrained(nlp) prod! 
= @closure (res, v, α, β) -> begin # res = α * J * v + β * res jprod_lin!(nlp, x, v, Jv) if β == 0 @@ -782,6 +795,7 @@ function jac_lin_op!( @lencheck nlp.meta.lin_nnzj rows cols vals @lencheck nlp.meta.nlin Jv @lencheck nlp.meta.nvar Jtv + check_linearly_constrained(nlp) prod! = @closure (res, v, α, β) -> begin # res = α * J * v + β * res jprod_lin!(nlp, rows, cols, vals, v, Jv) if β == 0 @@ -812,6 +826,7 @@ The resulting object may be used as if it were a matrix, e.g., `J * v` or """ function jac_nln_op(nlp::AbstractNLPModel{T, S}, x::AbstractVector) where {T, S} @lencheck nlp.meta.nvar x + check_nonlinearly_constrained(nlp) Jv = S(undef, nlp.meta.nnln) Jtv = S(undef, nlp.meta.nvar) return jac_nln_op!(nlp, x, Jv, Jtv) @@ -833,6 +848,7 @@ function jac_nln_op!( ) where {T, S} @lencheck nlp.meta.nvar x Jtv @lencheck nlp.meta.nnln Jv + check_nonlinearly_constrained(nlp) prod! = @closure (res, v, α, β) -> begin # res = α * J * v + β * res jprod_nln!(nlp, x, v, Jv) if β == 0 @@ -872,6 +888,7 @@ function jac_nln_op!( @lencheck nlp.meta.nln_nnzj rows cols vals @lencheck nlp.meta.nnln Jv @lencheck nlp.meta.nvar Jtv + check_nonlinearly_constrained(nlp) prod! = @closure (res, v, α, β) -> begin # res = α * J * v + β * res jprod_nln!(nlp, rows, cols, vals, v, Jv) if β == 0 @@ -902,6 +919,7 @@ Only the lower triangle is returned. function jth_hess_coord(nlp::AbstractNLPModel{T, S}, x::AbstractVector, j::Integer) where {T, S} @lencheck nlp.meta.nvar x @rangecheck 1 nlp.meta.ncon j + check_constrained(nlp) vals = S(undef, nlp.meta.nnzh) return jth_hess_coord!(nlp, x, j, vals) end @@ -924,6 +942,7 @@ A `Symmetric` object wrapping the lower triangle is returned. function jth_hess(nlp::AbstractNLPModel, x::AbstractVector, j::Integer) @lencheck nlp.meta.nvar x @rangecheck 1 nlp.meta.ncon j + check_constrained(nlp) rows, cols = hess_structure(nlp) vals = jth_hess_coord(nlp, x, j) return Symmetric(sparse(rows, cols, vals, nlp.meta.nvar, nlp.meta.nvar), :L) @@ -942,6 +961,7 @@ function jth_hprod( ) where {T, S} @lencheck nlp.meta.nvar x v @rangecheck 1 nlp.meta.ncon j + check_constrained(nlp) Hv = S(undef, nlp.meta.nvar) return jth_hprod!(nlp, x, v, j, Hv) end @@ -966,6 +986,7 @@ function ghjvprod( v::AbstractVector, ) where {T, S} @lencheck nlp.meta.nvar x g v + check_constrained(nlp) gHv = S(undef, nlp.meta.ncon) return ghjvprod!(nlp, x, g, v, gHv) end @@ -1061,6 +1082,7 @@ function hess_coord( ) where {T, S} @lencheck nlp.meta.nvar x @lencheck nlp.meta.ncon y + check_constrained(nlp) vals = S(undef, nlp.meta.nnzh) return hess_coord!(nlp, x, y, vals; obj_weight = obj_weight) end @@ -1102,6 +1124,7 @@ function hess( ) where {T, S} @lencheck nlp.meta.nvar x @lencheck nlp.meta.ncon y + check_constrained(nlp) rows, cols = hess_structure(nlp) vals = hess_coord(nlp, x, y, obj_weight = obj_weight) Symmetric(sparse(rows, cols, vals, nlp.meta.nvar, nlp.meta.nvar), :L) @@ -1141,6 +1164,7 @@ function hprod( ) where {T, S} @lencheck nlp.meta.nvar x v @lencheck nlp.meta.ncon y + check_constrained(nlp) Hv = S(undef, nlp.meta.nvar) return hprod!(nlp, x, y, v, Hv; obj_weight = obj_weight) end @@ -1227,6 +1251,7 @@ function hess_op( ) where {T, S} @lencheck nlp.meta.nvar x @lencheck nlp.meta.ncon y + check_constrained(nlp) Hv = S(undef, nlp.meta.nvar) return hess_op!(nlp, x, y, Hv, obj_weight = obj_weight) end @@ -1310,6 +1335,7 @@ function hess_op!( ) where {T, S} @lencheck nlp.meta.nvar x Hv @lencheck nlp.meta.ncon y + check_constrained(nlp) prod! 
= @closure (res, v, α, β) -> begin hprod!(nlp, x, y, v, Hv; obj_weight = obj_weight) if β == 0 diff --git a/src/nlp/utils.jl b/src/nlp/utils.jl index be1771da..f0ce18a6 100644 --- a/src/nlp/utils.jl +++ b/src/nlp/utils.jl @@ -67,25 +67,25 @@ macro rangecheck(lo, hi, vars...) Expr(:block, exprs...) end -const UnconstrainedErrorMessage = "Try to evaluate constraints, but the problem is unconstrained." +const UnconstrainedErrorMessage = "Trying to evaluate constraints, but the problem is unconstrained." -function check_unconstrained(nlp) +function check_constrained(nlp) if unconstrained(nlp) throw(error(UnconstrainedErrorMessage)) end end -const NonlinearUnconstrainedErrorMessage = "Try to evaluate nonlinear constraints, but the problem has none." +const NonlinearUnconstrainedErrorMessage = "Trying to evaluate nonlinear constraints, but the problem has none." -function check_nonlinear_constraints(nlp) +function check_nonlinearly_constrained(nlp) if nlp.meta.nnln == 0 throw(error(NonlinearUnconstrainedErrorMessage)) end end -const LinearUnconstrainedErrorMessage = "Try to evaluate linear constraints, but the problem has none." +const LinearUnconstrainedErrorMessage = "Trying to evaluate linear constraints, but the problem has none." -function check_linear_constraints(nlp) +function check_linearly_constrained(nlp) if nlp.meta.nlin == 0 throw(error(LinearUnconstrainedErrorMessage)) end diff --git a/test/nlp/dummy-model.jl b/test/nlp/dummy-model.jl index aa0983dd..7917a567 100644 --- a/test/nlp/dummy-model.jl +++ b/test/nlp/dummy-model.jl @@ -27,7 +27,7 @@ end @test_throws(MethodError, jth_hprod!(model, [0.0], [1.0], 2, [3.0])) @test_throws(MethodError, ghjvprod!(model, [0.0], [1.0], [2.0], [3.0])) @assert isa(hess_op(model, [0.0]), LinearOperator) - @assert isa(jac_op(model, [0.0]), LinearOperator) - @assert isa(jac_lin_op(model, [0.0]), LinearOperator) - @assert isa(jac_nln_op(model, [0.0]), LinearOperator) + @test_throws "Trying to evaluate constraints, but the problem is unconstrained." jac_op(model, [0.0]) + @test_throws "Trying to evaluate linear constraints, but the problem has none." jac_lin_op(model, [0.0]) + @test_throws "Trying to evaluate nonlinear constraints, but the problem has none." jac_nln_op(model, [0.0]) end From c89cb945f388cb7660cde46ea3bd86d375743196 Mon Sep 17 00:00:00 2001 From: tmigot Date: Sat, 22 Jun 2024 17:00:07 +0200 Subject: [PATCH 3/4] fix julia 1.6 --- test/nlp/dummy-model.jl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test/nlp/dummy-model.jl b/test/nlp/dummy-model.jl index 7917a567..81ec5b6c 100644 --- a/test/nlp/dummy-model.jl +++ b/test/nlp/dummy-model.jl @@ -27,7 +27,7 @@ end @test_throws(MethodError, jth_hprod!(model, [0.0], [1.0], 2, [3.0])) @test_throws(MethodError, ghjvprod!(model, [0.0], [1.0], [2.0], [3.0])) @assert isa(hess_op(model, [0.0]), LinearOperator) - @test_throws "Trying to evaluate constraints, but the problem is unconstrained." jac_op(model, [0.0]) - @test_throws "Trying to evaluate linear constraints, but the problem has none." jac_lin_op(model, [0.0]) - @test_throws "Trying to evaluate nonlinear constraints, but the problem has none." 
jac_nln_op(model, [0.0]) + @test_throws ErrorException("Trying to evaluate constraints, but the problem is unconstrained.") jac_op(model, [0.0]) + @test_throws ErrorException("Trying to evaluate linear constraints, but the problem has none.") jac_lin_op(model, [0.0]) + @test_throws ErrorException("Trying to evaluate nonlinear constraints, but the problem has none.") jac_nln_op(model, [0.0]) end From 21cb8aaeb72d065953a0248739ac1b3edd90a724 Mon Sep 17 00:00:00 2001 From: tmigot Date: Mon, 24 Jun 2024 15:57:49 +0200 Subject: [PATCH 4/4] does not have any --- src/nlp/utils.jl | 4 ++-- test/nlp/dummy-model.jl | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/nlp/utils.jl b/src/nlp/utils.jl index f0ce18a6..c57427da 100644 --- a/src/nlp/utils.jl +++ b/src/nlp/utils.jl @@ -75,7 +75,7 @@ function check_constrained(nlp) end end -const NonlinearUnconstrainedErrorMessage = "Trying to evaluate nonlinear constraints, but the problem has none." +const NonlinearUnconstrainedErrorMessage = "Trying to evaluate nonlinear constraints, but the problem does not have any." function check_nonlinearly_constrained(nlp) if nlp.meta.nnln == 0 @@ -83,7 +83,7 @@ function check_nonlinearly_constrained(nlp) end end -const LinearUnconstrainedErrorMessage = "Trying to evaluate linear constraints, but the problem has none." +const LinearUnconstrainedErrorMessage = "Trying to evaluate linear constraints, but the problem does not have any." function check_linearly_constrained(nlp) if nlp.meta.nlin == 0 diff --git a/test/nlp/dummy-model.jl b/test/nlp/dummy-model.jl index 81ec5b6c..69de0a7f 100644 --- a/test/nlp/dummy-model.jl +++ b/test/nlp/dummy-model.jl @@ -28,6 +28,6 @@ end @test_throws(MethodError, ghjvprod!(model, [0.0], [1.0], [2.0], [3.0])) @assert isa(hess_op(model, [0.0]), LinearOperator) @test_throws ErrorException("Trying to evaluate constraints, but the problem is unconstrained.") jac_op(model, [0.0]) - @test_throws ErrorException("Trying to evaluate linear constraints, but the problem has none.") jac_lin_op(model, [0.0]) - @test_throws ErrorException("Trying to evaluate nonlinear constraints, but the problem has none.") jac_nln_op(model, [0.0]) + @test_throws ErrorException("Trying to evaluate linear constraints, but the problem does not have any.") jac_lin_op(model, [0.0]) + @test_throws ErrorException("Trying to evaluate nonlinear constraints, but the problem does not have any.") jac_nln_op(model, [0.0]) end