diff --git a/Project.toml b/Project.toml
index 82030991..ab3d8c64 100644
--- a/Project.toml
+++ b/Project.toml
@@ -36,6 +36,7 @@ ProximalOperators = "0.15"
 RegularizedProblems = "0.1.3"
 ShiftedProximalOperators = "0.2"
 SolverCore = "0.3.10"
+SolverTest = "0.3.18"
 julia = "^1.6.0"
 
 [extras]
@@ -43,8 +44,9 @@ ADNLPModels = "54578032-b7ea-4c30-94aa-7cbd1cce6c9a"
 OptimizationProblems = "5049e819-d29b-5fba-b941-0eee7e64c1c6"
 Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
 RegularizedProblems = "ea076b23-609f-44d2-bb12-a4ae45328278"
+SolverTest = "4343dc35-3317-4c6e-8877-f0cc8502c90e"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
 TestSetExtensions = "98d24dd4-01ad-11ea-1b02-c9a08f80db04"
 
 [targets]
-test = ["ADNLPModels", "OptimizationProblems", "Random", "RegularizedProblems", "Test", "TestSetExtensions"]
+test = ["ADNLPModels", "OptimizationProblems", "Random", "RegularizedProblems", "SolverTest", "Test", "TestSetExtensions"]
diff --git a/src/utils.jl b/src/utils.jl
index 15c53d9e..0992cc3d 100644
--- a/src/utils.jl
+++ b/src/utils.jl
@@ -189,3 +189,30 @@ function update_bounds!(
     @. u_bound_m_x = min(Δ, u_bound - xk)
   end
 end
+
+for solver in [
+  :R2,
+  :R2N,
+  :R2DH,
+  :TR,
+  :TRDH,
+]
+  @eval function $solver(nlp::AbstractNLPModel{T, V}; selected = 1:get_nvar(nlp), kwargs...) where {T, V}
+    # Wrap the smooth problem with a zero regularizer and dispatch to the regularized solver.
+    h = NullRegularizer(T)
+    reg_nlp = RegularizedNLPModel(nlp, h, selected)
+    return $solver(reg_nlp; kwargs...)
+  end
+end
+
+for solver in [
+  :LM,
+  :LMTR,
+]
+  @eval function $solver(nls::AbstractNLSModel{T, V}; selected = 1:get_nvar(nls), kwargs...) where {T, V}
+    # Same wrapping for smooth nonlinear least-squares models.
+    h = NullRegularizer(T)
+    reg_nls = RegularizedNLSModel(nls, h, selected)
+    return $solver(reg_nls; kwargs...)
+  end
+end
diff --git a/test/runtests.jl b/test/runtests.jl
index caaf8e12..4cb4fed7 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -8,7 +8,8 @@ using ADNLPModels,
   NLPModelsModifiers,
   RegularizedProblems,
   RegularizedOptimization,
-  SolverCore
+  SolverCore,
+  SolverTest
 
 Random.seed!(0)
 const global compound = 1
@@ -19,6 +20,7 @@ const global bpdn2, bpdn_nls2, sol2 = bpdn_model(compound, bounds = true)
 const global λ = norm(grad(bpdn, zeros(bpdn.meta.nvar)), Inf) / 10
 
 include("test_AL.jl")
+include("test-smooth.jl")
 
 for (mod, mod_name) ∈ ((x -> x, "exact"), (LSR1Model, "lsr1"), (LBFGSModel, "lbfgs"))
   for (h, h_name) ∈ ((NormL0(λ), "l0"), (NormL1(λ), "l1"), (IndBallL0(10 * compound), "B0"))
diff --git a/test/test-smooth.jl b/test/test-smooth.jl
new file mode 100644
index 00000000..c478a0a5
--- /dev/null
+++ b/test/test-smooth.jl
@@ -0,0 +1,25 @@
+@testset "smooth NLP" begin
+  @testset "unconstrained" begin
+    for solver in [R2, R2N, R2DH, TR, TRDH]
+      unconstrained_nlp(solver, atol = 1e-3, rtol = 1e-3)
+    end
+  end
+  @testset "bound-constrained" begin
+    for solver in [R2, R2N, R2DH, TR, TRDH]
+      bound_constrained_nlp(solver, atol = 1e-3, rtol = 1e-3)
+    end
+  end
+end
+
+@testset "smooth NLS" begin
+  @testset "unconstrained" begin
+    for solver in [LM, LMTR]
+      unconstrained_nls(solver, atol = 1e-3, rtol = 1e-3)
+    end
+  end
+  @testset "bound-constrained" begin
+    for solver in [LM, LMTR]
+      bound_constrained_nls(solver, atol = 1e-3, rtol = 1e-3)
+    end
+  end
+end