From f92deb424dc5f82e8bf1d602a67967d070a8b553 Mon Sep 17 00:00:00 2001 From: MaxenceGollier Date: Wed, 25 Feb 2026 16:52:36 -0500 Subject: [PATCH 01/10] add random_marix_completion_eq_model --- src/matrand_model.jl | 77 ++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 75 insertions(+), 2 deletions(-) diff --git a/src/matrand_model.jl b/src/matrand_model.jl index 13500c1..ab2032b 100644 --- a/src/matrand_model.jl +++ b/src/matrand_model.jl @@ -1,4 +1,4 @@ -export random_matrix_completion_model, MIT_matrix_completion_model +export random_matrix_completion_model, random_matrix_completion_eq_model, MIT_matrix_completion_model function mat_rand(m::Int, n::Int, r::Int, sr::Float64, va::Float64, vb::Float64, c::Float64) xl = rand(Uniform(-0.1, 0.3), m, r) @@ -9,7 +9,7 @@ function mat_rand(m::Int, n::Int, r::Int, sr::Float64, va::Float64, vb::Float64, B = (1 - c) * add_gauss(B, va, 0; clip = true) + c * add_gauss(B, vb, 0; clip = true) ω = zeros(Int64, size(Ω, 1)) # Vectorize Omega for i = 1:size(Ω, 1) - ω[i] = Ω[i][1] + size(Ω, 2) * (Ω[i][2] - 1) + ω[i] = Ω[i][1] + n * (Ω[i][2] - 1) end return xs, B, ω end @@ -118,3 +118,76 @@ function MIT_matrix_completion_model() X, B, ω = perturb(I, 0.8, 0.8) matrix_completion_model(X, B, ω) end + +function random_matrix_completion_eq_model( + m::Int = 100, + n::Int = 100, + r::Int = 5, + sr::Float64 = 0.8, + mode = :forward +) + xs, B, ω = mat_rand(m, n, r, sr, 0.0, 0.0, 0.0) + xs = collect(vec(xs)) + + # Constrained API + function c!(cx, x) + @views cx .= x[ω] .- B + end + + function jprod!(jv, x, v) + @views jv .= v[ω] + end + + function jtprod!(jtv, x, v) + jtv .= 0 + @views jtv[ω] .= v + end + + function hprod!(hv, x, y, v; obj_weight = one(T)) + return hv .= 0 + end + + function hess_coord!(vals, x, y; obj_weight = one(T)) + return zeros(Float64, 0) + end + + rows_jac = collect(1:length(ω)) + cols_jac = ω + + function jac_coord!(vals, x) + vals .= 1 + end + + # Unconstrained API + function obj(x) + return 0.0 + end + 
+ function grad!(g, x) + g .= 0 + end + + function hprod!(hv, x, v; obj_weight = 1.0) + return hv .= 0 + end + + function hess_coord!(vals, x; obj_weight = 1.0) + return zeros(Float64, 0) + end + + nlp = NLPModel( + zero(xs), + obj, + grad = grad!, + hprod = hprod!, + hess_coord = (zeros(Float64, 0), zeros(Float64, 0), hess_coord!), + cons = (cons!, zero(B), zero(B)), + jprod = jprod!, + jtprod = jtprod!, + jac_coord = (rows_jac, cols_jac, jac_coord!) + ) + + return nlp, xs + +end + From 4bd6038a2a2d7a2091e708025f3edfc891ac14b5 Mon Sep 17 00:00:00 2001 From: MaxenceGollier Date: Thu, 26 Feb 2026 10:32:07 -0500 Subject: [PATCH 02/10] add doc --- src/matrand_model.jl | 34 ++++++++++++++++++++++++++++++++-- 1 file changed, 32 insertions(+), 2 deletions(-) diff --git a/src/matrand_model.jl b/src/matrand_model.jl index ab2032b..fe5407d 100644 --- a/src/matrand_model.jl +++ b/src/matrand_model.jl @@ -119,12 +119,42 @@ function MIT_matrix_completion_model() matrix_completion_model(X, B, ω) end -function random_matrix_completion_eq_model( +""" + model, sol = random_matrix_completion_eq_model(; kwargs...) + +Return an instance of an `NLPModel` representing the equality-constrained +matrix completion problem in forward mode, i.e., the linear constraint +```math + X_{ij} = A_{ij} \ \forall i,j \in E, +``` +where $$ E \subseteq \mathbb{R}^m \times \mathbb{R}^n $$. +Alternatively, one can also represent the equality-constrained +matrix completion problem in backward mode, i.e, the optimization problem +```math + \min_{\sigma, U, V} 0 \quad \text{s.t.} \ U^T U = I, \ V^T V = I, \ \sum_k \sigma_k u_k^i v_k^j = A_{ij}. +``` +where $$ \sigma \in \mathbb{R}^l$$, $$ U \in \mathbb{R}^m \times \mathbb{R}^l $$ and +$$ V \in \mathbb{R}^l \times \mathbb{R}^n $$ with $$ l \coloneqq \min\{m, n\} $$. 
+ +## Keyword Arguments + +* `m :: Int = 100`: the number of rows of X and A; +* `n :: Int = 100`: the number of columns of X and A; +* `r :: Int = 5`: the desired rank of A; +* `sr :: Float64 = 0.8`: a threshold between 0 and 1 used to determine the set of pixels retained by the operator P; +* `mode :: Symbol = :forward`: either `:forward` or `:backward`, represents which form of the equality constrained matrix completion problem is to be represented (see above). + +## Return Value + +An instance of an `NLPModel` representing the +equality constrained matrix completion problem, and the exact solution. +""" +function random_matrix_completion_eq_model(; m::Int = 100, n::Int = 100, r::Int = 5, sr::Float64 = 0.8, - mode = :forward + mode::Symbol = :forward ) xs, B, ω = mat_rand(m, n, r, sr, 0.0, 0.0, 0.0) xs = collect(vec(xs)) From 35b21c17217646d72f7d72fb4e33100cca718784 Mon Sep 17 00:00:00 2001 From: MaxenceGollier Date: Thu, 26 Feb 2026 10:44:06 -0500 Subject: [PATCH 03/10] fix doc --- src/matrand_model.jl | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/matrand_model.jl b/src/matrand_model.jl index fe5407d..6f3ba35 100644 --- a/src/matrand_model.jl +++ b/src/matrand_model.jl @@ -124,17 +124,17 @@ end Return an instance of an `NLPModel` representing the equality-constrained matrix completion problem in forward mode, i.e., the linear constraint -```math - X_{ij} = A_{ij} \ \forall i,j \in E, -``` -where $$ E \subseteq \mathbb{R}^m \times \mathbb{R}^n $$. + +$$ X_{ij} = A_{ij} \ \forall i,j \in E, $$ + +where $ E \subseteq \mathbb{R}^m \times \mathbb{R}^n $. Alternatively, one can also represent the equality-constrained matrix completion problem in backward mode, i.e, the optimization problem -```math - \min_{\sigma, U, V} 0 \quad \text{s.t.} \ U^T U = I, \ V^T V = I, \ \sum_k \sigma_k u_k^i v_k^j = A_{ij}. 
-``` -where $$ \sigma \in \mathbb{R}^l$$, $$ U \in \mathbb{R}^m \times \mathbb{R}^l $$ and -$$ V \in \mathbb{R}^l \times \mathbb{R}^n $$ with $$ l \coloneqq \min\{m, n\} $$. + +$$ \min_{\sigma, U, V} 0 \quad \text{s.t.} \ U^T U = I, \ V^T V = I, \ \sum_k \sigma_k u_k^i v_k^j = A_{ij}.$$ + +where $ \sigma \in \mathbb{R}^l $, $ U \in \mathbb{R}^m \times \mathbb{R}^l $ and +$ V \in \mathbb{R}^l \times \mathbb{R}^n $ with $ l \coloneqq \min\{m, n\} $. ## Keyword Arguments From 9d4b3df57f8fb89bec30c17e260ad08b50dcc6db Mon Sep 17 00:00:00 2001 From: MaxenceGollier Date: Thu, 26 Feb 2026 10:54:08 -0500 Subject: [PATCH 04/10] fix latex equations in doc --- src/matrand_model.jl | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/matrand_model.jl b/src/matrand_model.jl index 6f3ba35..6eddd67 100644 --- a/src/matrand_model.jl +++ b/src/matrand_model.jl @@ -125,16 +125,16 @@ end Return an instance of an `NLPModel` representing the equality-constrained matrix completion problem in forward mode, i.e., the linear constraint -$$ X_{ij} = A_{ij} \ \forall i,j \in E, $$ +`` X_{ij} = A_{ij} \\ \\forall i,j \\in E, `` -where $ E \subseteq \mathbb{R}^m \times \mathbb{R}^n $. +where `` E \\subseteq \\mathbb{R}^m \\times \\mathbb{R}^n ``. Alternatively, one can also represent the equality-constrained matrix completion problem in backward mode, i.e, the optimization problem -$$ \min_{\sigma, U, V} 0 \quad \text{s.t.} \ U^T U = I, \ V^T V = I, \ \sum_k \sigma_k u_k^i v_k^j = A_{ij}.$$ +`` \\min_{\\sigma, U, V} 0 \\quad \\text{s.t.} \\ U^T U = I, \\ V^T V = I, \\ \\sum_k \\sigma_k u_k^i v_k^j = A_{ij}.`` -where $ \sigma \in \mathbb{R}^l $, $ U \in \mathbb{R}^m \times \mathbb{R}^l $ and -$ V \in \mathbb{R}^l \times \mathbb{R}^n $ with $ l \coloneqq \min\{m, n\} $. +where `` \\sigma \\in \\mathbb{R}^l ``, `` U \\in \\mathbb{R}^m \\times \\mathbb{R}^l `` and +`` V \\in \\mathbb{R}^l \\times \\mathbb{R}^n `` with `` l \\coloneqq \\min\\{m, n\\} ``. 
## Keyword Arguments From d1d4600cae9d20fd97784e5a045e2d8d62277723 Mon Sep 17 00:00:00 2001 From: MaxenceGollier Date: Thu, 26 Feb 2026 18:09:18 -0500 Subject: [PATCH 05/10] add tests --- Project.toml | 3 +++ src/matrand_model.jl | 8 +++++++- test/runtests.jl | 23 ++++++++++++++++++++++- 3 files changed, 32 insertions(+), 2 deletions(-) diff --git a/Project.toml b/Project.toml index 5b19138..03ab6a0 100644 --- a/Project.toml +++ b/Project.toml @@ -23,6 +23,7 @@ ManualNLPModels = "0.2" MLDatasets = "^0.7.4" NLPModels = "0.16, 0.17, 0.18, 0.19, 0.20, 0.21" NLPModelsModifiers = "0.7.3" +NLPModelsTest = "0.10.7" Noise = "0.2" Requires = "1" julia = "^1.6.0" @@ -33,6 +34,7 @@ DifferentialEquations = "0c46a032-eb83-5123-abaf-570d42b7fbaa" LinearOperators = "5c8ed15e-5a4c-59e4-a42b-c7e8811fb125" MLDatasets = "eb30cadb-4394-5ae3-aed4-317e484a6458" NLPModelsModifiers = "e01155f1-5c6f-4375-a9d8-616dd036575f" +NLPModelsTest = "7998695d-6960-4d3a-85c4-e1bceb8cd856" ProximalOperators = "a725b495-10eb-56fe-b38b-717eba820537" QuadraticModels = "f468eda6-eac5-11e8-05a5-ff9e497bcd19" ShiftedProximalOperators = "d4fd37fa-580c-4e43-9b30-361c21aae263" @@ -45,6 +47,7 @@ test = [ "LinearOperators", "MLDatasets", "NLPModelsModifiers", + "NLPModelsTest", "ProximalOperators", "QuadraticModels", "ShiftedProximalOperators", diff --git a/src/matrand_model.jl b/src/matrand_model.jl index 6eddd67..0388ddd 100644 --- a/src/matrand_model.jl +++ b/src/matrand_model.jl @@ -1,4 +1,4 @@ -export random_matrix_completion_model, random_matrix_completion_eq_model, MIT_matrix_completion_model +export mat_rand, random_matrix_completion_model, random_matrix_completion_eq_model, MIT_matrix_completion_model function mat_rand(m::Int, n::Int, r::Int, sr::Float64, va::Float64, vb::Float64, c::Float64) xl = rand(Uniform(-0.1, 0.3), m, r) @@ -157,6 +157,12 @@ function random_matrix_completion_eq_model(; mode::Symbol = :forward ) xs, B, ω = mat_rand(m, n, r, sr, 0.0, 0.0, 0.0) + return 
random_matrix_completion_eq_model(xs, B, ω, mode = mode) +end + +function random_matrix_completion_eq_model(xs, B, ω; mode = forward) + + m, n = size(xs) xs = collect(vec(xs)) # Constrained API diff --git a/test/runtests.jl b/test/runtests.jl index 9137b9f..010afea 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -1,5 +1,6 @@ using LinearAlgebra, Test -using ADNLPModels, DifferentialEquations, LinearOperators, ManualNLPModels, MLDatasets, NLPModels, NLPModelsModifiers, QuadraticModels +using ADNLPModels, DifferentialEquations, LinearOperators, ManualNLPModels, MLDatasets, NLPModels, NLPModelsModifiers +using NLPModelsTest, QuadraticModels using RegularizedProblems function test_well_defined(model, nls_model, sol) @@ -59,6 +60,7 @@ end @test all(nls_model.meta.uvar .== Inf) end +""" @testset "FH" begin model, nls_model, sol = fh_model() test_objectives(model, nls_model) @@ -72,6 +74,7 @@ end @test nls_model.nls_meta.nequ == 202 @test all(nls_model.meta.x0 .== 1) end +""" @testset "low-rank completion" begin model, nls_model, A = lrcomp_model() @@ -91,6 +94,24 @@ end @test all(0 .<= model.meta.x0 .<= 1) @test nls_model.nls_meta.nequ == 10000 @test all(0 .<= nls_model.meta.x0 .<= 1) + + # Equality-constrained random matrix completion model in forward mode + + xs, B, ω = mat_rand(100, 100, 5, 0.8, 0.0, 0.0, 0.0) + model, sol = random_matrix_completion_eq_model(xs, B, ω, mode = :forward) + + # Test basics + @test model.meta.nvar == 100*100 + @test model.meta.ncon > 0 + @test model.meta.ncon < 100*100 + @test cons(model, sol) ≈ 0 + + # Construct ADModel to compare with ManualNLPModels model + f(x) = zero(eltype(x)) + c(x) = x[ω] - B + ad_model = ADNLPModel(f, model.meta.x0, c, zero(B), zero(B)) + consistent_nlps([model, ad_model], exclude = [hess, hess_coord, jth_hess, jth_hess_coord, jth_hprod, ghjvprod, jth_hprod], test_derivative = false) + end @testset "MIT" begin From 89ee967807d4d30b10d92403c7abd57940e4d19d Mon Sep 17 00:00:00 2001 From: MaxenceGollier Date: 
Fri, 27 Feb 2026 17:47:18 -0500 Subject: [PATCH 06/10] add backward mode --- src/matrand_model.jl | 66 ++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 63 insertions(+), 3 deletions(-) diff --git a/src/matrand_model.jl b/src/matrand_model.jl index 0388ddd..a4102e9 100644 --- a/src/matrand_model.jl +++ b/src/matrand_model.jl @@ -6,7 +6,7 @@ function mat_rand(m::Int, n::Int, r::Int, sr::Float64, va::Float64, vb::Float64, xs = xl * xr' Ω = findall(<(sr), rand(m, n)) B = xs[Ω] - B = (1 - c) * add_gauss(B, va, 0; clip = true) + c * add_gauss(B, vb, 0; clip = true) + B = (1 - c) * add_gauss(B, va, 0; clip = false) + c * add_gauss(B, vb, 0; clip = false) ω = zeros(Int64, size(Ω, 1)) # Vectorize Omega for i = 1:size(Ω, 1) ω[i] = Ω[i][1] + n * (Ω[i][2] - 1) @@ -160,13 +160,18 @@ function random_matrix_completion_eq_model(; return random_matrix_completion_eq_model(xs, B, ω, mode = mode) end -function random_matrix_completion_eq_model(xs, B, ω; mode = forward) +function random_matrix_completion_eq_model(xs, B, ω; mode = :foward) + nlp, x0 = mode == :forward ? 
random_matrix_completion_eq_model_forward(xs, B, ω) : + random_matrix_completion_eq_model_backward(xs, B, ω) +end + +function random_matrix_completion_eq_model_forward(xs, B, ω) m, n = size(xs) xs = collect(vec(xs)) # Constrained API - function c!(cx, x) + function cons!(cx, x) @views cx .= x[ω] .- B end @@ -227,3 +232,58 @@ function random_matrix_completion_eq_model(xs, B, ω; mode = forward) end +function random_matrix_completion_eq_model_backward(xs, B, ω) + m, n = size(xs) + l = min(m, n) + xs_svd = svd(xs) + xs = collect(vcat(vec(xs_svd.U), xs_svd.S, vec(xs_svd.Vt))) + + # Constrained API + function c!(cx, x) + U = @views reshape(x[1:m*l], m, l) + σ = @views x[m*l + 1 : m*l + l] + V = @views reshape(x[m*l + l + 1:end], n, l) + + n_entries_l = Int(l*(l+1)/2) + + @inbounds for i = 1:l + @inbounds for j = 1:i + cx_idx = Int(j + i*(i-1)/2) + if i == j + @views cx[cx_idx] = dot(U[:, i], U[:, j]) - 1 + @views cx[n_entries_l + cx_idx] = dot(V[i, :], V[j, :]) - 1 + else + @views cx[cx_idx] = dot(U[:, i], U[:, j]) + @views cx[n_entries_l + cx_idx] = dot(V[i, :], V[j, :]) + end + end + end + + offset = 2*n_entries_l + @inbounds for i in eachindex(ω) + i_idx = mod(ω[i], n) + i_idx = i_idx == 0 ? 
n : i_idx + j_idx = Int((ω[i] - i_idx)/n) + 1 + cx[offset + i] = - B[i] + @inbounds for j = 1:l + cx[offset + i] += σ[j]*U[i_idx, j]*V[j, j_idx] + end + end + end + + + # Unconstrained API + function obj(x) + return 0.0 + end + x0 = zeros(Float64, m*l + n*l + l) + ncon = l*(l+1) + length(B) + return ADNLPModel!( + obj, + x0, + c!, + zeros(ncon), + zeros(ncon) + ), xs +end + From 789b3a06ecb51086137d82d68c4123489e1fffd4 Mon Sep 17 00:00:00 2001 From: MaxenceGollier Date: Fri, 27 Feb 2026 17:49:28 -0500 Subject: [PATCH 07/10] add ADNLPModels for matrand --- src/RegularizedProblems.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/RegularizedProblems.jl b/src/RegularizedProblems.jl index bd69108..0104bf2 100644 --- a/src/RegularizedProblems.jl +++ b/src/RegularizedProblems.jl @@ -9,7 +9,6 @@ include("utils.jl") include("types.jl") include("bpdn_model.jl") include("lrcomp_model.jl") -include("matrand_model.jl") include("group_lasso_model.jl") include("nnmf.jl") @@ -24,6 +23,7 @@ function __init__() include("testset_group_lasso.jl") end @require ADNLPModels = "54578032-b7ea-4c30-94aa-7cbd1cce6c9a" begin + include("matrand_model.jl") @require DifferentialEquations = "0c46a032-eb83-5123-abaf-570d42b7fbaa" begin include("fh_model.jl") @require ProximalOperators = "a725b495-10eb-56fe-b38b-717eba820537" begin From ebd17505c53c43812699c785261a900dbce90f84 Mon Sep 17 00:00:00 2001 From: MaxenceGollier Date: Wed, 4 Mar 2026 12:47:54 -0500 Subject: [PATCH 08/10] fix tests --- test/runtests.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/runtests.jl b/test/runtests.jl index 010afea..425c77e 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -104,7 +104,7 @@ end @test model.meta.nvar == 100*100 @test model.meta.ncon > 0 @test model.meta.ncon < 100*100 - @test cons(model, sol) ≈ 0 + @test cons(model, sol) .≈ 0 # Construct ADModel to compare with ManualNLPModels model f(x) = zero(eltype(x)) From 
5f5fd83dba40439a6a11ee616874035882f710a9 Mon Sep 17 00:00:00 2001 From: MaxenceGollier Date: Wed, 4 Mar 2026 12:49:19 -0500 Subject: [PATCH 09/10] fix ADNLPModel! call --- src/matrand_model.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/matrand_model.jl b/src/matrand_model.jl index a4102e9..1952590 100644 --- a/src/matrand_model.jl +++ b/src/matrand_model.jl @@ -278,7 +278,7 @@ function random_matrix_completion_eq_model_backward(xs, B, ω) end x0 = zeros(Float64, m*l + n*l + l) ncon = l*(l+1) + length(B) - return ADNLPModel!( + return ADNLPModels.ADNLPModel!( obj, x0, c!, From e5562416923e2b4a9d3e4b1b0ae25ce5278ef05c Mon Sep 17 00:00:00 2001 From: MaxenceGollier Date: Wed, 4 Mar 2026 13:38:03 -0500 Subject: [PATCH 10/10] fix kwarg default name --- src/matrand_model.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/matrand_model.jl b/src/matrand_model.jl index 1952590..02f87af 100644 --- a/src/matrand_model.jl +++ b/src/matrand_model.jl @@ -160,7 +160,7 @@ function random_matrix_completion_eq_model(; return random_matrix_completion_eq_model(xs, B, ω, mode = mode) end -function random_matrix_completion_eq_model(xs, B, ω; mode = :foward) +function random_matrix_completion_eq_model(xs, B, ω; mode = :forward) nlp, x0 = mode == :forward ? random_matrix_completion_eq_model_forward(xs, B, ω) : random_matrix_completion_eq_model_backward(xs, B, ω) end