Skip to content

Commit 00a4dd4

Browse files
committed
added test
1 parent bf137c4 commit 00a4dd4

File tree

3 files changed

+49
-36
lines changed

3 files changed

+49
-36
lines changed

src/nlp.jl

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -502,7 +502,6 @@ function _constraint!(c, f, pars)
502502
c.nconaug += nitr
503503
c.nnzj += nitr * f.o1step
504504
c.nnzh += nitr * f.o2step
505-
506505
c.con = ConstraintAug(c.con, f, convert_array(pars, c.backend), oa)
507506
end
508507

@@ -690,7 +689,7 @@ coord(itr, i, ::Tuple{}) = ()
690689
@inbounds @inline offset0(a::C, i) where {C<:ConstraintAug} = offset0(a.f, a.itr, i)
691690
@inbounds @inline offset0(f::F, itr, i) where {P<:Pair,F<:SIMDFunction{P}} =
692691
f.o0 + f.f.first(itr[i], nothing)
693-
@inbounds @inline offset0(f::F, itr, i) where {P<:Pair,F<:SIMDFunction{P}} = f.o0 + idxx(coord(itr, i, f.f.first), Base.size(itr))
692+
@inbounds @inline offset0(f::F, itr, i) where {T<:Tuple,P<:Pair{T},F<:SIMDFunction{P}} = f.o0 + idxx(coord(itr, i, f.f.first), Base.size(itr))
694693

695694
for (thing, val) in [(:solution, 1), (:multipliers_L, 0), (:multipliers_U, 2)]
696695
@eval begin

test/NLPTest/NLPTest.jl

Lines changed: 25 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -123,24 +123,24 @@ end
123123

124124
function runtests()
125125
@testset "NLP test" begin
126-
for (name, args) in NLP_TEST_ARGUMENTS
127-
@testset "$name $args" begin
126+
for backend in BACKENDS
127+
@testset "$backend" begin
128+
for (name, args) in NLP_TEST_ARGUMENTS
129+
@testset "$name $args" begin
128130

129-
exa_model = getfield(@__MODULE__, Symbol("_exa_$(name)_model"))
130-
jump_model = getfield(@__MODULE__, Symbol("_jump_$(name)_model"))
131+
exa_model = getfield(@__MODULE__, Symbol("_exa_$(name)_model"))
132+
jump_model = getfield(@__MODULE__, Symbol("_jump_$(name)_model"))
131133

132-
m, vars0, cons0 = exa_model(nothing, args)
133-
m0 = WrapperNLPModel(m)
134+
m, vars0, cons0 = exa_model(nothing, args)
135+
m0 = WrapperNLPModel(m)
134136

135-
m, vars2, cons2 = jump_model(nothing, args)
136-
m2 = MathOptNLPModel(m)
137+
m, vars2, cons2 = jump_model(nothing, args)
138+
m2 = MathOptNLPModel(m)
139+
140+
set_optimizer(m, MadNLP.Optimizer)
141+
set_optimizer_attribute(m, "print_level", MadNLP.ERROR)
142+
optimize!(m)
137143

138-
set_optimizer(m, MadNLP.Optimizer)
139-
set_optimizer_attribute(m, "print_level", MadNLP.ERROR)
140-
optimize!(m)
141-
142-
for backend in BACKENDS
143-
@testset "$backend" begin
144144

145145
m, vars1, cons1 = exa_model(backend, args)
146146
m1 = WrapperNLPModel(m)
@@ -164,11 +164,21 @@ function runtests()
164164
end
165165
end
166166
end
167-
168167
result1 = madnlp(m1; print_level = MadNLP.ERROR)
169168
test_api(result1, vars1, cons1, vars2, cons2)
170169
end
171170
end
171+
172+
m, vars0, cons0 = _exa_luksan_vlcek_model(nothing, args; M = 2)
173+
m3 = WrapperNLPModel(m)
174+
175+
m, vars2, cons2 = _jump_luksan_vlcek_model(nothing, args; M = 2)
176+
m4 = MathOptNLPModel(m)
177+
178+
@testset "Multi-column constraints" begin
179+
test_nlp(m3, m4; full = false)
180+
end
181+
172182
end
173183
end
174184
end

test/NLPTest/luksan.jl

Lines changed: 23 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -1,47 +1,51 @@
1-
function luksan_vlcek_obj(x, i)
2-
return 100 * (x[i-1]^2 - x[i])^2 + (x[i-1] - 1)^2
1+
function luksan_vlcek_obj(x, i, j)
2+
return 100 * (x[i-1, j]^2 - x[i, j])^2 + (x[i-1, j] - 1)^2
33
end
44

5-
function luksan_vlcek_con(x, i)
6-
return 3x[i+1]^3 + 2 * x[i+2] - 5 + sin(x[i+1] - x[i+2])sin(x[i+1] + x[i+2]) + 4x[i+1] -
7-
x[i]exp(x[i] - x[i+1]) - 3
5+
function luksan_vlcek_con1(x, i, j)
6+
return 3x[i+1, j]^3 + 2 * x[i+2, j] - 5
7+
end
8+
function luksan_vlcek_con2(x, i, j)
9+
return sin(x[i+1, j] - x[i+2, j])sin(x[i+1, j] + x[i+2, j]) + 4x[i+1, j] -
10+
x[i, j]exp(x[i, j] - x[i+1, j]) - 3
811
end
912

1013
function luksan_vlcek_x0(i)
1114
return mod(i, 2) == 1 ? -1.2 : 1.0
1215
end
1316

14-
function _exa_luksan_vlcek_model(backend, N)
17+
function _exa_luksan_vlcek_model(backend, N; M = 1)
1518

1619
c = ExaCore(backend = backend)
17-
x = variable(c, N; start = (luksan_vlcek_x0(i) for i = 1:N))
18-
s = constraint(c, luksan_vlcek_con(x, i) for i = 1:N-2)
19-
objective(c, luksan_vlcek_obj(x, i) for i = 2:N)
20+
x = variable(c, N, M; start = [luksan_vlcek_x0(i) for i = 1:N, j=1:M])
21+
s = constraint(c, luksan_vlcek_con1(x, i, j) for i = 1:N-2, j=1:M)
22+
constraint!(c, s, (i,j) => luksan_vlcek_con2(x, i, j) for i = 1:N-2, j=1:M)
23+
objective(c, luksan_vlcek_obj(x, i, j) for i = 2:N, j=1:M)
2024

2125
return ExaModel(c; prod = true), (x,), (s,)
2226
end
2327

24-
function exa_luksan_vlcek_model(backend, N)
25-
m, vars, cons = _exa_luksan_vlcek_model(backend, N)
28+
function exa_luksan_vlcek_model(backend, N; M = 1)
29+
m, vars, cons = _exa_luksan_vlcek_model(backend, N;M = M)
2630
return m
2731
end
2832

29-
function _jump_luksan_vlcek_model(backend, N)
33+
function _jump_luksan_vlcek_model(backend, N; M = 1)
3034
jm = JuMP.Model()
3135

32-
JuMP.@variable(jm, x[i = 1:N], start = mod(i, 2) == 1 ? -1.2 : 1.0)
36+
JuMP.@variable(jm, x[i = 1:N, j=1:M], start = mod(i, 2) == 1 ? -1.2 : 1.0)
3337
JuMP.@NLconstraint(
3438
jm,
35-
s[i = 1:N-2],
36-
3x[i+1]^3 + 2x[i+2] - 5 + sin(x[i+1] - x[i+2])sin(x[i+1] + x[i+2]) + 4x[i+1] -
37-
x[i]exp(x[i] - x[i+1]) - 3 == 0.0
39+
s[i = 1:N-2, j=1:M],
40+
3x[i+1,j]^3 + 2x[i+2,j] - 5 + sin(x[i+1,j] - x[i+2,j])sin(x[i+1,j] + x[i+2,j]) + 4x[i+1,j] -
41+
x[i,j]exp(x[i,j] - x[i+1,j]) - 3 == 0.0
3842
)
39-
JuMP.@NLobjective(jm, Min, sum(100(x[i-1]^2 - x[i])^2 + (x[i-1] - 1)^2 for i = 2:N))
43+
JuMP.@NLobjective(jm, Min, sum(100(x[i-1,j]^2 - x[i,j])^2 + (x[i-1,j] - 1)^2 for i = 2:N, j=1:M))
4044

4145
return jm, (x,), (s,)
4246
end
4347

44-
function jump_luksan_vlcek_model(backend, N)
45-
jm, vars, cons = _jump_luksan_vlcek_model(backend, N)
48+
function jump_luksan_vlcek_model(backend, N; M = 1)
49+
jm, vars, cons = _jump_luksan_vlcek_model(backend, N; M = M)
4650
return MathOptNLPModel(jm)
4751
end

0 commit comments

Comments
 (0)