Skip to content

Commit 4e8be11

Browse files
test: add tests for SurrogatesPolyChaos, SVM, RandomForest
1 parent 658a2cd commit 4e8be11

File tree

1 file changed

+157
-2
lines changed

1 file changed

+157
-2
lines changed

test/extensions.jl

Lines changed: 157 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -187,7 +187,19 @@ end
187187
update!(surrogate, x_new, y_new)
188188
end
189189

190-
@testset "Optimization" begin
190+
@testset "1D Optimization" begin
    # Fit a minimal neural surrogate to a 1-D linear objective and make sure
    # SRBF-based surrogate optimization runs to completion on it.
    lower = 0.0
    upper = 10.0
    xs = sample(5, lower, upper, SobolSample())
    f = z -> 2 * z + 3
    ys = f.(xs)
    # Single dense layer; `first` unwraps the 1-element output to a scalar.
    net = Chain(Dense(1, 1), first)
    surrogate = NeuralSurrogate(xs, ys, lower, upper, model = net)
    surrogate_optimize!(f, SRBF(), lower, upper, surrogate,
        SobolSample(), maxiters = 15)
end
201+
202+
@testset "ND Optimization" begin
191203
lb = [1.0, 1.0]
192204
ub = [6.0, 6.0]
193205
x = sample(5, lb, ub, SobolSample())
@@ -262,4 +274,147 @@ end
262274
my_second = SecondOrderPolynomialSurrogate(x, y, lb, ub)
263275
Zygote.gradient(x -> sum(my_second(x)), [2.0, 5.0])
264276
end
265-
end
277+
end
278+
279+
@safetestset "PolynomialChaosSurrogates" begin
    using Surrogates
    using PolyChaos
    using Zygote

    # Scalar-input PCE surrogate on a linear objective: interpolation,
    # update!, and a custom orthogonal-polynomial basis.
    @testset "Scalar Inputs" begin
        n = 20
        lb = 0.0
        ub = 4.0
        f = x -> 2 * x
        x = sample(n, lb, ub, SobolSample())
        y = f.(x)
        my_pce = PolynomialChaosSurrogate(x, y, lb, ub)
        x_val = 1.2
        # Float comparison: the PCE of a linear function should match it (≈).
        @test my_pce(x_val) ≈ f(x_val)
        update!(my_pce, [3.0], [6.0])
        my_pce_changed = PolynomialChaosSurrogate(
            x, y, lb, ub; orthopolys = Uniform01OrthoPoly(1))
        @test my_pce_changed(x_val) ≈ f(x_val)
    end

    # Vector-input PCE surrogate on a bilinear objective, plus a
    # mixed multi-polynomial basis constructor smoke test.
    @testset "Vector Inputs" begin
        n = 60
        lb = [0.0, 0.0]
        ub = [5.0, 5.0]
        f = x -> x[1] * x[2]
        x = collect.(sample(n, lb, ub, SobolSample()))
        y = f.(x)
        my_pce = PolynomialChaosSurrogate(x, y, lb, ub)
        x_val = [1.2, 1.4]
        @test my_pce(x_val) ≈ f(x_val)
        update!(my_pce, [[2.0, 3.0]], [6.0])
        # Prediction should still be accurate after the update.
        @test my_pce(x_val) ≈ f(x_val)
        op1 = Uniform01OrthoPoly(1)
        op2 = Beta01OrthoPoly(2, 2, 1.2)
        ops = [op1, op2]
        multi_poly = MultiOrthoPoly(ops, min(1, 2))
        my_pce_changed = PolynomialChaosSurrogate(x, y, lb, ub, orthopolys = multi_poly)
    end

    # Scalar derivative through the surrogate via the ' adjoint syntax.
    @testset "Derivative" begin
        lb = 0.0
        ub = 3.0
        f = x -> x^2
        n = 50
        x = collect(sample(n, lb, ub, SobolSample()))
        y = f.(x)
        my_poli = PolynomialChaosSurrogate(x, y, lb, ub)
        g = x -> my_poli'(x)
        x_val = 3.0
        # d/dx x^2 = 2x
        @test g(x_val) ≈ 2 * x_val
    end

    # Gradient of an ND surrogate via Zygote reverse-mode AD.
    @testset "Gradient" begin
        n = 50
        lb = [0.0, 0.0]
        ub = [10.0, 10.0]
        x = collect.(sample(n, lb, ub, SobolSample()))
        f = x -> x[1] * x[2]
        y = f.(x)
        my_poli_ND = PolynomialChaosSurrogate(x, y, lb, ub)
        g = x -> Zygote.gradient(my_poli_ND, x)[1]
        x_val = [1.0, 2.0]
        # ∇(x₁x₂) = (x₂, x₁)
        @test g(x_val) ≈ [x_val[2], x_val[1]]
    end
end
345+
346+
@safetestset "RandomForestSurrogate" begin
    using Surrogates
    using XGBoost: xgboost, predict

    # The surrogate must agree exactly with an XGBoost model trained directly
    # on the same data with the same number of boosting rounds.
    @testset "1D" begin
        obj_1D = x -> 3 * x + 1
        x = [1.0, 2.0, 3.0, 4.0, 5.0]
        y = obj_1D.(x)
        a = 0.0
        b = 10.0
        num_round = 2
        # Pass the shared `num_round` variable to both models (it was
        # previously declared but shadowed by hard-coded literals).
        my_forest_1D = RandomForestSurrogate(x, y, a, b; num_round = num_round)
        xgboost1 = xgboost((reshape(x, length(x), 1), y); num_round = num_round)
        val = my_forest_1D(3.5)
        @test predict(xgboost1, [3.5;;])[1] == val
        # update! with a single point and with a batch of points.
        update!(my_forest_1D, [6.0], [19.0])
        update!(my_forest_1D, [7.0, 8.0], obj_1D.([7.0, 8.0]))
    end

    @testset "ND" begin
        lb = [0.0, 0.0, 0.0]
        ub = [10.0, 10.0, 10.0]
        x = sample(5, lb, ub, SobolSample())
        obj_ND = x -> x[1] * x[2]^2 * x[3]
        y = obj_ND.(x)
        num_round = 2
        my_forest_ND = RandomForestSurrogate(x, y, lb, ub; num_round = num_round)
        # XGBoost expects observations as rows, hence the transpose.
        xgboostND = xgboost((reduce(hcat, collect.(x))', y); num_round = num_round)
        val = my_forest_ND([1.0, 1.0, 1.0])
        @test predict(xgboostND, reshape([1.0, 1.0, 1.0], 3, 1))[1] == val
        update!(my_forest_ND, [[1.0, 1.0, 1.0]], [1.0])
        update!(my_forest_ND, [[1.2, 1.2, 1.0], [1.5, 1.5, 1.0]], [1.728, 3.375])
    end
end
379+
380+
@safetestset "SVMSurrogate" begin
    using Surrogates
    using LIBSVM

    # In each case the surrogate's prediction must match a reference SVC
    # fitted directly with LIBSVM on the identical training data.
    @testset "1D" begin
        objective = x -> 2 * x + 1
        a = 0.0
        b = 10.0
        xs = sample(5, a, b, SobolSample())
        ys = objective.(xs)
        reference = LIBSVM.fit!(SVC(), reshape(xs, length(xs), 1), ys)
        surrogate = SVMSurrogate(xs, ys, a, b)
        val = surrogate([5.0])
        @test LIBSVM.predict(reference, [5.0;;])[1] == val
        # Add one point, then a batch, and refit the reference on the
        # surrogate's accumulated data.
        update!(surrogate, [3.1], [7.2])
        update!(surrogate, [3.2, 3.5], [7.4, 8.0])
        reference = LIBSVM.fit!(
            SVC(), reshape(surrogate.x, length(surrogate.x), 1), surrogate.y)
        val = surrogate(3.1)
        @test LIBSVM.predict(reference, [3.1;;])[1] == val
    end

    @testset "ND" begin
        objective = x -> x[1]^2 * x[2]
        lb = [0.0, 0.0]
        ub = [10.0, 10.0]
        xs = sample(100, lb, ub, RandomSample())
        ys = objective.(xs)
        # LIBSVM wants samples as rows, hence the lazy transpose.
        reference = LIBSVM.fit!(SVC(), transpose(reduce(hcat, collect.(xs))), ys)
        surrogate = SVMSurrogate(xs, ys, lb, ub)
        probe = [5.0, 1.2]
        val = surrogate(probe)
        @test LIBSVM.predict(reference, reshape(probe, 1, 2))[1] == val
        update!(surrogate, [(1.0, 1.0)], [1.0])
        update!(surrogate, [(1.2, 1.2), (1.5, 1.5)], [1.728, 3.375])
        reference = LIBSVM.fit!(
            SVC(), transpose(reduce(hcat, collect.(surrogate.x))), surrogate.y)
        probe = [1.0, 1.0]
        val = surrogate(probe)
        @test LIBSVM.predict(reference, reshape(probe, 1, 2))[1] == val
    end
end

0 commit comments

Comments
 (0)