
Commit 7593c40

Don't use _tol in tests (#1158)

* Don't use _tol in tests
* don't deprecate g_tol yet

1 parent 6bcbc53 commit 7593c40
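
The rename is mechanical: wherever the tests set an absolute tolerance through the old g_tol/f_tol keywords, they now use g_abstol/f_abstol. Both spellings still configure the same stopping criterion, since the deprecation of g_tol is deferred (see src/types.jl below). A minimal sketch of the two spellings:

```julia
using Optim

rosenbrock(x) = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2

# Old spelling: still accepted, silently forwarded to g_abstol.
opts_old = Optim.Options(g_tol = 1e-12)

# New spelling, used throughout the test suite after this commit.
opts_new = Optim.Options(g_abstol = 1e-12)

res = optimize(rosenbrock, [-1.2, 1.0], BFGS(), opts_new)
```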

File tree

7 files changed: +18 -18 lines changed


src/api.jl

Lines changed: 2 additions & 0 deletions
@@ -157,6 +157,8 @@ f_relchange(r::MultivariateOptimizationResults) = r.f_relchange
 
 g_tol(r::OptimizationResults) = error("g_tol is not implemented for $(summary(r)).")
 g_tol(r::MultivariateOptimizationResults) = r.g_abstol
+g_abstol(r::OptimizationResults) = error("g_tol is not implemented for $(summary(r)).")
+g_abstol(r::MultivariateOptimizationResults) = r.g_abstol
 g_residual(r::MultivariateOptimizationResults) = r.g_residual
 
 
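After this addition a result object exposes the tolerance under both names; a small usage sketch (rosenbrock as defined above):

```julia
res = optimize(rosenbrock, [-1.2, 1.0], BFGS(), Optim.Options(g_abstol = 1e-12))

# Both accessors read the same field of a multivariate result:
Optim.g_abstol(res) == Optim.g_tol(res)  # true; both return res.g_abstol

# For result types without the field (e.g. univariate results), both fall
# back to the error method, which the tests assert with @test_throws.
```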

src/types.jl

Lines changed: 1 addition & 3 deletions
@@ -113,9 +113,7 @@ function Options(;
         x_abstol = x_tol
     end
     if !(g_tol === nothing)
-        @warn(
-            lazy"g_tol is deprecated. Use g_abstol instead. The provided value ($(g_tol)) will be used as g_abstol.",
-        )
+        # lets deprecate this when reltol is introduced
         g_abstol = g_tol
     end
     if !(f_tol === nothing)
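With the @warn removed, passing the old keyword is silent again, but the value is still copied into g_abstol; a sketch of the observable behavior, using the accessor added in src/api.jl:

```julia
opts = Optim.Options(g_tol = 1e-9)   # no deprecation warning is emitted
res = optimize(rosenbrock, [-1.2, 1.0], BFGS(), opts)
Optim.g_abstol(res)                  # 1.0e-9: the g_tol value became g_abstol
```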

test/general/api.jl

Lines changed: 11 additions & 11 deletions
@@ -82,21 +82,21 @@
     optimize(f, g!, h!, initial_x, SimulatedAnnealing())
 
     options = Optim.Options(
-        g_tol = 1e-12,
+        g_abstol = 1e-12,
         iterations = 10,
         store_trace = true,
         show_trace = false,
     )
     res = optimize(f, g!, h!, initial_x, BFGS(), options)
 
     options_g = Optim.Options(
-        g_tol = 1e-12,
+        g_abstol = 1e-12,
         iterations = 10,
         store_trace = true,
         show_trace = false,
     )
     options_f = Optim.Options(
-        g_tol = 1e-12,
+        g_abstol = 1e-12,
         iterations = 10,
         store_trace = true,
         show_trace = false,
@@ -114,7 +114,7 @@
 
     res = optimize(f, g!, h!, initial_x, BFGS(), options_g)
     options_ext = Optim.Options(
-        g_tol = 1e-12,
+        g_abstol = 1e-12,
         iterations = 10,
         store_trace = true,
         show_trace = false,
@@ -132,7 +132,7 @@
     @test Optim.x_converged(res) == false
     @test Optim.f_converged(res) == false
     @test Optim.g_converged(res) == false
-    @test Optim.g_tol(res) == 1e-12
+    @test Optim.g_abstol(res) == 1e-12
     @test Optim.iteration_limit_reached(res) == true
     @test Optim.initial_state(res) == [-1.2, 1.0]
     @test haskey(Optim.trace(res_ext)[1].metadata, "x")
@@ -160,7 +160,7 @@
 
 
 
-    resgterm = optimize(f, g!, initial_x, BFGS(), Optim.Options(g_tol = 1e12))
+    resgterm = optimize(f, g!, initial_x, BFGS(), Optim.Options(g_abstol = 1e12))
     Optim.termination_code(resgterm) == Optim.TerminationCode.GradientNorm
 
     resx0term = optimize(
@@ -170,7 +170,7 @@
         BFGS(),
         Optim.Options(
             x_reltol = -1,
-            g_tol = -1,
+            g_abstol = -1,
             f_abstol = -1,
             f_reltol = -1,
             iterations = 10^10,
@@ -187,7 +187,7 @@
         BFGS(),
         Optim.Options(
             x_abstol = -1,
-            g_tol = -1,
+            g_abstol = -1,
             f_abstol = -1,
             f_reltol = -1,
             iterations = 10^10,
@@ -205,7 +205,7 @@
         Optim.Options(
             x_abstol = 1e-3,
             x_reltol = -1,
-            g_tol = -1,
+            g_abstol = -1,
             f_abstol = -1,
             f_reltol = -1,
             iterations = 10^10,
@@ -221,7 +221,7 @@
         Optim.Options(
             x_abstol = -1,
             x_reltol = 1e-3,
-            g_tol = -1,
+            g_abstol = -1,
             f_abstol = -1,
             f_reltol = -1,
             iterations = 10^10,
@@ -258,7 +258,7 @@ end
     @test_throws ErrorException Optim.g_converged(res)
     @test_throws ErrorException Optim.x_tol(res)
     @test_throws ErrorException Optim.f_tol(res)
-    @test_throws ErrorException Optim.g_tol(res)
+    @test_throws ErrorException Optim.g_abstol(res)
     res = optimize(f, -2.0, 1.0, GoldenSection(), store_trace = true, extended_trace = true)
 
     # Right now, these just "test" if they run

test/general/optimize.jl

Lines changed: 1 addition & 1 deletion
@@ -88,7 +88,7 @@ end
     end
 
     # To set tight tolerance on gradient g, need to disable any check on f
-    options = Optim.Options(g_tol = 1e-10, f_reltol = NaN, f_abstol = NaN)
+    options = Optim.Options(g_abstol = 1e-10, f_reltol = NaN, f_abstol = NaN)
     result = Optim.optimize(
         rosenbrock,
         g_rosenbrock!,
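The NaN settings work because every comparison with NaN is false, so the objective-change criteria can never fire, leaving the gradient-norm test as the only tolerance-based stop; a one-line illustration:

```julia
# Any comparison with NaN is false, so a check like
# abs(f_x - f_x_previous) <= f_abstol never succeeds when f_abstol = NaN:
abs(0.001) <= NaN  # false
```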

test/multivariate/manifolds.jl

Lines changed: 1 addition & 1 deletion
@@ -35,7 +35,7 @@ using StableRNGs
             gmanif!,
             x0,
             method(manifold = manif, linesearch = ls()),
-            Optim.Options(allow_f_increases = true, g_tol = 1e-6),
+            Optim.Options(allow_f_increases = true, g_abstol = 1e-6),
         )
         debug_printing && printstyled("Iter\tf-calls\tg-calls\n", color = :green)
         debug_printing && printstyled(

test/multivariate/optimize/interface.jl

Lines changed: 1 addition & 1 deletion
@@ -28,7 +28,7 @@
     fgh_res = []
     push!(fgh_res, optimize(tup..., problem.initial_x))
     for m in (NelderMead(), LBFGS(), Newton())
-        push!(fgh_res, optimize(tup..., problem.initial_x; f_tol = 1e-8))
+        push!(fgh_res, optimize(tup..., problem.initial_x; f_abstol = 1e-8))
         push!(fgh_res, optimize(tup..., problem.initial_x, m))
         push!(fgh_res, optimize(tup..., problem.initial_x, m, Optim.Options()))
     end
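The keyword form of optimize forwards tolerance keywords into Optim.Options behind the scenes, so (assuming that forwarding) these two calls configure the same stopping rule:

```julia
res_kw  = optimize(rosenbrock, [-1.2, 1.0]; f_abstol = 1e-8)
res_opt = optimize(rosenbrock, [-1.2, 1.0], NelderMead(), Optim.Options(f_abstol = 1e-8))
```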

test/multivariate/successive_f_tol.jl

Lines changed: 1 addition & 1 deletion
@@ -3,7 +3,7 @@
         alphaguess = LineSearches.InitialStatic(),
         linesearch = LineSearches.Static(),
     )
-    opt = Optim.Options(iterations = 10, successive_f_tol = 5, f_tol = 3, g_tol = -1)
+    opt = Optim.Options(iterations = 10, successive_f_tol = 5, f_abstol = 3, g_tol = -1)
     result = Optim.optimize(sum, (y, _) -> fill!(y, 1), [0.0, 0.0], alg, opt)
     @test result.iterations == opt.successive_f_tol + 1
 end
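The assertion pins down the semantics of successive_f_tol: with f_abstol = 3 deliberately loose and g_tol = -1 ruling out gradient convergence, the objective-change criterion holds on every iteration, and the run stops only once it has held more than successive_f_tol consecutive times. A hypothetical restatement of that counting rule (a sketch, not Optim's internal code):

```julia
hits, iters = 0, 0
while iters < 10                   # Options(iterations = 10)
    iters += 1
    f_criterion_holds = true       # f_abstol = 3 is loose enough to pass every time
    hits = f_criterion_holds ? hits + 1 : 0
    hits > 5 && break              # successive_f_tol = 5
end
iters  # 6, i.e. successive_f_tol + 1, matching the @test above
```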
