# using Revise
using Test
using LinearAlgebra
using IncrementalInference
using ManifoldsBase
using Manifolds, Manopt
import Optim
# ... (lines elided) ...
189190
190191X = hat (M, e0, zeros (6 ))
191192g_FD! (X, q)
193+
194+ @show X_ = [X. x[1 ][:]; X. x[2 ][:]]
192195# gradient at the optimal point should be zero
193- @test isapprox (0 , sum (abs .(X[:] )); atol= 1e-8 )
196+ @test isapprox (0 , sum (abs .(X_ )); atol= 1e-8 )
194197
195198# gradient not the optimal point should be non-zero
196199g_FD! (X, e0)
197- @test 0.01 < sum (abs .(X[:]))
200+ @show X_ = [X. x[1 ][:]; X. x[2 ][:]]
201+ @test 0.01 < sum (abs .(X_))
198202
199203# # do optimization
200204x0 = deepcopy (e0)
201205sol = Optim. optimize (f, g_FD!, x0, Optim. ConjugateGradient (; manifold= ManifoldWrapper (M)))
202- Cq .= 0.5 * randn (6 )
206+ # Cq .= 0.5*randn(6)
203207# Cq[
204208@show sol. minimizer
205209@test isapprox ( f (sol. minimizer), 0 ; atol= 1e-8 )
206210@test isapprox ( 0 , sum (abs .(log (M, e0, compose (M, inv (M,q), sol. minimizer)))); atol= 1e-5 )
207211
208212
213+ # #
214+ end
215+
216+
217+ @testset " Optim.Manifolds, SpecialEuclidean(3), using IIF.optimizeManifold_FD" begin
218+ # #
219+
220+ M = Manifolds. SpecialEuclidean (3 )
221+ e0 = ArrayPartition ([0 ,0 ,0. ], Matrix (_Rot. RotXYZ (0 ,0 ,0. )))
222+
223+ x0 = deepcopy (e0)
224+ Cq = 0.5 * randn (6 )
225+ q = exp (M,e0,hat (M,e0,Cq))
226+
227+ f (p) = distance (M, p, q)^ 2
228+
229+ sol = IncrementalInference. optimizeManifold_FD (M,f,x0)
230+
231+ @show sol. minimizer
232+ @test isapprox ( f (sol. minimizer), 0 ; atol= 1e-8 )
233+ @test isapprox ( 0 , sum (abs .(log (M, e0, compose (M, inv (M,q), sol. minimizer)))); atol= 1e-5 )
234+
235+
209236# #
210237end