From d6a1e51615f487ef60acd65dd7a6599ba09ece68 Mon Sep 17 00:00:00 2001
From: Vaibhav Kumar Dixit
Date: Tue, 24 Sep 2024 09:33:26 -0400
Subject: [PATCH] Update OptimizationOptimisers.jl

---
 lib/OptimizationOptimisers/src/OptimizationOptimisers.jl | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/lib/OptimizationOptimisers/src/OptimizationOptimisers.jl b/lib/OptimizationOptimisers/src/OptimizationOptimisers.jl
index ea2ef9202..309038e4e 100644
--- a/lib/OptimizationOptimisers/src/OptimizationOptimisers.jl
+++ b/lib/OptimizationOptimisers/src/OptimizationOptimisers.jl
@@ -80,7 +80,7 @@ function SciMLBase.__solve(cache::OptimizationCache{
     t0 = time()
 
     Optimization.@withprogress cache.progress name="Training" begin
-        for _ in 1:maxiters
+        for epoch in 1:maxiters
             for (i, d) in enumerate(data)
                 if cache.f.fg !== nothing && dataiterate
                     x = cache.f.fg(G, θ, d)
@@ -93,7 +93,7 @@ function SciMLBase.__solve(cache::OptimizationCache{
                     cache.f.grad(G, θ)
                     x = cache.f(θ)
                 end
-                opt_state = Optimization.OptimizationState(iter = i + (epoch-1)*length(data),
+                opt_state = Optimization.OptimizationState(iter = i + (epoch-1)*length(data),
                     u = θ,
                     objective = x[1],
                     grad = G,
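
Note (not part of the patch): a minimal standalone sketch of the indexing change, using hypothetical stand-ins for the cache's data iterator and maxiters. It illustrates that iter = i + (epoch-1)*length(data) makes the iteration count reported to OptimizationState increase monotonically across epochs instead of resetting to 1 at the start of each epoch, under the assumption that the data iterator has a known length, as in the patched loop.

    # Hypothetical stand-ins for the data iterator and maxiters used in __solve.
    data = ["batch1", "batch2", "batch3"]   # 3 minibatches per epoch
    maxiters = 2                            # 2 epochs

    for epoch in 1:maxiters
        for (i, d) in enumerate(data)
            iter_old = i                                # before the patch: 1, 2, 3, 1, 2, 3
            iter_new = i + (epoch - 1) * length(data)   # after the patch:  1, 2, 3, 4, 5, 6
            println((epoch = epoch, batch = i, iter_old = iter_old, iter_new = iter_new))
        end
    end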