1 file changed, +12 -2 lines changed

@@ -277,12 +277,22 @@ def update_inference_batch_size(param_value: int) -> None:
         config["data"]["test_subset"]["batch_size"] = param_value

     def update_learning_rate(param_value: float) -> None:
-        config["model"]["init_args"]["optimizer"]["init_args"]["lr"] = param_value
+        optimizer = config["model"]["init_args"]["optimizer"]
+        if isinstance(optimizer, dict) and "init_args" in optimizer:
+            optimizer["init_args"]["lr"] = param_value
+        else:
+            warn("Warning: learning_rate is not updated", stacklevel=1)

     def update_learning_rate_warmup_iters(param_value: int) -> None:
         scheduler = config["model"]["init_args"]["scheduler"]
-        if scheduler["class_path"] == "otx.core.schedulers.LinearWarmupSchedulerCallable":
+        if (
+            isinstance(scheduler, dict)
+            and "class_path" in scheduler
+            and scheduler["class_path"] == "otx.core.schedulers.LinearWarmupSchedulerCallable"
+        ):
             scheduler["init_args"]["num_warmup_steps"] = param_value
+        else:
+            warn("Warning: learning_rate_warmup_iters is not updated", stacklevel=1)

     def update_num_iters(param_value: int) -> None:
         config["max_epochs"] = param_value
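In plain terms, the change replaces unconditional nested-dict writes with guarded ones: before writing `lr` or `num_warmup_steps`, the code now checks that the optimizer/scheduler entry is a plain dict with the expected keys, and falls back to a `warn(...)` call instead of raising a `KeyError`/`TypeError` when the config is laid out differently (for example, if the optimizer were supplied as an already-built callable rather than a `class_path`/`init_args` mapping). Below is a minimal, self-contained sketch of that guard-and-warn pattern against a toy config dict. The config layout, the values, and the `torch.optim.SGD` class path are illustrative assumptions, not taken from the repository; the explicit `from warnings import warn` is added here only to make the sketch runnable on its own.

```python
from warnings import warn

# Hypothetical stand-in for the config dict the real updater functions close over.
config = {
    "max_epochs": 100,
    "model": {
        "init_args": {
            "optimizer": {
                "class_path": "torch.optim.SGD",  # assumed value for illustration
                "init_args": {"lr": 0.01},
            },
            "scheduler": {
                "class_path": "otx.core.schedulers.LinearWarmupSchedulerCallable",
                "init_args": {"num_warmup_steps": 10},
            },
        },
    },
}


def update_learning_rate(param_value: float) -> None:
    optimizer = config["model"]["init_args"]["optimizer"]
    # Only write when the optimizer entry is a dict with the expected layout.
    if isinstance(optimizer, dict) and "init_args" in optimizer:
        optimizer["init_args"]["lr"] = param_value
    else:
        warn("Warning: learning_rate is not updated", stacklevel=1)


def update_learning_rate_warmup_iters(param_value: int) -> None:
    scheduler = config["model"]["init_args"]["scheduler"]
    # Only the linear-warmup scheduler exposes num_warmup_steps; skip anything else.
    if (
        isinstance(scheduler, dict)
        and "class_path" in scheduler
        and scheduler["class_path"] == "otx.core.schedulers.LinearWarmupSchedulerCallable"
    ):
        scheduler["init_args"]["num_warmup_steps"] = param_value
    else:
        warn("Warning: learning_rate_warmup_iters is not updated", stacklevel=1)


update_learning_rate(0.001)
update_learning_rate_warmup_iters(50)
print(config["model"]["init_args"]["optimizer"]["init_args"]["lr"])                 # 0.001
print(config["model"]["init_args"]["scheduler"]["init_args"]["num_warmup_steps"])   # 50
```

With a config that matches the expected shape, the values are updated in place; with any other shape the functions degrade to a `UserWarning` instead of aborting the surrounding update flow.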