Created
January 28, 2025 08:59
Custom Optimizer realized by Subclass approach for NeuralForecast
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#| hide
# Test customized optimizer behavior: predictions from a user-defined optimizer
# should differ from those produced by the default (Adam-based) optimizer.
# Tests cover models built on different base classes: BaseWindows (NHITS),
# BaseRecurrent (RNN), and BaseMultivariate (StemGNN).
def custom_optimizer(base_model):
    """Build an Adadelta optimizer and StepLR scheduler for *base_model*.

    Returns a Lightning-style configuration dict with keys ``'optimizer'``
    and ``'lr_scheduler'``, the shape ``configure_optimizers`` expects.
    """
    # Adadelta with a non-default rho so results visibly differ from Adam.
    adadelta = torch.optim.Adadelta(params=base_model.parameters(), rho=0.75)
    # Huge step_size (1e8 steps) keeps the learning rate effectively constant.
    step_lr = torch.optim.lr_scheduler.StepLR(
        optimizer=adadelta, step_size=10e7, gamma=0.5
    )
    lr_config = dict(
        scheduler=step_lr,
        interval='step',
        frequency=1,
        monitor='val_loss',
        strict=True,
        name=None,
    )
    return dict(optimizer=adadelta, lr_scheduler=lr_config)
for nf_model in [NHITS, RNN, StemGNN]:
    # NHITS / RNN / StemGNN exercise BaseWindows, BaseRecurrent and
    # BaseMultivariate respectively; all of them default to Adam.
    base_kwargs = {"h": 12, "input_size": 24, "max_steps": 1}
    if nf_model.__name__ == "StemGNN":
        # The multivariate model additionally needs the series count.
        base_kwargs["n_series"] = 2
    # Keep a pristine copy for the subclass run (no `config_optimizers` key).
    subclass_kwargs = base_kwargs.copy()

    # 1) Baseline: fit and predict with the library's default optimizer.
    nf_default = NeuralForecast(models=[nf_model(**base_kwargs)], freq='M')
    nf_default.fit(AirPassengersPanel_train)
    default_mean = nf_default.predict().loc[:, nf_model.__name__].mean()

    # 2) Same model, optimizer injected via the `config_optimizers` parameter.
    base_kwargs['config_optimizers'] = custom_optimizer
    nf_custom = NeuralForecast(models=[nf_model(**base_kwargs)], freq='M')
    nf_custom.fit(AirPassengersPanel_train)
    custom_mean = nf_custom.predict().loc[:, nf_model.__name__].mean()

    # A different optimizer must change the predictions vs. the default.
    assert custom_mean != default_mean

    # 3) Subclass approach: override configure_optimizers directly.
    class CustomOptimizerSubclass(nf_model):
        def configure_optimizers(self):
            # Same Adadelta + StepLR setup as `custom_optimizer` above.
            optimizer = torch.optim.Adadelta(params=self.parameters(), rho=0.75)
            scheduler = torch.optim.lr_scheduler.StepLR(
                optimizer=optimizer, step_size=10e7, gamma=0.5
            )
            return {
                'optimizer': optimizer,
                'lr_scheduler': {
                    'scheduler': scheduler,
                    'interval': 'step',
                    'frequency': 1,
                    'monitor': 'val_loss',
                    'strict': True,
                    'name': None,
                },
            }

    assert "config_optimizers" not in subclass_kwargs
    nf_subclass = NeuralForecast(
        models=[CustomOptimizerSubclass(**subclass_kwargs)], freq='M'
    )
    nf_subclass.fit(AirPassengersPanel_train)
    subclass_mean = (
        nf_subclass.predict().loc[:, "CustomOptimizerSubclass"].mean()
    )
    # Both customization routes must produce identical predictions.
    assert subclass_mean == custom_mean
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment