
LambdaStateScheduler

class ignite.handlers.state_param_scheduler.LambdaStateScheduler(lambda_obj, param_name, save_history=False, create_new=False)[source]
Update a parameter during training by using a user-defined callable object.

The user-defined callable object takes an event index as input and returns the parameter value.
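For instance, a plain function works as well as a class instance. A minimal sketch (the name linear_decay is illustrative, not part of the API):

def linear_decay(event_index):
    # hypothetical callable: maps the event index to a parameter value
    return max(0.0, 1.0 - 0.1 * event_index)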

Parameters
  • lambda_obj (Any) – user-defined callable object.

  • param_name (str) – name of the parameter to update.

  • save_history (bool) – whether to log the parameter values to engine.state.param_history (default=False); see the sketch after the example below.

  • create_new (bool) – whether to create param_name on engine.state, taking into account whether an attribute of that name already exists. Overrides an existing attribute by default (default=False).

Examples

from collections import OrderedDict

import torch
from torch import nn, optim

from ignite.engine import *
from ignite.handlers import *
from ignite.metrics import *
from ignite.utils import *
from ignite.contrib.metrics.regression import *
from ignite.contrib.metrics import *

# create default evaluator for doctests

def eval_step(engine, batch):
    return batch

default_evaluator = Engine(eval_step)

# create default optimizer for doctests

param_tensor = torch.zeros([1], requires_grad=True)
default_optimizer = torch.optim.SGD([param_tensor], lr=0.1)

# create default trainer for doctests
# as handlers could be attached to the trainer,
# each test must define its own trainer using `.. testsetup:`

def get_default_trainer():

    def train_step(engine, batch):
        return batch

    return Engine(train_step)

# create default model for doctests

default_model = nn.Sequential(OrderedDict([
    ('base', nn.Linear(4, 2)),
    ('fc', nn.Linear(2, 1))
]))

manual_seed(666)
default_trainer = get_default_trainer()

class LambdaState:
    def __init__(self, initial_value, gamma):
        self.initial_value = initial_value
        self.gamma = gamma

    def __call__(self, event_index):
        return self.initial_value * self.gamma ** (event_index % 9)

param_scheduler = LambdaStateScheduler(
    param_name="param", lambda_obj=LambdaState(1, 0.9), create_new=True
)

# the parameter is "param"; LambdaState(1, 0.9) sets initial_value=1 and gamma=0.9
# the class LambdaState provides the user-defined callable object,
# which takes an event index as input and returns the parameter value
# here the value is computed as initial_value * gamma ** (event_index % 9)
# at every epoch the parameter is updated as 1 * 0.9 ** (epoch % 9)
# at epoch 3, param = 1 * 0.9 ** (3 % 9) = 0.729
# at epoch 10, param = 1 * 0.9 ** (10 % 9) = 0.9

param_scheduler.attach(default_trainer, Events.EPOCH_COMPLETED)

@default_trainer.on(Events.EPOCH_COMPLETED)
def print_param():
    print(default_trainer.state.param)

default_trainer.run([0], max_epochs=10)
0.9
0.81
0.7290...
0.6561
0.5904...
0.5314...
0.4782...
0.4304...
1.0
0.9
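As a minimal sketch of save_history=True (reusing LambdaState and get_default_trainer from above; the exact contents printed from param_history are illustrative), the logged values can be read back from engine.state.param_history under the parameter name:

history_trainer = get_default_trainer()

history_scheduler = LambdaStateScheduler(
    param_name="param",
    lambda_obj=LambdaState(1, 0.9),
    save_history=True,
    create_new=True,
)
history_scheduler.attach(history_trainer, Events.EPOCH_COMPLETED)

history_trainer.run([0], max_epochs=3)

# one entry per triggering event, keyed by the parameter name
print(history_trainer.state.param_history["param"])
# e.g. [0.9, 0.81, 0.729...]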

New in version 0.4.7.

Methods

get_param – Method to get current parameter values

get_param()[source]

Method to get current parameter values.

Returns

list of params, or scalar param

Return type

Union[List[float], float]
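A minimal usage sketch of get_param (reusing LambdaState from the example above; assuming the scheduler's internal event index starts at 0, the printed value would be 1 * 0.9 ** 0 = 1.0):

scheduler = LambdaStateScheduler(
    param_name="param", lambda_obj=LambdaState(1, 0.9), create_new=True
)
# evaluates the user-defined callable at the scheduler's current event index
print(scheduler.get_param())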