cerebras.modelzoo.config_manager.config_classes.base.optimizer_config.OptimizerConfig#

class cerebras.modelzoo.config_manager.config_classes.base.optimizer_config.OptimizerConfig(**kwargs)[source]#
optimizer_type: str#

Optimizer to be used. Required; has no default. See the list of supported optimizers: https://docs.cerebras.net/en/latest/pytorch-docs/pytorch-ops/supported-pytorch-optimizers.html
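
A minimal construction sketch; the values are illustrative, and "AdamW" is assumed to be one of the names on the supported-optimizers page linked above. For example:

    from cerebras.modelzoo.config_manager.config_classes.base.optimizer_config import (
        OptimizerConfig,
    )

    # optimizer_type is required; the remaining fields fall back to their defaults.
    opt_config = OptimizerConfig(
        optimizer_type="AdamW",  # assumed name; check the supported-optimizers page
        weight_decay=0.01,
    )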

weight_decay: float = 0.0#

Weight decay (L2 penalty) rate applied by the optimizer. Defaults to 0.0.

log_summaries: bool = False#

Flag to log per-layer gradient norms to TensorBoard. Defaults to False.

loss_scaling_factor: Union[str, float] = 1.0#

Loss scaling factor used for mixed precision training. A float selects static loss scaling; the string "dynamic" selects dynamic loss scaling. Defaults to 1.0.

learning_rate: Optional[Union[float, List[dict]]] = None#

Learning rate to be used. Either a constant float or a list of learning rate scheduler dicts. See supported LR schedulers: https://docs.cerebras.net/en/latest/pytorch-docs/pytorch-ops/supported-pt-learning-rate-schedulers.html. Optional; defaults to None.
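
A sketch of both forms. The scheduler keys below (scheduler, initial_learning_rate, end_learning_rate, total_iters) follow the pattern described on the linked schedulers page but are illustrative; check that page for the exact keys accepted by each scheduler. For example:

    # Constant learning rate: a single float.
    learning_rate = 5.0e-5

    # Piecewise schedule: linear warmup for 1,000 steps, then linear decay.
    learning_rate = [
        {
            "scheduler": "Linear",
            "initial_learning_rate": 0.0,
            "end_learning_rate": 1.0e-4,
            "total_iters": 1000,
        },
        {
            "scheduler": "Linear",
            "initial_learning_rate": 1.0e-4,
            "end_learning_rate": 0.0,
            "total_iters": 9000,
        },
    ]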

max_gradient_norm: Optional[float] = None#

Max norm of the gradients for learnable parameters. Used for gradient clipping. Defaults to None.

adjust_learning_rate: Optional[dict] = None#
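
Putting several of the fields above together; again a sketch with illustrative values, assuming "AdamW" is a supported optimizer name. For example:

    opt_config = OptimizerConfig(
        optimizer_type="AdamW",         # assumed optimizer name
        weight_decay=0.01,
        learning_rate=5.0e-5,           # constant LR; a scheduler list also works
        max_gradient_norm=1.0,          # clip gradients to a max norm of 1.0
        loss_scaling_factor="dynamic",  # dynamic loss scaling; see the field above
        log_summaries=True,             # log per-layer gradient norms to TensorBoard
    )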