torch.optim

| API name | Supported | Limitations and notes |
| --- | --- | --- |
| Optimizer.add_param_group |  |  |
| Optimizer.load_state_dict |  |  |
| Optimizer.state_dict |  |  |
| Optimizer.step |  |  |
| Optimizer.zero_grad |  |  |
| torch.optim.Adadelta |  | Supports fp16, fp32 |
| torch.optim.Adadelta.add_param_group |  |  |
| torch.optim.Adadelta.load_state_dict |  |  |
| torch.optim.Adadelta.state_dict |  |  |
| torch.optim.Adadelta.step |  |  |
| torch.optim.Adadelta.zero_grad |  |  |
| torch.optim.Adagrad |  | Supports fp16, fp32 |
| torch.optim.Adagrad.add_param_group |  |  |
| torch.optim.Adagrad.load_state_dict |  |  |
| torch.optim.Adagrad.state_dict |  |  |
| torch.optim.Adagrad.step |  |  |
| torch.optim.Adagrad.zero_grad |  |  |
| torch.optim.Adam |  | Supports fp16, fp32 |
| torch.optim.Adam.add_param_group |  |  |
| torch.optim.Adam.load_state_dict |  |  |
| torch.optim.Adam.state_dict |  |  |
| torch.optim.Adam.step |  |  |
| torch.optim.Adam.zero_grad |  |  |
| torch.optim.AdamW |  | Supports fp16, fp32 |
| torch.optim.Adamax |  |  |
| torch.optim.RMSprop |  |  |
| torch.optim.SGD |  | Supports fp16, fp32 |
| torch.optim.lr_scheduler.LambdaLR |  |  |
| torch.optim.lr_scheduler.MultiplicativeLR |  |  |
| torch.optim.lr_scheduler.StepLR |  |  |
| torch.optim.lr_scheduler.MultiStepLR |  |  |
| torch.optim.lr_scheduler.LinearLR.get_last_lr |  |  |
| torch.optim.lr_scheduler.LinearLR.load_state_dict |  |  |
| torch.optim.lr_scheduler.LinearLR.print_lr |  |  |
| torch.optim.lr_scheduler.LinearLR.state_dict |  |  |
| torch.optim.lr_scheduler.ExponentialLR |  |  |
| torch.optim.lr_scheduler.ExponentialLR.get_last_lr |  |  |
| torch.optim.lr_scheduler.ExponentialLR.load_state_dict |  |  |
| torch.optim.lr_scheduler.ExponentialLR.print_lr |  |  |
| torch.optim.lr_scheduler.ExponentialLR.state_dict |  |  |
| torch.optim.lr_scheduler.CosineAnnealingLR |  |  |
| torch.optim.lr_scheduler.CosineAnnealingLR.get_last_lr |  |  |
| torch.optim.lr_scheduler.CosineAnnealingLR.load_state_dict |  |  |
| torch.optim.lr_scheduler.CosineAnnealingLR.print_lr |  |  |
| torch.optim.lr_scheduler.CosineAnnealingLR.state_dict |  |  |
| torch.optim.lr_scheduler.ReduceLROnPlateau |  |  |
| torch.optim.lr_scheduler.OneCycleLR |  |  |
| torch.optim.lr_scheduler.CosineAnnealingWarmRestarts |  |  |
| torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.get_last_lr |  |  |
| torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.load_state_dict |  |  |
| torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.print_lr |  |  |
| torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.state_dict |  |  |
| torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.step |  |  |
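
The optimizers and schedulers in the table share the standard torch.optim workflow. The sketch below is a minimal, generic illustration of how the listed methods (zero_grad, step, state_dict, load_state_dict, add_param_group) fit together with an lr_scheduler; the model, data, and choice of Adam and CosineAnnealingLR are placeholders, and it uses default fp32 tensors rather than the fp16 paths noted above.

```python
# Minimal sketch: standard torch.optim usage, not specific to any backend.
import torch
import torch.nn as nn
import torch.optim as optim

model = nn.Linear(16, 4)                       # tiny placeholder model
optimizer = optim.Adam(model.parameters(), lr=1e-3)
scheduler = optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=10)

for epoch in range(10):
    inputs = torch.randn(8, 16)                # placeholder batch
    targets = torch.randn(8, 4)

    optimizer.zero_grad()                      # Optimizer.zero_grad
    loss = nn.functional.mse_loss(model(inputs), targets)
    loss.backward()
    optimizer.step()                           # Optimizer.step
    scheduler.step()                           # advance the schedule once per epoch

# state_dict / load_state_dict round-trip, as listed in the table.
state = optimizer.state_dict()
optimizer.load_state_dict(state)

# add_param_group: register extra parameters after the optimizer is built.
extra = nn.Linear(4, 2)
optimizer.add_param_group({"params": extra.parameters(), "lr": 1e-4})
```

The same loop structure applies to the other optimizers (Adadelta, Adagrad, AdamW, SGD, ...) and schedulers (StepLR, ExponentialLR, ReduceLROnPlateau, ...) in the table; only the constructor arguments differ.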