feat: add sigmoid learning rate scheduler
Add a new 'sigmoid' learning rate adjustment method to adjust_learning_rate in utils/tools.py. The schedule is a fast logistic warm-up term minus a slower logistic decay term, giving a smooth ramp-up and a gradual decay for better training convergence.
@@ -22,6 +22,11 @@ def adjust_learning_rate(optimizer, epoch, args):
         lr_adjust = {epoch: args.learning_rate if epoch < 3 else args.learning_rate * (0.9 ** ((epoch - 3) // 1))}
     elif args.lradj == "cosine":
         lr_adjust = {epoch: args.learning_rate /2 * (1 + math.cos(epoch / args.train_epochs * math.pi))}
+    elif args.lradj == 'sigmoid':
+        k = 0.5  # logistic growth rate
+        s = 10  # decreasing curve smoothing rate
+        w = 10  # warm-up coefficient
+        lr_adjust = {epoch: args.learning_rate / (1 + np.exp(-k * (epoch - w))) - args.learning_rate / (1 + np.exp(-k/s * (epoch - w*s)))}
     if epoch in lr_adjust.keys():
         lr = lr_adjust[epoch]
         for param_group in optimizer.param_groups:
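The added branch computes the rate as the difference of two sigmoids: the first rises toward args.learning_rate around the warm-up point w, and the second, with the smaller growth rate k/s, pulls it back down around epoch w*s. Below is a minimal standalone sketch of that behavior; sigmoid_lr, base_lr, and the printed epoch values are illustrative names and numbers, not part of the commit, while k, s, and w mirror the committed defaults.

import numpy as np

def sigmoid_lr(epoch, base_lr=1e-3, k=0.5, s=10, w=10):
    # Fast logistic warm-up centered at epoch w ...
    warmup = base_lr / (1 + np.exp(-k * (epoch - w)))
    # ... minus a slower logistic decay (growth rate k/s) centered at epoch w*s.
    decay = base_lr / (1 + np.exp(-k / s * (epoch - w * s)))
    return warmup - decay

for epoch in (1, 10, 20, 100, 200):
    print(f"epoch {epoch:3d}: lr = {sigmoid_lr(epoch):.6f}")

With these defaults the rate stays near zero for the first few epochs, climbs to roughly the base rate by epoch 20, and tapers back toward zero once the second sigmoid saturates past epoch w*s.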