Learning rate decay in the Transformer
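
The Transformer ("Attention Is All You Need", Vaswani et al., 2017) increases the learning rate linearly for the first warmup_steps updates and then decays it with the inverse square root of the step number:

$$\mathrm{lrate} = d_{\mathrm{model}}^{-0.5} \cdot \min\left(\mathit{step}^{-0.5},\; \mathit{step} \cdot \mathit{warmup\_steps}^{-1.5}\right)$$

The cells below plot the shape of this schedule for different warm-up lengths.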

In [1]:
def learning_rate(model_dim, warmup_steps, x):
    # Linear warm-up for the first `warmup_steps` steps, then x ** -0.5 decay.
    # (The paper scales by model_dim ** -0.5; the sign only affects the overall scale of the curve.)
    return model_dim ** 0.5 * np.min([np.power(x, -0.5), x * np.power(warmup_steps, -1.5)], axis=0)
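The two branches of the min() intersect at x = warmup_steps, so that is where the schedule peaks, at model_dim ** 0.5 * warmup_steps ** -0.5. A quick numeric check (not part of the original notebook):
In [ ]:
# Peak value of the schedule for a few warm-up lengths (model_dim fixed to 600).
for warmup in (100, 200, 500, 1000):
    print(warmup, learning_rate(model_dim=600, warmup_steps=warmup, x=warmup),
          600 ** 0.5 * warmup ** -0.5)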
In [10]:
%matplotlib inline

from ipywidgets import *
import numpy as np
import matplotlib.pyplot as plt

x = np.arange(1, 10000, 10)  # start at step 1: step 0 would give 0 ** -0.5
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)

prev_model_dim = 300
prev_warmup_steps = 300

def update(model_dim=300, warmup_steps=100):
    global prev_model_dim, prev_warmup_steps
    # Previous slider setting in red, current setting in the default color.
    ax.scatter(x, learning_rate(model_dim=prev_model_dim, warmup_steps=prev_warmup_steps, x=x),
               marker=".", color="r", label="previous")
    ax.scatter(x, learning_rate(model_dim, warmup_steps, x), marker=".", label="current")
    prev_model_dim = model_dim
    prev_warmup_steps = warmup_steps
    plt.show()

interactive_plot = interactive(update)
interactive_plot
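With no explicit abbreviations, interactive builds the two sliders from the default values of update; to control their ranges, (min, max, step) tuples can be passed instead (an optional variation, not in the original notebook):
In [ ]:
# Same widget, with explicit slider ranges for both parameters.
interactive(update, model_dim=(100, 1000, 50), warmup_steps=(100, 2000, 100))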
In [5]:
%matplotlib inline

from ipywidgets import *
import numpy as np
import matplotlib.pyplot as plt

x = np.arange(1, 10000)  # start at step 1: step 0 would give 0 ** -0.5
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)

ax.scatter(x, learning_rate(model_dim=600, warmup_steps=100, x=x), marker=".", color="r", label="warmup 100")
ax.scatter(x, learning_rate(model_dim=600, warmup_steps=200, x=x), marker=".", color="c", label="warmup 200")
ax.scatter(x, learning_rate(model_dim=600, warmup_steps=500, x=x), marker=".", color="b", label="warmup 500")
ax.scatter(x, learning_rate(model_dim=600, warmup_steps=1000, x=x), marker=".", color="g", label="warmup 1000")

plt.xlim(0, 5000)
plt.ylim(0, 2.5)

plt.legend(loc='upper right');
plt.show()
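Worth noting about the comparison: once a curve is past its warm-up, min() always picks the x ** -0.5 branch, which does not depend on warmup_steps, so all four curves coincide after warm-up; a longer warm-up only lowers the peak and delays the start of the decay. A small check (not part of the original notebook):
In [ ]:
# Beyond the longest warm-up, the schedule is identical for all warm-up lengths.
print([learning_rate(model_dim=600, warmup_steps=w, x=5000) for w in (100, 200, 500, 1000)])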