from fastai.tabular.all import *
%matplotlib inline
# fastai v1 backward compatibility
import matplotlib.pyplot as plt
import torch
import torch.nn as nn
import numpy as np
Subplots
What I want is to display multiple plots, with a given maximum number of rows, and to lay the plots out depending only on these parameters.
def my_hidden_f(x):
    """Noisy ground truth: 4x^3 + 2x^2 - 12x + 5 plus uniform noise in [0, 10)."""
    deterministic = 4*x**3 + 2*x**2 - 12*x + 5
    return deterministic + 10*torch.rand(x.shape)
# Build the synthetic dataset: n timestamps in [-3.14, 3.14] with noisy speeds.
n = 100
time = torch.ones(n, 1)
time[:, 0].uniform_(-3.14, 3.14)
speed = my_hidden_f(time)

# Plot the samples, plus one highlighted evaluation at t = -1.5.
plt.scatter(time[:, 0], speed)
plt.scatter(tensor(-1.5), my_hidden_f(tensor([-1.5])), color='red')
def f(t, params):
    """Evaluate the cubic a*t^3 + b*t^2 + c*t + d, coefficients unpacked from `params`."""
    a, b, c, d = params
    cubic = a * (t**3)
    quadratic = b * t**2
    return cubic + quadratic + c*t + d
def mse(preds, targets): return ((preds-targets)**2).mean()
def show_preds(preds, ax=None):
    """Scatter the (time, speed) ground truth and the model predictions on `ax`.

    Creates a fresh axes when none is supplied. The y-axis is pinned to
    [-50, 150] so successive snapshots are visually comparable.
    NOTE(review): reads globals `time` and `speed` defined above.
    """
    if ax is None:
        ax = plt.subplots()[1]
    ax.scatter(time, speed)                      # ground truth
    ax.scatter(time, to_np(preds), color='red')  # predictions
    ax.set_ylim(-50, 150)
# Learning rate for the manual gradient-descent step below.
lr = 1e-4

def apply_step(params, prn=True):
    """Run one gradient-descent step on `params` and return the predictions.

    NOTE(review): relies on globals `time`, `speed`, `lr`, `f` and `mse`
    defined earlier in the notebook.
    """
    preds = f(time, params)
    loss = mse(preds, speed)
    loss.backward()
    params.data -= lr * params.grad.data
    params.grad = None
    if prn:
        print(loss.item())
    return preds
# load initial parameters
params = torch.randn(4).requires_grad_()
max_iter = 1000        # total gradient-descent iterations
nbr_graph = 4          # number of snapshot plots to show
max_columns = 5        # max snapshots on one row
max_rows = (nbr_graph - 1) // max_columns + 1   # rows needed for nbr_graph plots
graph_iteration = max_iter // (nbr_graph - 1)   # iterations between snapshots

_, axs = plt.subplots(nrows=max_rows, ncols=max_columns,
                      figsize=(3 * max_columns, 3 * max_rows))

def _snapshot_ax(iteration):
    """Return the axes for the snapshot taken after `iteration` iterations.

    With a single row, plt.subplots returns a 1-D array of axes, so the
    index must be a scalar rather than a (row, col) pair.  (This logic was
    previously duplicated inline, twice, with the 1-row special case.)
    """
    k = iteration // graph_iteration
    if max_rows == 1:
        return axs[k % max_columns]
    return axs[k // max_columns, k % max_columns]

# Initial snapshot: one silent step, then plot its predictions.
ax = _snapshot_ax(0)
show_preds(apply_step(params, prn=False), ax)
ax.set_title('iter 0')

for i in range(max_iter):
    preds = apply_step(params, prn=False)
    if (i + 1) % graph_iteration == 0:
        ax = _snapshot_ax(i + 1)
        show_preds(preds, ax)
        ax.set_title('iter ' + str(i + 1))
plt.tight_layout()
Animation
# (imports repeated below so this section runs standalone)
%matplotlib inline
# fastai v1 backward compatibility
import matplotlib.pyplot as plt
import torch
import torch.nn as nn
import numpy as np
def tensor(*argv): return torch.tensor(argv)
# TEST
result = tensor(1, 2)
assert torch.all(result == torch.tensor([1, 2])), 'Backward compatibility with fastai v1'
function and plot
# n sample points, drawn uniformly from [-3.14, 3.14].
n = 100
x = torch.ones(n, 1)
x.uniform_(-3.14, 3.14)
def my_function(x, a):
    """Evaluate the cubic with coefficient vector `a` at column vector `x`.

    Builds the feature matrix [x^3, x^2, x, 1] and matrix-multiplies by `a`,
    returning a flat vector of one value per row of `x`.
    Fixed: uses `torch.ones_like(x)` and `reshape(-1)` instead of the global
    `n`, so the function works for any number of input rows.
    """
    feats = torch.cat((x**3, x**2, x, torch.ones_like(x)), 1)
    return (feats @ a).reshape(-1)
# Ground-truth coefficients and targets.
a = tensor(4., 2., -12., 5.)
y = my_function(x, a)

# A deliberately wrong starting guess and its predictions.
a = tensor(-1., -2., 6., -8)
y_hat = my_function(x, a)

plt.scatter(x[:, 0], y)
plt.scatter(x[:, 0], y_hat);
def mse(y_hat, y): return ((y_hat-y)**2).mean()
gradient descent
# Wrap the current guess in a Parameter so autograd tracks its gradient.
a = nn.Parameter(a); a

def update():
    """One SGD step on the global `a`.

    NOTE(review): reads globals `x`, `y`, `lr`, `my_function`, `mse` and the
    loop variable `t` (printing the loss every 10th iteration).
    """
    y_hat = my_function(x, a)
    loss = mse(y, y_hat)
    if t % 10 == 0:
        print(loss)
    loss.backward()
    with torch.no_grad():
        a.sub_(lr * a.grad)
        a.grad.zero_()

lr = 1e-3
for t in range(100):
    update()
tensor(1967.0251, grad_fn=<MeanBackward0>)
tensor(559.2718, grad_fn=<MeanBackward0>)
tensor(365.7207, grad_fn=<MeanBackward0>)
tensor(282.6393, grad_fn=<MeanBackward0>)
tensor(245.4054, grad_fn=<MeanBackward0>)
tensor(227.3450, grad_fn=<MeanBackward0>)
tensor(217.3324, grad_fn=<MeanBackward0>)
tensor(210.7267, grad_fn=<MeanBackward0>)
tensor(205.5912, grad_fn=<MeanBackward0>)
tensor(201.1171, grad_fn=<MeanBackward0>)
animation
from matplotlib import animation, rc
# Render animations inline as interactive JavaScript/HTML.
rc('animation', html='jshtml')
# NOTE(review): this Parameter is immediately overwritten below; kept as-is.
a = nn.Parameter(tensor(-1., 1))

# Ground truth and the initial (wrong) guess, as before.
a = tensor(4., 2., -12., 5.)
y = my_function(x, a)

a = tensor(-1., -2., 6., -8)
y_hat = my_function(x, a)
a = nn.Parameter(a); a
fig = plt.figure()
plt.scatter(x[:, 0], y, c='orange')          # fixed ground-truth points
line = plt.scatter(x[:, 0], y_hat.detach())  # prediction points, updated per frame
plt.close()                                  # shown only through the animation
def animate(i):
    """Animation callback: redraw predictions, then take one training step."""
    coords = np.c_[x[:, 0], my_function(x, a).detach()]
    line.set_offsets(coords)
    update()
    return line,
0, 300), interval=5) animation.FuncAnimation(fig, animate, np.arange(