
# here: f = 2 * x

-# 0) Training samples, watch the shape!
-X = torch.tensor([[1], [2], [3], [4]], dtype=torch.float32)
-Y = torch.tensor([[2], [4], [6], [8]], dtype=torch.float32)
-
-n_samples, n_features = X.shape
-print(f'#samples: {n_samples}, #features: {n_features}')
-# 0) create a test sample
-X_test = torch.tensor([5], dtype=torch.float32)
-
-# 1) Design Model, the model has to implement the forward pass!
-# Here we can use a built-in model from PyTorch
-input_size = n_features
-output_size = n_features
-
-# we can call this model with samples X
-model = nn.Linear(input_size, output_size)
-
-'''
-class LinearRegression(nn.Module):
-    def __init__(self, input_dim, output_dim):
-        super(LinearRegression, self).__init__()
-        # define different layers
-        self.lin = nn.Linear(input_dim, output_dim)
-
-    def forward(self, x):
-        return self.lin(x)
-
-model = LinearRegression(input_size, output_size)
-'''
-
-print(f'Prediction before training: f(5) = {model(X_test).item():.3f}')
+# 0) Training samples
+X = torch.tensor([1, 2, 3, 4], dtype=torch.float32)
+Y = torch.tensor([2, 4, 6, 8], dtype=torch.float32)
+
+# 1) Design Model: Weights to optimize and forward function
+w = torch.tensor(0.0, dtype=torch.float32, requires_grad=True)
+
+def forward(x):
+    return w * x
+
+print(f'Prediction before training: f(5) = {forward(5).item():.3f}')

# 2) Define loss and optimizer
learning_rate = 0.01
n_iters = 100

+# callable function
loss = nn.MSELoss()
-optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate)
+
+optimizer = torch.optim.SGD([w], lr=learning_rate)

# 3) Training loop
for epoch in range(n_iters):
-    # predict = forward pass with our model
-    y_predicted = model(X)
+    # predict = forward pass
+    y_predicted = forward(X)

    # loss
    l = loss(Y, y_predicted)
@@ -70,7 +52,6 @@ def forward(self, x):
    optimizer.zero_grad()

    if epoch % 10 == 0:
-        [w, b] = model.parameters()  # unpack parameters
-        print('epoch ', epoch + 1, ': w = ', w[0][0].item(), ' loss = ', l)
+        print('epoch ', epoch + 1, ': w = ', w, ' loss = ', l)

-print(f'Prediction after training: f(5) = {model(X_test).item():.3f}')
+print(f'Prediction after training: f(5) = {forward(5).item():.3f}')
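
For reference, below is a minimal sketch of how the full script on the "+" side could read once both hunks are applied. The imports and the lines that fall between the two hunks (the backward pass and the optimizer step) are not shown in this diff, so `import torch`, `import torch.nn as nn`, `l.backward()`, and `optimizer.step()` are assumptions based on the standard PyTorch training pipeline rather than lines confirmed by the commit.

import torch
import torch.nn as nn

# here: f = 2 * x

# 0) Training samples
X = torch.tensor([1, 2, 3, 4], dtype=torch.float32)
Y = torch.tensor([2, 4, 6, 8], dtype=torch.float32)

# 1) Design Model: weight to optimize and forward function
w = torch.tensor(0.0, dtype=torch.float32, requires_grad=True)

def forward(x):
    return w * x

print(f'Prediction before training: f(5) = {forward(5).item():.3f}')

# 2) Define loss and optimizer
learning_rate = 0.01
n_iters = 100

loss = nn.MSELoss()                       # callable loss function
optimizer = torch.optim.SGD([w], lr=learning_rate)

# 3) Training loop
for epoch in range(n_iters):
    y_predicted = forward(X)              # predict = forward pass
    l = loss(Y, y_predicted)              # compute loss

    l.backward()                          # assumed: backward pass (dl/dw), elided between the hunks
    optimizer.step()                      # assumed: update w, elided between the hunks
    optimizer.zero_grad()                 # reset gradients before the next iteration

    if epoch % 10 == 0:
        # w and l are tensors here; .item() would print plain numbers instead
        print('epoch ', epoch + 1, ': w = ', w, ' loss = ', l)

print(f'Prediction after training: f(5) = {forward(5).item():.3f}')

The design change in this commit is that the optimizer is handed the bare tensor list `[w]` instead of `model.parameters()`; gradient tracking comes from `requires_grad=True` on `w`, while the loss, optimizer, and loop structure stay the same as in the removed `nn.Linear` version.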