Skip to content
Snippets Groups Projects
Commit 6c0047a0 authored by Audard Lucile's avatar Audard Lucile
Browse files

Update mlp.py

parent d6759854
Branches
No related tags found
No related merge requests found
import numpy as np
def sigmoid(x):
    """Logistic sigmoid 1 / (1 + e^(-x)), applied elementwise to x."""
    neg_exp = np.exp(-x)
    return 1 / (1 + neg_exp)
def learn_once_mse(w1, b1, w2, b2, data, targets, learning_rate):
    """Perform one gradient-descent step of a 1-hidden-layer MLP under MSE loss.

    Both layers use the sigmoid activation.

    Args:
        w1: hidden-layer weights, shape (d_in, d_h).
        b1: hidden-layer biases, shape (1, d_h).
        w2: output-layer weights, shape (d_h, d_out).
        b2: output-layer biases, shape (1, d_out).
        data: input batch, shape (N, d_in).
        targets: target outputs, shape (N, d_out).
        learning_rate: gradient-descent step size.

    Returns:
        Tuple (w1, b1, w2, b2, loss): the updated parameters and the MSE loss
        computed on the forward pass *before* the update.
    """
    N = len(targets)  # batch size

    # Forward pass
    a0 = data  # the data are the input of the first layer
    z1 = np.matmul(a0, w1) + b1  # input of the hidden layer
    a1 = 1 / (1 + np.exp(-z1))  # output of the hidden layer (sigmoid)
    z2 = np.matmul(a1, w2) + b2  # input of the output layer
    a2 = 1 / (1 + np.exp(-z2))  # output of the output layer (sigmoid)
    predictions = a2  # the predicted values are the outputs of the output layer

    # Compute loss (MSE)
    loss = np.mean(np.square(predictions - targets))

    # Backward pass.
    # d(MSE)/d(a2) = 2/N * (a2 - targets)   [the original used (1 - targets), which is wrong]
    d_a2 = 2 / N * (a2 - targets)
    d_z2 = d_a2 * a2 * (1 - a2)  # sigmoid' (z2) = a2 * (1 - a2)
    d_w2 = np.matmul(a1.T, d_z2)
    # Bias gradients are summed over the batch axis so they keep the (1, d) shape.
    d_b2 = np.sum(d_z2, axis=0, keepdims=True)
    d_a1 = np.matmul(d_z2, w2.T)
    d_z1 = d_a1 * a1 * (1 - a1)
    d_w1 = np.matmul(a0.T, d_z1)
    d_b1 = np.sum(d_z1, axis=0, keepdims=True)

    # Gradient-descent update (the original computed gradients but never applied them,
    # leaving learning_rate unused and returning the parameters unchanged).
    w1 = w1 - learning_rate * d_w1
    b1 = b1 - learning_rate * d_b1
    w2 = w2 - learning_rate * d_w2
    b2 = b2 - learning_rate * d_b2

    return w1, b1, w2, b2, loss
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment