From b1739e66a2cf4b5883bff2775fd05176b1d17a1e Mon Sep 17 00:00:00 2001
From: Aya SAIDI <aya.saidi@auditeur.ec-lyon.fr>
Date: Sun, 6 Nov 2022 17:17:40 +0100
Subject: [PATCH] Create mlp.py

---
 mlp.py | 21 +++++++++++++++++++++
 1 file changed, 21 insertions(+)
 create mode 100644 mlp.py

diff --git a/mlp.py b/mlp.py
new file mode 100644
index 0000000..f669758
--- /dev/null
+++ b/mlp.py
@@ -0,0 +1,21 @@
+import numpy as np
# Sigmoid ("segmoid") activation used by every layer of the network.
def segmoid(x):
    """Logistic sigmoid activation: maps any real input into (0, 1)."""
    return 1.0 / (1.0 + np.exp(-x))
+
# Derivative of the sigmoid, needed for the gradient (backpropagation) step.
def derivation(x):
    """Return the derivative of the sigmoid: sigma(x) * (1 - sigma(x)).

    Evaluates segmoid(x) once and reuses it; the original called it twice
    per invocation, doubling the cost for no reason.
    """
    s = segmoid(x)
    return s * (1 - s)
+
def learn_once_mse(w1, b1, w2, b2, data, targets, learning_rate):
    """Perform one gradient-descent step on the MSE loss of a 1-hidden-layer MLP.

    Parameters:
        w1, b1 -- weights and biases of the hidden layer
        w2, b2 -- weights and biases of the output layer
        data -- input matrix of shape (batch_size x d_in)
        targets -- target matrix of shape (batch_size x d_out)
        learning_rate -- step size of the gradient update

    Returns:
        (w1, b1, w2, b2, loss) -- updated parameters and the MSE loss of the
        forward pass. The original stopped after the forward pass ("Let
        calculate the partial derivates") and returned nothing; this completes
        the backward pass and the update.

    NOTE(review): assumes b1/b2 are shaped so they broadcast over the batch
    (row vectors or 1-D) -- the gradients are reshaped back to np.shape(b).
    """
    # Forward pass through the two sigmoid layers.
    a0 = data
    z1 = np.matmul(a0, w1) + b1
    a1 = segmoid(z1)
    z2 = np.matmul(a1, w2) + b2
    a2 = segmoid(z2)  # network prediction

    # Mean squared error over every entry of the output matrix.
    loss = np.mean(np.square(a2 - targets))

    # Backward pass (chain rule). On activations, sigmoid' == a * (1 - a).
    # d(mean((a2 - t)^2))/d(a2) = 2 * (a2 - t) / number_of_elements.
    dc_dz2 = (2.0 * (a2 - targets) / np.size(targets)) * a2 * (1 - a2)
    dc_dw2 = np.matmul(a1.T, dc_dz2)
    dc_db2 = dc_dz2.sum(axis=0).reshape(np.shape(b2))
    dc_dz1 = np.matmul(dc_dz2, w2.T) * a1 * (1 - a1)
    dc_dw1 = np.matmul(a0.T, dc_dz1)
    dc_db1 = dc_dz1.sum(axis=0).reshape(np.shape(b1))

    # Gradient-descent update (new arrays; the caller's inputs are not mutated).
    w1 = w1 - learning_rate * dc_dw1
    b1 = b1 - learning_rate * dc_db1
    w2 = w2 - learning_rate * dc_dw2
    b2 = b2 - learning_rate * dc_db2

    return w1, b1, w2, b2, loss
-- 
GitLab