Corentin MASSALA
All the code can be found in the Python file read_cifar.py.
#### 2-
```python
import pickle
import numpy as np

def read_cifar_batch(file):
    # Load one CIFAR-10 batch file and return (data, labels) as numpy arrays
    with open(file, 'rb') as fo:
        batch = pickle.load(fo, encoding='bytes')
    return (np.array(batch[b'data']).astype('float32'),
            np.array(batch[b'labels']).astype('int64'))
```
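Each batch file is a pickled dictionary whose `b'data'` entry holds 10000 flattened 32×32 RGB images (shape 10000×3072) and whose `b'labels'` entry holds the 10000 class indices. A minimal check, assuming the archive is extracted under a hypothetical `./data/cifar-10-batches-py` directory:
```python
# Hypothetical path, for illustration only
data, labels = read_cifar_batch('./data/cifar-10-batches-py/data_batch_1')
print(data.shape, labels.shape)  # (10000, 3072) (10000,)
```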
#### 3-
```python
def read_cifar(path):
    data = []
    labels = []
    # Read the five training batches and the test batch, then stack everything
    for batch_name in ['data_batch_%d' % i for i in range(1, 6)] + ['test_batch']:
        batch_data, batch_labels = read_cifar_batch(path + '/' + batch_name)
        data.append(batch_data)
        labels.append(batch_labels)
    return np.concatenate(data), np.concatenate(labels)
```
To split the dataset, we use the `train_test_split` function from the scikit-learn library.
```python
from sklearn.model_selection import train_test_split

def split_dataset(data, labels, split):
    # split is the fraction of the data kept for training, e.g. 0.9
    X_train, X_test, y_train, y_test = train_test_split(
        data, labels, test_size=(1 - split), random_state=0)
    return X_train, X_test, y_train, y_test
```
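Putting the two together, a minimal usage sketch (the path and the 0.9 split are illustrative):
```python
data, labels = read_cifar('./data/cifar-10-batches-py')  # hypothetical path
X_train, X_test, y_train, y_test = split_dataset(data, labels, split=0.9)
print(X_train.shape, X_test.shape)  # 90% / 10% of the 60000 images
```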
All the code can be found in the Python file knn.py.
#### 1-
```python
import numpy as np

def distance_matrix(matrix1, matrix2):
    # X_test then X_train, in this order
    sum_of_squares_matrix1 = np.sum(np.square(matrix1), axis=1, keepdims=True)  # A^2, shape (n1, 1)
    sum_of_squares_matrix2 = np.sum(np.square(matrix2), axis=1, keepdims=True)  # B^2, shape (n2, 1)
    # (A - B)^2 = A^2 + B^2 - 2AB, expanded over every pair of rows
    dists = np.sqrt(sum_of_squares_matrix1 + sum_of_squares_matrix2.T - 2 * matrix1.dot(matrix2.T))
    return dists
```
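The vectorization rests on expanding the squared Euclidean distance: for a test row $a$ and a train row $b$,
```math
\|a - b\|^2 = \|a\|^2 + \|b\|^2 - 2\, a \cdot b
```
so all pairwise distances come from two row-wise sums of squares and a single matrix product, with no Python loop.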
#### 2-
```python
def knn_predict(dists, labels_train, k):
    output = []
    # Loop over all the test images
    for i in range(len(dists)):
        # Indices of the k nearest training images
        knn_indices = np.argsort(dists[i])[:k]
        # Majority vote among the labels of the k nearest neighbours
        knn_labels = labels_train[knn_indices]
        output.append(np.bincount(knn_labels).argmax())
    return np.array(output)
```
#### 3-
```python
def evaluate_knn(data_train, labels_train, data_test, labels_test, k):
    dist = distance_matrix(data_test, data_train)
    result_test = knn_predict(dist, labels_train, k)
    # Accuracy = fraction of test images whose label is predicted correctly
    accuracy = np.mean(result_test == labels_test)
    return accuracy
```
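For example, reusing the split from the sketch above (the value k=5 is arbitrary here):
```python
accuracy = evaluate_knn(X_train, y_train, X_test, y_test, k=5)
print(f'KNN accuracy for k=5: {accuracy:.4f}')
```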
#### 4-
```python
def bench_knn():
    # Benchmark the KNN accuracy for the odd values of k between 1 and 19
    k_indices = [i for i in range(20) if i % 2 != 0]
    # ... (the full benchmark loop and the accuracy plot are in knn.py)
```
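A sketch of what such a benchmark typically does, assuming it scores each k with `evaluate_knn` and plots accuracy against k with matplotlib (the function name and output file are illustrative, not the actual code in knn.py):
```python
import matplotlib.pyplot as plt

def bench_knn_sketch(data_train, labels_train, data_test, labels_test):
    # Hypothetical re-implementation for illustration only
    k_values = [k for k in range(20) if k % 2 != 0]
    accuracies = [evaluate_knn(data_train, labels_train, data_test, labels_test, k)
                  for k in k_values]
    plt.plot(k_values, accuracies, marker='o')
    plt.xlabel('k')
    plt.ylabel('accuracy')
    plt.title('KNN accuracy as a function of k')
    plt.savefig('knn.png')
```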
### Theory part
Here are all the answers for the theory of the backpropagation.
### Coding part
All the code can be found in the Python file mlp.py.
```python
import numpy as np

def learn_once_mse(w1, b1, w2, b2, data, targets, learning_rate):
    N_out = len(targets)  # number of training examples
    # ... (forward pass, MSE loss, backward pass and gradient-descent update in mlp.py)
```
#### 11-
```python
def one_hot(labels):
    # num_classes = np.max(labels) + 1 would be generic; we hardcode it here
    num_classes = 10
    # Row i of the identity matrix is the one-hot encoding of class i
    one_hot_matrix = np.eye(num_classes)[labels]
    return one_hot_matrix
```
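For example, class 2 becomes a ten-dimensional vector with a single 1 at index 2:
```python
print(one_hot(np.array([2])))
# [[0. 0. 1. 0. 0. 0. 0. 0. 0. 0.]]
```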
#### 12-
The cross-entropy loss is:
```python
def cross_entropy_loss(y_pred, y_true):
    # Mean cross-entropy between one-hot targets and predicted probabilities
    loss = -np.sum(y_true * np.log(y_pred)) / len(y_pred)
    return loss
```
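This computes the cross-entropy between the one-hot targets $y$ and the predicted probabilities $\hat{y}$, averaged over the N training examples:
```math
L = -\frac{1}{N}\sum_{i=1}^{N}\sum_{c=1}^{10} y_{i,c}\,\log \hat{y}_{i,c}
```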
The new learning function is:
```python
def learn_once_cross_entropy(w1, b1, w2, b2, data, labels_train, learning_rate):
    N_out = len(labels_train)  # number of training examples
    # ... (forward pass, cross-entropy loss, backward pass and update in mlp.py)
```
#### 13-
```python
def forward(w1, b1, w2, b2, data):
    # Forward pass
    a0 = data  # the data are the input of the first layer
    z1 = np.matmul(a0, w1) + b1  # input of the hidden layer
    a1 = 1 / (1 + np.exp(-z1))  # output of the hidden layer (sigmoid activation)
    z2 = np.matmul(a1, w2) + b2  # input of the output layer
    a2 = 1 / (1 + np.exp(-z2))  # output of the output layer (sigmoid activation)
    predictions = a2  # the predicted values are the outputs of the output layer
    return predictions
```
```python
def train_mlp(w1, b1, w2, b2, data_train, labels_train, learning_rate, num_epoch):
    train_accuracies = []
    for epoch in range(num_epoch):
        # One gradient-descent step over the training set
        w1, b1, w2, b2, loss = learn_once_cross_entropy(
            w1, b1, w2, b2, data_train, labels_train, learning_rate)
        # Track the training accuracy after each epoch
        predictions = forward(w1, b1, w2, b2, data_train)
        accuracy = np.mean(np.argmax(predictions, axis=1) == labels_train)
        train_accuracies.append(accuracy)
    return w1, b1, w2, b2, train_accuracies
```
#### 14-
```python
def test_mlp(w1, b1, w2, b2, data_test, labels_test):
    # Compute the accuracy on the test set
    predictions = forward(w1, b1, w2, b2, data_test)
    test_accuracy = np.mean(np.argmax(predictions, axis=1) == labels_test)
    return test_accuracy
```
#### 15-
```python
def run_mlp_training(data_train, labels_train, data_test, labels_test, d_h, learning_rate, num_epoch):
    d_in = data_train.shape[1]
    d_out = 10  # ten CIFAR-10 classes
    # ... (random weight initialization, then train_mlp and test_mlp; full code in mlp.py)
```
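End to end, a minimal sketch of how these pieces chain together, assuming `run_mlp_training` returns the per-epoch training accuracies and the final test accuracy (the path and hyperparameter values below are illustrative, not the ones studied in the report):
```python
data, labels = read_cifar('./data/cifar-10-batches-py')  # hypothetical path
X_train, X_test, y_train, y_test = split_dataset(data, labels, split=0.9)
train_accuracies, test_accuracy = run_mlp_training(
    X_train, y_train, X_test, y_test,
    d_h=64, learning_rate=0.1, num_epoch=100)
print(f'Final test accuracy: {test_accuracy:.4f}')
```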