Unverified Commit f196ea84 authored by Jangberry (Nomad-Debian)

What ?

parent 91871cd9
Source diff for another file in this commit could not be displayed: it is too large.
 from matplotlib import pyplot as plt
 import numpy as np
 import read_cifar
@@ -242,48 +243,52 @@ def run_mlp_training(data_train, labels_train, data_test, labels_test, learning_
 if __name__ == "__main__":
-    N = 30 # number of input data
-    d_in = 3 # input dimension
-    d_h = 3 # number of neurons in the hidden layer
-    d_out = 2 # output dimension (number of neurons of the output layer)
+    # N = 30 # number of input data
+    # d_in = 3 # input dimension
+    # d_h = 3 # number of neurons in the hidden layer
+    # d_out = 2 # output dimension (number of neurons of the output layer)
 
-    # Random initialization of the network weights and biases
-    w1 = 2 * np.random.rand(d_in, d_h) - 1 # first layer weights
-    b1 = np.zeros((1, d_h)) # first layer biases
-    w2 = 2 * np.random.rand(d_h, d_out) - 1 # second layer weights
-    b2 = np.zeros((1, d_out)) # second layer biases
+    # # Random initialization of the network weights and biases
+    # w1 = 2 * np.random.rand(d_in, d_h) - 1 # first layer weights
+    # b1 = np.zeros((1, d_h)) # first layer biases
+    # w2 = 2 * np.random.rand(d_h, d_out) - 1 # second layer weights
+    # b2 = np.zeros((1, d_out)) # second layer biases
 
-    data = np.random.rand(N, d_in) # create random data
-    targets = np.random.rand(N, d_out) # create random targets
+    # data = np.random.rand(N, d_in) # create random data
+    # targets = np.random.rand(N, d_out) # create random targets
 
-    for i in range(100):
-        w1, b1, w2, b2, loss = learn_once_mse(
-            w1, b1, w2, b2, data, targets, 0.1)
-        print(loss)
+    # for i in range(100):
+    #     w1, b1, w2, b2, loss = learn_once_mse(
+    #         w1, b1, w2, b2, data, targets, 0.1)
+    #     print(loss)
 
-    print(one_hot(np.array([9, 1, 3, 0, 6, 5, 2, 7, 8, 4])))
+    # print(one_hot(np.array([9, 1, 3, 0, 6, 5, 2, 7, 8, 4])))
 
-    N = 30 # number of input data
-    d_in = 3 # input dimension
-    d_h = 3 # number of neurons in the hidden layer
-    d_out = 5 # output dimension (number of neurons of the output layer)
+    # N = 30 # number of input data
+    # d_in = 3 # input dimension
+    # d_h = 3 # number of neurons in the hidden layer
+    # d_out = 5 # output dimension (number of neurons of the output layer)
 
-    w1 = 2 * np.random.rand(d_in, d_h) - 1 # first layer weights
-    b1 = np.zeros((1, d_h)) # first layer biases
-    w2 = 2 * np.random.rand(d_h, d_out) - 1 # second layer weights
-    b2 = np.zeros((1, d_out)) # second layer biases
+    # w1 = 2 * np.random.rand(d_in, d_h) - 1 # first layer weights
+    # b1 = np.zeros((1, d_h)) # first layer biases
+    # w2 = 2 * np.random.rand(d_h, d_out) - 1 # second layer weights
+    # b2 = np.zeros((1, d_out)) # second layer biases
 
-    data = np.random.rand(N, d_in) # create random data
-    targets = np.random.randint(1, d_out, N) # create random targets
+    # data = np.random.rand(N, d_in) # create random data
+    # targets = np.random.randint(1, d_out, N) # create random targets
 
-    for i in range(100):
-        w1, b1, w2, b2, loss = learn_once_cross_entropy(
-            w1, b1, w2, b2, data, targets, 0.1)
-        print(loss)
+    # for i in range(100):
+    #     w1, b1, w2, b2, loss = learn_once_cross_entropy(
+    #         w1, b1, w2, b2, data, targets, 0.1)
+    #     print(loss)
 
     data, labels = read_cifar.read_cifar("data/cifar-10-batches-py/")
     data_train, labels_train, data_test, labels_test = read_cifar.split_dataset(
         data, labels, 0.8)
     w1, b1, w2, b2, acc = run_mlp_training(
-        data_train, labels_train, data_test, labels_test, 0.1, 100)
+        data_train, labels_train, data_test, labels_test, 0.01, 100)
    print(acc)
    fig = plt.figure()
    plt.plot(acc)
    fig.savefig("results/acc.png")
    plt.show()
\ No newline at end of file
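
The commented-out experiment above calls learn_once_mse, which is defined earlier in the file and is not part of this diff. For context, here is a minimal sketch of such a one-step MSE update, assuming sigmoid activations on both layers and the (w1, b1, w2, b2, data, targets, learning_rate) signature used at the call site; it is an illustration consistent with the call, not the committed implementation.

import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def learn_once_mse(w1, b1, w2, b2, data, targets, learning_rate):
    # Forward pass: input -> sigmoid hidden layer -> sigmoid output layer
    z1 = data @ w1 + b1   # (N, d_h)
    a1 = sigmoid(z1)
    z2 = a1 @ w2 + b2     # (N, d_out)
    a2 = sigmoid(z2)
    loss = np.mean((a2 - targets) ** 2)

    # Backward pass: gradients of the mean squared error
    d_z2 = 2.0 * (a2 - targets) / a2.size * a2 * (1 - a2)
    d_w2 = a1.T @ d_z2
    d_b2 = d_z2.sum(axis=0, keepdims=True)
    d_z1 = (d_z2 @ w2.T) * a1 * (1 - a1)
    d_w1 = data.T @ d_z1
    d_b1 = d_z1.sum(axis=0, keepdims=True)

    # One gradient-descent step
    w1 -= learning_rate * d_w1
    b1 -= learning_rate * d_b1
    w2 -= learning_rate * d_w2
    b2 -= learning_rate * d_b2
    return w1, b1, w2, b2, loss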
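The second commented-out smoke test prints one_hot on a permutation of the digits 0 through 9. A plausible sketch follows; the optional num_classes parameter is my addition for reuse below (the real helper may hard-code 10 for CIFAR-10), and the class count otherwise falls back to max label + 1.

def one_hot(labels, num_classes=None):
    # Encode integer labels as rows with a single 1 at the label index
    if num_classes is None:
        num_classes = int(labels.max()) + 1
    encoded = np.zeros((labels.size, num_classes))
    encoded[np.arange(labels.size), labels] = 1.0
    return encoded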
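learn_once_cross_entropy is exercised the same way, but with integer targets. A sketch under the assumption of a softmax output layer over one-hot targets, reusing sigmoid and one_hot from the sketches above; it relies on the standard simplification that the gradient of cross-entropy through softmax collapses to a2 - y.

def softmax(x):
    e = np.exp(x - x.max(axis=1, keepdims=True))  # shift for numerical stability
    return e / e.sum(axis=1, keepdims=True)

def learn_once_cross_entropy(w1, b1, w2, b2, data, labels_train, learning_rate):
    n = data.shape[0]
    y = one_hot(labels_train, w2.shape[1])  # match the output width d_out

    # Forward pass: sigmoid hidden layer, softmax output layer
    a1 = sigmoid(data @ w1 + b1)
    a2 = softmax(a1 @ w2 + b2)
    loss = -np.mean(np.sum(y * np.log(a2 + 1e-12), axis=1))

    # Backward pass: softmax + cross-entropy gives d_z2 = (a2 - y) / n
    d_z2 = (a2 - y) / n
    d_w2 = a1.T @ d_z2
    d_b2 = d_z2.sum(axis=0, keepdims=True)
    d_z1 = (d_z2 @ w2.T) * a1 * (1 - a1)
    d_w1 = data.T @ d_z1
    d_b1 = d_z1.sum(axis=0, keepdims=True)

    w1 -= learning_rate * d_w1
    b1 -= learning_rate * d_b1
    w2 -= learning_rate * d_w2
    b2 -= learning_rate * d_b2
    return w1, b1, w2, b2, loss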
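The live code path loads CIFAR-10 through the repository's read_cifar module and splits it 80/20. split_dataset is not part of this diff either; a sketch assuming it shuffles before splitting, with the third argument as the train fraction:

def split_dataset(data, labels, split):
    # Shuffle once so train and test are drawn from the same distribution
    indices = np.random.permutation(data.shape[0])
    cut = int(split * data.shape[0])
    train_idx, test_idx = indices[:cut], indices[cut:]
    return data[train_idx], labels[train_idx], data[test_idx], labels[test_idx]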
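The one substantive change in the hunk lowers the learning rate passed to run_mlp_training from 0.1 to 0.01; a smaller step size generally stabilises gradient descent on a sigmoid network, which is a plausible motivation. The function itself is defined at the line referenced by the hunk header; the sketch below is only consistent with the call site, where acc must be a per-epoch sequence since it is plotted afterwards. The hidden width d_h and the per-epoch evaluation are my assumptions, and it reuses sigmoid and learn_once_cross_entropy from the sketches above.

def run_mlp_training(data_train, labels_train, data_test, labels_test,
                     learning_rate, num_epoch, d_h=64):
    d_in = data_train.shape[1]
    d_out = int(labels_train.max()) + 1  # 10 classes for CIFAR-10

    # Same uniform [-1, 1) initialisation as the commented-out experiments
    w1 = 2 * np.random.rand(d_in, d_h) - 1
    b1 = np.zeros((1, d_h))
    w2 = 2 * np.random.rand(d_h, d_out) - 1
    b2 = np.zeros((1, d_out))

    accuracies = []
    for epoch in range(num_epoch):
        w1, b1, w2, b2, loss = learn_once_cross_entropy(
            w1, b1, w2, b2, data_train, labels_train, learning_rate)
        # Test accuracy after each epoch, so the caller can plot the curve
        a1 = sigmoid(data_test @ w1 + b1)
        predictions = np.argmax(a1 @ w2 + b2, axis=1)
        accuracies.append(np.mean(predictions == labels_test))
    return w1, b1, w2, b2, accuracies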
results/acc.png (19.3 KiB): the accuracy curve written by fig.savefig above; binary file, no diff shown.
