Commit 6d83eff1 authored by selalimi

Update readme

parent 4487092d
@@ -3,5 +3,5 @@ image/*
_pycache_/*
plotting.py
image-classification/
cifar.PNG
@@ -57,7 +57,17 @@ X, y = rc.read_cifar('data')
# Split the Dataset
X_train, y_train, X_test, y_test = rc.split_dataset(X, y, split=0.9)
```
2. Run the KNN code
```python
import knn
knn.plot_KNN(X_train, y_train, X_test, y_test)
```
3. Run the ANN (MLP) code (a combined sketch of steps 1-3 follows this list)
```python
import mlp
mlp.plot_ANN(X_train, y_train, X_test, y_test)
```
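The three steps above can be chained into a single script. A minimal sketch, assuming the loader module is named `read_cifar` and imported as `rc` (only the alias `rc` appears in the hunk shown; the import lines themselves are outside this diff):

```python
# Hypothetical end-to-end run; the module name `read_cifar` is an assumption,
# only `rc.read_cifar`, `rc.split_dataset`, `knn.plot_KNN` and `mlp.plot_ANN`
# appear in this commit's README.
import read_cifar as rc
import knn
import mlp

# Load the CIFAR data and split it 90% train / 10% test, as in step 1.
X, y = rc.read_cifar('data')
X_train, y_train, X_test, y_test = rc.split_dataset(X, y, split=0.9)

# Step 2: k-nearest-neighbours accuracy plot.
knn.plot_KNN(X_train, y_train, X_test, y_test)

# Step 3: MLP (ANN) accuracy plot.
mlp.plot_ANN(X_train, y_train, X_test, y_test)
```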
## Results:
### Generating the Graph
1. Results using KNN:
......
cifar.PNG 0 → 100644 (new binary image, 324 KiB)
@@ -94,7 +94,7 @@ def learn_once_mse(W1, b1, W2, b2, data, targets, learning_rate):
    # Update weights and biases of the output layer
    W2 = W2 - learning_rate * np.dot(hidden_layer_output.T, output_layer_gradients) / data.shape[0]
-   b2 = b2 - learning_rate * (1 / hidden_layer_output.shape[1]) * output_layer_gradients.sum(axis=0)
+   b2 = b2 - learning_rate * (1 / hidden_layer_output.shape[1]) * output_layer_gradients.sum(axis=0, keepdims=True)
    # Calculate the error at the hidden layer
    hidden_layer_error = np.dot(output_layer_gradients, W2.T)
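For context on the `keepdims=True` change above: summing the per-sample gradients over the batch axis with `sum(axis=0)` returns a 1-D array of shape `(d_out,)`, while `keepdims=True` returns a `(1, d_out)` row vector. If `b2` is stored as a row vector, which this commit suggests, the update then matches its shape explicitly instead of relying on broadcasting. A small NumPy sketch with toy shapes (not the repository's real dimensions):

```python
import numpy as np

batch_size, d_out = 4, 3  # toy sizes, for illustration only
output_layer_gradients = np.random.randn(batch_size, d_out)

flat = output_layer_gradients.sum(axis=0)                 # shape (3,)
kept = output_layer_gradients.sum(axis=0, keepdims=True)  # shape (1, 3)

b2 = np.zeros((1, d_out))  # bias as a (1, d_out) row vector (assumed layout)
print((b2 - 0.1 * flat).shape)  # (1, 3) -- works, but only through broadcasting
print((b2 - 0.1 * kept).shape)  # (1, 3) -- shapes agree without broadcasting
```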
@@ -291,11 +291,11 @@ def run_mlp_training(X_train, labels_train, data_test, labels_test, num_hidden_u
    - train_accuracies: List of training accuracies across epochs.
    - test_accuracy: The final testing accuracy.
    """
-   input_dimension = X_train.shape[1]
-   output_dimension = np.unique(labels_train).shape[0]  # Number of classes
+   #input_dimension = X_train.shape[1]
+   #output_dimension = np.unique(labels_train).shape[0]  # Number of classes
    # Initialize weights and biases
-   W1, b1, W2, b2 = initialization(input_dimension, num_hidden_units, output_dimension)
+   W1, b1, W2, b2 = initialization(d_in, d_h, d_out)
    train_accuracies = []  # List to store training accuracies
......
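A note on the second change in this hunk: the input and output dimensions are no longer computed inside `run_mlp_training`; instead `initialization` is called with `d_in`, `d_h` and `d_out`, which must therefore be defined before this call. Neither their definition nor the body of `initialization` is visible in the lines shown, so the following is only a plausible sketch under those assumptions:

```python
import numpy as np

def initialization(d_in, d_h, d_out):
    # Assumed one-hidden-layer setup: small random weights, biases kept as
    # (1, n) row vectors, consistent with the keepdims fix earlier in this commit.
    W1 = np.random.randn(d_in, d_h) * 0.01
    b1 = np.zeros((1, d_h))
    W2 = np.random.randn(d_h, d_out) * 0.01
    b2 = np.zeros((1, d_out))
    return W1, b1, W2, b2

# Plausible origins of the dimensions (mirroring the commented-out lines above):
# d_in  = X_train.shape[1]                   # e.g. 3072 for flattened 32x32x3 CIFAR images
# d_h   = num_hidden_units
# d_out = np.unique(labels_train).shape[0]   # number of classes
```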