diff --git a/mlp.py b/mlp.py
index c8822b9b0725354509ed1ab3349a70ad1e1791a8..1dde5c5401bf32a20945e961981e9d851addb68b 100644
--- a/mlp.py
+++ b/mlp.py
@@ -139,7 +139,7 @@ def run_mlp_training(data_train, labels_train, data_test, labels_test, d_h, lear
     d_in = data_train.shape[1]
     d_out = 10  #hard-coded here; could also use len(np.unique(labels_train))
 
-    #Random initialisation of weights
+    #Random initialisation of weights (Xavier initialisation)
     w1 = np.random.randn(d_in, d_h) / np.sqrt(d_in)
     b1 = np.zeros((1, d_h))
     w2 = np.random.randn(d_h, d_out) / np.sqrt(d_h)
@@ -175,7 +175,7 @@ if __name__ == '__main__':
     X_train, X_test, y_train, y_test = read_cifar.split_dataset(data, labels, 0.9)
     d_in, d_h, d_out = 3072, 64, 10
     learning_rate = 0.1
-    num_epoch = 100
+    num_epoch = 300
 
     # #Initialisation 
     # w1 = np.random.randn(d_in, d_h) / np.sqrt(d_in)