DL 2.ipynb
def vectorize_sequences(sequences, dimension=5000):  # Function for vectorising data
    results = np.zeros((len(sequences), dimension))  # Create an all-zero matrix of shape (len(sequences), dimension)
    for i, sequence in enumerate(sequences):
        results[i, sequence] = 1.  # Set the indices present in the sequence to 1
    return results
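For illustration, a minimal sketch of what this multi-hot encoding produces on a toy input (the sequences and the small `dimension` here are made up for the example, not taken from the dataset):

import numpy as np

toy_sequences = [[1, 3], [0, 2, 3]]  # hypothetical word-index sequences
encoded = vectorize_sequences(toy_sequences, dimension=4)
print(encoded)
# [[0. 1. 0. 1.]
#  [1. 0. 1. 1.]]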
# Compiling Model
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['acc'])
start_time_m1 = time.time()
history = model.fit(partial_x_train,
                    partial_y_train,
                    epochs=20,
                    batch_size=512,
                    validation_data=(x_val, y_val))
total_time_m1 = time.time() - start_time_m1
print("The dense neural network (1 layer) took %.4f seconds to train." % (total_time_m1))
Epoch 1/20
30/30 [==============================] - 3s 79ms/step - loss: 0.5512 - acc: 0.7727 - val_loss: 0.3889 - val_acc: 0.8479
Epoch 2/20
30/30 [==============================] - 1s 45ms/step - loss: 0.2926 - acc: 0.8936 - val_loss: 0.2950 - val_acc: 0.8816
Epoch 3/20
30/30 [==============================] - 1s 31ms/step - loss: 0.2108 - acc: 0.9213 - val_loss: 0.2880 - val_acc: 0.8831
Epoch 4/20
30/30 [==============================] - 1s 32ms/step - loss: 0.1706 - acc: 0.9383 - val_loss: 0.3042 - val_acc: 0.8783
Epoch 5/20
30/30 [==============================] - 1s 31ms/step - loss: 0.1420 - acc: 0.9514 - val_loss: 0.3253 - val_acc: 0.8748
Epoch 6/20
30/30 [==============================] - 1s 34ms/step - loss: 0.1208 - acc: 0.9587 - val_loss: 0.3621 - val_acc: 0.8721
Epoch 7/20
30/30 [==============================] - 1s 35ms/step - loss: 0.1010 - acc: 0.9669 - val_loss: 0.3928 - val_acc: 0.8659
Epoch 8/20
30/30 [==============================] - 1s 33ms/step - loss: 0.0830 - acc: 0.9743 - val_loss: 0.4288 - val_acc: 0.8636
Epoch 9/20
30/30 [==============================] - 1s 32ms/step - loss: 0.0655 - acc: 0.9817 - val_loss: 0.4705 - val_acc: 0.8608
Epoch 10/20
30/30 [==============================] - 1s 30ms/step - loss: 0.0520 - acc: 0.9885 - val_loss: 0.5118 - val_acc: 0.8567
Epoch 11/20
30/30 [==============================] - 1s 31ms/step - loss: 0.0395 - acc: 0.9923 - val_loss: 0.5597 - val_acc: 0.8559
Epoch 12/20
30/30 [==============================] - 1s 31ms/step - loss: 0.0283 - acc: 0.9965 - val_loss: 0.6085 - val_acc: 0.8540
Epoch 13/20
30/30 [==============================] - 2s 52ms/step - loss: 0.0207 - acc: 0.9981 - val_loss: 0.6437 - val_acc: 0.8552
Epoch 14/20
30/30 [==============================] - 2s 56ms/step - loss: 0.0139 - acc: 0.9996 - val_loss: 0.6840 - val_acc: 0.8537
Epoch 15/20
30/30 [==============================] - 1s 31ms/step - loss: 0.0095 - acc: 0.9997 - val_loss: 0.7194 - val_acc: 0.8526
Epoch 16/20
30/30 [==============================] - 1s 33ms/step - loss: 0.0067 - acc: 0.9999 - val_loss: 0.7480 - val_acc: 0.8541
Epoch 17/20
30/30 [==============================] - 1s 33ms/step - loss: 0.0049 - acc: 0.9999 - val_loss: 0.7748 - val_acc: 0.8521
Epoch 18/20
30/30 [==============================] - 1s 33ms/step - loss: 0.0038 - acc: 1.0000 - val_loss: 0.7988 - val_acc: 0.8520
Epoch 19/20
30/30 [==============================] - 1s 31ms/step - loss: 0.0031 - acc: 1.0000 - val_loss: 0.8184 - val_acc: 0.8534
Epoch 20/20
30/30 [==============================] - 1s 33ms/step - loss: 0.0025 - acc: 1.0000 - val_loss: 0.8367 - val_acc: 0.8526
The dense neural network (1 layer) took 24.8242 seconds to train.
history_dict = history.history
history_dict.keys()
acc = history.history['acc']
val_acc = history.history['val_acc']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs = range(1, len(acc) + 1)
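Between this cell and the model summary, the original printout shows only figures, which did not survive the export. The plotting code presumably mirrored the model-2 plotting cells later in this notebook; a sketch of the loss plot under that assumption:

# Plotting Loss (mirrors the model-2 plotting cell below)
plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('DNN 1 layer Training and validation loss')
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.legend()
plt.show()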
# Model Summary
print(model.summary())
# Predictions
pred = model.predict(x_test)
# NOTE: pred has shape (n, 1) from a single sigmoid unit, so np.argmax along
# axis=1 always returns 0; this is what produces the degenerate confusion
# matrix below. A 0.5 threshold is the appropriate decision rule here.
classes_x = np.argmax(pred, axis=1)
accuracy_score(y_test, classes_x)
# Confusion Matrix
conf_mat = confusion_matrix(y_test, classes_x)
print(conf_mat)
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
dense (Dense) (None, 32) 160032
=================================================================
Total params: 161121 (629.38 KB)
Trainable params: 161121 (629.38 KB)
Non-trainable params: 0 (0.00 Byte)
_________________________________________________________________
None
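The cell that builds `model` is not visible in this printout. A hypothetical reconstruction consistent with the parameter counts in the summary above (5000-dim multi-hot input, two Dense(32) layers, one sigmoid output) would be:

from tensorflow import keras
from tensorflow.keras import layers

# Hypothetical reconstruction, consistent with the totals above:
#   5000*32 + 32 = 160,032 ; 32*32 + 32 = 1,056 ; 32*1 + 1 = 33
model = keras.Sequential([
    keras.Input(shape=(5000,)),
    layers.Dense(32, activation='relu'),
    layers.Dense(32, activation='relu'),
    layers.Dense(1, activation='sigmoid'),
])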
782/782 [==============================] - 3s 3ms/step
[[12500 0]
[12500 0]]
[Figure: confusion-matrix heatmap, x-axis 'Predicted label']
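The all-zero predictions above are the argmax artifact noted in the code comment: argmax over a single-column sigmoid output always yields class 0. A corrected sketch using a 0.5 threshold (assuming the model ends in one sigmoid unit):

from sklearn.metrics import accuracy_score, confusion_matrix

# Threshold the sigmoid probabilities instead of argmax-ing one column
classes_x = (pred > 0.5).astype(int).ravel()
print(accuracy_score(y_test, classes_x))
print(confusion_matrix(y_test, classes_x))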
# Compiling Model
model2.compile(optimizer='adam', loss='binary_crossentropy', metrics=['acc'])
start_time_m2 = time.time()
history = model2.fit(partial_x_train,
                     partial_y_train,
                     epochs=20,
                     batch_size=512,
                     validation_data=(x_val, y_val))
total_time_m2 = time.time() - start_time_m2
print("The dense neural network (2 layers) took %.4f seconds to train." % (total_time_m2))
Epoch 1/20
30/30 [==============================] - 3s 61ms/step - loss: 0.5641 - acc: 0.7379 - val_loss: 0.3708 - val_acc: 0.8590
Epoch 2/20
30/30 [==============================] - 1s 34ms/step - loss: 0.2742 - acc: 0.8943 - val_loss: 0.2890 - val_acc: 0.8842
Epoch 3/20
30/30 [==============================] - 1s 31ms/step - loss: 0.1906 - acc: 0.9304 - val_loss: 0.3050 - val_acc: 0.8804
Epoch 4/20
30/30 [==============================] - 1s 30ms/step - loss: 0.1464 - acc: 0.9475 - val_loss: 0.3439 - val_acc: 0.8716
Epoch 5/20
30/30 [==============================] - 1s 31ms/step - loss: 0.1126 - acc: 0.9623 - val_loss: 0.3771 - val_acc: 0.8685
Epoch 6/20
30/30 [==============================] - 1s 33ms/step - loss: 0.0817 - acc: 0.9743 - val_loss: 0.4260 - val_acc: 0.8664
Epoch 7/20
30/30 [==============================] - 1s 33ms/step - loss: 0.0506 - acc: 0.9870 - val_loss: 0.4980 - val_acc: 0.8636
Epoch 8/20
30/30 [==============================] - 1s 33ms/step - loss: 0.0286 - acc: 0.9951 - val_loss: 0.5630 - val_acc: 0.8620
Epoch 9/20
30/30 [==============================] - 2s 57ms/step - loss: 0.0141 - acc: 0.9989 - val_loss: 0.6416 - val_acc: 0.8596
Epoch 10/20
30/30 [==============================] - 1s 42ms/step - loss: 0.0074 - acc: 0.9997 - val_loss: 0.6919 - val_acc: 0.8603
Epoch 11/20
30/30 [==============================] - 1s 33ms/step - loss: 0.0040 - acc: 0.9999 - val_loss: 0.7355 - val_acc: 0.8590
Epoch 12/20
30/30 [==============================] - 1s 33ms/step - loss: 0.0025 - acc: 1.0000 - val_loss: 0.7728 - val_acc: 0.8603
Epoch 13/20
30/30 [==============================] - 1s 33ms/step - loss: 0.0017 - acc: 1.0000 - val_loss: 0.8039 - val_acc: 0.8594
Epoch 14/20
30/30 [==============================] - 1s 33ms/step - loss: 0.0013 - acc: 1.0000 - val_loss: 0.8279 - val_acc: 0.8599
Epoch 15/20
30/30 [==============================] - 1s 31ms/step - loss: 0.0010 - acc: 1.0000 - val_loss: 0.8495 - val_acc: 0.8607
Epoch 16/20
30/30 [==============================] - 1s 33ms/step - loss: 8.3106e-04 - acc: 1.0000 - val_loss: 0.8688 - val_acc: 0.8599
Epoch 17/20
30/30 [==============================] - 1s 34ms/step - loss: 6.9839e-04 - acc: 1.0000 - val_loss: 0.8877 - val_acc: 0.8594
Epoch 18/20
30/30 [==============================] - 1s 33ms/step - loss: 5.8774e-04 - acc: 1.0000 - val_loss: 0.9059 - val_acc: 0.8594
Epoch 19/20
30/30 [==============================] - 1s 33ms/step - loss: 5.0707e-04 - acc: 1.0000 - val_loss: 0.9190 - val_acc: 0.8598
Epoch 20/20
30/30 [==============================] - 1s 29ms/step - loss: 4.4136e-04 - acc: 1.0000 - val_loss: 0.9329 - val_acc: 0.8597
The dense neural network (2 layers) took 23.3940 seconds to train.
acc = history.history['acc']
val_acc = history.history['val_acc']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs = range(1, len(acc) + 1)
# Plotting Loss
plt.plot(epochs, loss, 'bo', label='Training loss') # "bo" is for "blue dot"
plt.plot(epochs, val_loss, 'b', label='Validation loss') # b is for "solid blue line"
plt.title('DNN 2 layer Training and validation loss')
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.legend()
plt.show()
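In both runs the validation loss bottoms out around epoch 2-3 and climbs steadily afterwards, a classic overfitting pattern. One common remedy (a sketch, not part of the original notebook) is Keras's EarlyStopping callback:

from tensorflow.keras.callbacks import EarlyStopping

# Stop once val_loss has not improved for 2 epochs; keep the best weights
early_stop = EarlyStopping(monitor='val_loss', patience=2, restore_best_weights=True)
history = model2.fit(partial_x_train, partial_y_train,
                     epochs=20, batch_size=512,
                     validation_data=(x_val, y_val),
                     callbacks=[early_stop])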
plt.clf()  # clear figure
# Plotting Accuracy (acc/val_acc already hold model 2's history from the cell above;
# the stale history_dict from model 1 should not be reused here)
plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('DNN 2 layer Training and validation accuracy')
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.legend()
plt.show()
print(model2.summary())
# Predictions
pred = model2.predict(x_test)
# NOTE: as with model 1, argmax over a single sigmoid column always yields
# class 0, which is why the accuracy below comes out at exactly 0.5.
classes_x = np.argmax(pred, axis=-1)
accuracy_score(y_test, classes_x)
Model: "sequential_1"
_________________________________________________________________
 Layer (type)                Output Shape              Param #
=================================================================
 dense_3 (Dense)             (None, 32)                160032
 dense_4 (Dense)             (None, 32)                1056
 dense_5 (Dense)             (None, 32)                1056
 dense_6 (Dense)             (None, 1)                 33
=================================================================
Total params: 162177 (633.50 KB)
Trainable params: 162177 (633.50 KB)
Non-trainable params: 0 (0.00 Byte)
_________________________________________________________________
None
782/782 [==============================] - 2s 2ms/step
0.5
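The 0.5 above is the argmax artifact flagged in the code comment, not the model's real test accuracy. Thresholding the sigmoid output (a sketch under the same single-sigmoid-unit assumption) would be expected to give a figure in line with the ~0.86 validation accuracy seen during training:

classes_x = (pred > 0.5).astype(int).ravel()
print(accuracy_score(y_test, classes_x))  # expected near the ~0.86 val_acc, not 0.5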