ML - Lab-7.ipynb - Colab

EXPERIMENT-7

Program to implement the K-nearest neighbour algorithm to classify the Iris dataset. Print both correct and wrong predictions.
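The cells below actually implement Gaussian Naive Bayes on the Social_Network_Ads data rather than KNN on Iris. For reference, a minimal sketch of the stated KNN-on-Iris task (assuming scikit-learn's bundled load_iris dataset; names such as knn and preds are illustrative and not from the original notebook) could look like this:

# Hypothetical sketch of the stated task: KNN on Iris, printing correct and wrong predictions
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier

iris = load_iris()
X_tr, X_te, y_tr, y_te = train_test_split(iris.data, iris.target, test_size=0.3, random_state=0)

knn = KNeighborsClassifier(n_neighbors=5)
knn.fit(X_tr, y_tr)
preds = knn.predict(X_te)

for actual, predicted in zip(y_te, preds):
    status = "Correct" if actual == predicted else "Wrong"
    print(f"{status}: predicted={iris.target_names[predicted]}, actual={iris.target_names[actual]}")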

# NAIVE BAYES
import numpy as nm
import matplotlib.pyplot as mtp
import pandas as pd

# Importing the dataset


dataset = pd.read_csv('/content/Social_Network_Ads (1).csv')
x = dataset.iloc[:, [0, 1]].values
y = dataset.iloc[:, 2].values

# Splitting the dataset into the Training set and Test set
from sklearn.model_selection import train_test_split
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size = 0.3, random_state = 0)

# Feature Scaling
from sklearn.preprocessing import StandardScaler
sc = StandardScaler()
x_train = sc.fit_transform(x_train)
x_test = sc.transform(x_test)
# Fitting Naive Bayes to the Training set
from sklearn.naive_bayes import GaussianNB
classifier = GaussianNB()
classifier.fit(x_train, y_train)
# Predicting the Test set results
y_pred = classifier.predict(x_test)
# Making the Confusion Matrix
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(y_test, y_pred)
print(cm)
from sklearn.metrics import accuracy_score
accuracy_score(y_test, y_pred)
## classification report
from sklearn.metrics import classification_report
class_report=classification_report(y_test, y_pred)

print("Classification Report:\n",class_report)

[[74  5]
 [ 8 33]]
Classification Report:
              precision    recall  f1-score   support

           0       0.90      0.94      0.92        79
           1       0.87      0.80      0.84        41

    accuracy                           0.89       120
   macro avg       0.89      0.87      0.88       120
weighted avg       0.89      0.89      0.89       120
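The experiment statement also asks to print correct and wrong predictions. A minimal sketch that does this for the Naive Bayes test predictions above (an illustrative addition, not part of the original notebook) could be:

# Print each test prediction together with whether it was correct or wrong
for actual, predicted in zip(y_test, y_pred):
    status = "Correct" if actual == predicted else "Wrong"
    print(status, "- predicted:", predicted, "actual:", actual)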

# Visualising the Training set results
from matplotlib.colors import ListedColormap
x_set, y_set = x_train, y_train
X1, X2 = nm.meshgrid(nm.arange(start = x_set[:, 0].min() - 1, stop = x_set[:, 0].max() + 1, step = 0.01),
                     nm.arange(start = x_set[:, 1].min() - 1, stop = x_set[:, 1].max() + 1, step = 0.01))
mtp.contourf(X1, X2, classifier.predict(nm.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape),
             alpha = 0.7, cmap = ListedColormap(('red', 'yellow')))
mtp.xlim(X1.min(), X1.max())
mtp.ylim(X2.min(), X2.max())
for i, j in enumerate(nm.unique(y_set)):
    mtp.scatter(x_set[y_set == j, 0], x_set[y_set == j, 1],
                c = ListedColormap(('red', 'yellow'))(i), label = j)
mtp.title('Naive Bayes (Training set)')
mtp.xlabel('Age')
mtp.ylabel('Estimated Salary')
mtp.legend()
mtp.show()
<ipython-input-5-11c060604824>:11: UserWarning: *c* argument looks like a single numeric RGB or RGBA sequence, which should be avoided ...
  mtp.scatter(x_set[y_set == j, 0], x_set[y_set == j, 1],
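The UserWarning above is raised because scatter receives an RGBA tuple through the c argument. One way to avoid it, assuming the same plotting intent, is to pass the colour through the color keyword instead (a sketch, not from the original notebook):

# Scatter the training points without triggering the RGBA warning
colors = ('red', 'yellow')
for i, j in enumerate(nm.unique(y_set)):
    mtp.scatter(x_set[y_set == j, 0], x_set[y_set == j, 1],
                color = colors[i], label = j)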
