DPG Institute of Technology and Management,
Gurugram, 122004, Haryana

ARTIFICIAL & COMPUTATIONAL INTELLIGENCE LAB
(LC-DS-347G)
V SEMESTER
CSE-DS ENGINEERING

Submitted By:                          Submitted To:
Gaurav                                 Dr. Nidhi Srivastava
Roll No: 22DS16                        Asst. Professor
B.Tech (CSE-DS), 5th Sem               CSE Department
INDEX

S.NO   NAME OF EXPERIMENT                                                DATE   SIGN.
1.     Write a program for heuristic search using Python
       (using Best First Search)
2.     Write a program of binary search using Python
3.     Write a program of linear search using Python
4.     Write a program of BFS using Python
5.     Write a program of DFS using Python
6.     Write a program for hill climbing search using Python
7.     Write a program for uniform search using Python
       (uniform cost search)
8.     Write a program for an ANN model using Python
9.     Write a program for a CNN model using Python
10.    Write a program for Naïve Bayes classification using Python
EXPERIMENT- 1

AIM : Write a program of Heuristic Search using Python (using Best First Search)

Program :

from queue import PriorityQueue

def best_first_search(graph, start, goal, h):
    pq = PriorityQueue()
    pq.put((h[start], start))
    visited = set()

    while not pq.empty():
        _, current = pq.get()
        if current == goal:
            print(f"Goal {goal} reached!")
            return True

        visited.add(current)
        print(f"Visiting node: {current}")

        for neighbor in graph[current]:
            if neighbor not in visited:
                pq.put((h[neighbor], neighbor))

    print("Goal not reachable.")
    return False

graph = {
    'A': ['B', 'C'],
    'B': ['D', 'E'],
    'C': ['F', 'G'],
    'D': [],
    'E': [],
    'F': [],
    'G': []
}

# Heuristic values for nodes
h = {
    'A': 3,
    'B': 2,
    'C': 4,
    'D': 6,
    'E': 1,
    'F': 5,
    'G': 0
}

best_first_search(graph, 'A', 'G', h)

Output :
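Expected output for the graph and heuristic values above (traced by hand from the code):

Visiting node: A
Visiting node: B
Visiting node: E
Visiting node: C
Goal G reached!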
EXPERIMENT-2

AIM : Write a program of Binary Search using Python

Program :

def binary_search(arr, target):
    low = 0
    high = len(arr) - 1

    while low <= high:
        mid = (low + high) // 2

        if arr[mid] == target:
            return mid              # Return the index if the target is found
        elif arr[mid] < target:
            low = mid + 1
        else:
            high = mid - 1

    return -1                       # Target not present

arr = [2, 4, 6, 8, 10, 12, 14, 16, 18, 69]
print(arr)
target = int(input("enter the element : "))
result = binary_search(arr, target)

if result != -1:
    print(f"Element {target} found at index {result}")
    print(f"Element {target} found at position {result + 1}")
else:
    print(f"Element {target} not found in the array")

Output :
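A sample run, assuming the user enters 10 at the prompt:

[2, 4, 6, 8, 10, 12, 14, 16, 18, 69]
enter the element : 10
Element 10 found at index 4
Element 10 found at position 5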
EXPERIMENT-3

AIM : Write a program of Linear Search using Python

Program :

def linear_search(arr, target):
    for i in range(len(arr)):
        if arr[i] == target:
            return i   # Return the index if the target is found
    return -1          # Target not present

arr = [22, 13, 45, 87, 29, 69, 99, 12, 55]
print(arr)
target = int(input("enter the element : "))
result = linear_search(arr, target)

if result != -1:
    print(f"Element {target} found at index {result}")
    print(f"Element {target} found at position {result + 1}")
else:
    print(f"Element {target} not found in the array")

Output :
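A sample run, assuming the user enters 69 at the prompt:

[22, 13, 45, 87, 29, 69, 99, 12, 55]
enter the element : 69
Element 69 found at index 5
Element 69 found at position 6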
EXPERIMENT :- 04

AIM :- Write a program of BFS (Breadth-First Search) using Python

PROGRAM:-

graph = {
    '5' : ['3', '7'],
    '3' : ['2', '4'],
    '7' : ['8'],
    '2' : [],
    '4' : ['8'],
    '8' : []
}

visited = []
queue = []

def bfs(visited, graph, node):
    visited.append(node)
    queue.append(node)

    while queue:
        m = queue.pop(0)
        print(m, end=" ")

        for neighbour in graph[m]:
            if neighbour not in visited:
                visited.append(neighbour)
                queue.append(neighbour)

print("Following is the Breadth-First Search")
bfs(visited, graph, '5')

OUTPUT :-
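Expected output for the graph above (nodes are printed on one line, in the order visited):

Following is the Breadth-First Search
5 3 7 2 4 8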
EXPERIMENT :- 05

AIM :- Write a program of DFS (Depth-First Search) using Python

PROGRAM :-

graph = {
    '5' : ['3', '7'],
    '3' : ['2', '4'],
    '7' : ['8'],
    '2' : [],
    '4' : ['8'],
    '8' : []
}

visited = set()

def dfs(visited, graph, node):
    if node not in visited:
        print(node)
        visited.add(node)
        for neighbour in graph[node]:
            dfs(visited, graph, neighbour)

print("Following is the Depth-First Search")
dfs(visited, graph, '5')

OUTPUT :-
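Expected output for the graph above (each visited node is printed on its own line):

Following is the Depth-First Search
5
3
2
4
8
7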
EXPERIMENT :- 06

AIM :- Write a program for hill climbing search using Python

PROGRAM :-

import random

def objective_function(x):
    return -(x ** 2) + 10  # Simple function with a peak at x = 0

def hill_climbing(init, step_size, iterations):
    current = init
    for _ in range(iterations):
        # Pick a random neighbour and keep it only if it improves the objective
        neighbor = current + random.uniform(-step_size, step_size)
        if objective_function(neighbor) > objective_function(current):
            current = neighbor
    return current

# Parameters
initial_solution = random.uniform(-10, 10)
step_size = 0.1
iterations = 100

# Run the hill climbing algorithm
best_solution = hill_climbing(initial_solution, step_size, iterations)

print(f"Best solution: {best_solution:.4f} with value: {objective_function(best_solution):.4f}")

OUTPUT :-
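The output differs on every run because the starting point and steps are random. Starts near 0 end close to the peak (value near 10); a start far from 0 may not reach it in 100 small steps. One illustrative run:

Best solution: 0.3127 with value: 9.9022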
EXPERIMENT :- 07

AIM :- Write a program for uniform search using Python (uniform cost search)

PROGRAM :-

import heapq

def uniform_cost_search(graph, start, goal):
    frontier = [(0, start)]  # Priority queue of (cost, node)
    explored = {}

    while frontier:
        cost, node = heapq.heappop(frontier)
        if node == goal:
            return cost
        if node not in explored:
            explored[node] = cost
            for neighbor, edge_cost in graph[node]:
                heapq.heappush(frontier, (cost + edge_cost, neighbor))

    return float("inf")

# Example graph
graph = {
    'A': [('B', 1), ('C', 4)],
    'B': [('D', 2), ('E', 5)],
    'C': [('F', 3)],
    'D': [], 'E': [('F', 1)], 'F': []
}

# Run the search
print(uniform_cost_search(graph, 'A', 'F'))

OUTPUT :-
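Expected output: the cheapest path from 'A' to 'F' costs 7 (A → C → F = 4 + 3, the same cost as A → B → E → F = 1 + 5 + 1), so the program prints:

7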
EXPERIMENT :- 08

AIM :- Write a program for an ANN model using Python

PROGRAM :-

import numpy as np
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense

# Generate dummy dataset
X = np.random.random((1000, 20))           # 1000 samples, 20 features
y = np.random.randint(3, size=(1000, 1))   # 3 classes (0, 1, 2)
y = np.eye(3)[y.reshape(-1)]               # One-hot encoding

# Build a simple ANN
model = Sequential([
    Dense(64, input_dim=20, activation='relu'),  # Hidden layer
    Dense(3, activation='softmax')               # Output layer for 3 classes
])

# Compile and train the model
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
model.fit(X, y, epochs=10, batch_size=32)

# Evaluate the model
loss, accuracy = model.evaluate(X, y)
print(f"Accuracy: {accuracy:.4f}")

OUTPUT :-
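TensorFlow prints a progress line for each of the 10 epochs (format depends on the TensorFlow version). Because the labels are random, accuracy stays near chance level (about 0.33); an illustrative final line, with the exact value varying per run:

Accuracy: 0.3540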
EXPERIMENT :- 09

AIM :- Write a program for a CNN model using Python

PROGRAM :-

import numpy as np
from tensorflow.keras.datasets import mnist
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense
from tensorflow.keras.utils import to_categorical

# Load and preprocess the MNIST dataset
(X_train, y_train), (X_test, y_test) = mnist.load_data()
X_train = X_train.reshape(-1, 28, 28, 1) / 255.0
X_test = X_test.reshape(-1, 28, 28, 1) / 255.0
y_train = to_categorical(y_train, 10)
y_test = to_categorical(y_test, 10)

# Build the CNN model
model = Sequential([
    Conv2D(32, (3, 3), activation='relu', input_shape=(28, 28, 1)),
    MaxPooling2D((2, 2)),
    Flatten(),
    Dense(10, activation='softmax')
])

# Compile and train the model
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
model.fit(X_train, y_train, epochs=5, batch_size=32)

# Evaluate the model
loss, accuracy = model.evaluate(X_test, y_test)
print(f"Test Accuracy: {accuracy:.4f}")
OUTPUT :-
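TensorFlow prints per-epoch training progress; after 5 epochs this small CNN typically reaches roughly 98% accuracy on the MNIST test set. An illustrative final line (the exact value varies per run):

Test Accuracy: 0.9832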
EXPERIMENT :- 10

AIM :- Write a program for Naïve Bayes classification using Python

PROGRAM :-

# Install necessary libraries (if you haven't installed them yet)
# pip install scikit-learn

from sklearn import datasets
from sklearn.model_selection import train_test_split
from sklearn.naive_bayes import GaussianNB
from sklearn.metrics import accuracy_score

# Load the Iris dataset
iris = datasets.load_iris()
X = iris.data    # Features
y = iris.target  # Labels

# Split the dataset into training and testing sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Create and train the Naive Bayes model
model = GaussianNB()
model.fit(X_train, y_train)

# Make predictions
y_pred = model.predict(X_test)

# Evaluate the model
accuracy = accuracy_score(y_test, y_pred)
print(f"Accuracy: {accuracy:.4f}")
OUTPUT :-
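With random_state=42 the split is fixed, and GaussianNB usually classifies the resulting 30-sample Iris test set perfectly (a slightly lower value is possible depending on the scikit-learn version), so the printed line is typically:

Accuracy: 1.0000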
