Nikitha
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
from sklearn.metrics import r2_score, mean_squared_error
dataset = pd.read_csv("/content/ParisHousing.csv")
dataset.head(20)
    cityPartRange  numPrevOwners  made  isNewBuilt  hasStormProtector
1               8              6  2015           1                  0
2               6              8  2021           0                  0
3              10              4  2012           0                  1
4               3              7  1990           1                  0
5               8              6  2012           0                  1
6              10              9  1995           1                  1
7               3              4  2003           1                  0
8               8              3  2012           1                  1
9               5              8  2021           1                  1
10              6              8  1993           1                  0
11              4              2  2011           1                  1
12              2              9  2008           0                  0
13              8              4  2004           1                  0
14             10              5  2018           0                  1
15              6              9  2009           1                  1
16              3              9  2011           1                  1
17              7              8  2013           1                  1
18              3              1  2016           0                  0
19              9              5  2017           1                  1
dataset.shape
(10000, 17)
dataset.info()
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 10000 entries, 0 to 9999
Data columns (total 17 columns):
 #   Column             Non-Null Count  Dtype
---  ------             --------------  -----
 0   squareMeters       10000 non-null  int64
 1   numberOfRooms      10000 non-null  int64
 2   hasYard            10000 non-null  int64
 3   hasPool            10000 non-null  int64
 4   floors             10000 non-null  int64
 5   cityCode           10000 non-null  int64
 6   cityPartRange      10000 non-null  int64
 7   numPrevOwners      10000 non-null  int64
 8   made               10000 non-null  int64
 9   isNewBuilt         10000 non-null  int64
 10  hasStormProtector  10000 non-null  int64
 11  basement           10000 non-null  int64
 12  attic              10000 non-null  int64
 13  garage             10000 non-null  int64
 14  hasStorageRoom     10000 non-null  int64
 15  hasGuestRoom       10000 non-null  int64
 16  price              10000 non-null  float64
dtypes: float64(1), int64(16)
memory usage: 1.3 MB
corr = dataset.corr()
plt.figure(figsize=(10, 8))
sns.heatmap(corr, annot=True, cmap='coolwarm', fmt=".2f",
linewidths=0.5)
plt.title("Correlation Heatmap")
plt.show()
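The heatmap is easier to act on alongside the raw numbers; sorting the absolute correlations with price makes the filtering step that follows concrete. A short sketch using only pandas built-ins:

# Rank features by absolute correlation with price
print(dataset.corr()['price'].abs().sort_values(ascending=False))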
correlation_with_price = dataset.corr()['price'].abs()
threshold = 0.01
# Keep every column whose absolute correlation with price exceeds the threshold
highly_correlated_columns = correlation_with_price[correlation_with_price > threshold].index.tolist()
df = dataset[highly_correlated_columns]
df.head()
X = df['squareMeters'].values
y = df['price'].values
learning_rate = 0.01
iterations = 10
b0, b1 = 0.0, 0.0   # initialize intercept and slope
cost_history = []

# Gradient Descent
for iteration in range(iterations):
    # Calculate predictions
    y_pred = b0 + b1 * X
    # Calculate gradients
    gradient_b0 = np.mean(y_pred - y)
    gradient_b1 = np.mean((y_pred - y) * X)
    # Update the parameters
    b0 -= learning_rate * gradient_b0
    b1 -= learning_rate * gradient_b1
    cost_history.append(np.mean((y_pred - y) ** 2))

# Plot the cost curve
plt.plot(cost_history)
plt.title('Cost Over Iterations (learning rate 0.01)')
plt.xlabel('Iteration')
plt.ylabel('Cost (MSE)')
plt.tight_layout()
plt.show()
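On raw inputs both of these runs diverge: squareMeters runs into the tens of thousands and price into the millions, so the slope update overshoots at either learning rate. A minimal sketch of the usual fix, standardizing both variables before descending (the names X_s and y_s are new here, not from the original notebook):

# Standardize X and y so the gradient steps stay well-scaled (illustrative sketch)
X_s = (X - X.mean()) / X.std()
y_s = (y - y.mean()) / y.std()
b0, b1 = 0.0, 0.0
for _ in range(iterations):
    y_pred = b0 + b1 * X_s
    b0 -= learning_rate * np.mean(y_pred - y_s)
    b1 -= learning_rate * np.mean((y_pred - y_s) * X_s)
print("Slope in standardized space:", b1)  # tends toward corr(X, y) as iterations grow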
X = df['squareMeters'].values
y = df['price'].values
learning_rate = 0.1
iterations = 10
b0, b1 = 0.0, 0.0
cost_history = []

# Gradient Descent (same loop, larger step size)
for iteration in range(iterations):
    # Calculate predictions
    y_pred = b0 + b1 * X
    # Calculate gradients
    gradient_b0 = np.mean(y_pred - y)
    gradient_b1 = np.mean((y_pred - y) * X)
    # Update the parameters
    b0 -= learning_rate * gradient_b0
    b1 -= learning_rate * gradient_b1
    cost_history.append(np.mean((y_pred - y) ** 2))

plt.plot(cost_history)
plt.title('Cost Over Iterations (learning rate 0.1)')
plt.xlabel('Iteration')
plt.ylabel('Cost (MSE)')
plt.tight_layout()
plt.show()
X = df['squareMeters'].values
y = df['price'].values
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=42)

model = LinearRegression()
X_train = X_train.reshape(-1, 1)
X_test = X_test.reshape(-1, 1)

# Fit the model to the data
model.fit(X_train, y_train)

# Make predictions
y_pred = model.predict(X_test)
r2 = r2_score(y_test, y_pred)
mse = mean_squared_error(y_test, y_pred)
print("R-squared:", r2)
print("Mean Squared Error:", mse)

# Plot the test data against the fitted line
plt.scatter(X_test, y_test, s=5, label='Actual')
plt.plot(X_test, y_pred, color='red', label='Predicted')
plt.xlabel('squareMeters')
plt.ylabel('price')
plt.legend()
plt.show()
R-squared: 0.999998793097589
Mean Squared Error: 10440151.787275104
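Simple linear regression also has a closed form, so the sklearn coefficients can be sanity-checked by hand. A short sketch against the same training split (the helper variables below are new to this sketch):

# Closed-form OLS: slope = cov(x, y) / var(x); intercept from the means
x_flat = X_train.ravel()
slope = np.cov(x_flat, y_train, bias=True)[0, 1] / np.var(x_flat)
intercept = y_train.mean() - slope * x_flat.mean()
print("By hand: ", slope, intercept)
print("sklearn: ", model.coef_[0], model.intercept_)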
def normalize(feature):
    """Standardize the feature using Z-score normalization."""
    return (feature - np.mean(feature)) / np.std(feature)

# Hyperparameters
alpha = 0.01
num_iterations = 10

# Initialization
m = len(df['squareMeters'])
X0 = np.ones(m)
X1 = normalize(np.array(df['isNewBuilt']))
X2 = normalize(np.array(df['numPrevOwners']))
X3 = normalize(np.array(df['squareMeters']))
X4 = normalize(np.array(df['garage']))
X = np.column_stack([X0, X1, X2, X3, X4])  # design matrix with intercept column
y = normalize(np.array(df['price']))
theta = np.zeros(X.shape[1])

# Gradient Descent
for _ in range(num_iterations):
    y_pred = np.dot(X, theta)
    gradient = (1/m) * np.dot(X.T, (y_pred - y))
    theta -= alpha * gradient

print("Parameters:", theta)
# Hyperparameters
alpha = 0.10
num_iterations = 10

# Initialization (reuses normalize() from above)
m = len(df['squareMeters'])
X0 = np.ones(m)
X1 = normalize(np.array(df['isNewBuilt']))
X2 = normalize(np.array(df['numPrevOwners']))
X3 = normalize(np.array(df['squareMeters']))
X4 = normalize(np.array(df['garage']))
X = np.column_stack([X0, X1, X2, X3, X4])
y = normalize(np.array(df['price']))
theta = np.zeros(X.shape[1])

# Gradient Descent
for _ in range(num_iterations):
    y_pred = np.dot(X, theta)
    gradient = (1/m) * np.dot(X.T, (y_pred - y))
    theta -= alpha * gradient

print("Parameters:", theta)
# Hyperparameters
alpha = 0.01
num_iterations = 20

# Initialization
m = len(df['squareMeters'])
X0 = np.ones(m)
X1 = normalize(np.array(df['isNewBuilt']))
X2 = normalize(np.array(df['numPrevOwners']))
X3 = normalize(np.array(df['squareMeters']))
X4 = normalize(np.array(df['garage']))
X = np.column_stack([X0, X1, X2, X3, X4])
y = normalize(np.array(df['price']))
theta = np.zeros(X.shape[1])
cost_history = []

# Gradient Descent, recording the cost each iteration
for _ in range(num_iterations):
    y_pred = np.dot(X, theta)
    cost = (1/m) * np.sum((y_pred - y) ** 2)
    cost_history.append(cost)
    theta -= alpha * (1/m) * np.dot(X.T, (y_pred - y))

plt.plot(cost_history)
plt.title('Cost Function Over Iterations (Multiple Regression, alpha = 0.01)')
plt.xlabel('Iterations')
plt.ylabel('Cost (MSE)')
plt.show()
# Hyperparameters
alpha = 0.10
num_iterations = 20

# Initialization
m = len(df['squareMeters'])
X0 = np.ones(m)
X1 = normalize(np.array(df['isNewBuilt']))
X2 = normalize(np.array(df['numPrevOwners']))
X3 = normalize(np.array(df['squareMeters']))
X4 = normalize(np.array(df['garage']))
X = np.column_stack([X0, X1, X2, X3, X4])
y = normalize(np.array(df['price']))
theta = np.zeros(X.shape[1])
cost_history = []

# Gradient Descent, recording the cost each iteration
for _ in range(num_iterations):
    y_pred = np.dot(X, theta)
    cost = (1/m) * np.sum((y_pred - y) ** 2)
    cost_history.append(cost)
    theta -= alpha * (1/m) * np.dot(X.T, (y_pred - y))

plt.plot(cost_history)
plt.title('Cost Function Over Iterations (Multiple Regression, alpha = 0.10)')
plt.xlabel('Iterations')
plt.ylabel('Cost (MSE)')
plt.show()
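The two cost figures are easier to compare on one set of axes. A small helper makes that a two-line loop (run_gd is a new name introduced for this sketch, not from the original notebook):

def run_gd(X, y, alpha, num_iterations):
    """Return the per-iteration cost history of batch gradient descent."""
    m = len(y)
    theta = np.zeros(X.shape[1])
    history = []
    for _ in range(num_iterations):
        y_pred = X.dot(theta)
        history.append((1/m) * np.sum((y_pred - y) ** 2))
        theta -= alpha * (1/m) * X.T.dot(y_pred - y)
    return history

for a in (0.01, 0.10):
    plt.plot(run_gd(X, y, a, 20), label=f'alpha = {a}')
plt.xlabel('Iterations')
plt.ylabel('Cost (MSE)')
plt.legend()
plt.show()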
# Refit with sklearn on the raw (unscaled) selected features and price
X = df.drop(columns='price').values
y = df['price'].values
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=42)
model = LinearRegression()
model.fit(X_train, y_train)

# Make predictions
y_pred = model.predict(X_test)
r2 = r2_score(y_test, y_pred)
mse = mean_squared_error(y_test, y_pred)
print("R-squared:", r2)
print("Mean Squared Error:", mse)

# Plot predicted vs. actual prices on the test set
plt.scatter(y_test, y_pred, s=5, label='Predicted vs. actual price')
plt.legend()
plt.show()
R-squared: 0.9999987942704442
Mean Squared Error: 10430006.155390566
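A single 75/25 split can be optimistic; k-fold cross-validation gives a steadier read on the same model. A minimal sketch using sklearn's cross_val_score (not part of the original run):

from sklearn.model_selection import cross_val_score

# 5-fold cross-validated R-squared for the same features and model
scores = cross_val_score(LinearRegression(), X, y, cv=5, scoring='r2')
print("R-squared per fold:", scores)
print("Mean R-squared:", scores.mean())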