
ECONOMETRICS

R Codes for Lab Exam

Amrin Binte Ahmed


❖ Introduction Part R Codes
1. Likelihood Ratio Test
set.seed(105)
n = 25
x2 = runif(n, 45, 99)
x3 = runif(n, 4, 900)
y = rnorm(n, 0, 7)

##Unrestricted Regression
reg_un = lm(y ~ x2+x3)
res_un = reg_un$residuals
sigma_2 = deviance(reg_un)/n #MLE of the error variance (RSS/n)
ULLF = -(n/2)*log(sigma_2)-deviance(reg_un)/(2*sigma_2)

##Restricted Regression (beta3 = 0)


reg_r = lm(y ~ x2)
res_r = reg_r$residuals
sigma1_2 = deviance(reg_r)/n #MLE of the error variance under the restriction
RLLF = -(n/2)*log(sigma1_2)-deviance(reg_r)/(2*sigma1_2)

###LR Test
lambda = 2*(ULLF-RLLF) #LR statistic; chi-square with 1 df (one restriction) under H0
pchisq(lambda, 1, lower.tail = F)
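
As a cross-check, lmtest::lrtest() runs the same comparison (a sketch, assuming the lmtest package is installed; it should agree with lambda above, since the 2*pi constants cancel in the difference):

##Cross-check with lmtest
library(lmtest)
lrtest(reg_r, reg_un)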

2. Monte Carlo Experiments


set.seed(105)
b1 = 20
b2 = 0.6
n = 25
x = runif(n, 45, 99)
b1_hat = numeric(100)
b2_hat = numeric(100)

for (i in 1:100)
{
set.seed(105 + i)
ui = rnorm(n, 0, 5)
y = b1 + b2 * x + ui
reg = lm(y ~ x)
coef_values = coef(summary(reg))
b1_hat[i] = coef_values[1, 1]
b2_hat[i] = coef_values[2, 1]
}

b1_est = mean(b1_hat)
b2_est = mean(b2_hat)
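
Since the true values b1 = 20 and b2 = 0.6 are known, a quick summary shows the Monte Carlo bias and sampling variability (a small sketch using only the objects above):

##Monte Carlo summary
bias_b1 = b1_est - b1
bias_b2 = b2_est - b2
sd(b1_hat) #empirical standard error of b1_hat
sd(b2_hat) #empirical standard error of b2_hat
hist(b2_hat, main = "Sampling distribution of b2_hat")
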
3. Wald Test Statistic
set.seed(105)
n = 25
k=2
x2 = runif(n, 45, 99)
x3 = runif(n, 4, 900)
y = rnorm(n, 0, 7)

##Unrestricted Regression
model1 = lm(y ~ x2+x3)
URSS = deviance(model1)

##Restricted Regression
model2 = lm(y ~ x2)
RRSS = deviance(model2)

##Wald test statistic


num = (n-k)*(RRSS-URSS)
W = num/URSS
pchisq(W, 1, lower.tail = F)
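
For nested linear models, anova() gives the exact finite-sample F version of the same restriction test (a cross-check, not part of the Wald computation above):

##Cross-check: exact F test
anova(model2, model1)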

4. Lagrange Multiplier Test Statistic


set.seed(105)
n = 25
k=2
x2 = runif(n, 45, 99)
x3 = runif(n, 4, 900)
y = rnorm(n, 0, 7)

##Unrestricted Regression
model1 = lm(y ~ x2+x3)
URSS = deviance(model1)

##Restricted Regression
model2 = lm(y ~ x2)
RRSS = deviance(model2)

##Lagrange Multiplier test statistic


r = 1 #number of restrictions
num = (n-k+r)*(RRSS-URSS)
LM = num/RRSS
pchisq(LM, 1, lower.tail = F)
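
An equivalent route to the LM statistic is the auxiliary-regression form: regress the restricted residuals on all regressors and use n times the R-squared (a sketch using the objects above):

##Auxiliary-regression form of the LM test
res_r = model2$residuals
aux = lm(res_r ~ x2 + x3)
LM_aux = n*summary(aux)$r.squared
pchisq(LM_aux, 1, lower.tail = F)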

❖ Model Selection R Codes


1. Davidson-MacKinnon J Test
setwd("D:/3rd Year/LAB EXAM 3RD YEAR/Econometrics/2. Model Selection")
data = read.table("Table 13.3.txt")
colnames(data) = c("Year", "PPCE", "PDPI")

##Adding lagged Variable


data$PPCE_1 = c(NA, head(data$PPCE, -1))
data$PDPI_1 = c(NA, head(data$PDPI, -1))
##Model A
m1 = lm(PPCE ~ PDPI + PDPI_1, data)
summary(m1)

##Model B
m2 = lm(PPCE ~ PDPI + PPCE_1, data)
summary(m2)

##Davidson–MacKinnon J Test
library(lmtest)
jtest(m1,m2)
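
jtest() augments each model with the fitted values of its rival; the same steps can be done by hand for one direction (a sketch, dropping the NA row created by the lags):

##Manual J test for Model A (sketch)
d2 = na.omit(data)
mA = lm(PPCE ~ PDPI + PDPI_1, d2)
mB = lm(PPCE ~ PDPI + PPCE_1, d2)
jA = lm(PPCE ~ PDPI + PDPI_1 + fitted(mB), d2)
summary(jA) #a significant fitted(mB) term rejects Model A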

❖ Model Specification R Codes


1. The Durbin-Watson d Statistic
setwd("D:/3rd Year/LAB EXAM 3RD YEAR/Econometrics/Model Specification")
data = read.table("Table 7.4.txt")
colnames(data) = c("X", "Y")

model = lm(data$Y~data$X)
results = summary(model)
ui = results$residuals
u_t = ui[2:10] #u_t, t = 2..10
u_t_1 = ui[1:9] #u_(t-1)
ut1 = ui[1:10] #all residuals

##Durbin Watson d statistic


num = sum((u_t-u_t_1)**2)
denum = sum(ut1**2)
d = num/denum

##Using library
library(lmtest)
dwtest(model)

2. Ramsey’s RESET Test


setwd("D:/3rd Year/LAB EXAM 3RD YEAR/Econometrics/Model Specification")
data = read.table("Table 7.4.txt")
colnames(data) = c("X", "Y")
dim(data)

##Old Model
reg = lm(data$Y~data$X)
r1 = summary(reg)
u_hat = r1$residuals
Y_hat = predict(reg)

##Plot for checking


plot(Y_hat, u_hat, type = "l")

##New Model
Y_hat_sq = Y_hat**2
Y_hat_cube = Y_hat**3
n_reg = lm(data$Y~data$X+Y_hat_sq+Y_hat_cube)
r2 = summary(n_reg)

##R Square Value


R_sq_old = r1$r.squared
R_sq_new = r2$r.squared

##Ramsey RESET Test


num_df = 2 #No. of New Regressors
denum_df = 10-4 #(n-No. of Parameters in the New Model)

num = (R_sq_new-R_sq_old)/num_df
denum = (1-R_sq_new)/denum_df

##Result
F_cal = num/denum
F_tab = qf(0.05, 2,6, lower.tail = FALSE)
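
The p-value for F_cal, and the packaged version via lmtest::resettest() (power = 2:3 adds the squared and cubed fitted values, matching the manual steps above, so it should agree with F_cal):

##p-value and library version
pf(F_cal, 2, 6, lower.tail = FALSE)
library(lmtest)
resettest(reg, power = 2:3, type = "fitted")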

3. Lagrange Multiplier (LM) Test for Adding Variables


setwd("D:/3rd Year/LAB EXAM 3RD YEAR/Econometrics/Model Specification")
data = read.table("Table 7.4.txt")
colnames(data) = c("X", "Y")
dim(data)

##Restricted Regression
m1 = lm(data$Y ~ data$X)
r1 = summary(m1)
u_hat = r1$residuals

##New Regression
X_sq = (data$X)**2
X_cu = (data$X)**3
m2 = lm(u_hat ~ data$X+X_sq+X_cu)
r2 = summary(m2)

##Lagrange Multiplier Test


n = 10
R_sq_new = r2$r.squared
LM = n*R_sq_new
Chisq_val = qchisq(0.05, 2, lower.tail = FALSE)
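
The corresponding p-value (2 restrictions, matching Chisq_val above):

##p-value
pchisq(LM, 2, lower.tail = FALSE)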

❖ Qualitative Response Regression Model R Codes


1. Linear Probability Model
setwd("D:/3rd Year/LAB EXAM 3RD YEAR/Econometrics/Logit")
data = read.table("Table_15.1.txt")
colnames(data) = c("Family", "Y", "X")
View(data)

reg = lm(data$Y~data$X)
summary(reg)
data$Y_hat = predict(reg)
data$w_hat = data$Y_hat*(1-data$Y_hat)
d = data[data$w_hat>0,]
d$Y_star = d$Y/sqrt(d$w_hat)
d$int = 1/sqrt(d$w_hat)
d$X_star = d$X/sqrt(d$w_hat)

LPM = lm(d$Y_star~d$int+d$X_star-1)
summary(LPM)
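
The same WLS fit can be obtained in one step through lm()'s weights argument, a useful cross-check on the manual transformation (the weights are the inverse variances 1/w_hat):

##Equivalent one-step WLS (cross-check)
LPM_w = lm(Y ~ X, data = d, weights = 1/w_hat)
summary(LPM_w)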

2. Logit Model
setwd("D:/3rd Year/LAB EXAM 3RD YEAR/Econometrics/Logit")
data = read.table("Table_15.4.txt")
colnames(data) = c("X", "N", "n")
View(data)

data$P = data$n/data$N
data$L = log(data$P/(1-data$P))
data$w = data$N*data$P*(1-data$P)
data$sq_w = sqrt(data$w)

###Regression
data$L_star = data$L*data$sq_w
data$X_star = data$X*data$sq_w

reg = lm(data$L_star~data$sq_w+data$X_star-1)
data$Pred_L_star = predict(reg)
coef = as.numeric(coef(summary(reg))[,1])
Beta = coef[2]

####Output
data$logit = data$Pred_L_star/data$sq_w
data$Probability = exp(data$logit)/(1+exp(data$logit))
data$Change_in_Pr = Beta*data$Probability*(1-data$Probability)
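
The grouped logit can also be estimated by maximum likelihood with glm(), which handles the weighting internally; the coefficients should be close to the WLS values above (a cross-check):

##Grouped logit by ML (cross-check)
logit_ml = glm(cbind(n, N - n) ~ X, family = binomial, data = data)
summary(logit_ml)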

3. Probit Model
setwd("D:/3rd Year/LAB EXAM 3RD YEAR/Econometrics/Logit")
data = read.table("Table_15.4.txt")
colnames(data) = c("X", "N", "n")
View(data)

###Probit_Model
data$P = data$n/data$N
data$I = qnorm(data$P)

reg = lm(data$I~data$X)
summary(reg)

data$Pred_I = predict(reg)
data$Pr = pnorm(data$Pred_I)
b2 = coef(summary(reg))[2,1]
data$M = b2*dnorm(data$Pred_I)*100 #rate of change of probability (in %): b2 times the normal density
##Corrected_for_Heteroscedasticity
##Exercise: 15.12
data$sigma_2 = (data$P*(1-data$P))/(data$N*dnorm(data$I)**2) #P(1-P)/(N*f(I)^2), f = normal density
data$sigma = sqrt(data$sigma_2)
data$Y_star = data$I/data$sigma
data$sigma_inv = 1/data$sigma
data$X_star = data$X/data$sigma

C_reg = lm(data$Y_star~data$sigma_inv+data$X_star-1)
summary(C_reg)
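
As with the logit, glm() fits the grouped probit directly by maximum likelihood, a cross-check on the corrected WLS estimates:

##Grouped probit by ML (cross-check)
probit_ml = glm(cbind(n, N - n) ~ X, family = binomial(link = "probit"), data = data)
summary(probit_ml)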

❖ Multicollinearity R Codes
1. Detection of Multicollinearity
library(tidyverse)
data("mtcars")
attach(mtcars)
head(mtcars)

data = cbind(mpg, cyl, disp, hp, wt)


data_1 = data.frame(data)
View(data_1)

##1. High R^2 but few significant t ratios


reg = lm(mpg ~., data = data_1)
summary(reg)

##2. High pair-wise correlations among regressors


library(corrplot)
corrplot(cor(data_1), method = "number")

##3. Examination of partial correlations


library(ppcor)
pcor(data_1, method = "pearson")

##4. Auxiliary regressions


Aux_reg = lm(disp~cyl+hp+wt)
results = summary(Aux_reg)
F_stat = results$fstatistic #avoid calling it F, which would mask FALSE
qf(0.05,3,28, lower.tail = FALSE)
pf(F_stat[1], 3,28, lower.tail = FALSE)

Comment: Here, “disp” is collinear with other regressors.

##5. Eigen values and Condition index


library(olsrr)
eig = ols_eigen_cindex(reg) #avoid masking base eigen()
eigen_val = eig[,1]
k = max(eigen_val)/min(eigen_val) #condition number
CI = sqrt(k) #condition index; CI > 30 suggests severe multicollinearity
Comment: There is strong multicollinearity.
##6. Tolerance and Variance Inflation Factor
VIF_Tol = ols_vif_tol(reg)
VIF = VIF_Tol$VIF
Tolerance = VIF_Tol$Tolerance
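
The VIFs can be cross-checked with car::vif() (assuming the car package is installed); values above about 10 are the usual warning sign:

##Cross-check with car
library(car)
vif(reg)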

❖ Heteroscedasticity R Codes
1. Park Test
setwd("D:/3rd Year/LAB EXAM 3RD YEAR/Econometrics/Heteroscedasticity")
data = read.table("Table 11.3.txt")
Y = data$V1
X = data$V2

##Adding an Extreme Value


Y[10] = 500

reg = lm(Y~X)
summary(reg)
results = summary(reg)

##Park test
Ui = results$residuals
lnUi_sq = log(Ui**2)
lnXi = log(X)

reg_2 = lm(lnUi_sq~lnXi)
summary(reg_2)

Comment: There is heteroscedasticity.

##Note: Informal Method for Detecting Heteroscedasticity


ui_2 = (reg$residuals)**2
Yi_hat = predict(reg)
plot(Yi_hat, ui_2, type = "l")
plot(X, ui_2, type = "l")

2. Glejser Test
setwd("D:/3rd Year/LAB EXAM 3RD YEAR/Econometrics/Heteroscedasticity")
data = read.table("Table 11.3.txt")
Y = data$V1
X = data$V2

##Adding an Extreme Value


Y[10] = 800

reg = lm(Y~X)
summary(reg)
results = summary(reg)

Ui = results$residuals
Ui_abs = abs(Ui)
##Glejser test
X1 = X
X2 = sqrt(X)
X3 = 1/X
X4 = 1/sqrt(X)

reg_1 = lm(Ui_abs~X1)
summary(reg_1)

reg_2 = lm(Ui_abs~X2)
summary(reg_2)

reg_3 = lm(Ui_abs~X3)
summary(reg_3)

reg_4 = lm(Ui_abs~X4)
summary(reg_4)

3. Spearman’s Rank Correlation Test


setwd("D:/3rd Year/LAB EXAM 3RD YEAR/Econometrics/Heteroscedasticity")
data = read.table("Table 11.3.txt")
Y = data$V1
X = data$V2

reg = lm(Y~X)
results = summary(reg)
ui = results$residuals
abs_ui = abs(ui)
Ru = rank(abs_ui)
Ry = rank(Y)
di_2 = (Ru - Ry)**2
n = 30 #sample size
rs = 1-6*((sum(di_2)/(n*(n**2-1)))) #Spearman's rank correlation coefficient

##Test
num = rs*sqrt(n-2)
denum = sqrt(1-rs**2)
t = num/denum
a = 0.05 #level of significance
Critical_val = qt(a/2, 28, lower.tail = TRUE) ##t is negative here, so use the lower tail
P_val = pt(t, 28, lower.tail = TRUE)
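
cor.test() computes the same rank correlation and a p-value directly (a cross-check; it may warn if there are ties in the ranks):

##Cross-check with cor.test
cor.test(abs_ui, Y, method = "spearman")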

4. Goldfeld-Quandt Test
library(tidyverse)
data("mtcars")
attach(mtcars)
head(mtcars)

y = mpg
x = cyl
data = data.frame(y, x)
dim(data)

d = data[order(data$x),]
n = 32
c=6
central_observation = c((n/2 - 2):(n/2 + 3))
new_d = d[-central_observation,]
dim(new_d)

G1 = new_d[1:13,]
G2 = new_d[14:26,]

m1 = lm(y ~ x, G1)
RSS1 = deviance(m1)

m2 = lm(y ~ x, G2)
RSS2 = deviance(m2)

k=2
df1 = (n-c)/2 - k
df2 = (n-c)/2 - k

lambda = (RSS2/df2)/(RSS1/df1) #RSS of the higher-x (larger-variance) group in the numerator
lambda
qf(0.05,11,11,lower.tail = F)
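
lmtest::gqtest() runs the whole test in one call; fraction drops the central observations and order.by sorts by the regressor (a sketch matching the manual setup above):

##Using library
library(lmtest)
gqtest(y ~ x, fraction = 6/32, order.by = ~ x, data = data)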

5. Breusch-Pagan-Godfrey Test
library(tidyverse)
data("mtcars")
attach(mtcars)
head(mtcars)

y = mpg
x = cyl
data = data.frame(y,x)
dim(data)

reg = lm(y ~ x, data)


ui_hat = reg$residuals
RSS = deviance(reg)
n = 32
sigma_sq = RSS/n
p_i = ui_hat**2/sigma_sq #standardized squared residuals (named p_i to avoid masking pi)

reg_2 = lm(p_i ~ x)
ESS = sum((predict(reg_2) - mean(p_i))**2)
big_theta = ESS/2 #BPG statistic
m = 2 #parameters in the p_i regression
pchisq(big_theta, m-1,lower.tail = F)

##Using library
library(lmtest)
bptest(reg, studentize = F)
6. White’s General Heteroscedasticity Test
library(tidyverse)
data("mtcars")
attach(mtcars)
head(mtcars)

y = mpg
x2 = cyl
x3 = disp
data = data.frame(y, x2, x3)

reg = lm(y ~ x2+x3, data)


summary(reg)
u_hat_2 = (reg$residuals)**2

x2_2 = x2**2
x3_2 = x3**2
x2x3 = x2*x3

reg_new = lm(u_hat_2 ~ x2 + x3 + x2_2 + x3_2 + x2x3)


r = summary(reg_new)
R_sq = r$r.squared
n = length(u_hat_2)

WG = n*R_sq #White statistic: n times the auxiliary R-squared
k = 6 #parameters in the auxiliary regression (incl. intercept)
qchisq(0.05, k-1, lower.tail = F) #critical value with df = 5

##Using library
library(lmtest)
bptest(reg, ~ x2 + x3 + x2_2 + x3_2 + x2x3) #White's test: bptest on the original model with the auxiliary regressors

library(sandwich) #vcovHC() comes from sandwich
white_se = sqrt(diag(vcovHC(reg, type = "HC0")))

print(white_se)
ols_se = summary(reg)$coefficients[,2] #usual OLS SEs for comparison
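
In practice the White-corrected standard errors are used through coeftest(), which reprints the coefficient table with the robust variance matrix (a sketch using lmtest and sandwich):

##Robust inference with White SEs
library(lmtest)
coeftest(reg, vcov = vcovHC(reg, type = "HC0"))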

❖ Autocorrelation R Codes
1. Durbin h Test
setwd("D:/3rd Year/LAB EXAM 3RD YEAR/Econometrics/Autocorrelation")
data = read.table("Table 12.4.txt")
colnames(data) = c("Year", "Y", "X")
dim(data)

##One Period lag of Y


install.packages("dplyr")
library(dplyr)
data$Y_1 = lag(data$Y, n = 1)

##Regression
reg = lm(data$Y ~ data$X+data$Y_1)
r = summary(reg)
se = coef(summary(reg))[,2] #standard errors
var_beta3 = se[3]**2 #estimated variance of the coefficient on Y_1
##Durbin h test
library(lmtest)
DW = dwtest(reg)
d = as.numeric(DW$statistic)
rho = 1 - d/2 #rho implied by the DW d
n = 46
h = rho*sqrt(n/(1-n*var_beta3)) #h ~ N(0,1) under H0; |h| > 1.96 rejects at 5%

Comment: There is positive first-order autocorrelation.


2. Durbin Watson d Statistic
setwd("D:/3rd Year/LAB EXAM 3RD YEAR/Econometrics/Model Specification")
data = read.table("Table 7.4.txt")
colnames(data) = c("X", "Y")
View(data)
dim(data)

reg = lm(data$Y~data$X)
r = summary(reg)

##Durbin Watson d
ui = r$residuals
u_t = ui[2:10] #u_t, t = 2..10
u_t_1 = ui[1:9] #u_(t-1)
u_all = ui[1:10] #all residuals
num = sum((u_t-u_t_1)**2)
denum = sum(u_all**2)
d = num/denum

##Another Method
library(lmtest)
dwtest(reg)

###From Critical Value Table


n = 10
k=1
d_L = 0.88
d_U = 1.32
dcal = d

Comment: Since dcal < d_L, reject H0; there is evidence of positive autocorrelation.
