Skip to content
Snippets Groups Projects
Commit adcab70d authored by Angie Liu's avatar Angie Liu
Browse files

Upload New File

parent 9e6a0eb7
No related branches found
No related tags found
No related merge requests found
# Lab 4
# Importing data file from the repository
# Making plots
# Linear regression
# Model selection
# Forward/backward/stepwise selection
# Ridge
# LASSO
# Install (only if missing) and load the packages this lab uses ----
# The original called install.packages() unconditionally, which re-downloads
# and reinstalls on every run and fails offline. Guarding with
# requireNamespace() installs each package at most once.
pkgs <- c("tidyverse", "stargazer", "glmnet")
for (pkg in pkgs) {
  if (!requireNamespace(pkg, quietly = TRUE)) {
    install.packages(pkg)
  }
  # character.only = TRUE lets library() take the package name from a variable
  library(pkg, character.only = TRUE)
}
# SP21 AEDE6120 repository
# https://code.osu.edu/liu.6200/sp21-aede6120/-/tree/master/LabCode
# https://code.osu.edu/liu.6200/sp21-aede6120/-/tree/master/LabData
# Importing data file from the web/repository ####
# Read the example wage data set straight from the course GitLab repository
# into a tibble named wage1.
wage_url <- "https://code.osu.edu/liu.6200/sp21-aede6120/-/raw/master/LabData/wageEX.csv"
wage1 <- read_csv(wage_url)
# Making plots ####
# Base-graphics demo: draw a small plot, then layer extra elements onto the
# current device with points()/lines()/abline().
x <- 1:3
y <- c(1, 4, 6)
plot(x, y, type = "b")  # type = "b" draws both points and connecting lines
?plot  # help page for plot(); see the 'type' argument and par() graphics options
# adding points/line/grid to the current plot
points(1:3, c(3, 2, 2), col="blue")  # overlay extra points on the open plot
lines(1:3, c(1, 5, 3), col="red")    # overlay an extra connected line
abline(v=0, h=1:6, col="grey")       # vertical line at x = 0 plus horizontal grid lines
# plots with example dataset
?cars  # built-in dataset: car speed vs. stopping distance
plot(cars$speed, cars$dist)
# smooth interpolating curve; ties = mean averages dist over duplicated speeds
lines(spline(cars$speed, cars$dist, ties = mean))
# Linear regression ####
# example 1: Y=wage, X1=education, X2=experience, X3=experience^2
# NOTE(review): expersq is used as-is, so it is presumably a pre-built column
# of the csv -- TODO confirm (educsq, by contrast, is constructed manually
# for example 2).
lm1 <- lm(wage ~ educ + exper + expersq, data = wage1)
# present result
summary(lm1)
# export formatted result as html file (written to the current working directory)
stargazer(lm1, type = "html", out = "lm1.html")
# example 2: Y=wage, X1=education, X2=education^2, X3=experience, X4=experience^2
# add a squared-education column so the quadratic term can enter the formula
wage1 <- wage1 %>% mutate(educsq = educ^2)
lm2 <- lm(wage ~ educ + educsq + exper + expersq, data = wage1)
# present result
summary(lm2)
# export formatted result as doc file
# (type = "text" with a .doc name produces a plain-text table Word can open)
stargazer(lm2, type = "text", out = "lm2.doc")
# other useful information stored in the regression result:
# coefficients() -- the coefficients
# confint() -- the confidence intervals (95% by default)
# fitted() -- the fitted values
# residuals() -- the residuals
# plot() -- diagnostic plots of the fitted model
# Model selection ####
# NOTE(review): the original script ran rm(list = ls()) here. That silently
# wipes every object in the user's session (a well-known script anti-pattern),
# so it has been removed; this section only uses the objects created below.
# import data file
# base read.csv() is used deliberately here: it returns a plain data.frame,
# which stargazer() summarizes more reliably than a readr tibble.
wageR <- read.csv("https://code.osu.edu/liu.6200/sp21-aede6120/-/raw/master/LabData/wageEX.csv")
# take a look at the data
head(wageR)
# summary-statistics table printed to the console
wageR %>% stargazer(type = "text")
# prep steps:
# create the full model: wage regressed on every other column
reg1 <- lm(wage ~ ., data = wageR) # '.' represents the rest of the variables
summary(reg1)
# create the empty (intercept-only) model, the starting point for forward search
reg1.null <- lm(wage ~ 1, data = wageR) # '1' represents the intercept
summary(reg1.null)
# forward selection ####
# with AIC (default penalty k = 2): start from the intercept-only model and
# add regressors from 'scope' one at a time while the criterion improves
reg.fwA <- step(reg1.null,
direction = "forward",
scope = ( ~ educ + exper + nonwhite + female + married + expersq))
summary(reg.fwA)
# with SBC
# k = log(n) replaces the AIC penalty with BIC/SBC, which favors smaller models
reg.fwB <- step(reg1.null,
direction = "forward",
k = log(nrow(wageR)),
scope = ( ~ educ + exper + nonwhite + female + married + expersq))
summary(reg.fwB)
# backward selection ####
# start from the full model reg1 and drop regressors while AIC improves
reg.bw <- step(reg1, direction = "backward")
summary(reg.bw)
# stepwise selection ####
# direction = "both": at each step consider both adding and dropping a term
reg.sw <- step(reg1.null,
direction = "both",
scope = ( ~ educ + exper + nonwhite + female + married + expersq))
summary(reg.sw)
# Ridge ####
# prep: extract independent variables as a numeric matrix
# (glmnet takes a matrix x, not a data frame)
Xs <- wageR %>% select(educ, exper, nonwhite, female, married, expersq) %>% as.matrix
# Ridge regression: alpha = 0 selects the ridge (L2) penalty.
# cv.glmnet() assigns observations to cross-validation folds at random, so the
# original code produced different lambda.min/lambda.1se on every run; fixing
# the RNG seed makes the cross-validation reproducible.
set.seed(6120)
rdg <- cv.glmnet(x = Xs, y = wageR$wage, alpha = 0)
# see the lambda-MSE plot
plot(rdg)
# the two threshold lambdas:
# lambda.min - it minimizes the prediction error (MSE)
# lambda.1se - it gives an MSE that's one standard error away from the minimum
# see the coefficients at the MSE-minimizing lambda
# (ridge shrinks coefficients toward zero but never sets them exactly to zero)
coef(rdg, s = rdg$lambda.min)
# LASSO ####
# alpha = 1 selects the LASSO (L1) penalty, which can set coefficients exactly
# to zero and so performs variable selection.
# Seed the RNG so the random CV fold assignment (and hence the selected
# lambdas/variables) is reproducible; the original code was not.
set.seed(6120)
lss <- cv.glmnet(x = Xs, y = wageR$wage, alpha = 1)
# see the lambda-MSE plot
plot(lss)
# see the selected variables and their coefficients
coef(lss, s = lss$lambda.min)  # lambda that minimizes cross-validated MSE
coef(lss, s = lss$lambda.1se)  # sparser model within one SE of the minimum
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment