#
# Edps/Psych/Stat 587
# Fall 2017
# Carolyn J Anderson
#
# All analyses in lecture on SAS and R (High School & Beyond Ones)
#
# For p-values and different degrees of freedom using the lmerTest package, see
#   http://mindingthebrain.blogspot.com/2014/02/three-ways-to-get-parameter-specific-p.html
# Below I use lmerTest, which gives you Satterthwaite df and p-values for HLM
# (these are the same as those given by SAS).
#
# Load packages:
#   lme4
#   lmerTest
#
library(lme4)     # needed for lmer(); lmerTest is loaded further below
#
# Set working directory
#
#setwd('C:/Users/cja/Dropbox/edps587/lectures/SAS_MIXED_R')
setwd('C:/Users/cja/Documents/Dropbox/edps587/lectures/SAS_MIXED_R')

############################################################################
# Data set up: merge hsb1 and hsb2                                         #
############################################################################
#
# Read in student level data
#
hsb1 <- read.table(file="HSB1data.txt", header=TRUE)

# Top & bottom of the file
#
head(hsb1)
tail(hsb1)
#
# Read in school level data
#
hsb2 <- read.table(file="HSB2data.txt", header=TRUE)

# Top & bottom of the file
#
head(hsb2)
tail(hsb2)
#
# Make long matrix.
# Files are already sorted by school id, so merge the student- and
# school-level files by school id ('id').
#
hsb <- merge(hsb1, hsb2, by=c('id'))
head(hsb)

###########################################################
# Create and add meanfemale (school mean of female) to    #
# the data; meanminority could be created the same way.   #
###########################################################
meanfemale <- aggregate(female ~ id, data=hsb, FUN="mean")
names(meanfemale) <- c('id', 'meanfemale')
hsb <- merge(hsb, meanfemale, by=c('id'))

# Since meanses is already in the data file, just group-mean center ses
hsb$ses.centered <- hsb$ses - hsb$meanses
head(hsb)

# If you want to save the data so that you don't have to go through the
# steps to create it again, you can write the results to a file.
# There are other file formats that you can use (e.g., csv).
#
#write.table(hsb, 'hsb.txt', row.names=F, na='.')

######################################################################
# Model fitting                                                      #
#                                                                    #
# Note that "Std.Dev." in the Random effects table produced by lmer  #
# is NOT the standard error of the variance estimates. The latter    #
# is not computed by lmer but it is computed by SAS. There is some   #
# controversy over whether the se's of variance estimates should     #
# even be computed. We do not use them, so this is not a problem.    #
######################################################################
#
# Attach the data frame so variables can be referenced by name
#
attach(hsb)
#
# Random intercept / empty HLM / null model / random effects ANOVA
#
model.null <- lmer(mathach ~ 1 + (1 | id), data=hsb, REML=FALSE)
summary(model.null)

# Compute the ICC by cutting and pasting the variance estimates from the output
icc <- 8.553/(8.553 + 39.148)
icc

# Or extract the information needed to compute it
vars  <- as.data.frame(VarCorr(model.null))[4]
total <- sum(vars)
tau00 <- vars[1,1]
icc   <- tau00/total
icc

# Use the following function that I wrote:
icc.lmer <- function(modl){
  vars  <- as.data.frame(VarCorr(modl))[4]
  total <- sum(vars)
  tau00 <- vars[1,1]
  icc   <- tau00/total
  return(icc)
}

# Example of how to use the function
icc.lmer(model.null)
#
# Simple model
#
require(lmerTest)
model.simple <- lmer(mathach ~ 1 + ses + (1 | id), data=hsb, REML=FALSE)
summary(model.simple)
icc.lmer(model.simple)
#
# More complex model
#
model.complex <- lmer(mathach ~ 1 + ses.centered + minority + female + meanses
                      + himinty + pracad + disclim + sector + size + (1 | id),
                      data=hsb, REML=FALSE)
summary(model.complex)
icc.lmer(model.complex)
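#
# Optional: other ways to get df, p-values, and confidence intervals for the
# fixed effects. A minimal sketch, assuming your versions of lmerTest and lme4
# support the ddf argument of summary() and the method argument of confint();
# Kenward-Roger also needs the pbkrtest package installed.
#
summary(model.simple, ddf="Kenward-Roger")   # Kenward-Roger df instead of Satterthwaite
confint(model.simple, method="Wald")         # quick Wald CIs for the fixed effects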
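#
# Optional: since lmer does not report standard errors for the variance
# estimates (see the note above), a sketch of profile likelihood confidence
# intervals for them instead, assuming the profiling converges for this model;
# .sig01 and .sigma are reported on the standard deviation scale.
#
confint(model.null)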