dwaine <- read.table("F:\\sta4210\\dwaine_studios.txt",header=F,col.names=c("Y","X1","X2"))   # Read in the Dwaine Studios data
attach(dwaine)                                       # Attach the data frame so Y, X1, X2 can be referenced directly
dwaine                                               # Print the data
plot(dwaine)                                         # Scatterplot matrix of Y, X1, X2
(n <- length(Y))                                     # Number of observations
X0 <- rep(1,n)                                       # Column of 1s for the intercept
X <- as.matrix(cbind(X0,X1,X2))                      # Design matrix X
Y <- as.matrix(Y)                                    # Response as an n x 1 matrix
(XX <- t(X) %*% X)                                   # Compute X'X (t(X) is the transpose of X, %*% performs matrix multiplication)
(XY <- t(X) %*% Y)                                   # Compute X'Y
(XXI <- solve(XX))                                   # Compute (X'X)^(-1) (solve obtains the inverse of a square, full-rank matrix)
(b <- XXI %*% XY)                                    # Compute b = (X'X)^(-1)X'Y
J_n <- matrix(rep(1/n,n^2),byrow=T,ncol=n)           # Obtain the (1/n)J matrix
I_n <- diag(n)                                       # Obtain the identity matrix
H <- X %*% XXI %*% t(X)                              # Compute the hat matrix
(SSTO <- t(Y) %*% (I_n - J_n) %*% Y)                 # Compute the Total (Corrected) Sum of Squares
(SSE <- t(Y) %*% (I_n - H) %*% Y)                    # Compute the Error Sum of Squares
(SSR <- t(Y) %*% (H - J_n) %*% Y)                    # Compute the Regression Sum of Squares
(df_TO <- n-1)                                       # Obtain the Total Degrees of Freedom
(df_E <- n-ncol(X))                                  # Obtain the Error Degrees of Freedom
(df_R <- ncol(X)-1)                                  # Obtain the Regression Degrees of Freedom
(MSE <- SSE/df_E)                                    # Compute the Mean Square Error
(MSR <- SSR/df_R)                                    # Compute the Mean Square Regression
(F_obs <- MSR/MSE)                                   # Compute F* for testing H0: beta1=beta2=0
(F_05 <- qf(.95,df_R,df_E))                          # Obtain critical F-value (alpha=0.05; numerator df=df_R, denominator df=df_E)
(P_F_obs <- 1-pf(F_obs,df_R,df_E))                   # Obtain the P-value for the F-test
(R2 <- SSR/SSTO)                                     # Compute the Coefficient of Multiple Determination
(s2_b <- MSE[1,1]*XXI)                               # Compute the Variance-Covariance Matrix of the Regression Coefficients
b1 <- b[2]                                           # Retrieve b1 from the vector b
b2 <- b[3]                                           # Retrieve b2 from the vector b
(b1_95CI <- c(b1-qt(.975,df_E)*sqrt(s2_b[2,2]),b1+qt(.975,df_E)*sqrt(s2_b[2,2])))   # 95% CI for beta1
(b2_95CI <- c(b2-qt(.975,df_E)*sqrt(s2_b[3,3]),b2+qt(.975,df_E)*sqrt(s2_b[3,3])))   # 95% CI for beta2
xh1 <- matrix(c(1,65.4,17.6),ncol=1)                 # Set up X_h vector when X1=65.4, X2=17.6
(yh1 <- t(xh1) %*% b)                                # Compute Y-hat when X1=65.4, X2=17.6
(s2_yh1 <- t(xh1) %*% s2_b %*% xh1)                  # Compute s2{Y-hat} when X1=65.4, X2=17.6
(s_yh1 <- sqrt(s2_yh1))                              # Compute s{Y-hat} when X1=65.4, X2=17.6
(yh1_95CI <- c(yh1-qt(.975,df_E)*s_yh1,yh1+qt(.975,df_E)*s_yh1))                     # 95% CI for the mean response when X1=65.4, X2=17.6
xhpredA <- matrix(c(1,65.4,17.6),ncol=1)             # Set up X_h vector when X1=65.4, X2=17.6 for City A
xhpredB <- matrix(c(1,53.1,17.7),ncol=1)             # Set up X_h vector when X1=53.1, X2=17.7 for City B
(yhpredA <- t(xhpredA) %*% b)                        # Obtain the predicted value for City A
(yhpredB <- t(xhpredB) %*% b)                        # Obtain the predicted value for City B
(s2_yhpredA <- MSE + (t(xhpredA) %*% s2_b %*% xhpredA))   # Compute s2{yhpredA}
(s2_yhpredB <- MSE + (t(xhpredB) %*% s2_b %*% xhpredB))   # Compute s2{yhpredB}
(s_yhpredA <- sqrt(s2_yhpredA))                      # Compute s{yhpredA}
(s_yhpredB <- sqrt(s2_yhpredB))                      # Compute s{yhpredB}
(Bon_90PI_A <- c(yhpredA-qt(1-.10/4,df_E)*s_yhpredA,yhpredA+qt(1-.10/4,df_E)*s_yhpredA))   # Bonferroni PI for City A (joint 90% PIs for both cities)
(Bon_90PI_B <- c(yhpredB-qt(1-.10/4,df_E)*s_yhpredB,yhpredB+qt(1-.10/4,df_E)*s_yhpredB))   # Bonferroni PI for City B (joint 90% PIs for both cities)
(Scheffe_90_PI_A <- c(yhpredA-sqrt(2*qf(.90,2,df_E))*s_yhpredA,yhpredA+sqrt(2*qf(.90,2,df_E))*s_yhpredA))   # Scheffe PI for City A (joint 90% PIs for both cities)
(Scheffe_90_PI_B <- c(yhpredB-sqrt(2*qf(.90,2,df_E))*s_yhpredB,yhpredB+sqrt(2*qf(.90,2,df_E))*s_yhpredB))   # Scheffe PI for City B (joint 90% PIs for both cities)

############### Regression with lm Function

dwaine.reg1 <- lm(Y~X1+X2)       # Fit the multiple regression of Y on X1 and X2
summary(dwaine.reg1)             # Coefficient estimates, t-tests, R^2, and overall F-test
anova(dwaine.reg1)               # Sequential (Type I) ANOVA table
drop1(dwaine.reg1)               # Single-term deletions (sum of squares and AIC for dropping each predictor)
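
############### Cross-check with Built-in Functions
# A minimal sketch, assuming the objects created above (dwaine.reg1, the attached
# X1 and X2) are still in the workspace; newX is a helper data frame introduced
# here for illustration. The built-in vcov(), confint(), and predict() functions
# should reproduce the matrix-based results (s2_b, b1_95CI/b2_95CI, yh1_95CI, and
# the unadjusted prediction limits). Note that predict() returns pointwise
# intervals only; the Bonferroni and Scheffe adjustments above are not applied.
vcov(dwaine.reg1)                                            # Should match s2_b
confint(dwaine.reg1, level=0.95)                             # Should match b1_95CI and b2_95CI
newX <- data.frame(X1=c(65.4,53.1), X2=c(17.6,17.7))         # X_h values for City A and City B
predict(dwaine.reg1, newdata=newX, interval="confidence", level=0.95)   # 95% CIs for the mean response
predict(dwaine.reg1, newdata=newX, interval="prediction", level=0.90)   # Unadjusted 90% PIs for new observations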