### Gibbs sampler for Bayesian linear regression ###########################
### Model : Y = X beta + e,  e ~ N(0, sigma2 * I)
### Priors: beta ~ N(m0, C0),  sigma2 ~ IG(nu0/2, nusig0/2)

## Simulated data ##########################################################
p <- 9
n <- 100
sigma2true <- 1
betatrue <- rnorm(p, 0, 3)
X <- matrix(rnorm(n * p, 0, 1), n, p)
Y <- X %*% betatrue + rnorm(n, 0, sqrt(sigma2true))

## Frequentist reference fit ###############################################
MLE <- solve(t(X) %*% X) %*% t(X) %*% Y
e <- Y - X %*% MLE
# Unbiased residual variance: X has p columns and no separate intercept,
# so residual degrees of freedom are n - p (original divided by n - p - 1,
# an off-by-one that only applies when an intercept is fit in addition).
sig2MLE <- sum(e^2) / (n - p)

# Sufficient statistics precomputed once; reused every Gibbs iteration
tX <- t(X) %*% X
tXY <- t(X) %*% Y

## Priors ##################################################################
m0 <- rep(0, p)       # prior mean for beta
C0 <- diag(100, p)    # vague prior covariance for beta
iC0 <- solve(C0)
nu0 <- 3              # prior degrees of freedom for sigma2
nusig0 <- nu0 * 2     # prior scale: nu0 * sig02 with prior guess sig02 = 2

## MCMC setup ##############################################################
M <- 1000
BETAs <- matrix(0, M, p)
SIGs <- rep(0, M)

## Initial value: draw sigma2 from its IG prior ############################
sigma2 <- 1 / rgamma(1, nu0 / 2, nusig0 / 2)

for (i in 1:M) {
  ## Full conditional for beta: N(m1, C1)
  C1 <- solve(tX / sigma2 + iC0)
  m1 <- C1 %*% (tXY / sigma2 + iC0 %*% m0)
  # Draw via the Cholesky factor of C1 (lower-triangular times std normals)
  beta <- m1 + t(chol(C1)) %*% rnorm(p)

  ## Full conditional for sigma2: IG(nu1/2, nusig1/2)
  e <- Y - X %*% beta
  nu1 <- n + nu0
  nusig1 <- nusig0 + sum(e^2)
  sigma2 <- 1 / rgamma(1, nu1 / 2, nusig1 / 2)

  ## Save current state of the chain
  BETAs[i, ] <- beta
  SIGs[i] <- sigma2

  if (i %% 100 == 0) print(i)  # progress every 100 draws (was every draw)
}

## Marginal posteriors of beta, with the MLE marked ########################
par(mfrow = c(3, 3))
for (i in 1:p) {
  hist(BETAs[, i], main = paste("Beta", i))
  abline(v = MLE[i], col = 2, lwd = 2)
}

par(mfrow = c(1, 1))
hist(sqrt(SIGs), main = "sigma")
abline(v = sqrt(sig2MLE), col = 2, lwd = 2)

## Trace plots: first three betas and sigma2 ###############################
par(mfrow = c(2, 2))
for (i in 1:3) {
  plot(BETAs[, i], type = "l")
  abline(h = MLE[i], col = 2, lwd = 2)
}
plot(SIGs, type = "l")
abline(h = sig2MLE, col = 2, lwd = 2)

## Posterior of the precision 1/sigma2, prior density overlaid #############
ll <- seq(0, 2, by = 0.01)
par(mfrow = c(1, 1))
hist(1 / SIGs, prob = TRUE, xlim = c(0, 2), main = "precision")
lines(ll, dgamma(ll, nu0 / 2, nusig0 / 2), col = 2, lwd = 2)  # prior Gamma(nu0/2, nusig0/2)