#
# Chapter 3 -- Bayesian Computation With R
# Cauchy Problem
#
# Remove all objects just to be safe
#
rm(list = ls(all = TRUE))
#
library(LearnBayes)
library(lattice)
#
# Observed Data
#
y <- c(0, 10, 9, 8, 11, 3, 3, 8, 8, 11)
#
# Grid of values for theta
#
theta <- seq(-2, 12, .1)
#
# Posterior Distribution: under a flat prior, the posterior at each grid
# point is proportional to the product of Cauchy(theta, 1) likelihood
# terms 1/(1 + (y - theta)^2); the 1/pi constants cancel on normalization.
#
post <- NULL
kk <- length(theta)
i <- 0
while (i < kk) {
  i <- i + 1
  j <- 0
  prodlik <- 1.0          # renamed from "sum" so it does not shadow base::sum
  while (j < length(y)) {
    j <- j + 1
    prodlik <- prodlik * (1.0/(1.0 + (y[j] - theta[i]) * (y[j] - theta[i])))
  }
  post[i] <- prodlik
}
#
# Normalize Posterior
#
post <- post/sum(post)
yy <- cbind(theta, post)
#
plot(theta, post, type = "h", col = "blue", lwd = 3,
     xlab = "", ylab = "", main = "", font = 2)
#
# Main title: "side" picks the margin (1=bottom, 2=left, 3=top, 4=right)
mtext("Posterior Distribution for Theta", side = 3, line = 1.00, cex = 1.2, font = 2)
# x-axis title
mtext("Theta", side = 1, line = 2.75, cex = 1.2)
# y-axis title
mtext("Density", side = 2, line = 2.5, cex = 1.2)
#
# Posterior mean: E(theta)
#
thetamean <- sum(theta * post)
#
# Second moment: E(theta^2)
#
thetaXX <- sum(theta * theta * post)
#
# VAR(theta) = E(theta^2) - E(theta)^2
#
thetavar <- thetaXX - thetamean * thetamean
thetasd <- sqrt(thetavar)
#
# Recompute the posterior with a vectorized product inside a single loop,
# replacing the earlier commented-out "kludge" scratch work
#
py <- NULL
for (j in 1:length(theta)) {
  py[j] <- prod(1/(1 + (y - theta[j])^2))
}
py <- py/sum(py)
jamie <- cbind(theta, py)
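#
# Optional check (a minimal sketch, not part of the original script): it
# recomputes the grid posterior without explicit loops using outer(), then
# approximates a 95% equal-tail credible interval by sampling from the grid.
# The object names (lik, post2, draws), the seed, the draw count (10000),
# and the interval level are illustrative assumptions.
#
lik <- 1/(1 + outer(theta, y, "-")^2)   # lik[i, j] = Cauchy term for y[j] at theta[i]
post2 <- apply(lik, 1, prod)            # row products give the unnormalized posterior
post2 <- post2/sum(post2)
all.equal(post, post2)                  # should agree with the loop-based result
#
set.seed(1)
draws <- sample(theta, size = 10000, replace = TRUE, prob = post2)
quantile(draws, c(0.025, 0.975))        # approximate 95% credible interval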