#
# Chapter 2 -- Bayesian Computation With R
# Demonstrates Priors
#
# Remove all objects just to be safe
#
rm(list=ls(all=TRUE))
#
library(LearnBayes)
library(lattice)
#
midpoints <- seq(0.05, 0.95, by=0.1)
#
# Make up your own Prior -- I am putting the weight around
# .4, .5, .6
#
priorprob <- c(1,2,3,4,5,5,4,3,2,1)
priorprob <- priorprob/sum(priorprob)
#
# Call histprior to get vector of probabilities for the intervals
# (See page 27)
#
curve(histprior(x,midpoints,priorprob),from=0, to=1,xlab="",ylab="",lty=1,lwd=3,font=2,col="blue")
# Main title
mtext("Prior Distribution for Coin Probability",side=3,line=1.00,cex=1.2,font=2)
# x-axis title
mtext("Coin Probability",side=1,line=2.75,font=2,cex=1.2)
# y-axis title
mtext("Density",side=2,line=2.75,font=2,cex=1.2)
#
# Experiment -- 11 Heads and 9 Tails
#
windows()
#
curve(histprior(x,midpoints,priorprob)*dbeta(x,12,10),from=0, to=1,xlab="",ylab="",lty=1,lwd=3,font=2,col="red")
#
# Main title
mtext("Posterior Distribution for Coin Probability",side=3,line=1.00,cex=1.2,font=2)
# x-axis title
mtext("Posterior Coin Probability",side=1,line=2.75,font=2,cex=1.2)
# y-axis title
mtext("Density",side=2,line=2.75,font=2,cex=1.2)
#
# Sampling -- see page 28
#
p <- seq(0, 1, length=500)
postprob <- histprior(p,midpoints,priorprob)*dbeta(p,12,10)
postprob <- postprob/sum(postprob)
#
psample <- sample(p, replace = TRUE, prob = postprob)
windows()
hist(psample,xlab="",ylab="",main="")
#
# Main title
mtext("Simulated Draws From Posterior Distribution\n for Coin Probability",side=3,line=1.00,cex=1.2,font=2)
# x-axis title
mtext("Probability",side=1,line=2.75,font=2,cex=1.2)
# y-axis title
mtext("Frequency",side=2,line=2.75,font=2,cex=1.2)
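#
# Added sanity check (not in the original chapter script): for 11 heads and
# 9 tails the binomial likelihood is proportional to p^11 * (1-p)^9, and
# dbeta(x,12,10) differs from it only by the constant 1/beta(12,10). The
# ratio computed below should therefore be the same at every value of p,
# which confirms that using dbeta(x,12,10) above only rescales the posterior.
#
pcheck <- c(0.3, 0.5, 0.7)
dbeta(pcheck, 12, 10) / (pcheck^11 * (1 - pcheck)^9)
1 / beta(12, 10)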
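#
# Added illustration (not part of the code as given): once psample holds the
# simulated posterior draws, standard summaries can be read off directly,
# e.g. a posterior mean and a 90% probability interval for the coin probability.
#
mean(psample)
quantile(psample, c(0.05, 0.95))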