Skip to content

Instantly share code, notes, and snippets.

@cdeitrick
Created November 20, 2025 19:40
Show Gist options
  • Select an option

  • Save cdeitrick/ed7b0a6350636013b63e9af5a910efea to your computer and use it in GitHub Desktop.

Select an option

Save cdeitrick/ed7b0a6350636013b63e9af5a910efea to your computer and use it in GitHub Desktop.
Bayesian Data Analysis Assignment 3
import numpy as np
from scipy.stats import beta
import scipy.integrate as integrate
def generate_posterior(prior, fraction: float):
    """Return the posterior density obtained by updating *prior* with a
    Bernoulli-style likelihood for an observed success fraction.

    Args:
        prior: Callable density of a single float parameter on [0, 1]
            (e.g. a Beta pdf).
        fraction: Observed success fraction, used as the Bernoulli "data".

    Returns:
        Callable posterior density, normalized over the support [0, 1].
    """
    # Likelihood of parameter value s given the observed fraction.
    def likelihood(s):
        return (s ** fraction) * ((1 - s) ** (1 - fraction))

    # Unnormalized posterior: numerator of Bayes' rule.
    def unnormalized(s):
        return prior(s) * likelihood(s)

    # BUG FIX: the parameter lives on [0, 1].  Integrating over
    # (-inf, inf) makes quad evaluate s ** fraction at negative s, which
    # yields complex/NaN values and breaks the quadrature.
    total_probability = integrate.quad(unnormalized, 0.0, 1.0)[0]

    # Posterior via Bayes' rule.
    return lambda s: unnormalized(s) / total_probability
def expected_loss(loss_matrix, posterior):
    """Compute the expected loss of each action under the posterior.

    Args:
        loss_matrix: Matrix where loss_matrix[a][0] is the loss of action a
            when the event occurs (probability p = posterior mean) and
            loss_matrix[a][1] is the loss when it does not (probability 1 - p).
        posterior: Callable posterior density over a parameter on [0, 1].

    Returns:
        Tuple of expected losses, one entry per action (row of loss_matrix).
    """
    # Posterior mean of the parameter = probability of the event.
    # BUG FIX: integrate over the support [0, 1] rather than (-inf, inf);
    # evaluating the posterior at negative s produces complex/NaN values
    # for fractional powers and breaks the quadrature.
    p = integrate.quad(lambda s: s * posterior(s), 0.0, 1.0)[0]
    q = 1 - p  # probability of the complementary outcome

    # Average each action's row of losses over the two outcomes.
    # (Leftover debug prints of p and q removed.)
    return tuple(row[0] * p + row[1] * q for row in loss_matrix)
def main():
    """Build a Beta(3, 2) prior, update it on an observed success
    fraction of 0.60, and print the expected loss of each action."""
    prior_rv = beta(3, 2)
    posterior = generate_posterior(prior_rv.pdf, 0.60)
    loss_matrix = [[5, 50], [20, 0]]
    print(expected_loss(loss_matrix, posterior))


if __name__ == "__main__":
    main()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment