Skip to content

Bayesian Beta Distributed Coin Inference

Creative Commons License — aGrUM — interactive online version

build a fully bayesian beta distributed coin inference

Section titled “build a fully bayesian beta distributed coin inference”

This notebook is based on examples from Benjamin Datko.

The basic idea of this notebook is to show how you could assess the probability of heads for a coin, knowing a sequence of heads/tails.

import time
from pylab import *
import scipy.stats
import pyagrum as gum
import pyagrum.lib.notebook as gnb
# Fix the rendering size of graphs (and of inference displays) in the notebook.
gum.config["notebook", "default_graph_size"] = "12!"
gum.config["notebook", "default_graph_inference_size"] = "12!"

Fill Beta parameters with a re-parameterization

Section titled “Fill Beta parameters with a re-parameterization”

image.png

We propose a model where mu and nu are the parameters of a Beta distribution, which gives the distribution for the coins.

  • below are some useful definitions: $\alpha = \mu \nu$ and $\beta = (1 - \mu) \nu$

$\mu = \frac{\alpha}{\alpha + \beta}$

  • as in the Wikipedia article, we will have a uniform prior on μ and an exponential prior on ν
## the sequence of COINS
# Observed flip sequence: 1 = heads, 0 = tails.
serie = [1, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1]

# Discretization: each continuous variable (mu, nu, bias) is binned into NB_ values.
NB_ = 200
vmin, vmax = 0.001, 0.999        # support of the coin bias (open sub-interval of [0, 1])
pmin_mu, pmax_mu = 0.001, 0.999  # support of mu, the mean of the Beta
pmin_nu, pmax_nu = 1, 50         # support of nu, the Beta "sample size" (nu = alpha + beta)
# One observation node per flip. Derived from the data rather than hard-coded
# (was 16) so the model always matches the length of `serie`, as the second
# model below already does.
size_ = len(serie)

bn = gum.BayesNet("SEQUENCE OF COINS MODEL")
mu = bn.add(gum.NumericalDiscreteVariable("mu", "mean of the Beta distribution", pmin_mu, pmax_mu, NB_))
nu = bn.add(
    gum.NumericalDiscreteVariable("nu", "'sample size' of the Beta where nu = a + b > 0", pmin_nu, pmax_nu, NB_)
)
bias = bn.add(gum.NumericalDiscreteVariable("bias", "The bias of the coin", vmin, vmax, NB_))
hs = [bn.add(gum.RangeVariable(f"H{i}", "The hallucinations of coin flips", 0, 1)) for i in range(size_)]

# Structure: (mu, nu) -> bias -> every flip H_i.
bn.addArc(mu, bias)
bn.addArc(nu, bias)
for h in hs:
    bn.addArc(bias, h)

print(bn)
bn
BN{nodes: 19, arcs: 18, domainSize: 10^11.7196, dim: 7963598, mem: 61Mo 89Ko 128o}
G H8 H8 H1 H1 H11 H11 H2 H2 H12 H12 bias bias bias->H8 bias->H1 bias->H11 bias->H2 bias->H12 H9 H9 bias->H9 H4 H4 bias->H4 H5 H5 bias->H5 H0 H0 bias->H0 H6 H6 bias->H6 H15 H15 bias->H15 H14 H14 bias->H14 H3 H3 bias->H3 H13 H13 bias->H13 H7 H7 bias->H7 H10 H10 bias->H10 mu mu mu->bias nu nu nu->bias
# Exponential prior on nu (scipy.stats.expon, shifted with loc=2, scale=5).
bn.cpt(nu).fillFromDistribution(scipy.stats.expon, loc=2, scale=5)
# Uniform prior on mu over [pmin_mu, pmax_mu].
bn.cpt(mu).fillFromDistribution(scipy.stats.uniform, loc=pmin_mu, scale=pmax_mu - pmin_mu)
# Display both priors side by side.
gnb.flow.clear()
gnb.flow.add(gnb.getProba(bn.cpt(nu)), caption="Distribution for nu")
gnb.flow.add(gnb.getProba(bn.cpt(mu)), caption="Distribution for mu")
gnb.flow.display()
PyAgrum inline image
Distribution for nu
PyAgrum inline image
Distribution for mu
## <https://scicomp.stackexchange.com/a/10800>
# Fill P(bias | mu, nu) from a Beta with the re-parameterization
# alpha = mu*nu, beta = (1-mu)*nu. Timed, since this fills NB_^3 entries.
tic = time.time()
bn.cpt("bias").fillFromDistribution(scipy.stats.beta, a="mu*nu", b="(1-mu)*nu")
elapsed = time.time() - tic
print(f"Filling {NB_}^3 parameters in {elapsed:5.3f}s")
Filling 200^3 parameters in 2.640s
# Each flip is a Bernoulli draw whose parameter is the (discretized) coin bias.
for flip in hs:
    bn.cpt(flip).fillFromDistribution(scipy.stats.bernoulli, p="bias")
gnb.showInference(bn, size="17!")

svg

# Summary of the network once all CPTs are filled (structure is unchanged).
print(bn)
BN{nodes: 19, arcs: 18, domainSize: 10^11.7196, dim: 7963598, mem: 61Mo 89Ko 128o}
# Clamp each observation node H_i to the corresponding flip of the sequence.
coin_evidence = {f"H{i}": flip for i, flip in enumerate(serie)}
gnb.showInference(bn, evs=coin_evidence)

svg

# Exact inference, conditioned on the observed flips.
ie = gum.LazyPropagation(bn)
ie.setEvidence(coin_evidence)
ie.makeInference()

from scipy.ndimage import center_of_mass

# MAP for the coin bias: find the mode of its posterior, then map that index
# back to the variable's numerical label.
map_bias = bn["bias"].label(ie.posterior("bias").argmax()[0][0]["bias"])
# Summarize nu's posterior by its center of mass rather than by its mode.
com = center_of_mass(ie.posterior("nu").toarray())[0]
# MAP for mu, the mean of the Beta prior on the bias.
map_mu = bn["mu"].label(ie.posterior("mu").argmax()[0][0]["mu"])
print(f"MAP for mu : {map_mu}")
print(f"center of mass for nu : {com}")
print(f"MAP for bias : {map_bias}")
MAP for mu : 0.4473
center of mass for nu : 26.67889867211198
MAP for bias : 0.4373
## With a smaller serie
# Rerun the whole pipeline on a shorter flip sequence.
serie = [1, 0, 0, 0, 0, 0, 1]

# Rebuild the same model, sized to the new sequence.
bn = gum.BayesNet("SEQUENCE OF COINS MODEL")
mu = bn.add(gum.NumericalDiscreteVariable("mu", "mean of the Beta distribution", pmin_mu, pmax_mu, NB_))
nu = bn.add(
    gum.NumericalDiscreteVariable("nu", "'sample size' of the Beta where nu = a + b > 0", pmin_nu, pmax_nu, NB_)
)
bias = bn.add(gum.NumericalDiscreteVariable("bias", "The bias of the coin", vmin, vmax, NB_))
hs = [bn.add(gum.RangeVariable(f"H{i}", "The hallucinations of coin flips", 0, 1)) for i in range(len(serie))]
bn.addArc(mu, bias)
bn.addArc(nu, bias)
for flip in hs:
    bn.addArc(bias, flip)

# Same priors and conditional distributions as above.
bn.cpt(nu).fillFromDistribution(scipy.stats.expon, loc=2, scale=5)
bn.cpt(mu).fillFromDistribution(scipy.stats.uniform, loc=pmin_mu, scale=pmax_mu - pmin_mu)
bn.cpt("bias").fillFromDistribution(scipy.stats.beta, a="mu*nu", b="(1-mu)*nu")
for flip in hs:
    bn.cpt(flip).fillFromDistribution(scipy.stats.bernoulli, p="bias")

# Condition on the short sequence and display the posteriors.
coin_evidence = {f"H{i}": flip for i, flip in enumerate(serie)}
gnb.showInference(bn, evs=coin_evidence)

svg