# %%
import numpy as np
from scipy.stats import norm
from matplotlib import pyplot as plt
import seaborn as sns
sns.set()
# %% [markdown]
# The following function calculates the KL divergence between two distributions, $P$ and $Q$, given as arrays of probabilities.
#
# The KL formula that is being used here is:
#
# $D_{KL}(P \parallel Q) = \sum_{x} P(x) \log {P(x) \over Q(x)}$
# %%
def kl(p, q):
    """
    Calculates relative entropy, a.k.a. Kullback–Leibler divergence.

    Terms where p is zero contribute nothing, matching the limit
    p * log(p / q) -> 0 as p -> 0.
    """
    return np.sum(np.where(p != 0, p * np.log(p / q), 0))
# 1. generate example distributions
dx = 0.001
x = np.arange(-10, 10, dx)
p = norm.pdf(x, 0, 1)
q = norm.pdf(x, 1, 2)
# 2. calculate the KL divergence; the densities are sampled on a grid, so
# scale the sum by dx to approximate the integral
kl_score = kl(p, q) * dx
# 3. plot the distributions
plt.title(f'KL(P||Q) = {kl_score:.2f}')
plt.plot(x, p, label='P: N(0, 1)')
plt.plot(x, q, color="red", label='Q: N(1, 2)')
plt.legend()
plt.show()
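# %% [markdown]
# As a sanity check: the KL divergence between two univariate Gaussians has a
# closed form,
#
# $D_{KL}(\mathcal{N}(\mu_1, \sigma_1^2) \parallel \mathcal{N}(\mu_2, \sigma_2^2)) = \ln {\sigma_2 \over \sigma_1} + {\sigma_1^2 + (\mu_1 - \mu_2)^2 \over 2 \sigma_2^2} - {1 \over 2}$
#
# The sketch below (not part of the original notebook; `gaussian_kl` is a
# hypothetical helper) compares the numerical estimate above with the analytic
# value.
# %%
def gaussian_kl(mu1, sigma1, mu2, sigma2):
    """Closed-form KL(N(mu1, sigma1^2) || N(mu2, sigma2^2))."""
    return (np.log(sigma2 / sigma1)
            + (sigma1 ** 2 + (mu1 - mu2) ** 2) / (2 * sigma2 ** 2)
            - 0.5)

print(f"numerical estimate: {kl_score:.4f}")
print(f"analytic value:     {gaussian_kl(0, 1, 1, 2):.4f}")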
# %%
# [snippet] plot the Poisson CDF (mu = 0.9) generated by scipy
from scipy.stats import poisson as poi
ks = np.arange(0, 10)
r = poi.cdf(ks, 0.9)
# the CDF of a discrete distribution is a step function
plt.step(ks, r, where="post", color="green")
plt.show()
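# %% [markdown]
# The Poisson CDF at $k$ is just the running sum of the PMF up to $k$. A quick
# numerical check (a sketch, not part of the original notebook):
# %%
mu, k = 0.9, 3
cdf_direct = poi.cdf(k, mu)
cdf_summed = poi.pmf(np.arange(k + 1), mu).sum()
print(np.isclose(cdf_direct, cdf_summed))  # True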
x = np.arange(20)
lambda_ = [1.5, 4.5]
colors = ["#348ABD", "#A60628"]
# one bar chart per lambda (raw strings so \lambda is not read as an escape)
for lam, c in zip(lambda_, colors):
    plt.bar(x, poi.pmf(x, lam), color=c, label=rf"$\lambda = {lam}$",
            alpha=0.60, lw=3)
plt.xticks(x)
plt.ylabel("Probability of $k$")
plt.xlabel("$k$")
plt.legend()
plt.show()
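# %% [markdown]
# To see the PMF agree with sampling, draw from Poisson($\lambda = 4.5$) and
# compare empirical frequencies with the theoretical probabilities (a sketch,
# not part of the original notebook; the sample size is arbitrary).
# %%
rng = np.random.default_rng(0)
samples = rng.poisson(4.5, size=10_000)
freqs = np.bincount(samples, minlength=20)[:20] / len(samples)
plt.bar(x, freqs, alpha=0.6, label="empirical")
plt.plot(x, poi.pmf(x, 4.5), "ko-", label="theoretical PMF")
plt.legend()
plt.show()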
# %%
# [snippet] plot multiple exponential distributions (EXP)
from scipy.stats import expon as expo
x = np.linspace(0, 4, 100)
colors = ['red', 'blue', 'green']
lambdas = [0.5, 1.0, 1.5]
for lam, c in zip(lambdas, colors):
    # scipy parametrizes the exponential by scale = 1 / lambda
    y = expo.pdf(x, scale=1.0 / lam)
    plt.plot(x, y, lw=2, color=c, label=rf"$\lambda = {lam}$")
plt.legend()
plt.show()
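# %% [markdown]
# The exponential and Poisson distributions describe the same process from two
# angles: if interarrival times are Exponential($\lambda$), the number of
# events per unit interval is Poisson($\lambda$). A quick simulation checking
# this (a sketch, not part of the original notebook):
# %%
lam = 1.5
rng = np.random.default_rng(1)
# cumulative arrival times of events with Exponential(lam) interarrival gaps
arrivals = np.cumsum(rng.exponential(scale=1.0 / lam, size=200_000))
n_intervals = int(arrivals[-1])
# number of events landing in each complete unit interval
events = np.bincount(arrivals[arrivals < n_intervals].astype(int))
print(f"empirical mean events per interval: {events.mean():.3f} (expected {lam})")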
# %%