% vim: tw=50
% 05/02/2022 11AM
\subsubsection*{Discrete Probability Distributions}
$\Omega$ finite.

\subsubsection*{1. Bernoulli Distribution}
(``(biased) coin toss''). \\
$X \sim \mathrm{Bern}(p)$, $p \in [0, 1]$.
\[ \mathrm{Im}(X) = \{0, 1\} \]
\[ p_X(1) = \PP(X = 1) = p \]
\[ p_X(0) = \PP(X = 0) = 1 - p .\]
\ul{Key example}: $\mathbbm{1}_A \sim \mathrm{Bern}(p)$ with $p = \PP(A)$.

\subsubsection*{2. Binomial Distribution}
$X \sim \mathrm{Bin}(n, p)$, $n \in \ZZ^+$, $p \in [0, 1]$. \\
(``Toss a coin $n$ times, count the number of heads''.)
\[ \mathrm{Im}(X) = \{0, 1, \dots, n\} \]
\[ p_X(k) = \PP(X = k) = {n \choose k} p^k (1 - p)^{n - k} \]
Check:
\[ \sum_{k = 0}^n p_X(k) = (p + (1 - p))^n = 1 \]

\subsubsection*{More than one Random Variable}
\textbf{Motivation}: Roll a die. Outcome $X \in \{1, 2, \dots, 6\}$. Events:
\[ A = \{1 \text{ or } 2\}, \qquad B = \{1 \text{ or } 2 \text{ or } 3\}, \qquad C = \{1 \text{ or } 3 \text{ or } 5\} .\]
\[ \mathbbm{1}_A \sim \mathrm{Bern} \left( \frac{1}{3} \right), \qquad \mathbbm{1}_B \sim \mathrm{Bern} \left( \frac{1}{2} \right), \qquad \mathbbm{1}_C \sim \mathrm{Bern} \left( \frac{1}{2} \right) \]
\begin{note*}
$\mathbbm{1}_A \le \mathbbm{1}_B$ for all outcomes, but ``$\mathbbm{1}_A \le \mathbbm{1}_C$ for all outcomes'' is \emph{false}: on the outcome $2$ we have $\mathbbm{1}_A = 1 > 0 = \mathbbm{1}_C$.
\end{note*}
\begin{definition*}
$X_1, \dots, X_n$ discrete random variables. Say $X_1, \dots, X_n$ are \emph{independent} if
\[ \PP(X_1 = x_1, \dots, X_n = x_n) = \PP(X_1 = x_1) \cdots \PP(X_n = x_n) \qquad \forall\,\, x_1, \dots, x_n \in \RR \]
(suffices to check $\forall\,\, x_i \in \mathrm{Im}(X_i)$).
\end{definition*}
\begin{example*}
$X_1, \dots, X_n$ independent random variables, each with the Bernoulli$(p)$ distribution. Study $S_n = X_1 + \cdots + X_n$. Then
\begin{align*}
\PP(S_n = k) &= \sum_{\substack{x_1 + \cdots + x_n = k\\x_i \in \{0, 1\}}} \PP(X_1 = x_1, \dots, X_n = x_n) \\
&= \sum_{x_1 + \cdots + x_n = k} \PP(X_1 = x_1) \cdots \PP(X_n = x_n) \\
&= \sum_{x_1 + \cdots + x_n = k} p^{|\{i : x_i = 1\}|} (1 - p)^{|\{i : x_i = 0\}|} \\
&= \sum_{x_1 + \cdots + x_n = k} p^k (1 - p)^{n - k} \\
&= {n \choose k} p^k (1 - p)^{n - k}
\end{align*}
(there are ${n \choose k}$ binary strings $(x_1, \dots, x_n)$ with exactly $k$ ones), so $S_n \sim \mathrm{Bin}(n, p)$.
\end{example*}
\begin{example*}[Non-example]
$(\sigma(1), \sigma(2), \dots, \sigma(n))$ uniform in $\Sigma_n$, the set of permutations of $\{1, \dots, n\}$.
\begin{claim*}
$\sigma(1)$ and $\sigma(2)$ are \emph{not} independent.
\end{claim*}
\noindent It suffices to find $i_1$, $i_2$ such that
\[ \PP(\sigma(1) = i_1, \sigma(2) = i_2) \neq \PP(\sigma(1) = i_1) \PP(\sigma(2) = i_2) , \]
for example
\[ \PP(\sigma(1) = 1, \sigma(2) = 1) = 0 \neq \frac{1}{n} \times \frac{1}{n} = \PP(\sigma(1) = 1) \PP(\sigma(2) = 1) \]
\end{example*}
\noindent \ul{Consequence of definition}: if $X_1, \dots, X_n$ are independent, then for all countable $A_1, \dots, A_n \subset \RR$,
\[ \PP(X_1 \in A_1, \dots, X_n \in A_n) = \PP(X_1 \in A_1) \cdots \PP(X_n \in A_n) \]

\subsubsection*{$\Omega = \NN$}
``Ways of choosing a random integer''

\subsubsection*{3. Geometric Distribution}
(``waiting for success'') \\
$X \sim \mathrm{Geom}(p)$, $p \in (0, 1]$. \\
(``Toss a coin with $\PP(\text{heads}) = p$ until a head appears. Count how many trials were needed.'')
\[ \mathrm{Im}(X) = \{1, 2, \dots\} \]
\[ p_X(k) = \PP((k - 1) \text{ failures, then success on the $k$-th}) = (1 - p)^{k - 1} p \]
Check:
\[ \sum_{k \ge 1} (1 - p)^{k - 1} p = p \sum_{l \ge 0} (1 - p)^l = \frac{p}{1 - (1 - p)} = 1 \]
\begin{note*}
We could alternatively ``count how many failures before the first success''; call this $Y$, so $Y = X - 1$.
\[ \mathrm{Im}(Y) = \{0, 1, 2, \dots\} \]
\[ p_Y(k) = \PP(k \text{ failures, then success on the $(k + 1)$-th}) = (1 - p)^k p \]
Check:
\[ \sum_{k \ge 0} (1 - p)^k p = 1 \]
\end{note*}
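\begin{example*}
(An added illustration.) Waiting for the first six when rolling a fair die repeatedly: $X \sim \mathrm{Geom} \left( \frac{1}{6} \right)$, so for instance
\[ p_X(3) = \left( \frac{5}{6} \right)^2 \times \frac{1}{6} = \frac{25}{216} .\]
More generally, $X > k$ holds exactly when the first $k$ trials all fail, so
\[ \PP(X > k) = (1 - p)^k , \]
e.g.\ $\PP(X > 6) = \left( \frac{5}{6} \right)^6 \approx 0.335$: after six rolls, a six has still not appeared about a third of the time.
\end{example*}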
\subsubsection*{4. Poisson Distribution}
$\lambda \in (0, \infty)$.
\[ X \sim \mathrm{Po}(\lambda) \]
\[ \mathrm{Im}(X) = \{0, 1, 2, \dots\} \]
\[ \PP(X = k) = e^{-\lambda} \frac{\lambda^k}{k!} \qquad \forall\,\, k \ge 0 \]
\begin{note*}
\[ \sum_{k \ge 0} \PP(X = k) = e^{-\lambda} \sum_{k \ge 0} \frac{\lambda^k}{k!} = e^{-\lambda} e^\lambda = 1 \]
\end{note*}
\noindent \ul{Motivation}: Consider $X_n \sim \mathrm{Bin} \left( n, \frac{\lambda}{n} \right)$: split a time interval into $n$ equal subintervals. [Figure: a time interval divided into $n$ subintervals, an arrival marked in some of them.]
\begin{itemize}
\item Probability of an arrival in each subinterval is $p = \frac{\lambda}{n}$, independently across subintervals.
\item Total number of arrivals is $X_n$.
\end{itemize}
\[ \PP(X_n = k) = {n \choose k} \left( \frac{\lambda}{n} \right)^k \left( 1 - \frac{\lambda}{n} \right)^{n - k} \]
Fix $k$, let $n \to \infty$:
\[ \PP(X_n = k) = \ub{\frac{n!}{n^k (n - k)!}}_{\to 1} \times \frac{\lambda^k}{k!} \times \ub{\left( 1 - \frac{\lambda}{n} \right)^n}_{\to e^{-\lambda}} \times \ub{\left( 1 - \frac{\lambda}{n} \right)^{-k}}_{\to 1} \]
so
\[ \PP(X_n = k) \to e^{-\lambda} \frac{\lambda^k}{k!} \]
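\noindent For example (an added numerical check): with $\lambda = 1$ and $k = 0$ this reads $\PP(X_n = 0) = \left( 1 - \frac{1}{n} \right)^n \to e^{-1} \approx 0.368$, and already for $n = 100$ we get $\left( 1 - \frac{1}{100} \right)^{100} \approx 0.366$.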