% vim: tw=50
% 19/02/2022 11AM
\subsubsection*{Unbounded Random Walk: ``Gambler's Ruin''}
\begin{center}
  \includegraphics[width=0.6\linewidth]{images/985fcaca943111ec.png}
\end{center}
\begin{align*}
  \PP_x(\text{hit 0})
  &= \lim_{a \to \infty} \PP_x(\text{hit 0 before $a$}) \\
  &= \begin{cases}
       \left( \frac{q}{p} \right)^x & p > q \\
       1 & p < q \\
       1 & p = q = \half
     \end{cases}
\end{align*}
\[ p = \half : \EE_x[\text{time to hit 0}] \ge \EE_x[\text{time to hit 0 or $a$}] = x(a - x) \]
which $\to \infty$ as $a \to \infty$.
\myskip
\ul{Key conclusion}: $T_x$ (time to hit 0 from $x$) is for $p = \half$:
\begin{itemize}
  \item finite with probability $= 1$
  \item infinite expectation
\end{itemize}
\begin{note*}[non-examinable]
  Alternative derivation of $\EE[T_1] = \infty$.
  \begin{center}
    \includegraphics[width=0.6\linewidth]{images/1e23555a943211ec.png}
  \end{center}
  ``Random Walk $2 \mapsto 1$'' = ``Random Walk $1 \mapsto 0$'' + 1
  \[ \EE[T_1] = \half \times 1 + \half(1 + \ub{\EE[T_2]}_{2\EE[T_1]}) \]
  \[ \EE[T_1] = 1 + \EE[T_1] \]
  so $\EE[T_1] = \infty$.
\end{note*}
\subsubsection*{Generating Functions}
\ul{Setting}: $X$ is a random variable taking values in $\{0, 1, 2, \dots\}$.
\begin{definition*}
  The \emph{Probability Generating Function} of $X$ is
  \[ G_X(z) = \EE[z^X] = \sum_{k \ge 0} z^k \PP(X = k) .\]
  Analytic comment: $G_X \colon (-1, 1) \to \RR$.
\end{definition*}
\noindent
\ul{Idea}: ``To \emph{encode} the distribution of $X$ as a function with nice analytic properties''.
\setcounter{customexample}{0}
\begin{example}
  $X \sim \mathrm{Bern}(p)$
  \[ G_X(z) = z^0 \PP(X = 0) + z^1 \PP(X = 1) = (1 - p) + pz \]
\end{example}
\begin{example*}
  $X \sim \mathrm{Bin}(n, p)$ --- we will save this for later.
\end{example*}
\begin{example}
  $X \sim \mathrm{Poisson}(\lambda)$
  \begin{align*}
    G_X(z) &= \sum_{k \ge 0} z^k e^{-\lambda} \frac{\lambda^k}{k!} \\
    &= e^{-\lambda} \sum_{k \ge 0} \frac{(\lambda z)^k}{k!} \\
    &= e^{-\lambda} e^{\lambda z} \\
    &= e^{\lambda(z - 1)}
  \end{align*}
\end{example}
\subsubsection*{Recovering PMF (mass function) from PGF}
\begin{note*}
  $G_X(0) = 0^0 \PP(X = 0) = \PP(X = 0)$.
\end{note*}
\ul{Idea}: Differentiate $n$ times.
\begin{align*}
  \dfrac[n]{}{z} G_X(z)
  &= \sum_{k \ge 0} \dfrac[n]{}{z} (z^k) \PP(X = k) \\
  &= \sum_{k \ge 0} k(k - 1) \cdots (k - n + 1) z^{k - n} \PP(X = k) \\
  &= \sum_{k \ge n} k(k - 1) \cdots (k - n + 1) z^{k - n} \PP(X = k) \\
  &= \sum_{l \ge 0} (l + 1)(l + 2) \cdots (l + n) z^l \PP(X = l + n)
\end{align*}
\ul{Evaluate at 0}:
\[ \dfrac[n]{}{z} G_X(0) = n! \PP(X = n) .\]
\[ \PP(X = n) = \frac{1}{n!} G_X^{(n)} (0) \]
\ul{Key fact}: PGF \emph{determines} PMF / distribution exactly.
\subsubsection*{Recovering other probabilistic quantities}
\begin{note*}
  $G_X(1) = \sum_{k \ge 0} \PP(X = k) = 1$.
\end{note*}
\noindent
\ul{Technical comment}: $G_X(1)$ means $\lim_{z \to 1} G_X(z)$ if the domain is $(-1, 1)$ (the limit is from below).
\begin{itemize}
  \item What about $G_X'(1)$?
    \[ G_X'(z) = \sum_{k \ge 1} kz^{k - 1} \PP(X = k) \]
    \[ G_X'(1) = \sum_{k \ge 1} k \PP(X = k) = \EE[X] \]
  \item What about $G_X^{(n)}(1)$?
    \begin{align*}
      G_X^{(n)}(1) &= \sum_{k \ge n} k(k - 1) \cdots (k - n + 1) \PP(X = k) \\
      &= \EE[X(X - 1) \cdots (X - n + 1)]
    \end{align*}
  \item Other expectations:
    \begin{align*}
      \EE[X^2] &= \EE[X(X - 1)] + \EE[X] \\
      &= G_X''(1) + G_X'(1)
    \end{align*}
    \[ \mathrm{Var}(X) = G_X''(1) + G_X'(1) - [G_X'(1)]^2 \]
    Idea: Find in general $\EE[P(X)]$ using $\EE[\text{falling factorials of $X$}]$.
\end{itemize}
\begin{note*}[Linear Algebra Aside]
  The falling factorials
  \[ 1, X, X(X - 1), X(X - 1)(X - 2), \dots \]
  form a \emph{basis} for $\RR[X]$ (the set of polynomials with real coefficients).
\end{note*}
\subsubsection*{PGFs for sums of Independent Random Variables}
$X_1, \dots, X_n$ independent random variables. \\
$G_{X_1}, \dots, G_{X_n}$ are the PGFs. \\
Let $X = X_1 + \cdots + X_n$. \\
\ul{Question}: What's the PGF of $X$? (Is it nice?)
\begin{align*}
  G_X(z) &= \EE[z^X] \\
  &= \EE[z^{X_1 + \cdots + X_n}] \\
  &= \EE[z^{X_1} z^{X_2} \cdots z^{X_n}] \\
  &= \EE[z^{X_1}] \cdots \EE[z^{X_n}] \\
  &= G_{X_1}(z) \cdots G_{X_n}(z)
\end{align*}
Special case: if the $X_i$ all have the same distribution as $X_1$, then $G_X(z) = (G_{X_1}(z))^n$.
\begin{note*}
  \[ \EE[f(X) g(Y)] = \EE[f(X)] \EE[g(Y)] \]
  for independent random variables $X, Y$.
\end{note*}