%! TEX root = PM.tex
% vim: tw=50
% 21/11/2023 09AM
\begin{remark*}
For probability measures, if $X_n \to X$ \gls{vc_wly}, then $\EE f(X_n) \to \EE f(X)$ for all $f$ bounded continuous.
\end{remark*}

\begin{proof}[Proof of \nameref{levy_thm}]
Let $g : \RR^d \to \RR$ be compactly supported and Lipschitz continuous, i.e.
\[ |g(x) - g(y)| \le C_g |x - y| \qquad \forall x, y \in \RR^d \]
(any $g \in C_c^\infty$ will do). Enough to show $\EE g(X_n) \to \EE g(X)$ (by \nameref{lec20_Cc_infty_remark} last lecture). Let $Z \sim \normaldist(0, I_d)$ be independent of $(X_n)$, $X$. Then for fixed $\eps > 0$, choose $t$ small enough so that
\[ C_g \sqrt{t} \EE|Z| \le \frac{\eps}{3} .\]
Then,
\begin{align*}
|\EE g(X_n) - \EE g(X)| &\le |\EE g(X_n) - \EE g(X_n + \sqrt{t} Z)| + |\EE g(X) - \EE g(X + \sqrt{t} Z)| \\
&~~~~+ |\EE g(X_n + \sqrt{t} Z) - \EE g(X + \sqrt{t}Z)| \\
&\le \EE |g(X_n) - g(X_n + \sqrt{t} Z)| + \EE|g(X) - g(X + \sqrt{t} Z)| \\
&~~~~+ |\EE g(X_n + \sqrt{t}Z) - \EE g(X + \sqrt{t} Z)| \\
&\le \EE|C_g \sqrt{t} Z| + \EE|C_g \sqrt{t} Z| + |\EE g(X_n + \sqrt{t} Z) - \EE g(X + \sqrt{t} Z)| \\
&= \ub{C_g \sqrt{t} \EE|Z| + C_g \sqrt{t} \EE|Z|}_{\le \frac{2\eps}{3}} + |\EE g(X_n + \sqrt{t} Z) - \EE g(X + \sqrt{t} Z)|
\end{align*}
$X_n + \sqrt{t} Z$ has density $\mu_{X_n} \conv \gt \eqdef f_{t, n}$. Then by \gls{FI},
\[ f_{t, n}(x) = \frac{1}{(2\pi)^d} \int \phi_{X_n}(u) e^{-\frac{t|u|^2}{2}} e^{-i\langle u, x\rangle} \dd u \]
So
\begin{align*}
\EE g(X_n + \sqrt{t}Z) &= \frac{1}{(2\pi)^d} \iint g(x) \phi_{X_n}(u) e^{-\frac{t|u|^2}{2}} e^{-i\langle u, x\rangle} \dd u \dd x \\
&\stackrel[n \to \infty]{\text{\nameref{dct}}}{\longrightarrow} \frac{1}{(2\pi)^d} \iint g(x) \phi_X(u) e^{-\frac{t|u|^2}{2}} e^{-i\langle u, x\rangle} \dd u \dd x \\
&= \EE g(X + \sqrt{t} Z)
\end{align*}
(using $|g(x)| e^{-\frac{t|u|^2}{2}}$ as the dominating function to apply \nameref{dct}, since $|\phi_{X_n}| \le 1$, and $\phi_{X_n} \to \phi_X$ pointwise by assumption). So the last term above is $\le \frac{\eps}{3}$ for $n$ large, giving $|\EE g(X_n) - \EE g(X)| \le \eps$ eventually.
\end{proof}

\begin{flashcard}[clt-thm]
\begin{theorem*}[Central Limit Theorem]
\label{clt}
\cloze{
Let $(X_n)$ be IID random variables on $\RR$ with $\EE(X_i) = 0$, $\Var(X_i) = 1$ for all $i$. Then for $S_n = X_1 + \cdots + X_n$, we have $\frac{S_n}{\sqrt{n}} \to Z \sim \normaldist(0, 1)$ \gls{vc_wly} or \emph{in distribution}, i.e.
\[ \PP \left( \frac{S_n}{\sqrt{n}} \le x \right) \stackrel{n \to \infty}{\longrightarrow} \PP(Z \le x) \qquad \forall x \]
}
\end{theorem*}
\begin{proof}
\cloze{
Set $\phi(u) = \phi_{X_1}(u) = \EE e^{iu X_1}$. Then $\phi(0) = 1$, and since $\EE X_1^2 < \infty$, we can differentiate under the integral sign and get
\begin{align*}
\phi'(u) &= i\EE X_1 e^{iu X_1} \\
\phi''(u) &= i^2 \EE X_1^2 e^{iu X_1}
\end{align*}
(see \es[7.4]{3}), i.e. $\phi'(0) = 0$, $\phi''(0) = -1$. By Taylor's Theorem, as $u \to 0$, $\phi(u) = 1 - \frac{u^2}{2} + o(u^2)$.

Let $\phi_n$ be the \gls{char_func} of $\frac{S_n}{\sqrt{n}}$. Then
\begin{align*}
\phi_n(u) &= \EE e^{iu \frac{S_n}{\sqrt{n}}} \\
&= \EE e^{i \frac{u}{\sqrt{n}}(X_1 + \cdots + X_n)} \\
&\stackrel{\text{IID}}{=} \left( \EE e^{i \frac{u}{\sqrt{n}} X_1} \right)^n \\
&= \left( \phi \left( \frac{u}{\sqrt{n}} \right) \right)^n \\
&= \left( 1 - \frac{u^2}{2n} + o \left( \frac{u^2}{n} \right) \right)^n \\
&= \left( 1 - \frac{u^2}{2n} + o \left( \frac{1}{n} \right) \right)^n
\end{align*}
(as $u$ is fixed and $n \to \infty$). The complex logarithm satisfies, as $z \to 0$, $\log(1 + z) = z + o(z)$.
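(Indeed, for $|z| < 1$ the principal branch of the logarithm has the power series
\[ \log(1 + z) = \sum_{k = 1}^\infty \frac{(-1)^{k+1}}{k} z^k = z - \frac{z^2}{2} + \frac{z^3}{3} - \cdots, \]
so $|\log(1 + z) - z| \le \sum_{k \ge 2} |z|^k = \frac{|z|^2}{1 - |z|} = O(|z|^2) = o(|z|)$; note $1 - \frac{u^2}{2n} + o(\frac{1}{n}) \to 1$, so for $n$ large the principal logarithm is defined here.)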
So,
\begin{align*}
\log \phi_n(u) &= n \log \left( 1 - \frac{u^2}{2n} + o \left( \frac{1}{n} \right) \right) \\
&= n \left( -\frac{u^2}{2n} + o \left( \frac{1}{n} \right) \right) \\
&= -\frac{u^2}{2} + o(1)
\end{align*}
So, $\phi_n(u) \to e^{-\frac{u^2}{2}} = \phi_Z(u)$. So by \nameref{levy_thm}, $\frac{S_n}{\sqrt{n}} \to Z$ \gls{vc_wly}.}
\end{proof}
\end{flashcard}

\begin{remark*}
The \nameref{clt} in $\RR^d$ can be proved similarly using the \nameref{Cramer_Wold} and properties of multivariate Gaussians (Exercise).
\end{remark*}

\vspace{-1em}
A random variable on $\RR$ is Gaussian ($\normaldist(\mu, \sigma^2)$) if it has density $\frac{1}{\sqrt{2\pi}\sigma} e^{-\frac{(x - \mu)^2}{2\sigma^2}}$ for some $\mu \in \RR$, $\sigma > 0$.

\begin{definition*}
$X$ in $\RR^d$ is Gaussian if
\[ \langle u, X\rangle = \sum_{i = 1}^d u_i X_i \]
is Gaussian for all $u \in \RR^d$.
\end{definition*}

\begin{example*}
If $X_1, \ldots, X_n \iidsim \normaldist(0, 1)$, then $X = (X_1, \ldots, X_n)$ is Gaussian in $\RR^n$.
\end{example*}

\begin{proposition*}
Let $X$ be Gaussian in $\RR^n$ and let $A$ be an $m \times n$ matrix, $b \in \RR^m$. Then:
\begin{enumerate}[(a)]
\item $AX + b$ is Gaussian in $\RR^m$.
\item $X \in \Lp[2]$ and $\mu_X$ is determined by $\EE(X) = \mu$ and $(\Cov(X_i, X_j))_{i, j} = V$.
\item $\cf_X(u) = e^{i\langle u, \mu\rangle - \frac{\langle u, Vu\rangle}{2}}$ for all $u \in \RR^n$.
\item If $V$ is invertible, then $X$ has a pdf on $\RR^n$ given by
\[ f_X(x) = \frac{1}{(2\pi)^{n/2}} \frac{1}{|V|^{1/2}} e^{-\langle x - \mu , V^{-1} (x - \mu) \rangle/2} \]
\item If $X = (X_1, X_2)$ with $X_1 \in \RR^{n_1}$, $X_2 \in \RR^{n_2}$ ($n_1 + n_2 = n$), then $X_1$, $X_2$ are independent if and only if $\Cov(X_1, X_2) = 0$.
\end{enumerate}
\end{proposition*}
\begin{proof}
Easy. Also see \es{4} and the lecturer's online notes.
\end{proof}

\subsubsection*{Law of Large Numbers}
Weak Law of Large Numbers: let $(X_i)$ be IID with $\EE X_i = \mu$ and $\Var(X_i) < \infty$. Then for all $\eps > 0$, by Chebyshev's inequality,
\begin{align*}
\PP \left( \left| \frac{1}{n} \sum_{i = 1}^n X_i - \mu \right| > \eps \right) &\le \frac{1}{n^2 \eps^2} \Var \left( \sum_{i = 1}^n X_i \right) \\
&= \frac{\cancel{n} \Var(X_1)}{n^{\cancel{2}} \eps^2} \\
&\to 0
\end{align*}
as $n \to \infty$. So,
\[ \frac{1}{n} \sum_{i = 1}^n X_i \convP \mu \]
Strong Law of Large Numbers: if $(X_i)$ are IID with $\EE|X_1| < \infty$ and $\EE(X_1) = \mu$, then
\[ \frac{1}{n} \sum_{i = 1}^n X_i \to \mu \]
\gls{al_surely} as $n \to \infty$.
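To see how the \nameref{clt} fits with the Law of Large Numbers for general (non-standardised) variables, the following standard reformulation may help; the Bernoulli case is the classical de Moivre--Laplace theorem.

\begin{example*}
Let $(X_i)$ be IID with $\EE X_i = \mu$ and $\Var(X_i) = \sigma^2 \in (0, \infty)$. Applying the \nameref{clt} to the standardised variables $\frac{X_i - \mu}{\sigma}$ gives
\[ \frac{S_n - n\mu}{\sigma \sqrt{n}} \to Z \sim \normaldist(0, 1) \]
\gls{vc_wly}: the Law of Large Numbers says $\frac{S_n}{n} \to \mu$, and the \nameref{clt} says the fluctuations around $\mu$ are of order $\frac{\sigma}{\sqrt{n}}$. In particular, for $X_i \iidsim \mathrm{Bernoulli}(p)$ with $p \in (0, 1)$,
\[ \PP \left( \frac{S_n - np}{\sqrt{np(1 - p)}} \le x \right) \stackrel{n \to \infty}{\longrightarrow} \PP(Z \le x) \qquad \forall x \]
\end{example*}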