%! TEX root = PM.tex
% vim: tw=50
% 04/11/2023 10AM

\begin{flashcard}[indep-rv-prop]
\begin{proposition*}
Let $X_1, \ldots, X_n$ be random variables $X_i : (\Omega, \mathcal{F}, \PP) \to (E_i, \mathcal{E}_i)$. Set $E = E_1 \times \cdots \times E_n$, $\mathcal{E} = \mathcal{E}_1 \pmeas \cdots \pmeas \mathcal{E}_n$. Consider $X : (\Omega, \mathcal{F}, \PP) \to (E, \mathcal{E})$ given by $X(\omega) = (X_1(\omega), \ldots, X_n(\omega))$. Then \cloze{$X$ is $\mathcal{E}$-measurable and the following are equivalent:}
\begin{enumerate}[(i)]
\item \cloze{$X_1, \ldots, X_n$ are \gls{indep_rv} (i.e. $\sigma\{X_i^{-1}(A) : A \in \mathcal{E}_i\}$ are independent).}
\item \cloze{$\mu_X = \mu_{X_1} \pmu \mu_{X_2} \pmu \cdots \pmu \mu_{X_n}$.}
\item \cloze{For all bounded measurable $f_i : E_i \to \RR$,
\[ \EE \left( \prod_{i = 1}^n f_i(X_i) \right) = \prod_{i = 1}^n \EE (f_i(X_i)) .\]}
\end{enumerate}
\end{proposition*}
\begin{proof}
\cloze{
$X$ measurable: need $X^{-1}(A) \in \mathcal{F} ~\forall A \in \mathcal{E}$. Enough to check
\begin{align*}
X^{-1}(A_1 \times \cdots \times A_n) &= \{\omega : X_1(\omega) \in A_1, \ldots, X_n(\omega) \in A_n\} \\
&= \bigcap_{i = 1}^n X_i^{-1}(A_i)
\end{align*}
is in $\mathcal{F}$. But this is true since $X_i : (\Omega, \mathcal{F}) \to (E_i, \mathcal{E}_i)$ are all measurable, so $X_i^{-1}(A_i) \in \mathcal{F}$ for all $A_i \in \mathcal{E}_i$.
Now we will show (i) $\implies$ (ii) $\implies$ (iii) $\implies$ (i).
\begin{enumerate}[(i) $\implies$ (ii)]
\item[(i) $\implies$ (ii)] Let $\nu = \mu_{X_1} \pmu \cdots \pmu \mu_{X_n}$. The collection $\mathcal{A}$ of rectangles $A_1 \times \cdots \times A_n$, $A_i \in \mathcal{E}_i$, is a \pisys{} generating $\mathcal{E}$, so it is enough to show that $\mu_X = \nu$ on $\mathcal{A}$. Indeed,
\begin{align*}
\mu_X(A_1 \times \cdots \times A_n) &= \PP(X_1 \in A_1, \ldots, X_n \in A_n) \\
&\stackrel{\text{(a)}}{=} \PP(X_1 \in A_1) \cdots \PP(X_n \in A_n) \\
&= \mu_{X_1} (A_1) \cdots \mu_{X_n}(A_n) \\
&= \nu(A_1 \times \cdots \times A_n)
\end{align*}
where (a) holds by independence (i).
\item[(ii) $\implies$ (iii)]
\begin{align*}
\EE \left( \prod_{i = 1}^n f_i(X_i) \right) &= \int \prod_{i = 1}^n f_i(x_i) \dd \mu_X(x) \\
&= \prod_{i = 1}^n \int f_i(x_i) \dd \mu_{X_i}(x_i) \\
&= \prod_{i = 1}^n \EE (f_i(X_i))
\end{align*}
using (ii) and Fubini's theorem for the middle equality.
\item[(iii) $\implies$ (i)] Use $f_i = \indicator{A_i}$, $A_i \in \mathcal{E}_i$. Then, using (iii) at step (a),
\begin{align*}
\EE \left( \prod_{i = 1}^n \indicator{A_i}(X_i) \right) &= \EE (\indicator{A_1 \times \cdots \times A_n}(X)) \\
&= \PP(X_1 \in A_1, \ldots, X_n \in A_n) \\
&\stackrel{\text{(a)}}{=} \prod_{i = 1}^n \EE(\indicator{A_i}(X_i)) \\
&= \prod_{i = 1}^n \PP(X_i \in A_i) \qedhere
\end{align*}
\end{enumerate}
}
\end{proof}
\end{flashcard}

\newpage

\section{$L^p$ Spaces, Norms, Inequalities}

\begin{flashcard}[norm-defn]
\begin{definition*}[Norm]
\cloze{
Recall that a norm on a real vector space $V$ is a map $\|\bullet\| : V \to [0, \infty)$ such that
\begin{enumerate}[(1)]
\item $\|\lambda v\| = |\lambda| \|v\|$ for all $\lambda \in \RR, v \in V$.
\item $\|v + w\| \le \|v\| + \|w\|$ for all $v, w \in V$.
\item $\|v\| = 0 \iff v = 0$.
\end{enumerate}
}
\end{definition*}
\end{flashcard}

\begin{flashcard}[Lp-norm-defn]
\begin{definition*}[$L^p$ norm]
\glssymboldefn{Lp_norm}{$L^p$}{$L^p$}
\glssymboldefn{Lp_norm_func}{$\|f\|_p$}{$\|f\|_p$}
\cloze{For $(E, \mathcal{E}, \mu)$ a measure space and $1 \le p \le \infty$, define
\[ L^p = L^p(E, \mathcal{E}, \mu) = \{f : E \to \RR \text{ measurable } : \normp\|f\|_p < \infty\} \]
where
\[ \normp\|f\|_p = \left( \int |f(x)|^p \dd \mu(x) \right)^{1/p} \]
for $1 \le p < \infty$, and
\[ \normp\|f\|_\infty = \esssup |f| = \inf\{\lambda \ge 0 : |f| \le \lambda \text{ $\mu$-\gls{al_ev}}\} \]}
\end{definition*}
\end{flashcard}
\vspace{-1em}
Let us check the properties of a norm for $\normp\|\bullet\|_p$:
\begin{enumerate}[(1)]
\item This holds for $1 \le p < \infty$ by linearity of the integral, and for $p = \infty$ it's obvious.
\item This holds for $p = 1, \infty$ easily. For other $p$, we shall show it using Minkowski's inequality.
\item $f = 0$ implies $\normp\|f\|_p = 0$. But $\normp\|f\|_p = 0$ only implies $f = 0$ \gls{al_ev}. So we fix this by defining equivalence classes
\[ [f] = \{g : g = f \text{ \gls{al_ev}}\} \]
and \glssymboldefn{Lpcal_norm}{$L^p$}{$L^p$}
\[ \mathcal{L}^p = \{[f] : f \in \Lp\} \]
\end{enumerate}
Then $\mathcal{L}^p$, $1 \le p \le \infty$, are normed vector spaces.

\subsection{Inequalities}

\subsubsection*{Markov / Chebyshev's Inequality}
Let $f \ge 0$ be measurable. Then $\forall \lambda > 0$,
\[ \ub{\mu(\{x \in E : f(x) \ge \lambda\})}_{\mu(f \ge \lambda)} \le \frac{\mu(f)}{\lambda} \]
\begin{proof}
$\lambda \indicator{\{f \ge \lambda\}} \le f$. So integrating with respect to $\mu$, we get
\[ \lambda \mu(f \ge \lambda) \le \mu(f). \qedhere \]
\end{proof}
In particular, if $g \in L^p$, $p < \infty$, then $\mu(|g| \ge \lambda) \le \frac{\mu(|g|^p)}{\lambda^p} < \infty$. This gives a tail estimate as $\lambda \to \infty$.

\begin{flashcard}[convex-fn-defn]
\begin{definition*}[Convex function]
\glsadjdefn{convex_fn}{convex}{function}
\cloze{
For $I \subseteq \RR$ an interval, we say a function $c : I \to \RR$ is \emph{convex} if $\forall x, y \in I$, $t \in [0, 1]$,
\[ c(tx + (1 - t)y) \le tc(x) + (1 - t)c(y) \]
or equivalently:
\[ \label{convex_defn_2} \frac{c(\tilde{t}) - c(x)}{\tilde{t} - x} \le \frac{c(y) - c(\tilde{t})}{y - \tilde{t}} \tag{$*$} \]
for $x < \tilde{t} < y$ in $I$. (In particular, this second form shows that $c$ is continuous on the interior of $I$, hence Borel measurable).
}
\end{definition*}
\end{flashcard}

\begin{flashcard}[convex-linear-ineq-lemma]
\begin{lemma*}
Let $I \subseteq \RR$ be an interval, $c : I \to \RR$ be \gls{convex_fn} and $m \in I$. Then there exist $a, b \in \RR$ such that $c(x) \ge ax + b$ for all $x \in I$, with equality at $x = m$.
\end{lemma*}
\begin{proof}
\cloze{
Let
\[ a = \sup \left\{ \frac{c(m) - c(x)}{m - x} : x \in I, x < m\right\} < \infty .\]
Then for all $x < m < y$ in $I$,
\[ \frac{c(m) - c(x)}{m - x} \le a \le \frac{c(y) - c(m)}{y - m} \]
where the first inequality holds by definition of $a$ and the second by \eqref{convex_defn_2}. These inequalities imply (taking $b = c(m) - am$, so that equality holds at $x = m$)
\[ c(y) \ge ay - am + c(m) = ay + b \qquad \forall y \ge m \]
and
\[ c(x) \ge ax - am + c(m) = ax + b \qquad \forall x \le m. \qedhere \]
}
\end{proof}
\end{flashcard}

\begin{flashcard}[jensens-inequality-thm]
\begin{theorem*}[Jensen's Inequality]
\label{jensens_ineq}
\cloze{
Let $X$ be an integrable random variable (i.e. $\EE |X| < \infty$), taking values in an interval $I \subset \RR$, and let $c : I \to \RR$ be convex. Then $\EE(c(X))$ is well-defined, and
\[ \boxed{\EE(c(X)) \ge c(\EE(X))} \]
}
\end{theorem*}
\begin{proof}
\cloze{If $X$ is constant \gls{al_surely}, then there is nothing to prove. Assume otherwise.
Then $\EE(X) = m \in I$. Using the previous lemma with this $m$, $\exists a, b \in \RR$ such that $c(x) \ge ax + b$ for all $x \in I$, with equality at $x = m$. In particular,
\[ \label{lec14_l236_eq} c(X) \ge aX + b \tag{$**$} \]
Hence
\[ c(X)^- \le |a| |X| + |b| \]
so $\EE(c(X)^-) < \infty$. Hence $\EE(c(X)) = \EE(c(X)^+) - \EE(c(X)^-)$ is well-defined as an element of $(-\infty, \infty]$.
Claim: \eqref{lec14_l236_eq} $\implies$ $\EE(c(X)) \ge a\EE(X) + b$. If $\EE(c(X)) = \infty$, nothing to prove. Otherwise, $c(X)$ and $aX + b$ are integrable random variables satisfying \eqref{lec14_l236_eq}, so taking expectations,
\[ \EE(c(X)) \ge a\EE(X) + b = am + b = c(m) = c(\EE(X)). \qedhere \]}
\end{proof}
\end{flashcard}

As an application: let $(\Omega, \mathcal{F}, \PP)$ be a probability space and $1 \le p \le \infty$. If $X \in \Lp[\infty](\PP)$, then $X \in \Lp(\PP)$ for all $1 \le p < \infty$, as $\normp\|X\|_p \le \normp\|X\|_\infty$.

Claim: If $X \in \Lp[q]$ and $q > p \ge 1$, then $X \in \Lp$. Indeed, $c(x) = |x|^{q / p}$ is convex, hence by Jensen's inequality,
\[ \normp\|X\|_p = (\EE |X|^p)^{1/p} = (c(\EE(|X|^p)))^{1/q} \le (\EE(c(|X|^p)))^{1/q} = (\EE(|X|^q))^{1/q} = \normp\|X\|_q \]
(if the integrability of $|X|^p$ is not known a priori, apply Jensen to the bounded random variables $|X|^p \wedge n$ and let $n \to \infty$ by monotone convergence). Hence, $X \in \Lp[q]$ implies $X \in \Lp$ for all $1 \le p < q$, i.e.
\[ \Lp[\infty](\PP) \subseteq \Lp[q](\PP) \subseteq \Lp(\PP) \subseteq \Lp[1](\PP) \]
for all $1 \le p \le q \le \infty$.
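
As a quick sanity check on where the probability measure assumption enters, note that these inclusions can fail for an infinite measure. For instance, for Lebesgue measure on $(1, \infty)$, the function $f(x) = 1/x$ satisfies
\[ \int_1^\infty \frac{1}{x^2} \dd x = 1 < \infty \qquad \text{but} \qquad \int_1^\infty \frac{1}{x} \dd x = \infty, \]
so $f \in L^2 \setminus L^1$. The argument above really uses $\PP(\Omega) = 1$, both in the bound $\normp\|X\|_p \le \normp\|X\|_\infty$ and in applying Jensen's inequality.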