% vim: tw=50
% 05/03/2022 11AM

\begin{note*}
The discrete equivalent is $X, Y \ge 0$
independent,
\[ \PP(X + Y = k) = \sum_{l = 0}^k \PP(X = l) \PP(Y = k - l) \]
\end{note*}

\begin{example*}
$X, Y \stackrel{\text{IID}}{\sim} \mathrm{Exp}(\lambda)$,
$Z = X + Y$.
\begin{align*}
f_Z(z) &= \int_{x = 0}^z \lambda^2 e^{-\lambda x} e^{-\lambda (z - x)} \dd x \\
&= \lambda^2 \int_{x = 0}^z e^{-\lambda z} \dd x \\
&= \lambda^2 z e^{-\lambda z}
\end{align*}
\end{example*}

\begin{definition*}
$X \sim \Gamma(n, \lambda)$ \emph{Gamma
distribution}. $\lambda > 0$,
$n \in \{1, 2, \dots\}$. Range is $[0, \infty)$.
Density:
\[ f_X(x) = e^{-\lambda x} \frac{\lambda^n x^{n - 1}}{(n - 1)!} \]
\[ n = 1 \mapsto \mathrm{Exp}(\lambda) \]
\[ n = 2 \mapsto \lambda^2 x e^{-\lambda x} \]
So in the example above,
$X + Y \sim \Gamma(2, \lambda)$ (and in fact
$X_1 + \cdots + X_n \sim \Gamma(n, \lambda)$ for
$X_1, \dots, X_n \stackrel{\text{IID}}{\sim} \mathrm{Exp}(\lambda)$;
proved at the end of this section).
\end{definition*}

\begin{example*}
$X_1 \sim \mathrm{N}(\mu_1, \sigma_1^2)$,
$X_2 \sim \mathrm{N}(\mu_2, \sigma_2^2)$
independent. Then:
$X_1 + X_2 \sim \mathrm{N}(\mu_1 + \mu_2, \sigma_1^2 + \sigma_2^2)$.
\begin{note*}
We already know that
\[ \EE[X_1 + X_2] = \mu_1 + \mu_2 \qquad \mathrm{Var}(X_1 + X_2) = \sigma_1^2 + \sigma_2^2 \]
\end{note*}
\begin{proof}
\begin{itemize}
\item Calculation exercise.
\item Generating functions: coming up (see the
sketch at the end of this section).
\end{itemize}
\end{proof}
\end{example*}

\begin{theorem*}
Let $X = (X_1, \dots, X_n)$ on $D$, and let
$g : \RR^n \to \RR^n$ be well-behaved (a
bijection with continuous partial derivatives).
Let
\[ U = g(X) = (U_1, \dots, U_n) \]
and suppose the joint density $f_X(x)$ is
continuous. Then $U$ has joint density
\[ f_U(\mathbf{u}) = f_X(g^{-1}(\mathbf{u})) |J(\mathbf{u})| \]
where
\[ J = \det \left( \left( \pfrac{[g^{-1}]_i}{u_j} \right)_{i, j = 1}^n \right) \]
is the ``Jacobian'' (determinant of the
$n \times n$ matrix of partial derivatives).
\end{theorem*}

\begin{customproof}{``Proof''}
Definition of multivariate integration by
substitution.
\end{customproof}

\begin{example*}[Radial Symmetry]
$X, Y \stackrel{\text{IID}}{\sim} \mathrm{N}(0, 1)$.
Write $(X, Y) = (R\cos\theta, R\sin\theta)$.
Range: $R > 0$, $\theta \in [0, 2\pi)$.
\begin{align*}
f_{X, Y}(x, y) &= \frac{1}{\sqrt{2\pi}} e^{-\frac{x^2}{2}} \frac{1}{\sqrt{2\pi}} e^{-\frac{y^2}{2}} \\
&= \frac{1}{2\pi} e^{-\frac{x^2 + y^2}{2}}
\end{align*}
\begin{note*}
\[ |\text{Jacobian of $g^{-1}$}| = \frac{1}{|\text{Jacobian of $g$}|} \]
\end{note*}
\[ J = \left| \begin{matrix} \cos\theta & \sin\theta \\ -R\sin\theta & R\cos\theta \end{matrix} \right| = R(\cos^2\theta + \sin^2\theta) = R \]
So
$f_{R, \theta}(r, \theta) = \frac{1}{2\pi} e^{-\frac{r^2}{2}} \times r$.
Marginals:
\[ f_\theta(\theta) = \frac{1}{2\pi} \]
\[ f_R(r) = e^{-\frac{r^2}{2}} \times r \]
Conclusion: $\theta$ and $R$ are independent, and
$\theta$ is uniform on $[0, 2\pi)$.
\end{example*}

\begin{note*}
Change of range: for example $X, Y \ge 0$,
$Z = X + Y$. Then
\[ f_{X, Z}(x, z) = ?(x, z)\mathbbm{1}_{(z \ge x)} \]
so $X, Z$ are \emph{not} independent, even if
$?$ splits as a product.
\end{note*}

\subsubsection*{Moment Generating Function}

\begin{definition*}
Let $X$ have density $f$. The \emph{MGF} of $X$
is:
\[ m_X(\theta) := \EE[e^{\theta X}] = \int_{-\infty}^\infty e^{\theta x} f(x) \dd x \]
whenever this is finite.
\end{definition*}

\begin{note*}
$m_X(0) = 1$.
\end{note*}

\begin{theorem*}
The MGF uniquely determines the distribution of
a random variable whenever it exists for all
$\theta \in (-\eps, \eps)$ for some $\eps > 0$.
\end{theorem*}
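\begin{example*}
A worked computation straight from the
definition (used below in the sketch for sums of
independent normals): for
$X \sim \mathrm{N}(0, 1)$, complete the square
in the exponent. For all $\theta \in \RR$:
\begin{align*}
m_X(\theta) &= \int_{-\infty}^\infty e^{\theta x} \frac{1}{\sqrt{2\pi}} e^{-\frac{x^2}{2}} \dd x \\
&= e^{\frac{\theta^2}{2}} \int_{-\infty}^\infty \frac{1}{\sqrt{2\pi}} e^{-\frac{(x - \theta)^2}{2}} \dd x \\
&= e^{\frac{\theta^2}{2}}
\end{align*}
since the final integrand is the
$\mathrm{N}(\theta, 1)$ density, which
integrates to $1$.
\end{example*}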
\begin{theorem*}
Suppose $m(\theta)$ exists for all
$\theta \in (-\eps, \eps)$. Then
\[ m^{(n)}(0) = \dfrac[n]{}{\theta} m(\theta) \Big|_{\theta = 0} = \EE[X^n] \]
($\EE[X^n]$ is the ``$n$-th moment'').
\end{theorem*}

\noindent Proof comment:
$\pfrac[n]{e^{\theta x}}{\theta} = x^n e^{\theta x}$.

\begin{claim*}
$X_1, \dots, X_n$ independent,
\[ X = X_1 + \cdots + X_n \]
Then
\begin{align*}
m_X(\theta) &= \EE[e^{\theta(X_1 + \cdots + X_n)}] \\
&= \EE[e^{\theta X_1}] \cdots \EE[e^{\theta X_n}] \\
&= \prod_{i = 1}^n m_{X_i}(\theta)
\end{align*}
\end{claim*}

\begin{example*}
Gamma distribution: $X \sim \Gamma(n, \lambda)$,
\[ f_X(x) = e^{-\lambda x} \frac{\lambda^n x^{n - 1}}{(n - 1)!} \]
For $\theta < \lambda$:
\begin{align*}
m(\theta) &= \int_0^\infty e^{\theta x} e^{-\lambda x} \frac{\lambda^n x^{n - 1}}{(n - 1)!} \dd x \\
&= \int_0^\infty e^{-(\lambda - \theta)x} x^{n - 1} \frac{\lambda^n}{(n - 1)!} \dd x \\
&= \left( \frac{\lambda}{\lambda - \theta} \right)^n \int_0^\infty e^{-(\lambda - \theta) x} x^{n - 1} \frac{(\lambda - \theta)^n}{(n - 1)!} \dd x \\
&= \left( \frac{\lambda}{\lambda - \theta} \right)^n
\end{align*}
(the final integrand is the
$\Gamma(n, \lambda - \theta)$ density, which
integrates to $1$; the MGF is infinite if
$\theta \ge \lambda$). In particular,
$\mathrm{Exp}(\lambda)$ has MGF
$\frac{\lambda}{\lambda - \theta}$. So for
$X_1, \dots, X_n \stackrel{\text{IID}}{\sim} \mathrm{Exp}(\lambda)$,
the sum has MGF
$\left( \frac{\lambda}{\lambda - \theta} \right)^n$,
and by uniqueness of MGFs we've proved
\[ X_1 + \cdots + X_n \sim \Gamma(n, \lambda) \]
\end{example*}
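\begin{note*}
Sketch of the generating function proof promised
in the sum-of-normals example, using the
uniqueness theorem above. Write
$X_i = \mu_i + \sigma_i Z_i$ with
$Z_i \sim \mathrm{N}(0, 1)$; by the standard
normal MGF computed earlier,
\[ m_{X_i}(\theta) = \EE[e^{\mu_i \theta} e^{(\sigma_i \theta) Z_i}] = e^{\mu_i \theta} m_{Z_i}(\sigma_i \theta) = e^{\mu_i \theta + \frac{\sigma_i^2 \theta^2}{2}} \]
So for $X_1, X_2$ independent,
\[ m_{X_1 + X_2}(\theta) = m_{X_1}(\theta) m_{X_2}(\theta) = e^{(\mu_1 + \mu_2)\theta + \frac{(\sigma_1^2 + \sigma_2^2)\theta^2}{2}} \]
which is the MGF of
$\mathrm{N}(\mu_1 + \mu_2, \sigma_1^2 + \sigma_2^2)$,
so
$X_1 + X_2 \sim \mathrm{N}(\mu_1 + \mu_2, \sigma_1^2 + \sigma_2^2)$
by uniqueness.
\end{note*}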