% vim: tw=50
% 29/01/2022 11AM

\begin{example}
Derangements (permutations with no fixed points).
\[ \Omega = \{\text{permutations of } \{1, \dots, n\}\} \]
\[ D = \{\sigma \in \Omega : \sigma(i) \neq i \,\,\forall\,\, i = 1, \dots, n\} \]
\ul{Question}: Is $\PP(D) = \frac{|D|}{|\Omega|}$ large or small (as $n \to \infty$)?
\[ \forall\,\, i \in \{1, \dots, n\} : A_i = \{\sigma \in \Omega : \sigma(i) = i\} .\]
\begin{itemize}
  \item $D = A_1^c \cap \cdots \cap A_n^c = \left( \bigcup_{i = 1}^n A_i \right)^c$.
  \item $\PP(A_{i_1} \cap \cdots \cap A_{i_k}) = \frac{(n - k)!}{n!}$, since such a $\sigma$ fixes $i_1, \dots, i_k$ and permutes the remaining $n - k$ elements freely.
\end{itemize}
Now the Inclusion-Exclusion Principle implies
\begin{align*}
  \PP \left( \bigcup_{i = 1}^n A_i \right) &= \sum_{k = 1}^n (-1)^{k + 1} \sum_{i_1 < \cdots < i_k} \PP(A_{i_1} \cap \cdots \cap A_{i_k}) \\
  &= \sum_{k = 1}^n (-1)^{k + 1} {n \choose k} \frac{(n - k)!}{n!} \\
  &= \sum_{k = 1}^n \frac{(-1)^{k + 1}}{k!} ,
\end{align*}
using ${n \choose k} \frac{(n - k)!}{n!} = \frac{1}{k!}$. So
\begin{align*}
  \PP(D) &= 1 - \PP \left( \bigcup_{i = 1}^n A_i \right) \\
  &= 1 - \sum_{k = 1}^n \frac{(-1)^{k + 1}}{k!} \\
  &= \sum_{k = 0}^n \frac{(-1)^k}{k!} .
\end{align*}
And as $n \to \infty$,
\[ \PP(D) \to \sum_{k = 0}^\infty \frac{(-1)^k}{k!} = e^{-1} \approx 0.37 .\]
\end{example}
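\begin{note*}
As a quick check of the formula in a small case: for $n = 3$ the only derangements of $\{1, 2, 3\}$ are the two $3$-cycles sending $1, 2, 3$ to $2, 3, 1$ and to $3, 1, 2$, so
\[ \PP(D) = \frac{2}{6} = \frac{1}{3} = 1 - 1 + \frac{1}{2} - \frac{1}{6} = \sum_{k = 0}^3 \frac{(-1)^k}{k!} ,\]
in agreement with the formula above.
\end{note*}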
\subsubsection*{Comments}
What if instead we have
\[ \Omega' = \{f : \{1, \dots, n\} \to \{1, \dots, n\}\} ,\]
\[ D' = \{f \in \Omega' : f(i) \neq i \,\,\forall\,\, i = 1, \dots, n\} ?\]
Then
\[ \PP(D') = \frac{(n - 1)^n}{n^n} = \left( 1 - \frac{1}{n} \right)^n ,\]
which also approaches $e^{-1}$ as $n \to \infty$.
\begin{itemize}
  \item It would be nice to write this as a product of probabilities, i.e. as $\left( \frac{n - 1}{n} \right)^n$; we will be allowed to do this soon.
  \item $f(i)$ is a random quantity associated to $\Omega'$. (We will be allowed to study $f(i)$ as a \emph{random variable}.)
  \item We would also like to be able to toss a fair coin $n$ times:
\[ \Omega = \{H, T\}^n .\]
\end{itemize}

\subsubsection*{Independence}
$(\Omega, \mathcal{F}, \PP)$ as before.
\begin{definition*}
\begin{itemize}
  \item Events $A, B \in \mathcal{F}$ are \emph{independent} if
\[ \PP(A \cap B) = \PP(A)\PP(B) \]
(denoted $A \ci B$).
  \item A \emph{countable} collection of events $(A_n)$ is \emph{independent} if for all distinct $i_1, \dots, i_k$ we have
\[ \PP(A_{i_1} \cap \cdots \cap A_{i_k}) = \prod_{j = 1}^k \PP(A_{i_j}) .\]
\end{itemize}
\end{definition*}
\begin{note*}
``Pairwise independence'' does not imply independence.
\end{note*}
\begin{example*}
$\Omega = \{(H, H), (H, T), (T, H), (T, T)\}$, $\PP(\{\omega\}) = \frac{1}{4} \,\,\forall\,\, \omega \in \Omega$. Now define
\[ A = \text{first coin is $H$} = \{(H, H), (H, T)\} ,\]
\[ B = \text{second coin is $H$} = \{(H, H), (T, H)\} ,\]
\[ C = \text{same outcome on both coins} = \{(H, H), (T, T)\} .\]
Then we have that
\[ \PP(A) = \PP(B) = \PP(C) = \half , \qquad A \cap B = A \cap C = B \cap C = \{(H, H)\} ,\]
\[ \implies \PP(A \cap B) = \PP(A \cap C) = \PP(B \cap C) = \frac{1}{4} = \half \cdot \half ,\]
so the events are pairwise independent. However,
\[ \PP(A \cap B \cap C) = \frac{1}{4} \neq \frac{1}{8} = \PP(A)\PP(B)\PP(C) ,\]
so the events are not independent.
\end{example*}

\subsubsection*{Example(s) of Independence}
\begin{itemize}
  \item Define
\[ \Omega' = \{f : \{1, \dots, n\} \to \{1, \dots, n\}\} ,\]
\[ A_i := \{f \in \Omega' : f(i) = i\} .\]
Then
\[ \PP(A_i) = \frac{n^{n - 1}}{n^n} = \frac{1}{n} \]
and
\[ \PP(A_{i_1} \cap \cdots \cap A_{i_k}) = \frac{n^{n - k}}{n^n} = \frac{1}{n^k} = \prod_{j = 1}^k \PP(A_{i_j}) .\]
Here the $(A_i)$ are independent events.
  \item Define
\[ \Omega = \{\sigma : \sigma \text{ is a permutation of } \{1, \dots, n\}\} ,\]
\[ A_i = \{\sigma \in \Omega : \sigma(i) = i\} .\]
For $i \neq j$,
\[ \PP(A_i \cap A_j) = \frac{(n - 2)!}{n!} = \frac{1}{n(n - 1)} \neq \frac{1}{n^2} = \PP(A_i)\PP(A_j) ,\]
so here the $(A_i)$ are not independent.
\end{itemize}

\subsubsection*{Properties}
\begin{claim}
If $A$ is independent of $B$, then $A$ is also independent of $B^c$.
\end{claim}
\begin{proof}
\begin{align*}
  \PP(A \cap B^c) &= \PP(A) - \PP(A \cap B) \\
  &= \PP(A) - \PP(A)\PP(B) \\
  &= \PP(A)[1 - \PP(B)] \\
  &= \PP(A)\PP(B^c)
\end{align*}
\end{proof}
\begin{claim}
$A$ is independent of $B = \Omega$ and of $C = \emptyset$.
\end{claim}
\begin{proof}
\[ \PP(A \cap \Omega) = \PP(A) = \PP(A)\PP(\Omega) ,\]
so $A \ci \Omega$, and then Claim 1 (applied with $B = \Omega$) gives $A \ci \Omega^c = \emptyset$.
\end{proof}
\myskip
As an exercise, one can further prove that if $\PP(B) = 0$ or $1$, then $A$ is independent of $B$.

\subsubsection*{Conditional Probability}
$(\Omega, \mathcal{F}, \PP)$ as before.
\myskip
Consider $B \in \mathcal{F}$ with $\PP(B) > 0$, and $A \in \mathcal{F}$.
\begin{definition*}
The \emph{conditional probability of $A$ given $B$} is
\[ \PP(A \mid B) := \frac{\PP(A \cap B)}{\PP(B)} .\]
``The probability of $A$ if we know $B$ happened'' (useful, for example, when information is revealed in succession).
\end{definition*}
\begin{example*}
If $A$, $B$ are independent,
\[ \PP(A \mid B) = \frac{\PP(A \cap B)}{\PP(B)} = \frac{\PP(A)\PP(B)}{\PP(B)} = \PP(A) .\]
``Knowing whether $B$ happened doesn't affect the probability of $A$.''
\end{example*}
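\begin{example*}
As a concrete illustration of the definition, roll a fair die: $\Omega = \{1, \dots, 6\}$ with $\PP(\{\omega\}) = \frac{1}{6}$ for all $\omega \in \Omega$, and take $A = \{6\}$, $B = \{2, 4, 6\}$ (the roll is even). Then
\[ \PP(A \mid B) = \frac{\PP(A \cap B)}{\PP(B)} = \frac{1/6}{1/2} = \frac{1}{3} \neq \frac{1}{6} = \PP(A) ,\]
so knowing that $B$ happened changes the probability of $A$; in particular, $A$ and $B$ are not independent here.
\end{example*}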