%! TEX root = GT.tex
% vim: tw=50
% 21/11/2023 09AM
\begin{flashcard}[isolated-vertex-threshold-effect-thm]
\begin{theorem} % Theorem 4
Let $\lambda$ be fixed. Then: \begin{enumerate}[(i)] \item If \cloze{$\lambda < 1$ then almost surely $G \in \psp G \left( n, \lambda \frac{\log n}{n} \right)$} has an \gls{isolated} \gls{vertex}. \item If \cloze{$\lambda > 1$ then almost surely $G \in \psp G \left( n, \lambda \frac{\log n}{n} \right)$} has no \gls{isolated} \gls{vertex}. \end{enumerate} \glsnoundefn{al_surely}{almost surely}{N/A} (`almost surely' means: \cloze{with probability tending towards $1$ as $n \to \infty$}). \end{theorem} \vspace{-1em} \cloze{``$p = \frac{\log n}{n}$ is a threshold for existence of an \gls{isolated} \gls{vertex}''.} \begin{proof} \cloze{ Let $X = \#\text{\gls{isolated} \glspl{vertex}}$ in $\psp G(n, p)$. Then \[ \mu = \EE(X) = n(1 - p)^{n - 1} = \frac{n}{1 - p} (1 - p)^n .\] \begin{enumerate}[(i)] \item[(ii)] Have $p = \lambda \frac{\log n}{n}$, where $\lambda > 1$. So \[ \mu \le \frac{n}{1 - p} e^{-pn} = \frac{n}{1 - p} e^{-\lambda \log n} = \frac{n^{1 - \lambda}}{1 - p} \to 0 \] as $n \to \infty$. So \gls{al_surely} $X = 0$. \item Have $p = \lambda \frac{\log n}{n}$, where $\lambda < 1$. So $\mu \ge \frac{n}{1 - p} e^{-p(1 + \delta) n}$, any $\delta > 0$ ($p$ small) -- as $1 - x \ge e^{-(1 + \delta) x}$ for $x$ small, whence \[ \mu \ge \frac{n}{1 - p} n^{-\lambda(1 + \delta)} = \frac{n^{1 - \lambda(1 + \delta)}}{1 - p} .\] So pick fixed $\delta > 0$ with $\lambda(1 + \delta) < 1$. Then \[ \mu \ge \frac{n^{\text{positive number}}}{1 - p} \to \infty .\] Also, \begin{align*} V &= \sum_A \PP(A) \sum_B (\PP(B \mid A) - \PP(B)) \\ &= \ub{n(1 - p)^{n - 1} (1 - (1 - p)^{n - 1})}_{A = B} + \ub{n(n - 1) (1 - p)^{n - 1} ((1 - p)^{n - 2} - (1 - p)^{n - 1})}_{A \neq B} \\ &\le \mu + n^2 (1 - p)^{n - 1} p(1 - p)^{n - 2} \\ &= \mu + \mu^2 \frac{p}{1 - p} \end{align*} So $\frac{V}{\mu^2} \le \frac{1}{\mu} + \frac{p}{1 - p} \to 0$ as $n \to \infty$.
\qedhere \end{enumerate} } \end{proof} \end{flashcard}
A different kind of `threshold effect' comes from \gls{graph} parameters, for example $\cliquen(G)$ (\gls{cliquen}). So fix $0 < p < 1$, and we ask: how is the \gls{cliquen} of $G \in \psp G(n, p)$ distributed? We'd expect \begin{center} \includegraphics[width=0.6\linewidth]{images/f0c37bcac61b47c4.png} \end{center} Width of hump? Might guess about $\sqrt{n}$, or maybe $\log n$. But in fact: \begin{center} \includegraphics[width=0.6\linewidth]{images/df0bf57435874cd0.png} \end{center} i.e. there exists $a$ such that the \gls{cliquen} of $G \in \psp G(n, p)$ is $a$ or $a + 1$ \gls{al_surely}.
\begin{flashcard}[clique-threshold-thm]
\begin{theorem} % Theorem 5
Let $0 < p < 1$ be fixed, and let $d$ be a real with ${n \choose d} p^{{d \choose 2}} = 1$. Then $G \in \psp G(n, p)$ has \gls{cliquen} $\left\lceil d \right\rceil$ or $\left\lfloor d \right\rfloor$ or $\left\lfloor d \right\rfloor - 1$ \gls{al_surely}. \end{theorem} \begin{remark*} With more work, could get down to only $2$ values. \end{remark*} \begin{proof} \cloze{ Let $X = \#\complete_k$ in $G$. We'll show that if $k \ge d + 1$ then $X = 0$ \gls{al_surely}, and if $k \le d - 1$ then $X \ge 1$ \gls{al_surely}. Have \[ \mu = \EE(X) = {n \choose k} p^{{k \choose 2}} .\] So $k \ge d + 1 \implies \mu \to 0$ as $n \to \infty$ (check), so \gls{al_surely} $X = 0$. Now, for $k \le d - 1$, have $\mu \to \infty$ as $n \to \infty$ (check).
Also \begin{align*} V = \ub{{n \choose k}}_{\#A} \ub{p^{{k \choose 2}}}_{\PP(A)} \sum_{s = 2}^k \ub{{k \choose s} {n - k \choose k - s}}_{\text{$\#B$ with $\#(A \cap B) = s$}} (p^{{k \choose 2} - {s \choose 2}} - p^{{k \choose 2}}) \end{align*} In the sum: \begin{itemize} \item First term is ${k \choose 2} {n - k \choose k - 2} p^{{k \choose 2}} \left( \frac{1}{p} - 1 \right) \le {k \choose 2}{n - k \choose k - 2} p^{{k \choose 2}} \frac{1}{p}$. \item Last term is $1 - p^{{k \choose 2}} \le 1$. \end{itemize} In fact, sum is bounded by first $+$ last: more precisely, the sum is $\le C(\text{first} + \text{last})$, for some $C$ (check). So \[ V \le \mu C \left( {k \choose 2} {n - k \choose k - 2} p^{{k \choose 2}} \frac{1}{p} + 1 \right) ,\] whence $\frac{V}{\mu^2} \to 0$ (check), so that $X \neq 0$ \gls{al_surely}.} \end{proof} \end{flashcard}