%! TEX root = GT.tex % vim: tw=50 % 18/11/2023 09AM \subsection{Graphs with high chromatic number} To ensure $\chromnum(G) \ge k$, could just have $G \supset \complete_k$. Not necessary to have $G \supset \complete_k$ -- for example $\cycle_5$ has no $\complete_3$, but $\chromnum(\cycle_5) = 3$. \glssymboldefn{CL}{CL$(G)$}{CL$(G)$} \glspropdefn{cliquen}{clique number}{\gls{graph}} Can have $\chromnum(G)$ much greater than $\cliquen(G)$, the \emph{clique number} of $G$, namely $\max\{k : G \supset \complete_k\}$. \begin{example*} \phantom{} \begin{enumerate}[(1)] \item The $G$ of \cref{ramsey_lower_bound}: $G$ on $2^{s/2}$ \glspl{vertex}, no $\complete_s \subset G$, so $\cliquen(G) \le s - 1$. Also, each \emph{independent set} (set with no \glspl{edge}) has size $\le s - 1$. But in any vertex-colouring of a graph, each colour class is an independent set. Hence $\chromnum(G) \ge \frac{2^{s/2}}{s - 1}$ -- much more than $\cliquen(G)$. Better: \item Can find $G$ which is triangle-free ($\cliquen(G) = 2$), but $\chromnum(G)$ arbitrarily large -- quite hard (on \es{3}). \end{enumerate} \end{example*} \vspace{-1em} Could we even ask for \gls{girth} $\ge 5$? Or more -- like $G$ with \gls{girth} $\ge 10$, $\chromnum(G) \ge 100$? Sounds unlikely, but\ldots \begin{flashcard}[random-girth-chromnum-thm] \begin{theorem} $\forall k, g$ there exists \gls{graph} $G$ with \gls{girth} $\ge g$ and $\chromnum(G) \ge k$. \end{theorem} \vspace{-1em} \cloze{ \textbf{Idea:} Find $G$ on $n$ \glspl{vertex} such that the number of short \glspl{cycle} is $\le \frac{n}{2}$ and each independent set has size $\le \frac{n}{2k}$ -- then done, by removing a \gls{vertex} from each short \gls{cycle} to obtain a \gls{graph} $H$ with \gls{girth} $\ge g$ and $\chromnum(H) \ge \frac{n/2}{n/2k} = k$. \begin{proof} For large $n$, choose $G \in \psp G(n, p)$ where $p = n^{-1 + \frac{1}{g}}$. For $i = 3, 4, \ldots, g - 1$, let $X_i = \#\text{$i$ \glspl{cycle} in $G$}$. 
Let $X = X_3 + \cdots + X_{g - 1}$ be the number of \glspl{cycle} in $G$ of length $< g$. Then \begin{align*} \EE(X_i) &\le (\#\text{possible $i$-\glspl{cycle}}) \PP(\text{given $i$-\gls{cycle} $\subset G$}) \\ &\le n^i p^i \\ &= n^{i/g} \\ &\le n^{\frac{g - 1}{g}} \end{align*} Hence $\EE(X) \le g \cdot \frac{n}{n^{1/g}} < \frac{n}{4}$ for $n$ large (as $n^{1/g} \to \infty$ as $n \to \infty$). So $\PP \left( X > \frac{n}{2} \right) < \half$ (else $\EE(X) \ge \frac{n}{2} \cdot \half = \frac{n}{4}$ \contradiction). Write $t = \frac{n}{2k}$ ($n$ a multiple of $2k$), and let $Y = \#\text{independent $t$-sets in $G$}$. Then \begin{align*} \EE(Y) &\le n^t (1 - p)^{\binom{t}{2}} \\ &\le n^t e^{-p\binom{t}{2}} &&\text{(using $1 - x \le e^{-x}$)} \\ &\le \exp\left(\frac{n}{2k} \log n - n^{-1 + \frac{1}{g}} \cdot \frac{n^2}{16k^2} \right) \\ &\to 0 \end{align*} as $n \to \infty$ ($n^{1 + \frac{1}{g}}$ grows faster than $n\log n$). So $\PP(Y = 0) > \half$ if $n$ large enough. Hence there exists $G$ on $n$ vertices with $\le \frac{n}{2}$ short cycles and no independent set of size $\frac{n}{2k}$. \end{proof} } \end{flashcard} \subsection{The structure of a random graph} What does $G \in \psp G(n, p)$ look like? How do the properties of $G$ vary as $p$ increases? For example, how does $\PP(\text{no isolated \gls{vertex}})$ behave? \glsadjdefn{isolated}{isolated}{\gls{vertex}} (A \gls{vertex} is \emph{isolated} if it has no \glspl{neighbour}). We might guess: \begin{center} \includegraphics[width=0.6\linewidth]{images/ce7842842fe043a1.png} \end{center} But in fact: \begin{center} \includegraphics[width=0.6\linewidth]{images/919db8529d3948d0.png} \end{center} Why does this happen? Where is the threshold? \subsubsection*{Probability Digression / Reminder} Let $X$ be a random variable taking values in $0, 1, 2, \ldots$. \textbf{To show $\PP(X = 0)$ is big:} enough to show $\mu = \EE(X)$ is small. Indeed, for any $t > 0$ have $\PP(X \ge t) t \le \mu$. 
So $\PP(X \ge t) \le \frac{\mu}{t}$ (Markov). In particular, $\PP(X \ge 1) \le \mu$, so $\PP(X = 0) \ge 1 - \mu$. \textbf{To show $\PP(X = 0)$ is small:} not enough to have $\mu$ large, for example \[ X = \begin{cases} 0 & \text{probability $\frac{99}{100}$} \\ 10^{10} & \text{probability $\frac{1}{100}$} \end{cases} \] So instead we look at the variance $V = \Var(X) = \EE((X - \mu)^2) = \EE(X^2) - \EE(X)^2$. Then \[ \PP(|X - \mu| \ge t) = \PP(|X - \mu|^2 \ge t^2) \le \frac{V}{t^2} \] by Markov (this is known as Chebyshev's inequality). So $\PP(|X - \mu| \ge \mu) \le \frac{V}{\mu^2}$, whence $\PP(X = 0) \le \frac{V}{\mu^2}$. Conclusion: to show $\PP(X = 0)$ is small, check $\frac{V}{\mu^2}$ is small. \begin{hiddenflashcard}[show-Px-0-big] \prompt{How to show $\PP(X = 0)$ is big?} \cloze{ Show $\EE(X)$ is small (sufficient by Markov's inequality). } \end{hiddenflashcard} \begin{hiddenflashcard}[show-Px-0-small] \prompt{How to show $\PP(X = 0)$ is small?} \cloze{ Show $\frac{\Var(X)}{\EE(X)^2}$ is small (using Chebyshev's inequality). } \end{hiddenflashcard} Suppose $X$ counts the number of some events $A$ that occur. Then $\mu = \EE(X) = \sum_A \PP(A)$. Variance? Have \begin{align*} \EE(X)^2 &= \sum_A \sum_B \PP(A) \PP(B) \\ \EE(X^2) &= \EE \left( \left( \sum_A \indicator{A} \right)^2 \right) \\ &= \EE \left( \sum_A \sum_B \indicator{A} \indicator{B} \right) \\ &= \sum_A \sum_B \EE(\indicator{A} \indicator{B}) \\ &= \sum_A \sum_B \PP(A) \PP(B \mid A) \end{align*} \begin{flashcard}[var-nice-formula] \fcscrap{So }$\Var(X) = \cloze{\sum_A \PP(A) \sum_B (\PP(B \mid A) - \PP(B))}$\cloze{. Key: $\PP(B \mid A) - \PP(B)$ is $0$ if $A$ and $B$ are independent.} \end{flashcard}