%! TEX root = PC.tex % vim: tw=80 ft=tex % 19/11/2025 09AM \textbf{Analysis of Algorithm:} We define the index of a partition $V_1, \ldots, V_k$ to be \[ \frac{1}{k^2} \sum_{i, j} (\density(V_i, V_j))^2 .\] % Note that this quantity is $\le 1$. % We show at each loop of the algorithm, the index increases by at least a % $\eps^5$. % % \begin{center} % \includegraphics[width=0.6\linewidth]{images/2ca165641d5e42f0.png} % \end{center} % % \begin{align*} % \frac{1}{r^2} \sum_{s, t = 1}^r \density(V_{is}, V_{jt})^2 - \density(V_i, % V_j)^2 % &= \frac{1}{r^2} \sum_{s, t} (\density(V_{is}, V_{jt}) - \density(V_i, % V_j))^2 \\ % &\ge \eps^2 \cdot \eps^2 % \end{align*} We claim that as we go from $V_1, \ldots, V_k$ to $\{V_{ij}\}$, we increase the index by at least $+\frac{\eps^5}{2}$. Fix $V_i$, $V_j$ and consider \begin{align*} (*) &= \frac{1}{r^2} \sum_{s, t = 1}^r \density(V_{is}, V_{jt})^2 - \density(V_i, V_j)^2 \\ &= \frac{1}{r^2} \sum_{s, t} (\density(V_{is}, V_{jt}) - \density(V_i, V_j))^2 \end{align*} Here we are using \[ \frac{1}{r^2} \sum_{s, t} \density(V_{is}, V_{jt}) = \density(V_i, V_j) .\] \begin{center} \includegraphics[width=0.6\linewidth]{images/17a770d9ec9841d7.png} \end{center} Note $(*) \ge 0$. Now assume that $V_i$, $V_j$ are not \gls{epsunif}. 
Define \begin{align*} S &= \{s : V_{is} \subset W_{ij}\} \\ T &= \{t : V_{jt} \subset W_{ji}\} \end{align*} So \begin{align*} (*) &\ge \frac{1}{r^2} \sum_{\substack{s \in S \\ t \in T}} (\density(V_{is}, V_{jt}) - \density(V_i, V_j))^2 \\ &\ge \frac{|S||T|}{r^2} \frac{1}{|S||T|} \sum_{\substack{s \in S \\ t \in T}} (\density(V_{is}, V_{jt}) - \density(V_i, V_j))^2 \\ &\ge \eps^2 \left( \frac{1}{|S||T|} \sum_{\substack{s \in S \\ t \in T}} \density(V_{is}, V_{jt}) - \density(V_i, V_j) \right)^2 \\ &= \eps^2 ( \density(W_{ij}, W_{ji}) - \density(V_i, V_j) )^2 \\ &\ge \eps^4 \end{align*} So since the number of non-\gls{epsunif} pairs is $\ge \eps \binom{k}{2}$, we get a $+\eps^4$ boost from an $\eps$ proportion of the pairs. Also, the other pairs don't hurt us. So the index increases by $\ge \frac{\eps^5}{2}$. \textbf{How to fix the tiny lie at $*$:} \begin{center} \includegraphics[width=0.2\linewidth]{images/834a2ed6382c46a7.png} \end{center} We can assume that we start the algorithm with $\gg \frac{1}{\eps^2}$ parts. Now each time we encounter a ``rounding issue'', we just throw out the remainder of the cell to some bin that we keep track of. At a given step, we throw out at most \[ \left( \frac{n}{4^k \cdot k} \right) \cdot 2^k \cdot k < \frac{n}{2^k} \ll\!\!\ll \eps n .\] Summing over all steps of the algorithm, we still throw out at most $\ll \eps n$. We just redistribute these vertices equally at the end. \begin{itemize} \item This does not ruin the \glsref[epsunif]{$\eps$-uniformity}: the new partition is at most \epsuniform{2\eps}. \item Also need to check that throwing out these bits does not affect the calculations by more than a small amount. \qedhere \end{itemize} \end{proof} \newpage \section{The Binomial Random Graph} \[ G(n, p) \] We are interested in sliding $p$ from $0$ to $1$. At what point do certain structures emerge? Questions: \begin{itemize} \item When do we expect to see a triangle? \item When do we expect $G(n, p)$ to be connected? 
\item When does $G(n, p)$ have a component that spans $\Omega(n)$ vertices? \item When does $G(n, p)$ have a Hamiltonian cycle? \end{itemize} Fix some $H$. When do we expect $G \supset H$, $G \sim G(n, p)$? \begin{fcdefn}[m] \glssymboldefn{m}% For $H$ a graph, define \[ m(H) = \max \left\{ \frac{e(F)}{|F|} : F \subset H, |F| \ge 1 \right\} .\] \end{fcdefn} \begin{example*} $H = K_3$, $\mdense(K_3) = 1$. \begin{center} \includegraphics[width=0.6\linewidth]{images/923a891bf775407d.png} \end{center} \end{example*} \begin{fcthm}[Bollobás, 80s] Assuming: - $H$ a graph - $G \sim G(n, p)$ Then: \[ \lim_{n \to \infty} \Pbb(G \supset H) = \begin{cases} 1 & \text{when $pn^{\frac{1}{\mdense(H)}} \to \infty$} \\ 0 & \text{when $pn^{\frac{1}{\mdense(H)}} \to 0$} \end{cases} \] \end{fcthm} \begin{proof} We use a second moment argument. Let \[ X = \text{\# of copies of $H$ in $G$} ,\] where $G \sim G(n, p)$. Then \[ \Ebb X = \Theta(n^{|H|} \cdot p^{e(H)}) .\] Since $\Var X = \Ebb X^2 - (\Ebb X)^2$, we will compute $\Ebb X^2$. \begin{align*} \Ebb X^2 &= \Ebb \left( \sum_H \indicator{H \subset G} \right)^2 \\ &= \sum_{H_1, H_2} \Pbb(H_1 \subset G, H_2 \subset G) \\ &\le (\Ebb X)^2 + (\Ebb X) + \sum_{\emptyset \neq F \subsetneq H} \sum_{\substack{H_1, H_2 \\ H_1 \cap H_2 = F}} p^{2e(H) - e(F)} \\ &\le (\Ebb X)^2 + (\Ebb X) + C \sum_{\emptyset \neq F \subsetneq H} n^{2|H| - |F|} p^{2e(H) - e(F)} \\ &\le (\Ebb X)^2 + (\Ebb X) + Cn^{2|H|} p^{2e(H)} \sum_F \frac{1}{n^{|F|} p^{e(F)}} \end{align*} Now note \[ \left( \frac{1}{np^{\frac{e(F)}{|F|}}} \right)^{|F|} \le \left( \frac{1}{np^{\mdense(H)}} \right)^{|F|} \to 0 \] when $pn^{\frac{1}{\mdense(H)}} \to \infty$. So \[ \Ebb X^2 = (1 + o(1)) (\Ebb X)^2 + \Ebb X . \qedhere \] \end{proof}