%! TEX root = ABF.tex % vim: tw=80 ft=tex % 10/03/2026 12PM \begin{fcdefn}[Modified upper shadow] \glssymboldefn{modifieduppershadow}% Let $n \in \Nbb$, let $J \subset [n]$, let $r < s$ and let $\mathcal{A} \subset [n]^{(r)}$. Define the \emph{modified upper shadow} $\partial_J^s \mathcal{A}$ to be $\{B \in [n]^{(s)} : A \subset B \text{ and } (B \setminus A) \cap J = \emptyset \text{ for some } A \in \mathcal{A}\}$. \end{fcdefn} \begin{fclemma}[] % Lemma 7.7 \label{lemma:7.7} Let $n, r, s, J, \mathcal{A}$ be as above. Let $\alpha = \frac{|\mathcal{A}|}{\binom{n}{r}}$ and $\beta = \frac{|\mshad_J^s \mathcal{A}|}{\binom{n}{s}}$. Assume that $r \le \frac{n}{2}$. Then \[ \beta \ge \alpha \left( 1 - \frac{2|J|}{n} \right)^{s - r} .\] \end{fclemma} \begin{proof} Pick a uniformly random pair $(A, B)$ with $A \in [n]^{(r)}$, $B \in [n]^{(s)}$, $A \subset B$. Then \[ \Pbb[(A, B) \in \mathcal{A} \times \mshad_J^s \mathcal{A}] \ge \alpha \left( 1 - \frac{2|J|}{n} \right)^{s - r} ,\] since $\Pbb[A \in \mathcal{A}] = \alpha$ and, conditionally on $A \in \mathcal{A}$, the event $(B \setminus A) \cap J = \emptyset$ (which then puts $B$ in $\mshad_J^s \mathcal{A}$) has probability at least $\left( 1 - \frac{2|J|}{n} \right)^{s - r}$ (this is where the assumption $r \le \frac{n}{2}$ is used). Also, since $B$ is uniform on $[n]^{(s)}$, the probability is at most $\beta$. The result follows. \end{proof} \begin{fccoro}[] % Corollary 7.8 \label{coro:7.8} For every $p \in (0, \half)$, $\eps > 0$, there exists $m$ such that for every $n \in \Nbb$ and every intersecting family $\mathcal{A} \subset [n]^{(r)}$ where $r = pn$, there exists $J \subset [n]$, $|J| \le m$, and an intersecting family $\mathcal{B}$ of subsets of $J$ such that $|\mathcal{A} \setminus \ol{\mathcal{B}}| \le \eps \binom{n}{r}$. \end{fccoro} \begin{proof} Suppose not. Apply the \nameref{thm:7.5} Theorem to $\mathcal{A}$ to obtain $J$ and an intersecting family $\mathcal{B}$ of subsets of $J$ with $\mup(\ol{\mathcal{A}} \setminus \ol{\mathcal{B}}) \le \frac{\eps}{4}$. Let $\mathcal{C} = \mathcal{A} \setminus \ol{\mathcal{B}}$; since we are supposing the corollary fails, $\mathcal{C}$ has density $\ge \eps$. Note that since $\mathcal{C} \cap \ol{\mathcal{B}} = \emptyset$, $\mshad_J^s \mathcal{C} \cap \ol{\mathcal{B}} = \emptyset$ for every $s > r$.
Also, if $s \le r + n^{2 / 3}$, then by Lemma~\ref{lemma:7.7} (with Bernoulli's inequality $(1 - x)^k \ge 1 - kx$ for the second step), \[ \frac{|\mshad_J^s \mathcal{C}|}{\binom{n}{s}} \ge \eps\left(1 - \frac{2|J|}{n}\right)^{n^{2 / 3}} \ge \eps \left( 1 - \frac{2|J|}{n^{1 / 3}} \right) \ge \frac{3\eps}{4} ,\] for $n$ sufficiently large. Now let $\mathcal{D} = \bigcup_{s \ge r} \mshad_J^s \mathcal{C}$ (where we set $\mshad_J^r \mathcal{C} = \mathcal{C}$). Then (by the law of total probability, since the layer-$s$ part of $\mathcal{D}$ is exactly $\mshad_J^s \mathcal{C}$), \[ \mup(\mathcal{D}) = \sum_{s \ge r} \mup([n]^{(s)}) \cdot \left[ \frac{|\mshad_J^s \mathcal{C}|}{\binom{n}{s}} \right] \ge \frac{3\eps}{4} \sum_{r \le s \le r + n^{2 / 3}} \mup([n]^{(s)}) \ge \frac{3\eps}{4} (1 - o(1)) \ge \frac{5\eps}{16} .\] But every member of $\mathcal{D}$ contains a member of $\mathcal{C} \subset \mathcal{A}$, and $\mathcal{D} \cap \ol{\mathcal{B}} = \emptyset$ by the note above, so $\mathcal{D} \subset \ol{\mathcal{A}} \setminus \ol{\mathcal{B}}$, so this is a contradiction. \end{proof} \newpage \section{Invariance principles} \begin{example*} Let $X_i$ be uniform on $\{-1, 1\}$, $Y_i \sim N(0, 1)$. Then $\frac{1}{\sqrt{n}} (X_1 + \cdots + X_n) \approx \frac{1}{\sqrt{n}} (Y_1 + \cdots + Y_n) \sim N(0, 1)$. Here, $\approx$ is meant to mean ``has approximately the same distribution as''. Here, we saw that replacing $X_i$ by another variable $Y_i$ with roughly similar properties (e.g.\ same mean and variance) didn't affect the distribution of the sum by much. How can we define ``approximately same distribution''? You may have seen before that we can define it as $|\Pbb[X \le t] - \Pbb[Y \le t]| \le \eps$ holding for all $t$. This can be rephrased in terms of $\Ebb \indicator{X \le t}$. We will instead use a notion of similar distribution where we use continuous functions (in fact we will even require stronger conditions than this). \end{example*} \begin{fcthm}[Generalisation / modification of the Berry--Esseen Theorem] % Theorem 8.1 \label{thm:8.1} Let $X_1, \ldots, X_n$ and $Y_1, \ldots, Y_n$ be sequences of random variables, with $X_1, \ldots, X_n, Y_1, \ldots, Y_n$ all independent. Suppose that $\Ebb X_i = \Ebb Y_i$ and $\Ebb X_i^2 = \Ebb Y_i^2$ for each $i$. Let $\psi \colon \Rbb \to \Rbb$ be such that $\|\psi'''\|_\infty \le C$ (bounded third derivative).
Then \[ |\Ebb \psi(X_1 + \cdots + X_n) - \Ebb \psi(Y_1 + \cdots + Y_n)| \le \frac{1}{6} C \left( \sum_{i = 1}^{n} (\|X_i\|_3^3 + \|Y_i\|_3^3) \right) .\] \end{fcthm} \begin{note*} For $Y \sim N(0, 1)$, we have $\|Y\|_3^3 = 2\sqrt{\frac{2}{\pi}}$ and $\|Y\|_4^4 = 3$ (will be an exercise on the example sheet). \end{note*} \begin{proof} By the triangle inequality (telescoping, replacing the $X_i$ by the $Y_i$ one at a time), the quantity we wish to bound is at most \[ \sum_{i = 1}^{n} |\Ebb \psi(Y_1 + \cdots + Y_{i - 1} + X_i + X_{i + 1} + \cdots + X_n) - \Ebb \psi(Y_1 + \cdots + Y_{i - 1} + Y_i + X_{i + 1} + \cdots + X_n)| .\] Write $U_i$ for $Y_1 + \cdots + Y_{i - 1} + X_{i + 1} + \cdots + X_n$. So the above is \[ \sum_{i = 1}^{n} |\Ebb \psi(U_i + X_i) - \Ebb \psi(U_i + Y_i)| .\] By Taylor's Theorem, \begin{align*} \psi(U_i + X_i) &= \psi(U_i) + X_i \psi'(U_i) + \frac{X_i^2}{2} \psi''(U_i) + \frac{X_i^3}{6} \psi'''(V_i) \\ \psi(U_i + Y_i) &= \psi(U_i) + Y_i \psi'(U_i) + \frac{Y_i^2}{2} \psi''(U_i) + \frac{Y_i^3}{6} \psi'''(W_i) \end{align*} where $V_i$ is between $U_i$ and $U_i + X_i$, $W_i$ is between $U_i$ and $U_i + Y_i$. Taking expectations and subtracting, and using the fact that $\Ebb X_i = \Ebb Y_i$ and $\Ebb X_i^2 = \Ebb Y_i^2$ and also that $X_i$ and $Y_i$ are independent of $U_i$, we get \[ \Ebb\left( \frac{X_i^3}{6} \psi'''(V_i) - \frac{Y_i^3}{6} \psi'''(W_i) \right) ,\] which has size at most $\frac{C}{6}(\Ebb|X_i|^3 + \Ebb |Y_i|^3) = \frac{C}{6} (\|X_i\|_3^3 + \|Y_i\|_3^3)$. Summing gives the result. \end{proof} \begin{fccoro}[] % Corollary 8.2 \label{coro:7.2} Let $X_1, \ldots, X_n$ be independent with $\Ebb X_i = 0$ and $\Ebb X_i^2 = \sigma_i^2$ with $\sum \sigma_i^2 = 1$. Let $\psi$ be such that $\|\psi'''\|_\infty \le C$. Then \[ \left| \Ebb \psi \left( \sum_{i = 1}^{n} X_i \right) - \Ebb \psi (Y) \right| \le \frac{C}{6} \left( \sum_{i = 1}^n \|X_i\|_3^3 + 2 \sum_{i = 1}^n \sigma_i^3 \sqrt{\frac{2}{\pi}} \right) \] where $Y \sim N(0, 1)$.
\end{fccoro} \begin{proof} Let $Y_1, \ldots, Y_n$ be independent normal random variables, independent also of the $X_i$, with mean zero and $\Ebb Y_i^2 = \sigma_i^2$. Then $\sum_{i = 1}^n Y_i \sim N(0, 1)$. By the previous theorem, together with $\|Y_i\|_3^3 = 2 \sigma_i^3 \sqrt{\frac{2}{\pi}}$ (rescaling the value in the note above), we get a bound of \[ \frac{C}{6} \left( \sum_{i = 1}^n \|X_i\|_3^3 + 2 \sum_{i = 1}^n \sigma_i^3 \sqrt{\frac{2}{\pi}} \right) . \qedhere \] \end{proof}