%! TEX root = ABF.tex % vim: tw=80 ft=tex % 12/03/2026 12PM \begin{fcdefn}[] Let $f : \Rbb^n \to \Rbb$ be a multilinear function $f = \sum_A \ft{f}(A) x_A$. Alternatively, think of $f$ as a formal multilinear polynomial. Define \begin{itemize} \item $\Ebb f = \ft{f}(\emptyset)$, \item $\Ebb f^2 = \sum_A \ft{f}(A)^2$, \item $\Var f = \sum_{A \neq \emptyset} \ft{f}(A)^2$, \item $\langle f, g \rangle = \sum_A \ft{f}(A) \ft{g}(A)$, \item $E_i f = \sum_{A \not\ni i} \ft{f}(A) x_A$, \item $\D_i f = \sum_{A \ni i} \ft{f}(A) x_{A \setminus \{i\}}$. \end{itemize} Note that $E_i f$ and $\D_i f$ do not depend on $x_i$ and also that $f = E_i f + x_i \D_i f$, and $\langle E_i f, x_i \D_i f \rangle = 0$. Define \begin{itemize} \item $\Inf_i f = \sum_{A \ni i} \ft{f}(A)^2$, \item $\totinf(f) = \sum_i \Inf_i f = \sum_A |A| \ft{f}(A)^2$. \end{itemize} One could also define $\noise_\rho f$, $\Stab_\rho f$. \end{fcdefn} Now let $X = (X_1, \ldots, X_n)$ be independent random variables with $\Ebb X_i = 0$, $\Ebb X_i^2 = 1$. Then define $F(X)$ to be $f$ evaluated at $(X_1, \ldots, X_n)$, i.e. $\sum_A \ft{f}(A) \prod_{i \in A} X_i$. Then it is easy to check that $\langle F, G \rangle = \langle f, g \rangle$, $\|F\|_2^2 = \|f\|_2^2$. For example, \begin{align*} \langle F, G \rangle &= \Ebb_X \left(\sum_A \ft{f}(A) \prod_{i \in A} X_i\right) \left( \sum_B \ft{g}(B) \prod_{i \in B} X_i \right) \\ &= \sum_{A, B} \ft{f}(A) \ft{g}(B) \, \Ebb \left[ \prod_{i \in A \symdiff B} X_i \prod_{i \in A \cap B} X_i^2 \right] \\ &= \sum_A \ft{f}(A) \ft{g}(A), \end{align*} since by independence and $\Ebb X_i = 0$ every term with $A \neq B$ vanishes, while $\Ebb X_i^2 = 1$ makes each diagonal term equal to $\ft{f}(A) \ft{g}(A)$. Also, defining $E_i F(X)$ to be $\Ebb[f(X_1, \ldots, X_n) \mid X_1, \ldots, X_{i - 1}, X_{i + 1}, \ldots, X_n]$ we have that $E_i F(X) = E_i f(X)$. 
Suppose that $(X_1, \ldots, X_n)$ are independent and \[ \Ebb X_i = 0, \qquad \Ebb X_i^2 = 1, \qquad \Ebb X_i^3 = 0, \qquad \Ebb X_i^4 \le 9 \tag{$*$} \label{lec15:eqstar} .\] Then the proof of \nameref{lemma:4.1} straightforwardly gives that if $f$ has degree at most $k$, then $\Ebb f(X_1, \ldots, X_n)^4 \le 9^k (\Ebb f(X_1, \ldots, X_n)^2)^2$. \begin{fcthm}[Invariance principle] % Theorem 8.2 \label{thm:8.2} Let $(X_1, \ldots, X_n)$ and $(Y_1, \ldots, Y_n)$ be sequences of random variables satisfying condition \eqref{lec15:eqstar}. Let $f$ be a multilinear polynomial of degree at most $k$ and let $\psi : \Rbb \to \Rbb$ satisfy that $\|\psi''''\|_\infty \le C$ (bounded fourth derivative). Then \[ |\Ebb \psi(f(X_1, \ldots, X_n)) - \Ebb \psi(f(Y_1, \ldots, Y_n))| \le \frac{C}{12} \cdot 9^k \sum_{i = 1}^{n} (\Inf_i f)^2 .\] \end{fcthm} \begin{remark*} It is possible to get a stronger result than this, but we prove this version because it can be proved with the version of Bonami's Lemma mentioned above. \end{remark*} \begin{example*} Examples of $f$ where the LHS of the above Theorem is large if we set $X_i \sim \Unif(\{-1, 1\})$, $Y_i \sim N(0, 1)$: \begin{itemize} \item $f(Z) = Z_1$. Then the distribution of $f(X)$ is uniform on $\{-1, 1\}$, so LHS is large. The RHS is large because $\Inf_1 f$ is large. \item $f(Z) = Z_1 \cdots Z_n$. Again, distribution of $f(X)$ is uniform on $\{-1, 1\}$. The RHS is large because $k$ is large, and also because all $\Inf$ terms are large. \end{itemize} \end{example*} \begin{proof} By the triangle inequality, the quantity we wish to bound is at most \[ \sum_{i = 1}^{n} |\Ebb \psi(f(Y_1, \ldots, Y_{i - 1}, X_i, X_{i + 1}, \ldots, X_n)) - \Ebb \psi(f(Y_1, \ldots, Y_{i - 1}, Y_i, X_{i + 1}, \ldots, X_n))| .\] Write $U_i = (Y_1, \ldots, Y_{i - 1}, X_{i + 1}, \ldots, X_n)$. 
Then we can rewrite each summand as \[ |\Ebb \psi(E_i f(U_i) + X_i \D_i f(U_i)) - \Ebb \psi(E_i f(U_i) + Y_i \D_i f(U_i))| .\] Let $u_i = E_i f(U_i)$, $v_i = \D_i f(U_i)$. So we can rewrite as \[ |\Ebb \psi(u_i + X_i v_i) - \Ebb \psi(u_i + Y_i v_i)| .\] But by Taylor's theorem with Lagrange remainder, \[ \psi(u_i + X_i v_i) = \psi(u_i) + X_i v_i \psi'(u_i) + \half X_i^2 v_i^2 \psi''(u_i) + \frac{1}{6} X_i^3 v_i^3 \psi'''(u_i) + \frac{1}{24} X_i^4 v_i^4 \psi''''(w_i) \] for some $w_i$ between $u_i$ and $u_i + X_i v_i$, and \[ \psi(u_i + Y_i v_i) = \psi(u_i) + Y_i v_i \psi'(u_i) + \half Y_i^2 v_i^2 \psi''(u_i) + \frac{1}{6} Y_i^3 v_i^3 \psi'''(u_i) + \frac{1}{24} Y_i^4 v_i^4 \psi''''(z_i) \] for some $z_i$ between $u_i$ and $u_i + Y_i v_i$. Taking expectations and subtracting, noting condition \eqref{lec15:eqstar} and that $X_i$ and $Y_i$ are independent of $u_i$ and $v_i$, we see that everything cancels apart from the error terms, so we get \[ \frac{1}{24} | \Ebb X_i^4 v_i^4 \psi''''(w_i) - \Ebb Y_i^4 v_i^4 \psi''''(z_i) | \le \frac{C}{24} (\Ebb X_i^4 v_i^4 + \Ebb Y_i^4 v_i^4) .\] But \[ \Ebb X_i^4 v_i^4 = \Ebb (X_i \D_i f(U_i))^4 = \Ebb (x_i \D_i f)(Y_1, \ldots, Y_{i - 1}, X_i, X_{i + 1}, \ldots, X_n)^4 .\] But $(Y_1, \ldots, Y_{i - 1}, X_i, X_{i + 1}, \ldots, X_n)$ satisfies \eqref{lec15:eqstar} and $x_i \D_i f$ has degree at most $k$. So Bonami's Lemma applies, and we get an upper bound of $9^k (\Ebb X_i^2 v_i^2)^2 = 9^k (\Ebb (\D_i f)^2)^2 = 9^k (\Inf_i f)^2$. Same for $Y_i$, so summing over $i$ gives the result. \end{proof} \subsubsection*{Gaussian Space} Let $x \in \Rbb$. We say that $y \sim N_\rho(x)$, \emph{$y$ is $\rho$-correlated with $x$} if $y \sim \rho x + \sqrt{1 - \rho^2} g$, where $g \sim N(0, 1)$. If $x \sim N(0, 1)$ and $y \sim N_\rho(x)$, then there are independent Gaussians $g_1$, $g_2$ with $x = g_1$, $y = \rho g_1 + \sqrt{1 - \rho^2} g_2$, so $y \sim N(0, 1)$ and $\Ebb xy = \rho \Ebb g_1^2 + \sqrt{1 - \rho^2} \Ebb g_1 g_2 = \rho$. 
A nice way to construct a pair $(x, y)$ of $\rho$-correlated Gaussians is to take unit vectors $u, v \in \Rbb^2$, $g \sim N(0, 1)^2$ and set $x = \langle u, g \rangle$, $y = \langle v, g \rangle$, choosing $u, v$ so that $\langle u, v \rangle = \rho$. Writing $g = (g_1, g_2)$, we have \[ \Ebb xy = u_1 v_1 \Ebb g_1^2 + u_2 v_2 \Ebb g_2^2 = u_1 v_1 + u_2 v_2 = \langle u, v \rangle = \rho ,\] where the cross terms $(u_1 v_2 + u_2 v_1) \Ebb g_1 g_2$ vanish because $g_1$ and $g_2$ are independent with mean zero.