%! TEX root = ABF.tex
% vim: tw=80 ft=tex
% 10/02/2026 12PM

\begin{notation*}
  \glssymboldefn{degkpart}%
  Given $f : \{-1, 1\}^n \to \Rbb$, write $f^{(=k)}$ for the
  \emph{degree-$k$ part of $f$}, i.e. $\sum_{|A| = k} \ft{f}(A) x_A$.
\end{notation*}

\begin{fcthm}[] % Theorem 4.6
  \label{thm:4.6}
  Let $\rho = \frac{1}{\sqrt{3}}$. Then $\norm \|\noise_\rho f\|_4 \le \norm
  \|f\|_2$ for every $f : \{-1, 1\}^n \to \Rbb$.
\end{fcthm}

\begin{proof}
  \begin{align*}
    \norm \|\noise_\rho f\|_4
    &= \norm \|\sum_{k = 0}^{n} \noise_\rho f^{\degkpart}\|_4 \\
    &= \norm \|\sum_{k = 0}^{n} \rho^k f^{\degkpart}\|_4 \\
    &\le \sum_{k = 0}^{n} \rho^k \norm \|f^{\degkpart}\|_4 \\
    &\le \sum_{k = 0}^{n} \rho^k \cdot 3^{k / 2} \norm \|f^{\degkpart}\|_2
      && \text{(by \nameref{lemma:4.1})} \\
    &= \sum_{k = 0}^{n} \norm \|f^{\degkpart}\|_2 \\
    &\le \sqrt{n + 1} \left( \sum_{k = 0}^{n} \norm\|f^{\degkpart}\|_2^2 \right)^{\half}
      && \text{(by Cauchy--Schwarz over the $n + 1$ terms)} \\
    &= \sqrt{n + 1} \norm \|f\|_2
      && \text{(since the $f^{\degkpart}$ are orthogonal)}
  \end{align*}
  To get rid of the $\sqrt{n + 1}$ we use the \emph{tensor-power trick}. Write
  $f^{\otimes m} : (\{-1, 1\}^n)^m \to \Rbb$ for the function
  \[ f^{\otimes m}(x^1, \ldots, x^m) = f(x^1) \cdots f(x^m) .\]
  It is easy to check that $\norm \|\noise_\rho f^{\otimes m}\|_4 = \norm
  \|\noise_\rho f\|_4^m$ and $\norm \|f^{\otimes m}\|_2 = \norm \|f\|_2^m$.
  Therefore,
  \[ \norm \|\noise_\rho f\|_4^m
     = \norm \|\noise_\rho f^{\otimes m}\|_4
     \le \sqrt{mn + 1} \norm \|f^{\otimes m}\|_2
     = \sqrt{mn + 1} \norm \|f\|_2^m ,\]
  where the inequality is what we proved earlier, applied with $mn$ in place of
  $n$. So $\norm \|\noise_\rho f\|_4 \le (mn + 1)^{\frac{1}{2m}} \norm \|f\|_2$
  for every $m$. Letting $m \to \infty$, we deduce that
  $\norm \|\noise_\rho f\|_4 \le \norm \|f\|_2$.
\end{proof}

\begin{remark*}
  The tensor trick here would have worked even if $\sqrt{n + 1}$ was replaced
  by any subexponential function.
\end{remark*}

\begin{fccoro}[] % Corollary 4.7
  \label{coro:4.7}
  Let $\rho = \frac{1}{\sqrt{3}}$ and let $f : \{-1, 1\}^n \to \Rbb$.
  Then $\norm \|\noise_\rho f\|_2 \le \norm \|f\|_{4 / 3}$.
\end{fccoro}

\begin{proof}
  Note that $\noise_\rho$ is self-adjoint (see the example sheet, or note that
  it easily follows from $\ft{\noise_\rho f}(A) = \rho^{|A|} \ft{f}(A)$). So
  \begin{align*}
    \norm \|\noise_\rho f\|_2
    &= \max_{\|g\|_2 = 1} \ip \langle \noise_\rho f, g \rangle \\
    &= \max_{\|g\|_2 = 1} \ip \langle f, \noise_\rho g \rangle
      &&\text{(self-adjoint)} \\
    &\le \max_{\|h\|_4 = 1} \ip \langle f, h \rangle
      &&\text{(by \cref{thm:4.6}, $\|\noise_\rho g\|_4 \le \|g\|_2 = 1$)} \\
    &= \norm \|f\|_{4 / 3}
      &&\text{(since $L_{4 / 3}$ is the dual of $L_4$)} \qedhere
  \end{align*}
\end{proof}

\begin{remark*}
  On the last $=$ in the above proof, we could instead just say $\le$ by
  Hölder.
\end{remark*}

\begin{remark*}
  $\norm \|\noise_\rho f\|_2^2
   = \ip \langle \noise_\rho f, \noise_\rho f \rangle
   = \ip \langle \noise_{\rho^2} f, f \rangle
   = \ip \langle \noise_{\third} f, f \rangle
   = \stab_{\third} f$.
  So an equivalent formulation of \cref{thm:4.6} is that
  $\stab_{\third} f \le \norm \|f\|_{4 / 3}^2$.
\end{remark*}

\newpage

\section{The KKL theorem and Friedgut's junta theorem}

A \emph{dictator} was a single variable that dictates the function. A
\emph{junta} is a small set of variables that dictate the function.

We view $\{-1, 1\}^n$ as a graph, where a pair of vertices is connected by an
edge if they differ in a single coordinate.

\glssymboldefn{edgebound}%
The \emph{edge boundary} of a subset $A$ of a graph is the set of edges with
one vertex in $A$ and one in $A^c$. We denote the edge boundary by
$\partial A$.

\begin{fcthm}[The edge-isoperimetric inequality in the cube -- approximate version] % Theorem 5.1
  \label{thm:5.1}
  Let $A \subset \{0, 1\}^n$. Then
  \[ |\edgebound A| \ge |A|(n - \log_2 |A|) .\]
\end{fcthm}

\begin{remark*}
  Sharp for subcubes of dimension $k$.
\end{remark*}

\begin{proof}
  Let $\theta A$ be the set of internal edges of $A$ -- i.e. edges $ab$ with
  $a, b \in A$.
  Then $n|A| = 2|\theta A| + |\edgebound A|$, so the theorem is equivalent to
  showing that
  \[ |\theta A| \le \half |A| \log_2 |A| \]
  for every $A$. Induction on $n$. If $n = 1$ the result is true ($3$ cases to
  check). Let $A_0 = \{x \in A : x_n = 0\}$, $A_1 = \{x \in A : x_n = 1\}$.
  Then
  \[ |\theta A| \le |\theta A_0| + |\theta A_1| + \min \{|A_0|, |A_1|\}
     \le \half |A_0| \log_2 |A_0| + \half |A_1| \log_2 |A_1|
        + \min \{|A_0|, |A_1|\} \]
  by induction hypothesis. Want
  $\le \half (|A_0| + |A_1|) \log_2(|A_0| + |A_1|)$. So it's enough to prove
  \[ \half x \log_2 x + \half y \log_2 y + x \le \half (x + y) \log_2 (x + y) \]
  whenever $0 \le x \le y$, or equivalently
  \[ x \log x + y \log y + 2x \log 2 \le (x + y) \log (x + y) .\]
  If $x = y$, we need
  \[ 2x \log x + 2x \log 2 \le 2x \log(2x) ,\]
  which is indeed true (with equality). Now differentiate with respect to $y$.
  The left hand side becomes $\log y + 1$, and the right hand side becomes
  $\log(x + y) + 1$. Since $x \ge 0$, the left hand derivative is at most the
  right hand derivative, so the inequality is preserved as $y$ increases from
  $x$.
\end{proof}

\begin{remark*}
  If $|A| = 2^{n - 1}$, this tells us that the edge-boundary is minimised by a
  half space.

  Let $f : \{-1, 1\}^n \to \{-1, 1\}$ be a function with $\Ebb f = 0$, and
  therefore $\Var f = 1$. If $\totinf(f) = 1$, then
  $\sum_A |A| \ft{f}(A)^2 = 1$, but $\ft{f}(\emptyset) = 0$ and also
  $\sum_A \ft{f}(A)^2 = 1$ by \gls{parseval}, so equality holds only if $f$ is
  linear, so $f$ is a dictator. Similarly, if equality almost holds, then by
  FKN $f$ is almost a dictator.
\end{remark*}

The above remark says that to minimise $\totinf(f)$, the best thing is to have
a dictator: just one variable contributing to $\totinf(f)$. What if we forbid
this, for example by asking that each variable has the same influence? Might
guess that taking majority vote is best for this, but as mentioned before this
has $\totinf(f) \approx \sqrt{n}$. It turns out the following is much better:

The ``tribes'' function of Ben--Or and Linial: Let $k, m \in \Nbb$.
Let $n = km$, write $[n] = A_1 \cup \cdots \cup A_m$, $|A_i| = k$. Define
$f(x) = 1$ if and only if there exists $i$ such that $x_j = 1$ for every
$j \in A_i$. This achieves $\totinf(f) = c \log n$, i.e. each of the $n$
variables has influence $\frac{c \log n}{n}$ (once we optimise $k$ and $m$).