%! TEX root = EMC.tex % vim: tw=50 % 06/02/2025 09AM \vspace{-1em} \textbf{Key idea:} we now regard $x_1, \ldots, x_n$ as a \emph{random} enumeration of $X$ and take the average. For each $x \in X$, define the \emph{contribution} of $x$ to be \[ \log(d_{x_1, \ldots, x_{i - 1}}^\sigma(x_i)) \] where $x_i = x$ (note that this ``contribution'' is a random variable rather than a constant). We shall now fix $\sigma$. Let the neighbours of $x$ be $y_1, \ldots, y_k$. \begin{center} \includegraphics[width=0.4\linewidth]{images/0b09257615d24c01.png} \end{center} Then one of the $y_j$ will be $\sigma(x)$, say $y_h$. Note that $d_{x_1, \ldots, x_{i - 1}}^\sigma(x_i)$ (given that $x_i = x$) is \[ d(x) - |\{j : \text{$\sigma^{-1}(y_j)$ comes earlier than $x = \sigma^{-1}(y_h)$}\}| .\] All positions of $\sigma^{-1}(y_h)$ are equally likely, so the average contribution of $x$ is \[ \frac{1}{d(x)} \left( \log d(x) + \log (d(x) - 1) + \cdots + \log 1 \right) = \frac{1}{d(x)} \log (d(x)!) .\] By linearity of expectation, \[ \ent \sigma \le \sum_{x \in X} \frac{1}{d(x)} \log(d(x)!) ,\] so the number of matchings is at most \[ \prod_{x \in X} (d(x)!)^{\frac{1}{d(x)}} . \qedhere \] \end{proof} \begin{fcdefnstar}[$1$-factor] \glsnoundefn{onefac}{$1$-factor}{$1$-factors}% Let $G$ be a graph with $2n$ vertices. A \emph{$1$-factor} in $G$ is a collection of $n$ disjoint edges. \end{fcdefnstar} \begin{fcthm}[Kahn--Lov\'asz] \label{thm:3.2} Assuming: - $G$ a graph with $2n$ vertices Then: the number of \glspl{onefac} in $G$ is at most \[ \prod_{x \in V(G)} (d(x)!)^{\frac{1}{2d(x)}} .\] \end{fcthm} \begin{proof}[Proof (Alon, Friedman)] Let $\mathcal{M}$ be the set of $1$-factors of $G$, and let $(M_1, M_2)$ be a uniform random element of $\mathcal{M}^2$. For each $M_1, M_2$, the union $M_1 \cup M_2$ is a collection of disjoint edges and even cycles that covers all the vertices of $G$.
\begin{center} \includegraphics[width=0.4\linewidth]{images/a92d21cb6dfe425a.png} \end{center} Call such a union a \emph{cover of $G$ by edges and even cycles}. If we are given such a cover, then the number of pairs $(M_1, M_2)$ that could give rise to it is $2^k$, where $k$ is the number of even cycles. Now let's build a bipartite graph $G_2$ out of $G$. $G_2$ has two vertex sets (call them $V_1, V_2$), both copies of $V(G)$. Join $x \in V_1$ to $y \in V_2$ if and only if $xy \in E(G)$. For example: \begin{center} \includegraphics[width=0.6\linewidth]{images/446a168023094c19.png} \end{center} By \nameref{thm:3.1}, the number of perfect matchings in $G_2$ is $\le \prod_{x \in V(G)} (d(x)!)^{\frac{1}{d(x)}}$. Each matching gives a permutation $\sigma$ of $V(G)$, such that $x\sigma(x) \in E(G)$ for every $x \in V(G)$. Each such $\sigma$ has a cycle decomposition, and each cycle gives a cycle in $G$. So $\sigma$ gives a cover of $V(G)$ by isolated vertices, edges and cycles. Given such a cover with $k$ cycles, each cycle can be traversed in two directions, so the number of $\sigma$ that give rise to it is $2^k$, where $k$ is the number of cycles. So there is an injection from $\mathcal{M}^2$ to the set of perfect matchings of $G_2$, since every cover by edges and even cycles is a cover by vertices, edges and cycles. So \[ |\mathcal{M}|^2 \le \prod_{x \in V(G)} (d(x)!)^{\frac{1}{d(x)}} . \qedhere \] \end{proof} \newpage \section{Shearer's lemma and applications} \begin{notation*} Given a random variable $X = (X_1, \ldots, X_n)$ and $A = \{a_1, \ldots, a_k\} \subset [n]$ with $a_1 < a_2 < \cdots < a_k$, write $X_A$ for the random variable $(X_{a_1}, X_{a_2}, \ldots, X_{a_k})$.
\end{notation*} \begin{fclemma}[Shearer] \label{lemma:4.1} Assuming: - $X = (X_1, \ldots, X_n)$ a random variable - $\mathcal{A}$ a family of subsets of $[n]$ such that every $i \in [n]$ belongs to at least $r$ of the sets $A \in \mathcal{A}$ Then: \[ \ent{X_1, \ldots, X_n} \le \frac{1}{r} \sum_{A \in \mathcal{A}} \ent{X_A} .\] \end{fclemma} \begin{proof} For each $a \in [n]$, write $X_{< a}$ for $(X_1, \ldots, X_{a - 1})$. For each $A \in \mathcal{A}$, $A = \{a_1, \ldots, a_k\}$ with $a_1 < \cdots < a_k$, we have \begin{align*} \ent{X_A} &= \ent{X_{a_1}} + \cent{X_{a_2}}{X_{a_1}} + \cdots + \cent{X_{a_k}}{X_{a_1}, \ldots, X_{a_{k-1}}} \\ &\ge \cent{X_{a_1}}{X_{