% vim: tw=50
% 22/10/2022 11AM
\begin{enumerate}[(1)] \setcounter{enumi}{3} \item[] \vspace{-1\baselineskip} \begin{proof} Let $(f_n)$ be a Cauchy sequence in $B(X)$. Then $(f_n)$ is uniformly Cauchy so by the general principle of uniform convergence it is uniformly convergent. That is, $f_n \to f$ uniformly for some $f : X \to \RR$. As $f_n \to f$ uniformly we know $f_n - f$ is bounded for $n$ sufficiently large. Take such an $n$. Then $f_n - f$ and $f_n$ are bounded so $f = f_n - (f_n - f)$ is bounded. That is, $f \in B(X)$. Finally, $f_n \to f$ uniformly so $d(f_n, f) \to 0$, i.e. $f_n \to f$ in $(B(X), d)$. \end{proof} \begin{remark*} In many ways, this is typical of a proof that a given space $(X, d)$ is complete: \begin{enumerate}[(i)] \item Take $(x_n)$ Cauchy in $X$; \item Construct / find a putative limit object $x$ where it seems $(x_n)$ converges to $x$ in some sense; \item Show $x \in X$; \item Show $x_n \to x$ in the metric space $(X, d)$, i.e. that $d(x_n, x) \to 0$. \end{enumerate} This is often tricky / fiddly / annoying / repetitive / boring. But need to take care as, for example, it's tempting to talk about $d(x_n, x)$ while doing (ii) or (iii); but it makes no sense to write `$d(x_n, x)$' until we've completed (iii), as $d$ is only defined on $X^2$. \end{remark*} \begin{hiddenflashcard}[proving-completeness] How to check if a metric space is complete? \\ \cloze{ \begin{enumerate}[(i)] \item Take a Cauchy sequence $(x_n)$ in $X$. \item Find a candidate limit $x$ to which it seems that $(x_n)$ converges. \item Show $x \in X$. \item Now show $x_n \to x$, i.e. $d(x_n, x) \to 0$. \end{enumerate} The reason it's important that we do this in order is because it makes no sense to talk about $d(x_n, x)$ in (iv) until we've actually shown that $x \in X$! } \end{hiddenflashcard} \item If $[a, b]$ is a closed interval then $C([a, b])$ with the uniform metric $d$ is complete. \begin{proof} \begin{enumerate}[(i)] \item Let $(f_n)$ be a Cauchy sequence in $C([a, b])$. 
\item We know $C([a, b])$ is a subspace of $B([a, b])$ with uniform metric. We know $B([a, b])$ is complete and $(f_n)$ is a Cauchy sequence in $B([a, b])$ so in $B([a, b])$, $f_n \to f$ for some $f$. \item Each function is continuous and $f_n \to f$ uniformly so $f$ is continuous, i.e. $f \in C([a, b])$. \item Finally, each $f_n \in C([a, b])$, $f \in C([a, b])$ and $f_n \to f$ uniformly so $d(f_n, f) \to 0$. \end{enumerate} \end{proof} This generalises: \begin{definition*} Let $(X, d)$ be a metric space and $Y \subset X$. We say $Y$ is \emph{closed} if whenever $(x_n)$ is a sequence in $Y$ with $x_n \to x \in X$ then $x \in Y$. \end{definition*} \begin{proposition} A closed subset of a complete metric space is complete. \end{proposition} \begin{remark*} This \emph{does} make sense: if $Y \subset X$ then $Y$ is itself a metric space as a subspace of $X$ so can say for example `$Y$ is complete' to mean the metric space $Y$ (as a subspace of $X$) is complete. Could do exactly the same with any further properties of metric spaces we define. \end{remark*} \begin{proof} Let $(X, d)$ be a metric space and $Y \subset X$ with $X$ complete and $Y$ closed. \begin{enumerate}[(i)] \item Let $(x_n)$ be a Cauchy sequence in $Y$. \item Now $(x_n)$ is a Cauchy sequence in $X$ so by completeness $x_n \to x$ in $X$ for some $x \in X$. \item $Y \subset X$ is closed so $x \in Y$. \item Finally we now have each $x_n \in Y$, $x \in Y$ and $x_n \to x$ in $X$ so $d(x_n, x) \to 0$ so $x_n \to x$ in $Y$. \end{enumerate} \end{proof} \item Define \[ l_1 = \{(x_n)_{n \ge 1} \in \RR^\NN \mid \sum_{n = 1}^\infty |x_n| \text{ converges}\} \] Define a metric $d$ on $l_1$ by \[ d((x_n), (y_n)) = \sum_{n = 1}^\infty |x_n - y_n| \] Note we have $\sum |x_n|$, $\sum |y_n|$ converge and for each $n$, $|x_n - y_n| \le |x_n| + |y_n|$ so by comparison test $\sum |x_n - y_n|$ converges. So $d$ is well-defined. Easy to check $d$ is a metric on $l_1$. Then $(l_1, d)$ is complete. 
\begin{proof} \begin{enumerate}[(i)] \item Let $(x^{(n)})_{n \ge 1}$ be a Cauchy sequence in $l_1$, so for each $n$, $(x_i^{(n)})_{i \ge 1}$ is a sequence in $\RR$ with $\sum_{i = 1}^\infty |x_i^{(n)}|$ convergent. \item For each $i$, $(x_i^{(n)})_{n \ge 1}$ is a Cauchy sequence in $\RR$, since if $y, z \in l_1$, then $|y_i - z_i| \le d(y, z)$. But $\RR$ is complete, so for each $i$ we can find $x_i \in \RR$ with $x_i^{(n)} \to x_i$ as $n \to \infty$. Let $x = (x_1, x_2, x_3, \dots) \in \RR^\NN$. \item We next show $x \in l_1$, i.e. that $\sum_{i = 1}^\infty |x_i|$ converges. Given $y \in l_1$, define $\sigma(y) = \sum_{i = 1}^\infty |y_i|$, i.e. $\sigma(y) = d(y, z)$ where $z$ is the constant zero sequence. We now have, for any $m, n$, \begin{align*} \sigma(x^{(m)}) &= d(x^{(m)}, z) \\ &\le d(x^{(m)}, x^{(n)}) + d(x^{(n)}, z) \\ &= d(x^{(m)}, x^{(n)}) + \sigma(x^{(n)}) \end{align*} So \[ \sigma(x^{(m)}) - \sigma(x^{(n)}) \le d(x^{(m)}, x^{(n)}) .\] But we can find a similar inequality by swapping $m$ and $n$, so \[ |\sigma(x^{(m)}) - \sigma(x^{(n)})| \le d(x^{(m)}, x^{(n)}) \] Hence $(\sigma(x^{(m)}))_{m \ge 1}$ is a Cauchy sequence in $\RR$, and so by the general principle of convergence it converges, say $\sigma(x^{(m)}) \to K$ as $m \to \infty$. \begin{claim*} For any $I \in \NN$, $\sum_{i = 1}^I |x_i| \le K + 2$. \end{claim*} \begin{proof} As $\sigma(x^{(n)}) \to K$ as $n \to \infty$ we can find $N_1$ such that for all $n \ge N_1$, \[ \sum_{i = 1}^\infty |x_i^{(n)}| \le K + 1 \] This also implies that for all $n \ge N_1$, \[ \sum_{i = 1}^I |x_i^{(n)}| \le K + 1 \] Next, for each $i \in \{1, 2, \dots, I\}$ we have $x_i^{(n)} \to x_i$ as $n \to \infty$ so we can find $N_2$ such that \[ n \ge N_2 \implies \forall i \in \{1, \dots, I\},\ |x_i^{(n)} - x_i| < \frac{1}{I} \] Now let $n = \max\{N_1, N_2\}$. 
Then \begin{align*} \sum_{i = 1}^I |x_i| &\le \sum_{i = 1}^I |x_i^{(n)}| + \sum_{i = 1}^I |x_i^{(n)} - x_i| \\ &\le K + 1 + I \times \frac{1}{I} \\ &= K + 2 \qedhere \end{align*} \end{proof} Now the partial sums of $\sum |x_i|$ are increasing and bounded above so $\sum |x_i|$ converges. That is, $x \in l_1$. \item Finally, we need to check $x^{(n)} \to x$ as $n \to \infty$ in $l_1$, i.e. that $d(x^{(n)}, x) \to 0$ as $n \to \infty$. We have, for all $n$, $I$, \begin{align*} d(x^{(n)}, x) &= \sum_{i = 1}^\infty |x_i^{(n)} - x_i| \\ &\le \sum_{i = 1}^I |x_i^{(n)} - x_i| + \sum_{i = I + 1}^\infty |x_i^{(n)}| + \sum_{i = I + 1}^\infty |x_i| \end{align*} Let $\eps > 0$. We know $\sum |x_i|$ is convergent (as $x \in l_1$) so we can pick $I_1$ such that $\sum_{i = I_1 + 1}^\infty |x_i| < \eps$. As $(x^{(n)})$ is Cauchy, we can find $N_1$ such that \[ m, n \ge N_1 \implies d(x^{(m)}, x^{(n)}) < \eps \] As $\sum_i |x_i^{(N_1)}|$ converges, we can find $I_2$ such that $\sum_{i = I_2 + 1}^\infty |x_i^{(N_1)}| < \eps$. Then \begin{align*} n \ge N_1 &\implies \sum_{i = I_2 + 1}^\infty |x_i^{(n)}| \\ &\le \sum_{i = I_2 + 1}^\infty |x_i^{(N_1)}| + \sum_{i = I_2 + 1}^\infty |x_i^{(n)} - x_i^{(N_1)}| \\ &< \eps + d(x^{(n)}, x^{(N_1)}) \\ &< 2\eps \end{align*} Let $I = \max\{I_1, I_2\}$. For each $i = 1, 2, \dots, I$ we have $|x_i^{(n)} - x_i| \to 0$ as $n \to \infty$, so $\sum_{i = 1}^I |x_i^{(n)} - x_i| \to 0$ as $n \to \infty$. Hence we can find $N_2$ such that \[ n \ge N_2 \implies \sum_{i = 1}^I |x_i^{(n)} - x_i| < \eps \] Let $N = \max\{N_1, N_2\}$ and let $n \ge N$. Then \begin{align*} d(x^{(n)}, x) &\le \sum_{i = 1}^I |x_i^{(n)} - x_i| + \sum_{i = I + 1}^\infty |x_i^{(n)}| + \sum_{i = I + 1}^\infty |x_i| \\ &\le \sum_{i = 1}^I |x_i^{(n)} - x_i| + \sum_{i = I_2 + 1}^\infty |x_i^{(n)}| + \sum_{i = I_1 + 1}^\infty |x_i| \\ &< \eps + 2\eps + \eps \\ &= 4\eps \end{align*} Hence $d(x^{(n)}, x) \to 0$ as $n \to \infty$, i.e. $x^{(n)} \to x$ in $l_1$. Hence $l_1$ is \emph{complete}. 
\end{enumerate} \end{proof} \end{enumerate}