% vim: tw=50
% 15/10/2022 11AM

\begin{theorem}
A continuous real-valued function on a closed bounded interval is uniformly continuous.
\end{theorem}

\begin{proof}
Let $f : [a, b] \to \RR$ and suppose $f$ is continuous but not uniformly continuous. Then we can find an $\eps > 0$ such that for all $\delta > 0$ there exist $x, y \in [a, b]$ with $|x - y| < \delta$ but $|f(x) - f(y)| \ge \eps$. In particular, taking $\delta = \frac{1}{n}$ for $n = 1, 2, 3, \dots$, we can find sequences $(x_n)$, $(y_n)$ in $[a, b]$ such that for each $n$, $|x_n - y_n| < \frac{1}{n}$ but $|f(x_n) - f(y_n)| \ge \eps$.

\myskip
The sequence $(x_n)$ is bounded, so by Bolzano--Weierstrass it has a convergent subsequence, $x_{n_j} \to x$ say. And $[a, b]$ is a closed interval, so $x \in [a, b]$. Then $x_{n_j} - y_{n_j} \to 0$, so also $y_{n_j} \to x$.

\myskip
But $f$ is continuous at $x$, so there exists $\delta > 0$ such that for all $y \in [a, b]$, $|y - x| < \delta$ implies that $|f(y) - f(x)| < \frac{\eps}{2}$. Take such a $\delta$. As $x_{n_j} \to x$, we can find $J_1$ such that $j \ge J_1$ implies that $|x_{n_j} - x| < \delta$. Similarly we can find $J_2$ such that $j \ge J_2$ implies $|y_{n_j} - x| < \delta$. Now let $j = \max\{J_1, J_2\}$. Then $|x_{n_j} - x| < \delta$ and $|y_{n_j} - x| < \delta$, so we have $|f(x_{n_j}) - f(x)| < \frac{\eps}{2}$ and $|f(y_{n_j}) - f(x)| < \frac{\eps}{2}$. Then
\[ |f(x_{n_j}) - f(y_{n_j})| \le |f(x_{n_j}) - f(x)| + |f(x) - f(y_{n_j})| < \frac{\eps}{2} + \frac{\eps}{2} = \eps, \]
a contradiction.
\end{proof}

\begin{corollary}
A continuous real-valued function on a closed bounded interval is bounded.
\end{corollary}

\begin{proof}
Let $f : [a, b] \to \RR$ be continuous, and so uniformly continuous by Theorem 12. Then we can find $\delta > 0$ such that
\[ \forall x, y \in [a, b] \quad |x - y| < \delta \implies |f(x) - f(y)| < 1 \]
Let $M = \left\lceil \frac{b - a}{\delta} \right\rceil$. Now let $x \in [a, b]$. We can find $a = x_0 \le x_1 \le \cdots \le x_M = x$ with $|x_i - x_{i - 1}| < \delta$ for each $i$. Hence
\begin{align*}
|f(x)| &= \left| f(a) + \sum_{i = 1}^M (f(x_i) - f(x_{i - 1})) \right| \\
&\le |f(a)| + \sum_{i = 1}^M |f(x_i) - f(x_{i - 1})| \\
&< |f(a)| + \sum_{i = 1}^M 1 \\
&= |f(a)| + M
\end{align*}
As $x \in [a, b]$ was arbitrary, $f$ is bounded.
\end{proof}

\begin{corollary}
A continuous real-valued function on a closed bounded interval is integrable.
\end{corollary}

\begin{proof}
Let $f : [a, b] \to \RR$ be continuous, and so uniformly continuous by Theorem 12. Let $\eps > 0$. Then we can find $\delta > 0$ such that for all $x, y \in [a, b]$, $|x - y| < \delta \implies |f(x) - f(y)| < \eps$. Let $\mathcal{D} = \{x_0 < x_1 < \cdots < x_n\}$ be a dissection such that for each $i$ we have $x_i - x_{i - 1} < \delta$. Let $i \in \{1, \dots, n\}$. Then for any $u, v \in [x_{i - 1}, x_i]$ we have $|u - v| < \delta$, so $|f(u) - f(v)| < \eps$. Hence
\[ \sup_{x \in [x_{i - 1}, x_i]} f(x) - \inf_{x \in [x_{i - 1}, x_i]} f(x) \le \eps \]
Hence:
\begin{align*}
S(f, \mathcal{D}) - s(f, \mathcal{D}) &= \sum_{i = 1}^n (x_i - x_{i - 1}) \left( \sup_{x \in [x_{i - 1}, x_i]} f(x) - \inf_{x \in [x_{i - 1}, x_i]} f(x) \right) \\
&\le \sum_{i = 1}^n (x_i - x_{i - 1}) \eps \\
&= \eps \sum_{i = 1}^n (x_i - x_{i - 1}) \\
&= \eps(b - a)
\end{align*}
But $\eps(b - a)$ can be made arbitrarily small by taking $\eps$ small. So by Riemann's criterion $f$ is integrable over $[a, b]$.
\end{proof}
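\begin{remark*}
As a quick illustration of the theorem: $f(x) = x^2$ on $[0, 1]$ is uniformly continuous, since for $x, y \in [0, 1]$
\[ |f(x) - f(y)| = |x + y||x - y| \le 2|x - y|, \]
so given $\eps > 0$ we may take $\delta = \frac{\eps}{2}$. The hypotheses matter: $x \mapsto x^2$ is continuous on all of $\RR$ but not uniformly continuous there, since with $x_n = n$ and $y_n = n + \frac{1}{n}$ we have $|x_n - y_n| \to 0$ while $|x_n^2 - y_n^2| = 2 + \frac{1}{n^2} \ge 2$; similarly $x \mapsto \frac{1}{x}$ on the bounded but not closed interval $(0, 1]$ is continuous but not uniformly continuous.
\end{remark*}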
\newpage
\section{Metric Spaces}
\subsection{Definitions and Examples}

Can we think about convergence in a more general setting? What do we really need? A notion of distance.

\myskip
In $\RR$: the distance from $x$ to $y$ is $|x - y|$. \\
In $\RR^2$: the distance from $x$ to $y$ is $\|x - y\|$. \\
For functions $f, g : X \to \RR$: the distance from $f$ to $g$ is
\[ \sup_{x \in X} |f(x) - g(x)| \]
(where this exists, i.e. if $f - g$ is bounded).

\myskip
The triangle inequality was often important.

\begin{flashcard}
\begin{definition*}[Metric space]
A \emph{metric space} is a set $X$ endowed with a \emph{metric} $d$, i.e. a function $d : X^2 \to \RR$ satisfying:
\begin{enumerate}[(i)]
\item \cloze{$d(x, y) \ge 0$ for all $x, y \in X$, with equality if and only if $x = y$};
\item \cloze{$d(x, y) = d(y, x)$ for all $x, y \in X$};
\item \cloze{$d(x, z) \le d(x, y) + d(y, z)$ for all $x, y, z \in X$}.
\end{enumerate}
\end{definition*}
\end{flashcard}

\noindent We could define a metric space as an ordered pair $(X, d)$. If it is obvious what $d$ is, we sometimes write ``the metric space $X$\dots''

\subsubsection*{Examples}
\begin{enumerate}[(1)]
\item $X = \RR$, $d(x, y) = |x - y|$: ``the \emph{usual metric} on $\RR$''.
\item $X = \RR^n$ with the \emph{Euclidean metric}
\[ d(x, y) = \|x - y\| = \sqrt{\sum_{i = 1}^n (x_i - y_i)^2} \]
\item Let $Y \subset \RR$. Take
\[ X = B(Y) = \{f : Y \to \RR | \text{$f$ is bounded}\} \]
Now we can use the \emph{uniform metric}
\[ d(f, g) = \sup_{x \in Y} |f(x) - g(x)| \]
(we need the boundedness condition for this supremum to exist). Check the triangle inequality: let $f, g, h \in B(Y)$ and let $x \in Y$. Then
\begin{align*}
|f(x) - h(x)| &\le |f(x) - g(x)| + |g(x) - h(x)| \\
&\le d(f, g) + d(g, h)
\end{align*}
Taking the sup over all $x \in Y$, we get
\[ d(f, h) \le d(f, g) + d(g, h) \]
\begin{remark*}
Suppose $(X, d)$ is a metric space and $Y \subset X$. Then $d \mid_{Y^2}$ is a metric on $Y$. We say $Y$ with this metric is a \emph{subspace} of $X$.
\end{remark*}
\item Subspaces of $\RR$: any of $\QQ, \ZZ, \NN, [0, 1], \dots$ with the usual metric $d(x, y) = |x - y|$.
\item Recall that a continuous function on a closed bounded interval is bounded. Define $C([a, b]) = \{f : [a, b] \to \RR | \text{$f$ is continuous}\}$. This is a subspace of $B([a, b])$.
\item The empty metric space $X = \emptyset$ with the empty metric.
\item We can define different metrics on the same set, for example the $l_1$ metric on $\RR^n$:
\begin{flashcard}[l1-metric-Rn]
\prompt{$l_1$ metric on $\RR^n$? \\}
\[ \cloze{d(x, y) = \sum_{i = 1}^n |x_i - y_i|} \]
\end{flashcard}
\item \begin{flashcard}[linf-metric-Rn]
The $l_\infty$ metric on $\RR^n$:
\[ \cloze{d(x, y) = \max_i |x_i - y_i|} \]
\end{flashcard}
(the proof of the triangle inequality is the same as for the uniform metric in example 3).
\item \begin{flashcard}[L1-metric-Cab]
On $C([a, b])$ we can define the $L_1$ metric
\[ \cloze{d(f, g) = \int_a^b |f - g|} \]
\end{flashcard}
\item $X = \CC$ with
\[ d(z, w) = \begin{cases} 0 & \text{if $z = w$} \\ |z| + |w| & \text{if $z \neq w$} \end{cases} \]
Triangle inequality? We need $d(u, w) \le d(u, v) + d(v, w)$:
\begin{itemize}
\item if $u = w$, then $\text{LHS} = 0 \le \text{RHS}$;
\item if $u = v$ or $v = w$, then $\text{LHS} = \text{RHS}$;
\item if $u, v, w$ are all distinct, then
\[ d(u, w) = |u| + |w| \le |u| + 2|v| + |w| = d(u, v) + d(v, w) \]
\end{itemize}
This is the ``British rail metric'' or ``SNCF metric'':
\begin{center}
\includegraphics[width=0.6\linewidth] {images/5f17847e4c7811ed.png}
\end{center}
\end{enumerate}
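\begin{remark*}
These metrics on $\RR^n$ genuinely differ: for instance, taking $x = (0, 0)$ and $y = (3, 4)$ in $\RR^2$, the Euclidean metric gives $d(x, y) = 5$, the $l_1$ metric gives $d(x, y) = 7$, and the $l_\infty$ metric gives $d(x, y) = 4$.
\end{remark*}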