% vim: tw=50
% 18/10/2022 11AM
\begin{enumerate}[(1)]
\setcounter{enumi}{10}
\item Let $X$ be any set. Define a metric $d$ on $X$ by
\begin{flashcard}[discrete-metric]
\prompt{Discrete metric?}
\[ \cloze{d(x, y) = \begin{cases} 0 & x = y \\ 1 & x \neq y \end{cases}} \]
\end{flashcard}
Easy to check this works. This is called the \emph{discrete metric} on $X$.
\item \begin{flashcard}[p-adic-metric]
Let $X = \ZZ$. Let $p$ be a prime. The \emph{$p$-adic metric} on $\ZZ$ is the metric $d$ defined by
\[ \cloze{d(x, y) = \begin{cases} 0 & x = y \\ p^{-a} & \text{if $x \neq y$ and $x - y = p^am$ with $p \nmid m$} \end{cases}} \]
\end{flashcard}
``Two numbers are close if the difference is divisible by a large power of $p$''. Triangle inequality:
\begin{itemize}
\item Easy if any two of $x, y, z$ are the same, so assume $x, y, z$ are all distinct.
\item Let $x - y = p^am$ and $y - z = p^bn$ where $p \nmid m$, $p \nmid n$ and without loss of generality $a \le b$. So $d(x, y) = p^{-a}$ and $d(y, z) = p^{-b}$. Now:
\begin{align*}
x - z &= (x - y) + (y - z) \\
&= p^am + p^b n \\
&= p^a(m + p^{b - a} n)
\end{align*}
so $p^a \mid x - z$ so $d(x, z) \le p^{-a}$. But $d(x, y) + d(y, z) \ge d(x, y) = p^{-a}$, so triangle inequality does hold.
\end{itemize}
\end{enumerate}
\begin{definition*}
Let $(X, d)$ be a metric space. Let $(x_n)$ be a sequence in $X$ and let $x \in X$. We say $(x_n)$ converges to $x$ and write ``$x_n \to x$'' or ``$x_n \to x$ as $n \to \infty$'' if
\[ \forall \eps > 0 ~ \exists N ~ \forall n \ge N \quad d(x_n, x) < \eps .\]
Equivalently $x_n \to x$ if and only if $d(x_n, x) \to 0$ in $\RR$.
\end{definition*}
\begin{hiddenflashcard}
\begin{definition*}[Convergence in a metric space]
Let $(X, d)$ be a metric space, and $(x_n)$ \cloze{be a sequence in $X$. We say $x_n \to x$ if and only if $d(x_n, x) \to 0$.}
\end{definition*}
\end{hiddenflashcard}
\begin{proposition}
Limits are unique.
That is, if $(X, d)$ is a metric space, $(x_n)$ a sequence in $X$, $x, y \in X$ with $x_n \to x$ and $x_n \to y$, then $x = y$.
\end{proposition}
\begin{proof}
For each $n$,
\begin{align*}
d(x, y) &\le d(x, x_n) + d(x_n, y) \\
&= d(x_n, x) + d(x_n, y) \\
&\to 0 + 0 = 0
\end{align*}
So we would need $d(x, y) \to 0$ as $n \to \infty$, but $d(x, y)$ is constant, so $d(x, y) = 0$. So $x = y$.
\end{proof}
\begin{remark*}
This justifies talking about \emph{the} limit of a convergent sequence in a metric space, and writing $x = \lim_{n \to \infty} x_n$ if $x_n \to x$.
\end{remark*}
\subsubsection*{Remarks on the definition}
\begin{enumerate}[(1)]
\item Constant sequences obviously converge. Moreover, eventually constant sequences converge.
\item Suppose $(X, d)$ is a metric space and $Y$ is a subspace of $X$. Suppose $(x_n)$ is a sequence in $Y$ which converges in $Y$ to $x$. Then also $(x_n)$ converges in $X$ to $x$. However the converse is false. For example, in $\RR$ with the usual metric then $\frac{1}{n} \to 0$ as $n \to \infty$. Consider the subspace $\RR \setminus \{0\}$. Then $\left( \frac{1}{n} \right)_{n \ge 1}$ is a sequence in $\RR \setminus \{0\}$ but it doesn't converge in $\RR \setminus \{0\}$. This is because by uniqueness of limits, it would have to converge to 0, but $0 \not\in \RR \setminus \{0\}$.
\end{enumerate}
\subsubsection*{Examples}
\begin{enumerate}[(1)]
\item Let $d$ be the Euclidean metric on $\RR^n$. Exactly as in $\RR^2$, we have $x_n \to x$ if and only if the sequence converges in each coordinate in the usual way in $\RR$.
\myskip
What about other metrics on $\RR^n$? For example let $d_\infty$ be the uniform metric
\[ d_\infty (x, y) = \max_i |x_i - y_i| \]
Which sequences converge in $(\RR^n, d_\infty)$? Note that
\[ d(x, y) = \sqrt{\sum_{i = 1}^n (x_i - y_i)^2} \le \sqrt{\sum_{i = 1}^n d_\infty (x, y)^2} \]
so $d(x, y) \le \sqrt{n} d_\infty (x, y)$. But also $d_\infty(x, y) \le d(x, y)$. Now suppose $(x_n)$ is a sequence in $\RR^n$.
Then
\[ d(x_n, x) \to 0 \iff d_\infty (x_n, x) \to 0 \]
So exactly the same sequences converge in $(\RR^n, d)$ and $(\RR^n, d_\infty)$. What about the $l_1$ metric $d_1$?
\[ d_1(x, y) = \sum_{i = 1}^n |x_i - y_i| \]
Similarly $d_\infty(x, y) \le d_1(x, y) \le nd_\infty (x, y)$. So again, exactly the same sequences converge in $(\RR^n, d)$ and $(\RR^n, d_1)$.
\item Let $X = C([0, 1]) = \{f : [0, 1] \to \RR \mid \text{$f$ is continuous}\}$. Let $d_\infty$ be the uniform metric on $X$:
\[ d_\infty(f, g) = \sup_{x \in [0, 1]} |f(x) - g(x)| \]
Now note that
\begin{align*}
f_n \to f \text{ in $(X, d_\infty)$} &\iff d_\infty(f_n, f) \to 0 \\
&\iff \sup_{x \in [0, 1]} |f_n(x) - f(x)| \to 0 \\
&\iff f_n \to f \text{ uniformly}
\end{align*}
We also had the $L_1$ metric $d_1$ on $X$:
\[ d_1(f, g) = \int_0^1 |f - g| \]
Now
\begin{align*}
d_1(f, g) &= \int_0^1 |f - g| \\
&\le \int_0^1 d_\infty(f, g) \\
&= d_\infty(f, g)
\end{align*}
So similarly to previous example,
\[ f_n \to f \text{ in $(X, d_\infty)$} \implies f_n \to f \text{ in $(X, d_1)$} \]
But converse does not hold, i.e. we can find a sequence $(f_n)$ in $X$ such that $f_n \to 0$ in the $d_1$ metric but $f_n$ doesn't converge in the $d_\infty$ metric. So we want $(f_n)$ such that $\int_0^1 |f_n| \to 0$ as $n \to \infty$, but $(f_n)$ does not converge uniformly. We can just take functions like this:
\begin{center}
\includegraphics[width=0.6\linewidth] {images/f18f5b964ed111ed.png}
\end{center}
Then clearly $f_n \to 0$ in the $d_1$ metric but not in the $d_\infty$ metric.
\item Let $(X, d)$ be a discrete metric space;
\[ d(x, y) = \begin{cases} 0 & x = y \\ 1 & x \neq y \end{cases} \]
When do we have $x_n \to x$ in $(X, d)$? Suppose $x_n \to x$, i.e.
\[ \forall \eps > 0 ~ \exists N ~ \forall n \ge N \quad d(x_n, x) < \eps \]
Setting $\eps = 1$ in this, can find $N$ such that
\[ \forall n \ge N ~ d(x_n, x) < 1 \]
i.e. $\forall n \ge N$, $d(x_n, x) = 0$, i.e. $\forall n \ge N$, $x_n = x$. Thus $(x_n)$ is eventually constant.
But we know that in \emph{any} metric space, eventually constant sequences converge. So in this space, $(x_n)$ converges if and only if $(x_n)$ eventually constant.
\end{enumerate}
\begin{definition*}
Let $(X, d)$ and $(Y, e)$ be metric spaces and let $f : X \to Y$.
\begin{enumerate}[(i)]
\item \begin{flashcard}[metric-space-convergence]
Let $a \in X$ and $b \in Y$. We say $f(x) \to b$ as $x \to a$ if \cloze{
\[ \forall \eps > 0 ~ \exists \delta > 0 ~ \forall x \in X \]
\[ 0 < d(x, a) < \delta \implies e(f(x), b) < \eps \]}
\prompt{\cloze{\fcemph{Note that $d(x, a) > 0$}! \\ This means that the value of $f(a)$ doesn't affect whether $f(x) \to b$ as $x \to a$. This is different to continuity.}}
\end{flashcard}
\item Let $a \in X$. We say $f$ is \emph{continuous} at $a$ if $f(x) \to f(a)$ as $x \to a$. That is
\[ \forall \eps > 0 ~ \exists \delta > 0 ~ \forall x \in X \]
\[ d(x, a) < \delta \implies e(f(x), f(a)) < \eps \]
\item If $f$ is continuous at every $x \in X$, we say \emph{$f$ is a continuous function} or simply \emph{$f$ is continuous}.
\item We say \emph{$f$ is uniformly continuous} if
\[ \forall \eps > 0 ~ \exists \delta > 0 ~ \forall x, y \in X \]
\[ d(x, y) < \delta \implies e(f(x), f(y)) < \eps \]
\item Suppose $W \subset X$. We say $f$ is \emph{continuous on $W$} (similarly for \emph{uniformly continuous on $W$}) if the function $f \mid_W$ is continuous, as a function $W \to Y$ where now thinking of $W$ as a subspace of $X$.
\end{enumerate}
\end{definition*}