%! TEX root = TA.tex % vim: tw=50 % 06/02/2024 12PM \newpage \section{Approximation by polynomials} Taylor's Theorem is not the entire answer. Two examples: \begin{lemma*} If $E : \RR \to \RR$ is given by \[ E(x) = \begin{cases} \exp \left( -\frac{1}{x^2} \right) & x \neq 0 \\ 0 & x = 0 \end{cases} \] then $E$ is infinitely differentiable everywhere, with $E^{(r)}(0) = 0$ for all $r$. So the Taylor expansion about $0$ is \[ \sum_r 0 x^r = 0 \not\to E(x) \] for $x \neq 0$. \end{lemma*} \begin{proof} If $x \neq 0$ then standard differentiation theorems give that $E$ is infinitely differentiable with $E^{(r)}(x) = Q_r(1 / x) E(x)$ where $Q_r$ is a polynomial (proved by induction). At $0$, $E(0) = 0$. Suppose $E^{(r)}$ exists with $E^{(r)}(0) = 0$. Then \[ \frac{E^{(r)}(t) - E^{(r)}(0)}{t} = \frac{1}{t} Q_r \left( \frac{1}{t} \right) E(t) \to 0 \] as $t \to 0$ (exponential beats polynomials). So $E^{(r + 1)}(0)$ exists and equals $0$. \end{proof} The second problem is practical. It is true that \[ \exp x = \sum_{r = 0}^\infty \frac{x^r}{r!} \qquad \forall x \] but computing $\exp(-20)$ by using \[ \exp(-20) \approx \sum_{r = 0}^N \frac{(-20)^r}{r!} \] involves large $N$ and ridiculous cancellation. One attempt to get polynomial approximation is to use interpolation: \begin{lemma*} \phantom{} \begin{enumerate}[(i)] \item $\mathcal{P}_n$ the collection of polynomials of degree at most $n$ is a vector space of dimension $n + 1$. \item If $f \in C[a, b]$, $x_0 < x_1 < \cdots < x_n$, then there is at most one polynomial $P \in \mathcal{P}_n$ such that $f(x_j) = P(x_j)$ for $0 \le j \le n$. \item If $e_j(x) = \prod_{i \neq j} \frac{x - x_i}{x_j - x_i}$ then these form a basis of $\mathcal{P}_n$, and writing \[ P(x) = \sum_j f(x_j) e_j(x) \] we have $P \in \mathcal{P}_n$, $P(x_j) = f(x_j)$, so in fact by (ii) there is exactly one polynomial. \end{enumerate} \end{lemma*} \begin{proof} \phantom{} \begin{enumerate}[(i)] \item $1, t, t^2, \ldots, t^n$ is a basis for $\mathcal{P}_n$. 
\item Suppose $f(x_j) = P(x_j) = Q(x_j)$, $[0 \le j \le n]$, $P, Q \in \mathcal{P}_n$. Then $P - Q \in \mathcal{P}_n$ and has $n + 1$ zeroes (given by $x_0, \ldots, x_n$), so $P - Q = 0$. \item $e_j(x_i) = \delta_{ij}$, $e_j \in \mathcal{P}_n$. So if $P(x) = \sum_j f(x_j) e_j(x)$, then $P(x_i) = f(x_i)$ for $0 \le i \le n$. \end{enumerate} \end{proof} Practice shows interpolation to be an unreliable friend. Chebychev introduced an interesting polynomial $T_n$ which shows how oddly polynomials can behave (his brother noticed a companion $U_n$, the Chebychev polynomial of the second kind). Chebychev says look at: \begin{align*} (\cos n\theta + i \sin n\theta) &= (\cos \theta + i \sin\theta)^n \\ &= \sum_r {n \choose r} (\cos\theta)^{n - r} i^r (\sin\theta)^r \\ &= \sum_r (-1)^r {n \choose 2r} (\cos\theta)^{n - 2r} (\sin\theta)^{2r} \\ &~~~~+ i \sum_r (-1)^r {n \choose 2r + 1} (\cos\theta)^{n - 2r - 1} (\sin\theta)^{2r + 1} \\ &= \sum_r (-1)^r {n \choose 2r} (\cos\theta)^{n - 2r} (1 - \cos^2 \theta)^r \\ &~~~~+ i \sum_r (-1)^r {n \choose 2r + 1} (\cos\theta)^{n - 2r - 1} (\sin\theta) (1 - \cos^2 \theta)^r \end{align*} so taking real and imaginary parts we get \[ \cos n\theta = T_n(\cos\theta), \qquad \sin n\theta = (\sin\theta) U_n(\cos\theta) \] with \begin{align*} T_n(t) &= \sum_r (-1)^r {n \choose 2r} t^{n - 2r} (1 - t^2)^r \\ U_n(t) &= \sum_r (-1)^r {n \choose 2r + 1} t^{n - 2r - 1} (1 - t^2)^r \end{align*} $T_n \in \mathcal{P}_n$, $|T_n(t)| \le 1$ for all $t \in [-1, 1]$. Leading coefficient of $T_n$ is \[ \sum_{0 \le 2r \le n} {n \choose 2r} = \half ((1 + 1)^n + (1 - 1)^n) = 2^{n - 1} \] (for $n \ge 1$). \begin{center} \includegraphics[width=0.6\linewidth]{images/e23a654cc0084ff3.png} \end{center} zeroes of $T_n$ are $\cos \left( \frac{(2r + 1)\pi}{2n} \right)$ for $0 \le r \le n - 1$ ($T_n(\cos\theta) = \cos n\theta$). $\sin n\theta = U_n(\cos\theta) \sin\theta$, so $U_n(\cos\theta) = \frac{\sin n\theta}{\sin \theta}$. (If $\sin \theta = 0$, use continuity to get $U_n = \pm n$ at this point). 
\begin{center} \includegraphics[width=0.6\linewidth]{images/fcfdd52425d8479a.png} \end{center} In spite of all this, Weierstrass showed: \begin{flashcard}[weierstrass-thm] \begin{theorem*}[Weierstrass approximation] \cloze{The polynomials are uniformly dense in $C[a, b]$, i.e. given $f : [a, b] \to \RR$ continuous and $\eps > 0$, there exists polynomial $P$ such that $|f(t) - P(t)| \le \eps$ for all $t \in [a, b]$.} \end{theorem*} \end{flashcard} Bernstein: \begin{flashcard}[bernstein-thm] \begin{theorem*}[Bernstein] \cloze{If $f : [0, 1] \to \RR$ is continuous then \[ \sum_{r = 0}^n f \left( \frac{r}{n} \right) {n \choose r} t^r (1 - t)^{n - r} \to f(t) \] uniformly on $[0, 1]$ as $n \to \infty$. (Can get from $[0, 1]$ to $[a, b]$ by a scaling transformation).} \end{theorem*} \end{flashcard} One proof depends on reinterpreting the theorem probabilistically. Let $X_1, X_2, \ldots, X_n$ be independent such that $\PP(X_j = 1) = p$, $\PP(X_j = 0) = 1 - p$. Then \[ \sum_{r = 0}^n f \left( \frac{r}{n} \right) {n \choose r} p^r (1 - p)^{n - r} = \EE f(\ol{X}) \] where $\ol{X} = \frac{X_1 + \cdots + X_n}{n}$.