% vim: tw=50
% 18/11/2022 10AM
\noindent For $m = 2$ note $i \dfrac{\dd}{\dd \omega} \left( \frac{1}{i\omega - a} \right) = \frac{1}{(i\omega - a)^2}$ and recall (8.12) $\mathcal{F}(tf(t)) = i\tilde{f}'(\omega)$, so
\[ \mathcal{F}^{-1} \left( \frac{1}{(i\omega - a)^2} \right) = \begin{cases} te^{at} & t > 0 \\ 0 & t < 0 \end{cases} \]
By induction,
\[ \mathcal{F}^{-1} \left( \frac{1}{(i\omega - a)^m} \right) = \begin{cases} \frac{t^{m - 1}}{(m - 1)!} e^{at} & t > 0 \\ 0 & t < 0 \end{cases} \tag{8.34} \]
Thus the response function takes the form
\[ R(t) = \sum_j \sum_m \Gamma_{jm} \frac{t^{m - 1}}{(m - 1)!} e^{c_j t} \quad t > 0 \tag{8.35} \]
We can solve (8.31) in Green's function form (8.30), or directly invert $\tilde{R}(\omega) \tilde{J}(\omega)$ for polynomial $\tilde{J}(\omega)$.
\begin{example*}[Damped oscillator]
Solve
\[ \mathcal{L} y \equiv y'' + 2p y' + (p^2 + q^2) y = f(t) \]
with damping $p > 0$ and homogeneous initial conditions $y(0) = y'(0) = 0$. The Fourier Transform is
\[ (i\omega)^2 \tilde{y} + 2ip\omega \tilde{y} + (p^2 + q^2) \tilde{y} = \tilde{f} \]
\[ \tilde{y} = \frac{\tilde{f}}{-\omega^2 + 2ip\omega + p^2 + q^2} \equiv \tilde{R} \tilde{f} \]
Inverting with the convolution theorem (8.17),
\[ y(t) = \int_0^t R(t - \tau) f(\tau) \dd \tau \]
with response
\[ R(t - \tau) = \frac{1}{2\pi} \int_{-\infty}^\infty \frac{e^{i\omega(t - \tau)} \dd \omega}{p^2 + q^2 + 2ip\omega - \omega^2} \]
\end{example*}
\noindent Exercise: Show $\mathcal{L} R(t - \tau) = \delta(t - \tau)$ using (8.23). That is, the response function $R(t - \tau)$ is the Green's function (see Example sheet 3, Q4).
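\noindent For reference, a brief sketch of the inversion of the response integral above (by residues): writing the denominator as $q^2 - (\omega - ip)^2$, the simple poles $\omega = \pm q + ip$ both lie in the upper half-plane since $p > 0$. Closing the contour there for $t > \tau$, and in the lower half-plane (no poles enclosed) for $t < \tau$, gives
\[ R(t - \tau) = \begin{cases} \frac{1}{q} e^{-p(t - \tau)} \sin\left( q(t - \tau) \right) & t > \tau \\ 0 & t < \tau \end{cases} \]
consistent with causality and with the general form (8.35), here with $c_j = -p \pm iq$.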
\subsection{Discrete Fourier Transforms}
\subsubsection*{Discrete sampling \& the Nyquist frequency}
Sample a signal $h(t)$ at equally spaced times $t_n = n\Delta$ with sampling interval $\Delta$, and values
\[ h_n = h(n\Delta), \quad n = \dots, -2, -1, 0, 1, 2, \dots \tag{8.36} \]
i.e. with sampling frequency $f_s = \frac{1}{\Delta}$ ($\omega_s = 2\pi f_s = \frac{2\pi}{\Delta}$). \\
\begin{hiddenflashcard}[Nyquist-frequency]
\prompt{What is the Nyquist frequency?} \\
\[ \cloze{f_c = \frac{1}{2\Delta}} \]
\end{hiddenflashcard}
The \emph{Nyquist frequency}
\[ f_c = \frac{1}{2\Delta} \tag{8.37} \]
is the highest frequency actually sampled at interval $\Delta$. Suppose we have a signal of given frequency $f$:
\begin{align*} g_f(t) &= A \cos (2\pi ft + \phi) \\ &= \Re(A e^{i(2\pi ft + \phi)}) \\ &= \half (A e^{i\phi} e^{2\pi i ft} + A e^{-i\phi} e^{-2\pi i ft}) \tag{8.38} \end{align*}
(i.e. as for the complex Fourier Series of a real function, the sum of a positive-frequency $f$ mode and a negative-frequency $-f$ mode). \\
What happens if we sample at the Nyquist frequency, $f = f_c$?
\begin{align*} g_{f_c}(t_n) &= A \cos \left( 2\pi \frac{1}{2\Delta} n\Delta + \phi \right) = A \cos(\pi n + \phi) \\ &= A \cos \pi n \cos \phi - A \sin \pi n \sin \phi \\ &= A' \cos (2\pi f_c t_n) \tag{8.39} \end{align*}
with $A' = A \cos \phi$. So phase and amplitude information is lost (the two cannot be distinguished) and we can identify $f_c \leftrightarrow -f_c$, i.e. the $f_c$ and $-f_c$ modes in (8.38) are \emph{aliased} together in (8.39). \\
What happens if we sample above the Nyquist frequency, $f > f_c$? Exercise: take $f = f_c + \delta f$ with $0 < \delta f < f_c$ and show that
\begin{align*} g_f(t_n) &= A \cos (2\pi (f_c + \delta f)t_n + \phi) \\ &= A \cos (2\pi (f_c - \delta f)t_n - \phi) \tag{8.40} \end{align*}
So the effect is to \emph{alias} a ``ghost signal'' to the frequency $f_c - \delta f$ (strictly, to $-(f_c - \delta f)$).
\subsubsection*{Sampling Theorem}
A signal $g(t)$ is \emph{bandwidth limited} if it contains no frequencies above $\omega_{\text{max}} = 2\pi f_{\text{max}}$, i.e. $\tilde{g}(\omega) = 0$ for $|\omega| > \omega_{\text{max}}$. So
\begin{align*} g(t) &= \frac{1}{2\pi} \int_{-\infty}^\infty \tilde{g}(\omega) e^{i\omega t} \dd \omega \\ &= \frac{1}{2\pi} \int_{-\omega_{\text{max}}}^{\omega_{\text{max}}} \tilde{g}(\omega) e^{i\omega t} \dd \omega \tag{8.41} \end{align*}
Set the sampling interval to satisfy the Nyquist condition
\[ \Delta = \frac{1}{2f_{\text{max}}} \]
then
\[ g_n \equiv g(t_n) = \frac{1}{2\pi} \int_{-\omega_{\text{max}}}^{\omega_{\text{max}}} \tilde{g}(\omega) e^{i\pi n\omega/\omega_{\text{max}}} \dd \omega \]
which is $\frac{\omega_{\text{max}}}{\pi}$ times the complex Fourier Series coefficient $c_n$ of (1.13) (with $x \to \omega$). The Fourier Series represents a periodic function (period $2\omega_{\text{max}}$)
\[ \tilde{g}_{\text{per}}(\omega) = \frac{\pi}{\omega_{\text{max}}} \sum_{n = -\infty}^\infty g_n e^{-i\pi n\omega / \omega_{\text{max}}} \tag{8.42} \]
\begin{center}
\includegraphics[width=0.6\linewidth] {images/0ba745fa69d211ed.png}
\end{center}
The actual Fourier Transform $\tilde{g}(\omega)$ is found by multiplying by a ``top hat''
\[ \tilde{h}(\omega) = \begin{cases} 1 & |\omega| \le \omega_{\text{max}} \\ 0 & \text{otherwise} \end{cases} \]
i.e.
\[ \tilde{g}(\omega) = \tilde{g}_{\text{per}}(\omega) \tilde{h}(\omega) \tag{8.43} \]
which is an \emph{exact} relation. Inverting (8.43) using (8.42):
\begin{align*} g(t) &= \frac{1}{2\pi} \int_{-\infty}^\infty \tilde{g}_{\text{per}}(\omega) \tilde{h}(\omega) e^{i\omega t} \dd \omega \\ &= \frac{1}{2\omega_{\text{max}}} \sum_{n = -\infty}^\infty g_n \int_{-\omega_{\text{max}}}^{\omega_{\text{max}}} \exp \left( i\omega \left( t - \frac{n\pi}{\omega_{\text{max}}} \right) \right) \dd \omega \\ &= \sum_{n = -\infty}^\infty g_n \frac{\sin(\omega_{\text{max}} t - \pi n)}{\omega_{\text{max}} t - \pi n} \tag{8.44} \end{align*}
So $g(t)$ can be reconstructed exactly from its samples at the discrete times $t_n$ (the sampling theorem).
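\noindent As a quick consistency check of (8.44), evaluate it at a sample time $t = t_m = \frac{m\pi}{\omega_{\text{max}}}$:
\[ g(t_m) = \sum_{n = -\infty}^\infty g_n \frac{\sin(\pi(m - n))}{\pi(m - n)} = g_m \]
since the ratio vanishes for $n \ne m$ and equals $1$ (as a limit) for $n = m$. So the formula reproduces the samples exactly and interpolates between them.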