% vim: tw=50
% 07/11/2022 11AM
\subsection{Eigenvectors, eigenvalues and triangulable matrices}
First step towards the diagonalisation of endomorphisms.
\begin{itemize}
\item $V$ a vector space over $F$, $\dim_F V = n < \infty$, and $\alpha : V \to V$ linear (an endomorphism of $V$). General problem: can we find a basis $\mathcal{B}$ of $V$ such that, in this basis,
\[ [\alpha]_{\mathcal{B}} = [\alpha]_{\mathcal{B}, \mathcal{B}} \]
has a ``nice'' form?
\end{itemize}
Reminder: if $\mathcal{B}'$ is another basis of $V$ and $P$ is the change of basis matrix, then
\[ [\alpha]_{\mathcal{B}'} = P^{-1} [\alpha]_{\mathcal{B}} P \]
Equivalently: given a matrix $A \in \mathcal{M}_n(F)$, is it conjugate to a matrix with a ``simple'' form?
\begin{definition*}
\begin{enumerate}[(i)]
\item $\alpha \in \mathcal{L}(V)$ ($\alpha : V \to V$ linear) is \emph{diagonalisable} if there exists a basis $\mathcal{B}$ of $V$ such that $[\alpha]_{\mathcal{B}}$ is diagonal:
\[ [\alpha]_{\mathcal{B}} =
\begin{pmatrix}
\lambda_1 & 0 & \cdots & 0 \\
0 & \lambda_2 & \cdots & 0 \\
\vdots & \vdots & \ddots & \vdots \\
0 & 0 & \cdots & \lambda_n
\end{pmatrix} \]
\item $\alpha \in \mathcal{L}(V)$ is \emph{triangulable} if there exists a basis $\mathcal{B}$ of $V$ such that $[\alpha]_{\mathcal{B}}$ is (upper) triangular:
\[ [\alpha]_{\mathcal{B}} =
\begin{pmatrix}
\lambda_1 & * & \cdots & * \\
0 & \lambda_2 & \cdots & * \\
\vdots & \vdots & \ddots & \vdots \\
0 & 0 & \cdots & \lambda_n
\end{pmatrix} \]
\end{enumerate}
\end{definition*}
\begin{remark*}
A matrix is diagonalisable (respectively triangulable) if and only if it is conjugate to a diagonal (respectively triangular) matrix.
\end{remark*}
\begin{definition*}[eigenvalue, eigenvector, eigenspace]
\begin{enumerate}[(i)]
\item $\lambda \in F$ is an \emph{eigenvalue} of $\alpha \in \mathcal{L}(V)$ if and only if there exists $v \in V \setminus \{0\}$ such that $\alpha(v) = \lambda v$.
\item $v \in V$ is an \emph{eigenvector} of $\alpha \in \mathcal{L}(V)$ if and only if $v \neq 0$ and there exists $\lambda \in F$ such that $\alpha(v) = \lambda v$.
\item $V_\lambda = \{v \in V \mid \alpha(v) = \lambda v\} \le V$ is the \emph{eigenspace} associated to $\lambda \in F$.
\end{enumerate}
\end{definition*}
\begin{remark*}
One often abbreviates: e-value, e-vector, e-space.
\end{remark*}
\begin{lemma*}
$\alpha \in \mathcal{L}(V)$, $\lambda \in F$, then
\[ \text{$\lambda$ eigenvalue} \iff \det(\alpha - \lambda \id) = 0 \]
\end{lemma*}
\begin{proof}
\begin{align*}
\text{$\lambda$ eigenvalue} &\iff \exists v \in V \setminus \{0\} \text{ such that } \alpha(v) = \lambda v \\
&\iff \exists v \in V \setminus \{0\} \text{ such that } (\alpha - \lambda \id)(v) = 0 \\
&\iff \Ker (\alpha - \lambda \id) \neq \{0\} \\
&\iff \alpha - \lambda \id \text{ not injective} \\
&\iff \alpha - \lambda \id \text{ not surjective (rank-nullity, as $\dim V < \infty$)} \\
&\iff \alpha - \lambda \id \text{ not bijective} \\
&\iff \det(\alpha - \lambda \id) = 0
\end{align*}
\end{proof}
\begin{remark*}
If $\alpha(v_j) = \lambda v_j$ with $v_j \neq 0$, we can complete $v_j$ to a basis $\mathcal{B} = (v_1, \dots, v_{j - 1}, v_j, v_{j + 1}, \dots, v_n)$ of $V$. Then the $j$-th column of $[\alpha]_{\mathcal{B}}$ is $\lambda e_j$:
\[ [\alpha]_{\mathcal{B}} =
\begin{pmatrix}
* & \cdots & 0 & \cdots & * \\
\vdots & & \vdots & & \vdots \\
* & \cdots & \lambda & \cdots & * \\
\vdots & & \vdots & & \vdots \\
* & \cdots & 0 & \cdots & *
\end{pmatrix} \]
with $\lambda$ in the $(j, j)$ entry and zeros elsewhere in the $j$-th column.
\end{remark*}
\subsubsection*{Elementary facts about polynomials}
We will study $P(\alpha)$ for $P$ a polynomial and $\alpha \in \mathcal{L}(V)$.
\begin{itemize}
\item $F$ a field,
\[ f(t) = a_n t^n + a_{n - 1} t^{n - 1} + \cdots + a_1 t + a_0, \quad a_i \in F. \]
If $n$ is the largest exponent such that $a_n \neq 0$, then $n = \deg f$.
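For instance (an illustrative choice of polynomial, not from the lecture):
\[ f(t) = 2t^3 + t - 1, \quad \deg f = 3 \quad (a_3 = 2 \neq 0). \]
Note that with $g(t) = -2t^3$ we get $\deg(f + g) = \deg(t - 1) = 1 < \max\{\deg f, \deg g\}$, so the inequality in the next item can be strict.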
\item $\deg (f + g) \le \max\{\deg f, \deg g\}$, $\deg(fg) = \deg f + \deg g$
\item $F[t] = \{\text{polynomials with coefficients in $F$}\}$
\item $\lambda$ is a root of $f(t) \iff f(\lambda) = 0$.
\end{itemize}
\begin{lemma*}
If $\lambda$ is a root of $f$, then $t - \lambda$ divides $f$:
\[ f(t) = (t - \lambda) g(t), \quad g(t) \in F[t] \]
\end{lemma*}
\begin{proof}
$f(t) = a_n t^n + \cdots + a_1 t + a_0$ and $f(\lambda) = a_n \lambda^n + \cdots + a_1 \lambda + a_0 = 0$, so
\begin{align*}
f(t) &= f(t) - f(\lambda) \\
&= a_n (t^n - \lambda^n) + \cdots + a_1 (t - \lambda) \\
&= a_n (t - \lambda)(t^{n - 1} + \lambda t^{n - 2} + \cdots + \lambda^{n - 2} t + \lambda^{n - 1}) + \cdots
\end{align*}
Each term $t^i - \lambda^i$ is divisible by $t - \lambda$, so $f(t) = (t - \lambda) g(t)$ with $g(t) \in F[t]$.
\end{proof}
\begin{corollary*}
A \emph{nonzero} polynomial of degree $n$ ($\ge 0$) has \emph{at most} $n$ roots (counted with multiplicity).
\end{corollary*}
\begin{proof}
Induction on the degree. (Exercise)
\end{proof}
\begin{corollary*}
Let $f_1, f_2$ be polynomials of degree $< n$ such that $f_1(t_i) = f_2(t_i)$ for $n$ distinct values $(t_i)_{1 \le i \le n}$. Then $f_1 \equiv f_2$.
\end{corollary*}
\begin{proof}
$f_1 - f_2$ has degree $< n$ and at least $n$ roots, so $f_1 - f_2 \equiv 0$.
\end{proof}
\begin{theorem*}
Any $f \in \CC[t]$ of positive degree has a (complex) root (hence exactly $\deg f$ roots when counted with multiplicity).
\end{theorem*}
\noindent So any $f \in \CC[t]$ factors as
\[ f(t) = c \prod_{i = 1}^r (t - \lambda_i)^{\alpha_i}, \quad c, \lambda_i \in \CC, \ \alpha_i \in \NN \]
($\to$ the proof uses complex analysis).
\begin{definition*}[characteristic polynomial]
Let $\alpha \in \mathcal{L}(V)$ and let $A = [\alpha]_{\mathcal{B}}$ in some basis $\mathcal{B}$ of $V$. The characteristic polynomial of $\alpha$ is
\[ \chi_\alpha(t) = \det (A - t\id) \]
\end{definition*}
\begin{remark*}
The fact that $\det(A - t \id)$ is a polynomial in $t$ follows from the very definition of $\det$.
\end{remark*}
\begin{remark*}
Conjugate matrices have the same characteristic polynomial:
\begin{align*}
\det(P^{-1}AP - t \id) &= \det(P^{-1}(A - t \id)P) \\
&= \det(A - t \id)
\end{align*}
So we can define
\[ \chi_\alpha(t) = \det(A - t \id) \]
where $A = [\alpha]_{\mathcal{B}}$, and the polynomial does not depend on the choice of basis.
\end{remark*}
\begin{flashcard}[triangulable-iff]
\begin{theorem*}
$\alpha \in \mathcal{L}(V)$ is triangulable if and only if $\chi_\alpha$ can be written as a product of linear factors over $F$:
\[ \chi_\alpha(t) = c \prod_{i = 1}^n (t - \lambda_i) \]
\end{theorem*}
\noindent $\to$ If $F = \CC$, any matrix is triangulable (every $\chi_\alpha$ factors into linear factors over $\CC$).
\begin{proof}
\begin{enumerate}
\item[$\Rightarrow$] \cloze{Suppose $\alpha$ is triangulable: in some basis $\mathcal{B}$,
\[ [\alpha]_{\mathcal{B}} =
\begin{pmatrix}
a_1 & * & \cdots & * \\
0 & a_2 & \cdots & * \\
\vdots & \vdots & \ddots & \vdots \\
0 & 0 & \cdots & a_n
\end{pmatrix} \]
so
\[ \chi_\alpha(t) = \det
\begin{pmatrix}
a_1 - t & * & \cdots & * \\
0 & a_2 - t & \cdots & * \\
\vdots & \vdots & \ddots & \vdots \\
0 & 0 & \cdots & a_n - t
\end{pmatrix}
= \prod_{i = 1}^n (a_i - t) \]}
\item[$\Leftarrow$] \cloze{We argue by induction on $n = \dim V$.
\begin{itemize}
\item $n = 1$: easy.
\item $n > 1$: by assumption $\chi_\alpha(t)$ is a product of linear factors over $F$, so it has a root $\lambda \in F$. Then $\chi_\alpha(\lambda) = 0$, so $\lambda$ is an eigenvalue of $\alpha$ (by the lemma). Let $U = V_\lambda$ be the associated eigenspace and let $(v_1, \dots, v_k)$ be a basis of $U$ (note $k = \dim U \ge 1$).
We complete it to a basis $\mathcal{B} = (v_1, \dots, v_k, v_{k + 1}, \dots, v_n)$ of $V$ and set
\[ W = \Span (v_{k + 1}, \dots, v_n), \]
so that $V = U \oplus W$. Since $\alpha(v_i) = \lambda v_i$ for $i \le k$, in this basis
\[ [\alpha]_{\mathcal{B}} =
\begin{pmatrix}
\lambda I_k & B \\
0 & C
\end{pmatrix} \]
for some $B \in \mathcal{M}_{k, n - k}(F)$ and $C \in \mathcal{M}_{n - k}(F)$. Moreover $\alpha$ induces an endomorphism $\ol{\alpha} : V / U \to V / U$ with
\[ C = [\ol{\alpha}]_{\ol{\mathcal{B}}}, \quad \ol{\mathcal{B}} = (v_{k + 1} + U, \dots, v_n + U). \]
Then (determinant of a block triangular matrix):
\[ \chi_\alpha(t) = \det(\alpha - t \id) = \det
\begin{pmatrix}
(\lambda - t) I_k & B \\
0 & C - t I_{n - k}
\end{pmatrix}
= (\lambda - t)^k \det(C - t \id) = c \prod_{i = 1}^n (t - a_i) \]
by assumption, so
\[ \det(C - t\id) = \tilde{c} \prod_{i = k + 1}^n (t - \tilde{a}_i), \]
a product of linear factors. Since $\dim V / U = \dim V - \dim U < \dim V$, the induction hypothesis applies to $\ol{\alpha}$: there is a basis $\tilde{\mathcal{B}} = (\tilde{v}_{k + 1} + U, \dots, \tilde{v}_n + U)$ of $V / U$, with representatives $\tilde{v}_i \in W$, in which
\[ [\ol{\alpha}]_{\tilde{\mathcal{B}}} =
\begin{pmatrix}
\tilde{a}_{k + 1} & * & \cdots & * \\
0 & \tilde{a}_{k + 2} & \cdots & * \\
\vdots & \vdots & \ddots & \vdots \\
0 & 0 & \cdots & \tilde{a}_n
\end{pmatrix} \]
Since $V = U \oplus W$,
\[ \hat{\mathcal{B}} = (v_1, \dots, v_k, \tilde{v}_{k + 1}, \dots, \tilde{v}_n) \]
is a basis of $V$, in which
\[ [\alpha]_{\hat{\mathcal{B}}} =
\begin{pmatrix}
\lambda I_k & * \\
0 & [\ol{\alpha}]_{\tilde{\mathcal{B}}}
\end{pmatrix} \]
$\to$ triangular form.
\end{itemize}
}
\end{enumerate}
\end{proof}
\end{flashcard}
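\noindent The following worked example (added for illustration; the matrix is a standard choice, not from the lecture) shows how the conclusion of the theorem depends on the field $F$.
\begin{remark*}[Example]
Let
\[ A = \begin{pmatrix} 0 & -1 \\ 1 & 0 \end{pmatrix} \in \mathcal{M}_2(\RR), \]
a rotation by $\pi / 2$. Then
\[ \chi_A(t) = \det \begin{pmatrix} -t & -1 \\ 1 & -t \end{pmatrix} = t^2 + 1. \]
Over $F = \RR$ this has no roots, so it is not a product of linear factors and $A$ is not triangulable over $\RR$ (in particular $A$ has no real eigenvalues). Over $F = \CC$,
\[ t^2 + 1 = (t - i)(t + i), \]
so $A$ is triangulable over $\CC$; in fact it is diagonalisable, with eigenvectors
$\begin{pmatrix} 1 \\ -i \end{pmatrix}$ (eigenvalue $i$) and
$\begin{pmatrix} 1 \\ i \end{pmatrix}$ (eigenvalue $-i$).
\end{remark*}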