% vim: tw=50 % 21/10/2022 11AM \begin{definition*} $A \in \mathcal{M}_{m, n}(F)$ \begin{itemize} \item The column rank of $A$, $r(A)$ is the dimension of the span of the column vectors of $A$ in $F^m$, i.e. if $A = (c_1, \dots, c_n)$ then $r(A) = \dim_F \Span \{c_1, \dots, c_n\}$. \item Similarly, the row rank is the column rank of $A^\top$. \end{itemize} \end{definition*} \begin{remark*} If $\alpha$ is a linear map represented by $A$ with respect to some basis, then \[ r(A) = r(\alpha) = \dim \Image \alpha \] \end{remark*} \begin{proposition*} Two matrices are equivalent if and only if $r(A) = r(A')$. \end{proposition*} \begin{proof} \begin{enumerate}[(1)] \item[$\Rightarrow$] If $A$ and $A'$ are equivalent, then they correspond to the same linear map $\alpha$ written in two different bases \[ r(A) = r(\alpha) = r(A') \] \item[$\Leftarrow$] $r(A) = r(A') = r$, then both $A$ and $A'$ are equivalent to: \[ \left( \begin{tabular}{c|c} $I_r$ & $0$ \\ \hline $0$ & $0$ \end{tabular} \right) \] so $A$ and $A'$ are equivalent. \end{enumerate} \end{proof} \begin{theorem*} $r(A) = r(A^\top)$ (column rank is the same as row rank) \end{theorem*} \begin{proof} Exercise. \end{proof} \subsection{Elementary operations and elementary matrices} Special case of the change of basis formula. \\ Let $\alpha : V \to W$ be a linear map, $(\mathcal{B}, \mathcal{B}')$ bases of $V$, $(\mathcal{C}, \mathcal{C}')$ bases of $W$. \[ [\alpha]_{\mathcal{B}, \mathcal{C}} \to [\alpha]_{\mathcal{B}', \mathcal{C}'} \] If $V = W$, $\alpha : V \to V$ linear then we call it an endomorphism. \begin{itemize} \item $\mathcal{B} = \mathcal{C}$, $\mathcal{B}' = \mathcal{C}'$ \item $P$ is change of matrix from $\mathcal{B}'$ to $\mathcal{B}$. \end{itemize} then \[ [\alpha]_{\mathcal{B}', \mathcal{B}'} = P^{-1} [\alpha]_{\mathcal{B}, \mathcal{B}} P \] \begin{definition*} $A$, $A'$ are $n \times n$ (square) matrices, we say that $A$ and $A'$ are \emph{similar} (or \emph{conjugate}) if and only if: \begin{itemize} \item $A' = P^{-1}AP$ \item $P$ is $n \times n$ square invertible. \end{itemize} \end{definition*} \noindent Central concept when we will study diagonalisation of matrices. (Spectral theory) \subsection{Elementary operations and elementary matrices} \begin{flashcard} \begin{definition*} Elementary \emph{column} operation on an $m \times n$ matrix $A$: \begin{enumerate}[(i)] \item \cloze{swap columns $i$ and $j$ ($i \neq j$)} \item \cloze{replace column $i$ by $\lambda$ times column $i$ ($\lambda \neq 0$, $\lambda \in F$)} \item \cloze{add $\lambda$ times column $i$ to column $j$ (with $i \neq j$)} \end{enumerate} \end{definition*} \end{flashcard} \begin{itemize} \item Elementary row operations: analogous way \item Elementary operations are invertible \item These operations can be realised through the action of \emph{elementary matrices}. \begin{enumerate}[(i)] \item $i, j$, $i \neq j$. \begin{center} \includegraphics[width=0.6\linewidth] {images/7940f95e512b11ed.png} \end{center} \item $i$ \begin{center} \includegraphics[width=0.6\linewidth] {images/974e031a512b11ed.png} \end{center} \item $i, j, \lambda$, $i \neq j$ \[ C_{i, j, \lambda} = \id + E_{i, j} \] \begin{center} \includegraphics[width=0.6\linewidth] {images/e69a9d98512b11ed.png} \end{center} \end{enumerate} \end{itemize} Link between elementary operations / matrices: \\ an elementary column (row) operation can be performed by multiplying $A$ by the corresponding elementary matrix from the right (left) $\to$ Exercise. \myskip Now a constructive proof that any $m \times n$ matrix is equivalent to \[ \left( \begin{tabular}{c|c} $I_r$ & $0$ \\ \hline $0$ & $0$ \end{tabular} \right) \] \begin{itemize} \item Start with $A$. If all entries are zero, done. \item Pick $a_{ij} = \lambda \neq 0$. Swap rows $i$ and 1 and swap columns $j$ and 1. Then $\lambda$ is in position (1, 1) \item Multiply column 1 by $\frac{1}{\lambda}$ to get 1 in position (1, 1). \item Now clean out row 1 and column 1 using elementary operations of type (iii). \item Iterate with $\tilde{A}$ (the $(m - 1) \times (n - 1)$ sub matrix) \item Then at the end of the process we will have shown that \[ \left( \begin{tabular}{c|c} $I_r$ & $0$ \\ \hline $0$ & $0$ \end{tabular} \right) \equiv Q^{-1} AP = \ub{E_p' \cdots E_1'}_{\text{row operations}} A \ub{E_1 \cdots E_c}_{\text{column operations}} \] \end{itemize} \subsubsection*{Variation} \begin{itemize} \item Gauss' pivot algorithm. If you use \emph{only} row operations, we can reach the so called ''row echelon form'' of the matrix \begin{center} \includegraphics[width=0.6\linewidth] {images/dfa1f3dc512c11ed.png} \end{center} \item Assume that $a_{i1} \neq 0$ for some $i$ \item Swap rows $i$ and 1 \item Divide first row by $\lambda = a_{i1}$, to get 1 in (1, 1) \item Use 1 to clean the rest of the first column \item Move to second column \item Iterate. \end{itemize} This procedure is exactly what you do when solving a linear system of equations: Gauss' pivot algorithm \subsubsection*{Representation of square invertible matrices} \begin{flashcard}[I-n-row-operations-only] \begin{lemma*} If $A$ is $n \times n$ \emph{square invertible} matrix, then \cloze{we can obtain $I_n$ using row elementary operations only (or column operations only).} \end{lemma*} \end{flashcard} \begin{proof} \begin{itemize} \item We do the proof for column operations. We argue by induction on the number of rows \item Suppose that we could reach a form where the upper left corner is $I_k$. We want to obtain the same structure with $k \to k + 1$. \item Claim: there exists $j > k$ such that $\lambda = a_{k + 1, j} \neq 0$. Otherwise the vector $\delta_{i(k + 1)}$ is \emph{not} in the span of the column vectors of $A$ (exercise) which contradicts the assumption that $A$ is invertible. \item Swap column $k + 1$ and $j$ \item Divide column $k + 1$ by $\lambda = a_{k + 1, j} \neq 0$ \item Use 1 to clear the rest of the $k + 1$-th row using elementary operation of type (iii). \item This completes the inductive step. \item Continue until $k = n$. \end{itemize} \end{proof} \myskip Outcome: \[ AE_1 \cdots E_c = I_n \] \[ \implies A^{-1} = E_1 \cdots E_c \] so this gives an algorithm for computing $A^{-1}$. (useful for solving $AX = F$, linear system of equations). \begin{proposition*} Any invertible square matrix is a product of elementary matrices. \end{proposition*}