% vim: tw=50
% 17/10/2022 11AM
\begin{definition*}[Rank and nullity]
\begin{itemize}
\item $r(\alpha) = \dim \Image \alpha$ (rank)
\item $n(\alpha) = \dim \Ker \alpha$ (nullity)
\end{itemize}
\end{definition*}
\begin{theorem*}[Rank nullity theorem]
\begin{itemize}
\item Let $U, V$ be vector spaces over $F$, $\dim_F U < +\infty$.
\item Let $\alpha : U \to V$ be a linear map, then
\[ \dim U = r(\alpha) + n(\alpha) \]
\end{itemize}
\end{theorem*}
\begin{proof}
We have proved that $U / \Ker \alpha \simeq \Image \alpha$. So $\dim (U / \Ker \alpha) = \dim \Image \alpha$. But $\dim (U / \Ker \alpha) = \dim U - \dim \Ker \alpha$. So $\dim U = \dim \Ker \alpha + \dim \Image \alpha = r(\alpha) + n(\alpha)$.
\end{proof}
\begin{lemma*}
Let $V$, $W$ be vector spaces over $F$ of \emph{equal} finite dimension. Let $\alpha : V \to W$ be a linear map. Then the following are equivalent:
\begin{itemize}
\item $\alpha$ is injective
\item $\alpha$ is surjective
\item $\alpha$ is an isomorphism
\end{itemize}
\end{lemma*}
\begin{proof}
Follows immediately from the rank-nullity theorem. (Exercise)
\end{proof}
\begin{example*}
Let $V = \{(x, y, z) \in \RR^3 : x + y + z = 0\}$. Then consider $\alpha : \RR^3 \to \RR$ defined by $(x, y, z) \mapsto x + y + z$. Then $\Ker \alpha = V$ and $\Image \alpha = \RR$, hence by rank-nullity $3 = n(\alpha) + 1$, hence $\dim V = n(\alpha) = 2$.
\end{example*}
\subsection{Linear maps from $V$ to $W$ and matrices}
The space of linear maps from $V$ to $W$. Let $V$, $W$ be vector spaces over $F$.
\[ L(V, W) = \{\alpha : V \to W \text{ linear}\} \]
\begin{proposition*}
$L(V, W)$ is a vector space over $F$ with:
\[ (\alpha_1 + \alpha_2)(v) = \alpha_1(v) + \alpha_2(v) \]
\[ (\lambda \alpha)(v) = \lambda \alpha (v) \]
Moreover if $V$ and $W$ are finite dimensional over $F$, then so is $L(V, W)$ and:
\[ \dim_F L(V, W) = (\dim_F V) (\dim_F W) \]
\end{proposition*}
\begin{proof}
Proof that it is a vector space is an exercise. \\
We will prove the statement about dimensions soon.
\end{proof}
\subsubsection*{Matrices and linear maps}
\begin{definition*}[Matrix]
An $m \times n$ matrix over $F$ is an array with $m$ rows and $n$ columns with entries in $F$.
\end{definition*}
\begin{notation*}
$\mathcal{M}_{m, n}(F)$ is the set of $m \times n$ matrices over $F$.
\end{notation*}
\begin{proposition*}
$\mathcal{M}_{m, n}(F)$ is an $F$ vector space under the operations:
\begin{itemize}
\item $(a_{ij}) + (b_{ij}) = (a_{ij} + b_{ij})$
\item $\lambda (a_{ij}) = (\lambda a_{ij})$
\end{itemize}
\end{proposition*}
\begin{proof}
Exercise.
\end{proof}
\begin{proposition*}
$\dim_F \mathcal{M}_{m, n}(F) = m \times n$.
\end{proposition*}
\begin{proof}
We exhibit a basis using \emph{elementary} matrices. Pick $1 \le i \le m$, $1 \le j \le n$. Then we define $E_{ij}$ to be the matrix which is 0 everywhere, except it is 1 in the entry that is in the $i$-th row and $j$-th column. Then $(E_{ij})$ is a basis of $\mathcal{M}_{m, n}(F)$: it clearly spans $\mathcal{M}_{m, n}(F)$, and showing that the family is free is an exercise.
\end{proof}
\subsubsection*{Representation of linear maps}
\begin{itemize}
\item $V, W$ vector spaces over $F$, $\alpha : V \to W$ linear map.
\item Let $\mathcal{B} = (v_1, \dots, v_n)$ be a basis of $V$, and $\mathcal{C} = (w_1, \dots, w_m)$ a basis of $W$.
\item Let $v \in V$, then we can write
\[ v = \sum_{j = 1}^n \lambda_j v_j \]
so we can consider the coordinates $(\lambda_1, \dots, \lambda_n) \in F^n$ of $v$ in the basis $\mathcal{B}$. We may write this as $[v]_{\mathcal{B}}$.
\item Similarly for $w \in W$, we write $[w]_{\mathcal{C}}$ for the coordinates of $w$ in the basis $\mathcal{C}$.
\end{itemize}
\begin{definition*}[Matrix of $\alpha$ in $\mathcal{B}, \mathcal{C}$ basis]
\[ [\alpha]_{\mathcal{B}, \mathcal{C}} \equiv \text{matrix of $\alpha$ with respect to $\mathcal{B}, \mathcal{C}$} \]
We define it as:
\[ [\alpha]_{\mathcal{B}, \mathcal{C}} = \begin{pmatrix} \vdots & \vdots & & \vdots \\ [\alpha(v_1)]_{\mathcal{C}} & [\alpha(v_2)]_{\mathcal{C}} & \cdots & [\alpha(v_n)]_{\mathcal{C}} \\ \vdots & \vdots & & \vdots \end{pmatrix} \]
\end{definition*}
\noindent Observation: writing $[\alpha]_{\mathcal{B}, \mathcal{C}} = (a_{ij})$, we have
\[ \alpha(v_j) = \sum_{i = 1}^m a_{ij} w_i \]
\begin{lemma*}
For any $v \in V$,
\[ [\alpha(v)]_{\mathcal{C}} = [\alpha]_{\mathcal{B}, \mathcal{C}} \cdot [v]_{\mathcal{B}} \]
where the product of a matrix $A = (a_{ij})$ with a coordinate vector $(\lambda_1, \dots, \lambda_n)$ is given by
\[ (Av)_i = \sum_{j = 1}^n a_{ij} \lambda_j \]
\end{lemma*}
\begin{proof}
Let $v \in V$, with
\[ v = \sum_{j = 1}^n \lambda_j v_j \]
Then
\begin{align*} \alpha(v) &= \alpha \left( \sum_{j = 1}^n \lambda_j v_j\right) \\ &= \sum_{j = 1}^n \lambda_j \alpha(v_j) \\ &= \sum_{j = 1}^n \lambda_j \sum_{i = 1}^m a_{ij} w_i \\ &= \sum_{i = 1}^m \left( \sum_{j = 1}^n a_{ij} \lambda_j \right) w_i \end{align*}
so the $i$-th coordinate of $[\alpha(v)]_{\mathcal{C}}$ is $\sum_{j = 1}^n a_{ij} \lambda_j = ([\alpha]_{\mathcal{B}, \mathcal{C}} [v]_{\mathcal{B}})_i$, as claimed.
\end{proof}
\begin{lemma*}
Let $\beta : U \to V$, $\alpha : V \to W$ be linear, and hence $\alpha \circ \beta : U \to W$ linear. Let $\mathcal{A}$ be a basis of $U$, $\mathcal{B}$ be a basis of $V$, and $\mathcal{C}$ a basis of $W$. Then
\[ [\alpha \circ \beta]_{\mathcal{A}, \mathcal{C}} = [\alpha]_{\mathcal{B}, \mathcal{C}} [\beta]_{\mathcal{A},\mathcal{B}} \]
\end{lemma*}
\begin{proof}
Let $A = [\alpha]_{\mathcal{B}, \mathcal{C}}$, $B = [\beta]_{\mathcal{A}, \mathcal{B}}$. Pick $u_l \in \mathcal{A}$. Then
\begin{align*} (\alpha \circ \beta)(u_l) &= \alpha(\beta(u_l)) \\ &= \alpha \left(\sum_j b_{jl} v_j \right) \\ &= \sum_j b_{jl} \alpha(v_j) \\ &= \sum_j b_{jl} \sum_i a_{ij} w_i \\ &= \sum_i \left( \sum_j a_{ij} b_{jl} \right) w_i \qedhere \end{align*}
\end{proof}
\begin{proposition*}
If $V$ and $W$ are vector spaces over $F$ and $\dim_F V = n$ and $\dim_F W = m$.
Then $L(V, W) \simeq \mathcal{M}_{m, n}(F)$, and in particular, $\dim L(V, W) = m \times n$.
\end{proposition*}
\begin{proof}
Fix bases $\mathcal{B}, \mathcal{C}$ of $V$, $W$ respectively.
\begin{claim*}
$\theta : L(V, W) \to \mathcal{M}_{m, n}(F)$ defined by $\alpha \mapsto [\alpha]_{\mathcal{B}, \mathcal{C}}$ is an isomorphism.
\end{claim*}
\begin{itemize}
\item $\theta$ is linear:
\[ [\lambda_1 \alpha_1 + \lambda_2 \alpha_2]_{\mathcal{B}, \mathcal{C}} = \lambda_1 [\alpha_1]_{\mathcal{B}, \mathcal{C}} + \lambda_2 [\alpha_2]_{\mathcal{B}, \mathcal{C}} \]
\item $\theta$ is surjective: let
\[ A = (a_{ij}) \]
Consider the map:
\[ \alpha : v_j \mapsto \sum_{i = 1}^m a_{ij} w_i \]
and extend by linearity. Then $[\alpha]_{\mathcal{B}, \mathcal{C}} = A$.
\item $\theta$ is injective because
\[ [\alpha]_{\mathcal{B}, \mathcal{C}} = 0 \implies \alpha \equiv 0 \]
\end{itemize}
Hence, using $\theta$, $L(V, W) \simeq \mathcal{M}_{m, n}(F)$.
\end{proof}
\begin{remark*}
Let $\mathcal{B}, \mathcal{C}$ be bases of $V, W$. Let $\eps_{\mathcal{B}} : V \to F^n$ be defined such that $v \mapsto [v]_{\mathcal{B}}$, and similarly define $\eps_{\mathcal{C}} : W \to F^m$ such that $w \mapsto [w]_{\mathcal{C}}$. Then the following diagram commutes:
\begin{center}
\includegraphics[width=0.6\linewidth] {images/b3088fea4e7311ed.png}
\end{center}
\end{remark*}