%! TEX root = RT.tex
% vim: tw=50
% 30/10/2023 11AM
$\chichar_0 = 0$ where $0$ is the $k$-vector space of dimension $0$, so $\GL(0) = \{\id_0\}$. $\chichar_k = \indicator{G}$ where $k$ is the \gls{triv_rep}.

\textbf{Goal:} If $V$, $W$ are \glspl{rep}, build a \gls{rep} $V \otimes W$ such that $\chichar_{V \otimes W} = \chichar_V \cdot \chichar_W$.

\subsection{Tensor Products}

Suppose $V$ and $W$ are vector spaces over $k$ with bases $v_1, \ldots, v_m$ and $w_1, \ldots, w_n$ respectively. We can view $V \oplus W$ either as the set of pairs $(v, w)$ with $v \in V$, $w \in W$ under pointwise operations, or as the vector space with basis $v_1, \ldots, v_m, w_1, \ldots, w_n$.

\begin{flashcard}[tensor-prod-defn]
\begin{definition*}
\glsnoundefn{tens_prod}{tensor product}{tensor products}
\glssymboldefn{tensor_product}{$\otimes$}{$\otimes$}
The \emph{tensor product} \cloze{$V \otimes W$ of $V$ and $W$ is the $k$-vector space with basis $v_i \otimes w_j$ for $1 \le i \le m$ and $1 \le j \le n$ (so $\dim V \otimes W = (\dim V)(\dim W)$).}
\end{definition*}
\end{flashcard}

\begin{example*}
If $X$ and $Y$ are finite then $kX \tensprod kY$ has basis $\delta_x \tensvecprod \delta_y$ for $x \in X$, $y \in Y$, and $\alpha_{X \times Y} : kX \tensprod kY \to k(X \times Y)$, $\delta_x \tensvecprod \delta_y \mapsto \delta_{(x, y)}$ extends to an isomorphism of vector spaces.
\begin{notation*}
If $v = \sum \lambda_i v_i \in V$ and $w = \sum \mu_j w_j \in W$ then
\[ v \tensvecprod w \defeq \sum_{i, j} \lambda_i \mu_j v_i \tensvecprod w_j \in V \tensprod W .\]
\end{notation*}
So under $\alpha_{X \times Y}$,
\[ \alpha_{X \times Y}(f \tensvecprod g)(x, y) = f(x) g(y) .\]
Note that in general, not every element of $V \tensprod W$ can be written in the form $v \tensvecprod w$; for example, $v_1 \tensvecprod w_1 + v_2 \tensvecprod w_2$ cannot (when $m, n \ge 2$). The smallest number of summands needed is called the \emph{rank} of the tensor.
\end{example*}

\begin{lemma*}
The map $V \times W \to V \tensprod W$, $(v, w) \mapsto v \tensvecprod w$ is bilinear.
\end{lemma*}

\begin{proof}
We should prove that if $x, x_1, x_2 \in V$, $y, y_1, y_2 \in W$ and $u_1, u_2 \in k$ then
\begin{align*}
(u_1 x_1 + u_2 x_2) \tensvecprod y &= u_1 (x_1 \tensvecprod y) + u_2 (x_2 \tensvecprod y) \\
x \tensvecprod (u_1 y_1 + u_2 y_2) &= u_1 (x \tensvecprod y_1) + u_2 (x \tensvecprod y_2)
\end{align*}
We'll do the second and then appeal to symmetry. We write $x = \sum \lambda_i v_i$ and $y_r = \sum \mu_j^r w_j$ for $r = 1, 2$. Then
\begin{align*}
x \tensvecprod (u_1 y_1 + u_2 y_2) &= \sum_{i, j} \lambda_i (u_1 \mu_j^1 + u_2 \mu_j^2)(v_i \tensvecprod w_j) \\
u_1 (x \tensvecprod y_1) + u_2 (x \tensvecprod y_2) &= \sum_{i, j} u_1 \lambda_i \mu_j^1 (v_i \tensvecprod w_j) + \sum_{i, j} u_2 \lambda_i \mu_j^2 (v_i \tensvecprod w_j)
\end{align*}
These are equal.
\end{proof}

\textbf{Exercise:} Show that given $U$, $V$ and $W$ there is a one-to-one correspondence
\[ \{\text{linear maps $V \tensprod W \to U$}\} \to \{\text{bilinear maps $V \times W \to U$}\} \]
given by precomposition with the bilinear map $(v, w) \mapsto v \tensvecprod w$.

\begin{lemma*}
If $x_1, \ldots, x_m$ is any basis for $V$ and $y_1, \ldots, y_n$ is any basis for $W$ then $x_i \tensvecprod y_j$ with $1 \le i \le m$ and $1 \le j \le n$ is a basis for $V \tensprod W$. Thus the definition of $V \tensprod W$ does not depend on the choice of basis.
\end{lemma*}

\begin{proof}
It suffices to show that the given set spans $V \tensprod W$, since it has size $mn$. But if $v_i = \sum_r A_{ri} x_r$ and $w_j = \sum_s B_{sj} y_s$ then
\[ v_i \tensvecprod w_j = \sum_{r, s} (A_{ri} B_{sj})(x_r \tensvecprod y_s) .\]
But $\{v_i \tensvecprod w_j\}_{1 \le i \le m, 1 \le j \le n}$ spans $V \tensprod W$, so we're done.
\end{proof}
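One way to see that $v_1 \tensvecprod w_1 + v_2 \tensvecprod w_2$ from the earlier example really has rank $2$ (a sketch via matrices, assuming $m, n \ge 2$): identify $\sum_{i, j} c_{ij} (v_i \tensvecprod w_j) \in V \tensprod W$ with the $m \times n$ matrix $(c_{ij})$. Under this identification,
\[ v \tensvecprod w = \sum_{i, j} \lambda_i \mu_j (v_i \tensvecprod w_j) \longleftrightarrow (\lambda_i \mu_j)_{i, j} \]
which has rank at most $1$, whereas $v_1 \tensvecprod w_1 + v_2 \tensvecprod w_2$ corresponds to a matrix with two linearly independent rows, hence rank $2$. So it is not of the form $v \tensvecprod w$; in fact the rank of a tensor in $V \tensprod W$ is exactly the rank of the corresponding matrix.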
\begin{remark*}[for enthusiasts]
In fact we could have defined $V \tensprod W$ in a basis-independent way. Let $F$ be the (infinite dimensional) vector space with basis $\{v \tensvecprod w \st v \in V, w \in W\}$ and $R$ the subspace spanned by the elements
\begin{align*}
&x \tensvecprod (u_1 y_1 + u_2 y_2) - u_1 (x \tensvecprod y_1) - u_2 (x \tensvecprod y_2) \\
&(u_1 x_1 + u_2 x_2) \tensvecprod y - u_1 (x_1 \tensvecprod y) - u_2 (x_2 \tensvecprod y)
\end{align*}
for all $x, x_1, x_2 \in V$, $y, y_1, y_2 \in W$, $u_1, u_2 \in k$. Then let $V \tensprod W = F / R$.
\end{remark*}

\textbf{Exercise:} Show that for vector spaces $U$, $V$ and $W$ there is a natural (basis independent) isomorphism
\[ (U \oplus V) \tensprod W \to (U \tensprod W) \oplus (V \tensprod W) .\]

\begin{flashcard}[lin-map-tens-prod]
\begin{definition*}
\glssymboldefn{tens_lin_prod}{$\otimes$}{$\otimes$}
Suppose $V$ and $W$ are vector spaces with bases $v_1, \ldots, v_m$ and $w_1, \ldots, w_n$, and $\varphi : V \to V$ and $\psi : W \to W$ are linear maps. We can define
\begin{align*}
\varphi \otimes \psi &: \cloze{V \tensprod W \to V \tensprod W} \\
(\varphi \otimes \psi) &\cloze{(v_i \tensvecprod w_j) = \varphi(v_i) \tensvecprod \psi(w_j)}
\end{align*}
\end{definition*}
\end{flashcard}

\begin{example*}
If $\varphi$ is represented by $A$ and $\psi$ is represented by $B$ with respect to the given bases, and we order the $v_i \tvprod w_j$ lexicographically (i.e. $v_1 \tvprod w_1$, $v_1 \tvprod w_2$, $v_1 \tvprod w_3$, \ldots, $v_1 \tvprod w_n$, $v_2 \tvprod w_1$, \ldots, $v_m \tvprod w_n$), then $\varphi \tlprod \psi$ is represented by the block matrix
\[ \begin{pmatrix}
A_{11} B & A_{12} B & \cdots & A_{1m} B \\
A_{21} B & A_{22} B & \cdots & A_{2m} B \\
\vdots & \vdots & \ddots & \vdots \\
A_{m1} B & A_{m2} B & \cdots & A_{mm} B
\end{pmatrix} \]
since
\[ (\varphi \tlprod \psi)(v_i \tvprod w_j) = \left( \sum_k A_{ki} v_k \right) \tvprod \left( \sum_l B_{lj} w_l \right) = \sum_{k, l} A_{ki} B_{lj} \, v_k \tvprod w_l .\]
\end{example*}

\begin{lemma*}
The linear map $\varphi \tlprod \psi$ does not depend on the bases. Indeed,
\[ (\varphi \tlprod \psi)(v \tvprod w) = \varphi(v) \tvprod \psi(w) \qquad \forall v \in V, w \in W .\]
\end{lemma*}

\begin{proof}
Writing $v = \sum \lambda_i v_i$ and $w = \sum \mu_j w_j$,
\begin{align*}
(\varphi \tlprod \psi)(v \tvprod w) &= (\varphi \tlprod \psi) \left( \sum_{i, j} \lambda_i \mu_j v_i \tvprod w_j \right) \\
&= \sum_{i, j} \lambda_i \mu_j \varphi(v_i) \tvprod \psi(w_j) \\
&= \varphi(v) \tvprod \psi(w)
\end{align*}
\end{proof}

\begin{remark*}
The proof really says that $V \times W \to V \tprod W$, $(v, w) \mapsto \varphi(v) \tvprod \psi(w)$ is bilinear and $\varphi \tlprod \psi$ is the corresponding linear map $V \tprod W \to V \tprod W$ from an earlier exercise.
\end{remark*}
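For concreteness, here is a made-up numerical instance of the block matrix description above (with $m = n = 2$): if $\varphi$ and $\psi$ are represented by
\[ A = \begin{pmatrix} 1 & 2 \\ 3 & 4 \end{pmatrix}, \qquad B = \begin{pmatrix} 5 & 6 \\ 7 & 8 \end{pmatrix} \]
then in the lexicographic basis $v_1 \tvprod w_1$, $v_1 \tvprod w_2$, $v_2 \tvprod w_1$, $v_2 \tvprod w_2$ the map $\varphi \tlprod \psi$ is represented by
\[ \begin{pmatrix} A_{11} B & A_{12} B \\ A_{21} B & A_{22} B \end{pmatrix}
= \begin{pmatrix}
5 & 6 & 10 & 12 \\
7 & 8 & 14 & 16 \\
15 & 18 & 20 & 24 \\
21 & 24 & 28 & 32
\end{pmatrix} \]
with trace $5 + 8 + 20 + 32 = 65 = 5 \cdot 13 = (\Trace A)(\Trace B)$, in line with part (iii) of the lemma below.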
\begin{lemma*}
\refsteplabel[the last lemma last time]{the_last_lemma_last_time_l11}
Suppose $\varphi, \varphi_1, \varphi_2 \in \Hom_k(V, V)$ and $\psi, \psi_1, \psi_2 \in \Hom_k(W, W)$. Then
\begin{enumerate}[(i)]
\item $(\varphi_1 \varphi_2) \tlprod (\psi_1 \psi_2) = (\varphi_1 \tlprod \psi_1)(\varphi_2 \tlprod \psi_2) \in \Hom_k(V \tprod W, V \tprod W)$.
\item $\id_V \tlprod \id_W = \id_{V \tprod W}$.
\item $\Trace(\varphi \tlprod \psi) = \Trace(\varphi) \Trace(\psi)$.
\end{enumerate}
\end{lemma*}

\begin{proof}
\phantom{}
\begin{enumerate}[(i)]
\item Given $v \in V$, $w \in W$, by the last lemma we can compute
\begin{align*}
((\varphi_1 \varphi_2) \tlprod (\psi_1 \psi_2))(v \tvprod w) &= \varphi_1 \varphi_2(v) \tvprod \psi_1 \psi_2(w) \\
&= (\varphi_1 \tlprod \psi_1)(\varphi_2(v) \tvprod \psi_2(w)) \\
&= (\varphi_1 \tlprod \psi_1)(\varphi_2 \tlprod \psi_2)(v \tvprod w)
\end{align*}
We're done since all maps are linear and $\{v \tvprod w\}$ spans $V \tprod W$.
\item Is clear.
\item By the earlier example, the diagonal blocks of the matrix representing $\varphi \tlprod \psi$ are $A_{11} B, \ldots, A_{mm} B$, and only these contribute to the trace, so it suffices to see
\[ \Trace \begin{pmatrix} A_{11} B & & & \\ & A_{22} B & & \\ & & \ddots & \\ & & & A_{mm} B \end{pmatrix} = \sum_{i, j} A_{ii} B_{jj} = (\Trace A)(\Trace B) \qedhere \]
\end{enumerate}
\end{proof}

\begin{flashcard}[rep-tens-prod-defn]
\begin{definition*}
\glssymboldefn{tens_rep_prod}{$\otimes$}{$\otimes$}
Given two \glspl{rep} $(\rho, V)$ and $(\sigma, W)$ of a group $G$ we can define a \gls{rep} $(\rho \otimes \sigma, V \tprod W)$ via
\[ (\rho \otimes \sigma)(g) = \cloze{\rho(g) \tlprod \sigma(g)} \]
\end{definition*}
\end{flashcard}
\vspace{-1em}
This is a \gls{rep} by parts (i) and (ii) of the last lemma, and $\chichar_{\rho \trprod \sigma} = \chichar_\rho \cdot \chichar_\sigma$ by part (iii).
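As a minimal sanity check of this character formula (a sketch using the sign \gls{rep} of $C_2$): let $G = \langle g \rangle \cong C_2$ and let $(\rho, k)$ be the one dimensional \gls{rep} with $\rho(g) = -1$. Then $(\rho \trprod \rho)(g) = \rho(g) \tlprod \rho(g)$ acts on the one dimensional space $k \tprod k$ as multiplication by $(-1)(-1) = 1$, so $\rho \trprod \rho$ is the \gls{triv_rep} and indeed
\[ \chichar_{\rho \trprod \rho}(g) = \chichar_\rho(g) \cdot \chichar_\rho(g) = (-1)(-1) = 1 = \indicator{G}(g) .\]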