% DEFINE some information that will be populated throughout the course notes. \def \coursename {Advanced Linear Algebra} \def \coursecode {MATH 3221} \def \courseterm {Fall 2020} \def \instructorname {Nathan Johnston} % END DEFINITIONS % IMPORT the course note formatting and templates \input{course_notes_template} % END IMPORT %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% \setcounter{chapter}{0} % Set to one less than the week number \chapter{Vector Spaces} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% {\large This week we will learn about: \begin{itemize} \item Abstract vector spaces, \item How to do linear algebra over fields other than $\R$, \item How to do linear algebra with things that don't look like vectors, and \item Linear combinations and linear (in)dependence (again). \end{itemize}\bigskip\bigskip \noindent Extra reading and watching: \begin{itemize} \item Sections 1.1.1 and 1.1.2 in the textbook \item Lecture videos \href{https://www.youtube.com/watch?v=1ADC9rZQ11E&list=PLOAf1ViVP13jdhvy-wVS7aR02xnDxueuL&index=1}{1}, \href{https://www.youtube.com/watch?v=-CDNQY1GTtA&list=PLOAf1ViVP13jdhvy-wVS7aR02xnDxueuL&index=2}{1.5}, \href{https://www.youtube.com/watch?v=OYC2_jiO8ks&list=PLOAf1ViVP13jdhvy-wVS7aR02xnDxueuL&index=3}{2}, \href{https://www.youtube.com/watch?v=RIDcmbIY70E&list=PLOAf1ViVP13jdhvy-wVS7aR02xnDxueuL&index=4}{3}, and \href{https://www.youtube.com/watch?v=nAaNM_xKpOk&list=PLOAf1ViVP13jdhvy-wVS7aR02xnDxueuL&index=5}{4} on YouTube \item \href{http://en.wikipedia.org/wiki/Vector_space}{Vector space} at Wikipedia \item \href{http://en.wikipedia.org/wiki/Complex_number}{Complex number} at Wikipedia \item \href{http://en.wikipedia.org/wiki/Linear_independence}{Linear independence} at Wikipedia \end{itemize}\bigskip\bigskip \noindent Extra textbook problems: \begin{itemize} \item[$\star$] 1.1.1, 1.1.4(a--f,h) \item[$\phantom{\star}\star\star$] 1.1.2, 1.1.5, 1.1.6, 1.1.8, 1.1.10, 1.1.17, 1.1.18 \item[$\star\star\star$] 1.1.9, 1.1.12, 1.1.19, 1.1.21, 1.1.22 \item[$\skull$] none this week \end{itemize}} \newpage In the previous linear algebra course (MATH~2221), for the most part you learned how to perform computations with vectors and matrices. Some things that you learned how to compute include: \horlines{9} % Solving linear systems of equations % Dot product % Matrix multiplication % Standard matrix of a linear transformation % Determinants % Eigenvalues/eigenvectors In this course, we will be working with many of these same objects, but we are going to generalize them and look at them in strange settings where we didn't know we could use them. For example: \horlines{9} % Can work with vectors with entries other than real numbers (e.g., complex, Z_p, etc). % Can work with "vectors" that don't really look like vectors at all (e.g., functions). % Derivative of a function is a linear transformation. Can be represented by a matrix -> can do calculus stuff with linear algebra! \newpage In order to use our linear algebra tools in a more general setting, we need a proper definition that tells us what types of objects we can consider. The following definition makes this precise, and the intuition behind it is that the objects we work with should be ``like'' vectors in $\R^n$: \begin{definition}[Vector Space] Let $\V$ be a set and let $\mathbb{F}$ be a field. Let $\mathbf{v},\mathbf{w} \in \V$ and $c \in \mathbb{F}$, and suppose we have defined two operations called \emph{addition} and \emph{scalar multiplication} on $\V$. 
We write the addition of $\mathbf{v}$ and $\mathbf{w}$ as $\mathbf{v}+\mathbf{w}$, and the scalar multiplication of $c$ and $\mathbf{v}$ as $c\mathbf{v}$. \\
If the following ten conditions hold for all $\mathbf{v},\mathbf{w},\mathbf{x} \in \V$ and all $c,d \in \mathbb{F}$, then $\V$ is called a \textbf{vector space} and its elements are called \textbf{vectors}:
\begin{enumerate}[label=\alph*)]
\item $\mathbf{v} + \mathbf{w} \in \V$ \hfill {\color{gray}(closure under addition)}
\item $\mathbf{v} + \mathbf{w} = \mathbf{w} + \mathbf{v}$ \hfill {\color{gray}(commutativity)}
\item $(\mathbf{v} + \mathbf{w}) + \mathbf{x} = \mathbf{v} + (\mathbf{w} + \mathbf{x})$ \hfill {\color{gray}(associativity)}
\item There exists a ``zero vector'' $\mathbf{0} \in \V$ such that $\mathbf{v} + \mathbf{0} = \mathbf{v}$.
\item There exists a vector $-\mathbf{v}$ such that $\mathbf{v} + (-\mathbf{v}) = \mathbf{0}$.
\item $c\mathbf{v} \in \V$ \hfill {\color{gray}(closure under scalar multiplication)}
\item $c(\mathbf{v} + \mathbf{w}) = c\mathbf{v} + c\mathbf{w}$ \hfill {\color{gray}(distributivity)}
\item $(c+d)\mathbf{v} = c\mathbf{v} + d\mathbf{v}$ \hfill {\color{gray}(distributivity)}
\item $c(d\mathbf{v}) = (cd)\mathbf{v}$
\item $1\mathbf{v} = \mathbf{v}$
\end{enumerate}
\end{definition}

\noindent Some points of interest are in order:
\begin{itemize}
\item A field $\mathbb{F}$ is basically just a set on which we can add, subtract, multiply, and divide according to the usual laws of arithmetic.
\horlines{4}
% Tell students not to worry about it.
% Examples of fields: R, C, Z_p where p is prime
% NOT matrices (multiple elements we can't divide by)
% In this course, always R or C

\newpage

\item Vectors might not look anything like the vectors that you are used to. Similarly, vector addition and scalar multiplication might look weird too (we will look at some examples).
\end{itemize}

\exx[8]{$\R^n$ is a vector space.}
% Go through some of the axioms to show that $\mathbb{R}^n$ is a vector space.

\exx[9]{$\mathcal{F}$, the set of all functions $f : \R \rightarrow \R$, is a vector space.}
% Go through some of the axioms to show that this is a vector space.
% Mention that it's infinite-dimensional (maybe?), so it's slightly different from other vector spaces we'll consider.

\newpage

\exx[2]{$\M_{m,n}(\mathbb{F})$, the set of all $m \times n$ matrices with entries from $\mathbb{F}$, is a vector space.}
% Go through some of the axioms to show that this is a vector space. Or just say "is similar".

Be careful: the operations that we call vector addition and scalar multiplication just have to satisfy the ten conditions of the definition---they do not have to look \emph{anything} like what we usually call ``addition'' or ``multiplication.''

\exx[10]{Let $\V = \{x \in \R : x > 0\}$ be the set of positive real numbers. Define addition $\oplus$ on this set via usual multiplication of real numbers (i.e., $\x \oplus \y = xy$), and scalar multiplication $\odot$ on this set via exponentiation (i.e., $c \odot \x = x^c$). Show that this is a vector space.}
% Go through some of the axioms to show that this is a vector space.
% Zero vector is 1, for example.

OK, so vectors and vector spaces can in fact look quite different from $\mathbb{R}^n$. However, doing math with them isn't much different at all: almost all facts that we proved in MATH~2221 actually only relied on the ten vector space properties given a couple of pages ago.
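For example, one fact from MATH~2221 that holds in \emph{any} vector space is that $0\mathbf{v} = \mathbf{0}$ for all $\mathbf{v} \in \V$ (this is proved in the textbook). Here is one way to see it using only the ten defining properties: property~(h) tells us that $0\mathbf{v} = (0+0)\mathbf{v} = 0\mathbf{v} + 0\mathbf{v}$, so adding the vector $-(0\mathbf{v})$ guaranteed by property~(e) to both sides, and then using properties~(c), (e), and~(d), shows that
\begin{align*}
\mathbf{0} = 0\mathbf{v} + (-(0\mathbf{v})) = (0\mathbf{v} + 0\mathbf{v}) + (-(0\mathbf{v})) = 0\mathbf{v} + \big(0\mathbf{v} + (-(0\mathbf{v}))\big) = 0\mathbf{v} + \mathbf{0} = 0\mathbf{v}.
\end{align*}
We will make use of this fact again when we work with subspaces shortly.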
\\ Thus we will see that really not much changes when we do linear algebra in this more general setting. We will re-introduce the core concepts (e.g., subspaces and linear independence), but only very quickly, as they do not change significantly.

\newpage

\section*{Complex Numbers}

As mentioned earlier, the field $\mathbb{F}$ we will be working with throughout this course will always be $\R$ (the real numbers) or $\C$ (the complex numbers). Since complex numbers make linear algebra work so nicely, we give them a one-page introduction:
\begin{itemize}
\item We define $i$ to be a number that satisfies $i^2 = -1$ (clearly, $i$ is not a member of $\R$). \\[-0.5cm]
\item An \textbf{imaginary number} is a number of the form $bi$, where $b \in \R$. \\
% note that this terminology is *terrible*
\item A \textbf{complex number} is a number of the form $a + bi$, where $a,b \in \mathbb{R}$. \\
% Label a and b as the real and imaginary part of a+bi
% END OF CLASS 1
\item Arithmetic with complex numbers works just how you might naively expect:
\horlines{2}
\vspace*{-1.35in}
\begin{align*}
(a + bi) + (c + di) & = \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \\[1em]
% (a+c) + (b+d)i
(a + bi)(c + di) & = \quad \quad \quad \quad \quad \quad
% (ac-bd) + (ad + bc)i
% Mention "FOIL"
\end{align*}
\item Much like we think of $\mathbb{R}$ as a line, we can think of $\mathbb{C}$ as a plane, and the number $a+bi$ has coordinates $(a,b)$ on that plane.
\horlines{4}
\vspace*{-1cm}
% Draw plane. Label real axis and imaginary axis.
\item The \textbf{length} (or \textbf{magnitude}) of the complex number $a + bi$ is $|a + bi| = \sqrt{a^2 + b^2}$. \\[-1em]
% Distance from the origin in the complex plane.
% Draw on *previous* picture.
\item The \textbf{complex conjugate} of the complex number $a + bi$ is $\overline{a + bi} = a - bi$. \\[-1em]
% Reflection about the real axis in the complex plane.
% Draw on *previous* picture.
\item We can use the previous facts to check that $(a + bi)\overline{(a + bi)} = |a + bi|^2$.
\horlines{1}\vspace*{-0.8cm}
% Just do the multiplication
\item We can also divide by (non-zero) complex numbers:\\[-0.1in]
\horlines{1}
\vspace*{-1.15in}
\begin{align*}
\frac{a+bi}{c+di} & = \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad
% multiply numerator and denominator by c-di and then simplify
\end{align*}
\end{itemize}

\newpage

\section*{Subspaces}

It will often be useful for us to deal with vector spaces that are contained within other vector spaces. This situation comes up often enough that it gets its own name:

\begin{definition}[Subspace]
If $\V$ is a vector space and $\mathcal{S} \subseteq \V$, then $\mathcal{S}$ is a \textbf{subspace} of $\V$ if $\mathcal{S}$ is itself a vector space with the same addition and scalar multiplication as $\V$.
\end{definition}
% Go back to half-real line example and note that *NOT* a subspace of R

It turns out that checking whether or not something is a subspace is much simpler than checking whether or not it is a vector space. In particular, instead of checking all ten vector space axioms, you only have to check two:

\begin{theorem}[Determining if a Set is a Subspace]
Let $\V$ be a vector space and let $\mathcal{S} \subseteq \V$ be non-empty.
Then $\mathcal{S}$ is a subspace of $\V$ if and only if the following two conditions hold for all $\mathbf{v},\mathbf{w} \in \mathcal{S}$ and all $c \in \mathbb{F}$: \begin{enumerate}[label=\alph*)] \item $\mathbf{v} + \mathbf{w} \in \mathcal{S}$ \hfill {\color{gray}(closure under addition)} \item $c\mathbf{v} \in \mathcal{S}$ \hfill {\color{gray}(closure under scalar multiplication)} \end{enumerate} \end{theorem} \begin{proof} For the ``only if'' direction, \horlines{2} % the two conditions are axioms (a) and (f), so of course they must hold. \noindent For the ``if'' direction, \vspace*{-1.25cm}\horlines{7}\vspace*{-1.25cm} % we know that axioms (a) and (f) hold by hypothesis. We know that axioms b, c, g, h, i, j hold since they hold for ALL vectors in V, so they must hold for all vectors in W too. This leaves axioms (d) and (e). % axiom (d): we need to show that 0 \in S. Well, if v \in S then we know that 0v \in S too since S is closed under scalar multiplication. Well, 0v = 0 (hopefully intuitive enough), but proved in the textbook. % axiom (e): we need to show that if v \in S then -v \in S too. Well, we know that (-1)v \in S since S is closed under scalar multiplication. Well, (-1)v = -v (again, proved in textbook and hopefully intuitive enough), so we are done. \end{proof} \newpage \exx[7]{Is $\mathcal{P}^p$, the set of real-valued polynomials of degree at most $p$, a subspace of $\mathcal{F}$?} % Yes. Show the two properties of the previous theorem hold. \exx[7]{Is the set of $n \times n$ real symmetric matrices a subspace of $\M_{n}(\R)$?} % Yes. % Note about notation: \M_n means \M_{n,n} \exx[4]{Is the set of $2 \times 2$ matrices with determinant $0$ a subspace of $\M_{2}$?} % No. (add up diag(1,0) and diag(0,1)) \newpage \section*{Spans, Linear Combinations, and Independence} We now present some definitions that you likely saw (restricted to $\mathbb{R}^n$) in your first linear algebra course. All of the theorems and proofs involving these definitions carry over just fine when replacing $\mathbb{R}^n$ by a general vector space $\V$. \begin{definition}[Linear Combinations] Let $\V$ be a vector space over the field $\mathbb{F}$, let $\mathbf{v}_1,\mathbf{v}_2,\ldots,\mathbf{v}_k \in \V$, and let $c_1,c_2,\ldots, c_k \in \mathbb{F}$. Then every vector of the form \begin{align*} c_1\mathbf{v}_1 + c_2\mathbf{v}_2 + \cdots + c_k\mathbf{v}_k \end{align*} is called a \textbf{linear combination} of $\mathbf{v}_1,\mathbf{v}_2,\ldots,\mathbf{v}_k$. \end{definition} % Emphasize that linear combinations are still FINITE \exx[6]{Is $\quad \quad \quad \quad \quad \quad$ a linear combination of $\quad \quad \quad \quad \quad \quad$ and $\quad \quad \quad \quad \quad \quad$?} % Polynomials maybe? % Probably need to solve a simple linear system. % END OF CLASS 2 \exx[6]{Is $\quad \quad \quad \quad \quad \quad$ a linear combination of $\quad \quad \quad \quad \quad \quad$ and $\quad \quad \quad \quad \quad \quad$?} % Matrices maybe? % Probably need to solve a simple linear system. \begin{definition}[Span] Let $\V$ be a vector space and let $B \subseteq \V$ be a set of vectors. Then the \textbf{span} of $B$, denoted by $\mathrm{span}(B)$, is the set of all (finite!) linear combinations of vectors from $B$: \[ \mathrm{span}(B) \defeq \left\{ \sum_{j=1}^k c_j\v_j \ \Big| \ k \in \mathbb{N}, \ c_j \in \mathbb{F} \ \text{and} \ \v_j \in B \ \text{for all} \ 1 \leq j \leq k \right\}. \] Furthermore, if $\mathrm{span}(B) = \V$ then $\V$ is said to be \textbf{spanned} by $B$. 
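\end{definition}

For example, if $B = \{1,x,x^2,x^3,\ldots\}$ is the (infinite) set of all powers of $x$, then $3 + 2x - x^4 \in \mathrm{span}(B)$: choose $k = 3$, $\v_1 = 1$, $\v_2 = x$, $\v_3 = x^4$, and $c_1 = 3$, $c_2 = 2$, $c_3 = -1$ in the definition above. Note that even when $B$ is infinite, each individual linear combination only uses finitely many of the vectors in $B$---this distinction will matter shortly.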
\exx[7]{Show that the polynomials $1, x,$ and $x^2$ span $\mathcal{P}^2$.}
% Trivial.
% More generally, 1, x, ..., x^n span P_n. Polynomials are spanned by {1,x,x^2,...}, but things like e^x aren't (despite Taylor series) since a linear combination must be *finite*

\exx[7]{Is $e^x$ in the span of $\{1,x,x^2,x^3,\ldots\}$?}
% Nope! Trick question. Students might think yes because of Taylor series, but the problem is that no *FINITE* linear combination works.
% To actually *prove* that no finite linear combination works, note that derivative of a polynomial is always eventually zero, but derivative of e^x is always non-zero

\newpage

\exx[6]{Let $E_{i,j}$ be the matrix with a $1$ in its $(i,j)$-entry and zeros elsewhere. Show that $\M_{2}$ is spanned by $E_{1,1}, E_{1,2}, E_{2,1}$, and $E_{2,2}$.}
% Trivial.
% More generally, m times n matrices spanned by mn "standard matrix units"

\exx[7]{Determine whether or not the polynomial $r(x) = x^2 - 3x - 4$ is in the span of the polynomials $p(x) = x^2 - x + 2$ and $q(x) = 2x^2 - 3x + 1$.}
% Set up system of linear equations to get r = 2q - 3p.

Our primary reason for being interested in spans is that the span of a set of vectors is always a subspace (and in fact, we will see shortly that every subspace can be written as the span of some vectors).

\begin{theorem}[Spans are Subspaces]
Let $\V$ be a vector space and let $B \subseteq \V$. Then $\mathrm{span}(B)$ is a subspace of $\V$.
\end{theorem}

\newpage

\begin{proof}
We just verify that the two defining properties of subspaces are satisfied:
\horlines{8}\vspace*{-1.3cm}
% Easy.
% (a) if you add two linear combinations, you get another linear combination.
% (b) if you multiply a linear combination by a scalar, it still is.
\end{proof}

\begin{definition}[Linear Dependence and Independence]
Let $\V$ be a vector space and let $B \subseteq \V$ be a set of vectors. Then $B$ is \textbf{linearly dependent} if there exist scalars $c_1$, $c_2$, $\ldots$, $c_k \in \mathbb{F}$, at least one of which is not zero, and \emph{distinct} vectors $\v_1$, $\v_2$, $\ldots$, $\v_k \in B$ such that
\begin{align*}
c_1\mathbf{v}_1 + c_2\mathbf{v}_2 + \cdots + c_k\mathbf{v}_k = \mathbf{0}.
\end{align*}
If $B$ is not linearly dependent then it is called \textbf{linearly independent}.
\end{definition}

There are a couple of different ways of looking at linear dependence and independence. For example:
\begin{itemize}
\item A set of vectors $\{\mathbf{v}_1,\mathbf{v}_2,\ldots,\mathbf{v}_k\}$ is linearly independent if and only if
\horlines{1}
\vspace*{-1in}
\begin{align*}
c_1\mathbf{v}_1 + c_2\mathbf{v}_2 + \cdots + c_k\mathbf{v}_k = \mathbf{0} \quad \text{implies} \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad
% c1 = c2 = ... = c_k = 0
\end{align*} \\[-2em]
\item A set of vectors $\{\v_1, \v_2, \ldots, \v_k\}$ is linearly dependent if and only if there exists a particular $j$ such that
\horlines{1}
\vspace*{-1in}
\begin{align*}
\mathbf{v}_j \text{ is a}\quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad \quad
% linear combination of other v_i's.
\end{align*} \\[-2em]
\end{itemize}

\newpage

In particular, a set of two vectors is linearly dependent if and only if one of the vectors is a scalar multiple of the other.
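Indeed, if $c_1\mathbf{v}_1 + c_2\mathbf{v}_2 = \mathbf{0}$ and (say) $c_1 \neq 0$, then we can rearrange to see that
\begin{align*}
\mathbf{v}_1 = -\frac{c_2}{c_1}\mathbf{v}_2,
\end{align*}
and conversely, if $\mathbf{v}_1 = c\mathbf{v}_2$ for some scalar $c$, then $\mathbf{v}_1 - c\mathbf{v}_2 = \mathbf{0}$ is a linear combination whose coefficients are not all zero (the coefficient of $\mathbf{v}_1$ is $1$).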
\exx[1]{Is the set of polynomials $\big\{\quad \quad \quad \quad \quad \quad \quad, \quad \quad \quad \quad \quad \quad \quad \big\}$ linearly dependent or independent?}
% Just two polynomials, so easy to see.

\exx[6]{Is the set of matrices $\Big\{\quad \quad \quad \quad \quad \quad, \quad \quad \quad \quad \quad \quad, \quad \quad \quad \quad \quad \quad \Big\}$ linearly dependent or independent?}
% Three matrices, so have to do some work here.

\exx[4]{Is the set of functions $\{\sin^2(x),\cos^2(x),\cos(2x)\} \subset \mathcal{F}$ linearly dependent or independent?}
% cos(2x) = cos^2(x) - sin^2(x)
% Thus dependent (but not easy to see!)

Roughly speaking, the reason that this final example did not reduce to something that we could just compute via ``plug and chug'' is that we do not have a nice basis of $\mathcal{F}$ to work with. This contrasts with the previous two examples (polynomials and matrices), where we do have nice bases, and we've been working with those nice bases already (perhaps without even realizing it). \\

\noindent We will talk about bases in depth next week!
\end{document}