\documentclass{article}
\usepackage{fullpage}
\usepackage{amsfonts, framed}
\usepackage{amsmath}
\usepackage{amsthm}
%\usepackage{nopageno}
\usepackage[margin=2cm,top=2cm,footskip=1cm,nohead]{geometry}
\usepackage{setspace}
\newcommand*{\rowopswap}[2]{\ensuremath{R\sb{#1}\leftrightarrow R\sb{#2}}}
\newcommand*{\rowopmult}[2]{\ensuremath{#1R\sb{#2}}}
\newcommand*{\rowopadd}[3]{\ensuremath{#1R\sb{#2}+R\sb{#3}}}
\usepackage{wasysym}
\usepackage{multicol}
\usepackage{graphicx}
\usepackage{wrapfig}
\usepackage{subfigure}
\title{Proof Portfolio}
\author{Benjamin A. Sigmon}

\begin{document}
\maketitle
\newpage
Suppose that
$$
A=
\begin{pmatrix}
a&b\\
c&d
\end{pmatrix}
$$
is a $2 \times 2$ matrix where $ad - bc \neq 0$. Prove that $A$ is nonsingular.
\vspace{1 in}
\begin{proof}
Since $ad - bc \neq 0$, the products $ad$ and $bc$ cannot both be zero, so there are two cases to consider: 1) $ad \neq 0$ and 2) $bc \neq 0$. Suppose $ad \neq 0$, so that $a$ and $d$ are nonzero, and assume for now that $c \neq 0$. Note:
\begin{align*}
\begin{pmatrix}
a&b\\
c&d
\end{pmatrix}
\xrightarrow{\rowopmult{c}{1}}
\begin{pmatrix}
ac&bc\\
c&d
\end{pmatrix}
\xrightarrow{\rowopmult{a}{2}}
\begin{pmatrix}
ac&bc\\
ac&ad
\end{pmatrix}
\xrightarrow{\rowopadd{-1}{1}{2}}
\begin{pmatrix}
ac&bc\\
0&ad-bc
\end{pmatrix}
\xrightarrow{\rowopmult{1/c}{1}}\\
\begin{pmatrix}
a&b\\
0&ad-bc
\end{pmatrix}
\xrightarrow{\rowopmult{1/(ad-bc)}{2}}
\begin{pmatrix}
a&b\\
0&1
\end{pmatrix}
\xrightarrow{\rowopadd{-b}{2}{1}}
\begin{pmatrix}
a&0\\
0&1
\end{pmatrix}
\xrightarrow{\rowopmult{1/a}{1}}
\begin{pmatrix}
1&0\\
0&1
\end{pmatrix}
\end{align*}
The scaling of Row 2 by $1/(ad-bc)$ is valid precisely because $ad - bc \neq 0$. If instead $c = 0$, then $A$ is already upper triangular with nonzero diagonal entries $a$ and $d$, and the operations $\rowopmult{1/d}{2}$, $\rowopadd{-b}{2}{1}$, and $\rowopmult{1/a}{1}$ carry $A$ to the identity. The argument when $bc \neq 0$ is similar, except that Row 1 and Row 2 must be swapped once a zero appears in the first entry of Row 1. Since $A$ row-reduces to the $2 \times 2$ identity matrix, $A$ is nonsingular.
\end{proof}
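As an illustrative check of the argument (the specific entries are chosen only for the example), take $a=2$, $b=3$, $c=4$, $d=5$, so that $ad-bc=-2 \neq 0$. The same sequence of row operations carries $A$ to the identity:
\begin{align*}
\begin{pmatrix}
2&3\\
4&5
\end{pmatrix}
\xrightarrow{\rowopmult{4}{1}}
\begin{pmatrix}
8&12\\
4&5
\end{pmatrix}
\xrightarrow{\rowopmult{2}{2}}
\begin{pmatrix}
8&12\\
8&10
\end{pmatrix}
\xrightarrow{\rowopadd{-1}{1}{2}}
\begin{pmatrix}
8&12\\
0&-2
\end{pmatrix}
\xrightarrow{\rowopmult{1/4}{1}}\\
\begin{pmatrix}
2&3\\
0&-2
\end{pmatrix}
\xrightarrow{\rowopmult{-1/2}{2}}
\begin{pmatrix}
2&3\\
0&1
\end{pmatrix}
\xrightarrow{\rowopadd{-3}{2}{1}}
\begin{pmatrix}
2&0\\
0&1
\end{pmatrix}
\xrightarrow{\rowopmult{1/2}{1}}
\begin{pmatrix}
1&0\\
0&1
\end{pmatrix}
\end{align*}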
\newpage
Suppose that $A$ is an $m \times n$ matrix with a row where every entry is zero. Suppose that $B$ is an $n \times p$ matrix. Prove that $AB$ has a row where every entry is zero.
\vspace{1 in}
\begin{proof}
Let the $i$th row of the $m \times n$ matrix $A$ contain only zero entries, and let $B$ be an $n \times p$ matrix. Let $1 \leq j \leq p$. Note:
\begin{align*}
[AB]_{ij} &= \sum_{k=1}^{n}[A]_{ik}[B]_{kj}\\
&= \sum_{k=1}^{n}0[B]_{kj}\\
&= \sum_{k=1}^{n}0\\
&= 0
\end{align*}
Since this holds for every $1 \leq j \leq p$, the $i$th row of $AB$ has all zero entries.
\end{proof}
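As an illustrative example (with entries chosen only for the example), let
$$
A=
\begin{pmatrix}
1&2\\
0&0
\end{pmatrix},
\qquad
B=
\begin{pmatrix}
3&4\\
5&6
\end{pmatrix},
$$
so the second row of $A$ is zero. Then
$$
AB=
\begin{pmatrix}
1(3)+2(5)&1(4)+2(6)\\
0(3)+0(5)&0(4)+0(6)
\end{pmatrix}
=
\begin{pmatrix}
13&16\\
0&0
\end{pmatrix},
$$
and the second row of $AB$ is zero, as the proof predicts.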
\newpage
Prove property AAC of Theorem VSPCV. That is: If $\overrightarrow{\rm u}, \overrightarrow{\rm v}, \overrightarrow{\rm w} \in \mathbb{C}^{m}$, then
$\overrightarrow{\rm u} + (\overrightarrow{\rm v} + \overrightarrow{\rm w}) = (\overrightarrow{\rm u} + \overrightarrow{\rm v}) + \overrightarrow{\rm w}$.
\vspace{1 in}
\begin{proof}
Let $\overrightarrow{\rm u}, \overrightarrow{\rm v}, \overrightarrow{\rm w} \in \mathbb{C}^{m}$ and $1 \leq i \leq m$. Note:
\begin{align*}
[\overrightarrow{\rm u} + (\overrightarrow{\rm v} + \overrightarrow{\rm w})]_{i} & = [\overrightarrow{\rm u}]_{i} + [\overrightarrow{\rm v} + \overrightarrow{\rm w}]_{i}\\
& = [\overrightarrow{\rm u}]_{i} + ([\overrightarrow{\rm v}]_{i} + [\overrightarrow{\rm w}]_{i})\\
& = ([\overrightarrow{\rm u}]_{i} + [\overrightarrow{\rm v}]_{i}) + [\overrightarrow{\rm w}]_{i}\\
& = [\overrightarrow{\rm u} + \overrightarrow{\rm v}]_{i} + [\overrightarrow{\rm w}]_{i}\\
& = [(\overrightarrow{\rm u} + \overrightarrow{\rm v}) + \overrightarrow{\rm w}]_{i}
\end{align*}
The middle step uses associativity of addition in $\mathbb{C}$. Since the entries agree for every $1 \leq i \leq m$, the two vectors are equal, which shows associativity of vector addition.
\end{proof}
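For a concrete check in $\mathbb{C}^{2}$ (values chosen only for the example), let
$$
\overrightarrow{\rm u}=
\begin{pmatrix}
1+i\\
2
\end{pmatrix},
\qquad
\overrightarrow{\rm v}=
\begin{pmatrix}
3\\
-i
\end{pmatrix},
\qquad
\overrightarrow{\rm w}=
\begin{pmatrix}
0\\
1
\end{pmatrix}.
$$
Then
$$
\overrightarrow{\rm u} + (\overrightarrow{\rm v} + \overrightarrow{\rm w})=
\begin{pmatrix}
1+i\\
2
\end{pmatrix}
+
\begin{pmatrix}
3\\
1-i
\end{pmatrix}
=
\begin{pmatrix}
4+i\\
3-i
\end{pmatrix}
=
\begin{pmatrix}
4+i\\
2-i
\end{pmatrix}
+
\begin{pmatrix}
0\\
1
\end{pmatrix}
=(\overrightarrow{\rm u} + \overrightarrow{\rm v}) + \overrightarrow{\rm w}.
$$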
\newpage
Suppose that $S$ is a linearly independent set of vectors, and $T$ is a subset of $S$, that is, $T \subseteq S$. Prove that $T$ is a linearly independent set of vectors.
\vspace{1 in}
\begin{proof}
Suppose $S = (\overrightarrow{\rm u_{1}},\overrightarrow{\rm u_{2}},..., \overrightarrow{\rm u_{p}}, \overrightarrow{\rm v_{1}},\overrightarrow{\rm v_{2}},..., \overrightarrow{\rm v_{q}})$ and $T \subseteq S$ where $T = (\overrightarrow{\rm v_{1}},\overrightarrow{\rm v_{2}},..., \overrightarrow{\rm v_{q}})$. Let $\alpha_{i},\beta_{j} \in \mathbb{C}$ for $1 \leq i \leq p$ and $1 \leq j \leq q$. Since $S$ is linearly independent, the relation
$$
\sum_{i=1}^{p}\alpha_{i}\overrightarrow{\rm u_{i}} + \sum_{j=1}^{q}\beta_{j}\overrightarrow{\rm v_{j}} = \overrightarrow{\rm 0}
$$
holds only for $\alpha_{i}=0$ and $\beta_{j}=0$, by definition of linear independence. Now suppose
$$
\sum_{j=1}^{q}\beta_{j}\overrightarrow{\rm v_{j}} = \overrightarrow{\rm 0}
$$
is a relation of linear dependence on $T$. Taking $\alpha_{i}=0$ for each $i$ extends it to a relation on $S$, so $\beta_{j}=0$ for all $j$, showing $T$ to be linearly independent by definition. Thus, a subset of a linearly independent set is linearly independent.
\end{proof}
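As a small illustration (vectors chosen only for the example), the set
$$
S = \left(\begin{pmatrix}1\\0\\0\end{pmatrix}, \begin{pmatrix}0\\1\\0\end{pmatrix}, \begin{pmatrix}0\\0\\1\end{pmatrix}\right)
$$
is linearly independent in $\mathbb{C}^{3}$, and so is the subset $T = \left(\begin{pmatrix}1\\0\\0\end{pmatrix}, \begin{pmatrix}0\\1\\0\end{pmatrix}\right)$: the relation $\beta_{1}\begin{pmatrix}1\\0\\0\end{pmatrix} + \beta_{2}\begin{pmatrix}0\\1\\0\end{pmatrix} = \overrightarrow{\rm 0}$ forces $\beta_{1} = \beta_{2} = 0$.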
\newpage
Suppose that $T$ is a linearly dependent set of vectors, and $T \subseteq S$. Prove that $S$ is a linearly dependent set of vectors.
\vspace{1 in}
\begin{proof}
Suppose $T = (\overrightarrow{\rm v_{1}}, \overrightarrow{\rm v_{2}}, ... , \overrightarrow{\rm v_{q}})$ and that $T \subseteq S$. Since $T$ is linearly dependent, there exist scalars $\alpha_{i} \in \mathbb{C}$, $1 \leq i \leq q$, not all zero, such that:
$$
\sum_{i=1}^{q}\alpha_{i}\overrightarrow{\rm v_{i}} = \overrightarrow{\rm 0}
$$
Write $S = (\overrightarrow{\rm u_{1}}, \overrightarrow{\rm u_{2}}, ... , \overrightarrow{\rm u_{p}}, \overrightarrow{\rm v_{1}}, \overrightarrow{\rm v_{2}}, ... , \overrightarrow{\rm v_{q}})$. To show that $S$ is linearly dependent, we must exhibit a relation of linear dependence on $S$ with at least one nonzero scalar. Let $\beta_{j}=0$ for $1 \leq j \leq p$, where $\beta_{j} \in \mathbb{C}$. Then:
$$
\sum_{j=1}^{p}\beta_{j}\overrightarrow{\rm u}_{j} + \sum_{i=1}^{q}\alpha_{i}\overrightarrow{\rm v}_{i} = \overrightarrow{\rm 0}
$$
is a relation of linear dependence on $S$ in which some $\alpha_{i}$ is nonzero. Thus, by definition, $S$ is linearly dependent.
\end{proof}
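For instance (vectors chosen only for the example), $T = \left(\begin{pmatrix}1\\2\end{pmatrix}, \begin{pmatrix}2\\4\end{pmatrix}\right)$ is linearly dependent, since $2\begin{pmatrix}1\\2\end{pmatrix} - \begin{pmatrix}2\\4\end{pmatrix} = \overrightarrow{\rm 0}$. Any set containing $T$, such as $S = \left(\begin{pmatrix}0\\1\end{pmatrix}, \begin{pmatrix}1\\2\end{pmatrix}, \begin{pmatrix}2\\4\end{pmatrix}\right)$, is then linearly dependent via $0\begin{pmatrix}0\\1\end{pmatrix} + 2\begin{pmatrix}1\\2\end{pmatrix} - \begin{pmatrix}2\\4\end{pmatrix} = \overrightarrow{\rm 0}$.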
\newpage
Suppose that $U$ is a unitary matrix with eigenvalue $\lambda$. Prove that $\lambda$ has modulus 1, i.e. $\lambda\overline{\lambda} = 1$. (This says that all of the eigenvalues of a unitary matrix lie on the unit circle of the complex plane.)
\vspace{1 in}
\begin{proof}
Let $U$ be a unitary matrix of size $n$ with eigenvalue $\lambda \in \mathbb{C}$ and eigenvector $\overrightarrow{\rm x} \in \mathbb{C}^{n}$. Recall that an inner product of two vectors yields a scalar in $\mathbb{C}$. Note:
\begin{align*}
U\overrightarrow{\rm x} = &\lambda\overrightarrow{\rm x} &&\text{Definition EEM}
\end{align*}
Taking the adjoint of both sides, and recalling that the adjoint of a scalar is its conjugate, gives
$$
(U\overrightarrow{\rm x})^* = \overline{\lambda}\,\overrightarrow{\rm x}^*
$$
Now note:
\begin{align*}
\lambda\overline{\lambda}\langle \overrightarrow{\rm x}, \overrightarrow{\rm x} \rangle & = \overline{\lambda}\,\overrightarrow{\rm x}^*\lambda\overrightarrow{\rm x}\\
& = (U\overrightarrow{\rm x})^*(U\overrightarrow{\rm x})\\
& = \overrightarrow{\rm x}^*U^*U\overrightarrow{\rm x}\\
& = \overrightarrow{\rm x}^*I_{n}\overrightarrow{\rm x}\\
& = \langle \overrightarrow{\rm x}, \overrightarrow{\rm x} \rangle
\end{align*}
Since $\overrightarrow{\rm x}$ is an eigenvector, $\overrightarrow{\rm x} \neq \overrightarrow{\rm 0}$, so $\langle \overrightarrow{\rm x}, \overrightarrow{\rm x} \rangle \neq 0$. Dividing both sides by $\langle \overrightarrow{\rm x}, \overrightarrow{\rm x} \rangle$ gives $\lambda\overline{\lambda} = 1$. This shows that $\lambda$ has a modulus of 1, which is what we wanted to prove.
\end{proof}
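As an illustrative example (the matrix is chosen only for the example), the matrix
$$
U=
\begin{pmatrix}
0&-1\\
1&0
\end{pmatrix}
$$
is unitary, since $U^*U = I_2$. Its characteristic polynomial is $\lambda^2 + 1$, so its eigenvalues are $\lambda = i$ and $\lambda = -i$, and in both cases $\lambda\overline{\lambda} = 1$, placing them on the unit circle as the proof requires.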
\newpage
Suppose that $A$ is a square, singular matrix. Prove that the homogeneous system $\mathcal{LS}(A, \overrightarrow{\rm 0})$ has infinitely many solutions.
\vspace{1 in}
\begin{proof}
Suppose $A$ is a singular, square matrix. The system $\mathcal{LS}(A, \overrightarrow{\rm 0})$ is homogeneous, so it is consistent: the zero vector is always a solution. Since $A$ is singular, that is, not nonsingular, not every column of the reduced row-echelon form of $A$ is a pivot column, so the system has at least one free variable. A consistent system with a free variable has a solution for every choice of value of that free variable, which gives infinitely many solutions.
\end{proof}
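As an illustrative example (the matrix is chosen only for the example), the matrix
$$
A=
\begin{pmatrix}
1&2\\
2&4
\end{pmatrix}
$$
is singular, and row-reducing the homogeneous system $\mathcal{LS}(A, \overrightarrow{\rm 0})$ leaves the single equation $x_{1} + 2x_{2} = 0$ with $x_{2}$ free. Every choice $x_{2} = t$ gives a solution $\begin{pmatrix}-2t\\t\end{pmatrix}$, so there are infinitely many solutions.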
\newpage
Use Theorem EMP to prove part (2) of Theorem MMIM: If $A$ is an $m \times n$ matrix and $I_{m}$ is the identity matrix of size $m$, then $I_{m}A = A$.
\vspace{1 in}
\begin{proof}
Suppose $A$ is an $m \times n$ matrix and $I_{m}$ is the identity matrix of size $m$. For $1 \leq i \leq m$ and $1 \leq j \leq n$, Theorem EMP gives:
\begin{align*}
[I_{m}A]_{ij} & = \sum_{k=1}^m[I_{m}]_{ik}[A]_{kj}\\
& = [I_{m}]_{ii}[A]_{ij} + \sum_{k=1, k \neq i}^{m}[I_{m}]_{ik}[A]_{kj}\\
& = 1[A]_{ij} + \sum_{k=1, k \neq i}^{m}0[A]_{kj}\\
& = [A]_{ij} + 0\\
& = [A]_{ij}
\end{align*}
This shows that $I_{m}A = A$, so multiplying by the identity matrix on the left preserves the matrix.
\end{proof}
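As a quick illustration (entries chosen only for the example), with
$$
A=
\begin{pmatrix}
1&2&3\\
4&5&6
\end{pmatrix}
$$
the entry $[I_{2}A]_{21}$ is $0(1) + 1(4) = 4 = [A]_{21}$, and the same computation in every position gives $I_{2}A = A$.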
\newpage
Suppose that $A$ is a square matrix of size $n$ and $\alpha \in \mathbb{C}$ is a scalar. Prove that $\det(\alpha A) = \alpha^n \det(A)$.
\vspace{1 in}
\begin{proof}
Let $A$ be a square matrix of size $n$ and $\alpha \in \mathbb{C}$. Let $E_{i}(\alpha)$ denote the elementary matrix that multiplies row $i$ by $\alpha$. Multiplying every row of $A$ by $\alpha$ yields $\alpha A$, so we can write $\alpha A$ as follows:
$$\alpha A = E_{1}(\alpha)E_{2}(\alpha)\cdots E_{n}(\alpha)A$$
Note that $\det(E_{i}(\alpha)) = \alpha$ for each $i$ by Theorem DEM. Since the determinant of a product is the product of the determinants, taking the determinant of $\alpha A$ gives:
\begin{align*}
\det(\alpha A) & = \det(E_{1}(\alpha)E_{2}(\alpha)\cdots E_{n}(\alpha)A)\\
& = \det(E_{1}(\alpha))\det(E_{2}(\alpha))\cdots\det(E_{n}(\alpha))\det(A)\\
& = \alpha \cdot \alpha \cdots \alpha \cdot \det(A)\\
& = \alpha^n \det(A)
\end{align*}
This shows $\det(\alpha A) = \alpha^n \det(A)$.
\end{proof}
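As an illustrative check (the matrix and scalar are chosen only for the example), let $\alpha = 3$ and
$$
A=
\begin{pmatrix}
1&2\\
3&4
\end{pmatrix},
\qquad
\alpha A=
\begin{pmatrix}
3&6\\
9&12
\end{pmatrix}.
$$
Then $\det(A) = 1(4) - 2(3) = -2$ and $\det(\alpha A) = 3(12) - 6(9) = -18 = 3^{2}\det(A)$, in agreement with $\det(\alpha A) = \alpha^{n}\det(A)$ for $n = 2$.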
\end{document}