\documentclass{article}
% Include macros here
\input{macros}
\usepackage{fancyhdr}
%\include{macros}
\usepackage{pifont}
% Number of problem sheet
\newcounter{problemSheetNumber}
\setcounter{problemSheetNumber}{8}
\newcommand{\matlabprob}{\ding{100} \ }
\newcommand{\examprob}{\ding{80} \ }
%\setcounter{section}{\theproblemSheetNumber}
%\renewcommand{\theparagraph}{(\thesection.\arabic{paragraph})}
\newcounter{problems}
\setcounter{problems}{3}
%\setlength{\parindent}{0cm}
\renewcommand{\problem}[1]{\paragraph{(\theproblemSheetNumber.\theproblems)}\addtocounter{problems}{1}\label{#1}}
\renewcommand{\solution}[1]{\paragraph{Solution (\theproblemSheetNumber.\theproblems)}\addtocounter{problems}{1}\label{#1}}
\setcounter{MaxMatrixCols}{20}
\pagestyle{fancy}
\lhead{MATH36061}
\chead{Convex Optimization}
\rhead{December 6, 2016}
\begin{document}
\begin{center}
{\Large {\bf Solutions to Part B of Problem Sheet \theproblemSheetNumber}}
\end{center}
\solution{pr:1} The largest eigenvalue of $\mtx{A}$ can be written as the maximum of the Rayleigh quotient,
\begin{equation*}
\lambda_{\mathrm{max}}(\mtx{A}) = \max_{\vct{v}\in \R^n, \ \vct{v}\neq \zerovct} \frac{\vct{v}^{\trans}\mtx{A}\vct{v}}{\vct{v}^{\trans}\vct{v}}.
\end{equation*}
Therefore, $t\geq \lambda_{\mathrm{max}}(\mtx{A})$ is equivalent to the statement that for all $\vct{v}\neq \zerovct$,
\begin{equation*}
t\geq \frac{\vct{v}^{\trans}\mtx{A}\vct{v}}{\vct{v}^{\trans}\vct{v}} \ \Leftrightarrow \ t\vct{v}^{\trans}\vct{v}-\vct{v}^{\trans}\mtx{A}\vct{v}\geq 0.
\end{equation*}
We can write $\vct{v}^{\trans}\vct{v}=\vct{v}^{\trans}\mtx{I}\vct{v}$, with the identity matrix $\mtx{I}$. Since the inequality on the right also holds trivially for $\vct{v}=\zerovct$, the above is equivalent to
\begin{equation*}
t\mtx{I}-\mtx{A}\succeq \zerovct.
\end{equation*}
The following semidefinite programming problem gives the largest eigenvalue:
\begin{align*}
\minimize & t\\
\subjto & t\mtx{I}-\mtx{A}\succeq \zerovct.
\end{align*}
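As a quick numerical illustration (not part of the solution itself), this SDP can be solved with the CVXPY modelling package in Python; the random test matrix and the comparison with \texttt{numpy.linalg.eigvalsh} below are assumptions made for the example.
\begin{verbatim}
import numpy as np
import cvxpy as cp

# Illustrative random symmetric test matrix (an assumption,
# not part of the problem statement).
n = 5
B = np.random.randn(n, n)
A = (B + B.T) / 2

# SDP: minimize t subject to t*I - A being positive semidefinite.
t = cp.Variable()
constraints = [t * np.eye(n) - A >> 0]
prob = cp.Problem(cp.Minimize(t), constraints)
prob.solve()

# The optimal value should match the largest eigenvalue of A.
print(prob.value, np.linalg.eigvalsh(A).max())
\end{verbatim}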
\solution{pr:2}
\begin{itemize}
\item[(a)] A symmetric matrix can be diagonalized by an orthogonal transformation, $\Sigma = \mtx{Q}^{\trans}\mtx{A}\mtx{Q}$, with $\mtx{Q}$ orthogonal and $\Sigma$ diagonal with the eigenvalues of $\mtx{A}$ on the diagonal. The trace is invariant under similarity transformations: by the cyclic property of the trace, $\mathrm{tr}(\Sigma) = \mathrm{tr}(\mtx{Q}^{\trans}\mtx{A}\mtx{Q}) = \mathrm{tr}(\mtx{A}\mtx{Q}\mtx{Q}^{\trans}) = \mathrm{tr}(\mtx{A})$, from which we get that the trace equals the sum of the eigenvalues.
\item[(b)] By Part (a), the problem is formally defined as
\begin{equation*}
\minimize \mtx{I}\bullet \mtx{X} \subjto x_{ij} = a_{ij} \text{ for } (i,j)\in \Omega, \quad \mtx{X}\succeq \zerovct.
\end{equation*}
We can formulate each of the constraints in the form $\mtx{A}_{ij} \bullet \mtx{X} = a_{ij}$, with $\mtx{A}_{ij}$ the matrix with a $1$ in entry $(i,j)$ and $0$ elsewhere; a numerical sketch of this primal problem appears after this list. The dual problem then takes the form
\begin{align*}
\maximize &\sum_{(i,j)\in \Omega} y_{ij}a_{ij} \\
\subjto & \sum_{(i,j)\in \Omega} y_{ij}\mtx{A}_{ij} +\mtx{S} = \mtx{I}\\
& \mtx{S}\succeq \zerovct.
\end{align*}
\end{itemize}
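As an illustration of the primal problem in part (b), the following minimal CVXPY sketch solves the trace-minimization completion; the matrix size, the observed index set $\Omega$, and the sample values are assumptions made for the example.
\begin{verbatim}
import numpy as np
import cvxpy as cp

# Illustrative data (assumptions, not from the problem):
# a few observed entries (i, j) -> a_ij of a symmetric matrix.
n = 4
observed = {(0, 0): 2.0, (1, 1): 3.0, (0, 1): 1.0, (2, 3): 0.5}

# Primal SDP: minimize tr(X) = I . X subject to
# X_ij = a_ij for (i, j) in Omega and X positive semidefinite.
X = cp.Variable((n, n), PSD=True)  # PSD=True enforces symmetry and X >= 0
constraints = [X[i, j] == a for (i, j), a in observed.items()]
prob = cp.Problem(cp.Minimize(cp.trace(X)), constraints)
prob.solve()

print("optimal trace:", prob.value)
print(np.round(X.value, 3))
\end{verbatim}
In CVXPY, the dual values attached to the equality constraints correspond to the variables $y_{ij}$ of the dual problem above.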
\end{document}