% Test bed for pgfplots figures: optimizer-comparison training curves on
% MNIST, a grid of MNIST sample digits, and a 3D "color channel stack"
% illustration.  Expects data files under Data/ (training logs as CSV with
% columns epoch/val_accuracy/val_loss, plus the mnist*.pdf / klammern_*.jpg
% images).
\documentclass{article}
\usepackage{pgfplots}
\usepackage{filecontents}
\usepackage{subcaption}
\usepackage{adjustbox}
\usepackage{xcolor}
\usepackage{tabu}     % NOTE(review): tabu is unmaintained; consider tabularx.
\usepackage{graphicx}
\usetikzlibrary{calc, 3d}
\begin{document}
\pgfplotsset{
  compat=1.11,
  % Collapse the legend's line sample to zero length (default sample spans
  % 0.6cm); presumably so the legend shows only colored marks/text without
  % the leading line segment.
  legend image code/.code={
    \draw[mark repeat=2,mark phase=2]
    plot coordinates {
      (0cm,0cm)
      (0.0cm,0cm) %% default is (0.3cm,0cm)
      (0.0cm,0cm) %% default is (0.6cm,0cm)
    };%
  }
}
\begin{figure}
  % Top panel: validation accuracy per epoch for three optimizers.
  \begin{subfigure}[b]{\textwidth}
    \begin{tikzpicture}
      \begin{axis}[tick style = {draw = none}, width = \textwidth,
          height = 0.7\textwidth, ymin = 0.92,
          legend style={at={(0.9825,0.75)},anchor=north east},
          xlabel = {epoch}, ylabel = {Classification Accuracy}]
        \addplot table
          [x=epoch, y=val_accuracy, col sep=comma, mark = none]
          {Data/adagrad.log};
        \addplot table
          [x=epoch, y=val_accuracy, col sep=comma, mark = none]
          {Data/adadelta.log};
        \addplot table
          [x=epoch, y=val_accuracy, col sep=comma, mark = none]
          {Data/adam.log};

        % Legend entries are matched to \addplot commands in order.
        % NOTE(review): a stray fourth entry SGD$_{0.01}$ was removed --
        % only three plots exist, so it had no matching curve.
        \addlegendentry{{\footnotesize ADAGRAD}}
        \addlegendentry{{\footnotesize ADADELTA}}
        \addlegendentry{{\footnotesize ADAM}}
      \end{axis}
    \end{tikzpicture}
    %\caption{Classification accuracy}
  \end{subfigure}
  % Bottom panel: validation loss per epoch for the same three optimizers.
  \begin{subfigure}[b]{\textwidth}
    \begin{tikzpicture}
      \begin{axis}[tick style = {draw = none}, width = \textwidth,
          height = 0.7\textwidth, ymax = 0.5,
          xlabel = {epoch}, ylabel = {Error Measure\vphantom{y}},
          % Extra tick at 0.45 keeps the grid line but its label is hidden
          % with \phantom so the axis labels stay uncluttered.
          ytick ={0,0.1,0.2,0.3,0.4,0.45,0.5},
          yticklabels = {0,0.1,0.2,0.3,0.4,\phantom{0.94},0.5}]
        \addplot table
          [x=epoch, y=val_loss, col sep=comma, mark = none] {Data/adagrad.log};
        \addplot table
          [x=epoch, y=val_loss, col sep=comma, mark = none] {Data/adadelta.log};
        \addplot table
          [x=epoch, y=val_loss, col sep=comma, mark = none] {Data/adam.log};

        \addlegendentry{{\footnotesize ADAGRAD}}
        \addlegendentry{{\footnotesize ADADELTA}}
        \addlegendentry{{\footnotesize ADAM}}
      \end{axis}
    \end{tikzpicture}
    \caption{Performance metrics during training}
  \end{subfigure}
  \par\medskip
  % Summary table of the final (epoch 20) metrics; values are placeholders.
  \begin{subfigure}[b]{1.0\linewidth}
    \begin{tabu} to \textwidth {@{} *3{X[c]}c*3{X[c]} @{}}
      \multicolumn{3}{c}{Classification Accuracy}
      &~&\multicolumn{3}{c}{Error Measure}
      \\\cline{1-3}\cline{5-7}
      ADAGRAD&ADADELTA&ADAM&&ADAGRAD&ADADELTA&ADAM
      \\\cline{1-3}\cline{5-7}
      1&1&1&&1&1&1
    \end{tabu}
    \caption{Performance metrics after 20 epochs}
  \end{subfigure}
  % TODO: replace ?? with a \ref to the network architecture figure/section.
  \caption{The neural network given in ?? trained with different
    algorithms on the MNIST handwritten digits data set. For gradient
    descent the learning rates 0.01, 0.05 and 0.1 are used (GD$_{
      \mathrm{rate}}$). For
    stochastic gradient descent a batch size of 32 and learning rate
    of 0.01 is used (SGD$_{0.01}$).}
\end{figure}
% Two rows of five sample digits (0-4, then 5-9) from the MNIST data set.
\begin{figure}[htbp]
  \centering
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Data/mnist0.pdf}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Data/mnist1.pdf}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Data/mnist2.pdf}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Data/mnist3.pdf}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Data/mnist4.pdf}
  \end{subfigure}\\
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Data/mnist5.pdf}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Data/mnist6.pdf}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Data/mnist7.pdf}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Data/mnist8.pdf}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Data/mnist9.pdf}
  \end{subfigure}
  \caption{The MNIST data set contains 70,000 images of preprocessed
    handwritten digits. Of these images 60,000 are used as training images,
    while the rest are used to validate the models trained.}
\end{figure}
% 3D stack of the R/G/B channel images next to the combined RGB image,
% drawn on slanted "canvas is xy plane" layers to suggest depth.
\begin{figure}
  \begin{adjustbox}{width=\textwidth}
    \begin{tikzpicture}
      \begin{scope}[x = (0:1cm), y=(90:1cm), z=(15:-0.5cm)]
        \node[canvas is xy plane at z=0, transform shape] at (0,0)
          {\includegraphics[width=5cm]{Data/klammern_r.jpg}};
        \node[canvas is xy plane at z=2, transform shape] at (0,-0.2)
          {\includegraphics[width=5cm]{Data/klammern_g.jpg}};
        \node[canvas is xy plane at z=4, transform shape] at (0,-0.4)
          {\includegraphics[width=5cm]{Data/klammern_b.jpg}};
        \node[canvas is xy plane at z=4, transform shape] at (-8,-0.2)
          {\includegraphics[width=5.3cm]{Data/klammern_rgb.jpg}};
      \end{scope}
    \end{tikzpicture}
  \end{adjustbox}
  \caption{On the right the red, green and blue channels of the picture
    are displayed. In order to better visualize the color channels the
    black and white picture of each channel has been colored in the
    respective color. Combining the layers results in the image on the
    left.}
\end{figure}
\end{document}
%%% Local Variables:
%%% mode: latex
%%% TeX-master: t
%%% End: