% Shorten the legend line samples to half the default length so six entries
% fit comfortably inside the axis area.
\pgfplotsset{
  compat=1.11,
  legend image code/.code={
    \draw[mark repeat=2,mark phase=2] plot coordinates {
      (0cm,0cm)
      (0.15cm,0cm) %% default is (0.3cm,0cm)
      (0.3cm,0cm)  %% default is (0.6cm,0cm)
    };%
  }
}
\begin{figure}
  % (a) Validation-accuracy curves, one per regularisation configuration,
  % each averaged over 5 training runs (see main caption).
  \begin{subfigure}[t]{\textwidth}
    \centering
    \begin{tikzpicture}
      \small
      \begin{axis}[
          legend cell align={left},
          yticklabel style={/pgf/number format/fixed,
                            /pgf/number format/precision=3},
          tick style={draw=none},
          width=0.975\textwidth,
          height=0.6\textwidth,
          ymin=0.988,
          legend style={at={(0.9825,0.0175)},anchor=south east},
          xlabel={epoch},
          ylabel={Classification Accuracy},
          cycle list/Dark2,
          every axis plot/.append style={line width=1.25pt},
        ]
        \addplot table [x=epoch, y=val_accuracy, col sep=comma, mark=none]
          {Figures/Data/adam_datagen_full_mean.log};
        \addplot table [x=epoch, y=val_accuracy, col sep=comma, mark=none]
          {Figures/Data/adam_datagen_dropout_02_full_mean.log};
        \addplot table [x=epoch, y=val_accuracy, col sep=comma, mark=none]
          {Figures/Data/adam_datagen_dropout_04_full_mean.log};
        \addplot table [x=epoch, y=val_accuracy, col sep=comma, mark=none]
          {Figures/Data/adam_dropout_02_full_mean.log};
        \addplot table [x=epoch, y=val_accuracy, col sep=comma, mark=none]
          {Figures/Data/adam_dropout_04_full_mean.log};
        % Baseline (no regularisation) drawn dashed to stand out.
        \addplot [dashed] table [x=epoch, y=val_accuracy, col sep=comma, mark=none]
          {Figures/Data/adam_full_mean.log};
        % Legend entries in the same order as the \addplot commands above.
        \addlegendentry{{\footnotesize G.}}
        \addlegendentry{{\footnotesize G. + D. 0.2}}
        \addlegendentry{{\footnotesize G. + D. 0.4}}
        \addlegendentry{{\footnotesize D. 0.2}}
        \addlegendentry{{\footnotesize D. 0.4}}
        \addlegendentry{{\footnotesize Default}}
      \end{axis}
    \end{tikzpicture}
    \caption{Classification accuracy}
    \vspace{.25cm}
  \end{subfigure}
  % (b) Summary statistics table; \Tstrut/\Bstrut are project strut macros
  % (defined elsewhere) that add vertical padding around rules.
  \begin{subfigure}[t]{\linewidth}
    \begin{tabu} to \textwidth {@{}lc*5{X[c]}@{}}
      \Tstrut \Bstrut & \textsc{\,Adam\,} & D. 0.2 & D. 0.4 & G.
                      & G.+D.\,0.2 & G.+D.\,0.4 \\
      \hline
      \multicolumn{7}{c}{Test Accuracy}\Bstrut \\
      \cline{2-7}
      mean \Tstrut & 0.9914 & 0.9923 & 0.9930 & 0.9937 & 0.9938 & 0.9943 \\
      max          & 0.9926 & 0.9930 & 0.9934 & 0.9946 & 0.9955 & 0.9956 \\
      min          & 0.9887 & 0.9909 & 0.9922 & 0.9929 & 0.9929 & 0.9934 \\
      \hline
      \multicolumn{7}{c}{Training Accuracy}\Bstrut \\
      \cline{2-7}
      mean \Tstrut & 0.9994 & 0.9991 & 0.9989 & 0.9967 & 0.9954 & 0.9926 \\
      max          & 0.9996 & 0.9996 & 0.9992 & 0.9979 & 0.9971 & 0.9937 \\
      min          & 0.9992 & 0.9990 & 0.9984 & 0.9947 & 0.9926 & 0.9908 \\
    \end{tabu}
    % Caption corrected: the table reports mean, maximum AND minimum values.
    \caption{Mean, maximum and minimum accuracy after 48 epochs of training.}
    \label{fig:gen_dropout_b}
  \end{subfigure}
  \caption[Performance comparison of overfitting measures]{Accuracy for the
    net given in ... with Dropout (D.), data generation (G.), a combination,
    or neither (Default) implemented and trained with \textsc{Adam}. For each
    epoch the 60,000 training samples were used, or for data generation
    10,000 steps with each using batches of 60 generated data points. For
    each configuration the model was trained 5 times and the average
    accuracies at each epoch are given in (a). Mean, maximum and minimum
    values of accuracy on the test and training set are given in (b).}
  \label{fig:gen_dropout}
\end{figure}

%%% Local Variables:
%%% mode: latex
%%% TeX-master: "../main"
%%% End: