Compare commits

5 Commits

| Author | SHA1 | Date |
|---|---|---|
| Tobias Arndt | 2b10ef56e3 | 4 years ago |
| Tobias Arndt | 2ef7cda1dd | 4 years ago |
| Tobias Arndt | 3bae82eaf9 | 4 years ago |
| Tobias Arndt | a498fb1a8c | 4 years ago |
| Tobias Arndt | e96331d072 | 4 years ago |
@@ -0,0 +1,52 @@
import breeze.plot._
import breeze.plot.DomainFunction._
import breeze.linalg._
import breeze.stats.distributions.Gaussian

// Training data: 16 equidistant points on [-pi, pi] with noisy sine values.
// Moved above their first use so the script runs top to bottom; the
// duplicate re-extraction of x_i/y_i from data has been merged into this
// single definition.
val x_i = Seq(-3.141592653589793, -2.722713633111154, -2.303834612632515, -1.8849555921538759, -1.4660765716752369, -1.0471975511965979, -0.6283185307179586, -0.2094395102393194, 0.2094395102393194, 0.6283185307179586, 1.0471975511965974, 1.4660765716752362, 1.8849555921538759, 2.3038346126325155, 2.7227136331111543, 3.1415926535897922)
val y_i = Seq(0.0802212608585366, -0.3759376368887911, -1.3264180339054117, -0.8971334213504949, -0.7724344034354425, -0.9501497164520739, -0.6224628757084738, -0.35622668982623207, -0.18377660088356823, 0.7836770998126841, 0.5874762732054489, 1.0696991264956026, 1.1297065441952743, 0.7587275382323738, -0.030547103790458163, 0.044327111895927106)
val data = x_i zip y_i

val nn = new RSNN(5000, 0.0000001)

val g = Gaussian(0, 0.3)

// Alternative: draw fresh noisy samples of sin on [-pi, pi].
//val data = EqSeq(-math.Pi, math.Pi, 15) map (t => (t, math.sin(t) + g.sample(1).last))

val (ws, evaluate) = nn.train(data, iter = 100000, lam = (1.0 / 20) / 5 * (nn.n * 8) * 1)

val f = Figure()
val p = f.subplot(0)
val x = linspace(-5, 5)
val y = x.map(evaluate)
//print_data(nn, x, y, 3)
p += plot(x, y)
p += scatter(data.map(_._1), data.map(_._2), _ => 0.1)
f.saveas("lines.png")

// Append the evaluated curve for one (n, lambda) configuration to a CSV.
def print_data(nn: RSNN, x: DenseVector[Double], y: DenseVector[Double], tlambda: Double): Unit = {
  val n = nn.n
  reflect.io.File("C:/Users/tobia/Documents/Studium/Masterarbeit/Outputs/scala_out_d_1.csv").appendAll(s"x_n_$n" + s"_tl_$tlambda;" + x.toArray.mkString(";") + "\n")
  reflect.io.File("C:/Users/tobia/Documents/Studium/Masterarbeit/Outputs/scala_out_d_1.csv").appendAll(s"y_n_$n" + s"_tl_$tlambda;" + y.toArray.mkString(";") + "\n")
}

// Dump the raw training data and the fitted curve for later plotting.
reflect.io.File("C:/Users/tobia/Documents/Studium/Masterarbeit/Outputs/data_sin_d.csv").appendAll(x_i.mkString(";") + "\n")
reflect.io.File("C:/Users/tobia/Documents/Studium/Masterarbeit/Outputs/data_sin_d.csv").appendAll(y_i.mkString(";") + "\n")

reflect.io.File("C:/Users/tobia/Documents/Studium/Masterarbeit/Outputs/vals1.csv").appendAll(x.toArray.mkString(";") + "\n")
reflect.io.File("C:/Users/tobia/Documents/Studium/Masterarbeit/Outputs/vals1.csv").appendAll(y.toArray.mkString(";") + "\n")

// Sweep the ridge parameter scaling (j) and the network size (i).
for (j <- List(0.1, 1, 3)) {
  for (i <- 3 until 4) {
    val nn = new RSNN((5 * math.pow(10, i)).toInt, 0.0000001)
    val (ws, evaluate) = nn.train(data, iter = 100000, lam = (1.0 / 20) / 5 * (nn.n * 8) * j)

    val x = linspace(-5, 5)
    val y = x.map(evaluate)
    print_data(nn, x, y, j)
  }
}
@@ -0,0 +1,621 @@
\section{Implementations}
This section provides the implementations of the models used.
The randomized shallow neural network used in Section~\ref{sec:conv} is
implemented in Scala. No pre-existing frameworks were used, in order to
ensure that the implementation conforms to the definitions used in
Theorem~\ref{theo:main1}.

The neural networks used in Section~\ref{sec:cnn} are implemented in
Python, using the Keras API provided by TensorFlow. TensorFlow is a
library containing highly efficient GPU implementations of a wide
variety of tensor operations, as well as the algorithms needed to
train neural networks.% (computing derivatives, updating parameters).

\vspace*{-0.5cm}
\begin{lstfloat}
\begin{lstlisting}[language=Scala]
import breeze.stats.distributions.Uniform
import breeze.stats.distributions.Gaussian
import scala.language.postfixOps

// ReLU activation and its derivative.
object Activation {
  def apply(x: Double): Double = math.max(0, x)

  def d(x: Double): Double = if (x > 0) 1 else 0
}

class RSNN(val n: Int, val gamma: Double = 0.001) {
  val g_unif = Uniform(-10, 10)
  val g_gauss = Gaussian(0, 5)

  // Randomly drawn (and subsequently fixed) hidden-layer parameters.
  val xis = g_unif.sample(n)
  val vs = g_gauss.sample(n)
  val bs = xis zip vs map { case (xi, v) => xi * v }

  def computeL1(x: Double) = (bs zip vs) map {
    case (b, v) => Activation(b + v * x) }

  def computeL2(l1: Seq[Double], ws: Seq[Double]): Double =
    (l1 zip ws) map { case (l, w) => w * l } sum

  def output(ws: Seq[Double])(x: Double): Double =
    computeL2(computeL1(x), ws)

  // One gradient step per training point on the ridge penalized
  // squared error.
  def learn(data: Seq[(Double, Double)], ws: Seq[Double],
            lam: Double, gamma: Double): Seq[Double] = {

    lazy val deltas = data.map {
      case (x, y) =>
        val l1 = computeL1(x)
        val out = computeL2(l1, ws)
        (l1 zip ws) map { case (l, w) => (l * 2 * (out - y) +
          lam * 2 * w) * gamma * -1 }
    }

    deltas.foldRight(ws)(
      (delta, ws) => ws zip delta map { case (w, d) => w + d })
  }

  def train(data: Seq[(Double, Double)], iter: Int, lam: Double,
            gamma: Double = gamma): (Seq[Double], Double => Double) = {

    val ws = (1 to iter).foldRight((1 to n).map(
      _ => 0.0): Seq[Double])((i, w) => {
        println(s"Training iteration $i")
        println(w.sum / w.length)
        learn(data, w, lam, gamma / 10)
      })
    (ws, output(ws))
  }
}
\end{lstlisting}
\caption{Scala code used to build and train the ridge penalized
  randomized shallow neural network in Section~\ref{sec:rsnn_sim}.}
% The parameter \textit{lam}
% in the train function represents the $\lambda$ parameter in the error
% function. The parameters \textit{n} and \textit{gamma} set the number
% of hidden nodes and the stepsize for training.
\label{lst:rsnn}
\end{lstfloat}
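The gradient step implemented in \texttt{learn} can be read as follows:
for each training point $(x_k, y_k)$ in turn, with $f_w$ denoting the
network output \texttt{output(ws)} and $\phi$ the ReLU activation (this
notation is chosen here for readability and does not appear in the
listing), every output weight is updated according to
\[
w_j \leftarrow w_j - \gamma \Big( 2 \big( f_w(x_k) - y_k \big)\,
\phi(b_j + v_j x_k) + 2 \lambda w_j \Big),
\]
i.e.\ plain gradient descent on the ridge penalized squared error with
step size $\gamma$, while the hidden parameters $(b_j, v_j)$ stay fixed
after their random initialization.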
\clearpage
\begin{lstfloat}
\begin{lstlisting}[language=iPython]
import tensorflow as tf
import numpy as np
from tensorflow.keras.callbacks import CSVLogger
from tensorflow.keras.preprocessing.image import ImageDataGenerator

mnist = tf.keras.datasets.mnist

(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(x_train.shape[0], 28, 28, 1)
x_train = x_train / 255.0
x_test = x_test.reshape(x_test.shape[0], 28, 28, 1)
x_test = x_test / 255.0

y_train = tf.keras.utils.to_categorical(y_train)
y_test = tf.keras.utils.to_categorical(y_test)

model = tf.keras.models.Sequential()
model.add(tf.keras.layers.Conv2D(24, kernel_size=5, padding='same',
          activation='relu', input_shape=(28,28,1)))
model.add(tf.keras.layers.MaxPool2D())
model.add(tf.keras.layers.Conv2D(64, kernel_size=5, padding='same',
          activation='relu'))
model.add(tf.keras.layers.MaxPool2D(padding='same'))
model.add(tf.keras.layers.Flatten())
model.add(tf.keras.layers.Dense(256, activation='relu'))
model.add(tf.keras.layers.Dropout(0.2))
model.add(tf.keras.layers.Dense(10, activation='softmax'))
model.compile(optimizer='adam', loss="categorical_crossentropy",
              metrics=["accuracy"])

datagen = ImageDataGenerator(
    rotation_range=30,
    zoom_range=0.15,
    width_shift_range=2,
    height_shift_range=2,
    shear_range=1)

csv_logger = CSVLogger(<Target File>)

history = model.fit(datagen.flow(x_train, y_train, batch_size=50),
                    validation_data=(x_test, y_test),
                    epochs=125, callbacks=[csv_logger],
                    steps_per_epoch=x_train.shape[0]//50)

\end{lstlisting}
\caption{Python code used to build and train the network modeling the
  MNIST handwritten digits data set.}
\label{lst:handwriting}
\end{lstfloat}
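As a brief usage sketch (not part of the committed scripts), the final
test performance of the trained model can be read off with the standard
Keras API; the file name below is a hypothetical example:
\begin{lstlisting}[language=iPython]
# Hypothetical follow-up, not in the repository:
test_loss, test_acc = model.evaluate(x_test, y_test, verbose=0)
print("test loss:", test_loss, "test accuracy:", test_acc)
model.save("mnist_cnn.h5")  # optionally persist the trained weights
\end{lstlisting}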
\clearpage
\begin{lstfloat}
\begin{lstlisting}[language=iPython]
import tensorflow as tf
import numpy as np
from tensorflow.keras.callbacks import CSVLogger
from tensorflow.keras.preprocessing.image import ImageDataGenerator

mnist = tf.keras.datasets.fashion_mnist

(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(x_train.shape[0], 28, 28, 1)
x_test = x_test.reshape(x_test.shape[0], 28, 28, 1)
x_train, x_test = x_train / 255.0, x_test / 255.0

y_train = tf.keras.utils.to_categorical(y_train)
y_test = tf.keras.utils.to_categorical(y_test)

model = tf.keras.Sequential()
model.add(tf.keras.layers.Conv2D(filters=32, kernel_size=(3, 3),
          activation='relu', input_shape=(28, 28, 1), padding='same'))
model.add(tf.keras.layers.Conv2D(filters=32, kernel_size=(2, 2),
          activation='relu', padding='same'))
model.add(tf.keras.layers.MaxPool2D(strides=(2,2)))
model.add(tf.keras.layers.Conv2D(filters=64, kernel_size=(3, 3),
          activation='relu', padding='same'))
model.add(tf.keras.layers.Conv2D(filters=64, kernel_size=(3, 3),
          activation='relu', padding='same'))
model.add(tf.keras.layers.MaxPool2D(strides=(2,2)))
model.add(tf.keras.layers.Flatten())
model.add(tf.keras.layers.Dense(256, activation='relu'))
model.add(tf.keras.layers.Dropout(0.2))
model.add(tf.keras.layers.Dense(10, activation='softmax'))

model.compile(optimizer=tf.keras.optimizers.Adam(lr=1e-3),
              loss="categorical_crossentropy", metrics=["accuracy"])

datagen = ImageDataGenerator(
    rotation_range=6,
    zoom_range=0.15,
    width_shift_range=2,
    height_shift_range=2,
    shear_range=0.15,
    fill_mode='constant',
    cval=0)

csv_logger = CSVLogger(<Target File>)

history = model.fit(datagen.flow(x_train, y_train, batch_size=30),
                    steps_per_epoch=x_train.shape[0]//30,
                    validation_data=(x_test, y_test),
                    epochs=125, callbacks=[csv_logger],
                    shuffle=True)

\end{lstlisting}
\caption[Python Code for Fashion MNIST]{Python code
  used to build and train the network modeling the fashion MNIST data set.}
\label{lst:fashion}
\end{lstfloat}
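To sanity-check the augmentation settings above, one can draw a single
augmented batch and plot it; the following snippet is a sketch and not
part of the repository:
\begin{lstlisting}[language=iPython]
# Hypothetical check, not in the repository:
import matplotlib.pyplot as plt

aug_x, aug_y = next(datagen.flow(x_train, y_train, batch_size=9))
for k in range(9):
    plt.subplot(3, 3, k + 1)
    plt.imshow(aug_x[k].reshape(28, 28), cmap='gray')
    plt.axis('off')
plt.show()
\end{lstlisting}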
\clearpage
\begin{lstfloat}
\begin{lstlisting}[language=iPython]
def get_random_sample(a, b, number_of_samples=10):
    x = []
    y = []
    for category_number in range(0, 10):
        # get all samples of a category
        train_data_category = a[b==category_number]
        # pick a number of random samples from the category
        train_data_category = train_data_category[np.random.randint(
            train_data_category.shape[0], size=number_of_samples), :]
        x.extend(train_data_category)
        y.append([category_number]*number_of_samples)

    return (np.asarray(x).reshape(-1, 28, 28, 1),
            np.asarray(y).reshape(10*number_of_samples, 1))
\end{lstlisting}
\caption{Python code used to generate the data sets containing a
  certain amount of random data points per class.}
\end{lstfloat}
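For illustration, a sampled subset is obtained by calling the function on
the raw, integer-labeled arrays, i.e.\ before the one-hot encoding applied
in the training scripts; the variable name \texttt{y\_train\_raw} is
hypothetical:
\begin{lstlisting}[language=iPython]
# Hypothetical usage, not in the repository; y_train_raw holds the
# integer labels returned by mnist.load_data():
x_small, y_small = get_random_sample(x_train, y_train_raw,
                                     number_of_samples=10)
y_small = tf.keras.utils.to_categorical(y_small)
\end{lstlisting}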

\section{Additional Comparisons}
\label{app:comp}
This section provides comparisons of the cross entropy loss and the
training accuracy for the models trained in Section~\ref{sec:smalldata}.
\begin{figure}[h]
\centering
\small
\begin{subfigure}[h]{\textwidth}
\begin{tikzpicture}
\begin{axis}[legend cell align={left},yticklabel style={/pgf/number format/fixed,
/pgf/number format/precision=3},tick style = {draw = none}, width = \textwidth,
height = 0.4\textwidth, legend style={at={(0.9825,0.0175)},anchor=south east},
xlabel = {Epoch}, ylabel = {Test Loss},
cycle list/Dark2, every axis plot/.append style={line width = 1.25pt}]
\addplot table
[x=epoch, y=val_loss, col sep=comma, mark = none]
{Figures/Data/adam_1.mean};
\addplot table
[x=epoch, y=val_loss, col sep=comma, mark = none]
{Figures/Data/adam_dropout_02_1.mean};
\addplot table
[x=epoch, y=val_loss, col sep=comma, mark = none]
{Figures/Data/adam_datagen_1.mean};
\addplot table
[x=epoch, y=val_loss, col sep=comma, mark = none]
{Figures/Data/adam_datagen_dropout_02_1.mean};

\addlegendentry{\footnotesize{Default}}
\addlegendentry{\footnotesize{D. 0.2}}
\addlegendentry{\footnotesize{G.}}
\addlegendentry{\footnotesize{G. + D. 0.2}}
\end{axis}
\end{tikzpicture}
\caption{1 Sample per Class}
\vspace{0.25cm}
\end{subfigure}
\begin{subfigure}[h]{\textwidth}
\begin{tikzpicture}
\begin{axis}[legend cell align={left},yticklabel style={/pgf/number format/fixed,
/pgf/number format/precision=3},tick style = {draw = none}, width = \textwidth,
height = 0.4\textwidth, legend style={at={(0.9825,0.0175)},anchor=south east},
xlabel = {Epoch}, ylabel = {Test Loss},
cycle list/Dark2, every axis plot/.append style={line width = 1.25pt}]
\addplot table
[x=epoch, y=val_loss, col sep=comma, mark = none]
{Figures/Data/adam_dropout_00_10.mean};
\addplot table
[x=epoch, y=val_loss, col sep=comma, mark = none]
{Figures/Data/adam_dropout_02_10.mean};
\addplot table
[x=epoch, y=val_loss, col sep=comma, mark = none]
{Figures/Data/adam_datagen_dropout_00_10.mean};
\addplot table
[x=epoch, y=val_loss, col sep=comma, mark = none]
{Figures/Data/adam_datagen_dropout_02_10.mean};

\addlegendentry{\footnotesize{Default}}
\addlegendentry{\footnotesize{D. 0.2}}
\addlegendentry{\footnotesize{G.}}
\addlegendentry{\footnotesize{G. + D. 0.2}}
\end{axis}
\end{tikzpicture}
\caption{10 Samples per Class}
\end{subfigure}
\begin{subfigure}[h]{\textwidth}
\begin{tikzpicture}
\begin{axis}[legend cell align={left},yticklabel style={/pgf/number format/fixed,
/pgf/number format/precision=3},tick style = {draw = none}, width = 0.9875\textwidth,
height = 0.4\textwidth, legend style={at={(0.9825,0.0175)},anchor=south east},
xlabel = {Epoch}, ylabel = {Test Loss},
cycle list/Dark2, every axis plot/.append style={line width = 1.25pt}]
\addplot table
[x=epoch, y=val_loss, col sep=comma, mark = none]
{Figures/Data/adam_dropout_00_100.mean};
\addplot table
[x=epoch, y=val_loss, col sep=comma, mark = none]
{Figures/Data/adam_dropout_02_100.mean};
\addplot table
[x=epoch, y=val_loss, col sep=comma, mark = none]
{Figures/Data/adam_datagen_dropout_00_100.mean};
\addplot table
[x=epoch, y=val_loss, col sep=comma, mark = none]
{Figures/Data/adam_datagen_dropout_02_100.mean};

\addlegendentry{\footnotesize{Default}}
\addlegendentry{\footnotesize{D. 0.2}}
\addlegendentry{\footnotesize{G.}}
\addlegendentry{\footnotesize{G. + D. 0.2}}
\end{axis}
\end{tikzpicture}
\caption{100 Samples per Class}
\vspace{.25cm}
\end{subfigure}
\caption[Mean Test Loss for Subsets of MNIST Handwritten
Digits]{Mean test cross entropy loss of the models fitting the
sampled subsets of MNIST handwritten digits over the 125 epochs of
training.}
\end{figure}

\begin{figure}[h]
\centering
\small
\begin{subfigure}[h]{\textwidth}
\begin{tikzpicture}
\begin{axis}[legend cell align={left},yticklabel style={/pgf/number format/fixed,
/pgf/number format/precision=3},tick style = {draw = none}, width = \textwidth,
height = 0.4\textwidth, legend style={at={(0.9825,0.0175)},anchor=south east},
xlabel = {Epoch}, ylabel = {Test Loss},
cycle list/Dark2, every axis plot/.append style={line width = 1.25pt}]
\addplot table
[x=epoch, y=val_loss, col sep=comma, mark = none]
{Figures/Data/fashion_dropout_0_1.mean};
\addplot table
[x=epoch, y=val_loss, col sep=comma, mark = none]
{Figures/Data/fashion_dropout_2_1.mean};
\addplot table
[x=epoch, y=val_loss, col sep=comma, mark = none]
{Figures/Data/fashion_datagen_dropout_0_1.mean};
\addplot table
[x=epoch, y=val_loss, col sep=comma, mark = none]
{Figures/Data/fashion_datagen_dropout_2_1.mean};

\addlegendentry{\footnotesize{Default}}
\addlegendentry{\footnotesize{D. 0.2}}
\addlegendentry{\footnotesize{G.}}
\addlegendentry{\footnotesize{G. + D. 0.2}}
\end{axis}
\end{tikzpicture}
\caption{1 Sample per Class}
\vspace{0.25cm}
\end{subfigure}
\begin{subfigure}[h]{\textwidth}
\begin{tikzpicture}
\begin{axis}[legend cell align={left},yticklabel style={/pgf/number format/fixed,
/pgf/number format/precision=3},tick style = {draw = none}, width = \textwidth,
height = 0.4\textwidth, legend style={at={(0.9825,0.0175)},anchor=south east},
xlabel = {Epoch}, ylabel = {Test Loss},
cycle list/Dark2, every axis plot/.append style={line width = 1.25pt}, ymin = {0.62}]
\addplot table
[x=epoch, y=val_loss, col sep=comma, mark = none]
{Figures/Data/fashion_dropout_0_10.mean};
\addplot table
[x=epoch, y=val_loss, col sep=comma, mark = none]
{Figures/Data/fashion_dropout_2_10.mean};
\addplot table
[x=epoch, y=val_loss, col sep=comma, mark = none]
{Figures/Data/fashion_datagen_dropout_0_10.mean};
\addplot table
[x=epoch, y=val_loss, col sep=comma, mark = none]
{Figures/Data/fashion_datagen_dropout_2_10.mean};

\addlegendentry{\footnotesize{Default}}
\addlegendentry{\footnotesize{D. 0.2}}
\addlegendentry{\footnotesize{G.}}
\addlegendentry{\footnotesize{G. + D. 0.2}}
\end{axis}
\end{tikzpicture}
\caption{10 Samples per Class}
\end{subfigure}
\begin{subfigure}[h]{\textwidth}
\begin{tikzpicture}
\begin{axis}[legend cell align={left},yticklabel style={/pgf/number format/fixed,
/pgf/number format/precision=3},tick style = {draw = none}, width = 0.9875\textwidth,
height = 0.4\textwidth, legend style={at={(0.9825,0.0175)},anchor=south east},
xlabel = {Epoch}, ylabel = {Test Loss},
cycle list/Dark2, every axis plot/.append style={line width = 1.25pt}]
\addplot table
[x=epoch, y=val_loss, col sep=comma, mark = none]
{Figures/Data/fashion_dropout_0_100.mean};
\addplot table
[x=epoch, y=val_loss, col sep=comma, mark = none]
{Figures/Data/fashion_dropout_2_100.mean};
\addplot table
[x=epoch, y=val_loss, col sep=comma, mark = none]
{Figures/Data/fashion_datagen_dropout_0_100.mean};
\addplot table
[x=epoch, y=val_loss, col sep=comma, mark = none]
{Figures/Data/fashion_datagen_dropout_2_100.mean};

\addlegendentry{\footnotesize{Default}}
\addlegendentry{\footnotesize{D. 0.2}}
\addlegendentry{\footnotesize{G.}}
\addlegendentry{\footnotesize{G. + D. 0.2}}
\end{axis}
\end{tikzpicture}
\caption{100 Samples per Class}
\vspace{.25cm}
\end{subfigure}
\caption[Mean Test Loss for Subsets of Fashion MNIST]{Mean
test cross entropy loss of the models fitting the sampled subsets
of fashion MNIST over the 125 epochs of training.}
\end{figure}

\begin{figure}[h]
\centering
\small
\begin{subfigure}[h]{\textwidth}
\begin{tikzpicture}
\begin{axis}[legend cell align={left},yticklabel style={/pgf/number format/fixed,
/pgf/number format/precision=3},tick style = {draw = none}, width = \textwidth,
height = 0.4\textwidth, legend style={at={(0.9825,0.0175)},anchor=south east},
xlabel = {Epoch}, ylabel = {Training Accuracy},
cycle list/Dark2, every axis plot/.append style={line width = 1.25pt}]
\addplot table
[x=epoch, y=accuracy, col sep=comma, mark = none]
{Figures/Data/adam_1.mean};
\addplot table
[x=epoch, y=accuracy, col sep=comma, mark = none]
{Figures/Data/adam_dropout_02_1.mean};
\addplot table
[x=epoch, y=accuracy, col sep=comma, mark = none]
{Figures/Data/adam_datagen_1.mean};
\addplot table
[x=epoch, y=accuracy, col sep=comma, mark = none]
{Figures/Data/adam_datagen_dropout_02_1.mean};

\addlegendentry{\footnotesize{Default}}
\addlegendentry{\footnotesize{D. 0.2}}
\addlegendentry{\footnotesize{G.}}
\addlegendentry{\footnotesize{G. + D. 0.2}}
\end{axis}
\end{tikzpicture}
\caption{1 Sample per Class}
\vspace{0.25cm}
\end{subfigure}
\begin{subfigure}[h]{\textwidth}
\begin{tikzpicture}
\begin{axis}[legend cell align={left},yticklabel style={/pgf/number format/fixed,
/pgf/number format/precision=3},tick style = {draw = none}, width = \textwidth,
height = 0.4\textwidth, legend style={at={(0.9825,0.0175)},anchor=south east},
xlabel = {Epoch}, ylabel = {Training Accuracy},
cycle list/Dark2, every axis plot/.append style={line width = 1.25pt}]
\addplot table
[x=epoch, y=accuracy, col sep=comma, mark = none]
{Figures/Data/adam_dropout_00_10.mean};
\addplot table
[x=epoch, y=accuracy, col sep=comma, mark = none]
{Figures/Data/adam_dropout_02_10.mean};
\addplot table
[x=epoch, y=accuracy, col sep=comma, mark = none]
{Figures/Data/adam_datagen_dropout_00_10.mean};
\addplot table
[x=epoch, y=accuracy, col sep=comma, mark = none]
{Figures/Data/adam_datagen_dropout_02_10.mean};

\addlegendentry{\footnotesize{Default}}
\addlegendentry{\footnotesize{D. 0.2}}
\addlegendentry{\footnotesize{G.}}
\addlegendentry{\footnotesize{G. + D. 0.2}}
\end{axis}
\end{tikzpicture}
\caption{10 Samples per Class}
\end{subfigure}
\begin{subfigure}[h]{\textwidth}
\begin{tikzpicture}
\begin{axis}[legend cell align={left},yticklabel style={/pgf/number format/fixed,
/pgf/number format/precision=3},tick style = {draw = none}, width = 0.9875\textwidth,
height = 0.4\textwidth, legend style={at={(0.9825,0.0175)},anchor=south east},
xlabel = {Epoch}, ylabel = {Training Accuracy},
cycle list/Dark2, every axis plot/.append style={line width = 1.25pt}, ymin = {0.92}]
\addplot table
[x=epoch, y=accuracy, col sep=comma, mark = none]
{Figures/Data/adam_dropout_00_100.mean};
\addplot table
[x=epoch, y=accuracy, col sep=comma, mark = none]
{Figures/Data/adam_dropout_02_100.mean};
\addplot table
[x=epoch, y=accuracy, col sep=comma, mark = none]
{Figures/Data/adam_datagen_dropout_00_100.mean};
\addplot table
[x=epoch, y=accuracy, col sep=comma, mark = none]
{Figures/Data/adam_datagen_dropout_02_100.mean};

\addlegendentry{\footnotesize{Default}}
\addlegendentry{\footnotesize{D. 0.2}}
\addlegendentry{\footnotesize{G.}}
\addlegendentry{\footnotesize{G. + D. 0.2}}
\end{axis}
\end{tikzpicture}
\caption{100 Samples per Class}
\vspace{.25cm}
\end{subfigure}
\caption[Mean Training Accuracies for Subsets of MNIST Handwritten
Digits]{Mean training accuracies of the models fitting the sampled
subsets of MNIST handwritten digits over the 125 epochs of training.}
\end{figure}

\begin{figure}[h]
\centering
\small
\begin{subfigure}[h]{\textwidth}
\begin{tikzpicture}
\begin{axis}[legend cell align={left},yticklabel style={/pgf/number format/fixed,
/pgf/number format/precision=3},tick style = {draw = none}, width = \textwidth,
height = 0.4\textwidth, legend style={at={(0.9825,0.0175)},anchor=south east},
xlabel = {Epoch}, ylabel = {Training Accuracy},
cycle list/Dark2, every axis plot/.append style={line width = 1.25pt}]
\addplot table
[x=epoch, y=accuracy, col sep=comma, mark = none]
{Figures/Data/fashion_dropout_0_1.mean};
\addplot table
[x=epoch, y=accuracy, col sep=comma, mark = none]
{Figures/Data/fashion_dropout_2_1.mean};
\addplot table
[x=epoch, y=accuracy, col sep=comma, mark = none]
{Figures/Data/fashion_datagen_dropout_0_1.mean};
\addplot table
[x=epoch, y=accuracy, col sep=comma, mark = none]
{Figures/Data/fashion_datagen_dropout_2_1.mean};

\addlegendentry{\footnotesize{Default}}
\addlegendentry{\footnotesize{D. 0.2}}
\addlegendentry{\footnotesize{G.}}
\addlegendentry{\footnotesize{G. + D. 0.2}}
\end{axis}
\end{tikzpicture}
\caption{1 Sample per Class}
\vspace{0.25cm}
\end{subfigure}
\begin{subfigure}[h]{\textwidth}
\begin{tikzpicture}
\begin{axis}[legend cell align={left},yticklabel style={/pgf/number format/fixed,
/pgf/number format/precision=3},tick style = {draw = none}, width = \textwidth,
height = 0.4\textwidth, legend style={at={(0.9825,0.0175)},anchor=south east},
xlabel = {Epoch}, ylabel = {Training Accuracy},
cycle list/Dark2, every axis plot/.append style={line width = 1.25pt}, ymin = {0.62}]
\addplot table
[x=epoch, y=accuracy, col sep=comma, mark = none]
{Figures/Data/fashion_dropout_0_10.mean};
\addplot table
[x=epoch, y=accuracy, col sep=comma, mark = none]
{Figures/Data/fashion_dropout_2_10.mean};
\addplot table
[x=epoch, y=accuracy, col sep=comma, mark = none]
{Figures/Data/fashion_datagen_dropout_0_10.mean};
\addplot table
[x=epoch, y=accuracy, col sep=comma, mark = none]
{Figures/Data/fashion_datagen_dropout_2_10.mean};

\addlegendentry{\footnotesize{Default}}
\addlegendentry{\footnotesize{D. 0.2}}
\addlegendentry{\footnotesize{G.}}
\addlegendentry{\footnotesize{G. + D. 0.2}}
\end{axis}
\end{tikzpicture}
\caption{10 Samples per Class}
\end{subfigure}
\begin{subfigure}[h]{\textwidth}
\begin{tikzpicture}
\begin{axis}[legend cell align={left},yticklabel style={/pgf/number format/fixed,
/pgf/number format/precision=3},tick style = {draw = none}, width = 0.9875\textwidth,
height = 0.4\textwidth, legend style={at={(0.9825,0.0175)},anchor=south east},
xlabel = {Epoch}, ylabel = {Training Accuracy},
cycle list/Dark2, every axis plot/.append style={line width = 1.25pt}]
\addplot table
[x=epoch, y=accuracy, col sep=comma, mark = none]
{Figures/Data/fashion_dropout_0_100.mean};
\addplot table
[x=epoch, y=accuracy, col sep=comma, mark = none]
{Figures/Data/fashion_dropout_2_100.mean};
\addplot table
[x=epoch, y=accuracy, col sep=comma, mark = none]
{Figures/Data/fashion_datagen_dropout_0_100.mean};
\addplot table
[x=epoch, y=accuracy, col sep=comma, mark = none]
{Figures/Data/fashion_datagen_dropout_2_100.mean};

\addlegendentry{\footnotesize{Default}}
\addlegendentry{\footnotesize{D. 0.2}}
\addlegendentry{\footnotesize{G.}}
\addlegendentry{\footnotesize{G. + D. 0.2}}
\end{axis}
\end{tikzpicture}
\caption{100 Samples per Class}
\vspace{.25cm}
\end{subfigure}
\caption[Mean Training Accuracies for Subsets of Fashion MNIST]{Mean
training accuracies of the models fitting the sampled subsets of fashion
MNIST over the 125 epochs of training.}
\end{figure}

%%% Local Variables:
%%% mode: latex
%%% TeX-master: "main"
%%% End:
@@ -1,17 +0,0 @@
x,y
-3.141592653589793,0.0802212608585366
-2.722713633111154,-0.3759376368887911
-2.303834612632515,-1.3264180339054117
-1.8849555921538759,-0.8971334213504949
-1.4660765716752369,-0.7724344034354425
-1.0471975511965979,-0.9501497164520739
-0.6283185307179586,-0.6224628757084738
-0.2094395102393194,-0.35622668982623207
0.2094395102393194,-0.18377660088356823
0.6283185307179586,0.7836770998126841
1.0471975511965974,0.5874762732054489
1.4660765716752362,1.0696991264956026
1.8849555921538759,1.1297065441952743
2.3038346126325155,0.7587275382323738
2.7227136331111543,-0.030547103790458163
3.1415926535897922,0.044327111895927106
6 file diffs suppressed because they are too large
@@ -1,58 +0,0 @@
datagen_dropout_02_1
test
0.6604& 0.5175& 0.60136& 0.002348447

datagen_dropout_00_1
test
0.6704& 0.4878& 0.58621& 0.003600539

dropout_02_1
test
0.5312& 0.4224& 0.47137& 0.001175149

default_1
test
0.5633& 0.3230& 0.45702& 0.004021449

datagen_dropout_02_10
test
0.9441& 0.9061& 0.92322& 0.00015
train
1& 0.97& 0.989& 1e-04

datagen_dropout_00_10
test
0.931& 0.9018& 0.9185& 6e-05
train
1& 0.97& 0.99& 0.00013

dropout_02_10
test
0.9423& 0.9081& 0.92696& 0.00013
train
1& 0.99& 0.992& 2e-05

default_10
test
0.8585& 0.8148& 0.83771& 0.00027
train
1& 1& 1& 0

datagen_dropout_02_100
test
0.9805& 0.9727& 0.97826& 0
train

datagen_dropout_00_100
test
0.981& 0.9702& 0.9769& 1e-05
train

dropout_02_100
test
0.9796& 0.9719& 0.97703& 1e-05
train

default_100
test
0.9637& 0.9506& 0.95823& 2e-05
2 file diffs suppressed because they are too large

@@ -1,101 +0,0 @@
x_n_5000_tl_0.1,y_n_5000_tl_0.1,x_n_5000_tl_1.0,y_n_5000_tl_1.0,x_n_5000_tl_3.0,y_n_5000_tl_3.0
-5.0,1.794615305950707,-5.0,0.3982406589003759,-5.0,-0.4811539502118497
-4.898989898989899,1.6984389486364895,-4.898989898989899,0.35719218031912614,-4.898989898989899,-0.48887996302459025
-4.797979797979798,1.6014200743009022,-4.797979797979798,0.3160182633093358,-4.797979797979798,-0.4966732473871599
-4.696969696969697,1.5040575427157106,-4.696969696969697,0.27464978660531225,-4.696969696969697,-0.5045073579233731
-4.595959595959596,1.4061194142774731,-4.595959595959596,0.23293440418365288,-4.595959595959596,-0.5123589845230747
-4.494949494949495,1.3072651356075136,-4.494949494949495,0.19100397829173557,-4.494949494949495,-0.5202738824510786
-4.393939393939394,1.2078259346207492,-4.393939393939394,0.1488314515422353,-4.393939393939394,-0.5282281154332915
-4.292929292929293,1.1079271590765678,-4.292929292929293,0.10646618526238515,-4.292929292929293,-0.536250283913464
-4.191919191919192,1.0073183089866045,-4.191919191919192,0.0637511521454329,-4.191919191919192,-0.5443068679044686
-4.090909090909091,0.9064682044248323,-4.090909090909091,0.020965778107027506,-4.090909090909091,-0.5524049731989601
-3.9898989898989896,0.805095064694333,-3.9898989898989896,-0.02200882631350869,-3.9898989898989896,-0.5605562335116703
-3.888888888888889,0.7032463151196859,-3.888888888888889,-0.06548644224881082,-3.888888888888889,-0.5687680272492979
-3.787878787878788,0.6007843964001714,-3.787878787878788,-0.10914135786185346,-3.787878787878788,-0.5770307386196555
-3.686868686868687,0.4978572358270573,-3.686868686868687,-0.15292201515712506,-3.686868686868687,-0.5853131654059709
-3.5858585858585856,0.39465522349482535,-3.5858585858585856,-0.19694472820060063,-3.5858585858585856,-0.593636189078738
-3.484848484848485,0.29091175104318323,-3.484848484848485,-0.24139115547918963,-3.484848484848485,-0.6019914655156898
-3.383838383838384,0.1868284306918275,-3.383838383838384,-0.28617728400089926,-3.383838383838384,-0.6103823599700093
-3.282828282828283,0.0817944681090728,-3.282828282828283,-0.33119615483860937,-3.282828282828283,-0.6188088888423856
-3.1818181818181817,-0.023670753859105602,-3.1818181818181817,-0.3764480559542342,-3.1818181818181817,-0.6272515625106694
-3.080808080808081,-0.1299349094939808,-3.080808080808081,-0.42202262988259276,-3.080808080808081,-0.6357221532633648
-2.9797979797979797,-0.2360705715363967,-2.9797979797979797,-0.467584017465408,-2.9797979797979797,-0.6440454918766952
-2.878787878787879,-0.34125419448980393,-2.878787878787879,-0.5126079284225549,-2.878787878787879,-0.65203614244987
-2.7777777777777777,-0.443504036212927,-2.7777777777777777,-0.5569084060463078,-2.7777777777777777,-0.6594896031012563
-2.676767676767677,-0.5411482698953787,-2.676767676767677,-0.6002683604183435,-2.676767676767677,-0.6661215834468585
-2.5757575757575757,-0.6363089624800997,-2.5757575757575757,-0.6396725440402657,-2.5757575757575757,-0.6715398637661353
-2.474747474747475,-0.725241414197713,-2.474747474747475,-0.6753456416248385,-2.474747474747475,-0.674565545688341
-2.3737373737373737,-0.8010191169999671,-2.3737373737373737,-0.7066964605752718,-2.3737373737373737,-0.6765307025278043
-2.272727272727273,-0.8626605255789729,-2.272727272727273,-0.7348121862404637,-2.272727272727273,-0.6766187567521622
-2.1717171717171717,-0.911435840482434,-2.1717171717171717,-0.7592451818361001,-2.1717171717171717,-0.6747200340049733
-2.070707070707071,-0.9518228090965052,-2.070707070707071,-0.7755022118880182,-2.070707070707071,-0.6711535886166349
-1.9696969696969697,-0.9791642715505677,-1.9696969696969697,-0.7889078495544403,-1.9696969696969697,-0.6653309071624213
-1.868686868686869,-0.9959505678135467,-1.868686868686869,-0.7978655263590677,-1.868686868686869,-0.6574048849245917
-1.7676767676767677,-1.0042572630521163,-1.7676767676767677,-0.8024926242661324,-1.7676767676767677,-0.6465258005011485
-1.6666666666666665,-1.0031374573437621,-1.6666666666666665,-0.8024786300118695,-1.6666666666666665,-0.6326231142587367
-1.5656565656565657,-0.9924082586558415,-1.5656565656565657,-0.7967021619463882,-1.5656565656565657,-0.6166476676023103
-1.4646464646464645,-0.9734669180157094,-1.4646464646464645,-0.7849942222838879,-1.4646464646464645,-0.5979735104135664
-1.3636363636363638,-0.9509454078185711,-1.3636363636363638,-0.7662349774950723,-1.3636363636363638,-0.5774876452737464
-1.2626262626262625,-0.9231872651397443,-1.2626262626262625,-0.7433085627087517,-1.2626262626262625,-0.554712230754877
-1.1616161616161618,-0.8903321986477033,-1.1616161616161618,-0.7150493507052204,-1.1616161616161618,-0.5295933185437713
-1.0606060606060606,-0.8533989447900909,-1.0606060606060606,-0.6814643745239313,-1.0606060606060606,-0.5021785239088743
-0.9595959595959593,-0.8107636317978494,-0.9595959595959593,-0.6421615608115637,-0.9595959595959593,-0.472606158673678
-0.858585858585859,-0.7612745578549842,-0.858585858585859,-0.5973114244123007,-0.858585858585859,-0.4405007246413654
-0.7575757575757578,-0.7079734098301842,-0.7575757575757578,-0.5483264663676062,-0.7575757575757578,-0.4059991890198415
-0.6565656565656566,-0.6488963804386183,-0.6565656565656566,-0.49554278063844803,-0.6565656565656566,-0.3695525928005769
-0.5555555555555554,-0.5859222961089965,-0.5555555555555554,-0.4403758682478846,-0.5555555555555554,-0.33111757514282614
-0.45454545454545503,-0.5162955936688821,-0.45454545454545503,-0.38037108381900747,-0.45454545454545503,-0.28897806883385513
-0.3535353535353538,-0.4413321076045784,-0.3535353535353538,-0.31690399361617216,-0.3535353535353538,-0.24421776219711205
-0.2525252525252526,-0.3616414699818406,-0.2525252525252526,-0.25204481791119354,-0.2525252525252526,-0.19795939679257332
-0.15151515151515138,-0.2780916794094584,-0.15151515151515138,-0.18575713332565263,-0.15151515151515138,-0.15066195015784248
-0.050505050505050164,-0.18977454284683343,-0.050505050505050164,-0.11797643773197505,-0.050505050505050164,-0.10274021898431054
0.050505050505050164,-0.0969321739577506,0.050505050505050164,-0.049351343645831554,0.050505050505050164,-0.05414525935109969
0.15151515151515138,-4.4802289442360816E-4,0.15151515151515138,0.019464788799119597,0.15151515151515138,-0.005354051541524688
0.2525252525252526,0.09918485823776255,0.2525252525252526,0.08804193897553166,0.2525252525252526,0.0433816826222638
0.3535353535353538,0.1998735386668185,0.3535353535353538,0.15569793996298523,0.3535353535353538,0.09176342956997338
0.45454545454545414,0.2999169047201809,0.45454545454545414,0.2218157527002848,0.45454545454545414,0.13952481930457306
0.5555555555555554,0.3978204122760816,0.5555555555555554,0.2846069052305317,0.5555555555555554,0.18668380673527113
0.6565656565656566,0.49120659266814587,0.6565656565656566,0.34467300454040606,0.6565656565656566,0.23277011860523958
0.7575757575757578,0.5777980409414698,0.7575757575757578,0.40208229496894643,0.7575757575757578,0.27613740421328176
0.8585858585858581,0.6568213676446025,0.8585858585858581,0.45705882493784666,0.8585858585858581,0.316305372116494
0.9595959595959593,0.7305067401293432,0.9595959595959593,0.5066458373898202,0.9595959595959593,0.35343427932594923
1.0606060606060606,0.7966609096765547,1.0606060606060606,0.5516149744358979,1.0606060606060606,0.38717949746647334
1.1616161616161618,0.8521200140106753,1.1616161616161618,0.5878017101641295,1.1616161616161618,0.4170777567516486
1.262626262626262,0.8975259277901253,1.262626262626262,0.6168588441570951,1.262626262626262,0.4446516626376453
1.3636363636363633,0.9290861930067627,1.3636363636363633,0.6411836178298306,1.3636363636363633,0.46927636759559477
1.4646464646464645,0.9508521659740165,1.4646464646464645,0.6610795923876176,1.4646464646464645,0.4901812911280025
1.5656565656565657,0.9612143570080512,1.5656565656565657,0.6768219209716341,1.5656565656565657,0.5079918402617868
1.666666666666667,0.9590141254017294,1.666666666666667,0.6878304863477654,1.666666666666667,0.5233400296358803
1.7676767676767673,0.9434050911299104,1.7676767676767673,0.6925040592034013,1.7676767676767673,0.5351552186913862
1.8686868686868685,0.9166484175947194,1.8686868686868685,0.6900246131027935,1.8686868686868685,0.5441567759439713
1.9696969696969697,0.8762489440965586,1.9696969696969697,0.6764843940414706,1.9696969696969697,0.5496025817549586
2.070707070707071,0.821609113516158,2.070707070707071,0.6566284893291617,2.070707070707071,0.5536820874974513
2.1717171717171713,0.7581599898835192,2.1717171717171713,0.6308981649064993,2.1717171717171713,0.5533100035360206
2.2727272727272725,0.6877704486402438,2.2727272727272725,0.6016976467409065,2.2727272727272725,0.550251787575325
2.3737373737373737,0.610815603287697,2.3737373737373737,0.5704721438286479,2.3737373737373737,0.5445865851994449
2.474747474747475,0.5275282181728166,2.474747474747475,0.5362814307290142,2.474747474747475,0.537858723684707
2.5757575757575752,0.44098299617705367,2.5757575757575752,0.5007018478259194,2.5757575757575752,0.5301810557083476
2.6767676767676765,0.3535127269572474,2.6767676767676765,0.4635791072799046,2.6767676767676765,0.5214280506499815
2.7777777777777777,0.2669314340184933,2.7777777777777777,0.4252681214470508,2.7777777777777777,0.5119428002841875
2.878787878787879,0.18244774892195767,2.878787878787879,0.3860805361925665,2.878787878787879,0.5020280103571171
2.9797979797979792,0.10009287374461422,2.9797979797979792,0.34649978327862213,2.9797979797979792,0.4918997465440798
3.0808080808080813,0.01825358803182036,3.0808080808080813,0.3067456416075246,3.0808080808080813,0.48152164248236273
3.1818181818181817,-0.06257603867024951,3.1818181818181817,0.2670556605010131,3.1818181818181817,0.4710506406469346
3.282828282828282,-0.14256250037038515,3.282828282828282,0.22747478740583862,3.282828282828282,0.46061400021772264
3.383838383838384,-0.22183964093761221,3.383838383838384,0.18823442296238005,3.383838383838384,0.4502063176185161
3.4848484848484844,-0.3000530710681483,3.4848484848484844,0.14930923451816047,3.4848484848484844,0.43983195563012295
3.5858585858585865,-0.37715837046834677,3.5858585858585865,0.11064727810620513,3.5858585858585865,0.4294855408707603
3.686868686868687,-0.4535879015098929,3.686868686868687,0.0721761317620166,3.686868686868687,0.41918651120808587
3.787878787878787,-0.5295958753874862,3.787878787878787,0.03385158496402993,3.787878787878787,0.4089211108732785
3.8888888888888893,-0.605341954214415,3.8888888888888893,-0.004196426105451837,3.8888888888888893,0.3986849690078671
3.9898989898989896,-0.6805725256650321,3.9898989898989896,-0.04204424507819378,3.9898989898989896,0.3884698016669201
4.09090909090909,-0.7553382625080638,4.09090909090909,-0.0795288839270637,4.09090909090909,0.37826736472008937
4.191919191919192,-0.8294318073700058,4.191919191919192,-0.11675718948094181,4.191919191919192,0.36808861016948324
4.292929292929292,-0.9025671571505313,4.292929292929292,-0.15379169226972225,4.292929292929292,0.3579396881040081
4.3939393939393945,-0.9751233932017581,4.3939393939393945,-0.19069301489402432,4.3939393939393945,0.3478279422102407
4.494949494949495,-1.0471623188798242,4.494949494949495,-0.227426975503073,4.494949494949495,0.3377388026398381
4.595959595959595,-1.1187532876284094,4.595959595959595,-0.263878605240927,4.595959595959595,0.32767338817749475
4.696969696969697,-1.189660915888889,4.696969696969697,-0.3001960056492053,4.696969696969697,0.3176530967513947
4.797979797979798,-1.2601246569645388,4.797979797979798,-0.3363281464377301,4.797979797979798,0.3076778013243957
4.8989898989899,-1.3303637186847002,4.8989898989899,-0.37225330321499334,4.8989898989899,0.29772768053304777
5.0,-1.4004134094571867,5.0,-0.4080316669473787,5.0,0.2878184725593889

@@ -1,101 +0,0 @@
x_n_50_tl_0.0,y_n_50_tl_0.0,x_n_500_tl_0.0,y_n_500_tl_0.0,x_n_5000_tl_0.0,y_n_5000_tl_0.0,x_n_50_tl_1.0,y_n_50_tl_1.0,x_n_500_tl_1.0,y_n_500_tl_1.0,x_n_5000_tl_1.0,y_n_5000_tl_1.0,x_n_50_tl_3.0,y_n_50_tl_3.0,x_n_500_tl_3.0,y_n_500_tl_3.0,x_n_5000_tl_3.0,y_n_5000_tl_3.0
-5.0,-0.8599583057554976,-5.0,1.6797068787192495,-5.0,1.7379689606223239,-5.0,-0.42741272499487776,-5.0,0.23661838590976328,-5.0,0.20399386816229978,-5.0,0.13095951218866275,-5.0,-0.46242184829078237,-5.0,-0.41058629664051305
-4.898989898989899,-0.8456047840536887,-4.898989898989899,1.5940442438460278,-4.898989898989899,1.6472202329485999,-4.898989898989899,-0.4276431031893983,-4.898989898989899,0.20862681459226723,-4.898989898989899,0.17824071850107404,-4.898989898989899,0.10539057470765349,-4.898989898989899,-0.4609018322257037,-4.898989898989899,-0.4110599614729015
-4.797979797979798,-0.8312512623518801,-4.797979797979798,1.5066655952530659,-4.797979797979798,1.5560370024912986,-4.797979797979798,-0.42787348138391906,-4.797979797979798,0.18056404254218186,-4.797979797979798,0.1523309553054011,-4.797979797979798,0.07982163722664384,-4.797979797979798,-0.4593800781031771,-4.797979797979798,-0.41155161184122596
-4.696969696969697,-0.8168977406500709,-4.696969696969697,1.4192486056640365,-4.696969696969697,1.4641612521550218,-4.696969696969697,-0.42810385957843955,-4.696969696969697,0.1524990189306639,-4.696969696969697,0.1262143553005724,-4.696969696969697,0.05464380509332076,-4.696969696969697,-0.4578583174084625,-4.696969696969697,-0.41205688060740875
-4.595959595959596,-0.8025442189482614,-4.595959595959596,1.3308076153149195,-4.595959595959596,1.3718747642404912,-4.595959595959596,-0.42833423777296026,-4.595959595959596,0.12443399531914556,-4.595959595959596,0.10000299804643913,-4.595959595959596,0.029720704709016,-4.595959595959596,-0.45633655338498746,-4.595959595959596,-0.4126005212950324
-4.494949494949495,-0.788190697246453,-4.494949494949495,1.2408764237610932,-4.494949494949495,1.2794547935729972,-4.494949494949495,-0.42856461596748074,-4.494949494949495,0.09628036393480953,-4.494949494949495,0.07370213597938947,-4.494949494949495,0.004797604324711557,-4.494949494949495,-0.45481454100468904,-4.494949494949495,-0.41317280828652125
-4.393939393939394,-0.7757194193374484,-4.393939393939394,1.150777108936673,-4.393939393939394,1.1865984175078124,-4.393939393939394,-0.4287949941620015,-4.393939393939394,0.06803799087458409,-4.393939393939394,0.047353868838267546,-4.393939393939394,-0.019952866294811474,-4.393939393939394,-0.4532902682540511,-4.393939393939394,-0.41378088791316736
-4.292929292929293,-0.7635428572249876,-4.292929292929293,1.0606777941122512,-4.292929292929293,1.0935156155193826,-4.292929292929293,-0.42902537235652216,-4.292929292929293,0.039745189354681264,-4.292929292929293,0.020863777423783696,-4.292929292929293,-0.04424719286600705,-4.292929292929293,-0.45176167641583376,-4.292929292929293,-0.41441903123033147
-4.191919191919192,-0.7514991436388702,-4.191919191919192,0.9705784792878309,-4.191919191919192,0.9999451479756023,-4.191919191919192,-0.42925575055104276,-4.191919191919192,0.01144626171509771,-4.191919191919192,-0.005903721047402898,-4.191919191919192,-0.06854151943720274,-4.191919191919192,-0.4502329821869361,-4.191919191919192,-0.415076548381381
-4.090909090909091,-0.7396941691045894,-4.090909090909091,0.8798554638230421,-4.090909090909091,0.9059203084364202,-4.090909090909091,-0.42948612874556336,-4.090909090909091,-0.016952280979816926,-4.090909090909091,-0.03298925765732338,-4.090909090909091,-0.09283584600839848,-4.090909090909091,-0.44869972853751156,-4.090909090909091,-0.4157629995846106
-3.9898989898989896,-0.7279252765177078,-3.9898989898989896,0.7884244803113447,-3.9898989898989896,0.811474387051809,-3.9898989898989896,-0.42971650694008423,-3.9898989898989896,-0.04548036359257723,-3.9898989898989896,-0.06017986522111469,-3.9898989898989896,-0.11713017257959416,-3.9898989898989896,-0.44715472797022665,-3.9898989898989896,-0.41647096691012625
-3.888888888888889,-0.7161580919866168,-3.888888888888889,0.6966140451148786,-3.888888888888889,0.7168906385054419,-3.888888888888889,-0.4299468851346048,-3.888888888888889,-0.07408610945271141,-3.888888888888889,-0.0874709084540591,-3.888888888888889,-0.14142449915078953,-3.888888888888889,-0.4456015995456161,-3.888888888888889,-0.4171930364234525
-3.787878787878788,-0.7043909074555256,-3.787878787878788,0.604803249010758,-3.787878787878788,0.6219712537736367,-3.787878787878788,-0.4301772633291252,-3.787878787878788,-0.10285723661640957,-3.787878787878788,-0.11503695886523099,-3.787878787878788,-0.16571882572198493,-3.787878787878788,-0.4440477592686527,-3.787878787878788,-0.41792735866227004
-3.686868686868687,-0.6926237229244344,-3.686868686868687,0.512070766385858,-3.686868686868687,0.5265347560169878,-3.686868686868687,-0.4304076415236461,-3.686868686868687,-0.13176620357773466,-3.686868686868687,-0.1429497539600965,-3.686868686868687,-0.19001315229318066,-3.686868686868687,-0.44249216926013074,-3.686868686868687,-0.4186788950692494
-3.5858585858585856,-0.680856538393343,-3.5858585858585856,0.418341406261733,-3.5858585858585856,0.43037422799158725,-3.5858585858585856,-0.43063801971816673,-3.5858585858585856,-0.16072772857488207,-3.5858585858585856,-0.17103810603915154,-3.5858585858585856,-0.21430747886437626,-3.5858585858585856,-0.44093657925160834,-3.5858585858585856,-0.41944890491602094
-3.484848484848485,-0.6690893538622519,-3.484848484848485,0.3230008626762439,-3.484848484848485,0.33347359833985296,-3.484848484848485,-0.43086839791268744,-3.484848484848485,-0.189786562504877,-3.484848484848485,-0.1992640699299042,-3.484848484848485,-0.238601805435572,-3.484848484848485,-0.4393809892430859,-3.484848484848485,-0.4202525693559286
-3.383838383838384,-0.6573221693311603,-3.383838383838384,0.22755806300474243,-3.383838383838384,0.23599152727957395,-3.383838383838384,-0.4310987761072079,-3.383838383838384,-0.21885301172451227,-3.383838383838384,-0.22770533404467666,-3.383838383838384,-0.2628961320067672,-3.383838383838384,-0.43781693796746485,-3.383838383838384,-0.4210766722370822
-3.282828282828283,-0.6455549848000697,-3.282828282828283,0.13172938749299176,-3.282828282828283,0.13785071540835,-3.282828282828283,-0.4313291543017285,-3.282828282828283,-0.24792012144222308,-3.282828282828283,-0.25633384693349226,-3.282828282828283,-0.28719045857796294,-3.282828282828283,-0.4362515901030497,-3.282828282828283,-0.42192705020460003
-3.1818181818181817,-0.6337878002689783,-3.1818181818181817,0.03583960513370717,-3.1818181818181817,0.03926297085619488,-3.1818181818181817,-0.43155953249624923,-3.1818181818181817,-0.2770868438988566,-3.1818181818181817,-0.28512064843139634,-3.1818181818181817,-0.3114847851491585,-3.1818181818181817,-0.4346861097486259,-3.1818181818181817,-0.42279043662854426
-3.080808080808081,-0.6219933944673289,-3.080808080808081,-0.06005017722557655,-3.080808080808081,-0.05953650043486377,-3.080808080808081,-0.4317899106907698,-3.080808080808081,-0.30634202732953336,-3.080808080808081,-0.3140197227479732,-3.080808080808081,-0.33577911172035446,-3.080808080808081,-0.4331124443470669,-3.080808080808081,-0.42366980349780375
-2.9797979797979797,-0.6084802589111126,-2.9797979797979797,-0.15590935392992944,-2.9797979797979797,-0.15810366579897028,-2.9797979797979797,-0.4320202888852905,-2.9797979797979797,-0.33549678779642544,-2.9797979797979797,-0.3430021282671825,-2.9797979797979797,-0.3600734382915496,-2.9797979797979797,-0.4315218307109141,-2.9797979797979797,-0.42449207343700956
-2.878787878787879,-0.5891232690738096,-2.878787878787879,-0.24713180817765498,-2.878787878787879,-0.2552003497036097,-2.878787878787879,-0.43225066707981114,-2.878787878787879,-0.36352866123332933,-2.878787878787879,-0.3716002292573769,-2.878787878787879,-0.38436776486274526,-2.878787878787879,-0.42982012082652077,-2.878787878787879,-0.4251380414134998
-2.7777777777777777,-0.5636588831509095,-2.7777777777777777,-0.33701300990207655,-2.7777777777777777,-0.35066910453142525,-2.7777777777777777,-0.4324810452743318,-2.7777777777777777,-0.3911342117000581,-2.7777777777777777,-0.39951657101606874,-2.7777777777777777,-0.4086620914339411,-2.7777777777777777,-0.42794280685642583,-2.7777777777777777,-0.4254095546530059
-2.676767676767677,-0.538194497228009,-2.676767676767677,-0.4265304961947721,-2.676767676767677,-0.4419057912445846,-2.676767676767677,-0.4295143886441945,-2.676767676767677,-0.41758811768544335,-2.676767676767677,-0.4264377612958712,-2.676767676767677,-0.4329564180051365,-2.676767676767677,-0.4251801800597513,-2.676767676767677,-0.42514350551302893
-2.5757575757575757,-0.5127301113051083,-2.5757575757575757,-0.5160338868263108,-2.5757575757575757,-0.530562896182845,-2.5757575757575757,-0.4209813938653777,-2.5757575757575757,-0.4421888684751682,-2.5757575757575757,-0.4521958194404763,-2.5757575757575757,-0.4572507445763323,-2.5757575757575757,-0.4220835438175992,-2.5757575757575757,-0.42424941235712643
-2.474747474747475,-0.48726572538220836,-2.474747474747475,-0.6045443334592155,-2.474747474747475,-0.615529859161848,-2.474747474747475,-0.4124483990865609,-2.474747474747475,-0.4657884717671948,-2.474747474747475,-0.4762840194362591,-2.474747474747475,-0.480179747245649,-2.474747474747475,-0.4184871960008546,-2.474747474747475,-0.4227211360179997
|
|
||||||
-2.3737373737373737,-0.4618013394593081,-2.3737373737373737,-0.6866461198443653,-2.3737373737373737,-0.6916556206405179,-2.3737373737373737,-0.4039154043077441,-2.3737373737373737,-0.4872175481179362,-2.3737373737373737,-0.49664688375599,-2.3737373737373737,-0.5021327343044837,-2.3737373737373737,-0.4148617786025484,-2.3737373737373737,-0.42058969704823307
|
|
||||||
-2.272727272727273,-0.4363369535364072,-2.272727272727273,-0.7664221699283893,-2.272727272727273,-0.76211944205629,-2.272727272727273,-0.3953824095289272,-2.272727272727273,-0.5066515567337302,-2.272727272727273,-0.5156479697413601,-2.272727272727273,-0.5240857213633179,-2.272727272727273,-0.4101489198915738,-2.272727272727273,-0.41773244666508813
|
|
||||||
-2.1717171717171717,-0.41087256761350716,-2.1717171717171717,-0.8294863656303931,-2.1717171717171717,-0.8275864122047706,-2.1717171717171717,-0.38684941475011053,-2.1717171717171717,-0.5248642081767847,-2.1717171717171717,-0.5320776321494358,-2.1717171717171717,-0.5460387084221523,-2.1717171717171717,-0.40386935734460455,-2.1717171717171717,-0.41386532161191136
|
|
||||||
-2.070707070707071,-0.38540818169060687,-2.070707070707071,-0.8777818560548117,-2.070707070707071,-0.8828614286116081,-2.070707070707071,-0.37790597680581006,-2.070707070707071,-0.5419305295559403,-2.070707070707071,-0.5450192204063132,-2.070707070707071,-0.5535021346303699,-2.070707070707071,-0.3970390682426877,-2.070707070707071,-0.40816135821642785
|
|
||||||
-1.9696969696969697,-0.3599437957677064,-1.9696969696969697,-0.9240065596308831,-1.9696969696969697,-0.9252381701217932,-1.9696969696969697,-0.3679210297690768,-1.9696969696969697,-0.5515520831674893,-1.9696969696969697,-0.5532507694312989,-1.9696969696969697,-0.5395642887779512,-1.9696969696969697,-0.3899536977126602,-1.9696969696969697,-0.4010221140801823
|
|
||||||
-1.868686868686869,-0.3344794098448062,-1.868686868686869,-0.9642081153190732,-1.868686868686869,-0.9553319880266173,-1.868686868686869,-0.3579360827323437,-1.868686868686869,-0.5596849243269256,-1.868686868686869,-0.556146459781286,-1.868686868686869,-0.5226399861377664,-1.868686868686869,-0.38238093755017905,-1.868686868686869,-0.3924834151653046
|
|
||||||
-1.7676767676767677,-0.3090150239219054,-1.7676767676767677,-1.0007396420666628,-1.7676767676767677,-0.9785388909278812,-1.7676767676767677,-0.34795113569561026,-1.7676767676767677,-0.5614467949548656,-1.7676767676767677,-0.556098671354368,-1.7676767676767677,-0.4982759643499402,-1.7676767676767677,-0.37323932215085087,-1.7676767676767677,-0.3822790688909727
|
|
||||||
-1.6666666666666665,-0.2835506379990052,-1.6666666666666665,-1.0187333297343348,-1.6666666666666665,-0.990642179129256,-1.6666666666666665,-0.3378404050890797,-1.6666666666666665,-0.5581030917440444,-1.6666666666666665,-0.5516597526410076,-1.6666666666666665,-0.47067804898067184,-1.6666666666666665,-0.3614402633008814,-1.6666666666666665,-0.37030436851426224
|
|
||||||
-1.5656565656565657,-0.2580862520761052,-1.5656565656565657,-1.0247628857811257,-1.5656565656565657,-0.9908786897501635,-1.5656565656565657,-0.32764529263529574,-1.5656565656565657,-0.5521110428952534,-1.5656565656565657,-0.543054168961121,-1.5656565656565657,-0.44308013361140386,-1.5656565656565657,-0.34868249075072216,-1.5656565656565657,-0.35699361568660476
|
|
||||||
-1.4646464646464645,-0.2326218661532044,-1.4646464646464645,-1.0034906902849632,-1.4646464646464645,-0.9791175953628313,-1.4646464646464645,-0.3174501801815117,-1.4646464646464645,-0.5459322825614802,-1.4646464646464645,-0.5306579767422843,-1.4646464646464645,-0.41548221824213516,-1.4646464646464645,-0.3311832422822113,-1.4646464646464645,-0.3422960409489238
|
|
||||||
-1.3636363636363638,-0.20715748023030392,-1.3636363636363638,-0.9673348570651019,-1.3636363636363638,-0.9595107779813504,-1.3636363636363638,-0.30725506772772765,-1.3636363636363638,-0.5358046337748493,-1.3636363636363638,-0.5149935986561597,-1.3636363636363638,-0.3878843028728669,-1.3636363636363638,-0.3132121589299601,-1.3636363636363638,-0.32640862478895577
|
|
||||||
-1.2626262626262625,-0.1816930943074038,-1.2626262626262625,-0.9225014127525308,-1.2626262626262625,-0.9337929369785798,-1.2626262626262625,-0.29705995527394363,-1.2626262626262625,-0.5219865374295057,-1.2626262626262625,-0.49551878203869837,-1.2626262626262625,-0.3602863875035988,-1.2626262626262625,-0.2946441284959401,-1.2626262626262625,-0.3093875165551468
|
|
||||||
-1.1616161616161618,-0.15622870838450328,-1.1616161616161618,-0.8751043056611054,-1.1616161616161618,-0.8989581380947891,-1.1616161616161618,-0.2868560938657385,-1.1616161616161618,-0.5034750880272445,-1.1616161616161618,-0.47203943335323734,-1.1616161616161618,-0.33268847213433056,-1.1616161616161618,-0.274883632364574,-1.1616161616161618,-0.290930041718859
|
|
||||||
-1.0606060606060606,-0.13076432246160322,-1.0606060606060606,-0.821606899074672,-1.0606060606060606,-0.8584249497008333,-1.0606060606060606,-0.27660353819390815,-1.0606060606060606,-0.48270847299437897,-1.0606060606060606,-0.44464074915622404,-1.0606060606060606,-0.3050905567650622,-1.0606060606060606,-0.25396600066040825,-1.0606060606060606,-0.27118022111102713
|
|
||||||
-0.9595959595959593,-0.1052999365387022,-0.9595959595959593,-0.7640740662013277,-0.9595959595959593,-0.8091349495541134,-0.9595959595959593,-0.2663509825220778,-0.9595959595959593,-0.4531496187924299,-0.9595959595959593,-0.4131252245857649,-0.9595959595959593,-0.2774926413957938,-0.9595959595959593,-0.2325608605277687,-0.9595959595959593,-0.24999263682664583
|
|
||||||
-0.858585858585859,-0.07983555061580246,-0.858585858585859,-0.6997648036121712,-0.858585858585859,-0.7481101580520273,-0.858585858585859,-0.24945014324598108,-0.858585858585859,-0.4128551081137216,-0.858585858585859,-0.3783375004573455,-0.858585858585859,-0.24988890615957382,-0.858585858585859,-0.20970608424200354,-0.858585858585859,-0.22760758480332924
|
|
||||||
-0.7575757575757578,-0.054371164692902076,-0.7575757575757578,-0.6349094271338603,-0.7575757575757578,-0.6820384544330558,-0.7575757575757578,-0.22976061598357173,-0.7575757575757578,-0.37194755761368214,-0.7575757575757578,-0.34125536540984164,-0.7575757575757578,-0.22211577202959193,-0.7575757575757578,-0.18612295967753525,-0.7575757575757578,-0.20435972492122192
|
|
||||||
-0.6565656565656566,-0.028906778770001355,-0.6565656565656566,-0.5675463340257147,-0.6565656565656566,-0.6095055279444694,-0.6565656565656566,-0.21007108872116223,-0.6565656565656566,-0.33089771921954814,-0.6565656565656566,-0.3018873155488892,-0.6565656565656566,-0.193901705770251,-0.6565656565656566,-0.16215648653127196,-0.6565656565656566,-0.17931671250996567
|
|
||||||
-0.5555555555555554,-0.003442392847101086,-0.5555555555555554,-0.4979737843441253,-0.5555555555555554,-0.5294156894319434,-0.5555555555555554,-0.17756203711819088,-0.5555555555555554,-0.28543993548509355,-0.5555555555555554,-0.26041062451302716,-0.5555555555555554,-0.1652647608815763,-0.5555555555555554,-0.13697108727984195,-0.5555555555555554,-0.15330854213602407
|
|
||||||
-0.45454545454545503,0.022021993075799252,-0.45454545454545503,-0.41446378537016554,-0.45454545454545503,-0.44063136513918405,-0.45454545454545503,-0.14370193132078618,-0.45454545454545503,-0.2395445410097954,-0.45454545454545503,-0.21652789115320525,-0.45454545454545503,-0.13529651419425484,-0.45454545454545503,-0.11162353028803523,-0.45454545454545503,-0.12623393965312618
|
|
||||||
-0.3535353535353538,0.047486378998699605,-0.3535353535353538,-0.32279891003383887,-0.3535353535353538,-0.3477046435373429,-0.3535353535353538,-0.10934683153775412,-0.3535353535353538,-0.19101529776271153,-0.3535353535353538,-0.17035416577174828,-0.3535353535353538,-0.10509845793132169,-0.3535353535353538,-0.08626013443382194,-0.3535353535353538,-0.0984136402387288
|
|
||||||
-0.2525252525252526,0.07295076492159988,-0.2525252525252526,-0.2310925448666578,-0.2525252525252526,-0.25069145628093464,-0.2525252525252526,-0.07491795886312486,-0.2525252525252526,-0.14150481827496786,-0.2525252525252526,-0.12255925867115473,-0.2525252525252526,-0.07490040166838845,-0.2525252525252526,-0.060434579838324495,-0.2525252525252526,-0.07006332009798681
|
|
||||||
-0.15151515151515138,0.09843047923373265,-0.15151515151515138,-0.13636354870852932,-0.15151515151515138,-0.15095910699954188,-0.15151515151515138,-0.040306119685216676,-0.15151515151515138,-0.08982558834407159,-0.15151515151515138,-0.07398207558396772,-0.15151515151515138,-0.044702345405455264,-0.15151515151515138,-0.033631412543263274,-0.15151515151515138,-0.04141233375856603
|
|
||||||
-0.050505050505050164,0.12391212075429944,-0.050505050505050164,-0.03941345742250633,-0.050505050505050164,-0.04947445191778734,-0.050505050505050164,-0.005694280507308445,-0.050505050505050164,-0.03797674651308919,-0.050505050505050164,-0.025080464074353173,-0.050505050505050164,-0.014504289142522105,-0.050505050505050164,-0.006446181090338347,-0.050505050505050164,-0.012381418678247798
|
|
||||||
0.050505050505050164,0.14939376227486617,0.050505050505050164,0.056551574802519614,0.050505050505050164,0.0525838784102356,0.050505050505050164,0.028911158365061536,0.050505050505050164,0.013973891774473416,0.050505050505050164,0.023794553267499748,0.050505050505050164,0.01583885016218507,0.050505050505050164,0.021038028372213642,0.050505050505050164,0.016846741994686543
|
|
||||||
0.15151515151515138,0.17487540379543332,0.15151515151515138,0.15017264202689645,0.15151515151515138,0.15408973105493792,0.15151515151515138,0.062183868537649845,0.15151515151515138,0.06589471730593952,0.15151515151515138,0.07245763138776953,0.15151515151515138,0.046508129166361926,0.15151515151515138,0.04842915541973139,0.15151515151515138,0.04601083462340586
|
|
||||||
0.2525252525252526,0.2003570453160002,0.2525252525252526,0.24151055338001104,0.2525252525252526,0.2530277286116801,0.2525252525252526,0.09533027991528796,0.2525252525252526,0.11633887943820748,0.2525252525252526,0.11992049316059605,0.2525252525252526,0.07717740817053882,0.2525252525252526,0.07538338916654858,0.2525252525252526,0.07493657104851133
|
|
||||||
0.3535353535353538,0.22583868683656727,0.3535353535353538,0.3245702345293225,0.3535353535353538,0.3487077570947679,0.3535353535353538,0.12847669129292608,0.3535353535353538,0.1661606781018032,0.3535353535353538,0.16626942811591283,0.3535353535353538,0.10784668717471575,0.3535353535353538,0.10205978943459323,0.3535353535353538,0.10356289911566637
|
|
||||||
0.45454545454545414,0.25132032835713397,0.45454545454545414,0.4042440047834261,0.45454545454545414,0.4412637068427958,0.45454545454545414,0.15705349698246504,0.45454545454545414,0.21489012650224273,0.45454545454545414,0.21055873443432177,0.45454545454545414,0.1385159661788923,0.45454545454545414,0.12849799626750344,0.45454545454545414,0.13171638145035697
|
|
||||||
0.5555555555555554,0.2768019698777009,0.5555555555555554,0.48386343064481413,0.5555555555555554,0.5292644209820558,0.5555555555555554,0.1822941322301175,0.5555555555555554,0.26332131026810235,0.5555555555555554,0.25282542280637477,0.5555555555555554,0.16918524518306918,0.5555555555555554,0.1537986605041808,0.5555555555555554,0.15885558014342485
|
|
||||||
0.6565656565656566,0.30228361139826787,0.6565656565656566,0.5582703975525269,0.6565656565656566,0.6095279265110211,0.6565656565656566,0.20753476747777022,0.6565656565656566,0.311752494033962,0.6565656565656566,0.2926908500466596,0.6565656565656566,0.1998545241872461,0.6565656565656566,0.17827793057103108,0.6565656565656566,0.18425901109338033
|
|
||||||
0.7575757575757578,0.3276630675001063,0.7575757575757578,0.6240165672599972,0.7575757575757578,0.6817170975194252,0.7575757575757578,0.23277540272542308,0.7575757575757578,0.35585725421977105,0.7575757575757578,0.32990973382338223,0.7575757575757578,0.23052380319142296,0.7575757575757578,0.19993717433313357,0.7575757575757578,0.20856541522380753
|
|
||||||
0.8585858585858581,0.35294340046326517,0.8585858585858581,0.6832251591090945,0.8585858585858581,0.7490023509530548,0.8585858585858581,0.2580160379730755,0.8585858585858581,0.39411574874383437,0.8585858585858581,0.3646086605463153,0.8585858585858581,0.2611930821955996,0.8585858585858581,0.21959645347898898,0.8585858585858581,0.2319021251050189
|
|
||||||
0.9595959595959593,0.3782237334264241,0.9595959595959593,0.7379264665053952,0.9595959595959593,0.8101967957597399,0.9595959595959593,0.28325667322072823,0.9595959595959593,0.4295026011065611,0.9595959595959593,0.39755496473819213,0.9595959595959593,0.2918623611997765,0.9595959595959593,0.23923560012200779,0.9595959595959593,0.25414437767202697
|
|
||||||
1.0606060606060606,0.40350406638958297,1.0606060606060606,0.7921580999576039,1.0606060606060606,0.865038072851208,1.0606060606060606,0.3013557830052828,1.0606060606060606,0.4622992830762259,1.0606060606060606,0.4264742505103137,1.0606060606060606,0.3225316402039533,1.0606060606060606,0.2583177367004956,1.0606060606060606,0.27509011865395333
|
|
||||||
1.1616161616161618,0.4287843993527419,1.1616161616161618,0.8463753861957045,1.1616161616161618,0.9101530745705552,1.1616161616161618,0.30890687222540525,1.1616161616161618,0.4901351365169132,1.1616161616161618,0.45319808589043276,1.1616161616161618,0.35276108409396234,1.1616161616161618,0.27725858987652097,1.1616161616161618,0.29560030900846
|
|
||||||
1.262626262626262,0.4540647323159006,1.262626262626262,0.897495878378595,1.262626262626262,0.9417185837581196,1.262626262626262,0.3164579614455276,1.262626262626262,0.5145846409490937,1.262626262626262,0.4780296455205537,1.262626262626262,0.3794404038170447,1.262626262626262,0.2943968389517317,1.262626262626262,0.3152556528081
|
|
||||||
1.3636363636363633,0.47934506527905946,1.3636363636363633,0.9371746663372353,1.3636363636363633,0.9683350572505884,1.3636363636363633,0.32400905066565,1.3636363636363633,0.5362370529858077,1.3636363636363633,0.49985847015098533,1.3636363636363633,0.4061135157391696,1.3636363636363633,0.31086438420332474,1.3636363636363633,0.33319398921001137
|
|
||||||
1.4646464646464645,0.5046253982422182,1.4646464646464645,0.9707358108138878,1.4646464646464645,0.9850292043911345,1.4646464646464645,0.3315601398857724,1.4646464646464645,0.5531019255981576,1.4646464646464645,0.5181848921010453,1.4646464646464645,0.43278662766129444,1.4646464646464645,0.32679862313827224,1.4646464646464645,0.34908318351734496
|
|
||||||
1.5656565656565657,0.519310758600954,1.5656565656565657,0.9906032176938914,1.5656565656565657,0.9918397190961462,1.5656565656565657,0.3391112291058948,1.5656565656565657,0.5659801950328859,1.5656565656565657,0.5323498791465002,1.5656565656565657,0.4511009412793216,1.5656565656565657,0.34162587661768695,1.5656565656565657,0.3628958484057042
|
|
||||||
1.666666666666667,0.51401635833774,1.666666666666667,1.005715077214144,1.666666666666667,0.9899656142606021,1.666666666666667,0.346662318326017,1.666666666666667,0.575829702298404,1.666666666666667,0.541260149475436,1.666666666666667,0.4674803110925756,1.666666666666667,0.35601920704359724,1.666666666666667,0.37572761649169056
|
|
||||||
1.7676767676767673,0.5060676795476615,1.7676767676767673,1.0131883048070176,1.7676767676767673,0.9786887428475383,1.7676767676767673,0.35421340754613934,1.7676767676767673,0.5828151703640635,1.7676767676767673,0.5455395874048847,1.7676767676767673,0.4838510153495891,1.7676767676767673,0.3703169330810678,1.7676767676767673,0.38781035405087
|
|
||||||
1.8686868686868685,0.495017206229559,1.8686868686868685,0.9802541539054102,1.8686868686868685,0.9559310588882513,1.8686868686868685,0.3617644967662619,1.8686868686868685,0.5839088497682434,1.8686868686868685,0.5467157898697311,1.8686868686868685,0.49835864007261943,1.8686868686868685,0.38410765063343066,1.8686868686868685,0.3977196343512365
|
|
||||||
1.9696969696969697,0.48396673291145637,1.9696969696969697,0.9263388630289161,1.9696969696969697,0.9221166683929235,1.9696969696969697,0.36931558598638414,1.9696969696969697,0.5804936028756624,1.9696969696969697,0.5450082343452209,1.9696969696969697,0.5115510651058692,1.9696969696969697,0.39647206872026003,1.9696969696969697,0.4057110985660076
|
|
||||||
2.070707070707071,0.4729162595933537,2.070707070707071,0.8698358861835761,2.070707070707071,0.8764481362001709,2.070707070707071,0.3768666752065065,2.070707070707071,0.574716686049867,2.070707070707071,0.5394474878302619,2.070707070707071,0.5097127295818997,2.070707070707071,0.4049032898801099,2.070707070707071,0.41126316053027995
|
|
||||||
2.1717171717171713,0.46186578627525116,2.1717171717171713,0.8081407617658106,2.1717171717171713,0.8224404974364862,2.1717171717171713,0.38441776442662906,2.1717171717171713,0.5655375705620478,2.1717171717171713,0.5300324428024472,2.1717171717171713,0.49554940844796147,2.1717171717171713,0.4101839304627971,2.1717171717171713,0.4155357725301964
|
|
||||||
2.2727272727272725,0.4491770446280175,2.2727272727272725,0.7442526428212628,2.2727272727272725,0.7592323649828391,2.2727272727272725,0.391968853646751,2.2727272727272725,0.552350323381661,2.2727272727272725,0.5163813504127768,2.2727272727272725,0.48094925798793925,2.2727272727272725,0.413936941837358,2.2727272727272725,0.41843071308941276
|
|
||||||
2.3737373737373737,0.43609986761848685,2.3737373737373737,0.675405575107383,2.3737373737373737,0.6874741372997285,2.3737373737373737,0.39951994286687353,2.3737373737373737,0.5335539998256553,2.3737373737373737,0.49865541506871236,2.3737373737373737,0.4655571015656922,2.3737373737373737,0.4173906236056948,2.3737373737373737,0.42027249977934045
|
|
||||||
2.474747474747475,0.4066895271847391,2.474747474747475,0.5978840366507735,2.474747474747475,0.6073682995880296,2.474747474747475,0.40692119452733155,2.474747474747475,0.5117177142842388,2.474747474747475,0.4784532511364369,2.474747474747475,0.4501649451434452,2.474747474747475,0.4206585025597512,2.474747474747475,0.4213399238172195
|
|
||||||
2.5757575757575752,0.3749622763477891,2.5757575757575752,0.5099585586540418,2.5757575757575752,0.5223271133442401,2.5757575757575752,0.41415264022012394,2.5757575757575752,0.4850415148130571,2.5757575757575752,0.4567094947730761,2.5757575757575752,0.43458555601387144,2.5757575757575752,0.42158324745022285,2.5757575757575752,0.42181632222498416
|
|
||||||
2.6767676767676765,0.3432350255108388,2.6767676767676765,0.4205365946887392,2.6767676767676765,0.432906236858961,2.6767676767676765,0.4199131836378292,2.6767676767676765,0.45218830888592937,2.6767676767676765,0.4332394825941561,2.6767676767676765,0.41774264448225407,2.6767676767676765,0.42145613907090707,2.6767676767676765,0.4215504924390677
|
|
||||||
2.7777777777777777,0.3115077746738885,2.7777777777777777,0.32930350370842715,2.7777777777777777,0.3412321347424227,2.7777777777777777,0.42274639662898705,2.7777777777777777,0.4163402713183856,2.7777777777777777,0.40851950219775013,2.7777777777777777,0.40089973295063663,2.7777777777777777,0.4209228617300304,2.7777777777777777,0.4203590184673923
|
|
||||||
2.878787878787879,0.27978052383693824,2.878787878787879,0.23807041272811588,2.878787878787879,0.24760314946640188,2.878787878787879,0.42557960962014507,2.878787878787879,0.3802049595409251,2.878787878787879,0.383057999391408,2.878787878787879,0.3840568214190192,2.878787878787879,0.41938009129458526,2.878787878787879,0.41854626446476473
|
|
||||||
2.9797979797979792,0.24805327299998842,2.9797979797979792,0.14646854757187647,2.9797979797979792,0.15264712621771054,2.9797979797979792,0.428104678899817,2.9797979797979792,0.3432577786602793,2.9797979797979792,0.35694448241628624,2.9797979797979792,0.367213909887402,2.9797979797979792,0.41773298189050795,2.9797979797979792,0.4163510447804036
|
|
||||||
3.0808080808080813,0.21632602216303798,3.0808080808080813,0.05456143993271787,3.0808080808080813,0.057336396951423035,3.0808080808080813,0.42910204221273207,3.0808080808080813,0.30602019255320434,3.0808080808080813,0.3305660520102483,3.0808080808080813,0.3503709983557844,3.0808080808080813,0.41593157838764133,3.0808080808080813,0.41396474245507225
|
|
||||||
3.1818181818181817,0.18459877132608776,3.1818181818181817,-0.03733538955626138,3.1818181818181817,-0.03779843888287274,3.1818181818181817,0.4300994055256468,3.1818181818181817,0.26873960102765904,3.1818181818181817,0.30419224859801247,3.1818181818181817,0.3335280868241671,3.1818181818181817,0.41409475876758717,3.1818181818181817,0.41152646064562604
|
|
||||||
3.282828282828282,0.15287152048913782,3.282828282828282,-0.12920906194738088,3.282828282828282,-0.13249853932321157,3.282828282828282,0.43099899837317435,3.282828282828282,0.2314874157056526,3.282828282828282,0.27788417508140784,3.282828282828282,0.3164995410780566,3.282828282828282,0.4122620364061852,3.282828282828282,0.40912247673587887
|
|
||||||
3.383838383838384,0.12114426965218736,3.383838383838384,-0.22108273433850145,3.383838383838384,-0.22672866959540386,3.383838383838384,0.4318917322435721,3.383838383838384,0.19424068277399548,3.383838383838384,0.25176947991950477,3.383838383838384,0.2992528546417876,3.383838383838384,0.41043205422405316,3.383838383838384,0.40674183306733336
|
|
||||||
3.4848484848484844,0.08941701881523752,3.4848484848484844,-0.3129564067296208,3.4848484848484844,-0.3204339220693533,3.4848484848484844,0.43278446611396965,3.4848484848484844,0.15713787053146627,3.4848484848484844,0.22587592408322044,3.4848484848484844,0.2820061682055188,3.4848484848484844,0.4086021011097265,3.4848484848484844,0.4043698847877142
|
|
||||||
3.5858585858585865,0.058162275193419995,3.5858585858585865,-0.40462815693660914,3.5858585858585865,-0.41324795154433747,3.5858585858585865,0.4336771999843675,3.5858585858585865,0.12019800234358827,3.5858585858585865,0.20009983185318994,3.5858585858585865,0.2647594817692496,3.5858585858585865,0.4067722514909233,3.5858585858585865,0.40203120630187705
|
|
||||||
3.686868686868687,0.027654025225499562,3.686868686868687,-0.49422269067564845,3.686868686868687,-0.505293720158625,3.686868686868687,0.43456993385476517,3.686868686868687,0.08338176166505175,3.686868686868687,0.17451220690194294,3.686868686868687,0.24694025624429472,3.686868686868687,0.40494401437700783,3.686868686868687,0.39972779600606
|
|
||||||
3.787878787878787,-0.0028542247424208616,3.787878787878787,-0.5825355853286744,3.787878787878787,-0.5971159649192432,3.787878787878787,0.4354626677251625,3.787878787878787,0.04665044899957155,3.787878787878787,0.14916273839002891,3.787878787878787,0.22899283485249716,3.787878787878787,0.40312179798093106,3.787878787878787,0.39746202764807126
|
|
||||||
3.8888888888888893,-0.03336247471034154,3.8888888888888893,-0.6703463394238872,3.8888888888888893,-0.68824406601414,3.8888888888888893,0.4363554015955604,3.8888888888888893,0.009919136334091362,3.8888888888888893,0.12414842115967273,3.8888888888888893,0.21104541346069938,3.8888888888888893,0.4013011021954902,3.8888888888888893,0.3952295870367829
|
|
||||||
3.9898989898989896,-0.06387072467826214,3.9898989898989896,-0.7575928168757736,3.9898989898989896,-0.7784133912470257,3.9898989898989896,0.437248135465958,3.9898989898989896,-0.026722390982327433,3.9898989898989896,0.09939234299162882,3.9898989898989896,0.19309799206890174,3.9898989898989896,0.399484052282032,3.9898989898989896,0.3930265651896393
|
|
||||||
4.09090909090909,-0.0943789746461824,4.09090909090909,-0.8443788481067765,4.09090909090909,-0.8681309126980375,4.09090909090909,0.43814086933635565,4.09090909090909,-0.06308596529257729,4.09090909090909,0.07491765400345742,4.09090909090909,0.1750164743635475,4.09090909090909,0.39766754707663343,4.09090909090909,0.3908577509521082
|
|
||||||
4.191919191919192,-0.12488722461410334,4.191919191919192,-0.9297917533069101,4.191919191919192,-0.9573364023412008,4.191919191919192,0.4390336032067535,4.191919191919192,-0.09929539509789244,4.191919191919192,0.05074971564267564,4.191919191919192,0.1568727764842795,4.191919191919192,0.3958543351530404,4.191919191919192,0.38872432233841003
|
|
||||||
4.292929292929292,-0.15539547458202363,4.292929292929292,-1.0140884125491687,4.292929292929292,-1.0459165238042567,4.292929292929292,0.4399263370771512,4.292929292929292,-0.1349334585206603,4.292929292929292,0.02675516616820918,4.292929292929292,0.13872907860501169,4.292929292929292,0.3940418892740997,4.292929292929292,0.38661923148208605
|
|
||||||
4.3939393939393945,-0.18590372454994458,4.3939393939393945,-1.0972974392893766,4.3939393939393945,-1.1342383379633272,4.3939393939393945,0.4408190709475487,4.3939393939393945,-0.16982980680843562,4.3939393939393945,0.002964652994963484,4.3939393939393945,0.11796054958424437,4.3939393939393945,0.3922298874756054,4.3939393939393945,0.3845302650106349
|
|
||||||
4.494949494949495,-0.216411974517865,4.494949494949495,-1.179182894055243,4.494949494949495,-1.2221355458185688,4.494949494949495,0.44032091498508585,4.494949494949495,-0.20469748939648835,4.494949494949495,-0.0206002794035424,4.494949494949495,0.09701325884395126,4.494949494949495,0.39041788567711144,4.494949494949495,0.38248614430609396
|
|
||||||
4.595959595959595,-0.24692022448578524,4.595959595959595,-1.2601894992373368,4.595959595959595,-1.3091379548259912,4.595959595959595,0.4390119198940737,4.595959595959595,-0.239564339118166,4.595959595959595,-0.044064215802437315,4.595959595959595,0.07606596810365834,4.595959595959595,0.38861853091288373,4.595959595959595,0.3804739406387159
|
|
||||||
4.696969696969697,-0.2774284744537062,4.696969696969697,-1.3408190143954206,4.696969696969697,-1.395667382198044,4.696969696969697,0.4377029248030613,4.696969696969697,-0.2744311888398445,4.696969696969697,-0.06739710896332894,4.696969696969697,0.05511867736336504,4.696969696969697,0.38683625018149875,4.696969696969697,0.37848669218529357
|
|
||||||
4.797979797979798,-0.3079367244216266,4.797979797979798,-1.4214485295534998,4.797979797979798,-1.4814148159277154,4.797979797979798,0.436393929712049,4.797979797979798,-0.3092980385615221,4.797979797979798,-0.09057526494106827,4.797979797979798,0.034171386623072064,4.797979797979798,0.3850542123238927,4.797979797979798,0.37652869146057905
|
|
||||||
4.8989898989899,-0.3384449743895474,4.8989898989899,-1.5019215376311323,4.8989898989899,-1.5662892316768398,4.8989898989899,0.4350560618496009,4.8989898989899,-0.34416306870335767,4.8989898989899,-0.11357143325279366,4.8989898989899,0.013224095882778591,4.8989898989899,0.383272237289863,4.8989898989899,0.37460430584833954
|
|
||||||
5.0,-0.3689532243574676,5.0,-1.5820215750973248,5.0,-1.6508596672714462,5.0,0.43307940950570034,5.0,-0.37879161071248096,5.0,-0.13636462992911846,5.0,-0.007723194857514326,5.0,0.38149127984729847,5.0,0.37272620912380855
|
|
|
@ -1,7 +0,0 @@
x,y
-3.14159265358979 , -1.22464679914735e-16
-1.88495559215388 , -0.951056516295154
-0.628318530717959 , -0.587785252292473
0.628318530717959 , 0.587785252292473
1.88495559215388 , 0.951056516295154
3.14159265358979 , 1.22464679914735e-16
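These six points are the values of sin(x) at equidistant points on [-pi, pi]; judging by the matching values, this is presumably the Plots/Data/sin_6.csv training data read by the figures further down (an assumption, the diff does not show the file name). A minimal standalone sketch of how such a file can be plotted with pgfplots:

\documentclass{standalone}
\usepackage{pgfplots}
\pgfplotsset{compat=1.11}
\begin{document}
\begin{tikzpicture}
  \begin{axis}[xlabel = {$x$}, ylabel = {$y$}]
    % scatter of the six training points
    \addplot [only marks] table [x=x, y=y, col sep=comma] {Plots/Data/sin_6.csv};
    % the function the samples were taken from; pgfplots' sin() expects degrees
    \addplot [domain=-pi:pi, samples=100, smooth] {sin(deg(x))};
  \end{axis}
\end{tikzpicture}
\end{document}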
@ -1,64 +0,0 @@
,x_i,y_i,x_d,y_d,x,y
[63 rows omitted: a row-index column followed by x_i = 0.0, 0.1, ..., 6.2 with y_i = sin(x_i); (x_d, y_d) are noisy samples of the sine curve and (x, y) appear to be a smoothed version of them.]
@ -1,141 +0,0 @@
\pgfplotsset{
  compat=1.11,
  legend image code/.code={
    \draw[mark repeat=2,mark phase=2]
    plot coordinates {
      (0cm,0cm)
      (0.075cm,0cm) %% default is (0.3cm,0cm)
      (0.15cm,0cm)  %% default is (0.6cm,0cm)
    };%
  }
}
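% The legend image code override above shrinks the line sample drawn next
% to each legend entry from the default 0.6cm to 0.15cm; 'mark repeat=2,
% mark phase=2' keep a single plot mark centred on the shortened sample.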
\begin{figure}
  \begin{subfigure}[b]{0.5\textwidth}
    \begin{subfigure}[b]{\textwidth}
      \begin{adjustbox}{width=\textwidth, height=0.25\textheight}
        \begin{tikzpicture}
          \begin{axis}[
            ytick = {-1, 0, 1, 2},
            yticklabels = {$-1$, $\phantom{-0.}0$, $1$, $2$},]
            \addplot table [x=x, y=y, col sep=comma, only marks,
              forget plot] {Plots/Data/sin_6.csv};
            \addplot [black, line width=2pt] table [x=x, y=y, col
              sep=comma, mark=none] {Plots/Data/matlab_0.csv};
            \addplot [red, line width = 1.5pt, dashed] table [x=x_n_5000_tl_0.0,
              y=y_n_5000_tl_0.0, col sep=comma, mark=none] {Plots/Data/scala_out_sin.csv};
            \addlegendentry{$f_1^{*, 0.1}$};
            \addlegendentry{$\mathcal{RN}_w^{\tilde{\lambda}}$};
          \end{axis}
        \end{tikzpicture}
      \end{adjustbox}
      \caption{$\lambda = 0.1$}
    \end{subfigure}\\
    \begin{subfigure}[b]{\textwidth}
      \begin{adjustbox}{width=\textwidth, height=0.25\textheight}
        \begin{tikzpicture}
          \begin{axis}
            \addplot table [x=x, y=y, col sep=comma, only marks,
              forget plot] {Plots/Data/sin_6.csv};
            \addplot [black, line width=2pt] table [x=x, y=y, col sep=comma, mark=none] {Plots/Data/matlab_1.csv};
            \addplot [red, line width = 1.5pt, dashed] table [x=x_n_5000_tl_1.0,
              y=y_n_5000_tl_1.0, col sep=comma, mark=none] {Plots/Data/scala_out_sin.csv};
            \addlegendentry{$f_1^{*, 1.0}$};
            \addlegendentry{$\mathcal{RN}_w^{\tilde{\lambda}}$};
          \end{axis}
        \end{tikzpicture}
      \end{adjustbox}
      \caption{$\lambda = 1.0$}
    \end{subfigure}\\
    \begin{subfigure}[b]{\textwidth}
      \begin{adjustbox}{width=\textwidth, height=0.25\textheight}
        \begin{tikzpicture}
          \begin{axis}
            \addplot table [x=x, y=y, col sep=comma, only marks,
              forget plot] {Plots/Data/sin_6.csv};
            \addplot [black, line width=2pt] table [x=x, y=y, col sep=comma, mark=none] {Plots/Data/matlab_3.csv};
            \addplot [red, line width = 1.5pt, dashed] table [x=x_n_5000_tl_3.0,
              y=y_n_5000_tl_3.0, col sep=comma, mark=none] {Plots/Data/scala_out_sin.csv};
            \addlegendentry{$f_1^{*, 3.0}$};
            \addlegendentry{$\mathcal{RN}_w^{\tilde{\lambda}}$};
          \end{axis}
        \end{tikzpicture}
      \end{adjustbox}
      \caption{$\lambda = 3.0$}
    \end{subfigure}
  \end{subfigure}
  \begin{subfigure}[b]{0.5\textwidth}
    \begin{subfigure}[b]{\textwidth}
      \begin{adjustbox}{width=\textwidth, height=0.245\textheight}
        \begin{tikzpicture}
          \begin{axis}[
            ytick = {-2,-1, 0, 1, 2},
            yticklabels = {$-2$,$-1$, $\phantom{-0.}0$, $1$, $2$},]
            \addplot table [x=x, y=y, col sep=comma, only marks,
              forget plot] {Plots/Data/data_sin_d_t.csv};
            \addplot [black, line width=2pt] table [x=x, y=y, col sep=comma, mark=none] {Plots/Data/matlab_sin_d_01.csv};
            \addplot [red, line width = 1.5pt, dashed] table [x=x_n_5000_tl_0.1,
              y=y_n_5000_tl_0.1, col sep=comma, mark=none] {Plots/Data/scala_out_d_1_t.csv};
            \addlegendentry{$f_1^{*, 0.1}$};
            \addlegendentry{$\mathcal{RN}_w^{\tilde{\lambda}}$};
          \end{axis}
        \end{tikzpicture}
      \end{adjustbox}
      \caption{$\lambda = 0.1$}
    \end{subfigure}\\
    \begin{subfigure}[b]{\textwidth}
      \begin{adjustbox}{width=\textwidth, height=0.25\textheight}
        \begin{tikzpicture}
          \begin{axis}
            \addplot table [x=x, y=y, col sep=comma, only marks,
              forget plot] {Plots/Data/data_sin_d_t.csv};
            \addplot [black, line width=2pt] table [x=x, y=y, col sep=comma, mark=none] {Plots/Data/matlab_sin_d_1.csv};
            \addplot [red, line width = 1.5pt, dashed] table [x=x_n_5000_tl_1.0,
              y=y_n_5000_tl_1.0, col sep=comma, mark=none] {Plots/Data/scala_out_d_1_t.csv};
            \addlegendentry{$f_1^{*, 1.0}$};
            \addlegendentry{$\mathcal{RN}_w^{\tilde{\lambda},*}$};
          \end{axis}
        \end{tikzpicture}
      \end{adjustbox}
      \caption{$\lambda = 1.0$}
    \end{subfigure}\\
    \begin{subfigure}[b]{\textwidth}
      \begin{adjustbox}{width=\textwidth, height=0.25\textheight}
        \begin{tikzpicture}
          \begin{axis}
            \addplot table [x=x, y=y, col sep=comma, only marks,
              forget plot] {Plots/Data/data_sin_d_t.csv};
            \addplot [black, line width=2pt] table [x=x, y=y, col sep=comma, mark=none] {Plots/Data/matlab_sin_d_3.csv};
            \addplot [red, line width = 1.5pt, dashed] table [x=x_n_5000_tl_3.0,
              y=y_n_5000_tl_3.0, col sep=comma, mark=none] {Plots/Data/scala_out_d_1_t.csv};
            \addlegendentry{$f_1^{*, 3.0}$};
            \addlegendentry{$\mathcal{RN}_w^{\tilde{\lambda}}$};
          \end{axis}
        \end{tikzpicture}
      \end{adjustbox}
      \caption{$\lambda = 3.0$}
    \end{subfigure}
  \end{subfigure}
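% 'forget plot' keeps the scatter of the training data out of the legend
% and of the colour cycle, so the two \addlegendentry commands in each
% panel refer to the spline fit $f_1^{*, \lambda}$ (black) and the network
% $\mathcal{RN}_w^{\tilde{\lambda}}$ (red, dashed).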
  \caption[Comparison of shallow neural networks and regression
    splines]{Ridge penalized neural network compared to regression
    spline, trained on $\text{data}_A$ in a), b), c) and on
    $\text{data}_B$ in d), e), f). For $\text{data}_A$ six equidistant
    samples of the sine function on $[-\pi, \pi]$ were used as training
    data; for $\text{data}_B$ 15 equidistant values with $y_i^{train} =
    \sin(x_i^{train}) + \varepsilon_i$ and $\varepsilon_i \sim
    \mathcal{N}(0, 0.3)$ were used. The penalty parameter $\lambda$ of
    each fit is given in the corresponding subcaption.}
  \label{fig:rn_vs_rs}
\end{figure}

%%% Local Variables:
%%% mode: latex
%%% TeX-master:
%%% End:
@ -1,93 +0,0 @@
\pgfplotsset{
  compat=1.11,
  legend image code/.code={
    \draw[mark repeat=2,mark phase=2]
    plot coordinates {
      (0cm,0cm)
      (0.0cm,0cm) %% default is (0.3cm,0cm)
      (0.0cm,0cm) %% default is (0.6cm,0cm)
    };%
  }
}
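% With all three coordinates at the origin, the legend sample line has
% zero length here, so each legend entry shows only the plot mark itself.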
\begin{figure}
  \begin{subfigure}[h!]{\textwidth}
    \begin{tikzpicture}
      \begin{axis}[tick style = {draw = none}, width = \textwidth,
        height = 0.6\textwidth,
        xtick = {1, 3, 5,7,9,11,13,15,17,19},
        xticklabels = {$2$, $4$, $6$, $8$,
          $10$,$12$,$14$,$16$,$18$,$20$},
        xlabel = {training epoch}, ylabel = {classification accuracy}]
        \addplot table
          [x=epoch, y=val_accuracy, col sep=comma] {Plots/Data/GD_01.log};
        \addplot table
          [x=epoch, y=val_accuracy, col sep=comma] {Plots/Data/GD_05.log};
        \addplot table
          [x=epoch, y=val_accuracy, col sep=comma] {Plots/Data/GD_1.log};
        \addplot table
          [x=epoch, y=val_accuracy, col sep=comma]
          {Plots/Data/SGD_01_b32.log};

        \addlegendentry{GD$_{0.01}$}
        \addlegendentry{GD$_{0.05}$}
        \addlegendentry{GD$_{0.1}$}
        \addlegendentry{SGD$_{0.01}$}
      \end{axis}
    \end{tikzpicture}
    %\caption{Classification accuracy}
  \end{subfigure}
  \begin{subfigure}[b]{\textwidth}
    \begin{tikzpicture}
      \begin{axis}[tick style = {draw = none}, width = \textwidth,
        height = 0.6\textwidth,
        ytick = {0, 1, 2, 3, 4},
        yticklabels = {$0$, $1$, $\phantom{0.}2$, $3$, $4$},
        xtick = {1, 3, 5,7,9,11,13,15,17,19},
        xticklabels = {$2$, $4$, $6$, $8$,
          $10$,$12$,$14$,$16$,$18$,$20$},
        xlabel = {training epoch}, ylabel = {error measure\vphantom{fy}}]
        \addplot table
          [x=epoch, y=val_loss, col sep=comma] {Plots/Data/GD_01.log};
        \addplot table
          [x=epoch, y=val_loss, col sep=comma] {Plots/Data/GD_05.log};
        \addplot table
          [x=epoch, y=val_loss, col sep=comma] {Plots/Data/GD_1.log};
        \addplot table
          [x=epoch, y=val_loss, col sep=comma] {Plots/Data/SGD_01_b32.log};

        \addlegendentry{GD$_{0.01}$}
        \addlegendentry{GD$_{0.05}$}
        \addlegendentry{GD$_{0.1}$}
        \addlegendentry{SGD$_{0.01}$}

      \end{axis}
    \end{tikzpicture}
    \caption{Performance metrics during training}
  \end{subfigure}
  % \\~\\
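% The .log files read above are comma-separated training histories with
% columns such as epoch, val_accuracy and val_loss. Judging by the column
% names they were presumably written by a CSV logging callback during
% training (e.g. Keras' CSVLogger); this is an assumption, not confirmed
% by the sources shown here.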
  \caption[Performance comparison of SGD and GD]{The neural network given
    in ?? trained with different algorithms on the MNIST handwritten
    digits data set. For gradient descent the learning rates 0.01, 0.05
    and 0.1 are used (GD$_{\cdot}$). For stochastic gradient descent a
    batch size of 32 and a learning rate of 0.01 is used (SGD$_{0.01}$).}
  \label{fig:sgd_vs_gd}
\end{figure}

\begin{table}[h]
  \begin{tabu} to \textwidth {@{} *4{X[c]}c*4{X[c]} @{}}
    \multicolumn{4}{c}{Classification Accuracy}
    &~&\multicolumn{4}{c}{Error Measure}
    \\\cline{1-4}\cline{6-9}
    GD$_{0.01}$&GD$_{0.05}$&GD$_{0.1}$&SGD$_{0.01}$&&GD$_{0.01}$&GD$_{0.05}$&GD$_{0.1}$&SGD$_{0.01}$
    \\\cline{1-4}\cline{6-9}
    \multicolumn{9}{c}{Test}\\
    0.265&0.633&0.203&0.989&&2.267&1.947&3.91&0.032
  \end{tabu}
  \caption{Performance metrics of the networks trained in
    Figure~\ref{fig:sgd_vs_gd} after 20 training epochs.}
  \label{table:sgd_vs_gd}
\end{table}
%%% Local Variables:
%%% mode: latex
%%% TeX-master: "../main"
%%% End:
@ -1,71 +0,0 @@
\message{ !name(pfg_test.tex)}\documentclass{article}
\usepackage{pgfplots}
\usepackage{filecontents}
\usepackage{subcaption}
\usepackage{adjustbox}
\usepackage{xcolor}
\usepackage{graphicx}
\usetikzlibrary{calc, 3d}

\begin{document}

\message{ !name(pfg_test.tex) !offset(6) }

\end{axis}
\end{tikzpicture}
\end{adjustbox}
\caption{True position (\textcolor{red}{red}), distorted data (black)}
\end{figure}
\begin{center}
  \begin{figure}[h]
    \begin{subfigure}{0.49\textwidth}
      \includegraphics[width=\textwidth]{Data/klammern.jpg}
      \caption{Original Picture}
    \end{subfigure}
    \begin{subfigure}{0.49\textwidth}
      \includegraphics[width=\textwidth]{Data/image_conv4.png}
      \caption{test}
    \end{subfigure}
    \begin{subfigure}{0.49\textwidth}
      \includegraphics[width=\textwidth]{Data/image_conv5.png}
      \caption{test}
    \end{subfigure}
    \begin{subfigure}{0.49\textwidth}
      \includegraphics[width=\textwidth]{Data/image_conv6.png}
      \caption{test}
    \end{subfigure}
  \end{figure}
\end{center}

\begin{figure}
  \begin{adjustbox}{width=\textwidth}
    \begin{tikzpicture}
      \begin{scope}[x = (0:1cm), y=(90:1cm), z=(15:-0.5cm)]
        \node[canvas is xy plane at z=0, transform shape] at (0,0)
          {\includegraphics[width=5cm]{Data/klammern_r.jpg}};
        \node[canvas is xy plane at z=2, transform shape] at (0,-0.2)
          {\includegraphics[width=5cm]{Data/klammern_g.jpg}};
        \node[canvas is xy plane at z=4, transform shape] at (0,-0.4)
          {\includegraphics[width=5cm]{Data/klammern_b.jpg}};
        \node[canvas is xy plane at z=4, transform shape] at (-8,-0.2)
          {\includegraphics[width=5.3cm]{Data/klammern_rgb.jpg}};
      \end{scope}
    \end{tikzpicture}
  \end{adjustbox}
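  % 'canvas is xy plane at z=...' comes from the 3d tikzlibrary loaded in
  % the preamble: each node is drawn on its own plane parallel to the xy
  % plane, which yields the exploded view of the three colour channels
  % next to the combined image.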
  \caption{On the right the red, green and blue channels of the picture
    are displayed. In order to better visualize the color channels, the
    black and white picture of each channel has been colored in the
    respective color. Combining the layers results in the image on the
    left.}
\end{figure}



\message{ !name(pfg_test.tex) !offset(3) }

\end{document}

%%% Local Variables:
%%% mode: latex
%%% TeX-master: t
%%% End:
@ -1,53 +0,0 @@
\begin{figure}[h]
  \centering
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/fashion_mnist0.pdf}
    \caption{T-shirt/top}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/fashion_mnist1.pdf}
    \caption{Trousers}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/fashion_mnist2.pdf}
    \caption{Pullover}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/fashion_mnist3.pdf}
    \caption{Dress}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/fashion_mnist4.pdf}
    \caption{Coat}
  \end{subfigure}\\
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/fashion_mnist5.pdf}
    \caption{Sandal}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/fashion_mnist6.pdf}
    \caption{Shirt}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/fashion_mnist7.pdf}
    \caption{Sneaker}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/fashion_mnist8.pdf}
    \caption{Bag}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/fashion_mnist9.pdf}
    \caption{Ankle boot}
  \end{subfigure}
  \caption[Fashion MNIST data set]{The Fashion MNIST data set contains
    70.000 preprocessed product images from Zalando, categorized as
    T-shirt/top, Trouser, Pullover, Dress, Coat, Sandal, Shirt, Sneaker,
    Bag and Ankle boot. Of these images 60.000 are used as training
    images, while the rest are used to validate the models trained.}
  \label{fig:MNIST}
\end{figure}
%%% Local Variables:
%%% mode: latex
%%% TeX-master: "../main"
%%% End:
@ -1,82 +0,0 @@
\pgfplotsset{
  compat=1.11,
  legend image code/.code={
    \draw[mark repeat=2,mark phase=2]
    plot coordinates {
      (0cm,0cm)
      (0.15cm,0cm) %% default is (0.3cm,0cm)
      (0.3cm,0cm)  %% default is (0.6cm,0cm)
    };%
  }
}
\begin{figure}
  \begin{subfigure}[h]{\textwidth}
    \begin{tikzpicture}
      \begin{axis}[legend cell align={left},yticklabel style={/pgf/number format/fixed,
        /pgf/number format/precision=3},tick style = {draw = none}, width = \textwidth,
        height = 0.6\textwidth, ymin = 0.988, legend style={at={(0.9825,0.0175)},anchor=south east},
        xlabel = {epoch}, ylabel = {Classification Accuracy}, cycle
        list/Dark2, every axis plot/.append style={line width =1.25pt}]
        \addplot table
          [x=epoch, y=val_accuracy, col sep=comma, mark = none]
          {Plots/Data/adam_datagen_full_mean.log};
        \addplot table
          [x=epoch, y=val_accuracy, col sep=comma, mark = none]
          {Plots/Data/adam_datagen_dropout_02_full_mean.log};
        \addplot table
          [x=epoch, y=val_accuracy, col sep=comma, mark = none]
          {Plots/Data/adam_datagen_dropout_04_full_mean.log};
        \addplot table
          [x=epoch, y=val_accuracy, col sep=comma, mark = none]
          {Plots/Data/adam_dropout_02_full_mean.log};
        \addplot table
          [x=epoch, y=val_accuracy, col sep=comma, mark = none]
          {Plots/Data/adam_dropout_04_full_mean.log};
        \addplot [dashed] table
          [x=epoch, y=val_accuracy, col sep=comma, mark = none]
          {Plots/Data/adam_full_mean.log};

        \addlegendentry{\footnotesize{G.}}
        \addlegendentry{\footnotesize{G. + D. 0.2}}
        \addlegendentry{\footnotesize{G. + D. 0.4}}
        \addlegendentry{\footnotesize{D. 0.2}}
        \addlegendentry{\footnotesize{D. 0.4}}
        \addlegendentry{\footnotesize{Default}}
      \end{axis}
    \end{tikzpicture}
    \caption{Classification accuracy}
    \vspace{.25cm}
  \end{subfigure}
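  % Note: 'cycle list/Dark2' in the axis above requires
  % \usepgfplotslibrary{colorbrewer} in the document preamble (as done,
  % for instance, in the test document further down in this diff).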
  \begin{subfigure}[h]{1.0\linewidth}
    \begin{tabu} to \textwidth {@{}lc*5{X[c]}@{}}
      \Tstrut \Bstrut & \textsc{\,Adam\,} & D. 0.2 & D. 0.4 & G. &G.+D.\,0.2 & G.+D.\,0.4 \\
      \hline
      \multicolumn{7}{c}{Test Accuracy}\Bstrut \\
      \cline{2-7}
      mean \Tstrut & 0.9914 & 0.9923 & 0.9930 & 0.9937 & 0.9938 & 0.9943 \\
      max & 0.9926 & 0.9930 & 0.9934 & 0.9946 & 0.9955 & 0.9956 \\
      min & 0.9887 & 0.9909 & 0.9922 & 0.9929 & 0.9929 & 0.9934 \\
      \hline
      \multicolumn{7}{c}{Training Accuracy}\Bstrut \\
      \cline{2-7}
      mean \Tstrut & 0.9994 & 0.9991 & 0.9989 & 0.9967 & 0.9954 & 0.9926 \\
      max & 0.9996 & 0.9996 & 0.9992 & 0.9979 & 0.9971 & 0.9937 \\
      min & 0.9992 & 0.9990 & 0.9984 & 0.9947 & 0.9926 & 0.9908 \\
    \end{tabu}
    \caption{Mean, maximum and minimum accuracy after 48 epochs of training.}
    \label{fig:gen_dropout_b}
  \end{subfigure}
  \caption[Performance comparison of overfitting measures]{Accuracy for
    the net given in ... with Dropout (D.), data generation (G.), a
    combination of both, or neither (Default) implemented and trained
    with \textsc{Adam}. In each epoch either all 60.000 training samples
    were used or, in the case of data generation, 10.000 training steps
    were performed on batches of 60 generated data points each. For each
    configuration the model was trained 5 times; the accuracies averaged
    over these runs are given for each epoch in (a). Mean, maximum and
    minimum values of the accuracy on the test and training set are
    given in (b).}
  \label{fig:gen_dropout}
\end{figure}
%%% Local Variables:
%%% mode: latex
%%% TeX-master: "../main"
%%% End:
@ -1,41 +0,0 @@
\begin{figure}[h]
  \centering
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/mnist0.pdf}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/mnist1.pdf}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/mnist2.pdf}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/mnist3.pdf}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/mnist4.pdf}
  \end{subfigure}\\
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/mnist5.pdf}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/mnist6.pdf}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/mnist7.pdf}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/mnist8.pdf}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/mnist9.pdf}
  \end{subfigure}
  \caption[MNIST data set]{The MNIST data set contains 70.000 images of
    preprocessed handwritten digits. Of these images 60.000 are used as
    training images, while the rest are used to validate the models
    trained.}
  \label{fig:MNIST}
\end{figure}
%%% Local Variables:
%%% mode: latex
%%% TeX-master: "../main"
%%% End:
@ -1,301 +0,0 @@
\documentclass[a4paper, 12pt, draft=true]{article}
\usepackage{pgfplots}
\usepackage{filecontents}
\usepackage{subcaption}
\usepackage{adjustbox}
\usepackage{xcolor}
\usepackage{tabu}
\usepackage{showframe}
\usepackage{graphicx}
\usepackage{titlecaps}
\usetikzlibrary{calc, 3d}
\usepgfplotslibrary{colorbrewer}

\newcommand\Tstrut{\rule{0pt}{2.6ex}}       % = `top' strut
\newcommand\Bstrut{\rule[-0.9ex]{0pt}{0pt}} % = `bottom' strut

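% \Tstrut and \Bstrut are invisible rules used in the tables below to add
% a little vertical space above and below rows adjacent to \hline or
% \cline rules.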
\begin{document}
\pgfplotsset{
  compat=1.11,
  legend image code/.code={
    \draw[mark repeat=2,mark phase=2]
    plot coordinates {
      (0cm,0cm)
      (0.3cm,0cm) %% default is (0.3cm,0cm)
      (0.6cm,0cm) %% default is (0.6cm,0cm)
    };%
  }
}
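% Here the legend sample keeps pgfplots' default lengths (0.3cm/0.6cm);
% the override only pins them explicitly, as the inline comments note.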
\begin{figure}
  \begin{subfigure}[h]{\textwidth}
    \begin{tikzpicture}
      \begin{axis}[legend cell align={left},yticklabel style={/pgf/number format/fixed,
        /pgf/number format/precision=3},tick style = {draw = none}, width = \textwidth,
        height = 0.35\textwidth, legend style={at={(0.9825,0.0175)},anchor=south east},
        ylabel = {Test Accuracy}, cycle
        list/Dark2, every axis plot/.append style={line width
        =1.25pt}]
        % \addplot [dashed] table
        %   [x=epoch, y=accuracy, col sep=comma, mark = none]
        %   {Data/adam_datagen_full.log};
        \addplot table
          [x=epoch, y=val_accuracy, col sep=comma, mark = none]
          {Data/adam_1.mean};
        % \addplot [dashed] table
        %   [x=epoch, y=accuracy, col sep=comma, mark = none]
        %   {Data/adam_datagen_dropout_02_full.log};
        \addplot table
          [x=epoch, y=val_accuracy, col sep=comma, mark = none]
          {Data/adam_datagen_1.mean};
        \addplot table
          [x=epoch, y=val_accuracy, col sep=comma, mark = none]
          {Data/adam_datagen_dropout_02_1.mean};
        \addplot table
          [x=epoch, y=val_accuracy, col sep=comma, mark = none]
          {Data/adam_dropout_02_1.mean};

        % one legend entry per active \addplot above, in the same order
        \addlegendentry{\footnotesize{Default}}
        \addlegendentry{\footnotesize{G.}}
        \addlegendentry{\footnotesize{G. + D. 0.2}}
        \addlegendentry{\footnotesize{D. 0.2}}
      \end{axis}
    \end{tikzpicture}
    \caption{1 sample per class}
    \vspace{0.25cm}
  \end{subfigure}
\begin{subfigure}[h]{\textwidth}
\begin{tikzpicture}
\begin{axis}[legend cell align={left},yticklabel style={/pgf/number format/fixed,
/pgf/number format/precision=3},tick style = {draw = none}, width = \textwidth,
height = 0.35\textwidth, legend style={at={(0.9825,0.0175)},anchor=south east},
ylabel = {Test Accuracy}, cycle
list/Dark2, every axis plot/.append style={line width
=1.25pt}]
\addplot table
[x=epoch, y=val_accuracy, col sep=comma, mark = none]
{Data/adam_dropout_00_10.mean};
\addplot table
[x=epoch, y=val_accuracy, col sep=comma, mark = none]
{Data/adam_dropout_02_10.mean};
\addplot table
[x=epoch, y=val_accuracy, col sep=comma, mark = none]
{Data/adam_datagen_dropout_00_10.mean};
\addplot table
[x=epoch, y=val_accuracy, col sep=comma, mark = none]
{Data/adam_datagen_dropout_02_10.mean};

% one legend entry per \addplot, in plot order
\addlegendentry{\footnotesize{Default}}
\addlegendentry{\footnotesize{D. 0.2}}
\addlegendentry{\footnotesize{G.}}
\addlegendentry{\footnotesize{G. + D. 0.2}}
\end{axis}
\end{tikzpicture}
\caption{10 samples per class}
\end{subfigure}
\begin{subfigure}[h]{\textwidth}
\begin{tikzpicture}
\begin{axis}[legend cell align={left},yticklabel style={/pgf/number format/fixed,
/pgf/number format/precision=3},tick style = {draw = none}, width = 0.9875\textwidth,
height = 0.35\textwidth, legend style={at={(0.9825,0.0175)},anchor=south east},
xlabel = {epoch}, ylabel = {Test Accuracy}, cycle
list/Dark2, every axis plot/.append style={line width
=1.25pt}, ymin = {0.92}]
\addplot table
[x=epoch, y=val_accuracy, col sep=comma, mark = none]
{Data/adam_dropout_00_100.mean};
\addplot table
[x=epoch, y=val_accuracy, col sep=comma, mark = none]
{Data/adam_dropout_02_100.mean};
\addplot table
[x=epoch, y=val_accuracy, col sep=comma, mark = none]
{Data/adam_datagen_dropout_00_100.mean};
\addplot table
[x=epoch, y=val_accuracy, col sep=comma, mark = none]
{Data/adam_datagen_dropout_02_100.mean};

% one legend entry per \addplot, in plot order
\addlegendentry{\footnotesize{Default}}
\addlegendentry{\footnotesize{D. 0.2}}
\addlegendentry{\footnotesize{G.}}
\addlegendentry{\footnotesize{G. + D. 0.2}}
\end{axis}
\end{tikzpicture}
\caption{100 samples per class}
\vspace{.25cm}
\end{subfigure}
\caption{Accuracy for the net given in ... with Dropout (D.),
data generation (G.), a combination, or neither (Default) implemented and trained
with \textsc{Adam}. For each epoch the 60,000 training samples
were used, or, for data generation, 10,000 steps each using
batches of 60 generated data points. For each configuration the
model was trained 5 times; the average accuracies at each epoch
are given in (a). Mean, maximum and minimum values of the accuracy on
the test and training set are given in (b).}
\end{figure}
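The configurations compared above differ only in whether dropout layers and generated data are used. A hedged Python sketch of how a "D. 0.2" variant could look in Keras; the architecture is a placeholder, not the thesis's exact net:

from tensorflow import keras
from tensorflow.keras import layers

def build_net(dropout_rate=0.2):
    # Placeholder architecture; dropout_rate=0.0 gives the "Default" variant.
    model = keras.Sequential([
        layers.Conv2D(32, 3, activation="relu", input_shape=(28, 28, 1)),
        layers.MaxPooling2D(),
        layers.Flatten(),
        layers.Dropout(dropout_rate),  # "D. 0.2": drops 20% of activations
        layers.Dense(10, activation="softmax"),
    ])
    model.compile(optimizer="adam", loss="sparse_categorical_crossentropy",
                  metrics=["accuracy"])
    return model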
\begin{table}
\centering
\begin{tabu} to \textwidth {@{}l*4{X[c]}@{}}
\Tstrut \Bstrut & \textsc{Adam} & D. 0.2 & G. & G. + D. 0.2 \\
\hline
&
\multicolumn{4}{c}{\titlecap{test accuracy for 1 sample}}\Bstrut \\
\cline{2-5}
max \Tstrut & 0.5633 & 0.5312 & 0.6704 & 0.6604 \\
min & 0.3230 & 0.4224 & 0.4878 & 0.5175 \\
mean & 0.4570 & 0.4714 & 0.5862 & 0.6014 \\
var & 0.0040 & 0.0012 & 0.0036 & 0.0023 \\
\hline
&
\multicolumn{4}{c}{\titlecap{test accuracy for 10 samples}}\Bstrut \\
\cline{2-5}
max \Tstrut & 0.8585 & 0.9423 & 0.9310 & 0.9441 \\
min & 0.8148 & 0.9081 & 0.9018 & 0.9061 \\
mean & 0.8377 & 0.9270 & 0.9185 & 0.9232 \\
var & 2.7e-4 & 1.3e-4 & 6.0e-5 & 1.5e-4 \\
\hline
&
\multicolumn{4}{c}{\titlecap{test accuracy for 100 samples}}\Bstrut \\
\cline{2-5}
max & 0.9637 & 0.9796 & 0.9810 & 0.9805 \\
min & 0.9506 & 0.9719 & 0.9702 & 0.9727 \\
mean & 0.9582 & 0.9770 & 0.9769 & 0.9783 \\
var & 2.0e-5 & 1.0e-5 & 1.0e-5 & 0 \\
\hline
\end{tabu}
\caption{Values of the test accuracy of the model trained 10 times
on random training sets containing 1, 10 and 100 data points per
class.}
\end{table}

\begin{figure}[h]
\centering
\begin{subfigure}{0.19\textwidth}
\includegraphics[width=\textwidth]{Data/mnist0.pdf}
\caption{original\\image}
\end{subfigure}
\begin{subfigure}{0.19\textwidth}
\includegraphics[width=\textwidth]{Data/mnist_gen_zoom.pdf}
\caption{random\\zoom}
\end{subfigure}
\begin{subfigure}{0.19\textwidth}
\includegraphics[width=\textwidth]{Data/mnist_gen_shear.pdf}
\caption{random\\shear}
\end{subfigure}
\begin{subfigure}{0.19\textwidth}
\includegraphics[width=\textwidth]{Data/mnist_gen_rotation.pdf}
\caption{random\\rotation}
\end{subfigure}
\begin{subfigure}{0.19\textwidth}
\includegraphics[width=\textwidth]{Data/mnist_gen_shift.pdf}
\caption{random\\positional shift}
\end{subfigure}\\
\begin{subfigure}{0.19\textwidth}
\includegraphics[width=\textwidth]{Data/mnist5.pdf}
\end{subfigure}
\begin{subfigure}{0.19\textwidth}
\includegraphics[width=\textwidth]{Data/mnist6.pdf}
\end{subfigure}
\begin{subfigure}{0.19\textwidth}
\includegraphics[width=\textwidth]{Data/mnist7.pdf}
\end{subfigure}
\begin{subfigure}{0.19\textwidth}
\includegraphics[width=\textwidth]{Data/mnist8.pdf}
\end{subfigure}
\begin{subfigure}{0.19\textwidth}
\includegraphics[width=\textwidth]{Data/mnist9.pdf}
\end{subfigure}
\caption{The MNIST data set contains 70,000 images of preprocessed handwritten
digits. Of these, 60,000 are used as training images, while
the rest are used to validate the trained models.}
\end{figure}

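The four transformations shown in the first row above (zoom, shear, rotation, positional shift) are standard Keras augmentations; a Python sketch with assumed parameter values, not the thesis's exact settings:

from tensorflow.keras.preprocessing.image import ImageDataGenerator

# Parameter values below are illustrative assumptions.
datagen = ImageDataGenerator(
    rotation_range=15,      # random rotation, in degrees
    zoom_range=0.1,         # random zoom
    shear_range=0.2,        # random shear intensity
    width_shift_range=0.1,  # random horizontal shift (fraction of width)
    height_shift_range=0.1, # random vertical shift (fraction of height)
)
# Yields batches of randomly transformed images, e.g. batches of 60:
# for x_batch, y_batch in datagen.flow(x_train, y_train, batch_size=60): ...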
\begin{figure}
\begin{adjustbox}{width=\textwidth}
\begin{tikzpicture}
\begin{scope}[x = (0:1cm), y=(90:1cm), z=(15:-0.5cm)]
\node[canvas is xy plane at z=0, transform shape] at (0,0)
{\includegraphics[width=5cm]{Data/klammern_r.jpg}};
\node[canvas is xy plane at z=2, transform shape] at (0,-0.2)
{\includegraphics[width=5cm]{Data/klammern_g.jpg}};
\node[canvas is xy plane at z=4, transform shape] at (0,-0.4)
{\includegraphics[width=5cm]{Data/klammern_b.jpg}};
\node[canvas is xy plane at z=4, transform shape] at (-8,-0.2)
{\includegraphics[width=5.3cm]{Data/klammern_rgb.jpg}};
\end{scope}
\end{tikzpicture}
\end{adjustbox}
\caption{On the right, the red, green and blue channels of the picture
are displayed. To better visualize the color channels, the
black-and-white picture of each channel has been colored in the
respective color. Combining the layers results in the image on the
left.}
\end{figure}

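A small Python sketch of the channel decomposition illustrated above: an RGB image is split into three grayscale arrays and recombined by stacking. The file name is assumed, and reading a JPEG this way requires Pillow:

import numpy as np
import matplotlib.pyplot as plt

img = plt.imread("klammern_rgb.jpg")             # shape (height, width, 3)
r, g, b = img[..., 0], img[..., 1], img[..., 2]  # one grayscale array per channel

# Stacking the channels along the last axis recombines the original image.
recombined = np.stack([r, g, b], axis=-1)
assert np.array_equal(recombined, img)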
\begin{figure}
\centering
\begin{subfigure}{.45\linewidth}
\centering
\begin{tikzpicture}
\begin{axis}[enlargelimits=false, ymin=0, ymax = 1, width=\textwidth]
\addplot [domain=-5:5, samples=101,unbounded coords=jump]{1/(1+exp(-x))};
\end{axis}
\end{tikzpicture}
\end{subfigure}
\begin{subfigure}{.45\linewidth}
\centering
\begin{tikzpicture}
\begin{axis}[enlargelimits=false, width=\textwidth]
\addplot[domain=-5:5, samples=100]{tanh(x)};
\end{axis}
\end{tikzpicture}
\end{subfigure}
\begin{subfigure}{.45\linewidth}
\centering
\begin{tikzpicture}
\begin{axis}[enlargelimits=false, width=\textwidth,
ytick={0,2,4},yticklabels={\hphantom{4.}0,2,4}, ymin=-1]
\addplot[domain=-5:5, samples=100]{max(0,x)};
\end{axis}
\end{tikzpicture}
\end{subfigure}
\begin{subfigure}{.45\linewidth}
\centering
\begin{tikzpicture}
\begin{axis}[enlargelimits=false, width=\textwidth, ymin=-1,
ytick={0,2,4},yticklabels={$\hphantom{-5.}0$,2,4}]
\addplot[domain=-5:5, samples=100]{max(0,x)+ 0.1*min(0,x)};
\end{axis}
\end{tikzpicture}
\end{subfigure}
\end{figure}

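The four activation functions plotted above (sigmoid, tanh, ReLU and leaky ReLU), written out as a short Python/NumPy sketch for reference:

import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))  # logistic function, range (0, 1)

def tanh(x):
    return np.tanh(x)                # range (-1, 1)

def relu(x):
    return np.maximum(0.0, x)        # rectified linear unit

def leaky_relu(x, alpha=0.1):
    # slope alpha for x < 0, matching the plotted max(0,x) + 0.1*min(0,x)
    return np.maximum(0.0, x) + alpha * np.minimum(0.0, x)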
\begin{tikzpicture}
\begin{axis}[enlargelimits=false]
\addplot [domain=-5:5, samples=101,unbounded coords=jump]{1/(1+exp(-x))};
\addplot[domain=-5:5, samples=100]{tanh(x)};
\addplot[domain=-5:5, samples=100]{max(0,x)};
\end{axis}
\end{tikzpicture}

\begin{tikzpicture}
\begin{axis}[enlargelimits=false]
\addplot[domain=-2*pi:2*pi, samples=100]{cos(deg(x))};
\end{axis}
\end{tikzpicture}

\end{document}

%%% Local Variables:
%%% mode: latex
%%% TeX-master: t
%%% End:
@ -1,78 +0,0 @@
\pgfplotsset{
compat=1.11,
legend image code/.code={
\draw[mark repeat=2,mark phase=2]
plot coordinates {
(0cm,0cm)
(0.0cm,0cm) %% default is (0.3cm,0cm)
(0.0cm,0cm) %% default is (0.6cm,0cm)
};%
}
}
\begin{figure}
\begin{subfigure}[h]{\textwidth}
\begin{tikzpicture}
\begin{axis}[tick style = {draw = none}, width = \textwidth,
height = 0.6\textwidth, ymin = 0.92, legend style={at={(0.9825,0.75)},anchor=north east},
xlabel = {epoch}, ylabel = {Classification Accuracy}]
\addplot table
[x=epoch, y=val_accuracy, col sep=comma, mark = none]
{Plots/Data/adagrad.log};
\addplot table
[x=epoch, y=val_accuracy, col sep=comma, mark = none]
{Plots/Data/adadelta.log};
\addplot table
[x=epoch, y=val_accuracy, col sep=comma, mark = none]
{Plots/Data/adam.log};

% one legend entry per \addplot
\addlegendentry{\footnotesize{ADAGRAD}}
\addlegendentry{\footnotesize{ADADELTA}}
\addlegendentry{\footnotesize{ADAM}}
\end{axis}
\end{tikzpicture}
%\caption{Classification accuracy}
\vspace{.25cm}
\end{subfigure}
% \begin{subfigure}[b]{\textwidth}
% \begin{tikzpicture}
% \begin{axis}[tick style = {draw = none}, width = \textwidth,
% height = 0.6\textwidth, ymax = 0.5,
% xlabel = {epoch}, ylabel = {Error Measure\vphantom{y}},ytick ={0,0.1,0.2,0.3,0.4,0.45,0.5}, yticklabels =
% {0,0.1,0.2,0.3,0.4,\phantom{0.94},0.5}]
% \addplot table
% [x=epoch, y=val_loss, col sep=comma, mark = none] {Plots/Data/adagrad.log};
% \addplot table
% [x=epoch, y=val_loss, col sep=comma, mark = none] {Plots/Data/adadelta.log};
% \addplot table
% [x=epoch, y=val_loss, col sep=comma, mark = none] {Plots/Data/adam.log};

% \addlegendentry{\footnotesize{ADAGRAD}}
% \addlegendentry{\footnotesize{ADADELTA}}
% \addlegendentry{\footnotesize{ADAM}}
% \addlegendentry{SGD$_{0.01}$}

% \end{axis}
% \end{tikzpicture}
% \caption{Performance metrics during training}
% \vspace{.25cm}
% \end{subfigure}
\begin{subfigure}[b]{1.0\linewidth}
\begin{tabu} to \textwidth {@{} *3{X[c]}c*3{X[c]} @{}}
\multicolumn{3}{c}{Classification Accuracy}
&~&\multicolumn{3}{c}{Error Measure}
\\\cline{1-3}\cline{5-7}
ADAGRAD&ADADELTA&ADAM&&ADAGRAD&ADADELTA&ADAM
\\\cline{1-3}\cline{5-7}
1&1&1&&1&1&1
\end{tabu}
\caption{Performance metrics after 20 epochs}
\end{subfigure}
\caption[Performance comparison of training algorithms]{Classification accuracy on the test set and ... performance metrics of the network given in ..., trained
with different optimization algorithms.}
\label{fig:comp_alg}
\end{figure}
%%% Local Variables:
%%% mode: latex
%%% TeX-master: "../main"
%%% End:
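A Python sketch of how the three optimizers compared above could be run against the same net in Keras. The comma-separated .log files with an "epoch" and a "val_accuracy" column match what keras.callbacks.CSVLogger writes, but this reproduction, including the placeholder net, is an assumption:

from tensorflow import keras
from tensorflow.keras import layers

(x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()
x_train, x_test = x_train / 255.0, x_test / 255.0

for name in ["adagrad", "adadelta", "adam"]:
    model = keras.Sequential([
        layers.Flatten(input_shape=(28, 28)),  # placeholder net, not the thesis's
        layers.Dense(128, activation="relu"),
        layers.Dense(10, activation="softmax"),
    ])
    model.compile(optimizer=name, loss="sparse_categorical_crossentropy",
                  metrics=["accuracy"])
    model.fit(x_train, y_train, epochs=20,
              validation_data=(x_test, y_test),
              callbacks=[keras.callbacks.CSVLogger(name + ".log")])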
@ -1,64 +0,0 @@
"","x_i","y_i","x_d","y_d","x","y"
"1",0,0,0.0815633019993375,0.095134925029757,0.0815633019993375,0.095134925029757
"2",0.1,0.0998334166468282,-0.137539012603596,0.503920419784276,-0.137539012603596,0.503920419784276
"3",0.2,0.198669330795061,0.219868163218743,0.32022289024623,0.219868163218743,0.32022289024623
"4",0.3,0.29552020666134,0.378332723534869,0.474906286765401,0.378332723534869,0.474906286765401
"5",0.4,0.389418342308651,0.286034335293811,0.422891394375764,0.215056588291437,0.412478430748051
"6",0.5,0.479425538604203,-0.109871707385461,0.229661026779107,0.122574532557623,0.353221043330047
"7",0.6,0.564642473395035,0.91036951450573,0.56079130435097,0.451160317716352,0.452893574072324
"8",0.7,0.644217687237691,0.899001194675409,0.714355793051917,0.491731451724399,0.514477919331008
"9",0.8,0.717356090899523,0.733791390723896,0.694085383523086,0.488943974889845,0.530054084580656
"10",0.9,0.783326909627483,0.893642943873427,0.739792642916928,0.599785378272423,0.575149967162231
"11",1,0.841470984807897,0.895913227983752,0.658288213778898,0.650886140047209,0.577618711891772
"12",1.1,0.891207360061435,1.01252219752013,0.808981437684505,0.726263244907525,0.643161394030218
"13",1.2,0.932039085967226,1.30930912337975,1.04111824066026,0.872590842152803,0.745714536528734
"14",1.3,0.963558185417193,1.0448292335495,0.741250429230841,0.850147062957694,0.687171673021914
"15",1.4,0.98544972998846,1.57369086195552,1.17277927321094,1.06520673597544,0.847936751231165
"16",1.5,0.997494986604054,1.61427415976939,1.3908361301708,1.15616745244604,0.969474391592075
"17",1.6,0.999573603041505,1.34409615749122,0.976992098566069,1.13543598207093,0.889434319996364
"18",1.7,0.991664810452469,1.79278028030419,1.02939764179765,1.33272772191879,0.935067381106346
"19",1.8,0.973847630878195,1.50721559744085,0.903076361857071,1.30862923824728,0.91665506605512
"20",1.9,0.946300087687414,1.835014641556,0.830477479204284,1.45242210409837,0.889715842048808
"21",2,0.909297426825682,1.98589997236352,0.887302138185342,1.56569111721857,0.901843632635883
"22",2.1,0.863209366648874,2.31436634488224,0.890096618924313,1.73810390755555,0.899632162941341
"23",2.2,0.80849640381959,2.14663445612581,0.697012453130415,1.77071083163663,0.831732978616874
"24",2.3,0.74570521217672,2.17162372560288,0.614243640399509,1.84774268936257,0.787400621584077
"25",2.4,0.675463180551151,2.2488591417345,0.447664288915269,1.93366609303299,0.707449056213168
"26",2.5,0.598472144103957,2.56271588872389,0.553368843490625,2.08922735802261,0.702402440783529
"27",2.6,0.515501371821464,2.60986205081511,0.503762006272682,2.17548673152621,0.657831176057599
"28",2.7,0.42737988023383,2.47840649766003,0.215060732402894,2.20251747034638,0.533903400086802
"29",2.8,0.334988150155905,2.99861119922542,0.28503285049582,2.43015164462239,0.512492561673074
"30",2.9,0.239249329213982,3.09513467852082,0.245355736487949,2.54679545455398,0.461447717313721
"31",3,0.141120008059867,2.86247369846558,0.0960140633436418,2.55274767368554,0.371740588261606
"32",3.1,0.0415806624332905,2.79458017090243,-0.187923650913249,2.59422388058738,0.234694070506915
"33",3.2,-0.0583741434275801,3.6498183243501,-0.186738431858275,2.9216851043241,0.173308072295566
"34",3.3,-0.157745694143249,3.19424275971809,-0.221908035274934,2.86681135711315,0.101325637659584
"35",3.4,-0.255541102026832,3.53166785156005,-0.295496842654793,3.03827050777863,0.0191967841533109
"36",3.5,-0.35078322768962,3.53250700922714,-0.364585027403596,3.12709094619305,-0.0558446366563474
"37",3.6,-0.442520443294852,3.52114271616751,-0.363845774016092,3.18702722489489,-0.10585071711408
"38",3.7,-0.529836140908493,3.72033580551176,-0.386489608468821,3.31200591645168,-0.158195730190865
"39",3.8,-0.611857890942719,4.0803717995796,-0.64779795182054,3.49862620703954,-0.284999326812438
"40",3.9,-0.687766159183974,3.88351729419721,-0.604406622894426,3.51908925124143,-0.324791870057922
"41",4,-0.756802495307928,3.9941257036697,-0.8061112437715,3.62222513609486,-0.438560071688316
"42",4.1,-0.818277111064411,3.81674488816054,-0.548538951165239,3.63032709398802,-0.41285438330036
"43",4.2,-0.871575772413588,4.47703348424544,-0.998992385231986,3.88581748102334,-0.592305016590357
"44",4.3,-0.916165936749455,4.46179199544059,-0.969288921090897,3.96444243944485,-0.643076376622242
"45",4.4,-0.951602073889516,4.15184730382548,-1.11987501275525,3.93838897981045,-0.743258835859858
"46",4.5,-0.977530117665097,4.64522916494355,-0.772872365801468,4.15504805602606,-0.691414328153313
"47",4.6,-0.993691003633465,4.68087925098283,-0.650422764094352,4.24176417425486,-0.675107584174976
"48",4.7,-0.999923257564101,5.00475403211142,-0.922605880059771,4.41432228408005,-0.770625346502085
"49",4.8,-0.996164608835841,4.71428836112322,-1.14280193223997,4.41279031790692,-0.861010494025717
"50",4.9,-0.982452612624332,5.02115518218406,-0.9819618243158,4.57449352886454,-0.843786948015608
"51",5,-0.958924274663138,4.92057344952522,-0.872931430146499,4.61418118503201,-0.836318916150308
"52",5.1,-0.925814682327732,5.37277893732831,-0.91444926304078,4.81555148166217,-0.864686555983682
"53",5.2,-0.883454655720153,5.19524942845082,-1.41169784739596,4.84152902094499,-1.03768305406186
"54",5.3,-0.832267442223901,5.4432222181271,-0.726481337519931,4.98565483155961,-0.856094353978009
"55",5.4,-0.772764487555987,4.98285013865449,-0.692803346852181,4.90897053115903,-0.838425020062396
"56",5.5,-0.705540325570392,5.33298025214155,-0.343702005257262,5.0497327607228,-0.711573964373115
"57",5.6,-0.631266637872321,5.49935694796791,-0.828968673188174,5.15036520204232,-0.816467931201244
"58",5.7,-0.550685542597638,5.69204187550805,-0.481580461165225,5.26232964126231,-0.689500817105975
"59",5.8,-0.464602179413757,5.84391772412888,-0.20453899468884,5.38069867877875,-0.564365367144995
"60",5.9,-0.373876664830236,5.48166674139637,-0.597796931577294,5.3357436834558,-0.649913835818738
"61",6,-0.279415498198926,5.77474590863769,-0.280234463056808,5.46956415981143,-0.524503219480344
"62",6.1,-0.182162504272095,6.36764321572312,-0.0996286988755344,5.7169871104113,-0.422854073705143
"63",6.2,-0.0830894028174964,6.46175133910451,-0.025702847911482,5.83540227044819,-0.355719019286555
@ -1,45 +0,0 @@
\begin{figure}
\centering
\begin{subfigure}[b]{0.49\textwidth}
\centering
\begin{adjustbox}{width=\textwidth, height=0.25\textheight}
\begin{tikzpicture}
\begin{axis}[tick style = {draw = none}, xticklabel = \empty,
yticklabel=\empty]
\addplot [mark options={scale = 0.7}, mark = o] table
[x=x_d,y=y_d, col sep = comma] {Plots/Data/sin_conv.csv};
\addplot [red, mark=x] table [x=x_i, y=y_i, col sep=comma] {Plots/Data/sin_conv.csv};
\end{axis}
\end{tikzpicture}
\end{adjustbox}
\caption{True position (\textcolor{red}{red}), distorted position data (black)}
\end{subfigure}
\begin{subfigure}[b]{0.49\textwidth}
\centering
\begin{adjustbox}{width=\textwidth, height=0.25\textheight}
\begin{tikzpicture}
\begin{axis}[tick style = {draw = none}, xticklabel = \empty,
yticklabel=\empty]
\addplot [mark options={scale = 0.7}, mark = o] table [x=x,y=y, col
sep = comma] {Plots/Data/sin_conv.csv};
\addplot [red, mark=x] table [x=x_i, y=y_i, col sep=comma] {Plots/Data/sin_conv.csv};
\end{axis}
\end{tikzpicture}
\end{adjustbox}
\caption{True position (\textcolor{red}{red}), filtered position data (black)}
\end{subfigure}
\caption[Signal smoothing using convolution]{Example for noise reduction using convolution with simulated
positional data. The filter
$g(i)=\left(\nicefrac{1}{3},\nicefrac{1}{4},\nicefrac{1}{5},\nicefrac{1}{6},\nicefrac{1}{20}\right)_{(i-1)}$
is chosen and applied to the $x$ and $y$ coordinate
data separately. The convolution of both signals with $g$
improves the MSE of the positions from 0.196 to 0.170 and
visibly smoothes the data.
}
\label{fig:sin_conv}
\end{figure}

%%% Local Variables:
%%% mode: latex
%%% TeX-master: "../main"
%%% End:
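A Python sketch of the smoothing step from the caption: the filter g = (1/3, 1/4, 1/5, 1/6, 1/20) is applied to the x and y coordinates separately by discrete convolution. The column names match the data file above; the alignment mode and the exact MSE definition are assumptions:

import numpy as np
import pandas as pd

df = pd.read_csv("sin_conv.csv")           # columns x_i, y_i (true), x_d, y_d (distorted)
g = np.array([1/3, 1/4, 1/5, 1/6, 1/20])   # the filter from the caption

# Convolve each coordinate signal with g; mode="same" keeps the signal
# length, though other alignment conventions are possible.
x_f = np.convolve(df["x_d"], g, mode="same")
y_f = np.convolve(df["y_d"], g, mode="same")

# One plausible MSE of the positions; the caption reports 0.196 -> 0.170.
mse_before = np.mean((df["x_d"] - df["x_i"])**2 + (df["y_d"] - df["y_i"])**2)
mse_after = np.mean((x_f - df["x_i"])**2 + (y_f - df["y_i"])**2)
print(mse_before, mse_after)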
@ -1,5 +0,0 @@

%%% Local Variables:
%%% mode: latex
%%% TeX-master: "../main"
%%% End:
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -0,0 +1,6 @@
\boolfalse {citerequest}\boolfalse {citetracker}\boolfalse {pagetracker}\boolfalse {backtracker}\relax
\babel@toc {english}{}
\defcounter {refsection}{0}\relax
\contentsline {table}{\numberline {4.1}{\ignorespaces Values of Test Accuracies for Models Trained on Subsets of MNIST Handwritten Digits}}{41}{table.4.1}%
\defcounter {refsection}{0}\relax
\contentsline {table}{\numberline {4.2}{\ignorespaces Values of Test Accuracies for Models Trained on Subsets of Fashion MNIST}}{41}{table.4.2}%
@ -0,0 +1,25 @@
\BOOKMARK [1][-]{section.1}{Introduction}{}% 1
\BOOKMARK [1][-]{section.2}{Introduction to Neural Networks}{}% 2
\BOOKMARK [2][-]{subsection.2.1}{Nonlinearity of Neural Networks}{section.2}% 3
\BOOKMARK [2][-]{subsection.2.2}{Training Neural Networks}{section.2}% 4
\BOOKMARK [3][-]{subsubsection.2.2.1}{Nonlinearity in the Last Layer}{subsection.2.2}% 5
\BOOKMARK [3][-]{subsubsection.2.2.2}{Error Measurement}{subsection.2.2}% 6
\BOOKMARK [3][-]{subsubsection.2.2.3}{Gradient Descent Algorithm}{subsection.2.2}% 7
\BOOKMARK [1][-]{section.3}{Shallow Neural Networks}{}% 8
\BOOKMARK [2][-]{subsection.3.1}{Convergence Behavior of One-Dimensional Randomized Shallow Neural Networks}{section.3}% 9
\BOOKMARK [2][-]{subsection.3.2}{Simulations}{section.3}% 10
\BOOKMARK [1][-]{section.4}{Application of Neural Networks to Higher Complexity Problems}{}% 11
\BOOKMARK [2][-]{subsection.4.1}{Convolution}{section.4}% 12
\BOOKMARK [2][-]{subsection.4.2}{Convolutional Neural Networks}{section.4}% 13
\BOOKMARK [2][-]{subsection.4.3}{Stochastic Training Algorithms}{section.4}% 14
\BOOKMARK [2][-]{subsection.4.4}{Modified Stochastic Gradient Descent}{section.4}% 15
\BOOKMARK [2][-]{subsection.4.5}{Combating Overfitting}{section.4}% 16
\BOOKMARK [3][-]{subsubsection.4.5.1}{Dropout}{subsection.4.5}% 17
\BOOKMARK [3][-]{subsubsection.4.5.2}{Manipulation of Input Data}{subsection.4.5}% 18
\BOOKMARK [3][-]{subsubsection.4.5.3}{Comparisons}{subsection.4.5}% 19
\BOOKMARK [3][-]{subsubsection.4.5.4}{Effectiveness for Small Training Sets}{subsection.4.5}% 20
\BOOKMARK [1][-]{section.5}{Summary and Outlook}{}% 21
\BOOKMARK [1][-]{section*.27}{Appendices}{}% 22
\BOOKMARK [1][-]{Appendix.1.A}{Notes on Proofs of Lemmata in Section 3.1}{}% 23
\BOOKMARK [1][-]{Appendix.1.B}{Implementations}{}% 24
\BOOKMARK [1][-]{Appendix.1.C}{Additional Comparisons}{}% 25