diff --git a/.gitignore b/.gitignore
index 1e8f2a4..9fc4169 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,12 +4,26 @@
 *.toc
 *.gz
 *.xml
+*.el
+*.bbl
+*.tdo
+*.blg
 TeX/auto/*
 main-blx.bib
 
 # emacs autosaves
 *.tex~
 *#*.tex*
+*~
 
 # no pdfs
 *.pdf
+
+# no images
+*image*
+*.png
+*.jpg
+*.xcf
+
+# no slurm logs
+*slurm*.out
diff --git a/Cluster/mnist.py b/Cluster/mnist.py
new file mode 100644
index 0000000..d255c1b
--- /dev/null
+++ b/Cluster/mnist.py
@@ -0,0 +1,31 @@
+import tensorflow as tf
+from tensorflow.keras.callbacks import CSVLogger
+
+# Load MNIST, add a channel axis for the conv layers and scale to [0, 1].
+mnist = tf.keras.datasets.mnist
+(x_train, y_train), (x_test, y_test) = mnist.load_data()
+x_train = x_train.reshape(x_train.shape[0], 28, 28, 1)
+x_test = x_test.reshape(x_test.shape[0], 28, 28, 1)
+x_train, x_test = x_train / 255.0, x_test / 255.0
+
+# One-hot encode the labels to match the categorical cross-entropy loss.
+y_train = tf.keras.utils.to_categorical(y_train)
+y_test = tf.keras.utils.to_categorical(y_test)
+
+# Two convolution/pooling blocks followed by a dense classifier.
+model = tf.keras.models.Sequential()
+model.add(tf.keras.layers.Conv2D(24, kernel_size=5, padding='same', activation='relu',
+                                 input_shape=(28, 28, 1)))
+model.add(tf.keras.layers.MaxPool2D())
+model.add(tf.keras.layers.Conv2D(64, kernel_size=5, padding='same', activation='relu'))
+model.add(tf.keras.layers.MaxPool2D(padding='same'))
+model.add(tf.keras.layers.Flatten())
+model.add(tf.keras.layers.Dense(256, activation='relu'))
+model.add(tf.keras.layers.Dense(10, activation='softmax'))
+model.compile(optimizer=tf.keras.optimizers.SGD(),
+              loss="categorical_crossentropy", metrics=["accuracy"])
+
+# Train with mini-batches of 32 and log per-epoch metrics to CSV.
+csv_logger = CSVLogger('SGD_01_b32.log')
+history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
+                    batch_size=32, epochs=20, callbacks=[csv_logger])
diff --git a/Cluster/test.py b/Cluster/test.py
new file mode 100644
index 0000000..fec34f3
--- /dev/null
+++ b/Cluster/test.py
@@ -0,0 +1,22 @@
+import tensorflow as tf
+mnist = tf.keras.datasets.mnist
+
+(x_train, y_train), (x_test, y_test) = mnist.load_data()
+x_train, x_test = x_train / 255.0, x_test / 255.0
+
+# Small fully connected baseline; the final layer returns logits.
+model = tf.keras.models.Sequential([
+  tf.keras.layers.Flatten(input_shape=(28, 28)),
+  tf.keras.layers.Dense(128, activation='relu'),
+  tf.keras.layers.Dropout(0.2),
+  tf.keras.layers.Dense(10)
+])
+
+# from_logits=True because the model has no softmax output layer.
+loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
+
+model.compile(optimizer='adam',
+              loss=loss_fn,
+              metrics=['accuracy'])
+
+model.fit(x_train, y_train, epochs=10)
diff --git a/Cluster/test/mnist.py b/Cluster/test/mnist.py
new file mode 100644
index 0000000..fadfb62
--- /dev/null
+++ b/Cluster/test/mnist.py
@@ -0,0 +1,30 @@
+import tensorflow as tf
+from tensorflow.keras.callbacks import CSVLogger
+
+# Same CNN as Cluster/mnist.py, but trained with full-batch gradient descent.
+mnist = tf.keras.datasets.mnist
+(x_train, y_train), (x_test, y_test) = mnist.load_data()
+x_train = x_train.reshape(x_train.shape[0], 28, 28, 1)
+x_test = x_test.reshape(x_test.shape[0], 28, 28, 1)
+x_train, x_test = x_train / 255.0, x_test / 255.0
+
+y_train = tf.keras.utils.to_categorical(y_train)
+y_test = tf.keras.utils.to_categorical(y_test)
+
+model = tf.keras.models.Sequential()
+model.add(tf.keras.layers.Conv2D(24, kernel_size=5, padding='same', activation='relu',
+                                 input_shape=(28, 28, 1)))
+model.add(tf.keras.layers.MaxPool2D())
+model.add(tf.keras.layers.Conv2D(64, kernel_size=5, padding='same', activation='relu'))
+model.add(tf.keras.layers.MaxPool2D(padding='same'))
+model.add(tf.keras.layers.Flatten())
+model.add(tf.keras.layers.Dense(256, activation='relu'))
+model.add(tf.keras.layers.Dense(10, activation='softmax'))
+model.compile(optimizer=tf.keras.optimizers.SGD(learning_rate=0.1),
+              loss="categorical_crossentropy", metrics=["accuracy"])
+
+# batch_size = x_train.shape[0] uses the whole training set per update,
+# i.e. one full gradient-descent step per epoch.
+csv_logger = CSVLogger('GD_1.log')
+history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
+                    batch_size=x_train.shape[0], epochs=20, callbacks=[csv_logger])
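The two mnist.py scripts above differ only in batching and step size: Cluster/mnist.py trains with mini-batch SGD (batch size 32 at Keras' default learning rate of 0.01), while Cluster/test/mnist.py sets batch_size to the full training set, i.e. plain gradient descent with learning rate 0.1. A quick sketch of what that means for the number of parameter updates (60000 is the size of the MNIST training set):

# Sketch: parameter updates per epoch for the two batch sizes above.
import math

n_samples = 60000
for batch_size in (32, n_samples):
    print(batch_size, "->", math.ceil(n_samples / batch_size), "updates per epoch")

# 32 -> 1875 updates per epoch
# 60000 -> 1 update per epoch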
diff --git a/Cluster/test/test.py b/Cluster/test/test.py
new file mode 100644
index 0000000..fec34f3
--- /dev/null
+++ b/Cluster/test/test.py
@@ -0,0 +1,22 @@
+import tensorflow as tf
+mnist = tf.keras.datasets.mnist
+
+(x_train, y_train), (x_test, y_test) = mnist.load_data()
+x_train, x_test = x_train / 255.0, x_test / 255.0
+
+# Small fully connected baseline; the final layer returns logits.
+model = tf.keras.models.Sequential([
+  tf.keras.layers.Flatten(input_shape=(28, 28)),
+  tf.keras.layers.Dense(128, activation='relu'),
+  tf.keras.layers.Dropout(0.2),
+  tf.keras.layers.Dense(10)
+])
+
+# from_logits=True because the model has no softmax output layer.
+loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
+
+model.compile(optimizer='adam',
+              loss=loss_fn,
+              metrics=['accuracy'])
+
+model.fit(x_train, y_train, epochs=10)
diff --git a/Cluster/test/tf_test.slurm b/Cluster/test/tf_test.slurm
new file mode 100644
index 0000000..a581be8
--- /dev/null
+++ b/Cluster/test/tf_test.slurm
@@ -0,0 +1,10 @@
+#!/bin/bash -l
+
+#SBATCH --job-name="Keras MNIST"
+#SBATCH --ntasks=1
+#SBATCH --ntasks-per-core=1
+#SBATCH --time=0-00:10:00
+#SBATCH --nodelist=node18
+
+srun python3 mnist.py
+
diff --git a/Cluster/tf_test.slurm b/Cluster/tf_test.slurm
new file mode 100644
index 0000000..a581be8
--- /dev/null
+++ b/Cluster/tf_test.slurm
@@ -0,0 +1,10 @@
+#!/bin/bash -l
+
+#SBATCH --job-name="Keras MNIST"
+#SBATCH --ntasks=1
+#SBATCH --ntasks-per-core=1
+#SBATCH --time=0-00:10:00
+#SBATCH --nodelist=node18
+
+srun python3 mnist.py
+
diff --git a/R/convolution.R b/R/convolution.R
new file mode 100644
index 0000000..911bbf3
--- /dev/null
+++ b/R/convolution.R
@@ -0,0 +1,65 @@
+library(magick)  # image_read(), image_convolve(), image_write()
+library(png)     # readPNG(), writePNG()
+
+# Noisy samples of sin on [0, 2*pi].
+x = seq(0, 2*pi, 0.1)
+y = sin(x)
+plot(x, y)
+x_i = x
+y_i = y
+x = x + rnorm(length(x), 0, 0.15)
+y = y + rnorm(length(y), 0, 0.15)
+plot(x, y)
+x_d = x
+y_d = y
+
+# Smooth with a causal convolution kernel (weights sum to one).
+w = c(1/20, 1/6, 1/5, 1/4, 1/3)
+for(i in 5:length(x_d)){
+  x[i] = sum(x_d[(i-4):i] * w)
+}
+for(i in 5:length(y_d)){
+  y[i] = sum(y_d[(i-4):i] * w)
+}
+plot(x[-(1:4)], y[-(1:4)])
+
+# 3x3 Sobel kernel for vertical edges.
+image = image_read(path = "~/Masterarbeit/TeX/Plots/Data/klammern60_80.jpg")
+kernel <- matrix(0, ncol = 3, nrow = 3)
+kernel[c(1,3),1] = -1
+kernel[c(1,3),3] = 1
+kernel[2,1] = -2
+kernel[2,3] = 2
+kernel
+
+# 5x5 integer approximation of a Gaussian blur kernel.
+kernel <- matrix(data = c(1,4,7,4,1,4,16,26,16,4,7,26,41,26,7,4,16,26,16,4,1,4,7,4,1),
+                 ncol = 5, nrow = 5)
+kernel = kernel/273
+
+# n x n Gaussian kernel with standard deviation s, centred on the middle cell.
+n = 11
+s = 4
+m = (n+1)/2
+kernel = matrix(0, nrow = n, ncol = n)
+for(i in 1:n){
+  for(j in 1:n){
+    kernel[i,j] = exp(-((i-m)^2 + (j-m)^2)/(2*s^2))
+  }
+}
+kernel = kernel/sum(kernel)
+
+image_con <- image_convolve(image, kernel)
+image_con
+image_write(image_con, "~/Masterarbeit/TeX/Plots/Data/image_conv11.png", format="png")
+img <- readPNG("~/Masterarbeit/TeX/Plots/Data/image_conv11.png")
+
+# 4x4 max pooling of the convolved image.
+out <- matrix(0, ncol = 15, nrow = 20)
+for(j in 1:15){
+  for(i in 1:20){
+    out[i,j] = max(img[((i-1)*4 +1):((i-1)*4+4), ((j-1)*4 +1):((j-1)*4+4)])
+  }
+}
+
+writePNG(out, target = "~/Masterarbeit/TeX/Plots/Data/image_conv12.png")
diff --git a/TF/test.py b/TF/test.py
new file mode 100644
index 0000000..76342e7
--- /dev/null
+++ b/TF/test.py
@@ -0,0 +1,22 @@
+import tensorflow as tf
+mnist = tf.keras.datasets.mnist
+
+(x_train, y_train), (x_test, y_test) = mnist.load_data()
+x_train, x_test = x_train / 255.0, x_test / 255.0
+
+# Small fully connected baseline; the final layer returns logits.
+model = tf.keras.models.Sequential([
+  tf.keras.layers.Flatten(input_shape=(28, 28)),
+  tf.keras.layers.Dense(128, activation='relu'),
+  tf.keras.layers.Dropout(0.2),
+  tf.keras.layers.Dense(10)
+])
+
+# from_logits=True because the model has no softmax output layer.
+loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
+
+model.compile(optimizer='adam',
+              loss=loss_fn,
+              metrics=['accuracy'])
+
+model.fit(x_train, y_train, epochs=5)
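Both tf_test.slurm scripts run the mnist.py next to them via srun once the job is submitted with sbatch. The CSVLogger callback in those scripts writes one row per epoch (epoch, accuracy, loss, val_accuracy, val_loss), so the resulting log can be summarised after the run; a minimal sketch, assuming pandas is available:

# Sketch: summarise a Keras CSVLogger file (assumes pandas is installed).
import pandas as pd

log = pd.read_csv("SGD_01_b32.log")
best = log.loc[log["val_accuracy"].idxmax()]
print("best epoch:", int(best["epoch"]),
      "val_accuracy:", round(best["val_accuracy"], 4))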
diff --git a/TeX/Plots/Data/data_sin_d_t.csv b/TeX/Plots/Data/data_sin_d_t.csv
new file mode 100755
index 0000000..3320476
--- /dev/null
+++ b/TeX/Plots/Data/data_sin_d_t.csv
@@ -0,0 +1,17 @@
+x,y
+-3.141592653589793,0.0802212608585366
+-2.722713633111154,-0.3759376368887911
+-2.303834612632515,-1.3264180339054117
+-1.8849555921538759,-0.8971334213504949
+-1.4660765716752369,-0.7724344034354425
+-1.0471975511965979,-0.9501497164520739
+-0.6283185307179586,-0.6224628757084738
+-0.2094395102393194,-0.35622668982623207
+0.2094395102393194,-0.18377660088356823
+0.6283185307179586,0.7836770998126841
+1.0471975511965974,0.5874762732054489
+1.4660765716752362,1.0696991264956026
+1.8849555921538759,1.1297065441952743
+2.3038346126325155,0.7587275382323738
+2.7227136331111543,-0.030547103790458163
+3.1415926535897922,0.044327111895927106
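data_sin_d_t.csv above holds 16 equally spaced points on [-pi, pi] whose y values look like sin(x) plus noise. A sketch of how such a file could be regenerated; this is not the original generator, and the noise scale 0.25 is a guess:

# Sketch: noisy sine samples in the shape of data_sin_d_t.csv.
import csv
import math
import random

with open("data_sin_d_t.csv", "w", newline="") as f:
    w = csv.writer(f)
    w.writerow(["x", "y"])
    for k in range(16):
        x = -math.pi + k * 2 * math.pi / 15
        w.writerow([x, math.sin(x) + random.gauss(0, 0.25)])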
diff --git a/TeX/Plots/Data/matlab_0.csv b/TeX/Plots/Data/matlab_0.csv
new file mode 100755
index 0000000..13a2f91
--- /dev/null
+++ b/TeX/Plots/Data/matlab_0.csv
@@ -0,0 +1,1002 @@
+x,y
[1001 rows of x,y samples, x from -5 to 5 in steps of 0.01]
diff --git a/TeX/Plots/Data/matlab_1.csv b/TeX/Plots/Data/matlab_1.csv
new file mode 100755
index 0000000..790981a
--- /dev/null
+++ b/TeX/Plots/Data/matlab_1.csv
@@ -0,0 +1,1002 @@
+x,y
[1001 rows of x,y samples, x from -5 to 5 in steps of 0.01]
diff --git a/TeX/Plots/Data/matlab_3.csv b/TeX/Plots/Data/matlab_3.csv
new file mode 100755
index 0000000..aec36fc
--- /dev/null
+++ b/TeX/Plots/Data/matlab_3.csv
@@ -0,0 +1,1002 @@
+x,y
[1001 rows of x,y samples, x from -5 to 5 in steps of 0.01]
diff --git a/TeX/Plots/Data/matlab_sin_d_01.csv b/TeX/Plots/Data/matlab_sin_d_01.csv
new file mode 100755
index 0000000..3cecc56
--- /dev/null
+++ b/TeX/Plots/Data/matlab_sin_d_01.csv
@@ -0,0 +1,1002 @@
+x,y
[x,y samples from x = -5 in steps of 0.01; this excerpt of the diff ends mid-file at x = 4.34]
diff --git a/TeX/Plots/Data/matlab_sin_d_1.csv b/TeX/Plots/Data/matlab_sin_d_1.csv
new file mode 100755
index 0000000..2758ef5
--- /dev/null
+++ b/TeX/Plots/Data/matlab_sin_d_1.csv
@@ -0,0 +1,1002 @@
+x,y
+-5,-0.051532
+-4.99,-0.047965
[... 1001 x,y rows sampling x from -5 to 5 in steps of 0.01; remaining numeric values omitted]
diff --git a/TeX/Plots/Data/matlab_sin_d_3.csv b/TeX/Plots/Data/matlab_sin_d_3.csv
new file mode 100755
index 0000000..9952e15
--- /dev/null
+++ b/TeX/Plots/Data/matlab_sin_d_3.csv
@@ -0,0 +1,1002 @@
+x,y
+-5,-0.62401
+-4.99,-0.6213
[... 1001 x,y rows sampling x from -5 to 5 in steps of 0.01; remaining numeric values omitted]
diff --git a/TeX/Plots/Data/overfit.csv b/TeX/Plots/Data/overfit.csv
new file mode 100644
index 0000000..69c7579
--- /dev/null
+++ b/TeX/Plots/Data/overfit.csv
@@ -0,0 +1,1202 @@
+"",x,y,x_n,y_n,s_n
+"1",0,0.300748410464688,-0.1,0,0.587785252292473
+"2",0.0159154943091895,-0.245009267940694,-0.099,0,0.582690479668576
[... 1201 data rows; the x,y columns are populated only for the first 63 rows, while x_n advances from -0.1 in steps of 0.001 alongside the y_n and s_n columns; remaining numeric values omitted]
+"469",,,0.368,-0.623359467771467,-0.737513117358174 +"470",,,0.369,-0.631965495349863,-0.73325534622256 +"471",,,0.37,-0.64057152292827,-0.728968627421411 +"472",,,0.371,-0.649177550506659,-0.724653130187047 +"473",,,0.372,-0.657783578085065,-0.720309024887907 +"474",,,0.373,-0.666389605663476,-0.715936483021831 +"475",,,0.374,-0.674995633241873,-0.711535677209285 +"476",,,0.375,-0.683601660820269,-0.707106781186547 +"477",,,0.376,-0.692207688398663,-0.702649969798849 +"478",,,0.377,-0.700813715977064,-0.698165418993473 +"479",,,0.378,-0.709419743555474,-0.693653305812805 +"480",,,0.379,-0.718025771133872,-0.689113808387348 +"481",,,0.38,-0.726631798712269,-0.684547105928689 +"482",,,0.381,-0.735237826290669,-0.679953378722419 +"483",,,0.382,-0.744084744965122,-0.675332808121024 +"484",,,0.383,-0.761252264407682,-0.67068557653672 +"485",,,0.384,-0.778419783850228,-0.666011867434252 +"486",,,0.385,-0.795587303292774,-0.661311865323652 +"487",,,0.386,-0.812754822735321,-0.656585755752956 +"488",,,0.387,-0.829922342177867,-0.651833725300879 +"489",,,0.388,-0.847089861620426,-0.647055961569444 +"490",,,0.389,-0.864257381062977,-0.642252653176584 +"491",,,0.39,-0.881424900505526,-0.63742398974869 +"492",,,0.391,-0.898592419948074,-0.632570161913124 +"493",,,0.392,-0.915759939390614,-0.6276913612907 +"494",,,0.393,-0.932927458833173,-0.622787780488112 +"495",,,0.394,-0.95009497827572,-0.617859613090334 +"496",,,0.395,-0.967262497718263,-0.612907053652976 +"497",,,0.396,-0.984430017160818,-0.607930297694605 +"498",,,0.397,-1.00159753660336,-0.602929541689024 +"499",,,0.398,-1.01777202908668,-0.597904983057519 +"500",,,0.399,-1.02612379084848,-0.592856820161059 +"501",,,0.4,-1.03447555261028,-0.587785252292473 +"502",,,0.401,-1.04282731437208,-0.582690479668576 +"503",,,0.402,-1.05117907613387,-0.577572703422268 +"504",,,0.403,-1.05953083789567,-0.572432125594591 +"505",,,0.404,-1.06788259965748,-0.567268949126756 +"506",,,0.405,-1.07623436141928,-0.56208337785213 +"507",,,0.406,-1.08458612318106,-0.556875616488188 +"508",,,0.407,-1.09293788494287,-0.55164587062843 +"509",,,0.408,-1.10128964670466,-0.546394346734269 +"510",,,0.409,-1.10964140846647,-0.541121252126876 +"511",,,0.41,-1.11799317022826,-0.535826794978997 +"512",,,0.411,-1.12634493199005,-0.530511184306734 +"513",,,0.412,-1.13469669375186,-0.525174629961295 +"514",,,0.413,-1.14304845551365,-0.519817342620709 +"515",,,0.414,-1.14303697247289,-0.514439533781506 +"516",,,0.415,-1.10896757625219,-0.509041415750371 +"517",,,0.416,-1.07489818003148,-0.50362320163576 +"518",,,0.417,-1.04082878381077,-0.498185105339491 +"519",,,0.418,-1.00675938759007,-0.492727341548292 +"520",,,0.419,-0.972689991369371,-0.487250125725332 +"521",,,0.42,-0.938620595148659,-0.481753674101715 +"522",,,0.421,-0.904551198927952,-0.476238203667939 +"523",,,0.422,-0.87048180270725,-0.470703932165332 +"524",,,0.423,-0.836412406486547,-0.465151078077458 +"525",,,0.424,-0.802343010265846,-0.459579860621487 +"526",,,0.425,-0.768273614045145,-0.453990499739546 +"527",,,0.426,-0.73420421782444,-0.448383216090032 +"528",,,0.427,-0.700134821603733,-0.442758231038901 +"529",,,0.428,-0.666065425383029,-0.437115766650933 +"530",,,0.429,-0.631996029162333,-0.431456045680959 +"531",,,0.43,-0.601573033753655,-0.425779291565072 +"532",,,0.431,-0.580450037438194,-0.420085728411806 +"533",,,0.432,-0.559327041122723,-0.414375580993284 +"534",,,0.433,-0.538204044807252,-0.408649074736349 +"535",,,0.434,-0.517081048491792,-0.402906435713662 
+"536",,,0.435,-0.495958052176323,-0.39714789063478 +"537",,,0.436,-0.474835055860854,-0.391373666837202 +"538",,,0.437,-0.453712059545387,-0.385583992277396 +"539",,,0.438,-0.432589063229915,-0.379779095521801 +"540",,,0.439,-0.41146606691445,-0.3739592057378 +"541",,,0.44,-0.390343070598976,-0.368124552684678 +"542",,,0.441,-0.369220074283512,-0.362275366704545 +"543",,,0.442,-0.348097077968039,-0.35641187871325 +"544",,,0.443,-0.326974081652568,-0.350534320191259 +"545",,,0.444,-0.305851085337105,-0.344642923174517 +"546",,,0.445,-0.284728089021641,-0.338737920245291 +"547",,,0.446,-0.27515527281928,-0.332819544522986 +"548",,,0.447,-0.285576412529817,-0.326888029654942 +"549",,,0.448,-0.295997552240375,-0.320943609807209 +"550",,,0.449,-0.306418691950917,-0.314986519655304 +"551",,,0.45,-0.316839831661465,-0.309016994374947 +"552",,,0.451,-0.327260971372019,-0.303035269632773 +"553",,,0.452,-0.337682111082563,-0.297041581577034 +"554",,,0.453,-0.348103250793103,-0.291036166828271 +"555",,,0.454,-0.358524390503657,-0.285019262469975 +"556",,,0.455,-0.368945530214215,-0.278991106039229 +"557",,,0.456,-0.379366669924753,-0.272951935517325 +"558",,,0.457,-0.389787809635307,-0.266901989320375 +"559",,,0.458,-0.400208949345861,-0.260841506289896 +"560",,,0.459,-0.410630089056406,-0.254770725683382 +"561",,,0.46,-0.421051228766957,-0.248689887164854 +"562",,,0.461,-0.431472368477492,-0.242599230795407 +"563",,,0.462,-0.440306920198264,-0.236498997023724 +"564",,,0.463,-0.447207511670459,-0.23038942667659 +"565",,,0.464,-0.454108103142648,-0.224270760949381 +"566",,,0.465,-0.461008694614859,-0.218143241396542 +"567",,,0.466,-0.467909286087054,-0.212007109922054 +"568",,,0.467,-0.47480987755925,-0.205862608769881 +"569",,,0.468,-0.481710469031458,-0.199709980514406 +"570",,,0.469,-0.488611060503647,-0.19354946805086 +"571",,,0.47,-0.49551165197585,-0.187381314585724 +"572",,,0.471,-0.502412243448042,-0.181205763627137 +"573",,,0.472,-0.509312834920241,-0.175023058975275 +"574",,,0.473,-0.516213426392448,-0.168833444712734 +"575",,,0.474,-0.523114017864629,-0.162637165194883 +"576",,,0.475,-0.530014609336843,-0.15643446504023 +"577",,,0.476,-0.536915200809041,-0.150225589120757 +"578",,,0.477,-0.543815792281236,-0.144010782552252 +"579",,,0.478,-0.51985385274004,-0.137790290684638 +"580",,,0.479,-0.469085873023185,-0.131564359092283 +"581",,,0.48,-0.418317893306342,-0.125333233564304 +"582",,,0.481,-0.367549913589481,-0.11909716009487 +"583",,,0.482,-0.316781933872641,-0.112856384873482 +"584",,,0.483,-0.266013954155789,-0.10661115427526 +"585",,,0.484,-0.215245974438935,-0.100361714851215 +"586",,,0.485,-0.164477994722076,-0.0941083133185142 +"587",,,0.486,-0.113710015005225,-0.087851196550743 +"588",,,0.487,-0.0629420352883821,-0.0815906115681578 +"589",,,0.488,-0.0121740555715372,-0.0753268055279329 +"590",,,0.489,0.0385939241453106,-0.0690600257144059 +"591",,,0.49,0.0893619038621697,-0.0627905195293135 +"592",,,0.491,0.140129883579007,-0.0565185344820246 +"593",,,0.492,0.190897863295868,-0.0502443181797695 +"594",,,0.493,0.24166584301272,-0.0439681183178648 +"595",,,0.494,0.276489458510398,-0.0376901826699344 +"596",,,0.495,0.301527292896542,-0.0314107590781281 +"597",,,0.496,0.326565127282672,-0.0251300954433377 +"598",,,0.497,0.351602961668801,-0.0188484397154083 +"599",,,0.498,0.376640796054924,-0.0125660398833527 +"600",,,0.499,0.401678630441059,-0.006283143965559 +"601",,,0.5,0.426716464827194,0 +"602",,,0.501,0.451754299213332,0.006283143965559 
+"603",,,0.502,0.47679213359947,0.0125660398833527 +"604",,,0.503,0.501829967985588,0.0188484397154083 +"605",,,0.504,0.526867802371735,0.0251300954433377 +"606",,,0.505,0.551905636757861,0.0314107590781281 +"607",,,0.506,0.576943471144008,0.0376901826699344 +"608",,,0.507,0.601981305530128,0.0439681183178648 +"609",,,0.508,0.627019139916254,0.0502443181797695 +"610",,,0.509,0.65205697430239,0.0565185344820246 +"611",,,0.51,0.619311757771535,0.0627905195293135 +"612",,,0.511,0.56229262077204,0.0690600257144059 +"613",,,0.512,0.505273483772532,0.0753268055279329 +"614",,,0.513,0.448254346773006,0.0815906115681578 +"615",,,0.514,0.391235209773496,0.087851196550743 +"616",,,0.515,0.334216072773999,0.0941083133185142 +"617",,,0.516,0.277196935774483,0.100361714851215 +"618",,,0.517,0.220177798774976,0.10661115427526 +"619",,,0.518,0.163158661775471,0.112856384873482 +"620",,,0.519,0.106139524775945,0.11909716009487 +"621",,,0.52,0.0491203877764394,0.125333233564304 +"622",,,0.521,-0.00789874922306968,0.131564359092283 +"623",,,0.522,-0.0649178862225724,0.137790290684638 +"624",,,0.523,-0.12193702322209,0.144010782552252 +"625",,,0.524,-0.178956160221604,0.150225589120757 +"626",,,0.525,-0.235975297221111,0.156434465040231 +"627",,,0.526,-0.215161202699299,0.162637165194884 +"628",,,0.527,-0.173493341063233,0.168833444712734 +"629",,,0.528,-0.131825479427166,0.175023058975276 +"630",,,0.529,-0.0901576177910873,0.181205763627138 +"631",,,0.53,-0.0484897561550265,0.187381314585725 +"632",,,0.531,-0.00682189451894985,0.193549468050861 +"633",,,0.532,0.034845967117121,0.199709980514407 +"634",,,0.533,0.0765138287532107,0.205862608769881 +"635",,,0.534,0.118181690389271,0.212007109922055 +"636",,,0.535,0.15984955202536,0.218143241396543 +"637",,,0.536,0.201517413661433,0.224270760949381 +"638",,,0.537,0.243185275297494,0.230389426676591 +"639",,,0.538,0.284853136933574,0.236498997023725 +"640",,,0.539,0.326520998569643,0.242599230795408 +"641",,,0.54,0.368188860205719,0.248689887164855 +"642",,,0.541,0.409856721841783,0.254770725683383 +"643",,,0.542,0.41501529567517,0.260841506289897 +"644",,,0.543,0.41487193411389,0.266901989320376 +"645",,,0.544,0.414728572552562,0.272951935517325 +"646",,,0.545,0.41458521099128,0.278991106039229 +"647",,,0.546,0.414441849429961,0.285019262469976 +"648",,,0.547,0.414298487868656,0.291036166828272 +"649",,,0.548,0.41415512630736,0.297041581577035 +"650",,,0.549,0.41401176474605,0.303035269632774 +"651",,,0.55,0.41386840318475,0.309016994374948 +"652",,,0.551,0.413725041623445,0.314986519655305 +"653",,,0.552,0.413581680062142,0.32094360980721 +"654",,,0.553,0.413438318500842,0.326888029654943 +"655",,,0.554,0.413294956939542,0.332819544522987 +"656",,,0.555,0.413151595378245,0.338737920245292 +"657",,,0.556,0.413008233816939,0.344642923174517 +"658",,,0.557,0.412864872255613,0.350534320191259 +"659",,,0.558,0.402540889097121,0.356411878713251 +"660",,,0.559,0.391767235866792,0.362275366704546 +"661",,,0.56,0.380993582636467,0.368124552684678 +"662",,,0.561,0.370219929406141,0.373959205737801 +"663",,,0.562,0.359446276175815,0.379779095521801 +"664",,,0.563,0.348672622945493,0.385583992277397 +"665",,,0.564,0.337898969715158,0.391373666837203 +"666",,,0.565,0.327125316484859,0.397147890634781 +"667",,,0.566,0.316351663254525,0.402906435713663 +"668",,,0.567,0.305578010024196,0.408649074736349 +"669",,,0.568,0.29480435679388,0.414375580993285 +"670",,,0.569,0.284030703563544,0.420085728411807 +"671",,,0.57,0.27325705033325,0.425779291565073 
+"672",,,0.571,0.262483397102897,0.431456045680959 +"673",,,0.572,0.251709743872577,0.437115766650933 +"674",,,0.573,0.240565507147186,0.442758231038902 +"675",,,0.574,0.221011267641075,0.448383216090033 +"676",,,0.575,0.201457028135011,0.453990499739547 +"677",,,0.576,0.181902788628916,0.459579860621488 +"678",,,0.577,0.162348549122793,0.465151078077459 +"679",,,0.578,0.142794309616705,0.470703932165333 +"680",,,0.579,0.123240070110608,0.47623820366794 +"681",,,0.58,0.103685830604516,0.481753674101715 +"682",,,0.581,0.084131591098416,0.487250125725333 +"683",,,0.582,0.0645773515922932,0.492727341548292 +"684",,,0.583,0.0450231120862077,0.498185105339491 +"685",,,0.584,0.025468872580119,0.503623201635761 +"686",,,0.585,0.00591463307401197,0.509041415750372 +"687",,,0.586,-0.0136396064320771,0.514439533781507 +"688",,,0.587,-0.0331938459381849,0.51981734262071 +"689",,,0.588,-0.0527480854442714,0.525174629961296 +"690",,,0.589,-0.0674110222181982,0.530511184306734 +"691",,,0.59,-0.0483630905029219,0.535826794978997 +"692",,,0.591,-0.0293151587876729,0.541121252126876 +"693",,,0.592,-0.0102672270724193,0.546394346734269 +"694",,,0.593,0.00878070464284056,0.551645870628431 +"695",,,0.594,0.027828636358098,0.556875616488189 +"696",,,0.595,0.0468765680733631,0.562083377852131 +"697",,,0.596,0.0659244997886107,0.567268949126757 +"698",,,0.597,0.0849724315039037,0.572432125594591 +"699",,,0.598,0.104020363219134,0.577572703422268 +"700",,,0.599,0.123068294934388,0.582690479668576 +"701",,,0.6,0.142116226649659,0.587785252292473 +"702",,,0.601,0.161164158364915,0.59285682016106 +"703",,,0.602,0.180212090080176,0.597904983057519 +"704",,,0.603,0.199260021795424,0.602929541689025 +"705",,,0.604,0.218307953510684,0.607930297694605 +"706",,,0.605,0.235114534429804,0.612907053652976 +"707",,,0.606,0.243550826237328,0.617859613090334 +"708",,,0.607,0.251987118044832,0.622787780488112 +"709",,,0.608,0.260423409852358,0.6276913612907 +"710",,,0.609,0.268859701659871,0.632570161913124 +"711",,,0.61,0.277295993467371,0.63742398974869 +"712",,,0.611,0.285732285274892,0.642252653176584 +"713",,,0.612,0.294168577082403,0.647055961569444 +"714",,,0.613,0.302604868889928,0.651833725300879 +"715",,,0.614,0.311041160697419,0.656585755752956 +"716",,,0.615,0.319477452504941,0.661311865323652 +"717",,,0.616,0.327913744312463,0.666011867434252 +"718",,,0.617,0.336350036119964,0.67068557653672 +"719",,,0.618,0.344786327927484,0.675332808121024 +"720",,,0.619,0.353222619735006,0.679953378722419 +"721",,,0.62,0.361658911542499,0.684547105928689 +"722",,,0.621,0.380001727597733,0.689113808387348 +"723",,,0.622,0.42193747566415,0.693653305812805 +"724",,,0.623,0.463873223730569,0.698165418993473 +"725",,,0.624,0.505808971796987,0.702649969798849 +"726",,,0.625,0.547744719863415,0.707106781186547 +"727",,,0.626,0.589680467929847,0.711535677209285 +"728",,,0.627,0.631616215996271,0.715936483021831 +"729",,,0.628,0.673551964062696,0.720309024887907 +"730",,,0.629,0.715487712129125,0.724653130187047 +"731",,,0.63,0.757423460195543,0.728968627421411 +"732",,,0.631,0.799359208262,0.73325534622256 +"733",,,0.632,0.841294956328387,0.737513117358174 +"734",,,0.633,0.883230704394837,0.741741772738739 +"735",,,0.634,0.925166452461246,0.745941145424182 +"736",,,0.635,0.967102200527697,0.75011106963046 +"737",,,0.636,1.0090379485941,0.754251380736104 +"738",,,0.637,1.02681088295241,0.758361915288722 +"739",,,0.638,1.00519834665447,0.762442511011448 +"740",,,0.639,0.983585810356548,0.76649300680935 
+"741",,,0.64,0.961973274058609,0.770513242775789 +"742",,,0.641,0.94036073776069,0.774503060198734 +"743",,,0.642,0.918748201462742,0.778462301567024 +"744",,,0.643,0.897135665164845,0.782390810576588 +"745",,,0.644,0.87552312886688,0.786288432136619 +"746",,,0.645,0.853910592568974,0.79015501237569 +"747",,,0.646,0.832298056271029,0.793990398647835 +"748",,,0.647,0.810685519973107,0.797794439538571 +"749",,,0.648,0.789072983675168,0.801566984870877 +"750",,,0.649,0.767460447377251,0.805307885711122 +"751",,,0.65,0.7458479110793,0.809016994374947 +"752",,,0.651,0.724235374781394,0.812694164433094 +"753",,,0.652,0.702622838483441,0.816339250717184 +"754",,,0.653,0.688376425385863,0.819952109325452 +"755",,,0.654,0.682614104323459,0.823532597628428 +"756",,,0.655,0.676851783261115,0.827080574274562 +"757",,,0.656,0.671089462198716,0.830595899195813 +"758",,,0.657,0.665327141136358,0.834078433613171 +"759",,,0.658,0.659564820073982,0.837528040042142 +"760",,,0.659,0.653802499011618,0.840944582298169 +"761",,,0.66,0.648040177949254,0.844327925502015 +"762",,,0.661,0.642277856886879,0.847677936085083 +"763",,,0.662,0.636515535824493,0.850994481794692 +"764",,,0.663,0.630753214762158,0.854277431699295 +"765",,,0.664,0.624990893699751,0.857526656193652 +"766",,,0.665,0.619228572637416,0.860742027003944 +"767",,,0.666,0.613466251575016,0.863923417192835 +"768",,,0.667,0.607703930512659,0.86707070116449 +"769",,,0.668,0.601941609450281,0.870183754669526 +"770",,,0.669,0.609030426457534,0.87326245480992 +"771",,,0.67,0.626666184812515,0.876306680043864 +"772",,,0.671,0.64430194316753,0.879316310190557 +"773",,,0.672,0.661937701522475,0.882291226434953 +"774",,,0.673,0.679573459877485,0.885231311332455 +"775",,,0.674,0.697209218232449,0.888136448813545 +"776",,,0.675,0.714844976587458,0.891006524188368 +"777",,,0.676,0.732480734942435,0.893841424151264 +"778",,,0.677,0.750116493297416,0.896641036785236 +"779",,,0.678,0.767752251652395,0.899405251566371 +"780",,,0.679,0.785388010007401,0.902133959368203 +"781",,,0.68,0.803023768362387,0.90482705246602 +"782",,,0.681,0.820659526717386,0.907484424541117 +"783",,,0.682,0.838295285072336,0.910105970684996 +"784",,,0.683,0.855931043427353,0.912691587403503 +"785",,,0.684,0.873566801782312,0.915241172620918 +"786",,,0.685,0.86919372337251,0.917754625683981 +"787",,,0.686,0.852101244005881,0.92023184736587 +"788",,,0.687,0.835008764639294,0.922672739870115 +"789",,,0.688,0.817916285272676,0.925077206834458 +"790",,,0.689,0.800823805906094,0.927445153334662 +"791",,,0.69,0.783731326539504,0.929776485888252 +"792",,,0.691,0.76663884717291,0.932071112458211 +"793",,,0.692,0.749546367806299,0.934328942456612 +"794",,,0.693,0.73245388843972,0.936549886748192 +"795",,,0.694,0.715361409073091,0.938733857653874 +"796",,,0.695,0.698268929706526,0.940880768954226 +"797",,,0.696,0.681176450339915,0.942990535892865 +"798",,,0.697,0.664083970973322,0.945063075179805 +"799",,,0.698,0.646991491606704,0.947098304994745 +"800",,,0.699,0.629899012240126,0.949096144990295 +"801",,,0.7,0.612806532873523,0.951056516295154 +"802",,,0.701,0.625892360231745,0.952979341517219 +"803",,,0.702,0.65081629590247,0.954864544746643 +"804",,,0.703,0.675740231573242,0.956712051558831 +"805",,,0.704,0.70066416724397,0.958521789017376 +"806",,,0.705,0.725588102914742,0.960293685676943 +"807",,,0.706,0.750512038585475,0.962027671586086 +"808",,,0.707,0.775435974256231,0.96372367829001 +"809",,,0.708,0.800359909926969,0.965381638833274 +"810",,,0.709,0.825283845597721,0.967001487762435 
+"811",,,0.71,0.850207781268477,0.968583161128631 +"812",,,0.711,0.875131716939232,0.970126596490106 +"813",,,0.712,0.90005565260997,0.971631732914674 +"814",,,0.713,0.924979588280738,0.973098510982127 +"815",,,0.714,0.949903523951446,0.974526872786577 +"816",,,0.715,0.974827459622242,0.975916761938747 +"817",,,0.716,0.999751395292958,0.977268123568194 +"818",,,0.717,0.969362808816458,0.978580904325472 +"819",,,0.718,0.925383471027803,0.979855052384247 +"820",,,0.719,0.881404133239199,0.981090517443334 +"821",,,0.72,0.837424795450588,0.982287250728689 +"822",,,0.721,0.793445457661988,0.98344520499533 +"823",,,0.722,0.749466119873365,0.984564334529205 +"824",,,0.723,0.705486782084743,0.985644595148998 +"825",,,0.724,0.661507444296094,0.986685944207868 +"826",,,0.725,0.617528106507521,0.987688340595138 +"827",,,0.726,0.573548768718921,0.988651744737914 +"828",,,0.727,0.529569430930321,0.989576118602651 +"829",,,0.728,0.48559009314169,0.990461425696651 +"830",,,0.729,0.441610755353066,0.991307631069507 +"831",,,0.73,0.397631417564463,0.992114701314478 +"832",,,0.731,0.353652079775851,0.992882604569814 +"833",,,0.732,0.309672741987223,0.993611310520009 +"834",,,0.733,0.363493331212544,0.994300790396999 +"835",,,0.734,0.429740681328352,0.9949510169813 +"836",,,0.735,0.495988031444123,0.99556196460308 +"837",,,0.736,0.562235381559953,0.996133609143173 +"838",,,0.737,0.628482731675744,0.99666592803403 +"839",,,0.738,0.694730081791582,0.997158900260614 +"840",,,0.739,0.760977431907358,0.997612506361225 +"841",,,0.74,0.827224782023165,0.998026728428272 +"842",,,0.741,0.893472132138975,0.998401550108975 +"843",,,0.742,0.959719482254775,0.998736956606017 +"844",,,0.743,1.02596683237057,0.999032934678125 +"845",,,0.744,1.0922141824864,0.999289472640589 +"846",,,0.745,1.15846153260218,0.999506560365732 +"847",,,0.746,1.22470888271802,0.9996841892833 +"848",,,0.747,1.29095623283377,0.999822352380809 +"849",,,0.748,1.35720358294963,0.999921044203816 +"850",,,0.749,1.33368972294137,0.999980260856137 +"851",,,0.75,1.30756805158718,1 +"852",,,0.751,1.28144638023297,0.999980260856137 +"853",,,0.752,1.25532470887876,0.999921044203816 +"854",,,0.753,1.22920303752455,0.999822352380809 +"855",,,0.754,1.20308136617036,0.9996841892833 +"856",,,0.755,1.17695969481613,0.999506560365732 +"857",,,0.756,1.15083802346193,0.999289472640589 +"858",,,0.757,1.12471635210772,0.999032934678125 +"859",,,0.758,1.09859468075353,0.998736956606017 +"860",,,0.759,1.07247300939929,0.998401550108975 +"861",,,0.76,1.04635133804511,0.998026728428272 +"862",,,0.761,1.02022966669087,0.997612506361225 +"863",,,0.762,0.994107995336694,0.997158900260614 +"864",,,0.763,0.96798632398246,0.99666592803403 +"865",,,0.764,0.943322732729526,0.996133609143173 +"866",,,0.765,0.943111817581331,0.99556196460308 +"867",,,0.766,0.942900902433154,0.9949510169813 +"868",,,0.767,0.942689987284962,0.994300790396999 +"869",,,0.768,0.94247907213681,0.993611310520008 +"870",,,0.769,0.942268156988598,0.992882604569814 +"871",,,0.77,0.942057241840433,0.992114701314478 +"872",,,0.771,0.941846326692243,0.991307631069507 +"873",,,0.772,0.941635411544062,0.990461425696651 +"874",,,0.773,0.941424496395879,0.989576118602651 +"875",,,0.774,0.941213581247681,0.988651744737914 +"876",,,0.775,0.941002666099481,0.987688340595138 +"877",,,0.776,0.940791750951329,0.986685944207868 +"878",,,0.777,0.940580835803118,0.985644595148998 +"879",,,0.778,0.940369920654949,0.984564334529205 +"880",,,0.779,0.940159005506737,0.98344520499533 
+"881",,,0.78,0.935584436479765,0.982287250728689 +"882",,,0.781,0.904377005251946,0.981090517443334 +"883",,,0.782,0.87316957402413,0.979855052384247 +"884",,,0.783,0.841962142796308,0.978580904325472 +"885",,,0.784,0.810754711568489,0.977268123568193 +"886",,,0.785,0.779547280340658,0.975916761938747 +"887",,,0.786,0.748339849112862,0.974526872786577 +"888",,,0.787,0.717132417885019,0.973098510982126 +"889",,,0.788,0.685924986657218,0.971631732914674 +"890",,,0.789,0.654717555429414,0.970126596490106 +"891",,,0.79,0.623510124201596,0.968583161128631 +"892",,,0.791,0.592302692973773,0.967001487762435 +"893",,,0.792,0.561095261745937,0.965381638833274 +"894",,,0.793,0.529887830518117,0.96372367829001 +"895",,,0.794,0.498680399290304,0.962027671586086 +"896",,,0.795,0.467472968062489,0.960293685676943 +"897",,,0.796,0.45302882021672,0.958521789017376 +"898",,,0.797,0.496230770756048,0.95671205155883 +"899",,,0.798,0.539432721295377,0.954864544746643 +"900",,,0.799,0.582634671834712,0.952979341517219 +"901",,,0.8,0.625836622374073,0.951056516295154 +"902",,,0.801,0.669038572913424,0.949096144990294 +"903",,,0.802,0.712240523452755,0.947098304994744 +"904",,,0.803,0.755442473992098,0.945063075179805 +"905",,,0.804,0.798644424531434,0.942990535892864 +"906",,,0.805,0.84184637507078,0.940880768954225 +"907",,,0.806,0.885048325610123,0.938733857653874 +"908",,,0.807,0.928250276149458,0.936549886748192 +"909",,,0.808,0.971452226688793,0.934328942456612 +"910",,,0.809,1.01465417722816,0.932071112458211 +"911",,,0.81,1.05785612776749,0.929776485888251 +"912",,,0.811,1.10105807830684,0.927445153334661 +"913",,,0.812,1.12266572386344,0.925077206834458 +"914",,,0.813,1.09616145774845,0.922672739870115 +"915",,,0.814,1.06965719163345,0.92023184736587 +"916",,,0.815,1.04315292551846,0.917754625683981 +"917",,,0.816,1.01664865940347,0.915241172620918 +"918",,,0.817,0.990144393288468,0.912691587403503 +"919",,,0.818,0.963640127173462,0.910105970684996 +"920",,,0.819,0.937135861058459,0.907484424541117 +"921",,,0.82,0.910631594943501,0.904827052466019 +"922",,,0.821,0.884127328828497,0.902133959368203 +"923",,,0.822,0.857623062713511,0.899405251566371 +"924",,,0.823,0.831118796598518,0.896641036785236 +"925",,,0.824,0.804614530483511,0.893841424151264 +"926",,,0.825,0.778110264368508,0.891006524188368 +"927",,,0.826,0.751605998253513,0.888136448813544 +"928",,,0.827,0.725101732138508,0.885231311332455 +"929",,,0.828,0.717080774815144,0.882291226434953 +"930",,,0.829,0.737453252164982,0.879316310190556 +"931",,,0.83,0.757825729514865,0.876306680043863 +"932",,,0.831,0.778198206864741,0.87326245480992 +"933",,,0.832,0.798570684214611,0.870183754669525 +"934",,,0.833,0.818943161564468,0.86707070116449 +"935",,,0.834,0.839315638914332,0.863923417192835 +"936",,,0.835,0.859688116264189,0.860742027003943 +"937",,,0.836,0.880060593614058,0.857526656193652 +"938",,,0.837,0.900433070963912,0.854277431699295 +"939",,,0.838,0.920805548313802,0.850994481794692 +"940",,,0.839,0.941178025663657,0.847677936085083 +"941",,,0.84,0.961550503013537,0.844327925502015 +"942",,,0.841,0.981922980363376,0.840944582298169 +"943",,,0.842,1.00229545771324,0.837528040042142 +"944",,,0.843,1.0226679350631,0.834078433613171 +"945",,,0.844,1.0238763290646,0.830595899195812 +"946",,,0.845,1.00422370451448,0.827080574274562 +"947",,,0.846,0.984571079964361,0.823532597628427 +"948",,,0.847,0.964918455414249,0.819952109325452 +"949",,,0.848,0.945265830864163,0.816339250717183 +"950",,,0.849,0.925613206314034,0.812694164433094 
+"951",,,0.85,0.905960581763948,0.809016994374947 +"952",,,0.851,0.886307957213857,0.805307885711122 +"953",,,0.852,0.866655332663724,0.801566984870876 +"954",,,0.853,0.847002708113616,0.797794439538571 +"955",,,0.854,0.827350083563515,0.793990398647835 +"956",,,0.855,0.807697459013422,0.79015501237569 +"957",,,0.856,0.78804483446331,0.786288432136619 +"958",,,0.857,0.768392209913195,0.782390810576588 +"959",,,0.858,0.748739585363074,0.778462301567023 +"960",,,0.859,0.729086960812949,0.774503060198733 +"961",,,0.86,0.689573964305873,0.770513242775789 +"962",,,0.861,0.634664612523177,0.76649300680935 +"963",,,0.862,0.579755260740452,0.762442511011448 +"964",,,0.863,0.524845908957739,0.758361915288722 +"965",,,0.864,0.469936557175011,0.754251380736104 +"966",,,0.865,0.415027205392306,0.750111069630459 +"967",,,0.866,0.360117853609589,0.745941145424182 +"968",,,0.867,0.305208501826895,0.74174177273874 +"969",,,0.868,0.250299150044191,0.737513117358174 +"970",,,0.869,0.195389798261463,0.73325534622256 +"971",,,0.87,0.140480446478717,0.728968627421411 +"972",,,0.871,0.0855710946960418,0.724653130187047 +"973",,,0.872,0.0306617429133133,0.720309024887907 +"974",,,0.873,-0.0242476088693916,0.715936483021831 +"975",,,0.874,-0.0791569606520856,0.711535677209285 +"976",,,0.875,-0.134066312434816,0.707106781186548 +"977",,,0.876,-0.120805995050785,0.70264996979885 +"978",,,0.877,-0.0704848717773553,0.698165418993473 +"979",,,0.878,-0.0201637485038893,0.693653305812805 +"980",,,0.879,0.0301573747695647,0.689113808387348 +"981",,,0.88,0.0804784980429836,0.684547105928689 +"982",,,0.881,0.130799621316433,0.679953378722419 +"983",,,0.882,0.181120744589863,0.675332808121025 +"984",,,0.883,0.231441867863321,0.67068557653672 +"985",,,0.884,0.281762991136738,0.666011867434252 +"986",,,0.885,0.33208411441021,0.661311865323652 +"987",,,0.886,0.382405237683635,0.656585755752956 +"988",,,0.887,0.432726360957084,0.651833725300879 +"989",,,0.888,0.483047484230525,0.647055961569444 +"990",,,0.889,0.533368607503988,0.642252653176585 +"991",,,0.89,0.583689730777414,0.63742398974869 +"992",,,0.891,0.634010854050858,0.632570161913125 +"993",,,0.892,0.640930476935156,0.6276913612907 +"994",,,0.893,0.631985735148426,0.622787780488113 +"995",,,0.894,0.623040993361679,0.617859613090335 +"996",,,0.895,0.614096251574965,0.612907053652976 +"997",,,0.896,0.605151509788219,0.607930297694606 +"998",,,0.897,0.596206768001441,0.602929541689025 +"999",,,0.898,0.587262026214695,0.597904983057519 +"1000",,,0.899,0.578317284427964,0.592856820161059 +"1001",,,0.9,0.56937254264122,0.587785252292473 +"1002",,,0.901,0.56042780085448,0.582690479668576 +"1003",,,0.902,0.551483059067727,0.577572703422268 +"1004",,,0.903,0.542538317280984,0.57243212559459 +"1005",,,0.904,0.533593575494231,0.567268949126756 +"1006",,,0.905,0.524648833707516,0.56208337785213 +"1007",,,0.906,0.515704091920771,0.556875616488188 +"1008",,,0.907,0.506759350133997,0.55164587062843 +"1009",,,0.908,0.503004446559342,0.546394346734269 +"1010",,,0.909,0.500413381723592,0.541121252126875 +"1011",,,0.91,0.497822316887881,0.535826794978996 +"1012",,,0.911,0.49523125205215,0.530511184306733 +"1013",,,0.912,0.492640187216398,0.525174629961295 +"1014",,,0.913,0.490049122380664,0.519817342620709 +"1015",,,0.914,0.487458057544932,0.514439533781507 +"1016",,,0.915,0.484866992709207,0.50904141575037 +"1017",,,0.916,0.482275927873494,0.503623201635761 +"1018",,,0.917,0.479684863037736,0.49818510533949 +"1019",,,0.918,0.477093798202025,0.492727341548292 
+"1020",,,0.919,0.474502733366273,0.487250125725332 +"1021",,,0.92,0.471911668530536,0.481753674101715 +"1022",,,0.921,0.469320603694815,0.476238203667939 +"1023",,,0.922,0.466729538859083,0.470703932165332 +"1024",,,0.923,0.464138474023358,0.465151078077458 +"1025",,,0.924,0.481791165380514,0.459579860621488 +"1026",,,0.925,0.501659970612249,0.453990499739547 +"1027",,,0.926,0.521528775843995,0.448383216090032 +"1028",,,0.927,0.541397581075761,0.442758231038902 +"1029",,,0.928,0.561266386307497,0.437115766650932 +"1030",,,0.929,0.581135191539243,0.43145604568096 +"1031",,,0.93,0.601003996770993,0.425779291565072 +"1032",,,0.931,0.620872802002747,0.420085728411807 +"1033",,,0.932,0.640741607234492,0.414375580993284 +"1034",,,0.933,0.660610412466263,0.40864907473635 +"1035",,,0.934,0.680479217698028,0.402906435713663 +"1036",,,0.935,0.700348022929734,0.397147890634781 +"1037",,,0.936,0.720216828161493,0.391373666837202 +"1038",,,0.937,0.740085633393261,0.385583992277397 +"1039",,,0.938,0.759954438624975,0.379779095521801 +"1040",,,0.939,0.779823243856743,0.373959205737801 +"1041",,,0.94,0.763321179680305,0.368124552684678 +"1042",,,0.941,0.746296547927475,0.362275366704546 +"1043",,,0.942,0.72927191617465,0.35641187871325 +"1044",,,0.943,0.712247284421797,0.350534320191259 +"1045",,,0.944,0.695222652668971,0.344642923174517 +"1046",,,0.945,0.67819802091612,0.338737920245292 +"1047",,,0.946,0.661173389163296,0.332819544522986 +"1048",,,0.947,0.644148757410485,0.326888029654943 +"1049",,,0.948,0.627124125657601,0.320943609807209 +"1050",,,0.949,0.610099493904788,0.314986519655305 +"1051",,,0.95,0.593074862151987,0.309016994374948 +"1052",,,0.951,0.576050230399103,0.303035269632774 +"1053",,,0.952,0.559025598646272,0.297041581577035 +"1054",,,0.953,0.54200096689347,0.291036166828272 +"1055",,,0.954,0.524976335140648,0.285019262469976 +"1056",,,0.955,0.507033620148927,0.27899110603923 +"1057",,,0.956,0.476957178165172,0.272951935517325 +"1058",,,0.957,0.446880736181397,0.266901989320376 +"1059",,,0.958,0.416804294197595,0.260841506289897 +"1060",,,0.959,0.38672785221383,0.254770725683382 +"1061",,,0.96,0.356651410230059,0.248689887164854 +"1062",,,0.961,0.326574968246256,0.242599230795407 +"1063",,,0.962,0.296498526262494,0.236498997023724 +"1064",,,0.963,0.266422084278726,0.230389426676591 +"1065",,,0.964,0.236345642294926,0.224270760949381 +"1066",,,0.965,0.206269200311142,0.218143241396543 +"1067",,,0.966,0.176192758327368,0.212007109922054 +"1068",,,0.967,0.146116316343622,0.205862608769882 +"1069",,,0.968,0.11603987435982,0.199709980514406 +"1070",,,0.969,0.0859634323760642,0.193549468050861 +"1071",,,0.97,0.0558869903922755,0.187381314585725 +"1072",,,0.971,0.028049182540724,0.181205763627138 +"1073",,,0.972,0.0124297989202057,0.175023058975276 +"1074",,,0.973,-0.00318958470030663,0.168833444712734 +"1075",,,0.974,-0.0188089683208115,0.162637165194883 +"1076",,,0.975,-0.0344283519413494,0.156434465040231 +"1077",,,0.976,-0.0500477355618579,0.150225589120757 +"1078",,,0.977,-0.0656671191823809,0.144010782552252 +"1079",,,0.978,-0.0812865028029398,0.137790290684638 +"1080",,,0.979,-0.0969058864234522,0.131564359092282 +"1081",,,0.98,-0.112525270043977,0.125333233564304 +"1082",,,0.981,-0.128144653664507,0.11909716009487 +"1083",,,0.982,-0.143764037285034,0.112856384873481 +"1084",,,0.983,-0.159383420905568,0.10661115427526 +"1085",,,0.984,-0.175002804526084,0.100361714851214 +"1086",,,0.985,-0.190622188146604,0.0941083133185148 +"1087",,,0.986,-0.20624157176713,0.0878511965507423 
+"1088",,,0.987,-0.22186095538764,0.0815906115681579 +"1089",,,0.988,-0.237480339008189,0.0753268055279326 +"1090",,,0.989,-0.253099722628713,0.0690600257144061 +"1091",,,0.99,-0.268719106249225,0.0627905195293131 +"1092",,,0.991,-0.284338489869769,0.0565185344820247 +"1093",,,0.992,-0.299957873490268,0.0502443181797692 +"1094",,,0.993,-0.315577257110821,0.043968118317865 +"1095",,,0.994,-0.331196640731309,0.0376901826699341 +"1096",,,0.995,-0.346816024351848,0.0314107590781282 +"1097",,,0.996,-0.362435407972342,0.0251300954433369 +"1098",,,0.997,-0.378054791592887,0.018848439715408 +"1099",,,0.998,-0.393674175213458,0.0125660398833519 +"1100",,,0.999,-0.409293558833934,0.00628314396555868 +"1101",,,1,-0.424912942454464,1.22464679914735e-16 +"1102",,,1.001,-0.440532326074995,-0.00628314396555844 +"1103",,,1.002,-0.456151709695483,-0.0125660398833526 +"1104",,,1.003,-0.471771093316044,-0.0188484397154078 +"1105",,,1.004,-0.487390476936574,-0.0251300954433376 +"1106",,,1.005,-0.503009860557075,-0.0314107590781271 +"1107",,,1.006,-0.518629244177621,-0.0376901826699347 +"1108",,,1.007,-0.534248627798135,-0.0439681183178638 +"1109",,,1.008,-0.549868011418636,-0.0502443181797699 +"1110",,,1.009,-0.565487395039175,-0.0565185344820235 +"1111",,,1.01,-0.581106778659752,-0.0627905195293129 +"1112",,,1.011,-0.596726162280232,-0.0690600257144049 +"1113",,,1.012,-0.612345545900743,-0.0753268055279323 +"1114",,,1.013,-0.627964929521277,-0.0815906115681568 +"1115",,,1.014,-0.643584313141805,-0.0878511965507429 +"1116",,,1.015,-0.659203696762311,-0.0941083133185137 +"1117",,,1.016,-0.674823080382863,-0.100361714851215 +"1118",,,1.017,-0.690442464003388,-0.106611154275259 +"1119",,,1.018,-0.70606184762391,-0.112856384873482 +"1120",,,1.019,-0.721681231244425,-0.119097160094869 +"1121",,,1.02,-0.737300614864949,-0.125333233564304 +"1122",,,1.021,-0.752919998485486,-0.131564359092282 +"1123",,,1.022,-0.768539382105971,-0.137790290684638 +"1124",,,1.023,-0.784158765726522,-0.144010782552251 +"1125",,,1.024,-0.799778149347023,-0.150225589120757 +"1126",,,1.025,-0.815397532967537,-0.15643446504023 +"1127",,,1.026,-0.831016916588096,-0.162637165194884 +"1128",,,1.027,-0.846636300208623,-0.168833444712733 +"1129",,,1.028,-0.862255683829155,-0.175023058975276 +"1130",,,1.029,-0.877875067449716,-0.181205763627137 +"1131",,,1.03,-0.893494451070222,-0.187381314585724 +"1132",,,1.031,-0.909113834690738,-0.19354946805086 +"1133",,,1.032,-0.924733218311238,-0.199709980514407 +"1134",,,1.033,-0.940352601931798,-0.205862608769881 +"1135",,,1.034,-0.955971985552284,-0.212007109922055 +"1136",,,1.035,-0.971591369172825,-0.218143241396542 +"1137",,,1.036,-0.987210752793383,-0.224270760949381 +"1138",,,1.037,-1.0028301364139,-0.23038942667659 +"1139",,,1.038,-1.01844952003443,-0.236498997023725 +"1140",,,1.039,-1.03406890365496,-0.242599230795407 +"1141",,,1.04,-1.04968828727548,-0.248689887164855 +"1142",,,1.041,-1.06530767089598,-0.254770725683382 +"1143",,,1.042,-1.08092705451649,-0.260841506289897 +"1144",,,1.043,-1.09654643813704,-0.266901989320375 +"1145",,,1.044,-1.11216582175755,-0.272951935517326 +"1146",,,1.045,-1.12778520537807,-0.278991106039228 +"1147",,,1.046,-1.14340458899861,-0.285019262469977 +"1148",,,1.047,-1.15902397261912,-0.291036166828271 +"1149",,,1.048,-1.17464335623966,-0.297041581577035 +"1150",,,1.049,-1.1902627398602,-0.303035269632773 +"1151",,,1.05,-1.20588212348072,-0.309016994374947 +"1152",,,1.051,-1.2215015071012,-0.314986519655304 +"1153",,,1.052,-1.23712089072172,-0.320943609807209 
+"1154",,,1.053,-1.25274027434227,-0.326888029654942 +"1155",,,1.054,-1.26835965796278,-0.332819544522986 +"1156",,,1.055,-1.2839790415833,-0.338737920245291 +"1157",,,1.056,-1.29959842520383,-0.344642923174516 +"1158",,,1.057,-1.31521780882436,-0.350534320191259 +"1159",,,1.058,-1.33083719244488,-0.356411878713249 +"1160",,,1.059,-1.3464565760654,-0.362275366704546 +"1161",,,1.06,-1.36207595968594,-0.368124552684677 +"1162",,,1.061,-1.37769534330647,-0.3739592057378 +"1163",,,1.062,-1.39331472692701,-0.3797790955218 +"1164",,,1.063,-1.40893411054752,-0.385583992277396 +"1165",,,1.064,-1.42455349416804,-0.391373666837201 +"1166",,,1.065,-1.44017287778855,-0.39714789063478 +"1167",,,1.066,-1.45579226140909,-0.402906435713662 +"1168",,,1.067,-1.47141164502964,-0.408649074736349 +"1169",,,1.068,-1.48703102865013,-0.414375580993283 +"1170",,,1.069,-1.50265041227068,-0.420085728411806 +"1171",,,1.07,-1.51826979589119,-0.425779291565072 +"1172",,,1.071,-1.53388917951172,-0.431456045680959 +"1173",,,1.072,-1.54950856313228,-0.437115766650932 +"1174",,,1.073,-1.56512794675277,-0.442758231038901 +"1175",,,1.074,-1.58074733037329,-0.448383216090031 +"1176",,,1.075,-1.59636671399381,-0.453990499739547 +"1177",,,1.076,-1.61198609761434,-0.459579860621487 +"1178",,,1.077,-1.62760548123487,-0.465151078077458 +"1179",,,1.078,-1.64322486485537,-0.470703932165331 +"1180",,,1.079,-1.65884424847593,-0.476238203667938 +"1181",,,1.08,-1.67446363209648,-0.481753674101714 +"1182",,,1.081,-1.69008301571698,-0.487250125725332 +"1183",,,1.082,-1.7057023993375,-0.492727341548291 +"1184",,,1.083,-1.72132178295802,-0.49818510533949 +"1185",,,1.084,-1.73694116657854,-0.50362320163576 +"1186",,,1.085,-1.75256055019907,-0.509041415750371 +"1187",,,1.086,-1.76817993381959,-0.514439533781506 +"1188",,,1.087,-1.78379931744011,-0.519817342620709 +"1189",,,1.088,-1.7994187010606,-0.525174629961295 +"1190",,,1.089,-1.81503808468116,-0.530511184306734 +"1191",,,1.09,-1.83065746830169,-0.535826794978996 +"1192",,,1.091,-1.8462768519222,-0.541121252126876 +"1193",,,1.092,-1.86189623554273,-0.546394346734268 +"1194",,,1.093,-1.87751561916325,-0.55164587062843 +"1195",,,1.094,-1.89313500278377,-0.556875616488187 +"1196",,,1.095,-1.90875438640432,-0.562083377852131 +"1197",,,1.096,-1.92437377002484,-0.567268949126756 +"1198",,,1.097,-1.9399931536454,-0.572432125594591 +"1199",,,1.098,-1.95561253726587,-0.577572703422267 +"1200",,,1.099,-1.97123192088639,-0.582690479668576 +"1201",,,1.1,-1.98685130450695,-0.587785252292472 diff --git a/TeX/Plots/Data/overfit_spline.csv b/TeX/Plots/Data/overfit_spline.csv new file mode 100644 index 0000000..280607d --- /dev/null +++ b/TeX/Plots/Data/overfit_spline.csv @@ -0,0 +1,1202 @@ +"",x,y +"1",-0.1,0.303237326230256 +"2",-0.099,0.298227894259559 +"3",-0.098,0.293218462288862 +"4",-0.097,0.288209030318165 +"5",-0.096,0.283199598347468 +"6",-0.095,0.278190166376772 +"7",-0.094,0.273180734406075 +"8",-0.093,0.268171302435378 +"9",-0.092,0.263161870464681 +"10",-0.091,0.258152438493984 +"11",-0.09,0.253143006523287 +"12",-0.089,0.24813357455259 +"13",-0.088,0.243124142581893 +"14",-0.087,0.238114710611196 +"15",-0.086,0.233105278640499 +"16",-0.085,0.228095846669802 +"17",-0.084,0.223086414699105 +"18",-0.083,0.218076982728408 +"19",-0.082,0.213067550757712 +"20",-0.081,0.208058118787015 +"21",-0.08,0.203048686816318 +"22",-0.079,0.198039254845621 +"23",-0.078,0.193029822874924 +"24",-0.077,0.188020390904227 +"25",-0.076,0.18301095893353 +"26",-0.075,0.178001526962833 +"27",-0.074,0.172992094992136 
+"28",-0.073,0.167982663021439 +"29",-0.072,0.162973231050742 +"30",-0.071,0.157963799080046 +"31",-0.07,0.152954367109349 +"32",-0.069,0.147944935138652 +"33",-0.068,0.142935503167955 +"34",-0.067,0.137926071197258 +"35",-0.066,0.132916639226561 +"36",-0.065,0.127907207255864 +"37",-0.064,0.122897775285167 +"38",-0.063,0.11788834331447 +"39",-0.062,0.112878911343773 +"40",-0.061,0.107869479373076 +"41",-0.06,0.102860047402379 +"42",-0.059,0.0978506154316824 +"43",-0.058,0.0928411834609855 +"44",-0.057,0.0878317514902886 +"45",-0.056,0.0828223195195917 +"46",-0.055,0.0778128875488948 +"47",-0.054,0.0728034555781978 +"48",-0.053,0.067794023607501 +"49",-0.052,0.062784591636804 +"50",-0.051,0.0577751596661071 +"51",-0.05,0.0527657276954102 +"52",-0.049,0.0477562957247133 +"53",-0.048,0.0427468637540163 +"54",-0.047,0.0377374317833194 +"55",-0.046,0.0327279998126225 +"56",-0.045,0.0277185678419256 +"57",-0.044,0.0227091358712287 +"58",-0.043,0.0176997039005317 +"59",-0.042,0.0126902719298348 +"60",-0.041,0.00768083995913788 +"61",-0.04,0.00267140798844101 +"62",-0.039,-0.00233802398225591 +"63",-0.038,-0.00734745595295286 +"64",-0.037,-0.0123568879236498 +"65",-0.036,-0.0173663198943467 +"66",-0.035,-0.0223757518650436 +"67",-0.034,-0.0273851838357405 +"68",-0.033,-0.0323946158064375 +"69",-0.032,-0.0374040477771344 +"70",-0.031,-0.0424134797478313 +"71",-0.03,-0.0474229117185282 +"72",-0.029,-0.0524323436892252 +"73",-0.028,-0.0574417756599221 +"74",-0.027,-0.0624512076306189 +"75",-0.026,-0.0674606396013159 +"76",-0.025,-0.0724700715720128 +"77",-0.024,-0.0774795035427097 +"78",-0.023,-0.0824889355134066 +"79",-0.022,-0.0874983674841036 +"80",-0.021,-0.0925077994548005 +"81",-0.02,-0.0975172314254974 +"82",-0.019,-0.102526663396194 +"83",-0.018,-0.107536095366891 +"84",-0.017,-0.112545527337588 +"85",-0.016,-0.117554959308285 +"86",-0.015,-0.122564391278982 +"87",-0.014,-0.127573823249679 +"88",-0.013,-0.132583255220376 +"89",-0.012,-0.137592687191073 +"90",-0.011,-0.14260211916177 +"91",-0.01,-0.147611551132467 +"92",-0.00900000000000001,-0.152620983103163 +"93",-0.00800000000000001,-0.15763041507386 +"94",-0.00700000000000001,-0.162639847044557 +"95",-0.00600000000000001,-0.167649279015254 +"96",-0.005,-0.172658710985951 +"97",-0.004,-0.177668142956648 +"98",-0.003,-0.182677574927345 +"99",-0.002,-0.187687006898042 +"100",-0.001,-0.192696438868739 +"101",0,-0.197705870839436 +"102",0.001,-0.202715266172601 +"103",0.002,-0.207724431634405 +"104",0.003,-0.212733132330402 +"105",0.004,-0.217741133366145 +"106",0.00499999999999999,-0.222748199847187 +"107",0.00599999999999999,-0.227754096879082 +"108",0.00699999999999999,-0.232758589567383 +"109",0.00799999999999999,-0.237761443017645 +"110",0.00899999999999999,-0.242762422335419 +"111",0.01,-0.247761292626261 +"112",0.011,-0.252757818995723 +"113",0.012,-0.257751766549359 +"114",0.013,-0.262742900392723 +"115",0.014,-0.267730985631367 +"116",0.015,-0.272715787370845 +"117",0.016,-0.277697070715286 +"118",0.017,-0.282674597760437 +"119",0.018,-0.28764812124711 +"120",0.019,-0.292617392102991 +"121",0.02,-0.297582161255772 +"122",0.021,-0.30254217963314 +"123",0.022,-0.307497198162786 +"124",0.023,-0.312446967772399 +"125",0.024,-0.317391239389667 +"126",0.025,-0.322329763942281 +"127",0.026,-0.327262292357929 +"128",0.027,-0.3321885755643 +"129",0.028,-0.337108364489085 +"130",0.029,-0.342021410059971 +"131",0.03,-0.346927463204649 +"132",0.031,-0.351826274850807 +"133",0.032,-0.356717595974837 +"134",0.033,-0.361601193474308 
+"135",0.034,-0.366476873015524 +"136",0.035,-0.371344446053557 +"137",0.036,-0.376203724043477 +"138",0.037,-0.381054518440357 +"139",0.038,-0.385896640699265 +"140",0.039,-0.390729902275273 +"141",0.04,-0.395554114623452 +"142",0.041,-0.400369089198872 +"143",0.042,-0.405174637456604 +"144",0.043,-0.409970570851719 +"145",0.044,-0.414756700839287 +"146",0.045,-0.419532838874379 +"147",0.046,-0.424298796412067 +"148",0.047,-0.42905438490742 +"149",0.048,-0.433799415866218 +"150",0.049,-0.438533706721285 +"151",0.05,-0.443257086306061 +"152",0.051,-0.447969384748544 +"153",0.052,-0.452670432176729 +"154",0.053,-0.457360058718614 +"155",0.054,-0.462038094502196 +"156",0.055,-0.466704369655471 +"157",0.056,-0.471358714306436 +"158",0.057,-0.476000958583088 +"159",0.058,-0.480630932613423 +"160",0.059,-0.485248466525439 +"161",0.06,-0.489853390447132 +"162",0.061,-0.494445534506499 +"163",0.062,-0.499024728831536 +"164",0.063,-0.503590803550241 +"165",0.064,-0.508143590516217 +"166",0.065,-0.512683021707939 +"167",0.066,-0.517209182366943 +"168",0.067,-0.521722170695605 +"169",0.068,-0.526222084896304 +"170",0.069,-0.530709023171417 +"171",0.07,-0.535183083723322 +"172",0.071,-0.539644364754395 +"173",0.072,-0.544092964467015 +"174",0.073,-0.54852898106356 +"175",0.074,-0.552952512746406 +"176",0.075,-0.557363657717931 +"177",0.076,-0.561762514180513 +"178",0.077,-0.56614918033653 +"179",0.078,-0.570523754388358 +"180",0.079,-0.574886334538375 +"181",0.08,-0.579237018120935 +"182",0.081,-0.583575872818143 +"183",0.082,-0.58790293000594 +"184",0.083,-0.592218218844337 +"185",0.084,-0.596521768493343 +"186",0.085,-0.600813608112967 +"187",0.086,-0.60509376686322 +"188",0.087,-0.609362273904112 +"189",0.088,-0.613619158395651 +"190",0.089,-0.617864449497849 +"191",0.09,-0.622098176370715 +"192",0.091,-0.626320368174259 +"193",0.092,-0.63053105406849 +"194",0.093,-0.634730263213418 +"195",0.094,-0.638918024769053 +"196",0.095,-0.643094367895406 +"197",0.096,-0.647259321752485 +"198",0.097,-0.651412915500301 +"199",0.098,-0.655555178298863 +"200",0.099,-0.659686139308182 +"201",0.1,-0.663805827688266 +"202",0.101,-0.667914272599127 +"203",0.102,-0.672011503200773 +"204",0.103,-0.676097548653215 +"205",0.104,-0.680172438116462 +"206",0.105,-0.684236200750524 +"207",0.106,-0.688288865715411 +"208",0.107,-0.692330462171133 +"209",0.108,-0.696361019277699 +"210",0.109,-0.70038056619512 +"211",0.11,-0.704389132083405 +"212",0.111,-0.708386746102564 +"213",0.112,-0.712373430736141 +"214",0.113,-0.716349105142462 +"215",0.114,-0.720313607150692 +"216",0.115,-0.724266772391916 +"217",0.116,-0.728208436497218 +"218",0.117,-0.732138435097683 +"219",0.118,-0.736056603824395 +"220",0.119,-0.739962778308438 +"221",0.12,-0.743856794180898 +"222",0.121,-0.747738487072857 +"223",0.122,-0.751607692615402 +"224",0.123,-0.755464246439615 +"225",0.124,-0.759307984176583 +"226",0.125,-0.763138741457388 +"227",0.126,-0.766956353913116 +"228",0.127,-0.770760657174851 +"229",0.128,-0.774551465999147 +"230",0.129,-0.778328360553404 +"231",0.13,-0.782090773406328 +"232",0.131,-0.785838134829731 +"233",0.132,-0.789569875095425 +"234",0.133,-0.793285424475225 +"235",0.134,-0.796984213240942 +"236",0.135,-0.800665671664389 +"237",0.136,-0.80432923001738 +"238",0.137,-0.807974318571727 +"239",0.138,-0.811600367599242 +"240",0.139,-0.81520680737174 +"241",0.14,-0.818793068161031 +"242",0.141,-0.82235858023893 +"243",0.142,-0.825902773877249 +"244",0.143,-0.829425079347801 +"245",0.144,-0.832924956943878 
+"246",0.145,-0.836402119258292 +"247",0.146,-0.839856405073211 +"248",0.147,-0.843287654107689 +"249",0.148,-0.846695706080776 +"250",0.149,-0.850080400711525 +"251",0.15,-0.853441577718987 +"252",0.151,-0.856779076822214 +"253",0.152,-0.860092737740258 +"254",0.153,-0.863382400192171 +"255",0.154,-0.866647903897004 +"256",0.155,-0.869889088573809 +"257",0.156,-0.873105793941638 +"258",0.157,-0.876297859719542 +"259",0.158,-0.879465125626574 +"260",0.159,-0.882607431381785 +"261",0.16,-0.885724602556049 +"262",0.161,-0.888816374057309 +"263",0.162,-0.891882445024286 +"264",0.163,-0.89492251450849 +"265",0.164,-0.897936281561432 +"266",0.165,-0.900923445234623 +"267",0.166,-0.903883704579573 +"268",0.167,-0.906816758647793 +"269",0.168,-0.909722306490795 +"270",0.169,-0.912600047160089 +"271",0.17,-0.915449679707185 +"272",0.171,-0.918270903183595 +"273",0.172,-0.921063416640829 +"274",0.173,-0.923826919130399 +"275",0.174,-0.926561109703815 +"276",0.175,-0.929265687412588 +"277",0.176,-0.931940369453524 +"278",0.177,-0.934584962737102 +"279",0.178,-0.937199301850653 +"280",0.179,-0.939783221389401 +"281",0.18,-0.942336555948574 +"282",0.181,-0.944859140123397 +"283",0.182,-0.947350808509096 +"284",0.183,-0.949811395700896 +"285",0.184,-0.952240736294025 +"286",0.185,-0.954638664883707 +"287",0.186,-0.957005016065168 +"288",0.187,-0.959339624433635 +"289",0.188,-0.961642324584333 +"290",0.189,-0.963912951112488 +"291",0.19,-0.966151338613326 +"292",0.191,-0.968357321682073 +"293",0.192,-0.970530734913955 +"294",0.193,-0.972671412904197 +"295",0.194,-0.974779190248026 +"296",0.195,-0.976853901540667 +"297",0.196,-0.978895381377346 +"298",0.197,-0.98090346435329 +"299",0.198,-0.982877985063723 +"300",0.199,-0.984818778103872 +"301",0.2,-0.986725678068963 +"302",0.201,-0.988598519554222 +"303",0.202,-0.990437137154874 +"304",0.203,-0.992241365466145 +"305",0.204,-0.994011039083261 +"306",0.205,-0.995745992601449 +"307",0.206,-0.997446060615934 +"308",0.207,-0.999111077749097 +"309",0.208,-1.00074091610382 +"310",0.209,-1.0023355596171 +"311",0.21,-1.00389501299247 +"312",0.211,-1.00541928093348 +"313",0.212,-1.00690836814368 +"314",0.213,-1.00836227932662 +"315",0.214,-1.00978101918583 +"316",0.215,-1.01116459242487 +"317",0.216,-1.01251300374728 +"318",0.217,-1.01382625785661 +"319",0.218,-1.0151043594564 +"320",0.219,-1.0163473132502 +"321",0.22,-1.01755512394156 +"322",0.221,-1.01872779623402 +"323",0.222,-1.01986533483113 +"324",0.223,-1.02096774436756 +"325",0.224,-1.02203501117038 +"326",0.225,-1.02306707872828 +"327",0.226,-1.02406388441191 +"328",0.227,-1.02502536559191 +"329",0.228,-1.02595145963892 +"330",0.229,-1.02684210392358 +"331",0.23,-1.02769723581651 +"332",0.231,-1.02851679268837 +"333",0.232,-1.02930071190979 +"334",0.233,-1.0300489308514 +"335",0.234,-1.03076138688386 +"336",0.235,-1.03143801737778 +"337",0.236,-1.03207875970382 +"338",0.237,-1.03268355123261 +"339",0.238,-1.03325232933479 +"340",0.239,-1.03378503074801 +"341",0.24,-1.03428152745394 +"342",0.241,-1.03474157157901 +"343",0.242,-1.03516490226963 +"344",0.243,-1.03555125867217 +"345",0.244,-1.03590037993303 +"346",0.245,-1.0362120051986 +"347",0.246,-1.03648587361527 +"348",0.247,-1.03672172432943 +"349",0.248,-1.03691929648747 +"350",0.249,-1.03707832923578 +"351",0.25,-1.03719856172075 +"352",0.251,-1.03727973308877 +"353",0.252,-1.03732158248623 +"354",0.253,-1.03732384905952 +"355",0.254,-1.03728627195504 +"356",0.255,-1.03720859211673 +"357",0.256,-1.03709064509555 +"358",0.257,-1.03693240593497 
+"359",0.258,-1.03673386087952 +"360",0.259,-1.03649499617376 +"361",0.26,-1.03621579806222 +"362",0.261,-1.03589625278944 +"363",0.262,-1.03553634659997 +"364",0.263,-1.03513606573836 +"365",0.264,-1.03469539644914 +"366",0.265,-1.03421432497686 +"367",0.266,-1.03369283756607 +"368",0.267,-1.03313092046129 +"369",0.268,-1.03252855990709 +"370",0.269,-1.031885742148 +"371",0.27,-1.03120245342856 +"372",0.271,-1.03047868162575 +"373",0.272,-1.02971446624313 +"374",0.273,-1.02890990770837 +"375",0.274,-1.02806510995707 +"376",0.275,-1.02718017692481 +"377",0.276,-1.0262552125472 +"378",0.277,-1.02529032075982 +"379",0.278,-1.02428560549827 +"380",0.279,-1.02324117069814 +"381",0.28,-1.02215712029502 +"382",0.281,-1.0210335582245 +"383",0.282,-1.01987058842218 +"384",0.283,-1.01866831482366 +"385",0.284,-1.01742684136452 +"386",0.285,-1.01614627198035 +"387",0.286,-1.01482671060676 +"388",0.287,-1.01346826117932 +"389",0.288,-1.01207102763364 +"390",0.289,-1.01063511390531 +"391",0.29,-1.00916062392992 +"392",0.291,-1.00764766164307 +"393",0.292,-1.00609633098034 +"394",0.293,-1.00450673587733 +"395",0.294,-1.00287898026963 +"396",0.295,-1.00121316809284 +"397",0.296,-0.999509403282542 +"398",0.297,-0.997767789774338 +"399",0.298,-0.995988431503817 +"400",0.299,-0.994171432406572 +"401",0.3,-0.992316896418197 +"402",0.301,-0.990424927474284 +"403",0.302,-0.988495629510426 +"404",0.303,-0.98652911550162 +"405",0.304,-0.984525630720063 +"406",0.305,-0.98248552078808 +"407",0.306,-0.980409133824596 +"408",0.307,-0.978296817948536 +"409",0.308,-0.976148921278822 +"410",0.309,-0.973965791934381 +"411",0.31,-0.971747778034135 +"412",0.311,-0.96949522769701 +"413",0.312,-0.96720848904193 +"414",0.313,-0.964887910187818 +"415",0.314,-0.9625338392536 +"416",0.315,-0.9601466243582 +"417",0.316,-0.957726613620541 +"418",0.317,-0.955274155159549 +"419",0.318,-0.952789597094147 +"420",0.319,-0.950273276952471 +"421",0.32,-0.947725419060048 +"422",0.321,-0.945146179156612 +"423",0.322,-0.942535712022997 +"424",0.323,-0.939894172440039 +"425",0.324,-0.937221715188572 +"426",0.325,-0.934518495049431 +"427",0.326,-0.931784666803449 +"428",0.327,-0.929020385231463 +"429",0.328,-0.926225805114306 +"430",0.329,-0.923401081232814 +"431",0.33,-0.920546368367821 +"432",0.331,-0.917661821300161 +"433",0.332,-0.91474759481067 +"434",0.333,-0.911803843680181 +"435",0.334,-0.908830722689531 +"436",0.335,-0.905828396631777 +"437",0.336,-0.902797110638489 +"438",0.337,-0.899737148489724 +"439",0.338,-0.896648794212154 +"440",0.339,-0.89353233183245 +"441",0.34,-0.890388045377282 +"442",0.341,-0.887216218873321 +"443",0.342,-0.884017136347239 +"444",0.343,-0.880791081825705 +"445",0.344,-0.877538339335392 +"446",0.345,-0.874259192902969 +"447",0.346,-0.870953926555109 +"448",0.347,-0.867622824318481 +"449",0.348,-0.864266170219756 +"450",0.349,-0.860884248285606 +"451",0.35,-0.857477342542701 +"452",0.351,-0.854045708297861 +"453",0.352,-0.850589424706662 +"454",0.353,-0.847108504175832 +"455",0.354,-0.843602958985476 +"456",0.355,-0.840072801415697 +"457",0.356,-0.836518043746601 +"458",0.357,-0.832938698258292 +"459",0.358,-0.829334777230874 +"460",0.359,-0.825706292944451 +"461",0.36,-0.82205325767913 +"462",0.361,-0.818375683715012 +"463",0.362,-0.814673583332204 +"464",0.363,-0.81094696881081 +"465",0.364,-0.807195852430934 +"466",0.365,-0.80342024647268 +"467",0.366,-0.799620163216154 +"468",0.367,-0.795795602594673 +"469",0.368,-0.7919465060368 +"470",0.369,-0.788072797659764 +"471",0.37,-0.78417440157816 
+"472",0.371,-0.780251241906586 +"473",0.372,-0.776303242759637 +"474",0.373,-0.772330328251912 +"475",0.374,-0.768332422498005 +"476",0.375,-0.764309449612514 +"477",0.376,-0.760261333710036 +"478",0.377,-0.756187998905167 +"479",0.378,-0.752089369312503 +"480",0.379,-0.747965369046642 +"481",0.38,-0.74381592222218 +"482",0.381,-0.739640952953713 +"483",0.382,-0.735440385355839 +"484",0.383,-0.731214143543153 +"485",0.384,-0.726962151630252 +"486",0.385,-0.722684333731734 +"487",0.386,-0.718380613962194 +"488",0.387,-0.714050916436229 +"489",0.388,-0.709695165268436 +"490",0.389,-0.705313284573411 +"491",0.39,-0.700905198465751 +"492",0.391,-0.696470831060053 +"493",0.392,-0.692010106470913 +"494",0.393,-0.687522948812929 +"495",0.394,-0.683009282200695 +"496",0.395,-0.67846903074881 +"497",0.396,-0.673902118571869 +"498",0.397,-0.66930846978447 +"499",0.398,-0.664688008541503 +"500",0.399,-0.660040697671097 +"501",0.4,-0.655366610750182 +"502",0.401,-0.650665841054792 +"503",0.402,-0.645938481860954 +"504",0.403,-0.6411846264447 +"505",0.404,-0.63640436808206 +"506",0.405,-0.631597800049065 +"507",0.406,-0.626765015621744 +"508",0.407,-0.621906108076129 +"509",0.408,-0.617021170688248 +"510",0.409,-0.612110296734134 +"511",0.41,-0.607173579489815 +"512",0.411,-0.602211112231323 +"513",0.412,-0.597222988234687 +"514",0.413,-0.592209300775939 +"515",0.414,-0.587170143462582 +"516",0.415,-0.582105682795519 +"517",0.416,-0.577016249216728 +"518",0.417,-0.571902195554363 +"519",0.418,-0.566763874636577 +"520",0.419,-0.561601639291523 +"521",0.42,-0.556415842347354 +"522",0.421,-0.551206836632224 +"523",0.422,-0.545974974974285 +"524",0.423,-0.540720610201691 +"525",0.424,-0.535444095142594 +"526",0.425,-0.530145782625149 +"527",0.426,-0.524826025477508 +"528",0.427,-0.519485176527824 +"529",0.428,-0.514123588604251 +"530",0.429,-0.508741614534941 +"531",0.43,-0.50333960737663 +"532",0.431,-0.497917940809876 +"533",0.432,-0.492477025253503 +"534",0.433,-0.487017274918595 +"535",0.434,-0.481539104016238 +"536",0.435,-0.476042926757516 +"537",0.436,-0.470529157353513 +"538",0.437,-0.464998210015314 +"539",0.438,-0.459450498954003 +"540",0.439,-0.453886438380665 +"541",0.44,-0.448306442506385 +"542",0.441,-0.442710925542247 +"543",0.442,-0.437100301699336 +"544",0.443,-0.431474985188735 +"545",0.444,-0.42583539022153 +"546",0.445,-0.420181931008805 +"547",0.446,-0.414515020810824 +"548",0.447,-0.408835027306466 +"549",0.448,-0.403142253429994 +"550",0.449,-0.397436997183613 +"551",0.45,-0.391719556569527 +"552",0.451,-0.385990229589941 +"553",0.452,-0.380249314247059 +"554",0.453,-0.374497108543087 +"555",0.454,-0.368733910480227 +"556",0.455,-0.362960018060685 +"557",0.456,-0.357175729286665 +"558",0.457,-0.351381342160371 +"559",0.458,-0.345577154684009 +"560",0.459,-0.339763464859781 +"561",0.46,-0.333940570689894 +"562",0.461,-0.328108770176551 +"563",0.462,-0.322268363847686 +"564",0.463,-0.316419726369904 +"565",0.464,-0.310563316738968 +"566",0.465,-0.304699598525067 +"567",0.466,-0.298829035298392 +"568",0.467,-0.292952090629133 +"569",0.468,-0.287069228087481 +"570",0.469,-0.281180911243625 +"571",0.47,-0.275287603667757 +"572",0.471,-0.269389768930065 +"573",0.472,-0.263487870600741 +"574",0.473,-0.257582372249975 +"575",0.474,-0.251673737447957 +"576",0.475,-0.245762429764877 +"577",0.476,-0.239848912770925 +"578",0.477,-0.233933650036294 +"579",0.478,-0.22801710513117 +"580",0.479,-0.222099741625746 +"581",0.48,-0.216182023090212 +"582",0.481,-0.210264413094758 
+"583",0.482,-0.204347375209573 +"584",0.483,-0.198431373004849 +"585",0.484,-0.192516870050776 +"586",0.485,-0.186604329917544 +"587",0.486,-0.180694216175342 +"588",0.487,-0.174786992394363 +"589",0.488,-0.168883122144795 +"590",0.489,-0.162983068996829 +"591",0.49,-0.157087296520656 +"592",0.491,-0.151196268286464 +"593",0.492,-0.145310447864446 +"594",0.493,-0.139430298824791 +"595",0.494,-0.133556281634579 +"596",0.495,-0.12768881376346 +"597",0.496,-0.12182828125456 +"598",0.497,-0.115975069433606 +"599",0.498,-0.110129563626324 +"600",0.499,-0.10429214915844 +"601",0.5,-0.0984632113556776 +"602",0.501,-0.0926431355437642 +"603",0.502,-0.0868323070484252 +"604",0.503,-0.0810311111953863 +"605",0.504,-0.075239933310374 +"606",0.505,-0.0694591587191126 +"607",0.506,-0.0636891727473286 +"608",0.507,-0.0579303607207476 +"609",0.508,-0.052183107965096 +"610",0.509,-0.0464477998060982 +"611",0.51,-0.0407247997058712 +"612",0.511,-0.0350142486857251 +"613",0.512,-0.0293161580121543 +"614",0.513,-0.0236305373308106 +"615",0.514,-0.0179573962873481 +"616",0.515,-0.0122967445274198 +"617",0.516,-0.00664859169667907 +"618",0.517,-0.00101294744077975 +"619",0.518,0.00461017859462617 +"620",0.519,0.0102207767638848 +"621",0.52,0.015818837421343 +"622",0.521,0.0214043509213468 +"623",0.522,0.0269773076182443 +"624",0.523,0.0325376978663815 +"625",0.524,0.0380855120201053 +"626",0.525,0.0436207404337618 +"627",0.526,0.0491433615416619 +"628",0.527,0.054653262409902 +"629",0.528,0.0601502878368741 +"630",0.529,0.0656342823917064 +"631",0.53,0.0711050906435264 +"632",0.531,0.0765625571614635 +"633",0.532,0.0820065265146452 +"634",0.533,0.0874368432721995 +"635",0.534,0.0928533520032542 +"636",0.535,0.0982558972769383 +"637",0.536,0.10364432366238 +"638",0.537,0.109018475728706 +"639",0.538,0.114378198045045 +"640",0.539,0.119723335180527 +"641",0.54,0.125053731704277 +"642",0.541,0.130369232185426 +"643",0.542,0.135669694492492 +"644",0.543,0.140955054591442 +"645",0.544,0.146225276864408 +"646",0.545,0.151480325734251 +"647",0.546,0.156720165623832 +"648",0.547,0.161944760956012 +"649",0.548,0.167154076153655 +"650",0.549,0.172348075639621 +"651",0.55,0.177526723836772 +"652",0.551,0.182689985167969 +"653",0.552,0.187837824056074 +"654",0.553,0.19297020492395 +"655",0.554,0.198087092194457 +"656",0.555,0.203188450290457 +"657",0.556,0.208274243634811 +"658",0.557,0.213344436650382 +"659",0.558,0.218399016785788 +"660",0.559,0.223438076068544 +"661",0.56,0.228461736201095 +"662",0.561,0.23347011888787 +"663",0.562,0.238463345833296 +"664",0.563,0.243441538741801 +"665",0.564,0.248404819317814 +"666",0.565,0.253353309265763 +"667",0.566,0.258287130290076 +"668",0.567,0.263206404095181 +"669",0.568,0.268111252385506 +"670",0.569,0.27300179686548 +"671",0.57,0.277878159239531 +"672",0.571,0.282740461212088 +"673",0.572,0.287588824487577 +"674",0.573,0.292423370768204 +"675",0.574,0.29724418828602 +"676",0.575,0.30205124728546 +"677",0.576,0.306844492025424 +"678",0.577,0.311623866764811 +"679",0.578,0.316389315762523 +"680",0.579,0.321140783277459 +"681",0.58,0.325878213568518 +"682",0.581,0.330601550894601 +"683",0.582,0.335310739514607 +"684",0.583,0.340005723687436 +"685",0.584,0.344686447671989 +"686",0.585,0.349352855727164 +"687",0.586,0.354004892111862 +"688",0.587,0.358642501084984 +"689",0.588,0.363265626905427 +"690",0.589,0.367874213832094 +"691",0.59,0.372468206123882 +"692",0.591,0.377047548039693 +"693",0.592,0.381612183838426 +"694",0.593,0.386162057778981 +"695",0.594,0.390697114120258 
+"696",0.595,0.395217297121157 +"697",0.596,0.399722551040577 +"698",0.597,0.404212820137419 +"699",0.598,0.408688048670583 +"700",0.599,0.413148180898967 +"701",0.6,0.417593161081473 +"702",0.601,0.422022933477 +"703",0.602,0.426437442344447 +"704",0.603,0.430836631942716 +"705",0.604,0.435220446530705 +"706",0.605,0.43958882997559 +"707",0.606,0.443941653842119 +"708",0.607,0.448278633360264 +"709",0.608,0.452599463357988 +"710",0.609,0.456903838663255 +"711",0.61,0.461191454104027 +"712",0.611,0.465462004508267 +"713",0.612,0.469715184703937 +"714",0.613,0.473950689519002 +"715",0.614,0.478168213781425 +"716",0.615,0.482367452319167 +"717",0.616,0.486548099960192 +"718",0.617,0.490709851532464 +"719",0.618,0.494852401863945 +"720",0.619,0.498975445782598 +"721",0.62,0.503078678116386 +"722",0.621,0.507161793680979 +"723",0.622,0.511224486307095 +"724",0.623,0.515266448136502 +"725",0.624,0.519287371144905 +"726",0.625,0.523286947308007 +"727",0.626,0.527264868601514 +"728",0.627,0.531220827001132 +"729",0.628,0.535154514482563 +"730",0.629,0.539065623021514 +"731",0.63,0.542953844593688 +"732",0.631,0.546818871174792 +"733",0.632,0.550660394740529 +"734",0.633,0.554478107266603 +"735",0.634,0.558271700728721 +"736",0.635,0.562040867102587 +"737",0.636,0.565785298363905 +"738",0.637,0.569504688404753 +"739",0.638,0.573198815116183 +"740",0.639,0.576867571344864 +"741",0.64,0.580510858236843 +"742",0.641,0.584128576938165 +"743",0.642,0.587720628594876 +"744",0.643,0.591286914353021 +"745",0.644,0.594827335358647 +"746",0.645,0.5983417927578 +"747",0.646,0.601830187696524 +"748",0.647,0.605292421320866 +"749",0.648,0.608728394776873 +"750",0.649,0.612138009210589 +"751",0.65,0.61552116576806 +"752",0.651,0.618877765595332 +"753",0.652,0.622207709838452 +"754",0.653,0.625510899841603 +"755",0.654,0.628787242359849 +"756",0.655,0.63203665008075 +"757",0.656,0.635259035994605 +"758",0.657,0.638454313091711 +"759",0.658,0.641622394362367 +"760",0.659,0.64476319279687 +"761",0.66,0.647876621385518 +"762",0.661,0.650962593118609 +"763",0.662,0.654021020986442 +"764",0.663,0.657051817979314 +"765",0.664,0.660054897087522 +"766",0.665,0.663030171301367 +"767",0.666,0.665977553611144 +"768",0.667,0.668896957007152 +"769",0.668,0.671788294479689 +"770",0.669,0.674651479626376 +"771",0.67,0.677486437245355 +"772",0.671,0.680293101984278 +"773",0.672,0.683071408826511 +"774",0.673,0.685821292755421 +"775",0.674,0.688542688754374 +"776",0.675,0.691235531806738 +"777",0.676,0.693899756895878 +"778",0.677,0.696535299005163 +"779",0.678,0.699142093117958 +"780",0.679,0.70172007421763 +"781",0.68,0.704269177287547 +"782",0.681,0.706789337311075 +"783",0.682,0.70928048927158 +"784",0.683,0.71174256815243 +"785",0.684,0.714175508936991 +"786",0.685,0.71657924660863 +"787",0.686,0.718953716150714 +"788",0.687,0.72129885254661 +"789",0.688,0.723614590779684 +"790",0.689,0.725900865833303 +"791",0.69,0.728157612690834 +"792",0.691,0.730384766335643 +"793",0.692,0.732582261751099 +"794",0.693,0.734750033920566 +"795",0.694,0.736888017827412 +"796",0.695,0.738996148455004 +"797",0.696,0.741074360786708 +"798",0.697,0.743122589805892 +"799",0.698,0.745140770495922 +"800",0.699,0.747128837840165 +"801",0.7,0.749086726821987 +"802",0.701,0.751014371920713 +"803",0.702,0.752911702731045 +"804",0.703,0.754778646104882 +"805",0.704,0.756615128863699 +"806",0.705,0.75842107782897 +"807",0.706,0.760196419822169 +"808",0.707,0.761941081664771 +"809",0.708,0.76365499017825 +"810",0.709,0.765338072184081 
+"811",0.71,0.766990254503738 +"812",0.711,0.768611463958696 +"813",0.712,0.770201627370428 +"814",0.713,0.77176067156041 +"815",0.714,0.773288523350116 +"816",0.715,0.77478510956102 +"817",0.716,0.776250357014596 +"818",0.717,0.777684199582329 +"819",0.718,0.779086622781049 +"820",0.719,0.780457635096925 +"821",0.72,0.781797245120708 +"822",0.721,0.783105461443149 +"823",0.722,0.784382292654999 +"824",0.723,0.785627747347008 +"825",0.724,0.786841834109928 +"826",0.725,0.788024561534508 +"827",0.726,0.7891759382115 +"828",0.727,0.790295972731655 +"829",0.728,0.791384673685722 +"830",0.729,0.792442049664455 +"831",0.73,0.793468109258601 +"832",0.731,0.794462861058913 +"833",0.732,0.795426313656141 +"834",0.733,0.796358450106777 +"835",0.734,0.797259109870842 +"836",0.735,0.798128082250784 +"837",0.736,0.798965156496666 +"838",0.737,0.799770121858555 +"839",0.738,0.800542767586516 +"840",0.739,0.801282882930614 +"841",0.74,0.801990257140913 +"842",0.741,0.80266467946748 +"843",0.742,0.803305939160378 +"844",0.743,0.803913825469673 +"845",0.744,0.804488127645431 +"846",0.745,0.805028634937716 +"847",0.746,0.805535136596594 +"848",0.747,0.806007421872129 +"849",0.748,0.806445280014387 +"850",0.749,0.806848536666524 +"851",0.75,0.807217175917101 +"852",0.751,0.807551224963878 +"853",0.752,0.807850711005506 +"854",0.753,0.808115661240635 +"855",0.754,0.808346102867918 +"856",0.755,0.808542063086006 +"857",0.756,0.80870356909355 +"858",0.757,0.808830648089202 +"859",0.758,0.808923327271612 +"860",0.759,0.808981633839432 +"861",0.76,0.809005594991314 +"862",0.761,0.808995237925908 +"863",0.762,0.808950589841867 +"864",0.763,0.808871677937841 +"865",0.764,0.808758529415826 +"866",0.765,0.808611193584847 +"867",0.766,0.808429794487349 +"868",0.767,0.808214471942012 +"869",0.768,0.807965365767514 +"870",0.769,0.807682615782534 +"871",0.77,0.807366361805751 +"872",0.771,0.807016743655843 +"873",0.772,0.80663390115149 +"874",0.773,0.806217974111371 +"875",0.774,0.805769102354163 +"876",0.775,0.805287425698546 +"877",0.776,0.804773083963199 +"878",0.777,0.8042262169668 +"879",0.778,0.803646964528029 +"880",0.779,0.803035466465563 +"881",0.78,0.802391862598083 +"882",0.781,0.801716292744266 +"883",0.782,0.801008896722792 +"884",0.783,0.800269814352339 +"885",0.784,0.799499185451586 +"886",0.785,0.798697149839213 +"887",0.786,0.797863847333897 +"888",0.787,0.796999417754317 +"889",0.788,0.796104000919153 +"890",0.789,0.795177736647083 +"891",0.79,0.794220764756786 +"892",0.791,0.793233225066941 +"893",0.792,0.792215257396227 +"894",0.793,0.791167001563322 +"895",0.794,0.790088597386905 +"896",0.795,0.788980184685655 +"897",0.796,0.787841903056322 +"898",0.797,0.786673857278219 +"899",0.798,0.785476079736488 +"900",0.799,0.784248593791281 +"901",0.8,0.782991422802751 +"902",0.801,0.781704590131052 +"903",0.802,0.780388119136335 +"904",0.803,0.779042033178754 +"905",0.804,0.777666355618461 +"906",0.805,0.77626110981561 +"907",0.806,0.774826319130352 +"908",0.807,0.773362006922841 +"909",0.808,0.771868196553229 +"910",0.809,0.77034491138167 +"911",0.81,0.768792174768315 +"912",0.811,0.767210010073318 +"913",0.812,0.7655984414498 +"914",0.813,0.763957549810961 +"915",0.814,0.762287509778138 +"916",0.815,0.760588504742603 +"917",0.816,0.758860718095633 +"918",0.817,0.757104333228499 +"919",0.818,0.755319533532477 +"920",0.819,0.753506502398841 +"921",0.82,0.751665423218865 +"922",0.821,0.749796479383822 +"923",0.822,0.747899854284988 +"924",0.823,0.745975731313635 +"925",0.824,0.744024293861038 
+"926",0.825,0.742045725318471 +"927",0.826,0.740040209077209 +"928",0.827,0.738007928528525 +"929",0.828,0.735949066674016 +"930",0.829,0.733863790843277 +"931",0.83,0.731752247699406 +"932",0.831,0.729614582492898 +"933",0.832,0.727450940474248 +"934",0.833,0.72526146689395 +"935",0.834,0.7230463070025 +"936",0.835,0.720805606050392 +"937",0.836,0.718539509288122 +"938",0.837,0.716248161966185 +"939",0.838,0.713931709335074 +"940",0.839,0.711590296645287 +"941",0.84,0.709224069147316 +"942",0.841,0.706833172091658 +"943",0.842,0.704417750728807 +"944",0.843,0.701977950309259 +"945",0.844,0.699513920974475 +"946",0.845,0.69702593739995 +"947",0.846,0.694514405876888 +"948",0.847,0.691979739005149 +"949",0.848,0.689422349384595 +"950",0.849,0.686842649615089 +"951",0.85,0.68424105229649 +"952",0.851,0.681617970028663 +"953",0.852,0.678973815411467 +"954",0.853,0.676309001044764 +"955",0.854,0.673623939528417 +"956",0.855,0.670919043462287 +"957",0.856,0.668194725446236 +"958",0.857,0.665451398080125 +"959",0.858,0.662689473963816 +"960",0.859,0.659909365697171 +"961",0.86,0.657111478767447 +"962",0.861,0.654296095083638 +"963",0.862,0.651463391809181 +"964",0.863,0.648613542793761 +"965",0.864,0.645746721887061 +"966",0.865,0.642863102938765 +"967",0.866,0.639962859798558 +"968",0.867,0.637046166316123 +"969",0.868,0.634113196341145 +"970",0.869,0.631164123723308 +"971",0.87,0.628199122312296 +"972",0.871,0.625218365957793 +"973",0.872,0.622222028509482 +"974",0.873,0.619210283817048 +"975",0.874,0.616183305730175 +"976",0.875,0.613141268098548 +"977",0.876,0.61008434477185 +"978",0.877,0.607012709599764 +"979",0.878,0.603926536431977 +"980",0.879,0.60082599911817 +"981",0.88,0.597711271508029 +"982",0.881,0.594582527451238 +"983",0.882,0.59143994079748 +"984",0.883,0.58828368539644 +"985",0.884,0.585113935097801 +"986",0.885,0.581930863751248 +"987",0.886,0.578734645206465 +"988",0.887,0.575525453313136 +"989",0.888,0.572303461920945 +"990",0.889,0.569068844879576 +"991",0.89,0.565821776038713 +"992",0.891,0.56256242924804 +"993",0.892,0.559290963339282 +"994",0.893,0.556007398426275 +"995",0.894,0.552711679656008 +"996",0.895,0.549403751442033 +"997",0.896,0.546083558197899 +"998",0.897,0.542751044337158 +"999",0.898,0.539406154273358 +"1000",0.899,0.536048832420052 +"1001",0.9,0.532679023190788 +"1002",0.901,0.529296670999118 +"1003",0.902,0.525901720258592 +"1004",0.903,0.52249411538276 +"1005",0.904,0.519073800785174 +"1006",0.905,0.515640720879381 +"1007",0.906,0.512194820078935 +"1008",0.907,0.508736042797384 +"1009",0.908,0.505264343762812 +"1010",0.909,0.501779749946858 +"1011",0.91,0.498282319203961 +"1012",0.911,0.494772109504881 +"1013",0.912,0.491249178820382 +"1014",0.913,0.487713585121224 +"1015",0.914,0.48416538637817 +"1016",0.915,0.48060464056198 +"1017",0.916,0.477031405643417 +"1018",0.917,0.473445739593242 +"1019",0.918,0.469847700382218 +"1020",0.919,0.466237345981105 +"1021",0.92,0.462614734360666 +"1022",0.921,0.458979923491662 +"1023",0.922,0.455332971344855 +"1024",0.923,0.451673935891006 +"1025",0.924,0.44800287121006 +"1026",0.925,0.444319810422624 +"1027",0.926,0.440624779622937 +"1028",0.927,0.436917804900137 +"1029",0.928,0.433198912343356 +"1030",0.929,0.429468128041732 +"1031",0.93,0.425725478084401 +"1032",0.931,0.421970988560498 +"1033",0.932,0.418204685559159 +"1034",0.933,0.41442659516952 +"1035",0.934,0.410636743480715 +"1036",0.935,0.406835156581882 +"1037",0.936,0.403021860562156 +"1038",0.937,0.399196881510673 +"1039",0.938,0.395360245516567 
+"1040",0.939,0.391511978668976 +"1041",0.94,0.387652141900216 +"1042",0.941,0.383780941566005 +"1043",0.942,0.379898621955678 +"1044",0.943,0.376005427358674 +"1045",0.944,0.37210160206443 +"1046",0.945,0.368187390362386 +"1047",0.946,0.364263036541977 +"1048",0.947,0.360328784892645 +"1049",0.948,0.356384879703825 +"1050",0.949,0.352431565264958 +"1051",0.95,0.348469085865479 +"1052",0.951,0.34449768579483 +"1053",0.952,0.340517609342445 +"1054",0.953,0.336529100797766 +"1055",0.954,0.332532404450228 +"1056",0.955,0.328527764590125 +"1057",0.956,0.324515428510455 +"1058",0.957,0.320495653240168 +"1059",0.958,0.316468697777954 +"1060",0.959,0.31243482112251 +"1061",0.96,0.308394282272525 +"1062",0.961,0.304347340226695 +"1063",0.962,0.300294253983712 +"1064",0.963,0.296235282542268 +"1065",0.964,0.292170684901057 +"1066",0.965,0.288100720058772 +"1067",0.966,0.284025647014105 +"1068",0.967,0.27994572476575 +"1069",0.968,0.275861212312399 +"1070",0.969,0.271772368652746 +"1071",0.97,0.267679452785483 +"1072",0.971,0.263582723709304 +"1073",0.972,0.2594824404229 +"1074",0.973,0.255378861924966 +"1075",0.974,0.251272247214193 +"1076",0.975,0.247162855289277 +"1077",0.976,0.243050945148907 +"1078",0.977,0.238936775791779 +"1079",0.978,0.234820606216584 +"1080",0.979,0.230702695422018 +"1081",0.98,0.226583302406769 +"1082",0.981,0.222462686169535 +"1083",0.982,0.218341105709005 +"1084",0.983,0.214218820023875 +"1085",0.984,0.210096088112835 +"1086",0.985,0.205973168974581 +"1087",0.986,0.201850321607803 +"1088",0.987,0.19772778930117 +"1089",0.988,0.193605390678688 +"1090",0.989,0.189482992056206 +"1091",0.99,0.185360593433723 +"1092",0.991,0.181238194811242 +"1093",0.992,0.177115796188759 +"1094",0.993,0.172993397566277 +"1095",0.994,0.168870998943796 +"1096",0.995,0.164748600321314 +"1097",0.996,0.160626201698831 +"1098",0.997,0.15650380307635 +"1099",0.998,0.152381404453867 +"1100",0.999,0.148259005831385 +"1101",1,0.144136607208904 +"1102",1.001,0.140014208586422 +"1103",1.002,0.135891809963939 +"1104",1.003,0.131769411341458 +"1105",1.004,0.127647012718975 +"1106",1.005,0.123524614096493 +"1107",1.006,0.119402215474011 +"1108",1.007,0.11527981685153 +"1109",1.008,0.111157418229047 +"1110",1.009,0.107035019606566 +"1111",1.01,0.102912620984083 +"1112",1.011,0.0987902223616014 +"1113",1.012,0.0946678237391188 +"1114",1.013,0.0905454251166371 +"1115",1.014,0.0864230264941545 +"1116",1.015,0.0823006278716728 +"1117",1.016,0.0781782292491911 +"1118",1.017,0.0740558306267094 +"1119",1.018,0.0699334320042268 +"1120",1.019,0.0658110333817452 +"1121",1.02,0.0616886347592625 +"1122",1.021,0.0575662361367809 +"1123",1.022,0.0534438375142982 +"1124",1.023,0.0493214388918165 +"1125",1.024,0.0451990402693349 +"1126",1.025,0.0410766416468532 +"1127",1.026,0.0369542430243706 +"1128",1.027,0.0328318444018889 +"1129",1.028,0.0287094457794063 +"1130",1.029,0.0245870471569246 +"1131",1.03,0.020464648534442 +"1132",1.031,0.0163422499119603 +"1133",1.032,0.0122198512894776 +"1134",1.033,0.00809745266699685 +"1135",1.034,0.00397505404451426 +"1136",1.035,-0.000147344577967445 +"1137",1.036,-0.00426974320045004 +"1138",1.037,-0.00839214182293174 +"1139",1.038,-0.0125145404454143 +"1140",1.039,-0.016636939067896 +"1141",1.04,-0.0207593376903786 +"1142",1.041,-0.0248817363128603 +"1143",1.042,-0.029004134935342 +"1144",1.043,-0.0331265335578237 +"1145",1.044,-0.0372489321803063 +"1146",1.045,-0.041371330802788 +"1147",1.046,-0.0454937294252706 +"1148",1.047,-0.0496161280477523 +"1149",1.048,-0.0537385266702349 
+"1150",1.049,-0.0578609252927166 +"1151",1.05,-0.0619833239151983 +"1152",1.051,-0.06610572253768 +"1153",1.052,-0.0702281211601626 +"1154",1.053,-0.0743505197826443 +"1155",1.054,-0.078472918405126 +"1156",1.055,-0.0825953170276086 +"1157",1.056,-0.0867177156500903 +"1158",1.057,-0.0908401142725729 +"1159",1.058,-0.0949625128950546 +"1160",1.059,-0.0990849115175363 +"1161",1.06,-0.103207310140018 +"1162",1.061,-0.107329708762501 +"1163",1.062,-0.111452107384982 +"1164",1.063,-0.115574506007465 +"1165",1.064,-0.119696904629947 +"1166",1.065,-0.123819303252429 +"1167",1.066,-0.127941701874911 +"1168",1.067,-0.132064100497394 +"1169",1.068,-0.136186499119874 +"1170",1.069,-0.140308897742357 +"1171",1.07,-0.144431296364839 +"1172",1.071,-0.148553694987321 +"1173",1.072,-0.152676093609803 +"1174",1.073,-0.156798492232285 +"1175",1.074,-0.160920890854767 +"1176",1.075,-0.16504328947725 +"1177",1.076,-0.169165688099731 +"1178",1.077,-0.173288086722213 +"1179",1.078,-0.177410485344695 +"1180",1.079,-0.181532883967177 +"1181",1.08,-0.185655282589659 +"1182",1.081,-0.189777681212142 +"1183",1.082,-0.193900079834623 +"1184",1.083,-0.198022478457106 +"1185",1.084,-0.202144877079588 +"1186",1.085,-0.206267275702069 +"1187",1.086,-0.210389674324551 +"1188",1.087,-0.214512072947034 +"1189",1.088,-0.218634471569515 +"1190",1.089,-0.222756870191998 +"1191",1.09,-0.22687926881448 +"1192",1.091,-0.231001667436962 +"1193",1.092,-0.235124066059444 +"1194",1.093,-0.239246464681926 +"1195",1.094,-0.243368863304407 +"1196",1.095,-0.24749126192689 +"1197",1.096,-0.251613660549372 +"1198",1.097,-0.255736059171854 +"1199",1.098,-0.259858457794336 +"1200",1.099,-0.263980856416819 +"1201",1.1,-0.2681032550393 diff --git a/TeX/Plots/Data/scala_out_d_1_t.csv b/TeX/Plots/Data/scala_out_d_1_t.csv new file mode 100755 index 0000000..fb5b119 --- /dev/null +++ b/TeX/Plots/Data/scala_out_d_1_t.csv @@ -0,0 +1,101 @@ +x_n_5000_tl_0.1,y_n_5000_tl_0.1,x_n_5000_tl_1.0,y_n_5000_tl_1.0,x_n_5000_tl_3.0,y_n_5000_tl_3.0 +-5.0,1.794615305950707,-5.0,0.3982406589003759,-5.0,-0.4811539502118497 +-4.898989898989899,1.6984389486364895,-4.898989898989899,0.35719218031912614,-4.898989898989899,-0.48887996302459025 +-4.797979797979798,1.6014200743009022,-4.797979797979798,0.3160182633093358,-4.797979797979798,-0.4966732473871599 +-4.696969696969697,1.5040575427157106,-4.696969696969697,0.27464978660531225,-4.696969696969697,-0.5045073579233731 +-4.595959595959596,1.4061194142774731,-4.595959595959596,0.23293440418365288,-4.595959595959596,-0.5123589845230747 +-4.494949494949495,1.3072651356075136,-4.494949494949495,0.19100397829173557,-4.494949494949495,-0.5202738824510786 +-4.393939393939394,1.2078259346207492,-4.393939393939394,0.1488314515422353,-4.393939393939394,-0.5282281154332915 +-4.292929292929293,1.1079271590765678,-4.292929292929293,0.10646618526238515,-4.292929292929293,-0.536250283913464 +-4.191919191919192,1.0073183089866045,-4.191919191919192,0.0637511521454329,-4.191919191919192,-0.5443068679044686 +-4.090909090909091,0.9064682044248323,-4.090909090909091,0.020965778107027506,-4.090909090909091,-0.5524049731989601 +-3.9898989898989896,0.805095064694333,-3.9898989898989896,-0.02200882631350869,-3.9898989898989896,-0.5605562335116703 +-3.888888888888889,0.7032463151196859,-3.888888888888889,-0.06548644224881082,-3.888888888888889,-0.5687680272492979 +-3.787878787878788,0.6007843964001714,-3.787878787878788,-0.10914135786185346,-3.787878787878788,-0.5770307386196555 
[ ... 98 intermediate rows omitted: three (x, y) column pairs, each sampled at 100 evenly spaced x values on [-5, 5] ... ]
+5.0,-1.4004134094571867,5.0,-0.4080316669473787,5.0,0.2878184725593889
diff --git a/TeX/Plots/Data/scala_out_sin.csv b/TeX/Plots/Data/scala_out_sin.csv
new file mode 100755
index 0000000..7a95079
--- /dev/null
+++ b/TeX/Plots/Data/scala_out_sin.csv
@@ -0,0 +1,101 @@
+x_n_50_tl_0.0,y_n_50_tl_0.0,x_n_500_tl_0.0,y_n_500_tl_0.0,x_n_5000_tl_0.0,y_n_5000_tl_0.0,x_n_50_tl_1.0,y_n_50_tl_1.0,x_n_500_tl_1.0,y_n_500_tl_1.0,x_n_5000_tl_1.0,y_n_5000_tl_1.0,x_n_50_tl_3.0,y_n_50_tl_3.0,x_n_500_tl_3.0,y_n_500_tl_3.0,x_n_5000_tl_3.0,y_n_5000_tl_3.0
+-5.0,-0.8599583057554976,-5.0,1.6797068787192495,-5.0,1.7379689606223239,-5.0,-0.42741272499487776,-5.0,0.23661838590976328,-5.0,0.20399386816229978,-5.0,0.13095951218866275,-5.0,-0.46242184829078237,-5.0,-0.41058629664051305
[ ... intermediate rows omitted: nine (x, y) column pairs, one per combination of n in {50, 500, 5000} and tl in {0.0, 1.0, 3.0}, sampled at 100 evenly spaced x values on [-5, 5] ... ]
+4.3939393939393945,-0.18590372454994458,4.3939393939393945,-1.0972974392893766,4.3939393939393945,-1.1342383379633272,4.3939393939393945,0.4408190709475487,4.3939393939393945,-0.16982980680843562,4.3939393939393945,0.002964652994963484,4.3939393939393945,0.11796054958424437,4.3939393939393945,0.3922298874756054,4.3939393939393945,0.3845302650106349
+4.494949494949495,-0.216411974517865,4.494949494949495,-1.179182894055243,4.494949494949495,-1.2221355458185688,4.494949494949495,0.44032091498508585,4.494949494949495,-0.20469748939648835,4.494949494949495,-0.0206002794035424,4.494949494949495,0.09701325884395126,4.494949494949495,0.39041788567711144,4.494949494949495,0.38248614430609396 +4.595959595959595,-0.24692022448578524,4.595959595959595,-1.2601894992373368,4.595959595959595,-1.3091379548259912,4.595959595959595,0.4390119198940737,4.595959595959595,-0.239564339118166,4.595959595959595,-0.044064215802437315,4.595959595959595,0.07606596810365834,4.595959595959595,0.38861853091288373,4.595959595959595,0.3804739406387159 +4.696969696969697,-0.2774284744537062,4.696969696969697,-1.3408190143954206,4.696969696969697,-1.395667382198044,4.696969696969697,0.4377029248030613,4.696969696969697,-0.2744311888398445,4.696969696969697,-0.06739710896332894,4.696969696969697,0.05511867736336504,4.696969696969697,0.38683625018149875,4.696969696969697,0.37848669218529357 +4.797979797979798,-0.3079367244216266,4.797979797979798,-1.4214485295534998,4.797979797979798,-1.4814148159277154,4.797979797979798,0.436393929712049,4.797979797979798,-0.3092980385615221,4.797979797979798,-0.09057526494106827,4.797979797979798,0.034171386623072064,4.797979797979798,0.3850542123238927,4.797979797979798,0.37652869146057905 +4.8989898989899,-0.3384449743895474,4.8989898989899,-1.5019215376311323,4.8989898989899,-1.5662892316768398,4.8989898989899,0.4350560618496009,4.8989898989899,-0.34416306870335767,4.8989898989899,-0.11357143325279366,4.8989898989899,0.013224095882778591,4.8989898989899,0.383272237289863,4.8989898989899,0.37460430584833954 +5.0,-0.3689532243574676,5.0,-1.5820215750973248,5.0,-1.6508596672714462,5.0,0.43307940950570034,5.0,-0.37879161071248096,5.0,-0.13636462992911846,5.0,-0.007723194857514326,5.0,0.38149127984729847,5.0,0.37272620912380855 diff --git a/TeX/Plots/Data/sin_6.csv b/TeX/Plots/Data/sin_6.csv new file mode 100644 index 0000000..8acc554 --- /dev/null +++ b/TeX/Plots/Data/sin_6.csv @@ -0,0 +1,7 @@ +x,y +-3.14159265358979 , -1.22464679914735e-16 +-1.88495559215388 , -0.951056516295154 +-0.628318530717959 , -0.587785252292473 +0.628318530717959 , 0.587785252292473 +1.88495559215388 , 0.951056516295154 +3.14159265358979 , 1.22464679914735e-16 diff --git a/TeX/Plots/Data/sin_conv.csv b/TeX/Plots/Data/sin_conv.csv new file mode 100644 index 0000000..18f9767 --- /dev/null +++ b/TeX/Plots/Data/sin_conv.csv @@ -0,0 +1,64 @@ +,x_i,y_i,x_d,y_d,x,y +"1",0,0,-0.251688505259414,-0.109203329280437,-0.0838961684198045,-0.0364011097601456 +"2",0.1,0.0998334166468282,0.216143831477992,0.112557051753147,0.00912581751114394,0.0102181849309398 +"3",0.2,0.198669330795061,0.351879533708722,0.52138915851383,0.120991434720523,0.180094983253476 +"4",0.3,0.29552020666134,-0.0169121548298757,0.0870956013269369,0.0836131805695847,0.163690012207993 +"5",0.4,0.389418342308651,0.278503661037003,0.464752686490904,0.182421968363305,0.294268636359638 +"6",0.5,0.479425538604203,0.241783494554983,0.521480762031938,0.216291763003623,0.399960258238722 +"7",0.6,0.564642473395035,0.67288177436767,0.617435509386938,0.35521581484916,0.469717955748659 +"8",0.7,0.644217687237691,0.692239292735764,0.395366561077235,0.492895242512842,0.472257444593698 +"9",0.8,0.717356090899523,0.779946606884677,0.830045203984444,0.621840812496715,0.609161571471379 +"10",0.9,0.783326909627483,0.796987424421658,0.801263132114778,0.723333122197902,0.682652280249237 
+"11",1,0.841470984807897,1.06821012817873,0.869642838589798,0.860323524382936,0.752971972337735 +"12",1.1,0.891207360061435,1.50128637982775,0.899079529605641,1.09148187598916,0.835465707990221 +"13",1.2,0.932039085967226,1.1194263347154,0.906626360727432,1.13393429991233,0.875953352580199 +"14",1.3,0.963558185417193,1.24675170552299,1.07848030956084,1.2135821540696,0.950969562327306 +"15",1.4,0.98544972998846,1.32784804980202,0.76685418220594,1.2818141129714,0.899892140468108 +"16",1.5,0.997494986604054,1.23565831982523,1.07310713979952,1.2548338349408,0.961170357331681 +"17",1.6,0.999573603041505,1.90289281875567,0.88003153305018,1.47254506382487,0.94006950203764 +"18",1.7,0.991664810452469,1.68871194985252,1.01829329437246,1.56940444551462,0.955793455192302 +"19",1.8,0.973847630878195,1.72179983981017,1.02268013575533,1.64902528694529,0.988666907865147 +"20",1.9,0.946300087687414,2.0758716236832,0.805032560816536,1.83908127693465,0.928000158917177 +"21",2,0.909297426825682,2.11118945422405,1.0134691646089,1.94365432453739,0.957334347939419 +"22",2.1,0.863209366648874,2.00475777514698,0.86568986134637,1.9826265174693,0.924298444442167 +"23",2.2,0.80849640381959,2.40773948766051,0.667018023975934,2.15807575978944,0.826761739840873 +"24",2.3,0.74570521217672,2.14892522112975,0.872704236332415,2.17485332420928,0.839957045849706 +"25",2.4,0.675463180551151,2.41696701330131,0.253955021611832,2.26412064248401,0.631186439537074 +"26",2.5,0.598472144103957,2.4087686184711,0.49450592290142,2.33847747374241,0.557319074033222 +"27",2.6,0.515501371821464,2.55312145187913,0.343944677655963,2.4151672191424,0.467867318187242 +"28",2.7,0.42737988023383,2.6585492172135,0.528990826178838,2.51649125567521,0.447178678139147 +"29",2.8,0.334988150155905,2.86281283456189,0.311400289332401,2.65184232661008,0.399952143417531 +"30",2.9,0.239249329213982,2.74379162744449,0.501282616227342,2.70796893413474,0.432791852065713 +"31",3,0.141120008059867,2.95951338295806,0.241385538727577,2.81576254355573,0.373424929745113 +"32",3.1,0.0415806624332905,2.87268165585702,0.0764217470113609,2.85626015646841,0.264426413128825 +"33",3.2,-0.0583741434275801,3.29898326143096,-0.272500742891131,3.0101734240017,0.0756660807058224 +"34",3.3,-0.157745694143249,3.64473302259565,-0.24394459655987,3.24463496592626,-0.0688606479078372 +"35",3.4,-0.255541102026832,3.46698556586598,-0.184272732807665,3.35339770834784,-0.15210430721581 +"36",3.5,-0.35078322768962,3.67208160089566,-0.119933071489115,3.51318482264886,-0.176430496141549 +"37",3.6,-0.442520443294852,3.73738883546162,-0.486197268315415,3.62961845872181,-0.283186040443485 +"38",3.7,-0.529836140908493,3.77209072631297,-0.70275845349803,3.68619468325631,-0.422698101171958 +"39",3.8,-0.611857890942719,3.66424718733509,-0.482410535792735,3.69727905622484,-0.462935060857071 +"40",3.9,-0.687766159183974,3.72257849834575,-0.58477261395861,3.71784166083333,-0.543108060927685 +"41",4,-0.756802495307928,3.85906293918747,-0.703015362823377,3.76539960460785,-0.618449987254768 +"42",4.1,-0.818277111064411,4.0131961543859,-0.900410257326814,3.84632588679948,-0.708384794580195 +"43",4.2,-0.871575772413588,4.0263131749378,-0.906044808231391,3.92085812717095,-0.789303202089581 +"44",4.3,-0.916165936749455,4.77220075671212,-0.530827398816399,4.22925719163087,-0.729943577630504 +"45",4.4,-0.951602073889516,4.4795636311648,-1.26672674728111,4.35331987391088,-0.921377204806384 +"46",4.5,-0.977530117665097,4.5088210845027,-0.886168448505782,4.44898342417679,-0.914264630323723 
+"47",4.6,-0.993691003633465,4.70645816063034,-1.1082213336257,4.58861983576766,-0.97806804633887 +"48",4.7,-0.999923257564101,4.48408312008838,-0.98352521226689,4.55827710678399,-1.01979325501755 +"49",4.8,-0.996164608835841,4.97817348334347,-1.03043977928678,4.69715193557134,-1.02203657500247 +"50",4.9,-0.982452612624332,5.09171179984929,-0.948912592308037,4.8484480091335,-0.999631162740658 +"51",5,-0.958924274663138,4.87710566000798,-0.825224506141761,4.87693462801326,-0.937722874707385 +"52",5.1,-0.925814682327732,5.04139294635392,-0.718936957124138,4.97198282698482,-0.856650521199568 +"53",5.2,-0.883454655720153,4.94893136398377,-0.992753696742329,4.98294046406006,-0.885371127105841 +"54",5.3,-0.832267442223901,5.38128555915899,-0.717434652733088,5.10670981664685,-0.816103747160468 +"55",5.4,-0.772764487555987,5.46192736637355,-0.724060934669406,5.2398375587704,-0.780347098915984 +"56",5.5,-0.705540325570392,5.30834840605735,-0.721772537926303,5.28807996342596,-0.766498807502665 +"57",5.6,-0.631266637872321,5.53199687756185,-0.583133415115471,5.40779902870202,-0.688843253413245 +"58",5.7,-0.550685542597638,5.9238064899769,-0.541063721566544,5.59865656961444,-0.627040990301198 +"59",5.8,-0.464602179413757,5.8067999294844,-0.43156566524513,5.68077207716296,-0.552246304884294 +"60",5.9,-0.373876664830236,5.93089453525347,-0.604056792592816,5.80084302534748,-0.550733954237757 +"61",6,-0.279415498198926,6.02965160059402,-0.234452930170458,5.91786841211583,-0.434812265604247 +"62",6.1,-0.182162504272095,5.88697419016579,-0.135764844759742,5.91990685000071,-0.323660336266941 +"63",6.2,-0.0830894028174964,5.91445270773648,-0.0073552500992853,5.92798052258888,-0.205537962618181 diff --git a/TeX/Plots/RN_vs_RS.tex b/TeX/Plots/RN_vs_RS.tex new file mode 100644 index 0000000..82c0ad9 --- /dev/null +++ b/TeX/Plots/RN_vs_RS.tex @@ -0,0 +1,138 @@ +\pgfplotsset{ +compat=1.11, +legend image code/.code={ +\draw[mark repeat=2,mark phase=2] +plot coordinates { +(0cm,0cm) +(0.075cm,0cm) %% default is (0.3cm,0cm) +(0.15cm,0cm) %% default is (0.6cm,0cm) +};% +} +} +\begin{figure} + \begin{subfigure}[b]{0.5\textwidth} + \begin{subfigure}[b]{\textwidth} + \begin{adjustbox}{width=\textwidth, height=0.25\textheight} + \begin{tikzpicture} + \begin{axis}[ + ytick = {-1, 0, 1, 2}, + yticklabels = {$-1$, $\phantom{-0.}0$, $1$, $2$},] + \addplot table [x=x, y=y, col sep=comma, only marks, + forget plot] {Plots/Data/sin_6.csv}; + \addplot [black, line width=2pt] table [x=x, y=y, col + sep=comma, mark=none] {Plots/Data/matlab_0.csv}; + \addplot [red, line width = 1.5pt, dashed] table [x=x_n_5000_tl_0.0, + y=y_n_5000_tl_0.0, col sep=comma, mark=none] {Plots/Data/scala_out_sin.csv}; + \addlegendentry{$f_1^{*, 0.1}$}; + \addlegendentry{$\mathcal{RN}_w^{\tilde{\lambda}}$}; + \end{axis} + \end{tikzpicture} + \end{adjustbox} + \caption{$\lambda = 0.1$} + \end{subfigure}\\ + \begin{subfigure}[b]{\textwidth} + \begin{adjustbox}{width=\textwidth, height=0.25\textheight} + \begin{tikzpicture} + \begin{axis} + \addplot table [x=x, y=y, col sep=comma, only marks, + forget plot] {Plots/Data/sin_6.csv}; + \addplot [black, line width=2pt] table [x=x, y=y, col sep=comma, mark=none] {Plots/Data/matlab_1.csv}; + \addplot [red, line width = 1.5pt, dashed] table [x=x_n_5000_tl_1.0, + y=y_n_5000_tl_1.0, col sep=comma, mark=none] {Plots/Data/scala_out_sin.csv}; + \addlegendentry{$f_1^{*, 1.0}$}; + \addlegendentry{$\mathcal{RN}_w^{\tilde{\lambda}}$}; + \end{axis} + \end{tikzpicture} + \end{adjustbox} + \caption{$\lambda = 1.0$} + 
\end{subfigure}\\
+    \begin{subfigure}[b]{\textwidth}
+      \begin{adjustbox}{width=\textwidth, height=0.25\textheight}
+        \begin{tikzpicture}
+          \begin{axis}
+            \addplot table [x=x, y=y, col sep=comma, only marks,
+            forget plot] {Plots/Data/sin_6.csv};
+            \addplot [black, line width=2pt] table [x=x, y=y, col sep=comma, mark=none] {Plots/Data/matlab_3.csv};
+            \addplot [red, line width = 1.5pt, dashed] table [x=x_n_5000_tl_3.0,
+            y=y_n_5000_tl_3.0, col sep=comma, mark=none] {Plots/Data/scala_out_sin.csv};
+            \addlegendentry{$f_1^{*, 3.0}$};
+            \addlegendentry{$\mathcal{RN}_w^{\tilde{\lambda}}$};
+          \end{axis}
+        \end{tikzpicture}
+      \end{adjustbox}
+      \caption{$\lambda = 3.0$}
+    \end{subfigure}
+  \end{subfigure}
+  \begin{subfigure}[b]{0.5\textwidth}
+    \begin{subfigure}[b]{\textwidth}
+      \begin{adjustbox}{width=\textwidth, height=0.245\textheight}
+        \begin{tikzpicture}
+          \begin{axis}[
+            ytick = {-2,-1, 0, 1, 2},
+            yticklabels = {$-2$,$-1$, $\phantom{-0.}0$, $1$, $2$},]
+            \addplot table [x=x, y=y, col sep=comma, only marks,
+            forget plot] {Plots/Data/data_sin_d_t.csv};
+            \addplot [black, line width=2pt] table [x=x, y=y, col sep=comma, mark=none] {Plots/Data/matlab_sin_d_01.csv};
+            \addplot [red, line width = 1.5pt, dashed] table [x=x_n_5000_tl_0.1,
+            y=y_n_5000_tl_0.1, col sep=comma, mark=none] {Plots/Data/scala_out_d_1_t.csv};
+            \addlegendentry{$f_1^{*, 0.1}$};
+            \addlegendentry{$\mathcal{RN}_w^{\tilde{\lambda}}$};
+          \end{axis}
+        \end{tikzpicture}
+      \end{adjustbox}
+      \caption{$\lambda = 0.1$}
+    \end{subfigure}\\
+    \begin{subfigure}[b]{\textwidth}
+      \begin{adjustbox}{width=\textwidth, height=0.25\textheight}
+        \begin{tikzpicture}
+          \begin{axis}
+            \addplot table [x=x, y=y, col sep=comma, only marks,
+            forget plot] {Plots/Data/data_sin_d_t.csv};
+            \addplot [black, line width=2pt] table [x=x, y=y, col sep=comma, mark=none] {Plots/Data/matlab_sin_d_1.csv};
+            \addplot [red, line width = 1.5pt, dashed] table [x=x_n_5000_tl_1.0,
+            y=y_n_5000_tl_1.0, col sep=comma, mark=none] {Plots/Data/scala_out_d_1_t.csv};
+            \addlegendentry{$f_1^{*, 1.0}$};
+            \addlegendentry{$\mathcal{RN}_w^{\tilde{\lambda}}$};
+          \end{axis}
+        \end{tikzpicture}
+      \end{adjustbox}
+      \caption{$\lambda = 1.0$}
+    \end{subfigure}\\
+    \begin{subfigure}[b]{\textwidth}
+      \begin{adjustbox}{width=\textwidth, height=0.25\textheight}
+        \begin{tikzpicture}
+          \begin{axis}
+            \addplot table [x=x, y=y, col sep=comma, only marks,
+            forget plot] {Plots/Data/data_sin_d_t.csv};
+            \addplot [black, line width=2pt] table [x=x, y=y, col sep=comma, mark=none] {Plots/Data/matlab_sin_d_3.csv};
+            \addplot [red, line width = 1.5pt, dashed] table [x=x_n_5000_tl_3.0,
+            y=y_n_5000_tl_3.0, col sep=comma, mark=none] {Plots/Data/scala_out_d_1_t.csv};
+            \addlegendentry{$f_1^{*, 3.0}$};
+            \addlegendentry{$\mathcal{RN}_w^{\tilde{\lambda}}$};
+          \end{axis}
+        \end{tikzpicture}
+      \end{adjustbox}
+      \caption{$\lambda = 3.0$}
+    \end{subfigure}
+  \end{subfigure}
+  \caption{% In these figures the behaviour stated in ... is visualized
+    % in two examples. For $(a), (b), (c)$ six values of the sine function,
+    % equidistantly spaced on $[-\pi, \pi]$, have been used as training data. For
+    % $(d),(e),(f)$ 15 equidistant values have been used, where
+    % $y_i^{train} = \sin(x_i^{train}) + \varepsilon_i$ and
+    % $\varepsilon_i \sim \mathcal{N}(0, 0.3)$. For
+    % $\mathcal{RN}_w^{\tilde{\lambda, *}}$ the random weights are
+    % distributed as follows
+    % \begin{align*}
+    %   \xi_k &\sim 
+    % \end{align*}
+    Ridge penalized neural network compared to regression spline,
+    trained on $\text{data}_A$ in a), b), c) and on
+    $\text{data}_B$ in d), e), f).
+    The parameters of each are given above.
+  }
+\end{figure}
+%%% Local Variables:
+%%% mode: latex
+%%% TeX-master: 
+%%% End:
diff --git a/TeX/Plots/SGD_vs_GD.tex b/TeX/Plots/SGD_vs_GD.tex
new file mode 100644
index 0000000..0bb9bbe
--- /dev/null
+++ b/TeX/Plots/SGD_vs_GD.tex
@@ -0,0 +1,91 @@
+\pgfplotsset{
+compat=1.11,
+legend image code/.code={
+\draw[mark repeat=2,mark phase=2]
+plot coordinates {
+(0cm,0cm)
+(0.0cm,0cm)         %% default is (0.3cm,0cm)
+(0.0cm,0cm)         %% default is (0.6cm,0cm)
+};%
+}
+}
+\begin{figure}
+  \begin{subfigure}[b]{\textwidth}
+    \begin{tikzpicture}
+      \begin{axis}[tick style = {draw = none}, width = \textwidth,
+        height = 0.65\textwidth,
+        xtick = {1, 3, 5,7,9,11,13,15,17,19},
+        xticklabels = {$2$, $4$, $6$, $8$,
+        $10$,$12$,$14$,$16$,$18$,$20$},
+        xlabel = {training epoch}, ylabel = {classification accuracy}]
+        \addplot table
+        [x=epoch, y=val_accuracy, col sep=comma] {Plots/Data/GD_01.log};
+        \addplot table
+        [x=epoch, y=val_accuracy, col sep=comma] {Plots/Data/GD_05.log};
+        \addplot table
+        [x=epoch, y=val_accuracy, col sep=comma] {Plots/Data/GD_1.log};
+        \addplot table
+        [x=epoch, y=val_accuracy, col sep=comma]
+        {Plots/Data/SGD_01_b32.log};
+
+        \addlegendentry{GD$_{0.01}$}
+        \addlegendentry{GD$_{0.05}$}
+        \addlegendentry{GD$_{0.1}$}
+        \addlegendentry{SGD$_{0.01}$}
+      \end{axis}
+    \end{tikzpicture}
+    %\caption{Classification accuracy}
+  \end{subfigure}
+  \begin{subfigure}[b]{\textwidth}
+    \begin{tikzpicture}
+      \begin{axis}[tick style = {draw = none}, width = \textwidth,
+        height = 0.65\textwidth,
+        ytick = {0, 1, 2, 3, 4},
+        yticklabels = {$0$, $1$, $\phantom{0.}2$, $3$, $4$},
+        xtick = {1, 3, 5,7,9,11,13,15,17,19},
+        xticklabels = {$2$, $4$, $6$, $8$,
+        $10$,$12$,$14$,$16$,$18$,$20$},
+        xlabel = {training epoch}, ylabel = {error measure}]
+        \addplot table
+        [x=epoch, y=val_loss, col sep=comma] {Plots/Data/GD_01.log};
+        \addplot table
+        [x=epoch, y=val_loss, col sep=comma] {Plots/Data/GD_05.log};
+        \addplot table
+        [x=epoch, y=val_loss, col sep=comma] {Plots/Data/GD_1.log};
+        \addplot table
+        [x=epoch, y=val_loss, col sep=comma] {Plots/Data/SGD_01_b32.log};
+
+        \addlegendentry{GD$_{0.01}$}
+        \addlegendentry{GD$_{0.05}$}
+        \addlegendentry{GD$_{0.1}$}
+        \addlegendentry{SGD$_{0.01}$}
+
+      \end{axis}
+    \end{tikzpicture}
+    \caption{Performance metrics during training}
+  \end{subfigure}
+  % \\~\\
+  \caption{The neural network given in ?? trained with different
+    algorithms on the MNIST handwritten digits data set. For gradient
+    descent the learning rates 0.01, 0.05 and 0.1 are used (GD$_{\cdot}$). For
+    stochastic gradient descent a batch size of 32 and a learning rate
+    of 0.01 is used (SGD$_{0.01}$).}
+  \label{fig:sgd_vs_gd}
+\end{figure}
+
+\begin{table}
+  \begin{tabu} to \textwidth {@{} *4{X[c]}c*4{X[c]} @{}}
+    \multicolumn{4}{c}{Classification Accuracy}
+    &~&\multicolumn{4}{c}{Error Measure}
+    \\\cline{1-4}\cline{6-9}
+    GD$_{0.01}$&GD$_{0.05}$&GD$_{0.1}$&SGD$_{0.01}$&&GD$_{0.01}$&GD$_{0.05}$&GD$_{0.1}$&SGD$_{0.01}$
+    \\\cline{1-4}\cline{6-9}
+    1&1&1&1&&1&1&1&1
+  \end{tabu}
+  \caption{Performance metrics of the networks trained in
+    Figure~\ref{fig:sgd_vs_gd} after 20 training epochs.}
+\end{table}
+%%% Local Variables:
+%%% mode: latex
+%%% TeX-master: "../main"
+%%% End:
diff --git a/TeX/Plots/pfg_test.tex b/TeX/Plots/pfg_test.tex
new file mode 100644
index 0000000..391dbf4
--- /dev/null
+++ b/TeX/Plots/pfg_test.tex
@@ -0,0 +1,146 @@
+\documentclass{article}
+\usepackage{pgfplots}
+\usepackage{filecontents}
+\usepackage{subcaption}
+\usepackage{adjustbox}
+\usepackage{xcolor}
+\usepackage{tabu}
+\usepackage{graphicx}
+\usetikzlibrary{calc, 3d}
+
+\begin{document}
+\pgfplotsset{
+compat=1.11,
+legend image code/.code={
+\draw[mark repeat=2,mark phase=2]
+plot coordinates {
+(0cm,0cm)
+(0.0cm,0cm)         %% default is (0.3cm,0cm)
+(0.0cm,0cm)         %% default is (0.6cm,0cm)
+};%
+}
+}
+\begin{figure}
+  \begin{subfigure}[b]{\textwidth}
+    \begin{tikzpicture}
+      \begin{axis}[tick style = {draw = none}, width = \textwidth,
+        height = 0.7\textwidth,
+        xtick = {1, 3, 5,7,9,11,13,15,17,19},
+        xticklabels = {$2$, $4$, $6$, $8$,
+        $10$,$12$,$14$,$16$,$18$,$20$},
+        xlabel = {epoch}, ylabel = {Classification Accuracy}]
+        \addplot table
+        [x=epoch, y=val_accuracy, col sep=comma] {Data/GD_01.log};
+        \addplot table
+        [x=epoch, y=val_accuracy, col sep=comma] {Data/GD_05.log};
+        \addplot table
+        [x=epoch, y=val_accuracy, col sep=comma] {Data/GD_1.log};
+        \addplot table
+        [x=epoch, y=val_accuracy, col sep=comma]
+        {Data/SGD_01_b32.log};
+
+        \addlegendentry{GD$_{0.01}$}
+        \addlegendentry{GD$_{0.05}$}
+        \addlegendentry{GD$_{0.1}$}
+        \addlegendentry{SGD$_{0.01}$}
+      \end{axis}
+    \end{tikzpicture}
+    %\caption{Classification accuracy}
+  \end{subfigure}
+  \begin{subfigure}[b]{\textwidth}
+    \begin{tikzpicture}
+      \begin{axis}[tick style = {draw = none}, width = \textwidth,
+        height = 0.7\textwidth,
+        ytick = {0, 1, 2, 3, 4},
+        yticklabels = {$0$, $1$, $\phantom{0.}2$, $3$, $4$},
+        xtick = {1, 3, 5,7,9,11,13,15,17,19},
+        xticklabels = {$2$, $4$, $6$, $8$,
+        $10$,$12$,$14$,$16$,$18$,$20$},
+        xlabel = {epoch}, ylabel = {Error Measure}]
+        \addplot table
+        [x=epoch, y=val_loss, col sep=comma] {Data/GD_01.log};
+        \addplot table
+        [x=epoch, y=val_loss, col sep=comma] {Data/GD_05.log};
+        \addplot table
+        [x=epoch, y=val_loss, col sep=comma] {Data/GD_1.log};
+        \addplot table
+        [x=epoch, y=val_loss, col sep=comma] {Data/SGD_01_b32.log};
+
+        \addlegendentry{GD$_{0.01}$}
+        \addlegendentry{GD$_{0.05}$}
+        \addlegendentry{GD$_{0.1}$}
+        \addlegendentry{SGD$_{0.01}$}
+
+      \end{axis}
+    \end{tikzpicture}
+    \caption{Performance metrics during training}
+  \end{subfigure}
+  \\~\\
+  \begin{subfigure}[b]{1.0\linewidth}
+    \begin{tabu} to \textwidth {@{} *4{X[c]}c*4{X[c]} @{}}
+      \multicolumn{4}{c}{Classification Accuracy}
+      &~&\multicolumn{4}{c}{Error Measure}
+      \\\cline{1-4}\cline{6-9}
+      GD$_{0.01}$&GD$_{0.05}$&GD$_{0.1}$&SGD$_{0.01}$&&GD$_{0.01}$&GD$_{0.05}$&GD$_{0.1}$&SGD$_{0.01}$
+      \\\cline{1-4}\cline{6-9}
+      1&1&1&1&&1&1&1&1
+    \end{tabu}
+    \caption{Performance metrics after 20 epochs}
+  \end{subfigure}
+  \caption{The neural network given in ?? trained with different
+    algorithms on the MNIST handwritten digits data set. For gradient
+    descent the learning rates 0.01, 0.05 and 0.1 are used (GD$_{\text{rate}}$). For
+    stochastic gradient descent a batch size of 32 and a learning rate
+    of 0.01 is used (SGD$_{0.01}$).}
+\end{figure}
+
+\begin{center}
+\begin{figure}[h]
+  \begin{subfigure}{0.49\textwidth}
+    \includegraphics[width=\textwidth]{Data/klammern.jpg}
+    \caption{Original Picture}
+  \end{subfigure}
+  \begin{subfigure}{0.49\textwidth}
+    \includegraphics[width=\textwidth]{Data/image_conv4.png}
+    \caption{test}
+  \end{subfigure}
+  \begin{subfigure}{0.49\textwidth}
+    \includegraphics[width=\textwidth]{Data/image_conv5.png}
+    \caption{test}
+  \end{subfigure}
+  \begin{subfigure}{0.49\textwidth}
+    \includegraphics[width=\textwidth]{Data/image_conv6.png}
+    \caption{test}
+  \end{subfigure}
+\end{figure}
+\end{center}
+
+\begin{figure}
+  \begin{adjustbox}{width=\textwidth}
+    \begin{tikzpicture}
+      \begin{scope}[x = (0:1cm), y=(90:1cm), z=(15:-0.5cm)]
+        \node[canvas is xy plane at z=0, transform shape] at (0,0)
+        {\includegraphics[width=5cm]{Data/klammern_r.jpg}};
+        \node[canvas is xy plane at z=2, transform shape] at (0,-0.2)
+        {\includegraphics[width=5cm]{Data/klammern_g.jpg}};
+        \node[canvas is xy plane at z=4, transform shape] at (0,-0.4)
+        {\includegraphics[width=5cm]{Data/klammern_b.jpg}};
+        \node[canvas is xy plane at z=4, transform shape] at (-8,-0.2)
+        {\includegraphics[width=5.3cm]{Data/klammern_rgb.jpg}};
+      \end{scope}
+    \end{tikzpicture}
+  \end{adjustbox}
+  \caption{On the right the red, green and blue channels of the picture
+    are displayed. In order to better visualize the color channels the
+    black and white picture of each channel has been colored in the
+    respective color. Combining the layers results in the image on the
+    left.}
+\end{figure}
+
+
+\end{document}
+
+%%% Local Variables:
+%%% mode: latex
+%%% TeX-master: t
+%%% End:
diff --git a/TeX/Plots/sin_conv.csv b/TeX/Plots/sin_conv.csv
new file mode 100644
index 0000000..73f9e5c
--- /dev/null
+++ b/TeX/Plots/sin_conv.csv
@@ -0,0 +1,64 @@
+"","x_i","y_i","x_d","y_d","x","y"
+"1",0,0,0.0815633019993375,0.095134925029757,0.0815633019993375,0.095134925029757
+"2",0.1,0.0998334166468282,-0.137539012603596,0.503920419784276,-0.137539012603596,0.503920419784276
+"3",0.2,0.198669330795061,0.219868163218743,0.32022289024623,0.219868163218743,0.32022289024623
+"4",0.3,0.29552020666134,0.378332723534869,0.474906286765401,0.378332723534869,0.474906286765401
+"5",0.4,0.389418342308651,0.286034335293811,0.422891394375764,0.215056588291437,0.412478430748051
+"6",0.5,0.479425538604203,-0.109871707385461,0.229661026779107,0.122574532557623,0.353221043330047
+"7",0.6,0.564642473395035,0.91036951450573,0.56079130435097,0.451160317716352,0.452893574072324
+"8",0.7,0.644217687237691,0.899001194675409,0.714355793051917,0.491731451724399,0.514477919331008
+"9",0.8,0.717356090899523,0.733791390723896,0.694085383523086,0.488943974889845,0.530054084580656
+"10",0.9,0.783326909627483,0.893642943873427,0.739792642916928,0.599785378272423,0.575149967162231
+"11",1,0.841470984807897,0.895913227983752,0.658288213778898,0.650886140047209,0.577618711891772
+"12",1.1,0.891207360061435,1.01252219752013,0.808981437684505,0.726263244907525,0.643161394030218
+"13",1.2,0.932039085967226,1.30930912337975,1.04111824066026,0.872590842152803,0.745714536528734
+"14",1.3,0.963558185417193,1.0448292335495,0.741250429230841,0.850147062957694,0.687171673021914
+"15",1.4,0.98544972998846,1.57369086195552,1.17277927321094,1.06520673597544,0.847936751231165
+"16",1.5,0.997494986604054,1.61427415976939,1.3908361301708,1.15616745244604,0.969474391592075
+"17",1.6,0.999573603041505,1.34409615749122,0.976992098566069,1.13543598207093,0.889434319996364 +"18",1.7,0.991664810452469,1.79278028030419,1.02939764179765,1.33272772191879,0.935067381106346 +"19",1.8,0.973847630878195,1.50721559744085,0.903076361857071,1.30862923824728,0.91665506605512 +"20",1.9,0.946300087687414,1.835014641556,0.830477479204284,1.45242210409837,0.889715842048808 +"21",2,0.909297426825682,1.98589997236352,0.887302138185342,1.56569111721857,0.901843632635883 +"22",2.1,0.863209366648874,2.31436634488224,0.890096618924313,1.73810390755555,0.899632162941341 +"23",2.2,0.80849640381959,2.14663445612581,0.697012453130415,1.77071083163663,0.831732978616874 +"24",2.3,0.74570521217672,2.17162372560288,0.614243640399509,1.84774268936257,0.787400621584077 +"25",2.4,0.675463180551151,2.2488591417345,0.447664288915269,1.93366609303299,0.707449056213168 +"26",2.5,0.598472144103957,2.56271588872389,0.553368843490625,2.08922735802261,0.702402440783529 +"27",2.6,0.515501371821464,2.60986205081511,0.503762006272682,2.17548673152621,0.657831176057599 +"28",2.7,0.42737988023383,2.47840649766003,0.215060732402894,2.20251747034638,0.533903400086802 +"29",2.8,0.334988150155905,2.99861119922542,0.28503285049582,2.43015164462239,0.512492561673074 +"30",2.9,0.239249329213982,3.09513467852082,0.245355736487949,2.54679545455398,0.461447717313721 +"31",3,0.141120008059867,2.86247369846558,0.0960140633436418,2.55274767368554,0.371740588261606 +"32",3.1,0.0415806624332905,2.79458017090243,-0.187923650913249,2.59422388058738,0.234694070506915 +"33",3.2,-0.0583741434275801,3.6498183243501,-0.186738431858275,2.9216851043241,0.173308072295566 +"34",3.3,-0.157745694143249,3.19424275971809,-0.221908035274934,2.86681135711315,0.101325637659584 +"35",3.4,-0.255541102026832,3.53166785156005,-0.295496842654793,3.03827050777863,0.0191967841533109 +"36",3.5,-0.35078322768962,3.53250700922714,-0.364585027403596,3.12709094619305,-0.0558446366563474 +"37",3.6,-0.442520443294852,3.52114271616751,-0.363845774016092,3.18702722489489,-0.10585071711408 +"38",3.7,-0.529836140908493,3.72033580551176,-0.386489608468821,3.31200591645168,-0.158195730190865 +"39",3.8,-0.611857890942719,4.0803717995796,-0.64779795182054,3.49862620703954,-0.284999326812438 +"40",3.9,-0.687766159183974,3.88351729419721,-0.604406622894426,3.51908925124143,-0.324791870057922 +"41",4,-0.756802495307928,3.9941257036697,-0.8061112437715,3.62222513609486,-0.438560071688316 +"42",4.1,-0.818277111064411,3.81674488816054,-0.548538951165239,3.63032709398802,-0.41285438330036 +"43",4.2,-0.871575772413588,4.47703348424544,-0.998992385231986,3.88581748102334,-0.592305016590357 +"44",4.3,-0.916165936749455,4.46179199544059,-0.969288921090897,3.96444243944485,-0.643076376622242 +"45",4.4,-0.951602073889516,4.15184730382548,-1.11987501275525,3.93838897981045,-0.743258835859858 +"46",4.5,-0.977530117665097,4.64522916494355,-0.772872365801468,4.15504805602606,-0.691414328153313 +"47",4.6,-0.993691003633465,4.68087925098283,-0.650422764094352,4.24176417425486,-0.675107584174976 +"48",4.7,-0.999923257564101,5.00475403211142,-0.922605880059771,4.41432228408005,-0.770625346502085 +"49",4.8,-0.996164608835841,4.71428836112322,-1.14280193223997,4.41279031790692,-0.861010494025717 +"50",4.9,-0.982452612624332,5.02115518218406,-0.9819618243158,4.57449352886454,-0.843786948015608 +"51",5,-0.958924274663138,4.92057344952522,-0.872931430146499,4.61418118503201,-0.836318916150308 
+"52",5.1,-0.925814682327732,5.37277893732831,-0.91444926304078,4.81555148166217,-0.864686555983682 +"53",5.2,-0.883454655720153,5.19524942845082,-1.41169784739596,4.84152902094499,-1.03768305406186 +"54",5.3,-0.832267442223901,5.4432222181271,-0.726481337519931,4.98565483155961,-0.856094353978009 +"55",5.4,-0.772764487555987,4.98285013865449,-0.692803346852181,4.90897053115903,-0.838425020062396 +"56",5.5,-0.705540325570392,5.33298025214155,-0.343702005257262,5.0497327607228,-0.711573964373115 +"57",5.6,-0.631266637872321,5.49935694796791,-0.828968673188174,5.15036520204232,-0.816467931201244 +"58",5.7,-0.550685542597638,5.69204187550805,-0.481580461165225,5.26232964126231,-0.689500817105975 +"59",5.8,-0.464602179413757,5.84391772412888,-0.20453899468884,5.38069867877875,-0.564365367144995 +"60",5.9,-0.373876664830236,5.48166674139637,-0.597796931577294,5.3357436834558,-0.649913835818738 +"61",6,-0.279415498198926,5.77474590863769,-0.280234463056808,5.46956415981143,-0.524503219480344 +"62",6.1,-0.182162504272095,6.36764321572312,-0.0996286988755344,5.7169871104113,-0.422854073705143 +"63",6.2,-0.0830894028174964,6.46175133910451,-0.025702847911482,5.83540227044819,-0.355719019286555 diff --git a/TeX/Plots/sin_conv.tex b/TeX/Plots/sin_conv.tex new file mode 100644 index 0000000..5e43475 --- /dev/null +++ b/TeX/Plots/sin_conv.tex @@ -0,0 +1,45 @@ +\begin{figure} + \centering + \begin{subfigure}[b]{0.49\textwidth} + \centering + \begin{adjustbox}{width=\textwidth, height=0.25\textheight} + \begin{tikzpicture} + \begin{axis}[tick style = {draw = none}, xticklabel = \empty, + yticklabel=\empty] + \addplot [mark options={scale = 0.7}, mark = o] table + [x=x_d,y=y_d, col sep = comma] {Plots/Data/sin_conv.csv}; + \addplot [red, mark=x] table [x=x_i, y=y_i, col sep=comma, color ='black'] {Plots/Data/sin_conv.csv}; + \end{axis} + \end{tikzpicture} + \end{adjustbox} + \caption{True position (\textcolor{red}{red}), distorted position data (black)} + \end{subfigure} + \begin{subfigure}[b]{0.49\textwidth} + \centering + \begin{adjustbox}{width=\textwidth, height=0.25\textheight} + \begin{tikzpicture} + \begin{axis}[tick style = {draw = none}, xticklabel = \empty, + yticklabel=\empty] + \addplot [mark options={scale = 0.7}, mark = o] table [x=x,y=y, col + sep = comma] {Plots/Data/sin_conv.csv}; + \addplot [red, mark=x] table [x=x_i, y=y_i, col sep=comma, color ='black'] {Plots/Data/sin_conv.csv}; + \end{axis} + \end{tikzpicture} + \end{adjustbox} + \caption{True position (\textcolor{red}{red}), filtered position data (black)} + \end{subfigure} + \caption{Example for noise reduction using convolution with simulated + positional data. As filter + $g(i)=\left(\nicefrac{1}{3},\nicefrac{1}{4},\nicefrac{1}{5},\nicefrac{1}{6},\nicefrac{1}{20}\right)_{(i-1)}$ + is chosen and applied to the $x$ and $y$ coordinate + data seperately. The convolution of both signals with $g$ + improves the MSE of the positions from 0.196 to 0.170 and + visibly smoothes the data. 
+  }
+  \label{fig:sin_conv}
+\end{figure}
+
+%%% Local Variables:
+%%% mode: latex
+%%% TeX-master: "../main"
+%%% End:
diff --git a/TeX/Plots/y.tex b/TeX/Plots/y.tex
new file mode 100644
index 0000000..bd3b524
--- /dev/null
+++ b/TeX/Plots/y.tex
@@ -0,0 +1,5 @@
+
+%%% Local Variables:
+%%% mode: latex
+%%% TeX-master: "../main"
+%%% End:
diff --git a/TeX/appendixA.tex b/TeX/appendixA.tex
new file mode 100644
index 0000000..de656fa
--- /dev/null
+++ b/TeX/appendixA.tex
@@ -0,0 +1,33 @@
+
+\newpage
+\begin{appendices}
+  \section{Proofs for some Lemmata in ...}
+  In the following, proofs are given for some important lemmata in
+  Section~\ref{sec:theo38}. Further proofs not discussed here can be
+  found in \textcite{heiss2019}.
+  \begin{Theorem}[Proof of Lemma~\ref{theo38}]
+  \end{Theorem}
+
+\begin{Lemma}[$\frac{w^{*,\tilde{\lambda}}_k}{v_k}\approx\mathcal{O}(\frac{1}{n})$]
+  For any $\lambda > 0$ and training data $(x_i^{\text{train}},
+  y_i^{\text{train}}) \in \mathbb{R}^2, \, i \in
+  \left\{1,\dots,N\right\}$, we have
+  \[
+    \max_{k \in \left\{1,\dots,n\right\}} \frac{w^{*,
+        \tilde{\lambda}}_k}{v_k} = \mathcal{O}\left(\frac{1}{n}\right),
+    \quad n \to \infty.
+  \]
+
+
+\end{Lemma}
+\end{appendices}
+
+
+
+
+
+
+%%% Local Variables:
+%%% mode: latex
+%%% TeX-master: "main"
+%%% End:
+
diff --git a/TeX/bibliograpy.bib b/TeX/bibliograpy.bib
new file mode 100644
index 0000000..ef748cb
--- /dev/null
+++ b/TeX/bibliograpy.bib
@@ -0,0 +1,58 @@
+@UNPUBLISHED{heiss2019,
+  series    = {arXiv},
+  author    = {Heiss, Jakob and Teichmann, Josef and Wutte, Hanna},
+  publisher = {Cornell University},
+  year      = {2019},
+  language  = {en},
+  copyright = {In Copyright - Non-Commercial Use Permitted},
+  keywords  = {early stopping; implicit regularization; machine learning; neural networks; spline; regression; gradient descent; artificial intelligence},
+  size      = {53 p.},
+  address   = {Ithaca, NY},
+  abstract  = {Today, various forms of neural networks are trained to perform approximation tasks in many fields. However, the solutions obtained are not fully understood. Empirical results suggest that typical training algorithms favor regularized solutions. These observations motivate us to analyze properties of the solutions found by gradient descent initialized close to zero, that is frequently employed to perform the training task. As a starting point, we consider one dimensional (shallow) ReLU neural networks in which weights are chosen randomly and only the terminal layer is trained. We show that the resulting solution converges to the smooth spline interpolation of the training data as the number of hidden nodes tends to infinity. Moreover, we derive a correspondence between the early stopped gradient descent and the smoothing spline regression. This might give valuable insight on the properties of the solutions obtained using gradient descent methods in general settings.},
+  DOI       = {10.3929/ethz-b-000402003},
+  title     = {How Implicit Regularization of Neural Networks Affects the Learned Function – Part I},
+  eprint    = {1911.02903}
+}
+
+@article{Dropout,
+  author  = {Nitish Srivastava and Geoffrey Hinton and Alex Krizhevsky and Ilya Sutskever and Ruslan Salakhutdinov},
+  title   = {Dropout: A Simple Way to Prevent Neural Networks from Overfitting},
+  journal = {Journal of Machine Learning Research},
+  year    = 2014,
+  volume  = 15,
+  number  = 56,
+  pages   = {1929-1958},
+  url     = {http://jmlr.org/papers/v15/srivastava14a.html}
+}
+
+@article{ADADELTA,
+  author    = {Matthew D. Zeiler},
+  title     = {{ADADELTA:} An Adaptive Learning Rate Method},
+  journal   = {CoRR},
+  volume    = {abs/1212.5701},
+  year      = 2012,
+  url       = {http://arxiv.org/abs/1212.5701},
+  archivePrefix = {arXiv},
+  eprint    = {1212.5701},
+  timestamp = {Mon, 13 Aug 2018 16:45:57 +0200},
+  biburl    = {https://dblp.org/rec/journals/corr/abs-1212-5701.bib},
+  bibsource = {dblp computer science bibliography, https://dblp.org}
+}
+
+@article{backprop,
+author={Rumelhart, David E.
+and Hinton, Geoffrey E.
+and Williams, Ronald J.},
+title={Learning representations by back-propagating errors},
+journal={Nature},
+year={1986},
+month={Oct},
+day={01},
+volume={323},
+number={6088},
+pages={533-536},
+abstract={We describe a new learning procedure, back-propagation, for networks of neurone-like units. The procedure repeatedly adjusts the weights of the connections in the network so as to minimize a measure of the difference between the actual output vector of the net and the desired output vector. As a result of the weight adjustments, internal `hidden' units which are not part of the input or output come to represent important features of the task domain, and the regularities in the task are captured by the interactions of these units. The ability to create useful new features distinguishes back-propagation from earlier, simpler methods such as the perceptron-convergence procedure1.},
+issn={1476-4687},
+doi={10.1038/323533a0},
+url={https://doi.org/10.1038/323533a0}
+}
diff --git a/TeX/further_applications_of_nn.tex b/TeX/further_applications_of_nn.tex
new file mode 100644
index 0000000..242a456
--- /dev/null
+++ b/TeX/further_applications_of_nn.tex
@@ -0,0 +1,329 @@
+\section{Application of NN to Higher Complexity Problems}
+
+As neural networks are applied to problems of higher complexity,
+often resulting in a higher dimensionality of the input, the number
+of parameters in the network rises drastically. For example a network
+with ...
+A way to combat the
+
+\subsection{Convolution}
+
+Convolution is a mathematical operation where the product of two
+functions is integrated after one of them has been reversed and shifted:
+
+\[
+  (f * g) (t) \coloneqq \int_{-\infty}^{\infty} f(t-s) g(s) ds.
+\]
+
+This operation can be described as a filter function $g$ being applied
+to $f$, as the value $f(t)$ is replaced by an average of the values of
+$f$ around position $t$, weighted by $g$.
+The convolution operation allows plentiful manipulation of data, a
+simple example being the smoothing of real-time data. Consider a sensor
+measuring the location of an object (e.g. via GPS). We expect the
+output of the sensor to be noisy as a result of a number of factors
+that impact the accuracy. In order to get a better estimate of
+the actual location we want to smooth
+the data to reduce the noise. Using convolution for this task, we
+can control the significance we want to give each data point. We
+might want to give a larger weight to more recent measurements than
+to older ones. If we assume these measurements are taken on a discrete
+timescale, we need to introduce discrete convolution first. Let $f$,
+$g: \mathbb{Z} \to \mathbb{R}$, then
+
+\[
+(f * g)(t) = \sum_{i \in \mathbb{Z}} f(t-i) g(i).
+\]
+Applying this to the data with the filter $g$ chosen accordingly, we
+are able to improve the accuracy, as can be seen in
+Figure~\ref{fig:sin_conv}.
+\input{Plots/sin_conv.tex}
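+
+To make this filtering step concrete, a minimal Python sketch is given
+below (an illustrative reconstruction, assuming NumPy is available; the
+weights are the filter $g$ from Figure~\ref{fig:sin_conv}, and only one
+coordinate is shown, the other being treated identically):
+
+\begin{verbatim}
+import numpy as np
+
+rng = np.random.default_rng(0)
+t = np.arange(0, 2 * np.pi, 0.1)            # discrete timescale
+true = np.sin(t)                            # true positions
+noisy = true + rng.normal(0, 0.15, t.size)  # noisy sensor output
+
+# filter weights g(0), ..., g(4): the most recent measurement is
+# weighted strongest; the weights sum to 1, so this is a weighted average
+g = np.array([1/3, 1/4, 1/5, 1/6, 1/20])
+
+# discrete convolution of the signal with g: each smoothed value
+# combines the current and the four preceding measurements
+smooth = np.convolve(noisy, g, mode="valid")
+
+# the smoothed value at index j estimates the position at time j + 4
+mse = np.mean((smooth - true[4:]) ** 2)
+\end{verbatim}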
Let $f$, $g: \mathbb{Z}^d \to +\mathbb{R}$ then + +\[ + (f * g)(x_1, \dots, x_d) = \sum_{i \in \mathbb{Z}^d} f(x_1 - i_1, + \dots, x_d - i_d) g(i_1, \dots, i_d) +\] +This will prove to be a useful framework for image manipulation but +in order to apply convolution to images we need to discuss +representation of image data first. Most often images are represented +by each pixel being a mixture of base colors these base colors define +the color-space in which the image is encoded. Often used are +color-spaces RGB (red, +blue, green) or CMYK (cyan, magenta, yellow, black). An example of an +image split in its red, green and blue channel is given in +Figure~\ref{fig:rgb} Using this +encoding of the image we can define a corresponding discrete function +describing the image, by mapping the coordinates $(x,y)$ of an pixel +and the +channel (color) $c$ to the respective value $v$ + +\begin{align} + \begin{split} + I: \mathbb{N}^3 & \to \mathbb{R}, \\ + (x,y,c) & \mapsto v. + \end{split} + \label{def:I} +\end{align} + +\begin{figure} + \begin{adjustbox}{width=\textwidth} + \begin{tikzpicture} + \begin{scope}[x = (0:1cm), y=(90:1cm), z=(15:-0.5cm)] + \node[canvas is xy plane at z=0, transform shape] at (0,0) + {\includegraphics[width=5cm]{Plots/Data/klammern_r.jpg}}; + \node[canvas is xy plane at z=2, transform shape] at (0,-0.2) + {\includegraphics[width=5cm]{Plots/Data/klammern_g.jpg}}; + \node[canvas is xy plane at z=4, transform shape] at (0,-0.4) + {\includegraphics[width=5cm]{Plots/Data/klammern_b.jpg}}; + \node[canvas is xy plane at z=4, transform shape] at (-8,-0.2) + {\includegraphics[width=5.3cm]{Plots/Data/klammern_rgb.jpg}}; + \end{scope} + \end{tikzpicture} + \end{adjustbox} + \caption{On the right the red, green and blue chances of the picture + are displayed. In order to better visualize the color channels the + black and white picture of each channel has been colored in the + respective color. Combining the layers results in the image on the + left.} + \label{fig:rgb} +\end{figure} + +With this representation of an image as a function, we can apply +filters to the image using convolution for multidimensional functions +as described above. In order to simplify the notation we will write +the function $I$ given in (\ref{def:I}) as well as the filter-function $g$ +as a tensor from now on, resulting in the modified notation of +convolution + +\[ + (I * g)_{x,y,c} = \sum_{i,j,l \in \mathbb{Z}} I_{x-i,y-j,c-l} g_{i,j,l}. +\] + +Simple examples for image manipulation using +convolution are smoothing operations or +rudimentary detection of edges in grayscale images, meaning they only +have one channel. A popular filter for smoothing images +is the Gauss-filter which for a given $\sigma \in \mathbb{R}_+$ and +size $s \in \mathbb{N}$ is +defined as +\[ + G_{x,y} = \frac{1}{2 \pi \sigma^2} e^{-\frac{x^2 + y^2}{2 + \sigma^2}}, ~ x,y \in \left\{1,\dots,s\right\}. +\] + +For edge detection purposes the Sobel operator is widespread. Here two +filters are applied to the +image $I$ and then combined. Edges in the $x$ direction are detected +by convolution with +\[ + G =\left[ + \begin{matrix} + -1 & 0 & 1 \\ + -2 & 0 & 2 \\ + -1 & 0 & 1 + \end{matrix}\right], +\] +and edges is the y direction by convolution with $G^T$, the final +output is given by + +\[ + O = \sqrt{(I * G)^2 + (I*G^T)^2} +\] +where $\sqrt{\cdot}$ and $\cdot^2$ are applied component +wise. Examples of convolution with both kernels are given in Figure~\ref{fig:img_conv}. 
+
+
+\begin{figure}[h]
+  \centering
+  \begin{subfigure}{0.3\textwidth}
+    \centering
+    \includegraphics[width=\textwidth]{Plots/Data/klammern.jpg}
+    \caption{Original Picture}
+    \label{subf:OrigPicGS}
+  \end{subfigure}
+  \begin{subfigure}{0.3\textwidth}
+    \centering
+    \includegraphics[width=\textwidth]{Plots/Data/image_conv9.png}
+    \caption{Gaussian Blur $\sigma^2 = 1$}
+  \end{subfigure}
+  \begin{subfigure}{0.3\textwidth}
+    \centering
+    \includegraphics[width=\textwidth]{Plots/Data/image_conv10.png}
+    \caption{Gaussian Blur $\sigma^2 = 4$}
+  \end{subfigure}\\
+  \begin{subfigure}{0.3\textwidth}
+    \centering
+    \includegraphics[width=\textwidth]{Plots/Data/image_conv4.png}
+    \caption{Sobel Operator $x$-direction}
+  \end{subfigure}
+  \begin{subfigure}{0.3\textwidth}
+    \centering
+    \includegraphics[width=\textwidth]{Plots/Data/image_conv5.png}
+    \caption{Sobel Operator $y$-direction}
+  \end{subfigure}
+  \begin{subfigure}{0.3\textwidth}
+    \centering
+    \includegraphics[width=\textwidth]{Plots/Data/image_conv6.png}
+    \caption{Sobel Operator combined}
+  \end{subfigure}
+%  \begin{subfigure}{0.24\textwidth}
+%    \centering
+%    \includegraphics[width=\textwidth]{Plots/Data/image_conv6.png}
+%    \caption{test}
+%  \end{subfigure}
+  \caption{Convolution of the original greyscale image (a) with different
+    kernels. In (b) and (c) Gaussian kernels of size 11 with the stated
+    $\sigma^2$ are used. In (d)--(f) the Sobel operator
+    kernels defined above are used.}
+  \label{fig:img_conv}
+\end{figure}
+\clearpage
+\newpage
+\subsection{Convolutional NN}
+
+In conventional neural networks as described in chapter ... all layers
+are fully connected, meaning each output node in a layer is influenced
+by all inputs. For $i$ inputs and $o$ output nodes this results in $i
++ 1$ variables at each node (weights and bias) and a total of $o(i + 1)$
+variables. For large inputs like image data the number of variables
+that have to be trained in order to fit the model can become excessive
+and hinder the ability to train the model due to memory and
+computational restrictions. By using convolution we can extract
+meaningful information such as edges in an image with a kernel of
+small size $k$, in the tens or hundreds, independent of the size of the
+original image. Thus for a large image $k \cdot i$ can be several
+orders of magnitude smaller than $o \cdot i$.
+
+As seen, convolution lends itself to image manipulation. In this
+chapter we will explore how we can incorporate convolution into neural
+networks, and how that might be beneficial.
+
+Convolutional neural networks as described by ... are made up of
+convolutional layers, pooling layers, and fully connected ones. The
+fully connected layers are layers in which each input node is
+connected to each output node, which is the structure introduced in
+chapter ...
+
+In a convolutional layer, instead of combining all input nodes for each
+output node, the input nodes are interpreted as a tensor to which a
+kernel is applied via convolution, resulting in the output. Most often
+multiple kernels are used, resulting in multiple output tensors. These
+kernels are the variables which are altered in order to fit the
+model to the data. Using multiple kernels it is possible to extract
+different features from the image (e.g. edges, as with the Sobel
+operator above). As this increases the dimensionality even further,
+which is undesirable as it increases the number of variables in later
+layers of the model, a convolutional layer
+is often followed by a pooling one. In a pooling layer the input is
+reduced in size by extracting a single value from a
+neighborhood \todo{moving...}... . The resulting output size depends on
+the offset of the neighborhoods used. A popular choice is max-pooling,
+where the largest value in a neighborhood is used.
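+
+A hedged sketch of max-pooling in Python follows (assuming NumPy; the
+neighborhood size and offset of 2 are example values, not taken from a
+particular network):
+
+\begin{verbatim}
+import numpy as np
+
+def max_pool(img, size=2, stride=2):
+    """Keep only the largest value of each size-by-size neighborhood."""
+    h = (img.shape[0] - size) // stride + 1
+    w = (img.shape[1] - size) // stride + 1
+    out = np.zeros((h, w))
+    for i in range(h):
+        for j in range(w):
+            out[i, j] = np.max(img[i * stride:i * stride + size,
+                                   j * stride:j * stride + size])
+    return out
+
+x = np.arange(16).reshape(4, 4)   # a 4x4 input ...
+print(max_pool(x))                # ... reduces to [[5. 7.] [13. 15.]]
+\end{verbatim}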
+
+This construct allows for the extraction of features from the input
+while using far fewer variables.
+
+... \todo{Example with a small image, ideally the one from above}
+
+\subsubsection{Parallels to the Visual Cortex in Mammals}
+
+The choice of convolution for image classification tasks is not
+arbitrary. ... eye ... bla bla
+
+
+\subsection{Limitations of the Gradient Descent Algorithm}
+
+\begin{itemize}
+\item Hyperparameter guesswork
+\item Problems navigating valleys -> momentum
+\item Different scales of gradients for variables in different layers
+  -> ADADELTA
+\end{itemize}
+
+\subsection{Stochastic Training Algorithms}
+
+For many applications in which neural networks are used, such as
+image classification or segmentation, large training data sets are
+needed to capture the nuances of the
+data. However, as training sets get larger, the memory requirement
+during training grows with them.
+In order to update the weights with the gradient descent algorithm,
+derivatives of the network with respect to each
+variable need to be calculated for all data points in order to get the
+full gradient of the error of the network.
+Thus the amount of memory and computing power available limits the
+size of the training data that can be efficiently used in fitting the
+network. A class of algorithms that augment the gradient descent
+algorithm in order to lessen this problem are stochastic gradient
+descent algorithms. Here the premise is that instead of using the whole
+dataset, a (different) subset of data is chosen to
+compute the gradient in each iteration.
+The number of iterations until each data point has been considered in
+updating the parameters is commonly called an ``epoch''.
+This reduces the amount of memory and computing power required for
+each iteration, which allows the use of very large training
+sets. Additionally, the noise introduced on the gradient can improve
+the accuracy of the fit, as stochastic gradient descent algorithms are
+less likely to get stuck in local extrema.
+
+\input{Plots/SGD_vs_GD.tex}
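+
+A minimal sketch of one training epoch of stochastic gradient descent
+is given below (framework-agnostic Python; \texttt{params},
+\texttt{grad} and the training data are placeholders rather than a
+particular library's API):
+
+\begin{verbatim}
+import numpy as np
+
+def sgd_epoch(params, grad, x_train, y_train,
+              batch_size=32, learning_rate=0.01):
+    """One epoch: every data point is considered exactly once."""
+    n = x_train.shape[0]
+    order = np.random.permutation(n)   # different subsets every epoch
+    for start in range(0, n, batch_size):
+        batch = order[start:start + batch_size]
+        # gradient of the error on the subset only, not the full set
+        g = grad(params, x_train[batch], y_train[batch])
+        params = params - learning_rate * g
+    return params
+\end{verbatim}
+
+With the batch size set to the full size of the training set this
+reduces to plain gradient descent; a batch size of 32 with learning
+rate 0.01 corresponds to the SGD$_{0.01}$ run in
+Figure~\ref{fig:sgd_vs_gd}.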
+
+% \subsubsubsection{Stochastic Gradient Descent}
+
+\subsection{Combating Overfitting}
+
+% As in many machine learning applications if the model is overfit in
+% the data it can drastically reduce the generalization of the model. In
+% many machine learning approaches noise introduced in the learning
+% algorithm in order to reduce overfitting. This results in a higher
+% bias of the model but the trade off of lower variance of the model is
+% beneficial in many cases. For example the regression tree model
+% ... benefits greatly from restricting the training algorithm on
+% randomly selected features in every iteration and then averaging many
+% such trained trees instead of just using a single one.
+% \todo{not yet sure whether I want to keep this}
+% For neural networks similar strategies exist. A popular approach in
+% regularizing convolutional neural networks is \textit{dropout}, which
+% has first been introduced in
+% \cite{Dropout}
+
+Similarly to shallow networks, overfitting can still impact the quality
+of convolutional neural networks. A popular way to combat this problem
+is by introducing noise into the training of the model. This is a
+successful strategy for other models as well: a conglomerate of
+decision trees grown on bootstrapped training samples benefits greatly
+from randomizing the features available for use in each training
+iteration (Hastie, Bachelorarbeit??). In convolutional neural networks
+the noise is introduced by deactivating certain nodes (setting the
+output of the node to 0) in the fully connected layers. The nodes are
+chosen at random and change in every iteration; this practice is called
+dropout and was introduced by
+\textcite{Dropout}.
+
+\todo{compare different dropout sizes on MNIST or similar}
+
+
+
+%%% Local Variables:
+%%% mode: latex
+%%% TeX-master: "main"
+%%% End:
diff --git a/TeX/introduction.tex b/TeX/introduction.tex
new file mode 100644
index 0000000..10ec7de
--- /dev/null
+++ b/TeX/introduction.tex
@@ -0,0 +1,10 @@
+\section{Introduction}
+
+
+
+
+
+%%% Local Variables:
+%%% mode: latex
+%%% TeX-master: "main"
+%%% End:
diff --git a/TeX/introduction_nn.tex b/TeX/introduction_nn.tex
index d171dd3..c6a6df5 100644
--- a/TeX/introduction_nn.tex
+++ b/TeX/introduction_nn.tex
@@ -26,107 +26,67 @@ except for the input layer, which receives the components of the input.
 \begin{figure}[h!]
   \center
-  \fbox{
-
-  \resizebox{\textwidth}{!}{%
-  \begin{tikzpicture}[x=1.75cm, y=1.75cm, >=stealth]
-    \tikzset{myptr/.style={decoration={markings,mark=at position 1 with %
-        {\arrow[scale=1.5,>=stealth]{>}}},postaction={decorate}}}
-
-    \foreach \m/\l [count=\y] in {1,2,3,missing,4}
-      \node [every neuron/.try, neuron \m/.try] (input-\m) at (0,2.5-\y) {};
-
-    \foreach \m [count=\y] in {1,missing,2}
-      \node [every neuron/.try, neuron \m/.try ] (hidden1-\m) at (2,2-\y*1.25) {};
-
-    \foreach \m [count=\y] in {1,missing,2}
-      \node [every neuron/.try, neuron \m/.try ] (hidden2-\m) at (5,2-\y*1.25) {};
-
-    \foreach \m [count=\y] in {1,missing,2}
-      \node [every neuron/.try, neuron \m/.try ] (output-\m) at (7,1.5-\y) {};
-
-    \foreach \l [count=\i] in {1,2,3,d_i}
-      \draw [myptr] (input-\i)+(-1,0) -- (input-\i)
-        node [above, midway] {$x_{\l}$};
-
-    \foreach \l [count=\i] in {1,n_1}
-      \node [above] at (hidden1-\i.north) {$\mathcal{N}_{1,\l}$};
-
-    \foreach \l [count=\i] in {1,n_l}
-      \node [above] at (hidden2-\i.north) {$\mathcal{N}_{l,\l}$};
-
-    \foreach \l [count=\i] in {1,d_o}
-      \draw [myptr] (output-\i) -- ++(1,0)
-        node [above, midway] {$O_{\l}$};
-
-    \foreach \i in {1,...,4}
-      \foreach \j in {1,...,2}
-        \draw [myptr] (input-\i) -- (hidden1-\j);
-
-    \foreach \i in {1,...,2}
-      \foreach \j in {1,...,2}
-        \draw [myptr] (hidden1-\i) -- (hidden2-\j);
-
-    \foreach \i in {1,...,2}
-      \foreach \j in {1,...,2}
-        \draw [myptr] (hidden2-\i) -- (output-\j);
-
-    \node [align=center, above] at (0,2) {Input\\layer};
-    \node [align=center, above] at (2,2) {Hidden \\layer $1$};
-    \node [align=center, above] at (5,2) {Hidden \\layer $l$};
-    \node [align=center, above] at (7,2) {Output \\layer};
+  % \fbox{
+
+  \resizebox{\textwidth}{!}{%
+  \begin{tikzpicture}[x=1.75cm, y=1.75cm, >=stealth]
+    \tikzset{myptr/.style={decoration={markings,mark=at position 1
with % + {\arrow[scale=1.5,>=stealth]{>}}},postaction={decorate}}} + + \foreach \m/\l [count=\y] in {1,2,3,missing,4} + \node [every neuron/.try, neuron \m/.try] (input-\m) at (0,2.5-\y) {}; + + \foreach \m [count=\y] in {1,missing,2} + \node [every neuron/.try, neuron \m/.try ] (hidden1-\m) at (2,2-\y*1.25) {}; + + \foreach \m [count=\y] in {1,missing,2} + \node [every neuron/.try, neuron \m/.try ] (hidden2-\m) at (5,2-\y*1.25) {}; - \node[fill=white,scale=1.5,inner xsep=10pt,inner ysep=10mm] at ($(hidden1-1)!.5!(hidden2-2)$) {$\dots$}; + \foreach \m [count=\y] in {1,missing,2} + \node [every neuron/.try, neuron \m/.try ] (output-\m) at (7,1.5-\y) {}; + + \foreach \l [count=\i] in {1,2,3,d_i} + \draw [myptr] (input-\i)+(-1,0) -- (input-\i) + node [above, midway] {$x_{\l}$}; + + \foreach \l [count=\i] in {1,n_1} + \node [above] at (hidden1-\i.north) {$\mathcal{N}_{1,\l}$}; + + \foreach \l [count=\i] in {1,n_l} + \node [above] at (hidden2-\i.north) {$\mathcal{N}_{l,\l}$}; + + \foreach \l [count=\i] in {1,d_o} + \draw [myptr] (output-\i) -- ++(1,0) + node [above, midway] {$O_{\l}$}; + + \foreach \i in {1,...,4} + \foreach \j in {1,...,2} + \draw [myptr] (input-\i) -- (hidden1-\j); + + \foreach \i in {1,...,2} + \foreach \j in {1,...,2} + \draw [myptr] (hidden1-\i) -- (hidden2-\j); + + \foreach \i in {1,...,2} + \foreach \j in {1,...,2} + \draw [myptr] (hidden2-\i) -- (output-\j); + + \node [align=center, above] at (0,2) {Input\\layer}; + \node [align=center, above] at (2,2) {Hidden \\layer $1$}; + \node [align=center, above] at (5,2) {Hidden \\layer $l$}; + \node [align=center, above] at (7,2) {Output \\layer}; + + \node[fill=white,scale=1.5,inner xsep=10pt,inner ysep=10mm] at ($(hidden1-1)!.5!(hidden2-2)$) {$\dots$}; - \end{tikzpicture}}} - \caption{test} + \end{tikzpicture}}%} + \caption{Illustration of a neural network with $d_i$ inputs, $l$ + hidden layers with $n_{\cdot}$ nodes in each layer, as well as + $d_o$ outputs. 
+  }
 \end{figure}

-\begin{figure}
-  \begin{tikzpicture}[x=1.5cm, y=1.5cm]
-    \tikzset{myptr/.style={decoration={markings,mark=at position 1 with %
-        {\arrow[scale=1.5,>=stealth]{>}}},postaction={decorate}}}
-
-    \foreach \m/\l [count=\y] in {1}
-      \node [every neuron/.try, neuron \m/.try] (input-\m) at (0,0.5-\y) {};
-
-    \foreach \m [count=\y] in {1,2,missing,3,4}
-      \node [every neuron/.try, neuron \m/.try ] (hidden-\m) at (1.25,3.25-\y*1.25) {};
-
-    \foreach \m [count=\y] in {1}
-      \node [every neuron/.try, neuron \m/.try ] (output-\m) at (2.5,0.5-\y) {};
-
-    \foreach \l [count=\i] in {1}
-      \draw [myptr] (input-\i)+(-1,0) -- (input-\i)
-        node [above, midway] {$x$};
-
-    \foreach \l [count=\i] in {1,2,n-1,n}
-      \node [above] at (hidden-\i.north) {$\mathcal{N}_{\l}$};
-
-    \foreach \l [count=\i] in {1,n_l}
-      \node [above] at (output-\i.north) {};
-
-    \foreach \l [count=\i] in {1}
-      \draw [myptr, >=stealth] (output-\i) -- ++(1,0)
-        node [above, midway] {$y$};
-
-    \foreach \i in {1}
-      \foreach \j in {1,2,...,3,4}
-        \draw [myptr, >=stealth] (input-\i) -- (hidden-\j);
-
-    \foreach \i in {1,2,...,3,4}
-      \foreach \j in {1}
-        \draw [myptr, >=stealth] (hidden-\i) -- (output-\j);
-
-    \node [align=center, above] at (0,1) {Input \\layer};
-    \node [align=center, above] at (1.25,3) {Hidden layer};
-    \node [align=center, above] at (2.5,1) {Output \\layer};
+\subsection{Nonlinearity of Neural Networks}

-  \end{tikzpicture}
-  \caption{Shallow Neural Network with input- and output-dimension of \(d
-    = 1\)}
-\end{figure}

 \begin{figure}
@@ -159,7 +119,7 @@
   \node [align = center, below] at (3, 0) {Summing \\junction};

   \node [draw, minimum size = 1.25cm] (act) at (4.5, 0.625)
-  {\(\psi(.)\)};
+  {\(\sigma(.)\)};
   \node [align = center, above] at (4.5, 1.25) {Activation \\function};

   \node [circle, draw, fill=black, inner sep = 0pt, minimum size =
@@ -215,17 +175,125 @@
   \caption{Structure of a single neuron}
 \end{figure}

-\begin{tikzpicture}
-\tikzset{myptr/.style={decoration={markings,mark=at position 1 with %
-    {\arrow[scale=2,>=stealth]{>}}},postaction={decorate}}}
-%1
-\draw [->,>=stealth] (0,.5) -- (2,.5);
-%2
-\draw [myptr] (0,0) -- (2,0);
-\end{tikzpicture}
-
-
-
+\clearpage
+\subsection{Training Neural Networks}
+
+After a neural network model is designed, like most statistical models
+it has to be fit to the data. In the machine learning context this is
+often called ``training'', as due to the complexity and number of
+variables in these models they are fitted iteratively to the data,
+``learning'' the properties of the data better with each iteration.
+
+There are two main categories of machine learning models: supervised
+and unsupervised learners. Unsupervised learners learn structure in the
+data without guidance from outside (such as labeling data beforehand
+for training); popular examples of this are clustering
+algorithms.\todo{source} Supervised learners on the other hand are, as
+the name suggests, supervised during learning. This generally amounts
+to using data with the expected response (label) attached to each
+data point in fitting the model, where usually some distance between
+the model output and the labels is minimized.
+
+\subsubsection{Interpreting the Output}
+
+In order to properly interpret the output of a neural network and to
+train it, depending on the problem it might be advantageous to
+transform the output from the last layer.
+Given the nature of the
+neural network the value at each output node is a real number. This is
+desirable for applications where the desired output is a real-valued
+vector (e.g.\ steering inputs for an autonomous car); for
+classification problems however it is desirable to transform this
+output. Often classification problems are modeled in such a way that
+each output node corresponds to a class. Then the output vector needs
+to be normalized in order to give a prediction. The naive approach is
+to transform the output vector $o$ into a one-hot vector $p$ with a $0$
+entry for all classes except one, which is the predicted class:
+
+\[
+  p_i =
+  \begin{cases}
+    1,& \text{if } i = \min\left\{j : o_j = \max_k o_k\right\}, \\
+    0,& \text{else.}
+  \end{cases}
+\]\todo{phrase this better}
+
+However this imposes difficulties in training the network, as with this
+addition the model is no longer differentiable, which limits the ways
+the model can be trained. Additionally, information about the
+``certainty'' of the prediction for each class gets lost. A popular
+way to circumvent this problem is to normalize the output vector in
+such a way that the entries add up to one; this allows for the
+interpretation of probabilities assigned to each class. The most common
+choice for this normalization is the softmax function
+\[
+  \text{softmax}(o)_i = \frac{e^{o_i}}{\sum_j e^{o_j}}.
+\]
+
+\subsubsection{Error Measurement}
+
+In order to assess the quality of a network $\mathcal{NN}$ and train
+it, we need to discuss how we measure error. As the output is
+continuous for regression problems, in contrast to the class
+predictions of a classification problem, we need to discuss these
+problems separately.
+\paragraph{Regression Problems}
+
+\subsubsection{Gradient Descent Algorithm}
+
+When trying to fit a neural network it is hard
+to predict the impact of the individual parameters on the accuracy of
+the output. Thus applying numeric optimization algorithms is the only
+feasible way to fit the model. An attractive algorithm for training
+neural networks is gradient descent, where each parameter $\theta_i$ is
+iteratively updated according to the gradient of the error
+measure and a step size $\gamma$. For this all parameters are
+initialized (often randomly or close to zero) and then iteratively
+updated until a certain criterion is met, usually either a fixed
+number of iterations or a desired upper bound on the error measure.
+% For a function $f_\theta$ with parameters $\theta \in \mathbb{R}^n$
+% and a error function $L(f_\theta)$ the gradient descent algorithm is
+% given in \ref{alg:gd}.
+
+\begin{algorithm}[H]
+  \SetAlgoLined
+  \KwInput{function $f_\theta$ with parameters $\theta \in
+    \mathbb{R}^n$ \newline step size $\gamma$}
+  initialize $\theta^0$\;
+  $i \leftarrow 1$\;
+  \While{termination condition is not met}{
+    $\nabla \leftarrow \frac{\mathrm{d}f_\theta}{\mathrm{d} \theta}\vert_{\theta^{i-1}}$\;
+    $\theta^i \leftarrow \theta^{i-1} - \gamma \nabla $\;
+    $i \leftarrow i +1$\;
+  }
+
+  \caption{Gradient Descent}
+  \label{alg:gd}
+\end{algorithm}
+
+The algorithm for gradient descent is given in
+Algorithm~\ref{alg:gd}. In the context of fitting a neural network
+$f_\theta$ corresponds to the error measurement of the network
+$L\left(\mathcal{NN}_{\theta}\right)$, where $\theta$ is a vector
+containing all the weights and biases of the network.
+As can be seen, this requires computing the derivative of the network
+with respect to each variable. A minimal sketch of the generic
+procedure is given below.
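+The following Python sketch implements Algorithm~\ref{alg:gd} on a
+hypothetical least squares problem in place of the network error; the
+step size and iteration count are arbitrary choices:
+\begin{verbatim}
+import numpy as np
+
+# toy problem: minimize L(theta) = ||A theta - y||^2
+A = np.array([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]])
+y = np.array([1.0, 2.0, 3.0])
+
+gamma = 0.01                           # step size
+theta = np.zeros(2)                    # initialization close to zero
+for i in range(1000):                  # termination: fixed iteration count
+    nabla = 2 * A.T @ (A @ theta - y)  # gradient of L at theta^{i-1}
+    theta = theta - gamma * nabla      # theta^i = theta^{i-1} - gamma nabla
+print(theta)                           # approaches the minimizer of L
+\end{verbatim}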
+With the number of variables getting
+large in networks with multiple layers of high neuron count, naively
+computing these derivatives can become quite expensive in terms of
+memory and computation. But by using the chain rule and exploiting the
+layered structure we can compute the gradient much more efficiently
+with backpropagation, first introduced by \textcite{backprop}.
+
+\subsubsection{Backpropagation}
+
+As with an increasing number of layers the derivative of a loss
+function with respect to a certain variable becomes more expensive to
+compute, there have been efforts to increase the efficiency of
+computing these derivatives. Today the backpropagation algorithm is
+widely used to compute the derivatives needed for the optimization
+algorithms. Here, instead of naively calculating the derivative for
+each variable, the chain rule is used in order to compute derivatives
+for each layer from the output layer towards the first layer, reusing
+intermediate results instead of recomputing them. Writing the network
+as a composition $\mathcal{NN}_\theta = f_L \circ \dots \circ f_1$ of
+its layers, the derivative of the loss with respect to a parameter
+$\theta_l$ of layer $l$ factors as
+
+\[
+  \frac{\partial L(\mathcal{NN}_\theta)}{\partial \theta_l} =
+  \frac{\partial L}{\partial f_L}
+  \frac{\partial f_L}{\partial f_{L-1}} \cdots
+  \frac{\partial f_{l+1}}{\partial f_l}
+  \frac{\partial f_l}{\partial \theta_l},
+\]
+where the product of the leading factors can be shared between all
+parameters of layer $l$ and of all earlier layers.

%%% Local Variables:
%%% mode: latex
%%% TeX-master: "main"
%%% End:
diff --git a/TeX/main.tex b/TeX/main.tex
index 4a7f87c..336dfc5 100644
--- a/TeX/main.tex
+++ b/TeX/main.tex
@@ -1,6 +1,8 @@
-\documentclass[a4paper, 12pt]{article}
+\documentclass[a4paper, 12pt, draft=true]{article}
+
+%\usepackage[margin=1in]{geometry}
+%\geometry{a4paper, left=30mm, right=40mm,top=25mm, bottom=20mm}

-\usepackage[margin=1in]{geometry}
 \usepackage[english]{babel}
 \usepackage[utf8]{inputenc}
 \usepackage[T1]{fontenc}
@@ -16,8 +18,8 @@
 \usepackage{sectsty}
 \usepackage{setspace}
 \usepackage{booktabs}
-\usepackage{caption}
-\usepackage[justification=RaggedRight, singlelinecheck=false]{caption}
+\usepackage[format=plain,
+  textfont=it]{caption}
 %\usepackage{natbib} %[numbers]
 \usepackage{multirow}
 \usepackage{3parttable}
@@ -28,9 +30,18 @@
 \usepackage{tikz}
 \usepackage{nicefrac}
 \usepackage{enumitem}
+\usepackage[toc, page]{appendix}
+\usepackage{todonotes}
+\usepackage{lipsum}
+\usepackage[ruled,vlined]{algorithm2e}
+\usepackage{showframe}
+\usepackage[protrusion=true, expansion=true, kerning=true]{microtype}
+
+\captionsetup[sub]{justification=centering}

 \usetikzlibrary{matrix,chains,positioning,decorations.pathreplacing,arrows}
 \usetikzlibrary{positioning,calc,calligraphy}
+\usetikzlibrary{calc, 3d}
 \usepackage{pgfplots}
 \usepgfplotslibrary{colorbrewer}
@@ -42,9 +53,8 @@
 \usepackage[style=authoryear, backend=bibtex]{biblatex}
-\addbibresource{Literaturverzeichnis.bib}
 \urlstyle{same}
-\bibliography{Literaturverzeichnis.bib}
+\bibliography{bibliograpy.bib}
 \numberwithin{figure}{section}
 \numberwithin{table}{section}
 \numberwithin{equation}{section}
@@ -66,16 +76,22 @@
 \DeclareMathOperator*{\plim}{\mathbb{P}\text{-}\lim}
 \DeclareMathOperator{\supp}{supp}
 \DeclareMathOperator*{\argmin}{arg\,min}
-\begin{document}
+\DeclareMathOperator*{\po}{\mathbb{P}\text{-}\mathcal{O}}
+\DeclareMathOperator*{\equals}{=}
+\begin{document}
+
 \newcommand{\plimn}[0]{\plim\limits_{n \to \infty}}
 \newcommand{\norm}[1]{\left\lVert#1\right\rVert}
 \newcommand*\circled[1]{\tikz[baseline=(char.base)]{
-  \node[shape=circle,draw,inner sep=2pt] (char) {#1};}}
+  \node[shape=circle,draw,inner sep=2pt] (char) {#1};}}
 \newcommand{\abs}[1]{\ensuremath{\left\vert#1\right\vert}}
+
+\SetKwInput{KwInput}{Input}
+
 %\newcommand{\myrightarrow}[1]{\xrightarrow{\makebox[2em][c]{$\scriptstyle#1$}}}‌

 %Arndt Tobias \hfill 21.12.2017\newline
@@ -91,7 +107,13 @@
 \pagenumbering{gobble}
 \newpage
 %\setcounter{tocdepth}{4}
-\tableofcontents
+\tableofcontents
+\listoftodos
+\newpage
+\pagenumbering{arabic}
+%
Introduction +\input{introduction} + \newpage % Introduction Neural Networks @@ -100,7 +122,19 @@ \newpage % Theorem 3.8 -\input{theo_3_8.tex} +\input{theo_3_8} + +\newpage + +% Kapitel 4 +\input{further_applications_of_nn} + +\newpage + +\printbibliography + +% Appendix A +\input{appendixA.tex} \end{document} diff --git a/TeX/pfg_test.tex b/TeX/pfg_test.tex new file mode 100644 index 0000000..4ad5b20 --- /dev/null +++ b/TeX/pfg_test.tex @@ -0,0 +1,25 @@ +\documentclass{article} +\usepackage{pgfplots} +\usepackage{filecontents} + + +\begin{document} +\begin{tikzpicture} +\begin{axis} +\addplot+ [mark options={scale = 0.7}, mark = o] table [x=x,y=y, col sep = comma, +only marks] {data_sin_d_t.csv}; +\addplot [black] table [x=x, y=y, col sep=comma, mark=none, color = 'black'] {matlab_sin_d_01.csv}; +\end{axis} +\end{tikzpicture} +\begin{tikzpicture} +\begin{axis} +\addplot table [x=x, y=y, col sep=comma, only marks] {data_sin_d_t.csv}; +\addplot table [black, x=x, y=y, col sep=comma, mark=none, color = 'black'] {matlab_sin_d_01.csv}; +\end{axis} +\end{tikzpicture} +\end{document} + +%%% Local Variables: +%%% mode: latex +%%% TeX-master: t +%%% End: diff --git a/TeX/theo_3_8.tex b/TeX/theo_3_8.tex index ebd4c6e..1fcd7f6 100644 --- a/TeX/theo_3_8.tex +++ b/TeX/theo_3_8.tex @@ -4,15 +4,247 @@ %%% mode: latex %%% TeX-master: "main" %%% End: +\section{Shallow Neural Networks} -In this section we will analyze the connection of shallow Neural -Networks and regression splines. We will see that the punishment of -wight size in training the shallow Neural Netowork will result in a -function that minimizes the second derivative as the amount of hidden -nodes ia grown to infinity. In order to properly formulate this relation we will -first need to introduce some definitions. +In order to examine some behavior of neural networks in this chapter +we consider a simple class of networks, the shallow ones. These +networks only contain one hidden layer and have a single output node. 
+
+\begin{Definition}[Shallow neural network]
+  For an input dimension $d$ and a Lipschitz continuous activation function $\sigma:
+  \mathbb{R} \to \mathbb{R}$ we define a shallow neural network with
+  $n$ hidden nodes $\mathcal{NN}_\vartheta : \mathbb{R}^d \to \mathbb{R}$ as
+  \[
+    \mathcal{NN}_\vartheta(x) \coloneqq \sum_{k=1}^n w_k \sigma\left(b_k +
+      \sum_{j=1}^d v_{k,j} x_j\right) + c ~~ \forall x \in \mathbb{R}^d
+\]
+with
+\begin{itemize}
+  \item weights $w_k \in \mathbb{R},~k \in \left\{1,\dots,n\right\}$
+  \item biases $b_k \in \mathbb{R},~k \in \left\{1, \dots,n\right\}$
+  \item weights $v_k \in \mathbb{R}^d,~k\in\left\{1,\dots,n\right\}$
+  \item bias $c \in \mathbb{R}$
+  \item these weights and biases collected in
+    \[
+      \vartheta \coloneqq (w, b, v, c) \in \Theta \coloneqq
+      \mathbb{R}^n \times \mathbb{R}^n \times \mathbb{R}^{n \times d} \times \mathbb{R}
+    \]
+\end{itemize}
+\end{Definition}
+% \begin{figure}
+%   \begin{tikzpicture}[x=1.5cm, y=1.5cm]
+%     \tikzset{myptr/.style={decoration={markings,mark=at position 1 with %
+%         {\arrow[scale=1.5,>=stealth]{>}}},postaction={decorate}}}
+%     \foreach \m/\l [count=\y] in {1}
+%       \node [every neuron/.try, neuron \m/.try] (input-\m) at (0,0.5-\y) {};
+%     \foreach \m [count=\y] in {1,2,missing,3,4}
+%       \node [every neuron/.try, neuron \m/.try ] (hidden-\m) at (1.25,3.25-\y*1.25) {};
+%     \foreach \m [count=\y] in {1}
+%       \node [every neuron/.try, neuron \m/.try ] (output-\m) at (2.5,0.5-\y) {};
+%     \foreach \l [count=\i] in {1}
+%       \draw [myptr] (input-\i)+(-1,0) -- (input-\i)
+%         node [above, midway] {$x$};
+%     \foreach \l [count=\i] in {1,2,n-1,n}
+%       \node [above] at (hidden-\i.north) {$\mathcal{N}_{\l}$};
+%     \foreach \l [count=\i] in {1,n_l}
+%       \node [above] at (output-\i.north) {};
+%     \foreach \l [count=\i] in {1}
+%       \draw [myptr, >=stealth] (output-\i) -- ++(1,0)
+%         node [above, midway] {$y$};
+%     \foreach \i in {1}
+%       \foreach \j in {1,2,...,3,4}
+%         \draw [myptr, >=stealth] (input-\i) -- (hidden-\j);
+%     \foreach \i in {1,2,...,3,4}
+%       \foreach \j in {1}
+%         \draw [myptr, >=stealth] (hidden-\i) -- (output-\j);
+%     \node [align=center, above] at (0,1) {Input \\layer};
+%     \node [align=center, above] at (1.25,3) {Hidden layer};
+%     \node [align=center, above] at (2.5,1) {Output \\layer};
+%   \end{tikzpicture}
+%   \caption{Shallow Neural Network with input- and output-dimension of \(d
+%     = 1\)}
+%   \label{fig:shallowNN}
+% \end{figure}
+
+As neural networks with a large number of nodes have a large number of
+parameters that can be tuned, they can often fit the data quite well. If a ReLU
+\[
+  \sigma(x) \coloneqq \max{(0, x)}
+\]
+is chosen as activation function, one can easily prove that if the
+number of hidden nodes exceeds the
+number of data points in the training data, a shallow network trained
+on MSE will perfectly fit the data.
+\begin{Theorem}[meaningful title]
+  For training data of size $t$
+  \[
+    \left(x_i^{\text{train}}, y_i^{\text{train}}\right) \in \mathbb{R}^d
+    \times \mathbb{R},~i\in\left\{1,\dots,t\right\}
+  \]
+  a shallow neural network $\mathcal{NN}_\vartheta$ with $n \geq t$
+  hidden nodes will perfectly fit the data when
+  minimizing squared error loss.
+  \proof
+  W.l.o.g. let all values $x_{ij}^{\text{train}} \in [0,1],~\forall i \in
+  \left\{1,\dots,t\right\}, j \in \left\{1,\dots,d\right\}$. Now we
+  choose $v^*$ in order to calculate a unique value for each
+  $x_i^{\text{train}}$:
+  \[
+    v^*_{k,j} = v^*_{j} = 10^{j-1}, ~ \forall k \in \left\{1,\dots,n\right\}.
+  \]
+  Assuming $x_i^{\text{train}} \neq x_j^{\text{train}},~\forall i\neq
+  j$, we get
+  \[
+    \left(v^*\right)^{\mathrm{T}} x_i^{\text{train}} \neq
+    \left(v^*\right)^{\mathrm{T}} x_j^{\text{train}}, ~ \forall i
+    \neq j.
+  \]
+  W.l.o.g. assume the $x_i^{\text{train}}$ are ordered such that
+  $\left(v^*\right)^{\mathrm{T}} x_i^{\text{train}} <
+  \left(v^*\right)^{\mathrm{T}} x_j^{\text{train}}, ~\forall i < j$.
+  Now we choose $b^*$ as
+  \begin{align*}
+    b^*_1 &> -\left(v^*\right)^{\mathrm{T}} x_1^{\text{train}},\\
+    b^*_k &= -\left(v^*\right)^{\mathrm{T}}
+            x_{k-1}^{\text{train}},~\forall k \in \left\{2, \dots,
+            t\right\}, \\
+    b_k^* &\leq -\left(v^*\right)^{\mathrm{T}}
+            x_{t}^{\text{train}},~\forall k > t.
+  \end{align*}
+  With
+  \begin{align*}
+    w_k^* &= \frac{y_k^{\text{train}} - \sum_{j =1}^{k-1} w^*_j\left(b^*_j
+            + \left(v^*\right)^{\mathrm{T}} x_k^{\text{train}}\right)}{b_k^* +
+            \left(v^*\right)^{\mathrm{T}}
+            x_k^{\text{train}}},~\forall k \in \left\{1,\dots,t\right\},\\
+    w_k^* &\in \mathbb{R} \text{ arbitrary, } \forall k > t,
+  \end{align*}
+  and $\vartheta^* = (w^*, b^*, v^*, c = 0)$ we get
+  \[
+    \mathcal{NN}_{\vartheta^*} (x_i^{\text{train}}) = \sum_{k =
+      1}^{i} w_k^*\left(b_k^* + \left(v^*\right)^{\mathrm{T}}
+      x_i^{\text{train}}\right) = y_i^{\text{train}}.
+  \]
+  As the squared error of $\mathcal{NN}_{\vartheta^*}$ is zero, all
+  squared error loss minimizing shallow networks with at least $t$ hidden
+  nodes will perfectly fit the data.
+  \qed
+  \label{theo:overfit}
+\end{Theorem}
+
+However this behavior is often not desired, as overfit models often
+have bad generalization properties, especially if noise is present in
+the data. This effect can be seen in
+Figure~\ref{fig:overfit}. Here a network that perfectly fits the
+training data regarding the MSE is \todo{wording}
+constructed and compared to a regression spline
+(Definition~\ref{def:wrs}). While the network
+fits the data better than the spline, the spline is much closer to the
+underlying mechanism that was used to generate the data. The better
+generalization of the spline compared to the network is further
+illustrated by the better validation error computed on newly generated
+test data.
+In order to improve the accuracy of the model we want to reduce
+overfitting. A possible way to achieve this is by explicitly
+regularizing the network through the cost function, as done with
+ridge penalized networks
+(Definition~\ref{def:rpnn}), where large weights $w$ are punished. In
+Theorem~\ref{theo:main1} we will
+prove that this results in the network converging to a
+regression spline as the number of nodes in the hidden layer is
+increased. The construction of the proof above can also be checked
+numerically, as sketched below.
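+The following Python sketch (a hypothetical toy data set with $d = 1$,
+so that $v^* \equiv 1$) builds the interpolating network of
+Theorem~\ref{theo:overfit}:
+\begin{verbatim}
+import numpy as np
+
+x = np.array([0.1, 0.3, 0.45, 0.7, 0.9])   # ordered training inputs in [0, 1]
+y = np.array([0.2, -0.4, 0.3, 0.1, -0.2])  # toy responses
+t = len(x)
+
+v = np.ones(t)                  # v* = 1 for d = 1
+b = np.empty(t)
+b[0] = -x[0] + 0.05             # any b_1 > -x_1 works
+b[1:] = -x[:-1]                 # b_k = -x_{k-1}: node k active for x > x_{k-1}
+
+w = np.empty(t)                 # recursive choice of the weights w_k
+for k in range(t):
+    w[k] = (y[k] - np.sum(w[:k] * (b[:k] + v[:k] * x[k]))) / (b[k] + v[k] * x[k])
+
+def nn(z):                      # shallow ReLU network with c = 0
+    return np.sum(w * np.maximum(0.0, b + v * z))
+
+print([nn(z) - yi for z, yi in zip(x, y)])   # residuals on the training data
+\end{verbatim}
+The printed residuals vanish up to floating point error, i.e.\ the
+network attains zero squared error loss on the training data.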
+
+
+
+
+\begin{figure}
+  \begin{adjustbox}{width = \textwidth}
+    \pgfplotsset{
+      compat=1.11,
+      legend image code/.code={
+        \draw[mark repeat=2,mark phase=2]
+        plot coordinates {
+          (0cm,0cm)
+          (0.15cm,0cm)        %% default is (0.3cm,0cm)
+          (0.3cm,0cm)         %% default is (0.6cm,0cm)
+        };%
+      }
+    }
+    \begin{tikzpicture}
+      \begin{axis}[tick style = {draw = none}, width = \textwidth,
+        height = 0.6\textwidth]
+        \addplot table
+          [x=x, y=y, col sep=comma, only marks,mark options={scale =
+          0.7}] {Plots/Data/overfit.csv};
+        \addplot [red, line width=0.8pt] table [x=x_n, y=s_n, col
+          sep=comma, forget plot] {Plots/Data/overfit.csv};
+        \addplot [black, line width=0.8pt] table [x=x_n, y=y_n, col
+          sep=comma] {Plots/Data/overfit.csv};
+        \addplot [black, line width=0.8pt, dashed] table [x=x, y=y, col
+          sep=comma] {Plots/Data/overfit_spline.csv};
+
+        \addlegendentry{\footnotesize{data}};
+        \addlegendentry{\footnotesize{$\mathcal{NN}_{\vartheta^*}$}};
+        \addlegendentry{\footnotesize{spline}};
+      \end{axis}
+    \end{tikzpicture}
+  \end{adjustbox}
+  \caption{For data of the form $y=\sin(\frac{x+\pi}{2 \pi}) +
+    \varepsilon,~ \varepsilon \sim \mathcal{N}(0,0.4)$
+    (\textcolor{blue}{blue dots}) the neural network constructed
+    according to the proof of Theorem~\ref{theo:overfit} (black) is
+    compared to the underlying signal (\textcolor{red}{red}). While the
+    network fits the training data without error, a regression spline
+    (black dashed) fits the data much better. For a test set of size 20
+    with uniformly distributed $x$ values and responses generated in the
+    same fashion as the training data, the MSE of the neural network is
+    0.30, while the MSE of the spline is only 0.14; the spline thus
+    generalizes much better.}
+  \label{fig:overfit}
+\end{figure}
+
+\clearpage
+\subsection{Convergence Behaviour of 1-dim. Randomized Shallow Neural
+  Networks}
+
+In this section we will analyze the connection between randomized
+shallow neural networks with one-dimensional input and regression
+splines. We will see that penalizing the size of the weights in
+training the randomized shallow neural network results in a function
+that minimizes the second derivative as the number of hidden nodes
+grows to infinity. In order to properly formulate this relation we
+will first need to introduce some definitions.
+
+\begin{Definition}[Randomized shallow neural network]
+  For an input dimension $d$, let $n \in \mathbb{N}$ be the number of
+  hidden nodes and $v(\omega) \in \mathbb{R}^{n \times d}, b(\omega)
+  \in \mathbb{R}^n$ randomly drawn weights. Then for a weight vector
+  $w$ the corresponding randomized shallow neural network is given by
+\[
+  \mathcal{RN}_{w, \omega} (x) = \sum_{k=1}^n w_k
+  \sigma\left(b_k(\omega) + \sum_{j=1}^d v_{k, j}(\omega) x_j\right).
+\]
+\label{def:rsnn}
+\end{Definition}

 \begin{Definition}[Ridge penalized Neural Network]
+  \label{def:rpnn}
   Let $\mathcal{RN}_{w, \omega}$ be a randomized shallow neural
   network, as introduced in Definition~\ref{def:rsnn}. Then the optimal ridge penalized
   network is given by
@@ -24,12 +256,11 @@ first need to introduce some definitions.
   \[
     w^{*,\tilde{\lambda}}(\omega) :\in \argmin_{w \in \mathbb{R}^n}
     \underbrace{ \left\{\overbrace{\sum_{i = 1}^N \left(\mathcal{RN}_{w,
-          \omega}(x_i^{\text{train}}) -
-        y_i^{\text{train}}\right)^2}^{L(\mathcal{RN}_{w, \omega})} +
-    \tilde{\lambda} \norm{w}_2^2\right\}}_{\eqqcolon F_n^{\tilde{\lambda}}(\mathcal{RN}_{w,\omega})}.
+          \omega}(x_i^{\text{train}}) -
+        y_i^{\text{train}}\right)^2}^{L(\mathcal{RN}_{w, \omega})} +
+    \tilde{\lambda} \norm{w}_2^2\right\}}_{\eqqcolon F_n^{\tilde{\lambda}}(\mathcal{RN}_{w,\omega})}.
   \]
 \end{Definition}
-\label{def:rpnn}
 In the ridge penalized Neural Network large weights are penalized, the
 extent of which can be tuned with the parameter $\tilde{\lambda}$. If
 $n$ is larger than the number of training samples $N$, then for
@@ -43,14 +274,12 @@ having minimal weights, resulting in the \textit{minimum norm
   network}
 \[
   w^{\text{min}} \in \argmin_{w \in \mathbb{R}^n} \norm{w}, \text{ s.t. }
-  \mathcal{RN}_{w,\omega}(x_i^{train}) = y_i^{train}, \, \forall i \in
+  \mathcal{RN}_{w,\omega}(x_i^{\text{train}}) = y_i^{\text{train}}, \, \forall i \in
   \left\{1,\dots,N\right\}.
 \]
 For $\tilde{\lambda} \to \infty$ the learned function will resemble the
 data less and less with the weights
-approaching $0$. Usually $\tilde{\lambda}$ lies between 0 and 1, as
-for larger values the focus of weight reduction is larger than fittig
-the data.\par
+approaching $0$.\par
 In order to make the notation more convenient, in the following the
 $\omega$ used to express the realized random parameters will no longer
 be explicitly mentioned.
@@ -60,10 +289,10 @@ be explicitly mentioned.
   Network according to Definition~\ref{def:rsnn}, then kinks depending
   on the random parameters can be observed.
 \[
-  \mathcal{RN}_w(x) = \sum_{k = 1}^n w_k \gamma(b_k + v_kx)
+  \mathcal{RN}_w(x) = \sum_{k = 1}^n w_k \sigma(b_k + v_kx)
 \]
-  Because we specified $\gamma(y) \coloneqq \max\left\{0, y\right\}$ a
-  kink in $\gamma$ can be observed at $\gamma(0) = 0$. As $b_k + v_kx = 0$ for $x
+  Because we specified $\sigma(y) \coloneqq \max\left\{0, y\right\}$ a
+  kink in $\sigma$ can be observed at $\sigma(0) = 0$. As $b_k + v_kx = 0$ for $x
   = -\frac{b_k}{v_k}$ we define the following:
   \begin{enumerate}[label=(\alph*)]
     \item Let $\xi_k \coloneqq -\frac{b_k}{v_k}$ be the $k$-th kink of $\mathcal{RN}_w$.
@@ -91,7 +320,7 @@ smooth approximation of the RSNN.
 \[
   \kappa_x(s) \coloneqq \mathds{1}_{\left\{\abs{s} \leq \frac{1}{2 \sqrt{n}
-      g_{\xi}(x)}\right\}}(s)\sqrt{n} g_{\xi}(x), \, \forall s \in \mathbb{R}
+      g_{\xi}(x)}\right\}}(s)\sqrt{n} g_{\xi}(x), \, \forall s \in \mathbb{R}
 \]

 Using this kernel we define a smooth approximation of
@@ -113,69 +342,120 @@ that the ridge penalized neural network as defined in
 Definition~\ref{def:rpnn} converges to a weighted regression spline, as
 the number of hidden nodes grows to infinity.

-\begin{Definition}[Weighted regression spline]
-  Let $x_i^{train}, y_i^{train} \in \mathbb{R}, i \in
+\begin{Definition}[Adapted weighted regression spline]
+  \label{def:wrs}
+  Let $x_i^{\text{train}}, y_i^{\text{train}} \in \mathbb{R}, i \in
   \left\{1,\dots,N\right\}$ be training data. For a given $\lambda \in
   \mathbb{R}_{>0}$ and a function $g: \mathbb{R} \to \mathbb{R}_{>0}$
   the weighted regression spline $f^{*, \lambda}_g$ is given by
   \[
     f^{*, \lambda}_g :\in \argmin_{\substack{f \in \mathcal{C}^2(\mathbb{R})
-      \\ \supp(f) \subseteq \supp(g)}} \underbrace{\left\{ \overbrace{\sum_{i =
-        1}^N \left(f(x_i^{train}) - y_i^{train}\right)^2}^{L(f)} +
-      \lambda g(0) \int_{\supp(g)}\frac{\left(f''(x)\right)^2}{g(x)}
-      dx\right\}}_{\eqqcolon F^{\lambda, g}(f)}.
+      \\ \supp(f) \subseteq \supp(g)}} \underbrace{\left\{ \overbrace{\sum_{i =
+        1}^N \left(f(x_i^{\text{train}}) - y_i^{\text{train}}\right)^2}^{L(f)} +
+      \lambda g(0) \int_{\supp(g)}\frac{\left(f''(x)\right)^2}{g(x)}
+      dx\right\}}_{\eqqcolon F^{\lambda, g}(f)}.
 \]
+  \todo{requirement on the derivative of $f$, or not after all?}
 \end{Definition}
 Similarly to ridge weight penalized neural networks, the parameter
 $\lambda$ controls a trade-off between accuracy on the training data
 and smoothness or low second derivative. For $g \equiv 1$ and
 $\lambda \to 0$ the resulting function $f^{*, 0+}$ will interpolate
 the training data while minimizing
-the second derivative. Such a function is known as smooth spline
-interpolation or (cubic) smoothing spline.
+the second derivative. Such a function is known as cubic spline
+interpolation.
+\todo{cite cubic spline}
 \[
-  f^{*, 0+} \text{ smooth spline interpolation: }
+  f^{*, 0+} \text{ smooth spline interpolation: }
 \]
 \[
   f^{*, 0+} \coloneqq \lim_{\lambda \to 0+} f^{*, \lambda}_1 \in
-  \argmin_{\substack{f \in \mathcal{C}^2\mathbb{R}, \\ f(x_i^{train}) =
-      y_i^{train}} = \left( \int _{\mathbb{R}} (f''(x))^2dx\right).
+  \argmin_{\substack{f \in \mathcal{C}^2(\mathbb{R}), \\ f(x_i^{\text{train}}) =
+      y_i^{\text{train}}}} \int_{\mathbb{R}} \left(f''(x)\right)^2 dx.
 \]
+For $\lambda \to \infty$ on the other hand $f_g^{*,\lambda}$ converges
+to the linear regression of the data.
+\begin{Definition}[Spline approximating randomised shallow neural
+  network]
+  \label{def:sann}
+  Let $\mathcal{RN}$ be a randomised shallow neural network according
+  to Definition~\ref{def:rsnn} and $f^{*, \lambda}_g$ be the adapted
+  weighted regression spline as introduced in
+  Definition~\ref{def:wrs}. Then the randomised shallow neural network
+  approximating $f^{*, \lambda}_g$ is given by
+  \[
+    \mathcal{RN}_{\tilde{w}}(x) = \sum_{k = 1}^n \tilde{w}_k \sigma(b_k + v_k x),
+  \]
+  with the weights $\tilde{w}_k$ defined as
+  \[
+    \tilde{w}_k \coloneqq \frac{h_{k,n} v_k}{\mathbb{E}[v^2 \vert \xi
+      = \xi_k]} (f_g^{*, \lambda})''(\xi_k).
+  \]
+\end{Definition}
+
+The approximating nature of the network in
+Definition~\ref{def:sann} can be seen by examining \todo{find a better
+  word} the first derivative of $\mathcal{RN}_{\tilde{w}}(x)$, which is
+given by
+\begin{align}
+  \frac{\partial \mathcal{RN}_{\tilde{w}}}{\partial x}
+  \Big{|}_{x} &= \sum_{k=1}^n \tilde{w}_k v_k \mathds{1}_{\left\{b_k + v_k x >
+                0\right\}} = \sum_{\substack{k \in \mathbb{N} \\ \xi_k <
+                x}} \tilde{w}_k v_k \nonumber \\
+              &= \frac{1}{n} \sum_{\substack{k \in \mathbb{N} \\
+                \xi_k < x}} \frac{v_k^2}{g_{\xi}(\xi_k) \mathbb{E}[v^2 \vert \xi
+                = \xi_k]} (f_g^{*, \lambda})''(\xi_k). \label{eq:derivnn}
+\end{align}
+\todo{proper derivative notation}
+As the expression (\ref{eq:derivnn}) behaves similarly to a
+Riemann sum, for $n \to \infty$ it will converge to the first
+derivative of $f^{*,\lambda}_g$. A formal proof of this behaviour
+is given in Lemma~\ref{lem:s0}, and a numerical sketch of the
+convergence is given below.
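+The following Python sketch is illustrative only: it assumes $\xi_k
+\sim \text{Unif}(-5,5)$ and $v_k \sim \mathcal{N}(0, 5)$ (read as
+standard deviation 5) independent of $\xi_k$, so that $g_{\xi} \equiv
+\nicefrac{1}{10}$ and $\mathbb{E}[v^2 \vert \xi] = 25$, and it uses
+$f(x) = \sin(x)$ in place of $f_g^{*,\lambda}$. It compares the sum in
+(\ref{eq:derivnn}) with $\int_{-5}^x f''(u)\,du$:
+\begin{verbatim}
+import numpy as np
+
+rng = np.random.default_rng(0)
+n = 100_000                       # number of hidden nodes
+xi = rng.uniform(-5, 5, n)        # kink positions, g_xi = 1/10 on [-5, 5]
+v = rng.normal(0, 5, n)           # v independent of xi => E[v^2 | xi] = 25
+h = 1 / (n * (1 / 10))            # h_{k,n} ~ 1 / (n g_xi(xi_k))
+w = h * v / 25 * (-np.sin(xi))    # tilde w_k with f''(xi_k) = -sin(xi_k)
+
+for x in [-3.0, -1.0, 0.0, 1.0, 3.0]:
+    rn_prime = np.sum((w * v)[xi < x])   # derivative of RN_w~ at x
+    target = np.cos(x) - np.cos(-5.0)    # integral of f'' from -5 to x
+    print(f"x = {x:+.1f}: RN' = {rn_prime:+.4f}, int f'' = {target:+.4f}")
+\end{verbatim}
+For growing $n$ the two columns agree up to the Monte Carlo error and
+the boundary term $f'(-5)$, which vanishes for functions satisfying
+$\supp(f) \subseteq \supp(g)$ as required in Definition~\ref{def:wrs}.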
+In order to formulate the theorem describing the convergence of
+$\mathcal{RN}_w$ we need to make a couple of assumptions.
+\todo{better wording}
+
+\begin{Assumption}~
+  \label{ass:theo38}
+  \begin{enumerate}[label=(\alph*)]
-    \item The probability density function of the kinks $\xi_k$, namely $g_\xi$
+    \item The probability density function of the kinks $\xi_k$,
+      namely $g_{\xi}$ as defined in Definition~\ref{def:kink}, exists
+      and is well defined.
+    \item The density function $g_\xi$ has compact support on
      $\supp(g_{\xi})$.
-    \item The density $g_{\xi}$ is uniformly continuous on $\supp(g_{\xi})$.
-    \item $g_{\xi}(0) \neq 0$
+    \item The density function $g_{\xi}$ is uniformly continuous on $\supp(g_{\xi})$.
+    \item $g_{\xi}(0) \neq 0$.
+    \item $\frac{1}{g_{\xi}}\Big|_{\supp(g_{\xi})}$ is uniformly
+      continuous on $\supp(g_{\xi})$.
+    \item The conditional distribution $\mathcal{L}(v_k|\xi_k = x)$
+      is uniformly continuous on $\supp(g_{\xi})$.
+    \item $\mathbb{E}\left[v_k^2\right] < \infty$.
   \end{enumerate}
 \end{Assumption}

-\begin{Theorem}[Ridge weight penaltiy corresponds to adapted spline]
-  \label{theo:main1}
-  For arbitrary training data \(\left(x_i^{train}, y_i^{train}\right)\) it holds
+As we will prove the proposition in the Sobolev space, we hereby
+introduce it and its induced \todo{right word?} norm.
+
+\begin{Definition}[Sobolev Space]
+  For $K \subset \mathbb{R}^n$ open and $1 \leq p \leq \infty$ we
+  define the Sobolev space $W^{k,p}(K)$ as the space containing all
+  real valued functions $u \in L^p(K)$ such that for every multi-index
+  $\alpha \in \mathbb{N}^n$ with $\abs{\alpha} \leq
+  k$ the mixed partial derivatives
  \[
-    \plimn \norm{\mathcal{RN^{*, \tilde{\lambda}}} - f^{*,
-      \tilde{\lambda}}_{g, \pm}}_{W^{1,\infty}(K)} = 0.
+    u^{(\alpha)} = \frac{\partial^{\abs{\alpha}} u}{\partial
+      x_1^{\alpha_1} \dots \partial x_n^{\alpha_n}}
  \]
-
-  With
-  \begin{align*}
-    \label{eq:1}
-    \tilde{\lambda} &\coloneqq \lambda n g(0), \\
-    g(x) &\coloneqq
-           g_{\xi}(x)\mathbb{E}\left[ v_k^2 \vert \xi_k = x \right], \forall x
-           \in \mathbb{R}
-  \end{align*}
-  and \(RN^{*, \tilde{\lambda}}\), \(f^{*,\tilde{\lambda}}_{g, \pm}\)
-  as defined in ??? and ??? respectively.
-\end{Theorem}
-In order to proof Theo~\ref{theo:main1} we need to proof a number of
-auxiliary Lemmata first.
-
-\begin{Definition}[Sobolev Norm]
+  exist in the weak sense and
+  \[
+    \norm{u^{(\alpha)}}_{L^p} < \infty.
+  \]
+  \todo{finish this}
  \label{def:sobonorm}
  The natural norm of the Sobolev space is given by
  \[
@@ -191,7 +471,51 @@ auxiliary Lemmata first.
  \]
 \end{Definition}

+With these assumptions in place we can formulate the main theorem.
+\todo{reference to the space}
+
+
+\begin{Theorem}[Ridge weight penalty corresponds to weighted regression spline]
+  \label{theo:main1}
+  For $N \in \mathbb{N}$ arbitrary training data
+  \(\left(x_i^{\text{train}}, y_i^{\text{train}}
+  \right)\) and $\mathcal{RN}^{*, \tilde{\lambda}}, f_g^{*, \lambda}$
+  according to Definition~\ref{def:rpnn} and Definition~\ref{def:wrs}
+  respectively, with Assumption~\ref{ass:theo38} it holds
+
+  \begin{equation}
+    \label{eq:main1}
+    \plimn \norm{\mathcal{RN^{*, \tilde{\lambda}}} - f^{*,
+        \lambda}_{g}}_{W^{1,\infty}(K)} = 0.
+  \end{equation}
+
+  With
+  \begin{align*}
+    g(x) & \coloneqq g_{\xi}(x)\mathbb{E}\left[ v_k^2 \vert \xi_k = x
+           \right], \forall x \in \mathbb{R}, \\
+    \tilde{\lambda} & \coloneqq \lambda n g(0).
+  \end{align*}
+\end{Theorem}
+We will prove Theorem~\ref{theo:main1} by showing that
+\begin{equation}
+  \label{eq:main2}
+  \plimn \norm{\mathcal{RN}^{*, \tilde{\lambda}} - f^{w^*}}_{W^{1,
+      \infty}(K)} = 0
+\end{equation}
+and
+\begin{equation}
+  \label{eq:main3}
+  \plimn \norm{f^{w^*} - f_g^{*, \lambda}}_{W^{1,\infty}(K)} = 0
+\end{equation}
+and then using the triangle inequality to obtain (\ref{eq:main1}). In
+order to prove (\ref{eq:main2}) and (\ref{eq:main3}) we will need to
+introduce a number of auxiliary lemmata; proofs of these will be
+provided in the appendix, as they would exceed the scope here.
+
+
+
 \begin{Lemma}[Poincar\'e type inequality]
+  \label{lem:pieq}
  Let \(f:\mathbb{R} \to \mathbb{R}\) differentiable with \(f' :
  \mathbb{R} \to \mathbb{R}\) Lebesgue integrable.
Then for \(K=[a,b] \subset \mathbb{R}\) with \(f(a)=0\) it holds that @@ -229,20 +553,21 @@ auxiliary Lemmata first. % get (\ref{eq:pti1}). % By using the Hölder inequality, we can proof the second claim. % \begin{align*} - % \norm{f'}_{L^{\infty}(K)} &= \sup_{x \in K} \abs{\int_a^bf''(y) - % \mathds{1}_{[a,x]}(y)dy} \leq \sup_{x \in - % K}\norm{f''\mathds{1}_{[a,x]}}_{L^1(K)}\\ - % &\hspace{-6pt} \stackrel{\text{Hölder}}{\leq} sup_{x - % \in - % K}\norm{f''}_{L^2(K)}\norm{\mathds{1}_{[a,x]}}_{L^2(K)} - % = \abs{b-a}\norm{f''}_{L^2(K)}. - % \end{align*} - % Thus (\ref{eq:pti2}) follows with \(C_K^2 \coloneqq - % \abs{b-a}C_K^{\infty}\). - % \qed + % \norm{f'}_{L^{\infty}(K)} &= \sup_{x \in K} \abs{\int_a^bf''(y) + % \mathds{1}_{[a,x]}(y)dy} \leq \sup_{x \in + % K}\norm{f''\mathds{1}_{[a,x]}}_{L^1(K)}\\ + % &\hspace{-6pt} \stackrel{\text{Hölder}}{\leq} sup_{x + % \in + % K}\norm{f''}_{L^2(K)}\norm{\mathds{1}_{[a,x]}}_{L^2(K)} + % = \abs{b-a}\norm{f''}_{L^2(K)}. + % \end{align*} + % Thus (\ref{eq:pti2}) follows with \(C_K^2 \coloneqq + % \abs{b-a}C_K^{\infty}\). + % \qed \end{Lemma} \begin{Lemma} + \label{lem:cnvh} Let $\mathcal{RN}$ be a shallow Neural network. For \(\varphi : \mathbb{R}^2 \to \mathbb{R}\) uniformly continous such that \[ @@ -252,68 +577,221 @@ auxiliary Lemmata first. it holds, that \[ \plimn \sum_{k \in \kappa : \xi_k < T} \varphi(\xi_k, v_k) - \frac{\bar{h}_k}{2} - =\int_{max\left\{C_{g_{\xi}}^l,T\right\}}^{min\left\{C_{g_{\xi}}^u,T\right\}} + h_{k,n} + =\int_{\min\left\{C_{g_{\xi}}^l, T\right\}}^{min\left\{C_{g_{\xi}}^u,T\right\}} \mathbb{E}\left[\varphi(\xi, v) \vert \xi = x \right] dx \] uniformly in \(T \in K\). - % \proof -% For \(T \leq C_{g_{\xi}}^l\) both sides equal 0, so it is sufficient to -% consider \(T > C_{g_{\xi}}^l\). With \(\varphi\) and -% \(\nicefrac{1}{g_{\xi}}\) uniformly continous in \(\xi\), -% \begin{equation} -% \label{eq:psi_stet} -% \forall \varepsilon > 0 : \exists \delta(\varepsilon) : \forall -% \abs{\xi - \xi'} < \delta(\varepsilon) : \abs{\varphi(\xi, v) -% \frac{1}{g_{\xi}(\xi)} - \varphi(\xi', v) -% \frac{1}{g_{\xi}(\xi')}} < \varepsilon -% \end{equation} -% uniformly in \(v\). In order to -% save space we use the notation \((a \wedge b) \coloneqq \min\{a,b\}\) for $a$ and $b -% \in \mathbb{R}$. W.l.o.g. assume \(\sup(g_{\xi})\) in an -% intervall. 
By splitting the interval in disjoint strips of length \(\delta -% \leq \delta(\varepsilon)\) we get: - -% \[ -% \underbrace{\sum_{k \in \kappa : \xi_k < T} \varphi(\xi_k, v_k) -% \frac{\bar{h}_k}{2}}_{\circled{1}} = -% \underbrace{\sum_{l \in \mathbb{Z}: -% \left[\delta l, \delta (l + 1)\right] \subseteq -% \left[C_{g_{\xi}}^l, C_{g_{\xi}}^u \wedge T -% \right]}}_{\coloneqq \, l \in I_{\delta}} \left( \, \sum_{\substack{k \in \kappa\\ -% \xi_k \in \left[\delta l, \delta (l + 1)\right]}} -% \varphi\left(\xi_k, v_k\right)\frac{\bar{h}_k}{2} \right) -% \] -% Using (\ref{eq:psi_stet}) we can approximate $\circled{1}$ by -% \begin{align*} -% \circled{1} & \approx \sum_{l \in I_{\delta}} \left( \, \sum_{\substack{k \in \kappa\\ -% \xi_k \in \left[\delta l, \delta (l + 1)\right]}} -% \left(\varphi\left(l\delta, v_k\right)\frac{1}{g_{\xi}(l\delta)} -% \pm \varepsilon\right)\frac{1}{n} \underbrace{\frac{\abs{\left\{m \in -% \kappa : \xi_m \in [\delta l, \delta(l + 1)]\right\}}}{\abs{\left\{m \in -% \kappa : \xi_m \in [\delta l, \delta(l + 1)]\right\}}}}_{= -% 1}\right) \\ -% % \intertext{} -% &= \sum_{l \in I_{\delta}} \left( \frac{ \sum_{ \substack{k \in \kappa\\ -% \xi_k \in \left[\delta l, \delta (l + 1)\right]}} -% \varphi\left(l\delta, v_k\right)} -% {\abs{\left\{m \in -% \kappa : \xi_m \in [\delta l, \delta(l + 1)]\right\}}}\frac{\abs{\left\{m \in -% \kappa : \xi_m \in [\delta l, \delta(l + -% 1)]\right\}}}{ng_{\xi}(l\delta)}\right) \pm \varepsilon .\\ -% \intertext{We use the mean to approximate the number of kinks in -% each $\delta$-strip, as it follows a bonomial distribution this -% amounts to -% \[ -% \mathbb{E}\left[\abs{\left\{m \in \kappa : \xi_m \in [\delta l, -% \delta(l + 1)]\right\}\right]} = n \int_{[\delta l, \delta (l + -% 1)]} g_{\xi}(x)dx \approx n (\delta g_{\xi}(l\delta) \pm -% \tilde{\varepsilon}). -% \] -% Bla Bla Bla $v_k$} -% \circled{1} & \approx -% \end{align*} + % \proof + % For \(T \leq C_{g_{\xi}}^l\) both sides equal 0, so it is sufficient to + % consider \(T > C_{g_{\xi}}^l\). With \(\varphi\) and + % \(\nicefrac{1}{g_{\xi}}\) uniformly continous in \(\xi\), + % \begin{equation} + % \label{eq:psi_stet} + % \forall \varepsilon > 0 : \exists \delta(\varepsilon) : \forall + % \abs{\xi - \xi'} < \delta(\varepsilon) : \abs{\varphi(\xi, v) + % \frac{1}{g_{\xi}(\xi)} - \varphi(\xi', v) + % \frac{1}{g_{\xi}(\xi')}} < \varepsilon + % \end{equation} + % uniformly in \(v\). In order to + % save space we use the notation \((a \wedge b) \coloneqq \min\{a,b\}\) for $a$ and $b + % \in \mathbb{R}$. W.l.o.g. assume \(\sup(g_{\xi})\) in an + % intervall. 
By splitting the interval in disjoint strips of length \(\delta + % \leq \delta(\varepsilon)\) we get: + + % \[ + % \underbrace{\sum_{k \in \kappa : \xi_k < T} \varphi(\xi_k, v_k) + % \frac{\bar{h}_k}{2}}_{\circled{1}} = + % \underbrace{\sum_{l \in \mathbb{Z}: + % \left[\delta l, \delta (l + 1)\right] \subseteq + % \left[C_{g_{\xi}}^l, C_{g_{\xi}}^u \wedge T + % \right]}}_{\coloneqq \, l \in I_{\delta}} \left( \, \sum_{\substack{k \in \kappa\\ + % \xi_k \in \left[\delta l, \delta (l + 1)\right]}} + % \varphi\left(\xi_k, v_k\right)\frac{\bar{h}_k}{2} \right) + % \] + % Using (\ref{eq:psi_stet}) we can approximate $\circled{1}$ by + % \begin{align*} + % \circled{1} & \approx \sum_{l \in I_{\delta}} \left( \, \sum_{\substack{k \in \kappa\\ + % \xi_k \in \left[\delta l, \delta (l + 1)\right]}} + % \left(\varphi\left(l\delta, v_k\right)\frac{1}{g_{\xi}(l\delta)} + % \pm \varepsilon\right)\frac{1}{n} \underbrace{\frac{\abs{\left\{m \in + % \kappa : \xi_m \in [\delta l, \delta(l + 1)]\right\}}}{\abs{\left\{m \in + % \kappa : \xi_m \in [\delta l, \delta(l + 1)]\right\}}}}_{= + % 1}\right) \\ + % % \intertext{} + % &= \sum_{l \in I_{\delta}} \left( \frac{ \sum_{ \substack{k \in \kappa\\ + % \xi_k \in \left[\delta l, \delta (l + 1)\right]}} + % \varphi\left(l\delta, v_k\right)} + % {\abs{\left\{m \in + % \kappa : \xi_m \in [\delta l, \delta(l + 1)]\right\}}}\frac{\abs{\left\{m \in + % \kappa : \xi_m \in [\delta l, \delta(l + + % 1)]\right\}}}{ng_{\xi}(l\delta)}\right) \pm \varepsilon .\\ + % \intertext{We use the mean to approximate the number of kinks in + % each $\delta$-strip, as it follows a bonomial distribution this + % amounts to + % \[ + % \mathbb{E}\left[\abs{\left\{m \in \kappa : \xi_m \in [\delta l, + % \delta(l + 1)]\right\}\right]} = n \int_{[\delta l, \delta (l + + % 1)]} g_{\xi}(x)dx \approx n (\delta g_{\xi}(l\delta) \pm + % \tilde{\varepsilon}). + % \] + % Bla Bla Bla $v_k$} + % \circled{1} & \approx + % \end{align*} +\end{Lemma} + +\begin{Lemma}[Step 0] + For any $\lambda > 0$, training data $(x_i^{\text{train}} + y_i^{\text{train}}) \in \mathbb{R}^2$, with $ i \in + \left\{1,\dots,N\right\}$ and subset $K \subset \mathbb{R}$ the spline approximating randomized + shallow neural network $\mathcal{RN}_{\tilde{w}}$ converges to the + regression spline $f^{*, \lambda}_g$ in + $\norm{.}_{W^{1,\infty}(K)}$ as the node count $n$ increases, + \begin{equation} + \label{eq:s0} + \plimn \norm{\mathcal{RN}_{\tilde{w}} - f^{*, \lambda}_g}_{W^{1, + \infty}(K)} = 0 + \end{equation} + \proof + Using Lemma~\ref{lem:pieq} it is sufficient to show + \[ + \plimn \norm{\mathcal{RN}_{\tilde{w}}' - (f^{*, + \lambda}_g)'}_{L^{\infty}} = 0. 
+  \]
+  This can be achieved by using Lemma~\ref{lem:cnvh} with $\varphi(\xi_k,
+  v_k) = \frac{v_k^2}{\mathbb{E}[v^2|\xi = \xi_k]} (f^{*, \lambda}_g)''(\xi_k)$,
+  thus obtaining
+  \begin{align*}
+    \plimn \frac{\partial \mathcal{RN}_{\tilde{w}}}{\partial x}
+    \stackrel{(\ref{eq:derivnn})}{=}
+    & \plimn \sum_{\substack{k \in \mathbb{N} \\
+        \xi_k < x}} \frac{v_k^2}{\mathbb{E}[v^2 \vert \xi
+      = \xi_k]} (f_g^{*, \lambda})''(\xi_k) h_{k,n}
+      \stackrel{\text{Lemma}~\ref{lem:cnvh}}{=} \\
+    \stackrel{\phantom{(\ref{eq:derivnn})}}{=}
+    & \int_{\min\left\{C_{g_{\xi}}^l,T\right\}}^{\min\left\{C_{g_{\xi}}^u,T\right\}}
+      \mathbb{E}\left[\frac{v^2}{\mathbb{E}[v^2|\xi = x]} (f^{*,
+        \lambda}_g)''(\xi) \,\Big\vert\,
+      \xi = x \right] dx \equals^{\text{Tower-}}_{\text{property}} \\
+    \stackrel{\phantom{(\ref{eq:derivnn})}}{=}
+    & \int_{\min\left\{C_{g_{\xi}}^l,
+      T\right\}}^{\min\left\{C_{g_{\xi}}^u,T\right\}}(f^{*,\lambda}_g)''(x)\, dx.
+  \end{align*}
+  By the fundamental theorem of calculus and $\supp(f') \subset
+  \supp(f)$, (\ref{eq:s0}) follows with Lemma~\ref{lem:pieq}.
+  \qed
+\end{Lemma}
+
+\begin{Lemma}[Step 2]
+  For any $\lambda > 0$ and training data $(x_i^{\text{train}},
+  y_i^{\text{train}}) \in \mathbb{R}^2, \, i \in
+  \left\{1,\dots,N\right\}$, we have
+  \[
+    \plimn F^{\tilde{\lambda}}_n(\mathcal{RN}_{\tilde{w}}) =
+    F^{\lambda, g}(f^{*, \lambda}_g) = 0.
+  \]
+  \proof
+  This can be proven by showing ...
+\end{Lemma}
+
+\begin{Lemma}[Step 3]
+  For any $\lambda > 0$ and training data $(x_i^{\text{train}},
+  y_i^{\text{train}}) \in \mathbb{R}^2, \, i \in
+  \left\{1,\dots,N\right\}$, with $w^*$ and $\tilde{\lambda}$ as
+  defined in Definition~\ref{def:rpnn} and Theorem~\ref{theo:main1}
+  respectively, it holds
+  \[
+    \plimn \norm{\mathcal{RN}^{*,\tilde{\lambda}} -
+      f^{w*, \tilde{\lambda}}}_{W^{1,\infty}(K)} = 0.
+  \]
+\end{Lemma}
+
+\begin{Lemma}[Step 4]
+  For any $\lambda > 0$ and training data $(x_i^{\text{train}},
+  y_i^{\text{train}}) \in \mathbb{R}^2, \, i \in
+  \left\{1,\dots,N\right\}$, with $w^*$ and $\tilde{\lambda}$ as
+  defined in Definition~\ref{def:rpnn} and Theorem~\ref{theo:main1}
+  respectively, it holds
+  \[
+    \plimn \abs{F_n^{\lambda}(\mathcal{RN}^{*,\tilde{\lambda}}) -
+      F^{\lambda, g}(f^{w*, \tilde{\lambda}})} = 0.
+  \]
+\end{Lemma}
+
+\begin{Lemma}[Step 7]
+  For any $\lambda > 0$ and training data $(x_i^{\text{train}},
+  y_i^{\text{train}}) \in \mathbb{R}^2, \, i \in
+  \left\{1,\dots,N\right\}$, and for any sequence of functions $f^n \in
+  W^{2,2}$ with
+  \[
+    \plimn F^{\lambda, g} (f^n) = F^{\lambda, g}(f^{*, \lambda}),
+  \]
+  it follows
+  \[
+    \plimn \norm{f^n - f^{*, \lambda}} = 0.
+  \]
+\end{Lemma}
+
+\textcite{heiss2019} further show a link between ridge penalized
+networks and randomized shallow neural networks which are trained with
+gradient descent that is stopped after a certain number of iterations.
+
+\newpage
+\subsection{Simulations}
+In the following, the behaviour described in Theorem~\ref{theo:main1}
+is visualized in a simulated example. For this, two sets of training
+data have been generated:
+\begin{itemize}
+  \item $\text{data}_A = (x_{i, A}^{\text{train}},
+    y_{i,A}^{\text{train}})$ with
+    \begin{align*}
+      x_{i, A}^{\text{train}} &\coloneqq -\pi + \frac{2 \pi}{5} (i - 1),
+      ~ i \in \left\{1, \dots, 6\right\}, \\
+      y_{i, A}^{\text{train}} &\coloneqq \sin( x_{i, A}^{\text{train}})
+    \end{align*}
+  \item $\text{data}_B = (x_{i, B}^{\text{train}}, y_{i,
+      B}^{\text{train}})$ with
+    \begin{align*}
+      x_{i, B}^{\text{train}} &\coloneqq \pi\frac{i - 8}{7},
+      ~ i \in \left\{1, \dots, 15\right\}, \\
+      y_{i, B}^{\text{train}} &\coloneqq \sin( x_{i, B}^{\text{train}})
+    \end{align*}
+\end{itemize}
+For the $\mathcal{RN}$ the random weights are distributed
+as follows:
+\begin{align*}
+  \xi_i &\stackrel{i.i.d.}{\sim} \text{Unif}(-5,5), \\
+  v_i &\stackrel{i.i.d.}{\sim} \mathcal{N}(0, 5), \\
+  b_i &\stackrel{\phantom{i.i.d.}}{=} -\xi_i v_i.
+\end{align*}
+Note that by these choices of distributions, $g$ as defined in
+Theorem~\ref{theo:main1} equates to $g(x) =
+\frac{\mathbb{E}[v_k^2|\xi_k = x]}{10}$. A minimal sketch of this
+setup is given below.
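+The following Python sketch (illustrative only: the closed-form ridge
+solution replaces the gradient descent iterations used for the
+figures, $\lambda$ is an arbitrary choice, and $\mathcal{N}(0,5)$ is
+read as standard deviation 5) reproduces the setup for $\text{data}_A$:
+\begin{verbatim}
+import numpy as np
+
+rng = np.random.default_rng(1)
+x_tr = -np.pi + 2 * np.pi / 5 * np.arange(6)    # data_A inputs
+y_tr = np.sin(x_tr)                             # data_A responses
+
+n = 2_000                                       # hidden nodes
+xi = rng.uniform(-5, 5, n)                      # kinks
+v = rng.normal(0, 5, n)
+b = -xi * v
+
+lam = 0.1                                       # lambda (illustrative)
+g0 = (1 / 10) * 25                              # g(0) = g_xi(0) E[v^2|xi=0]
+tlam = lam * n * g0                             # tilde lambda = lambda n g(0)
+
+Phi = np.maximum(0, b + np.outer(x_tr, v))      # N x n ReLU feature matrix
+# ridge solution w* = Phi^T (Phi Phi^T + tlam I)^{-1} y (dual form, N << n)
+w = Phi.T @ np.linalg.solve(Phi @ Phi.T + tlam * np.eye(len(x_tr)), y_tr)
+
+x_plot = np.linspace(-np.pi, np.pi, 9)
+rn = np.maximum(0, b + np.outer(x_plot, v)) @ w # RN^{*, tilde lambda}(x)
+print(np.column_stack([x_plot, rn, np.sin(x_plot)]))
+\end{verbatim}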
+In order to utilize the
+smoothing spline implemented in Matlab, $g$ has been simplified to $g
+\equiv \frac{1}{10}$ instead. For all figures $f_1^{*, \lambda}$ has
+been calculated with Matlab's ..... As ... minimizes
+\[
+  \bar{\lambda} \sum_{i=1}^N(y_i^{\text{train}} -
+  f(x_i^{\text{train}}))^2 + (1 -
+  \bar{\lambda}) \int (f''(x))^2 dx,
+\]
+the smoothing parameter used for fitting is $\bar{\lambda} =
+\frac{1}{1 + \lambda}$. The parameter $\tilde{\lambda}$ for training
+the networks is chosen as defined in Theorem~\ref{theo:main1} and each
+network is trained on the full training data for 5000 iterations using
+gradient descent. The
+results are given in Figure~\ref{blblb}. There it can be seen that on
+the interval of the training data, $[-\pi, \pi]$, the neural network
+and the smoothing spline are nearly identical, in line with the
+proposition.
+
+\input{Plots/RN_vs_RS}
+
+
%%% Local Variables:
%%% mode: latex
%%% TeX-master: "main"