\documentclass{report}
\usepackage[utf8]{inputenc}
\usepackage[english]{babel}
\usepackage[T1]{fontenc}
\usepackage{xcolor}
\definecolor{maroon}{cmyk}{0, 0.87, 0.68, 0.32}
\definecolor{halfgray}{gray}{0.55}
\definecolor{ipython_frame}{RGB}{207, 207, 207}
\definecolor{ipython_bg}{RGB}{247, 247, 247}
\definecolor{ipython_red}{RGB}{186, 33, 33}
\definecolor{ipython_green}{RGB}{0, 128, 0}
\definecolor{ipython_cyan}{RGB}{64, 128, 128}
\definecolor{ipython_purple}{RGB}{110, 64, 130}
\usepackage{listings}
\usepackage{float}
\newfloat{lstfloat}{htbp}{lop}
\floatname{lstfloat}{Listing}
\def\lstfloatautorefname{Listing}
\lstset{
breaklines=true,
%
extendedchars=true,
literate=
{á}{{\'a}}1 {é}{{\'e}}1 {í}{{\'i}}1 {ó}{{\'o}}1 {ú}{{\'u}}1
{Á}{{\'A}}1 {É}{{\'E}}1 {Í}{{\'I}}1 {Ó}{{\'O}}1 {Ú}{{\'U}}1
{à}{{\`a}}1 {è}{{\`e}}1 {ì}{{\`i}}1 {ò}{{\`o}}1 {ù}{{\`u}}1
{À}{{\`A}}1 {È}{{\`E}}1 {Ì}{{\`I}}1 {Ò}{{\`O}}1 {Ù}{{\`U}}1
{ä}{{\"a}}1 {ë}{{\"e}}1 {ï}{{\"i}}1 {ö}{{\"o}}1 {ü}{{\"u}}1
{Ä}{{\"A}}1 {Ë}{{\"E}}1 {Ï}{{\"I}}1 {Ö}{{\"O}}1 {Ü}{{\"U}}1
{â}{{\^a}}1 {ê}{{\^e}}1 {î}{{\^i}}1 {ô}{{\^o}}1 {û}{{\^u}}1
{Â}{{\^A}}1 {Ê}{{\^E}}1 {Î}{{\^I}}1 {Ô}{{\^O}}1 {Û}{{\^U}}1
{œ}{{\oe}}1 {Œ}{{\OE}}1 {æ}{{\ae}}1 {Æ}{{\AE}}1 {ß}{{\ss}}1
{ç}{{\c c}}1 {Ç}{{\c C}}1 {ø}{{\o}}1 {å}{{\r a}}1 {Å}{{\r A}}1
{€}{{\EUR}}1 {£}{{\pounds}}1
}
%%
%% Python definition (c) 1998 Michael Weber
%% Additional definitions (2013) Alexis Dimitriadis
%% modified by me (the definition must not contain empty lines)
%%
\lstdefinelanguage{iPython}{
morekeywords={access,and,break,class,continue,def,del,elif,else,except,exec,finally,for,from,global,if,import,
in,is,lambda,not,or,pass,print,raise,return,try,while},%
%
% Built-ins
morekeywords=[2]{abs,all,any,basestring,bin,bool,bytearray,callable,chr,classmethod,cmp,compile,complex,delattr,dict,dir,divmod,enumerate,eval,execfile,file,filter,float,format,frozenset,getattr,globals,hasattr,hash,help,hex,id,input,int,isinstance,issubclass,iter,len,list,locals,long,map,max,memoryview,min,next,object,oct,open,ord,pow,property,range,raw_input,reduce,reload,repr,reversed,round,set,setattr,slice,sorted,staticmethod,str,sum,super,tuple,type,unichr,unicode,vars,xrange,zip,apply,buffer,coerce,intern,val},%
%
sensitive=true,%
morecomment=[l]\#,%
morestring=[b]',%
morestring=[b]",%
%
morestring=[s]{'''}{'''},% used for documentation text (multiline strings)
morestring=[s]{"""}{"""},% added by Philipp Matthias Hahn
%
morestring=[s]{r'}{'},% `raw' strings
morestring=[s]{r"}{"},%
morestring=[s]{r'''}{'''},%
morestring=[s]{r"""}{"""},%
morestring=[s]{u'}{'},% unicode strings
morestring=[s]{u"}{"},%
morestring=[s]{u'''}{'''},%
morestring=[s]{u"""}{"""},%
%
% {replace}{replacement}{length of replace}
% *{-}{-}{1} will not replace in comments and so on
literate=
{á}{{\'a}}1 {é}{{\'e}}1 {í}{{\'i}}1 {ó}{{\'o}}1 {ú}{{\'u}}1
{Á}{{\'A}}1 {É}{{\'E}}1 {Í}{{\'I}}1 {Ó}{{\'O}}1 {Ú}{{\'U}}1
{à}{{\`a}}1 {è}{{\`e}}1 {ì}{{\`i}}1 {ò}{{\`o}}1 {ù}{{\`u}}1
{À}{{\`A}}1 {È}{{\`E}}1 {Ì}{{\`I}}1 {Ò}{{\`O}}1 {Ù}{{\`U}}1
{ä}{{\"a}}1 {ë}{{\"e}}1 {ï}{{\"i}}1 {ö}{{\"o}}1 {ü}{{\"u}}1
{Ä}{{\"A}}1 {Ë}{{\"E}}1 {Ï}{{\"I}}1 {Ö}{{\"O}}1 {Ü}{{\"U}}1
{â}{{\^a}}1 {ê}{{\^e}}1 {î}{{\^i}}1 {ô}{{\^o}}1 {û}{{\^u}}1
{Â}{{\^A}}1 {Ê}{{\^E}}1 {Î}{{\^I}}1 {Ô}{{\^O}}1 {Û}{{\^U}}1
{œ}{{\oe}}1 {Œ}{{\OE}}1 {æ}{{\ae}}1 {Æ}{{\AE}}1 {ß}{{\ss}}1
{ç}{{\c c}}1 {Ç}{{\c C}}1 {ø}{{\o}}1 {å}{{\r a}}1 {Å}{{\r A}}1
{€}{{\EUR}}1 {£}{{\pounds}}1
%
{^}{{{\color{ipython_purple}\^{}}}}1
{=}{{{\color{ipython_purple}=}}}1
%
{+}{{{\color{ipython_purple}+}}}1
{*}{{{\color{ipython_purple}$^\ast$}}}1
{/}{{{\color{ipython_purple}/}}}1
%
{+=}{{{+=}}}2
{-=}{{{-=}}}2
{*=}{{{$^\ast$=}}}2
{/=}{{{/=}}}2
% '-' and '?' belong in the same list: a second literate= key would override the one above
{-}{{{\color{ipython_purple}-}}}1
{?}{{{\color{ipython_purple}?}}}1,
%
identifierstyle=\color{black}\ttfamily,
commentstyle=\color{ipython_red}\ttfamily,
stringstyle=\color{ipython_red}\ttfamily,
keepspaces=true,
showspaces=false,
showstringspaces=false,
%
rulecolor=\color{ipython_frame},
frame=single,
frameround={t}{t}{t}{t},
framexleftmargin=6mm,
numbers=left,
numberstyle=\tiny\color{halfgray},
%
%
backgroundcolor=\color{ipython_bg},
% extendedchars=true,
basicstyle=\scriptsize,
keywordstyle=\color{ipython_green}\ttfamily,
morekeywords = [3]{Int, Double},
morekeywords = [2]{foldRight, case},
keywordstyle = [3]{\color{ipython_purple}\ttfamily},
keywordstyle = [2]{\color{ipython_cyan}\ttfamily},
}
\begin{document}
\begin{lstfloat}
\begin{lstlisting}[language=iPython]
import breeze.stats.distributions.Uniform
import breeze.stats.distributions.Gaussian
import scala.language.postfixOps
object Activation {
  def apply(x: Double): Double = math.max(0, x)
  def d(x: Double): Double = if (x > 0) 1 else 0
}
class RSNN(val n: Int, val gamma: Double = 0.001) {
  val g_unif = Uniform(-10, 10)
  val g_gauss = Gaussian(0, 5)
  val xis = g_unif.sample(n)
  val vs = g_gauss.sample(n)
  val bs = xis zip vs map { case (xi, v) => xi * v }
  def computeL1(x: Double) = (bs zip vs) map {
    case (b, v) => Activation(b + v * x) }
  def computeL2(l1: Seq[Double], ws: Seq[Double]): Double =
    (l1 zip ws) map { case (l, w) => w * l } sum
  def output(ws: Seq[Double])(x: Double): Double =
    computeL2(computeL1(x), ws)
  def learn(data: Seq[(Double, Double)], ws: Seq[Double],
            lam: Double, gamma: Double): Seq[Double] = {
    lazy val deltas = data.map {
      case (x, y) =>
        val l1 = computeL1(x)        // n hidden-layer activations
        val out = computeL2(l1, ws)  // scalar network output
        (l1 zip ws) map { case (l1, w) => (l1 * 2 * (out - y) +
          lam * 2 * w) * gamma * -1 }
    }
    deltas.foldRight(ws)(
      (delta, ws) => ws zip (delta) map { case (w, d) => w + d })
  }
  def train(data: Seq[(Double, Double)], iter: Int, lam: Double,
            gamma: Double = gamma): (Seq[Double], Double => Double) = {
    val ws = (1 to iter).foldRight((1 to n).map(
      _ => 0.0): Seq[Double])((i, w) => {
      println(s"Training iteration $i")
      println(w.sum / w.length)
      learn(data, w, lam, gamma / 10)
    })
    (ws, output(ws))
  }
}
\end{lstlisting}
\caption{Scala code used to build and train the ridge-penalized
randomized shallow neural network in .... The parameter \textit{lam}
in the \textit{train} function corresponds to the $\lambda$ parameter in the
error function. The parameters \textit{n} and \textit{gamma} set the
number of hidden nodes and the step size used in training.}
\end{lstfloat}
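The update computed in \textit{learn} is, up to the factor \textit{gamma} and the sign, the negative gradient of a ridge-penalized squared error; reconstructed from the listing (the source does not state it explicitly), for a single observation $(x, y)$ it reads
\[
  L_\lambda(w) = \bigl(\mathrm{output}_w(x) - y\bigr)^2 + \lambda \sum_{k=1}^{n} w_k^2,
  \qquad
  \frac{\partial L_\lambda}{\partial w_k} = 2\, l_k(x)\bigl(\mathrm{output}_w(x) - y\bigr) + 2 \lambda w_k,
\]
where $l_k(x)$ is the $k$-th hidden-layer activation returned by \textit{computeL1}.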
\clearpage
\begin{lstlisting}[language=iPython]
import tensorflow as tf
import numpy as np
from tensorflow.keras.callbacks import CSVLogger
from tensorflow.keras.preprocessing.image import ImageDataGenerator
mnist = tf.keras.datasets.mnist
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(x_train.shape[0], 28, 28, 1)
x_train = x_train / 255.0
x_test = x_test.reshape(x_test.shape[0], 28, 28, 1)
x_test = x_test / 255.0
y_train = tf.keras.utils.to_categorical(y_train)
y_test = tf.keras.utils.to_categorical(y_test)
model = tf.keras.models.Sequential()
model.add(tf.keras.layers.Conv2D(24,kernel_size=5,padding='same',activation='relu',input_shape=(28,28,1)))
model.add(tf.keras.layers.MaxPool2D())
model.add(tf.keras.layers.Conv2D(64,kernel_size=5,padding='same',activation='relu'))
model.add(tf.keras.layers.MaxPool2D(padding='same'))
model.add(tf.keras.layers.Flatten())
model.add(tf.keras.layers.Dense(256, activation='relu'))
model.add(tf.keras.layers.Dropout(0.2))
model.add(tf.keras.layers.Dense(10, activation='softmax'))
model.compile(optimizer='adam', loss="categorical_crossentropy",
              metrics=["accuracy"])
datagen = ImageDataGenerator(
    rotation_range = 30,
    zoom_range = 0.15,
    width_shift_range=2,
    height_shift_range=2,
    shear_range = 1)
csv_logger = CSVLogger(<Target File>)
history = model.fit(datagen.flow(x_train, y_train, batch_size=50),
                    validation_data=(x_test, y_test),
                    epochs=125, callbacks=[csv_logger],
                    steps_per_epoch = x_train.shape[0]//50)
\end{lstlisting}
\clearpage
\begin{lstlisting}[language=iPython]
import tensorflow as tf
import numpy as np
from tensorflow.keras.callbacks import CSVLogger
from tensorflow.keras.preprocessing.image import ImageDataGenerator
mnist = tf.keras.datasets.fashion_mnist
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(x_train.shape[0], 28, 28, 1)
x_test = x_test.reshape(x_test.shape[0], 28, 28, 1)
x_train, x_test = x_train / 255.0, x_test / 255.0
y_train = tf.keras.utils.to_categorical(y_train)
y_test = tf.keras.utils.to_categorical(y_test)
model = tf.keras.Sequential()
model.add(tf.keras.layers.Conv2D(filters = 32, kernel_size = (3, 3), activation='relu',
                                 input_shape = (28, 28, 1), padding='same'))
model.add(tf.keras.layers.Conv2D(filters = 32, kernel_size = (2, 2), activation='relu', padding = 'same'))
model.add(tf.keras.layers.MaxPool2D(strides=(2,2)))
model.add(tf.keras.layers.Conv2D(filters = 64, kernel_size = (3, 3), activation='relu', padding='same'))
model.add(tf.keras.layers.Conv2D(filters = 64, kernel_size = (3, 3), activation='relu', padding='same'))
model.add(tf.keras.layers.MaxPool2D(strides=(2,2)))
model.add(tf.keras.layers.Flatten())
model.add(tf.keras.layers.Dense(256, activation='relu'))
model.add(tf.keras.layers.Dropout(0.2))
model.add(tf.keras.layers.Dense(10, activation='softmax'))
model.compile(optimizer=tf.keras.optimizers.Adam(lr = 1e-3), loss="categorical_crossentropy", metrics=["accuracy"])
datagen = ImageDataGenerator(
    rotation_range = 15,
    zoom_range = 0.1,
    width_shift_range=2,
    height_shift_range=2,
    shear_range = 0.5,
    fill_mode = 'constant',
    cval = 0)
csv_logger = CSVLogger(<Target File>)
history = model.fit(datagen.flow(x_train, y_train, batch_size=30),
                    steps_per_epoch=2000,
                    validation_data=(x_test, y_test),
                    epochs=125, callbacks=[csv_logger],
                    shuffle=True)
\end{lstlisting}
\begin{lstlisting}[language=iPython]
def get_random_sample(a, b, number_of_samples=10):
    x = []
    y = []
    for category_number in range(0,10):
        # get all samples of a category
        train_data_category = a[b==category_number]
        # pick a number of random samples from the category
        train_data_category = train_data_category[np.random.randint(
            train_data_category.shape[0], size=number_of_samples), :]
        x.extend(train_data_category)
        y.append([category_number]*number_of_samples)
    return (np.asarray(x).reshape(-1, 28, 28, 1),
            np.asarray(y).reshape(10*number_of_samples, 1))
\end{lstlisting}
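A minimal usage sketch (not part of the original scripts): the mask \textit{b == category\_number} requires integer class labels, so the function would be applied to the raw label vector before one-hot encoding, for example on Fashion-MNIST:
\begin{lstlisting}[language=iPython]
# Usage sketch (assumed, not from the original source): draw a balanced
# subset of 10 images per class from the raw, integer-labelled data.
import numpy as np
import tensorflow as tf
(x_raw, y_raw), _ = tf.keras.datasets.fashion_mnist.load_data()
x_small, y_small = get_random_sample(x_raw, y_raw, number_of_samples=10)
# x_small has shape (100, 28, 28, 1); y_small has shape (100, 1)
\end{lstlisting}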
\end{document}