added introductions to nn

main
Tobias Arndt 5 years ago
parent e2df388229
commit eb18ac33b2

12
.gitignore vendored

@@ -0,0 +1,12 @@
# weird latex files
*.log
*.aux
*.toc
*.gz
*.xml
TeX/auto/*
main-blx.bib
# emacs autosaves
*.tex~

@@ -0,0 +1,124 @@
%%% Local Variables:
%%% mode: latex
%%% TeX-master: "main"
%%% End:
\section{Introduction to Neural Networks}
Neural networks (NNs) are mathematical constructs inspired by the way
neurons are connected in nature. A neural network consists of an input
and an output layer with an arbitrary number of hidden layers between
them. Each layer consists of a number of neurons (nodes), with the
number of nodes in the input and output layers corresponding to the
dimensions of the input and output.\par
Each neuron receives the outputs of all neurons in the previous layer;
the only exceptions are the neurons of the input layer, which receive
the components of the input directly.
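The output of a single neuron can be sketched as follows; note that the
activation function $\sigma$ and the weight and bias notation $w$, $b$
are assumed here for illustration and are not fixed by the description
above. For neuron $k$ in layer $m$, given the outputs
$z_1, \dots, z_{n_{m-1}}$ of the previous layer,
\begin{align*}
  \mathcal{N}_{m,k}(z) = \sigma\left(b_{m,k} + \sum_{j=1}^{n_{m-1}}
  w_{m,k,j}\, z_j\right).
\end{align*}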
\tikzset{%
every neuron/.style={
circle,
draw,
minimum size=1cm
},
neuron missing/.style={
draw=none,
scale=1.5,
text height=0.333cm,
execute at begin node=\color{black}$\vdots$
},
}
\begin{figure}[h!]
\centering
\fbox{
\resizebox{\textwidth}{!}{%
\begin{tikzpicture}[x=1.75cm, y=1.75cm, >=stealth]
\foreach \m/\l [count=\y] in {1,2,3,missing,4}
\node [every neuron/.try, neuron \m/.try] (input-\m) at (0,2.5-\y) {};
\foreach \m [count=\y] in {1,missing,2}
\node [every neuron/.try, neuron \m/.try ] (hidden1-\m) at (2,2-\y*1.25) {};
\foreach \m [count=\y] in {1,missing,2}
\node [every neuron/.try, neuron \m/.try ] (hidden2-\m) at (5,2-\y*1.25) {};
\foreach \m [count=\y] in {1,missing,2}
\node [every neuron/.try, neuron \m/.try ] (output-\m) at (7,1.5-\y) {};
\foreach \l [count=\i] in {1,2,3,d_i}
\draw [<-] (input-\i) -- ++(-1,0)
node [above, midway] {$x_{\l}$};
\foreach \l [count=\i] in {1,n_1}
\node [above] at (hidden1-\i.north) {$\mathcal{N}_{1,\l}$};
\foreach \l [count=\i] in {1,n_l}
\node [above] at (hidden2-\i.north) {$\mathcal{N}_{l,\l}$};
\foreach \l [count=\i] in {1,d_o}
\draw [->] (output-\i) -- ++(1,0)
node [above, midway] {$O_{\l}$};
\foreach \i in {1,...,4}
\foreach \j in {1,...,2}
\draw [->] (input-\i) -- (hidden1-\j);
\foreach \i in {1,...,2}
\foreach \j in {1,...,2}
\draw [->] (hidden1-\i) -- (hidden2-\j);
\foreach \i in {1,...,2}
\foreach \j in {1,...,2}
\draw [->] (hidden2-\i) -- (output-\j);
\node [align=center, above] at (0,2) {Input\\layer};
\node [align=center, above] at (2,2) {Hidden \\layer $1$};
\node [align=center, above] at (5,2) {Hidden \\layer $l$};
\node [align=center, above] at (7,2) {Output \\layer};
\node[fill=white,scale=1.5,inner xsep=10pt,inner ysep=10mm] at ($(hidden1-1)!.5!(hidden2-2)$) {$\dots$};
\end{tikzpicture}}}
\caption{Structure of a neural network with $d_i$ inputs, $d_o$
  outputs and $l$ hidden layers of widths $n_1, \dots, n_l$.}
\end{figure}
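The network as a whole thus realizes a function from $\mathbb{R}^{d_i}$
to $\mathbb{R}^{d_o}$ by composing its layers. As a sketch in the same
assumed notation, with $W_m$ denoting the affine map that collects the
weights and biases of layer $m$ and $\sigma$ applied componentwise,
\begin{align*}
  x \longmapsto W_{l+1}\Bigl(\sigma\bigl(W_l(\cdots \sigma(W_1(x))
  \cdots)\bigr)\Bigr).
\end{align*}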
\begin{tikzpicture}[x=1.5cm, y=1.5cm, >=stealth]
\foreach \m/\l [count=\y] in {1}
\node [every neuron/.try, neuron \m/.try] (input-\m) at (0,0.5-\y) {};
\foreach \m [count=\y] in {1,2,missing,3,4}
\node [every neuron/.try, neuron \m/.try ] (hidden-\m) at (1.25,3.25-\y*1.25) {};
\foreach \m [count=\y] in {1}
\node [every neuron/.try, neuron \m/.try ] (output-\m) at (2.5,0.5-\y) {};
\foreach \l [count=\i] in {1}
\draw [<-] (input-\i) -- ++(-1,0)
node [above, midway] {$x$};
\foreach \l [count=\i] in {1,2,n-1,n}
\node [above] at (hidden-\i.north) {$\mathcal{N}_{\l}$};
\foreach \l [count=\i] in {1}
\draw [->] (output-\i) -- ++(1,0)
node [above, midway] {$y$};
\foreach \i in {1}
\foreach \j in {1,...,4}
\draw [->] (input-\i) -- (hidden-\j);
\foreach \i in {1,...,4}
\foreach \j in {1}
\draw [->] (hidden-\i) -- (output-\j);
\node [align=center, above] at (0,1) {Input\\layer};
\node [align=center, above] at (1.25,3) {Hidden layer};
\node [align=center, above] at (2.5,1) {Output\\layer};
\end{tikzpicture}
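In the same assumed notation, the single-hidden-layer network depicted
above computes a function of the form
\begin{align*}
  x \longmapsto \sum_{k=1}^{n} v_k\, \sigma(b_k + w_k x),
\end{align*}
where $v_k$ denotes the output weight of the $k$-th hidden neuron;
weights of this kind appear again as the $v_k$ in
Theorem~\ref{theo:main1}.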

Binary file not shown.

@@ -26,7 +26,10 @@
\usepackage{makecell}
\usepackage{dsfont}
\usepackage{tikz}
\usetikzlibrary{positioning}
\usetikzlibrary{matrix,chains,positioning,decorations.pathreplacing,arrows}
\usetikzlibrary{positioning,calc}
\usepackage{pgfplots}
\usepgfplotslibrary{colorbrewer}
\usepackage{subcaption}
@@ -47,7 +50,7 @@
\sectionfont{\centering}
\input{insbox}
\parindent0in
%\parindent0in
\pagestyle{plain}
\thispagestyle{plain}
\newtheorem{Theorem}{Theorem}[section]
@@ -83,6 +86,11 @@
\tableofcontents
\newpage
% Introduction Neural Networks
\input{introduction_nn}
\newpage
% Theorem 3.8
\input{theo_3_8.tex}

@@ -24,7 +24,7 @@ limit of RN as the number of nodes is increased.
g_{\xi}(x)\mathbb{E}\left[ v_k^2 \vert \xi_k = x \right], \forall x
\in \mathbb{R}
\end{align*}
and \(RN^{*, \tilde{\lambda}}}\), \(f^{*,\tilde{\lambda}}_{g, \pm}\)
and \(RN^{*, \tilde{\lambda}}\), \(f^{*,\tilde{\lambda}}_{g, \pm}\)
as defined in ??? and ??? respectively.
\end{Theorem}
In order to prove Theorem~\ref{theo:main1} we need to prove a number of
