Plots to Figure
parent 06d93ef937
commit 340e0017c4
@@ -0,0 +1,6 @@
{
 "cells": [],
 "metadata": {},
 "nbformat": 4,
 "nbformat_minor": 2
}
@ -0,0 +1,266 @@
|
|||||||
|
{
|
||||||
|
"cells": [
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 2,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"ename": "ModuleNotFoundError",
|
||||||
|
"evalue": "No module named 'numpy'",
|
||||||
|
"output_type": "error",
|
||||||
|
"traceback": [
|
||||||
|
"\u001b[0;31m--------------------------------------------------------\u001b[0m",
|
||||||
|
"\u001b[0;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)",
|
||||||
|
"\u001b[0;32m<ipython-input-2-d9bbc8b73862>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mos\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0;32mimport\u001b[0m \u001b[0mnumpy\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 3\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mmatplotlib\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpyplot\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mplt\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0mplt\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrcdefaults\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mmatplotlib\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlines\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mLine2D\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||||
|
"\u001b[0;31mModuleNotFoundError\u001b[0m: No module named 'numpy'"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"import os\n",
|
||||||
|
"import numpy as np\n",
|
||||||
|
"import matplotlib.pyplot as plt\n",
|
||||||
|
"plt.rcdefaults()\n",
|
||||||
|
"from matplotlib.lines import Line2D\n",
|
||||||
|
"from matplotlib.patches import Rectangle\n",
|
||||||
|
"from matplotlib.patches import Circle\n",
|
||||||
|
"\n",
|
||||||
|
"NumDots = 4\n",
|
||||||
|
"NumConvMax = 8\n",
|
||||||
|
"NumFcMax = 20\n",
|
||||||
|
"White = 1.\n",
|
||||||
|
"Light = 0.7\n",
|
||||||
|
"Medium = 0.5\n",
|
||||||
|
"Dark = 0.3\n",
|
||||||
|
"Darker = 0.15\n",
|
||||||
|
"Black = 0.\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"def add_layer(patches, colors, size=(24, 24), num=5,\n",
|
||||||
|
" top_left=[0, 0],\n",
|
||||||
|
" loc_diff=[3, -3],\n",
|
||||||
|
" ):\n",
|
||||||
|
" # add a rectangle\n",
|
||||||
|
" top_left = np.array(top_left)\n",
|
||||||
|
" loc_diff = np.array(loc_diff)\n",
|
||||||
|
" loc_start = top_left - np.array([0, size[0]])\n",
|
||||||
|
" for ind in range(num):\n",
|
||||||
|
" patches.append(Rectangle(loc_start + ind * loc_diff, size[1], size[0]))\n",
|
||||||
|
" if ind % 2:\n",
|
||||||
|
" colors.append(Medium)\n",
|
||||||
|
" else:\n",
|
||||||
|
" colors.append(Light)\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"def add_layer_with_omission(patches, colors, size=(24, 24),\n",
|
||||||
|
" num=5, num_max=8,\n",
|
||||||
|
" num_dots=4,\n",
|
||||||
|
" top_left=[0, 0],\n",
|
||||||
|
" loc_diff=[3, -3],\n",
|
||||||
|
" ):\n",
|
||||||
|
" # add a rectangle\n",
|
||||||
|
" top_left = np.array(top_left)\n",
|
||||||
|
" loc_diff = np.array(loc_diff)\n",
|
||||||
|
" loc_start = top_left - np.array([0, size[0]])\n",
|
||||||
|
" this_num = min(num, num_max)\n",
|
||||||
|
" start_omit = (this_num - num_dots) // 2\n",
|
||||||
|
" end_omit = this_num - start_omit\n",
|
||||||
|
" start_omit -= 1\n",
|
||||||
|
" for ind in range(this_num):\n",
|
||||||
|
" if (num > num_max) and (start_omit < ind < end_omit):\n",
|
||||||
|
" omit = True\n",
|
||||||
|
" else:\n",
|
||||||
|
" omit = False\n",
|
||||||
|
"\n",
|
||||||
|
" if omit:\n",
|
||||||
|
" patches.append(\n",
|
||||||
|
" Circle(loc_start + ind * loc_diff + np.array(size) / 2, 0.5))\n",
|
||||||
|
" else:\n",
|
||||||
|
" patches.append(Rectangle(loc_start + ind * loc_diff,\n",
|
||||||
|
" size[1], size[0]))\n",
|
||||||
|
"\n",
|
||||||
|
" if omit:\n",
|
||||||
|
" colors.append(Black)\n",
|
||||||
|
" elif ind % 2:\n",
|
||||||
|
" colors.append(Medium)\n",
|
||||||
|
" else:\n",
|
||||||
|
" colors.append(Light)\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"def add_mapping(patches, colors, start_ratio, end_ratio, patch_size, ind_bgn,\n",
|
||||||
|
" top_left_list, loc_diff_list, num_show_list, size_list):\n",
|
||||||
|
"\n",
|
||||||
|
" start_loc = top_left_list[ind_bgn] \\\n",
|
||||||
|
" + (num_show_list[ind_bgn] - 1) * np.array(loc_diff_list[ind_bgn]) \\\n",
|
||||||
|
" + np.array([start_ratio[0] * (size_list[ind_bgn][1] - patch_size[1]),\n",
|
||||||
|
" - start_ratio[1] * (size_list[ind_bgn][0] - patch_size[0])]\n",
|
||||||
|
" )\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
" end_loc = top_left_list[ind_bgn + 1] \\\n",
|
||||||
|
" + (num_show_list[ind_bgn + 1] - 1) * np.array(\n",
|
||||||
|
" loc_diff_list[ind_bgn + 1]) \\\n",
|
||||||
|
" + np.array([end_ratio[0] * size_list[ind_bgn + 1][1],\n",
|
||||||
|
" - end_ratio[1] * size_list[ind_bgn + 1][0]])\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
" patches.append(Rectangle(start_loc, patch_size[1], -patch_size[0]))\n",
|
||||||
|
" colors.append(Dark)\n",
|
||||||
|
" patches.append(Line2D([start_loc[0], end_loc[0]],\n",
|
||||||
|
" [start_loc[1], end_loc[1]]))\n",
|
||||||
|
" colors.append(Darker)\n",
|
||||||
|
" patches.append(Line2D([start_loc[0] + patch_size[1], end_loc[0]],\n",
|
||||||
|
" [start_loc[1], end_loc[1]]))\n",
|
||||||
|
" colors.append(Darker)\n",
|
||||||
|
" patches.append(Line2D([start_loc[0], end_loc[0]],\n",
|
||||||
|
" [start_loc[1] - patch_size[0], end_loc[1]]))\n",
|
||||||
|
" colors.append(Darker)\n",
|
||||||
|
" patches.append(Line2D([start_loc[0] + patch_size[1], end_loc[0]],\n",
|
||||||
|
" [start_loc[1] - patch_size[0], end_loc[1]]))\n",
|
||||||
|
" colors.append(Darker)\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"def label(xy, text, xy_off=[0, 4]):\n",
|
||||||
|
" plt.text(xy[0] + xy_off[0], xy[1] + xy_off[1], text,\n",
|
||||||
|
" family='sans-serif', size=8)\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"if __name__ == '__main__':\n",
|
||||||
|
"\n",
|
||||||
|
" fc_unit_size = 2\n",
|
||||||
|
" layer_width = 40\n",
|
||||||
|
" flag_omit = True\n",
|
||||||
|
"\n",
|
||||||
|
" patches = []\n",
|
||||||
|
" colors = []\n",
|
||||||
|
"\n",
|
||||||
|
" fig, ax = plt.subplots()\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
" ############################\n",
|
||||||
|
" # conv layers\n",
|
||||||
|
" size_list = [(32, 32), (18, 18), (10, 10), (6, 6), (4, 4)]\n",
|
||||||
|
" num_list = [3, 32, 32, 48, 48]\n",
|
||||||
|
" x_diff_list = [0, layer_width, layer_width, layer_width, layer_width]\n",
|
||||||
|
" text_list = ['Inputs'] + ['Feature\\nmaps'] * (len(size_list) - 1)\n",
|
||||||
|
" loc_diff_list = [[3, -3]] * len(size_list)\n",
|
||||||
|
"\n",
|
||||||
|
" num_show_list = list(map(min, num_list, [NumConvMax] * len(num_list)))\n",
|
||||||
|
" top_left_list = np.c_[np.cumsum(x_diff_list), np.zeros(len(x_diff_list))]\n",
|
||||||
|
"\n",
|
||||||
|
" for ind in range(len(size_list)-1,-1,-1):\n",
|
||||||
|
" if flag_omit:\n",
|
||||||
|
" add_layer_with_omission(patches, colors, size=size_list[ind],\n",
|
||||||
|
" num=num_list[ind],\n",
|
||||||
|
" num_max=NumConvMax,\n",
|
||||||
|
" num_dots=NumDots,\n",
|
||||||
|
" top_left=top_left_list[ind],\n",
|
||||||
|
" loc_diff=loc_diff_list[ind])\n",
|
||||||
|
" else:\n",
|
||||||
|
" add_layer(patches, colors, size=size_list[ind],\n",
|
||||||
|
" num=num_show_list[ind],\n",
|
||||||
|
" top_left=top_left_list[ind], loc_diff=loc_diff_list[ind])\n",
|
||||||
|
" label(top_left_list[ind], text_list[ind] + '\\n{}@{}x{}'.format(\n",
|
||||||
|
" num_list[ind], size_list[ind][0], size_list[ind][1]))\n",
|
||||||
|
"\n",
|
||||||
|
" ############################\n",
|
||||||
|
" # in between layers\n",
|
||||||
|
" start_ratio_list = [[0.4, 0.5], [0.4, 0.8], [0.4, 0.5], [0.4, 0.8]]\n",
|
||||||
|
" end_ratio_list = [[0.4, 0.5], [0.4, 0.8], [0.4, 0.5], [0.4, 0.8]]\n",
|
||||||
|
" patch_size_list = [(5, 5), (2, 2), (5, 5), (2, 2)]\n",
|
||||||
|
" ind_bgn_list = range(len(patch_size_list))\n",
|
||||||
|
" text_list = ['Convolution', 'Max-pooling', 'Convolution', 'Max-pooling']\n",
|
||||||
|
"\n",
|
||||||
|
" for ind in range(len(patch_size_list)):\n",
|
||||||
|
" add_mapping(\n",
|
||||||
|
" patches, colors, start_ratio_list[ind], end_ratio_list[ind],\n",
|
||||||
|
" patch_size_list[ind], ind,\n",
|
||||||
|
" top_left_list, loc_diff_list, num_show_list, size_list)\n",
|
||||||
|
" label(top_left_list[ind], text_list[ind] + '\\n{}x{} kernel'.format(\n",
|
||||||
|
" patch_size_list[ind][0], patch_size_list[ind][1]), xy_off=[26, -65]\n",
|
||||||
|
" )\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
" ############################\n",
|
||||||
|
" # fully connected layers\n",
|
||||||
|
" size_list = [(fc_unit_size, fc_unit_size)] * 3\n",
|
||||||
|
" num_list = [768, 500, 2]\n",
|
||||||
|
" num_show_list = list(map(min, num_list, [NumFcMax] * len(num_list)))\n",
|
||||||
|
" x_diff_list = [sum(x_diff_list) + layer_width, layer_width, layer_width]\n",
|
||||||
|
" top_left_list = np.c_[np.cumsum(x_diff_list), np.zeros(len(x_diff_list))]\n",
|
||||||
|
" loc_diff_list = [[fc_unit_size, -fc_unit_size]] * len(top_left_list)\n",
|
||||||
|
" text_list = ['Hidden\\nunits'] * (len(size_list) - 1) + ['Outputs']\n",
|
||||||
|
"\n",
|
||||||
|
" for ind in range(len(size_list)):\n",
|
||||||
|
" if flag_omit:\n",
|
||||||
|
" add_layer_with_omission(patches, colors, size=size_list[ind],\n",
|
||||||
|
" num=num_list[ind],\n",
|
||||||
|
" num_max=NumFcMax,\n",
|
||||||
|
" num_dots=NumDots,\n",
|
||||||
|
" top_left=top_left_list[ind],\n",
|
||||||
|
" loc_diff=loc_diff_list[ind])\n",
|
||||||
|
" else:\n",
|
||||||
|
" add_layer(patches, colors, size=size_list[ind],\n",
|
||||||
|
" num=num_show_list[ind],\n",
|
||||||
|
" top_left=top_left_list[ind],\n",
|
||||||
|
" loc_diff=loc_diff_list[ind])\n",
|
||||||
|
" label(top_left_list[ind], text_list[ind] + '\\n{}'.format(\n",
|
||||||
|
" num_list[ind]))\n",
|
||||||
|
"\n",
|
||||||
|
" text_list = ['Flatten\\n', 'Fully\\nconnected', 'Fully\\nconnected']\n",
|
||||||
|
"\n",
|
||||||
|
" for ind in range(len(size_list)):\n",
|
||||||
|
" label(top_left_list[ind], text_list[ind], xy_off=[-10, -65])\n",
|
||||||
|
"\n",
|
||||||
|
" ############################\n",
|
||||||
|
" for patch, color in zip(patches, colors):\n",
|
||||||
|
" patch.set_color(color * np.ones(3))\n",
|
||||||
|
" if isinstance(patch, Line2D):\n",
|
||||||
|
" ax.add_line(patch)\n",
|
||||||
|
" else:\n",
|
||||||
|
" patch.set_edgecolor(Black * np.ones(3))\n",
|
||||||
|
" ax.add_patch(patch)\n",
|
||||||
|
"\n",
|
||||||
|
" plt.tight_layout()\n",
|
||||||
|
" plt.axis('equal')\n",
|
||||||
|
" plt.axis('off')\n",
|
||||||
|
" plt.show()\n",
|
||||||
|
" fig.set_size_inches(8, 2.5)\n",
|
||||||
|
"\n",
|
||||||
|
" # fig_dir = './'\n",
|
||||||
|
" # fig_ext = '.png'\n",
|
||||||
|
" # fig.savefig(os.path.join(fig_dir, 'convnet_fig' + fig_ext),\n",
|
||||||
|
" # bbox_inches='tight', pad_inches=0)\n",
|
||||||
|
"\n",
|
||||||
|
"\n"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"kernelspec": {
|
||||||
|
"display_name": "Python 3",
|
||||||
|
"language": "python",
|
||||||
|
"name": "python3"
|
||||||
|
},
|
||||||
|
"language_info": {
|
||||||
|
"codemirror_mode": {
|
||||||
|
"name": "ipython",
|
||||||
|
"version": 3
|
||||||
|
},
|
||||||
|
"file_extension": ".py",
|
||||||
|
"mimetype": "text/x-python",
|
||||||
|
"name": "python",
|
||||||
|
"nbconvert_exporter": "python",
|
||||||
|
"pygments_lexer": "ipython3",
|
||||||
|
"version": "3.7.5"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"nbformat": 4,
|
||||||
|
"nbformat_minor": 2
|
||||||
|
}
|
@@ -0,0 +1,217 @@
import os
import numpy as np
import matplotlib.pyplot as plt
plt.rcdefaults()
from matplotlib.lines import Line2D
from matplotlib.patches import Rectangle
from matplotlib.patches import Circle

NumDots = 4
NumConvMax = 8
NumFcMax = 20
White = 1.
Light = 0.7
Medium = 0.5
Dark = 0.3
Darker = 0.15
Black = 0.


def add_layer(patches, colors, size=(24, 24), num=5,
              top_left=[0, 0],
              loc_diff=[3, -3],
              ):
    # add a rectangle
    top_left = np.array(top_left)
    loc_diff = np.array(loc_diff)
    loc_start = top_left - np.array([0, size[0]])
    for ind in range(num):
        patches.append(Rectangle(loc_start + ind * loc_diff, size[1], size[0]))
        if ind % 2:
            colors.append(Medium)
        else:
            colors.append(Light)


def add_layer_with_omission(patches, colors, size=(24, 24),
                            num=5, num_max=8,
                            num_dots=4,
                            top_left=[0, 0],
                            loc_diff=[3, -3],
                            ):
    # add a rectangle
    top_left = np.array(top_left)
    loc_diff = np.array(loc_diff)
    loc_start = top_left - np.array([0, size[0]])
    this_num = min(num, num_max)
    start_omit = (this_num - num_dots) // 2
    end_omit = this_num - start_omit
    start_omit -= 1
    for ind in range(this_num):
        if (num > num_max) and (start_omit < ind < end_omit):
            omit = True
        else:
            omit = False

        if omit:
            patches.append(
                Circle(loc_start + ind * loc_diff + np.array(size) / 2, 0.5))
        else:
            patches.append(Rectangle(loc_start + ind * loc_diff,
                                     size[1], size[0]))

        if omit:
            colors.append(Black)
        elif ind % 2:
            colors.append(Medium)
        else:
            colors.append(Light)


def add_mapping(patches, colors, start_ratio, end_ratio, patch_size, ind_bgn,
                top_left_list, loc_diff_list, num_show_list, size_list):

    start_loc = top_left_list[ind_bgn] \
        + (num_show_list[ind_bgn] - 1) * np.array(loc_diff_list[ind_bgn]) \
        + np.array([start_ratio[0] * (size_list[ind_bgn][1] - patch_size[1]),
                    - start_ratio[1] * (size_list[ind_bgn][0] - patch_size[0])]
                   )




    end_loc = top_left_list[ind_bgn + 1] \
        + (num_show_list[ind_bgn + 1] - 1) * np.array(
            loc_diff_list[ind_bgn + 1]) \
        + np.array([end_ratio[0] * size_list[ind_bgn + 1][1],
                    - end_ratio[1] * size_list[ind_bgn + 1][0]])


    patches.append(Rectangle(start_loc, patch_size[1], -patch_size[0]))
    colors.append(Dark)
    patches.append(Line2D([start_loc[0], end_loc[0]],
                          [start_loc[1], end_loc[1]]))
    colors.append(Darker)
    patches.append(Line2D([start_loc[0] + patch_size[1], end_loc[0]],
                          [start_loc[1], end_loc[1]]))
    colors.append(Darker)
    patches.append(Line2D([start_loc[0], end_loc[0]],
                          [start_loc[1] - patch_size[0], end_loc[1]]))
    colors.append(Darker)
    patches.append(Line2D([start_loc[0] + patch_size[1], end_loc[0]],
                          [start_loc[1] - patch_size[0], end_loc[1]]))
    colors.append(Darker)



def label(xy, text, xy_off=[0, 4]):
    plt.text(xy[0] + xy_off[0], xy[1] + xy_off[1], text,
             family='sans-serif', size=8)


if __name__ == '__main__':

    fc_unit_size = 2
    layer_width = 40
    flag_omit = False

    patches = []
    colors = []

    fig, ax = plt.subplots()


    ############################
    # conv layers
    size_list = [(28, 28), (28, 28), (28, 28), (14, 14), (14, 14), (14, 14), (7, 7)]
    num_list = [1, 32, 32, 32, 64, 64, 64]
    x_diff_list = [0, layer_width, layer_width, layer_width, layer_width, layer_width, layer_width]
    text_list = ['Inputs'] + ['Feature\nmaps'] * (len(size_list) - 1)
    loc_diff_list = [[3, -3]] * len(size_list)

    num_show_list = list(map(min, num_list, [NumConvMax] * len(num_list)))
    top_left_list = np.c_[np.cumsum(x_diff_list), np.zeros(len(x_diff_list))]

    for ind in range(len(size_list) - 1, -1, -1):
        if flag_omit:
            add_layer_with_omission(patches, colors, size=size_list[ind],
                                    num=num_list[ind],
                                    num_max=NumConvMax,
                                    num_dots=NumDots,
                                    top_left=top_left_list[ind],
                                    loc_diff=loc_diff_list[ind])
        else:
            add_layer(patches, colors, size=size_list[ind],
                      num=num_show_list[ind],
                      top_left=top_left_list[ind], loc_diff=loc_diff_list[ind])
        label(top_left_list[ind], text_list[ind] + '\n{}@{}x{}'.format(
            num_list[ind], size_list[ind][0], size_list[ind][1]))

    ############################
    # in between layers
    start_ratio_list = [[0.4, 0.5], [0.4, 0.8], [0.4, 0.8], [0.4, 0.5], [0.4, 0.8], [0.4, 0.8]]
    end_ratio_list = [[0.4, 0.5], [0.4, 0.8], [0.4, 0.8], [0.4, 0.5], [0.4, 0.8], [0.4, 0.8]]
    patch_size_list = [(3, 3), (3, 3), (2, 2), (3, 3), (3, 3), (2, 2)]
    ind_bgn_list = range(len(patch_size_list))
    text_list = ['Conv.', 'Conv.', 'Max-pool.', 'Conv.', 'Conv.', 'Max-pool.']

    for ind in range(len(patch_size_list)):
        add_mapping(
            patches, colors, start_ratio_list[ind], end_ratio_list[ind],
            patch_size_list[ind], ind,
            top_left_list, loc_diff_list, num_show_list, size_list)
        label(top_left_list[ind], text_list[ind] + '\n{}x{} kernel'.format(
            patch_size_list[ind][0], patch_size_list[ind][1]), xy_off=[26, -65]
            )


    ############################
    # fully connected layers
    size_list = [(fc_unit_size, fc_unit_size)] * 2
    num_list = [256, 10]
    num_show_list = list(map(min, num_list, [NumFcMax] * len(num_list)))
    x_diff_list = [sum(x_diff_list) + layer_width, layer_width, layer_width]
    top_left_list = np.c_[np.cumsum(x_diff_list), np.zeros(len(x_diff_list))]
    loc_diff_list = [[fc_unit_size, -fc_unit_size]] * len(top_left_list)
    text_list = ['Hidden\nunits'] * (len(size_list) - 1) + ['Outputs']

    for ind in range(len(size_list)):
        if flag_omit:
            add_layer_with_omission(patches, colors, size=size_list[ind],
                                    num=num_list[ind],
                                    num_max=NumFcMax,
                                    num_dots=NumDots,
                                    top_left=top_left_list[ind],
                                    loc_diff=loc_diff_list[ind])
        else:
            add_layer(patches, colors, size=size_list[ind],
                      num=num_show_list[ind],
                      top_left=top_left_list[ind],
                      loc_diff=loc_diff_list[ind])
        label(top_left_list[ind], text_list[ind] + '\n{}'.format(
            num_list[ind]))

    text_list = ['Flatten\n', 'Fully\nconnected']

    for ind in range(len(size_list)):
        label(top_left_list[ind], text_list[ind], xy_off=[-10, -65])

    ############################
    for patch, color in zip(patches, colors):
        patch.set_color(color * np.ones(3))
        if isinstance(patch, Line2D):
            ax.add_line(patch)
        else:
            patch.set_edgecolor(Black * np.ones(3))
            ax.add_patch(patch)

    plt.tight_layout()
    plt.axis('equal')
    plt.axis('off')
    # plt.show()
    fig.set_size_inches(8, 2.5)

    fig_dir = '/home/tobi/Masterarbeit/TeX/Plots/Data/'
    fig_ext = '.pdf'
    fig.savefig(os.path.join(fig_dir, 'cnn_fashion_fig' + fig_ext),
                bbox_inches='tight', pad_inches=0)
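The helpers above (add_layer, add_layer_with_omission, add_mapping, label) can also be reused on their own for smaller architecture sketches. A minimal sketch, assuming the script is importable as a module; the module name draw_convnet and the layer sizes below are placeholders, not part of this commit:

# Minimal usage sketch; `draw_convnet` is a placeholder module name for the
# script above, and the layer sizes are arbitrary illustration values.
import numpy as np
import matplotlib.pyplot as plt
from draw_convnet import add_layer, label

patches, colors = [], []
fig, ax = plt.subplots()
add_layer(patches, colors, size=(28, 28), num=3, top_left=[0, 0])   # input stack
add_layer(patches, colors, size=(14, 14), num=3, top_left=[40, 0])  # feature maps
label([0, 0], 'Inputs\n1@28x28')
for patch, color in zip(patches, colors):
    patch.set_color(color * np.ones(3))  # grey value -> RGB triple
    ax.add_patch(patch)
plt.axis('equal')
plt.axis('off')
plt.show()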
@@ -0,0 +1,74 @@
import tensorflow as tf
import numpy as np
from tensorflow.keras.callbacks import CSVLogger
from tensorflow.keras.preprocessing.image import ImageDataGenerator
mnist = tf.keras.datasets.fashion_mnist

(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(x_train.shape[0], 28, 28, 1)
x_test = x_test.reshape(x_test.shape[0], 28, 28, 1)
x_train, x_test = x_train / 255.0, x_test / 255.0

#y_train = tf.keras.utils.to_categorical(y_train)
y_test = tf.keras.utils.to_categorical(y_test)

def get_random_sample(a, b, number_of_samples=10):
    # draw `number_of_samples` random images per class from (a, b)
    x = []
    y = []
    for category_number in range(0, 10):
        # get all samples of a category
        train_data_category = a[b == category_number]
        # pick a number of random samples from the category
        train_data_category = train_data_category[np.random.randint(train_data_category.shape[0],
                                                                     size=number_of_samples), :]
        x.extend(train_data_category)
        y.append([category_number] * number_of_samples)

    return np.asarray(x).reshape(-1, 28, 28, 1), np.asarray(y).reshape(10 * number_of_samples, 1)

for i in ['1']:

    model = tf.keras.Sequential()

    model.add(tf.keras.layers.Conv2D(filters=32, kernel_size=(3, 3), activation='relu',
                                     input_shape=(28, 28, 1), padding='same'))
    model.add(tf.keras.layers.Conv2D(filters=32, kernel_size=(2, 2), activation='relu', padding='same'))
    model.add(tf.keras.layers.MaxPool2D(strides=(2, 2)))

    model.add(tf.keras.layers.Conv2D(filters=64, kernel_size=(3, 3), activation='relu', padding='same'))
    model.add(tf.keras.layers.Conv2D(filters=64, kernel_size=(3, 3), activation='relu', padding='same'))
    model.add(tf.keras.layers.MaxPool2D(strides=(2, 2)))

    model.add(tf.keras.layers.Flatten())

    model.add(tf.keras.layers.Dense(256, activation='relu'))
    model.add(tf.keras.layers.Dropout(0.2))
    model.add(tf.keras.layers.Dense(10, activation='softmax'))


    model.compile(optimizer=tf.keras.optimizers.Adam(lr=1e-3), loss="categorical_crossentropy", metrics=["accuracy"])


    x_train_, y_train_ = get_random_sample(x_train, y_train, number_of_samples=100)
    y_train_ = tf.keras.utils.to_categorical(y_train_)
    print(y_train_.shape)  # sanity check of the one-hot training labels

    datagen = ImageDataGenerator(
        rotation_range=15,
        zoom_range=0.1,
        width_shift_range=2,
        height_shift_range=2,
        shear_range=0.5,
        fill_mode='constant',
        cval=0)
    print(model.summary())
    #x_test_ = np.append(x_train[300:],x_test).reshape(x_train[300:].shape[0]+x_test.shape[0],28,28,1)
    #y_test_ = np.append(y_train[300:],y_test).reshape(y_train[300:].shape[0]+y_test.shape[0],10)

    # csv_logger = CSVLogger('output/fashion_exacly_like_novatec__'+i+'.log')
    # history = model.fit(datagen.flow(x_train, tf.keras.utils.to_categorical(y_train), batch_size=20), validation_data=(x_test, y_test), epochs=125, steps_per_epoch = x_train_.shape[0]//20, callbacks=[csv_logger])
    # history = model.fit(datagen.flow(x_train, tf.keras.utils.to_categorical(y_train), batch_size=30),steps_per_epoch=2000,
    #                     validation_data=(x_test, y_test),
    #                     epochs=125, callbacks=[csv_logger],
    #                     shuffle=True)
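Since the training call in this script is commented out, a quick way to check the augmentation pipeline before starting a long run is to pull a single batch from the generator. A short sketch using the names defined in the script above (not part of the commit):

# Pull one augmented batch and confirm the shapes before training.
batch_x, batch_y = next(datagen.flow(x_train_, y_train_, batch_size=20))
print(batch_x.shape)  # expected (20, 28, 28, 1)
print(batch_y.shape)  # expected (20, 10) after to_categorical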
@@ -0,0 +1,60 @@
import tensorflow as tf
import numpy as np
from tensorflow.keras.callbacks import CSVLogger
from tensorflow.keras.preprocessing.image import ImageDataGenerator
mnist = tf.keras.datasets.mnist

(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(x_train.shape[0], 28, 28, 1)
x_train = x_train / 255.0
x_test = x_test.reshape(x_test.shape[0], 28, 28, 1)
x_test = x_test / 255.0

#y_train = tf.keras.utils.to_categorical(y_train)
y_test = tf.keras.utils.to_categorical(y_test)

def get_random_sample(a, b, number_of_samples=10):
    x = []
    y = []
    for category_number in range(0, 10):
        # get all samples of a category
        train_data_category = a[b == category_number]
        # pick a number of random samples from the category
        train_data_category = train_data_category[np.random.randint(train_data_category.shape[0],
                                                                     size=number_of_samples), :]
        x.extend(train_data_category)
        y.append([category_number] * number_of_samples)

    return np.asarray(x).reshape(-1, 28, 28, 1), np.asarray(y).reshape(10 * number_of_samples, 1)
for j in [0.0]:
    for i in ['1', '2', '3', '4', '5', '6', '7', '8', '9', '0']:

        model = tf.keras.models.Sequential()
        model.add(tf.keras.layers.Conv2D(24, kernel_size=5, padding='same', activation='relu',
                                         input_shape=(28, 28, 1)))
        model.add(tf.keras.layers.MaxPool2D())
        model.add(tf.keras.layers.Conv2D(64, kernel_size=5, padding='same', activation='relu'))
        model.add(tf.keras.layers.MaxPool2D(padding='same'))
        model.add(tf.keras.layers.Flatten())
        model.add(tf.keras.layers.Dense(256, activation='relu'))
        model.add(tf.keras.layers.Dropout(j))
        model.add(tf.keras.layers.Dense(10, activation='softmax'))
        model.compile(optimizer='adam', loss="categorical_crossentropy", metrics=["accuracy"])
        print(model.summary())
        for n in [10, 100]:
            x_train_, y_train_ = get_random_sample(x_train, y_train, number_of_samples=n)
            y_train_ = tf.keras.utils.to_categorical(y_train_)

            datagen = ImageDataGenerator(
                rotation_range=30,
                zoom_range=0.15,
                width_shift_range=2,
                height_shift_range=2,
                shear_range=1)

            #x_test_ = np.append(x_train[300:],x_test).reshape(x_train[300:].shape[0]+x_test.shape[0],28,28,1)
            #y_test_ = np.append(y_train[300:],y_test).reshape(y_train[300:].shape[0]+y_test.shape[0],10)

            # csv_logger = CSVLogger('Sample/adam_dropout_'+str(j).replace('.',"")+'_'+str(n)+'_'+i+'.log')
            # history = model.fit(datagen.flow(x_train_, y_train_, batch_size=50), validation_data=(x_test, y_test), epochs=125, callbacks=[csv_logger], steps_per_epoch = x_train_.shape[0]//50)
            # history = model.fit(x_train_, y_train_, validation_data=(x_test, y_test), epochs=125, callbacks=[csv_logger])
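The pgfplots figures further down read per-configuration files such as Data/adam_dropout_02_10.mean with columns epoch and val_accuracy. That aggregation step is not part of this commit; the following is a minimal sketch under the assumption that a .mean file is simply the per-epoch average over the CSVLogger runs written by the loop above (the paths and the glob pattern are illustrative only):

# Assumed post-processing: average the per-run CSVLogger logs into one ".mean" file.
import glob
import numpy as np

logs = [np.genfromtxt(f, delimiter=',', names=True)
        for f in sorted(glob.glob('Sample/adam_dropout_00_10_*.log'))]
epochs = logs[0]['epoch']
val_acc = np.mean([log['val_accuracy'] for log in logs], axis=0)
val_loss = np.mean([log['val_loss'] for log in logs], axis=0)
with open('Sample/adam_dropout_00_10.mean', 'w') as out:
    out.write('epoch,val_accuracy,val_loss\n')
    for e, a, l in zip(epochs, val_acc, val_loss):
        out.write('{},{},{}\n'.format(int(e), a, l))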
@@ -0,0 +1,58 @@
datagen_dropout_02_1
test
0.6604& 0.5175& 0.60136& 0.002348447

datagen_dropout_00_1
test
0.6704& 0.4878& 0.58621& 0.003600539

dropout_02_1
test
0.5312& 0.4224& 0.47137& 0.001175149

default_1
test
0.5633& 0.3230& 0.45702& 0.004021449

datagen_dropout_02_10
test
0.9441& 0.9061& 0.92322& 0.00015
train
1& 0.97& 0.989& 1e-04

datagen_dropout_00_10
test
0.931& 0.9018& 0.9185& 6e-05
train
1& 0.97& 0.99& 0.00013

dropout_02_10
test
0.9423& 0.9081& 0.92696& 0.00013
train
1& 0.99& 0.992& 2e-05

default_10
test
0.8585& 0.8148& 0.83771& 0.00027
train
1& 1& 1& 0

datagen_dropout_02_100
test
0.9805& 0.9727& 0.97826& 0
train

datagen_dropout_00_100
test
0.981& 0.9702& 0.9769& 1e-05
train

dropout_02_100
test
0.9796& 0.9719& 0.97703& 1e-05
train

default_100
test
0.9637& 0.9506& 0.95823& 2e-05
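Cross-referenced with the tables further below, the four '&'-separated values in each block appear to be, in order, the maximum, minimum, mean and variance of the accuracy over the repeated runs (e.g. default_1: max 0.5633, min 0.3230, mean 0.45702, var 0.0040).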
@@ -0,0 +1,141 @@
\pgfplotsset{
  compat=1.11,
  legend image code/.code={
    \draw[mark repeat=2,mark phase=2]
    plot coordinates {
      (0cm,0cm)
      (0.075cm,0cm) %% default is (0.3cm,0cm)
      (0.15cm,0cm)  %% default is (0.6cm,0cm)
    };%
  }
}
\begin{figure}
  \begin{subfigure}[b]{0.5\textwidth}
    \begin{subfigure}[b]{\textwidth}
      \begin{adjustbox}{width=\textwidth, height=0.25\textheight}
        \begin{tikzpicture}
          \begin{axis}[
            ytick = {-1, 0, 1, 2},
            yticklabels = {$-1$, $\phantom{-0.}0$, $1$, $2$},]
            \addplot table [x=x, y=y, col sep=comma, only marks,
              forget plot] {Figures/Data/sin_6.csv};
            \addplot [black, line width=2pt] table [x=x, y=y, col
              sep=comma, mark=none] {Figures/Data/matlab_0.csv};
            \addplot [red, line width = 1.5pt, dashed] table [x=x_n_5000_tl_0.0,
              y=y_n_5000_tl_0.0, col sep=comma, mark=none] {Figures/Data/scala_out_sin.csv};
            \addlegendentry{$f_1^{*, 0.1}$};
            \addlegendentry{$\mathcal{RN}_w^{\tilde{\lambda}}$};
          \end{axis}
        \end{tikzpicture}
      \end{adjustbox}
      \caption{$\lambda = 0.1$}
    \end{subfigure}\\
    \begin{subfigure}[b]{\textwidth}
      \begin{adjustbox}{width=\textwidth, height=0.25\textheight}
        \begin{tikzpicture}
          \begin{axis}
            \addplot table [x=x, y=y, col sep=comma, only marks,
              forget plot] {Figures/Data/sin_6.csv};
            \addplot [black, line width=2pt] table [x=x, y=y, col sep=comma, mark=none] {Figures/Data/matlab_1.csv};
            \addplot [red, line width = 1.5pt, dashed] table [x=x_n_5000_tl_1.0,
              y=y_n_5000_tl_1.0, col sep=comma, mark=none] {Figures/Data/scala_out_sin.csv};
            \addlegendentry{$f_1^{*, 1.0}$};
            \addlegendentry{$\mathcal{RN}_w^{\tilde{\lambda}}$};
          \end{axis}
        \end{tikzpicture}
      \end{adjustbox}
      \caption{$\lambda = 1.0$}
    \end{subfigure}\\
    \begin{subfigure}[b]{\textwidth}
      \begin{adjustbox}{width=\textwidth, height=0.25\textheight}
        \begin{tikzpicture}
          \begin{axis}
            \addplot table [x=x, y=y, col sep=comma, only marks,
              forget plot] {Figures/Data/sin_6.csv};
            \addplot [black, line width=2pt] table [x=x, y=y, col sep=comma, mark=none] {Figures/Data/matlab_3.csv};
            \addplot [red, line width = 1.5pt, dashed] table [x=x_n_5000_tl_3.0,
              y=y_n_5000_tl_3.0, col sep=comma, mark=none] {Figures/Data/scala_out_sin.csv};
            \addlegendentry{$f_1^{*, 3.0}$};
            \addlegendentry{$\mathcal{RN}_w^{\tilde{\lambda}}$};
          \end{axis}
        \end{tikzpicture}
      \end{adjustbox}
      \caption{$\lambda = 3.0$}
    \end{subfigure}
  \end{subfigure}
  \begin{subfigure}[b]{0.5\textwidth}
    \begin{subfigure}[b]{\textwidth}
      \begin{adjustbox}{width=\textwidth, height=0.245\textheight}
        \begin{tikzpicture}
          \begin{axis}[
            ytick = {-2, -1, 0, 1, 2},
            yticklabels = {$-2$, $-1$, $\phantom{-0.}0$, $1$, $2$},]
            \addplot table [x=x, y=y, col sep=comma, only marks,
              forget plot] {Figures/Data/data_sin_d_t.csv};
            \addplot [black, line width=2pt] table [x=x, y=y, col sep=comma, mark=none] {Figures/Data/matlab_sin_d_01.csv};
            \addplot [red, line width = 1.5pt, dashed] table [x=x_n_5000_tl_0.1,
              y=y_n_5000_tl_0.1, col sep=comma, mark=none] {Figures/Data/scala_out_d_1_t.csv};
            \addlegendentry{$f_1^{*, 0.1}$};
            \addlegendentry{$\mathcal{RN}_w^{\tilde{\lambda}}$};
          \end{axis}
        \end{tikzpicture}
      \end{adjustbox}
      \caption{$\lambda = 0.1$}
    \end{subfigure}\\
    \begin{subfigure}[b]{\textwidth}
      \begin{adjustbox}{width=\textwidth, height=0.25\textheight}
        \begin{tikzpicture}
          \begin{axis}
            \addplot table [x=x, y=y, col sep=comma, only marks,
              forget plot] {Figures/Data/data_sin_d_t.csv};
            \addplot [black, line width=2pt] table [x=x, y=y, col sep=comma, mark=none] {Figures/Data/matlab_sin_d_1.csv};
            \addplot [red, line width = 1.5pt, dashed] table [x=x_n_5000_tl_1.0,
              y=y_n_5000_tl_1.0, col sep=comma, mark=none] {Figures/Data/scala_out_d_1_t.csv};
            \addlegendentry{$f_1^{*, 1.0}$};
            \addlegendentry{$\mathcal{RN}_w^{\tilde{\lambda},*}$};
          \end{axis}
        \end{tikzpicture}
      \end{adjustbox}
      \caption{$\lambda = 1.0$}
    \end{subfigure}\\
    \begin{subfigure}[b]{\textwidth}
      \begin{adjustbox}{width=\textwidth, height=0.25\textheight}
        \begin{tikzpicture}
          \begin{axis}
            \addplot table [x=x, y=y, col sep=comma, only marks,
              forget plot] {Figures/Data/data_sin_d_t.csv};
            \addplot [black, line width=2pt] table [x=x, y=y, col sep=comma, mark=none] {Figures/Data/matlab_sin_d_3.csv};
            \addplot [red, line width = 1.5pt, dashed] table [x=x_n_5000_tl_3.0,
              y=y_n_5000_tl_3.0, col sep=comma, mark=none] {Figures/Data/scala_out_d_1_t.csv};
            \addlegendentry{$f_1^{*, 3.0}$};
            \addlegendentry{$\mathcal{RN}_w^{\tilde{\lambda}}$};
          \end{axis}
        \end{tikzpicture}
      \end{adjustbox}
      \caption{$\lambda = 3.0$}
    \end{subfigure}
  \end{subfigure}
  \caption[Comparison of shallow neural networks and regression
  splines]{% In these figures the behaviour stated in ... is visualized
    % in two examples. For $(a), (b), (c)$ six values of the sine function
    % equidistantly spaced on $[-\pi, \pi]$ have been used as training data.
    % For $(d), (e), (f)$ 15 equidistant values have been used, where
    % $y_i^{train} = \sin(x_i^{train}) + \varepsilon_i$ and
    % $\varepsilon_i \sim \mathcal{N}(0, 0.3)$. For
    % $\mathcal{RN}_w^{\tilde{\lambda, *}}$ the random weights are
    % distributed as follows
    % \begin{align*}
    %   \xi_k &\sim
    % \end{align*}
    Ridge penalized neural network compared to a regression spline,
    both trained on $\text{data}_A$ in a), b), c) and on
    $\text{data}_B$ in d), e), f).
    The parameters of each are given above.
  }
  \label{fig:rn_vs_rs}
\end{figure}
%%% Local Variables:
%%% mode: latex
%%% TeX-master:
%%% End:
@@ -0,0 +1,93 @@
\pgfplotsset{
  compat=1.11,
  legend image code/.code={
    \draw[mark repeat=2,mark phase=2]
    plot coordinates {
      (0cm,0cm)
      (0.0cm,0cm) %% default is (0.3cm,0cm)
      (0.0cm,0cm) %% default is (0.6cm,0cm)
    };%
  }
}
\begin{figure}
  \begin{subfigure}[h!]{\textwidth}
    \begin{tikzpicture}
      \begin{axis}[tick style = {draw = none}, width = \textwidth,
        height = 0.6\textwidth,
        xtick = {1, 3, 5, 7, 9, 11, 13, 15, 17, 19},
        xticklabels = {$2$, $4$, $6$, $8$,
          $10$, $12$, $14$, $16$, $18$, $20$},
        xlabel = {training epoch}, ylabel = {classification accuracy}]
        \addplot table
          [x=epoch, y=val_accuracy, col sep=comma] {Figures/Data/GD_01.log};
        \addplot table
          [x=epoch, y=val_accuracy, col sep=comma] {Figures/Data/GD_05.log};
        \addplot table
          [x=epoch, y=val_accuracy, col sep=comma] {Figures/Data/GD_1.log};
        \addplot table
          [x=epoch, y=val_accuracy, col sep=comma]
          {Figures/Data/SGD_01_b32.log};

        \addlegendentry{GD$_{0.01}$}
        \addlegendentry{GD$_{0.05}$}
        \addlegendentry{GD$_{0.1}$}
        \addlegendentry{SGD$_{0.01}$}
      \end{axis}
    \end{tikzpicture}
    %\caption{Classification accuracy}
  \end{subfigure}
  \begin{subfigure}[b]{\textwidth}
    \begin{tikzpicture}
      \begin{axis}[tick style = {draw = none}, width = \textwidth,
        height = 0.6\textwidth,
        ytick = {0, 1, 2, 3, 4},
        yticklabels = {$0$, $1$, $\phantom{0.}2$, $3$, $4$},
        xtick = {1, 3, 5, 7, 9, 11, 13, 15, 17, 19},
        xticklabels = {$2$, $4$, $6$, $8$,
          $10$, $12$, $14$, $16$, $18$, $20$},
        xlabel = {training epoch}, ylabel = {error measure\vphantom{fy}}]
        \addplot table
          [x=epoch, y=val_loss, col sep=comma] {Figures/Data/GD_01.log};
        \addplot table
          [x=epoch, y=val_loss, col sep=comma] {Figures/Data/GD_05.log};
        \addplot table
          [x=epoch, y=val_loss, col sep=comma] {Figures/Data/GD_1.log};
        \addplot table
          [x=epoch, y=val_loss, col sep=comma] {Figures/Data/SGD_01_b32.log};

        \addlegendentry{GD$_{0.01}$}
        \addlegendentry{GD$_{0.05}$}
        \addlegendentry{GD$_{0.1}$}
        \addlegendentry{SGD$_{0.01}$}

      \end{axis}
    \end{tikzpicture}
    \caption{Performance metrics during training}
  \end{subfigure}
  % \\~\\
  \caption[Performance comparison of SGD and GD]{The neural network given
    in ?? trained with different algorithms on the MNIST handwritten digits
    data set. For gradient descent the learning rates 0.01, 0.05 and 0.1 are
    used (GD$_{\cdot}$). For stochastic gradient descent a batch size of 32
    and a learning rate of 0.01 is used (SGD$_{0.01}$).}
  \label{fig:sgd_vs_gd}
\end{figure}

\begin{table}[h]
  \begin{tabu} to \textwidth {@{} *4{X[c]}c*4{X[c]} @{}}
    \multicolumn{4}{c}{Classification Accuracy}
    &~&\multicolumn{4}{c}{Error Measure}
    \\\cline{1-4}\cline{6-9}
    GD$_{0.01}$&GD$_{0.05}$&GD$_{0.1}$&SGD$_{0.01}$&&GD$_{0.01}$&GD$_{0.05}$&GD$_{0.1}$&SGD$_{0.01}$
    \\\cline{1-4}\cline{6-9}
    \multicolumn{9}{c}{test}\\
    0.265&0.633&0.203&0.989&&2.267&1.947&3.91&0.032
  \end{tabu}
  \caption{Performance metrics of the networks trained in
    Figure~\ref{fig:sgd_vs_gd} after 20 training epochs.}
  \label{table:sgd_vs_gd}
\end{table}
%%% Local Variables:
%%% mode: latex
%%% TeX-master: "../main"
%%% End:
@@ -0,0 +1,71 @@
\message{ !name(pfg_test.tex)}\documentclass{article}
\usepackage{pgfplots}
\usepackage{filecontents}
\usepackage{subcaption}
\usepackage{adjustbox}
\usepackage{xcolor}
\usepackage{graphicx}
\usetikzlibrary{calc, 3d}

\begin{document}

\message{ !name(pfg_test.tex) !offset(6) }

\end{axis}
\end{tikzpicture}
\end{adjustbox}
\caption{True position (\textcolor{red}{red}), distorted data (black)}
\end{figure}
\begin{center}
  \begin{figure}[h]
    \begin{subfigure}{0.49\textwidth}
      \includegraphics[width=\textwidth]{Data/klammern.jpg}
      \caption{Original Picture}
    \end{subfigure}
    \begin{subfigure}{0.49\textwidth}
      \includegraphics[width=\textwidth]{Data/image_conv4.png}
      \caption{test}
    \end{subfigure}
    \begin{subfigure}{0.49\textwidth}
      \includegraphics[width=\textwidth]{Data/image_conv5.png}
      \caption{test}
    \end{subfigure}
    \begin{subfigure}{0.49\textwidth}
      \includegraphics[width=\textwidth]{Data/image_conv6.png}
      \caption{test}
    \end{subfigure}
  \end{figure}
\end{center}

\begin{figure}
  \begin{adjustbox}{width=\textwidth}
    \begin{tikzpicture}
      \begin{scope}[x = (0:1cm), y=(90:1cm), z=(15:-0.5cm)]
        \node[canvas is xy plane at z=0, transform shape] at (0,0)
          {\includegraphics[width=5cm]{Data/klammern_r.jpg}};
        \node[canvas is xy plane at z=2, transform shape] at (0,-0.2)
          {\includegraphics[width=5cm]{Data/klammern_g.jpg}};
        \node[canvas is xy plane at z=4, transform shape] at (0,-0.4)
          {\includegraphics[width=5cm]{Data/klammern_b.jpg}};
        \node[canvas is xy plane at z=4, transform shape] at (-8,-0.2)
          {\includegraphics[width=5.3cm]{Data/klammern_rgb.jpg}};
      \end{scope}
    \end{tikzpicture}
  \end{adjustbox}
  \caption{On the right, the red, green and blue channels of the picture
    are displayed. In order to better visualize the color channels, the
    black-and-white picture of each channel has been colored in the
    respective color. Combining the layers results in the image on the
    left.}
\end{figure}



\message{ !name(pfg_test.tex) !offset(3) }

\end{document}

%%% Local Variables:
%%% mode: latex
%%% TeX-master: t
%%% End:
@@ -0,0 +1,53 @@
\begin{figure}[h]
  \centering
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Figures/Data/fashion_mnist0.pdf}
    \caption{T-shirt/top}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Figures/Data/fashion_mnist1.pdf}
    \caption{Trousers}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Figures/Data/fashion_mnist2.pdf}
    \caption{Pullover}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Figures/Data/fashion_mnist3.pdf}
    \caption{Dress}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Figures/Data/fashion_mnist4.pdf}
    \caption{Coat}
  \end{subfigure}\\
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Figures/Data/fashion_mnist5.pdf}
    \caption{Sandal}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Figures/Data/fashion_mnist6.pdf}
    \caption{Shirt}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Figures/Data/fashion_mnist7.pdf}
    \caption{Sneaker}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Figures/Data/fashion_mnist8.pdf}
    \caption{Bag}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Figures/Data/fashion_mnist9.pdf}
    \caption{Ankle boot}
  \end{subfigure}
  \caption[Fashion MNIST data set]{The fashion MNIST data set contains
    70.000 preprocessed product images from Zalando, which are categorized
    as T-shirt/top, Trouser, Pullover, Dress, Coat, Sandal, Shirt, Sneaker,
    Bag and Ankle boot. Of these images 60.000 are used as training images,
    while the rest are used to validate the trained models.}
  \label{fig:fashionMNIST}
\end{figure}
%%% Local Variables:
%%% mode: latex
%%% TeX-master: "../main"
%%% End:
@@ -0,0 +1,83 @@
\pgfplotsset{
  compat=1.11,
  legend image code/.code={
    \draw[mark repeat=2,mark phase=2]
    plot coordinates {
      (0cm,0cm)
      (0.15cm,0cm) %% default is (0.3cm,0cm)
      (0.3cm,0cm)  %% default is (0.6cm,0cm)
    };%
  }
}
\begin{figure}
  \begin{subfigure}[h]{\textwidth}
    \begin{tikzpicture}
      \small
      \begin{axis}[legend cell align={left},yticklabel style={/pgf/number format/fixed,
        /pgf/number format/precision=3},tick style = {draw = none}, width = 0.975\textwidth,
        height = 0.6\textwidth, ymin = 0.988, legend style={at={(0.9825,0.0175)},anchor=south east},
        xlabel = {epoch}, ylabel = {Classification Accuracy}, cycle
        list/Dark2, every axis plot/.append style={line width =1.25pt}]
        \addplot table
          [x=epoch, y=val_accuracy, col sep=comma, mark = none]
          {Figures/Data/adam_datagen_full_mean.log};
        \addplot table
          [x=epoch, y=val_accuracy, col sep=comma, mark = none]
          {Figures/Data/adam_datagen_dropout_02_full_mean.log};
        \addplot table
          [x=epoch, y=val_accuracy, col sep=comma, mark = none]
          {Figures/Data/adam_datagen_dropout_04_full_mean.log};
        \addplot table
          [x=epoch, y=val_accuracy, col sep=comma, mark = none]
          {Figures/Data/adam_dropout_02_full_mean.log};
        \addplot table
          [x=epoch, y=val_accuracy, col sep=comma, mark = none]
          {Figures/Data/adam_dropout_04_full_mean.log};
        \addplot [dashed] table
          [x=epoch, y=val_accuracy, col sep=comma, mark = none]
          {Figures/Data/adam_full_mean.log};

        \addlegendentry{\footnotesize{G.}}
        \addlegendentry{\footnotesize{G. + D. 0.2}}
        \addlegendentry{\footnotesize{G. + D. 0.4}}
        \addlegendentry{\footnotesize{D. 0.2}}
        \addlegendentry{\footnotesize{D. 0.4}}
        \addlegendentry{\footnotesize{Default}}
      \end{axis}
    \end{tikzpicture}
    \caption{Classification accuracy}
    \vspace{.25cm}
  \end{subfigure}
  \begin{subfigure}[h]{1.0\linewidth}
    \begin{tabu} to \textwidth {@{}lc*5{X[c]}@{}}
      \Tstrut \Bstrut & \textsc{\,Adam\,} & D. 0.2 & D. 0.4 & G. &G.+D.\,0.2 & G.+D.\,0.4 \\
      \hline
      \multicolumn{7}{c}{Test Accuracy}\Bstrut \\
      \cline{2-7}
      mean \Tstrut & 0.9914 & 0.9923 & 0.9930 & 0.9937 & 0.9938 & 0.9943 \\
      max & 0.9926 & 0.9930 & 0.9934 & 0.9946 & 0.9955 & 0.9956 \\
      min & 0.9887 & 0.9909 & 0.9922 & 0.9929 & 0.9929 & 0.9934 \\
      \hline
      \multicolumn{7}{c}{Training Accuracy}\Bstrut \\
      \cline{2-7}
      mean \Tstrut & 0.9994 & 0.9991 & 0.9989 & 0.9967 & 0.9954 & 0.9926 \\
      max & 0.9996 & 0.9996 & 0.9992 & 0.9979 & 0.9971 & 0.9937 \\
      min & 0.9992 & 0.9990 & 0.9984 & 0.9947 & 0.9926 & 0.9908 \\
    \end{tabu}
    \caption{Mean, maximum and minimum accuracy after 48 epochs of training.}
    \label{fig:gen_dropout_b}
  \end{subfigure}
  \caption[Performance comparison of overfitting measures]{Accuracy for the
    net given in ... with Dropout (D.), data generation (G.), a combination,
    or neither (Default) implemented, trained with \textsc{Adam}. In each
    epoch either the 60.000 training samples were used or, with data
    generation, 10.000 steps each using batches of 60 generated data points.
    For each configuration the model was trained 5 times and the average
    accuracies at each epoch are given in (a). Mean, maximum and minimum
    values of the accuracy on the test and training set are given in (b).}
  \label{fig:gen_dropout}
\end{figure}
%%% Local Variables:
%%% mode: latex
%%% TeX-master: "../main"
%%% End:
@@ -0,0 +1,41 @@
\begin{figure}[h]
  \centering
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/mnist0.pdf}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/mnist1.pdf}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/mnist2.pdf}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/mnist3.pdf}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/mnist4.pdf}
  \end{subfigure}\\
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/mnist5.pdf}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/mnist6.pdf}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/mnist7.pdf}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/mnist8.pdf}
  \end{subfigure}
  \begin{subfigure}{0.19\textwidth}
    \includegraphics[width=\textwidth]{Plots/Data/mnist9.pdf}
  \end{subfigure}
  \caption[MNIST data set]{The MNIST data set contains 70.000 images of
    preprocessed handwritten digits. Of these images 60.000 are used as
    training images, while the rest are used to validate the models trained.}
  \label{fig:MNIST}
\end{figure}
%%% Local Variables:
%%% mode: latex
%%% TeX-master: "../main"
%%% End:
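The per-digit PDFs included above (Plots/Data/mnist0.pdf to mnist9.pdf) are not produced in this commit. A minimal sketch of how such single-digit images could be generated with matplotlib, under that assumption; the output path follows the \includegraphics paths above:

# Hypothetical generator for the example-digit images referenced in the figure;
# how the committed PDFs were actually produced is not shown in this commit.
import os
import matplotlib.pyplot as plt
import tensorflow as tf

(x_train, y_train), _ = tf.keras.datasets.mnist.load_data()
os.makedirs('Plots/Data', exist_ok=True)
for digit in range(10):
    sample = x_train[y_train == digit][0]  # first training image of this class
    fig, ax = plt.subplots(figsize=(1.5, 1.5))
    ax.imshow(sample, cmap='gray_r')
    ax.axis('off')
    fig.savefig('Plots/Data/mnist{}.pdf'.format(digit),
                bbox_inches='tight', pad_inches=0)
    plt.close(fig)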
@ -0,0 +1,297 @@
|
|||||||
|
\documentclass[a4paper, 12pt, draft=true]{article}
|
||||||
|
\usepackage{pgfplots}
|
||||||
|
\usepackage{filecontents}
|
||||||
|
\usepackage{subcaption}
|
||||||
|
\usepackage{adjustbox}
|
||||||
|
\usepackage{xcolor}
|
||||||
|
\usepackage{tabu}
|
||||||
|
\usepackage{showframe}
|
||||||
|
\usepackage{graphicx}
|
||||||
|
\usepackage{titlecaps}
|
||||||
|
\usetikzlibrary{calc, 3d}
|
||||||
|
\usepgfplotslibrary{colorbrewer}
|
||||||
|
|
||||||
|
\newcommand\Tstrut{\rule{0pt}{2.6ex}} % = `top' strut
|
||||||
|
\newcommand\Bstrut{\rule[-0.9ex]{0pt}{0pt}} % = `bottom' strut
|
||||||
|
|
||||||
|
\begin{document}
|
||||||
|
\pgfplotsset{
|
||||||
|
compat=1.11,
|
||||||
|
legend image code/.code={
|
||||||
|
\draw[mark repeat=2,mark phase=2]
|
||||||
|
plot coordinates {
|
||||||
|
(0cm,0cm)
|
||||||
|
(0.3cm,0cm) %% default is (0.3cm,0cm)
|
||||||
|
(0.6cm,0cm) %% default is (0.6cm,0cm)
|
||||||
|
};%
|
||||||
|
}
|
||||||
|
}
|
||||||
|
\begin{figure}
|
||||||
|
\begin{subfigure}[h]{\textwidth}
|
||||||
|
\begin{tikzpicture}
|
||||||
|
\begin{axis}[legend cell align={left},yticklabel style={/pgf/number format/fixed,
|
||||||
|
/pgf/number format/precision=3},tick style = {draw = none}, width = \textwidth,
|
||||||
|
height = 0.35\textwidth, legend style={at={(0.9825,0.0175)},anchor=south east},
|
||||||
|
ylabel = {Test Accuracy}, cycle
|
||||||
|
list/Dark2, every axis plot/.append style={line width
|
||||||
|
=1.25pt}]
|
||||||
|
% \addplot [dashed] table
|
||||||
|
% [x=epoch, y=accuracy, col sep=comma, mark = none]
|
||||||
|
% {Data/adam_datagen_full.log};
|
||||||
|
\addplot table
|
||||||
|
[x=epoch, y=val_accuracy, col sep=comma, mark = none]
|
||||||
|
{Data/adam_1.mean};
|
||||||
|
% \addplot [dashed] table
|
||||||
|
% [x=epoch, y=accuracy, col sep=comma, mark = none]
|
||||||
|
% {Data/adam_datagen_dropout_02_full.log};
|
||||||
|
\addplot table
|
||||||
|
[x=epoch, y=val_accuracy, col sep=comma, mark = none]
|
||||||
|
{Data/adam_datagen_1.mean};
|
||||||
|
\addplot table
|
||||||
|
[x=epoch, y=val_accuracy, col sep=comma, mark = none]
|
||||||
|
{Data/adam_datagen_dropout_02_1.mean};
|
||||||
|
\addplot table
|
||||||
|
[x=epoch, y=val_accuracy, col sep=comma, mark = none]
|
||||||
|
{Data/adam_dropout_02_1.mean};
|
||||||
|
|
||||||
|
|
||||||
|
\addlegendentry{\footnotesize{G.}}
|
||||||
|
\addlegendentry{\footnotesize{G. + D. 0.2}}
|
||||||
|
\addlegendentry{\footnotesize{G. + D. 0.4}}
|
||||||
|
\addlegendentry{\footnotesize{D. 0.2}}
|
||||||
|
\addlegendentry{\footnotesize{D. 0.4}}
|
||||||
|
\addlegendentry{\footnotesize{Default}}
|
||||||
|
\end{axis}
|
||||||
|
\end{tikzpicture}
|
||||||
|
\caption{1 sample per class}
|
||||||
|
\vspace{0.25cm}
|
||||||
|
\end{subfigure}
|
||||||
|
\begin{subfigure}[h]{\textwidth}
|
||||||
|
\begin{tikzpicture}
|
||||||
|
\begin{axis}[legend cell align={left},yticklabel style={/pgf/number format/fixed,
|
||||||
|
/pgf/number format/precision=3},tick style = {draw = none}, width = \textwidth,
|
||||||
|
height = 0.35\textwidth, legend style={at={(0.9825,0.0175)},anchor=south east},
|
||||||
|
ylabel = {Test Accuracy}, cycle
|
||||||
|
list/Dark2, every axis plot/.append style={line width
|
||||||
|
=1.25pt}]
|
||||||
|
\addplot table
|
||||||
|
[x=epoch, y=val_accuracy, col sep=comma, mark = none]
|
||||||
|
{Data/adam_dropout_00_10.mean};
|
||||||
|
\addplot table
|
||||||
|
[x=epoch, y=val_accuracy, col sep=comma, mark = none]
|
||||||
|
{Data/adam_dropout_02_10.mean};
|
||||||
|
\addplot table
|
||||||
|
[x=epoch, y=val_accuracy, col sep=comma, mark = none]
|
||||||
|
{Data/adam_datagen_dropout_00_10.mean};
|
||||||
|
\addplot table
|
||||||
|
[x=epoch, y=val_accuracy, col sep=comma, mark = none]
|
||||||
|
{Data/adam_datagen_dropout_02_10.mean};
|
||||||
|
|
||||||
|
|
||||||
|
\addlegendentry{\footnotesize{G.}}
|
||||||
|
\addlegendentry{\footnotesize{G. + D. 0.2}}
|
||||||
|
\addlegendentry{\footnotesize{G. + D. 0.4}}
|
||||||
|
\addlegendentry{\footnotesize{D. 0.2}}
|
||||||
|
\addlegendentry{\footnotesize{D. 0.4}}
|
||||||
|
\addlegendentry{\footnotesize{Default}}
|
||||||
|
\end{axis}
|
||||||
|
\end{tikzpicture}
|
||||||
|
\caption{10 samples per class}
|
||||||
|
\end{subfigure}
|
||||||
|
\begin{subfigure}[h]{\textwidth}
\begin{tikzpicture}
\begin{axis}[legend cell align={left},
    yticklabel style={/pgf/number format/fixed, /pgf/number format/precision=3},
    tick style = {draw = none}, width = 0.9875\textwidth, height = 0.35\textwidth,
    legend style={at={(0.9825,0.0175)},anchor=south east},
    xlabel = {epoch}, ylabel = {Test Accuracy}, cycle list/Dark2,
    every axis plot/.append style={line width=1.25pt}, ymin = {0.92}]
\addplot table
[x=epoch, y=val_accuracy, col sep=comma, mark = none]
{Data/adam_dropout_00_100.mean};
\addplot table
[x=epoch, y=val_accuracy, col sep=comma, mark = none]
{Data/adam_dropout_02_100.mean};
\addplot table
[x=epoch, y=val_accuracy, col sep=comma, mark = none]
{Data/adam_datagen_dropout_00_100.mean};
\addplot table
[x=epoch, y=val_accuracy, col sep=comma, mark = none]
{Data/adam_datagen_dropout_02_100.mean};

\addlegendentry{\footnotesize{G.}}
\addlegendentry{\footnotesize{G. + D. 0.2}}
\addlegendentry{\footnotesize{G. + D. 0.4}}
\addlegendentry{\footnotesize{D. 0.2}}
\addlegendentry{\footnotesize{D. 0.4}}
\addlegendentry{\footnotesize{Default}}
\end{axis}
\end{tikzpicture}
\caption{100 samples per class}
\vspace{.25cm}
\end{subfigure}
\caption{Accuracy for the net given in ... with Dropout (D.),
  data generation (G.), a combination of both, or neither (Default),
  trained with \textsc{Adam}. In each epoch either the 60,000 training
  samples were used or, for data generation, 10,000 steps with batches
  of 60 generated data points each. For each configuration the model
  was trained 5 times; the average accuracies at each epoch are shown
  in (a)--(c). Mean, maximum and minimum values of the test accuracy
  are given in the following table.}
\end{figure}
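% The .mean files read by the plots above are per-epoch averages over the
% repeated runs. A minimal sketch of how such a file could be produced from
% the individual run logs, assuming five CSV logs with the epoch and
% val_accuracy columns written by the CSVLogger used during training
% (the per-run file names are illustrative):
%
%   import pandas as pd
%   runs = [pd.read_csv("Data/adam_datagen_1_run%d.log" % i) for i in range(5)]
%   mean = sum(runs) / len(runs)    # column-wise mean over the five runs
%   mean.to_csv("Data/adam_datagen_1.mean", index=False)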

\begin{table}
\centering
\begin{tabu} to \textwidth {@{}l*4{X[c]}@{}}
\Tstrut \Bstrut & \textsc{Adam} & D. 0.2 & Gen & Gen.+D. 0.2 \\
\hline
&
\multicolumn{4}{c}{\titlecap{test accuracy for 1 sample}}\Bstrut \\
\cline{2-5}
max \Tstrut & 0.5633 & 0.5312 & 0.6704 & 0.6604 \\
min & 0.3230 & 0.4224 & 0.4878 & 0.5175 \\
mean & 0.4570 & 0.4714 & 0.5862 & 0.6014 \\
var & 0.0040 & 0.0012 & 0.0036 & 0.0023 \\
\hline
&
\multicolumn{4}{c}{\titlecap{test accuracy for 10 samples}}\Bstrut \\
\cline{2-5}
max \Tstrut & 0.8585 & 0.9423 & 0.9310 & 0.9441 \\
min & 0.8148 & 0.9081 & 0.9018 & 0.9061 \\
mean & 0.8377 & 0.9270 & 0.9185 & 0.9232 \\
var & 2.7e-4 & 1.3e-4 & 6.0e-5 & 1.5e-4 \\
\hline
&
\multicolumn{4}{c}{\titlecap{test accuracy for 100 samples}}\Bstrut \\
\cline{2-5}
max & 0.9637 & 0.9796 & 0.9810 & 0.9805 \\
min & 0.9506 & 0.9719 & 0.9702 & 0.9727 \\
mean & 0.9582 & 0.9770 & 0.9769 & 0.9783 \\
var & 2.0e-5 & 1.0e-5 & 1.0e-5 & 0 \\
\hline
\end{tabu}
\caption{Test accuracy of the model trained 10 times on random training
  sets containing 1, 10 and 100 data points per class.}
\end{table}

\begin{center}
\begin{figure}[h]
\centering
\begin{subfigure}{0.19\textwidth}
\includegraphics[width=\textwidth]{Data/mnist0.pdf}
\caption{original\\image}
\end{subfigure}
\begin{subfigure}{0.19\textwidth}
\includegraphics[width=\textwidth]{Data/mnist_gen_zoom.pdf}
\caption{random\\zoom}
\end{subfigure}
\begin{subfigure}{0.19\textwidth}
\includegraphics[width=\textwidth]{Data/mnist_gen_shear.pdf}
\caption{random\\shear}
\end{subfigure}
\begin{subfigure}{0.19\textwidth}
\includegraphics[width=\textwidth]{Data/mnist_gen_rotation.pdf}
\caption{random\\rotation}
\end{subfigure}
\begin{subfigure}{0.19\textwidth}
\includegraphics[width=\textwidth]{Data/mnist_gen_shift.pdf}
\caption{random\\positional shift}
\end{subfigure}\\
\begin{subfigure}{0.19\textwidth}
\includegraphics[width=\textwidth]{Data/mnist5.pdf}
\end{subfigure}
\begin{subfigure}{0.19\textwidth}
\includegraphics[width=\textwidth]{Data/mnist6.pdf}
\end{subfigure}
\begin{subfigure}{0.19\textwidth}
\includegraphics[width=\textwidth]{Data/mnist7.pdf}
\end{subfigure}
\begin{subfigure}{0.19\textwidth}
\includegraphics[width=\textwidth]{Data/mnist8.pdf}
\end{subfigure}
\begin{subfigure}{0.19\textwidth}
\includegraphics[width=\textwidth]{Data/mnist9.pdf}
\end{subfigure}
\caption{The MNIST data set contains 70,000 images of preprocessed handwritten
  digits, of which 60,000 are used as training images, while the
  remaining 10,000 are used to validate the trained models.}
\end{figure}
\end{center}
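% The augmented digits in the top row can be reproduced with the same kind of
% ImageDataGenerator that is used for training, applying one random
% transformation at a time. A minimal sketch (the zoom range mirrors the
% training setup; the chosen digit and the file name are illustrative):
%
%   import tensorflow as tf
%   import matplotlib.pyplot as plt
%   from tensorflow.keras.preprocessing.image import ImageDataGenerator
%
%   (x_train, _), _ = tf.keras.datasets.mnist.load_data()
%   digit = x_train[0].reshape(1, 28, 28, 1)
%
%   datagen = ImageDataGenerator(zoom_range=0.15)          # random zoom only
%   augmented = next(datagen.flow(digit, batch_size=1))[0, :, :, 0]
%
%   plt.imshow(augmented, cmap="gray")
%   plt.axis("off")
%   plt.savefig("Data/mnist_gen_zoom.pdf")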

\begin{figure}
\begin{adjustbox}{width=\textwidth}
\begin{tikzpicture}
\begin{scope}[x = (0:1cm), y=(90:1cm), z=(15:-0.5cm)]
\node[canvas is xy plane at z=0, transform shape] at (0,0)
{\includegraphics[width=5cm]{Data/klammern_r.jpg}};
\node[canvas is xy plane at z=2, transform shape] at (0,-0.2)
{\includegraphics[width=5cm]{Data/klammern_g.jpg}};
\node[canvas is xy plane at z=4, transform shape] at (0,-0.4)
{\includegraphics[width=5cm]{Data/klammern_b.jpg}};
\node[canvas is xy plane at z=4, transform shape] at (-8,-0.2)
{\includegraphics[width=5.3cm]{Data/klammern_rgb.jpg}};
\end{scope}
\end{tikzpicture}
\end{adjustbox}
\caption{On the right the red, green and blue channels of the picture
  are displayed. In order to better visualize the color channels, the
  grayscale image of each channel has been colored in the respective
  color. Combining the channels results in the image on the left.}
\end{figure}
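% The single-channel pictures on the right can be obtained by splitting the
% image into its red, green and blue channels and saving each channel with
% the other two set to zero. A minimal sketch (file names as in the figure;
% the exact preprocessing of the original image is an assumption):
%
%   import numpy as np
%   from PIL import Image
%
%   rgb = np.array(Image.open("Data/klammern_rgb.jpg"))
%   for index, name in enumerate(["r", "g", "b"]):
%       channel = np.zeros_like(rgb)
%       channel[:, :, index] = rgb[:, :, index]   # keep only one channel
%       Image.fromarray(channel).save("Data/klammern_%s.jpg" % name)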

\begin{figure}
\centering
\begin{subfigure}{\linewidth}
\centering
\includegraphics[width=\textwidth]{Data/convnet_fig.pdf}
\end{subfigure}
\begin{subfigure}{.45\linewidth}
\centering
\begin{tikzpicture}
\begin{axis}[enlargelimits=false, width=\textwidth]
\addplot[domain=-5:5, samples=100]{tanh(x)};
\end{axis}
\end{tikzpicture}
\end{subfigure}
\begin{subfigure}{.45\linewidth}
\centering
\begin{tikzpicture}
\begin{axis}[enlargelimits=false, width=\textwidth,
    ytick={0,2,4},yticklabels={\hphantom{4.}0,2,4}, ymin=-1]
\addplot[domain=-5:5, samples=100]{max(0,x)};
\end{axis}
\end{tikzpicture}
\end{subfigure}
\begin{subfigure}{.45\linewidth}
\centering
\begin{tikzpicture}
\begin{axis}[enlargelimits=false, width=\textwidth, ymin=-1,
    ytick={0,2,4},yticklabels={$\hphantom{-5.}0$,2,4}]
\addplot[domain=-5:5, samples=100]{max(0,x)+ 0.1*min(0,x)};
\end{axis}
\end{tikzpicture}
\end{subfigure}
\end{figure}
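
For reference, the functions plotted above are the hyperbolic tangent, the
ReLU and a leaky ReLU (denoted lReLU here) with slope $0.1$ for negative
arguments, the slope used in the plot; together with the logistic sigmoid
plotted below they are
\[
\sigma(x) = \frac{1}{1 + e^{-x}}, \qquad
\tanh(x) = \frac{e^{x} - e^{-x}}{e^{x} + e^{-x}},
\]
\[
\mathrm{ReLU}(x) = \max(0, x), \qquad
\mathrm{lReLU}(x) = \max(0, x) + 0.1 \min(0, x).
\]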

\begin{tikzpicture}
\begin{axis}[enlargelimits=false]
\addplot [domain=-5:5, samples=101,unbounded coords=jump]{1/(1+exp(-x))};
\addplot[domain=-5:5, samples=100]{tanh(x)};
\addplot[domain=-5:5, samples=100]{max(0,x)};
\end{axis}
\end{tikzpicture}

\begin{tikzpicture}
\begin{axis}[enlargelimits=false]
\addplot[domain=-2*pi:2*pi, samples=100]{cos(deg(x))};
\end{axis}
\end{tikzpicture}

\end{document}

%%% Local Variables:
%%% mode: latex
%%% TeX-master: t
%%% End:
@ -0,0 +1,78 @@
\pgfplotsset{
compat=1.11,
legend image code/.code={
\draw[mark repeat=2,mark phase=2]
plot coordinates {
(0cm,0cm)
(0.0cm,0cm) %% default is (0.3cm,0cm)
(0.0cm,0cm) %% default is (0.6cm,0cm)
};%
}
}
\begin{figure}
\begin{subfigure}[h]{\textwidth}
\begin{tikzpicture}
\begin{axis}[tick style = {draw = none}, width = \textwidth,
    height = 0.6\textwidth, ymin = 0.92,
    legend style={at={(0.9825,0.75)},anchor=north east},
    xlabel = {epoch}, ylabel = {Classification Accuracy}]
\addplot table
[x=epoch, y=val_accuracy, col sep=comma, mark = none]
{Figures/Data/adagrad.log};
\addplot table
[x=epoch, y=val_accuracy, col sep=comma, mark = none]
{Figures/Data/adadelta.log};
\addplot table
[x=epoch, y=val_accuracy, col sep=comma, mark = none]
{Figures/Data/adam.log};

\addlegendentry{\footnotesize{ADAGRAD}}
\addlegendentry{\footnotesize{ADADELTA}}
\addlegendentry{\footnotesize{ADAM}}
\addlegendentry{SGD$_{0.01}$}
\end{axis}
\end{tikzpicture}
%\caption{Classification accuracy}
\vspace{.25cm}
\end{subfigure}
% \begin{subfigure}[b]{\textwidth}
% \begin{tikzpicture}
% \begin{axis}[tick style = {draw = none}, width = \textwidth,
%     height = 0.6\textwidth, ymax = 0.5,
%     xlabel = {epoch}, ylabel = {Error Measure\vphantom{y}},
%     ytick ={0,0.1,0.2,0.3,0.4,0.45,0.5},
%     yticklabels = {0,0.1,0.2,0.3,0.4,\phantom{0.94},0.5}]
% \addplot table
% [x=epoch, y=val_loss, col sep=comma, mark = none] {Figures/Data/adagrad.log};
% \addplot table
% [x=epoch, y=val_loss, col sep=comma, mark = none] {Figures/Data/adadelta.log};
% \addplot table
% [x=epoch, y=val_loss, col sep=comma, mark = none] {Figures/Data/adam.log};

% \addlegendentry{\footnotesize{ADAGRAD}}
% \addlegendentry{\footnotesize{ADADELTA}}
% \addlegendentry{\footnotesize{ADAM}}
% \addlegendentry{SGD$_{0.01}$}

% \end{axis}
% \end{tikzpicture}
% \caption{Performance metrics during training}
% \vspace{.25cm}
% \end{subfigure}
\begin{subfigure}[b]{1.0\linewidth}
\begin{tabu} to \textwidth {@{} *3{X[c]}c*3{X[c]} @{}}
\multicolumn{3}{c}{Classification Accuracy}
&~&\multicolumn{3}{c}{Error Measure}
\\\cline{1-3}\cline{5-7}
ADAGRAD&ADADELTA&ADAM&&ADAGRAD&ADADELTA&ADAM
\\\cline{1-3}\cline{5-7}
1&1&1&&1&1&1
\end{tabu}
\caption{Performance metrics after 20 epochs}
\end{subfigure}
\caption[Performance comparison of training algorithms]{Classification
  accuracy on the test set and ... performance metrics of the network
  given in ..., trained with different optimization algorithms.}
\label{fig:comp_alg}
\end{figure}

%%% Local Variables:
%%% mode: latex
%%% TeX-master: "../main"
%%% End:
@ -0,0 +1,45 @@
\begin{figure}
\centering
\begin{subfigure}[b]{0.49\textwidth}
\centering
\begin{adjustbox}{width=\textwidth, height=0.25\textheight}
\begin{tikzpicture}
\begin{axis}[tick style = {draw = none}, xticklabel = \empty,
    yticklabel=\empty]
\addplot [mark options={scale = 0.7}, mark = o] table
[x=x_d,y=y_d, col sep = comma] {Figures/Data/sin_conv.csv};
\addplot [red, mark=x] table [x=x_i, y=y_i, col sep=comma] {Figures/Data/sin_conv.csv};
\end{axis}
\end{tikzpicture}
\end{adjustbox}
\caption{True position (\textcolor{red}{red}), distorted position data (black)}
\end{subfigure}
\begin{subfigure}[b]{0.49\textwidth}
\centering
\begin{adjustbox}{width=\textwidth, height=0.25\textheight}
\begin{tikzpicture}
\begin{axis}[tick style = {draw = none}, xticklabel = \empty,
    yticklabel=\empty]
\addplot [mark options={scale = 0.7}, mark = o] table
[x=x,y=y, col sep = comma] {Figures/Data/sin_conv.csv};
\addplot [red, mark=x] table [x=x_i, y=y_i, col sep=comma] {Figures/Data/sin_conv.csv};
\end{axis}
\end{tikzpicture}
\end{adjustbox}
\caption{True position (\textcolor{red}{red}), filtered position data (black)}
\end{subfigure}
\caption[Signal smoothing using convolution]{Example of noise reduction
  using convolution with simulated positional data. As filter
  $g(i)=\left(\nicefrac{1}{3},\nicefrac{1}{4},\nicefrac{1}{5},\nicefrac{1}{6},\nicefrac{1}{20}\right)_{(i-1)}$
  is chosen and applied to the $x$ and $y$ coordinate data separately.
  The convolution of both signals with $g$ improves the MSE of the
  positions from 0.196 to 0.170 and visibly smooths the data.}
\label{fig:sin_conv}
\end{figure}
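% A minimal sketch of the smoothing described in the caption, assuming the
% column names of Figures/Data/sin_conv.csv used above (x_d, y_d: distorted
% positions; x_i, y_i: true positions) and writing the filtered result into
% the x and y columns read by the right-hand plot:
%
%   import numpy as np
%   import pandas as pd
%
%   g = np.array([1/3, 1/4, 1/5, 1/6, 1/20])       # filter from the caption
%   data = pd.read_csv("Figures/Data/sin_conv.csv")
%   data["x"] = np.convolve(data["x_d"], g, mode="same")
%   data["y"] = np.convolve(data["y_d"], g, mode="same")
%   data.to_csv("Figures/Data/sin_conv.csv", index=False)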

%%% Local Variables:
%%% mode: latex
%%% TeX-master: "../main"
%%% End:
@ -0,0 +1,175 @@
\documentclass{report}
\usepackage[utf8]{inputenc}
\usepackage[english]{babel}
\usepackage[T1]{fontenc}

\usepackage{xcolor}
\definecolor{maroon}{cmyk}{0, 0.87, 0.68, 0.32}
\definecolor{halfgray}{gray}{0.55}
\definecolor{ipython_frame}{RGB}{207, 207, 207}
\definecolor{ipython_bg}{RGB}{247, 247, 247}
\definecolor{ipython_red}{RGB}{186, 33, 33}
\definecolor{ipython_green}{RGB}{0, 128, 0}
\definecolor{ipython_cyan}{RGB}{64, 128, 128}
\definecolor{ipython_purple}{RGB}{170, 34, 255}

\usepackage{listings}
\lstset{
breaklines=true,
%
extendedchars=true,
literate=
{á}{{\'a}}1 {é}{{\'e}}1 {í}{{\'i}}1 {ó}{{\'o}}1 {ú}{{\'u}}1
{Á}{{\'A}}1 {É}{{\'E}}1 {Í}{{\'I}}1 {Ó}{{\'O}}1 {Ú}{{\'U}}1
{à}{{\`a}}1 {è}{{\`e}}1 {ì}{{\`i}}1 {ò}{{\`o}}1 {ù}{{\`u}}1
{À}{{\`A}}1 {È}{{\`E}}1 {Ì}{{\`I}}1 {Ò}{{\`O}}1 {Ù}{{\`U}}1
{ä}{{\"a}}1 {ë}{{\"e}}1 {ï}{{\"i}}1 {ö}{{\"o}}1 {ü}{{\"u}}1
{Ä}{{\"A}}1 {Ë}{{\"E}}1 {Ï}{{\"I}}1 {Ö}{{\"O}}1 {Ü}{{\"U}}1
{â}{{\^a}}1 {ê}{{\^e}}1 {î}{{\^i}}1 {ô}{{\^o}}1 {û}{{\^u}}1
{Â}{{\^A}}1 {Ê}{{\^E}}1 {Î}{{\^I}}1 {Ô}{{\^O}}1 {Û}{{\^U}}1
{œ}{{\oe}}1 {Œ}{{\OE}}1 {æ}{{\ae}}1 {Æ}{{\AE}}1 {ß}{{\ss}}1
{ç}{{\c c}}1 {Ç}{{\c C}}1 {ø}{{\o}}1 {å}{{\r a}}1 {Å}{{\r A}}1
{€}{{\EUR}}1 {£}{{\pounds}}1
}

%%
%% Python definition (c) 1998 Michael Weber
%% Additional definitions (2013) Alexis Dimitriadis
%% modified by me (should not have empty lines)
%%
\lstdefinelanguage{iPython}{
morekeywords={access,and,break,class,continue,def,del,elif,else,except,exec,finally,for,from,global,if,import,in,is,lambda,not,or,pass,print,raise,return,try,while},%
%
% Built-ins
morekeywords=[2]{abs,all,any,basestring,bin,bool,bytearray,callable,chr,classmethod,cmp,compile,complex,delattr,dict,dir,divmod,enumerate,eval,execfile,file,filter,float,format,frozenset,getattr,globals,hasattr,hash,help,hex,id,input,int,isinstance,issubclass,iter,len,list,locals,long,map,max,memoryview,min,next,object,oct,open,ord,pow,property,range,raw_input,reduce,reload,repr,reversed,round,set,setattr,slice,sorted,staticmethod,str,sum,super,tuple,type,unichr,unicode,vars,xrange,zip,apply,buffer,coerce,intern},%
%
sensitive=true,%
morecomment=[l]\#,%
morestring=[b]',%
morestring=[b]",%
%
morestring=[s]{'''}{'''},% used for documentation text (multiline strings)
morestring=[s]{"""}{"""},% added by Philipp Matthias Hahn
%
morestring=[s]{r'}{'},% `raw' strings
morestring=[s]{r"}{"},%
morestring=[s]{r'''}{'''},%
morestring=[s]{r"""}{"""},%
morestring=[s]{u'}{'},% unicode strings
morestring=[s]{u"}{"},%
morestring=[s]{u'''}{'''},%
morestring=[s]{u"""}{"""},%
%
% {replace}{replacement}{length of replace}
% *{-}{-}{1} will not replace in comments and so on
literate=
{á}{{\'a}}1 {é}{{\'e}}1 {í}{{\'i}}1 {ó}{{\'o}}1 {ú}{{\'u}}1
{Á}{{\'A}}1 {É}{{\'E}}1 {Í}{{\'I}}1 {Ó}{{\'O}}1 {Ú}{{\'U}}1
{à}{{\`a}}1 {è}{{\`e}}1 {ì}{{\`i}}1 {ò}{{\`o}}1 {ù}{{\`u}}1
{À}{{\`A}}1 {È}{{\`E}}1 {Ì}{{\`I}}1 {Ò}{{\`O}}1 {Ù}{{\`U}}1
{ä}{{\"a}}1 {ë}{{\"e}}1 {ï}{{\"i}}1 {ö}{{\"o}}1 {ü}{{\"u}}1
{Ä}{{\"A}}1 {Ë}{{\"E}}1 {Ï}{{\"I}}1 {Ö}{{\"O}}1 {Ü}{{\"U}}1
{â}{{\^a}}1 {ê}{{\^e}}1 {î}{{\^i}}1 {ô}{{\^o}}1 {û}{{\^u}}1
{Â}{{\^A}}1 {Ê}{{\^E}}1 {Î}{{\^I}}1 {Ô}{{\^O}}1 {Û}{{\^U}}1
{œ}{{\oe}}1 {Œ}{{\OE}}1 {æ}{{\ae}}1 {Æ}{{\AE}}1 {ß}{{\ss}}1
{ç}{{\c c}}1 {Ç}{{\c C}}1 {ø}{{\o}}1 {å}{{\r a}}1 {Å}{{\r A}}1
{€}{{\EUR}}1 {£}{{\pounds}}1
%
{^}{{{\color{ipython_purple}\^{}}}}1
{=}{{{\color{ipython_purple}=}}}1
%
{+}{{{\color{ipython_purple}+}}}1
{*}{{{\color{ipython_purple}$^\ast$}}}1
{/}{{{\color{ipython_purple}/}}}1
%
{+=}{{{+=}}}1
{-=}{{{-=}}}1
{*=}{{{$^\ast$=}}}1
{/=}{{{/=}}}1,
literate=
*{-}{{{\color{ipython_purple}-}}}1
{?}{{{\color{ipython_purple}?}}}1,
%
identifierstyle=\color{black}\ttfamily,
commentstyle=\color{ipython_cyan}\ttfamily,
stringstyle=\color{ipython_red}\ttfamily,
keepspaces=true,
showspaces=false,
showstringspaces=false,
%
rulecolor=\color{ipython_frame},
frame=single,
frameround={t}{t}{t}{t},
framexleftmargin=6mm,
numbers=left,
numberstyle=\tiny\color{halfgray},
%
%
backgroundcolor=\color{ipython_bg},
% extendedchars=true,
basicstyle=\scriptsize,
keywordstyle=\color{ipython_green}\ttfamily,
}

\begin{document}
\begin{lstlisting}[language=iPython]
import tensorflow as tf
import numpy as np
from tensorflow.keras.callbacks import CSVLogger
from tensorflow.keras.preprocessing.image import ImageDataGenerator

# load and preprocess the MNIST data
mnist = tf.keras.datasets.mnist

(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(x_train.shape[0], 28, 28, 1)
x_train = x_train / 255.0
x_test = x_test.reshape(x_test.shape[0], 28, 28, 1)
x_test = x_test / 255.0

y_train = tf.keras.utils.to_categorical(y_train)
y_test = tf.keras.utils.to_categorical(y_test)

# build the convolutional network
model = tf.keras.models.Sequential()
model.add(tf.keras.layers.Conv2D(24,kernel_size=5,padding='same',activation='relu',input_shape=(28,28,1)))
model.add(tf.keras.layers.MaxPool2D())
model.add(tf.keras.layers.Conv2D(64,kernel_size=5,padding='same',activation='relu'))
model.add(tf.keras.layers.MaxPool2D(padding='same'))
model.add(tf.keras.layers.Flatten())
model.add(tf.keras.layers.Dense(256, activation='relu'))
model.add(tf.keras.layers.Dropout(j))  # j: dropout rate of the configuration
model.add(tf.keras.layers.Dense(10, activation='softmax'))
model.compile(optimizer='adam', loss="categorical_crossentropy",
              metrics=["accuracy"])

# random transformations applied by the data generation
datagen = ImageDataGenerator(
    rotation_range = 30,
    zoom_range = 0.15,
    width_shift_range=2,
    height_shift_range=2,
    shear_range = 1)

# log accuracy and loss after each epoch
csv_logger = CSVLogger(<Target File>)

# x_train_, y_train_: training set of the respective configuration
history = model.fit(datagen.flow(x_train_, y_train_, batch_size=50),
                    validation_data=(x_test, y_test), epochs=125,
                    callbacks=[csv_logger],
                    steps_per_epoch = x_train_.shape[0]//50)
\end{lstlisting}
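
The call above trains on batches drawn from the data generator. For the
configurations without data generation, the model would be fitted directly
on the training data instead; a minimal sketch of such a call, reusing the
variables defined above (not taken verbatim from the training scripts):

\begin{lstlisting}[language=iPython]
# configurations without data generation: fit directly on the
# (reduced) training data
history = model.fit(x_train_, y_train_, batch_size=50,
                    validation_data=(x_test, y_test), epochs=125,
                    callbacks=[csv_logger])
\end{lstlisting}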

\begin{lstlisting}[language=iPython]
# draw number_of_samples random samples per class from the images a
# with (integer) labels b
def get_random_sample(a, b, number_of_samples=10):
    x = []
    y = []
    for category_number in range(0,10):
        # get all samples of a category
        train_data_category = a[b==category_number]
        # pick a number of random samples from the category
        train_data_category = train_data_category[np.random.randint(
            train_data_category.shape[0], size=number_of_samples), :]
        x.extend(train_data_category)
        y.append([category_number]*number_of_samples)

    return (np.asarray(x).reshape(-1, 28, 28, 1),
            np.asarray(y).reshape(10*number_of_samples,1))
\end{lstlisting}
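
A minimal sketch of how the reduced training sets with 1, 10 or 100 samples
per class could be drawn with this helper, assuming \verb|x_train| and
\verb|y_train| as loaded above and that the labels are still the integer
class values, i.e.\ the helper is applied before \verb|to_categorical|:

\begin{lstlisting}[language=iPython]
# draw e.g. 10 random samples per class and one-hot encode the labels
x_train_, y_train_ = get_random_sample(x_train, y_train, number_of_samples=10)
y_train_ = tf.keras.utils.to_categorical(y_train_)
\end{lstlisting}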

\end{document}
@ -0,0 +1,5 @@

%%% Local Variables:
%%% mode: latex
%%% TeX-master: "../main"
%%% End:
@ -0,0 +1,5 @@
Robust error measure for supervised neural network learning with outliers

Learning rate decay https://arxiv.org/pdf/1908.01878.pdf

Best mnist https://arxiv.org/abs/1805.01890