added risks chapter
This commit is contained in:
parent
1a830d0832
commit
03707a1e28
11 changed files with 540 additions and 430 deletions
|
|
@ -32,86 +32,93 @@
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {1.2}Regressionsprobleme}{4}{subsection.1.2}\protected@file@percent }
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {1.2}Regressionsprobleme}{4}{subsection.1.2}\protected@file@percent }
|
||||||
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {2}{\ignorespaces Regression\relax }}{4}{figure.caption.3}\protected@file@percent }
|
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {2}{\ignorespaces Regression\relax }}{4}{figure.caption.3}\protected@file@percent }
|
||||||
\newlabel{Regression}{{2}{4}{Regression\relax }{figure.caption.3}{}}
|
\newlabel{Regression}{{2}{4}{Regression\relax }{figure.caption.3}{}}
|
||||||
|
\abx@aux@cite{4}
|
||||||
|
\abx@aux@segm{0}{0}{4}
|
||||||
|
\abx@aux@cite{5}
|
||||||
|
\abx@aux@segm{0}{0}{5}
|
||||||
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {1.3}Gefahren von maschinellem Lernen}{5}{subsection.1.3}\protected@file@percent }
|
||||||
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {1.3.1}Overfitting}{5}{subsubsection.1.3.1}\protected@file@percent }
|
||||||
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {1.3.2}Die Daten}{5}{subsubsection.1.3.2}\protected@file@percent }
|
||||||
|
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {3}{\ignorespaces Overfitting\relax }}{6}{figure.caption.4}\protected@file@percent }
|
||||||
|
\newlabel{Overfitting}{{3}{6}{Overfitting\relax }{figure.caption.4}{}}
|
||||||
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {2}Verschiedene Techniken maschinellen Lernens}{6}{section.2}\protected@file@percent }
|
||||||
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.1}\IeC {\"U}berwachtes Lernen}{6}{subsection.2.1}\protected@file@percent }
|
||||||
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.2}Un\IeC {\"u}berwachtes Lernen}{6}{subsection.2.2}\protected@file@percent }
|
||||||
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.3}Best\IeC {\"a}rkendes Lernen}{6}{subsection.2.3}\protected@file@percent }
|
||||||
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {3}Neuronale Netze}{6}{section.3}\protected@file@percent }
|
||||||
\abx@aux@cite{2}
|
\abx@aux@cite{2}
|
||||||
\abx@aux@segm{0}{0}{2}
|
\abx@aux@segm{0}{0}{2}
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {1.3}Gefahren von maschinellem Lernen}{5}{subsection.1.3}\protected@file@percent }
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {3.1}Maschinelles Lernen und menschliches Lernen}{7}{subsection.3.1}\protected@file@percent }
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {1.3.1}Eignung der Datens\IeC {\"a}tze}{5}{subsubsection.1.3.1}\protected@file@percent }
|
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {4}{\ignorespaces Neuron \newline Quelle: simple.wikipedia.org/wiki/File:Neuron.svg\newline Copyright: CC Attribution-Share Alike von Nutzer Dhp1080,\newline bearbeitet}}{7}{figure.caption.5}\protected@file@percent }
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {1.3.2}Overfitting}{5}{subsubsection.1.3.2}\protected@file@percent }
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {3.2}Der Aufbau eines neuronalen Netzes}{8}{subsection.3.2}\protected@file@percent }
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {1.3.3}Unbewusste Manipulation der Daten}{5}{subsubsection.1.3.3}\protected@file@percent }
|
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {5}{\ignorespaces Ein einfaches neuronales Netz\relax }}{8}{figure.caption.6}\protected@file@percent }
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {2}Verschiedene Techniken maschinellen lernens}{5}{section.2}\protected@file@percent }
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {3.3}Berechnung des Ausgabevektors}{9}{subsection.3.3}\protected@file@percent }
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.1}\IeC {\"U}berwachtes Lernen}{5}{subsection.2.1}\protected@file@percent }
|
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {6}{\ignorespaces Der Plot der Sigmoid Funktion $\sigma (x)=\frac {e^x}{e^x+1}$\relax }}{9}{figure.caption.7}\protected@file@percent }
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.2}Un\IeC {\"u}berwachtes Lernen}{5}{subsection.2.2}\protected@file@percent }
|
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {7}{\ignorespaces Formel zur Berechnung eines Ausgabevektors aus einem Eingabevektor durch ein Layer Neuronen. \relax }}{10}{figure.caption.8}\protected@file@percent }
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.3}Best\IeC {\"a}rkendes Lernen}{5}{subsection.2.3}\protected@file@percent }
|
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {3}Neuronale Netze}{5}{section.3}\protected@file@percent }
|
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {3.1}Maschinelles Lernen und menschliches Lernen}{5}{subsection.3.1}\protected@file@percent }
|
|
||||||
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {3}{\ignorespaces Neuron \newline Quelle: simple.wikipedia.org/wiki/File:Neuron.svg\newline Copyright: CC Attribution-Share Alike von Nutzer Dhp1080,\newline bearbeitet}}{6}{figure.caption.4}\protected@file@percent }
|
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {3.2}Der Aufbau eines neuronalen Netzes}{6}{subsection.3.2}\protected@file@percent }
|
|
||||||
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {4}{\ignorespaces Ein einfaches neuronales Netz\relax }}{7}{figure.caption.5}\protected@file@percent }
|
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {3.3}Berechnung des Ausgabevektors}{7}{subsection.3.3}\protected@file@percent }
|
|
||||||
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {5}{\ignorespaces Der Plot der Sigmoid Funktion $\sigma (x)=\frac {e^x}{e^x+1}$\relax }}{8}{figure.caption.6}\protected@file@percent }
|
|
||||||
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {6}{\ignorespaces Formel zur Berechnung eines Ausgabevektors aus einem Eingabevektor durch ein Layer Neuronen. \relax }}{9}{figure.caption.7}\protected@file@percent }
|
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {3.4}Der Lernprozess}{9}{subsection.3.4}\protected@file@percent }
|
|
||||||
\abx@aux@cite{3}
|
\abx@aux@cite{3}
|
||||||
\abx@aux@segm{0}{0}{3}
|
\abx@aux@segm{0}{0}{3}
|
||||||
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {3.4}Der Lernprozess}{11}{subsection.3.4}\protected@file@percent }
|
||||||
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {3.5}Fehlerfunktionen}{11}{subsection.3.5}\protected@file@percent }
|
||||||
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.5.1}MSE -- Durchschnittlicher quadratischer Fehler}{11}{subsubsection.3.5.1}\protected@file@percent }
|
||||||
|
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {8}{\ignorespaces Die Gleichung f\IeC {\"u}r den durchschnittlichen quadratischen Fehler\relax }}{11}{figure.caption.9}\protected@file@percent }
|
||||||
|
\newlabel{MSE_equation}{{8}{11}{Die Gleichung für den durchschnittlichen quadratischen Fehler\relax }{figure.caption.9}{}}
|
||||||
\abx@aux@segm{0}{0}{3}
|
\abx@aux@segm{0}{0}{3}
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {3.5}Fehlerfunktionen}{10}{subsection.3.5}\protected@file@percent }
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.5.2}MAE -- Durchschnitztlicher absoluter Fehler}{12}{subsubsection.3.5.2}\protected@file@percent }
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.5.1}MSE -- Durchschnittlicher quadratischer Fehler}{10}{subsubsection.3.5.1}\protected@file@percent }
|
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {9}{\ignorespaces Die Gleichung f\IeC {\"u}r den durchschnittlichen absoluten Fehler\relax }}{12}{figure.caption.10}\protected@file@percent }
|
||||||
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {7}{\ignorespaces Die Gleichung f\IeC {\"u}r den durchschnittlichen quadratischen Fehler\relax }}{10}{figure.caption.8}\protected@file@percent }
|
\newlabel{MAE_equation}{{9}{12}{Die Gleichung für den durchschnittlichen absoluten Fehler\relax }{figure.caption.10}{}}
|
||||||
\newlabel{MSE_equation}{{7}{10}{Die Gleichung für den durchschnittlichen quadratischen Fehler\relax }{figure.caption.8}{}}
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.5.3}Kreuzentropiefehler}{12}{subsubsection.3.5.3}\protected@file@percent }
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.5.2}MAE -- Durchschnitztlicher absoluter Fehler}{10}{subsubsection.3.5.2}\protected@file@percent }
|
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {10}{\ignorespaces Der Graph der Kreuzentropie Fehlerfunktion wenn das tats\IeC {\"a}chliche Label 1 ist\relax }}{13}{figure.caption.11}\protected@file@percent }
|
||||||
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {8}{\ignorespaces Die Gleichung f\IeC {\"u}r den durchschnittlichen absoluten Fehler\relax }}{10}{figure.caption.9}\protected@file@percent }
|
\newlabel{CEL_Graph}{{10}{13}{Der Graph der Kreuzentropie Fehlerfunktion wenn das tatsächliche Label 1 ist\relax }{figure.caption.11}{}}
|
||||||
\newlabel{MAE_equation}{{8}{10}{Die Gleichung für den durchschnittlichen absoluten Fehler\relax }{figure.caption.9}{}}
|
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {11}{\ignorespaces Die Gleichung f\IeC {\"u}r den Kreuzentropiefehler\relax }}{13}{figure.caption.12}\protected@file@percent }
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.5.3}Kreuzentropiefehler}{11}{subsubsection.3.5.3}\protected@file@percent }
|
\newlabel{CEL_Function}{{11}{13}{Die Gleichung für den Kreuzentropiefehler\relax }{figure.caption.12}{}}
|
||||||
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {9}{\ignorespaces Der Graph der Kreuzentropie Fehlerfunktion wenn das tats\IeC {\"a}chliche Label 1 ist\relax }}{11}{figure.caption.10}\protected@file@percent }
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {3.6}Gradientenverfahren und Backpropagation}{13}{subsection.3.6}\protected@file@percent }
|
||||||
\newlabel{CEL_Graph}{{9}{11}{Der Graph der Kreuzentropie Fehlerfunktion wenn das tatsächliche Label 1 ist\relax }{figure.caption.10}{}}
|
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {12}{\ignorespaces Die Gleichung f\IeC {\"u}r den durchschnittlichen absoluten Fehler\relax }}{14}{figure.caption.13}\protected@file@percent }
|
||||||
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {10}{\ignorespaces Die Gleichung f\IeC {\"u}r den Kreuzentropiefehler\relax }}{12}{figure.caption.11}\protected@file@percent }
|
\newlabel{CEL_Finction_cummulative}{{12}{14}{Die Gleichung für den durchschnittlichen absoluten Fehler\relax }{figure.caption.13}{}}
|
||||||
\newlabel{CEL_Function}{{10}{12}{Die Gleichung für den Kreuzentropiefehler\relax }{figure.caption.11}{}}
|
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {13}{\ignorespaces Die Gleichung f\IeC {\"u}r den Gradienten der Fehlerfunktion\relax }}{14}{figure.caption.14}\protected@file@percent }
|
||||||
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {11}{\ignorespaces Die Gleichung f\IeC {\"u}r den durchschnittlichen absoluten Fehler\relax }}{12}{figure.caption.12}\protected@file@percent }
|
\newlabel{Gradient_Function}{{13}{14}{Die Gleichung für den Gradienten der Fehlerfunktion\relax }{figure.caption.14}{}}
|
||||||
\newlabel{CEL_Finction_cummulative}{{11}{12}{Die Gleichung für den durchschnittlichen absoluten Fehler\relax }{figure.caption.12}{}}
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.6.1}Lernrate}{14}{subsubsection.3.6.1}\protected@file@percent }
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {3.6}Gradientenverfahren und Backpropagation}{12}{subsection.3.6}\protected@file@percent }
|
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {14}{\ignorespaces Die Gleichung f\IeC {\"u}r die Anpassung eines einzelnen Parameters\relax }}{14}{figure.caption.15}\protected@file@percent }
|
||||||
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {12}{\ignorespaces Die Gleichung f\IeC {\"u}r den Gradienten der Fehlerfunktion\relax }}{12}{figure.caption.13}\protected@file@percent }
|
\newlabel{Learning_Rate_Function}{{14}{14}{Die Gleichung für die Anpassung eines einzelnen Parameters\relax }{figure.caption.15}{}}
|
||||||
\newlabel{Gradient_Function}{{12}{12}{Die Gleichung für den Gradienten der Fehlerfunktion\relax }{figure.caption.13}{}}
|
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.6.1}Lernrate}{13}{subsubsection.3.6.1}\protected@file@percent }
|
|
||||||
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {13}{\ignorespaces Die Gleichung f\IeC {\"u}r die Anpassung eines einzelnen Parameters\relax }}{13}{figure.caption.14}\protected@file@percent }
|
|
||||||
\newlabel{Learning_Rate_Function}{{13}{13}{Die Gleichung für die Anpassung eines einzelnen Parameters\relax }{figure.caption.14}{}}
|
|
||||||
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {14}{\ignorespaces $\eta $ ist hier zu gro\IeC {\ss } gew\IeC {\"a}hlt\relax }}{13}{figure.caption.15}\protected@file@percent }
|
|
||||||
\newlabel{Learning_Rate_Graphic}{{14}{13}{$\eta $ ist hier zu groß gewählt\relax }{figure.caption.15}{}}
|
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {3.7}Verschiedene Layerarten}{14}{subsection.3.7}\protected@file@percent }
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {3.7}Verschiedene Layerarten}{14}{subsection.3.7}\protected@file@percent }
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.7.1}Convolutional Layers}{14}{subsubsection.3.7.1}\protected@file@percent }
|
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {15}{\ignorespaces $\eta $ ist hier zu gro\IeC {\ss } gew\IeC {\"a}hlt\relax }}{15}{figure.caption.16}\protected@file@percent }
|
||||||
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {15}{\ignorespaces Eine Verbildlichung der Vorg\IeC {\"a}nge in einem convolutional Layer\newline Aus einer Animation von\newline https://github.com/vdumoulin/conv\_arithmetic/blob/master/README.md Vincent Dumoulin, Francesco Visin - A guide to convolution arithmetic for deep learning (BibTeX)}}{14}{figure.caption.16}\protected@file@percent }
|
\newlabel{Learning_Rate_Graphic}{{15}{15}{$\eta $ ist hier zu groß gewählt\relax }{figure.caption.16}{}}
|
||||||
\newlabel{Convolution_illustration}{{15}{14}{Eine Verbildlichung der Vorgänge in einem convolutional Layer\newline Aus einer Animation von\newline https://github.com/vdumoulin/conv\_arithmetic/blob/master/README.md\\ Vincent Dumoulin, Francesco Visin - A guide to convolution arithmetic for deep learning (BibTeX)}{figure.caption.16}{}}
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.7.1}Convolutional Layers}{15}{subsubsection.3.7.1}\protected@file@percent }
|
||||||
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {16}{\ignorespaces Erkennt obere horizontale Kanten\relax }}{15}{figure.caption.17}\protected@file@percent }
|
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {16}{\ignorespaces Eine Verbildlichung der Vorg\IeC {\"a}nge in einem convolutional Layer\newline Aus einer Animation von\newline https://github.com/vdumoulin/conv\_arithmetic/blob/master/README.md Vincent Dumoulin, Francesco Visin - A guide to convolution arithmetic for deep learning (BibTeX)}}{16}{figure.caption.17}\protected@file@percent }
|
||||||
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {17}{\ignorespaces Erkennt linke vertikale Kanten\relax }}{15}{figure.caption.17}\protected@file@percent }
|
\newlabel{Convolution_illustration}{{16}{16}{Eine Verbildlichung der Vorgänge in einem convolutional Layer\newline Aus einer Animation von\newline https://github.com/vdumoulin/conv\_arithmetic/blob/master/README.md\\ Vincent Dumoulin, Francesco Visin - A guide to convolution arithmetic for deep learning (BibTeX)}{figure.caption.17}{}}
|
||||||
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {18}{\ignorespaces Erkennt untere horizontale Kanten\relax }}{15}{figure.caption.17}\protected@file@percent }
|
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {17}{\ignorespaces Erkennt obere horizontale Kanten\relax }}{17}{figure.caption.18}\protected@file@percent }
|
||||||
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {19}{\ignorespaces Erkennt rechte vertikale Kanten\relax }}{15}{figure.caption.17}\protected@file@percent }
|
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {18}{\ignorespaces Erkennt linke vertikale Kanten\relax }}{17}{figure.caption.18}\protected@file@percent }
|
||||||
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {20}{\ignorespaces Das Beispielbild aus dem Mnist Datensatz\relax }}{15}{figure.caption.18}\protected@file@percent }
|
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {19}{\ignorespaces Erkennt untere horizontale Kanten\relax }}{17}{figure.caption.18}\protected@file@percent }
|
||||||
\newlabel{Filter_Example_raw}{{20}{15}{Das Beispielbild aus dem Mnist Datensatz\relax }{figure.caption.18}{}}
|
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {20}{\ignorespaces Erkennt rechte vertikale Kanten\relax }}{17}{figure.caption.18}\protected@file@percent }
|
||||||
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {21}{\ignorespaces Die jeweils oben stehenden Filter wurden auf das Beispielbild angewandt.\relax }}{15}{figure.caption.19}\protected@file@percent }
|
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {21}{\ignorespaces Das Beispielbild aus dem Mnist Datensatz\relax }}{17}{figure.caption.19}\protected@file@percent }
|
||||||
\newlabel{Filter_output dargestellt}{{21}{15}{Die jeweils oben stehenden Filter wurden auf das Beispielbild angewandt.\relax }{figure.caption.19}{}}
|
\newlabel{Filter_Example_raw}{{21}{17}{Das Beispielbild aus dem Mnist Datensatz\relax }{figure.caption.19}{}}
|
||||||
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {22}{\ignorespaces Beispiele f\IeC {\"u}r low- mid- und high-level Features in Convolutional Neural Nets\newline Quelle: https://tvirdi.github.io/2017-10-29/cnn/}}{16}{figure.caption.20}\protected@file@percent }
|
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {22}{\ignorespaces Die jeweils oben stehenden Filter wurden auf das Beispielbild angewandt.\relax }}{17}{figure.caption.20}\protected@file@percent }
|
||||||
\newlabel{HL_features_conv}{{22}{16}{Beispiele für low- mid- und high-level Features in Convolutional Neural Nets\newline Quelle: https://tvirdi.github.io/2017-10-29/cnn/}{figure.caption.20}{}}
|
\newlabel{Filter_output dargestellt}{{22}{17}{Die jeweils oben stehenden Filter wurden auf das Beispielbild angewandt.\relax }{figure.caption.20}{}}
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.7.2}Pooling Layers}{16}{subsubsection.3.7.2}\protected@file@percent }
|
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {23}{\ignorespaces Beispiele f\IeC {\"u}r low- mid- und high-level Features in Convolutional Neural Nets\newline Quelle: https://tvirdi.github.io/2017-10-29/cnn/}}{18}{figure.caption.21}\protected@file@percent }
|
||||||
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {23}{\ignorespaces Max Pooling mit $2\times 2$ gro\IeC {\ss }en Submatritzen\newline Quelle: https://computersciencewiki.org/index.php/Max-pooling\_/\_Pooling CC BY NC SA Lizenz}}{17}{figure.caption.21}\protected@file@percent }
|
\newlabel{HL_features_conv}{{23}{18}{Beispiele für low- mid- und high-level Features in Convolutional Neural Nets\newline Quelle: https://tvirdi.github.io/2017-10-29/cnn/}{figure.caption.21}{}}
|
||||||
\newlabel{Maxpool}{{23}{17}{Max Pooling mit $2\times 2$ großen Submatritzen\newline Quelle: https://computersciencewiki.org/index.php/Max-pooling\_/\_Pooling\\ CC BY NC SA Lizenz}{figure.caption.21}{}}
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.7.2}Pooling Layers}{18}{subsubsection.3.7.2}\protected@file@percent }
|
||||||
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {24}{\ignorespaces Average Pooling mit $2\times 2$ gro\IeC {\ss }en Submatritzen\newline Aus: Dominguez-Morales, Juan Pedro. (2018). Neuromorphic audio processing through real-time embedded spiking neural networks. Abbildung 33}}{17}{figure.caption.22}\protected@file@percent }
|
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {24}{\ignorespaces Max Pooling mit $2\times 2$ gro\IeC {\ss }en Submatritzen\newline Quelle: https://computersciencewiki.org/index.php/Max-pooling\_/\_Pooling CC BY NC SA Lizenz}}{19}{figure.caption.22}\protected@file@percent }
|
||||||
\newlabel{AvgPool}{{24}{17}{Average Pooling mit $2\times 2$ großen Submatritzen\newline Aus: Dominguez-Morales, Juan Pedro. (2018). Neuromorphic audio processing through real-time embedded spiking neural networks. Abbildung 33}{figure.caption.22}{}}
|
\newlabel{Maxpool}{{24}{19}{Max Pooling mit $2\times 2$ großen Submatritzen\newline Quelle: https://computersciencewiki.org/index.php/Max-pooling\_/\_Pooling\\ CC BY NC SA Lizenz}{figure.caption.22}{}}
|
||||||
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {25}{\ignorespaces Gegen\IeC {\"u}berstellung von Max und Average Pooling\relax }}{18}{figure.caption.23}\protected@file@percent }
|
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {25}{\ignorespaces Average Pooling mit $2\times 2$ gro\IeC {\ss }en Submatritzen\newline Aus: Dominguez-Morales, Juan Pedro. (2018). Neuromorphic audio processing through real-time embedded spiking neural networks. Abbildung 33}}{19}{figure.caption.23}\protected@file@percent }
|
||||||
\newlabel{Pooling_Mnist}{{25}{18}{Gegenüberstellung von Max und Average Pooling\relax }{figure.caption.23}{}}
|
\newlabel{AvgPool}{{25}{19}{Average Pooling mit $2\times 2$ großen Submatritzen\newline Aus: Dominguez-Morales, Juan Pedro. (2018). Neuromorphic audio processing through real-time embedded spiking neural networks. Abbildung 33}{figure.caption.23}{}}
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {4}PyTorch}{19}{section.4}\protected@file@percent }
|
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {26}{\ignorespaces Gegen\IeC {\"u}berstellung von Max und Average Pooling\relax }}{20}{figure.caption.24}\protected@file@percent }
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {4.1}Datenvorbereitung}{19}{subsection.4.1}\protected@file@percent }
|
\newlabel{Pooling_Mnist}{{26}{20}{Gegenüberstellung von Max und Average Pooling\relax }{figure.caption.24}{}}
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {4.2}Definieren des Netzes}{19}{subsection.4.2}\protected@file@percent }
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {4}PyTorch}{21}{section.4}\protected@file@percent }
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {4.3}Trainieren des Netzes}{19}{subsection.4.3}\protected@file@percent }
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {4.1}Datenvorbereitung}{21}{subsection.4.1}\protected@file@percent }
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {5}Fallbeispiel I:\newline Ein Klassifizierungsnetzwerk f\IeC {\"u}r handgeschriebene Ziffern}{19}{section.5}\protected@file@percent }
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {4.2}Definieren des Netzes}{21}{subsection.4.2}\protected@file@percent }
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {5.1}Aufgabe}{19}{subsection.5.1}\protected@file@percent }
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {4.3}Trainieren des Netzes}{21}{subsection.4.3}\protected@file@percent }
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {5.2}Der MNIST Datensatz}{19}{subsection.5.2}\protected@file@percent }
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {5}Fallbeispiel I:\newline Ein Klassifizierungsnetzwerk f\IeC {\"u}r handgeschriebene Ziffern}{21}{section.5}\protected@file@percent }
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {5.3}Fragmentbasierte Erkennung}{19}{subsection.5.3}\protected@file@percent }
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {5.1}Aufgabe}{21}{subsection.5.1}\protected@file@percent }
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {5.4}Ergebnis}{19}{subsection.5.4}\protected@file@percent }
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {5.2}Der MNIST Datensatz}{21}{subsection.5.2}\protected@file@percent }
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {6}Fallbeispiel II:\newline Eine selbsttrainierende KI f\IeC {\"u}r Tic-Tac-Toe}{19}{section.6}\protected@file@percent }
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {5.3}Fragmentbasierte Erkennung}{21}{subsection.5.3}\protected@file@percent }
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {6.1}Das Prinzip}{19}{subsection.6.1}\protected@file@percent }
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {5.4}Ergebnis}{21}{subsection.5.4}\protected@file@percent }
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {6.2}Chance-Tree Optimierung}{19}{subsection.6.2}\protected@file@percent }
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {6}Fallbeispiel II:\newline Eine selbsttrainierende KI f\IeC {\"u}r Tic-Tac-Toe}{21}{section.6}\protected@file@percent }
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {6.3}L\IeC {\"o}sung mittels eines neuronalen Netzes}{19}{subsection.6.3}\protected@file@percent }
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {6.1}Das Prinzip}{21}{subsection.6.1}\protected@file@percent }
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {6.4}Vergleich}{19}{subsection.6.4}\protected@file@percent }
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {6.2}Chance-Tree Optimierung}{21}{subsection.6.2}\protected@file@percent }
|
||||||
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {7}Schlusswort}{19}{section.7}\protected@file@percent }
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {6.3}L\IeC {\"o}sung mittels eines neuronalen Netzes}{21}{subsection.6.3}\protected@file@percent }
|
||||||
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {6.4}Vergleich}{21}{subsection.6.4}\protected@file@percent }
|
||||||
|
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {7}Schlusswort}{21}{section.7}\protected@file@percent }
|
||||||
\bibcite{1}{1}
|
\bibcite{1}{1}
|
||||||
\bibcite{2}{2}
|
\bibcite{2}{2}
|
||||||
\bibcite{3}{3}
|
\bibcite{3}{3}
|
||||||
|
\bibcite{4}{4}
|
||||||
|
\bibcite{5}{5}
|
||||||
|
|
|
||||||
|
|
@ -1995,9 +1995,11 @@
|
||||||
</bcf:bibdata>
|
</bcf:bibdata>
|
||||||
<bcf:section number="0">
|
<bcf:section number="0">
|
||||||
<bcf:citekey order="1">1</bcf:citekey>
|
<bcf:citekey order="1">1</bcf:citekey>
|
||||||
<bcf:citekey order="2">2</bcf:citekey>
|
<bcf:citekey order="2">4</bcf:citekey>
|
||||||
<bcf:citekey order="3">3</bcf:citekey>
|
<bcf:citekey order="3">5</bcf:citekey>
|
||||||
<bcf:citekey order="4">3</bcf:citekey>
|
<bcf:citekey order="4">2</bcf:citekey>
|
||||||
|
<bcf:citekey order="5">3</bcf:citekey>
|
||||||
|
<bcf:citekey order="6">3</bcf:citekey>
|
||||||
</bcf:section>
|
</bcf:section>
|
||||||
<!-- SORTING TEMPLATES -->
|
<!-- SORTING TEMPLATES -->
|
||||||
<bcf:sortingtemplate name="nty">
|
<bcf:sortingtemplate name="nty">
|
||||||
|
|
|
||||||
|
|
@ -5,48 +5,50 @@
|
||||||
\defcounter {refsection}{0}\relax
|
\defcounter {refsection}{0}\relax
|
||||||
\contentsline {figure}{\numberline {2}{\ignorespaces Regression\relax }}{4}{figure.caption.3}%
|
\contentsline {figure}{\numberline {2}{\ignorespaces Regression\relax }}{4}{figure.caption.3}%
|
||||||
\defcounter {refsection}{0}\relax
|
\defcounter {refsection}{0}\relax
|
||||||
\contentsline {figure}{\numberline {3}{\ignorespaces Neuron \newline Quelle: simple.wikipedia.org/wiki/File:Neuron.svg\newline Copyright: CC Attribution-Share Alike von Nutzer Dhp1080,\newline bearbeitet}}{6}{figure.caption.4}%
|
\contentsline {figure}{\numberline {3}{\ignorespaces Overfitting\relax }}{6}{figure.caption.4}%
|
||||||
\defcounter {refsection}{0}\relax
|
\defcounter {refsection}{0}\relax
|
||||||
\contentsline {figure}{\numberline {4}{\ignorespaces Ein einfaches neuronales Netz\relax }}{7}{figure.caption.5}%
|
\contentsline {figure}{\numberline {4}{\ignorespaces Neuron \newline Quelle: simple.wikipedia.org/wiki/File:Neuron.svg\newline Copyright: CC Attribution-Share Alike von Nutzer Dhp1080,\newline bearbeitet}}{7}{figure.caption.5}%
|
||||||
\defcounter {refsection}{0}\relax
|
\defcounter {refsection}{0}\relax
|
||||||
\contentsline {figure}{\numberline {5}{\ignorespaces Der Plot der Sigmoid Funktion $\sigma (x)=\frac {e^x}{e^x+1}$\relax }}{8}{figure.caption.6}%
|
\contentsline {figure}{\numberline {5}{\ignorespaces Ein einfaches neuronales Netz\relax }}{8}{figure.caption.6}%
|
||||||
\defcounter {refsection}{0}\relax
|
\defcounter {refsection}{0}\relax
|
||||||
\contentsline {figure}{\numberline {6}{\ignorespaces Formel zur Berechnung eines Ausgabevektors aus einem Eingabevektor durch ein Layer Neuronen. \relax }}{9}{figure.caption.7}%
|
\contentsline {figure}{\numberline {6}{\ignorespaces Der Plot der Sigmoid Funktion $\sigma (x)=\frac {e^x}{e^x+1}$\relax }}{9}{figure.caption.7}%
|
||||||
\defcounter {refsection}{0}\relax
|
\defcounter {refsection}{0}\relax
|
||||||
\contentsline {figure}{\numberline {7}{\ignorespaces Die Gleichung f\IeC {\"u}r den durchschnittlichen quadratischen Fehler\relax }}{10}{figure.caption.8}%
|
\contentsline {figure}{\numberline {7}{\ignorespaces Formel zur Berechnung eines Ausgabevektors aus einem Eingabevektor durch ein Layer Neuronen. \relax }}{10}{figure.caption.8}%
|
||||||
\defcounter {refsection}{0}\relax
|
\defcounter {refsection}{0}\relax
|
||||||
\contentsline {figure}{\numberline {8}{\ignorespaces Die Gleichung f\IeC {\"u}r den durchschnittlichen absoluten Fehler\relax }}{10}{figure.caption.9}%
|
\contentsline {figure}{\numberline {8}{\ignorespaces Die Gleichung f\IeC {\"u}r den durchschnittlichen quadratischen Fehler\relax }}{11}{figure.caption.9}%
|
||||||
\defcounter {refsection}{0}\relax
|
\defcounter {refsection}{0}\relax
|
||||||
\contentsline {figure}{\numberline {9}{\ignorespaces Der Graph der Kreuzentropie Fehlerfunktion wenn das tats\IeC {\"a}chliche Label 1 ist\relax }}{11}{figure.caption.10}%
|
\contentsline {figure}{\numberline {9}{\ignorespaces Die Gleichung f\IeC {\"u}r den durchschnittlichen absoluten Fehler\relax }}{12}{figure.caption.10}%
|
||||||
\defcounter {refsection}{0}\relax
|
\defcounter {refsection}{0}\relax
|
||||||
\contentsline {figure}{\numberline {10}{\ignorespaces Die Gleichung f\IeC {\"u}r den Kreuzentropiefehler\relax }}{12}{figure.caption.11}%
|
\contentsline {figure}{\numberline {10}{\ignorespaces Der Graph der Kreuzentropie Fehlerfunktion wenn das tats\IeC {\"a}chliche Label 1 ist\relax }}{13}{figure.caption.11}%
|
||||||
\defcounter {refsection}{0}\relax
|
\defcounter {refsection}{0}\relax
|
||||||
\contentsline {figure}{\numberline {11}{\ignorespaces Die Gleichung f\IeC {\"u}r den durchschnittlichen absoluten Fehler\relax }}{12}{figure.caption.12}%
|
\contentsline {figure}{\numberline {11}{\ignorespaces Die Gleichung f\IeC {\"u}r den Kreuzentropiefehler\relax }}{13}{figure.caption.12}%
|
||||||
\defcounter {refsection}{0}\relax
|
\defcounter {refsection}{0}\relax
|
||||||
\contentsline {figure}{\numberline {12}{\ignorespaces Die Gleichung f\IeC {\"u}r den Gradienten der Fehlerfunktion\relax }}{12}{figure.caption.13}%
|
\contentsline {figure}{\numberline {12}{\ignorespaces Die Gleichung f\IeC {\"u}r den durchschnittlichen absoluten Fehler\relax }}{14}{figure.caption.13}%
|
||||||
\defcounter {refsection}{0}\relax
|
\defcounter {refsection}{0}\relax
|
||||||
\contentsline {figure}{\numberline {13}{\ignorespaces Die Gleichung f\IeC {\"u}r die Anpassung eines einzelnen Parameters\relax }}{13}{figure.caption.14}%
|
\contentsline {figure}{\numberline {13}{\ignorespaces Die Gleichung f\IeC {\"u}r den Gradienten der Fehlerfunktion\relax }}{14}{figure.caption.14}%
|
||||||
\defcounter {refsection}{0}\relax
|
\defcounter {refsection}{0}\relax
|
||||||
\contentsline {figure}{\numberline {14}{\ignorespaces $\eta $ ist hier zu gro\IeC {\ss } gew\IeC {\"a}hlt\relax }}{13}{figure.caption.15}%
|
\contentsline {figure}{\numberline {14}{\ignorespaces Die Gleichung f\IeC {\"u}r die Anpassung eines einzelnen Parameters\relax }}{14}{figure.caption.15}%
|
||||||
\defcounter {refsection}{0}\relax
|
\defcounter {refsection}{0}\relax
|
||||||
\contentsline {figure}{\numberline {15}{\ignorespaces Eine Verbildlichung der Vorg\IeC {\"a}nge in einem convolutional Layer\newline Aus einer Animation von\newline https://github.com/vdumoulin/conv\_arithmetic/blob/master/README.md Vincent Dumoulin, Francesco Visin - A guide to convolution arithmetic for deep learning (BibTeX)}}{14}{figure.caption.16}%
|
\contentsline {figure}{\numberline {15}{\ignorespaces $\eta $ ist hier zu gro\IeC {\ss } gew\IeC {\"a}hlt\relax }}{15}{figure.caption.16}%
|
||||||
\defcounter {refsection}{0}\relax
|
\defcounter {refsection}{0}\relax
|
||||||
\contentsline {figure}{\numberline {16}{\ignorespaces Erkennt obere horizontale Kanten\relax }}{15}{figure.caption.17}%
|
\contentsline {figure}{\numberline {16}{\ignorespaces Eine Verbildlichung der Vorg\IeC {\"a}nge in einem convolutional Layer\newline Aus einer Animation von\newline https://github.com/vdumoulin/conv\_arithmetic/blob/master/README.md Vincent Dumoulin, Francesco Visin - A guide to convolution arithmetic for deep learning (BibTeX)}}{16}{figure.caption.17}%
|
||||||
\defcounter {refsection}{0}\relax
|
\defcounter {refsection}{0}\relax
|
||||||
\contentsline {figure}{\numberline {17}{\ignorespaces Erkennt linke vertikale Kanten\relax }}{15}{figure.caption.17}%
|
\contentsline {figure}{\numberline {17}{\ignorespaces Erkennt obere horizontale Kanten\relax }}{17}{figure.caption.18}%
|
||||||
\defcounter {refsection}{0}\relax
|
\defcounter {refsection}{0}\relax
|
||||||
\contentsline {figure}{\numberline {18}{\ignorespaces Erkennt untere horizontale Kanten\relax }}{15}{figure.caption.17}%
|
\contentsline {figure}{\numberline {18}{\ignorespaces Erkennt linke vertikale Kanten\relax }}{17}{figure.caption.18}%
|
||||||
\defcounter {refsection}{0}\relax
|
\defcounter {refsection}{0}\relax
|
||||||
\contentsline {figure}{\numberline {19}{\ignorespaces Erkennt rechte vertikale Kanten\relax }}{15}{figure.caption.17}%
|
\contentsline {figure}{\numberline {19}{\ignorespaces Erkennt untere horizontale Kanten\relax }}{17}{figure.caption.18}%
|
||||||
\defcounter {refsection}{0}\relax
|
\defcounter {refsection}{0}\relax
|
||||||
\contentsline {figure}{\numberline {20}{\ignorespaces Das Beispielbild aus dem Mnist Datensatz\relax }}{15}{figure.caption.18}%
|
\contentsline {figure}{\numberline {20}{\ignorespaces Erkennt rechte vertikale Kanten\relax }}{17}{figure.caption.18}%
|
||||||
\defcounter {refsection}{0}\relax
|
\defcounter {refsection}{0}\relax
|
||||||
\contentsline {figure}{\numberline {21}{\ignorespaces Die jeweils oben stehenden Filter wurden auf das Beispielbild angewandt.\relax }}{15}{figure.caption.19}%
|
\contentsline {figure}{\numberline {21}{\ignorespaces Das Beispielbild aus dem Mnist Datensatz\relax }}{17}{figure.caption.19}%
|
||||||
\defcounter {refsection}{0}\relax
|
\defcounter {refsection}{0}\relax
|
||||||
\contentsline {figure}{\numberline {22}{\ignorespaces Beispiele f\IeC {\"u}r low- mid- und high-level Features in Convolutional Neural Nets\newline Quelle: https://tvirdi.github.io/2017-10-29/cnn/}}{16}{figure.caption.20}%
|
\contentsline {figure}{\numberline {22}{\ignorespaces Die jeweils oben stehenden Filter wurden auf das Beispielbild angewandt.\relax }}{17}{figure.caption.20}%
|
||||||
\defcounter {refsection}{0}\relax
|
\defcounter {refsection}{0}\relax
|
||||||
\contentsline {figure}{\numberline {23}{\ignorespaces Max Pooling mit $2\times 2$ gro\IeC {\ss }en Submatritzen\newline Quelle: https://computersciencewiki.org/index.php/Max-pooling\_/\_Pooling CC BY NC SA Lizenz}}{17}{figure.caption.21}%
|
\contentsline {figure}{\numberline {23}{\ignorespaces Beispiele f\IeC {\"u}r low- mid- und high-level Features in Convolutional Neural Nets\newline Quelle: https://tvirdi.github.io/2017-10-29/cnn/}}{18}{figure.caption.21}%
|
||||||
\defcounter {refsection}{0}\relax
|
\defcounter {refsection}{0}\relax
|
||||||
\contentsline {figure}{\numberline {24}{\ignorespaces Average Pooling mit $2\times 2$ gro\IeC {\ss }en Submatritzen\newline Aus: Dominguez-Morales, Juan Pedro. (2018). Neuromorphic audio processing through real-time embedded spiking neural networks. Abbildung 33}}{17}{figure.caption.22}%
|
\contentsline {figure}{\numberline {24}{\ignorespaces Max Pooling mit $2\times 2$ gro\IeC {\ss }en Submatritzen\newline Quelle: https://computersciencewiki.org/index.php/Max-pooling\_/\_Pooling CC BY NC SA Lizenz}}{19}{figure.caption.22}%
|
||||||
\defcounter {refsection}{0}\relax
|
\defcounter {refsection}{0}\relax
|
||||||
\contentsline {figure}{\numberline {25}{\ignorespaces Gegen\IeC {\"u}berstellung von Max und Average Pooling\relax }}{18}{figure.caption.23}%
|
\contentsline {figure}{\numberline {25}{\ignorespaces Average Pooling mit $2\times 2$ gro\IeC {\ss }en Submatritzen\newline Aus: Dominguez-Morales, Juan Pedro. (2018). Neuromorphic audio processing through real-time embedded spiking neural networks. Abbildung 33}}{19}{figure.caption.23}%
|
||||||
|
\defcounter {refsection}{0}\relax
|
||||||
|
\contentsline {figure}{\numberline {26}{\ignorespaces Gegen\IeC {\"u}berstellung von Max und Average Pooling\relax }}{20}{figure.caption.24}%
|
||||||
|
|
|
||||||
File diff suppressed because it is too large
@ -2,39 +2,38 @@
|
||||||
\BOOKMARK [2][-]{subsection.1.1}{Klassifizierungsprobleme}{section.1}% 2
|
\BOOKMARK [2][-]{subsection.1.1}{Klassifizierungsprobleme}{section.1}% 2
|
||||||
\BOOKMARK [2][-]{subsection.1.2}{Regressionsprobleme}{section.1}% 3
|
\BOOKMARK [2][-]{subsection.1.2}{Regressionsprobleme}{section.1}% 3
|
||||||
\BOOKMARK [2][-]{subsection.1.3}{Gefahren von maschinellem Lernen}{section.1}% 4
|
\BOOKMARK [2][-]{subsection.1.3}{Gefahren von maschinellem Lernen}{section.1}% 4
|
||||||
\BOOKMARK [3][-]{subsubsection.1.3.1}{Eignung der Datens\344tze}{subsection.1.3}% 5
|
\BOOKMARK [3][-]{subsubsection.1.3.1}{Overfitting}{subsection.1.3}% 5
|
||||||
\BOOKMARK [3][-]{subsubsection.1.3.2}{Overfitting}{subsection.1.3}% 6
|
\BOOKMARK [3][-]{subsubsection.1.3.2}{Die Daten}{subsection.1.3}% 6
|
||||||
\BOOKMARK [3][-]{subsubsection.1.3.3}{Unbewusste Manipulation der Daten}{subsection.1.3}% 7
|
\BOOKMARK [1][-]{section.2}{Verschiedene Techniken maschinellen Lernens}{}% 7
|
||||||
\BOOKMARK [1][-]{section.2}{Verschiedene Techniken maschinellen lernens}{}% 8
|
\BOOKMARK [2][-]{subsection.2.1}{\334berwachtes Lernen}{section.2}% 8
|
||||||
\BOOKMARK [2][-]{subsection.2.1}{\334berwachtes Lernen}{section.2}% 9
|
\BOOKMARK [2][-]{subsection.2.2}{Un\374berwachtes Lernen}{section.2}% 9
|
||||||
\BOOKMARK [2][-]{subsection.2.2}{Un\374berwachtes Lernen}{section.2}% 10
|
\BOOKMARK [2][-]{subsection.2.3}{Best\344rkendes Lernen}{section.2}% 10
|
||||||
\BOOKMARK [2][-]{subsection.2.3}{Best\344rkendes Lernen}{section.2}% 11
|
\BOOKMARK [1][-]{section.3}{Neuronale Netze}{}% 11
|
||||||
\BOOKMARK [1][-]{section.3}{Neuronale Netze}{}% 12
|
\BOOKMARK [2][-]{subsection.3.1}{Maschinelles Lernen und menschliches Lernen}{section.3}% 12
|
||||||
\BOOKMARK [2][-]{subsection.3.1}{Maschinelles Lernen und menschliches Lernen}{section.3}% 13
|
\BOOKMARK [2][-]{subsection.3.2}{Der Aufbau eines neuronalen Netzes}{section.3}% 13
|
||||||
\BOOKMARK [2][-]{subsection.3.2}{Der Aufbau eines neuronalen Netzes}{section.3}% 14
|
\BOOKMARK [2][-]{subsection.3.3}{Berechnung des Ausgabevektors}{section.3}% 14
|
||||||
\BOOKMARK [2][-]{subsection.3.3}{Berechnung des Ausgabevektors}{section.3}% 15
|
\BOOKMARK [2][-]{subsection.3.4}{Der Lernprozess}{section.3}% 15
|
||||||
\BOOKMARK [2][-]{subsection.3.4}{Der Lernprozess}{section.3}% 16
|
\BOOKMARK [2][-]{subsection.3.5}{Fehlerfunktionen}{section.3}% 16
|
||||||
\BOOKMARK [2][-]{subsection.3.5}{Fehlerfunktionen}{section.3}% 17
|
\BOOKMARK [3][-]{subsubsection.3.5.1}{MSE \205 Durchschnittlicher quadratischer Fehler}{subsection.3.5}% 17
|
||||||
\BOOKMARK [3][-]{subsubsection.3.5.1}{MSE \205 Durchschnittlicher quadratischer Fehler}{subsection.3.5}% 18
|
\BOOKMARK [3][-]{subsubsection.3.5.2}{MAE \205 Durchschnitztlicher absoluter Fehler}{subsection.3.5}% 18
|
||||||
\BOOKMARK [3][-]{subsubsection.3.5.2}{MAE \205 Durchschnitztlicher absoluter Fehler}{subsection.3.5}% 19
|
\BOOKMARK [3][-]{subsubsection.3.5.3}{Kreuzentropiefehler}{subsection.3.5}% 19
|
||||||
\BOOKMARK [3][-]{subsubsection.3.5.3}{Kreuzentropiefehler}{subsection.3.5}% 20
|
\BOOKMARK [2][-]{subsection.3.6}{Gradientenverfahren und Backpropagation}{section.3}% 20
|
||||||
\BOOKMARK [2][-]{subsection.3.6}{Gradientenverfahren und Backpropagation}{section.3}% 21
|
\BOOKMARK [3][-]{subsubsection.3.6.1}{Lernrate}{subsection.3.6}% 21
|
||||||
\BOOKMARK [3][-]{subsubsection.3.6.1}{Lernrate}{subsection.3.6}% 22
|
\BOOKMARK [2][-]{subsection.3.7}{Verschiedene Layerarten}{section.3}% 22
|
||||||
\BOOKMARK [2][-]{subsection.3.7}{Verschiedene Layerarten}{section.3}% 23
|
\BOOKMARK [3][-]{subsubsection.3.7.1}{Convolutional Layers}{subsection.3.7}% 23
|
||||||
\BOOKMARK [3][-]{subsubsection.3.7.1}{Convolutional Layers}{subsection.3.7}% 24
|
\BOOKMARK [3][-]{subsubsection.3.7.2}{Pooling Layers}{subsection.3.7}% 24
|
||||||
\BOOKMARK [3][-]{subsubsection.3.7.2}{Pooling Layers}{subsection.3.7}% 25
|
\BOOKMARK [1][-]{section.4}{PyTorch}{}% 25
|
||||||
\BOOKMARK [1][-]{section.4}{PyTorch}{}% 26
|
\BOOKMARK [2][-]{subsection.4.1}{Datenvorbereitung}{section.4}% 26
|
||||||
\BOOKMARK [2][-]{subsection.4.1}{Datenvorbereitung}{section.4}% 27
|
\BOOKMARK [2][-]{subsection.4.2}{Definieren des Netzes}{section.4}% 27
|
||||||
\BOOKMARK [2][-]{subsection.4.2}{Definieren des Netzes}{section.4}% 28
|
\BOOKMARK [2][-]{subsection.4.3}{Trainieren des Netzes}{section.4}% 28
|
||||||
\BOOKMARK [2][-]{subsection.4.3}{Trainieren des Netzes}{section.4}% 29
|
\BOOKMARK [1][-]{section.5}{Fallbeispiel I:Ein Klassifizierungsnetzwerk f\374r handgeschriebene Ziffern}{}% 29
|
||||||
\BOOKMARK [1][-]{section.5}{Fallbeispiel I:Ein Klassifizierungsnetzwerk f\374r handgeschriebene Ziffern}{}% 30
|
\BOOKMARK [2][-]{subsection.5.1}{Aufgabe}{section.5}% 30
|
||||||
\BOOKMARK [2][-]{subsection.5.1}{Aufgabe}{section.5}% 31
|
\BOOKMARK [2][-]{subsection.5.2}{Der MNIST Datensatz}{section.5}% 31
|
||||||
\BOOKMARK [2][-]{subsection.5.2}{Der MNIST Datensatz}{section.5}% 32
|
\BOOKMARK [2][-]{subsection.5.3}{Fragmentbasierte Erkennung}{section.5}% 32
|
||||||
\BOOKMARK [2][-]{subsection.5.3}{Fragmentbasierte Erkennung}{section.5}% 33
|
\BOOKMARK [2][-]{subsection.5.4}{Ergebnis}{section.5}% 33
|
||||||
\BOOKMARK [2][-]{subsection.5.4}{Ergebnis}{section.5}% 34
|
\BOOKMARK [1][-]{section.6}{Fallbeispiel II:Eine selbsttrainierende KI f\374r Tic-Tac-Toe}{}% 34
|
||||||
\BOOKMARK [1][-]{section.6}{Fallbeispiel II:Eine selbsttrainierende KI f\374r Tic-Tac-Toe}{}% 35
|
\BOOKMARK [2][-]{subsection.6.1}{Das Prinzip}{section.6}% 35
|
||||||
\BOOKMARK [2][-]{subsection.6.1}{Das Prinzip}{section.6}% 36
|
\BOOKMARK [2][-]{subsection.6.2}{Chance-Tree Optimierung}{section.6}% 36
|
||||||
\BOOKMARK [2][-]{subsection.6.2}{Chance-Tree Optimierung}{section.6}% 37
|
\BOOKMARK [2][-]{subsection.6.3}{L\366sung mittels eines neuronalen Netzes}{section.6}% 37
|
||||||
\BOOKMARK [2][-]{subsection.6.3}{L\366sung mittels eines neuronalen Netzes}{section.6}% 38
|
\BOOKMARK [2][-]{subsection.6.4}{Vergleich}{section.6}% 38
|
||||||
\BOOKMARK [2][-]{subsection.6.4}{Vergleich}{section.6}% 39
|
\BOOKMARK [1][-]{section.7}{Schlusswort}{}% 39
|
||||||
\BOOKMARK [1][-]{section.7}{Schlusswort}{}% 40
|
|
||||||
|
|
|
||||||
Binary file not shown.
Binary file not shown.
|
|
@ -70,10 +70,23 @@ Als Regressionsproblem hingegen bezeichnet man das Finden einer Funktion, die ei
\\
The curve here does not represent a boundary; it is the function that approximates the values. The points represent the input data, and here too a few outliers are visible.
\subsection{Risks of machine learning}
Machine learning can be a powerful technology. A great many problems can be solved with it, but not all of them. Before using machine learning one should therefore ask: can this problem be solved more easily in a conventional way? One should also keep in mind that machine learning, unlike most algorithms, is not a technology that reaches an accuracy of 100\%. In systems where a correct answer is critical, one should therefore not rely on machine learning alone.\\
Machine learning also always requires an enormous amount of data, and these data have to be collected first. This immediately raises an ethical question: which data may be collected and evaluated in good conscience? Personality rights and the right to privacy should play a central role here, and the usefulness of the technology should never be placed above the rights of its users. Beijing's new airport, for example, shows alarming tendencies: at check-in, facial recognition ties a person's identity to their face, and from then on everything from ticket purchase to the duty-free shop runs via facial recognition \cite{4}.\\
The central risks of machine learning are therefore the possible uncertainty of the result, the high training effort, which can sometimes be avoided by using classical algorithms, and the violation of rights through the evaluation of personal data.
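As a minimal sketch of what "not relying on machine learning alone" can look like in practice, the following assumes a trained PyTorch classifier \verb|model|, a single input \verb|x| with a batch dimension, and a hypothetical conventional fallback routine \verb|conventional_check|; the network's answer is only accepted when its softmax confidence clears a threshold.
\begin{verbatim}
import torch
import torch.nn.functional as F

CONFIDENCE_THRESHOLD = 0.9  # hypothetical value, to be tuned per application

def classify_with_fallback(model, x, conventional_check):
    """Use the network only when it is confident, otherwise fall back."""
    model.eval()
    with torch.no_grad():
        logits = model(x)                 # raw class scores, shape (1, classes)
        probs = F.softmax(logits, dim=1)  # scores turned into probabilities
        confidence, prediction = probs.max(dim=1)
    if confidence.item() >= CONFIDENCE_THRESHOLD:
        return prediction.item()          # trust the network's prediction
    return conventional_check(x)          # critical case: use a conventional rule
\end{verbatim}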
\subsubsection{Overfitting}
Overfitting is a problem that frequently occurs in classification tasks: the class boundaries are defined very precisely, but incorrectly. This is illustrated in figure \ref{Overfitting}.
\begin{figure}[h]
\centering
\includegraphics[width=0.6\linewidth]{../graphics/overfitting.png}
\caption{Overfitting}
\label{Overfitting}
\end{figure}
\\
Overfitting occurs when a neural network is trained on the same data set for too long. The network then memorizes the data, because that way it can reach an error of 0; however, no real class boundaries are learned in the process.\\
To counteract overfitting, it is often enough to randomize the order of the training data set, which keeps the network from simply memorizing its sequence.
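To make the shuffling advice above concrete, here is a minimal sketch (an illustration only, assuming PyTorch and the torchvision package are available) that loads the MNIST training set and lets the DataLoader randomize the order of the examples; \verb|shuffle=True| is the only detail that matters for this point.
\begin{verbatim}
import torch
from torchvision import datasets, transforms

# Load the MNIST training set (assumes torchvision is installed).
train_set = datasets.MNIST(root="data", train=True, download=True,
                           transform=transforms.ToTensor())

# shuffle=True hands the examples to the network in a fresh random
# order every epoch, so it cannot simply memorize their sequence.
train_loader = torch.utils.data.DataLoader(train_set, batch_size=64,
                                           shuffle=True)

for images, labels in train_loader:
    pass  # the actual training step would go here
\end{verbatim}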
\subsubsection{The data}
As mentioned above, the data sets are often the limiting factor in machine learning. The most serious problem is finding, or being able to generate, a suitable data set for the problem in the first place. In doing so, one has to make sure that all factors relevant to the problem are represented in it. If, for example, the goal is to recognize faces of every kind, it is not enough to train the algorithm on a data set of faces of light-skinned people, because it would then be useless for recognizing the faces of dark-skinned people. That this is not merely a theoretical phenomenon but one that occurs in practice is shown by a study of the National Institute of Standards and Technology (NIST) \cite{5}. Among other things, it found that an algorithm developed in the USA, and very popular there, has an extremely high error rate for African-American women. Since this system is used by, among others, the police in the USA, African-American women run a considerably higher risk of being falsely accused of a crime.
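One way to catch such a problem before deployment is to report the error rate per group instead of a single overall accuracy. The sketch below is purely illustrative; \verb|y_true|, \verb|y_pred| and \verb|group| are hypothetical arrays holding, for each test example, the true label, the predicted label and a group label.
\begin{verbatim}
import numpy as np

def error_rate_per_group(y_true, y_pred, group):
    """Misclassification rate computed separately for every group label."""
    y_true, y_pred, group = map(np.asarray, (y_true, y_pred, group))
    rates = {}
    for g in np.unique(group):
        mask = group == g
        rates[g] = float(np.mean(y_true[mask] != y_pred[mask]))
    return rates

# Made-up example: group "b" is misclassified far more often than group "a".
print(error_rate_per_group([0, 1, 1, 0, 1, 0],
                           [0, 1, 0, 1, 0, 0],
                           ["a", "a", "b", "b", "b", "a"]))
\end{verbatim}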
\section{Different techniques of machine learning}
\subsection{Supervised learning}
\subsection{Unsupervised learning}
\subsection{Reinforcement learning}
@ -531,7 +544,16 @@ Die Dimension der Submatritzen beträgt meißt $2\times2$. In Abbildung \ref{Poo
By Ravindra Parmar\newline
Published 02.09.2018, retrieved 07.01.2020\newline
Source: https://towardsdatascience.com/common-loss-functions-in-machine-learning-46af0ffc4d23
\bibitem{4}
Facial Recognition Is Everywhere at China’s New Mega Airport\\
Bloomberg, 11 December 2019\\
https://www.bloomberg.com/news/articles/2019-12-11/face-recognition-tech-is-everywhere-at-china-s-new-mega-airport\\
Retrieved 23.01.2020
\bibitem{5}
A US government study confirms most face recognition systems are racist\\
MIT Technology Review, 20.12.2019\\
https://www.technologyreview.com/f/614986/ai-face-recognition-racist-us-government-nist-study/\\
Retrieved 23.01.2020
\end{thebibliography}
\listoffigures
\end{document}
@ -9,74 +9,72 @@
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {1.3}Gefahren von maschinellem Lernen}{5}{subsection.1.3}%
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {1.3.1}Eignung der Datens\IeC {\"a}tze}{5}{subsubsection.1.3.1}%
\contentsline {subsubsection}{\numberline {1.3.1}Overfitting}{5}{subsubsection.1.3.1}%
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {1.3.2}Overfitting}{5}{subsubsection.1.3.2}%
\contentsline {subsubsection}{\numberline {1.3.2}Die Daten}{5}{subsubsection.1.3.2}%
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {1.3.3}Unbewusste Manipulation der Daten}{5}{subsubsection.1.3.3}%
\contentsline {section}{\numberline {2}Verschiedene Techniken maschinellen Lernens}{6}{section.2}%
\defcounter {refsection}{0}\relax
\contentsline {section}{\numberline {2}Verschiedene Techniken maschinellen lernens}{5}{section.2}%
\contentsline {subsection}{\numberline {2.1}\IeC {\"U}berwachtes Lernen}{6}{subsection.2.1}%
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {2.1}\IeC {\"U}berwachtes Lernen}{5}{subsection.2.1}%
\contentsline {subsection}{\numberline {2.2}Un\IeC {\"u}berwachtes Lernen}{6}{subsection.2.2}%
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {2.2}Un\IeC {\"u}berwachtes Lernen}{5}{subsection.2.2}%
\contentsline {subsection}{\numberline {2.3}Best\IeC {\"a}rkendes Lernen}{6}{subsection.2.3}%
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {2.3}Best\IeC {\"a}rkendes Lernen}{5}{subsection.2.3}%
\contentsline {section}{\numberline {3}Neuronale Netze}{6}{section.3}%
\defcounter {refsection}{0}\relax
\contentsline {section}{\numberline {3}Neuronale Netze}{5}{section.3}%
\contentsline {subsection}{\numberline {3.1}Maschinelles Lernen und menschliches Lernen}{7}{subsection.3.1}%
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {3.1}Maschinelles Lernen und menschliches Lernen}{5}{subsection.3.1}%
\contentsline {subsection}{\numberline {3.2}Der Aufbau eines neuronalen Netzes}{8}{subsection.3.2}%
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {3.2}Der Aufbau eines neuronalen Netzes}{6}{subsection.3.2}%
\contentsline {subsection}{\numberline {3.3}Berechnung des Ausgabevektors}{9}{subsection.3.3}%
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {3.3}Berechnung des Ausgabevektors}{7}{subsection.3.3}%
\contentsline {subsection}{\numberline {3.4}Der Lernprozess}{11}{subsection.3.4}%
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {3.4}Der Lernprozess}{9}{subsection.3.4}%
\contentsline {subsection}{\numberline {3.5}Fehlerfunktionen}{11}{subsection.3.5}%
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {3.5}Fehlerfunktionen}{10}{subsection.3.5}%
\contentsline {subsubsection}{\numberline {3.5.1}MSE -- Durchschnittlicher quadratischer Fehler}{11}{subsubsection.3.5.1}%
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {3.5.1}MSE -- Durchschnittlicher quadratischer Fehler}{10}{subsubsection.3.5.1}%
\contentsline {subsubsection}{\numberline {3.5.2}MAE -- Durchschnitztlicher absoluter Fehler}{12}{subsubsection.3.5.2}%
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {3.5.2}MAE -- Durchschnitztlicher absoluter Fehler}{10}{subsubsection.3.5.2}%
\contentsline {subsubsection}{\numberline {3.5.3}Kreuzentropiefehler}{12}{subsubsection.3.5.3}%
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {3.5.3}Kreuzentropiefehler}{11}{subsubsection.3.5.3}%
\contentsline {subsection}{\numberline {3.6}Gradientenverfahren und Backpropagation}{13}{subsection.3.6}%
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {3.6}Gradientenverfahren und Backpropagation}{12}{subsection.3.6}%
\contentsline {subsubsection}{\numberline {3.6.1}Lernrate}{14}{subsubsection.3.6.1}%
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {3.6.1}Lernrate}{13}{subsubsection.3.6.1}%
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {3.7}Verschiedene Layerarten}{14}{subsection.3.7}%
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {3.7.1}Convolutional Layers}{14}{subsubsection.3.7.1}%
\contentsline {subsubsection}{\numberline {3.7.1}Convolutional Layers}{15}{subsubsection.3.7.1}%
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {3.7.2}Pooling Layers}{16}{subsubsection.3.7.2}%
\contentsline {subsubsection}{\numberline {3.7.2}Pooling Layers}{18}{subsubsection.3.7.2}%
\defcounter {refsection}{0}\relax
\contentsline {section}{\numberline {4}PyTorch}{19}{section.4}%
\contentsline {section}{\numberline {4}PyTorch}{21}{section.4}%
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {4.1}Datenvorbereitung}{19}{subsection.4.1}%
\contentsline {subsection}{\numberline {4.1}Datenvorbereitung}{21}{subsection.4.1}%
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {4.2}Definieren des Netzes}{19}{subsection.4.2}%
\contentsline {subsection}{\numberline {4.2}Definieren des Netzes}{21}{subsection.4.2}%
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {4.3}Trainieren des Netzes}{19}{subsection.4.3}%
\contentsline {subsection}{\numberline {4.3}Trainieren des Netzes}{21}{subsection.4.3}%
\defcounter {refsection}{0}\relax
\contentsline {section}{\numberline {5}Fallbeispiel I:\newline Ein Klassifizierungsnetzwerk f\IeC {\"u}r handgeschriebene Ziffern}{19}{section.5}%
\contentsline {section}{\numberline {5}Fallbeispiel I:\newline Ein Klassifizierungsnetzwerk f\IeC {\"u}r handgeschriebene Ziffern}{21}{section.5}%
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {5.1}Aufgabe}{19}{subsection.5.1}%
\contentsline {subsection}{\numberline {5.1}Aufgabe}{21}{subsection.5.1}%
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {5.2}Der MNIST Datensatz}{19}{subsection.5.2}%
\contentsline {subsection}{\numberline {5.2}Der MNIST Datensatz}{21}{subsection.5.2}%
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {5.3}Fragmentbasierte Erkennung}{19}{subsection.5.3}%
\contentsline {subsection}{\numberline {5.3}Fragmentbasierte Erkennung}{21}{subsection.5.3}%
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {5.4}Ergebnis}{19}{subsection.5.4}%
\contentsline {subsection}{\numberline {5.4}Ergebnis}{21}{subsection.5.4}%
\defcounter {refsection}{0}\relax
\contentsline {section}{\numberline {6}Fallbeispiel II:\newline Eine selbsttrainierende KI f\IeC {\"u}r Tic-Tac-Toe}{19}{section.6}%
\contentsline {section}{\numberline {6}Fallbeispiel II:\newline Eine selbsttrainierende KI f\IeC {\"u}r Tic-Tac-Toe}{21}{section.6}%
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {6.1}Das Prinzip}{19}{subsection.6.1}%
\contentsline {subsection}{\numberline {6.1}Das Prinzip}{21}{subsection.6.1}%
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {6.2}Chance-Tree Optimierung}{19}{subsection.6.2}%
\contentsline {subsection}{\numberline {6.2}Chance-Tree Optimierung}{21}{subsection.6.2}%
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {6.3}L\IeC {\"o}sung mittels eines neuronalen Netzes}{19}{subsection.6.3}%
\contentsline {subsection}{\numberline {6.3}L\IeC {\"o}sung mittels eines neuronalen Netzes}{21}{subsection.6.3}%
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {6.4}Vergleich}{19}{subsection.6.4}%
\contentsline {subsection}{\numberline {6.4}Vergleich}{21}{subsection.6.4}%
\defcounter {refsection}{0}\relax
\contentsline {section}{\numberline {7}Schlusswort}{19}{section.7}%
\contentsline {section}{\numberline {7}Schlusswort}{21}{section.7}%
BIN
graphics/overfitting.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 155 KiB
graphics/overfitting.svg
SVG source of the overfitting figure: export settings for rendering to graphics/overfitting.png at 200 dpi were added, the overfitting curve was adjusted and recolored red (#ff1e23), a green curve (#71e73f) was added, and a legend with the labels "Klassengrenze mit Overfitting" (class boundary with overfitting) and "Erwartete Klassengrenze" (expected class boundary) was added.
Before Width: | Height: | Size: 25 KiB After Width: | Height: | Size: 28 KiB