pytorch-ai/doc/Grundlagen_des_maschinellen_lernens.aux
2020-01-13 14:07:17 +01:00

\relax
\providecommand\hyper@newdestlabel[2]{}
\catcode `"\active
\providecommand\HyperFirstAtBeginDocument{\AtBeginDocument}
\HyperFirstAtBeginDocument{\ifx\hyper@anchor\@undefined
\global\let\oldcontentsline\contentsline
\gdef\contentsline#1#2#3#4{\oldcontentsline{#1}{#2}{#3}}
\global\let\oldnewlabel\newlabel
\gdef\newlabel#1#2{\newlabelxx{#1}#2}
\gdef\newlabelxx#1#2#3#4#5#6{\oldnewlabel{#1}{{#2}{#3}}}
\AtEndDocument{\ifx\hyper@anchor\@undefined
\let\contentsline\oldcontentsline
\let\newlabel\oldnewlabel
\fi}
\fi}
\global\let\hyper@last\relax
\gdef\HyperFirstAtBeginDocument#1{#1}
\providecommand\HyField@AuxAddToFields[1]{}
\providecommand\HyField@AuxAddToCoFields[2]{}
\abx@aux@refcontext{nty/global//global/global}
\@writefile{toc}{\boolfalse {citerequest}\boolfalse {citetracker}\boolfalse {pagetracker}\boolfalse {backtracker}\relax }
\@writefile{lof}{\boolfalse {citerequest}\boolfalse {citetracker}\boolfalse {pagetracker}\boolfalse {backtracker}\relax }
\@writefile{lot}{\boolfalse {citerequest}\boolfalse {citetracker}\boolfalse {pagetracker}\boolfalse {backtracker}\relax }
\babel@aux{ngerman}{}
\abx@aux@cite{1}
\abx@aux@segm{0}{0}{1}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {1}Was ist maschinelles Lernen?}{3}{section.1}\protected@file@percent }
\abx@aux@cite{2}
\abx@aux@segm{0}{0}{2}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {1.1}Klassifizierungsprobleme}{4}{subsection.1.1}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {1.2}Regressionsprobleme}{4}{subsection.1.2}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {1.3}Gefahren von maschinellem Lernen}{4}{subsection.1.3}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {1.3.1}Eignung der Datens\IeC {\"a}tze}{4}{subsubsection.1.3.1}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {1.3.2}Overfitting}{4}{subsubsection.1.3.2}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {1.3.3}Unbewusste Manipulation der Daten}{4}{subsubsection.1.3.3}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {2}Verschiedene Techniken maschinellen Lernens}{4}{section.2}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.1}\IeC {\"U}berwachtes Lernen}{4}{subsection.2.1}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.2}Un\IeC {\"u}berwachtes Lernen}{4}{subsection.2.2}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {2.3}Best\IeC {\"a}rkendes Lernen}{4}{subsection.2.3}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {3}Neuronale Netze}{4}{section.3}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {3.1}Maschinelles Lernen und menschliches Lernen}{4}{subsection.3.1}\protected@file@percent }
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {1}{\ignorespaces Neuron \newline Quelle: simple.wikipedia.org/wiki/File:Neuron.svg\newline Copyright: CC Attribution-Share Alike von Nutzer Dhp1080,\newline bearbeitet}}{5}{figure.1}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {3.2}Der Aufbau eines neuronalen Netzes}{5}{subsection.3.2}\protected@file@percent }
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {2}{\ignorespaces Ein einfaches neuronales Netz}}{6}{figure.2}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {3.3}Berechnung des Ausgabevektors}{6}{subsection.3.3}\protected@file@percent }
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {3}{\ignorespaces Der Plot der Sigmoid-Funktion $\sigma (x)=\frac {e^x}{e^x+1}$}}{7}{figure.3}\protected@file@percent }
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {4}{\ignorespaces Formel zur Berechnung eines Ausgabevektors aus einem Eingabevektor durch ein Layer Neuronen. }}{8}{figure.4}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {3.4}Der Lernprozess}{8}{subsection.3.4}\protected@file@percent }
\abx@aux@cite{3}
\abx@aux@segm{0}{0}{3}
\abx@aux@segm{0}{0}{3}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {3.5}Fehlerfunktionen}{9}{subsection.3.5}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.5.1}MSE -- Durchschnittlicher quadratischer Fehler}{9}{subsubsection.3.5.1}\protected@file@percent }
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {5}{\ignorespaces Die Gleichung f\IeC {\"u}r den durchschnittlichen quadratischen Fehler}}{9}{figure.5}\protected@file@percent }
\newlabel{MSE_equation}{{5}{9}{Die Gleichung für den durchschnittlichen quadratischen Fehler}{figure.5}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.5.2}MAE -- Durchschnittlicher absoluter Fehler}{9}{subsubsection.3.5.2}\protected@file@percent }
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {6}{\ignorespaces Die Gleichung f\IeC {\"u}r den durchschnittlichen absoluten Fehler}}{9}{figure.6}\protected@file@percent }
\newlabel{MAE_equation}{{6}{9}{Die Gleichung für den durchschnittlichen absoluten Fehler}{figure.6}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.5.3}Kreuzentropiefehler}{10}{subsubsection.3.5.3}\protected@file@percent }
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {7}{\ignorespaces Der Graph der Kreuzentropie-Fehlerfunktion, wenn das tats\IeC {\"a}chliche Label 1 ist}}{10}{figure.7}\protected@file@percent }
\newlabel{CEL_Graph}{{7}{10}{Der Graph der Kreuzentropie-Fehlerfunktion, wenn das tatsächliche Label 1 ist}{figure.7}{}}
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {8}{\ignorespaces Die Gleichung f\IeC {\"u}r den Kreuzentropiefehler}}{11}{figure.8}\protected@file@percent }
\newlabel{CEL_Function}{{8}{11}{Die Gleichung für den Kreuzentropiefehler}{figure.8}{}}
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {9}{\ignorespaces Die Gleichung f\IeC {\"u}r den kumulierten Kreuzentropiefehler}}{11}{figure.9}\protected@file@percent }
\newlabel{CEL_Finction_cummulative}{{9}{11}{Die Gleichung für den kumulierten Kreuzentropiefehler}{figure.9}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {3.6}Gradientenverfahren und Backpropagation}{11}{subsection.3.6}\protected@file@percent }
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {10}{\ignorespaces Die Gleichung f\IeC {\"u}r den Gradienten der Fehlerfunktion}}{11}{figure.10}\protected@file@percent }
\newlabel{Gradient_Function}{{10}{11}{Die Gleichung für den Gradienten der Fehlerfunktion}{figure.10}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.6.1}Lernrate}{11}{subsubsection.3.6.1}\protected@file@percent }
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {11}{\ignorespaces Die Gleichung f\IeC {\"u}r die Anpassung eines einzelnen Parameters}}{12}{figure.11}\protected@file@percent }
\newlabel{Learning_Rate_Function}{{11}{12}{Die Gleichung für die Anpassung eines einzelnen Parameters}{figure.11}{}}
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {12}{\ignorespaces $\eta $ ist hier zu gro\IeC {\ss } gew\IeC {\"a}hlt}}{12}{figure.12}\protected@file@percent }
\newlabel{Learning_Rate_Graphic}{{12}{12}{$\eta $ ist hier zu groß gewählt}{figure.12}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {3.7}Verschiedene Layerarten}{12}{subsection.3.7}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.7.1}Convolutional Layers}{13}{subsubsection.3.7.1}\protected@file@percent }
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {13}{\ignorespaces Eine Verbildlichung einer Convolution\newline Aus einer Animation von\newline https://deeplizard.com/learn/video/YRhxdVk\_sIs}}{13}{figure.13}\protected@file@percent }
\newlabel{Convolution_illustration}{{13}{13}{Eine Verbildlichung einer Convolution\newline Aus einer Animation von\newline https://deeplizard.com/learn/video/YRhxdVk\_sIs}{figure.13}{}}
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {14}{\ignorespaces Erkennt obere horizontale Kanten}}{14}{figure.14}\protected@file@percent }
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {15}{\ignorespaces Erkennt linke vertikale Kanten}}{14}{figure.15}\protected@file@percent }
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {16}{\ignorespaces Erkennt untere horizontale Kanten}}{14}{figure.16}\protected@file@percent }
\@writefile{lof}{\defcounter {refsection}{0}\relax }\@writefile{lof}{\contentsline {figure}{\numberline {17}{\ignorespaces Erkennt rechte vertikale Kanten}}{14}{figure.17}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.7.2}Pooling Layers}{15}{subsubsection.3.7.2}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {4}PyTorch}{15}{section.4}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {4.1}Datenvorbereitung}{15}{subsection.4.1}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {4.2}Definieren des Netzes}{15}{subsection.4.2}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {4.3}Trainieren des Netzes}{15}{subsection.4.3}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {5}Fallbeispiel I:\newline Ein Klassifizierungsnetzwerk f\IeC {\"u}r handgeschriebene Ziffern}{15}{section.5}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {5.1}Aufgabe}{15}{subsection.5.1}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {5.2}Der MNIST-Datensatz}{15}{subsection.5.2}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {5.3}Fragmentbasierte Erkennung}{15}{subsection.5.3}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {5.4}Ergebnis}{15}{subsection.5.4}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {6}Fallbeispiel II:\newline Eine selbsttrainierende KI f\IeC {\"u}r Tic-Tac-Toe}{15}{section.6}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {6.1}Das Prinzip}{15}{subsection.6.1}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {6.2}Chance-Tree Optimierung}{15}{subsection.6.2}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {6.3}L\IeC {\"o}sung mittels eines neuronalen Netzes}{15}{subsection.6.3}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {6.4}Vergleich}{15}{subsection.6.4}\protected@file@percent }
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {7}Schlusswort}{15}{section.7}\protected@file@percent }
\bibcite{1}{1}
\bibcite{2}{2}
\bibcite{3}{3}