% rnn_bachelor_thesis/Report/04_machine_learning.aux
\relax 
\providecommand\hyper@newdestlabel[2]{}
\citation{connor1994recurrent}
\citation{grossberg2013recurrent}
\citation{ML:XGBoost}
\citation{chollet2015keras}
\citation{abadi2016tensorflow}
\@writefile{toc}{\contentsline {section}{\numberline {5}Machine learning}{21}{section.5}}
\@writefile{toc}{\contentsline {subsection}{\numberline {5.1}Introduction}{21}{subsection.5.1}}
\newlabel{ML_Intro}{{5.1}{21}{Introduction}{subsection.5.1}{}}
\@writefile{toc}{\contentsline {subsection}{\numberline {5.2}Artificial neural networks}{21}{subsection.5.2}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {5.2.1}General concepts}{21}{subsubsection.5.2.1}}
\newlabel{neural_network_arch}{{7a}{22}{Architecture of a neural network\relax }{figure.caption.11}{}}
\newlabel{sub@neural_network_arch}{{a}{22}{Architecture of a neural network\relax }{figure.caption.11}{}}
\newlabel{neuron}{{7b}{22}{Neuron\relax }{figure.caption.11}{}}
\newlabel{sub@neuron}{{b}{22}{Neuron\relax }{figure.caption.11}{}}
\@writefile{lof}{\contentsline {figure}{\numberline {7}{\ignorespaces Neural network architecture\relax }}{22}{figure.caption.11}}
\citation{klambauer2017self}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {5.2.2}Activation functions}{23}{subsubsection.5.2.2}}
\newlabel{selu}{{8a}{23}{SELU and ELU activation functions\relax }{figure.caption.12}{}}
\newlabel{sub@selu}{{a}{23}{SELU and ELU activation functions\relax }{figure.caption.12}{}}
\newlabel{relu}{{8b}{23}{ReLU activation function\relax }{figure.caption.12}{}}
\newlabel{sub@relu}{{b}{23}{ReLU activation function\relax }{figure.caption.12}{}}
\newlabel{tanh}{{8c}{23}{Tanh activation function\relax }{figure.caption.12}{}}
\newlabel{sub@tanh}{{c}{23}{Tanh activation function\relax }{figure.caption.12}{}}
\@writefile{lof}{\contentsline {figure}{\numberline {8}{\ignorespaces Activation functions\relax }}{23}{figure.caption.12}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {5.2.3}Concepts of training}{24}{subsubsection.5.2.3}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {5.2.4}Loss functions}{24}{subsubsection.5.2.4}}
\newlabel{MSE}{{8}{25}{Loss functions}{equation.5.8}{}}
\newlabel{BC}{{9}{25}{Loss functions}{equation.5.9}{}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {5.2.5}Stochastic gradient descent}{25}{subsubsection.5.2.5}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {5.2.6}Stochastic gradient descent with momentum}{25}{subsubsection.5.2.6}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {5.2.7}RMSProp}{25}{subsubsection.5.2.7}}
\citation{chilimbi2014project}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {5.2.8}Adam}{26}{subsubsection.5.2.8}}
\newlabel{adam_alg}{{10}{26}{Adam}{equation.5.10}{}}
\citation{ioffe2015batch}
\citation{cooijmans2016recurrent}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {5.2.9}Decaying learning rate}{27}{subsubsection.5.2.9}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {5.2.10}Batch normalisation}{27}{subsubsection.5.2.10}}
\@writefile{lof}{\contentsline {figure}{\numberline {9}{\ignorespaces The effects of batch normalisation on data\relax }}{27}{figure.caption.13}}
\newlabel{batch_norm}{{9}{27}{The effects of batch normalisation on data\relax }{figure.caption.13}{}}
\@writefile{toc}{\contentsline {subsection}{\numberline {5.3}Recurrent neural networks}{27}{subsection.5.3}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {5.3.1}General concepts}{27}{subsubsection.5.3.1}}
\@writefile{lof}{\contentsline {figure}{\numberline {10}{\ignorespaces General RNN architecture\relax }}{28}{figure.caption.14}}
\newlabel{RNN_arch}{{10}{28}{General RNN architecture\relax }{figure.caption.14}{}}
\citation{schuster1997bidirectional}
\citation{gers1999learning}
\citation{chung2014empirical}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {5.3.2}Most common architectures}{29}{subsubsection.5.3.2}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {5.3.3}Cell types}{29}{subsubsection.5.3.3}}
\citation{ML:XGBoost}
\@writefile{lof}{\contentsline {figure}{\numberline {11}{\ignorespaces Architecture of an LSTM cell\relax }}{30}{figure.caption.15}}
\newlabel{LSTM_arch}{{11}{30}{Architecture of an LSTM cell\relax }{figure.caption.15}{}}
\@writefile{toc}{\contentsline {subsection}{\numberline {5.4}XGBoost}{30}{subsection.5.4}}
\@setckpt{04_machine_learning}{
\setcounter{page}{32}
\setcounter{equation}{11}
\setcounter{enumi}{0}
\setcounter{enumii}{0}
\setcounter{enumiii}{0}
\setcounter{enumiv}{0}
\setcounter{footnote}{19}
\setcounter{mpfootnote}{0}
\setcounter{part}{0}
\setcounter{section}{5}
\setcounter{subsection}{4}
\setcounter{subsubsection}{0}
\setcounter{paragraph}{0}
\setcounter{subparagraph}{0}
\setcounter{figure}{11}
\setcounter{table}{3}
\setcounter{parentequation}{0}
\setcounter{AM@survey}{0}
\setcounter{ContinuedFloat}{0}
\setcounter{subfigure}{0}
\setcounter{subtable}{0}
\setcounter{float@type}{4}
\setcounter{Item}{0}
\setcounter{Hfootnote}{19}
\setcounter{bookmark@seq@number}{39}
\setcounter{@stackindex}{1}
\setcounter{ROWcellindex@}{0}
\setcounter{TABrowindex@}{2}
\setcounter{TABcolindex@}{1}
\setcounter{TABalignmentindex@}{0}
\setcounter{pp@next@reset}{0}
\setcounter{section@level}{2}
}