rnn_bachelor_thesis/Report/04_machine_learning.aux
\relax
\providecommand\hyper@newdestlabel[2]{}
\citation{connor1994recurrent}
\citation{grossberg2013recurrent}
\citation{ML:XGBoost}
\citation{chollet2015keras}
\citation{abadi2016tensorflow}
\@writefile{toc}{\contentsline {section}{\numberline {5}Machine learning}{21}{section.5}}
\@writefile{toc}{\contentsline {subsection}{\numberline {5.1}Introduction}{21}{subsection.5.1}}
\newlabel{ML_Intro}{{5.1}{21}{Introduction}{subsection.5.1}{}}
\@writefile{toc}{\contentsline {subsection}{\numberline {5.2}Artificial neural networks}{21}{subsection.5.2}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {5.2.1}General concepts}{21}{subsubsection.5.2.1}}
\newlabel{neural_network_arch}{{7a}{22}{Architecture of a neural network\relax }{figure.caption.11}{}}
\newlabel{sub@neural_network_arch}{{a}{22}{Architecture of a neural network\relax }{figure.caption.11}{}}
\newlabel{neuron}{{7b}{22}{Neuron\relax }{figure.caption.11}{}}
\newlabel{sub@neuron}{{b}{22}{Neuron\relax }{figure.caption.11}{}}
\@writefile{lof}{\contentsline {figure}{\numberline {7}{\ignorespaces Neural network architecture\relax }}{22}{figure.caption.11}}
\citation{klambauer2017self}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {5.2.2}Activation functions}{23}{subsubsection.5.2.2}}
\newlabel{selu}{{8a}{23}{Selu, elu activation function\relax }{figure.caption.12}{}}
\newlabel{sub@selu}{{a}{23}{Selu, elu activation function\relax }{figure.caption.12}{}}
\newlabel{relu}{{8b}{23}{Relu activation function\relax }{figure.caption.12}{}}
\newlabel{sub@relu}{{b}{23}{Relu activation function\relax }{figure.caption.12}{}}
\newlabel{tanh}{{8c}{23}{Tanh activation function\relax }{figure.caption.12}{}}
\newlabel{sub@tanh}{{c}{23}{Tanh activation function\relax }{figure.caption.12}{}}
\@writefile{lof}{\contentsline {figure}{\numberline {8}{\ignorespaces Activation functions\relax }}{23}{figure.caption.12}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {5.2.3}Concepts of training}{24}{subsubsection.5.2.3}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {5.2.4}Loss functions}{24}{subsubsection.5.2.4}}
\newlabel{MSE}{{8}{25}{Loss functions}{equation.5.8}{}}
\newlabel{BC}{{9}{25}{Loss functions}{equation.5.9}{}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {5.2.5}Stochastic gradient descent}{25}{subsubsection.5.2.5}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {5.2.6}Stochastic gradient descent with Momentum}{25}{subsubsection.5.2.6}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {5.2.7}RMSProp}{25}{subsubsection.5.2.7}}
\citation{chilimbi2014project}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {5.2.8}Adam}{26}{subsubsection.5.2.8}}
\newlabel{adam_alg}{{10}{26}{Adam}{equation.5.10}{}}
\citation{ioffe2015batch}
\citation{cooijmans2016recurrent}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {5.2.9}Decaying learning rate}{27}{subsubsection.5.2.9}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {5.2.10}Batch normalisation}{27}{subsubsection.5.2.10}}
\@writefile{lof}{\contentsline {figure}{\numberline {9}{\ignorespaces The effects of Batch Normalization on data\relax }}{27}{figure.caption.13}}
\newlabel{batch_norm}{{9}{27}{The effects of Batch Normalization on data\relax }{figure.caption.13}{}}
\@writefile{toc}{\contentsline {subsection}{\numberline {5.3}Recurrent Neural Networks}{27}{subsection.5.3}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {5.3.1}General concepts}{27}{subsubsection.5.3.1}}
\citation{schuster1997bidirectional}
\@writefile{lof}{\contentsline {figure}{\numberline {10}{\ignorespaces General RNN architecture\relax }}{28}{figure.caption.14}}
\newlabel{RNN_arch}{{10}{28}{General RNN architecture\relax }{figure.caption.14}{}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {5.3.2}Most common architectures}{28}{subsubsection.5.3.2}}
\citation{gers1999learning}
\citation{chung2014empirical}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {5.3.3}Cell types}{29}{subsubsection.5.3.3}}
\citation{ML:XGBoost}
\@writefile{lof}{\contentsline {figure}{\numberline {11}{\ignorespaces Architecture of a LSTM cell\relax }}{30}{figure.caption.15}}
\newlabel{LSTM_arch}{{11}{30}{Architecture of a LSTM cell\relax }{figure.caption.15}{}}
\@writefile{toc}{\contentsline {subsection}{\numberline {5.4}XGBoost}{30}{subsection.5.4}}
\@setckpt{04_machine_learning}{
\setcounter{page}{31}
\setcounter{equation}{11}
\setcounter{enumi}{0}
\setcounter{enumii}{0}
\setcounter{enumiii}{0}
\setcounter{enumiv}{0}
\setcounter{footnote}{19}
\setcounter{mpfootnote}{0}
\setcounter{part}{0}
\setcounter{section}{5}
\setcounter{subsection}{4}
\setcounter{subsubsection}{0}
\setcounter{paragraph}{0}
\setcounter{subparagraph}{0}
\setcounter{figure}{11}
\setcounter{table}{3}
\setcounter{parentequation}{0}
\setcounter{AM@survey}{0}
\setcounter{ContinuedFloat}{0}
\setcounter{subfigure}{0}
\setcounter{subtable}{0}
\setcounter{float@type}{4}
\setcounter{Item}{0}
\setcounter{Hfootnote}{19}
\setcounter{bookmark@seq@number}{39}
\setcounter{@stackindex}{1}
\setcounter{ROWcellindex@}{0}
\setcounter{TABrowindex@}{2}
\setcounter{TABcolindex@}{1}
\setcounter{TABalignmentindex@}{0}
\setcounter{pp@next@reset}{0}
\setcounter{section@level}{2}
}