{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import tensorflow as tf\n",
    "import matplotlib.pyplot as plt\n",
    "import os\n",
    "import pickle\n",
    "import math\n",
    "\n",
    "trunc_normal= tf.truncated_normal_initializer(stddev=1)\n",
    "normal = tf.random_normal_initializer(stddev=1)\n",
    "\n",
    "from architectures.data_processing import *\n",
    "from architectures.utils.toolbox import *\n",
    "from architectures.DNN import *"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# IMPORTING THE DATASET"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "l_index=1\n",
    "mag_index=1\n",
    "Ds_mass= 1968"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Signal MC amounts to 23821 while bkg data amounts to 86051\n"
     ]
    }
   ],
   "source": [
    "MC_sig_dict, data_bkg_dict = load_datasets(l_index, mag_index)\n",
    "m=MC_sig_dict[\"Ds_ConsD_M\"].shape[0]\n",
    "n=data_bkg_dict[\"Ds_ConsD_M\"].shape[0]\n",
    "\n",
    "print('Signal MC amounts to {0} while bkg data amounts to {1}'.format(m,n))"
   ]
  },
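  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`load_datasets` comes from `architectures.data_processing` and is not shown in this notebook. The cells below only rely on it returning two dictionaries that map branch names to per-event numpy arrays of shape `(N, 1)`; a toy sketch of that structure (the branch list and sizes here are placeholders, not the real loader) is:\n",
    "\n",
    "```python\n",
    "import numpy as np\n",
    "\n",
    "def toy_load_datasets(n_sig=100, n_bkg=300):\n",
    "    #placeholder branches; the real loader fills every branch used below\n",
    "    branches = [\"Ds_ConsD_M\", \"Ds_ENDVERTEX_CHI2\", \"Ds_ENDVERTEX_NDOF\"]\n",
    "    sig = {b: np.random.rand(n_sig, 1) for b in branches}\n",
    "    bkg = {b: np.random.rand(n_bkg, 1) for b in branches}\n",
    "    return sig, bkg\n",
    "```"
   ]
  },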
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "#Normalising the Chi2 vertex fits to the NDoF\n",
    "\n",
    "MC_sig_dict[\"Ds_ENDVERTEX_CHI2\"]=MC_sig_dict[\"Ds_ENDVERTEX_CHI2\"]/MC_sig_dict[\"Ds_ENDVERTEX_NDOF\"]\n",
    "MC_sig_dict[\"Ds_OWNPV_CHI2\"]=MC_sig_dict[\"Ds_OWNPV_CHI2\"]/MC_sig_dict[\"Ds_OWNPV_NDOF\"]\n",
    "MC_sig_dict[\"Ds_IPCHI2_OWNPV\"]=MC_sig_dict[\"Ds_IPCHI2_OWNPV\"]/MC_sig_dict[\"Ds_ENDVERTEX_NDOF\"]\n",
    "\n",
    "del MC_sig_dict[\"Ds_ENDVERTEX_NDOF\"]\n",
    "del MC_sig_dict[\"Ds_OWNPV_NDOF\"]\n",
    "\n",
    "data_bkg_dict[\"Ds_ENDVERTEX_CHI2\"]=data_bkg_dict[\"Ds_ENDVERTEX_CHI2\"]/data_bkg_dict[\"Ds_ENDVERTEX_NDOF\"]\n",
    "data_bkg_dict[\"Ds_OWNPV_CHI2\"]=data_bkg_dict[\"Ds_OWNPV_CHI2\"]/data_bkg_dict[\"Ds_OWNPV_NDOF\"]\n",
    "data_bkg_dict[\"Ds_IPCHI2_OWNPV\"]=data_bkg_dict[\"Ds_IPCHI2_OWNPV\"]/data_bkg_dict[\"Ds_ENDVERTEX_NDOF\"]\n",
    "\n",
    "del data_bkg_dict[\"Ds_ENDVERTEX_NDOF\"]\n",
    "del data_bkg_dict[\"Ds_OWNPV_NDOF\"]\n",
    "\n",
    "data_bkg_dict[\"phi_ENDVERTEX_CHI2\"]=data_bkg_dict[\"phi_ENDVERTEX_CHI2\"]/data_bkg_dict[\"phi_ENDVERTEX_NDOF\"]\n",
    "data_bkg_dict[\"phi_OWNPV_CHI2\"]=data_bkg_dict[\"phi_OWNPV_CHI2\"]/data_bkg_dict[\"phi_OWNPV_NDOF\"]\n",
    "data_bkg_dict[\"phi_IPCHI2_OWNPV\"]=data_bkg_dict[\"phi_IPCHI2_OWNPV\"]/data_bkg_dict[\"phi_ENDVERTEX_NDOF\"]\n",
    "\n",
    "del data_bkg_dict[\"phi_ENDVERTEX_NDOF\"]\n",
    "del data_bkg_dict[\"phi_OWNPV_NDOF\"]"
   ]
  },
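  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The three blocks above apply the same transformation to different prefixes. A sketch of an equivalent helper, keeping the exact divisions used above (including the IP chi2 divided by the end-vertex NDoF), would be:\n",
    "\n",
    "```python\n",
    "def normalise_chi2(d, prefix):\n",
    "    #divide the chi2 branches by the NDoF and drop the NDoF columns, as in the cell above\n",
    "    d[prefix + \"_ENDVERTEX_CHI2\"] = d[prefix + \"_ENDVERTEX_CHI2\"] / d[prefix + \"_ENDVERTEX_NDOF\"]\n",
    "    d[prefix + \"_OWNPV_CHI2\"] = d[prefix + \"_OWNPV_CHI2\"] / d[prefix + \"_OWNPV_NDOF\"]\n",
    "    d[prefix + \"_IPCHI2_OWNPV\"] = d[prefix + \"_IPCHI2_OWNPV\"] / d[prefix + \"_ENDVERTEX_NDOF\"]\n",
    "    del d[prefix + \"_ENDVERTEX_NDOF\"], d[prefix + \"_OWNPV_NDOF\"]\n",
    "\n",
    "#usage: normalise_chi2(MC_sig_dict, \"Ds\"); normalise_chi2(data_bkg_dict, \"Ds\"); normalise_chi2(data_bkg_dict, \"phi\")\n",
    "```"
   ]
  },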
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "branches_needed = [\n",
    "                    \"Ds_ENDVERTEX_CHI2\",\n",
    "                    #\"Ds_ENDVERTEX_NDOF\",\n",
    "                    \"Ds_OWNPV_CHI2\",\n",
    "                    #\"Ds_OWNPV_NDOF\",\n",
    "                    \"Ds_IPCHI2_OWNPV\",\n",
    "                    \"Ds_IP_OWNPV\",\n",
    "                    \"Ds_DIRA_OWNPV\",\n",
    "                    #l_flv[l_index]+\"_plus_MC15TuneV1_ProbNN\"+l_flv[l_index],\n",
    "                    #\"Ds_Hlt1TrackMVADecision_TOS\",\n",
    "                    #\"Ds_Hlt2RareCharmD2Pi\"+l_flv[l_index].capitalize()+l_flv[l_index].capitalize()+\"OSDecision_TOS\",\n",
    "                    #\"Ds_Hlt2Phys_TOS\",\n",
    "                    \"phi_ENDVERTEX_CHI2\",\n",
    "                    #\"phi_ENDVERTEX_NDOF\",\n",
    "                    \"phi_OWNPV_CHI2\",\n",
    "                    #\"phi_OWNPV_NDOF\",\n",
    "                    \"phi_IPCHI2_OWNPV\",\n",
    "                    \"phi_IP_OWNPV\",\n",
    "                    \"phi_DIRA_OWNPV\",\n",
    "                    \"Ds_ConsD_M\",\n",
    "                  ] "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "#Number of input features\n",
    "\n",
    "dim=len(branches_needed)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "#Convert data dictionaries to arrays for NN\n",
    "\n",
    "MC_sig = extract_array(MC_sig_dict, branches_needed, dim, m)\n",
    "data_bkg = extract_array(data_bkg_dict, branches_needed, dim, n)"
   ]
  },
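  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`extract_array` is imported from the `architectures` package and is not shown here. The rest of the notebook only assumes it returns an `(n, dim)` matrix whose columns follow the order of `branches_needed`; a sketch of that behaviour would be:\n",
    "\n",
    "```python\n",
    "import numpy as np\n",
    "\n",
    "def extract_array_sketch(d, branches, dim, n):\n",
    "    #stack the requested branches column-wise into an (n, dim) feature matrix\n",
    "    out = np.zeros((n, dim))\n",
    "    for j, b in enumerate(branches):\n",
    "        out[:, j] = np.asarray(d[b]).reshape(n)\n",
    "    return out\n",
    "```"
   ]
  },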
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "#Add 0/1 label for bkg/sig\n",
    "\n",
    "MC_sig_labelled=add_labels(MC_sig,signal=True)\n",
    "data_bkg_labelled=add_labels(data_bkg,signal=False)"
   ]
  },
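  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`add_labels` is also provided by the `architectures` package. Judging from the cross checks in the next cell (the extra column sums to 0 for background and to `m` for signal), a sketch of it would be:\n",
    "\n",
    "```python\n",
    "import numpy as np\n",
    "\n",
    "def add_labels_sketch(x, signal=True):\n",
    "    #append a label column: 1.0 for signal MC, 0.0 for background data\n",
    "    label = np.ones((x.shape[0], 1)) if signal else np.zeros((x.shape[0], 1))\n",
    "    return np.concatenate((x, label), axis=1)\n",
    "```"
   ]
  },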
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "#SOME CROSS CHECKS\n",
    "#MC_sig.shape==data_bkg.shape\n",
    "#MC_sig_labelled.shape[1]==dim+1==data_bkg_labelled.shape[1]\n",
    "#data_bkg_labelled[:,dim].sum()==0\n",
    "#(MC_sig_labelled[:,dim].sum()/m)==1"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(101872, 4000, 4000)"
      ]
     },
     "execution_count": 10,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "#Establish train/val/test sizes\n",
    "\n",
    "val_size=4000\n",
    "test_size=4000\n",
    "\n",
    "train_size=MC_sig.shape[0]+data_bkg.shape[0]-val_size-test_size\n",
    "(train_size, val_size, test_size)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "True"
      ]
     },
     "execution_count": 11,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "#Merge MC sig and data bkg, shuffle it\n",
    "\n",
    "data=np.concatenate((MC_sig_labelled,data_bkg_labelled), axis =0)\n",
    "np.random.seed(1)\n",
    "np.random.shuffle(data)\n",
    "\n",
    "#Check that nothing is missing\n",
    "\n",
    "data.shape[0]==train_size+val_size+test_size"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [],
   "source": [
    "#Strip away the label column and convert it to a one-hot encoding\n",
    "\n",
    "X=data[:,0:dim]\n",
    "Y_labels=data[:,dim].astype(int)\n",
    "Y_labels=Y_labels.reshape(train_size+val_size+test_size,1)\n",
    "Y_labels_hot = to_one_hot(Y_labels)\n",
    "Y_labels=Y_labels_hot\n"
   ]
  },
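  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`to_one_hot` comes from the toolbox; with two classes and 0/1 labels, a minimal sketch consistent with the `(N, 2)` targets the network expects (`n_classes: 2` below) would be:\n",
    "\n",
    "```python\n",
    "import numpy as np\n",
    "\n",
    "def to_one_hot_sketch(labels):\n",
    "    #map an (N, 1) array of 0/1 labels to an (N, 2) one-hot matrix\n",
    "    labels = labels.reshape(-1).astype(int)\n",
    "    one_hot = np.zeros((labels.shape[0], 2))\n",
    "    one_hot[np.arange(labels.shape[0]), labels] = 1.0\n",
    "    return one_hot\n",
    "```"
   ]
  },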
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "#Divide the dataset in train/val/test sets \n",
    "\n",
    "X_train_0 = X[0:train_size]\n",
    "Y_train = Y_labels[0:train_size]\n",
    "\n",
    "X_val_0 = X[train_size:train_size+val_size]\n",
    "Y_val = Y_labels[train_size:train_size+val_size]\n",
    "\n",
    "X_test_0 = X[train_size+val_size:train_size+val_size+test_size]\n",
    "Y_test = Y_labels[train_size+val_size:train_size+val_size+test_size]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "['Ds_ENDVERTEX_CHI2',\n",
       " 'Ds_OWNPV_CHI2',\n",
       " 'Ds_IPCHI2_OWNPV',\n",
       " 'Ds_IP_OWNPV',\n",
       " 'Ds_DIRA_OWNPV',\n",
       " 'phi_ENDVERTEX_CHI2',\n",
       " 'phi_OWNPV_CHI2',\n",
       " 'phi_IPCHI2_OWNPV',\n",
       " 'phi_IP_OWNPV',\n",
       " 'phi_DIRA_OWNPV',\n",
       " 'Ds_ConsD_M']"
      ]
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "branches_needed"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [],
   "source": [
    "#Strip out the reconstructed Ds mass\n",
    "\n",
    "X_train = X_train_0[:,0:dim-1]\n",
    "X_val = X_val_0[:,0:dim-1]\n",
    "X_test = X_test_0[:,0:dim-1]\n",
    "dim=X_train.shape[1]"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# SETTING UP THE NETWORK"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [],
   "source": [
    "task='TRAIN'\n",
    "#task='TEST'\n",
    "\n",
    "PATH=l_flv[l_index]+'_Mag'+mag_status[mag_index]+'_test_4'"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {},
   "outputs": [],
   "source": [
    "if task =='TRAIN' and os.path.exists(PATH+'/hyper_parameters.pkl'):\n",
    "    with open(PATH+'/hyper_parameters.pkl', 'rb') as f:  \n",
    "        hyper_dict = pickle.load(f)\n",
    "        \n",
    "    m=hyper_dict[\"m\"]\n",
    "    test_size=hyper_dict[\"test_size\"]\n",
    "    val_size=hyper_dict[\"val_size\"]\n",
    "    LEARNING_RATE=hyper_dict[\"LEARNING_RATE\"]\n",
    "    BETA1=hyper_dict[\"BETA1\"]\n",
    "    BATCH_SIZE=hyper_dict[\"BATCH_SIZE\"]\n",
    "    EPOCHS=hyper_dict[\"EPOCHS\"]\n",
    "    VAL_PERIOD=hyper_dict[\"VAL_PERIOD\"]\n",
    "    SEED=hyper_dict[\"SEED\"]\n",
    "    sizes=hyper_dict[\"sizes\"]\n",
    "    LAMBD=hyper_dict[\"LAMBD\"]\n",
    "    PATH=hyper_dict[\"PATH\"]\n",
    "\n",
    "elif task=='TRAIN' and not os.path.exists(PATH+'/hyper_parameters.pkl'):\n",
    "    \n",
    "    \n",
    "    LEARNING_RATE = 0.001\n",
    "    BETA1 = 0.5\n",
    "    BATCH_SIZE = 64\n",
    "    EPOCHS = 20000\n",
    "    VAL_PERIOD = 2000\n",
    "    SEED=1\n",
    "    LAMBD=1.\n",
    "    \n",
    "    sizes = {\n",
    "    'dense_layers': [\n",
    "                        #(16, 'bn', 0.8, lrelu, tf.glorot_uniform_initializer()),\n",
    "                        #(8, 'bn', 0.5, lrelu, tf.glorot_uniform_initializer()),\n",
    "                        #(16, 'bn',0.8, lrelu, tf.glorot_uniform_initializer()),\n",
    "                        (32, 'bn', 0.8, lrelu, tf.glorot_uniform_initializer()),\n",
    "                        (16, 'bn', 0.8, lrelu, tf.glorot_uniform_initializer()),\n",
    "                        (8, 'bn', 0.8, lrelu, tf.glorot_uniform_initializer()),\n",
    "                    ],\n",
    "    'n_classes':2,\n",
    "    }"
   ]
  },
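  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Each entry of `dense_layers` appears to be a tuple `(units, normalisation, keep_prob, activation, initialiser)`. The `DNN` class that consumes it lives in `architectures.DNN` and is not shown here; a sketch of how one such tuple could be turned into a hidden layer with the TF1 API, under that assumed meaning, is:\n",
    "\n",
    "```python\n",
    "import tensorflow as tf\n",
    "\n",
    "def build_dense_layer(x, spec, is_training):\n",
    "    #hypothetical helper: spec = (units, norm, keep_prob, activation, initializer)\n",
    "    units, norm, keep_prob, activation, init = spec\n",
    "    h = tf.layers.dense(x, units, kernel_initializer=init)\n",
    "    if norm == 'bn':\n",
    "        h = tf.layers.batch_normalization(h, training=is_training)\n",
    "    h = activation(h)\n",
    "    return tf.nn.dropout(h, keep_prob=keep_prob)\n",
    "```"
   ]
  },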
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [],
   "source": [
    "if task == 'TEST' and os.path.exists(PATH+'/hyper_parameters.pkl'):\n",
    "    with open(PATH+'/hyper_parameters.pkl', 'rb') as f:  \n",
    "        hyper_dict = pickle.load(f)\n",
    "        #for key, item in hyper_dict.items():\n",
    "        #    print(key+':'+str(item))\n",
    "            \n",
    "    m=hyper_dict[\"m\"]\n",
    "    test_size=hyper_dict[\"test_size\"]\n",
    "    val_size=hyper_dict[\"val_size\"]\n",
    "    LEARNING_RATE=hyper_dict[\"LEARNING_RATE\"]\n",
    "    BETA1=hyper_dict[\"BETA1\"]\n",
    "    BATCH_SIZE=hyper_dict[\"BATCH_SIZE\"]\n",
    "    EPOCHS=hyper_dict[\"EPOCHS\"]\n",
    "    VAL_PERIOD=hyper_dict[\"VAL_PERIOD\"]\n",
    "    SEED=hyper_dict[\"SEED\"]\n",
    "    sizes=hyper_dict[\"sizes\"]\n",
    "    LAMBD=hyper_dict[\"LAMBD\"]\n",
    "    PATH=hyper_dict[\"PATH\"]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {},
   "outputs": [],
   "source": [
    "def bkg():\n",
    "    \n",
    "    tf.reset_default_graph()\n",
    "    nn = DNN(dim, sizes,\n",
    "              lr=LEARNING_RATE, beta1=BETA1, lambd=LAMBD,\n",
    "              batch_size=BATCH_SIZE, epochs=EPOCHS,\n",
    "              save_sample=VAL_PERIOD, path=PATH, seed=SEED)\n",
    "    \n",
    "    vars_to_train= tf.trainable_variables()\n",
    "    \n",
    "    if task == 'TRAIN':\n",
    "        init_op = tf.global_variables_initializer()\n",
    "        \n",
    "    if task == 'TEST':\n",
    "        vars_all = tf.global_variables()\n",
    "        vars_to_init = list(set(vars_all)-set(vars_to_train))\n",
    "        init_op = tf.variables_initializer(vars_to_init)\n",
    "    \n",
    "    # Add ops to save and restore all the variables.\n",
    "    saver = tf.train.Saver()\n",
    "    gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.333)\n",
    "    \n",
    "    with tf.Session(config=tf.ConfigProto(gpu_options=gpu_options)) as sess:\n",
    "        \n",
    "        sess.run(init_op)\n",
    "\n",
    "        if task=='TRAIN':\n",
    "            print('\\n Training...')\n",
    "            \n",
    "            if os.path.exists(PATH+'/CNN_model.ckpt.index'):\n",
    "                saver.restore(sess,PATH+'/CNN_model.ckpt')\n",
    "                print('Model restored.')\n",
    "            \n",
    "            nn.set_session(sess)\n",
    "            nn.fit(X_train, Y_train, X_val, Y_val)\n",
    "            \n",
    "            save_path = saver.save(sess, PATH+'/CNN_model.ckpt')\n",
    "            print(\"Model saved in path: %s\" % save_path)\n",
    "        \n",
    "        if task=='TEST':\n",
    "            print('\\n Evaluate model on test set...')\n",
    "            saver.restore(sess,PATH+'/CNN_model.ckpt')\n",
    "            print('Model restored.')\n",
    "            \n",
    "            nn.set_session(sess)\n",
    "            nn.test(X_test, Y_test)\n",
    "            \n",
    "            output = nn.predict(X_test)\n",
    "            \n",
    "            return output\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Input for propagation (?, 10)\n",
      "Logits shape (?, 2)\n",
      "Input for propagation (?, 10)\n",
      "Logits shape (?, 2)\n",
      "\n",
      " Training...\n",
      "\n",
      " ****** \n",
      "\n",
      "Training CNN for 20000 epochs with a total of 101872 samples\n",
      "distributed in 1591 batches of size 64\n",
      "\n",
      "The learning rate set is 0.001\n",
      "\n",
      " ****** \n",
      "\n",
      "Evaluating performance on validation/train sets\n",
      "At iteration 0, train cost: 0.003296, train accuracy 0.9763\n",
      "validation accuracy 0.9898\n",
      "Evaluating performance on validation/train sets\n",
      "At iteration 2000, train cost: 0.002107, train accuracy 0.9792\n",
      "validation accuracy 0.9375\n",
      "Evaluating performance on validation/train sets\n",
      "At iteration 4000, train cost: 0.001401, train accuracy 1\n",
      "validation accuracy 1\n",
      "Evaluating performance on validation/train sets\n",
      "At iteration 6000, train cost: 0.003771, train accuracy 1\n",
      "validation accuracy 1\n",
      "Evaluating performance on validation/train sets\n",
      "At iteration 8000, train cost: 0.001128, train accuracy 1\n",
      "validation accuracy 1\n",
      "Evaluating performance on validation/train sets\n",
      "At iteration 10000, train cost: 0.0007949, train accuracy 1\n",
      "validation accuracy 1\n",
      "Evaluating performance on validation/train sets\n",
      "At iteration 12000, train cost: 0.0005857, train accuracy 1\n",
      "validation accuracy 1\n",
      "Evaluating performance on validation/train sets\n",
      "At iteration 14000, train cost: 0.0006453, train accuracy 1\n",
      "validation accuracy 1\n",
      "Evaluating performance on validation/train sets\n",
      "At iteration 16000, train cost: 0.0004602, train accuracy 1\n",
      "validation accuracy 1\n",
      "Evaluating performance on validation/train sets\n",
      "At iteration 18000, train cost: 0.0005284, train accuracy 1\n",
      "validation accuracy 1\n"
     ]
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYsAAAEWCAYAAACXGLsWAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzt3XucV1W9//HXe2YYUFBDIY+CCnqwwvJSE9XpZGVeMDviKUu6nOh2zMpu/jrnh9UxD/08lV2Pj+MxPSfSLCXTLqQYmmnmnUG5CIYOiDKiAqLcYZiZz++PvWbYfPl+5zsgm+8A7+fjMQ/2Xnut/V3fzcz3811r7b2WIgIzM7Oe1NW6AmZm1vc5WJiZWVUOFmZmVpWDhZmZVeVgYWZmVTlYmJlZVQ4WVhOSFks6uUavvVbSkbV4bbPdlYOF7XUiYlBELKp1PQAkhaS/LeC8B0r6jaR1kp6S9KEe8krSdyS9kH4ulaTc8eMlzZS0Pv17fO7YOyXdKWmVpMU7+31Y3+FgYXsUSfW1rkMXSQ01fPnLgTbgYODDwBWSjqmQ91zgLOA44FjgPcCnASQ1Ar8Dfg4MBq4BfpfSAdYBk4F/KeZtWF/hYGE1J6lO0kRJC9M32xskHZg7/itJz6Vvr3fnP/QkXS3pCknTJK0D3pnSLpd0i6Q1kh6UdFSuTPe3+V7kPVXSgvTa/y3pz5I+VeF9XCzpRkk/l7Qa+JikMZLul/SSpGcl/VfXB62ku1PR2alr7JyU/h5Js1KZ+yQdu53XcyDwPuDfImJtRNwDTAX+qUKRCcD3I6I1Ip4Bvg98LB17B9AA/CgiNkXEZYCAkwAi4qGIuBboEy01K46DhfUFXyD7Zvt24FDgRbJvxl1uBUYBrwQeBn5RUv5DwCXAfsA9Ke2DwL+TfRtuSccrKZtX0hDgRuBC4CBgAfB3Vd7LuFTmFameHcCXgSHAW4B3AZ8FiIgTU5njUtfYLyW9nuyb+qfTa14JTJXUP9Xp5hREyv3cnM53NNAREY/n6jUbqNSyOCYdL5f3GGBObD0v0JwezmV7KAcL6ws+DXwtfbPdBFwMnN3VjRMRkyNiTe7YcZIOyJX/XUTcGxGdEbExpf06fettJ/vQPp7KKuV9NzAvIn6djl0GPFflvdwfEb9NddkQETMj4oGIaI+IxWQf/m/vofw/A1dGxIMR0RER1wCbgDena/GeiHhFhZ/3pHMMAlaVnHcVWTAtpzT/KmBQGrfY3nPZHqqWfapmXY4AfiOpM5fWARws6Tmyb/rvB4YCXXmGsOVDbEmZc+Y/1NeTfehVUinvoflzR0RIau35rWxdF0lHAz8AmoB9yf7mZvZQ/ghggqTP59IaU116ay2wf0na/sCaXubfH1ib3u/2nsv2UG5ZWF+wBDi95FvygNR//iGyrp2TgQOAEamMcuWLmjr5WWB41076pj28cvaydbkC+CswKiL2B77K1nUvtQS4pORa7BsR16c63JrGN8r93JrO8TjQIGlU7rzHAfMqvOa8dLxc3nnAsfm7o8gGwSudy/ZQDhbWF/wYuETSEQCShkoal47tR9YN8wLZN/P/2IX1ugV4naSzUpfY54C/2c5z7AesBtZKejXwmZLjzwP5Zz7+BzhP0pvSLa0DJZ0haT+AiDg9jW+U+zk95VkH/BqYlMq/lSzgXluhjj8DLpA0TNKhwP8Brk7H7iJr5X1BUn9J56f0P0H3zQkDgH7Zrgbk7pSyPYiDhfUF/0l2t85tktYADwBvSsd+BjwFPAPMT8d2iYhYQdb9dSlZsBoNNJMFr976ClnraA1ZIPhlyfGLgWvSAPUHIqKZbNziv8gG+lvYcmfS9vgssA+wDLge+ExEzAOQ9LbUvdTlSuD3wFzgUbIgeSVARLSR3XzwUeAl4BPAWSkd4ERgAzANODxt37YD9bU+Tl78yKx3JNUBrcCHI+LOWtfHbFdyy8KsB5JOk/SKdOtq13jDLmvdmPUVDhZmPXsLsBBYAfwDWRfMhtpWyWzXczeUmZlV5ZaFmZlVtcc8lDdkyJAYMWJErathZrZbmTlz5oqIGFot3x4TLEaMGEFzc3Otq2FmtluR9FRv8rkbyszMqnKwMDOzqhwszMysqkKDhaSxaeGYFkkTe8h3dlqQpimXdmEqt0DSaUXW08zMelbYALey5S0vB04hmyJhhqSpETG/JN9+ZIvfPJhLGw2MJ1tg5VDgj5KOjoiOouprZmaVFdmyGAO0RMSiNOnYFLKZL0t9k2yito25tHHAlLSM45Nkk6mNKbCuZmbWgyKDxTC2XgimNaV1k3QCcFhE3MzWqpZN5c+V1Cypefny5Tun1mZmto0ig0W5BV665xZJM3j+kGzu/O0q250QcVVENEVE09ChVZ8pKWvdpnZ+cNsCHnn6xR0qb2a2NygyWLQCh+X2hwNLc/v7Aa8F7pK0mGyN4alpkLta2Z1m4+YOLvtTC3NaS5cZNjOzLkUGixnAKEkj08pZ48kWuAEgIlZFxJCIGBERI8imfT4zLf4yFRifVuYaCYwCHiqikvV1WSOmo9MTKpqZVVLY3VAR0Z6WYJwO1AOTI2KepElAc0RM7aHsPEk3kK2M1g58rqg7oepSsOj07LtmZhUVOjdUREwjW24xn3ZRhbzvKNm/BLiksMol9XLLwsysmr3+Ce7ubii3LMzMKtrrg0Vdall0umVhZlbRXh8sGlLLot3Bwsysor0+WHQPcDtYmJlVtNcHC8jGLTxmYWZWmYMF2R1RHZ21roWZWd/lYAHU1fk5CzOznjhY0NWycLAwM6vEwYJskNvBwsysMgcLsgFud0OZmVXmYIG7oczMqnGwIOuGcsvCzKwyBwvcsjAzq8bBgvRQnp+zMDOryMECP2dhZlaNgwXuhjIzq6bQYCFprKQFklokTSxz/DxJcyXNknSPpNEpfYSkDSl9lqQfF1nPOs8NZWbWo8JWypNUD1wOnAK0AjMkTY2I+bls10XEj1P+M4EfAGPTsYURcXxR9ctrqBMdHQ4WZmaVFNmyGAO0RMSiiGgDpgDj8hkiYnVudyBQk0/sOrllYWbWkyKDxTBgSW6/NaVtRdLnJC0ELgW+kDs0UtIjkv4s6W3lXkDSuZKaJTUvX758hytaXyevZ2Fm1oMig4XKpG3ziRwRl0fEUcD/Bb6ekp8FDo+IE4ALgOsk7V+m7FUR0RQRTUOHDt3hino9CzOznhUZLFqBw3L7w4GlPeSfApwFEBGbIuKFtD0TWAgcXVA9s24otyzMzCoqMljMAEZJGimpERgPTM1nkDQqt3sG8ERKH5oGyJF0JDAKWFRURT2RoJlZzwq7Gyoi2iWdD0wH6oHJETFP0iSgOSKmAudLOhnYDLwITEjFTwQmSWoHOoDzImJlUXX1cxZmZj0rLFgARMQ0YFpJ2kW57S9WKHcTcFORdcurq4NOT/dhZlaRn+DGA9xmZtU4WOABbjOzahws8AC3mVk1DhZ4gNvMrBoHC9JEgg4WZmYVOViQtSzcDWVmVpmDBVBfL9rdsjA
zq8jBgtSycLAwM6vIwQI/Z2FmVo2DBdlzFn6C28ysMgcLoL4O3w1lZtYDBwvcDWVmVo2DBV3dUA4WZmaVOFjgloWZWTUOFngiQTOzahwsSBMJOliYmVVUaLCQNFbSAkktkiaWOX6epLmSZkm6R9Lo3LELU7kFkk4rsp7uhjIz61lhwSKtoX05cDowGvhgPhgk10XE6yLieOBS4Aep7GiyNbuPAcYC/921JncR/JyFmVnPimxZjAFaImJRRLQBU4Bx+QwRsTq3OxDo+no/DpgSEZsi4kmgJZ2vEPV1uGVhZtaDItfgHgYsye23Am8qzSTpc8AFQCNwUq7sAyVlh5Upey5wLsDhhx++wxX1ehZmZj0rsmWhMmnbfCJHxOURcRTwf4Gvb2fZqyKiKSKahg4dusMVra/LLoMHuc3MyisyWLQCh+X2hwNLe8g/BThrB8u+LPXpKniacjOz8ooMFjOAUZJGSmokG7Cems8gaVRu9wzgibQ9FRgvqb+kkcAo4KGiKlpXlzVkvACSmVl5hY1ZRES7pPOB6UA9MDki5kmaBDRHxFTgfEknA5uBF4EJqew8STcA84F24HMR0VFUXeuVBQuPW5iZlVfkADcRMQ2YVpJ2UW77iz2UvQS4pLjabVGfWha+I8rMrDw/wU32nAV4gNvMrBIHC3ItCwcLM7OyHCzYMsDtbigzs/IcLNgywO0pP8zMynOwYMtzFm5ZmJmV52CBB7jNzKpxsMAD3GZm1ThY4OcszMyqcbDA3VBmZtU4WOCWhZlZNQ4WbAkW7R0OFmZm5ThYkHvOwi0LM7OyHCzw3VBmZtU4WOD1LMzMqnGwIL+eRY0rYmbWRzlYAHVd0324G8rMrKxCg4WksZIWSGqRNLHM8QskzZc0R9Idko7IHeuQNCv9TC0tuzN5gNvMrGeFrZQnqR64HDgFaAVmSJoaEfNz2R4BmiJivaTPAJcC56RjGyLi+KLql+cBbjOznhXZshgDtETEoohoA6YA4/IZIuLOiFifdh8AhhdYn4q8noWZWc+KDBbDgCW5/daUVskngVtz+wMkNUt6QNJZ5QpIOjflaV6+fPkOV7Te032YmfWosG4oQGXSyn4aS/oI0AS8PZd8eEQslXQk8CdJcyNi4VYni7gKuAqgqalphz/p3Q1lZtazIlsWrcBhuf3hwNLSTJJOBr4GnBkRm7rSI2Jp+ncRcBdwQlEVrfMAt5lZj4oMFjOAUZJGSmoExgNb3dUk6QTgSrJAsSyXPlhS/7Q9BHgrkB8Y36m2tCyKegUzs91bYd1QEdEu6XxgOlAPTI6IeZImAc0RMRX4LjAI+JWyb/dPR8SZwGuAKyV1kgW0b5fcRbVTeVlVM7OeFTlmQURMA6aVpF2U2z65Qrn7gNcVWbe8+vRUXkenmxZmZuX4CW6gwd1QZmY9crAgP2bhaGFmVo6DBVtaFu2+ddbMrCwHC7xSnplZNQ4WQEO6HcotCzOz8hwsyA9we8zCzKwcBwu2dENtdjeUmVlZDhbkWxYOFmZm5ThYkBvgdrAwMyvLwQKQREOdPGZhZlaBg0VSXyffOmtmVoGDRdJQJ3dDmZlV0KtgIen9vUnbnTXU13mA28ysgt62LC7sZdpuq6FObPZMgmZmZfU4Rbmk04F3A8MkXZY7tD/QXmTFdrWGeo9ZmJlVUm09i6VAM3AmMDOXvgb4clGVqoWGujo2+24oM7OyeuyGiojZEXEN8LcRcU3angq0RMSL1U4uaaykBZJaJE0sc/wCSfMlzZF0h6QjcscmSHoi/UzYgfe2Xfq5ZWFmVlFvxyxul7S/pAOB2cBPJf2gpwKS6oHLgdOB0cAHJY0uyfYI0BQRxwI3ApemsgcC3wDeBIwBviFpcC/rukMa6utod8vCzKys3gaLAyJiNfBe4KcR8Qag7JKoOWPIWiCLIqINmAKMy2eIiDsjYn3afQAYnrZPA26PiJWpBXM7MLaXdd0h2QC3WxZmZuX0Nlg0SDoE+ABwcy/LDAOW5PZbU1olnwRu3Z6yks6V1Cypefny5b2sVnn96uto991QZmZl9TZYTAKmAwsjYoakI4EnqpRRmbSyX90lfQRoAr67PWUj4qqIaIqIpqFDh1apTs8a6v1QnplZJdXuhgIgIn4F/Cq3vwh4X5VircBhuf3hZHdXbUXSycDXgLdHxKZc2XeUlL2rN3XdUf3q6vychZlZBb19gnu4pN9IWibpeUk3SRpepdgMYJSkkZIagfFkd1Llz3sCcCVwZkQsyx2aDpwqaXAa2D41pRXGz1mYmVXW226on5J90B9KNnbw+5RWUUS0A+eTfcg/BtwQEfMkTZJ0Zsr2XWAQ8CtJsyRNTWVXAt8kCzgzgEkprTAN9XVsdjeUmVlZveqGAoZGRD44XC3pS9UKRcQ0YFpJ2kW57Yp3VEXEZGByL+v3svWrkwe4zcwq6G3LYoWkj0iqTz8fAV4osmK7mruhzMwq622w+ATZbbPPAc8CZwMfL6pStZB1Q7llYWZWTm+7ob4JTOia4iM9Yf09siCyR+jnxY/MzCrqbcvi2PxcUGmw+YRiqlQbDX4oz8ysot4Gi7r83EypZdHbVsluoV+9fDeUmVkFvf3A/z5wn6QbyZ6k/gBwSWG1qoGGOrcszMwq6e0T3D+T1AycRDYVx3sjYn6hNdvFfDeUmVllve5KSsFhjwoQef18N5SZWUW9HbPY4zX4bigzs4ocLJJs8aMgwgHDzKyUg0XSry6bFd3TlJuZbcvBImmozy6Fu6LMzLblYJH0q89aFh7kNjPbloNF0i+1LDa3O1iYmZVysEi6g4W7oczMtuFgkTQ2dAULtyzMzEoVGiwkjZW0QFKLpIlljp8o6WFJ7ZLOLjnWkVbP615Br0hdYxZtDhZmZtsobDJASfXA5cApQCswQ9LUkmlCngY+BnylzCk2RMTxRdWvVGPqhmrzmIWZ2TaKnDl2DNASEYsAJE0BxpGbMiQiFqdjNf+E3jJmUfOqmJn1OUV2Qw0DluT2W1Nabw2Q1CzpAUlnlcsg6dyUp3n58uUvp64eszAz60GRwUJl0rbnVqPDI6IJ+BDwI0lHbXOyiKsioikimoYOHbqj9QS2tCza2n03lJlZqSKDRStwWG5/OLC0t4UjYmn6dxFwFwWvzNfY4AFuM7NKigwWM4BRkkZKagTGA726q0nSYEn90/YQ4K0UPD26H8ozM6ussGAREe3A+cB04DHghoiYJ2mSpDMBJL1RUivwfuBKSfNS8dcAzZJmA3cC3y56sSWPWZiZVVboOtoRMQ2YVpJ2UW57Bln3VGm5+4DXFVm3Ut1jFg4WZmbb8BPciZ+zMDOrzMEi6WpZtL64ocY1MTPrexwskn0a6wHYf59+Na6JmVnf42CR9G9wN5SZWSUOFknXmMWm9o4a18TMrO9xsEjq6kRDndyyMDMrw8Eip7GhzsHCzKwMB4ucxoY6P2dhZlaGg0VOY71bFmZm5ThY5OzTWM/6Ng9wm5mVcrDI6e8xCzOzshwscjxmYWZWnoNFTv+Gej9nYWZWhoNFTv+GOjZtdsvCzKyUg0VOY0Mdmz
xmYWa2DQeLnAHuhjIzK6vQYCFprKQFklokTSxz/ERJD0tql3R2ybEJkp5IPxOKrGeXAf3q2LDZwcLMrFRhwUJSPXA5cDowGvigpNEl2Z4GPgZcV1L2QOAbwJuAMcA3JA0uqq5d9mmsZ6PHLMzMtlFky2IM0BIRiyKiDZgCjMtniIjFETEHKP2EPg24PSJWRsSLwO3A2ALrCmR3Q210y8LMbBtFBothwJLcfmtK22llJZ0rqVlS8/Lly3e4ol0G9Kv33VBmZmUUGSxUJi12ZtmIuCoimiKiaejQodtVuXL26VdPW0cnHZ29raaZ2d6hyGDRChyW2x8OLN0FZXfYgH7Z5XBXlJnZ1ooMFjOAUZJGSmoExgNTe1l2OnCqpMFpYPvUlFaoAf2ydbh9R5SZ2dYKCxYR0Q6cT/Yh/xhwQ0TMkzRJ0pkAkt4oqRV4P3ClpHmp7Ergm2QBZwYwKaUVap8ULNyyMDPbWkORJ4+IacC0krSLctszyLqYypWdDEwusn6l+nd3Q3mQ28wsz09w5wxwy8LMrCwHixx3Q5mZledgkdPVsvBqeWZmW3OwyNl/n2wIZ83G9hrXxMysb3GwyBnYmAWLdZscLMzM8hwscgb1T8GizcHCzCzPwSJnYH+3LMzMynGwyGlsqKNfvVjnAW4zs604WJQY2L/BLQszsxIOFiUGNjaw1sHCzGwrDhYlBvavZ/0md0OZmeU5WJTYt7HBd0OZmZUodCLB3dHSlzawbM2mWlfDzKxPccuihAOFmdm2HCxKfHDM4QxsrK91NczM+hQHixL3L1zBurYOr8NtZpZTaLCQNFbSAkktkiaWOd5f0i/T8QcljUjpIyRtkDQr/fy4yHrmLX5hPQBrPZmgmVm3woKFpHrgcuB0YDTwQUmjS7J9EngxIv4W+CHwndyxhRFxfPo5r6h6lrr4H7Iqbmz37bNmZl2KbFmMAVoiYlFEtAFTgHElecYB16TtG4F3SVKBdarqwEH9AVi1YXMtq2Fm1qcUGSyGAUty+60prWyeiGgHVgEHpWMjJT0i6c+S3lbuBSSdK6lZUvPy5ct3SqUH79sPgJfWO1iYmXUpMliUayGUjhpXyvMscHhEnABcAFwnaf9tMkZcFRFNEdE0dOjQl11hgMH7NgLw4vq2nXI+M7M9QZHBohU4LLc/HFhaKY+kBuAAYGVEbIqIFwAiYiawEDi6wLp2239A1rKYPu+5XfFyZma7hSKDxQxglKSRkhqB8cDUkjxTgQlp+2zgTxERkoamAXIkHQmMAhYVWNduBw7KWha3znWwMDPrUth0HxHRLul8YDpQD0yOiHmSJgHNETEV+AlwraQWYCVZQAE4EZgkqR3oAM6LiJVF1TWva7W8cccfuitezsxst1Do3FARMQ2YVpJ2UW57I/D+MuVuAm4qsm49GfXKQR7gNjPL8USCZTyxbC1PLFtb62qYmfUZnu7DzMyqcrAo422jhgCwuaOzxjUxM+sbHCzKWLR8HQDzl66ucU3MzPoGB4syvviuUQDMbn2pxjUxM+sbHCzKGDQgG/f/3788WeOamJn1DQ4WZZw6+mAAnl65vsY1MTPrGxwsymio92Uxs9p6btVGnnphXa2r0c3PWVQREdR41nQz2wu9+Vt3ALD422fUuCYZf4Wu4OTXvBKAx55dU+OalNfRGXR66Vcz20UcLCp456uzYPHuy/5S45qUd9RXpzHhpw/VuhpmtpdwsKhg/BsPr3UVqvrLEytqXQUz20s4WFRQX7dlnOKG5iU95DQz2/M5WPRgxEH7AvCvN85h5lO7ZIb0PqetvZP2MtOe3DLnWUZMvIX1be01qJWZ7WoOFj2461/e2b39vivu57Qf3g1kc0at2rCZjl00wNzRGTz0ZG2C1dFfv5XTfpS97wcXvcCzqzYA8P3bFwCw9KVsf9majVz34NM1qaPVzu9mPcPC5Z6heW/gYFHFr857S/f2gufXMGLiLYz62q0c9++3cdRXpzFi4i2MmHgLV9/7JPe2rGDq7KVMnb2UuxYsY8TEW5i3dNXLrsOP/7yQD1x5P/ct3L4xinN/1sxHJ7/8QfCFaa6sc656gJO+9+eSo1l33Wd//jBf/c1clqQHGb/zh78yYuItRFQOqNc+8FTZ5WsjghVrN/Wqbm3tnRxz0R/43axnepW/SBs3d7CpvYONmzu2+SIREfzP3Yt4oeR9nXX5vbzq67futDq0tRc/+eVpP7ybb948H4AvTpnFu75f+juxZ2hr7+Tz1z/S62AYEVW/QF7467mMmHjLdtXjD4/2jVU7Cw0WksZKWiCpRdLEMsf7S/plOv6gpBG5Yxem9AWSTiuynj1544gDufnzf18138W/n8+H//dBvnD9I3zh+kf42E9nAHDGZfd0B5QRE2/hLd+6g9/NeoYbmpcwYfJDXHbHE6zd1M6MxSu5de6z/Pvv59HW3klEdHf/dD2Y8/QL2z5R/p0//JW5rav463OrWbZmIy3Lttzqe9v857n78eVs3NzBFXct5NFnssC1bPXGbQJPRLBuU9aldOKld3Lx1Hll3+eGzR3bpC1bvZHn12wEsg9MgCvuWgjApvZONrR1dLdI8v7tt4/y6WtnAvDMSxtYlP4or7x7EU3/74/MXrLt3Fw/uedJFi1fy6b2DhavWMeqDZtZ19bBpN/PL1vftvZOlq3ZyDdvns/TL6znjseeB7Zc029Ne4yHn34RgMUr1vH1387d6g/+xXVtrN64mV/OeJrVGzfz4KIXyr4OwLEX38bffetPvPrf/sBnf5G9r7sWLOPFdW3Mf3Y1l0x7jC9OmQXA2k3tbO7oZNaSl7qv0Ya2Ldd24+at95ev2cSnrpnBqvWbWbxiHV+a8gg/vffJ7vf40vo2ps5eytFfv7Xih9sNzUt43xX3AdCybA0r17UBlA3oT7+wnikPlW8pLnh+DT+5p/pUOG3tnRVv737qhXXdgbOjM7j63ifZ1N7BSd+7a5sP00efWcXV9z5JRDBj8cqy9d3c0dmd/tXfzOXD//tA97GFy9d2t8xP+t5djE0t5cvvbOFT1zRvdZ77WlbQktaymd36Er+fvZR3ff/PvLQ+u1b/Me0xLv3DX2lZtoaPTn6o+/cdYOSF0zjqq9N4btVGbpzZWrae16druqGto9dfis77efa7tKm9g7mtW758fnTyQ93/n7uCevrm97JOnK2h/ThwCtBKtib3ByNifi7PZ4FjI+I8SeOBf4yIcySNBq4HxgCHAn8Ejo6IbT+pkqampmhubq50eKe4ec5Szr/ukUJfw2pvvwEN7Ne/gaWrNlbM87ZRQ6rejfalk0fxoz8+AcDVH39j9xeI7XHw/v15fvUmxow8kIeeXMnAxnrWtZX/M2isr6Oto5Mjhw7snjkZ4HXDDmDuMz23cAc21vPPJx7JfgP6saGtne/d9vhWxwfv249xxw/jglOP5tiLbwPgLUcexP0peH7mHUfxzle9kpXr2riheQnfeu/reNN/3MEbjhjMSa9+JY8+s4orPvKGst+qf3jOcXz5l7O3Svv4W0fwlVNfxeIX1nHGZfcAMGRQIyvWt
vGaQ/bnlNEHc9kdT/DNccdw2mv/hjGXZA+w7T+ggdUbt4yjzbroFI6fdDsAZ7zuEG6Z+ywA+/VvYE36cnTkkIH884lHcm/LCm6ekx2/b+JJjL/qga2m/Dn7DcO5cWbrNvW/4sOv5w0jBnfXoZwzjj2EW9K5Sw3q38CogwfRGfDrz/wd196/mDcfdRBjf7T1bfunHXMw0+c9v035yR9r4oiDBnLU0EEVX78nkmZGRFPVfAUGi7cAF0fEaWn/QoCI+FYuz/SU535JDcBzwFBgYj5vPl+l19sVwaIn7R2dLF+7iYa6OjZu7uDdl/2F/g31ve5OMTN7OXb0Se/eBosip/sYBuTvOW0F3lQpT0S0S1oFHJTSHygpO6z0BSSdC5wLcPjhtX0uoqG+jkMO2Kd7f+7FNes5IyLoDGjv7KSxvo4I6EzTlry0vo01G9s57MB92bi5g7b2Tta1tbNyXRub2jv567OvzzdXAAAHb0lEQVSrGdi/gVf9zX7ctWA5y9ds4l/HvorHnl3NI0+/xJnHHcql0xdw48xW/nXsqzh4vwHcPv95Zre+xAWnHM2/3Din+9vvJf/4Wg4/cF/+6SeVx03OaTqMe1pW8MxLW3dTjRlxII8vW+O10G2v9om3jmTyvX1j9usiWxbvB06LiE+l/X8CxkTE53N55qU8rWl/IVnX0yTg/oj4eUr/CTAtIm6q9Hq1blmYme2OetuyKHKAuxU4LLc/HFhaKU/qhjoAWNnLsmZmtosUGSxmAKMkjZTUCIwHppbkmQpMSNtnA3+KrKkzFRif7pYaCYwCPBGSmVmNFDZmkcYgzgemA/XA5IiYJ2kS0BwRU4GfANdKaiFrUYxPZedJugGYD7QDn+vpTigzMytWYWMWu5rHLMzMtl9fGLMwM7M9hIOFmZlV5WBhZmZVOViYmVlVe8wAt6TlwFMv4xRDAC89ty1fl8p8bcrzdamsL16bIyJiaLVMe0yweLkkNffmjoC9ja9LZb425fm6VLY7Xxt3Q5mZWVUOFmZmVpWDxRZX1boCfZSvS2W+NuX5ulS2214bj1mYmVlVblmYmVlVDhZmZlbVXh8sJI2VtEBSi6SJta7PriBpsaS5kmZJak5pB0q6XdIT6d/BKV2SLkvXZ46k1+fOMyHlf0LShEqv15dJmixpmaRHc2k77VpIekO61i2prHbtO9xxFa7NxZKeSb87syS9O3fswvQ+F0g6LZde9m8sLV/wYLpmv0xLGfR5kg6TdKekxyTNk/TFlL5n/95ExF77QzZ1+kLgSKARmA2MrnW9dsH7XgwMKUm7FJiYticC30nb7wZuBQS8GXgwpR8ILEr/Dk7bg2v93nbgWpwIvB54tIhrQbYOy1tSmVuB02v9nl/mtbkY+EqZvKPT309/YGT6u6rv6W8MuAEYn7Z/DHym1u+5l9flEOD1aXs/4PH0/vfo35u9vWUxBmiJiEUR0QZMAcbVuE61Mg64Jm1fA5yVS/9ZZB4AXiHpEOA04PaIWBkRLwK3A2N3daVfroi4m2wtlbydci3Ssf0j4v7IPgF+ljtXn1fh2lQyDpgSEZsi4kmghezvq+zfWPqmfBJwYyqfv859WkQ8GxEPp+01wGPAMPbw35u9PVgMA5bk9ltT2p4ugNskzZR0bko7OCKeheyPAXhlSq90jfbka7ezrsWwtF2avrs7P3WnTO7qamH7r81BwEsR0V6SvluRNAI4AXiQPfz3Zm8PFuX6AfeGe4nfGhGvB04HPifpxB7yVrpGe+O1295rsSdeoyuAo4DjgWeB76f0ve7aSBoE3AR8KSJW95S1TNpud2329mDRChyW2x8OLK1RXXaZiFia/l0G/Iasq+D51Pwl/bssZa90jfbka7ezrkVr2i5N321FxPMR0RERncD/kP3uwPZfmxVk3TENJem7BUn9yALFLyLi1yl5j/692duDxQxgVLoro5FsDfCpNa5ToSQNlLRf1zZwKvAo2fvuuhtjAvC7tD0V+Gi6o+PNwKrUxJ4OnCppcOqKODWl7Ql2yrVIx9ZIenPqo/9o7ly7pa4Pw+QfyX53ILs24yX1lzQSGEU2SFv2byz1xd8JnJ3K569zn5b+L38CPBYRP8gd2rN/b2o9wl7rH7I7FR4nu2Pja7Wuzy54v0eS3ZEyG5jX9Z7J+pDvAJ5I/x6Y0gVcnq7PXKApd65PkA1ktgAfr/V728HrcT1Zd8pmsm90n9yZ1wJoIvtAXQj8F2nWhN3hp8K1uTa99zlkH4KH5PJ/Lb3PBeTu3qn0N5Z+Fx9K1+xXQP9av+deXpe/J+sWmgPMSj/v3tN/bzzdh5mZVbW3d0OZmVkvOFiYmVlVDhZmZlaVg4WZmVXlYGFmZlU5WJiVIem+9O8ISR/ayef+arnXMuvLfOusWQ8kvYNsltX3bEeZ+ojo6OH42ogYtDPqZ7aruGVhVoaktWnz28Db0toNX5ZUL+m7kmakyfQ+nfK/I61xcB3Zg1dI+m2arHFe14SNkr4N7JPO94v8a6UnfL8r6dG0lsE5uXPfJelGSX+V9Is+sb6B7VUaqmcx26tNJNeySB/6qyLijZL6A/dKui3lHQO8NrIpugE+ERErJe0DzJB0U0RMlHR+RBxf5rXeSzZB33HAkFTm7nTsBOAYsjmC7gXeCtyz89+uWXluWZhtn1PJ5vmZRTYt9UFk8yABPJQLFABfkDQbeIBswrhR9Ozvgesjm6jveeDPwBtz526NbAK/WcCInfJuzHrJLQuz7SPg8xGx1aSJaWxjXcn+ycBbImK9pLuAAb04dyWbctsd+G/XdjG3LMx6toZs6cwu04HPpCmqkXR0mr231AHAiylQvJpsOc0um7vKl7gbOCeNiwwlW9b0oZ3yLsxeJn87MevZHKA9dSddDfwnWRfQw2mQeTnll7z8A3CepDlks7A+kDt2FTBH0sMR8eFc+m/I1l2eTTar6b9GxHMp2JjVlG+dNTOzqtwNZWZmVTlYmJlZVQ4WZmZWlYOFmZlV5WBhZmZVOViYmVlVDhZmZlbV/wdB/0mXVx7B7wAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Parameters trained\n",
      "Model saved in path: mu_MagDown_test_4/CNN_model.ckpt\n"
     ]
    }
   ],
   "source": [
    "if __name__=='__main__':\n",
    "\n",
    "    if task == 'TRAIN':\n",
    "        if not os.path.exists(PATH):\n",
    "            os.mkdir(PATH)\n",
    "    \n",
    "        elif os.path.exists(PATH):\n",
    "            if os.path.exists(PATH+'/checkpoint'):\n",
    "                ans = input('A previous checkpoint already exists, choose the action to perform \\n \\n 1) Overwrite the current model saved at '+PATH+'/checkpoint \\n 2) Start training a new model \\n 3) Restore and continue training the previous model \\n ')\n",
    "                \n",
    "                if ans == '1':\n",
    "                    print('Overwriting existing model in '+PATH)\n",
    "                    for file in os.listdir(PATH):\n",
    "                        file_path = os.path.join(PATH, file)\n",
    "                        try:\n",
    "                            if os.path.isfile(file_path):\n",
    "                                os.unlink(file_path)\n",
    "                            #elif os.path.isdir(file_path): shutil.rmtree(file_path)\n",
    "                        except Exception as e:\n",
    "                            print(e)\n",
    "                            \n",
    "                elif ans == '2':\n",
    "                    PATH = input('Specify the name of the model, a new directory will be created.\\n')\n",
    "                    os.mkdir(PATH)         \n",
    "        bkg()\n",
    "\n",
    "    elif task == 'TEST': \n",
    "        if not os.path.exists(PATH+'/checkpoint'):\n",
    "            print('No checkpoint to test')\n",
    "        else:\n",
    "            output = bkg()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [],
   "source": [
    "if task=='TEST':\n",
    "\n",
    "    Ds_mass_MC =[MC_sig_dict[\"Ds_ConsD_M\"][i][0] for i in range(m)]\n",
    "    NN_selected = X_test_0[np.argmax(output, axis=1).astype(np.bool)]\n",
    "    Ds_mass_sel_NN = [NN_selected[i][dim] for i in range(NN_selected.shape[0])]\n",
    "    Ds_mass_train_NN =[X_train_0[i][dim] for i in range(X_train_0.shape[0])]\n",
    "\n",
    "    plt.subplot(1,2,1)\n",
    "    plt.hist(Ds_mass_MC,bins=70);\n",
    "    plt.subplot(1,2,2)\n",
    "    plt.hist(Ds_mass_sel_NN,alpha=0.8,bins=70);\n",
    "    #plt.hist(Ds_mass_train_NN,alpha=0.2,bins=70);\n",
    "\n",
    "    fig=plt.gcf();\n",
    "    fig.set_size_inches(20,8)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [],
   "source": [
    "if task=='TRAIN':\n",
    "    hyper_dict={\n",
    "        'm':m,\n",
    "        'test_size':test_size,\n",
    "        'val_size':val_size,\n",
    "        'LEARNING_RATE':LEARNING_RATE,\n",
    "        'BETA1':BETA1,\n",
    "        'BATCH_SIZE':BATCH_SIZE,\n",
    "        'EPOCHS':EPOCHS,\n",
    "        'VAL_PERIOD':VAL_PERIOD,\n",
    "        'SEED':SEED,\n",
    "        'sizes':sizes,\n",
    "        'LAMBD':LAMBD,\n",
    "        'PATH':PATH,\n",
    "    }\n",
    "    with open(PATH+'/hyper_parameters.pkl', 'wb') as f:  \n",
    "        pickle.dump(hyper_dict, f)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}