diff --git "a/1_to_1_multi_layer_roc_auc - Copy \0502\051.ipynb" "b/1_to_1_multi_layer_roc_auc - Copy \0502\051.ipynb" new file mode 100644 index 0000000..71fe718 --- /dev/null +++ "b/1_to_1_multi_layer_roc_auc - Copy \0502\051.ipynb" @@ -0,0 +1,2909 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Import all packages\n", + "***" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "c:\\users\\sa_li\\anaconda3\\envs\\rnn-tf-ker\\lib\\site-packages\\h5py\\__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n", + " from ._conv import register_converters as _register_converters\n", + "Using TensorFlow backend.\n" + ] + } + ], + "source": [ + "import pandas as pd\n", + "import numpy as np\n", + "import matplotlib as mpl\n", + "import random\n", + "import math\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "import tensorflow as tf\n", + "from tensorflow.python.framework import ops\n", + "from sklearn import preprocessing\n", + "import pickle as pkl\n", + "from pathlib import Path\n", + "from keras.datasets import imdb\n", + "from keras.models import Sequential\n", + "from keras.layers import Dense\n", + "from keras.layers import LSTM\n", + "from keras.layers import GRU\n", + "from keras.layers import Dropout, BatchNormalization\n", + "from keras.layers import ConvLSTM2D\n", + "from keras.layers import Conv1D\n", + "#from keras.layers.convolutional import Conv1D\n", + "#from keras.layers.convolutional import MaxPooling1D\n", + "from keras.layers.embeddings import Embedding\n", + "from keras.preprocessing import sequence\n", + "from keras.callbacks import History\n", + "from keras.callbacks import EarlyStopping\n", + "from keras.callbacks import ModelCheckpoint\n", + "from keras.callbacks import TensorBoard\n", + "from keras.models import load_model\n", + "import keras.backend as K\n", + "from keras.losses import mean_squared_error\n", + "\n", + "import xgboost as xgb\n", + "from xgboost import XGBClassifier\n", + "from xgboost import plot_tree\n", + "from sklearn.metrics import accuracy_score\n", + "import graphviz\n", + "\n", + "import sklearn as skl\n", + "\n", + "#import seaborn as sns" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Preprocessing of the matched 8-hit tracks\n", + "\n", + "***\n", + "\n", + "## Import the dataset of the matched 8-hit tracks" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "#import data as array\n", + "# 8 hits with x,y,z\n", + "\n", + "testset = pd.read_pickle('matched_8hittracks.pkl')\n", + "#print(testset)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Convert the data to an array (float32)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "#Convert the data\n", + "\n", + "tset = np.array(testset)\n", + "tset = tset.astype('float32')\n", + "\n", + "#Check testset with arbitrary particle\n", + "\n", + "#print(tset.shape)\n", + "#for i in range(8):\n", + " #print(tset[1,3*i:(3*i+3)])\n", + "#print(tset[0,:])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Transformation between original 2D-array into 3D-array\n", + "\n", + "***\n", + "\n", + 
"### reshapor()\n", + "\n", + "**Description:**\n", + "\n", + "Transforms 2D-array into 3D array\n", + "\n", + "**Arguments:**\n", + "\n", + "- arr_orig: Original 2D array\n", + "- num_inputs: Number of inputs per timestep (default value = 3 for X,Y,Z coordinates)\n", + "\n", + "**Returns:**\n", + "\n", + "- arr: 3D-array of shape(particlenumber, timesteps, input = coordinates)\n", + "\n", + "\n", + "***\n", + "\n", + "\n", + "### reshapor_inv()\n", + "\n", + "**Description:**\n", + "\n", + "Inverse transformation from 3D-array into 2D-array\n", + "\n", + "**Arguments:**\n", + "\n", + "- array_shaped: 3D-array of shape(particlenumber, timesteps, input = coordinates)\n", + "\n", + "**Returns:**\n", + "\n", + "- arr: 2D-array of shape(particlenumber, inputs)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "#Reshapes the 2D-array to a 3D-array\n", + "\n", + "def reshapor(arr_orig, num_inputs=3):\n", + " timesteps = int(arr_orig.shape[1]/num_inputs)\n", + " number_examples = int(arr_orig.shape[0])\n", + " arr = np.zeros((number_examples, timesteps, num_inputs))\n", + " \n", + " for i in range(number_examples):\n", + " for t in range(timesteps):\n", + " arr[i,t,:] = arr_orig[i,num_inputs*t:num_inputs*t+num_inputs]\n", + " \n", + " return arr\n", + "\n", + "#The inverse transformation of the reshapor function (3D to 2D)\n", + "\n", + "def reshapor_inv(array_shaped):\n", + " num_inputs = array_shaped.shape[2]\n", + " timesteps = int(array_shaped.shape[1])\n", + " num_examples = int(array_shaped.shape[0])\n", + " arr = np.zeros((num_examples, timesteps*num_inputs))\n", + " \n", + " for i in range(num_examples):\n", + " for t in range(timesteps):\n", + " arr[i,num_inputs*t:num_inputs*t+num_inputs] = array_shaped[i,t,:]\n", + " \n", + " return arr" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Create random training and test sets from the data\n", + "\n", + "***\n", + "\n", + "### create_random_sets()\n", + "\n", + "**Description:**\n", + "\n", + "Splits an dataset into a train and a test set\n", + "\n", + "\n", + "**Input:**\n", + "\n", + "- dataset: The actual dataset with shape (particles, other dimensions)\n", + "- train_to_total_ratio: The ratio that the training-set should be out of the original set.\n", + " The remaining part will become the test-set\n", + " \n", + "\n", + "**Returns:**\n", + "\n", + "- train_set: The newly created training set (particles, other dimensions)\n", + "- test_set: The newly created test set (particles, other dimensions)\n", + " \n", + " \n", + "**Additional comments:**\n", + "\n", + "The data will be randomly shuffled before it gets split up" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "### create the training set and the test set###\n", + "\n", + "def create_random_sets(dataset, train_to_total_ratio):\n", + " #shuffle the dataset\n", + " num_examples = dataset.shape[0]\n", + " p = np.random.permutation(num_examples)\n", + " dataset = dataset[p,:]\n", + " \n", + " #evaluate size of training and test set and initialize them\n", + " train_set_size = np.int(num_examples*train_to_total_ratio)\n", + " test_set_size = num_examples - train_set_size\n", + " \n", + " train_set = np.zeros((train_set_size, dataset.shape[1]))\n", + " test_set = np.zeros((test_set_size, dataset.shape[1]))\n", + " \n", + "\n", + " #fill train and test sets\n", + " for i in range(num_examples):\n", + " if train_set_size > i:\n", + " 
train_set[i,:] += dataset[i,:]\n", + "        else:\n", + "            test_set[i - train_set_size,:] += dataset[i,:]\n", + "    \n", + "    return train_set, test_set\n", + "    " ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [], "source": [ "#Create the training and test-sets\n", + "\n", + "train_set, test_set = create_random_sets(tset, 0.9)\n", + "\n", + "#print(test_set.shape, train_set.shape, reshapor(tset).shape)\n", + "#print(test_set[0,:,:])" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Normalization of the data\n", + "\n", + "***\n", + "\n", + "## Normalization based on min_max_scaler from sklearn\n", + "\n", + "### correct_array_steps()\n", + "\n", + "**Description:**\n", + "\n", + "Because the scaler is fitted on arrays of a fixed length, this function pads an array with zeros so that it has the correct shape\n", + "\n", + "**Input:**\n", + "\n", + "- arr: 3D array of shape (particle_number, timesteps, num_inputs)\n", + "- steps: Required number of timesteps for the scaler (default value = 8)\n", + "- num_inputs: Number of inputs per timestep (default value = 3 for X,Y,Z coordinates)\n", + "\n", + "**Returns:**\n", + "\n", + "- arr: 3D array of shape (particle_number, steps, num_inputs)\n", + "\n", + "***\n", + "\n", + "### set_min_max_scaler()\n", + "\n", + "**Description:**\n", + "\n", + "Sets the min_max_scaler based on the dataset given (sklearn based)\n", + "\n", + "**Input:**\n", + "\n", + "- arr: 2D array of shape (particle_number, inputs) or 3D array of shape (particle_number, timesteps, num_inputs)\n", + "- feature_range: Tuple which defines the range to which the data is scaled (default value = (-0.9, 0.9))\n", + "\n", + "**Returns:**\n", + "\n", + "- min_max_scalor: min_max_scaler based on the data given\n", + "\n", + "***\n", + "\n", + "### min_max_scaler()\n", + "\n", + "**Description:**\n", + "\n", + "Transforms a 3D array with a given min_max_scaler (sklearn based)\n", + "\n", + "**Input:**\n", + "\n", + "- arr: 3D array of shape (particle_number, timesteps, num_inputs)\n", + "- min_max_scalor: The min_max_scaler used for the transformation (default value: min_max_scalor)\n", + "\n", + "**Returns:**\n", + "\n", + "- arr: Transformed 3D array\n", + "\n", + "***\n", + "\n", + "### min_max_scaler_inv()\n", + "\n", + "**Description:**\n", + "\n", + "Transforms a 3D array with a given min_max_scaler back to its original form (sklearn based)\n", + "\n", + "**Input:**\n", + "\n", + "- arr: 3D array of shape (particle_number, timesteps, num_inputs)\n", + "- min_max_scalor: The min_max_scaler used for the transformation (default value: min_max_scalor)\n", + "\n", + "**Returns:**\n", + "\n", + "- arr: Transformed 3D array" ] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [], "source": [ "#Normalize the data with scikit-learn\n", + "def correct_array_steps(arr, steps= 8, num_inputs= 3): #steps > array_steps\n", + "    if arr.shape[1] != steps:\n", + "        _ = np.zeros((arr.shape[0], steps, num_inputs))\n", + "        _[:,:arr.shape[1],:] += arr\n", + "        arr = _\n", + "    return arr\n", + "\n", + "\n", + "#set the transformation based on the training set\n", + "def set_min_max_scaler(arr, feature_range= (-.9,0.9)):\n", + "    min_max_scalor = preprocessing.MinMaxScaler(feature_range=feature_range)\n", + "    if len(arr.shape) == 3:\n", + "        arr = reshapor(min_max_scalor.fit_transform(reshapor_inv(arr)))\n", + "    else:\n", + "        arr = min_max_scalor.fit_transform(arr)\n", + "    return min_max_scalor\n", + 
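"\n", + "#note: the scalers are fit on the training set only and then reused unchanged for the\n", + "#test data, so no test-set statistics leak into the normalization\n", + 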
"\n", + "min_max_scalor = set_min_max_scaler(train_set)\n", + "\n", + "\n", + "#transform data\n", + "def min_max_scaler(arr, min_max_scalor= min_max_scalor):\n", + " num_inputs = arr.shape[2]\n", + " arr = correct_array_steps(arr)\n", + " arr = reshapor(min_max_scalor.transform(reshapor_inv(arr)), num_inputs=num_inputs)\n", + " return arr\n", + " \n", + "#inverse transformation\n", + "def min_max_scaler_inv(arr, min_max_scalor= min_max_scalor):\n", + " num_inputs = arr.shape[2]\n", + " arr = correct_array_steps(arr)\n", + " arr = reshapor(min_max_scalor.inverse_transform(reshapor_inv(arr)), num_inputs=num_inputs)\n", + " return arr" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Normalization based on a standard_scaler from sklearn\n", + "\n", + "\n", + "### set_std_scaler()\n", + "\n", + "**Description: **\n", + "\n", + "Sets the std_scaler based on the dataset given (sklearn based)\n", + "\n", + "**Input:**\n", + "\n", + "- arr: 2D of shape(particle_number, inputs) or 3D-array of shape(particle_number, timesteps, num_inputs)\n", + "- feature_range: Tuple which defines the area to which the data should be scaled (default value = (-1,1))\n", + "\n", + "**Returns:**\n", + "\n", + "- std_scalor: std_scaler based of the data given\n", + "\n", + "***\n", + "\n", + "### std_scaler()\n", + "\n", + "**Description: **\n", + "\n", + "Transforms a 3D-array with a given std_scaler (sklearn based)\n", + "\n", + "**Input:**\n", + "\n", + "- arr: 3D-array of shape(particle_number, timesteps, num_inputs)\n", + "- std_scalor: The std_scaler used for the transformation (default value: std_scaler)\n", + "\n", + "**Returns:**\n", + "\n", + "- arr: Transformed 3D-array\n", + "\n", + "***\n", + "\n", + "### std_scaler_inv()\n", + "\n", + "**Description: **\n", + "\n", + "Transforms a 3D-array with a given std_scaler back to original form (sklearn based)\n", + "\n", + "**Input:**\n", + "\n", + "- arr: 3D-array of shape(particle_number, timesteps, num_inputs)\n", + "- std_scalor: The std_scaler used for the transformation (default value: std_scaler)\n", + "\n", + "**Returns:**\n", + "\n", + "- arr: Transformed 3D-array" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "#Normalize the data advanced version with scikit learn - Standard scaler\n", + "\n", + "#set the transormation based on training set\n", + "def set_std_scaler(arr):\n", + " std_scalor = preprocessing.StandardScaler()\n", + " if len(arr.shape) == 3:\n", + " arr = reshapor(std_scalor.fit_transform(reshapor_inv(arr))) \n", + " else:\n", + " arr = std_scalor.fit_transform(arr)\n", + " return std_scalor\n", + "\n", + "std_scalor = set_std_scaler(train_set)\n", + "\n", + "#transform data\n", + "def std_scaler(arr, std_scalor= std_scalor, num_inputs=3):\n", + " arr = correct_array_steps(arr)\n", + " arr = reshapor(std_scalor.transform(reshapor_inv(arr)))\n", + " return arr\n", + " \n", + "#inverse transformation\n", + "def std_scaler_inv(arr, std_scalor= std_scalor, num_inputs=3):\n", + " arr = correct_array_steps(arr)\n", + " arr = reshapor(std_scalor.inverse_transform(reshapor_inv(arr)))\n", + " return arr\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "#reshape the data\n", + "\n", + "train_set = reshapor(train_set)\n", + "test_set = reshapor(test_set)\n", + "\n", + "#print(train_set[0,:,:])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Scale the data 
"\n", + "### scaler()\n", + "\n", + "**Description:**\n", + "\n", + "Shapes and transforms the data with a given sklearn scaler\n", + "\n", + "**Arguments:**\n", + "\n", + "- arr: 3D array of shape (particle_number, timesteps, num_inputs)\n", + "- std_scalor: std_scaler of sklearn (default value = std_scalor defined above)\n", + "- min_max_scalor: min_max_scaler of sklearn (default value = min_max_scalor defined above)\n", + "- scalerfunc: Name of the scaling function to use (default value = \"minmax\")\n", + "- scalor: sklearn scaler; if a scaler is given, the array arr is shaped and scaled with scalor (default value = False)\n", + "\n", + "**Returns:**\n", + "\n", + "- arr: Shaped and transformed 3D array of shape (particle_number, timesteps, num_inputs)\n", + "\n", + "**Additional comments:**\n", + "\n", + "Supported values for scalerfunc are \"minmax\" and \"std\", for a min_max_scaler and a std_scaler respectively.\n", + "Any other scaler has to be passed via the scalor argument.\n", + "\n", + "***\n", + "\n", + "### scaler_inv()\n", + "\n", + "**Description:**\n", + "\n", + "Shapes and transforms the data back to its original form with a given sklearn scaler\n", + "\n", + "**Arguments:**\n", + "\n", + "- arr: 3D array of shape (particle_number, timesteps, num_inputs)\n", + "- std_scalor: std_scaler of sklearn (default value = std_scalor defined above)\n", + "- min_max_scalor: min_max_scaler of sklearn (default value = min_max_scalor defined above)\n", + "- scalerfunc: Name of the scaling function to use (default value = \"minmax\")\n", + "- scalor: sklearn scaler; if a scaler is given, the array arr is shaped and scaled with scalor (default value = False)\n", + "\n", + "**Returns:**\n", + "\n", + "- arr: Shaped and inverse-transformed 3D array of shape (particle_number, timesteps, num_inputs)" ] }, { "cell_type": "code", "execution_count": 10, "metadata": {}, "outputs": [], "source": [ "def scaler(arr, std_scalor= std_scalor, min_max_scalor= min_max_scalor, scalerfunc= \"minmax\", scalor = False):\n", + "\n", + "    if scalor != False:\n", + "        arr = correct_array_steps(arr)\n", + "        arr = reshapor(scalor.transform(reshapor_inv(arr)))\n", + "        return arr\n", + "\n", + "    elif scalerfunc == \"std\":\n", + "        arr = std_scaler(arr, std_scalor= std_scalor)\n", + "        return arr\n", + "\n", + "    elif scalerfunc == \"minmax\":\n", + "        arr = min_max_scaler(arr, min_max_scalor= min_max_scalor)\n", + "        return arr\n", + "\n", + "    else:\n", + "        raise ValueError(\"Unknown scaler chosen: {}\".format(scalerfunc))\n", + "\n", + "def scaler_inv(arr, std_scalor= std_scalor, min_max_scalor= min_max_scalor, scalerfunc= \"minmax\", scalor = False):\n", + "\n", + "    num_inputs = arr.shape[2]\n", + "\n", + "    if scalor != False:\n", + "        arr = correct_array_steps(arr)\n", + "        arr = reshapor(scalor.inverse_transform(reshapor_inv(arr)))\n", + "        return arr\n", + "\n", + "    elif scalerfunc == \"std\":\n", + "        arr = std_scaler_inv(arr, std_scalor= std_scalor)\n", + "        return arr\n", + "\n", + "    elif scalerfunc == \"minmax\":\n", + "        arr = min_max_scaler_inv(arr, min_max_scalor= min_max_scalor)\n", + "        return arr\n", + "\n", + "    else:\n", + "        raise ValueError(\"Unknown scaler chosen: {}\".format(scalerfunc))\n" ] }, { "cell_type": "code", "execution_count": 11, "metadata": {}, "outputs": [], "source": [ "#scale the data\n", + "scalor = pkl.load( open(\"scalor.pkl\" , \"rb\" ) 
)\n", + "#scalor = min_max_scalor\n", + "\n", + "#pkl.dump(scalor , open(\"scalor.pkl\" , \"wb\" ) )\n", + "\n", + "func = \"minmax\"\n", + "\n", + "train_set = scaler(train_set, scalerfunc = func, scalor= scalor)\n", + "test_set = scaler(test_set, scalerfunc = func, scalor= scalor)\n", + "\n", + "if func == \"minmax\":\n", + " scalor = min_max_scalor\n", + "elif func == \"std\":\n", + " scalor = std_scalor\n", + "\n", + "#print(train_set[0,:,:])" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [], + "source": [ + "# truncate and pad input sequences\n", + "max_review_length = 4\n", + "filepath = \"trained_models/keras_RNN2.h5\"\n", + "\n", + "X_train = train_set[:,:-4,:]\n", + "y_train = reshapor_inv(train_set[:,4:,:])\n", + "X_test = test_set[:,:-4,:]\n", + "y_test = reshapor_inv(test_set[:,4:,:])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def custom_loss(y_true, y_pred):\n", + " loss = 0\n", + " \n", + " #tensor_shape = K.int_shape(y_true)[1]\n", + " true_y_val = K.get_value(y_true)\n", + " pred_y_val = K.get_value(y_pred)\n", + " \n", + " for step in range(4):\n", + " true_step_coord = true_y_val[3*step: 3*step+3]\n", + " pred_step_coord = pred_y_val[3*step: 3*step+3]\n", + "\n", + " loss += K.sqrt(mean_squared_error(true_step_coord, pred_step_coord))\n", + " \n", + " return loss\n", + "\n", + " \n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "print(y_train.shape)\n", + "\n", + "#tf.reset_default_graph()\n", + "\n", + "callbacks = [\n", + " EarlyStopping(monitor='val_loss', patience=30, min_delta=0.0001),\n", + " ModelCheckpoint(filepath, monitor='val_loss', save_best_only=True),\n", + " History(),\n", + " #TensorBoard(log_dir= filepath[:-3] + \"/logs\", histogram_freq=1, batch_size=50)\n", + "]\n", + "\n", + "#\n", + "\n", + "# create the model\n", + "model = Sequential()\n", + "model.add(LSTM(50, return_sequences=True, input_shape=(4,3), activation = 'selu'))\n", + "#model.add(BatchNormalization())\n", + "model.add(LSTM(50, return_sequences=False, activation = 'selu')) \n", + "#model.add(BatchNormalization())\n", + "model.add(Dense(50, activation='selu'))\n", + "#model.add(BatchNormalization())\n", + "model.add(Dense(12, activation='linear'))\n", + "model.compile(loss='mean_squared_error', optimizer='adam', metrics=['mse'])\n", + "print(model.summary())\n", + "\n", + "\n", + "model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=500, batch_size=50, callbacks= callbacks, verbose = 1)\n", + "# model_4_to_4 = load_model(filepath)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "model_4_to_4 = load_model(filepath)\n", + "\n", + "prediction = model_4_to_4.predict(X_test)\n", + "\n", + "print(model_4_to_4.summary())\n", + "\n", + "print(prediction[615,:]-y_test[615,:])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Finding the right track\n", + "\n", + "***\n", + "\n", + "## Loading the data" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
x1y1z1x2y2z2x3y3z3x4...z6x7y7z7x8y8z8eventtrackchi2matched
0-2.24858223.380732-6.040000-6.48999928.598572-5.640000-21.72477167.052704-3.240000-22.225971...7.55999928.8026563.9014626.04000021.4213926.9788455.640000124.013.3963791.0
1-20.411108-9.4178874.760000-27.813803-6.9448434.760000-66.73694622.9032004.360000-74.096100...-4.04000026.880571-9.817033-4.84000019.684010-11.173258-5.000000220.03.0362351.0
2-20.411108-9.4178874.760000-27.813803-6.9448434.760000-66.73694622.9032004.360000-74.096100...-4.04000026.880571-9.817033-4.84000019.684010-11.173258-5.000000220.03.0362351.0
3-20.411108-9.4178874.760000-27.813803-6.9448434.760000-66.73694622.9032004.360000-74.096100...-3.08000029.303265-2.360749-1.40000022.469944-4.447415-1.080000220.025.0332890.0
4-20.411108-9.4178874.760000-27.813803-6.9448434.760000-66.73694622.9032004.360000-74.096100...-3.08000029.303265-2.360749-1.40000022.469944-4.447415-1.080000220.025.0332890.0
5-6.01801121.819384-23.240000-10.00999828.598572-23.320000-25.35665366.079544-23.639999-28.309521...-6.92000025.644503-13.621260-6.20000120.724909-8.660306-6.200001312.027.8346670.0
6-6.01801121.819384-23.240000-10.00999828.598572-23.320000-25.35665366.079544-23.639999-28.309521...-6.92000025.644503-13.621260-6.20000120.724909-8.660306-6.200001312.027.8346670.0
7-6.01801121.819384-23.240000-10.00999828.598572-23.320000-25.35665366.079544-23.639999-28.309521...-18.36000125.490572-16.829796-18.04000119.622780-11.321079-18.040001312.07.2924680.0
86.59081621.582119-40.6800009.94126428.127028-40.84000033.71446261.815155-41.63999942.568947...-53.320000-6.150001-28.598572-54.439999-5.408250-22.071955-54.520000326.010.0815281.0
920.04373210.304811-22.68000026.00914412.499014-22.44000168.27433017.165602-21.24000080.994446...-33.239998-20.296682-20.603376-32.279999-17.410870-16.661102-32.119999352.043.5656200.0
1020.04373210.304811-22.68000026.00914412.499014-22.44000168.27433017.165602-21.24000080.994446...-10.840000-21.332224-19.851011-10.440001-17.870090-15.552447-10.440001352.016.8551641.0
11-16.06982017.65578815.719999-21.21896219.93330215.559999-57.20545242.34206814.440001-65.752090...11.00000027.4429828.08611110.28000020.7478698.60487410.120000539.016.2590470.0
12-6.31365221.69692416.039999-4.48999928.59857215.63999915.48488968.72467814.12000023.103405...28.520000-7.870180-29.63175831.000000-9.177679-20.51060531.799999668.037.1937711.0
13-20.7555248.586395-10.680000-24.58447317.488115-10.680000-32.93503662.265152-10.600000-32.243843...11.5599994.249999-28.59857211.1600000.726311-24.61297211.080000891.019.8763730.0
14-20.7555248.586395-10.680000-24.58447317.488115-10.680000-32.93503662.265152-10.600000-32.243843...11.5599994.249999-28.59857211.1600000.726311-24.61297211.080000891.019.8763730.0
15-20.7555248.586395-10.680000-24.58447317.488115-10.680000-32.93503662.265152-10.600000-32.243843...11.5599994.249999-28.59857211.1600000.726311-24.61297211.080000891.019.8763730.0
16-20.7555248.586395-10.680000-24.58447317.488115-10.680000-32.93503662.265152-10.600000-32.243843...11.5599994.249999-28.59857211.1600000.726311-24.61297211.080000891.019.8763730.0
17-20.7555248.586395-10.680000-24.58447317.488115-10.680000-32.93503662.265152-10.600000-32.243843...-3.8000005.770000-28.598572-5.2400011.287747-23.778723-5.400001891.07.5833500.0
18-20.7555248.586395-10.680000-24.58447317.488115-10.680000-32.93503662.265152-10.600000-32.243843...-3.8000005.770000-28.598572-5.2400011.287747-23.778723-5.400001891.07.5833500.0
19-20.7555248.586395-10.680000-24.58447317.488115-10.680000-32.93503662.265152-10.600000-32.243843...-3.3200007.690000-28.598572-2.8400002.544224-23.258274-2.680000891.04.6119901.0
20-20.7555248.586395-10.680000-24.58447317.488115-10.680000-32.93503662.265152-10.600000-32.243843...-32.91999811.049999-28.598572-35.7200014.244162-22.554136-36.119999891.035.6422840.0
21-3.412670-22.898550-22.360001-5.110001-28.598572-21.880001-25.453243-66.053665-19.160000-33.054714...-10.040001-6.56999928.598572-9.800000-4.83544522.309217-9.7200001026.036.1785051.0
2218.35992414.369882-43.23999824.30337017.748846-43.48000064.02515429.886627-44.20000176.569237...-48.759998-19.520025-21.167650-49.799999-17.012880-17.621937-49.8800011027.07.3863581.0
2318.35992414.369882-43.23999824.30337017.748846-43.48000064.02515429.886627-44.20000176.569237...-56.200001-17.319500-22.766426-56.680000-14.425314-18.336964-56.8400001027.016.4788610.0
2418.35992414.369882-43.23999824.30337017.748846-43.48000064.02515429.886627-44.20000176.569237...-56.200001-17.319500-22.766426-56.680000-14.425314-18.336964-56.8400001027.016.4788610.0
25-22.1025705.33434015.799999-28.2896885.48021516.440001-67.81362918.88494919.320000-78.018410...43.40000224.62474616.75974744.84000018.84976013.18731644.9199981067.014.5809061.0
26-22.1025705.33434015.799999-28.2896885.48021516.440001-67.81362918.88494919.320000-78.018410...43.40000224.62474616.75974744.84000018.84976013.18731644.9199981067.014.5809061.0
27-22.1025705.33434015.799999-28.2896885.48021516.440001-67.81362918.88494919.320000-78.018410...43.40000224.62474616.75974744.84000018.84976013.18731644.9199981067.014.5809061.0
28-17.54802717.04349514.920000-19.34204121.29696314.839999-30.44088463.70515413.720000-31.450989...-27.71999916.429581-23.412991-29.95999912.226481-19.247751-30.2800011097.044.6859170.0
29-17.54802717.04349514.920000-19.34204121.29696314.839999-30.44088463.70515413.720000-31.450989...-27.71999916.429581-23.412991-29.95999912.226481-19.247751-30.2800011097.044.6859170.0
..................................................................
109791-2.969208-23.08223926.760000-6.150001-28.59857226.760000-33.783745-61.77515827.320000-44.113327...53.79999914.92480924.50627154.84000013.83403218.58188155.16000019090.023.2342001.0
109792-2.969208-23.08223926.760000-6.150001-28.59857226.760000-33.783745-61.77515827.320000-44.113327...53.79999914.92480924.50627154.84000013.83403218.58188155.16000019090.023.2342001.0
109793-19.62278011.32107912.679999-26.43558711.18655412.599999-67.52375819.96678511.960000-78.462158...-21.48000020.10251820.744446-22.52000013.83403218.581881-22.52000019109.029.6021160.0
1097948.309232-20.87032910.36000014.487940-24.82367510.44000045.608902-53.93861811.00000052.103645...-4.840000-27.986851-6.412251-5.400001-20.686640-8.752694-5.40000119130.035.6180420.0
10979515.478537-17.90070534.20000119.212601-21.39100834.36000146.061451-53.48606935.00000051.978550...38.599998-26.26253711.71914737.959999-20.5718359.02985837.87999719146.029.9436660.0
10979615.478537-17.90070534.20000119.212601-21.39100834.36000146.061451-53.48606935.00000051.978550...43.320000-26.33670011.49089242.520000-20.6024498.95594842.43999919146.034.5578000.0
10979715.478537-17.90070534.20000119.212601-21.39100834.36000146.061451-53.48606935.00000051.978550...45.160000-26.46030811.11047045.720001-20.7555248.58639545.72000119146.017.6821921.0
1097980.30843524.18436619.0800002.07000128.59857219.00000040.15768858.09515818.43999958.581963...16.6800007.690000-28.59857212.6799992.913775-23.10520011.88000019171.015.4717781.0
1097990.30843524.18436619.0800002.07000128.59857219.00000040.15768858.09515818.43999958.581963...16.6800007.690000-28.59857212.6799992.913775-23.10520011.88000019171.015.4717781.0
1098000.30843524.18436619.0800002.07000128.59857219.00000040.15768858.09515818.43999958.581963...16.6800007.690000-28.59857212.6799992.913775-23.10520011.88000019171.015.4717781.0
1098010.30843524.18436619.0800002.07000128.59857219.00000040.15768858.09515818.43999958.581963...16.6800007.690000-28.59857212.6799992.913775-23.10520011.88000019171.015.4717781.0
1098020.30843524.18436619.0800002.07000128.59857219.00000040.15768858.09515818.43999958.581963...16.6800007.690000-28.59857212.6799992.913775-23.10520011.88000019171.015.4717781.0
1098030.30843524.18436619.0800002.07000128.59857219.00000040.15768858.09515818.43999958.581963...16.6800007.690000-28.59857212.6799992.913775-23.10520011.88000019171.015.4717781.0
109804-8.882037-20.633064-6.040000-6.230000-28.598572-5.800000-1.250000-70.390724-4.680000-2.370000...-11.480000-25.94115812.708245-10.840000-21.8576515.925623-10.52000019219.032.8466111.0
109805-9.71352920.28865135.320000-12.41685726.32840535.400002-10.10999970.39072434.279999-1.789999...42.27999928.314409-5.40413147.40000222.531174-4.29959548.20000119260.09.6615841.0
109806-9.71352920.28865135.320000-12.41685726.32840535.400002-10.10999970.39072434.279999-1.789999...42.27999928.314409-5.40413147.40000222.531174-4.29959548.20000119260.09.6615841.0
109807-9.71352920.28865135.320000-12.41685726.32840535.400002-10.10999970.39072434.279999-1.789999...42.27999928.314409-5.40413147.40000222.531174-4.29959548.20000119260.09.6615841.0
109808-2.082284-23.449615-2.9200000.809999-28.598572-2.52000015.929216-68.6056210.28000018.170271...42.759998-27.9930326.39322946.040001-23.5108431.93446446.43999919518.011.0216221.0
109809-2.082284-23.449615-2.9200000.809999-28.598572-2.52000015.929216-68.6056210.28000018.170271...42.759998-27.9930326.39322946.040001-23.5108431.93446446.43999919518.011.0216221.0
109810-2.082284-23.449615-2.9200000.809999-28.598572-2.52000015.929216-68.6056210.28000018.170271...42.759998-27.9930326.39322946.040001-23.5108431.93446446.43999919518.011.0216221.0
109811-19.921274-10.60045319.559999-24.649467-16.68366219.400000-56.025154-43.74303418.840000-65.964081...9.959999-0.57000028.5985728.599999-2.47031323.2888878.36000019551.07.5439671.0
109812-19.921274-10.60045319.559999-24.649467-16.68366219.400000-56.025154-43.74303418.840000-65.964081...9.959999-0.57000028.5985728.599999-2.47031323.2888878.36000019551.07.5439671.0
1098131.86055323.541458-11.4800003.43000128.598572-11.32000125.45324366.053665-10.04000134.135880...-5.6400000.570000-28.598572-6.120000-0.382345-24.153751-6.20000119617.08.0627631.0
10981423.174084-2.74747811.32000027.646933-7.45841311.55999953.698204-45.84931613.32000058.983761...40.439999-11.70492226.84565541.320000-4.83544522.30921741.48000019682.017.1399381.0
10981523.174084-2.74747811.32000027.646933-7.45841311.55999953.698204-45.84931613.32000058.983761...40.439999-11.70492226.84565541.320000-4.83544522.30921741.48000019682.017.1399381.0
109816-12.37430219.186522-29.720001-16.42958123.412991-29.799999-33.76641861.785152-30.760000-34.045780...-45.63999929.303265-2.360749-48.04000123.847605-1.121449-48.27999919721.011.0050831.0
109817-20.9392118.142934-22.440001-26.41086611.262639-22.760000-59.61515837.524971-24.840000-67.946777...-56.27999926.410866-11.262639-57.72000120.173845-9.990692-57.95999919787.07.7969331.0
10981810.748274-19.860044-38.52000013.322956-25.670086-38.43999925.279379-66.100250-38.04000126.593647...-25.080000-20.05397820.779713-25.560001-16.66110217.410870-25.56000119826.019.2362191.0
10981910.748274-19.860044-38.52000013.322956-25.670086-38.43999925.279379-66.100250-38.04000126.593647...-25.080000-20.05397820.779713-25.560001-16.66110217.410870-25.56000119826.019.2362191.0
10982010.748274-19.860044-38.52000013.322956-25.670086-38.43999925.279379-66.100250-38.04000126.593647...-9.080000-20.76591120.262461-7.720000-17.76975816.951651-7.56000019826.020.7843910.0
\n", + "

109821 rows × 27 columns

\n", + "
" + ], + "text/plain": [ + " x1 y1 z1 x2 y2 z2 \\\n", + "0 -2.248582 23.380732 -6.040000 -6.489999 28.598572 -5.640000 \n", + "1 -20.411108 -9.417887 4.760000 -27.813803 -6.944843 4.760000 \n", + "2 -20.411108 -9.417887 4.760000 -27.813803 -6.944843 4.760000 \n", + "3 -20.411108 -9.417887 4.760000 -27.813803 -6.944843 4.760000 \n", + "4 -20.411108 -9.417887 4.760000 -27.813803 -6.944843 4.760000 \n", + "5 -6.018011 21.819384 -23.240000 -10.009998 28.598572 -23.320000 \n", + "6 -6.018011 21.819384 -23.240000 -10.009998 28.598572 -23.320000 \n", + "7 -6.018011 21.819384 -23.240000 -10.009998 28.598572 -23.320000 \n", + "8 6.590816 21.582119 -40.680000 9.941264 28.127028 -40.840000 \n", + "9 20.043732 10.304811 -22.680000 26.009144 12.499014 -22.440001 \n", + "10 20.043732 10.304811 -22.680000 26.009144 12.499014 -22.440001 \n", + "11 -16.069820 17.655788 15.719999 -21.218962 19.933302 15.559999 \n", + "12 -6.313652 21.696924 16.039999 -4.489999 28.598572 15.639999 \n", + "13 -20.755524 8.586395 -10.680000 -24.584473 17.488115 -10.680000 \n", + "14 -20.755524 8.586395 -10.680000 -24.584473 17.488115 -10.680000 \n", + "15 -20.755524 8.586395 -10.680000 -24.584473 17.488115 -10.680000 \n", + "16 -20.755524 8.586395 -10.680000 -24.584473 17.488115 -10.680000 \n", + "17 -20.755524 8.586395 -10.680000 -24.584473 17.488115 -10.680000 \n", + "18 -20.755524 8.586395 -10.680000 -24.584473 17.488115 -10.680000 \n", + "19 -20.755524 8.586395 -10.680000 -24.584473 17.488115 -10.680000 \n", + "20 -20.755524 8.586395 -10.680000 -24.584473 17.488115 -10.680000 \n", + "21 -3.412670 -22.898550 -22.360001 -5.110001 -28.598572 -21.880001 \n", + "22 18.359924 14.369882 -43.239998 24.303370 17.748846 -43.480000 \n", + "23 18.359924 14.369882 -43.239998 24.303370 17.748846 -43.480000 \n", + "24 18.359924 14.369882 -43.239998 24.303370 17.748846 -43.480000 \n", + "25 -22.102570 5.334340 15.799999 -28.289688 5.480215 16.440001 \n", + "26 -22.102570 5.334340 15.799999 -28.289688 5.480215 16.440001 \n", + "27 -22.102570 5.334340 15.799999 -28.289688 5.480215 16.440001 \n", + "28 -17.548027 17.043495 14.920000 -19.342041 21.296963 14.839999 \n", + "29 -17.548027 17.043495 14.920000 -19.342041 21.296963 14.839999 \n", + "... ... ... ... ... ... ... 
\n", + "109791 -2.969208 -23.082239 26.760000 -6.150001 -28.598572 26.760000 \n", + "109792 -2.969208 -23.082239 26.760000 -6.150001 -28.598572 26.760000 \n", + "109793 -19.622780 11.321079 12.679999 -26.435587 11.186554 12.599999 \n", + "109794 8.309232 -20.870329 10.360000 14.487940 -24.823675 10.440000 \n", + "109795 15.478537 -17.900705 34.200001 19.212601 -21.391008 34.360001 \n", + "109796 15.478537 -17.900705 34.200001 19.212601 -21.391008 34.360001 \n", + "109797 15.478537 -17.900705 34.200001 19.212601 -21.391008 34.360001 \n", + "109798 0.308435 24.184366 19.080000 2.070001 28.598572 19.000000 \n", + "109799 0.308435 24.184366 19.080000 2.070001 28.598572 19.000000 \n", + "109800 0.308435 24.184366 19.080000 2.070001 28.598572 19.000000 \n", + "109801 0.308435 24.184366 19.080000 2.070001 28.598572 19.000000 \n", + "109802 0.308435 24.184366 19.080000 2.070001 28.598572 19.000000 \n", + "109803 0.308435 24.184366 19.080000 2.070001 28.598572 19.000000 \n", + "109804 -8.882037 -20.633064 -6.040000 -6.230000 -28.598572 -5.800000 \n", + "109805 -9.713529 20.288651 35.320000 -12.416857 26.328405 35.400002 \n", + "109806 -9.713529 20.288651 35.320000 -12.416857 26.328405 35.400002 \n", + "109807 -9.713529 20.288651 35.320000 -12.416857 26.328405 35.400002 \n", + "109808 -2.082284 -23.449615 -2.920000 0.809999 -28.598572 -2.520000 \n", + "109809 -2.082284 -23.449615 -2.920000 0.809999 -28.598572 -2.520000 \n", + "109810 -2.082284 -23.449615 -2.920000 0.809999 -28.598572 -2.520000 \n", + "109811 -19.921274 -10.600453 19.559999 -24.649467 -16.683662 19.400000 \n", + "109812 -19.921274 -10.600453 19.559999 -24.649467 -16.683662 19.400000 \n", + "109813 1.860553 23.541458 -11.480000 3.430001 28.598572 -11.320001 \n", + "109814 23.174084 -2.747478 11.320000 27.646933 -7.458413 11.559999 \n", + "109815 23.174084 -2.747478 11.320000 27.646933 -7.458413 11.559999 \n", + "109816 -12.374302 19.186522 -29.720001 -16.429581 23.412991 -29.799999 \n", + "109817 -20.939211 8.142934 -22.440001 -26.410866 11.262639 -22.760000 \n", + "109818 10.748274 -19.860044 -38.520000 13.322956 -25.670086 -38.439999 \n", + "109819 10.748274 -19.860044 -38.520000 13.322956 -25.670086 -38.439999 \n", + "109820 10.748274 -19.860044 -38.520000 13.322956 -25.670086 -38.439999 \n", + "\n", + " x3 y3 z3 x4 ... z6 \\\n", + "0 -21.724771 67.052704 -3.240000 -22.225971 ... 7.559999 \n", + "1 -66.736946 22.903200 4.360000 -74.096100 ... -4.040000 \n", + "2 -66.736946 22.903200 4.360000 -74.096100 ... -4.040000 \n", + "3 -66.736946 22.903200 4.360000 -74.096100 ... -3.080000 \n", + "4 -66.736946 22.903200 4.360000 -74.096100 ... -3.080000 \n", + "5 -25.356653 66.079544 -23.639999 -28.309521 ... -6.920000 \n", + "6 -25.356653 66.079544 -23.639999 -28.309521 ... -6.920000 \n", + "7 -25.356653 66.079544 -23.639999 -28.309521 ... -18.360001 \n", + "8 33.714462 61.815155 -41.639999 42.568947 ... -53.320000 \n", + "9 68.274330 17.165602 -21.240000 80.994446 ... -33.239998 \n", + "10 68.274330 17.165602 -21.240000 80.994446 ... -10.840000 \n", + "11 -57.205452 42.342068 14.440001 -65.752090 ... 11.000000 \n", + "12 15.484889 68.724678 14.120000 23.103405 ... 28.520000 \n", + "13 -32.935036 62.265152 -10.600000 -32.243843 ... 11.559999 \n", + "14 -32.935036 62.265152 -10.600000 -32.243843 ... 11.559999 \n", + "15 -32.935036 62.265152 -10.600000 -32.243843 ... 11.559999 \n", + "16 -32.935036 62.265152 -10.600000 -32.243843 ... 11.559999 \n", + "17 -32.935036 62.265152 -10.600000 -32.243843 ... 
-3.800000 \n", + "18 -32.935036 62.265152 -10.600000 -32.243843 ... -3.800000 \n", + "19 -32.935036 62.265152 -10.600000 -32.243843 ... -3.320000 \n", + "20 -32.935036 62.265152 -10.600000 -32.243843 ... -32.919998 \n", + "21 -25.453243 -66.053665 -19.160000 -33.054714 ... -10.040001 \n", + "22 64.025154 29.886627 -44.200001 76.569237 ... -48.759998 \n", + "23 64.025154 29.886627 -44.200001 76.569237 ... -56.200001 \n", + "24 64.025154 29.886627 -44.200001 76.569237 ... -56.200001 \n", + "25 -67.813629 18.884949 19.320000 -78.018410 ... 43.400002 \n", + "26 -67.813629 18.884949 19.320000 -78.018410 ... 43.400002 \n", + "27 -67.813629 18.884949 19.320000 -78.018410 ... 43.400002 \n", + "28 -30.440884 63.705154 13.720000 -31.450989 ... -27.719999 \n", + "29 -30.440884 63.705154 13.720000 -31.450989 ... -27.719999 \n", + "... ... ... ... ... ... ... \n", + "109791 -33.783745 -61.775158 27.320000 -44.113327 ... 53.799999 \n", + "109792 -33.783745 -61.775158 27.320000 -44.113327 ... 53.799999 \n", + "109793 -67.523758 19.966785 11.960000 -78.462158 ... -21.480000 \n", + "109794 45.608902 -53.938618 11.000000 52.103645 ... -4.840000 \n", + "109795 46.061451 -53.486069 35.000000 51.978550 ... 38.599998 \n", + "109796 46.061451 -53.486069 35.000000 51.978550 ... 43.320000 \n", + "109797 46.061451 -53.486069 35.000000 51.978550 ... 45.160000 \n", + "109798 40.157688 58.095158 18.439999 58.581963 ... 16.680000 \n", + "109799 40.157688 58.095158 18.439999 58.581963 ... 16.680000 \n", + "109800 40.157688 58.095158 18.439999 58.581963 ... 16.680000 \n", + "109801 40.157688 58.095158 18.439999 58.581963 ... 16.680000 \n", + "109802 40.157688 58.095158 18.439999 58.581963 ... 16.680000 \n", + "109803 40.157688 58.095158 18.439999 58.581963 ... 16.680000 \n", + "109804 -1.250000 -70.390724 -4.680000 -2.370000 ... -11.480000 \n", + "109805 -10.109999 70.390724 34.279999 -1.789999 ... 42.279999 \n", + "109806 -10.109999 70.390724 34.279999 -1.789999 ... 42.279999 \n", + "109807 -10.109999 70.390724 34.279999 -1.789999 ... 42.279999 \n", + "109808 15.929216 -68.605621 0.280000 18.170271 ... 42.759998 \n", + "109809 15.929216 -68.605621 0.280000 18.170271 ... 42.759998 \n", + "109810 15.929216 -68.605621 0.280000 18.170271 ... 42.759998 \n", + "109811 -56.025154 -43.743034 18.840000 -65.964081 ... 9.959999 \n", + "109812 -56.025154 -43.743034 18.840000 -65.964081 ... 9.959999 \n", + "109813 25.453243 66.053665 -10.040001 34.135880 ... -5.640000 \n", + "109814 53.698204 -45.849316 13.320000 58.983761 ... 40.439999 \n", + "109815 53.698204 -45.849316 13.320000 58.983761 ... 40.439999 \n", + "109816 -33.766418 61.785152 -30.760000 -34.045780 ... -45.639999 \n", + "109817 -59.615158 37.524971 -24.840000 -67.946777 ... -56.279999 \n", + "109818 25.279379 -66.100250 -38.040001 26.593647 ... -25.080000 \n", + "109819 25.279379 -66.100250 -38.040001 26.593647 ... -25.080000 \n", + "109820 25.279379 -66.100250 -38.040001 26.593647 ... 
-9.080000  \n",
+       "\n",
+       "              x7         y7         z7         x8         y8        z8   event  trackchi2  matched\n",
+       "...          ...        ...        ...        ...        ...       ...     ...        ...      ...\n",
+       "\n",
+       "[109821 rows x 27 columns]"
+      ]
+     },
+     "execution_count": 13,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "matched_unmatched_tracks = pd.read_pickle('matched_8hittracks2.pkl')\n",
+    "matched_unmatched_tracks"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 14,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#plt.figure()\n",
+    "#plt.hist(matched_unmatched_tracks['trackchi2'][matched_unmatched_tracks['matched'] == 1], label='matched', alpha=0.5, normed=True)\n",
+    "#plt.hist(matched_unmatched_tracks['trackchi2'][matched_unmatched_tracks['matched'] == 0], label='not matched', alpha=0.5, normed=True)\n",
+    "#plt.legend()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 15,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "(109821, 8, 3)\n"
+     ]
+    }
+   ],
+   "source": [
+    "matched_unmatched_tracks = np.array(matched_unmatched_tracks)\n",
+    "matched_unmatched_tracks = matched_unmatched_tracks.astype('float32')\n",
+    "# split off the label and bookkeeping columns (event, trackchi2, matched)\n",
+    "truth = matched_unmatched_tracks[:,-1]\n",
+    "chi2 = matched_unmatched_tracks[:,-2]\n",
+    "event_id = matched_unmatched_tracks[:,-3].astype('int')\n",
+    "# keep the 24 hit coordinates, reshape to (n_tracks, 8, 3) and scale them\n",
+    "tracks = scaler(reshapor(matched_unmatched_tracks[:,:-3]), scalerfunc=func, scalor=scalor)\n",
+    "\n",
+    "print(tracks.shape)\n",
+    "\n",
+    "#print(reshapor_inv(tset_matched).shape)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 16,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "array_len = truth.shape[0]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 17,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# append the per-track chi2 as a fourth feature on every hit\n",
+    "track_with_chi = np.zeros((array_len, 8, 4))\n",
+    "track_with_chi[:,:,:3] = tracks\n",
+    "track_with_chi[:,:,3] = chi2[:, np.newaxis]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 18,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# column 0: matched label, column 1: event id\n",
+    "truth_and_ev_id = np.zeros((array_len,2))\n",
+    "truth_and_ev_id[:,0] = truth\n",
+    "truth_and_ev_id[:,1] = event_id"
+   ]
+  },
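+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Quick sanity check before the split: the shapes of the classifier inputs and the fraction of matched tracks, which sets the majority-class baseline for the classifiers below. The cell only uses arrays defined above."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# shapes of the classifier inputs and the class balance of the labels\n",
+    "print(\"track_with_chi: \", track_with_chi.shape)\n",
+    "print(\"truth_and_ev_id:\", truth_and_ev_id.shape)\n",
+    "print(\"matched fraction:\", truth.mean())"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 19,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [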
"c:\\users\\sa_li\\anaconda3\\envs\\rnn-tf-ker\\lib\\site-packages\\sklearn\\model_selection\\_split.py:2026: FutureWarning: From version 0.21, test_size will always complement train_size unless both are specified.\n", + " FutureWarning)\n" + ] + } + ], + "source": [ + "X_train, X_test, Y_train, Y_test = skl.model_selection.train_test_split(track_with_chi, truth_and_ev_id, shuffle=False, train_size=0.8)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [], + "source": [ + "X_train_input = X_train[:,:4,:3]\n", + "X_test_input = X_test[:,:4,:3]\n", + "XY_train_proposed = X_train[:,4:,:]\n", + "XY_test_proposed = X_test[:,4:,:]" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [], + "source": [ + "filepath = \"trained_models/keras_RNN2.h5\"\n", + "model_4_to_4 = load_model(filepath)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "prediction_train = reshapor(model_4_to_4.predict(X_train_input[:,:,:]))\n", + "prediction_test = reshapor(model_4_to_4.predict(X_test_input[:,:,:]))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "XY_train_proposed[:,:,:3] -= prediction_train[:,:,:]\n", + "XY_test_proposed[:,:,:3] -= prediction_test[:,:,:]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "XY_test_proposed[0,0,:]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "X_train = XY_train_proposed\n", + "y_train = Y_train[:,0]\n", + "X_test = XY_test_proposed\n", + "y_test = Y_test[:,0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "print(X_train[1,:,:], y_train[1], X_test.shape, y_train.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "_________________________________________________________________\n", + "Layer (type) Output Shape Param # \n", + "=================================================================\n", + "lstm_1 (LSTM) (None, 4, 50) 11000 \n", + "_________________________________________________________________\n", + "batch_normalization_1 (Batch (None, 4, 50) 200 \n", + "_________________________________________________________________\n", + "lstm_2 (LSTM) (None, 4, 50) 20200 \n", + "_________________________________________________________________\n", + "batch_normalization_2 (Batch (None, 4, 50) 200 \n", + "_________________________________________________________________\n", + "lstm_3 (LSTM) (None, 50) 20200 \n", + "_________________________________________________________________\n", + "batch_normalization_3 (Batch (None, 50) 200 \n", + "_________________________________________________________________\n", + "dense_1 (Dense) (None, 100) 5100 \n", + "_________________________________________________________________\n", + "dense_2 (Dense) (None, 1) 101 \n", + "=================================================================\n", + "Total params: 57,201\n", + "Trainable params: 56,901\n", + "Non-trainable params: 300\n", + "_________________________________________________________________\n", + "None\n", + "Train on 87856 samples, validate on 21965 samples\n", + "Epoch 1/500\n" + ] + }, + { + "ename": "TypeError", + "evalue": "Cannot interpret feed_dict key 
as Tensor: Tensor Tensor(\"lstm_1_input:0\", shape=(?, 4, 4), dtype=float32) is not an element of this graph.",
+     "output_type": "error",
+     "traceback": [
+      "ValueError: Tensor Tensor(\"lstm_1_input:0\", shape=(?, 4, 4), dtype=float32) is not an element of this graph.",
+      "\nDuring handling of the above exception, another exception occurred:\n",
+      "TypeError: Cannot interpret feed_dict key as Tensor: Tensor Tensor(\"lstm_1_input:0\", shape=(?, 4, 4), dtype=float32) is not an element of this graph."
+     ]
+    }
+   ],
+   "source": [
+    "# train an LSTM classifier that decides whether a proposed track is matched,\n",
+    "# using the per-hit residuals (RNN prediction minus proposed hits) plus chi2\n",
+    "\n",
+    "# K.clear_session() resets the TF graph *and* Keras' cached session; the\n",
+    "# original tf.reset_default_graph() here left Keras bound to the stale graph\n",
+    "# and produced the TypeError recorded in this cell's output.\n",
+    "K.clear_session()\n",
+    "filepath = \"trained_models/keras_model_classifier.h5\"\n",
+    "\n",
+    "callbacks = [\n",
+    "    EarlyStopping(monitor='val_loss', patience=20, min_delta=0.0003),\n",
+    "    ModelCheckpoint(filepath, monitor='val_loss', save_best_only=True),\n",
+    "    History(),\n",
+    "    #TensorBoard(log_dir= filepath[:-3] + \"/logs\", histogram_freq=1, batch_size=32)\n",
+    "]\n",
+    "\n",
+    "# create the model\n",
+    "modelx = Sequential()\n",
+    "#modelx.add(Dense(12, input_shape=(4,4)))\n",
+    "modelx.add(LSTM(50, return_sequences=True, input_shape=(4,4), activation='relu'))\n",
+    "modelx.add(BatchNormalization())\n",
+    "modelx.add(LSTM(50, return_sequences=True, activation='relu'))\n",
+    "modelx.add(BatchNormalization())\n",
+    "modelx.add(LSTM(50, return_sequences=False, activation='relu'))\n",
+    "modelx.add(BatchNormalization())\n",
+    "#modelx.add(LSTM(40, return_sequences=True, activation='relu'))\n",
+    "#modelx.add(Dropout(0.5))\n",
+    "#modelx.add(LSTM(4, activation='relu'))\n",
+    "#modelx.add(BatchNormalization())\n",
+    "modelx.add(Dense(100, activation='relu'))\n",
+    "modelx.add(Dense(1, activation='sigmoid'))\n",
+    "modelx.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])\n",
+    "print(modelx.summary())\n",
+    "modelx.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=500, batch_size=50, callbacks=callbacks, verbose=1)\n",
+    "modelx = load_model(filepath)\n",
+    "# final evaluation of the model\n",
+    "scores = modelx.evaluate(X_test, y_test, verbose=0)\n",
+    "print(\"Accuracy: %.2f%%\" % (scores[1]*100))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [
+    {
+     "ename": "NameError",
+     "evalue": "name 'reshapor_inv' is not defined",
+     "output_type": "error",
+     "traceback": [
+      "NameError: name 'reshapor_inv' is not defined"
+     ]
+    }
+   ],
+   "source": [
+    "X_train = reshapor_inv(X_train)\n",
+    "X_test = reshapor_inv(X_test)"
+   ]
+  },
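+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "XGBoost expects flat 2D input, so the `(n_tracks, 4, 4)` residual arrays are flattened first. After a kernel restart the helper `reshapor_inv` from the top of the notebook is not in scope (hence the `NameError` above); the next cell is a minimal fallback sketch, assuming `reshapor_inv` does nothing more than flatten the hit axis. The name `reshapor_inv_np` is introduced here for illustration only."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# fallback sketch (assumption): reshapor_inv only flattens\n",
+    "# (n_tracks, n_hits, n_features) -> (n_tracks, n_hits * n_features)\n",
+    "def reshapor_inv_np(arr):\n",
+    "    n_tracks, n_hits, n_features = arr.shape\n",
+    "    return arr.reshape(n_tracks, n_hits * n_features)\n",
+    "\n",
+    "# usage if the original helper is unavailable:\n",
+    "# X_train = reshapor_inv_np(X_train)\n",
+    "# X_test = reshapor_inv_np(X_test)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# fit an XGBoost classifier on the flattened training data\n",
+    "modelx = XGBClassifier(max_depth=3, n_estimators=100, learning_rate=0.05).fit(X_train, y_train, verbose=1)\n",
+    "\n",
+    "predictions = modelx.predict_proba(X_test)\n",
+    "with 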
open(\"trained_models/XGB_Classifier.xgb\" , \"wb\" ) as f:\n", + " pkl.dump(modelx , f )" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "predictions_true = predictions[:, 1]\n", + "predictions_false = predictions[:, 0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "cut = 0.5\n", + "predictions_hard = (predictions_true > cut).astype(int)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "n_bins = 20\n", + "\n", + "plt.figure()\n", + "n, bins, patches = plt.hist(predictions_true[y_test == 1], bins=20, alpha=0.5, normed=True)\n", + "plt.hist(predictions_true[y_test == 0], bins=bins, alpha=0.5, normed=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "fpr, tpr, _ = skl.metrics.roc_curve(y_test, predictions_true, pos_label=1)\n", + "roc_auc = skl.metrics.roc_auc_score(y_true=y_test, y_score=predictions_true)\n", + "print(\"ROC AUC:\", roc_auc)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "plt.figure()\n", + "plt.plot(fpr, tpr)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "skl.metrics.accuracy_score(y_true=y_test, y_pred=predictions_hard)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.5" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/1_to_1_multi_layer_roc_auc.ipynb b/1_to_1_multi_layer_roc_auc.ipynb new file mode 100644 index 0000000..259b2af --- /dev/null +++ b/1_to_1_multi_layer_roc_auc.ipynb @@ -0,0 +1,2965 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Import all packages\n", + "***" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "c:\\users\\sa_li\\anaconda3\\envs\\rnn-tf-ker\\lib\\site-packages\\h5py\\__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. 
In future, it will be treated as `np.float64 == np.dtype(float).type`.\n", + " from ._conv import register_converters as _register_converters\n", + "Using TensorFlow backend.\n" + ] + } + ], + "source": [ + "import pandas as pd\n", + "import numpy as np\n", + "import matplotlib as mpl\n", + "import random\n", + "import math\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "import tensorflow as tf\n", + "from tensorflow.python.framework import ops\n", + "from sklearn import preprocessing\n", + "import pickle as pkl\n", + "from pathlib import Path\n", + "from keras.datasets import imdb\n", + "from keras.models import Sequential\n", + "from keras.layers import Dense\n", + "from keras.layers import LSTM\n", + "from keras.layers import GRU\n", + "from keras.layers import Dropout, BatchNormalization\n", + "from keras.layers import ConvLSTM2D\n", + "from keras.layers import Conv1D\n", + "#from keras.layers.convolutional import Conv1D\n", + "#from keras.layers.convolutional import MaxPooling1D\n", + "from keras.layers.embeddings import Embedding\n", + "from keras.preprocessing import sequence\n", + "from keras.callbacks import History\n", + "from keras.callbacks import EarlyStopping\n", + "from keras.callbacks import ModelCheckpoint\n", + "from keras.callbacks import TensorBoard\n", + "from keras.models import load_model\n", + "\n", + "import xgboost as xgb\n", + "from xgboost import XGBClassifier\n", + "from xgboost import plot_tree\n", + "from sklearn.metrics import accuracy_score\n", + "import graphviz\n", + "\n", + "import sklearn as skl\n", + "\n", + "#import seaborn as sns" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Preprocessing of the matched 8-hit tracks\n", + "\n", + "***\n", + "\n", + "## Import the dataset of the matched 8-hit tracks" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
HTML preview of 46896 rows x 24 columns omitted; see the text/plain rendering below.
" + ], + "text/plain": [ + " x1 y1 z1 x2 y2 z2 \\\n", + "0 -2.248582 23.380732 -6.040000 -6.489999 28.598572 -5.640000 \n", + "1 -20.411108 -9.417887 4.760000 -27.813803 -6.944843 4.760000 \n", + "2 -20.411108 -9.417887 4.760000 -27.813803 -6.944843 4.760000 \n", + "3 20.043732 10.304811 -22.680000 26.009144 12.499014 -22.440001 \n", + "4 -6.313652 21.696924 16.039999 -4.489999 28.598572 15.639999 \n", + "5 18.359924 14.369882 -43.239998 24.303370 17.748846 -43.480000 \n", + "6 -22.102570 5.334340 15.799999 -28.289688 5.480215 16.440001 \n", + "7 -22.102570 5.334340 15.799999 -28.289688 5.480215 16.440001 \n", + "8 -22.102570 5.334340 15.799999 -28.289688 5.480215 16.440001 \n", + "9 -7.052755 21.390778 -33.320000 -0.409999 28.598572 -33.639999 \n", + "10 -7.052755 21.390778 -33.320000 -0.409999 28.598572 -33.639999 \n", + "11 19.860044 10.748274 -47.400002 24.797796 16.227154 -47.240002 \n", + "12 -19.890659 -10.674363 24.760000 -25.242781 -14.857634 24.200001 \n", + "13 -19.890659 -10.674363 24.760000 -25.242781 -14.857634 24.200001 \n", + "14 -19.890659 -10.674363 24.760000 -25.242781 -14.857634 24.200001 \n", + "15 -19.890659 -10.674363 24.760000 -25.242781 -14.857634 24.200001 \n", + "16 -19.890659 -10.674363 24.760000 -25.242781 -14.857634 24.200001 \n", + "17 -19.890659 -10.674363 24.760000 -25.242781 -14.857634 24.200001 \n", + "18 -8.882037 -20.633064 -30.040001 -12.983168 -25.916956 -30.200001 \n", + "19 -21.115246 -7.717949 -24.680000 -26.750784 -10.216477 -24.600000 \n", + "20 15.404627 -17.931320 -36.439999 18.888992 -21.626123 -36.119999 \n", + "21 -22.247990 -4.983265 -10.280001 -30.069626 0.002129 -9.800000 \n", + "22 -22.247990 -4.983265 -10.280001 -30.069626 0.002129 -9.800000 \n", + "23 -22.247990 -4.983265 -10.280001 -30.069626 0.002129 -9.800000 \n", + "24 -19.928926 10.581976 -15.559999 -26.213095 11.871315 -15.480000 \n", + "25 8.364664 20.847366 -11.000000 6.710001 28.598572 -10.920000 \n", + "26 -15.256805 17.992550 4.200000 -22.642832 18.898800 4.440000 \n", + "27 -3.578969 22.829668 28.119999 -5.610000 28.598572 28.360001 \n", + "28 -3.578969 22.829668 28.119999 -5.610000 28.598572 28.360001 \n", + "29 -22.010725 5.556071 20.520000 -27.004177 9.436610 20.920000 \n", + "... ... ... ... ... ... ... 
\n", + "46866 1.805120 -23.564421 -22.040001 3.049999 -28.598572 -22.040001 \n", + "46867 -24.000679 0.751897 -38.439999 -29.674084 1.219481 -38.360001 \n", + "46868 -24.000679 0.751897 -38.439999 -29.674084 1.219481 -38.360001 \n", + "46869 -24.000679 0.751897 -38.439999 -29.674084 1.219481 -38.360001 \n", + "46870 -24.000679 0.751897 -38.439999 -29.674084 1.219481 -38.360001 \n", + "46871 22.676594 3.948521 -38.840000 28.926264 3.521039 -38.680000 \n", + "46872 22.676594 3.948521 -38.840000 28.926264 3.521039 -38.680000 \n", + "46873 22.676594 3.948521 -38.840000 28.926264 3.521039 -38.680000 \n", + "46874 -2.969208 -23.082239 26.760000 -6.150001 -28.598572 26.760000 \n", + "46875 -2.969208 -23.082239 26.760000 -6.150001 -28.598572 26.760000 \n", + "46876 -2.969208 -23.082239 26.760000 -6.150001 -28.598572 26.760000 \n", + "46877 15.478537 -17.900705 34.200001 19.212601 -21.391008 34.360001 \n", + "46878 0.308435 24.184366 19.080000 2.070001 28.598572 19.000000 \n", + "46879 0.308435 24.184366 19.080000 2.070001 28.598572 19.000000 \n", + "46880 0.308435 24.184366 19.080000 2.070001 28.598572 19.000000 \n", + "46881 0.308435 24.184366 19.080000 2.070001 28.598572 19.000000 \n", + "46882 0.308435 24.184366 19.080000 2.070001 28.598572 19.000000 \n", + "46883 0.308435 24.184366 19.080000 2.070001 28.598572 19.000000 \n", + "46884 -8.882037 -20.633064 -6.040000 -6.230000 -28.598572 -5.800000 \n", + "46885 -2.082284 -23.449615 -2.920000 0.809999 -28.598572 -2.520000 \n", + "46886 -2.082284 -23.449615 -2.920000 0.809999 -28.598572 -2.520000 \n", + "46887 -2.082284 -23.449615 -2.920000 0.809999 -28.598572 -2.520000 \n", + "46888 -19.921274 -10.600453 19.559999 -24.649467 -16.683662 19.400000 \n", + "46889 -19.921274 -10.600453 19.559999 -24.649467 -16.683662 19.400000 \n", + "46890 23.174084 -2.747478 11.320000 27.646933 -7.458413 11.559999 \n", + "46891 23.174084 -2.747478 11.320000 27.646933 -7.458413 11.559999 \n", + "46892 -12.374302 19.186522 -29.720001 -16.429581 23.412991 -29.799999 \n", + "46893 -20.939211 8.142934 -22.440001 -26.410866 11.262639 -22.760000 \n", + "46894 10.748274 -19.860044 -38.520000 13.322956 -25.670086 -38.439999 \n", + "46895 10.748274 -19.860044 -38.520000 13.322956 -25.670086 -38.439999 \n", + "\n", + " x3 y3 z3 x4 ... z5 \\\n", + "0 -21.724771 67.052704 -3.240000 -22.225971 ... 7.240000 \n", + "1 -66.736946 22.903200 4.360000 -74.096100 ... -3.720001 \n", + "2 -66.736946 22.903200 4.360000 -74.096100 ... -3.720001 \n", + "3 68.274330 17.165602 -21.240000 80.994446 ... -11.480000 \n", + "4 15.484889 68.724678 14.120000 23.103405 ... 28.119999 \n", + "5 64.025154 29.886627 -44.200001 76.569237 ... -48.599998 \n", + "6 -67.813629 18.884949 19.320000 -78.018410 ... 42.439999 \n", + "7 -67.813629 18.884949 19.320000 -78.018410 ... 42.439999 \n", + "8 -67.813629 18.884949 19.320000 -78.018410 ... 42.439999 \n", + "9 56.945152 42.149548 -38.520000 74.868416 ... -52.520000 \n", + "10 56.945152 42.149548 -38.520000 74.868416 ... -52.520000 \n", + "11 56.945152 42.149548 -46.840000 67.211060 ... -42.919998 \n", + "12 -62.665154 -32.242214 20.760000 -74.660149 ... -13.240000 \n", + "13 -62.665154 -32.242214 20.760000 -74.660149 ... -13.240000 \n", + "14 -62.665154 -32.242214 20.760000 -74.660149 ... -13.240000 \n", + "15 -62.665154 -32.242214 20.760000 -74.660149 ... -13.240000 \n", + "16 -62.665154 -32.242214 20.760000 -74.660149 ... -13.240000 \n", + "17 -62.665154 -32.242214 20.760000 -74.660149 ... -13.240000 \n", + "18 -47.546371 -52.001148 -32.360001 -60.527248 ... 
-44.919998 \n", + "19 -67.922340 -18.479261 -24.760000 -80.264580 ... -10.680000 \n", + "20 40.001804 -58.185154 -33.080002 43.343781 ... -14.120001 \n", + "21 -63.815155 30.250359 -7.560000 -71.631638 ... 40.919998 \n", + "22 -63.815155 30.250359 -7.560000 -71.631638 ... 40.919998 \n", + "23 -63.815155 30.250359 -7.560000 -71.631638 ... 40.919998 \n", + "24 -59.055157 38.494919 -15.800000 -63.856682 ... -4.040000 \n", + "25 6.210000 70.390724 -10.760000 8.752466 ... -8.680000 \n", + "26 -63.615158 30.596769 5.080000 -74.165520 ... -2.760000 \n", + "27 -9.949999 70.390724 29.559999 -8.190001 ... 47.639999 \n", + "28 -9.949999 70.390724 29.559999 -8.190001 ... 47.639999 \n", + "29 -55.734673 43.812851 23.799999 -61.412601 ... 55.799999 \n", + "... ... ... ... ... ... ... \n", + "46866 4.270000 -70.390724 -22.600000 2.190000 ... -33.799999 \n", + "46867 -67.979279 18.266756 -38.119999 -77.220062 ... -32.919998 \n", + "46868 -67.979279 18.266756 -38.119999 -77.220062 ... -32.919998 \n", + "46869 -67.979279 18.266756 -38.119999 -77.220062 ... -32.919998 \n", + "46870 -67.979279 18.266756 -38.119999 -77.220062 ... -32.919998 \n", + "46871 70.390724 -5.410000 -37.799999 82.075897 ... -21.400000 \n", + "46872 70.390724 -5.410000 -37.799999 82.075897 ... -21.400000 \n", + "46873 70.390724 -5.410000 -37.799999 82.075897 ... -21.400000 \n", + "46874 -33.783745 -61.775158 27.320000 -44.113327 ... 53.480000 \n", + "46875 -33.783745 -61.775158 27.320000 -44.113327 ... 53.480000 \n", + "46876 -33.783745 -61.775158 27.320000 -44.113327 ... 53.480000 \n", + "46877 46.061451 -53.486069 35.000000 51.978550 ... 45.000000 \n", + "46878 40.157688 58.095158 18.439999 58.581963 ... 17.000000 \n", + "46879 40.157688 58.095158 18.439999 58.581963 ... 17.000000 \n", + "46880 40.157688 58.095158 18.439999 58.581963 ... 17.000000 \n", + "46881 40.157688 58.095158 18.439999 58.581963 ... 17.000000 \n", + "46882 40.157688 58.095158 18.439999 58.581963 ... 17.000000 \n", + "46883 40.157688 58.095158 18.439999 58.581963 ... 17.000000 \n", + "46884 -1.250000 -70.390724 -4.680000 -2.370000 ... -11.400001 \n", + "46885 15.929216 -68.605621 0.280000 18.170271 ... 42.040001 \n", + "46886 15.929216 -68.605621 0.280000 18.170271 ... 42.040001 \n", + "46887 15.929216 -68.605621 0.280000 18.170271 ... 42.040001 \n", + "46888 -56.025154 -43.743034 18.840000 -65.964081 ... 10.120000 \n", + "46889 -56.025154 -43.743034 18.840000 -65.964081 ... 10.120000 \n", + "46890 53.698204 -45.849316 13.320000 58.983761 ... 40.119999 \n", + "46891 53.698204 -45.849316 13.320000 58.983761 ... 40.119999 \n", + "46892 -33.766418 61.785152 -30.760000 -34.045780 ... -44.919998 \n", + "46893 -59.615158 37.524971 -24.840000 -67.946777 ... -55.799999 \n", + "46894 25.279379 -66.100250 -38.040001 26.593647 ... -25.240002 \n", + "46895 25.279379 -66.100250 -38.040001 26.593647 ... 
-25.240002 \n", + "\n", + " x6 y6 z6 x7 y7 z7 \\\n", + "0 70.390724 0.190000 7.559999 28.802656 3.901462 6.040000 \n", + "1 69.040436 14.306461 -4.040000 26.880571 -9.817033 -4.840000 \n", + "2 69.040436 14.306461 -4.040000 26.880571 -9.817033 -4.840000 \n", + "3 -38.564201 -59.015156 -10.840000 -21.332224 -19.851011 -10.440001 \n", + "4 6.350000 -70.390724 28.520000 -7.870180 -29.631758 31.000000 \n", + "5 -36.693588 -60.095158 -48.759998 -19.520025 -21.167650 -49.799999 \n", + "6 51.110191 48.437328 43.400002 24.624746 16.759747 44.840000 \n", + "7 51.110191 48.437328 43.400002 24.624746 16.759747 44.840000 \n", + "8 51.110191 48.437328 43.400002 24.624746 16.759747 44.840000 \n", + "9 6.430000 -70.390724 -52.440002 -17.448942 -22.672380 -49.720001 \n", + "10 6.430000 -70.390724 -52.440002 -17.448942 -22.672380 -49.720001 \n", + "11 -9.766608 -70.256882 -42.760002 -1.830000 -28.598572 -41.239998 \n", + "12 17.184917 68.269157 -14.280001 9.099886 28.738325 -19.639999 \n", + "13 17.184917 68.269157 -14.280001 9.099886 28.738325 -19.639999 \n", + "14 17.184917 68.269157 -14.280001 9.099886 28.738325 -19.639999 \n", + "15 17.184917 68.269157 -14.280001 9.099886 28.738325 -19.639999 \n", + "16 17.184917 68.269157 -14.280001 9.099886 28.738325 -19.639999 \n", + "17 17.184917 68.269157 -14.280001 9.099886 28.738325 -19.639999 \n", + "18 1.410000 70.390724 -45.480000 11.365134 27.092525 -45.320000 \n", + "19 40.088406 58.135159 -10.440001 19.973076 20.838491 -9.880000 \n", + "20 -70.443237 9.071142 -13.960001 -28.141359 5.936723 -13.960000 \n", + "21 63.415157 -30.943180 41.799999 16.300138 -23.507036 44.360001 \n", + "22 63.415157 -30.943180 41.799999 16.300138 -23.507036 44.360001 \n", + "23 63.415157 -30.943180 41.799999 16.300138 -23.507036 44.360001 \n", + "24 45.962456 53.585068 -3.480000 23.833664 19.194452 -2.360001 \n", + "25 56.470062 -43.077457 -8.600000 25.520897 -14.001682 -7.320001 \n", + "26 64.025154 29.886627 -2.840000 23.932549 18.890114 -3.960001 \n", + "27 61.495155 -34.268715 48.119999 25.545618 -13.925597 49.320000 \n", + "28 61.495155 -34.268715 48.119999 25.545618 -13.925597 49.320000 \n", + "29 68.496918 -16.334904 56.439999 25.174797 -15.066865 58.599998 \n", + "... ... ... ... ... ... ... 
\n", + "46866 -50.530365 49.017155 -34.040001 -18.888992 21.626123 -35.000000 \n", + "46867 51.619308 47.928211 -32.520000 26.182192 11.966422 -28.760000 \n", + "46868 51.619308 47.928211 -32.520000 26.182192 11.966422 -28.760000 \n", + "46869 51.619308 47.928211 -32.520000 26.182192 11.966422 -28.760000 \n", + "46870 51.619308 47.928211 -32.520000 26.182192 11.966422 -28.760000 \n", + "46871 -67.839516 -18.788357 -21.080000 -28.827377 -3.825377 -20.120001 \n", + "46872 -67.839516 -18.788357 -21.080000 -28.827377 -3.825377 -20.120001 \n", + "46873 -67.839516 -18.788357 -21.080000 -28.827377 -3.825377 -20.120001 \n", + "46874 16.566727 68.434799 53.799999 14.924809 24.506271 54.840000 \n", + "46875 16.566727 68.434799 53.799999 14.924809 24.506271 54.840000 \n", + "46876 16.566727 68.434799 53.799999 14.924809 24.506271 54.840000 \n", + "46877 -66.964706 22.053185 45.160000 -26.460308 11.110470 45.720001 \n", + "46878 54.037613 -45.509907 16.680000 7.690000 -28.598572 12.679999 \n", + "46879 54.037613 -45.509907 16.680000 7.690000 -28.598572 12.679999 \n", + "46880 54.037613 -45.509907 16.680000 7.690000 -28.598572 12.679999 \n", + "46881 54.037613 -45.509907 16.680000 7.690000 -28.598572 12.679999 \n", + "46882 54.037613 -45.509907 16.680000 7.690000 -28.598572 12.679999 \n", + "46883 54.037613 -45.509907 16.680000 7.690000 -28.598572 12.679999 \n", + "46884 -55.791237 43.756279 -11.480000 -25.941158 12.708245 -10.840000 \n", + "46885 -63.455158 30.873898 42.759998 -27.993032 6.393229 46.040001 \n", + "46886 -63.455158 30.873898 42.759998 -27.993032 6.393229 46.040001 \n", + "46887 -63.455158 30.873898 42.759998 -27.993032 6.393229 46.040001 \n", + "46888 6.370000 70.390724 9.959999 -0.570000 28.598572 8.599999 \n", + "46889 6.370000 70.390724 9.959999 -0.570000 28.598572 8.599999 \n", + "46890 -53.245655 46.301865 40.439999 -11.704922 26.845655 41.320000 \n", + "46891 -53.245655 46.301865 40.439999 -11.704922 26.845655 41.320000 \n", + "46892 70.390724 2.510000 -45.639999 29.303265 -2.360749 -48.040001 \n", + "46893 69.242310 -13.553038 -56.279999 26.410866 -11.262639 -57.720001 \n", + "46894 -53.698204 45.849316 -25.080000 -20.053978 20.779713 -25.560001 \n", + "46895 -53.698204 45.849316 -25.080000 -20.053978 20.779713 -25.560001 \n", + "\n", + " x8 y8 z8 \n", + "0 21.421392 6.978845 5.640000 \n", + "1 19.684010 -11.173258 -5.000000 \n", + "2 19.684010 -11.173258 -5.000000 \n", + "3 -17.870090 -15.552447 -10.440001 \n", + "4 -9.177679 -20.510605 31.799999 \n", + "5 -17.012880 -17.621937 -49.880001 \n", + "6 18.849760 13.187316 44.919998 \n", + "7 18.849760 13.187316 44.919998 \n", + "8 18.849760 13.187316 44.919998 \n", + "9 -17.349640 -16.808924 -49.320000 \n", + "10 -17.349640 -16.808924 -49.320000 \n", + "11 -0.086704 -24.276211 -41.000000 \n", + "12 5.851712 21.888266 -20.680000 \n", + "13 5.851712 21.888266 -20.680000 \n", + "14 5.851712 21.888266 -20.680000 \n", + "15 5.851712 21.888266 -20.680000 \n", + "16 5.851712 21.888266 -20.680000 \n", + "17 5.851712 21.888266 -20.680000 \n", + "18 10.803707 19.837084 -44.840000 \n", + "19 17.104723 17.400206 -9.800000 \n", + "20 -22.592403 4.151774 -13.880000 \n", + "21 8.309232 -20.870329 44.840000 \n", + "22 8.309232 -20.870329 44.840000 \n", + "23 8.309232 -20.870329 44.840000 \n", + "24 17.808861 15.700268 -2.200001 \n", + "25 21.153515 -7.625561 -7.240001 \n", + "26 16.798576 18.139311 -4.200001 \n", + "27 20.479992 -9.251589 49.400002 \n", + "28 20.479992 -9.251589 49.400002 \n", + "29 18.704340 -13.538390 58.919998 \n", + "... ... 
... ... \n", + "46866 -15.848089 17.747631 -35.080002 \n", + "46867 20.870329 8.309232 -28.119999 \n", + "46868 20.870329 8.309232 -28.119999 \n", + "46869 20.870329 8.309232 -28.119999 \n", + "46870 20.870329 8.309232 -28.119999 \n", + "46871 -23.258274 -2.544224 -19.959999 \n", + "46872 -23.258274 -2.544224 -19.959999 \n", + "46873 -23.258274 -2.544224 -19.959999 \n", + "46874 13.834032 18.581881 55.160000 \n", + "46875 13.834032 18.581881 55.160000 \n", + "46876 13.834032 18.581881 55.160000 \n", + "46877 -20.755524 8.586395 45.720001 \n", + "46878 2.913775 -23.105200 11.880000 \n", + "46879 2.913775 -23.105200 11.880000 \n", + "46880 2.913775 -23.105200 11.880000 \n", + "46881 2.913775 -23.105200 11.880000 \n", + "46882 2.913775 -23.105200 11.880000 \n", + "46883 2.913775 -23.105200 11.880000 \n", + "46884 -21.857651 5.925623 -10.520000 \n", + "46885 -23.510843 1.934464 46.439999 \n", + "46886 -23.510843 1.934464 46.439999 \n", + "46887 -23.510843 1.934464 46.439999 \n", + "46888 -2.470313 23.288887 8.360000 \n", + "46889 -2.470313 23.288887 8.360000 \n", + "46890 -4.835445 22.309217 41.480000 \n", + "46891 -4.835445 22.309217 41.480000 \n", + "46892 23.847605 -1.121449 -48.279999 \n", + "46893 20.173845 -9.990692 -57.959999 \n", + "46894 -16.661102 17.410870 -25.560001 \n", + "46895 -16.661102 17.410870 -25.560001 \n", + "\n", + "[46896 rows x 24 columns]" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "#import data as array\n", + "# 8 hits with x,y,z\n", + "\n", + "testset = pd.read_pickle('matched_8hittracks.pkl')\n", + "testset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Convert the data to an array (float32)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "#Convert the data\n", + "\n", + "tset = np.array(testset)\n", + "tset = tset.astype('float32')\n", + "\n", + "#Check testset with arbitrary particle\n", + "\n", + "#print(tset.shape)\n", + "#for i in range(8):\n", + " #print(tset[1,3*i:(3*i+3)])\n", + "#print(tset[0,:])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Transformation between original 2D-array into 3D-array\n", + "\n", + "***\n", + "\n", + "### reshapor()\n", + "\n", + "**Description:**\n", + "\n", + "Transforms 2D-array into 3D array\n", + "\n", + "**Arguments:**\n", + "\n", + "- arr_orig: Original 2D array\n", + "- num_inputs: Number of inputs per timestep (default value = 3 for X,Y,Z coordinates)\n", + "\n", + "**Returns:**\n", + "\n", + "- arr: 3D-array of shape(particlenumber, timesteps, input = coordinates)\n", + "\n", + "\n", + "***\n", + "\n", + "\n", + "### reshapor_inv()\n", + "\n", + "**Description:**\n", + "\n", + "Inverse transformation from 3D-array into 2D-array\n", + "\n", + "**Arguments:**\n", + "\n", + "- array_shaped: 3D-array of shape(particlenumber, timesteps, input = coordinates)\n", + "\n", + "**Returns:**\n", + "\n", + "- arr: 2D-array of shape(particlenumber, inputs)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "#Reshapes the 2D-array to a 3D-array\n", + "\n", + "def reshapor(arr_orig, num_inputs=3):\n", + " timesteps = int(arr_orig.shape[1]/num_inputs)\n", + " number_examples = int(arr_orig.shape[0])\n", + " arr = np.zeros((number_examples, timesteps, num_inputs))\n", + " \n", + " for i in range(number_examples):\n", + " for t in range(timesteps):\n", + " arr[i,t,:] = 
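+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Since the 24 columns are stored hit-by-hit (x1,y1,z1,...,x8,y8,z8), the two loop-based helpers above are equivalent to a plain `numpy` reshape. A minimal sanity-check sketch (illustrative only, not part of the original pipeline):"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#Illustrative check only: reshapor/reshapor_inv agree with numpy's reshape\n",
+    "_demo = np.arange(48.0).reshape(2, 24)   #two particles, 8 hits x 3 coordinates\n",
+    "assert np.allclose(reshapor(_demo), _demo.reshape(2, 8, 3))\n",
+    "assert np.allclose(reshapor_inv(_demo.reshape(2, 8, 3)), _demo)"
+   ]
+  },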
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Create random training and test sets from the data\n",
+    "\n",
+    "***\n",
+    "\n",
+    "### create_random_sets()\n",
+    "\n",
+    "**Description:**\n",
+    "\n",
+    "Splits a dataset into a training set and a test set\n",
+    "\n",
+    "\n",
+    "**Input:**\n",
+    "\n",
+    "- dataset: The actual dataset with shape (particles, other dimensions)\n",
+    "- train_to_total_ratio: The fraction of the original set that becomes the training set.\n",
+    "  The remaining part becomes the test set\n",
+    " \n",
+    "\n",
+    "**Returns:**\n",
+    "\n",
+    "- train_set: The newly created training set (particles, other dimensions)\n",
+    "- test_set: The newly created test set (particles, other dimensions)\n",
+    " \n",
+    " \n",
+    "**Additional comments:**\n",
+    "\n",
+    "The data is randomly shuffled before it is split up. An equivalent scikit-learn split is sketched below."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "### create the training set and the test set ###\n",
+    "\n",
+    "def create_random_sets(dataset, train_to_total_ratio):\n",
+    "    #shuffle the dataset\n",
+    "    num_examples = dataset.shape[0]\n",
+    "    p = np.random.permutation(num_examples)\n",
+    "    dataset = dataset[p,:]\n",
+    "    \n",
+    "    #evaluate the sizes of the training and test set and initialize them\n",
+    "    train_set_size = int(num_examples*train_to_total_ratio)\n",
+    "    test_set_size = num_examples - train_set_size\n",
+    "    \n",
+    "    train_set = np.zeros((train_set_size, dataset.shape[1]))\n",
+    "    test_set = np.zeros((test_set_size, dataset.shape[1]))\n",
+    "    \n",
+    "\n",
+    "    #fill the training and test sets\n",
+    "    for i in range(num_examples):\n",
+    "        if train_set_size > i:\n",
+    "            train_set[i,:] += dataset[i,:]\n",
+    "        else:\n",
+    "            test_set[i - train_set_size,:] += dataset[i,:]\n",
+    "    \n",
+    "    return train_set, test_set\n",
+    "    "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#Create the training and test sets\n",
+    "\n",
+    "train_set, test_set = create_random_sets(tset, 0.9)\n",
+    "\n",
+    "#print(test_set.shape, train_set.shape, reshapor(tset).shape)\n",
+    "#print(test_set[0,:,:])"
+   ]
+  },
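+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "For reference, the same shuffled split can be obtained with scikit-learn. This is only an illustrative alternative to `create_random_sets()`, not what the notebook uses here:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#Illustrative alternative: scikit-learn's train_test_split shuffles by default\n",
+    "from sklearn.model_selection import train_test_split\n",
+    "\n",
+    "_train_alt, _test_alt = train_test_split(tset, train_size=0.9)\n",
+    "#print(_train_alt.shape, _test_alt.shape)"
+   ]
+  },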
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Normalization of the data\n",
+    "\n",
+    "***\n",
+    "\n",
+    "## Normalization based on the MinMaxScaler from sklearn\n",
+    "\n",
+    "### correct_array_steps()\n",
+    "\n",
+    "**Description:**\n",
+    "\n",
+    "As the scaler is fixed to arrays of a specific length, this function returns the array padded with zeros to the correct shape\n",
+    "\n",
+    "**Input:**\n",
+    "\n",
+    "- arr: 3D-array of shape (particle_number, timesteps, num_inputs)\n",
+    "- steps: Required number of timesteps for the scaler (default value = 8)\n",
+    "- num_inputs: Number of inputs per timestep (default value = 3 for X,Y,Z coordinates)\n",
+    "\n",
+    "**Returns:**\n",
+    "\n",
+    "- arr: 3D-array of shape (particle_number, steps, num_inputs)\n",
+    "\n",
+    "***\n",
+    "\n",
+    "### set_min_max_scaler()\n",
+    "\n",
+    "**Description:**\n",
+    "\n",
+    "Fits a MinMaxScaler to the dataset given (sklearn based)\n",
+    "\n",
+    "**Input:**\n",
+    "\n",
+    "- arr: 2D-array of shape (particle_number, inputs) or 3D-array of shape (particle_number, timesteps, num_inputs)\n",
+    "- feature_range: Tuple which defines the range to which the data is scaled (default value = (-1,1))\n",
+    "\n",
+    "**Returns:**\n",
+    "\n",
+    "- min_max_scalor: MinMaxScaler fitted on the data given\n",
+    "\n",
+    "***\n",
+    "\n",
+    "### min_max_scaler()\n",
+    "\n",
+    "**Description:**\n",
+    "\n",
+    "Transforms a 3D-array with a given MinMaxScaler (sklearn based)\n",
+    "\n",
+    "**Input:**\n",
+    "\n",
+    "- arr: 3D-array of shape (particle_number, timesteps, num_inputs)\n",
+    "- min_max_scalor: The MinMaxScaler used for the transformation (default value: min_max_scalor)\n",
+    "\n",
+    "**Returns:**\n",
+    "\n",
+    "- arr: Transformed 3D-array\n",
+    "\n",
+    "***\n",
+    "\n",
+    "### min_max_scaler_inv()\n",
+    "\n",
+    "**Description:**\n",
+    "\n",
+    "Transforms a 3D-array with a given MinMaxScaler back to its original form (sklearn based)\n",
+    "\n",
+    "**Input:**\n",
+    "\n",
+    "- arr: 3D-array of shape (particle_number, timesteps, num_inputs)\n",
+    "- min_max_scalor: The MinMaxScaler used for the transformation (default value: min_max_scalor)\n",
+    "\n",
+    "**Returns:**\n",
+    "\n",
+    "- arr: Inverse-transformed 3D-array"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#Normalize the data, advanced version with scikit-learn\n",
+    "def correct_array_steps(arr, steps= 8, num_inputs= 3): #steps > array_steps\n",
+    "    if arr.shape[1] != steps:\n",
+    "        _ = np.zeros((arr.shape[0], steps, num_inputs))\n",
+    "        _[:,:arr.shape[1],:] += arr\n",
+    "        arr = _\n",
+    "    return arr\n",
+    "\n",
+    "\n",
+    "#set the transformation based on the training set\n",
+    "def set_min_max_scaler(arr, feature_range= (-1,1)):\n",
+    "    min_max_scalor = preprocessing.MinMaxScaler(feature_range=feature_range)\n",
+    "    if len(arr.shape) == 3:\n",
+    "        arr = reshapor(min_max_scalor.fit_transform(reshapor_inv(arr))) \n",
+    "    else:\n",
+    "        arr = min_max_scalor.fit_transform(arr)\n",
+    "    return min_max_scalor\n",
+    "\n",
+    "min_max_scalor = set_min_max_scaler(train_set)\n",
+    "\n",
+    "\n",
+    "#transform the data\n",
+    "def min_max_scaler(arr, min_max_scalor= min_max_scalor):\n",
+    "    num_inputs = arr.shape[2]\n",
+    "    arr = correct_array_steps(arr)\n",
+    "    arr = reshapor(min_max_scalor.transform(reshapor_inv(arr)), num_inputs=num_inputs)\n",
+    "    return arr\n",
+    "    \n",
+    "#inverse transformation\n",
+    "def min_max_scaler_inv(arr, min_max_scalor= min_max_scalor):\n",
+    "    num_inputs = arr.shape[2]\n",
+    "    arr = correct_array_steps(arr)\n",
+    "    arr = reshapor(min_max_scalor.inverse_transform(reshapor_inv(arr)), num_inputs=num_inputs)\n",
+    "    return arr"
+   ]
+  },
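+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A quick round-trip sketch (illustrative only): scaling followed by the inverse transform should reproduce the input up to floating-point error. Note that `train_set` is still 2D at this point, so it is reshaped first:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#Illustrative round-trip check of the min-max scaling helpers\n",
+    "_sample = reshapor(train_set[:5])\n",
+    "_restored = min_max_scaler_inv(min_max_scaler(_sample))\n",
+    "assert np.allclose(_sample, _restored, atol=1e-5)"
+   ]
+  },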
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Normalization based on the StandardScaler from sklearn\n",
+    "\n",
+    "\n",
+    "### set_std_scaler()\n",
+    "\n",
+    "**Description:**\n",
+    "\n",
+    "Fits a StandardScaler to the dataset given (sklearn based)\n",
+    "\n",
+    "**Input:**\n",
+    "\n",
+    "- arr: 2D-array of shape (particle_number, inputs) or 3D-array of shape (particle_number, timesteps, num_inputs)\n",
+    "\n",
+    "**Returns:**\n",
+    "\n",
+    "- std_scalor: StandardScaler fitted on the data given\n",
+    "\n",
+    "***\n",
+    "\n",
+    "### std_scaler()\n",
+    "\n",
+    "**Description:**\n",
+    "\n",
+    "Transforms a 3D-array with a given StandardScaler (sklearn based)\n",
+    "\n",
+    "**Input:**\n",
+    "\n",
+    "- arr: 3D-array of shape (particle_number, timesteps, num_inputs)\n",
+    "- std_scalor: The StandardScaler used for the transformation (default value: std_scalor)\n",
+    "\n",
+    "**Returns:**\n",
+    "\n",
+    "- arr: Transformed 3D-array\n",
+    "\n",
+    "***\n",
+    "\n",
+    "### std_scaler_inv()\n",
+    "\n",
+    "**Description:**\n",
+    "\n",
+    "Transforms a 3D-array with a given StandardScaler back to its original form (sklearn based)\n",
+    "\n",
+    "**Input:**\n",
+    "\n",
+    "- arr: 3D-array of shape (particle_number, timesteps, num_inputs)\n",
+    "- std_scalor: The StandardScaler used for the transformation (default value: std_scalor)\n",
+    "\n",
+    "**Returns:**\n",
+    "\n",
+    "- arr: Inverse-transformed 3D-array"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#Normalize the data, advanced version with scikit-learn - StandardScaler\n",
+    "\n",
+    "#set the transformation based on the training set\n",
+    "def set_std_scaler(arr):\n",
+    "    std_scalor = preprocessing.StandardScaler()\n",
+    "    if len(arr.shape) == 3:\n",
+    "        arr = reshapor(std_scalor.fit_transform(reshapor_inv(arr))) \n",
+    "    else:\n",
+    "        arr = std_scalor.fit_transform(arr)\n",
+    "    return std_scalor\n",
+    "\n",
+    "std_scalor = set_std_scaler(train_set)\n",
+    "\n",
+    "#transform the data (note: the num_inputs argument is unused, 3 inputs per timestep are assumed)\n",
+    "def std_scaler(arr, std_scalor= std_scalor, num_inputs=3):\n",
+    "    arr = correct_array_steps(arr)\n",
+    "    arr = reshapor(std_scalor.transform(reshapor_inv(arr)))\n",
+    "    return arr\n",
+    "    \n",
+    "#inverse transformation\n",
+    "def std_scaler_inv(arr, std_scalor= std_scalor, num_inputs=3):\n",
+    "    arr = correct_array_steps(arr)\n",
+    "    arr = reshapor(std_scalor.inverse_transform(reshapor_inv(arr)))\n",
+    "    return arr\n",
+    "\n"
+   ]
+  },
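+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As a quick illustrative check (not part of the original pipeline), the standard-scaled training data should have roughly zero mean and unit variance in every flattened coordinate column:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#Illustrative: column-wise statistics after standard scaling\n",
+    "_flat = reshapor_inv(std_scaler(reshapor(train_set)))\n",
+    "print(np.abs(_flat.mean(axis=0)).max())   #close to 0\n",
+    "print(_flat.std(axis=0).mean())           #close to 1"
+   ]
+  },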
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#reshape the data\n",
+    "\n",
+    "train_set = reshapor(train_set)\n",
+    "test_set = reshapor(test_set)\n",
+    "\n",
+    "#print(train_set[0,:,:])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Scale the data using the min_max_scaler or the std_scaler defined before\n",
+    "\n",
+    "### scaler()\n",
+    "\n",
+    "**Description:**\n",
+    "\n",
+    "Shapes and transforms the data with a given sklearn scaler\n",
+    "\n",
+    "**Arguments:**\n",
+    "\n",
+    "- arr: 3D-array of shape (particle_number, timesteps, num_inputs)\n",
+    "- std_scalor: StandardScaler from sklearn (default value = std_scalor defined above)\n",
+    "- min_max_scalor: MinMaxScaler from sklearn (default value = min_max_scalor defined above)\n",
+    "- scalerfunc: Name of the scaler function to be used (default value = \"minmax\")\n",
+    "- scalor: Optional sklearn scaler; if one is given, arr is shaped and scaled with it instead (default value = False)\n",
+    "\n",
+    "**Returns:**\n",
+    "\n",
+    "- arr: Shaped and transformed 3D-array of shape (particle_number, timesteps, num_inputs)\n",
+    "\n",
+    "**Additional comments:**\n",
+    "\n",
+    "The supported values for scalerfunc are \"minmax\" and \"std\", selecting a MinMaxScaler or a StandardScaler respectively.\n",
+    "Any other scaler has to be passed explicitly via the scalor argument.\n",
+    "\n",
+    "***\n",
+    "\n",
+    "### scaler_inv()\n",
+    "\n",
+    "**Description:**\n",
+    "\n",
+    "Shapes and transforms the data back to its original shape with a given sklearn scaler\n",
+    "\n",
+    "**Arguments:**\n",
+    "\n",
+    "- arr: 3D-array of shape (particle_number, timesteps, num_inputs)\n",
+    "- std_scalor: StandardScaler from sklearn (default value = std_scalor defined above)\n",
+    "- min_max_scalor: MinMaxScaler from sklearn (default value = min_max_scalor defined above)\n",
+    "- scalerfunc: Name of the scaler function to be used (default value = \"std\"; note that this differs from the \"minmax\" default of scaler())\n",
+    "- scalor: Optional sklearn scaler; if one is given, arr is shaped and inverse-scaled with it instead (default value = False)\n",
+    "\n",
+    "**Returns:**\n",
+    "\n",
+    "- arr: Shaped and inverse-transformed 3D-array of shape (particle_number, timesteps, num_inputs)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 10,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def scaler(arr, std_scalor= std_scalor, min_max_scalor= min_max_scalor, scalerfunc= \"minmax\", scalor = False):\n",
+    "    \n",
+    "    if scalor != False:\n",
+    "        arr = correct_array_steps(arr)\n",
+    "        arr = reshapor(scalor.transform(reshapor_inv(arr)))\n",
+    "        return arr\n",
+    "    \n",
+    "    elif scalerfunc == \"std\":\n",
+    "        arr = std_scaler(arr, std_scalor= std_scalor)\n",
+    "        return arr\n",
+    "    \n",
+    "    elif scalerfunc == \"minmax\":\n",
+    "        arr = min_max_scaler(arr, min_max_scalor= min_max_scalor)\n",
+    "        return arr\n",
+    "    \n",
+    "    else:\n",
+    "        raise ValueError(\"Unknown scaler chosen: {}\".format(scalerfunc))\n",
+    "\n",
+    "#caution: the default scalerfunc here is \"std\", while scaler() defaults to \"minmax\"\n",
+    "def scaler_inv(arr, std_scalor= std_scalor, min_max_scalor= min_max_scalor, scalerfunc= \"std\", scalor = False):\n",
+    "\n",
+    "    num_inputs = arr.shape[2]\n",
+    "    \n",
+    "    if scalor != False:\n",
+    "        arr = correct_array_steps(arr)\n",
+    "        arr = reshapor(scalor.inverse_transform(reshapor_inv(arr)))\n",
+    "        return arr\n",
+    "    \n",
+    "    elif scalerfunc == \"std\":\n",
+    "        arr = std_scaler_inv(arr, std_scalor= std_scalor)\n",
+    "        return arr\n",
+    "    \n",
+    "    elif scalerfunc == \"minmax\":\n",
+    "        arr = min_max_scaler_inv(arr, min_max_scalor= min_max_scalor)\n",
+    "        return arr\n",
+    "    \n",
+    "    else:\n",
+    "        raise ValueError(\"Unknown scaler chosen: {}\".format(scalerfunc))\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 11,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#scale the data with the scaler loaded from disk\n",
+    "scalor = pkl.load( open(\"scalor.pkl\" , \"rb\" ) )\n",
+    "\n",
+    "func = \"minmax\"\n",
+    "\n",
+    "train_set = scaler(train_set, scalerfunc = func, scalor= scalor)\n",
+    "test_set = scaler(test_set, scalerfunc = func, scalor= scalor)\n",
+    "\n",
+    "#from here on, scalor refers to the scaler fitted above, not the one loaded from disk\n",
+    "if func == \"minmax\":\n",
+    "    scalor = min_max_scalor\n",
+    "elif func == \"std\":\n",
+    "    scalor = std_scalor\n",
+    "\n",
+    "#print(train_set[0,:,:])"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 12,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#split each track: the first 4 hits are the input, the last 4 hits (flattened) are the target\n",
+    "filepath = \"trained_models/keras_RNN.h5\"\n",
+    "\n",
+    "X_train = train_set[:,:-4,:]\n",
+    "Y_train = reshapor_inv(train_set[:,4:,:])\n",
+    "X_test = test_set[:,:-4,:]\n",
+    "Y_test = reshapor_inv(test_set[:,4:,:])"
+   ]
+  },
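+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A small shape sketch to make the 4-to-4 setup explicit (illustrative): the model sees the first four hits as a sequence and regresses the flattened coordinates of the last four hits."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#Illustrative shape check for the 4-hit -> 4-hit task\n",
+    "print(X_train.shape)   #(num_train_tracks, 4, 3): hits 1-4 as input timesteps\n",
+    "print(Y_train.shape)   #(num_train_tracks, 12): hits 5-8, flattened to x5,y5,z5,...,z8"
+   ]
+  },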
+  {
+   "cell_type": "code",
+   "execution_count": 13,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "(42206, 12)\n",
+      "WARNING:tensorflow:From c:\\users\\sa_li\\anaconda3\\envs\\rnn-tf-ker\\lib\\site-packages\\tensorflow\\contrib\\learn\\python\\learn\\datasets\\base.py:198: retry (from tensorflow.contrib.learn.python.learn.datasets.base) is deprecated and will be removed in a future version.\n",
+      "Instructions for updating:\n",
+      "Use the retry module or similar alternatives.\n",
+      "_________________________________________________________________\n",
+      "Layer (type)                 Output Shape              Param #   \n",
+      "=================================================================\n",
+      "lstm_1 (LSTM)                (None, 4, 50)             10800     \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_1 (Batch (None, 4, 50)             200       \n",
+      "_________________________________________________________________\n",
+      "lstm_2 (LSTM)                (None, 50)                20200     \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_2 (Batch (None, 50)                200       \n",
+      "_________________________________________________________________\n",
+      "dense_1 (Dense)              (None, 50)                2550      \n",
+      "_________________________________________________________________\n",
+      "batch_normalization_3 (Batch (None, 50)                200       \n",
+      "_________________________________________________________________\n",
+      "dense_2 (Dense)              (None, 12)                612       \n",
+      "=================================================================\n",
+      "Total params: 34,762\n",
+      "Trainable params: 34,462\n",
+      "Non-trainable params: 300\n",
+      "_________________________________________________________________\n",
+      "None\n",
+      "Train on 42206 samples, validate on 4690 samples\n",
+      "Epoch 1/10\n"
+     ]
+    },
+    {
+     "ename": "InternalError",
+     "evalue": "Blas GEMM launch failed : a.shape=(50, 50), b.shape=(50, 50), m=50, n=50, k=50",
+     "output_type": "error",
+     "traceback": [
+      "InternalError: Blas GEMM launch failed : a.shape=(50, 50), b.shape=(50, 50), m=50, n=50, k=50\n",
+      "(full TensorFlow/Keras traceback omitted for readability; the error was raised on the GPU inside lstm_1 during model.fit)"
+     ]
+    }
+   ],
+   "source": [
+    "print(Y_train.shape)\n",
+    "\n",
+    "callbacks = [\n",
+    "    EarlyStopping(monitor='val_loss', patience=3, min_delta=3/10**7),\n",
+    "    ModelCheckpoint(filepath, monitor='val_loss', save_best_only=True),\n",
+    "    History(),\n",
+    "    TensorBoard(log_dir= filepath[:-3] + \"/logs\", histogram_freq=1, batch_size=50)\n",
+    "]\n",
+    "\n",
+    "# create the model\n",
+    "model = Sequential()\n",
+    "model.add(LSTM(50, return_sequences=True, input_shape=(4,3), activation = 'tanh'))\n",
+    "model.add(BatchNormalization())\n",
+    "model.add(LSTM(50, return_sequences=False, activation = 'tanh')) \n",
+    "model.add(BatchNormalization())\n",
+    "model.add(Dense(50, activation='tanh'))\n",
+    "model.add(BatchNormalization())\n",
+    "model.add(Dense(12, activation='tanh'))\n",
+    "#the targets are scaled coordinates in [-1,1], so a regression loss is appropriate here\n",
+    "#('binary_crossentropy' expects targets in [0,1] and was replaced)\n",
+    "model.compile(loss='mean_squared_error', optimizer='adam', metrics=['accuracy'])\n",
+    "print(model.summary())\n",
+    "\n",
+    "\n",
+    "model.fit(X_train, Y_train, validation_data=(X_test, Y_test), epochs=10, batch_size=50, callbacks= callbacks)\n",
+    "# model_4_to_4 = load_model(filepath)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "model_4_to_4 = load_model(filepath)\n",
+    "\n",
+    "prediction = model_4_to_4.predict(X_test)\n",
+    "\n",
+    "#residual of an arbitrary test track\n",
+    "print(prediction[615,:]-Y_test[615,:])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Finding the right track\n",
+    "\n",
+    "***\n",
+    "\n",
+    "## Loading the data"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": true
+   },
+   "outputs": [],
+   "source": [
+    "matched_unmatched_tracks = pd.read_pickle('matched_8hittracks2.pkl')\n",
+    "matched_unmatched_tracks"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#plt.figure()\n",
+    "#plt.hist(matched_unmatched_tracks['trackchi2'][matched_unmatched_tracks['matched'] == 1], label='matched', alpha=0.5, normed=True)\n",
+    "#plt.hist(matched_unmatched_tracks['trackchi2'][matched_unmatched_tracks['matched'] == 0], label='not matched', alpha=0.5, normed=True)\n",
+    "#plt.legend()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#the last three columns hold the event id, the track chi2 and the truth label\n",
+    "matched_unmatched_tracks = np.array(matched_unmatched_tracks)\n",
+    "matched_unmatched_tracks = matched_unmatched_tracks.astype('float32')\n",
+    "truth = matched_unmatched_tracks[:,-1]\n",
+    "chi2 = matched_unmatched_tracks[:,-2]\n",
+    "event_id = matched_unmatched_tracks[:,-3].astype('int')\n",
+    "tracks = scaler(reshapor(matched_unmatched_tracks[:,:-3]), scalerfunc = func, scalor= scalor)\n",
+    "\n",
+    "print(tracks.shape)\n",
+    "\n",
+    "#print(reshapor_inv(tset_matched).shape)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "array_len = truth.shape[0]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#append the track chi2 as a fourth feature to every timestep\n",
+    "#(a vectorized equivalent is sketched below)\n",
+    "track_with_chi = np.zeros((array_len, 8, 4))\n",
+    "track_with_chi[:,:,:3] += tracks[:,:,:]\n",
+    "\n",
+    "for track in range(array_len):\n",
+    "    for t in range(8):\n",
+    "        track_with_chi[track,t,3] = chi2[track]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "truth_and_ev_id = np.zeros((array_len,2))\n",
+    "\n",
+    "truth_and_ev_id[:,0] += truth[:]\n",
+    "truth_and_ev_id[:,1] += event_id[:]"
+   ]
+  },
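+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The double loop above can be written as a single broadcasting expression; an illustrative equivalent (not the notebook's own code):"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#Illustrative vectorized equivalent of the chi2-appending loop\n",
+    "_chi2_feature = np.broadcast_to(chi2.reshape(array_len, 1, 1), (array_len, 8, 1))\n",
+    "_track_with_chi_alt = np.concatenate([tracks, _chi2_feature], axis=2)\n",
+    "assert np.allclose(track_with_chi, _track_with_chi_alt)"
+   ]
+  },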
 + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "X_train, X_test, Y_train, Y_test = skl.model_selection.train_test_split(track_with_chi, truth_and_ev_id, shuffle=True, train_size=0.8)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#The first 4 hits are the input for the 4-to-4 model; the last 4 hits (plus chi2) are the proposed track\n", + "X_train_input = X_train[:,:4,:3]\n", + "X_test_input = X_test[:,:4,:3]\n", + "XY_train_proposed = X_train[:,4:,:]\n", + "XY_test_proposed = X_test[:,4:,:]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "filepath = \"trained_models/keras_RNN.h5\"\n", + "model_4_to_4 = load_model(filepath)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "prediction_train = reshapor(model_4_to_4.predict(X_train_input[:,:,:3]))\n", + "prediction_test = reshapor(model_4_to_4.predict(X_test_input[:,:,:3]))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#Replace the proposed coordinates by their residuals w.r.t. the 4-to-4 prediction\n", + "XY_train_proposed[:,:,:3] -= prediction_train[:,:,:]\n", + "XY_test_proposed[:,:,:3] -= prediction_test[:,:,:]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "XY_test_proposed.shape" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "X_train = XY_train_proposed\n", + "y_train = Y_train[:,0]\n", + "X_test = XY_test_proposed\n", + "y_test = Y_test[:,0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "filepath = \"trained_models/keras_model_classifier2.h5\"\n", + "\n", + "callbacks = [\n", + " EarlyStopping(monitor='val_loss', patience=20, min_delta=0.0003),\n", + " ModelCheckpoint(filepath, monitor='val_loss', save_best_only=True),\n", + " History(),\n", + " TensorBoard(log_dir=filepath[:-3] + \"/logs\", histogram_freq=1, batch_size=32)\n", + "]\n", + "\n", + "# create the classifier: stacked LSTMs over the 4 residual hits (plus chi2),\n", + "# ending in a single sigmoid unit that scores the match probability\n", + "model = Sequential()\n", + "#model.add(Dense(12, input_shape=(4,4)))\n", + "model.add(LSTM(50, return_sequences=True, input_shape=(4,4), activation='relu'))\n", + "model.add(BatchNormalization())\n", + "model.add(LSTM(50, return_sequences=True, activation='relu'))\n", + "model.add(BatchNormalization())\n", + "model.add(LSTM(50, return_sequences=False, activation='relu'))\n", + "model.add(BatchNormalization())\n", + "#model.add(LSTM(40, return_sequences=True, activation='relu'))\n", + "#model.add(Dropout(0.5))\n", + "#model.add(LSTM(4, activation='relu'))\n", + "#model.add(BatchNormalization())\n", + "model.add(Dense(100, activation='relu'))\n", + "model.add(Dense(1, activation='sigmoid'))\n", + "model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])\n", + "print(model.summary())\n", + "model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=500, batch_size=50, callbacks=callbacks, verbose=2)\n", + "\n", + "# Final evaluation: reload the checkpoint with the best validation loss\n", + "model = load_model(filepath)\n", + "scores = model.evaluate(X_test, y_test, verbose=0)\n", + "print(\"Accuracy: %.2f%%\" % (scores[1]*100))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "model = load_model(filepath)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "predictions = model.predict(X_test)" + ] + },
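 + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As a quick, illustrative check (assuming `predictions` has shape `(n_test, 1)`), the RNN classifier can be scored with the same ROC AUC metric that is used for the XGBoost model below:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#Illustrative: ROC AUC of the RNN classifier scores on the test set\n", + "rnn_scores = predictions[:,0]\n", + "print(\"RNN ROC AUC:\", skl.metrics.roc_auc_score(y_true=y_test, y_score=rnn_scores))" + ] + },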
 + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#Flatten the tracks back to 2D for XGBoost\n", + "X_train = reshapor_inv(X_train)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "X_train.shape" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "Y_train" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "Y_train.shape" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#Round-trip check: reshapor_inv should invert reshapor\n", + "test_data = np.random.random(size=(1000, 15))\n", + "np.all(test_data == reshapor_inv(reshapor(test_data)))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "X_test = reshapor_inv(X_test)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "X_test.shape" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# fit an XGBoost model on the training data\n", + "modelx = XGBClassifier(max_depth=3, n_estimators=1000, learning_rate=0.05).fit(X_train, y_train, verbose=1)\n", + "\n", + "predictions = modelx.predict_proba(X_test)\n", + "\n", + "pkl.dump(modelx, open(\"trained_models/XGB_Classifier2.xgb\", \"wb\"))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "predictions" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#Column 1 holds the probability of class 1 (matched), column 0 of class 0\n", + "predictions_true = predictions[:, 1]\n", + "predictions_false = predictions[:, 0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#Hard decision at a score threshold of 0.6\n", + "cut = 0.6\n", + "predictions_hard = (predictions_true > cut).astype(int)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "predictions_hard" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "predictions_true" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "y_test" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "n_bins = 20\n", + "\n", + "plt.figure()\n", + "n, bins, patches = plt.hist(predictions_true[y_test == 1], bins=n_bins, alpha=0.5, density=True)\n", + "plt.hist(predictions_true[y_test == 0], bins=bins, alpha=0.5, density=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "fpr, tpr, _ = skl.metrics.roc_curve(y_test, predictions_true, pos_label=1)\n", + "roc_auc = skl.metrics.roc_auc_score(y_true=y_test, y_score=predictions_true)\n", + "print(\"ROC AUC:\", roc_auc)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "plt.figure()\n", + "plt.plot(fpr, tpr)" + ] + },
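 + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The ROC curve scores tracks individually, but the actual goal is to pick the right track per event. The sketch below (an added illustration, assuming `Y_test[:,1]` still carries the event ids from the split) selects the highest-scoring track in each test event and counts how often it is the truly matched one, which is directly comparable to a chi2-only selection baseline." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#Illustrative event-level selection: take the track with the highest\n", + "#classifier score in every event and check it against the truth label\n", + "event_ids = Y_test[:,1].astype(int)\n", + "n_events = 0\n", + "n_right = 0\n", + "for ev in np.unique(event_ids):\n", + " mask = event_ids == ev\n", + " best = np.argmax(predictions_true[mask])\n", + " n_right += int(y_test[mask][best] == 1)\n", + " n_events += 1\n", + "print(\"fraction of events where the top-scored track is the true match:\", n_right/n_events)" + ] + },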
{}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.5" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/Classifier.ipynb b/Classifier.ipynb new file mode 100644 index 0000000..9aa6539 --- /dev/null +++ b/Classifier.ipynb @@ -0,0 +1,2771 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "c:\\users\\sa_li\\anaconda3\\envs\\rnn-tf-ker\\lib\\site-packages\\h5py\\__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n", + " from ._conv import register_converters as _register_converters\n", + "Using TensorFlow backend.\n" + ] + } + ], + "source": [ + "import pandas as pd\n", + "import numpy as np\n", + "import matplotlib as mpl\n", + "import random\n", + "import math\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "import tensorflow as tf\n", + "from tensorflow.python.framework import ops\n", + "from sklearn import preprocessing\n", + "import pickle as pkl\n", + "from pathlib import Path\n", + "from keras.datasets import imdb\n", + "from keras.models import Sequential\n", + "from keras.layers import Dense\n", + "from keras.layers import LSTM\n", + "from keras.layers import GRU\n", + "from keras.layers import Dropout, BatchNormalization, Bidirectional\n", + "from keras.layers import ConvLSTM2D\n", + "from keras.layers import Conv1D\n", + "#from keras.layers.convolutional import Conv1D\n", + "#from keras.layers.convolutional import MaxPooling1D\n", + "from keras.layers.embeddings import Embedding\n", + "from keras.preprocessing import sequence\n", + "from keras.callbacks import History\n", + "from keras.callbacks import EarlyStopping\n", + "from keras.callbacks import ModelCheckpoint\n", + "from keras.callbacks import TensorBoard\n", + "from keras.models import load_model\n", + "import keras.backend as K\n", + "from keras.losses import mean_squared_error\n", + "\n", + "import xgboost as xgb\n", + "from xgboost import XGBClassifier\n", + "from xgboost import plot_tree\n", + "from sklearn.metrics import accuracy_score\n", + "import graphviz\n", + "\n", + "import sklearn as skl\n", + "\n", + "#import seaborn as sns" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
x1y1z1x2y2z2x3y3z3x4...z6x7y7z7x8y8z8eventtrackchi2matched
0-2.24858223.380732-6.040000-6.48999928.598572-5.640000-21.72477167.052704-3.240000-22.225971...7.55999928.8026563.9014626.04000021.4213926.9788455.640000124.013.3963791.0
1-20.411108-9.4178874.760000-27.813803-6.9448434.760000-66.73694622.9032004.360000-74.096100...-4.04000026.880571-9.817033-4.84000019.684010-11.173258-5.000000220.03.0362351.0
2-20.411108-9.4178874.760000-27.813803-6.9448434.760000-66.73694622.9032004.360000-74.096100...-4.04000026.880571-9.817033-4.84000019.684010-11.173258-5.000000220.03.0362351.0
3-20.411108-9.4178874.760000-27.813803-6.9448434.760000-66.73694622.9032004.360000-74.096100...-3.08000029.303265-2.360749-1.40000022.469944-4.447415-1.080000220.025.0332890.0
4-20.411108-9.4178874.760000-27.813803-6.9448434.760000-66.73694622.9032004.360000-74.096100...-3.08000029.303265-2.360749-1.40000022.469944-4.447415-1.080000220.025.0332890.0
5-6.01801121.819384-23.240000-10.00999828.598572-23.320000-25.35665366.079544-23.639999-28.309521...-6.92000025.644503-13.621260-6.20000120.724909-8.660306-6.200001312.027.8346670.0
6-6.01801121.819384-23.240000-10.00999828.598572-23.320000-25.35665366.079544-23.639999-28.309521...-6.92000025.644503-13.621260-6.20000120.724909-8.660306-6.200001312.027.8346670.0
7-6.01801121.819384-23.240000-10.00999828.598572-23.320000-25.35665366.079544-23.639999-28.309521...-18.36000125.490572-16.829796-18.04000119.622780-11.321079-18.040001312.07.2924680.0
86.59081621.582119-40.6800009.94126428.127028-40.84000033.71446261.815155-41.63999942.568947...-53.320000-6.150001-28.598572-54.439999-5.408250-22.071955-54.520000326.010.0815281.0
920.04373210.304811-22.68000026.00914412.499014-22.44000168.27433017.165602-21.24000080.994446...-33.239998-20.296682-20.603376-32.279999-17.410870-16.661102-32.119999352.043.5656200.0
1020.04373210.304811-22.68000026.00914412.499014-22.44000168.27433017.165602-21.24000080.994446...-10.840000-21.332224-19.851011-10.440001-17.870090-15.552447-10.440001352.016.8551641.0
11-16.06982017.65578815.719999-21.21896219.93330215.559999-57.20545242.34206814.440001-65.752090...11.00000027.4429828.08611110.28000020.7478698.60487410.120000539.016.2590470.0
12-6.31365221.69692416.039999-4.48999928.59857215.63999915.48488968.72467814.12000023.103405...28.520000-7.870180-29.63175831.000000-9.177679-20.51060531.799999668.037.1937711.0
13-20.7555248.586395-10.680000-24.58447317.488115-10.680000-32.93503662.265152-10.600000-32.243843...11.5599994.249999-28.59857211.1600000.726311-24.61297211.080000891.019.8763730.0
14-20.7555248.586395-10.680000-24.58447317.488115-10.680000-32.93503662.265152-10.600000-32.243843...11.5599994.249999-28.59857211.1600000.726311-24.61297211.080000891.019.8763730.0
15-20.7555248.586395-10.680000-24.58447317.488115-10.680000-32.93503662.265152-10.600000-32.243843...11.5599994.249999-28.59857211.1600000.726311-24.61297211.080000891.019.8763730.0
16-20.7555248.586395-10.680000-24.58447317.488115-10.680000-32.93503662.265152-10.600000-32.243843...11.5599994.249999-28.59857211.1600000.726311-24.61297211.080000891.019.8763730.0
17-20.7555248.586395-10.680000-24.58447317.488115-10.680000-32.93503662.265152-10.600000-32.243843...-3.8000005.770000-28.598572-5.2400011.287747-23.778723-5.400001891.07.5833500.0
18-20.7555248.586395-10.680000-24.58447317.488115-10.680000-32.93503662.265152-10.600000-32.243843...-3.8000005.770000-28.598572-5.2400011.287747-23.778723-5.400001891.07.5833500.0
19-20.7555248.586395-10.680000-24.58447317.488115-10.680000-32.93503662.265152-10.600000-32.243843...-3.3200007.690000-28.598572-2.8400002.544224-23.258274-2.680000891.04.6119901.0
20-20.7555248.586395-10.680000-24.58447317.488115-10.680000-32.93503662.265152-10.600000-32.243843...-32.91999811.049999-28.598572-35.7200014.244162-22.554136-36.119999891.035.6422840.0
21-3.412670-22.898550-22.360001-5.110001-28.598572-21.880001-25.453243-66.053665-19.160000-33.054714...-10.040001-6.56999928.598572-9.800000-4.83544522.309217-9.7200001026.036.1785051.0
2218.35992414.369882-43.23999824.30337017.748846-43.48000064.02515429.886627-44.20000176.569237...-48.759998-19.520025-21.167650-49.799999-17.012880-17.621937-49.8800011027.07.3863581.0
2318.35992414.369882-43.23999824.30337017.748846-43.48000064.02515429.886627-44.20000176.569237...-56.200001-17.319500-22.766426-56.680000-14.425314-18.336964-56.8400001027.016.4788610.0
2418.35992414.369882-43.23999824.30337017.748846-43.48000064.02515429.886627-44.20000176.569237...-56.200001-17.319500-22.766426-56.680000-14.425314-18.336964-56.8400001027.016.4788610.0
25-22.1025705.33434015.799999-28.2896885.48021516.440001-67.81362918.88494919.320000-78.018410...43.40000224.62474616.75974744.84000018.84976013.18731644.9199981067.014.5809061.0
26-22.1025705.33434015.799999-28.2896885.48021516.440001-67.81362918.88494919.320000-78.018410...43.40000224.62474616.75974744.84000018.84976013.18731644.9199981067.014.5809061.0
27-22.1025705.33434015.799999-28.2896885.48021516.440001-67.81362918.88494919.320000-78.018410...43.40000224.62474616.75974744.84000018.84976013.18731644.9199981067.014.5809061.0
28-17.54802717.04349514.920000-19.34204121.29696314.839999-30.44088463.70515413.720000-31.450989...-27.71999916.429581-23.412991-29.95999912.226481-19.247751-30.2800011097.044.6859170.0
29-17.54802717.04349514.920000-19.34204121.29696314.839999-30.44088463.70515413.720000-31.450989...-27.71999916.429581-23.412991-29.95999912.226481-19.247751-30.2800011097.044.6859170.0
..................................................................
109791-2.969208-23.08223926.760000-6.150001-28.59857226.760000-33.783745-61.77515827.320000-44.113327...53.79999914.92480924.50627154.84000013.83403218.58188155.16000019090.023.2342001.0
109792-2.969208-23.08223926.760000-6.150001-28.59857226.760000-33.783745-61.77515827.320000-44.113327...53.79999914.92480924.50627154.84000013.83403218.58188155.16000019090.023.2342001.0
109793-19.62278011.32107912.679999-26.43558711.18655412.599999-67.52375819.96678511.960000-78.462158...-21.48000020.10251820.744446-22.52000013.83403218.581881-22.52000019109.029.6021160.0
1097948.309232-20.87032910.36000014.487940-24.82367510.44000045.608902-53.93861811.00000052.103645...-4.840000-27.986851-6.412251-5.400001-20.686640-8.752694-5.40000119130.035.6180420.0
10979515.478537-17.90070534.20000119.212601-21.39100834.36000146.061451-53.48606935.00000051.978550...38.599998-26.26253711.71914737.959999-20.5718359.02985837.87999719146.029.9436660.0
10979615.478537-17.90070534.20000119.212601-21.39100834.36000146.061451-53.48606935.00000051.978550...43.320000-26.33670011.49089242.520000-20.6024498.95594842.43999919146.034.5578000.0
10979715.478537-17.90070534.20000119.212601-21.39100834.36000146.061451-53.48606935.00000051.978550...45.160000-26.46030811.11047045.720001-20.7555248.58639545.72000119146.017.6821921.0
1097980.30843524.18436619.0800002.07000128.59857219.00000040.15768858.09515818.43999958.581963...16.6800007.690000-28.59857212.6799992.913775-23.10520011.88000019171.015.4717781.0
1097990.30843524.18436619.0800002.07000128.59857219.00000040.15768858.09515818.43999958.581963...16.6800007.690000-28.59857212.6799992.913775-23.10520011.88000019171.015.4717781.0
1098000.30843524.18436619.0800002.07000128.59857219.00000040.15768858.09515818.43999958.581963...16.6800007.690000-28.59857212.6799992.913775-23.10520011.88000019171.015.4717781.0
1098010.30843524.18436619.0800002.07000128.59857219.00000040.15768858.09515818.43999958.581963...16.6800007.690000-28.59857212.6799992.913775-23.10520011.88000019171.015.4717781.0
1098020.30843524.18436619.0800002.07000128.59857219.00000040.15768858.09515818.43999958.581963...16.6800007.690000-28.59857212.6799992.913775-23.10520011.88000019171.015.4717781.0
1098030.30843524.18436619.0800002.07000128.59857219.00000040.15768858.09515818.43999958.581963...16.6800007.690000-28.59857212.6799992.913775-23.10520011.88000019171.015.4717781.0
109804-8.882037-20.633064-6.040000-6.230000-28.598572-5.800000-1.250000-70.390724-4.680000-2.370000...-11.480000-25.94115812.708245-10.840000-21.8576515.925623-10.52000019219.032.8466111.0
109805-9.71352920.28865135.320000-12.41685726.32840535.400002-10.10999970.39072434.279999-1.789999...42.27999928.314409-5.40413147.40000222.531174-4.29959548.20000119260.09.6615841.0
109806-9.71352920.28865135.320000-12.41685726.32840535.400002-10.10999970.39072434.279999-1.789999...42.27999928.314409-5.40413147.40000222.531174-4.29959548.20000119260.09.6615841.0
109807-9.71352920.28865135.320000-12.41685726.32840535.400002-10.10999970.39072434.279999-1.789999...42.27999928.314409-5.40413147.40000222.531174-4.29959548.20000119260.09.6615841.0
109808-2.082284-23.449615-2.9200000.809999-28.598572-2.52000015.929216-68.6056210.28000018.170271...42.759998-27.9930326.39322946.040001-23.5108431.93446446.43999919518.011.0216221.0
109809-2.082284-23.449615-2.9200000.809999-28.598572-2.52000015.929216-68.6056210.28000018.170271...42.759998-27.9930326.39322946.040001-23.5108431.93446446.43999919518.011.0216221.0
109810-2.082284-23.449615-2.9200000.809999-28.598572-2.52000015.929216-68.6056210.28000018.170271...42.759998-27.9930326.39322946.040001-23.5108431.93446446.43999919518.011.0216221.0
109811-19.921274-10.60045319.559999-24.649467-16.68366219.400000-56.025154-43.74303418.840000-65.964081...9.959999-0.57000028.5985728.599999-2.47031323.2888878.36000019551.07.5439671.0
109812-19.921274-10.60045319.559999-24.649467-16.68366219.400000-56.025154-43.74303418.840000-65.964081...9.959999-0.57000028.5985728.599999-2.47031323.2888878.36000019551.07.5439671.0
1098131.86055323.541458-11.4800003.43000128.598572-11.32000125.45324366.053665-10.04000134.135880...-5.6400000.570000-28.598572-6.120000-0.382345-24.153751-6.20000119617.08.0627631.0
10981423.174084-2.74747811.32000027.646933-7.45841311.55999953.698204-45.84931613.32000058.983761...40.439999-11.70492226.84565541.320000-4.83544522.30921741.48000019682.017.1399381.0
10981523.174084-2.74747811.32000027.646933-7.45841311.55999953.698204-45.84931613.32000058.983761...40.439999-11.70492226.84565541.320000-4.83544522.30921741.48000019682.017.1399381.0
109816-12.37430219.186522-29.720001-16.42958123.412991-29.799999-33.76641861.785152-30.760000-34.045780...-45.63999929.303265-2.360749-48.04000123.847605-1.121449-48.27999919721.011.0050831.0
109817-20.9392118.142934-22.440001-26.41086611.262639-22.760000-59.61515837.524971-24.840000-67.946777...-56.27999926.410866-11.262639-57.72000120.173845-9.990692-57.95999919787.07.7969331.0
10981810.748274-19.860044-38.52000013.322956-25.670086-38.43999925.279379-66.100250-38.04000126.593647...-25.080000-20.05397820.779713-25.560001-16.66110217.410870-25.56000119826.019.2362191.0
10981910.748274-19.860044-38.52000013.322956-25.670086-38.43999925.279379-66.100250-38.04000126.593647...-25.080000-20.05397820.779713-25.560001-16.66110217.410870-25.56000119826.019.2362191.0
10982010.748274-19.860044-38.52000013.322956-25.670086-38.43999925.279379-66.100250-38.04000126.593647...-9.080000-20.76591120.262461-7.720000-17.76975816.951651-7.56000019826.020.7843910.0
\n", + "

109821 rows × 27 columns

\n", + "
" + ], + "text/plain": [ + " x1 y1 z1 x2 y2 z2 \\\n", + "0 -2.248582 23.380732 -6.040000 -6.489999 28.598572 -5.640000 \n", + "1 -20.411108 -9.417887 4.760000 -27.813803 -6.944843 4.760000 \n", + "2 -20.411108 -9.417887 4.760000 -27.813803 -6.944843 4.760000 \n", + "3 -20.411108 -9.417887 4.760000 -27.813803 -6.944843 4.760000 \n", + "4 -20.411108 -9.417887 4.760000 -27.813803 -6.944843 4.760000 \n", + "5 -6.018011 21.819384 -23.240000 -10.009998 28.598572 -23.320000 \n", + "6 -6.018011 21.819384 -23.240000 -10.009998 28.598572 -23.320000 \n", + "7 -6.018011 21.819384 -23.240000 -10.009998 28.598572 -23.320000 \n", + "8 6.590816 21.582119 -40.680000 9.941264 28.127028 -40.840000 \n", + "9 20.043732 10.304811 -22.680000 26.009144 12.499014 -22.440001 \n", + "10 20.043732 10.304811 -22.680000 26.009144 12.499014 -22.440001 \n", + "11 -16.069820 17.655788 15.719999 -21.218962 19.933302 15.559999 \n", + "12 -6.313652 21.696924 16.039999 -4.489999 28.598572 15.639999 \n", + "13 -20.755524 8.586395 -10.680000 -24.584473 17.488115 -10.680000 \n", + "14 -20.755524 8.586395 -10.680000 -24.584473 17.488115 -10.680000 \n", + "15 -20.755524 8.586395 -10.680000 -24.584473 17.488115 -10.680000 \n", + "16 -20.755524 8.586395 -10.680000 -24.584473 17.488115 -10.680000 \n", + "17 -20.755524 8.586395 -10.680000 -24.584473 17.488115 -10.680000 \n", + "18 -20.755524 8.586395 -10.680000 -24.584473 17.488115 -10.680000 \n", + "19 -20.755524 8.586395 -10.680000 -24.584473 17.488115 -10.680000 \n", + "20 -20.755524 8.586395 -10.680000 -24.584473 17.488115 -10.680000 \n", + "21 -3.412670 -22.898550 -22.360001 -5.110001 -28.598572 -21.880001 \n", + "22 18.359924 14.369882 -43.239998 24.303370 17.748846 -43.480000 \n", + "23 18.359924 14.369882 -43.239998 24.303370 17.748846 -43.480000 \n", + "24 18.359924 14.369882 -43.239998 24.303370 17.748846 -43.480000 \n", + "25 -22.102570 5.334340 15.799999 -28.289688 5.480215 16.440001 \n", + "26 -22.102570 5.334340 15.799999 -28.289688 5.480215 16.440001 \n", + "27 -22.102570 5.334340 15.799999 -28.289688 5.480215 16.440001 \n", + "28 -17.548027 17.043495 14.920000 -19.342041 21.296963 14.839999 \n", + "29 -17.548027 17.043495 14.920000 -19.342041 21.296963 14.839999 \n", + "... ... ... ... ... ... ... 
\n", + "109791 -2.969208 -23.082239 26.760000 -6.150001 -28.598572 26.760000 \n", + "109792 -2.969208 -23.082239 26.760000 -6.150001 -28.598572 26.760000 \n", + "109793 -19.622780 11.321079 12.679999 -26.435587 11.186554 12.599999 \n", + "109794 8.309232 -20.870329 10.360000 14.487940 -24.823675 10.440000 \n", + "109795 15.478537 -17.900705 34.200001 19.212601 -21.391008 34.360001 \n", + "109796 15.478537 -17.900705 34.200001 19.212601 -21.391008 34.360001 \n", + "109797 15.478537 -17.900705 34.200001 19.212601 -21.391008 34.360001 \n", + "109798 0.308435 24.184366 19.080000 2.070001 28.598572 19.000000 \n", + "109799 0.308435 24.184366 19.080000 2.070001 28.598572 19.000000 \n", + "109800 0.308435 24.184366 19.080000 2.070001 28.598572 19.000000 \n", + "109801 0.308435 24.184366 19.080000 2.070001 28.598572 19.000000 \n", + "109802 0.308435 24.184366 19.080000 2.070001 28.598572 19.000000 \n", + "109803 0.308435 24.184366 19.080000 2.070001 28.598572 19.000000 \n", + "109804 -8.882037 -20.633064 -6.040000 -6.230000 -28.598572 -5.800000 \n", + "109805 -9.713529 20.288651 35.320000 -12.416857 26.328405 35.400002 \n", + "109806 -9.713529 20.288651 35.320000 -12.416857 26.328405 35.400002 \n", + "109807 -9.713529 20.288651 35.320000 -12.416857 26.328405 35.400002 \n", + "109808 -2.082284 -23.449615 -2.920000 0.809999 -28.598572 -2.520000 \n", + "109809 -2.082284 -23.449615 -2.920000 0.809999 -28.598572 -2.520000 \n", + "109810 -2.082284 -23.449615 -2.920000 0.809999 -28.598572 -2.520000 \n", + "109811 -19.921274 -10.600453 19.559999 -24.649467 -16.683662 19.400000 \n", + "109812 -19.921274 -10.600453 19.559999 -24.649467 -16.683662 19.400000 \n", + "109813 1.860553 23.541458 -11.480000 3.430001 28.598572 -11.320001 \n", + "109814 23.174084 -2.747478 11.320000 27.646933 -7.458413 11.559999 \n", + "109815 23.174084 -2.747478 11.320000 27.646933 -7.458413 11.559999 \n", + "109816 -12.374302 19.186522 -29.720001 -16.429581 23.412991 -29.799999 \n", + "109817 -20.939211 8.142934 -22.440001 -26.410866 11.262639 -22.760000 \n", + "109818 10.748274 -19.860044 -38.520000 13.322956 -25.670086 -38.439999 \n", + "109819 10.748274 -19.860044 -38.520000 13.322956 -25.670086 -38.439999 \n", + "109820 10.748274 -19.860044 -38.520000 13.322956 -25.670086 -38.439999 \n", + "\n", + " x3 y3 z3 x4 ... z6 \\\n", + "0 -21.724771 67.052704 -3.240000 -22.225971 ... 7.559999 \n", + "1 -66.736946 22.903200 4.360000 -74.096100 ... -4.040000 \n", + "2 -66.736946 22.903200 4.360000 -74.096100 ... -4.040000 \n", + "3 -66.736946 22.903200 4.360000 -74.096100 ... -3.080000 \n", + "4 -66.736946 22.903200 4.360000 -74.096100 ... -3.080000 \n", + "5 -25.356653 66.079544 -23.639999 -28.309521 ... -6.920000 \n", + "6 -25.356653 66.079544 -23.639999 -28.309521 ... -6.920000 \n", + "7 -25.356653 66.079544 -23.639999 -28.309521 ... -18.360001 \n", + "8 33.714462 61.815155 -41.639999 42.568947 ... -53.320000 \n", + "9 68.274330 17.165602 -21.240000 80.994446 ... -33.239998 \n", + "10 68.274330 17.165602 -21.240000 80.994446 ... -10.840000 \n", + "11 -57.205452 42.342068 14.440001 -65.752090 ... 11.000000 \n", + "12 15.484889 68.724678 14.120000 23.103405 ... 28.520000 \n", + "13 -32.935036 62.265152 -10.600000 -32.243843 ... 11.559999 \n", + "14 -32.935036 62.265152 -10.600000 -32.243843 ... 11.559999 \n", + "15 -32.935036 62.265152 -10.600000 -32.243843 ... 11.559999 \n", + "16 -32.935036 62.265152 -10.600000 -32.243843 ... 11.559999 \n", + "17 -32.935036 62.265152 -10.600000 -32.243843 ... 
-3.800000 \n", + "18 -32.935036 62.265152 -10.600000 -32.243843 ... -3.800000 \n", + "19 -32.935036 62.265152 -10.600000 -32.243843 ... -3.320000 \n", + "20 -32.935036 62.265152 -10.600000 -32.243843 ... -32.919998 \n", + "21 -25.453243 -66.053665 -19.160000 -33.054714 ... -10.040001 \n", + "22 64.025154 29.886627 -44.200001 76.569237 ... -48.759998 \n", + "23 64.025154 29.886627 -44.200001 76.569237 ... -56.200001 \n", + "24 64.025154 29.886627 -44.200001 76.569237 ... -56.200001 \n", + "25 -67.813629 18.884949 19.320000 -78.018410 ... 43.400002 \n", + "26 -67.813629 18.884949 19.320000 -78.018410 ... 43.400002 \n", + "27 -67.813629 18.884949 19.320000 -78.018410 ... 43.400002 \n", + "28 -30.440884 63.705154 13.720000 -31.450989 ... -27.719999 \n", + "29 -30.440884 63.705154 13.720000 -31.450989 ... -27.719999 \n", + "... ... ... ... ... ... ... \n", + "109791 -33.783745 -61.775158 27.320000 -44.113327 ... 53.799999 \n", + "109792 -33.783745 -61.775158 27.320000 -44.113327 ... 53.799999 \n", + "109793 -67.523758 19.966785 11.960000 -78.462158 ... -21.480000 \n", + "109794 45.608902 -53.938618 11.000000 52.103645 ... -4.840000 \n", + "109795 46.061451 -53.486069 35.000000 51.978550 ... 38.599998 \n", + "109796 46.061451 -53.486069 35.000000 51.978550 ... 43.320000 \n", + "109797 46.061451 -53.486069 35.000000 51.978550 ... 45.160000 \n", + "109798 40.157688 58.095158 18.439999 58.581963 ... 16.680000 \n", + "109799 40.157688 58.095158 18.439999 58.581963 ... 16.680000 \n", + "109800 40.157688 58.095158 18.439999 58.581963 ... 16.680000 \n", + "109801 40.157688 58.095158 18.439999 58.581963 ... 16.680000 \n", + "109802 40.157688 58.095158 18.439999 58.581963 ... 16.680000 \n", + "109803 40.157688 58.095158 18.439999 58.581963 ... 16.680000 \n", + "109804 -1.250000 -70.390724 -4.680000 -2.370000 ... -11.480000 \n", + "109805 -10.109999 70.390724 34.279999 -1.789999 ... 42.279999 \n", + "109806 -10.109999 70.390724 34.279999 -1.789999 ... 42.279999 \n", + "109807 -10.109999 70.390724 34.279999 -1.789999 ... 42.279999 \n", + "109808 15.929216 -68.605621 0.280000 18.170271 ... 42.759998 \n", + "109809 15.929216 -68.605621 0.280000 18.170271 ... 42.759998 \n", + "109810 15.929216 -68.605621 0.280000 18.170271 ... 42.759998 \n", + "109811 -56.025154 -43.743034 18.840000 -65.964081 ... 9.959999 \n", + "109812 -56.025154 -43.743034 18.840000 -65.964081 ... 9.959999 \n", + "109813 25.453243 66.053665 -10.040001 34.135880 ... -5.640000 \n", + "109814 53.698204 -45.849316 13.320000 58.983761 ... 40.439999 \n", + "109815 53.698204 -45.849316 13.320000 58.983761 ... 40.439999 \n", + "109816 -33.766418 61.785152 -30.760000 -34.045780 ... -45.639999 \n", + "109817 -59.615158 37.524971 -24.840000 -67.946777 ... -56.279999 \n", + "109818 25.279379 -66.100250 -38.040001 26.593647 ... -25.080000 \n", + "109819 25.279379 -66.100250 -38.040001 26.593647 ... -25.080000 \n", + "109820 25.279379 -66.100250 -38.040001 26.593647 ... 
-9.080000 \n", + "\n", + " x7 y7 z7 x8 y8 z8 \\\n", + "0 28.802656 3.901462 6.040000 21.421392 6.978845 5.640000 \n", + "1 26.880571 -9.817033 -4.840000 19.684010 -11.173258 -5.000000 \n", + "2 26.880571 -9.817033 -4.840000 19.684010 -11.173258 -5.000000 \n", + "3 29.303265 -2.360749 -1.400000 22.469944 -4.447415 -1.080000 \n", + "4 29.303265 -2.360749 -1.400000 22.469944 -4.447415 -1.080000 \n", + "5 25.644503 -13.621260 -6.200001 20.724909 -8.660306 -6.200001 \n", + "6 25.644503 -13.621260 -6.200001 20.724909 -8.660306 -6.200001 \n", + "7 25.490572 -16.829796 -18.040001 19.622780 -11.321079 -18.040001 \n", + "8 -6.150001 -28.598572 -54.439999 -5.408250 -22.071955 -54.520000 \n", + "9 -20.296682 -20.603376 -32.279999 -17.410870 -16.661102 -32.119999 \n", + "10 -21.332224 -19.851011 -10.440001 -17.870090 -15.552447 -10.440001 \n", + "11 27.442982 8.086111 10.280000 20.747869 8.604874 10.120000 \n", + "12 -7.870180 -29.631758 31.000000 -9.177679 -20.510605 31.799999 \n", + "13 4.249999 -28.598572 11.160000 0.726311 -24.612972 11.080000 \n", + "14 4.249999 -28.598572 11.160000 0.726311 -24.612972 11.080000 \n", + "15 4.249999 -28.598572 11.160000 0.726311 -24.612972 11.080000 \n", + "16 4.249999 -28.598572 11.160000 0.726311 -24.612972 11.080000 \n", + "17 5.770000 -28.598572 -5.240001 1.287747 -23.778723 -5.400001 \n", + "18 5.770000 -28.598572 -5.240001 1.287747 -23.778723 -5.400001 \n", + "19 7.690000 -28.598572 -2.840000 2.544224 -23.258274 -2.680000 \n", + "20 11.049999 -28.598572 -35.720001 4.244162 -22.554136 -36.119999 \n", + "21 -6.569999 28.598572 -9.800000 -4.835445 22.309217 -9.720000 \n", + "22 -19.520025 -21.167650 -49.799999 -17.012880 -17.621937 -49.880001 \n", + "23 -17.319500 -22.766426 -56.680000 -14.425314 -18.336964 -56.840000 \n", + "24 -17.319500 -22.766426 -56.680000 -14.425314 -18.336964 -56.840000 \n", + "25 24.624746 16.759747 44.840000 18.849760 13.187316 44.919998 \n", + "26 24.624746 16.759747 44.840000 18.849760 13.187316 44.919998 \n", + "27 24.624746 16.759747 44.840000 18.849760 13.187316 44.919998 \n", + "28 16.429581 -23.412991 -29.959999 12.226481 -19.247751 -30.280001 \n", + "29 16.429581 -23.412991 -29.959999 12.226481 -19.247751 -30.280001 \n", + "... ... ... ... ... ... ... 
\n", + "109791 14.924809 24.506271 54.840000 13.834032 18.581881 55.160000 \n", + "109792 14.924809 24.506271 54.840000 13.834032 18.581881 55.160000 \n", + "109793 20.102518 20.744446 -22.520000 13.834032 18.581881 -22.520000 \n", + "109794 -27.986851 -6.412251 -5.400001 -20.686640 -8.752694 -5.400001 \n", + "109795 -26.262537 11.719147 37.959999 -20.571835 9.029858 37.879997 \n", + "109796 -26.336700 11.490892 42.520000 -20.602449 8.955948 42.439999 \n", + "109797 -26.460308 11.110470 45.720001 -20.755524 8.586395 45.720001 \n", + "109798 7.690000 -28.598572 12.679999 2.913775 -23.105200 11.880000 \n", + "109799 7.690000 -28.598572 12.679999 2.913775 -23.105200 11.880000 \n", + "109800 7.690000 -28.598572 12.679999 2.913775 -23.105200 11.880000 \n", + "109801 7.690000 -28.598572 12.679999 2.913775 -23.105200 11.880000 \n", + "109802 7.690000 -28.598572 12.679999 2.913775 -23.105200 11.880000 \n", + "109803 7.690000 -28.598572 12.679999 2.913775 -23.105200 11.880000 \n", + "109804 -25.941158 12.708245 -10.840000 -21.857651 5.925623 -10.520000 \n", + "109805 28.314409 -5.404131 47.400002 22.531174 -4.299595 48.200001 \n", + "109806 28.314409 -5.404131 47.400002 22.531174 -4.299595 48.200001 \n", + "109807 28.314409 -5.404131 47.400002 22.531174 -4.299595 48.200001 \n", + "109808 -27.993032 6.393229 46.040001 -23.510843 1.934464 46.439999 \n", + "109809 -27.993032 6.393229 46.040001 -23.510843 1.934464 46.439999 \n", + "109810 -27.993032 6.393229 46.040001 -23.510843 1.934464 46.439999 \n", + "109811 -0.570000 28.598572 8.599999 -2.470313 23.288887 8.360000 \n", + "109812 -0.570000 28.598572 8.599999 -2.470313 23.288887 8.360000 \n", + "109813 0.570000 -28.598572 -6.120000 -0.382345 -24.153751 -6.200001 \n", + "109814 -11.704922 26.845655 41.320000 -4.835445 22.309217 41.480000 \n", + "109815 -11.704922 26.845655 41.320000 -4.835445 22.309217 41.480000 \n", + "109816 29.303265 -2.360749 -48.040001 23.847605 -1.121449 -48.279999 \n", + "109817 26.410866 -11.262639 -57.720001 20.173845 -9.990692 -57.959999 \n", + "109818 -20.053978 20.779713 -25.560001 -16.661102 17.410870 -25.560001 \n", + "109819 -20.053978 20.779713 -25.560001 -16.661102 17.410870 -25.560001 \n", + "109820 -20.765911 20.262461 -7.720000 -17.769758 16.951651 -7.560000 \n", + "\n", + " event trackchi2 matched \n", + "0 124.0 13.396379 1.0 \n", + "1 220.0 3.036235 1.0 \n", + "2 220.0 3.036235 1.0 \n", + "3 220.0 25.033289 0.0 \n", + "4 220.0 25.033289 0.0 \n", + "5 312.0 27.834667 0.0 \n", + "6 312.0 27.834667 0.0 \n", + "7 312.0 7.292468 0.0 \n", + "8 326.0 10.081528 1.0 \n", + "9 352.0 43.565620 0.0 \n", + "10 352.0 16.855164 1.0 \n", + "11 539.0 16.259047 0.0 \n", + "12 668.0 37.193771 1.0 \n", + "13 891.0 19.876373 0.0 \n", + "14 891.0 19.876373 0.0 \n", + "15 891.0 19.876373 0.0 \n", + "16 891.0 19.876373 0.0 \n", + "17 891.0 7.583350 0.0 \n", + "18 891.0 7.583350 0.0 \n", + "19 891.0 4.611990 1.0 \n", + "20 891.0 35.642284 0.0 \n", + "21 1026.0 36.178505 1.0 \n", + "22 1027.0 7.386358 1.0 \n", + "23 1027.0 16.478861 0.0 \n", + "24 1027.0 16.478861 0.0 \n", + "25 1067.0 14.580906 1.0 \n", + "26 1067.0 14.580906 1.0 \n", + "27 1067.0 14.580906 1.0 \n", + "28 1097.0 44.685917 0.0 \n", + "29 1097.0 44.685917 0.0 \n", + "... ... ... ... 
\n", + "109791 19090.0 23.234200 1.0 \n", + "109792 19090.0 23.234200 1.0 \n", + "109793 19109.0 29.602116 0.0 \n", + "109794 19130.0 35.618042 0.0 \n", + "109795 19146.0 29.943666 0.0 \n", + "109796 19146.0 34.557800 0.0 \n", + "109797 19146.0 17.682192 1.0 \n", + "109798 19171.0 15.471778 1.0 \n", + "109799 19171.0 15.471778 1.0 \n", + "109800 19171.0 15.471778 1.0 \n", + "109801 19171.0 15.471778 1.0 \n", + "109802 19171.0 15.471778 1.0 \n", + "109803 19171.0 15.471778 1.0 \n", + "109804 19219.0 32.846611 1.0 \n", + "109805 19260.0 9.661584 1.0 \n", + "109806 19260.0 9.661584 1.0 \n", + "109807 19260.0 9.661584 1.0 \n", + "109808 19518.0 11.021622 1.0 \n", + "109809 19518.0 11.021622 1.0 \n", + "109810 19518.0 11.021622 1.0 \n", + "109811 19551.0 7.543967 1.0 \n", + "109812 19551.0 7.543967 1.0 \n", + "109813 19617.0 8.062763 1.0 \n", + "109814 19682.0 17.139938 1.0 \n", + "109815 19682.0 17.139938 1.0 \n", + "109816 19721.0 11.005083 1.0 \n", + "109817 19787.0 7.796933 1.0 \n", + "109818 19826.0 19.236219 1.0 \n", + "109819 19826.0 19.236219 1.0 \n", + "109820 19826.0 20.784391 0.0 \n", + "\n", + "[109821 rows x 27 columns]" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "matched_unmatched_tracks = pd.read_pickle('matched_8hittracks2.pkl')\n", + "matched_unmatched_tracks" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "c:\\users\\sa_li\\anaconda3\\envs\\rnn-tf-ker\\lib\\site-packages\\matplotlib\\axes\\_axes.py:6462: UserWarning: The 'normed' kwarg is deprecated, and has been replaced by the 'density' kwarg.\n", + " warnings.warn(\"The 'normed' kwarg is deprecated, and has been \"\n" + ] + }, + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAX4AAAD8CAYAAABw1c+bAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAGUhJREFUeJzt3X+QVeWd5/H3x+aXjg5qS7YSUWlKooC0jTYO/giiJAQTV9RggesmJKVBarHKddZywd+S0dFNVhJLtxwyEg2SAQtlhmRI+QszxJQRmtD+QCR0WFY6WNo2iKCidvzuH/fQaa8NHLpvc+l+Pq+qLs55znPOec5p7ueefu45z1VEYGZm6Tik3A0wM7MDy8FvZpYYB7+ZWWIc/GZmiXHwm5klxsFvZpYYB7+ZWWIc/GZmiXHwm5klple5G1DsmGOOiUGDBpW7GWZm3crq1avfiYgBeeoedME/aNAg6urqyt0MM7NuRdL/y1vXXT1mZolx8JuZJcbBb2aWmIOuj9/MDk6ffPIJjY2N7Nq1q9xNSVq/fv0YOHAgvXv37vA2cgW/pAnAT4AK4J8j4u6i5X2BnwOnA83A5IjYlC2rBv4J+FvgU2BURPh/jlk309jYyBFHHMGgQYOQVO7mJCkiaG5uprGxkaqqqg5vZ59dPZIqgAeAC4BhwOWShhVVuxLYFhEnAnOAe7J1ewGPAtMjYjgwFvikw601s7LZtWsXlZWVDv0ykkRlZWWn/+rK08d/BtAQERsj4mNgITCxqM5E4JFsejEwToX/HeOBlyPiJYCIaI6Iv3SqxWZWNg798ivF7yBP8B8LbG4z35iVtVsnIlqA7UAl8GUgJD0p6Q+SbmhvB5KmSaqTVNfU1LS/x2BmZvshTx9/e28vxV/Uu6c6vYBzgFHAB8CzklZHxLOfqRgxF5gLUFtb6y8BNusG5jz9x5Ju77qvfbmk2ytWX1/Pli1b+MY3vrHXeocffjg7d+7s8H52P4R6zDHHdHgbXS1P8DcCx7WZHwhs2UOdxqxfvz+wNSv/j4h4B0DSMuA04FmsJEr94tsfXf1CNSul+vp66urq9hn8KcjT1bMKGCKpSlIfYAqwtKjOUmBqNj0JWB4RATwJVEs6LHtDOBd4rTRNN7PUbNq0iZNPPpmrrrqKU045hSuuuIJnnnmGs88+myFDhrBy5UpWrlzJWWedxciRIznrrLNYv349H3/8MbfeeiuLFi2ipqaGRYsWsXPnTr73ve8xYsQIqqurefzxx1v3c9NNN3HqqacyevRo3nrrLQCampr41re+xahRoxg1ahS/+93vAGhubmb8+PGMHDmSq6++mkL0Hdz2GfxZn/01FEJ8HfBYRKyVNFvSRVm1h4BKSQ3A3wMzs3W3AfdSePOoB/4QEf9e+sMws1Q0NDRw7bXX8vLLL/P666/zi1/8gueff54f/ehH3HXXXZx88smsWLGCNWvWMHv2bG688Ub69OnD7NmzmTx5MvX19UyePJkf/OAH9O/fn1deeYWXX36Z888/H4D333+f0aNH89JLLzFmzBh++tOfAnDttddy3XXXsWrVKh5//HGuuuoqAO644w7OOecc1qxZw0UXXcQbb7xRtnOTV677+CNiGbCsqOzWNtO7gMv2sO6jFG7pNDPrtKqqKkaMGAHA8OHDGTduHJIYMWIEmzZtYvv27UydOpUNGzYgiU8+af8O8meeeYaFCxe2zh911FEA9OnThwsvvBCA008/naeffrq1/muv/bXD4r333mPHjh2sWLGCJ554AoBvfvObrds5mPnJ3RIpZ1+7WUr69u3bOn3IIYe0zh9yyCG0tLRwyy23cN5557FkyRI2bdrE2LFj291ORLR7a2Tv3r1byysqKmhpaQHg008/5YUXXuDQQw/93Drd7TZXj9VjZj3K9u3bOfbYwh3nDz/8cGv5EUccwY4dO1rnx48fz/333986v23btr1ut7h+fX09AGPGjGHBggUA/PrXv97ndg4GvuI3sw45WO/quuGGG5g6dSr33ntva789wHnnncfdd99NTU0Ns2bN4uabb2bGjBmccsopVFRUcNttt3HppZfucbv33XcfM2bMoLq6mpaWFsaMGcODDz7IbbfdxuWXX85pp53Gueeey/HHH38gDrNTdLB9Al1bWxvd8YtYUuzqOVhf+NY11q1bx9ChQ8vdDKP930X2jFRtnvXd1WNmlhgHv5lZYhz8ZmaJcfCbmSXGwW9mlhgHv5lZYnwfv5l1zHP/WNrtnTertNuj8ADX+PHj+dKXvlTybd91113ceOONe63z3e9+lwsvvJBJkyZ1aB+33347hx9+ONdff32H1t8TX/GbWY/18MMPs2VL8SjypXHXXXd1yXYPBAe/mXULmzZtYujQoXz/+99n+PDhjB8/ng8//BAoDJ8wevRoqqurueSSS9i2bRuLFy+mrq6OK664gpqamta6u40dO5brrruOMWPGMHToUFatWsWll17KkCFDuPnmm1vrXXzxxZx++ukMHz6cuXPnAjBz5kw+/PBDampquOKKKwD4+c9/TnV1Naeeeirf/va3W9dfsWIFZ511FoMHD2bx4sWt5T/84Q8ZNWoU1dXV3Hbbba3ld955JyeddBJf/epXWb9+felPJA5+M+tGNmzYwIwZM1i7di1HHnlk6xj63/nOd7jnnnt4+eWXGTFiBHfccQeTJk2itraWBQsWUF9f3+7gan369GHFihVMnz6diRMn8sADD/Dqq6/y8MMP09zcDMC8efNYvXo1dXV13HfffTQ3N3P33Xdz6KGHUl9fz4IFC1i7di133nkny5cv56WXXuInP/lJ6z7efPNNnn/+eX71q18xc+ZMAJ566ik2bNjAypUrqa+vZ/Xq1axYsYLVq1ezcOFC1qxZwxNPPMGqVau65Dy6j9/Muo2qqipqamqAwpDJu4dhfvfddzn33HMBmDp1Kpdd1u4o8Z9z0UWFrxQZMWIEw4cP54tf/CIAgwcPZvPmzVRWVnLfffexZMkSADZv3syGDRuorKz8zHaWL1/OpEmTWr9u8eijj25ddvHFF3PIIYcwbNiw1i91eeqpp3jqqacYOXIkADt37mTDhg3s2LGDSy65hMMOO+wz7Ss1B7+ZdRtth2SuqKj4XPdNR7fXdnjn3fMtLS385je/4ZlnnuGFF17gsMMOY+zYsezatetz29nTEM/Fbd49NlpEMGvWLK6++urP1P3xj398QIZ4dlePmXVr/fv356ijjuK3v/0tAPPnz2+9+i8einl/bd++naOOOorDDjuM119/nd///vety3r37t36JS/jxo3jsccea+0e2rp16163+/Wvf5158+a1fqn7n//8Z95++23GjBnDkiVL+PDDD9mxYwe//OUvO9z2vfEVv5l1TBfcftlRjzzyCNOnT+eDDz5g8ODB/OxnPwMKt1NOnz6dQw89dI9forI3EyZM4MEHH6S6upqTTjqJ0aNHty6bNm0a1dXVnHbaaSxYsICbbrqJc889l4qKCkaOHPmZ7wIoNn78eNatW8eZZ54JwOGHH8
6jjz7KaaedxuTJk6mpqeGEE07gK1/5yv6fjBw8LHOJeFhm6+k8LPPBw8Mym5nZfnHwm5klxsFvZrkdbF3DKSrF78DBb2a59OvXj+bmZod/GUUEzc3N9OvXr1Pb8V09ZpbLwIEDaWxspKmpqdxNSVq/fv0YOHBgp7bh4DezXHr37k1VVVW5m2ElkKurR9IESeslNUia2c7yvpIWZctflDQoKx8k6UNJ9dnPg6VtvpmZ7a99XvFLqgAeAL4GNAKrJC2NiNfaVLsS2BYRJ0qaAtwDTM6W/SkiakrcbjMz66A8V/xnAA0RsTEiPgYWAhOL6kwEHsmmFwPjdCAGnDAzs/2WJ/iPBTa3mW/MytqtExEtwHZg9/B1VZLWSPoPSV3z/LGZmeWW58Pd9q7ci+/n2lOdN4HjI6JZ0unAv0oaHhHvfWZlaRowDeD444/P0SQzM+uoPFf8jcBxbeYHAsXfZdZaR1IvoD+wNSI+iohmgIhYDfwJ+NwALxExNyJqI6J2wIAB+38UZmaWW57gXwUMkVQlqQ8wBVhaVGcpMDWbngQsj4iQNCD7cBhJg4EhwMbSNN3MzDpin109EdEi6RrgSaACmBcRayXNBuoiYinwEDBfUgOwlcKbA8AYYLakFuAvwPSI2PtA1WZm1qVyPcAVEcuAZUVlt7aZ3gV87rvOIuJx4PFOttHMzErIY/WYmSXGwW9mlhgHv5lZYhz8ZmaJcfCbmSXGwW9mlhgHv5lZYhz8ZmaJcfCbmSXGwW9mlhgHv5lZYhz8ZmaJcfCbmSXGwW9mlhgHv5lZYhz8ZmaJcfCbmSXGwW9mlhgHv5lZYhz8ZmaJcfCbmSXGwW9mlhgHv5lZYhz8ZmaJcfCbmSXGwW9mlphcwS9pgqT1khokzWxneV9Ji7LlL0oaVLT8eEk7JV1fmmabmVlH7TP4JVUADwAXAMOAyyUNK6p2JbAtIk4E5gD3FC2fA/y68801M7POynPFfwbQEBEbI+JjYCEwsajOROCRbHoxME6SACRdDGwE1pamyWZm1hl5gv9YYHOb+casrN06EdECbAcqJf0N8D+BOzrfVDMzK4U8wa92yiJnnTuAORGxc687kKZJqpNU19TUlKNJZmbWUb1y1GkEjmszPxDYsoc6jZJ6Af2BrcDfAZMk/S/gSOBTSbsi4v62K0fEXGAuQG1tbfGbipmZlVCe4F8FDJFUBfwZmAL8l6I6S4GpwAvAJGB5RATwld0VJN0O7CwOfTMzO7D2GfwR0SLpGuBJoAKYFxFrJc0G6iJiKfAQMF9SA4Ur/Sld2WgzM+u4PFf8RMQyYFlR2a1tpncBl+1jG7d3oH1mZlZifnLXzCwxDn4zs8Q4+M3MEuPgNzNLjIPfzCwxDn4zs8Q4+M3MEuPgNzNLjIPfzCwxuZ7cNWvPnKf/WJb9Xve1L5dlv2Y9ha/4zcwS4+A3M0uMg9/MLDEOfjOzxDj4zcwS4+A3M0uMg9/MLDEOfjOzxDj4zcwS4+A3M0uMg9/MLDEOfjOzxDj4zcwS4+A3M0uMg9/MLDEOfjOzxOQKfkkTJK2X1CBpZjvL+0palC1/UdKgrPwMSfXZz0uSLilt883MbH/tM/glVQAPABcAw4DLJQ0rqnYlsC0iTgTmAPdk5a8CtRFRA0wA/kmSv/XLzKyM8lzxnwE0RMTGiPgYWAhMLKozEXgkm14MjJOkiPggIlqy8n5AlKLRZmbWcXmC/1hgc5v5xqys3TpZ0G8HKgEk/Z2ktcArwPQ2bwRmZlYGeYJf7ZQVX7nvsU5EvBgRw4FRwCxJ/T63A2mapDpJdU1NTTmaZGZmHZUn+BuB49rMDwS27KlO1offH9jatkJErAPeB04p3kFEzI2I2oioHTBgQP7Wm5nZfssT/KuAIZKqJPUBpgBLi+osBaZm05OA5RER2Tq9ACSdAJwEbCpJy83MrEP2eYdNRLRIugZ4EqgA5kXEWkmzgbqIWAo8BMyX1EDhSn9Ktvo5wExJnwCfAv8tIt7pigMxM7N8ct1aGRHLgGVFZbe2md4FXNbOevOB+Z1so9nB5bl/LM9+z5tVnv1aj+Mnd83MEuOHqcy6C/+lYSXiK34zs8T4it/M9q5cf2mA/9roIg5+Mzt4uXurS7irx8wsMQ5+M7PEOPjNzBLj4DczS4yD38wsMb6rx7qvct5maD1bD7+byFf8ZmaJcfCbmSXGwW9mlhgHv5lZYhz8ZmaJcfCbmSXGwW9mlhgHv5lZYvwAl3Xa6DfmHtgdPld5YPdn1sP4it/MLDEOfjOzxDj4zcwS4+A3M0uMg9/MLDEOfjOzxOQKfkkTJK2X1CBpZjvL+0palC1/UdKgrPxrklZLeiX79/zSNt/MzPbXPoNfUgXwAHABMAy4XNKwompXAtsi4kRgDnBPVv4O8J8jYgQwFZhfqoabmVnH5HmA6wygISI2AkhaCEwEXmtTZyJweza9GLhfkiJiTZs6a4F+kvpGxEedbrkl64WNzWXb95mD/fCYdX95unqOBTa3mW/MytqtExEtwHag+BXyLWBNe6EvaZqkOkl1TU1NedtuZmYdkCf41U5Z7E8dScMpdP9c3d4OImJuRNRGRO2AAQNyNMnMzDoqT/A3Ase1mR8IbNlTHUm9gP7A1mx+ILAE+E5E/KmzDTYzs87JE/yrgCGSqiT1AaYAS4vqLKXw4S3AJGB5RISkI4F/B2ZFxO9K1WgzM+u4fQZ/1md/DfAksA54LCLWSpot6aKs2kNApaQG4O+B3bd8XgOcCNwiqT77+ULJj8LMzHLLNSxzRCwDlhWV3dpmehdwWTvr/QPwD51so5mZlZCf3DUzS4yD38wsMQ5+M7PEOPjNzBLj4DczS4y/bL0HOeBfem5m3ZKv+M3MEuPgNzNLjIPfzCwxDn4zs8Q4+M3MEuPgNzNLjIPfzCwxDn4zs8Q4+M3MEtPjntyd8/Qfy90EM7ODmq/4zcwS4+A3M0uMg9/MLDEOfjOzxDj4zcwS4+A3M0uMg9/MLDEOfjOzxDj4zcwSkyv4JU2QtF5Sg6SZ7SzvK2lRtvxFSYOy8kpJz0naKen+0jbdzMw6Yp/BL6kCeAC4ABgGXC5pWFG1K4FtEXEiMAe4JyvfBdwCXF+yFpuZWafkGavnDKAhIjYCSFoITARea1NnInB7Nr0YuF+SIuJ94HlJJ5auyWbl88LG5rLs98zBlWXZr/VMebp6jgU2t5lvzMrarRMRLcB2wP9TzcwOQnmCX+2URQfq7HkH0jRJdZLqmpqa8q5mZmYdkKerpxE4rs38QGDLHuo0SuoF9Ae25m1ERMwF5gLU1tbmfsM4WI1+Y265m2Bmtkd5rvhXAUMkVUnqA0wBlhbVWQpMzaYnAcsjotsHuJlZT7TPK/6IaJF0DfAkUAHMi4i1kmYDdRGxFHgImC+pgcKV/pTd60vaBPwt0EfSxcD4iHiteD9mZnZg5PoGrohYBiwrKru1zfQu4LI9rDuoE+0zM7MS85O7ZmaJcfCbmSXGwW9mlhgHv5lZYhz8ZmaJcfCbmSXGwW9mlphc9/GbWXmVa1RQ8MigPZGv+M3MEuPgN
zNLjIPfzCwxDn4zs8Q4+M3MEuPgNzNLjIPfzCwxDn4zs8Q4+M3MEuMnd81sr8r11LCfGO46vuI3M0uMg9/MLDEOfjOzxDj4zcwS4+A3M0uM7+oxs4OS7ybqOj02+Ee/MbfcTTAzOyj12OA3M+uIsn7b2XkHZj+5+vglTZC0XlKDpJntLO8raVG2/EVJg9osm5WVr5f09dI13czMOmKfwS+pAngAuAAYBlwuaVhRtSuBbRFxIjAHuCdbdxgwBRgOTAD+T7Y9MzMrkzxX/GcADRGxMSI+BhYCE4vqTAQeyaYXA+MkKStfGBEfRcT/BRqy7ZmZWZnkCf5jgc1t5huzsnbrREQLsB2ozLmumZkdQHk+3FU7ZZGzTp51kTQNmJbN7pS0Pke7jgHeyVGvJ0v9HKR+/OBz0LOO/6r/3ZG1dp+DE/KukCf4G4Hj2swPBLbsoU6jpF5Af2BrznWJiLnAft1/KakuImr3Z52eJvVzkPrxg89B6scPHTsHebp6VgFDJFVJ6kPhw9qlRXWWAlOz6UnA8oiIrHxKdtdPFTAEWLk/DTQzs9La5xV/RLRIugZ4EqgA5kXEWkmzgbqIWAo8BMyX1EDhSn9Ktu5aSY8BrwEtwIyI+EsXHYuZmeWQ6wGuiFgGLCsqu7XN9C7gsj2seydwZyfauCd+NNfnIPXjB5+D1I8fOnAOVOiRMTOzVHh0TjOzxHTL4N/XEBI9kaR5kt6W9GqbsqMlPS1pQ/bvUeVsY1eSdJyk5yStk7RW0rVZeRLnQFI/SSslvZQd/x1ZeVU2TMqGbNiUPuVua1eSVCFpjaRfZfOpHf8mSa9IqpdUl5Xt92ug2wV/ziEkeqKHKQx70dZM4NmIGAI8m833VC3A/4iIocBoYEb2e0/lHHwEnB8RpwI1wARJoykMjzInO/5tFIZP6cmuBda1mU/t+AHOi4iaNrdw7vdroNsFP/mGkOhxImIFhTum2mo7VMYjwMUHtFEHUES8GRF/yKZ3UHjxH0si5yAKdmazvbOfAM6nMEwK9ODjB5A0EPgm8M/ZvEjo+Pdiv18D3TH4PQzEX/2niHgTCsEIfKHM7TkgstFfRwIvktA5yLo56oG3gaeBPwHvZsOkQM9/LfwYuAH4NJuvJK3jh8Kb/VOSVmcjHkAHXgPdcTz+XMNAWM8k6XDgceC/R8R7hYu+NGTPwNRIOhJYAgxtr9qBbdWBIelC4O2IWC1p7O7idqr2yONv4+yI2CLpC8DTkl7vyEa64xV/rmEgEvGWpC8CZP++Xeb2dClJvSmE/oKIeCIrTuocAETEu8BvKHzWcWQ2TAr07NfC2cBFkjZR6N49n8JfAKkcPwARsSX7920Kb/5n0IHXQHcM/jxDSKSi7VAZU4F/K2NbulTWn/sQsC4i7m2zKIlzIGlAdqWPpEOBr1L4nOM5CsOkQA8+/oiYFREDI2IQhdf88oi4gkSOH0DS30g6Yvc0MB54lQ68BrrlA1ySvkHh3X73EBJd8WTwQUXSvwBjKYzE9xZwG/CvwGPA8cAbwGURUfwBcI8g6Rzgt8Ar/LWP90YK/fw9/hxIqqbwwV0FhQu2xyJitqTBFK6AjwbWAP81Ij4qX0u7XtbVc31EXJjS8WfHuiSb7QX8IiLulFTJfr4GumXwm5lZx3XHrh4zM+sEB7+ZWWIc/GZmiXHwm5klxsFvZpYYB7+ZWWIc/GZmiXHwm5kl5v8DG3PVJ1FAsxMAAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "plt.figure()\n", + "plt.hist(matched_unmatched_tracks['trackchi2'][matched_unmatched_tracks['matched'] == 1], label='matched', alpha=0.5, normed=True)\n", + "plt.hist(matched_unmatched_tracks['trackchi2'][matched_unmatched_tracks['matched'] == 0], label='not matched', alpha=0.5, normed=True)\n", + "plt.legend()" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "matched_unmatched_tracks = np.array(matched_unmatched_tracks)\n", + "matched_unmatched_tracks = matched_unmatched_tracks.astype('float32')\n", + "\n", + "#print(matched_unmatched_tracks[0])\n", + "\n", + "track = matched_unmatched_tracks[:,:-3]\n", + "\n", + "#print(track[0,:])\n", + "\n", + "chi2 = matched_unmatched_tracks[:,-2]\n", + "chi = np.zeros((chi2.shape[0],1))\n", + "chi[:,0] = chi2[:]\n", + "chi2 = chi\n", + "#print(chi2[0])\n", + "\n", + "event_id = matched_unmatched_tracks[:,-3]\n", + "\n", + "#print(event_id[0])\n", + "\n", + "truth = matched_unmatched_tracks[:,-1]\n", + "\n", + "#print(truth[0])" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0.5201008914506333\n" + ] + } + ], + "source": [ + "true_event_dic = {}\n", + "idi = event_id[0]\n", + "chi2i = chi2[0]\n", + "chi2ic = 1\n", + "num_of_tracks = event_id.shape[0]\n", + "num_right_tracks = 0\n", + "best_tr = 0\n", + "\n", + "for i in range(event_id.shape[0]):\n", + " if event_id[i] == idi:\n", + " if chi2i > chi2[i]:\n", + " chi2i = chi2[i]\n", + " best_tr = i\n", + " elif chi2i == chi2[i]:\n", + " chi2ic += 1\n", + " else:\n", + " if truth[best_tr] == 1:\n", + " num_right_tracks += chi2ic\n", + " chi2i = chi2[i]\n", + " chi2ic = 1\n", + " idi = event_id[i]\n", + "\n", + "print(num_right_tracks/num_of_tracks)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "#Reshapes the 2D-array to a 3D-array\n", + "\n", + "def reshapor(arr_orig, num_inputs=3):\n", + " timesteps = int(arr_orig.shape[1]/num_inputs)\n", + " number_examples = int(arr_orig.shape[0])\n", + " arr = np.zeros((number_examples, timesteps, num_inputs))\n", + " \n", + " for i in range(number_examples):\n", + " for t in range(timesteps):\n", + " arr[i,t,:] = arr_orig[i,num_inputs*t:num_inputs*t+num_inputs]\n", + " \n", + " return arr\n", + "\n", + "#The inverse transformation of the reshapor function (3D to 2D)\n", + "\n", + "def reshapor_inv(array_shaped):\n", + " num_inputs = array_shaped.shape[2]\n", + " timesteps = int(array_shaped.shape[1])\n", + " num_examples = int(array_shaped.shape[0])\n", + " arr = np.zeros((num_examples, timesteps*num_inputs))\n", + " \n", + " for i in range(num_examples):\n", + " for t in range(timesteps):\n", + " arr[i,num_inputs*t:num_inputs*t+num_inputs] = array_shaped[i,t,:]\n", + " \n", + " return arr" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[-0.46716718]\n" + ] + } + ], + "source": [ + "#Normalize the data advanced version with scikit learn\n", + "def correct_array_steps(arr, steps= 8, num_inputs= 3): #steps > array_steps\n", + " if arr.shape[1] != steps:\n", + " _ = np.zeros((arr.shape[0], steps, num_inputs))\n", + " _[:,:arr.shape[1],:] += arr\n", + " arr = _\n", + " return arr\n", + "\n", + "\n", + "#set the transormation based on 
+ { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[-0.46716718]\n" ] + } + ], + "source": [ + "#Normalize the data with scikit-learn MinMax scalers\n", + "def correct_array_steps(arr, steps= 8, num_inputs= 3): #pad with zeros if arr has fewer than `steps` timesteps\n", + "    if arr.shape[1] != steps:\n", + "        _ = np.zeros((arr.shape[0], steps, num_inputs))\n", + "        _[:,:arr.shape[1],:] += arr\n", + "        arr = _\n", + "    return arr\n", + "\n", + "\n", + "#set the transformation based on the training set\n", + "def set_min_max_scaler(arr, feature_range= (-.9,0.9)):\n", + "    min_max_scalor = preprocessing.MinMaxScaler(feature_range=feature_range)\n", + "    #fit_transform is called only to fit the scaler; the transformed array is discarded\n", + "    if len(arr.shape) == 3:\n", + "        arr = reshapor(min_max_scalor.fit_transform(reshapor_inv(arr))) \n", + "    else:\n", + "        arr = min_max_scalor.fit_transform(arr)\n", + "    return min_max_scalor\n", + "\n", + "\n", + "with open(\"scalor.pkl\" , \"rb\" ) as f:\n", + "    scalor = pkl.load( f )\n", + "\n", + "min_max_chi2 = preprocessing.MinMaxScaler(feature_range=(-1,1))\n", + "chi2 = min_max_chi2.fit_transform(chi2)\n", + "print(chi2[0])\n", + "\n", + "#transform data\n", + "def min_max_scaler3D(arr, min_max_scalor= scalor):\n", + "    num_inputs = arr.shape[2]\n", + "    arr = correct_array_steps(arr)\n", + "    arr = reshapor(min_max_scalor.transform(reshapor_inv(arr)), num_inputs=num_inputs)\n", + "    return arr\n", + "    \n", + "#inverse transformation\n", + "def min_max_scaler_inv3D(arr, min_max_scalor= scalor):\n", + "    num_inputs = arr.shape[2]\n", + "    arr = correct_array_steps(arr)\n", + "    arr = reshapor(min_max_scalor.inverse_transform(reshapor_inv(arr)), num_inputs=num_inputs)\n", + "    return arr" ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(109821, 8, 3)\n" ] + } + ], + "source": [ + "track = reshapor(track)\n", + "print(track.shape)" ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "track = min_max_scaler3D(track)" ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(109821, 8, 4)\n", + "[[-0.08353733  0.85633283 -0.10962403 -0.46716718]\n", + " [-0.19387332  0.88403828 -0.10405403 -0.46716718]\n", + " [-0.2766562   0.85442146 -0.06179106 -0.46716718]\n", + " [-0.24266536  0.86545245 -0.0436893  -0.46716718]\n", + " [ 0.89736453  0.03339616  0.10172874 -0.46716718]\n", + " [ 0.89590581  0.00228753  0.10719893 -0.46716718]\n", + " [ 0.8453981   0.11812349  0.09198397 -0.46716718]\n", + " [ 0.7698897   0.25082128  0.08465644 -0.46716718]]\n" ] + } + ], + "source": [ + "#Append the rescaled chi2 as a fourth feature at every timestep\n", + "track_and_chi = np.zeros((track.shape[0], track.shape[1], track.shape[2]+1))\n", + "print(track_and_chi.shape)\n", + "\n", + "track_and_chi[:,:,:3] += track[:,:,:]\n", + "\n", + "for step in range(track_and_chi.shape[1]):\n", + "    track_and_chi[:,step,3] += chi2[:,0]\n", + "    \n", + "print(track_and_chi[0])" ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[ 1. 
124.]\n" ] + } + ], + "source": [ + "truth_and_event_id = np.zeros((truth.shape[0], 2))\n", + "\n", + "truth_and_event_id[:,0] += truth[:]\n", + "truth_and_event_id[:,1] += event_id[:]\n", + "\n", + "print(truth_and_event_id[0,:])" ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(87856, 8, 4)\n", + "(21965, 8, 4)\n", + "(87856, 2)\n", + "(21965, 2)\n" ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "c:\\users\\sa_li\\anaconda3\\envs\\rnn-tf-ker\\lib\\site-packages\\sklearn\\model_selection\\_split.py:2026: FutureWarning: From version 0.21, test_size will always complement train_size unless both are specified.\n", + "  FutureWarning)\n" ] + } + ], + "source": [ + "track_and_chi_train, track_and_chi_test, truth_and_event_id_train, truth_and_event_id_test = skl.model_selection.train_test_split(track_and_chi, truth_and_event_id, shuffle=True, train_size=0.8)\n", + "\n", + "\n", + "print(track_and_chi_train.shape)\n", + "print(track_and_chi_test.shape)\n", + "print(truth_and_event_id_train.shape)\n", + "print(truth_and_event_id_test.shape)" ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "filepath = \"trained_models/keras_RNN.h5\"\n", + "model_4_to_4 = load_model(filepath)" ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [], + "source": [ + "#First four hits (x,y,z) are fed to the 4-to-4 RNN\n", + "RNN_pred_input_train = track_and_chi_train[:,:4,:3]\n", + "RNN_pred_input_test = track_and_chi_test[:,:4,:3]" ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [], + "source": [ + "#Last four hits (plus the chi2 column) are what the classifier will judge\n", + "Classifier_input_train = track_and_chi_train[:,4:,:]\n", + "Classifier_input_test = track_and_chi_test[:,4:,:]" ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [], + "source": [ + "prediction_train = reshapor(model_4_to_4.predict(RNN_pred_input_train[:,:,:]))\n", + "prediction_test = reshapor(model_4_to_4.predict(RNN_pred_input_test[:,:,:]))" ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [], + "source": [ + "#Replace the hit coordinates by their residuals to the RNN extrapolation\n", + "Classifier_input_train[:,:,:3] -= prediction_train[:,:,:]\n", + "Classifier_input_test[:,:,:3] -= prediction_test[:,:,:]" ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[-0.01692565 -0.06858504  0.07611917  0.02718735]\n", + " [-0.01577255 -0.06891568  0.07830505  0.02718735]\n", + " [-0.04065302 -0.1537843   0.09975575  0.02718735]\n", + " [-0.04712729 -0.18797903  0.10059148  0.02718735]]\n" ] + } + ], + "source": [ + "print(Classifier_input_train[0])" ] + },
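+ { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The classifier input built above is therefore a residual: the last four measured hits minus the RNN extrapolation, with the rescaled chi2 as a fourth column. A small shape check (added sketch, not part of the original run):" ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#Added sketch: the arrays above should line up as (N,4,3)/(N,4,3)/(N,4,4)\n", + "print(RNN_pred_input_train.shape) #first four hits fed to the RNN\n", + "print(prediction_train.shape) #RNN extrapolation of the last four hits\n", + "print(Classifier_input_train.shape) #residuals plus the chi2 column" ] + },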
"print(track_diff_train[0])\n", + "\n", + "track_diff_train = reshapor(min_max_diff.fit_transform(reshapor_inv(track_diff_train)))\n", + "\n", + "track_diff_test = reshapor(min_max_diff.transform(reshapor_inv(track_diff_test)))\n", + "\n", + "print(track_diff_train[0])" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [], + "source": [ + "Classifier_input_train[:,:,:3] = track_diff_train[:,:,:]\n", + "Classifier_input_test[:,:,:3] = track_diff_test[:,:,:]" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(87856, 4, 4)\n" + ] + } + ], + "source": [ + "X_train = Classifier_input_train\n", + "y_train = truth_and_event_id_train[:,0]\n", + "X_test = Classifier_input_test\n", + "y_test = truth_and_event_id_test[:,0]\n", + "\n", + "print(X_train.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "# truncate and pad input sequences\n", + "max_review_length = 4\n", + "filepath = \"trained_models/keras_model_classifier.h5\"\n", + "\n", + "callbacks = [\n", + " EarlyStopping(monitor='val_loss', patience=20, min_delta=0.0003),\n", + " ModelCheckpoint(filepath, monitor='val_loss', save_best_only=True),\n", + " History(),\n", + " TensorBoard(log_dir= filepath[:-3] + \"/logs\", histogram_freq=1, batch_size=32)\n", + "]\n", + "\n", + "#\n", + "\n", + "# create the model\n", + "model = Sequential()\n", + "#model.add(Dense(12, input_shape=(4,4)))\n", + "model.add(Bidirectional(LSTM(30, return_sequences=True, activation = 'relu'), input_shape=(4,4)))\n", + "model.add(BatchNormalization())\n", + "model.add(Bidirectional(LSTM(30, return_sequences=True, activation = 'relu')))\n", + "model.add(BatchNormalization())\n", + "model.add(Bidirectional(LSTM(30, return_sequences=False, activation = 'relu')))\n", + "model.add(BatchNormalization())\n", + "#model.add(LSTM(40, return_sequences=True, activation = 'relu')) \n", + "#model.add(Dropout(0.5))\n", + "#model.add(LSTM(4, activation = 'relu')) \n", + "#model.add(BatchNormalization())\n", + "model.add(Dense(100, activation='relu'))\n", + "model.add(Dense(1, activation='sigmoid'))\n", + "model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])\n", + "print(model.summary())\n", + "model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=2, batch_size=50, callbacks= callbacks, verbose = 1)\n", + "model = load_model(filepath)\n", + "# Final evaluation of the model\n", + "scores = model.evaluate(X_test, y_test, verbose=0)\n", + "print(\"Accuracy: %.2f%%\" % (scores[1]*100))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "pkl.dump( X_test, open( \"X_test.pkl\", \"wb\" ) )\n", + "pkl.dump( X_train, open( \"X_train.pkl\", \"wb\" ) )\n", + "pkl.dump( y_test, open( \"y_test.pkl\", \"wb\" ) )\n", + "pkl.dump( y_train, open( \"y_train.pkl\", \"wb\" ) )\n", + "pkl.dump( min_max_chi2, open( \"min_max_chi2_scaler.pkl\", \"wb\" ) )\n", + "pkl.dump( min_max_diff, open( \"min_max_diff_scaler.pkl\", \"wb\" ) )" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [], + "source": [ + "X_test = pkl.load( open( \"X_test.pkl\", \"rb\" ) )\n", + "X_train = pkl.load( open( \"X_train.pkl\", \"rb\" ) )\n", + "y_test = pkl.load( open( \"y_test.pkl\", \"rb\" ) )\n", + "y_train = pkl.load( open( \"y_train.pkl\", 
\"rb\" ) )\n", + "min_max_chi2 = pkl.load( open( \"min_max_chi2_scaler.pkl\", \"rb\" ) )\n", + "min_max_diff = pkl.load( open( \"min_max_diff_scaler.pkl\", \"rb\" ) )\n", + "\n", + "filepath = \"trained_models/RNN_Classifier.h5\"\n", + "RNN_classifier = load_model(filepath)" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "metadata": {}, + "outputs": [], + "source": [ + "# fit model no training data\n", + "\n", + "y_train_neg = y_train*(-1) + 1\n", + "y_test_neg = y_test*(-1) + 1\n", + " \n", + "Y_train = np.zeros((y_train.shape[0], 2))\n", + "Y_train[:,1] = y_train[:]\n", + "Y_train[:,0] = y_train_neg[:]\n", + "\n", + "X_train = reshapor_inv(X_train)\n", + "X_test = reshapor_inv(X_test)\n", + "\n", + "modelx = XGBClassifier(max_depth=3, n_estimators=100, learning_rate=0.05).fit(X_train, y_train, verbose = 1)\n", + "\n", + "predictions = modelx.predict_proba(X_test)\n", + "with open(\"trained_models/XGB_Classifier.xgb\" , \"wb\" ) as f:\n", + " pkl.dump(modelx , f )" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": {}, + "outputs": [], + "source": [ + "predictions_true = predictions[:, 0]\n", + "predictions_false = predictions[:, 1]" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "metadata": {}, + "outputs": [], + "source": [ + "cut = 0.5\n", + "predictions_hard = (predictions_true > cut).astype(int)" + ] + }, + { + "cell_type": "code", + "execution_count": 36, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "c:\\users\\sa_li\\anaconda3\\envs\\rnn-tf-ker\\lib\\site-packages\\matplotlib\\axes\\_axes.py:6462: UserWarning: The 'normed' kwarg is deprecated, and has been replaced by the 'density' kwarg.\n", + " warnings.warn(\"The 'normed' kwarg is deprecated, and has been \"\n" + ] + }, + { + "data": { + "text/plain": [ + "Text(0.5,0,'cut')" + ] + }, + "execution_count": 36, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAW8AAAEKCAYAAADdBdT9AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAADvxJREFUeJzt3X+MpVVhxvHncReCC1gtOzYWsCOJa7SbVOgNXUqiLrCGUgL/GAMNNRjbSUxqdSttIP2D/khjf6hbm5i2I1Btq1iloBvir41dSm3Y1bv82LKsEATEFdq9tAVBUvnh0z/upZ2duTP33N373jvnzveTTJjZe+bdJyfDM++ee973dRIBAOryskkHAAAMj/IGgApR3gBQIcobACpEeQNAhShvAKgQ5Q0AFaK8AaBClDcAVGh9EwfduHFjZmdnmzg0AEylffv2PZFkpnR8I+U9OzurdrvdxKEBYCrZ/u4w41k2AYAKUd4AUCHKGwAqRHkDQIUobwCoEOUNABWivAGgQpQ3AFSI8gaACjVyheWx2LHrgaJx27dtajgJAKxenHkDQIUobwCoEOUNABWivAGgQpQ3AFSoqLxtb7d9wPa9tm+0fULTwQAAyxtY3rZPlfSbklpJNktaJ+mypoMBAJZXumyyXtLLba+XtEHSY81FAgAMMrC8k3xf0oclPSrpcUlPJfna4nG252y3bbc7nc7okwIA/k/JssmrJF0q6XWSflrSibavWDwuyXySVpLWzEzxMzQBAEehZNnkAkkPJ+kkeV7SzZJ+sdlYAICVlJT3o5K22N5g25LOl3Sw2VgAgJWUrHnvlXSTpDsl/Vvve+YbzgUAWEHRXQWTXCvp2oazAAAKcYUlAFSI8gaAClHeAFAhyhsAKkR5A0CFKG8AqBDlDQAVorwBoEKUNwBUiPIGgApR3gBQIcobACpEeQNAhShvAKgQ5Q0AFSp5huUbbN+94OMHtj8wjnAAgP4GPowhyf2S3ixJttdJ+r6kWxrOBQBYwbDLJudL+k6S7zYRBgBQZtjyvkzSjU0EAQCUKy5v28dLukTS55d5fc5223a70+mMKh8AoI9hzrx/SdKdSf6j34tJ5pO0krRmZmZGkw4A0Ncw5X25WDIBgFWhqLxtb5C0TdLNzcYBAJQYuFVQkpI8K+mUhrMAAApxhSUAVIjyBoAKUd4AUCHKGwAqRHkDQIUobwCoEOUNABWivAGgQpQ3AFSI8gaAClHeAFAhyhsAKkR5A0CFKG8AqBDlDQAVorwBoEKlT9J5pe2bbH/b9kHb5zQdDACwvKIn6Uj6mKSvJHlH7ynyGxrMBAAYYGB5236FpLdIulKSkjwn6blmYwEAVlKybHKGpI6kv7F9l+3rbJ/YcC4AwApKynu9pLMk/WWSMyX9UNLViwfZnrPdtt3udDojjgkAWKikvA9JOpRkb+/rm9Qt8yMkmU/SStKamZkZZUYAwCIDyzvJv0v6nu039P7ofEn3NZoKALCi0t0m75P06d5Ok4ckvbu5SACAQYrKO8ndkloNZwEAFOIKSwCoEOUNABWivAGgQpQ3AFSI8gaAClHeAFAhyhsAKkR5A0CFKG8AqBDlDQAVorwBoEKUNwBUiPIGgApR3gBQIcobACpEeQNAhYoexmD7EUlPS3pR0gtJeDADAExQ6WPQJGlrkicaSwIAKMayCQBUqLS8I+lrtvfZnus3wPac7bbtdqfTGV1CAMASpcsm5yZ5zParJe2y/e0kty8ckGRe0rwktVqtjDjnEjt2PVA0bvu2TQ0nAYDxKzrzTvJY77+HJd0i6ewmQwEAVjawvG2faPvklz6X9HZJ9zYdDACwvJJlk5+SdIvtl8Z/JslXGk0FAFjRwPJO8pCknxtDFgBAoWH2eQPA2rT7Q2Xjtl7TbI4F2OcNABWivAGgQpQ3AFSI8gaAClHeAFAhyhsAKkR5A0CFKG8AqBDlDQAVorwBoEKUNwBUiPIGgApR3gBQIcobACpUXN6219m+y/atTQYCAAw2zJn3+yUdbCoIAKBcUXnbPk3SL0u6rtk4AIASpWfefy7pdyT9uMEsAIBCJU+Pv1jS4ST7Boybs9223e50OiMLCABYquTM+1xJl9h+RNJnJZ1n++8XD0oyn6SVpDUzMzPimACAhQaWd5JrkpyWZFbSZZL+KckVjScDACyLfd4AUKH1wwxOcpuk2xpJAgAoxpk3AFSI8gaAClHeAFAhyhsAKkR5A0CFKG8AqBDlDQAVorwBoEKUNwBUiPIGgApR3gBQIcobACpEeQNAhShvAKgQ5Q0AFaK8AaBCJQ8gPsH2N23fY/uA7d8fRzAAwPJKnqTzI0nnJXnG9nGSvmH7y0n2NJwNALCMgeWdJJKe6X15XO8jTYaahB27Higat33bpoaTAMBgRWvettfZvlvSYUm7kuxtNhYAYCVF5Z3kxSRvlnSapLNtb148xvac7bbtdqfTGXVOAMACQ+02SfKkuk+Pv7DPa/NJWklaMzMzI4oHAOhn4Jq37RlJzyd50vbLJV0g6U8aTwYATdv9oUknOGolu01eI+lTttepe6b+uSS3NhsLALCSkt0m+yWdOYYsAIBCJWfeVSvdAggANeHyeACoEOUNABWivAGgQpQ3AFSI8gaAClHeAFAhyhsAKkR5A0CFKG8AqBDlDQAVmvrL4wGsQRXfLbAUZ94AUCHKGwAqRHkDQIUobwCo0MDytn267d22D9o+YPv94wgGAFheyW6TFyR9MMmdtk+WtM/2riT3NZwNwFowzM6Qrdc0l6MyA8+8kzye5M7e509LOijp1KaDAQCWN9Sat+1ZdZ9nubfPa3O227bbnU5nNOkAAH0Vl7ftkyT9o6QPJPnB4teTzCdpJWnNzMyMMiMAYJGi8rZ9nLrF/ekkNzcbCQAwyMA3LG1b0vWSDib5aPORAGAZa+Cy91IlZ97nSvpVSefZvrv3cVHDuQAAKxh45p3kG5I8hiwAgEJcYQkAFeKWsA3aseuBonHbt21qOAmAacOZNwBUiPIGgApR3gBQIcobACrEG5YAmsEFNY2ivIdUuoMEAJrEsgkAVIjyBoAKsWxSES76AfASzrwBoEKceQMYDrtIVgXOvAGgQpQ3AFSo5Ek6N0i6WNLhJJubj4RjxRubwPQrOfP+pKQLG84BABhCyZN0brc923wUjBtn6DgCb0RWhd0mwLSjlKfSyN6wtD1nu2273el0RnVYAEAfIzvzTjIvaV6SWq1WRnXctYCbXa1CpWerW6+Z3N+NNY2tggBQoZKtgjdKepukjbYPSbo2yfVNBdry6PxIj7fntXMjPR4ArAYlu00uH0cQDFb6i41fWGM0yeUVrGksmwBAhdgq2KBJnSmPeulJ+vCIjwfgWFHePSxJAKgJ5Y2RGWbL42q/avOOh/6zaNw5Z5zScBKgP8obIzPccs0aW4ph7zZGbOrLe/Trv1iVKEesMVNf3jh2pcshW5r4yylloC/KGxNxx/VXFY1jTRnoj/IeUhPLMCzt1Is3NjEplDcGmuQvF8oR6I8rLAGgQpx5A5XiXy
VrG+UNjEFp0QKlKG9MBcpxedM0N/wr4v+x5g0AFaK8AaBCRcsmti+U9DFJ6yRdl+SPG00FAH2Megmo5mWYksegrZP0cUnbJB2S9C3bO5Pc13Q4AGjSyH8ZbB3p4VZUsmxytqQHkzyU5DlJn5V0abOxAAArKSnvUyV9b8HXh3p/BgCYkJI1b/f5sywZZM9JeukxM8/Yvv9Ygq1yGyU9MekQqwxzciTmY6npn5Nf+8iw37FwTn5mmG8sKe9Dkk5f8PVpkh5bPCjJvKQ1cYcl2+0krUnnWE2YkyMxH0sxJ0sdy5yULJt8S9Lrbb/O9vGSLpO082j+MgDAaAw8807ygu3fkPRVdbcK3pDkQOPJAADLKtrnneRLkr7UcJaarInloSExJ0diPpZiTpY66jlxsuS9RwDAKsfl8QBQIcp7GbYvtH2/7QdtX93n9d+yfZ/t/ba/bnuobT41GjQnC8a9w3ZsT/3OgpI5sf3O3s/KAdufGXfGcSv4f+e1tnfbvqv3/89Fk8g5LrZvsH3Y9r3LvG7bf9Gbr/22zyo6cBI+Fn2o+8bsdySdIel4SfdIetOiMVslbeh9/l5J/zDp3JOek964kyXdLmmPpNakc096TiS9XtJdkl7V+/rVk869CuZkXtJ7e5+/SdIjk87d8Jy8RdJZku5d5vWLJH1Z3WtqtkjaW3Jczrz7G3hLgCS7kzzb+3KPuvvfp1npbRL+UNKfSvqfcYabkJI5+XVJH0/y35KU5PCYM45byZxE0it6n/+E+lw3Mk2S3C7pv1YYcqmkv03XHkmvtP2aQcelvPsb9pYA71H3N+c0Gzgnts+UdHqSW8cZbIJKfk42Sdpk+19t7+ndoXOalczJ70m6wvYhdXexvW880Vato7oFCU/S6a/olgCSZPsKSS1Jb2000eStOCe2XyZph6QrxxVoFSj5OVmv7tLJ29T919m/2N6c5MmGs01KyZxcLumTST5i+xxJf9ebkx83H29VKu6bhTjz7q/olgC2L5D0u5IuSfKjMWWblEFzcrKkzZJus/2Iumt3O6f8TcuSn5NDkr6Y5PkkD0u6X90yn1Ylc/IeSZ+TpCR3SDpB3Xt8rFVFfbMY5d3fwFsC9JYI/lrd4p72dUxpwJwkeSrJxiSzSWbVfR/gkiTtycQdi5JbR3xB3Te3ZXujussoD4015XiVzMmjks6XJNtvVLe8O2NNubrslPSu3q6TLZKeSvL4oG9i2aSPLHNLANt/IKmdZKekP5N0kqTP25akR5NcMrHQDSuckzWlcE6+Kunttu+T9KKk304yPU8EXqRwTj4o6RO2t6u7PHBletsuppHtG9VdNtvYW+e/VtJxkpTkr9Rd979I0oOSnpX07qLjTvGcAcDUYtkEACpEeQNAhShvAKgQ5Q0AFaK8AaBClDfWNNuztn9l0jmAYVHeWOtmJVHeqA77vDGVbL9L0lXqXgSyX90LZG5NclPv9WeSnGR7j6Q3SnpY0qeS7JhUZmAYXGGJqWP7Z9W958y5SZ6w/ZOSPrrM8KslXZXk4rEFBEaAZRNMo/Mk3ZTkCUlKstK9lIEqUd6YRtbSW2q+oN7Pu7s3ozl+3KGAUaK8MY2+Lumdtk+RpN6yySOSfr73+qXq3RhI0tPq3s4WqArljamT5ICkP5L0z7bvUXe9+xOS3mr7m5J+QdIPe8P3S3rB9j29u9wBVWC3CQBUiDNvAKgQ5Q0AFaK8AaBClDcAVIjyBoAKUd4AUCHKGwAqRHkDQIX+F9QFrBAYzD6gAAAAAElFTkSuQmCC\n", + "text/plain": [ + "
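+ { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The hard cut can be summarized in one confusion matrix. This cell is an added sketch (not part of the original run); it reuses `y_test_neg` and `predictions_hard` from above, where label 1 means 'not matched' in this coding:" ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#Added sketch: confusion matrix at cut = 0.5 (positive class = 'not matched')\n", + "from sklearn.metrics import confusion_matrix\n", + "\n", + "tn, fp, fn, tp = confusion_matrix(y_test_neg, predictions_hard).ravel()\n", + "print('true-track efficiency:', tn / (tn + fp)) #matched tracks kept\n", + "print('fake-track rejection: ', tp / (tp + fn)) #unmatched tracks removed" ] + },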
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "n_bins = 20\n", + "\n", + "plt.figure()\n", + "n, bins, patches = plt.hist(predictions_true[y_test == 1], bins=30, alpha=0.5, normed=True)\n", + "plt.hist(predictions_true[y_test == 0], bins=bins, alpha=0.5, normed=True)\n", + "plt.xlabel('cut')" + ] + }, + { + "cell_type": "code", + "execution_count": 44, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "ROC AUC: 0.8754601565187058\n" + ] + } + ], + "source": [ + "fpr, tpr, _ = skl.metrics.roc_curve(y_test_neg, predictions_true, pos_label=1)\n", + "roc_auc = skl.metrics.roc_auc_score(y_true=y_test, y_score=predictions_false)\n", + "print(\"ROC AUC:\", roc_auc)" + ] + }, + { + "cell_type": "code", + "execution_count": 38, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Text(0,0.5,'true positives')" + ] + }, + "execution_count": 38, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYUAAAEKCAYAAAD9xUlFAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzt3Xl8XXWd//HXp9n3NE3XdN+AsgnEAoPKjqAI4qCCMooy4qjguDHiMo6DMw+3ccGRn1CVwXFDQZGqRUYEFMFCW5a2lFJaaGnapk3aZl9v8vn9cU7DJSTNbcnJuTf3/Xw87iPnnvu9535Ok57P/X7PdzF3R0REBGBC3AGIiEj6UFIQEZEBSgoiIjJASUFERAYoKYiIyAAlBRERGaCkICIiA5QURERkgJKCiIgMyI07gENVXV3tc+fOjTsMEZGMsmbNmkZ3nzxSuYxLCnPnzmX16tVxhyEiklHMbFsq5dR8JCIiA5QURERkgJKCiIgMUFIQEZEBSgoiIjIgsqRgZrea2R4zWz/M62Zm3zGzzWa21sxOjCoWERFJTZQ1hduA8w/y+gXAovBxNfC9CGMREZEURDZOwd3/YmZzD1LkYuB/PVgPdKWZVZrZdHffFVVMIiLpor/fae9J0N7dR3tPgu7efnr6+unq7aOzp4+Onj46ehJ09h7Y7uPsI6dw/KzKSOOKc/BaDbA96XlduO8VScHMriaoTTB79uwxCU5EZDB3p7O3j5bOBK1dvbR2J2jvTtDWlaDtwHZ3grbuvqTtxMu227qC5+09fYf8+VPKCsZ1UrAh9vlQBd19GbAMoLa2dsgyIiKp6urto7mzl+bOXpo6DvzsGdj3sv2dvbSEr7d0JejrH/kSlJdjlBbkUlqYS0l+LmWFuVSV5DOrqpiyglxKCnKD18PtkoIcCnJzyM81CnNzKMrPoTg/l+L8A9s5FObmMGHCUJfN0RVnUqgDZiU9nwnsjCkWEclALV29NLZ209KVePlFvSO4mB+4uLd09tLU2TPwvDvRP+wxzaCiKI+Kojwqi/IoL8pjdlUxFUW5VBTlUVaYR3lhHqWFuZQNuvAnX+AzVZxJYTlwjZndDpwMNOt+gkj26k70Ud/cxc6mLnY2dbKruZOmjl72dfQETS49LzXLHGiyae1ODHu8kvyc4OJenE9FUS7zqkuoLMqnojjvpYt+8YGLf35YNo+ygtwx+UaeriJLCmb2c+AMoNrM6oB/A/IA3P1mYAXwJmAz0AG8L6pYRCReB26q7mjqZPOeNp7Z1UJ7dx9723uob+5k+75Odrd24YNaZooPXNiL8igpCL6p11QWUpIffEOfXlHI5LKCsEz+wIW+vDCP/FwNwzocUfY+unyE1x34SFSfLyLRc3eaOnrZ09rN7pYu9rR2s6e1iz0tyT+7qW/pomeIJpu5k4qZUl7IaQurmVVVRE1l8JhRWcS0ikIK8zK3GSZTZdzU2SISvf5+Z297T3Bhb+2moWXQRb+1mz0t3TS0dtPT98qLfVlBLpPLC5haVsgJsyuZVl7IpNJ8ygrzmFicz4mzK6kqySc3R9/m042SgkiWcXeaO3vZ0dTJ7pYudocX/CdebKKrt4+dzZ3sauoiMUQvm4qiPKaWFzClrJCT55UMXPinhPsOvFaUr2/4mUpJQWSc6e939nf0BBf71i52N3exfX8Hu5q62Fjfyra97UP2kZ9Ukk+fO6cvnsyM44qYVl7IlLKCgQv+5LICNedkASUFkQzh7nQn+tnf0UNDaze7W7pZW9dEa1eC+uYudie15ff2vfxbfs4EY2pZAYumlrF0XhUzJwZt91MrCplWXkh1aYFuzAqgpCCSdrp6+3hmVwubdrfys8e2s6+9m+aOXtp7+oYdOLVoSilTyws5eX4JU8sLmVpWwNTyQqaE3/anVRSSp/Z7SYGSgkgM2ruD7pl1+zuo29/J9n3Bzxca29m8p+1l7fkzJxZxyQk1lBbmUpyfS3lhLlPDG7czKouYWlaY1f3qZXQpKYhEwN1paOvmhYZ2Xmhs55ldLWzd20FHT4KdTV3saOp8Wfn83AnMnFjErInFnH3UFI6tqWTJ9HJmVBaqh46MKSUFkcPgHnTZrNvfyd62bjbWt/Lw5kZ6+/pp6uhlV3MXbUmjbQ/MhTOjsoiT51Uxs6qYBZNLmFVVzMyJRVSXFOjbvqQFJQWRYbg7Da3dbNvXwdbGdrbubWfr3g627W1nW2PHK6ZYmFSST2VxHoumlHHqgknMry5h3uRS5leXMKOyiBxd9CUDKCmIECSATbvbeGzrviABNLbzVF0zjW3dA2VyJhizJhYxZ1IJJ82eyJxJJcyuKqa6rIDZVcVUleTHeAYio0NJQbJKf7+zo6mTLQ1tbN7TxpaGdrbsaWNjfQstXcE3/8K8CcydVMJpCyexaEop8yeXsmR6OTUTi9SDR8Y9JQUZl7p6+3hqexOPv9jE5j1tPLenlZbOXna3dNPZ+9LArYnFeSycUspbjp/BkdP
KOGX+JBZOKcVMTT2SnZQUZFzY2dTJI1v2srauiTXb9rN5T9vAnPlmcNzMSiqK8njD4skcOa2cRVNLWTC5VE0+IoMoKUhG6u93nm9s46/PNfL9h14Y6OJZkDuBpfOqePfJczh1wSSOnFZGTWWRevaIpEhJQTLC/vYe1mzbz5Pbm3hyexNPhdM7ABTl5fDB0+dz1hFTeO3cKiUAkVdBSUHSVmNbNz/86ws8+GwDG+tbcA96AB0xtYy3HD+D18yq5IRZlSyYXKpEIDJKlBQkbST6+vn14zt4Yvt+7t+4h90tQXfQU+ZX8fFzFnPK/EkcW1OhaZlFIqSkILFJ9PWzsb6VJ7Y38dfnGnhk896BAWHHzazg2JpKLjmhhjcfNz3mSEWyh5KCjKnmjl5++tg2HtrUyFN1TXSE8/pXlxbwxmOmce6SqZx95BTN9yMSEyUFidzetm7W1jVzz/pd/HJ13cD+9546hxPnTOTE2ROZObFIYwNE0oCSgkTC3Vn+1E7+sL6ee9bXD+y/fOlsLj2phhNnT1QSEElDSgoyajp6EqzZtp8/btjNQ8818kJjOwCXnFDD6xdVc+YRU5iowWIiaU1JQV6Vtu4E//2n5/jzpgY21rcO7D9iahn/8dZjuOSEGkoK9Gcmkin0v1UOy/72Hm57ZCs/fXQbjW09FORO4J/PXsTsqmLOPHKKpo8QyVBKCnJInt7ZzC1/fp7frt2JO5y2cBI3n7OY2rlVcYcmIqNASUFS8mx9K7f8ZQu/fnwHAJcvncW7T57DMTUVMUcmIqNJSUGGtaWhjT+sr+dnj77IjqZOivJyeN9pc3n/afOYVVUcd3giEgElBXkZd+epumZuemAzf9ywGwgmnHv9omq+/c7XMKm0IOYIRSRKSgoCQENrN9/4v2d56LlGdjR1kp87gQ+fsYCLX1PDEdPK4g5PRMaIkkIWc3dWb9vPT1Zu43drd9HX77x27kQ+ePp8LjxuhnoQiWShSJOCmZ0P3AjkAD9w968Men028COgMixzvbuviDImCeYfumPNdu5cU8fG+lbKCnK54uTZXHHKHBZNVa1AJJtFlhTMLAe4CTgXqANWmdlyd9+QVOzzwC/d/XtmtgRYAcyNKqZst2l3K3euqWPZX54H4OgZ5Xzlbcdy0WtmUJyvSqOIRFtTWApsdvfnAczsduBiIDkpOFAeblcAOyOMJyu5Ow9uauAzv1pHfUsXOROMc46aygdPn0/tHM0/JCIvF2VSqAG2Jz2vA04eVOaLwP+Z2bVACXBOhPFkla7ePu56Ygc/e/RF1u1oprq0gKteN48Pnj6fKWWFcYcnImkqyqQw1FdQH/T8cuA2d/+GmZ0K/NjMjnH3/pcdyOxq4GqA2bNnRxLseLGjqZOfrtzG/3twCwCLppTyH289hktPmklhnlYsE5GDizIp1AGzkp7P5JXNQ1cB5wO4+9/MrBCoBvYkF3L3ZcAygNra2sGJRQiaie5cU8d/rniGpo5els6t4u21M7n0pJlqIhKRlEWZFFYBi8xsHrADuAx416AyLwJnA7eZ2VFAIdAQYUzjTneij4c2NfLNP25iw64WAO7559dz1PTyEd4pIvJKkSUFd0+Y2TXAvQTdTW9196fN7AZgtbsvBz4JfN/MPk7QtHSlu6smkKKVz+/l/betoqOnj5rKIr508dG8vXaWmolE5LBF2g8xHHOwYtC+LyRtbwBOizKG8ag70ce/3LmWu5/cSV6O8dW/P5a3nlBDQa6SgYi8OuqcnkF6Ev3cv3E33/rjczy7u5Ur/24unzxvMWWFeXGHJiLjhJJChnjouQY+e9c6tu/rZHJZAd991wlceNyMuMMSkXFGSSHNuTs3/uk5vn3fc5jBf15yDO+snUVuzoS4QxORcUhJIY3taOrkqttWsbG+lRkVhdz3ydM1HYWIREpXmDS1r72H9/3PY2za3cZHzlzAtWctUq8iEYmckkKacXfuWF3H9b9eS7/Dv164hKteNy/usEQkSygppJlP3vEUv358B7VzJvLZNx/FibMnxh2SiGQRJYU08tNHt/Hrx3eweGopv/zgqUyYoOkpRGRsKSmkiRca27nhtxs4ano5v/qQEoKIxEP9GtNAR0+CD/54NUX5Odx6Za16GIlIbHT1idmLezt4838/RGtXgluvrGV6RVHcIYlIFlNNIUZ9/c5bvvtXWrsSXPfGIzjryKlxhyQiWU5JIUbvvfUxmjt7+afTF/CRMxfGHY6IiJJCXG5/7EX+urkRgE+ff0TM0YiIBJQUYvCbJ3Zw/a/XcdrCSTxy/VlaGU1E0oZuNI8hd2f5Uzv51B1Pccr8Kn743tdq6goRSStKCmOkvrmLz961jvs37qGqJJ9l76lVQhCRtKOkMAZ2NnXyd1+5H4APvmE+nzhvsVZJE5G0pKQQsV3NnVxw40MA3PIPJ/HGo6fFHJGIyPCUFCK0tbGdM/7rQQpyJ3DdG49QQhCRtKekEJG27gSX3vw3AL53xYkamCYiGWHELqlmtsDMCsLtM8zso2ZWGX1omcvd+fxd62hs6+aT5y5WQhCRjJHKOIVfAX1mthD4ITAP+FmkUWW4X6zazm+e3MnHzlnEtWcvijscEZGUpdJ81O/uCTO7BPi2u/+3mT0RdWCZ6sb7nuNb923i9YuqufYsJQQRySyp1BR6zexy4L3A78J9edGFlLnW1jXxrfs2UVWSz81XnESO1kQQkQyTSlJ4H3Aq8J/u/oKZzQN+Em1Ymaev33n/basBWH7NaZQU6B6+iGSeEa9c7r7BzD4NzA6fvwB8JerAMs2n7niKxrZuPnLmAmZOLI47HBGRw5JK76O3AE8Cfwifv8bMlkcdWCbZtLuVu57YwfGzKrnujUfGHY6IyGFLpfnoi8BSoAnA3Z8k6IEkBN1Pb/zTc5jB999zUtzhiIi8KqkkhYS7Nw/a51EEk4l+/th2fr92F5ecUMOUssK4wxEReVVSuRu63szeBeSY2SLgo8Aj0YaVGfr7nc/etY751SV84+3Hxx2OiMirlkpN4VrgaKCbYNBaM/CxVA5uZueb2bNmttnMrh+mzDvMbIOZPW1mGTUo7vsPPQ/ABcdO00I5IjIupFJTOMLdPwd87lAObGY5wE3AuUAdsMrMlrv7hqQyi4DPAKe5+34zm3IonxGnlq5evnzPRqpLC/jkuVpOU0TGh1RqCt80s41m9iUzO/oQjr0U2Ozuz7t7D3A7cPGgMh8AbnL3/QDuvucQjh+rz/xqHQCfOm8xEzRITUTGiRGTgrufCZwBNADLzGydmX0+hWPXANuTnteF+5ItBhab2cNmttLMzh/qQGZ2tZmtNrPVDQ0NKXx0tJ7Z1cLv1+2iujSfd752VtzhiIiMmlRqCrh7vbt/B/gngjELX0jhbUN9fR7caykXWESQdC4HfjDUDKzuvszda929dvLkyamEHKkv/S5oAfvJP56sewkiMq6kMnjtKDP7opmtB75L0PNoZgrHrgOSv0bPBHYOUeZud+8NR0o/S5Ak0tb6Hc08smUvHztnEUdOK487HBGRUZVKTeF/gP3Aee5+urt/L8
W2/1XAIjObZ2b5wGXA4JHQvwHOBDCzaoLmpOdTjn6MuTvv+v5KJhbn8b7TNH5PRMafVOY+OuVwDhxOt30NcC+QA9zq7k+b2Q3AandfHr52npltAPqA69x97+F83lhYsa6elq4E1561kIoiTRQrIuOPuQ89ONnMfunu7zCzdbz8XoAB7u7HjUWAg9XW1vrq1avH/HP7+535n10BwBP/ei4TS/LHPAYRkcNlZmvcvXakcgerKfxz+PPC0Qkpsz2yJajAfO5NRykhiMi4New9BXffFW5+2N23JT+AD49NeOnjy/c8Q3F+DpefPDvuUEREIpPKjeZzh9h3wWgHks46e/p4emcLFx0/g1ItniMi49iwVzgz+xBBjWC+ma1NeqkMeDjqwNLJD8I5jk6aMzHmSEREonWwr70/A+4BvgwkT2bX6u77Io0qzdy3cQ+lBbm87cRUhmeIiGSugzUfubtvBT4CtCY9MLOq6ENLD/dt2M1T25u49qyF5GiOIxEZ50aqKVwIrCHokpp8RXRgfoRxpY071mynKC+H95w6N+5QREQiN2xScPcLw59ZO3R3a2M7/7dhNx94/XyK8nPiDkdEJHKpzH10mpmVhNtXmNk3zSwr+mX+8K8v4A5XvS5r86KIZJlUuqR+D+gws+OBfwG2AT+ONKo08eOV2wCYWq61l0UkO6SSFBIezIVxMXCju99I0C11XLv7yR0AXHT8jJgjEREZO6mMxGo1s88A/wC8Plxmc9zPBnf/xmAi2K/8/bExRyIiMnZSqSm8E+gG3u/u9QSrp3090qhi1pPo54GNe3jbiTUU52sEs4hkj1SW46wHfgpUmNmFQJe7/2/kkcXo4c2NtHQluPC46XGHIiIyplLpffQO4DHg7cA7gEfN7NKoA4vTd+5/jrLCXF63MP6lP0VExlIqbSOfA157YLU1M5sM3AfcGWVgcWnu6GVtXTNvWFRNfm5KS1iLiIwbqVz1JgxafnNviu/LSA9u2kNfv/OhMxbGHYqIyJhLpabwBzO7F/h5+PydwIroQorXoy/soygvRzOiikhWSmWN5uvM7G3A6wjmP1rm7ndFHlkM3J0V63Zx8vwqTX4nIlkp1f6WjwB9QD+wKrpw4vW3LXtp6uhlyfTyuEMREYlFKr2P/pGg99ElwKXASjN7f9SBxeGhzY0AvKN2VsyRiIjEI5WawnXACe6+F8DMJhHUHG6NMrA4PLBxD0vnVjG3uiTuUEREYpFKL6I6wsV1Qq3A9mjCic/2fR1srG/lvKOnxh2KiEhsUqkp7CAYsHY3weI6FwOPmdknANz9mxHGN2YeDpuOXreoOuZIRETik0pS2BI+Drg7/DmuZkr986YGplcUcsTUcXVaIiKHJJUuqf8+FoHEqaMnwT3r63ln7SzM1BVVRLLXuB2ZfCh+taYOgGNq1BVVRLKbkgLwxPYmivJyePfJc+IORUQkVkoKwOqt+znjiMlM0ChmEclyqQxeW2xmfzKz9eHz48zs89GHNjb2tnXz4r4OjqmpiDsUEZHYpVJT+D7wGaAXwN3XApelcnAzO9/MnjWzzWZ2/UHKXWpmbma1qRx3ND3wbAOAeh2JiJBaUih298cG7UuM9KZwLeebgAuAJcDlZrZkiHJlwEeBR1OIZdTVN3cCcMqCSXF8vIhIWkklKTSa2QKCgWuEq67tSuF9S4HN7v68u/cAtxMMfBvsS8DXgK7UQh5dO5o6KcnPobRAazGLiKSSFD4C3AIcaWY7gI8BH0rhfTW8fDqMunDfADM7AZjl7r9LLdzR95dNjZw0tyqujxcRSSupDF57HjjHzEoIVmFrHek9oaG68vjAi2YTgG8BV454ILOrgasBZs+eneLHj6y9O8GOpk7edfLoHVNEJJONmBTM7AuDngPg7jeM8NY6IHkO6pnAzqTnZcAxwIPhMacBy83sIndfnXwgd18GLAOora11Rskf1tcDMK28cLQOKSKS0VJpPmpPevQR3Diem8L7VgGLzGyemeUT9FhafuBFd29292p3n+vuc4GVwCsSQpTqW4LbGKct1CR4IiKQWvPRN5Kfm9l/kXRxP8j7EmZ2DXAvkAPc6u5Pm9kNwGp3H/EYUduypw2AqeUFMUciIpIeDqfLTTEwP5WC7r4CWDFo3xeGKXvGYcTyqtQ1dTK/ukST4ImIhFK5p7COl24Q5wCTgZHuJ6S93r5+Ht+2n3OO0qI6IiIHpFJTuDBpOwHsdvcRB6+lu11NXST6naNnaGZUEZEDDpoUwm6jv3f3Y8YonjGzaus+AE6cMzHmSERE0sdBex+5ez/wlJmNu478G+tbmGBw6nxNbyEickAqzUfTgafN7DGCbqkAuPtFkUU1Bva0djO9okjTZYuIJEklKYzL5Tj3d/QysSQv7jBERNJKKknhTe7+6eQdZvZV4M/RhDQ29rR0MXNicdxhiIiklVRGNJ87xL4LRjuQsba3vYdJJflxhyEiklaGrSmY2YeADwPzzWxt0ktlwMNRBxalRF8/jW3dGsksIjLIwZqPfgbcA3wZSF41rdXd90UaVcTq9nfiDpPLlBRERJINmxTcvRloBi4fu3DGxuZwzqPyIt1oFhFJlso9hXFn696gZ+2xNRUxRyIikl6yMinsbOqiMG8C8yeXxh2KiEhaycqk8FRdk+4niIgMISuTwppt+ynKy4k7DBGRtJN1SaGvP5gFfHZVScyRiIikn6xLCs/taQXglPlVMUciIpJ+si4p1DcH6zIfP6sy5khERNJP1iWFrt4+AEryD2clUhGR8S3rksKWhmCMQmFe1p26iMiIsu7KWBj2OppUqi6pIiKDZV1S6OgOlpdWl1QRkVfKuqTw7O6g91FejlZcExEZLOuSQs4EozBvAmZKCiIig2VdUtjf0csCzXkkIjKkrEsK3b19lBSoO6qIyFCyLilsaWgb6IEkIiIvl3VJobwoj/3tPXGHISKSlrIuKST6nAWTNRmeiMhQsi4p9Pb1k5eTdactIpKSrLs69iT6yc/NutMWEUlJpFdHMzvfzJ41s81mdv0Qr3/CzDaY2Voz+5OZzYkyHoC27gTF+brRLCIylMiSgpnlADcBFwBLgMvNbMmgYk8Ate5+HHAn8LWo4gFI9PXTneintCAvyo8REclYUdYUlgKb3f15d+8BbgcuTi7g7g+4e0f4dCUwM8J4aO8Jp80uUE1BRGQoUSaFGmB70vO6cN9wrgLuGeoFM7vazFab2eqGhobDDqi5oxeAiiLVFEREhhJlUhhqciEfsqDZFUAt8PWhXnf3Ze5e6+61kydPPuyAWruDpFCupCAiMqQo53uoA2YlPZ8J7BxcyMzOAT4HnO7u3RHGw962YNBaWaGmuRARGUqUNYVVwCIzm2dm+cBlwPLkAmZ2AnALcJG774kwFgAaWoOcM6OiKOqPEhHJSJElBXdPANcA9wLPAL9096fN7AYzuygs9nWgFLjDzJ40s+XDHG5UdCf6AShSl1QRkSFF2o7i7iuAFYP2fSFp+5woP3+w7kTQ+0gjmkVEhpZVV8fO3iApaClOEZGhZVVSWFfXDEBhXladtohIy
rLq6rgvnDJbS3GKiAwtq5KCGUwqyY87DBGRtJVVSWH9jhaWzCiPOwwRkbSVVUmhqiSfRN+Qg6pFRIQsSwqJvn5mTtTANRGR4WRVUujpc/K0wI6IyLCy6grZ2NZN3gT1PBIRGU5WJQWA1q5E3CGIiKStrEkKvX3BvEdzq0tijkREJH1lTVI4MMWFRjOLiAwva66QXeFSnO3dfTFHIiKSvrImKST6g/EJMyoLY45ERCR9ZU1S6AuTQs6ErDllEZFDljVXyAM1hVx1SRURGVbWJIW+/qD3UY6SgojIsLImKXT2BElBNQURkeFlTVLoCpfi7AnHK4iIyCtlTVLoD+8pVJcWxByJiEj6ypqk0BtOmZ2XkzWnLCJyyLLmCtkb3mjOzdE9BRGR4WRNUmjvDibCy1dNQURkWFlzhWwLZ0etLM6LORIRkfSVNUnhwCKcuRrRLCIyrKy5QvZ7kBY0TEFEZHhZlBSCn2bKCiIiw8mapOBhTUE5QURkeFmUFIKfE5QVRESGlTVJQfcURERGFmlSMLPzzexZM9tsZtcP8XqBmf0ifP1RM5sbVSwD9xRQVhARGU5kScHMcoCbgAuAJcDlZrZkULGrgP3uvhD4FvDVqOIZuKeQNXUjEZFDF+Ulcimw2d2fd/ce4Hbg4kFlLgZ+FG7fCZxtEXUP0j0FEZGRRZkUaoDtSc/rwn1DlnH3BNAMTIoimAP3FJQSRESGF2VSGOr664dRBjO72sxWm9nqhoaGwwpm/uRS3nzsdK28JiJyELkRHrsOmJX0fCawc5gydWaWC1QA+wYfyN2XAcsAamtrX5E0UnHukqmcu2Tq4bxVRCRrRFlTWAUsMrN5ZpYPXAYsH1RmOfDecPtS4H4/cEdYRETGXGQ1BXdPmNk1wL1ADnCruz9tZjcAq919OfBD4MdmtpmghnBZVPGIiMjIomw+wt1XACsG7ftC0nYX8PYoYxARkdSp176IiAxQUhARkQFKCiIiMkBJQUREBigpiIjIAMu0YQFm1gBsO8y3VwONoxhOJtA5Zwedc3Z4Nec8x90nj1Qo45LCq2Fmq929Nu44xpLOOTvonLPDWJyzmo9ERGSAkoKIiAzItqSwLO4AYqBzzg465+wQ+Tln1T0FERE5uGyrKYiIyEGMy6RgZueb2bNmttnMrh/i9QIz+0X4+qNmNnfsoxxdKZzzJ8xsg5mtNbM/mdmcOOIcTSOdc1K5S83MzSzje6qkcs5m9o7wd/20mf1srGMcbSn8bc82swfM7Inw7/tNccQ5WszsVjPbY2brh3ndzOw74b/HWjM7cVQDcPdx9SCYpnsLMB/IB54Clgwq82Hg5nD7MuAXccc9Bud8JlAcbn8oG845LFcG/AVYCdTGHfcY/J4XAU8AE8PnU+KOewzOeRnwoXB7CbA17rhf5Tm/ATgRWD/M628C7iFYufIU4NHR/PzxWFNYCmx29+fdvQe4Hbh4UJmLgR+F23cCZ5tZJq/TOeI5u/sD7t7yl+1RAAAF30lEQVQRPl1JsBJeJkvl9wzwJeBrQNdYBheRVM75A8BN7r4fwN33jHGMoy2Vc3agPNyu4JUrPGYUd/8LQ6xAmeRi4H89sBKoNLPpo/X54zEp1ADbk57XhfuGLOPuCaAZmDQm0UUjlXNOdhXBN41MNuI5m9kJwCx3/91YBhahVH7Pi4HFZvawma00s/PHLLpopHLOXwSuMLM6gvVbrh2b0GJzqP/fD0mki+zEZKhv/IO7WKVSJpOkfD5mdgVQC5weaUTRO+g5m9kE4FvAlWMV0BhI5fecS9CEdAZBbfAhMzvG3Zsiji0qqZzz5cBt7v4NMzuVYDXHY9y9P/rwYhHp9Ws81hTqgFlJz2fyyurkQBkzyyWoch6supbuUjlnzOwc4HPARe7ePUaxRWWkcy4DjgEeNLOtBG2vyzP8ZnOqf9t3u3uvu78APEuQJDJVKud8FfBLAHf/G1BIMEfQeJXS//fDNR6TwipgkZnNM7N8ghvJyweVWQ68N9y+FLjfwzs4GWrEcw6bUm4hSAiZ3s4MI5yzuze7e7W7z3X3uQT3US5y99XxhDsqUvnb/g1BpwLMrJqgOen5MY1ydKVyzi8CZwOY2VEESaFhTKMcW8uB94S9kE4Bmt1912gdfNw1H7l7wsyuAe4l6Llwq7s/bWY3AKvdfTnwQ4Iq5maCGsJl8UX86qV4zl8HSoE7wnvqL7r7RbEF/SqleM7jSornfC9wnpltAPqA69x9b3xRvzopnvMnge+b2ccJmlGuzOQveWb2c4Lmv+rwPsm/AXkA7n4zwX2TNwGbgQ7gfaP6+Rn8byciIqNsPDYfiYjIYVJSEBGRAUoKIiIyQElBREQGKCmIiMgAJQXJeGb2UTN7xsx+epAyZ5hZWkx3YWYXHZjt08zeamZLkl67IRxkKBILdUmVjGdmG4ELwhG8w5U5A/iUu184ZoGlwMxuA37n7nfGHYsIqKYgGc7MbiaYVnm5mX3czJaa2SPh3PqPmNkRQ7zndDN7Mnw8YWZl4f7rzGxVOEf9vw/zeW1m9g0zezxcl2JyuP814QR0a83sLjObGO7/qL20jsXt4b4rzey7ZvZ3wEXA18NYFpjZbRas/3CBmf0y6XPPMLPfhtvnmdnfwhjuMLPScP9Xkj7rv0bz31mySNxzh+uhx6t9AFuB6nC7HMgNt88BfhVun0HwjRzgt8Bp4XYpwcj+8wjm5TeCL0u/A94wxGc58O5w+wvAd8PttcDp4fYNwLfD7Z1AQbhdGf68Mul9twGXJh3/NoKpV3IJpm8oCfd/D7iCYE6fvyTt/3QYRxXBPEeW/Fl66HGoD9UUZLypIJjKYz3BLKlHD1HmYeCbZvZRgotngiApnEewQM3jwJEMPZFcP/CLcPsnwOvMrCI8zp/D/T8iWCgFgmTx03B22kSqJxHG9AfgLeGkjW8G7iaY2G8J8LCZPUkwh9ccoIVgzYgfmNnbCKY/EDlk427uI8l6XwIecPdLLFhm9cHBBdz9K2b2e4L5Y1aGN3YN+LK733KInzfSTbk3EySIi4B/NbOhktRwfgF8hGB+rlXu3mrBxFV/dPfLBxc2s6UEE8NdBlwDnHUInyUC6J6CjD8VwI5w+8qhCpjZAndf5+5fBVYT1AruBd6f1D5fY2ZThnj7BILmHYB3AX9192Zgv5m9Ptz/D8CfLVjTYZa7PwD8C1BJ0FyVrJVgmu+hPEiwLOMHeKl2shI4zcwWhnEWm9niMO4Kd18BfAx4zTDHFDko1RRkvPka8CMz+wRw/zBlPmZmZxLMIroBuMfdu8Npl/8WziLbRtCGP3ia8XbgaDNbQ7Bi3zvD/e8FbjazYoKpqt9HMKvnT8LmJQO+5e5N9vKVX28nmOHzo7yUbABw976wG+2V4fFx9wYzuxL4uZkVhEU/T5Bc7jazwvCzPj7iv5TIENQlVeQQmFmbuw/+ti8ybqj5SEREBqimICIiA1RTEBGRAUoKIiIyQElBREQGKCmIiMgAJQURERmgpCAiIgP+P98NOwpwjhSmAAAAAElFTkSuQmCC\n", + 
"text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "plt.figure()\n", + "plt.plot(fpr, tpr, label = 'XGBoost')\n", + "plt.xlabel('false positives')\n", + "plt.ylabel('true positives')\n", + "#plt.legend()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 75, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "0.8074208968814023" + ] + }, + "execution_count": 75, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "skl.metrics.accuracy_score(y_true=y_test_neg, y_pred=predictions_hard)" + ] + }, + { + "cell_type": "code", + "execution_count": 60, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 60, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX8AAAD8CAYAAACfF6SlAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzt3Xt4VNW9//H3d3IhQCDcwp0IClYx3GK4KYiWgHiKQVo8IFrxHI+0UrSn/qrlVB/LD7VHtFr0Ecvh4ahozxGrLQpVq4Cg/UlFUUABgQRECeESboFILiRZvz9mGEMuJGQPmcD+vJ4nz+w9s2avlUX4rD1r79nbnHOIiIi/BKLdABERaXgKfxERH1L4i4j4kMJfRMSHFP4iIj6k8BcR8SGFv4iIDyn8RUR8SOEvIuJDsdFuQE3atWvnunfvHu1miIicUz799NMDzrnk2so12vDv3r07a9eujXYzRETOKWb2dV3KadpHRMSHFP4iIj6k8BcR8aFGO+cvImffiRMnyMnJoaioKNpNkTOUkJBA165diYuLq9f7Ff4iPpaTk0OLFi3o3r07Zhbt5kgdOec4ePAgOTk59OjRo17b0LSPiI8VFRXRtm1bBf85xsxo27atp09sCn8Rn1Pwn5u8/rtFZNrHzMYATwExwALn3KOVXv8p8DOgDCgApjrnNkeibvnOwYJi/p51gB15BWBGwMAIPgYCwT+UFgmx3Dq0e3QbKiJR5zn8zSwGmAuMAnKAT8xsSaVw/1/n3LxQ+UzgSWCM17r9rrSsnHW7jvD+1jze35bHxtx8arslc6ekBIW/NBoHDx5k5MiRAOzdu5eYmBiSk4NfTv3444+Jj4+PSD2fffYZ+/fvZ8yYyMTOsGHDeOaZZ+jfv39EthcNkdjzHwRkO+d2AJjZImAcEA5/59zRCuWbA7prfD3tPlLIB9vyeH9rHh9mH+BYcSkBg7SU1tyTcTFXXZxMapckAgbOBTu63DmcCz6KNCZt27Zl/fr1AMycOZPExER++ctfnlLGOYdzjkCg/rPUn332GRs3bqw2/EtLS4mN9d+5L5H4jbsAuyqs5wCDKxcys58B9wDxwPcjUK8vFJ0o4+OvDvH+tuDeffb+AiC4B/+Dvp0YcXEyV/RsR1LTqqd7nZwSjEFzunJuyc7O5oYbbmDYsGGsWbOG119/nX79+nHkyBEAFi1axPLly1mwYAH79u3jzjvv5JtvviEQCPD0008zZMiQ8LYKCwuZNWsWhYWFrFq1igceeID169eTl5fHjh076NixIzNnzuS2226joKCAQCDAs88+y+DBwRj77W9/y8svv0wgEGDs2LE88sgj4W2XlZUxZcoUevbsycyZMxu0j7yKRPhXlyxVdjGdc3OBuWY2GXgAmFJlQ2ZTgakAKSkpEWjaucc5x44D34anctZ8dZCiE+XExwYY3KMNkwZ246qLk+nVPlEH6iSi/u/STWzOPVp7wTPQu3NLfnP9ZfV67+bNm3n++eeZN28epaWlNZa7++67ue+++xgyZAg7d+5k7NixbNy4Mfx606ZNefDBB9m4cSNz5swBYP369axbt44PPviAhIQEjh8/zrJly0hISGDLli1MmTKFNWvWsHTpUt5++20+/vhjmjZtyqFDh8LbLS0tZfLkyaSlpfGrX/2qXr9jNEUi/HOAbhXWuwK5pym/CPhDdS845+YD8wHS09N9M0dxrOgEq7cfDE7nbMsj53AhABe2a86kgSmM+F4yQ3q0pWl8TJRbKtJwLrroIgYOHFhrueXLl7N169bw+uHDhyksLKRp06anfd+4ceNISEgAoLi4mOnTp7NhwwZiY2PZvn17eNv/+q//Gt5WmzZtwu+//fbbmTx58jkZ/BCZ8P8E6GVmPYDdwCRgcsUCZtbLOZcVWv0BkIWPlZc7Nu85yvvb8vhgWx6ffn2Y0nJH8/gYrujZjp+OuIgRFyfTrU2zaDdVfKS+e+hnS/PmzcPLgUAAV+GYVcXz251z9To4XHH7TzzxBN26deOPf/wjJ06cIDExMbztmj5hX3nllaxYsYJ///d/p0mTJmdUd2PgOfydc6VmNh14h+Cpns855zaZ2SxgrXNuCTDdzDKAE8BhqpnyOd8d+raEv2flhQL/AAcKigHo3akld1x1ISMuTiYtpTXxsfrqhUhlgUCA1q1bk5WVxUUXXcTixYvDZwVlZGQwd+5cfvGLXwDBKZ3KZ+G0aNGCY8eO1bj9/Px8evbsiZmxcOHC8EAzevRoZs+ezcSJE8PTPif3/qdOncq7777LpEmTePXVV8+5g8YRaa1z7i3grUrPPVhh+eeRqOdcUlpWzvpdR8JTOZ/vDp6G2bpZHMN7JTPi4mSGX9yO9i0Sot1UkXPC7NmzGTNmDCkpKfTu3Zvi4uAO1Ny5c7nzzjt5/vnnKS0t5ZprrmHu3LmnvPf73/8+jz/+OAMGDOD++++vsu3p06czYcIEXn75ZTIyMsJ78mPHjmXDhg2kp6cTFxfH9ddfz0MPPRR+33333cf999/PbbfdxosvvujpjKSGZq6Rnv6Xnp7uzsWbufxt4x6WbMjl71kHOFYUPA1zQEprRlwcDPzULknEBHS
gVhqHL7/8kksvvTTazZB6qu7fz8w+dc6l1/bec+tzSiNXXu6477XPOVp06pkJuUcKWbl1P1v2HqVjy6b8n9EX07yJul5EokcJFEGBgPHhjO/zzaHj7M0vYk9+0XePRwvZsvcYH2w7wIzrLol2U0XE5xT+EdYiIY7LOidxWeekal8/3dkDIiIN5dw5OnGeUPCLSGOg8BcR8SGFv4iIDyn8RSSqYmJi6N+/f/hn586dNZbduXMnqampnuucM2cOx48f97wdgFWrVjF27NiIbKsh6YCviERV06ZNw5d1bihz5szhlltuoVmzqpdQKSsrIybm/L+Olvb8RaTR2blzJ8OHDyctLY20tDRWr15dpcymTZsYNGgQ/fv3p2/fvmRlBS8Z9sc//jH8/E9+8hPKyspOed/TTz9Nbm4u11xzDddccw0AiYmJPPjggwwePJh//OMfzJo1i4EDB5KamsrUqVPDl3vIzs4mIyODfv36kZaWFr4A3EmffPIJAwYMYMeOHWejWyJKe/4iEvT2DNj7RWS32bEPXPfoaYsUFhaGr8XTo0cPFi9eTPv27cOXWM7KyuKmm26i8jf+582bx89//nNuvvlmSkpKKCsr48svv+SVV17hww8/JC4ujmnTpvE///M/3HrrreH33X333Tz55JOsXLmSdu3aAfDtt9+SmprKrFmzAOjduzcPPhi8Qs2Pf/xj/vrXv3L99ddz8803M2PGDMaPH09RURHl5eXs2hW8ncnq1au56667eOONN86JS9Ir/EUkqqqb9jlx4gTTp09n/fr1xMTEsG3btirvGzp0KI888gg5OTn88Ic/pFevXqxYsYJPP/00fCnowsJC2rdvX2sbYmJi+NGPfhReX7lyJY899hjHjx/n0KFDXHbZZVx99dXs3r2b8ePHA4QvBw3ByyycvNBb586d69UPDU3hLyJBteyhN6Tf//73dOjQgQ0bNlBeXn5K0J40efJkBg8ezJtvvsm1117LggULcM4xZcoU/vM///OM6ktISAjP8xcVFTFt2jTWrl1Lt27dmDlzJkVFRZzuOmidOnWiqKiIdevWnTPhrzl/EWl08vPz6dSpE4FAgJdeeqnKvD3Ajh07uPDCC7n77rvJzMzk888/Z+TIkbz22mvs378fgEOHDvH1119Xee/pLvF88l4B7dq1o6CggNdeew2Ali1b0rVrV15//XUgeAOYk2cMtWrVijfffJNf//rXrFq1yvPv3xAU/iLS6EybNo2FCxcyZMgQtm3bdsqNV0565ZVXSE1NpX///mzZsoVbb72V3r178/DDDzN69Gj69u3LqFGj2LNnT5X3Tp06leuuuy58wLeiVq1acccdd9CnTx9uuOGGU+4m9tJLL/H000/Tt29frrjiCvbu3Rt+rUOHDixdupSf/exnrFmzJkI9cfboks4iPqZLOp/bvFzSWXv+IiI+pPAXEfGhiIS/mY0xs61mlm1mM6p5/R4z22xmn5vZCjO7IBL1iohI/XgOfzOLAeYC1wG9gZvMrHelYuuAdOdcX+A14DGv9YqISP1FYs9/EJDtnNvhnCsBFgHjKhZwzq10zp28itJHQNcI1CsiIvUUifDvAuyqsJ4Teq4mtwNvR6BeERGpp0iEf3W3pqr2/FEzuwVIBx6v4fWpZrbWzNbm5eVFoGki0tidvKRzamoqN954o6dLLVe8vPKSJUt49NGav7V85MgRnn322TOuY+bMmfzud7+r8vzrr7/O5s2bz3h7NUlMTIzYtqoTifDPAbpVWO8K5FYuZGYZwP1ApnOuuLoNOefmO+fSnXPpycnJEWiaiDR2J6/ts3HjRuLj45k3b94przvnKC8vP+PtZmZmMmNGlfNPwuob/jU5XfiXlpZGrJ5IiUT4fwL0MrMeZhYPTAKWVCxgZgOA/yIY/PsjUKeInIeGDx9OdnY2O3fu5NJLL2XatGmkpaWxa9cu3n33XYYOHUpaWho33ngjBQUFAPztb3/jkksuYdiwYfzlL38Jb+uFF15g+vTpAOzbt4/x48fTr18/+vXrx+rVq5kxYwbbt2+nf//+3HvvvQA8/vjjDBw4kL59+/Kb3/wmvK1HHnmE733ve2RkZLB169Yq7V69ejVLlizh3nvvpX///mzfvp2rr76aX//614wYMYKnnnqKpUuXMnjwYAYMGEBGRgb79u0DoKCggH/5l3+hT58+9O3blz//+c+nbPvAgQMMHTqUN998M6J97fnCbs65UjObDrwDxADPOec2mdksYK1zbgnBaZ5E4NXQDcy/cc5leq1bRCJn9sez2XJoS0S3eUmbS/jVoF/VqWxpaSlvv/02Y8aMAWDr1q08//zzPPvssxw4cICHH36Y5cuX07x5c2bPns2TTz7Jfffdxx133MF7771Hz549mThxYrXbvvvuuxkxYgSLFy+mrKyMgoICHn30UTZu3Bi+oui7775LVlYWH3/8Mc45MjMz+eCDD2jevDmLFi1i3bp1lJaWkpaWxuWXX37K9q+44goyMzMZO3YsEyZMCD9/5MgR3n//fQAOHz7MRx99hJmxYMECHnvsMZ544gkeeughkpKS+OKLL8LlTtq3bx+ZmZk8/PDDjBo1qo69XjcRuaqnc+4t4K1Kzz1YYTkjEvWIyPmn4vX8hw8fzu23305ubi4XXHABQ4YMAeCjjz5i8+bNXHnllQCUlJQwdOhQtmzZQo8ePejVqxcAt9xyC/Pnz69Sx3vvvceLL74IBI8xJCUlnRKyEAz/d999lwEDBgDBPfKsrCyOHTvG+PHjw3f9ysys+35rxcEoJyeHiRMnsmfPHkpKSujRowcAy5cvZ9GiReFyrVu3BoKXtR45ciRz585lxIgRda6zrnRJZxEBqPMeeqTVdBvHihdzc84xatQoXn755VPKrF+/ntBsgmfOOf7jP/6Dn/zkJ6c8P2fOnHrXUfF3uOuuu7jnnnvIzMxk1apVzJw5M1xvdduPjY3l8ssv55133jkr4a/LO4hIozdkyBA+/PBDsrOzATh+/Djbtm3jkksu4auvvgrfTrHy4HDSyJEj+cMf/gAE79F79OjRKpd1vvbaa3nuuefCxxJ2797N/v37ueqqq1i8eDGFhYUcO3aMpUuXVlvH6S4TDcHLVHfpEjwLfuHCheHnR48ezTPPPBNeP/mJxMx47rnn2LJly2nPWqovhb+INHrJycm88MIL3HTTTfTt25chQ4awZcsWEhISmD9/Pj/4wQ8YNmwYF1xQ/ZVjnnrqKVauXEmfPn24/PLL2bRpE23btuXKK68kNTWVe++9l9GjRzN58mSGDh1Knz59mDBhAseOHSMtLY2JEyfSv39/fvSjHzF8+PBq65g0aRKPP/44AwYMqHJvXwieInrjjTcyfPjw8O0jAR544AEOHz5Mamoq/fr1Y+XKleHXYmJiWLRoEStXrozomUmgSzqL+Jou6Xxu0yWdRUTkjCj8RUR8SOEv4nONdepXTs/rv5vCX8THEhISOHjwoAaAc4xzjoMHD5KQkFDvbeg8fxEf69q1Kzk5OehCiueehIQEunat/9XxFf4iPhYXFxf+pq
n4i6Z9RER8SOEvIuJDCn8RER9S+IuI+JDCX0TEhxT+IiI+pPAXEfGhiIS/mY0xs61mlm1mVe6YbGZXmdlnZlZqZhOq24aIiDQcz1/yMrMYYC4wCsgBPjGzJc65irex/wa4Dfil1/pqVXgE5g2HuKYQlwBxzULLzSA2Ibic1A16joTOAyAQc9abJCLS2ETiG76DgGzn3A4AM1sEjAPC4e+c2xl6rTwC9dWu+5VwojD0cxyKjkLB/uByyXEo2AerfgtN2wQHgZ6jgo/N29W+bRGR80Akwr8LsKvCeg4wOALbrZ+mrWD8vNOXOX4Itr8HWcsgezl88Spg0Ll/cCDoNQq6XK5PBSJy3opE+Fd3Z+N6XSLQzKYCUwFSUlK8tOn0mrWBPhOCP+XlsHcDZC2H7GXw99/BB49BQiu46PvBgaBnBiS2P3vtERFpYJEI/xygW4X1rkBufTbknJsPzIfgbRy9N60OAoHg3H/nATDi3uCngh2rgp8IspfDpr8Ey3XqF5oeyoCuAyFG18QTkXNXJBLsE6CXmfUAdgOTgMkR2G50NGsDqT8M/pSXw74vQtNDK+D//T74ySAhCS68Jvip4KKR0LJTtFstInJGInIDdzP7J2AOEAM855x7xMxmAWudc0vMbCCwGGgNFAF7nXOXnW6bjfIG7oVHQp8KQoPBsT3B5zv0gV4ZwU8G3QZBTFxUmyki/lXXG7hHJPzPhkYZ/hU5B/s2BQeCrOWw6yMoL4UmLeHCEd9NESV1iXZLRcRH6hr+mriuLzPomBr8GfaL4OmkFY8VfLk0WK597+Ag0GsUdBsCsfFRbbaICGjP/+xwDvZ/GZoeWg5f/wPKT0B8IvQY8d0UUatutW9LROQMaM8/msygQ+/gz5U/h+Jj8NUH332vYOubwXLJlwQ/FfTMgAuugNgm0W23iPiG9vwbmnNwYFtoIFgGX6+GshKIaw49rgp+Kug1Glqdxe85iMh5Swd8zxXFBbD1bXjvITjydfC5mHj4xSZ9sUxEzpimfRoz52DvF6GDwyu+O1MoPhEuvBouGQvNk6PdShE5jyn8o2Hjn+HPtweXO/aBK+4KfXN4kM4GEpEGofCPhguvgXHPBq8k2qJjtFsjIj6k8I+G5m1hwM3RboWI+Jhu4ygi4kMKfxERH1L4i4j4kMJfRMSHFP4iIj6k8BcR8SGFv4iIDyn8RUR8SOEvIuJDEQl/MxtjZlvNLNvMZlTzehMzeyX0+hoz6x6JekVEpH48h7+ZxQBzgeuA3sBNZta7UrHbgcPOuZ7A74HZXusVEZH6i8Se/yAg2zm3wzlXAiwCxlUqMw5YGFp+DRhpZhaBukVEpB4iEf5dgF0V1nNCz1VbxjlXCuQDbStvyMymmtlaM1ubl5cXgaaJiEh1IhH+1e3BV749WF3K4Jyb75xLd86lJyfrZiYiImdLJMI/B+hWYb0rkFtTGTOLBZKAQxGoW0RE6iES4f8J0MvMephZPDAJWFKpzBJgSmh5AvCea6w3DxYR8QHPN3NxzpWa2XTgHSAGeM45t8nMZgFrnXNLgP8GXjKzbIJ7/JO81isiIvUXkTt5OefeAt6q9NyDFZaLgBsjUZeIiHinb/iKiPiQwl9ExIcU/iIiPqTwFxHxIYW/iIgPKfxFRHxI4S8i4kMKfxERH1L4i4j4kMJfRMSHFP4iIj6k8BcR8SGFv4iIDyn8RUR8SOEvIuJDCn8RER9S+IuI+JCn8DezNma2zMyyQo+tayj3NzM7YmZ/9VKfiIhEhtc9/xnACudcL2BFaL06jwM/9liXiIhEiNfwHwcsDC0vBG6orpBzbgVwzGNdIiISIV7Dv4Nzbg9A6LG99yaJiMjZFltbATNbDnSs5qX7I90YM5sKTAVISUmJ9OZFRCSk1vB3zmXU9JqZ7TOzTs65PWbWCdjvpTHOufnAfID09HTnZVsiIlIzr9M+S4ApoeUpwBsetyciIg3Aa/g/CowysyxgVGgdM0s3swUnC5nZ34FXgZFmlmNm13qsV0REPKh12ud0nHMHgZHVPL8W+LcK68O91CMiIpGlb/iKiPiQwl9ExIcU/iIiPqTwFxHxIYW/iIgPKfxFRHxI4S8i4kMKfxERH1L4i4j4kMJfRMSHFP4iIj6k8BcR8SGFv4iIDyn8RUR8SOEvIuJDCn8RER9S+IuI+JCn8DezNma2zMyyQo+tqynT38z+YWabzOxzM5vopU4REfHO657/DGCFc64XsCK0Xtlx4Fbn3GXAGGCOmbXyWK+IiHjgNfzHAQtDywuBGyoXcM5tc85lhZZzgf1Assd6RUTEA6/h38E5twcg9Nj+dIXNbBAQD2z3WK+IiHgQW1sBM1sOdKzmpfvPpCIz6wS8BExxzpXXUGYqMBUgJSXlTDYvIiJnoNbwd85l1PSame0zs07OuT2hcN9fQ7mWwJvAA865j05T13xgPkB6erqrrW0iIlI/Xqd9lgBTQstTgDcqFzCzeGAx8KJz7lWP9YmISAR4Df9HgVFmlgWMCq1jZulmtiBU5p+Bq4DbzGx96Ke/x3pFRMQDc65xzq6kp6e7tWvXRrsZIiLnFDP71DmXXls5fcNXRMSHFP4iIj6k8BcR8SGFv4iIDyn8RUR8SOEvIuJDCn8RER9S+IuI+JDCX0TEhxT+IiI+pPAXEfEhhb+IiA8p/EVEfEjhLyLiQwp/EREfUviLiPiQwl9ExIcU/iIiPuQp/M2sjZktM7Os0GPraspcYGafhu7du8nMfuqlThER8c7rnv8MYIVzrhewIrRe2R7gCudcf2AwMMPMOnusV0REPPAa/uOAhaHlhcANlQs450qcc8Wh1SYRqFNERDzyGsQdnHN7AEKP7asrZGbdzOxzYBcw2zmX67FeERHxILa2Ama2HOhYzUv317US59wuoG9ouud1M3vNObevmrqmAlMBUlJS6rp5ERE5Q7WGv3Muo6bXzGyfmXVyzu0xs07A/lq2lWtmm4DhwGvVvD4fmA+Qnp7uamubiIjUj9dpnyXAlNDyFOCNygXMrKuZNQ0ttwauBLZ6rFdERDzwGv6PAqPMLAsYFVrHzNLNbEGozKXAGjPbALwP/M4594XHekVExINap31Oxzl3EBhZzfNrgX8LLS8D+nqpR0REIkunXYqI+JDCX0TEhxT+IiI+pPAXEfEhhb+IiA8p/EVEfEjhLyLiQwp/EREfUviLiPiQwl9ExIcU/iIiPqTwFxHxIYW/iIgPKfxFRHxI4S8i4kMKfxERH1L4i4j4kKfwN7M2ZrbMzLJCj61PU7alme02s2e81CkiIt553fOfAaxwzvUCVoTWa/IQwXv4iohIlHm6hy8wDrg6tLwQWAX8qnIhM7sc6AD8DUj3WKdIg/rDhj9QUlZCwALhnxiLqXa54nqMxWBmNa5XLB8fE0/L+JYkNUkiqUkSCTEJmFm0f3U5j3kN/w7OuT0Azrk9Zta+cgEzCwBPAD+mmpu9izR2f9r6J44UHaHMleFwDVJnfCA+PBC0jG9JqyatwuvVPhcffGwa21SDhtRJreFvZsuBjtW8dH8d65gGvOWc21XbH6WZT
QWmAqSkpNRx8yJn18p/Xhleds5R7sopd+WUubJTlp1z4efOdL2krIT8knzyi0M/JfkcLT5KfnE+R4qPsKtgFxsPbuRo8VGKyopqbGtcIO6UwaBlk5YkxSedMlCcfC6pyXfPN4ttpkHDZ2oNf+dcRk2vmdk+M+sU2uvvBOyvpthQYLiZTQMSgXgzK3DOVTk+4JybD8wHSE9Pb5hdLJEzcHLaJoYY4oiLShuKSovCA0R+cWiQCC0fKT4SfK4kOHDsKdjDl8VfcrTkKIWlhTVuM9Zig4PCyQEhPim8XnGgqPxcYlyiBo1zlNdpnyXAFODR0OMblQs4524+uWxmtwHp1QW/iNRNQmwCCbEJdGje4YzeV1xWHP40kV8SHCgqrp8cPI4WH2Xv8b1sPbyV/OJ8jpcer3GbMRYTnoaqPAVVeb3i4JEYl0jAdKZ5NHkN/0eBP5nZ7cA3wI0AZpYO/NQ5928ety8iEdIkpgnJzZJJbpZ8Ru87UXbi1CmpCoNF5fW843lkH84mvySfb098W+M2AxaoecCoYfBIapJEi/gWGjQixJxrnLMr6enpbu3atdFuhojU04nyE+EpqaPFR8NTUqcbPI4WH+XYiWM1btOwU45ZtGzSMjxNVfFgeMXjGUnxwUEjJhDTgL999JjZp865Ws+q9LrnLyJSrbhAHG2btqVt07Zn9L7S8tLwMYuKxy8qHtMIHxAvyufr/K/JL8nnWEnNgwZAi/gWpwwU1R34rjx4tIhvQWzg/IzJ8/O3EpFzVmwgljYJbWiT0OaM3ldWXsaxkmM1HvyufKxj17Fd4UHkdKfwtohrccqB7uoOfLdr2o7OiZ3p1LwTCbEJXrugQSj8ReS8EBOIoVVCK1oltDqj95W78uCgUcuU1Mnl3G9zw4NKuSuvsr2TA0GX5l3onNg5uJzYpdENDgp/EfG1gAXCUz5notyVU3CigPyifPIK89hdsJvcglxyv81ld8FuNh7cyLJvllFaXnrK+9omtA0PBhUHhs6JnencvHODDQ4KfxGRejh5xlLL+JZ0a9mNtA5pVcqUlZdxoPBAeEDILcgltyC4vPngZpZ/s7zawWFQx0E8NuKxs9p+hb+IyFkSE4ihQ/MOdGjegQHtB1R5vdyVk3c8r8rg0DqhxgskR4zCX0QkSgIWOO3gcFbrbtDaRESkUVD4i4j4kMJfRMSHFP4iIj6k8BcR8SGFv4iIDyn8RUR8SOEvIuJDjfZ6/maWB3wd7XZ40A44EO1GNDLqk6rUJ6dSf1R1pn1ygXOu1jv2NNrwP9eZ2dq63FDBT9QnValPTqX+qOps9YmmfUREfEjhLyLiQwr/s2d+tBvQCKlPqlKfnEr9UdVZ6RPN+YuI+JD2/EVEfEjh75GZjTGzrWaWbWYzqnlFGEW7AAADEklEQVS9iZm9Enp9jZl1b/hWNqw69Mk9ZrbZzD43sxVmdkE02tlQauuPCuUmmJkzs/P+bJe69ImZ/XPo72STmf1vQ7exodXh/02Kma00s3Wh/zv/5KlC55x+6vkDxADbgQuBeGAD0LtSmWnAvNDyJOCVaLe7EfTJNUCz0PKd53Of1KU/QuVaAB8AHwHp0W53tPsE6AWsA1qH1ttHu92NoE/mA3eGlnsDO73UqT1/bwYB2c65Hc65EmARMK5SmXHAwtDya8BIM7MGbGNDq7VPnHMrnXPHQ6sfAV0buI0NqS5/IwAPAY8BRQ3ZuCipS5/cAcx1zh0GcM7tb+A2NrS69IkDWoaWk4BcLxUq/L3pAuyqsJ4Teq7aMs65UiAfaNsgrYuOuvRJRbcDb5/VFkVXrf1hZgOAbs65vzZkw6KoLn8jFwMXm9mHZvaRmY1psNZFR136ZCZwi5nlAG8Bd3mpUPfw9aa6PfjKp0/Vpcz5pM6/r5ndAqQDI85qi6LrtP1hZgHg98BtDdWgRqAufyOxBKd+rib4yfDvZpbqnDtyltsWLXXpk5uAF5xzT5jZUOClUJ+U16dC7fl7kwN0q7DelaofxcJlzCyW4Me1Qw3SuuioS59gZhnA/UCmc664gdoWDbX1RwsgFVhlZjuBIcCS8/ygb13/37zhnDvhnPsK2EpwMDhf1aVPbgf+BOCc+weQQPC6P/Wi8PfmE6CXmfUws3iCB3SXVCqzBJgSWp4AvOdCR2zOU7X2SWia478IBv/5Ppd72v5wzuU759o557o757oTPAaS6ZxbG53mNoi6/L95neCJAZhZO4LTQDsatJUNqy598g0wEsDMLiUY/nn1rVDh70FoDn868A7wJfAn59wmM5tlZpmhYv8NtDWzbOAeoMZT/c4HdeyTx4FE4FUzW29mlf/Izxt17A9fqWOfvAMcNLPNwErgXufcwei0+OyrY5/8H+AOM9sAvAzc5mVHUt/wFRHxIe35i4j4kMJfRMSHFP4iIj6k8BcR8SGFv4iIDyn8RUR8SOEvIuJDCn8RER/6/6OyYlCmIiEjAAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "plt.figure()\n", + "#plt.plot(RNN_pred_input_test[-1,:,0], RNN_pred_input_test[-1,:,1])\n", + "plt.plot(Classifier_input_test[-25,:,0], Classifier_input_test[-25,:,2], label = 'True track')\n", + "plt.plot(Classifier_input_test[-26,:,0], Classifier_input_test[-26,:,2], label = 'False track')\n", + "plt.plot(prediction_test[-25,:,0], prediction_test[-25,:,2], label = 'Predicted track')\n", + "plt.legend()" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [], + "source": [ + "RNN_pred = RNN_classifier.predict_proba(X_test)" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [], + "source": [ + "RNN_predictions_true = RNN_pred\n", + "RNN_predictions_false = (-1) * (RNN_pred - 1)" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [], + "source": [ + "cut = 0.5\n", + "predictions_hard = (RNN_predictions_true > cut).astype(int)" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "c:\\users\\sa_li\\anaconda3\\envs\\rnn-tf-ker\\lib\\site-packages\\matplotlib\\axes\\_axes.py:6462: UserWarning: The 'normed' kwarg is deprecated, and has been replaced by the 'density' kwarg.\n", + " warnings.warn(\"The 'normed' kwarg is deprecated, and has been \"\n" + ] + }, + { + "data": { + "text/plain": [ + "Text(0.5,0,'cut')" + ] + }, + "execution_count": 24, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEKCAYAAAACS67iAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAADnhJREFUeJzt3X+MZWddx/H3hy4VC0V+7GCQokMTIGBjAk60SAJCqWmQsP7RkKKVotUNGBGrREv4A6MxEAUqRiKuUKiKBV1RNgSUtbRWSVuY0t9dKLXUsrKyU9HKjyhd+frHvZB1u7tz7j3n3jv3mfcrmcy95z5zz/eZO/OZZ55zznNTVUiSlt/DFl2AJGkYBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpETvmubOdO3fW6urqPHcpSUvvxhtvvL+qVjZrN9dAX11dZX19fZ67lKSll+RfurRzykWSGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhox1ytFe7n6Td3aveD1s61DkrYoR+iS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjNg30JJcnOZzk9qO2/W6SzyS5NclfJ3nMbMuUJG2mywj9vcB5x2zbD5xVVT8A3AW4IpYkLdimgV5V1wJfPmbbx6rqyPju9cAZM6hNkjSBIebQfxb46ADPI0nqoVegJ3kDcAR430na7E6ynmR9Y2Ojz+4kSScxdaAnuQh4CfBTVVUnaldVe6pqrarWVlZWpt2dJGkTU71jUZLzgF8Hnl9VXx+2JEnSNLqctnglcB3w9CQHk1wM/AFwOrA/yc1J3jnjOiVJm9h0hF5VLz/O5nfPoBZJUg9eKSpJjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIzYN9CSXJzmc5Pajtj0uyf4knxt/fuxsy5QkbabLCP29wHnHbLsUuKqqngpcNb4vSVqgTQO9qq4FvnzM5l3AFePbVwA/MXBdkqQJTTuH/t1VdQhg/PkJw5UkSZrGzA+KJtmdZD3J+sbGxqx3J0nb1rSB/qUkTwQYfz58ooZVtaeq1qpqbWVlZcrdSZI2M22g7wMuGt++CPjQMOVIkqbV5bTFK4HrgKcnOZjkYuDNwLlJPgecO74vSVqgHZs1qKqXn+ChcwauRZLUg1eKSlIjDHRJasSmUy6SpOO7bP9dndtecu7TZljJiCN0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1olegJ7kkyR1Jbk9yZZJHDFWYJGkyUwd6kicBvwSsVdVZwCnABUMVJkmaTN8plx3AdybZAZwGfLF/SZKkaUwd6FX1r8BbgPuAQ8ADVfWxY9sl2Z1kPcn6xsbG9JVKkk6qz5TLY4FdwFOA7wEemeTCY9tV1Z6qWquqtZWVlekrlSSdVJ8plxcBn6+qjap6EPgg8CPDlCVJmlSfQL8PODvJaUkCnAMcGKYsSdKk+syh3wDsBT4N3DZ+rj0D1SVJmtCOPl9cVW8E3jhQLZKkHrxSVJIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEg
S5JjTDQJakRBrokNcJAl6RG9Ar0JI9JsjfJZ5IcSPKcoQqTJE1mR8+vfzvwt1V1fpJTgdMGqEmSNIWpAz3Jo4HnAa8EqKpvAN8YpixJ0qT6TLmcCWwA70lyU5J3JXnksY2S7E6ynmR9Y2Ojx+4kSSfTJ9B3AM8G/rCqngV8Dbj02EZVtaeq1qpqbWVlpcfuJEkn0yfQDwIHq+qG8f29jAJekrQAUwd6Vf0b8IUkTx9vOge4c5CqJEkT63uWy2uA943PcLkH+Jn+JUmSptEr0KvqZmBtoFokST14pagkNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEX2vFJWk5ly2/65FlzAVR+iS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmN6B3oSU5JclOSDw9RkCRpOkOM0F8LHBjgeSRJPfQK9CRnAD8OvGuYciRJ0+o7Qv894NeAbw5QiySph6kDPclLgMNVdeMm7XYnWU+yvrGxMe3uJEmb6DNCfy7w0iT3Au8HXpjkz45tVFV7qmqtqtZWVlZ67E6SdDJTB3pVvb6qzqiqVeAC4ONVdeFglUmSJuJ56JLUiB1DPElVXQNcM8RzSZKm4whdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMGufRfkpbBZfvvWnQJM+UIXZIaYaBLUiMMdElqhIEuSY1o76Do1W/q1u4Fr59tHZI0Z47QJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqxNSBnuTJSa5OciDJHUleO2RhkqTJ9Lmw6Ajwq1X16SSnAzcm2V9Vdw5UmyRpAlMHelUdAg6Nb38lyQHgSYCBLmmuWl8Wt6tB5tCTrALPAm4Y4vkkSZPrHehJHgX8FfDLVfVfx3l8d5L1JOsbGxt9dydJOoFegZ7k4YzC/H1V9cHjtamqPVW1VlVrKysrfXYnSTqJPme5BHg3cKCq3jZcSZKkafQZoT8X+GnghUluHn+8eKC6JEkT6nOWyz8BGbAWSVIP7b3BRVdd3wgDfDMMSUvBS/8lqRHbd4QuacvzgqHJOEKXpEYY6JLUCANdkhphoEtSIwx0SWqEZ7lImjvPXpkNA72LrhcheQGSpAVyykWSGuEIfUiTLCfQhSN+LRmnUhbLEbokNcJAl6RGOOUi6aScRlkejtAlqRGO0LcyT5eUNAEDvQUGvyQMdEma2tn37Zmg9VtmVse3GOjbydDnyYOj/i3Ig5j9TRbUW4eBrn5m8Ueii234h8Sg7m9Zg7qrpQn06+75907tnnPm42dcidTNdgzg1gNzq1uaQJf+nwUts3Ddu1/X+SnPnrYWaUq9Aj3JecDbgVOAd1XVmwepStpE1//YOrune1BLW9XUgZ7kFOAdwLnAQeBTSfZV1Z1DFTeNwX/Rt6FJpq38fktbR58R+g8Bd1fVPQBJ3g/sAhYa6OrPkJaWU59L/58EfOGo+wfH2yRJC9BnhJ7jbKuHNEp2A7vHd7+a5LNT7m8ncP+UX7us7PP2YJ+3g597a58+f1+XRn0C/SDw5KPunwF88dhGVbUH6H0uU5L1qlrr+zzLxD5vD/Z5e5hHn/tMuXwKeGqSpyQ5FbgA2DdMWZKkSU09Qq+qI0l+Efg7RqctXl5VdwxWmSRpIr3OQ6+qjwAfGaiWzWzHS9Ds8/Zgn7eHmfc5VQ85jilJWkK+Y5EkNWLLBXqS85J8NsndSS49zuPfkeQD48dvSLI6/yqH1aHPv5LkziS3JrkqSadTmLayzfp8VLvzk1SSpT4jokt/k7xs/DrfkeTP513j0Dr8XH9vkquT3DT+2X7xIuocUpLLkxxOcvsJHk+S3x9/T25N8uxBC6iqLfPB6ODqPwNnAqcCtwDPPKbNLwDvHN++APjAouueQ59fAJw2vv3q7dDncbvTgWuB64G1Rdc949f4qcBNwGPH95+w6Lrn0Oc9wKvHt58J3Lvougfo9/OAZwO3n+DxFwMfZXQdz9nADUPuf6uN0L+9nEBVfQP41nICR9sFXDG+vRc4J8nxLnJaFpv2uaqurqqvj+9ez+ic/2XW5XUG+C3gd4D/nmdxM9Clvz8PvKOq/gOgqg7PucahdelzAY8e3/4ujnMdy7KpqmuBL5+kyS7gT2rkeuAxSZ441P63WqB3WU7g222q6gjwALDMi6BPuoTCxYz+wi+zTfuc5FnAk6vqw/MsbEa6vMZPA56W5BNJrh+vZLrMuvT5N4ALkxxkdLbca+ZT2kLNdMmUrbYeepflBDotObBEOvcnyYXAGvD8mVY0eyftc5KHAZcBr5xXQTPW5TXewWja5UcZ/Qf2j0nOqqr/nHFts9Klzy8H3ltVb03yHOBPx33+5uzLW5iZ5tdWG6F3WU7g222S7GD0r9rJ/sXZ6jotoZDkRcAbgJdW1f/MqbZZ2azPpwNnAdckuZfRXOO+JT4w2vXn+kNV9WBVfR74LKOAX1Zd+nwx8BcAVXUd8AhGa7y0rNPv+7S2WqB3WU5gH3DR+Pb5wMdrfLRhSW3a5/H0wx8xCvNln1uFTfpcVQ9U1c6qWq2qVUbHDV5aVeuLKbe3Lj/Xf8Po4DdJdjKagrlnrlUOq0uf7wPOAUjyDEaBvjHXKudvH/CK8dkuZwMPVNWhwZ590UeFT3AU+C5GR8jfMN72m4x+oWH0ov8lcDfwSeDMRdc8hz7/PfAl4Obxx75F1zzrPh/T9hqW+CyXjq9xgLcxej+B24ALFl3zHPr8TOATjM6AuRn4sUXXPECfrwQOAQ8yGo1fDLwKeNVRr/M7xt+T24b+ufZKUUlqxFabcpEkTclAl6RGGOiS1AgDXZIaYaBLUiMMdOk4kqwm+clF1yFNwkCXjm8VMNC1VDwPXdtKklcAr2O0fsatwP8CH66qvePHv1pVj0pyPfAM4PPAFVV12aJqlrraaotzSTOT5PsZrYfz3Kq6P8njGF2deTyXAq+rqpfMrUCpJ6dctJ28ENhbVfcDVNUyL+omPYSBru0kPHSp0iOMfw/Gb5Ry6ryLkoZioGs7uQp4WZLHA4ynXO4FfnD8+C7g4ePbX2G0jK+0NAx0bRtVdQfw28A/JLmF0fz5HwPPT/JJ4IeBr42b3wocSXJLkksWUrA0Ic9ykaRGOEKXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNeL/AFXeu20gtVg+AAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "n_bins = 20\n", + "\n", + "plt.figure()\n", + "n, bins, patches = plt.hist(RNN_predictions_true[y_test == 1], bins=30, alpha=0.5, normed=True)\n", + "plt.hist(RNN_predictions_true[y_test == 0], bins=bins, alpha=0.5, normed=True)\n", + "plt.xlabel('cut')" + ] + }, + { + "cell_type": "code", + "execution_count": 46, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "ROC AUC: 0.9265346173526295\n" + ] + } + ], + "source": [ + "y_train_neg = y_train*(-1) + 1\n", + "y_test_neg = y_test*(-1) + 1\n", + "RNN_fpr, RNN_tpr, _ = skl.metrics.roc_curve(y_test, RNN_predictions_true, pos_label=1)\n", + "RNN_roc_auc = skl.metrics.roc_auc_score(y_true=y_test, y_score=RNN_predictions_true)\n", + "print(\"ROC AUC:\", RNN_roc_auc)" + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Text(0,0.5,'true positives')" + ] + }, + "execution_count": 32, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYUAAAEKCAYAAAD9xUlFAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzt3XucXHV9//HXZ2bv11x2k5D7hQRIuAUjqKiJghQohmopEkt/YP3JryjyQ1tb/KFosT7qpUprpWqs/NBWQNQqkUvTqiAKBgi3EAKEkEDYJITdJLvZ++zMfPrHOVkmy252kuyZs7vzfj4e89hzznznnM/ZTc5nvud8L+buiIiIACTiDkBEREYPJQUREemnpCAiIv2UFEREpJ+SgoiI9FNSEBGRfkoKIiLST0lBRET6KSmIiEi/krgDOFwNDQ0+d+7cuMMQERlTHnvssRZ3bxyu3JhLCnPnzmX9+vVxhyEiMqaY2cv5lNPtIxER6aekICIi/ZQURESkn5KCiIj0U1IQEZF+kSUFM7vZzF4zs41DvG9m9g0z22JmG8zstKhiERGR/ERZU7gFOPcQ758HLAxfVwDfijAWERHJQ2T9FNz9ATObe4giFwI/8GA+0HVmNsHMjnH3XVHFJCJyQDbrZN1JZ52+TJZ0xunLZslknVQ6S286iztkwnJZ9/7lTBb2d/dRVpIg6447YRnC9WD59e1Bmd50hnT2wHsexgBOuH7QPoIYd7b10FhTBsBZJ0zllFkTIv29xNl5bQbwSs56U7jtDUnBzK4gqE0we/bsggQnUsyyWac3nSXjTibjpLPhctbp6cuSyWZJpZ3edIb2njR9mSx9GSeVydLRkyadDS6ymWxw0W3p6MUdSpJGJpt7cQ1+7u1M0drVR015Sf9xBl6Qsw4v7+lkSm3F0BfRcNuu1h4SCaMkYcH5HCiXff0zY40ZTKmrGNdJwQbZNuifyt1XA6sBli1bNgb/nCKvc3e6+zJ09mbo7A0uqL3pLPu7+8AgHV6E+zLBxbSyNBluC7b39GXY05GiuryEdCZLX9Zp7+ljf3ea6vIkqXTwzfelPZ1Mqi4jlc6ye38P7lCaTPRfdA+8drR2U1tegkPwjTncHoWykgSlCSORMJIJI2nBsgH7e/qYXl9JbUUJZq+/bxbEnTDjhGPqaG7vZdakKhIGCTMSYZlgOfhpc43X2ns4dkpN/zFy3w/WIWlGXzb4xj65poySZIKSMJl09qZprK0gmQg+kzywjzAuCC7UlWXJg49tYBiJxOsxWRhn0oyK0kS4nlM+N7YB52M5PwshzqTQBMzKWZ8J7IwpFpFDcnf296Rpbu8lnc3S0p4ilQm+Je/pSLF5dzvV5SV09KRpau2iuqyEF5s7qK0opSuVZl9XH719wa2DrlRmRGMrTRpmRiqdpaa8hPrKUkqTwUVsZ2s3cydXM7Wugr2dKWZMqKSiNNn/LTphwc+Wjl7mTK6mNGmUJhMkE0Zbdx8zJlQGF+fwVZKw8HaHM7m6nLISo7qshIrSJCXhZytLk1SVJSlJJEgmg88c+GyhLmxy5OJMCmuAq8zsduAMoE3PEyRKfZksXb0Z9vf00dOXYX9Pmt50hlQ6S3tPmr2dKda/vI+sO73h+zv2dbO3M0V3X34X8glVpWSyTlVZkmn1lezrSnH8tFpOqSiluryE0qRRkkzQncowv7Ga7lSG6RMqg2/QyeCCWV9ZGlxQE0ZJ0ihJJKgoTfRfrEsTCUqSRnlJsK4LrYykyJKCmd0GrAAazKwJ+BxQCuDu3wbuAc4HtgBdwIeiikXGr75McGtk+54uXtrTxe79PfT0ZdjbmeLF5g66UhlaOnpp6Ujltb+EQdbhhGPqqCpLctqciUytLaeqLElnKsO0ugpmTKwkYUZNeQlT6sqZXF0WXMiT6vYjY1+UrY9WDfO+Ax+L6vgyPvSmM2xr6eS1/b28vKeT3ft72b2/h+6+DA9v20tbdx+pdPagz1SUJqgpL2VSdSmGcdbxU6mrLKGjN82CxhpqK0qoKiuhvCRB1qGxtpzK0iQNNWVMrC6jVBd3KWJjbuhsGT8yWWdPZy+7WnvY2drNy3u72LijjW0tneztTLGrreeQnz/z2MnUlpdy5sIGZk2sZM7kamZNrNQ3dpGjoKQgkepNZ3i6qY0Ht+zh6R2t9KaztHSkaOnopbm99w3lkwmjtqKEydVlvPv4KUytK2d6fSXHH1PHjAmVTK0rZ0JVGcmE7qOLREFJQY5aNuu81t7Lw9v2sKcjxc7Wbpr2dbOtpZMtzR0HNW+cVlfBkul1LJleR31lKRWlCU6aMYGGmjJmTapiSm25HpyKxEhJQQ5LKp1l8+52XnitnQc2t/BUUysv7+k66MJfVpJgWl0FcyZXccb8SSyZXs
+ { + "cell_type": "code", + "execution_count": 32, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Text(0,0.5,'true positives')" + ] + }, + "execution_count": 32, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "<base64 PNG omitted: ROC curve of the RNN classifier, true positives vs. false positives>", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "plt.figure()\n", + "plt.plot(RNN_fpr, RNN_tpr, label = 'RNN')\n", + "plt.xlabel('false positives')\n", + "plt.ylabel('true positives')\n", + "#plt.legend()" + ] + },
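+ { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "A possible presentation tweak, sketched here as an addition rather than something from the original notebook: quote the AUC in the legend and draw the fpr = tpr diagonal of a random classifier as a reference." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Sketch: RNN ROC curve with the AUC in the label and a random-classifier\n", + "# diagonal as reference\n", + "plt.figure()\n", + "plt.plot(RNN_fpr, RNN_tpr, label = 'RNN (AUC = %.3f)' % RNN_roc_auc)\n", + "plt.plot([0, 1], [0, 1], 'k--', label = 'random guess')\n", + "plt.xlabel('false positives')\n", + "plt.ylabel('true positives')\n", + "plt.legend()" + ] + },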
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "plt.figure()\n", + "plt.plot(RNN_fpr, RNN_tpr, label = 'XGBoost')\n", + "plt.xlabel('false positives')\n", + "plt.ylabel('true positives')\n", + "#plt.legend()" + ] + }, + { + "cell_type": "code", + "execution_count": 40, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 40, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYUAAAEKCAYAAAD9xUlFAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzt3Xd8HPWZ+PHPsyutumRbkqssd2NccMExzYBpCSZgQo7QQoITLtyREB8k4Y7AHcn5Lr+QIwmEckkgIaRADCkEA+YcIJQYMFgGY1wo7paLrGL1tuX5/fEdy7IsW2t5Vytpn/frta+dmZ2ZfWZlzzPfmW8RVcUYY4wB8CU6AGOMMb2HJQVjjDFtLCkYY4xpY0nBGGNMG0sKxhhj2lhSMMYY08aSgjHGmDaWFIwxxrSxpGCMMaZNSqIDOFYFBQU6evToRIdhjDF9yurVqytUtbCr9fpcUhg9ejQlJSWJDsMYY/oUEdkezXp2+8gYY0wbSwrGGGPaWFIwxhjTxpKCMcaYNpYUjDHGtIlbUhCRR0Rkn4isO8LnIiL3icgmEVkrIrPiFYsxxpjoxLOk8Chw4VE+nw9M8F43AD+NYyzGGGOiELd2Cqr6moiMPsoqlwK/UTce6EoRGSAiw1R1T7xiMsaYAyIRJaJKKKIEwxFCYSUYiRCOKK2hCC2hCKoQ9taLqLZNhyNQ2xQkkOIjoooq3jp482764HK3TksoTCjifRYJ4w824A82kBJuwhduRiKt+EIt+MONpISb8IeaaKyvZWBqkNRwM/mzFjBx1tlx/V0S2XhtBLCz3Xypt+ywpCAiN+BKExQXF/dIcMYks0hEaQlFCKsSDiuhiDcdUZqDEcKRCK0hpSUUpq45RDAcIRhWWsMR6ptDhCLuJBuOuJNuRX0LqpDiF8KR9idX917V0Ep1Y5DstJS27+l4Qo4obK9sYHBOOgreibf9ifjgCXhPdTM+n5DiE3c8B9aLHNyme5QMWsilkRxpJIcmsqSZbJrIliay6DAtTWRz6PTBz5qP+dvfyh0K/TgpSCfLOv1TqepDwEMAs2fP7vaf05jeQFVpCoZpaAnT0OJOqC2hCLVNQRAIeSfhYNidTDNS/d4yt7w5GKayvpWstBRC4QjBiFLXHKS2KURWmp/WkLvy3VbZwKCsAK2hCGW1zahCqt/XdtI98NpV3UROWgoK7orZWx4PgRQfqT7B5xP8PsEvblqA2uYgw/MyyElPQeTg5yIubp8IJw7LpbyuhZGDMvEJ+ETweeu4afcuo4V9dc2MH5zd9h3tP0+llcxQLZnhOlJaawgEaylIaSAjXE96qIb0YB2+lv1kawOB1hpSg7WktlaTEqzDp+Gu/8a+VMKBbCKpOWggi0ggBw0MQVOz8aXnoGnZNAZyIC0HAtmQlo2kpCMpASQ1AwKZ+AJZSCALScvCF8hEUjM5xRf/ukGJTAqlwMh280XA7gTFYsxRqSq1zSHK61oIRSJU1LXSGnZXyZX1rXxUVkdWWgr1zSFKqxvJCqSwubyenPRUGltD7G8M0hJ0tw4aW7s+qRyLVL8gIrSGImSnpZCXkUqq351Ud1c3MTo/iyG56VQ1tDJiQAbpqf62q2ifuPeK+hZG5WeR6hdS/T78PqGmKciIARnu5Oy9Unzibn2g5GelEUgRsgIppKf6SfG2zUj1kxnwk+Lz4fe7bQ5sK9LZteBxaKqGhgporoGmKmiuhqb9bnnu/kPnm9rNh452lS6QMQDSB0DGQBhUABkTvPkBkJYL6XnuhJ6W671neyf4HO8En9b3+hDyJDLupcBNIrIEOAWosecJJp6C4QiNLWFqm4M0B8PUNodoCYVpDUWoaw5R1dBKyfb9RFRp8T7ftb+JqoZWmoLRncgHZKYSjiiZAT9D8zLY39jKpKE5TE9PJSsthVS/kOL30dQaZmxhFk2tYYYPyHBX0H53wszLSHUnVJ+Q4hdSfD7SU31tJ+tUn48Uv5CW4uZjfqJNlFAL1O6Cml1QUwq1pe5k3lgFLbXQUget9dBS76Zb6qC17sj7C2QfPLFnDID8cQenMwYe+ln7+bRc6IEr8t4qbklBRH4PzAMKRKQU+A6QCqCqPwOWARcBm4BG4EvxisX0X8GwuzWyo7KRbZWNlNU20xwMU9XQyubyehpbw1TUt1BR3xrV/nwCEYUTh+WSGfAza9RAhuSkkRnw09AaZmhuOiMGZuATITsthcG5aeRnBdyJ3J+8J5IuRSLuhF6zE8o/gLL10NoA9fugbg/s3+7eO95BPnBiT89zV+PpAyCvyLsiz4Hc4ZA95PATe3oepAQScqh9XTxrH13dxecKfC1e32/6h5ZQmK0VDeyrbWF7ZQNltS2U1TbTFAzz1tYqapqCtIYih2yTnuojOy2VQVmpCMJ5k4aQm5FCfUuIcYXZ5KSnkBlIIS3FR0ShMCeNjFQ/BdkBBmYFSLWTe/RU3ZV8/V6o2wv1Ze7kXld2cNmBV7jl8O0HjYWcYTB2HgwcBXkj3Uk/rwhyR0Bqek8fUdLrq7e9TD8QjiiVDS3sqW5md3UT26saWberhq0VDVQ1tLKn5ui1M84Yn09OWipnTChg5MAMRuVnMXJghl2xx0IkAo0VRz/R15e5V7iTUlhaLuQMdVfxI+e4E39WIaTnQmYBFH0CMvPBb6eg3sb+IiauWkJh3i+t4fVNlby/q5qWUISK+lYq6lsorzv8ytHvE3LSU8jPCnDupMEMyU1jeF4Gk4blMmJABkNy0xiQGcDv6yf30RNB1T1srdkJtXvcCb++DEpXQbDZW74LIqHDt80YCNlDIWcIFExwJ/0DJ/+cYW559lAIZPb8cZmYsKRgjlskouyra+GtrZVU1reyu7qJ0v1NbK1oYFN5/SHVG4fmpjNleC5ThueSl5FKeqqPaSMGUJAdYOSgTAbnpPWfB6eJEom4mjh1e9wVfe1uqN7uHt6WbYD9W939/Y4yC0AjMP48dxvnwP36Ayf97CF2OycJWFIwx6Q1FOGjsjo+3lfHax9V8F5pNdsrGw858QdSfAz
NTWdUfianjB3ElOG5nDxqIOMKs+2EfzxUXQ2dxsqDt3R2veNq4dTuandbZ+/hV/m+FHcFP3gSjD7DnfQHjHT37XOGQfZg8Kcm5rhMr2JJwRxVQ0uI59buYU9NMx/vq+PZtQdrDael+BiVn8m1pxQztjCbcYXZjB+czeCcNHx2e6f7gk2wdx3s2wCrH3VJoLnaVcU8UsOpwhPdFX3BRPeeM6zd+xDIGW73701U7F+JAVzjrI176nhjcwX1LSHW7aqlORjmjc0VbV0CDMtLZ8H04UwbkcdJRXnMLB5IIMUe6nZLSz1U72j32u7eq7bAvo2HnvzziuGkK73GUVnuIW7ucPfg9sCVfhLXqzexZUkhydW3hPj9Wzv43rKNh302NDeda04p5uyJgzltXD7ZafbPJWqqrg5+5Sb3KlsHlZsh2AjVO13DrPZS0mFAMQwYBRM/BcNnwpCp7jaPXeGbHmT/2pLMx2V1PPTaFjbureXjsnpavDr+4wdnc9aEQi4/uYgxBVlkBPwJjrSXU3XdK1TvgIZyd9Lf+iqEWt2tnppdh7a29aW6K/28Ihg919XJL5joksCAYndP3563mF7AkkIS2FHZyBMlO/jzO7sOqfs/tiCLycNzufzkIs6eWGgPgTtSdQ90q7Z08trqul5oL7PA1b0vPAHGnAX5E1zXCvnjXTLwWaI1vZ8lhX7qg721LHl7Jys2VbBpn6t+mJuewtfOGccl04dzwpAcSwLtqbp7+dtfdyf9ys2w+x1XCjhA/O4Kf9BYGHmqex84CrIGw6AxkDkocfEbEyOWFPqZbRUNXPLACuqaXZXEk0cN5KvzxnHB5CHMGDnAEkEk4hpnVXzkXuUfuveyDdBS49ZJzXQn/LHnuKv+ggkwdJp3f9+qbZr+zZJCP7GzqpF7X/yYP73jHmDOLB7AA9fMYsSAjARHliDBJleHv/Rtd+Lft9F1r1y3F0JNB9fLGORO/NP+AQZPdrd9Ciba/X2TtCwp9HG7q5t4+O9beOytHbSGIgzJTeO+q2Zyytj8RIfWs6p3wtbX3C2fnW9BxccH+8wXHwyf5W7vjD8fhkyBwknu5J+VZL+TMV2wpNBHrdpWxX8/u4F1u2sJR5R5JxRyy/kTmT5yQKJD6xmRiLvts+UVeOP+g1U8U9Jh1Okw+iwYc6a7+s8bafX4jYmSJYU+pqk1zLf+8B7Pve9aFp89sZBvXzSJSUNzExxZnDVWuRJAaQnsKvG6d/Bq/6Rmwhn/AhMvdA+ALQEY022WFPqQ5ev3cvOSNTQFw5w5oYDvXDKF8YOzEx1W/NTvgzcfhE0vukFZUFcDaMgUmHY5jJgNRbNd1U9LBMbEhCWFPmBzeT13PPU+K7dUAXDPldO5bGZRgqOKg3AQ3vu968L5o7+6jt0ARp8J59zhOnIbNsO6ZTYmjiwp9HJvbq7k6odXkuITrjmlmBvPHsfIQf3kpBgOuZbApavcs4Gtrx28JTR8lisFTPscTPlMQsM0JplYUuilQuEI97z4EQ++vBmA+6+eyfxpwxIcVQw0VsHqX8Hml2HXatcXELgGYCcugEkXwYRPWX8/xiSI/c/rhZ5bu4f/eHodVQ2tFGSn8b3LpvKpKUMTHVb31Ze7qqIblsKa3x1cPuef3FCNRZ9w/f9Y2wBjEs6SQi/z5Kqd/Ouf1pIZ8PPjK6azYPrwvjnmsCq8/wfYuBQ2PnNw+clfghmfh5GfSFxsxpgjsqTQi9z+1Ps8/tYOigdl8tRXTyc/Oy3RIR2blnrYuRI+fB42/831IQRw0lVuiMfx51v/QMb0cpYUeonvPL2Ox9/awZDcNJ65aS55mX2kj52WOnjlLpcE9m04uHzwFLj4Hph2BaT142qzxvQzlhQSLBJRvr7kXZ5bu4fheen85aYz+kZCaKyClT+FkkegscI1IJv3bRg4GiZ80koExvRRlhQSaF9dM3O+9xIAc8YM4pGFn+j9o5vteQ9e/wms+5ObHzvPtSEYOSeRURljYqSXn4H6r/0NrW0JYd4JhTxy3Sd692D3e9fBG/fB2ifc/MkLYfaXYdj0hIZljIktSwoJEIkon/nf1wG4bf4k/vnscQmO6AjKP4KNT0PJo67DudQsOPVrcMo/ucFljDH9jiWFHqaq3PjYarZXNjKzeEDvSwiqrlHZ338EHy5zy1KzYNx58NmHratpY/o5Swo97L6XNrF8fRlzRg/isa+ckuhwDqorg78tdi2Na3eBPw3O/KarPTR4UqKjM8b0EEsKPejXb2zjnhc/AuA3188hNdGN0lRhx5uw6pew/s+gERh1Bsy9Bab+g9UgMiYJxTUpiMiFwE8AP/ALVb2rw+fFwK+BAd46t6nqsnjGlCi7qpv47+dcPf7lN59Feqo/ccE0VsG7v4M1j0P5RkjLgzk3uAfHhSckLi5jTMLFLSmIiB94ELgAKAVWichSVW3Xwol/B55U1Z+KyGRgGTA6XjEliqpyw29KCEWU5TefxQlDcxITSNkGWPMYvPmAmx82Axbc70oFgazExGSM6VXiWVKYA2xS1S0AIrIEuBRonxQUODBkWB6wO47xJMz9f9vE+t21fHXeuJ5PCKrw8V/hmX+Buj3gS4FJF8Ppi6C4Fz3TMMb0CvFMCiOAne3mS4GOZ6HvAn8Vka8DWcD5cYwnIeqag/z4hY/ICvhZdN6Envvi1kZYuwRKfgV710L2EDjtJjdsZfbgnovDGNOnxDMpdNYSSzvMXw08qqo/EpHTgN+KyFRVjRyyI5EbgBsAiouL4xJsvNzx1DoA/vfak3vmOUL1Dvfg+PV73fzgya4PoulXQ2pG/L/fGNOnxTMplAIj280XcfjtoeuBCwFU9U0RSQcKgH3tV1LVh4CHAGbPnt0xsfRaf1xdytL3dnPJ9OGcPbEwvl+m6h4e//XfobkaRs2FmZ93ycDGKTDGRCmeSWEVMEFExgC7gKuAazqsswM4D3hURE4E0oHyOMbUo37xd9d19H9cfGL8viTY7Hooffn/Qdn7btmNb7jB7Y0x5hjFLSmoakhEbgKW46qbPqKq60VkMVCiqkuBbwIPi8gtuFtLC1W1z5QEjqa6sZUP9tYxaWgOg3PS4/MlW1+Dx690Q1oOKIZP/whmXAupcfo+Y0y/F9d2Cl6bg2Udlt3ZbnoDcEY8Y0iUs+9+BYD//szU2O882AxPfw3W/RH8AVjwAJx0JaQEYv9dxpikYi2a4+D59/dQ0xTk7ImFzB4dw1bBoVbXH9Er34fyD+CUG+Gc2yE9t+ttjTEmCpYUYsx1ePcOEONSwqYX4dlbXO2inGHwuUdhymWx278xxmBJIeYef3sHADecNZaRgzKPf4eqbrjLV+8C8cEl97mB7/32pzPGxJ6dWWKoJRTmgb9tAohNQ7XqHfD4VbBvPeSNhK+9DYEYJBpjjDmCBHfT2b88WVLKnppmfvAP045/WM2GCnjscy4hnHUrfH21JQRjTNxZSSGGXv3QNbG4dMaI7u9EFd75jeurCIULfwCn/nNsAjTGmC5YUoiRmsYgL24s44zx+cfXncVT/+TGQS4+DT75PSg6OXZBGm
NMFywpxEBTa5hzf/QKAJ+dWdT9Ha36pUsIg6fAwmXgs7t7xpieZWedGLj64ZVUNrTylTPH8A8ndzMpVHwMy2+HodPgH1+0hGCMSQg78xwnVWXNzmqG56Vzx6cnd28nLXXwxLWQmgnXPGkPlI0xCWO3j47TCxvKAPj8qaO6t4OqLfDzs6GlFq75A+QOj2F0xhhzbKykcJzuev4DAK47ffSxbxwJw0PzXEI4/7sw8ZMxjMwYY46dJYXj8O6O/WypaODzpxR3r13Cby+D5hqY+w2Ye0vsAzTGmGNkSeE4/OivHwHdLCWsfhS2vuqmz7vzqKsaY0xPsaRwHFZsquCUMYOYOCTn2DZ8b4lrnDb2HPjGRhsZzRjTa9iD5m76uKwOgBnFA6LfSBXWPgl/+SqMOQuuXmID4hhjehVLCt10x1/WAbBgepS1hWp2udLBphcgqxCufMwSgjGm17Gk0A0toTBvb61iSG4aU4bndb1B9U641xtbYe43YN63bZQ0Y0yvZEmhG55buweAr5w5tuuVq3fCz7wRR6/6PUy6KI6RGWPM8bEHzd2wtaIBgCs+MfLoK1ZudiWEUIurYWQJwRjTy1lJoRv+/nEFxYMyyU1PPfJKzbXwyKfc9BW/tYZpxpg+ocuSgoiME5E0b3qeiCwSkWOoctO/NAfDrNlZzYnDjlINVdWNp9xQ7koIlhCMMX1ENLeP/gSERWQ88EtgDPB4XKPqxT7Y66qinjgs98grrX4U1v0RzrkDzvxmzwRmjDExEE1SiKhqCLgMuFdVbwGGxTes3us/vKqoF007wk/w8vfh2Zth3LmWEIwxfU40SSEoIlcD1wHPesuOcjO9fwuGIwCdt2LetRpevcu1Q7jit+A7jhHYjDEmAaJJCl8CTgO+p6pbRWQM8Lv4htU7NQfDfFRWx7WnFh/+YSQMj13hpr/yMqRl92xwxhgTA13WPlLVDSLyb0CxN78VuCvegfVG7+zYT0Rh9qhBh3/45xugsQLOuhUGdFFV1Rhjeqloah9dAqwB/s+bnyEiS+MdWG/0y79vBWDGyA6Vr8rWuwfLRZ+Ac/89AZEZY0xsRHP76LvAHKAaQFXX4GogJZW65iAvfbCPGSMHMLog6+AHkQi8+gNA4KqkrZRljOknokkKIVWt6bBM4xFMb/bnd3YB8LnZRYd+sPpXsOFpmHENZA9OQGTGGBM70bRoXici1wB+EZkALALeiG9Yvc/7u1xevGzmiIMLIxF47huQPwEufTBBkRljTOxEU1L4OjAFaME1WqsBbo5m5yJyoYh8KCKbROS2I6xzhYhsEJH1ItJr77+sLa1mdH4mmYF2efT1e937lM/YQDnGmH4hmpLCCap6B3DHsexYRPzAg8AFQCmwSkSWquqGdutMAL4NnKGq+0Wk195/+aisnrMmFh5c0FQNL/0nZA2GebcnLjBjjImhaEoKPxaRD0Tkv0RkyjHsew6wSVW3qGorsAS4tMM6XwEeVNX9AKq67xj232MOjLI2YXC7tgfPLHLv590JPuts1hjTP3R5NlPVc4B5QDnwkIi8LyLR1LscAexsN1/qLWtvIjBRRF4XkZUicmFnOxKRG0SkRERKysvLo/jq2Fq32z1PaCsp7FnrHi5nD4WZ1/Z4PMYYEy9RXeKq6l5VvQ/4Z1ybhTuj2Kyzm+wday2lABNwSedq4Bed9cCqqg+p6mxVnV1YWNjx47hbv6sWgIlDvJLCcu920ReesmcJxph+JZrGayeKyHdFZB3wAK7mUVEXm4ErGbRv2lsE7O5knadVNei1lP4QlyR6ld01TQAMyUmH3e/Ctr+7HlCHTE5wZMYYE1vRlBR+BewHPqmqZ6vqT6O8978KmCAiY0QkAFwFdGwJ/RfgHAARKcDdTtoSdfQ9pLyuhbGFWfgE+PUCyMyHU/4p0WEZY0zMRdP30and2bGqhkTkJmA54AceUdX1IrIYKFHVpd5nnxSRDUAYuFVVK7vzffH0UVk9804ohHV/gpZaOOtfIT0v0WEZY0zMHTEpiMiTqnqFiLzPoc8CBFBVPamrnavqMmBZh2V3tptW4Bveq1eqaQxS0xSkIDMF/nS9W3jqjYkNyhhj4uRoJYV/8d4v7olAequ/b3K1nU6TtW7BhXdBZie9pBpjTD9wxGcKqrrHm/yqqm5v/wK+2jPhJd4z77ln4+fs/CmkZsHMLyQ4ImOMiZ9oHjRf0Mmy+bEOpLdauaWKDJrx73sfpl1ug+cYY/q1oz1TuBFXIhgrcuDeCQA5wOvxDqw3CIYj1DQFubvwFagDik9LdEjGGBNXR3um8DjwPPB9oH1ndnWqWhXXqHqJd3dUA3CmlkBaLpx0RYIjMsaY+Dra7SNV1W3A13DXyQdeiEhSPGldta2K832rGVq/wQ2z6fMnOiRjjImrrkoKFwOrcVVS2/fnoMDYOMbVK7z2UTlf9r+KpmYhc76S6HCMMSbujpgUVPVi7z3pht48oGzbei5IW43MWQSpGYkOxxhj4i6avo/OEJEsb/paEfmxiBTHP7TEWrerhq/4l+FD4dSkqYFrjEly0VRJ/SnQKCLTgX8FtgO/jWtUvcBDr23h8ykvuZmcoYkNxhhjekg0SSHkdUdxKfATVf0JrlpqvzZj/3I3cdKViQ3EGGN6UDTDcdaJyLeBLwBnesNspsY3rMTL3/Oq68bv4nsSHYoxxvSYaEoKVwItwJdVdS9u9LS74xpVgtXV13OObw1v5nwKAlmJDscYY3pMNMNx7gUeA/JE5GKgWVV/E/fIEuitF/9ErjSSN/tziQ7FGGN6VDS1j64A3gY+B1wBvCUil8c7sEQas+F/qdVMxp96SaJDMcaYHhXNM4U7gE8cGG1NRAqBF4E/xjOwhGmsYlTrR7ydMovT09ITHY0xxvSoaJ4p+DoMv1kZ5XZ9kn78AilE2Dj2y4kOxRhjelw0JYX/E5HlwO+9+SvpMJpaf7J/4yukaRoVA2ckOhRjjOlx0YzRfKuIfBaYi+v/6CFVfSrukSWCKlmbnmFFZDKfmjYi0dEYY0yPi6akAPAGEAYiwKr4hZNgW18lLVTHBh3FF/OtKqoxJvlEU/voH3G1jy4DLgdWikj/vOG++WUAngyfTV5mv2+fZ4wxh4mmpHArMFNVKwFEJB9XcngknoElQuTjv7IqMomcoRMSHYoxxiRENLWISvEG1/HUATvjE04C7d+Gb98Gloc/wXWnj0p0NMYYkxDRlBR24RqsPY0bXOdS4G0R+QaAqv44jvH1nC2vALAiMpXPFQ1IbCzGGJMg0SSFzd7rgKe99/7VU+qmF6lKGcxHzUWMH5yd6GiMMSYhoqmS+p89EUhCtdTBxmd4L2s+/kYfqf5+2zbPGGOOys5+AO8tAeDd4CjOnliY4GCMMSZxLCkAlJZAIJv7a+dSNNDGYjbGJC9LCgA73iQ89lzUfg5jTJKLpvHaRBF5SUTWefMnici/xz+0HlJfDtXbKc+ZBMCs4oEJDsgYYxInmkvjh4FvA0EAVV0LXBXNzkXkQhH5UEQ2ichtR1nvchFREZkdzX5j6mM3FvNmcW0TxhZa9xbGm
OQVTVLIVNW3OywLdbWRN5bzg8B8YDJwtYhM7mS9HGAR8FYUscRe3R4AXm05AYBR1ueRMSaJRZMUKkRkHK7hGt6oa3ui2G4OsElVt6hqK7AE1/Cto/8C/gdoji7kGKveAYFsNlZFSEvxkZdhfR4ZY5JXNEnha8DPgUkisgu4Gbgxiu1GcGh3GKXesjYiMhMYqarPRhduHGz6GxSfxtaKBs6dNDhhYRhjTG/QZVLwrvTPBwqBSao6V1W3RbFv6Wx3bR+K+IB7gG92uSORG0SkRERKysvLo/jqKLXUQW0pkeLT2VXdxIgBVh3VGJPcumzRLCJ3dpgHQFUXd7FpKTCy3XwRsLvdfA4wFXjF2+dQYKmILFDVkvY7UtWHgIcAZs+ercTKhqUAtGQOQRWG5NqYzMaY5BbN7aOGdq8w7sHx6Ci2WwVMEJExIhLA1VhaeuBDVa1R1QJVHa2qo4GVwGEJIa7qXI4qKzgFgLDGLt8YY0xfFE3fRz9qPy8iP6Tdyf0o24VE5CZgOeAHHlHV9SKyGChR1S73EXcVmwAoqUgDYFyhdYRnjElu0Q7H2V4mMDaaFVV1GbCsw7I7j7DuvG7Ecnyqd0DBCbSEIwCcMKR/dfxqjDHHKppnCu9z8AGxH/fAuavnCb1fOAg7V8IJF7G7ugmAgpxAgoMyxpjEiqakcHG76RBQpqpdNl7r9Wp2gkZg+Aze+aCaguwAmYHuFJyMMab/OOpZ0Ks2+pyqTu2heHrO9jfd+8hTKC9pwZ4xG2NMF7WPVDUCvCcixT0UT8/ZtwHEB6PPJBiOMNiqoxpjTFS3j4YB60XkbVy1VABUdUHcouoJ9WWQWwQibK9s5LMzR3S9jTHG9HPRJIX+ORxnYyVkDiIScfeNWr0aSMYYk8yiSQoXqeq/tV8gIj8AXo1PSD2krgwGjqK8vgWAk4ryEhyQMcYkXjQtmi/oZNn8WAd03xTRAAAULElEQVTS4xrKIauADbtrAchNt95RjTHmiCUFEbkR+CowVkTWtvsoB3g93oHFVTjkkkLOMHZ5bRSmWUnBGGOOevvoceB54PtA+1HT6lS1Kq5RxVv1dkAhq5DGZtfkwjrDM8aYoyQFVa0BaoCrey6cHlLxkXvPGMi6zbVkBvzkZ1lrZmOMieaZQv9T6TrCY/hMPiqrY1BWoK1LcGOMSWbJmRRqd0NqJuSPozUcsY7wjDHGk5xJoXQVZBUC0BqKkJdpNY+MMQaSOSmkZrrJ/U2kpSTnz2CMMR0l39kwEnbv+eMA8PuE+pZwAgMyxpjeI/mSwr6N7n3U6YQjSjiijLcR14wxBkjGpFC3x70Pn8Xa0moAstL8CQzIGGN6j+RLCsFG956WzYqPKwCYd0JhAgMyxpjeI/mGGqv42L2nZJDid20ThuVlJDAgY4zpPZKvpJDqJYCsAkJed9kBq31kjDFAMiaFVm+coNRMgt5YCik+a81sjDGQjEmh/AP37k8lFI6Q4hPr4sIYYzzJlxTE5xquiVDbHLSGa8YY007ynREbq6BgAgD1zSEGWu+oxhjTJvmSQqgZAq4DvE3l9WQGrI2CMcYckHxJofwDSEkDIOD3Ue8NsmOMMSYZk0LGIGisBGDd7lomD7dhOI0x5oDkSwqRIBRMBFy32VbxyBhjDkq+pBAOgj9Aa8g1XBtqYzMbY0yb5EsKoRZICVDV0ArA6IKsBAdkjDG9R1yTgohcKCIfisgmEbmtk8+/ISIbRGStiLwkIqPiGQ8ArfUQyKK8rgWAjFSrfWSMMQfELSmIiB94EJgPTAauFpHJHVZ7F5itqicBfwT+J17xABAOtVVJfXtbFQDjB9tYCsYYc0A8SwpzgE2qukVVW4ElwKXtV1DVl1XV68ualUBRHOOB1jr3Hshq6wzvhCE5cf1KY4zpS+KZFEYAO9vNl3rLjuR64PnOPhCRG0SkRERKysvLux9RkxtUh8xBbNxTC0BOevL1Hm6MMUcSz6TQWWVP7XRFkWuB2cDdnX2uqg+p6mxVnV1YeBwD4rS4REBaLhkBlwx81kOqMca0iedlcikwst18EbC740oicj5wB3C2qrbEMR5o8EoZ6bn8/u0dFA/KjOvXGWNMXxPPksIqYIKIjBGRAHAVsLT9CiIyE/g5sEBV98UxFqfeSwp5RYjYOArGGNNR3JKCqoaAm4DlwEbgSVVdLyKLRWSBt9rdQDbwBxFZIyJLj7C72Ag1AdAQSUUV5k8bGtevM8aYviauT1lVdRmwrMOyO9tNnx/P7z9MyDVYe39vMwBTrd8jY4w5RHK1aA662q9ry1xSmDVqYCKjMcaYXie5ksLudwEoa3SHPTDTBtgxxpj2kispNFQAB6uhBmwoTmOMOURynRXFB5kFvLujmlxrtGaMMYdJrqSwZw0MncaHZXVk2DCcxhhzmORKClkFoGHCEaUgOy3R0RhjTK+TXPdQwiHIKyaiatVRjenlgsEgpaWlNDc3JzqUPiU9PZ2ioiJSU1O7tX2SJYVWIv5UmoMRCnKs5pExvVlpaSk5OTmMHj0asXFzo6KqVFZWUlpaypgxY7q1j+S6fdSwj+pm1ydfwG/PFIzpzZqbm8nPz7eEcAxEhPz8/OMqXSVXUgCa6/cDMGvUgARHYozpiiWEY3e8v1nyJIVwEID9GcUA5GV0736bMSZ5+P1+ZsyYwdSpU7nkkkuornZjsmzbtg0R4f77729b96abbuLRRx8FYOHChYwYMYKWFtfxc0VFBaNHj+7p8LsleZJCawMAH1W65DC6ICuR0Rhj+oCMjAzWrFnDunXrGDRoEA8++GDbZ4MHD+YnP/kJra2tnW7r9/t55JFHeirUmEmepBBy99j8wQZS/UJuupUUjDHRO+2009i1a1fbfGFhIeeddx6//vWvO13/5ptv5p577iEUCvVUiDGRPLWPIu4PU+kfzEQbl9mYPuU/n1nPht21Md3n5OG5fOeSKVGtGw6Heemll7j++usPWX7bbbcxf/58vvzlLx+2TXFxMXPnzuW3v/0tl1xySUxi7gnJU1LwkkKrCmnW55ExJgpNTU3MmDGD/Px8qqqquOCCCw75fMyYMcyZM4fHH3+80+1vv/127r77biKRSE+EGxNJVFIIA1DXChmZVh3VmL4k2iv6WDvwTKGmpoaLL76YBx98kEWLFh2yzu23387ll1/OWWedddj248ePZ8aMGTz55JM9FfJxS55LZq+ksK2qhaG5GQkOxhjTl+Tl5XHffffxwx/+kGAweMhnkyZNYvLkyTz77LOdbnvHHXfwwx/+sCfCjInkSQpe7aMQfhRNcDDGmL5m5syZTJ8+nSVLlhz22R133EFpaWmn202ZMoVZs2bFO7yYSZ7bR2FXbSxAkOF5VlIwxnStvr7+kPlnnnmmbXrdunVt09OnTz/kucGB9goH/PnPf45PgHGQPCUF75lCOQMYN9jaKBhjTGeSJyl4JYWQ+hmSm57gYIwxpndKnqTgPWgOksIYa81sjDGdSp6k0OIavrSSwpAcKykYY0xnkigpuAdG1ZqNz2c9LxpjTGeSJymoqxlQmJeZ4ECMMab3Sp6k4LVN
yM20EdeMMdHZuXMnY8aMoaqqCoD9+/czZswYtm/fzscff8zFF1/MuHHjOPnkkznnnHN47bXXAFcltbCwkBkzZjBlyhQuv/xyGhsbYxbXmjVrWLZsWcz2117yJAWvpDA4x9ooGGOiM3LkSG688UZuu+02wHWAd8MNNzBkyBA+/elPc8MNN7B582ZWr17N/fffz5YtW9q2vfLKK1mzZg3r168nEAjwxBNPxCwuSwqxoK6k0He6pTLG9Aa33HILK1eu5N5772XFihV885vf5LHHHuO0005jwYIFbetNnTqVhQsXHrZ9KBSioaGBgQMHArB9+3bOO+88TjrpJM477zx27Nhx1OV/+MMfmDp1KtOnT+ess86itbWVO++8kyeeeIIZM2bENNlAMrVo9pJC0cDsBAdijDlmz98Ge9+P7T6HToP5d3W5WmpqKnfffTcXXnghf/3rXwkEAqxfv77LriueeOIJVqxYwZ49e5g4cWJb99k33XQTX/ziF7nuuut45JFHWLRoEX/5y1+OuHzx4sUsX76cESNGUF1dTSAQYPHixZSUlPDAAw/E5KdoL2lKCmGvRbPPus02xhyj559/nmHDhh3StUV7l112GVOnTuWzn/1s27IDt4/27t3LtGnTuPvuuwF48803ueaaawD4whe+wIoVK466/IwzzmDhwoU8/PDDhMPhuB3jAXEtKYjIhcBPAD/wC1W9q8PnacBvgJOBSuBKVd0Wj1j2NzRTAORnWRsFY/qcKK7o42XNmjW88MILrFy5krlz53LVVVcxZcqUtofKAE899RQlJSV861vfOmx7EeGSSy7h/vvvb3s20fHzzhxY/rOf/Yy33nqL5557jhkzZrBmzZoYHVnn4nbZLCJ+4EFgPjAZuFpEJndY7Xpgv6qOB+4BfhCveMpr3XCc44fkxusrjDH9jKpy4403cu+991JcXMytt97Kt771La655hpef/11li5d2rbu0WoXrVixgnHjxgFw+umnt/W0+thjjzF37tyjLt+8eTOnnHIKixcvpqCggJ07d5KTk0NdXV1cjhlVjcsLOA1Y3m7+28C3O6yzHDjNm04BKgA52n5PPvlk7Y53fr9Y9Tu5unHrrm5tb4zpWRs2bEh0CPrzn/9cr7jiirb5UCiks2bN0ldeeUU3btyo8+fP1zFjxuipp56qF1xwgb7wwguqqvqrX/1KCwoKdPr06Tpt2jSdP3++lpWVqarq1q1b9ZxzztFp06bpueeeq9u3bz/q8ssuu0ynTp2qU6ZM0UWLFmkkEtHKykqdPXu2Tp8+XZcsWXJY3J39dkCJRnHuFtX4jC0gIpcDF6rqP3rzXwBOUdWb2q2zzlun1Jvf7K1TcaT9zp49W0tKSo45nrd/9x3mbLqXmpu3kTdg4DFvb4zpWRs3buTEE09MdBh9Ume/nYisVtXZXW0bz6eund0o65iBolkHEblBREpEpKS8vLxbwaQMPZGV2eeTlWHPFIwx5kji+aC5FBjZbr4I2H2EdUpFJAXIA6o67khVHwIeAldS6E4ws86/Cs6/qjubGmNM0ohnSWEVMEFExohIALgKWNphnaXAdd705cDfNF73s4wxxnQpbiUFVQ2JyE24h8l+4BFVXS8ii3EPPJYCvwR+KyKbcCUEu5Q3xrRR1SNW2TSdO97r6ri2U1DVZcCyDsvubDfdDHwunjEYY/qm9PR0Kisryc/Pt8QQJVWlsrKS9PTuPztNnm4ujDF9SlFREaWlpXS3ckmySk9Pp6ioqNvbW1IwxvRKqampjBkzJtFhJB3rCMgYY0wbSwrGGGPaWFIwxhjTJm7dXMSLiJQD27u5eQGuf6VkYsecHOyYk8PxHPMoVS3saqU+lxSOh4iURNP3R39ix5wc7JiTQ08cs90+MsYY08aSgjHGmDbJlhQeSnQACWDHnBzsmJND3I85qZ4pGGOMObpkKykYY4w5in6ZFETkQhH5UEQ2ichhI2WLSJqIPOF9/paIjO75KGMrimP+hohsEJG1IvKSiIxKRJyx1NUxt1vvchFREenzNVWiOWYRucL7W68Xkcd7OsZYi+LfdrGIvCwi73r/vi9KRJyxIiKPiMg+b2TKzj4XEbnP+z3WisismAYQzZidfemF66Z7MzAWCADvAZM7rPNV4Gfe9FXAE4mOuweO+Rwg05u+MRmO2VsvB3gNWAnMTnTcPfB3ngC8Cwz05gcnOu4eOOaHgBu96cnAtkTHfZzHfBYwC1h3hM8vAp7HjVx5KvBWLL+/P5YU5gCbVHWLqrYCS4BLO6xzKfBrb/qPwHnSt/vm7fKYVfVlVW30ZlfiRsLry6L5OwP8F/A/QHNPBhcn0RzzV4AHVXU/gKru6+EYYy2aY1Yg15vO4/ARHvsUVX2NTkagbOdS4DfqrAQGiMiwWH1/f0wKI4Cd7eZLvWWdrqOqIaAGyO+R6OIjmmNu73rclUZf1uUxi8hMYKSqPtuTgcVRNH/nicBEEXldRFaKyIU9Fl18RHPM3wWuFZFS3PgtX++Z0BLmWP+/H5P+2HV2Z1f8HatYRbNOXxL18YjItcBs4Oy4RhR/Rz1mEfEB9wALeyqgHhDN3zkFdwtpHq40+HcRmaqq1XGOLV6iOeargUdV9UcichpuNMepqhqJf3gJEdfzV38sKZQCI9vNF3F4cbJtHRFJwRU5j1Zc6+2iOWZE5HzgDmCBqrb0UGzx0tUx5wBTgVdEZBvu3uvSPv6wOdp/20+ralBVtwIf4pJEXxXNMV8PPAmgqm8C6bg+gvqrqP6/d1d/TAqrgAkiMkZEArgHyUs7rLMUuM6bvhz4m3pPcPqoLo/Zu5Xyc1xC6Ov3maGLY1bVGlUtUNXRqjoa9xxlgaqWJCbcmIjm3/ZfcJUKEJEC3O2kLT0aZWxFc8w7gPMAROREXFLoz8O1LQW+6NVCOhWoUdU9sdp5v7t9pKohEbkJWI6rufCIqq4XkcVAiaouBX6JK2JuwpUQrkpcxMcvymO+G8gG/uA9U9+hqgsSFvRxivKY+5Uoj3k58EkR2QCEgVtVtTJxUR+fKI/5m8DDInIL7jbKwr58kSciv8fd/ivwnpN8B0gFUNWf4Z6bXARsAhqBL8X0+/vwb2eMMSbG+uPtI2OMMd1kScEYY0wbSwrGGGPaWFIwxhjTxpKCMcaYNpYUTJ8nIotEZKOIPHaUdeaJSK/o7kJEFhzo7VNEPiMik9t9tthrZGhMQliVVNPnicgHwHyvBe+R1pkHfEtVL+6xwKIgIo8Cz6rqHxMdizFgJQXTx4nIz3DdKi8VkVtEZI6IvOH1rf+GiJzQyTZni8ga7/WuiOR4y28VkVVeH/X/eYTvqxeRH4nIO964FIXe8hleB3RrReQpERnoLV8kB8exWOItWygiD4jI6cAC4G4vlnEi8qi48R/mi8iT7b53nog8401/UkTe9GL4g4hke8vvavddP4zl72ySSKL7DreXvY73BWwDCrzpXCDFmz4f+JM3PQ93RQ7wDHCGN52Na9n/SVy//IK7WHoWOKuT71Lg8970ncAD3vRa4GxvejFwrze9G0jzpgd47wvbbfcocHm
7/T+K63olBdd9Q5a3/KfAtbg+fV5rt/zfvDgG4fo5kvbfZS97HevLSgqmv8nDdeWxDtdL6pRO1nkd+LGILMKdPEO4pPBJ3AA17wCT6LwjuQjwhDf9O2CuiOR5+3nVW/5r3EAp4JLFY17vtKFoD8KL6f+AS7xOGz8NPI3r2G8y8LqIrMH14TUKqMWNGfELEfksrvsDY45Zv+v7yCS9/wJeVtXLxA2z+krHFVT1LhF5Dtd/zErvwa4A31fVnx/j93X1UO7TuASxAPgPEeksSR3JE8DXcP1zrVLVOnEdV72gqld3XFlE5uA6hrsKuAk49xi+yxjAnimY/icP2OVNL+xsBREZp6rvq+oPgBJcqWA58OV29+dHiMjgTjb34W7vAFwDrFDVGmC/iJzpLf8C8Kq4MR1GqurLwL8CA3C3q9qrw3Xz3ZlXcMMyfoWDpZOVwBkiMt6LM1NEJnpx56nqMuBmYMYR9mnMUVlJwfQ3/wP8WkS+AfztCOvcLCLn4HoR3QA8r6otXrfLb3q9yNbj7uF37Ga8AZgiIqtxI/Zd6S2/DviZiGTiuqr+Eq5Xz995t5cEuEdVq+XQkV+X4Hr4XMTBZAOAqoa9arQLvf2jquUishD4vYikeav+Oy65PC0i6d533dLlL2VMJ6xKqjHHQETqVbXj1b4x/YbdPjLGGNPGSgrGGGPaWEnBGGNMG0sKxhhj2lhSMMYY08aSgjHGmDaWFIwxxrSxpGCMMabN/wcLxbtulFNOAQAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "plt.figure()\n", + "plt.plot(RNN_fpr, RNN_tpr, label = 'RNN')\n", + "plt.plot(fpr, tpr, label = 'XGBoost')\n", + "plt.xlabel('false positives')\n", + "plt.ylabel('true positives')\n", + "plt.legend()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.5" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/X_test.pkl b/X_test.pkl new file mode 100644 index 0000000..d1dedc2 --- /dev/null +++ b/X_test.pkl Binary files differ diff --git a/X_train.pkl b/X_train.pkl new file mode 100644 index 0000000..b3364e7 --- /dev/null +++ b/X_train.pkl Binary files differ diff --git a/depositphotos_38511129-stock-illustration-standard-model-of-elementary-particles.jpg b/depositphotos_38511129-stock-illustration-standard-model-of-elementary-particles.jpg deleted file mode 100644 index 82cc366..0000000 --- a/depositphotos_38511129-stock-illustration-standard-model-of-elementary-particles.jpg +++ /dev/null Binary files differ diff --git a/min_max_chi2_scaler.pkl b/min_max_chi2_scaler.pkl new file mode 100644 index 0000000..1e340e3 --- /dev/null +++ b/min_max_chi2_scaler.pkl Binary files differ diff --git a/min_max_diff_scaler.pkl b/min_max_diff_scaler.pkl new file mode 100644 index 0000000..3cba4cf --- /dev/null +++ b/min_max_diff_scaler.pkl Binary files differ diff --git a/trained_models/RNN_Classifier.h5 b/trained_models/RNN_Classifier.h5 new file mode 100644 index 0000000..d845379 --- /dev/null +++ b/trained_models/RNN_Classifier.h5 Binary files differ diff --git a/trained_models/XGB_Classifier.xgb b/trained_models/XGB_Classifier.xgb new file mode 100644 index 0000000..691f846 --- /dev/null +++ b/trained_models/XGB_Classifier.xgb Binary files differ diff --git a/trained_models/keras_RNN.h5 b/trained_models/keras_RNN.h5 new file mode 100644 index 0000000..894ffcc --- /dev/null +++ b/trained_models/keras_RNN.h5 Binary files differ diff --git a/trained_models/scalor.pkl b/trained_models/scalor.pkl new file mode 100644 index 0000000..a5aafe6 --- /dev/null +++ b/trained_models/scalor.pkl Binary files differ diff --git a/y_test.pkl b/y_test.pkl new file mode 100644 index 0000000..5e0f08b --- /dev/null +++ b/y_test.pkl Binary files differ diff --git a/y_train.pkl b/y_train.pkl new file mode 100644 index 0000000..dc3b0eb --- /dev/null +++ b/y_train.pkl Binary files differ