diff --git a/1_to_1_multi_layer.ipynb b/1_to_1_multi_layer.ipynb index f9c5ab7..674d7ce 100644 --- a/1_to_1_multi_layer.ipynb +++ b/1_to_1_multi_layer.ipynb @@ -58,7 +58,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ @@ -89,7 +89,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 5, "metadata": {}, "outputs": [], "source": [ @@ -122,7 +122,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 14, "metadata": {}, "outputs": [], "source": [ @@ -134,14 +134,14 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 15, "metadata": {}, "outputs": [], "source": [ "#Normalize the data advanced version with scikit learn\n", "\n", "#set the transormation based on training set\n", - "def set_min_max_scalor(arr, feature_range= (-1,1)):\n", + "def set_min_max_scaler(arr, feature_range= (-1,1)):\n", "    min_max_scalor = preprocessing.MinMaxScaler(feature_range=feature_range)\n", "    if len(arr.shape) == 3:\n", "        arr = reshapor(min_max_scalor.fit_transform(reshapor_inv(arr))) \n", @@ -149,35 +149,131 @@ "        arr = min_max_scalor.fit_transform(arr)\n", "    return min_max_scalor\n", "\n", - "min_max_scalor = set_min_max_scalor(train_set)\n", + "min_max_scalor = set_min_max_scaler(train_set)\n", "\n", "\n", "#transform data\n", "def min_max_scaler(arr, min_max_scalor= min_max_scalor):\n", " \n", "    if len(arr.shape) == 3:\n", - "        arr = reshapor(min_max_scalor.transform(reshapor_inv(arr))) \n", + "        if arr.shape[1] == 8:\n", + "            arr = reshapor(min_max_scalor.transform(reshapor_inv(arr)))\n", + "        else: \n", + "            arr_ = np.zeros((arr.shape[0],24))\n", + "            arr = reshapor_inv(arr)\n", + "            arr_[:,:arr.shape[1]] += arr\n", + "            arr = min_max_scalor.transform(arr_)[:,:arr.shape[1]]\n", + "            arr = reshapor(arr)\n", + " \n", "    else:\n", - "        arr = min_max_scalor.transform(arr)\n", + "        if arr.shape[1] == 24:\n", + "            arr = min_max_scalor.transform(arr)\n", + "        else:\n", + "            arr_ = np.zeros((arr.shape[0],24))\n", + "            arr_[:,:arr.shape[1]] += arr\n", + "            arr = min_max_scalor.transform(arr_)[:,:arr.shape[1]]\n", " \n", "    return arr\n", " \n", "#inverse transformation\n", "def min_max_scaler_inv(arr, min_max_scalor= min_max_scalor):\n", + " \n", "    if len(arr.shape) == 3:\n", - "        arr = reshapor(min_max_scalor.inverse_transform(reshapor_inv(arr)))\n", + "        if arr.shape[1] == 8:\n", + "            arr = reshapor(min_max_scalor.inverse_transform(reshapor_inv(arr)))\n", + "        else: \n", + "            arr_ = np.zeros((arr.shape[0],24))\n", + "            arr = reshapor_inv(arr)\n", + "            arr_[:,:arr.shape[1]] += arr\n", + "            arr = min_max_scalor.inverse_transform(arr_)[:,:arr.shape[1]]\n", + "            arr = reshapor(arr)\n", + " \n", "    else:\n", - "        arr = min_max_scalor.inverse_transform(arr)\n", + "        if arr.shape[1] == 24:\n", + "            arr = min_max_scalor.inverse_transform(arr)\n", + "        else:\n", + "            arr_ = np.zeros((arr.shape[0],24))\n", + "            arr_[:,:arr.shape[1]] += arr\n", + "            arr = min_max_scalor.inverse_transform(arr_)[:,:arr.shape[1]]\n", " \n", "    return arr" ] }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 16, "metadata": {}, "outputs": [], "source": [ + "#Normalize the data advanced version with scikit learn - Standard scaler\n", + "\n", + "#set the transformation based on training set\n", + "def set_std_scaler(arr):\n", + "    std_scalor = preprocessing.StandardScaler()\n", + "    if len(arr.shape) == 3:\n", + "        arr = reshapor(std_scalor.fit_transform(reshapor_inv(arr))) \n", + "    else:\n", + "        arr = std_scalor.fit_transform(arr)\n", + "    return 
std_scalor\n", + "\n", + "std_scalor = set_std_scaler(train_set)\n", + "\n", + "#transform data\n", + "def std_scaler(arr, std_scalor= std_scalor):\n", + " \n", + " if len(arr.shape) == 3:\n", + " if arr.shape[1] == 8:\n", + " arr = reshapor(std_scalor.transform(reshapor_inv(arr)))\n", + " else: \n", + " arr_ = np.zeros((arr.shape[0],24))\n", + " arr = reshapor_inv(arr)\n", + " arr_[:,:arr.shape[1]] += arr\n", + " arr = std_scalor.transform(arr_)[:,:arr.shape[1]]\n", + " arr = reshapor(arr)\n", + " \n", + " else:\n", + " if arr.shape[1] == 24:\n", + " arr = std_scalor.transform(arr)\n", + " else:\n", + " arr_ = np.zeros((arr.shape[0],24))\n", + " arr_[:,:arr.shape[1]] += arr\n", + " arr = std_scalor.transform(arr_)[:,:arr.shape[1]]\n", + " \n", + " return arr\n", + " \n", + "#inverse transformation\n", + "def std_scaler_inv(arr, std_scalor= std_scalor):\n", + " \n", + " if len(arr.shape) == 3:\n", + " if arr.shape[1] == 8:\n", + " arr = reshapor(std_scalor.inverse_transform(reshapor_inv(arr)))\n", + " else: \n", + " arr_ = np.zeros((arr.shape[0],24))\n", + " arr = reshapor_inv(arr)\n", + " arr_[:,:arr.shape[1]] += arr\n", + " arr = std_scalor.inverse_transform(arr_)[:,:arr.shape[1]]\n", + " arr = reshapor(arr)\n", + " \n", + " else:\n", + " if arr.shape[1] == 24:\n", + " arr = std_scalor.inverse_transform(arr)\n", + " else:\n", + " arr_ = np.zeros((arr.shape[0],24))\n", + " arr_[:,:arr.shape[1]] += arr\n", + " arr = std_scalor.inverse_transform(arr_)[:,:arr.shape[1]]\n", + " \n", + " return arr\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [], + "source": [ + "#reshape the data\n", + "\n", "train_set = reshapor(train_set)\n", "test_set = reshapor(test_set)\n", "\n", @@ -186,30 +282,74 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 37, "metadata": {}, "outputs": [], "source": [ - "train_set = min_max_scaler(train_set)\n", - "test_set = min_max_scaler(test_set)\n", + "#Scale data either with MinMax scaler or with Standard scaler\n", + "#Return scalor if fit = True and and scaled array otherwise\n", "\n", - "#print(train_set[0,:,:])" + "def scaler(arr, std_scalor= std_scalor, min_max_scalor= min_max_scalor, scalerfunc= \"std\"):\n", + " \n", + " if scalerfunc == \"std\":\n", + " arr = std_scaler(arr, std_scalor= std_scalor)\n", + " return arr\n", + " \n", + " elif scalerfunc == \"minmax\":\n", + " arr = min_max_scaler(arr, min_max_scalor= min_max_scalor)\n", + " return arr\n", + " \n", + " else:\n", + " raise ValueError(\"Uknown scaler chosen: {}\".format(scalerfunc))\n", + "\n", + "def scaler_inv(arr, std_scalor= std_scalor, min_max_scalor= min_max_scalor, scalerfunc= \"std\"):\n", + "\n", + " if scalerfunc == \"std\":\n", + " arr = std_scaler_inv(arr, std_scalor= std_scalor)\n", + " return arr\n", + " \n", + " elif scalerfunc == \"minmax\":\n", + " arr = min_max_scaler_inv(arr, min_max_scalor= std_scalor)\n", + " return arr\n", + " \n", + " else:\n", + " raise ValueError(\"Uknown scaler chosen: {}\".format(scalerfunc))\n" ] }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 21, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[-0.02109399 0.0394468 -0.01875739]\n", + " [-0.0158357 0.02916325 -0.02021501]\n", + " [-0.00411211 0.01346626 -0.01817778]\n", + " [-0.00314466 0.01169437 -0.00971874]\n", + " [ 0.00827457 -0.00905463 -0.00903793]\n", + " [ 0.00906477 -0.01100179 -0.00610165]\n", + " [ 0.01623521 -0.02745446 
0.00036546]\n", + " [ 0.01879028 -0.03098714 -0.0009012 ]]\n" + ] + } + ], "source": [ - "#train_set = min_max_scaler_inv(train_set)\n", + "#scale the data\n", "\n", - "#print(train_set[0,:,:])" + "func = \"minmax\"\n", + "\n", + "train_set = scaler(train_set, scalerfunc = func)\n", + "test_set = scaler(test_set, scalerfunc = func)\n", + "\n", + "print(train_set[0,:,:])" ] }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 23, "metadata": {}, "outputs": [], "source": [ @@ -254,7 +394,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 24, "metadata": {}, "outputs": [], "source": [ @@ -269,7 +409,7 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 25, "metadata": {}, "outputs": [], "source": [ @@ -305,7 +445,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 26, "metadata": {}, "outputs": [], "source": [ @@ -446,7 +586,7 @@ " epoche_save = iep\n", " \n", " #early stopping with patience\n", - " if iep > 1 and abs(self.loss_list[iep]-self.loss_list[iep-1]) < 2/1000000:\n", + " if iep > 1 and abs(self.loss_list[iep]-self.loss_list[iep-1]) < 2/10**7:\n", " patience_cnt += 1\n", " #print(\"Patience now at: \", patience_cnt, \" of \", patience)\n", " \n", @@ -458,7 +598,7 @@ " #Note that the loss here is multiplied with 1000 for easier reading\n", " if iep%print_step==0:\n", " print(\"Epoch number \",iep)\n", - " print(\"Cost: \",loss*1000, \"e-3\")\n", + " print(\"Cost: \",loss*10**6, \"e-6\")\n", " print(\"Patience: \",patience_cnt, \"/\", patience)\n", " print(\"Last checkpoint at: Epoch \", epoche_save, \"\\n\")\n", " \n", @@ -485,7 +625,7 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 27, "metadata": {}, "outputs": [], "source": [ @@ -502,7 +642,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 28, "metadata": {}, "outputs": [ { @@ -523,7 +663,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": 29, "metadata": {}, "outputs": [], "source": [ @@ -532,7 +672,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 39, "metadata": { "scrolled": true }, @@ -542,579 +682,398 @@ "output_type": "stream", "text": [ "Epoch number 0\n", - "Cost: 3770.231458734959 e4\n", + "Cost: 10.041199672838395 e-3\n", "Patience: 0 / 200\n", "Last checkpoint at: Epoch 0 \n", "\n", "Epoch number 5\n", - "Cost: 1649.7736788810569 e4\n", + "Cost: 0.14646259134021053 e-3\n", "Patience: 0 / 200\n", "Last checkpoint at: Epoch 5 \n", "\n", "Epoch number 10\n", - "Cost: 625.2868418046768 e4\n", - "Patience: 0 / 200\n", + "Cost: 0.14038292159811852 e-3\n", + "Patience: 5 / 200\n", "Last checkpoint at: Epoch 10 \n", "\n", "Epoch number 15\n", - "Cost: 294.9610768639027 e4\n", - "Patience: 0 / 200\n", + "Cost: 0.13558934176429868 e-3\n", + "Patience: 10 / 200\n", "Last checkpoint at: Epoch 15 \n", "\n", "Epoch number 20\n", - "Cost: 209.0108957379422 e4\n", - "Patience: 0 / 200\n", + "Cost: 0.12642440127278182 e-3\n", + "Patience: 14 / 200\n", "Last checkpoint at: Epoch 20 \n", "\n", "Epoch number 25\n", - "Cost: 174.1866168982171 e4\n", - "Patience: 0 / 200\n", + "Cost: 0.1116786241912818 e-3\n", + "Patience: 16 / 200\n", "Last checkpoint at: Epoch 25 \n", "\n", "Epoch number 30\n", - "Cost: 149.8719225538538 e4\n", - "Patience: 0 / 200\n", + "Cost: 0.10637743763893129 e-3\n", + "Patience: 20 / 200\n", "Last checkpoint at: Epoch 30 \n", "\n", "Epoch number 35\n", - "Cost: 131.33942407179387 e4\n", - "Patience: 0 / 200\n", + "Cost: 
0.10180761176904544 e-3\n", + "Patience: 21 / 200\n", "Last checkpoint at: Epoch 35 \n", "\n", "Epoch number 40\n", - "Cost: 115.83642023516462 e4\n", - "Patience: 0 / 200\n", - "Last checkpoint at: Epoch 40 \n", + "Cost: 0.10329305703325713 e-3\n", + "Patience: 25 / 200\n", + "Last checkpoint at: Epoch 35 \n", "\n", "Epoch number 45\n", - "Cost: 107.55172256935151 e4\n", - "Patience: 0 / 200\n", + "Cost: 0.09893714772299567 e-3\n", + "Patience: 26 / 200\n", "Last checkpoint at: Epoch 45 \n", "\n", "Epoch number 50\n", - "Cost: 98.54952309359895 e4\n", - "Patience: 0 / 200\n", + "Cost: 0.09669851916693548 e-3\n", + "Patience: 28 / 200\n", "Last checkpoint at: Epoch 50 \n", "\n", "Epoch number 55\n", - "Cost: 95.66065657170529 e4\n", - "Patience: 0 / 200\n", + "Cost: 0.09474931256919901 e-3\n", + "Patience: 30 / 200\n", "Last checkpoint at: Epoch 55 \n", "\n", "Epoch number 60\n", - "Cost: 90.34742145462239 e4\n", - "Patience: 1 / 200\n", + "Cost: 0.09272654031210163 e-3\n", + "Patience: 33 / 200\n", "Last checkpoint at: Epoch 60 \n", "\n", "Epoch number 65\n", - "Cost: 84.77292855844853 e4\n", - "Patience: 2 / 200\n", - "Last checkpoint at: Epoch 65 \n", + "Cost: 0.09420149952812279 e-3\n", + "Patience: 35 / 200\n", + "Last checkpoint at: Epoch 60 \n", "\n", "Epoch number 70\n", - "Cost: 78.54001398416275 e4\n", - "Patience: 3 / 200\n", - "Last checkpoint at: Epoch 70 \n", + "Cost: 0.09541216964630331 e-3\n", + "Patience: 36 / 200\n", + "Last checkpoint at: Epoch 60 \n", "\n", "Epoch number 75\n", - "Cost: 75.23123551397882 e4\n", - "Patience: 3 / 200\n", + "Cost: 0.09047800716522962 e-3\n", + "Patience: 39 / 200\n", "Last checkpoint at: Epoch 75 \n", "\n", "Epoch number 80\n", - "Cost: 73.33986362085697 e4\n", - "Patience: 4 / 200\n", - "Last checkpoint at: Epoch 80 \n", + "Cost: 0.09089725666699257 e-3\n", + "Patience: 39 / 200\n", + "Last checkpoint at: Epoch 75 \n", "\n", "Epoch number 85\n", - "Cost: 69.12997319422504 e4\n", - "Patience: 5 / 200\n", + "Cost: 0.08590354093726962 e-3\n", + "Patience: 40 / 200\n", "Last checkpoint at: Epoch 85 \n", "\n", "Epoch number 90\n", - "Cost: 65.79162087291479 e4\n", - "Patience: 5 / 200\n", + "Cost: 0.08550771444595041 e-3\n", + "Patience: 41 / 200\n", "Last checkpoint at: Epoch 90 \n", "\n", "Epoch number 95\n", - "Cost: 61.82488113483216 e4\n", - "Patience: 6 / 200\n", + "Cost: 0.08262849370750816 e-3\n", + "Patience: 42 / 200\n", "Last checkpoint at: Epoch 95 \n", "\n", "Epoch number 100\n", - "Cost: 59.33671109774646 e4\n", - "Patience: 8 / 200\n", + "Cost: 0.08081882078066825 e-3\n", + "Patience: 45 / 200\n", "Last checkpoint at: Epoch 100 \n", "\n", "Epoch number 105\n", - "Cost: 57.19678456637453 e4\n", - "Patience: 9 / 200\n", - "Last checkpoint at: Epoch 105 \n", + "Cost: 0.08332692371542624 e-3\n", + "Patience: 48 / 200\n", + "Last checkpoint at: Epoch 100 \n", "\n", "Epoch number 110\n", - "Cost: 55.66507161773266 e4\n", - "Patience: 10 / 200\n", - "Last checkpoint at: Epoch 110 \n", + "Cost: 0.0850605532871262 e-3\n", + "Patience: 50 / 200\n", + "Last checkpoint at: Epoch 100 \n", "\n", "Epoch number 115\n", - "Cost: 54.365597526602286 e4\n", - "Patience: 13 / 200\n", - "Last checkpoint at: Epoch 115 \n", + "Cost: 0.08140491588571248 e-3\n", + "Patience: 50 / 200\n", + "Last checkpoint at: Epoch 100 \n", "\n", "Epoch number 120\n", - "Cost: 52.487826807067755 e4\n", - "Patience: 14 / 200\n", - "Last checkpoint at: Epoch 120 \n", + "Cost: 0.0823190781916987 e-3\n", + "Patience: 52 / 200\n", + "Last checkpoint at: Epoch 100 \n", "\n", 
"Epoch number 125\n", - "Cost: 51.60155072015651 e4\n", - "Patience: 17 / 200\n", + "Cost: 0.0766505290038309 e-3\n", + "Patience: 55 / 200\n", "Last checkpoint at: Epoch 125 \n", "\n", "Epoch number 130\n", - "Cost: 51.004822227232 e4\n", - "Patience: 20 / 200\n", + "Cost: 0.07502320984210027 e-3\n", + "Patience: 56 / 200\n", "Last checkpoint at: Epoch 130 \n", "\n", "Epoch number 135\n", - "Cost: 49.656663347590474 e4\n", - "Patience: 22 / 200\n", - "Last checkpoint at: Epoch 135 \n", + "Cost: 0.0758755330102855 e-3\n", + "Patience: 57 / 200\n", + "Last checkpoint at: Epoch 130 \n", "\n", "Epoch number 140\n", - "Cost: 49.04315717756114 e4\n", - "Patience: 26 / 200\n", + "Cost: 0.0731801113207884 e-3\n", + "Patience: 58 / 200\n", "Last checkpoint at: Epoch 140 \n", "\n", "Epoch number 145\n", - "Cost: 48.333713487583275 e4\n", - "Patience: 29 / 200\n", - "Last checkpoint at: Epoch 145 \n", + "Cost: 0.0745931863499944 e-3\n", + "Patience: 60 / 200\n", + "Last checkpoint at: Epoch 140 \n", "\n", "Epoch number 150\n", - "Cost: 47.4689517447606 e4\n", - "Patience: 33 / 200\n", + "Cost: 0.05597170093096793 e-3\n", + "Patience: 60 / 200\n", "Last checkpoint at: Epoch 150 \n", "\n", "Epoch number 155\n", - "Cost: 46.82262457827938 e4\n", - "Patience: 38 / 200\n", + "Cost: 0.0448569248584 e-3\n", + "Patience: 61 / 200\n", "Last checkpoint at: Epoch 155 \n", "\n", "Epoch number 160\n", - "Cost: 46.189470573308625 e4\n", - "Patience: 43 / 200\n", + "Cost: 0.0377340710404864 e-3\n", + "Patience: 63 / 200\n", "Last checkpoint at: Epoch 160 \n", "\n", "Epoch number 165\n", - "Cost: 45.566867759570165 e4\n", - "Patience: 48 / 200\n", + "Cost: 0.03712705128759324 e-3\n", + "Patience: 64 / 200\n", "Last checkpoint at: Epoch 165 \n", "\n", "Epoch number 170\n", - "Cost: 45.00874754120695 e4\n", - "Patience: 53 / 200\n", - "Last checkpoint at: Epoch 170 \n", + "Cost: 0.037240219558527236 e-3\n", + "Patience: 67 / 200\n", + "Last checkpoint at: Epoch 165 \n", "\n", "Epoch number 175\n", - "Cost: 44.46649339367101 e4\n", - "Patience: 58 / 200\n", - "Last checkpoint at: Epoch 175 \n", + "Cost: 0.041023939860330774 e-3\n", + "Patience: 67 / 200\n", + "Last checkpoint at: Epoch 165 \n", "\n", "Epoch number 180\n", - "Cost: 43.92929008587244 e4\n", - "Patience: 63 / 200\n", + "Cost: 0.03179026030108056 e-3\n", + "Patience: 69 / 200\n", "Last checkpoint at: Epoch 180 \n", "\n", "Epoch number 185\n", - "Cost: 43.44754183585656 e4\n", - "Patience: 68 / 200\n", - "Last checkpoint at: Epoch 185 \n", + "Cost: 0.037844479401370486 e-3\n", + "Patience: 71 / 200\n", + "Last checkpoint at: Epoch 180 \n", "\n", "Epoch number 190\n", - "Cost: 42.95319576371223 e4\n", - "Patience: 73 / 200\n", + "Cost: 0.02333719505181665 e-3\n", + "Patience: 72 / 200\n", "Last checkpoint at: Epoch 190 \n", "\n", "Epoch number 195\n", - "Cost: 42.52819289909082 e4\n", - "Patience: 78 / 200\n", + "Cost: 0.02318771433412157 e-3\n", + "Patience: 77 / 200\n", "Last checkpoint at: Epoch 195 \n", "\n", "Epoch number 200\n", - "Cost: 41.93341770665126 e4\n", - "Patience: 83 / 200\n", - "Last checkpoint at: Epoch 200 \n", + "Cost: 0.025808127712151234 e-3\n", + "Patience: 79 / 200\n", + "Last checkpoint at: Epoch 195 \n", "\n", "Epoch number 205\n", - "Cost: 41.554861285902085 e4\n", - "Patience: 88 / 200\n", + "Cost: 0.021487966265301518 e-3\n", + "Patience: 82 / 200\n", "Last checkpoint at: Epoch 205 \n", "\n", "Epoch number 210\n", - "Cost: 41.090038733834284 e4\n", - "Patience: 93 / 200\n", + "Cost: 0.020788879447401144 e-3\n", + "Patience: 85 
/ 200\n", "Last checkpoint at: Epoch 210 \n", "\n", "Epoch number 215\n", - "Cost: 40.845294889221165 e4\n", - "Patience: 98 / 200\n", + "Cost: 0.02056433168810203 e-3\n", + "Patience: 85 / 200\n", "Last checkpoint at: Epoch 215 \n", "\n", "Epoch number 220\n", - "Cost: 40.25109122170412 e4\n", - "Patience: 103 / 200\n", + "Cost: 0.016506806942027438 e-3\n", + "Patience: 89 / 200\n", "Last checkpoint at: Epoch 220 \n", "\n", "Epoch number 225\n", - "Cost: 39.58158948002977 e4\n", - "Patience: 108 / 200\n", - "Last checkpoint at: Epoch 225 \n", + "Cost: 0.020985714496767265 e-3\n", + "Patience: 91 / 200\n", + "Last checkpoint at: Epoch 220 \n", "\n", "Epoch number 230\n", - "Cost: 38.97598008327979 e4\n", - "Patience: 113 / 200\n", + "Cost: 0.011625469693520225 e-3\n", + "Patience: 94 / 200\n", "Last checkpoint at: Epoch 230 \n", "\n", "Epoch number 235\n", - "Cost: 38.51150915502234 e4\n", - "Patience: 118 / 200\n", - "Last checkpoint at: Epoch 235 \n", + "Cost: 0.013143771576188614 e-3\n", + "Patience: 98 / 200\n", + "Last checkpoint at: Epoch 230 \n", "\n", "Epoch number 240\n", - "Cost: 38.299499218292695 e4\n", - "Patience: 123 / 200\n", - "Last checkpoint at: Epoch 240 \n", + "Cost: 0.017444268354317522 e-3\n", + "Patience: 100 / 200\n", + "Last checkpoint at: Epoch 230 \n", "\n", "Epoch number 245\n", - "Cost: 37.74655878821269 e4\n", - "Patience: 128 / 200\n", - "Last checkpoint at: Epoch 245 \n", + "Cost: 0.013935790078942367 e-3\n", + "Patience: 101 / 200\n", + "Last checkpoint at: Epoch 230 \n", "\n", "Epoch number 250\n", - "Cost: 37.40582783567778 e4\n", - "Patience: 133 / 200\n", + "Cost: 0.01056458899875771 e-3\n", + "Patience: 103 / 200\n", "Last checkpoint at: Epoch 250 \n", "\n", "Epoch number 255\n", - "Cost: 37.24810196720856 e4\n", - "Patience: 138 / 200\n", - "Last checkpoint at: Epoch 255 \n", + "Cost: 0.013950063650090088 e-3\n", + "Patience: 106 / 200\n", + "Last checkpoint at: Epoch 250 \n", "\n", "Epoch number 260\n", - "Cost: 37.280498320197175 e4\n", - "Patience: 143 / 200\n", - "Last checkpoint at: Epoch 255 \n", + "Cost: 0.015239623812800694 e-3\n", + "Patience: 109 / 200\n", + "Last checkpoint at: Epoch 250 \n", "\n", "Epoch number 265\n", - "Cost: 36.25094043487247 e4\n", - "Patience: 147 / 200\n", - "Last checkpoint at: Epoch 265 \n", + "Cost: 0.014050647958820845 e-3\n", + "Patience: 112 / 200\n", + "Last checkpoint at: Epoch 250 \n", "\n", "Epoch number 270\n", - "Cost: 36.03106825315255 e4\n", - "Patience: 152 / 200\n", + "Cost: 0.009441311336326799 e-3\n", + "Patience: 112 / 200\n", "Last checkpoint at: Epoch 270 \n", "\n", "Epoch number 275\n", - "Cost: 35.67509779191398 e4\n", - "Patience: 156 / 200\n", + "Cost: 0.00812686008391617 e-3\n", + "Patience: 116 / 200\n", "Last checkpoint at: Epoch 275 \n", "\n", "Epoch number 280\n", - "Cost: 35.42137842506487 e4\n", - "Patience: 161 / 200\n", - "Last checkpoint at: Epoch 280 \n", + "Cost: 0.009064912048531968 e-3\n", + "Patience: 118 / 200\n", + "Last checkpoint at: Epoch 275 \n", "\n", "Epoch number 285\n", - "Cost: 35.79035718390282 e4\n", - "Patience: 164 / 200\n", - "Last checkpoint at: Epoch 280 \n", + "Cost: 0.007350245905786808 e-3\n", + "Patience: 119 / 200\n", + "Last checkpoint at: Epoch 285 \n", "\n", "Epoch number 290\n", - "Cost: 33.758991754594 e4\n", - "Patience: 165 / 200\n", - "Last checkpoint at: Epoch 290 \n", + "Cost: 0.009190695427025004 e-3\n", + "Patience: 123 / 200\n", + "Last checkpoint at: Epoch 285 \n", "\n", "Epoch number 295\n", - "Cost: 34.39420328891658 e4\n", - "Patience: 
166 / 200\n", - "Last checkpoint at: Epoch 290 \n", + "Cost: 0.009242598896386706 e-3\n", + "Patience: 126 / 200\n", + "Last checkpoint at: Epoch 285 \n", "\n", "Epoch number 300\n", - "Cost: 33.66679522862777 e4\n", - "Patience: 166 / 200\n", - "Last checkpoint at: Epoch 300 \n", + "Cost: 0.009243554339921871 e-3\n", + "Patience: 131 / 200\n", + "Last checkpoint at: Epoch 285 \n", "\n", "Epoch number 305\n", - "Cost: 34.23552023880976 e4\n", - "Patience: 167 / 200\n", - "Last checkpoint at: Epoch 300 \n", + "Cost: 0.008543941756680069 e-3\n", + "Patience: 134 / 200\n", + "Last checkpoint at: Epoch 285 \n", "\n", "Epoch number 310\n", - "Cost: 33.27848409560132 e4\n", - "Patience: 168 / 200\n", - "Last checkpoint at: Epoch 310 \n", + "Cost: 0.008661668753700995 e-3\n", + "Patience: 137 / 200\n", + "Last checkpoint at: Epoch 285 \n", "\n", "Epoch number 315\n", - "Cost: 32.72916789741275 e4\n", - "Patience: 171 / 200\n", - "Last checkpoint at: Epoch 315 \n", + "Cost: 0.008509848796282003 e-3\n", + "Patience: 142 / 200\n", + "Last checkpoint at: Epoch 285 \n", "\n", "Epoch number 320\n", - "Cost: 32.42362023113255 e4\n", - "Patience: 173 / 200\n", - "Last checkpoint at: Epoch 320 \n", + "Cost: 0.009688999833745953 e-3\n", + "Patience: 145 / 200\n", + "Last checkpoint at: Epoch 285 \n", "\n", "Epoch number 325\n", - "Cost: 33.13556412591579 e4\n", - "Patience: 173 / 200\n", - "Last checkpoint at: Epoch 320 \n", + "Cost: 0.010096690673774302 e-3\n", + "Patience: 148 / 200\n", + "Last checkpoint at: Epoch 285 \n", "\n", "Epoch number 330\n", - "Cost: 34.35548811041294 e4\n", - "Patience: 173 / 200\n", - "Last checkpoint at: Epoch 320 \n", + "Cost: 0.008155997478597589 e-3\n", + "Patience: 152 / 200\n", + "Last checkpoint at: Epoch 285 \n", "\n", "Epoch number 335\n", - "Cost: 31.17884152588692 e4\n", - "Patience: 174 / 200\n", - "Last checkpoint at: Epoch 335 \n", + "Cost: 0.012822152828138837 e-3\n", + "Patience: 156 / 200\n", + "Last checkpoint at: Epoch 285 \n", "\n", "Epoch number 340\n", - "Cost: 33.64366251341206 e4\n", - "Patience: 174 / 200\n", - "Last checkpoint at: Epoch 335 \n", + "Cost: 0.00638995292552244 e-3\n", + "Patience: 159 / 200\n", + "Last checkpoint at: Epoch 340 \n", "\n", "Epoch number 345\n", - "Cost: 32.388941939682404 e4\n", - "Patience: 175 / 200\n", - "Last checkpoint at: Epoch 335 \n", + "Cost: 0.0066921474113924165 e-3\n", + "Patience: 164 / 200\n", + "Last checkpoint at: Epoch 340 \n", "\n", "Epoch number 350\n", - "Cost: 29.8897856648298 e4\n", - "Patience: 175 / 200\n", + "Cost: 0.006151222709028862 e-3\n", + "Patience: 169 / 200\n", "Last checkpoint at: Epoch 350 \n", "\n", "Epoch number 355\n", - "Cost: 30.779531522792706 e4\n", - "Patience: 176 / 200\n", - "Last checkpoint at: Epoch 350 \n", + "Cost: 0.006081407573606641 e-3\n", + "Patience: 170 / 200\n", + "Last checkpoint at: Epoch 355 \n", "\n", "Epoch number 360\n", - "Cost: 32.77950439641767 e4\n", - "Patience: 177 / 200\n", - "Last checkpoint at: Epoch 350 \n", + "Cost: 0.007673800716494293 e-3\n", + "Patience: 175 / 200\n", + "Last checkpoint at: Epoch 355 \n", "\n", "Epoch number 365\n", - "Cost: 34.279519781232516 e4\n", - "Patience: 177 / 200\n", - "Last checkpoint at: Epoch 350 \n", + "Cost: 0.0072596388893911585 e-3\n", + "Patience: 180 / 200\n", + "Last checkpoint at: Epoch 355 \n", "\n", "Epoch number 370\n", - "Cost: 29.02430596147129 e4\n", - "Patience: 177 / 200\n", - "Last checkpoint at: Epoch 370 \n", + "Cost: 0.006717292966099427 e-3\n", + "Patience: 184 / 200\n", + "Last checkpoint at: 
Epoch 355 \n", "\n", "Epoch number 375\n", - "Cost: 31.375054398828997 e4\n", - "Patience: 178 / 200\n", - "Last checkpoint at: Epoch 370 \n", + "Cost: 0.006316999443175093 e-3\n", + "Patience: 189 / 200\n", + "Last checkpoint at: Epoch 355 \n", "\n", "Epoch number 380\n", - "Cost: 33.813590144223355 e4\n", - "Patience: 178 / 200\n", - "Last checkpoint at: Epoch 370 \n", + "Cost: 0.006750347554461382 e-3\n", + "Patience: 193 / 200\n", + "Last checkpoint at: Epoch 355 \n", "\n", "Epoch number 385\n", - "Cost: 28.6719871268786 e4\n", - "Patience: 178 / 200\n", - "Last checkpoint at: Epoch 385 \n", - "\n", - "Epoch number 390\n", - "Cost: 31.848519872081408 e4\n", - "Patience: 179 / 200\n", - "Last checkpoint at: Epoch 385 \n", - "\n", - "Epoch number 395\n", - "Cost: 29.007866582337847 e4\n", - "Patience: 181 / 200\n", - "Last checkpoint at: Epoch 385 \n", - "\n", - "Epoch number 400\n", - "Cost: 33.16965553552863 e4\n", - "Patience: 181 / 200\n", - "Last checkpoint at: Epoch 385 \n", - "\n", - "Epoch number 405\n", - "Cost: 32.650657305295795 e4\n", - "Patience: 181 / 200\n", - "Last checkpoint at: Epoch 385 \n", - "\n", - "Epoch number 410\n", - "Cost: 28.816359365319318 e4\n", - "Patience: 181 / 200\n", - "Last checkpoint at: Epoch 385 \n", - "\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch number 415\n", - "Cost: 29.141941761716886 e4\n", - "Patience: 181 / 200\n", - "Last checkpoint at: Epoch 385 \n", - "\n", - "Epoch number 420\n", - "Cost: 30.577135856877614 e4\n", - "Patience: 182 / 200\n", - "Last checkpoint at: Epoch 385 \n", - "\n", - "Epoch number 425\n", - "Cost: 29.400000695456217 e4\n", - "Patience: 183 / 200\n", - "Last checkpoint at: Epoch 385 \n", - "\n", - "Epoch number 430\n", - "Cost: 26.99479599423865 e4\n", - "Patience: 183 / 200\n", - "Last checkpoint at: Epoch 430 \n", - "\n", - "Epoch number 435\n", - "Cost: 30.304402994744958 e4\n", - "Patience: 184 / 200\n", - "Last checkpoint at: Epoch 430 \n", - "\n", - "Epoch number 440\n", - "Cost: 29.647010675770172 e4\n", - "Patience: 184 / 200\n", - "Last checkpoint at: Epoch 430 \n", - "\n", - "Epoch number 445\n", - "Cost: 27.00613232012442 e4\n", - "Patience: 185 / 200\n", - "Last checkpoint at: Epoch 430 \n", - "\n", - "Epoch number 450\n", - "Cost: 27.036350567210864 e4\n", - "Patience: 186 / 200\n", - "Last checkpoint at: Epoch 430 \n", - "\n", - "Epoch number 455\n", - "Cost: 27.08697458729148 e4\n", - "Patience: 187 / 200\n", - "Last checkpoint at: Epoch 430 \n", - "\n", - "Epoch number 460\n", - "Cost: 28.004820329791055 e4\n", - "Patience: 188 / 200\n", - "Last checkpoint at: Epoch 430 \n", - "\n", - "Epoch number 465\n", - "Cost: 26.3666685551722 e4\n", - "Patience: 188 / 200\n", - "Last checkpoint at: Epoch 465 \n", - "\n", - "Epoch number 470\n", - "Cost: 26.36444576560183 e4\n", - "Patience: 188 / 200\n", - "Last checkpoint at: Epoch 470 \n", - "\n", - "Epoch number 475\n", - "Cost: 31.123574119695324 e4\n", - "Patience: 188 / 200\n", - "Last checkpoint at: Epoch 470 \n", - "\n", - "Epoch number 480\n", - "Cost: 27.53822227068087 e4\n", - "Patience: 189 / 200\n", - "Last checkpoint at: Epoch 470 \n", - "\n", - "Epoch number 485\n", - "Cost: 26.472763485334657 e4\n", - "Patience: 189 / 200\n", - "Last checkpoint at: Epoch 470 \n", - "\n", - "Epoch number 490\n", - "Cost: 25.98736776990142 e4\n", - "Patience: 190 / 200\n", - "Last checkpoint at: Epoch 490 \n", - "\n", - "Epoch number 495\n", - "Cost: 25.32091308781441 e4\n", - "Patience: 191 / 200\n", - "Last checkpoint at: 
Epoch 495 \n", - "\n", - "Epoch number 500\n", - "Cost: 26.51548171614079 e4\n", - "Patience: 191 / 200\n", - "Last checkpoint at: Epoch 495 \n", - "\n", - "Epoch number 505\n", - "Cost: 25.78474184934129 e4\n", - "Patience: 191 / 200\n", - "Last checkpoint at: Epoch 495 \n", - "\n", - "Epoch number 510\n", - "Cost: 26.016250708477294 e4\n", - "Patience: 191 / 200\n", - "Last checkpoint at: Epoch 495 \n", - "\n", - "Epoch number 515\n", - "Cost: 28.13248825754891 e4\n", - "Patience: 191 / 200\n", - "Last checkpoint at: Epoch 495 \n", - "\n", - "Epoch number 520\n", - "Cost: 28.441735156910852 e4\n", - "Patience: 191 / 200\n", - "Last checkpoint at: Epoch 495 \n", - "\n", - "Epoch number 525\n", - "Cost: 25.8854781079324 e4\n", - "Patience: 193 / 200\n", - "Last checkpoint at: Epoch 495 \n", - "\n", - "Epoch number 530\n", - "Cost: 25.448204473929202 e4\n", - "Patience: 193 / 200\n", - "Last checkpoint at: Epoch 495 \n", - "\n", - "Epoch number 535\n", - "Cost: 26.26546668483222 e4\n", - "Patience: 193 / 200\n", - "Last checkpoint at: Epoch 495 \n", - "\n", - "Epoch number 540\n", - "Cost: 24.608338271525312 e4\n", - "Patience: 196 / 200\n", - "Last checkpoint at: Epoch 540 \n", - "\n", - "Epoch number 545\n", - "Cost: 25.521852422822665 e4\n", - "Patience: 196 / 200\n", - "Last checkpoint at: Epoch 540 \n", - "\n", - "Epoch number 550\n", - "Cost: 24.915404786217085 e4\n", + "Cost: 0.006520240363665544 e-3\n", "Patience: 198 / 200\n", - "Last checkpoint at: Epoch 540 \n", - "\n", - "Epoch number 555\n", - "Cost: 25.868487217404105 e4\n", - "Patience: 198 / 200\n", - "Last checkpoint at: Epoch 540 \n", - "\n", - "Epoch number 560\n", - "Cost: 27.24954412576366 e4\n", - "Patience: 199 / 200\n", - "Last checkpoint at: Epoch 540 \n", + "Last checkpoint at: Epoch 355 \n", "\n", "\n", - " Early stopping at epoch 565 , difference: 2.3366942843223992e-05\n", - "Cost: 0.002444114783739156\n" + " Early stopping at epoch 387 , difference: 9.317458766708246e-07\n", + "Cost: 5.49251195054162e-06\n" ] } ], @@ -1124,14 +1083,14 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 40, "metadata": { "scrolled": false }, "outputs": [ { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYsAAAEKCAYAAADjDHn2AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzt3X2QXXd93/H35z7v6vlhMbZkWTKI1nIgdljkJARIgnFEkrFpArEoaZ3WrYcMnpDSJthDxrROM03IDAEGN8FpNG0oRDyFRkOVGMcGppQYtMbGRgbXsmLsRTKWrWdpH+7Dt3+cs7tHV/fuXT0crXb385q5c8/53XPu/s7qaj/39/udc36KCMzMzKZTmO0KmJnZxc9hYWZmPTkszMysJ4eFmZn15LAwM7OeHBZmZtaTw8LMzHpyWJiZWU8OCzMz66k02xU4X1avXh3r16+f7WqYmc0pDz/88IsRMdBru3kTFuvXr2doaGi2q2FmNqdI+sFMtnM3lJmZ9eSwMDOznhwWZmbWk8PCzMx6cliYmVlPDgszM+vJYWFmZj0t+LA4Mdbgw19+kkeePTTbVTEzu2gt+LAYa7T42IN7eGz4yGxXxczsorXgw6JYEAD1ZmuWa2JmdvFa8GFRLiZh0WzFLNfEzOziteDDYqJl0XBYmJl1teDDolRIfgWNpsPCzKybBR8WxYKQoNnymIWZWTcLPiwASgVRdzeUmVlXDguSrigPcJuZdeewIG1Z+NRZM7OuHBZAqSi3LMzMpuGwAIqFAnWfDWVm1lWuYSFpi6QnJe2RdEeH198t6XFJj0r6uqRNafl6SSNp+aOS/izPepYK8tlQZmbTKOX1xpKKwD3AW4BhYJekHRHxRGazT0fEn6Xb3wh8GNiSvvZ0RFyTV/2ySkX5ojwzs2nk2bLYDOyJiL0RMQ5sB27KbhARRzOri4BZ+YtdKsgX5ZmZTSPPsFgDPJdZH07LTiHpPZKeBj4E/FbmpQ2SHpH0NUlvyLGelIo+ddbMbDp5hoU6lJ32Fzki7omIVwDvB34vLd4PrIuIa4H3AZ+WtPS0HyDdJmlI0tCBAwfOuqI+ddbMbHp5hsUwcHlmfS2wb5rttwNvA4iIsYh4KV1+GHgaeFX7DhFxb0QMRsTgwMDAWVfUp86amU0vz7DYBWyUtEFSBdgK7MhuIGljZvWXgKfS8oF0gBxJVwIbgb15VbRYKPh2H2Zm08jtbKiIaEi6HbgPKALbImK3pLuBoYjYAdwu6XqgDhwCbkl3fyNwt6QG0ATeHREH86qrT501M5tebmEBEBE7gZ1tZXdllt/bZb8vAF/Is25ZPhvKzGx6voIbX2dhZtaLw4LkrrMOCzOz7hwWTHRDeczCzKwbhwU+ddbMrBeHBUk3lC/KMzPrzmFBMg+3WxZmZt05LPDZUGZmvTgs8HUWZma9OCxI7jrrloWZWXcOC9KWhW/3YWbWlcOC5GyopruhzMy6cliQDHDX3bIwM+vKYYFPnTUz68VhAZQLPnXWzGw6DguSyY8icOvCzKwLhwXJmAXgM6LMzLpwWJCcOgv4wjwzsy5yDQtJWyQ9KWmPpDs6vP5uSY9LelTS1yVtyrx2Z7rfk5J+Ic96lorJr8HjFmZmneUWFpKKwD3AW4FNwDuzYZD6dES8OiKuAT4EfDjddxOwFbga2AL81/T9cjHVsnA3lJlZJ3m2LDYDeyJib0SMA9uBm7IbRMTRzOoiYOKr/U3A9ogYi4h/BPak75eLYhoWHuA2M+uslON7rwGey6wPA9e1byTpPcD7gArw85l9H2rbd00+1YTy5AC3w8LMrJM8WxbqUHbaX+OIuCciXgG8H/i9M9lX0m2ShiQNHThw4KwrWiykYxYe4DYz6yjPsBgGLs+srwX2TbP9duBtZ7JvRNwbEYMRMTgwMHDWFS371Fkzs2nlGRa7gI2SNkiqkAxY78huIGljZvWXgKfS5R3AVklVSRuAjcC38qroxJiFu6HMzDrLbcwiIhqSbgfuA4rAtojYLeluYCgidgC3S7oeqAOHgFvSfXdL+izwBNAA3hMRzbzqWnI3lJnZtPIc4CYidgI728ruyiy/d5p9/wD4g/xqN2Xy1Fl3Q5mZdeQruIGiz4YyM5uWwwIop91Qvs7CzKwzhwVTA9x1X8FtZtaRw4KpU2fdsjAz68xhQebUWZ8NZWbWkcMCKPuus2Zm03JYkG1ZeMzCzKwThwXZ6yzcsjAz68RhwdTkRx7gNjPrzGHBVMvCp86amXXmsABKPnXWzGxaDgsyF+U5LMzMOnJYkLndh7uhzMw6cljgGwmamfXisMCnzpqZ9eKwYGryIw9wm5l15rBgqmUx3vCYhZlZJw4LoFAQlWKBcQ9wm5l1lGtYSNoi6UlJeyTd0eH190l6QtJjkh6QdEXmtaakR9PHjjzrCVAtFRit5zbNt5nZnJbbHNySisA9wFuAYWCXpB0R8URms0eAwYg4Kek3gQ8BN6evjUTENXnVr121XGDM3VBmZh3l2bLYDOyJiL0RMQ5sB27KbhARX4mIk+nqQ8DaHOszrWqpyFjdYWFm1kmeYbEGeC6zPpyWdXMr8LeZ9ZqkIUkPSXpbpx0k3ZZuM3TgwIFzqmy1XGC04W4oM7NOcuuGAtShrOO5qZJ+HRgE3pQpXhcR+yRdCTwo6fGIePqUN4u4F7gXYHBw8JzOe3XLwsysuzxbFsPA5Zn1tcC+9o0kXQ98ALgxIsYmyiNiX/q8F/gqcG2OdaVWLjDmloWZWUd5hsUuYKOkDZIqwFbglLOaJF0LfIIkKF7IlK+QVE2XVwOvB7ID4+ddteQBbjOzbnLrhoqIhqTbgfuAIrAtInZLuhsYiogdwB8Di4HPSQJ4NiJuBK4CPiGpRRJof9h2FtV5Vy0VOXxyPM8fYWY2Z+U5ZkFE7AR2tpXdlVm+vst+3wBenWfd2tV86qyZWVe+gjtVLRUdFmZmXTgsUr6C28ysO4dFqlZ2y8LMrBuHRapaKjDmloWZWUcOi1RyBbdbFmZmnTgsUrVSkWYraPg25WZmp3FYpKrl5FfhcQszs9M5LFLVUhHAZ0SZmXXgsEjV3LIwM+vKYZGaaFk4LMzMTuewSFVLya/C3VBmZqdzWKRqZbcszMy6cVikJloWvjDPzOx0DovUxKmzvjDPzOx0DovU5AC3WxZmZqdxWKR86qyZWXcOi5QvyjMz625GYSHpkzMp67DNFklPStoj6Y4Or79P0hOSHpP0gKQrMq/dIump9HHLTOp5Lny7DzOz7mbasrg6uyKpCLx2uh3Sbe4B3gpsAt4paVPbZo8AgxHxGuDzwIfSfVcCHwSuAzYDH5S0YoZ1PSu+KM/MrLtpw0LSnZKOAa+RdDR9HANeAP6mx3tvBvZExN6IGAe2AzdlN4iIr0TEyXT1IWBtuvwLwP0RcTAiDgH3A1vO6MjO0OSpsw13Q5mZtZs2LCLiv0TEEuCPI2Jp+lgSEasi4s4e770GeC6zPpyWdXMr8Ldnue85m7qC2y0LM7N2M+2G+pKkRQCSfl3Sh7PjC12oQ1
l03FD6dWAQ+OMz2VfSbZKGJA0dOHCgR3WmJymZLc8tCzOz08w0LP4UOCnpx4HfBX4A/GWPfYaByzPra4F97RtJuh74AHBjRIydyb4RcW9EDEbE4MDAwAwPpbtkalW3LMzM2s00LBoRESRjDh+NiI8CS3rsswvYKGmDpAqwFdiR3UDStcAnSILihcxL9wE3SFqRDmzfkJblqlYuumVhZtZBaYbbHZN0J/AvgDekZzqVp9shIhqSbif5I18EtkXEbkl3A0MRsYOk22kx8DlJAM9GxI0RcVDS75MEDsDdEXHwjI/uDFXLblmYmXUy07C4GfjnwL+OiOclrWNqfKGriNgJ7GwruyuzfP00+24Dts2wfudFtVT0qbNmZh3MqBsqIp4HPgUsk/TLwGhE9BqzmHNq5YKv4DYz62CmV3D/GvAt4B3ArwHflPT2PCs2G9yyMDPrbKbdUB8AXjcxCC1pAPh7kquu5w2fOmtm1tlMz4YqtJ2t9NIZ7Dtn1MpFX5RnZtbBTFsWfyfpPuCv0vWbaRu4ng/csjAz62zasJD0SuCSiPgdSb8C/AzJ1dX/QDLgPa8kYeGWhZlZu15dSR8BjgFExF9HxPsi4t+RtCo+knflLrSkG8otCzOzdr3CYn1EPNZeGBFDwPpcajSL3LIwM+usV1jUpnmt73xW5GJQLRd9BbeZWQe9wmKXpH/bXijpVuDhfKo0e2qlAqONJsltsMzMbEKvs6F+G/iipHcxFQ6DQAX4Z3lWbDZUy0UioN4MKqVOd0k3M1uYpg2LiPgR8NOSfg74sbT4f0fEg7nXbBZkZ8urlObdZSRmZmdtRtdZRMRXgK/kXJdZVy0n83CP1lssmW60xsxsgfHX54ypqVV9+qyZWZbDIqMvbVn4Km4zs1M5LDJqmW4oMzOb4rDImGhZjLgbyszsFLmGhaQtkp6UtEfSHR1ef6Okb0tqtM+PIakp6dH0saN93zzUyh6zMDPrZKZ3nT1j6Tzd9wBvAYZJLvDbERFPZDZ7FvgN4D90eIuRiLgmr/p1MtENNTLusDAzy8otLIDNwJ6I2AsgaTtwEzAZFhHxTPraRTFIMDlm4ftDmZmdIs9uqDXAc5n14bRspmqShiQ9JOlt57dqnfVV0rBwy8LM7BR5tiw63S/jTG66tC4i9km6EnhQ0uMR8fQpP0C6DbgNYN26dWdf01Rt4joLnzprZnaKPFsWw8DlmfW1wL6Z7hwR+9LnvcBXgWs7bHNvRAxGxODAwMC51RaPWZiZdZNnWOwCNkraIKkCbAVmdFaTpBWSqunyauD1ZMY68uLrLMzMOsstLCKiAdwO3Ad8D/hsROyWdLekGwEkvU7SMPAO4BOSdqe7XwUMSfoOyT2p/rDtLKpcFAuiUiy4G8rMrE2eYxZExE6SKVizZXdllneRdE+17/cN4NV51q2bWrngbigzsza+grtNrVz0vaHMzNo4LNr0VYpuWZiZtXFYtKmVih7gNjNr47BoU6sUfSNBM7M2Dos2tVLBNxI0M2vjsGjTVyk6LMzM2jgs2njMwszsdA6LNrVywWMWZmZtHBZt3A1lZnY6h0WbaslhYWbWzmHRJmlZeMzCzCzLYdGmVioy3mzRbJ3J1BtmZvObw6JNXyWdAMldUWZmkxwWbabmtHBYmJlNcFi0mZwtz2FhZjbJYdHGs+WZmZ3OYdGmz/Nwm5mdxmHRZnE1mTzw+FhjlmtiZnbxyDUsJG2R9KSkPZLu6PD6GyV9W1JD0tvbXrtF0lPp45Y865nlsDAzO11uYSGpCNwDvBXYBLxT0qa2zZ4FfgP4dNu+K4EPAtcBm4EPSlqRV12zFteSsDjhsDAzm5Rny2IzsCci9kbEOLAduCm7QUQ8ExGPAe2jyb8A3B8RByPiEHA/sCXHuk5aVE3GLI45LMzMJuUZFmuA5zLrw2nZedtX0m2ShiQNHThw4KwrmrWkWgbg+KjDwsxsQp5hoQ5lM72Hxoz2jYh7I2IwIgYHBgbOqHLd1MoFigW5G8rMLCPPsBgGLs+srwX2XYB9z4kkFlWKHuA2M8vIMyx2ARslbZBUAbYCO2a4733ADZJWpAPbN6RlF8SSWplj7oYyM5uUW1hERAO4neSP/PeAz0bEbkl3S7oRQNLrJA0D7wA+IWl3uu9B4PdJAmcXcHdadkEsqhbdDWVmllHK880jYiews63srszyLpIupk77bgO25Vm/bpbUyhwbq8/GjzYzuyj5Cu4OlveVOXTCYWFmNsFh0cHy/gqHT47PdjXMzC4aDosOVi4qc+ikWxZmZhMcFh0s768wUm96AiQzs5TDooMV/RUADrkryswMcFh0tKI/ueWHB7nNzBIOiw5WLEpaFh7kNjNLOCw6mOiGOuiwMDMDHBYdTXZD+YwoMzPAYdHR8rRlcfiEWxZmZuCw6KhSKrC4WnLLwsws5bDoYnl/2afOmpmlHBZdrOivOCzMzFIOiy5WLKq4G8rMLOWw6GJFf9nXWZiZpRwWXazor3DQZ0OZmQEOi66W9ydTqzaardmuipnZrMs1LCRtkfSkpD2S7ujwelXSZ9LXvylpfVq+XtKIpEfTx5/lWc9OVk7c8mPE4xZmZrlNqyqpCNwDvAUYBnZJ2hERT2Q2uxU4FBGvlLQV+CPg5vS1pyPimrzq18vkhXknx1m9uDpb1TAzuyjk2bLYDOyJiL0RMQ5sB25q2+Ym4H+ky58H3ixJOdZpxiZu+XHQd541M8s1LNYAz2XWh9OyjttERAM4AqxKX9sg6RFJX5P0hhzr2ZHntDAzm5JbNxTQqYUQM9xmP7AuIl6S9Frgf0m6OiKOnrKzdBtwG8C6devOQ5WnvGxJ0vX0o6Oj5/V9zczmojxbFsPA5Zn1tcC+bttIKgHLgIMRMRYRLwFExMPA08Cr2n9ARNwbEYMRMTgwMHBeK796cZVKscAPD42c1/c1M5uL8gyLXcBGSRskVYCtwI62bXYAt6TLbwcejIiQNJAOkCPpSmAjsDfHup6mUBCXLa8xfNhhYWaWWzdURDQk3Q7cBxSBbRGxW9LdwFBE7AD+AvikpD3AQZJAAXgjcLekBtAE3h0RB/OqazdrVvS5ZWFmRr5jFkTETmBnW9ldmeVR4B0d9vsC8IU86zYT61b287fffZ6I4CI5ScvMbFb4Cu5p/NOXL+XwyTrPe5DbzBY4h8U0rr5sKQC7f3i0x5ZmZvObw2Iamy5bSqkgHn720GxXxcxsVjksptFfKXHN5cv5xp4XZ7sqZmazymHRw5teNcBjPzzCcwdPznZVzMxmjcOih1957VoAPjv0XI8tzczmL4dFD2uW9/GmVw3w2aHnGGs0Z7s6ZmazwmExA7f+zAZ+dHSMbV9/ZrarYmY2KxwWM/CGjQNcf9UlfPzBp9h/xFd0m9nC47CYobt+eRPNCN79P7/tubnNbMFxWMzQulX9fGzrtXx//1He/qff4NmXfHaUmS0cDoszcMPVL+dT/+Y6Xjoxzps//FXe//nHOOzJkcxsAVBE+3xEc9Pg4GAMDQ1dkJ/13MGT/Pn/2csnH/oBlWKBG65+Oddevpx3DK5lS
a18QepgZnY+SHo4IgZ7buewOHu79x3hM7ue428e3ceRkTpLayWuu3IVb3zVAL/6E2vor+R6U18zs3PmsLiAWq3g8R8eYdv//Ue+9Nh+mq3kd3rl6kW8bv1KNm9YyY+tWcYVq/qplYuzUkczs04cFrMkInjw+y9w/xM/4uEfHOLpA8dJswMJLlvWx7qV/axaXGHVogovW1rjsuU1Ll3Wx2XL+hhYUqVWLnj+DDO7IGYaFu4nOc8k8earLuHNV10CJK2O3fuOsvfF4zzz4kmeeekEzx48ya5nDnJyvMmx0cZp71EpFfgnlyzhZUuqLOsvs7yvwvL+MtVSgXKxwIbVi1i1uEK1VORlS6qUimJRpUSh4IAxs3w4LHJWKIhXr13Gq9cu6/j6yfEG+4+Msv/wKPuOjHDg2BgHT4zz/eeP8vzRUb7//DGOjNQ5PnZ6qGRVigUuXV5jUaVEtVygWiqwqFKir1KkWipOllVLRSqlieXkUUlDqFxMlivpc6kgAigWkjAqFUVBUJAoFQqUiqJUEMX0USgk6wWlZZIDzGyeyDUsJG0BPkoyB/d/i4g/bHu9Cvwl8FrgJeDmiHgmfe1O4FaSObh/KyLuy7Ous6W/UuIVA4t5xcDiabdrNFuMN1uM1ls89aNjHBttcPDEOEdG6pwYbzBSb7L/8CgnxxuMNVqM1pvsPzLKSL3JeKPFWKPJWKPFWKPFeKN1gY4uMRUcTAZINkyKabgoDaLscmHyeWp/peXJdlPbZNez27W/Z/v7d96v888oiFNenwjDbF3V9jyxz2nrE/sVQGRfn1ie+lmCZLvM+rHRBrVygUYruGRpjWYriAgmOpazPcwFQb0ZnBhrUG+2uGLVIvYfGWGk3uTK1YspFODwyTrjjRbL+sq8eHyM5f1lFlfLLO0r8cLRMV46Mcaa5f28dHyMsWaLK1b2A1AuFmhFcHSkwcrFlcnP29JamdF6k6OjDS5bVuOlE+NIMLC4ytHRBuWiaLaCxdUSJ8aak19oKqUCx0YblIsFRupNVi+usO/wKK1IjvPoSJ2+cjGp54kxFlVKjDda1JstVi6qcHikzor+CkdH6iyplSgWxNMHTvDyZTWK0indvBHB/iOjvHxpbfLfcqzRpFwonPJFZ7TenPxi1K2LuNkKBIzUm/RXikRwyns0mi2Ufv5mMlXzxBDBxdIlnVtYSCoC9wBvAYaBXZJ2RMQTmc1uBQ5FxCslbQX+CLhZ0iZgK3A1cBnw95JeFREL9k5+pWKBUrFAfwWuu3LVOb1XqxWMN1tpeDSpN4N6+p9tvNmi3gzG6k0a6Ye/GcHx0QbNCFqR7N9oBc1Wsm2zlTxaMVEetFqRbJ8+N1vQilO3PfU5eb2VXY6g1ZpYzpSldciuN5qt07drte3T9v4RTNbh1P06/7xgaj+bfQUxOR7YTaWYXEo23pz6gtRfKdJMP8NFifFmi4KS0IuAeqtFXzlpgUf6OTg+1iAClvWVJ4M3IigURKM58Rmfer9KsUC5KJb2lSc/QxP/h4oSjVaLpbUyAaf8f2n/P1ErF1i1qMpYo8lovUVE0F8tEQEj4w1WL6lSkNh06VLueddP5PjbzrdlsRnYExF7ASRtB24CsmFxE/Af0+XPAx9XEqM3AdsjYgz4R0l70vf7hxzru2AUCqJWKKZnZvm6kDMVaWBkQwam1putgIlAYip0Isjs12E9fe9smHV6bkWyXalYoN5sEQHHx+pTLbDMN9GJxUYrKKffihutFsdGG1y6LGmNHDg2RpD8saw3ky8NtXIx+fKQtkYXVUusWlThpRPjDCyuMtpocujEOAWJevqNeVGlyMGT45STphLHRhv0lYv0V4qcGE/+2AZwYqzBqkUVjo02WFwtcXhknGqpSCuCkXoTIZbUSun+BQ6erFOUqJQKjNab9KV/7I+PNVhSKzFab1FOu0xbEfSVixwZqbNyUYUDx8dotZJv8bVykVYrODHemOxmHW+0qJQKk/9uBYlyURwbbdCKSFt0oq9cpBnBsdEGpYIopyHUaLYopcsRUC6JSrHAyfFm0pqvtyZbsrVykVJBjKZlY40WxWxrW1PduRNlR06OJy3ISpFaKTmTcqSe/C7LxQLHRuu0Aq5Y1Z/75z7PsFgDZCeBGAau67ZNRDQkHQFWpeUPte27pv0HSLoNuA1g3bp1563iZtOZ7FLi4ugeMLsQ8rzdR6f/Se2Nxm7bzGRfIuLeiBiMiMGBgYGzqKKZmc1EnmExDFyeWV8L7Ou2jaQSsAw4OMN9zczsAskzLHYBGyVtkFQhGbDe0bbNDuCWdPntwIORnAKwA9gqqSppA7AR+FaOdTUzs2nkNmaRjkHcDtxHcurstojYLeluYCgidgB/AXwyHcA+SBIopNt9lmQwvAG8ZyGfCWVmNtt8uw8zswVsprf78HwWZmbWk8PCzMx6cliYmVlP82bMQtIB4Afn8BargRfPU3UuJvPxuObjMYGPa66ZL8d1RUT0vFBt3oTFuZI0NJNBnrlmPh7XfDwm8HHNNfP1uLpxN5SZmfXksDAzs54cFlPune0K5GQ+Htd8PCbwcc018/W4OvKYhZmZ9eSWhZmZ9bTgw0LSFklPStoj6Y7Zrs+ZkLRN0guSvpspWynpfklPpc8r0nJJ+lh6nI9JyndarXMg6XJJX5H0PUm7Jb03LZ/TxyapJulbkr6THtd/Sss3SPpmelyfSW+8SXojzc+kx/VNSetns/7TkVSU9IikL6Xr8+GYnpH0uKRHJQ2lZXP6M3guFnRYZKZ+fSuwCXhnOqXrXPHfgS1tZXcAD0TERuCBdB2SY9yYPm4D/vQC1fFsNIB/HxFXAT8JvCf9d5nrxzYG/HxE/DhwDbBF0k+STCf8J+lxHSKZbhgy0w4Df5Jud7F6L/C9zPp8OCaAn4uIazKnyM71z+DZS6aIXJgP4KeA+zLrdwJ3zna9zvAY1gPfzaw/CVyaLl8KPJkufwJ4Z6ftLvYH8Dckc7nPm2MD+oFvk8we+SJQSssnP5Mkd2z+qXS5lG6n2a57h2NZS/KH8+eBL5FMXjanjymt3zPA6rayefMZPNPHgm5Z0Hnq19Omb51jLomI/QDp88vS8jl5rGk3xbXAN5kHx5Z21zwKvADcDzwNHI6IRrpJtu6nTDsMTEw7fLH5CPC7QCtdX8XcPyZIZuf8sqSH0ymcYR58Bs9WnnNwzwUzmr51nphzxyppMfAF4Lcj4qjUdc7rOXNskczLco2k5cAXgas6bZY+X/THJemXgRci4mFJPztR3GHTOXNMGa+PiH2SXgbcL+n702w7l47rrCz0lsV8nL71R5IuBUifX0jL59SxSiqTBMWnIuKv0+J5cWwAEXEY+CrJmMzydFphOLXu3aYdvpi8HrhR0jPAdpKuqI8wt48JgIjYlz6/QBLsm5lHn8EztdDDYiZTv8412alqbyHp758o/5fpWRs/CRyZaE5fbJQ0If4C+F5EfDjz0pw+NkkDaYsCSX3A9SSDwl8hmVYYTj+uTtMOXzQi4s6IWBsR60n+/zwYEe9iDh8TgKRFkpZMLAM3AN9ljn8Gz8ls
D5rM9gP4ReD/kfQdf2C263OGdf8rYD9QJ/lmcytJ/+8DwFPp88p0W5Gc+fU08DgwONv1n+a4foakCf8Y8Gj6+MW5fmzAa4BH0uP6LnBXWn4lyRzze4DPAdW0vJau70lfv3K2j6HH8f0s8KX5cExp/b+TPnZP/G2Y65/Bc3n4Cm4zM+tpoXdDmZnZDDgszMysJ4eFmZn15LAwM7OeHBZmZtaTw8LsDEhqpnchnXictzsVS1qvzB2EzS4mC/12H2ZnaiQirpntSphdaG5ZmJ0H6dwHf5TOV/EtSa9My6+Q9EA6x8EDktal5ZdI+mI6t8V3JP10+lZFSX+eznfx5fRKb7NZ57AwOzN9bd1QN2deOxoRm4GPk9wfiXT5LyPiNcCngI+l5R8DvhbJ3BY/QXKVMCTzIdwTEVcDh4Ffzfl4zGbEV3CbnQFJxyNicYfyZ0gmNtqb3gRoJxbiAAAA4ElEQVTx+YhYJelFknkN6mn5/ohYLekAsDYixjLvsR64P5KJdZD0fqAcEf85/yMzm55bFmbnT3RZ7rZNJ2OZ5SYeV7SLhMPC7Py5OfP8D+nyN0juxgrwLuDr6fIDwG/C5IRISy9UJc3Ohr+1mJ2ZvnSmuwl/FxETp89WJX2T5EvYO9Oy3wK2Sfod4ADwr9Ly9wL3SrqVpAXxmyR3EDa7KHnMwuw8SMcsBiPixdmui1ke3A1lZmY9uWVhZmY9uWVhZmY9OSzMzKwnh4WZmfXksDAzs54cFmZm1pPDwszMevr/9+FuGHi0DGAAAAAASUVORK5CYII=\n", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZIAAAEKCAYAAAA4t9PUAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAHq5JREFUeJzt3X2QXfV93/H35967u1o9IIxQXJAwkoPSRHj8uFH9VE9rUls4nshp5SDGTpiUKa0LtZ00cVA9JS5TpqVpjc0YO0MMMSaOBZXt8Y5LjB0LJ+PWkVhsbCOI7C3gsgabxSAhkPbh3vvtH+e3q6vLvecsu3t2L+LzmtnZc3/3nHO/9+zu/ezvPPyOIgIzM7P5qix3AWZm9sLmIDEzswVxkJiZ2YI4SMzMbEEcJGZmtiAOEjMzWxAHiZmZLYiDxMzMFsRBYmZmC1Jb7gKWwplnnhmbNm1a7jLMzF4w7rnnniciYv1c5n1RBMmmTZsYGRlZ7jLMzF4wJP14rvN615aZmS2Ig8TMzBbEQWJmZgviIDEzswVxkJiZ2YKUGiSStks6JGlU0pUdnh+QdFt6fr+kTal9naS7JD0j6RNty7xO0g/SMtdLUpnvwczM8pUWJJKqwA3AhcBW4GJJW9tmuxR4KiLOA64Drk3tE8B/BP6gw6o/BVwGbElf2xe/ejMzm6syeyTbgNGIeDAipoA9wI62eXYAt6TpvcAFkhQRz0bEt8gCZZaks4DTIuLbkd0j+LPAu8p6A9d/40f8zQ/Hy1q9mdkpocwg2QA80vJ4LLV1nCci6sARYF3BOscK1rloPvnNUf736BNlrd7M7JRQZpB0OnYR85hnXvNLukzSiKSR8fH59SqEyDo+ZmbWTZlBMgac0/J4I/Bot3kk1YC1wJMF69xYsE4AIuLGiBiKiKH16+c0XMxzSOAcMTPLV2aQ3A1skbRZUj+wCxhum2cYuCRN7wT2RU4XICIeA45Ken06W+t3gC8vfukZkd89MjOzEgdtjIi6pCuAO4EqcHNEHJR0NTASEcPATcCtkkbJeiK7ZpaX9DBwGtAv6V3A2yLifuB9wGeAQeCv0lcpJLlHYmZWoNTRfyPiDuCOtrarWqYngHd3WXZTl/YR4BWLV2V3WY/ESWJmlsdXtufxMRIzs0IOkhy+ZN7MrJiDJEd2jMRdEjOzPA6SHJLP2jIzK+IgySF8jMTMrIiDJIckn7VlZlbAQZKjImg6R8zMcjlIcvmCRDOzIg6SHNkts5wkZmZ5HCQ5fLDdzKyYgySHR/81MyvmIMkhfNaWmVkRB0kO90jMzIo5SHL4fiRmZsUcJDl8PxIzs2IOkgI+RmJmls9BkkPet2VmVshBksOj/5qZFXOQ5BC+H4mZWREHSQ73SMzMijlIcniIFDOzYg6SHNn9SMzMLI+DJEfWI3GUmJnlcZDk8TESM7NCDpIcvh2JmVkxB0kO37PdzKyYgySHz9oyMyvmIMnhYeTNzIo5SHL4xlZmZsUcJDncIzEzK+YgKeAcMTPLV2qQSNou6ZCkUUlXdnh+QNJt6fn9kja1PLc7tR+S9PaW9t+TdFDSfZI+L2lFifW7R2JmVqC0IJFUBW4ALgS2AhdL2to226XAUxFxHnAdcG1adiuwCzgf2A58UlJV0gbg/cBQRLwCqKb5ynkP+Mp2M7MiZfZItgGjEfFgREwBe4AdbfPsAG5J03uBCyQpte+JiMmIeAgYTesDqAGDkmrASuDRst5ApeJdW2ZmRcoMkg3AIy2Px1Jbx3kiog4cAdZ1WzYifgL8d+D/AY8BRyLia51eXNJlkkYkjYyPj8/rDfh+JGZmxcoMEnVoa/9U7jZPx3ZJLyHrrWwGzgZWSXpvpxePiBsjYigihtavX/88ym4pzmNtmZkVKjNIxoBzWh5v5Lm7oWbnSbuq1gJP5iz7a8BDETEeEdPAF4E3llI9vrLdzGwuygySu4EtkjZL6ic7KD7cNs8wcEma3gnsi2xf0jCwK53VtRnYAhwg26X1ekkr07GUC4AHSnsHvh+JmVmhWlkrjoi6pCuAO8nOrro5Ig5KuhoYiYhh4CbgVkmjZD2RXWnZg5JuB+4H6sDlEdEA9kvaC3wntX8XuLGs9+CztszMipUWJAARcQdwR1vbVS3TE8C7uyx7DXBNh/Y/Bv54cSvtTJ2O1JiZ2Ul8ZXsOHyMxMyvmIMnh+5GYmRVzkORwj8TMrJiDJIdH/zUzK+YgyeH7kZiZFXOQ5HGPxMyskIMkh/AQKWZmRRwkOeQkMTMr5CDJ4WMkZmbFHCQ5fNaWmVkxB0kODyNvZlbMQZLDN7YyMyvmIMnhHomZWTEHSQF3SMzM8jlIcsg3tjIzK+QgySFwl8TMrICDJIePkZiZFXOQ5BDQdI/EzCyXgySHJO/ZMjMr4CDJUfGV7WZmhRwkuXzWlplZEQdJjmysLUeJmVkeB0kOLXcBZmYvAA6SHB7918ysmIMkh+9HYmZWzEGSwz0SM7NiDpIcvrLdzKyYgySH70diZlbMQZLHPRIzs0IOkhzZ6L/LXYWZWW8rNUgkbZd0SNKopCs7PD8g6bb0/H5Jm1qe253aD0l6e0v76ZL2Svp7SQ9IekOJ9TtHzMwKlBYkkqrADcCFwFbgYklb22a7FHgqIs4DrgOuTctuBXYB5wPbgU+m9QF8HPhqRPwy8CrggdLeA76y3cysSJk9km3AaEQ8GBFTwB5gR9s8O4Bb0vRe4A
JJSu17ImIyIh4CRoFtkk4D3gLcBBARUxFxuKw34LO2zMyKlRkkG4BHWh6PpbaO80REHTgCrMtZ9uXAOPDnkr4r6dOSVpVT/kyPpKy1m5mdGsoMkk5DVbV/LHebp1t7DXgt8KmIeA3wLPCcYy8Aki6TNCJpZHx8fO5Vn7wOX9luZlagzCAZA85pebwReLTbPJJqwFrgyZxlx4CxiNif2veSBctzRMSNETEUEUPr16+f1xtwj8TMrFiZQXI3sEXSZkn9ZAfPh9vmGQYuSdM7gX2RHd0eBnals7o2A1uAAxHxU+ARSf8wLXMBcH9p78BDpJiZFaqVteKIqEu6ArgTqAI3R8RBSVcDIxExTHbQ/FZJo2Q9kV1p2YOSbicLiTpweUQ00qr/HfC5FE4PAr9b1nuQB5I3MytUWpAARMQdwB1tbVe1TE8A7+6y7DXANR3a7wWGFrfSznxjKzOzYr6yPYfw6b9mZkUcJDk8jLyZWTEHSQ7f2MrMrJiDJIcETeeImVkuB0kO79oyMys2pyCRdOtc2k49PtxuZlZkrj2S81sfpJF4X7f45fSWinskZmaFcoMk3RPkKPBKSU+nr6PA48CXl6TCZeTRf83MiuUGSUT8l4hYA/xJRJyWvtZExLqI2L1ENS4b37PdzKzYXHdtfWVmuHZJ75X0UUnnllhXT3CPxMys2FyD5FPAMUmvAj4E/Bj4bGlV9QiP/mtmVmyuQVJPo/LuAD4eER8H1pRXVm+QvGvLzKzIXAdtPCppN/DbwD9OZ231lVdW73CMmJnlm2uP5CJgEviX6Z4gG4A/Ka2qHiFfRmJmVmhOQZLC43PAWknvBCYi4kVwjETOETOzAnO9sv23gANk9w75LWC/pJ1lFtYLfD8SM7Nicz1G8mHgVyPicQBJ64G/Jrtn+inLe7bMzIrN9RhJZSZEkp8/j2VfsDxoo5lZsbn2SL4q6U7g8+nxRbTdQvdUJPl+JGZmRXKDRNJ5wEsj4g8l/XPgzWR7fL5NdvD9lOYLEs3MihXtnvoYcBQgIr4YEb8fEb9H1hv5WNnFLTsPkWJmVqgoSDZFxPfbGyNiBNhUSkU9RE4SM7NCRUGyIue5wcUspBdlgzY6SczM8hQFyd2S/lV7o6RLgXvKKal3+BiJmVmxorO2Pgh8SdJ7OBEcQ0A/8JtlFtYLPIy8mVmx3CCJiJ8Bb5T0T4FXpOb/FRH7Sq+sB/jGVmZmxeZ0HUlE3AXcVXItPcc9EjOzYqf81ekL4WMkZmbFHCR5JMADN5qZ5XGQ5FD67hwxM+vOQZIjdUh8nMTMLEepQSJpu6RDkkYlXdnh+QFJt6Xn90va1PLc7tR+SNLb25arSvqupK+UWj/etWVmVqS0IEn3db8BuBDYClwsaWvbbJcCT0XEecB1wLVp2a3ALuB8YDvwybS+GR8AHiir9hnukZiZFSuzR7INGI2IByNiCtgD7GibZwdwS5reC1wgSal9T0RMRsRDwGhaH5I2Ar8OfLrE2gGozASJk8TMrKsyg2QD8EjL47HU1nGeiKgDR4B1Bct+DPgQ0Mx7cUmXSRqRNDI+Pj6vN6CZs7bcJzEz66rMIFGHtvZP5G7zdGyX9E7g8YgoHOcrIm6MiKGIGFq/fn1xtbnrWtDiZmantDKDZAw4p+XxRuDRbvNIqgFrgSdzln0T8BuSHibbVfZWSX9RRvFZTWWt2czs1FFmkNwNbJG0WVI/2cHz4bZ5hoFL0vROYF9kp0gNA7vSWV2bgS3AgYjYHREbI2JTWt++iHhvWW/gxFlbZb2CmdkL31zv2f68RURd0hXAnUAVuDkiDkq6GhiJiGHgJuBWSaNkPZFdadmDkm4H7gfqwOUR0Sir1m5OnLXlJDEz66a0IAGIiDvIbsvb2nZVy/QE8O4uy14DXJOz7m8C31yMOrvxle1mZsV8ZXsOX0diZlbMQZLDV7abmRVzkORwj8TMrJiDZA7cITEz685BkkPukpiZFXKQ5Jg9a8tJYmbWlYMkhzxoo5lZIQdJjhM9EjMz68ZBkkO+Z7uZWSEHSQ4fazczK+YgyeEhUszMijlI8vjGVmZmhRwkOWZvR+IcMTPrykGSw8dIzMyKOUhyzAza2PRBEjOzrhwkOXxBoplZMQdJDl+QaGZWzEGS40SPxFFiZtaNgyTHiRtbLXMhZmY9zEGSQyqex8zsxc5BkuPEWFvLXIiZWQ9zkOTw/UjMzIo5SHL49F8zs2IOkhy+st3MrJiDJMeJs7YcJWZm3ThIcrhHYmZWzEEyB+6QmJl15yDJIXmQFDOzIg6SHL5DoplZMQdJDh8jMTMrVmqQSNou6ZCkUUlXdnh+QNJt6fn9kja1PLc7tR+S9PbUdo6kuyQ9IOmgpA+UWr/H2jIzK1RakEiqAjcAFwJbgYslbW2b7VLgqYg4D7gOuDYtuxXYBZwPbAc+mdZXB/59RPwK8Hrg8g7rXMT3kH33le1mZt2V2SPZBoxGxIMRMQXsAXa0zbMDuCVN7wUuUHaEewewJyImI+IhYBTYFhGPRcR3ACLiKPAAsKGsN+BjJGZmxcoMkg3AIy2Px3juh/7sPBFRB44A6+aybNoN9hpg/yLWfBIPkWJmVqzMIOk0CHv7R3K3eXKXlbQa+ALwwYh4uuOLS5dJGpE0Mj4+PseSn7OW9MJOEjOzbsoMkjHgnJbHG4FHu80jqQasBZ7MW1ZSH1mIfC4ivtjtxSPixogYioih9evXz+sNuEdiZlaszCC5G9giabOkfrKD58Nt8wwDl6TpncC+yAa2GgZ2pbO6NgNbgAPp+MlNwAMR8dESawc6d4vMzOxktbJWHBF1SVcAdwJV4OaIOCjpamAkIobJQuFWSaNkPZFdadmDkm4H7ic7U+vyiGhIejPw28APJN2bXuo/RMQdZbwH39jKzKxYaUECkD7g72hru6plegJ4d5dlrwGuaWv7FkvYUfCNrczMivnK9hwzx0iazhEzs64cJDlOHGx3kpiZdeMgyTE7RMoy12Fm1sscJHl8+q+ZWSEHSY4TR/WdJGZm3ThIcvj0XzOzYg6SHL4/oplZMQdJjop7JGZmhRwkOXz6r5lZMQdJDu/aMjMr5iDJ49N/zcwKOUhyyPcjMTMr5CDJIe/bMjMr5CDJ4RwxMyvmIMnhCxLNzIo5SHLMnv7rPomZWVcOkhyzu7acI2ZmXTlIcpzokZiZWTcOklwzx0gcJWZm3ThIcrhHYmZWzEGSY/Z+JE4SM7OuHCQ5Zk//dZKYmXXlIMnhs7bMzIo5SHLIgzaamRVykOQ4MWijmZl14yDJMdMjabpLYmbWlYNkDpwjZmbdOUhyyOf/mpkVcpDkmD1G4hwxM+vKQZJjpkcy1WgubyFmZj2sVubKJW0HPg5UgU9HxH9te34A+CzwOuDnwEUR8XB6bjdwKdAA3h8Rd85lnYtpw0sGecnKPq768kH23jPGulX9rOirsqKvykBfhRW1anpcOfG9duL5gVqVVQNVzljZz+kr++mvObfN7NRTWpBIqgI3AP8MGAPuljQcEfe3zHYp8FREnCdpF3AtcJGkrcAu4HzgbOCvJf1SW
qZonYvmtBV93Pav38An9o3y0BPP8vDPn2ViusnkdIOJepOp+vPrqazqr3L6yn4G+ipEZINBBtBoBhPTTV6+fhUDtQrVStYVmphuMNhXZaBW5ZnJ+mxgTUw3GahVqFTE8ak6jWZw1umDiOxoTrYrLtJrQK0qVg/UOD7dYEVflXojCIJGMxjsrzJQrYBEo9mk3gxW1KpUJAb6KhybarCqv8p0o0kzoCKYagSnD/YxUW/QV6nM1lVvBvXmif2Aannvanmglmdm2mdaKhWxfvUAz0zW6a9V6K9VGKhWeGayTl+1Mrv+/lqFgRTMzWYw0FelryqOTtSpVcSaFX3Um02EWDvYx9qVfawZqFGptFZlZouhzB7JNmA0Ih4EkLQH2AG0fujvAD6SpvcCn1A2LskOYE9ETAIPSRpN62MO61xUv/TSNVx/8Ws6PtdsBpP1JhPTDSbqDSanm0zUG0xMp7bpBs9ONnjq2BRPPjvFkePTHD42zUS9QUWiouwDtCJRqYiHn3iWoxP12dONV9SqPPHMFBPTDVavqPHUsSbHphoM1CpMN5opCGoI+P7YEbJtAiCU1i3BVL3Js1NZKE1MN2aDqr+WBcVMINYqoloRU43mKXlcqCJYO9jHaYN9HJ2os23TGZx9+iDHpxv0V8WxqQYSnLtuFdWKqEocOT5NtaIU4A36qtnPqirNbsfDx6Y5c3U/Z58+yHQjaEQw2FdlZX+VqUaTnx2Z4Pyz1/KTw8f4+v2Ps/N1G1k72HdSbVL6avvZQfZPQLUiJqYbNJpBrSqmG8FTx6Z42RkrOT7VoFLJfp+qEkq/W9XKydMVZeuuSGm+bBig6UaT6UaTlf2dPw6azaAZQa26/D3qiens91/yPwS9pMwg2QA80vJ4DPhH3eaJiLqkI8C61P53bctuSNNF61wylYoY7K8y2F9drhIWxcww+TN/nNPpmNBMj2ii3qS/WkGCeiNm//Nf0Vdlqt5kstFgYqpJX03UKtmHzUnjk3WenA2r1nmn6k3Gj05y2mAfU/UmU40mk9NNBvurNJrN2V7JdL3JdCOoKPs5TKYe4poVNSbrTY5P1alVKjQjeHqizuFjUxw+Ns3h41McOZ71Wg489CR/+6NxVvbXmG40qVZEvdHk6Yn6bD3Vimg081NVen4nZHzhO2Nzn7lkM7VLsLq/xnQz+wdluhFUK2KgVqGewnGgVqGRQkUpjCbqDfqrFfqqFVLupRBsDUSdFIytQXnk+HS2W7hWpdG2Edu3aUTw82enOHN1PwO16myPvhlBM/W+K4JjUw0G+6vUKjrpdyzixO9fJQXqTC++3gyazZgN3ueGeva3Ualk7RWdeF/MI9PmG4PPN0DPWNnP7f/mDfN8tbkrM0g6veP2P7du83Rr7/QvUcc/YUmXAZcBvOxlL+tepT3nl7Mv/ec58311y3+ifSkzX7KqHyCF6Mn/XS/UuetWLer6no9mM5hqNGlG2vWX3vBkvclg2n0381wjsg+n01bU+OnTE/z8mSn6axUqguNTTY5N1WlEsHawj7GnjrNmoMZ5v7Caex85zMnZdOJDLvve+jg4OlEnItt9V6todvfeQK3C+NFJ1g72EQGNiJYP1Ugf+idPNyNSD2PmAzgLDJH1vmpVUatkX82AyXrW2+mrVGZ7s5WKZtcz88/EdNpdOvvBnT7kn/t+Wt5vwKqBGhP1Bo1GkP0PcvLvYvvn5pmrB3j08HGaES29es0u22wGKweqHJ/Kem8zgTCzrtZhj2a2FUBfVemfhlR7h7pnH88G2PzuVTTvzv48FlyzotTD4LPKfJUx4JyWxxuBR7vMMyapBqwFnixYtmidAETEjcCNAENDQ6fgjhorQ6UiVlSe28Oc2a3T3+UYy1lrBzlr7WDX9Z5/9trZ6bed/w8WWKVZbylzp+fdwBZJmyX1kx08H26bZxi4JE3vBPZFFvHDwC5JA5I2A1uAA3Ncp5mZLaHSeiTpmMcVwJ1kp+reHBEHJV0NjETEMHATcGs6mP4kWTCQ5rud7CB6Hbg8IhoAndZZ1nswM7NiejHcj3xoaChGRkaWuwwzsxcMSfdExNBc5l3+8/nMzOwFzUFiZmYL4iAxM7MFcZCYmdmCOEjMzGxBXhRnbUkaB348z8XPBJ5YxHIWk2ubv16uz7XNXy/X90Kr7dyIWD+XhV8UQbIQkkbmegrcUnNt89fL9bm2+evl+k7l2rxry8zMFsRBYmZmC+IgKXbjcheQw7XNXy/X59rmr5frO2Vr8zESMzNbEPdIzMxsQRwkXUjaLumQpFFJVy53PQCSHpb0A0n3ShpJbWdI+rqkH6XvL1miWm6W9Lik+1raOtaizPVpW35f0muXobaPSPpJ2nb3SnpHy3O7U22HJL295NrOkXSXpAckHZT0gdTeK9uuW33Lvv0krZB0QNL3Um3/KbVvlrQ/bbvb0i0mSLehuC3Vtl/SpmWo7TOSHmrZbq9O7Uv6c02vWZX0XUlfSY8Xb7tldwPzV+sX2RD1/xd4OdAPfA/Y2gN1PQyc2db234Ar0/SVwLVLVMtbgNcC9xXVArwD+Cuy29+9Hti/DLV9BPiDDvNuTT/fAWBz+rlXS6ztLOC1aXoN8MNUQ69su271Lfv2S9tgdZruA/anbXI7sCu1/ynwvjT9b4E/TdO7gNtK3G7davsMsLPD/Ev6c02v+fvAXwJfSY8Xbbu5R9LZNmA0Ih6MiClgD7BjmWvqZgdwS5q+BXjXUrxoRPwt2T1k5lLLDuCzkfk74HRJZy1xbd3sAPZExGREPASMkv38y6rtsYj4Tpo+CjwAbKB3tl23+rpZsu2XtsEz6WFf+grgrcDe1N6+7Wa26V7gAul53vR84bV1s6Q/V0kbgV8HPp0ei0Xcbg6SzjYAj7Q8HiP/j2mpBPA1Sfcouyc9wEsj4jHIPgSAX1i26rrX0ivb84q0G+Hmll2Ay1Zb2mXwGrL/Xntu27XVBz2w/dLumXuBx4Gvk/WADkdEvcPrz9aWnj8CrFuq2iJiZrtdk7bbdZIG2mvrUHcZPgZ8CGimx+tYxO3mIOmsU/r2wultb4qI1wIXApdLestyFzRHvbA9PwX8IvBq4DHgf6T2ZalN0mrgC8AHI+LpvFk7tC1HfT2x/SKiERGvBjaS9Xx+Jef1l7U2Sa8AdgO/DPwqcAbwR0tdm6R3Ao9HxD2tzTmv/7xrc5B0Ngac0/J4I/DoMtUyKyIeTd8fB75E9of0s5kucfr++PJV2LWWZd+eEfGz9IfeBP6ME7tflrw2SX1kH9Kfi4gvpuae2Xad6uul7ZfqOQx8k+z4wumSZm4b3vr6s7Wl59cy912ei1Hb9rSrMCJiEvhzlme7vQn4DUkPk+2mfytZD2XRtpuDpLO7gS3prIZ+sgNOw8tZkKRVktbMTANvA+5LdV2SZrsE+PLyVAg5tQwDv5POVHk9cGRmN85Sadv//Jtk226mtl3pTJXNwBbgQIl1CLgJeCAiPtryVE9su2719cL2k7Re0ulpehD4NbJjOHcBO9Ns7dtuZpvuBPZFOoK8RLX9fcs/ByI7BtG63Zbk5xoRuyNi
Y0RsIvss2xcR72Ext1vZZwq8UL/Izqr4Idk+2A/3QD0vJzs75nvAwZmayPZdfgP4Ufp+xhLV83myXRzTZP/BXNqtFrKu8g1pW/4AGFqG2m5Nr/399IdyVsv8H061HQIuLLm2N5PtJvg+cG/6ekcPbbtu9S379gNeCXw31XAfcFXL38YBsgP9/xMYSO0r0uPR9PzLl6G2fWm73Qf8BSfO7FrSn2tLnf+EE2dtLdp285XtZma2IN61ZWZmC+IgMTOzBXGQmJnZgjhIzMxsQRwkZma2IA4Ss0UgqdEywuu9WsQRoyVtUstIxma9plY8i5nNwfHIhscwe9Fxj8SsRMruIXNtulfFAUnnpfZzJX0jDeb3DUkvS+0vlfQlZfe1+J6kN6ZVVSX9mbJ7XXwtXT1t1hMcJGaLY7Bt19ZFLc89HRHbgE+QjXFEmv5sRLwS+BxwfWq/HvibiHgV2T1VDqb2LcANEXE+cBj4FyW/H7M585XtZotA0jMRsbpD+8PAWyPiwTQY4k8jYp2kJ8iGGZlO7Y9FxJmSxoGNkQ3yN7OOTWTDkm9Jj/8I6IuI/1z+OzMr5h6JWfmiy3S3eTqZbJlu4OOb1kMcJGblu6jl+7fT9P8hG4kV4D3At9L0N4D3weyNkk5bqiLN5sv/1ZgtjsF0d7wZX42ImVOAByTtJ/vH7eLU9n7gZkl/CIwDv5vaPwDcKOlSsp7H+8hGMjbrWT5GYlaidIxkKCKeWO5azMriXVtmZrYg7pGYmdmCuEdiZmYL4iAxM7MFcZCYmdmCOEjMzGxBHCRmZrYgDhIzM1uQ/w/ptlV1wVOjOwAAAABJRU5ErkJggg==\n", "text/plain": [ "
" ] @@ -1141,6 +1100,8 @@ } ], "source": [ + "#Plot the loss\n", + "\n", "plt.plot(rnn.loss_list)\n", "plt.xlabel(\"Epoch\")\n", "plt.ylabel(\"Cost\")\n", @@ -1149,7 +1110,7 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 45, "metadata": {}, "outputs": [], "source": [ @@ -1160,7 +1121,7 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 33, "metadata": {}, "outputs": [ { @@ -1178,7 +1139,7 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 34, "metadata": {}, "outputs": [], "source": [ @@ -1187,7 +1148,7 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": 42, "metadata": {}, "outputs": [], "source": [ @@ -1198,36 +1159,50 @@ }, { "cell_type": "code", - "execution_count": 41, + "execution_count": 50, "metadata": {}, "outputs": [ { - "ename": "ValueError", - "evalue": "operands could not be broadcast together with shapes (469,21) (24,) (469,21) ", - "output_type": "error", - "traceback": [ - "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[1;31mValueError\u001b[0m Traceback (most recent call last)", - "\u001b[1;32m\u001b[0m in \u001b[0;36m\u001b[1;34m()\u001b[0m\n\u001b[0;32m 1\u001b[0m \u001b[1;31m#Here i subtract a prediction (random particle) from the target to get an idea of the predictions\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m----> 2\u001b[1;33m \u001b[0mmin_max_scaler\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mtest_input\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 3\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 4\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 5\u001b[0m \u001b[1;31m#print(min_max_scaler_inv(test_pred)-min_max_scaler_inv(test_target))\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", - "\u001b[1;32m\u001b[0m in \u001b[0;36mmin_max_scaler\u001b[1;34m(arr, min_max_scalor)\u001b[0m\n\u001b[0;32m 13\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 14\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mlen\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0marr\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mshape\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;33m==\u001b[0m \u001b[1;36m3\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 15\u001b[1;33m \u001b[0marr\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mreshapor\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mmin_max_scalor\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mtransform\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mreshapor_inv\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0marr\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 16\u001b[0m \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 17\u001b[0m \u001b[0marr\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mmin_max_scalor\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mtransform\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0marr\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", - "\u001b[1;32mc:\\users\\sa_li\\anaconda3\\envs\\rnn-tf-ker\\lib\\site-packages\\sklearn\\preprocessing\\data.py\u001b[0m in \u001b[0;36mtransform\u001b[1;34m(self, X)\u001b[0m\n\u001b[0;32m 367\u001b[0m \u001b[0mX\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mcheck_array\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mX\u001b[0m\u001b[1;33m,\u001b[0m 
\u001b[0mcopy\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mcopy\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mdtype\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mFLOAT_DTYPES\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 368\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 369\u001b[1;33m \u001b[0mX\u001b[0m \u001b[1;33m*=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mscale_\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 370\u001b[0m \u001b[0mX\u001b[0m \u001b[1;33m+=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mmin_\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 371\u001b[0m \u001b[1;32mreturn\u001b[0m \u001b[0mX\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", - "\u001b[1;31mValueError\u001b[0m: operands could not be broadcast together with shapes (469,21) (24,) (469,21) " + "name": "stdout", + "output_type": "stream", + "text": [ + "[[ 0.00610282 0.00100984 0.02600916]\n", + " [ 0.01632101 -0.01520294 0.02987524]\n", + " [ 0.06068288 0.00697896 0.06441782]\n", + " [-0.0119639 -0.04535145 0.07225598]\n", + " [ 0.04132241 0.01145548 0.05150088]\n", + " [-0.03290992 0.10355402 0.09310361]\n", + " [ 0.00265487 0.04124176 0.08941123]]\n" ] } ], "source": [ "#Here i subtract a prediction (random particle) from the target to get an idea of the predictions\n", - "min_max_scaler(test_input)\n", + "\n", + "#scaler_inv(test_input, scalerfunc = func)[0,:,:]\n", "\n", "\n", - "#print(min_max_scaler_inv(test_pred)-min_max_scaler_inv(test_target))" + "diff = scaler_inv(test_pred, scalerfunc = func)-scaler_inv(test_target, scalerfunc = func )\n", + "\n", + "print(diff[0,:,:])" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 44, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "7.513113e-06" + ] + }, + "execution_count": 44, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "#Here I evaluate my model on the test set based on mean_squared_error\n", "\n", diff --git "a/trained_models/rnn_model_lstm_5l_\13350,40,30,20,10\135c/checkpoint" "b/trained_models/rnn_model_lstm_5l_\13350,40,30,20,10\135c/checkpoint" deleted file mode 100644 index 774833f..0000000 --- "a/trained_models/rnn_model_lstm_5l_\13350,40,30,20,10\135c/checkpoint" +++ /dev/null @@ -1,3 +0,0 @@ -model_checkpoint_path: "rnn_basic" -all_model_checkpoint_paths: "..\\rnn_model_lstm[50,40,30,20,10]c_checkpoint\\rnn_basic" -all_model_checkpoint_paths: "rnn_basic" diff --git "a/trained_models/rnn_model_lstm_5l_\13350,40,30,20,10\135c/rnn_basic.data-00000-of-00001" "b/trained_models/rnn_model_lstm_5l_\13350,40,30,20,10\135c/rnn_basic.data-00000-of-00001" deleted file mode 100644 index d91c1ce..0000000 --- "a/trained_models/rnn_model_lstm_5l_\13350,40,30,20,10\135c/rnn_basic.data-00000-of-00001" +++ /dev/null Binary files differ diff --git "a/trained_models/rnn_model_lstm_5l_\13350,40,30,20,10\135c/rnn_basic.index" "b/trained_models/rnn_model_lstm_5l_\13350,40,30,20,10\135c/rnn_basic.index" deleted file mode 100644 index 361718e..0000000 --- "a/trained_models/rnn_model_lstm_5l_\13350,40,30,20,10\135c/rnn_basic.index" +++ /dev/null Binary files differ diff --git "a/trained_models/rnn_model_lstm_5l_\13350,40,30,20,10\135c/rnn_basic.meta" "b/trained_models/rnn_model_lstm_5l_\13350,40,30,20,10\135c/rnn_basic.meta" deleted file mode 100644 index b941ef6..0000000 --- "a/trained_models/rnn_model_lstm_5l_\13350,40,30,20,10\135c/rnn_basic.meta" +++ /dev/null Binary 
files differ diff --git "a/trained_models/rnn_model_lstm_leaky_relu_5l_\13350,40,30,20,10\135c/checkpoint" "b/trained_models/rnn_model_lstm_leaky_relu_5l_\13350,40,30,20,10\135c/checkpoint" new file mode 100644 index 0000000..cb76c82 --- /dev/null +++ "b/trained_models/rnn_model_lstm_leaky_relu_5l_\13350,40,30,20,10\135c/checkpoint" @@ -0,0 +1,3 @@ +model_checkpoint_path: "rnn_basic" +all_model_checkpoint_paths: "..\\rnn_model_lstm_leaky_relu_[50,40,30,20,10]c_checkpoint\\rnn_basic" +all_model_checkpoint_paths: "rnn_basic" diff --git "a/trained_models/rnn_model_lstm_leaky_relu_5l_\13350,40,30,20,10\135c/rnn_basic.data-00000-of-00001" "b/trained_models/rnn_model_lstm_leaky_relu_5l_\13350,40,30,20,10\135c/rnn_basic.data-00000-of-00001" new file mode 100644 index 0000000..0b21e8b --- /dev/null +++ "b/trained_models/rnn_model_lstm_leaky_relu_5l_\13350,40,30,20,10\135c/rnn_basic.data-00000-of-00001" Binary files differ diff --git "a/trained_models/rnn_model_lstm_leaky_relu_5l_\13350,40,30,20,10\135c/rnn_basic.index" "b/trained_models/rnn_model_lstm_leaky_relu_5l_\13350,40,30,20,10\135c/rnn_basic.index" new file mode 100644 index 0000000..e9f42c1 --- /dev/null +++ "b/trained_models/rnn_model_lstm_leaky_relu_5l_\13350,40,30,20,10\135c/rnn_basic.index" Binary files differ diff --git "a/trained_models/rnn_model_lstm_leaky_relu_5l_\13350,40,30,20,10\135c/rnn_basic.meta" "b/trained_models/rnn_model_lstm_leaky_relu_5l_\13350,40,30,20,10\135c/rnn_basic.meta" new file mode 100644 index 0000000..d1badc9 --- /dev/null +++ "b/trained_models/rnn_model_lstm_leaky_relu_5l_\13350,40,30,20,10\135c/rnn_basic.meta" Binary files differ