diff --git a/example-cifar10.ipynb b/example-cifar10.ipynb index 3b6814c..5750868 100644 --- a/example-cifar10.ipynb +++ b/example-cifar10.ipynb @@ -14,7 +14,20 @@ "execution_count": 1, "id": "dcf3bcff", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "c:\\Users\\21520\\anaconda3\\Lib\\site-packages\\torchtext\\data\\__init__.py:4: UserWarning: \n", + "/!\\ IMPORTANT WARNING ABOUT TORCHTEXT STATUS /!\\ \n", + "Torchtext is deprecated and the last released version will be 0.18 (this one). You can silence this warning by calling the following at the beginnign of your scripts: `import torchtext; torchtext.disable_torchtext_deprecation_warning()`\n", + " warnings.warn(torchtext._TORCHTEXT_DEPRECATION_MSG)\n", + "c:\\Users\\21520\\PycharmProjects\\LAVA\\LAVA\\otdd\\pytorch\\utils.py:7: TqdmExperimentalWarning: Using `tqdm.autonotebook.tqdm` in notebook mode. Use `tqdm.tqdm` instead to force console mode (e.g. in jupyter console)\n", + " from tqdm.autonotebook import tqdm\n" + ] + } + ], "source": [ "import lava" ] @@ -63,10 +76,22 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "id": "a54003f7", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0.18.0\n", + "2.3.0\n", + "0\n", + "Cuda device: 0\n", + "cude devices: 1\n" + ] + } + ], "source": [ "cuda_num = 0\n", "import torchvision\n", @@ -92,13 +117,13 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 4, "id": "23633651", "metadata": {}, "outputs": [], "source": [ - "training_size = 40000\n", - "valid_size = 10000\n", + "training_size = 50\n", + "valid_size = 10\n", "resize = 32\n", "portion = 0.25" ] @@ -114,12 +139,22 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 30, "id": "0f7319d3", "metadata": { "scrolled": true }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Files already downloaded and verified\n", + "Files already downloaded and verified\n", + "CIFAR TEN\n" + ] + } + ], "source": [ "loaders, shuffle_ind = lava.load_data_corrupted(corrupt_type='shuffle', dataname='CIFAR10', resize=resize,\n", " training_size=training_size, test_size=valid_size, currupt_por=portion)" @@ -136,7 +171,28 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 12, + "id": "3efe71ba", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The autoreload extension is already loaded. 
To reload it, use:\n", + " %reload_ext autoreload\n" + ] + } + ], + "source": [ + "%load_ext autoreload\n", + "\n", + "%autoreload 2" + ] + }, + { + "cell_type": "code", + "execution_count": 6, "id": "58be73c8", "metadata": {}, "outputs": [], @@ -144,6 +200,113 @@ "feature_extractor = lava.load_pretrained_feature_extractor('cifar10_embedder_preact_resnet18.pth', device)" ] }, + { + "cell_type": "code", + "execution_count": 31, + "id": "ee5f6733", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([50, 3072])\n" + ] + } + ], + "source": [ + "import pickle\n", + "with open(\"save_x1y1x2y2.txt\", \"rb\") as f:\n", + " loaded_data = pickle.load(f)\n", + "X1, Y1, X2, Y2 = loaded_data\n", + "\n", + "# Now you can use X1, Y1, X2, and Y2 in your code\n", + "print(X1.shape) # Example usage" + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "id": "bd77208b", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([50, 3072])\n", + "torch.Size([5, 3072])\n", + "torch.Size([50])\n", + "torch.Size([5])\n" + ] + } + ], + "source": [ + "print(X1.shape)\n", + "print(X2.shape)\n", + "print(Y1.shape)\n", + "print(Y2.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "id": "8d405db5", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "10 2\n" + ] + } + ], + "source": [ + "c1 = torch.unique(Y1)\n", + "c2 = torch.unique(Y2)\n", + "n1, n2 = len(c1), len(c2)\n", + "print(n1, n2)" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "id": "d9b1fa49", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor([2, 2, 9, 7, 8, 4, 7, 7, 2, 9, 1, 4, 8, 5, 6, 7, 8, 5, 0, 3, 9, 0, 5, 6,\n", + " 1, 3, 6, 0, 3, 5, 1, 7, 6, 3, 2, 0, 6, 5, 7, 1, 9, 0, 2, 0, 5, 8, 1, 6,\n", + " 1, 6])\n" + ] + } + ], + "source": [ + "print(Y1)" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "id": "6355aaf0", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor([11, 10, 10, 10, 11])\n" + ] + } + ], + "source": [ + "print(Y2)" + ] + }, { "cell_type": "markdown", "id": "d345bcac", @@ -154,10 +317,130 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 36, "id": "5abb7145", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "fcfa65d78b69466ab9416d272ef4ba27", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/1 [00:00\n", + " dual_sol, trained_with_flag = lava.compute_dual(feature_extractor, loaders['train'], loaders['test'],\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"c:\\Users\\21520\\PycharmProjects\\LAVA\\LAVA\\lava.py\", line 166, in compute_dual\n", + " dual_sol = get_OT_dual_sol(feature_extractor, trainloader, testloader, p=2, resize=32, device=device)\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"c:\\Users\\21520\\PycharmProjects\\LAVA\\LAVA\\lava.py\", line 114, in get_OT_dual_sol\n", + " dual_sol = dist.dual_sol(maxsamples = training_size, return_coupling = True)\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"c:\\Users\\21520\\PycharmProjects\\LAVA\\LAVA\\otdd\\pytorch\\distance_fast.py\", line 851, in dual_sol\n", + " _ = 
self._get_label_distances()\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"c:\\Users\\21520\\PycharmProjects\\LAVA\\LAVA\\otdd\\pytorch\\distance_fast.py\", line 551, in _get_label_distances\n", + " DYY1 = pwdist(self.X1, self.Y1)\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"c:\\Users\\21520\\PycharmProjects\\LAVA\\LAVA\\otdd\\pytorch\\wasserstein.py\", line 314, in pwdist_exact\n", + " print('cost function:',cost_function.shape)\n", + " ^^^^^^^^^^^^^^^^^^^\n", + "AttributeError: 'FeatureCost' object has no attribute 'shape'\n", + "\n", + "During handling of the above exception, another exception occurred:\n", + "\n", + "Traceback (most recent call last):\n", + " File \"c:\\Users\\21520\\anaconda3\\Lib\\site-packages\\IPython\\core\\interactiveshell.py\", line 2144, in showtraceback\n", + " stb = self.InteractiveTB.structured_traceback(\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"c:\\Users\\21520\\anaconda3\\Lib\\site-packages\\IPython\\core\\ultratb.py\", line 1435, in structured_traceback\n", + " return FormattedTB.structured_traceback(\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"c:\\Users\\21520\\anaconda3\\Lib\\site-packages\\IPython\\core\\ultratb.py\", line 1326, in structured_traceback\n", + " return VerboseTB.structured_traceback(\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"c:\\Users\\21520\\anaconda3\\Lib\\site-packages\\IPython\\core\\ultratb.py\", line 1173, in structured_traceback\n", + " formatted_exception = self.format_exception_as_a_whole(etype, evalue, etb, number_of_lines_of_context,\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"c:\\Users\\21520\\anaconda3\\Lib\\site-packages\\IPython\\core\\ultratb.py\", line 1088, in format_exception_as_a_whole\n", + " frames.append(self.format_record(record))\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"c:\\Users\\21520\\anaconda3\\Lib\\site-packages\\IPython\\core\\ultratb.py\", line 970, in format_record\n", + " frame_info.lines, Colors, self.has_colors, lvals\n", + " ^^^^^^^^^^^^^^^^\n", + " File \"c:\\Users\\21520\\anaconda3\\Lib\\site-packages\\IPython\\core\\ultratb.py\", line 792, in lines\n", + " return self._sd.lines\n", + " ^^^^^^^^^^^^^^\n", + " File \"c:\\Users\\21520\\anaconda3\\Lib\\site-packages\\stack_data\\utils.py\", line 145, in cached_property_wrapper\n", + " value = obj.__dict__[self.func.__name__] = self.func(obj)\n", + " ^^^^^^^^^^^^^^\n", + " File \"c:\\Users\\21520\\anaconda3\\Lib\\site-packages\\stack_data\\core.py\", line 698, in lines\n", + " pieces = self.included_pieces\n", + " ^^^^^^^^^^^^^^^^^^^^\n", + " File \"c:\\Users\\21520\\anaconda3\\Lib\\site-packages\\stack_data\\utils.py\", line 145, in cached_property_wrapper\n", + " value = obj.__dict__[self.func.__name__] = self.func(obj)\n", + " ^^^^^^^^^^^^^^\n", + " File \"c:\\Users\\21520\\anaconda3\\Lib\\site-packages\\stack_data\\core.py\", line 649, in included_pieces\n", + " pos = scope_pieces.index(self.executing_piece)\n", + " ^^^^^^^^^^^^^^^^^^^^\n", + " File \"c:\\Users\\21520\\anaconda3\\Lib\\site-packages\\stack_data\\utils.py\", line 145, in cached_property_wrapper\n", + " value = obj.__dict__[self.func.__name__] = self.func(obj)\n", + " ^^^^^^^^^^^^^^\n", + " File \"c:\\Users\\21520\\anaconda3\\Lib\\site-packages\\stack_data\\core.py\", line 628, in executing_piece\n", + " return only(\n", + " ^^^^^\n", + " File \"c:\\Users\\21520\\anaconda3\\Lib\\site-packages\\executing\\executing.py\", line 164, in only\n", + " raise 
NotOneValueFound('Expected one value, found 0')\n", + "executing.executing.NotOneValueFound: Expected one value, found 0\n" + ] + } + ], "source": [ "dual_sol, trained_with_flag = lava.compute_dual(feature_extractor, loaders['train'], loaders['test'], \n", " training_size, shuffle_ind, resize=resize)" @@ -487,7 +770,7 @@ }, { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXkAAAEWCAYAAACDoeeyAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8qNh9FAAAACXBIWXMAAAsTAAALEwEAmpwYAAAl20lEQVR4nO3deZwcZbX/8c+ZZEhCDGQFwyKBwIUbUBAQN7jmCrlIXPDngiyKKC8RENfIBQEhRq7KxSj6Q0XZlwBGuCIKCjE/EbwYMMFABEESCCJbAhqIECAh5/fH8wxUZnqpmenqruX7fr36Nd1V1VWnq7vPPF3PU6fM3RERkXLq6nQAIiKSHSV5EZESU5IXESkxJXkRkRJTkhcRKTEleRGRElOSl0Exs7vNbGqn4+gkMzvCzH6XePxPM9uukzHlkZntY2b3dTqOqlGSzzkzW25ma8xstZmtMrNbzexoM0v13pnZVDP7W4tiucjMTk9Oc/ed3f2mVqw/C2Y20czONbNHY/J9IL6OnbLapru/yt0fGOx6au3vGsu4mW0/2G1loXds7n6Lu+/YyZiqSEm+GN7t7qOAbYBvACcA53c2pPwzs3HArcDGwD7AKGB34LfAtDrPGdq2AEXawd11y/ENWA7s12vaXsB6YJf4eBjwTeCvwBPAOcAIYCSwJi77z3jbgvDP/URgGfAUMBcYm1j/3oTkuAp4GDgCOApYC7wY1/Pz3vHFOM4CHo23s4Bhcd5U4G/ADGAF8BjwsTqv+UPAwl7TPg9cG+9PB+4BVgOPAF+ss57TgTuBrgb7dxLgwJFx/90cp/8EeBx4GrgZ2DnxnHHAtcAzwO3AV4HfJeY7sH2j96bZPqm3v2vEn9zWzPheXhL3zd3AnollT4j7azVwH7Bv4nlXAT+O8+4Adk08bwvgamAl8CDwmcS8IcBJhM/SamARsHXcZw48G+P/UM/rTTz3X4GbCJ+zu4H3JOZdBHwPuC6u9zZgcqe/j0W8dTwA3Zq8QTWSfJz+V+CYeP/bMemMJbRWfw58Pc7b4IsVp30WWABsFZPQD4Er4rxt4pfqEKA7JrTd4ryLgNPrxQfMiuvdDJhA+Efx1UQc6+Iy3YRE/RwwpsZr2zjGsENi2h+Ag+P9x4B94v0xwO519t0CYGaT/TspJqNLCP8UexLwx+O+7PnHtTjxnCsJyXQksAshcdZL8s3em7r7pNb+rhF/7yT/fFzPEODrwII4b0fCP+wtEq97cuJ5a4EPxDi+SEjm3YQGwSLgVGAjYDvgAWD/+NzjgSVx/QbsCozrHVvvz2Jc91LCP4iNgLfH93zHxGt/itCgGQrMAa7s9PexiLeOB6BbkzeofpJfAJwcv1jPkmjlAG8GHoz3X/5iJeb/mdiKi48nxi/5UOBLwE/rxNIn6bBhkl8GTE/M2x9YnohjDTA0MX8F8KY627oMODXe3yEmgI3j478CnwQ2abLvlgJHJx6/h9BqXA3cGKdNislouwbrGR2X2ZSQPNcCOyXmf40aST7le1N3n9Ta3zVi653kf52YNwVYE+9vH9e9H9Ddax0zif8M4uMu4j9S4I3AX3st/yXgwnj/PuDAZrH1/izGdT9O4lcWcAXxn3J87ecl5k0H7u3kd7GoNx2TL64tgb8TWswbA4tix+wq4Fdxej3bAD9NLP9n4CVgc8JP7WUDjGkL4KHE44fitB5Pufu6xOPngFfVWdflhF8TAIcC17j7c/Hx+wlf+ofM7Ldm9uY663iK8A8MAHe/1t1HEw79bNRr2Yd77pjZEDP7hpktM7NnCP/IAMYT9uvQ5PJs+JqT0rw3/dknaTzea13DzWyouy8FPkdI6CvM7EozS743L78ed19POIy0BeGzskVP/PE1nET4rMDAPy9bAA/HbfV4iPC5rvdaBrNfKktJvoDM7A2EL8PvgCcJrcGd3X10vG3q7j1fiFplRh8GDkgsP9rdh7v7I3He5Dqbblay9FFCUujxmjhtIOYBE8xsN0Kyv/zlINz/4O4HEg4LXUM4dFLLfOC9KUciJV/bocCBhFbvpoTWPoSW+UrCIZatE8u/ps46m703/Ylp0Nz9cnffm/AeOXBGYvbLryfur60I793DhF8eyc/KKHefHhdv9Hlp5FFg617vzWsIh76khZTkC8TMNjGzdxGOCV/m7ktiS+hc4Ntmtllcbksz2z8+7QlgnJltmljVOcB/mdk2cfkJZnZgnDcH2M/MDjKzoWY2LibannU1Gv99BXBKXN94wnHcywbyWt19LaHz80zC8ex5MdaNzOwwM9s0LvMMoWO5lm8RjtlfamaTLRgF7FZn+R6jgBcIvwQ2JhyO6YnrJeB/gJlmtrGZTQE+Wuc1NHtvmmm2v1Mzsx3N7O1mNoxw3L6nQ77HHmb2vji66HOE17+A0LG82sxOMLMR8VfOLrGhAXAe8FUz2yHu39fFUU3N4r+N0Dr/TzPrjudavJvw2ZYWUpIvhp+b2WpCq+lkQvL6WGL+CYTjzwvi4YVfEzrCcPd7Ccn3gfhzewvgO4TOwBvjehcQjr3i7n8lHAqZQTgctJjQmQZh2OaUuJ5rasR5OrAQuIvQGXdHnDZQlxNa0z/pdUjjI8Dy+FqPBg6r9WR3fxJ4EyGp/Y5wLH4xIYkf02C7lxAOHTxCGMWzoNf84wiHDh4nHDu+sMG66r43KTTb3/0xjDD89klC3JsRjq33+BlhBMw/CPv3fe6+Nv5TexfhH+OD8fnnEX7hQPgszgVuJPzDPZ8wsgvCoaGLY/wHJYNx9xcJSf2AuM7vA4fHz6u0kMVODRGpKDObSegg/XCnY5HWU0teRKTElORFREpMh2tEREpMLXkRkRLLVTGm8ePH+6RJkzodhohIYSxatOhJd6978mOukvykSZNYuHBhp8MQESkMM6t3xjWgwzUiIqWmJC8iUmJK8iIiJaYkLyJSYkryIiIlpiQvIlJiuRpCKSL5dOycO7h+yWOdDqOU5nXPYNuux1m+/tVs/9XWF+FUkhdp4k1f+zWPP/NCp8OQkjl76FkcMOR2ugAzmNz1GEu/vFPLE72SvLTdvHue4BOX6KQ3qa5l3YfS1RWSew8z2K6r9b+WlORlULY/6TrW1bsuk4hsoHfrvbcsykUqyUtTZ95wH9/7zdJOhyFSaLVa70nuMOQ1e7V8u0ryUpOOQ0stQ7tg6dfe2ekwiuXsPeHJ+5suZq/ZC46c1
[... base64 PNG data omitted. The remaining hunks (@@ -487,7 +770,7 @@, @@ -499,7 +782,7 @@ and @@ -519,7 +802,7 @@) only swap the embedded image/png payloads of three matplotlib figure outputs for re-rendered versions; the surrounding text/plain lines and cell metadata appear only as unchanged context, and the patch is cut off partway through the third of these hunks. ...]
HLZmZmjVg+w7DfAL5Wyerna7c4ZmbWVOQzCq4lcAbQA2hZlh4R3ylguczMrJHLpwvuD8DngaHAs2Qzkq4pZKHMzKzxyycAfTkifga8HxF3kM2MOqCwxTIzs8YunwD0Ufq7SlJPYEfgc9VtJKmzpImS5kiaLemclD5G0hJJ09PjiJxtLpQ0T9JrkobmpA9LafMkja7ZKZqZWX2Uzw9Rx0pqC/wMGA+0Ai7JY7tSsjslTJPUGpgqqex2Pr+OiGtzM0vai+xHrj2ALwBPSto9rb6RbErwxcBkSeMjYk4eZTAzs3oqn1Fwv0uLzwJfzHfHEbEUWJqW10h6BehYxSbDgXsiYgPwpqR5fDJ197yyW/5IuifldQAyM2vA8hkF1wY4FeiSmz8ifpjvQSR1AfYBXgQGAWdLOpVP7if3LllwmpSz2WI+CViLyqX7GpSZWQOXzzWgR8mCz0xgas4jL5JaAQ8A50bEe8BNZPeS60PWQvpljUpc+XFGSZoiacqKFStqY5dmZlZA+VwDahkRP9qSnUtqQRZ8/hgRDwJExPKc9bcCj6SnS4DOOZt3SmlUkb5JukfdWICSkhLfIsjMrJ7L63dAkkZK2kVSu7JHdRtJEnAb8EpE/ConfZecbMcCs9LyeOAkSdtJ6gp0A14CJgPdJHWVtC3ZQIXxeZ2dmZnVW/m0gD4EfgH8F5/cfDSofkDCILL7xs2UND2lXQR8U1KftI8FwHcBImK2pPvIBheUAmdFxEYASWcDjwHNgdsjYnYe5TYzs3osnwD0Y7Ifo75dkx1HxPNk8weV92gV21wJXFlB+qNVbWdmZg1PPl1w84APCl0QMzNrWvJpAb0PTJc0EdhQlliTYdhmZmbl5ROAHk4PMzOzWpPPnRDuqIuCmJlZ01JpAJI0kyqm3I6I3gUpkZmZNQlVtYCOqrNSmJlZk1NpAIqIhXVZEDMza1ryGYZtZmZW6xyAzMysKCoNQJKeSn+vqbvimJlZU1HVIIRdJH0FODpNArfZbXUiYlpBS9bIdBk9odhFMDOrV6oKQJeQTcPdCfhVuXUBHFKoQpmZWeNX1Si4+4H7Jf0sIi6vwzKZmVkTUO0ghIi4XNLRkq5ND/8+qADcRWdmTU21AUjSVcA5ZPP0zAHOkfTzQhfMzMwat3xuRnok0CciPgaQdAfwL7LJ5czMzLZIvr8DapOzvGMBymFmZk1MPgHoKuBfksal1s9UKpi1tDxJnSVNlDRH0mxJ56T0dpKekDQ3/W2b0iXpBknzJM2Q1DdnXyNS/rmSRmzZqZqZWX2SzyCEu4GBwIPAA8B+EXFvHvsuBX4cEXul7c+StBcwGngqIroBT6XnAIcD3dJjFHATZAELuBQYAPQHLi0LWmZm1nDl1QUXEUsjYnx6LKvBNtPS8hrgFaAjMBwom2PoDuCYtDwcuDMyk4A2knYBhgJPRMQ7EfEu8AQwLL/TMzOz+qpO7gUnqQuwD/Ai0CEilqZVy4AOabkjsChns8UprbL08scYJWmKpCkrVqyo3RMwM7NaV/AAJKkVWdfduRHxXu66iAiqmPSuJiJibESURERJ+/bta2OXZmZWQFUGIEnNJb26pTuX1IIs+PwxIh5MyctT1xrp71spfQnQOWfzTimtsnQzM2vAqgxAEbEReE3SrjXdsSQBtwGvRETuveTGA2Uj2UYAf85JPzWNhhsIrE5ddY8BQyS1TYMPhqQ0MzNrwPL5IWpbYLakl4D3yxIj4uhqthsEfBuYKWl6SrsIuBq4T9IZwELghLTuUeAIYB7wAXB6Os47ki4HJqd8l0XEO3mU28zM6rF8AtDPtmTHEfE85aZwyHFoBfkDOKuSfd0O3L4l5TAzs/qp2gAUEc9K2g3oFhFPStoBaF74opmZWWOWz81IRwL3A7ekpI7AwwUsk5mZNQH5DMM+i+x6znsAETEX+FwhC2VmZo1fPgFoQ0R8WPZE0jbU0m93zMys6conAD0r6SJge0mHAX8C/lLYYpmZWWOXTwAaDawAZgLfJRsufXEhC2VmZo1fPqPgPk7TMLxI1vX2WhoybWZmtsWqDUCSjgRuBuaT/a6nq6TvRsRfC104MzNrvPL5IeovgYMjYh6ApC8BEwAHIDMz22L5XANaUxZ8kjeANQUqT6PUZfSEYhfBzKzeqbQFJOm4tDhF0qPAfWTXgI7nk/uymZmZbZGquuC+lrO8HDgoLa8Ati9YiczMrEmoNABFxOl1WRAzM2ta8hkF1xX4f0CX3Px5TMdgZmZWqXxGwT1MNrHcX4CPC1oaMzNrMvIJQOsj4oaCl8TMzJqUfALQ9ZIuBR4HNpQlRsS0gpXKzMwavXx+B9QLGEk2lfYv0+Pa6jaSdLuktyTNykkbI2mJpOnpcUTOugslzZP0mqShOenDUto8SaNrcnJmZlZ/5dMCOh74Yu6UDHkaB/wvcGe59F9HxGYBTNJewElAD+ALwJOSdk+rbwQOAxYDkyWNj4g5NSyLmZnVM/m0gGYBbWq644h4Dngnz+zDgXsiYkNEvAnMA/qnx7yIeCMFwHtSXjMza+DyaQG1AV6VNJnNrwFt6TDssyWdCkwBfhwR75JN8z0pJ8/ilAawqFz6gIp2KmkUMApg11133cKimZlZXcknAF1ai8e7Cbic7JY+l5NdT/pObew4IsYCYwFKSko8XYSZWT2Xz3xAz9bWwSJiedmypFuBR9LTJUDnnKydUhpVpJuZWQNW7TUgSWskvZce6yVtlPTelhxM0i45T48lu74EMB44SdJ26c4L3YCXyG562k1SV0nbkg1UGL8lxzYzs/olnxZQ67JlSSIbBDCwuu0k3Q0MBnaWtJisK2+wpD5kXXALyKb4JiJmS7oPmAOUAmdFxMa0n7OBx4DmwO0RMTv/0zMzs/oqn2tAm6SpuB9OP0yt8jc5EfHNCpJvqyL/lcCVFaQ/Cjxak3KamVn9l8/NSI/LedoMKAHWF6xEDVCX0RNYcPWRxS6GmVmDkk8LKHdeoFKyrjP/FsfMzLZKPteAPC+QmZnVuqqm5L6kiu0iIi4vQHnMzKyJqKoF9H4FaZ8BzgB2IvshqZmZ2RapakruX5YtS2oNnAOcTnY/tl9Wtp2ZmVk+qrwGJKkd8CPgZOAOoG+6d5uZmdlWqeoa0C+A48jur9YrItbWWakaoC6jJwB4OLaZWZ6quhXPj8nm5rkY+E/O7XjWbOmteMzMzMpUdQ0on7mCzMzMtoiDjJmZFYUDkJmZFYUDkJmZFYUDUD3SZfSETaPpzMwaOwcgMzMrCgcgMzMrioIFIEm3S3pL0qyctHaSnpA0N/1tm9Il6QZJ8yTNkNQ3Z5sRKf9cSSMKVV4zM6tbhWwBjQOGlUsbDTwVEd2Ap/hkVtXDgW7pMQq4CTbdCuhSYADQH7i0LGiZmVnDVrAAFBHPAe+USx5Odk850t9jctLvjMwkoI2kXYChwBMR8U66B90TfDqomZlZA1TX14A6RMTStLwM6JCWOwKLcvItTmmVpZuZWQNXtEEIERFA1Nb+JI2SNEXSlBUrVtTWbs3MrEDqOgAtT11rpL9vpfQlQOecfJ1SW
mXpnxIRYyOiJCJK2rdvX+sFNzOz2lXXAWg8UDaSbQTw55z0U9NouIHA6tRV9xgwRFLbNPhgSEqrF/zDUTOzLVflhHRbQ9LdwGBgZ0mLyUazXQ3cJ+kMYCFwQsr+KHAEMA/4gGzmVSLiHUmXA5NTvssiovzABjMza4AKFoAi4puVrDq0grwBnFXJfm4Hbq/FohVUl9ETPCmdmVkefCcEMzMrCgcgMzMrCgcgMzMrCgcgMzMrCgcgMzMrCgcgMzMrCgcgMzMrioL9DsjwXRLMzKrgFpCZmRWFA5CZmRWFA5CZmRWFA1A95GtHZtYUOACZmVlReBRcAbgFY2ZWPbeAzMysKByAzMysKByAzMysKIoSgCQtkDRT0nRJU1JaO0lPSJqb/rZN6ZJ0g6R5kmZI6luMMpuZWe0qZgvo4IjoExEl6flo4KmI6AY8lZ4DHA50S49RwE11XlIzM6t19akLbjhwR1q+AzgmJ/3OyEwC2kjapQjlMzOzWlSsABTA45KmShqV0jpExNK0vAzokJY7Aotytl2c0jYjaZSkKZKmrFixolDlNjOzWlKs3wHtHxFLJH0OeELSq7krIyIkRU12GBFjgbEAJSUlNdrWzMzqXlFaQBGxJP19C3gI6A8sL+taS3/fStmXAJ1zNu+U0hq1LqMn+AetZtao1XkAkvQZSa3LloEhwCxgPDAiZRsB/DktjwdOTaPhBgKrc7rqzMysgSpGF1wH4CFJZce/KyL+JmkycJ+kM4CFwAkp/6PAEcA84APg9LovspmZ1bY6D0AR8QawdwXpK4FDK0gP4Kw6KJqZmdWh+jQM28zMmhAHIDMzKwoHIDMzKwoHIDMzKwoHIDMzKwoHIDMzKwoHIDMzKwoHIDMzKwoHIDMzKwoHoHrONyQ1s8bKAWgL1dvAMPGq7FF+2cysninWfEBWA2XBbsHVR26+oqrgkrsud/ngCz95fvCFtVRCM7OacwuooSlr1dRGy6ayfVQWvMzMapFbQA3Audvcny1MnLH1O8sn6JR/Xr7FVL5FlZvPrSozy5MDUD23KfgUSk1aOBXlrSpwlQ9OlaWVpbtr0KxJcQCqhwoedOpKZQGrogBTWYuron3kG6AaSkBzy9GaKAegeqS6wHPdU69z7qG711FpCijfVlc+3YWVBamq9lldAKxIVQGiskBXXQCs6fW1yvI7eFkDpWzC0fpP0jDgeqA58LuIuLqyvCUlJTFlypSClqcQw7Br0vJpFIGoqaiNAJRPoHUgsq0kaWpElNTZ8RpCAJLUHHgdOAxYDEwGvhkRcyrK31ACUG11tTkYWZVyB4/UJBhW1qKrbH1tcHdkUTkAVUDSfsCYiBianl8IEBEVfiWs7QBUm62dYl3fcZCyeqGikZS1vY/aai3mU8ZGFiwdgCog6RvAsIg4Mz3/NjAgIs7OyTMKGJWe7gG8VucFrXs7A28XuxANhOsqP66n/DTWetotItrX1cEazSCEiBgLjC12OeqSpCl1+W2lIXNd5cf1lB/XU+1oKHdCWAJ0znneKaWZmVkD1VAC0GSgm6SukrYFTgLGF7lMZma2FRpEF1xElEo6G3iMbBj27RExu8jFqg+aVJfjVnJd5cf1lB/XUy1oEIMQzMys8WkoXXBmZtbIOACZmVlROAA1UJKGSXpN0jxJo4tdnmKTtEDSTEnTJU1Jae0kPSFpbvrbNqVL0g2p7mZI6lvc0heWpNslvSVpVk5ajetG0oiUf66kEcU4l0KqpJ7GSFqS3lfTJR2Rs+7CVE+vSRqak+7PZr4iwo8G9iAbiDEf+CKwLfAysFexy1XkOlkA7Fwu7X+A0Wl5NHBNWj4C+CsgYCDwYrHLX+C6ORDoC8za0roB2gFvpL9t03LbYp9bHdTTGOD8CvLulT532wFd0+exuT+bNXu4BdQw9QfmRcQbEfEhcA8wvMhlqo+GA3ek5TuAY3LS74zMJKCNpF2KUL46ERHPAe+US65p3QwFnoiIdyLiXeAJYFjBC1+HKqmnygwH7omIDRHxJjCP7HPpz2YNOAA1TB2BRTnPF6e0piyAxyVNTbdlAugQEUvT8jKgQ1p2/dW8bppynZ2duiNvL+uqxPVUKxyArLHYPyL6AocDZ0k6MHdlZP0m/s1BBVw3VboJ+BLQB1gK/LKopWlkHIAaJt+aqJyIWJL+vgU8RNYVsrysay39fStld/3VvG6aZJ1FxPKI2BgRHwO3kr2vwPVUKxyAGibfmiiHpM9Ial22DAwBZpHVSdlorRHAn9PyeODUNOJrILA6pzuqqahp3TwGDJHUNnVDDUlpjVq5a4PHkr2vIKunkyRtJ6kr0A14CX82a6RB3IrHNhe+NVF5HYCHJEH2nr4rIv4maTJwn6QzgIXACSn/o2SjveYBHwCn132R646ku4HBwM6SFgOXAldTg7qJiHckXU72DxbgsojI94J9g1BJPQ2W1Iesi3IB8F2AiJgt6T5gDlAKnBURG9N+/NnMk2/FY2ZmReEuODMzKwoHIDMzKwoHIDMzKwoHIDMzKwoHIDMzKwoHILNqSOog6S5Jb6Rb/fxT0rFbsb8xks5Py5dJ+uoW7qdP7t2ZzRoaByCzKij7cdHDwHMR8cWI2Jfsx4WdyuXbot/URcQlEfHkFhavD9lvdswaJAcgs6odAnwYETeXJUTEwoj4jaTTJI2X9DTwlKRWkp6SNC3NTbTpLsiS/kvS65KeB/bISR8n6RtpeV9Jz6ZW1mM5t8p5RtI1kl5K+zgg/cr+MuDENE/NiXVUH2a1xndCMKtaD2BaFev7Ar3TnQK2AY6NiPck7QxMkjQ+5TmJrMWyTdrf1NydSGoB/AYYHhErUkC5EvhOyrJNRPRPXW6XRsRXJV0ClETE2bV2tmZ1yAHIrAYk3QjsD3wI3EiaI6dsNfDzdCfuj8luw98BOAB4KCI+SPuo6N5gewA9gSfSLYWak919ucyD6e9UoEstnpJZ0TgAmVVtNvD1sicRcVZq3UxJSe/n5D0ZaA/sGxEfSVoAtMzzOAJmR8R+lazfkP5uxJ9bayR8Dcisak8DLSV9Pydth0ry7gi8lYLPwcBuKf054BhJ26e7dn+tgm1fA9pL2g+yLjlJPaop2xqgdb4nYlbfOACZVSFN1nYMcJCkNyW9RDaF9U8ryP5HoETSTOBU4NW0j2nAvcDLwF/55I7Sucf5EPgGcI2kl4HpwFeqKd5EYC8PQrCGynfDNjOzonALyMzMisIByMzMisIByMzMisIByMzMisIByMzMisIByMzMisIByMzMiuL/A6xU1omOs57dAAAAAElFTkSuQmCC\n", + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAaAAAAEWCAYAAAAgpUMxAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8qNh9FAAAACXBIWXMAAAsTAAALEwEAmpwYAAAyl0lEQVR4nO3debxVVf3/8dcbRNEgASUyQKFCUQYRL4PhgJqAQ6KWQ2miKTToL7W00K8pX4fUb1ZqX1MxDe2bU06RWI6oaaEMIZMDoBAQIKIgKKAXP78/9rp4uN7hXLjnnju8n4/Hedx91l5777XXOed+zlp7nb0UEZiZmdW1ZsUugJmZNU0OQGZmVhQOQGZmVhQOQGZmVhQOQGZmVhQOQGZmVhRNLgBJWiDpq2n5Ikm/K3aZACR9X9JySWsl7VRL+9w17a95beyvNuTWf4GPM1jS4kIfp9AkzZY0uNjlKCNpnKQril2OhkzSGEn/V4P8BalzSd+VdF0B9ttB0iuStqsub70KQJJOkvSipPclvZWWfyBJhTheRPw8Is7c2v1I6iIpJG2zhdu3AH4FDImIVhGxspL9r02PBZJGV7ffiPh32t/GLSlXY5bqc6akZjlpV0gaV6DjLZC0Lr1+y9M/lVbVbRcRPSLimUKUqbZJOk3S88Uux5Zq6OWvCUnbAhcDv8hJGyvpNUkfSzqtmu1PkPQPSR9IeiZ3XUQsByYCo6orR70JQJJ+DFxPViGfBzoA3wMGAdtWsk29+Wa/lToALYHZ1eRrExGtgG8Cl0gaVvCSNW5fAE6qw+N9Lb1+fYESsn8AZsUwHHg1IpbkpL0M/ACYlsf27wDXAVdXsv6PwHer20m9CECSdgQuA34QEfdHxJrI/CsiTo6IDSnfOEk3SXpU0vvAwZKOlPQvSe9JWiRpTLl9f1vSQkkrJf1XuXWbNYUlDUxRfZWkl3O7PiQ9I+lySS9IWiPpcUk7p9XPpb+r0jfc/So4x+0kXSfpP+lxXUrbHXgtZ/unq6uviPgnWbDqKamZpIvTOb4l6c5Un59qmaVveG+k8r8p6eSUns8+Rkj6t6S3c+sxbTta0vxUx/dJapdP/VdQR5W+lnmUY/v0/nhX0hygX3X1CPwP8N+qpOUq6WhlXWCr0uu/Z866BZLOlzRD0mpJ90pqmccxSR/6vwI98zxOWZdxf0lTUv0sl/Sr2iirpKMkTU/b/kNS75x1+0ialt4z95J9UcpLOu4F6bjvS7pNWffMX9P+npTUNif/nyQtS2V8TlKPnHU7SfpLOvfJylqrz+es7y7pCUnvKPsWf0LOuiMkzUnHXCLp/HzPIWcfpyvrVlqTPkPfzVk3WNJiST9Jn5+lko5Jx309lemicrtsmV6HNal+987ZX6V1LqmtpEckrUjv9Uckdarp+QCHA8/mJkTEjRHxFLC+uo0j4smIuA/4TyVZXgS+KGm36nZU9AcwDCgFtqkm3zhgNVmrqBnZCzMY6JWe9waWA8ek/HsBa4EDge3IurlKga+m9WOA/0vLHYGVwBFpX4el5+3T+meA+cDuwPbp+dVpXRcgqio/WYCdBHwOaA/8A7g8n+1z1wNK5/8BcCjwHWAe8EWgFfAg8IcKtvsM8B6wR1q3C9AjLeezj1vTee8NbAD2TOvPSefVKdXxLcDd+dR/BedZ1WtZXTmuBv4OtAM6A7OAxVW8HgF0A6YCZ6a0K4BxaXl34P30PmgB/CTV0bZp/QLgJbJWVDvgFeB7VRxvAZ+87zqTfYG4PM/jlG33T+DbabkVMHBrywrsA7wFDACaAyNS/u3Ieh4WAuel/X4D+Ai4opJzPA14vtw5TyJr4XdMx5mWjtkSeBq4NCf/d4DW6djXAdNz1t2THjuQva8WlR2L7L29CDid7L2+D/A2sFdavxQ4IC23BfrmU/5y644EvkT2+TuI7PPXN+d9WwpckuppJLACuCudTw9gHdA15//OR6k+WwDnA2+m5SrrHNgJ+Hqqh9bAn4CHc8r5W2BVJY8ZOfkmA8dXcq7PA6fl+b/7TOCZStbNAI6ucvuaBIpCPYBTgGXl0v6RKm0dcGBKGwfcWc2+rgN+nZYvAe7JWfcZ4EMqDkA/Jf3Tzcn/GDAiLT8DXJyz7gfA39JyF6oPQPOBI3KeDwUW5LN9zvpVwLtk/0B+mNY9RdZyLMu7R3rDbsOnA9Cq9Obdvtz+89lHp5z1LwEnpeVXgENz1u2Ss22V9Z/H+yL3tayuHG8Aw3LWjaL6APRlsi8cC8k++LkB6GfAfTn5mwFLgMHp+QLglJz1/wPcXMXxFpAF41XpeL8lC6T5HKfs/foc8N/AzuX2vcVlBW4ifRHKWf8a2T/ZA8m+4arc57ImAejknOcPADflPP9/5PzzLLevNuk12pEsMH5E+vKU1l/BJwHoRODv5ba/hRTcgH+TdQd9tpr322blrybvw8A5aXkw2f+p5ul561T2ATn5p/LJl6kxwKRyr9dS4IAtqPM+wLv5lLncdnPJ+byUW1dbAegF4NSqtq8XXXBkLY2dc7tCIuIrEdEmrcst56LcDSUNkDQxNUlXk103Kusa+0Ju/oh4P+2vIrsBx6duiFWSVgH7k/1DLbMsZ/kDsm+h+foC2T+eMgtTWk3sHBFtI2LPiLihiv1uQ/atc5N07ieS1c9SSRMkda/BPio7992Ah3Lq7BVgY9q2JvVf3WtZXTk2O1a586lURDwKLObT/dWb1UlEfJz237G6sqQuprIBIyfn5DkmItpExG4R8YOIWJfnccqcQdbaeTV1Qx21tWUle/1+XO593znt8wvAkkj/TZK86jXH8pzldRU8L6uz5pKuVtaV+x5Z8ILs9W9P9n7MfX1zl3cDBpQ7h5PJriVD9qXrCGChpGdVQRd5dSQdLmlS6k5blfaX+95cGZ8M9lmX/lZ4ruXLn16vxeRR55J2kHSLsm7t98i+lLRRza+Hv0sWKAupNdkXrkrVlwD0T7LulOF55I1yz+8CxgOdI2JH4GayZjJk3yo6l2WUtANZE7Yii8haQG1yHp+JiMouslVVpor8h+yDUmZXKu8/rYmK9lvK5m9+ACLisYg4jCyovkrWnVWjfVRgEXB4uXprGdl1jprUP1T9WlZns2Olc8jXfwEXkXVrlNmsTiQp7X8J1YiIwyMbfdgqIv5YTfa8jxMRcyPim2TduNcA90v6zNaUlez1u7Lc67dDRNxNVqcd0/7K1KRea+JbZJ//r5K1erqkdJF1Z5WSdfOWyX2tFwHPljuHVhHxfYCImBwRw8nq7WHgvpoUTNlw4geAa4EO6Yvxo+T/3qxI7ueiGdm5/Yfq6/zHZD0UAyLis2QtJsrKIunmnC8/5R+5g5xmkH2ZKYjUmPgy2cCGStWLABQRq8i6Fn4r6RuSWiu7uN2HrNumKq2BdyJivaT+ZG/kMvcDR0naX9mww8uo/Jz/D/iapKHp21jLdHExnwt8K4CPya6hVOZu4GJJ7ZUNXrgkHXNr3Q2cJ6mrsmG9PwfujYjS3EzKLv4OT/+wNpB1B31ck31U4mbgyrKLjen8yr5I1KT+oerXsjr3AR
emi7SdyLp38hLZMOdZZNc/cvd3pKRDlQ2T/zFZvf2jBmXKR97HkXSKpPbpG/OqlPzxVpb1VuB7qfUpSZ9RNhikNdkXw1Lgh5JaSDoO6L91p1up1qnMK8m+CPy8bEVqWTwIjEktgO7AqTnbPgLsrmzAS4v06CdpT0nbSjpZ0o4R8RHZddCPqZzSZ3/Tg6x7djtSIJR0ODBkK893X0nHpX/U56Zzn0T1dd6arDW1Stlgn0tzdxoR38v58lP+0SMn66Nk3ay5J75tOl8BLdL5N0vrBkuKnLzNU95tgGYpb4uc3fUnu8RQZYu5XgQggIj4H+BHZBdQl6fHLWTXZqr6IP0AuEzSGrJ/6pu+3UTEbOAssm/WS8manRX+ODEiFpF9A7uI7I22CLiAPOooIj4ArgReSF0AAyvIdgUwheybx0yyi7G18eOy24E/kDXF3yQbwVLRP99mZPX7H7IhlAcB36/hPipyPVmr5fH0Gkwiu6Bdo/pPKn0t8/DfZF0VbwKPp/OpiYvJLtADEBGvkV2b/A3ZBe2vkQ2j/rCG+61SDY8zDJgtaS1ZvZ8UEeu2pqwRMYXsovn/kr0+88iuhZC2Py49f4esC/fBLT3XatxJ9votAeaQvY9ynU3WMlpG9treTfZPm4hYQxYQTiJ7fy8jayGW/RDy28CC1GX1PbLuucp8hewffPnHD8nej++SfTEav8VnmvkzWX2+m8p3XER8lEedX0d27fBtsjr62xYe/y9Ad0m5lwEeJzvXrwBj03JZC6szm/8f/nZafxPZtat1fNKjAlkd31xdIbR5V6OZWf0n6Rrg8xExotrMViFJo8hGCp6bR97fAX+KiMfyyPs5siHe+0RElUO6HYDMrN5L3W7bkvUe9CPrQjozIh4uZrls62zRrWPMzOpYa7Juty+Qdc//kqwbyxowt4DMzKwo6s0gBDMza1oaZRfczjvvHF26dCl2MczMGpSpU6e+HRHt6+p4jTIAdenShSlTphS7GGZmDYqkmt7pYqu4C87MzIrCAcjMzIrCAcjMzIqiUV4DqshHH33E4sWLWb++2rmWrAFq2bIlnTp1okWLFtVnNrN6ockEoMWLF9O6dWu6dOnC5jeatYYuIli5ciWLFy+ma9euxS6OmeWpyXTBrV+/np122snBpxGSxE477eTWrVkD02QCEODg04j5tTVreJpUADIzs/qj4NeA0lSxU8immT1KUlfgHrKZMacC346ID9Osg3cC+5JNSnViRCxI+7iQbDrijcAP87kleHW6jJ6wtbvYzIKrj6w2z7Jlyzj33HOZPHkybdq0oUOHDlx33XVsu+22HHXUUcyaNatWy5RrzJgxtGrVivPPP79gxzAzq4m6aAGdA7yS8/wa4NcR8WWyyZjOSOlnAO+m9F+nfEjai2yiqR5kE3L9VjWf/7zoIoJjjz2WwYMHM3/+fKZOncpVV13FP2bO45Wl7xW7eGZmda6gAShNjXwk8Lv0XMAhZFM1A9wBHJOWh6fnpPWHpvzDgXsiYkNEvEk2Y2OhpgUumIkTJ9KiRQu+973vbUrbe++96TvgK5vl27hxIxdccAH9+vWjd+/e3HLLLQCsXbuWQw89lL59+9KrVy/+/OfsTvQLFixgzz33ZOTIkfTo0YMhQ4awbt26KssyePBgzjvvPEpKSthzzz2ZPHkyxx13HN26dePiiy/elO+YY45h3333pUePHowdO3ZT+m233cbuu+9O//79GTlyJGeffTYAK1as4Otf/zr9+vWjX79+vPDCCwA8++yz9OnThz59+rDPPvuwZs2arahJM2ssCt0Fdx3ZFNut0/OdgFURUZqeLwY6puWOZNNgExGlklan/B3ZfHre3G02SbP7jQLYdddda/UkasOsWbPYd999q8132223seOOOzJ58mQ2bNjAoEGDGDJkCJ07d+ahhx7is5/9LG+//TYDBw7k6KOPBmDu3Lncfffd3HrrrZxwwgk88MADnHLKKVUeZ9ttt2XKlClcf/31DB8+nKlTp9KuXTu+9KUvcd5557HTTjtx++23065dO9atW0e/fv34+te/zoYNG7j88suZNm0arVu35pBDDmHvvfcG4JxzzuG8885j//3359///jdDhw7llVde4dprr+XGG29k0KBBrF27lpYtW259hZpZg1ewACTpKOCtiJgqaXChjlMmIsaSzWNOSUlJg5jkaMbiVZ9Ke/zxx5kxYwb33581ElevXs3cuXPp1KkTF110Ec899xzNmjVjyZIlLF++HICuXbvSp08fAPbdd18WLFhQ7bHLglevXr3o0aMHu+yyCwBf/OIXWbRoETvttBM33HADDz30EACLFi1i7ty5LFu2jIMOOoh27doBcPzxx/P6668D8OSTTzJnzpxNx3jvvfdYu3YtgwYN4kc/+hEnn3wyxx13HJ06dap5ZZlZo1PIFtAg4GhJRwAtgc8C1wNtJG2TWkGdgCUp/xKgM7BY0jbAjmSDEcrSy+Ru02D06NFjU1CpSkTwm9/8hqFDh26WPm7cOFasWMHUqVNp0aIFXbp02fS7l+22225TvubNm1fbBZe7TbNmzTbbvlmzZpSWlvLMM8/w5JNP8s9//pMddtiBwYMHV/s7m48//phJkyZ9qoUzevRojjzySB599FEGDRrEY489Rvfu3asto5k1bgW7BhQRF0ZEp4joQjaI4OmIOBmYCHwjZRvBJ9Pqjk/PSeufjmy61vHASZK2SyPougEvFarchXLIIYewYcOGza6lvP7KLKa9+I/N8g0dOpSbbrqJjz76KMvz+uu8//77rF69ms997nO0aNGCiRMnsnBhYe+avnr1atq2bcsOO+zAq6++yqRJWS9ov379ePbZZ3n33XcpLS3lgQce2LTNkCFD+M1vfrPp+fTp0wGYP38+vXr14qc//Sn9+vXj1VdfLWjZzaxhKMateH4K3CPpCuBfwG0p/TbgD5LmAe+QBS0iYrak+4A5QClwVkRs3NpC5DNsujZJ4qGHHuLcc8/lmmuuoWXLluz0+Y5cMOaqzfKdeeaZLFiwgL59+xIRtG/fnocffpiTTz6Zr33ta/Tq1YuSkpKCtyCGDRvGzTffzJ577skee+zBwIEDAejYsSMXXXQR/fv3p127dnTv3p0dd9wRgBtuuIGzzjqL3r17U1payoEHHsjNN9/Mddddx8SJE2nWrBk9evTg8MMPL2jZzaxhUNbIaFxKSkqi/IR0r7zyCnvuuWeRSlSx3GtAvTu1KVo5amrt2rW0atWK0tJSjj32WL7zne9w7LHHFrtY9fI1NmtIJE2NiJK6Op7vhGA1NmbMGPr06UPPnj3p2rUrxxxzTLGLZGYNUJO5G7bVnmuvvbbYRTCzRsAtIDMzKwoHIDMzKwoHIDMzKwoHIDMzK4qmOwhh4lXV56mJgy+sNkvz5s3p1asXpaWl7Lnnnpz/8+vZfvsdKsw7fvx45syZw+jRo2u3nNV45plnuPbaa3nkkUfySjcz21JuAdWh7bffnunTp3PX357ng1L40x9+X2neo48+us6Dj5lZXXIAKpJ9+u/HogVvsPrddzn3j
JPp3bs3AwcOZMaMGUB277eyaQ7+9Kc/0bNnT/bee28OPPBAANavX8/pp59Or1692GeffZg4ceKm7Y477jiGDRtGt27d+MlPfrLpmI8//jj77bcfffv25fjjj2ft2rUA/O1vf6N79+707duXBx98sNqyjxkzhhEjRnDAAQew22678eCDD/KTn/yEXr16MWzYsE23Ebrsssvo168fPXv2ZNSoUZT96Hny5Mn07t2bPn36cMEFF9CzZ0+g8qkoli5dyoEHHrjpt0d///vft7r+zaz4HICKoLS0lBcmPkm37nvx219dRfeevZkxYwY///nPOfXUUz+V/7LLLuOxxx7j5ZdfZvz48QDceOONSGLmzJncfffdjBgxYtPNQqdPn869997LzJkzuffee1m0aBFvv/02V1xxBU8++STTpk2jpKSEX/3qV6xfv56RI0fyl7/8halTp7Js2bK8zmH+/Pk8/fTTjB8/nlNOOYWDDz6YmTNnsv322zNhQjbb7Nlnn83kyZOZNWsW69at29R9d/rpp3PLLbcwffp0mjf/ZG7B3KkoJk+ezK233sqbb77JXXfdxdChQ5k+fTovv/zypjt/m1nD5gBUh9atW0efPn341pEH8/mOnTj2pG/zr8mTOOq4E4HshqUrV67kvfc2nyF10KBBnHbaadx6661s3JjdBu/555/fNOdP9+7d2W233TZNi3DooYey44470rJlS/baay8WLlzIpEmTmDNnDoMGDaJPnz7ccccdLFy4kFdffZWuXbvSrVs3JFU7j1CZww8/nBYtWtCrVy82btzIsGHDgGx6h7LpICZOnMiAAQPo1asXTz/9NLNnz2bVqlWsWbOG/fbbD4Bvfetbm/b5+OOPc+edd9KnTx8GDBjAypUrmTt3Lv369eP3v/89Y8aMYebMmbRu3fpT5TGzhqfpDkIogrJrQBXNA1SVm2++mRdffJEJEyaw7777MnXq1Crzl5+eobS0lIjgsMMO4+67794sb9kdq2sqdzqHFi1akE1e+8l0DuvXr+cHP/gBU6ZMoXPnzowZM6ba6Rwqm4oC4LnnnmPChAmcdtpp/OhHP6qwpWhmDYtbQEXWt/9+THjoT0A20mznnXfms5/97GZ55s+fz4ABA7jsssto3749ixYt4oADDuCPf/wjkE3Z8O9//5s99tij0uMMHDiQF154gXnz5gHw/vvv8/rrr9O9e3cWLFjA/PnzAT4VoLZUWbDZeeedWbt27aa5kNq0aUPr1q158cUXAbjnnns2bVPZVBQLFy6kQ4cOjBw5kjPPPJNp06bVShnNrLiabgsoj2HTdeH7543m0vPPpnfv3uywww7ccccdn8pzwQUXMHfuXCKCQw89lL333pvu3bvz/e9/n169erHNNtswbty4zVo+5bVv355x48bxzW9+kw0bNgBwxRVXsPvuuzN27FiOPPJIdthhBw444ADWrFmz1efVpk0bRo4cSc+ePfn85z9Pv379Nq277bbbGDlyJM2aNeOggw7aNJ1DZVNRPPPMM/ziF7+gRYsWtGrVijvvvHOry2dmxefpGIqgoi64hjQdw9Yqm84B4Oqrr2bp0qVcf/31W73f+vQamzVEdT0dQ9NtAVnRTJgwgauuuorS0lJ22203xo0bV+wimVkRFCwASWoJPAdsl45zf0RcKmkccBCwOmU9LSKmK7uKfT1wBPBBSp+W9jUCuDjlvyIiPt1PZQ3GiSeeyIknnljsYphZkRWyBbQBOCQi1kpqATwv6a9p3QURcX+5/IcD3dJjAHATMEBSO+BSoAQIYKqk8RHxbk0LFBGbRmtZ49IYu5LNGruCjYKLzNr0tEV6VPVfYjhwZ9puEtBG0i7AUOCJiHgnBZ0ngGE1LU/Lli1ZuXKl/1E1QhHBypUradmyZbGLYmY1UNBrQJKaA1OBLwM3RsSLkr4PXCnpEuApYHREbAA6AotyNl+c0ipLL3+sUcAogF133fVTZenUqROLFy9mxYoVtXFqW2X5u+s+lfbKmu2LUJLGo2XLlnTq1KnYxTCzGihoAIqIjUAfSW2AhyT1BC4ElgHbAmOBnwKX1cKxxqb9UVJS8qlmTosWLejatevWHqZWHD56wqfSFlx9ZBFKYmZWPHXyQ9SIWAVMBIZFxNLUzbYB+D3QP2VbAnTO2axTSqss3czMGrCCBSBJ7VPLB0nbA4cBr6brOqRRb8cAs9Im44FTlRkIrI6IpcBjwBBJbSW1BYakNDMza8AK2QW3C3BHug7UDLgvIh6R9LSk9oCA6cD3Uv5HyYZgzyMbhn06QES8I+lyYHLKd1lEvFPAcpuZWR0oWACKiBnAPhWkH1JJ/gDOqmTd7cDttVpAMzMrKt+M1MzMisIByMzMisIByMzMisIByMzMisIByMzMisIByMzMisIByMzMisIByMzMisIByMzMisIByMzMisIByMzMisIByMzMisIByMzMisIByMzMisIBqI51qWA6bjOzpsgByMzMisIByMzMiqJgAUhSS0kvSXpZ0mxJ/53Su0p6UdI8SfdK2jalb5eez0vru+Ts68KU/pqkoYUqs5mZ1Z1CtoA2AIdExN5AH2CYpIHANcCvI+LLwLvAGSn/GcC7Kf3XKR+S9gJOAnoAw4DfSmpewHKbmVkdKFgAisza9LRFegRwCHB/Sr8DOCYtD0/PSesPlaSUfk9EbIiIN4F5QP9CldvMzOpGQa8BSWouaTrwFvAEMB9YFRGlKctioGNa7ggsAkjrVwM75aZXsE3usUZJmiJpyooVKwpwNmZmVpuqDUCSjpfUOi1fLOlBSX3z2XlEbIyIPkAnslZL960pbDXHGhsRJRFR0r59+0IdxszMakk+LaCfRcQaSfsDXwVuA26qyUEiYhUwEdgPaCNpm7SqE7AkLS8BOgOk9TsCK3PTK9jGzMwaqHwC0Mb090hgbERMALatbiNJ7SW1ScvbA4cBr5AFom+kbCOAP6fl8ek5af3TEREp/aQ0Sq4r0A14KY9ym5lZPbZN9VlYIukWsgByjaTtyC9w7QLckUasNQPui4hHJM0B7pF0BfAvshYV6e8fJM0D3iEb+UZEzJZ0HzAHKAXOioiNmJlZg5ZPADqBbPjztRGxStIuwAXVbRQRM4B9Kkh/gwpGsUXEeuD4SvZ1JXBlHmU1M7MGotqWTER8QDaKbf+UVArMLWShzMys8ctnFNylwE+BC1NSC+D/ClkoMzNr/PK5lnMscDTwPkBE/AdoXchCmZlZ45dPAPowjUYLAEmfKWyRzMysKcgnAN2XRsG1kTQSeBK4tbDFMjOzxq7aUXARca2kw4D3gD2ASyLiiYKXzMzMGrV8hmGTAo6DjpmZ1ZpqA5CkNaTrPzlWA1OAH6ff9ZiZmdVIPi2g68juQH0XILI7FHwJmAbcDgwuUNnMzKwRy2cQwtERcUtErImI9yJiLDA0Iu4F2ha4fGZm1kjlE4A+kHSCpGbpcQKwPq0r3zVnZmaWl3wC0MnAt8lux7M8LZ+S7nB9dgHLZmZmjVg+w7DfAL5Wyerna7c4ZmbWVOQzCq4lcAbQA2hZlh4R3ylg
uczMrJHLpwvuD8DngaHAs2Qzkq4pZKHMzKzxyycAfTkifga8HxF3kM2MOqCwxTIzs8YunwD0Ufq7SlJPYEfgc9VtJKmzpImS5kiaLemclD5G0hJJ09PjiJxtLpQ0T9JrkobmpA9LafMkja7ZKZqZWX2Uzw9Rx0pqC/wMGA+0Ai7JY7tSsjslTJPUGpgqqex2Pr+OiGtzM0vai+xHrj2ALwBPSto9rb6RbErwxcBkSeMjYk4eZTAzs3oqn1Fwv0uLzwJfzHfHEbEUWJqW10h6BehYxSbDgXsiYgPwpqR5fDJ197yyW/5IuifldQAyM2vA8hkF1wY4FeiSmz8ifpjvQSR1AfYBXgQGAWdLOpVP7if3LllwmpSz2WI+CViLyqX7GpSZWQOXzzWgR8mCz0xgas4jL5JaAQ8A50bEe8BNZPeS60PWQvpljUpc+XFGSZoiacqKFStqY5dmZlZA+VwDahkRP9qSnUtqQRZ8/hgRDwJExPKc9bcCj6SnS4DOOZt3SmlUkb5JukfdWICSkhLfIsjMrJ7L63dAkkZK2kVSu7JHdRtJEnAb8EpE/ConfZecbMcCs9LyeOAkSdtJ6gp0A14CJgPdJHWVtC3ZQIXxeZ2dmZnVW/m0gD4EfgH8F5/cfDSofkDCILL7xs2UND2lXQR8U1KftI8FwHcBImK2pPvIBheUAmdFxEYASWcDjwHNgdsjYnYe5TYzs3osnwD0Y7Ifo75dkx1HxPNk8weV92gV21wJXFlB+qNVbWdmZg1PPl1w84APCl0QMzNrWvJpAb0PTJc0EdhQlliTYdhmZmbl5ROAHk4PMzOzWpPPnRDuqIuCmJlZ01JpAJI0kyqm3I6I3gUpkZmZNQlVtYCOqrNSmJlZk1NpAIqIhXVZEDMza1ryGYZtZmZW6xyAzMysKCoNQJKeSn+vqbvimJlZU1HVIIRdJH0FODpNArfZbXUiYlpBS9bIdBk9odhFMDOrV6oKQJeQTcPdCfhVuXUBHFKoQpmZWeNX1Si4+4H7Jf0sIi6vwzKZmVkTUO0ghIi4XNLRkq5ND/8+qADcRWdmTU21AUjSVcA5ZPP0zAHOkfTzQhfMzMwat3xuRnok0CciPgaQdAfwL7LJ5czMzLZIvr8DapOzvGMBymFmZk1MPgHoKuBfksal1s9UKpi1tDxJnSVNlDRH0mxJ56T0dpKekDQ3/W2b0iXpBknzJM2Q1DdnXyNS/rmSRmzZqZqZWX2SzyCEu4GBwIPAA8B+EXFvHvsuBX4cEXul7c+StBcwGngqIroBT6XnAIcD3dJjFHATZAELuBQYAPQHLi0LWmZm1nDl1QUXEUsjYnx6LKvBNtPS8hrgFaAjMBwom2PoDuCYtDwcuDMyk4A2knYBhgJPRMQ7EfEu8AQwLL/TMzOz+qpO7gUnqQuwD/Ai0CEilqZVy4AOabkjsChns8UprbL08scYJWmKpCkrVqyo3RMwM7NaV/AAJKkVWdfduRHxXu66iAiqmPSuJiJibESURERJ+/bta2OXZmZWQFUGIEnNJb26pTuX1IIs+PwxIh5MyctT1xrp71spfQnQOWfzTimtsnQzM2vAqgxAEbEReE3SrjXdsSQBtwGvRETuveTGA2Uj2UYAf85JPzWNhhsIrE5ddY8BQyS1TYMPhqQ0MzNrwPL5IWpbYLakl4D3yxIj4uhqthsEfBuYKWl6SrsIuBq4T9IZwELghLTuUeAIYB7wAXB6Os47ki4HJqd8l0XEO3mU28zM6rF8AtDPtmTHEfE85aZwyHFoBfkDOKuSfd0O3L4l5TAzs/qp2gAUEc9K2g3oFhFPStoBaF74opmZWWOWz81IRwL3A7ekpI7AwwUsk5mZNQH5DMM+i+x6znsAETEX+FwhC2VmZo1fPgFoQ0R8WPZE0jbU0m93zMys6conAD0r6SJge0mHAX8C/lLYYpmZWWOXTwAaDawAZgLfJRsufXEhC2VmZo1fPqPgPk7TMLxI1vX2WhoybWZmtsWqDUCSjgRuBuaT/a6nq6TvRsRfC104MzNrvPL5IeovgYMjYh6ApC8BEwAHIDMz22L5XANaUxZ8kjeANQUqT6PUZfSEYhfBzKzeqbQFJOm4tDhF0qPAfWTXgI7nk/uymZmZbZGquuC+lrO8HDgoLa8Ati9YiczMrEmoNABFxOl1WRAzM2ta8hkF1xX4f0CX3Px5TMdgZmZWqXxGwT1MNrHcX4CPC1oaMzNrMvIJQOsj4oaCl8TMzJqUfALQ9ZIuBR4HNpQlRsS0gpXKzMwavXx+B9QLGEk2lfYv0+Pa6jaSdLuktyTNykkbI2mJpOnpcUTOugslzZP0mqShOenDUto8SaNrcnJmZlZ/5dMCOh74Yu6UDHkaB/wvcGe59F9HxGYBTNJewElAD+ALwJOSdk+rbwQOAxYDkyWNj4g5NSyLmZnVM/m0gGYBbWq644h4Dngnz+zDgXsiYkNEvAnMA/qnx7yIeCMFwHtSXjMza+DyaQG1AV6VNJnNrwFt6TDssyWdCkwBfhwR75JN8z0pJ8/ilAawqFz6gIp2KmkUMApg11133cKimZlZXcknAF1ai8e7Cbic7JY+l5NdT/pObew4IsYCYwFKSko8XYSZWT2Xz3xAz9bWwSJiedmypFuBR9LTJUDnnKydUhpVpJuZWQNW7TUgSWskvZce6yVtlPTelhxM0i45T48lu74EMB44SdJ26c4L3YCXyG562k1SV0nbkg1UGL8lxzYzs/olnxZQ67JlSSIbBDCwuu0k3Q0MBnaWtJisK2+wpD5kXXALyKb4JiJmS7oPmAOUAmdFxMa0n7OBx4DmwO0RMTv/0zMzs/oqn2tAm6SpuB9OP0yt8jc5EfHNCpJvqyL/lcCVFaQ/Cjxak3KamVn9l8/NSI/LedoMKAHWF6xEDVCX0RNYcPWRxS6GmVmDkk8LKHdeoFKyrjP/FsfMzLZKPteAPC+QmZnVuqqm5L6kiu0iIi4vQHnMzKyJqKoF9H4FaZ8BzgB2IvshqZmZ2RapakruX5YtS2oNnAOcTnY/tl9Wtp2ZmVk+qrwGJKkd8CPgZOAOoG+6d5uZmdlWqeoa0C+A48jur9YrItbWWakaoC6jJwB4OLaZWZ6quhXPj8nm5rkY+E/O7XjWbOmteMzMzMpUdQ0on7mCzMzMtoiDjJmZFYUDkJmZFYUDkJmZFYUDUD3SZfSETaPpzMwaOwcgMzMrCgcgMzMrioIFIEm3S3pL0qyctHaSnpA0N/1tm9Il6QZJ8yTNkNQ3Z5sRKf9cSSMKVV4zM6tbhWwBjQOGlUsbDTwVEd2Ap/hkVtXDgW7pMQq4CTbdCuhSYADQH7i0LGiZmVnDVrAAFBHPAe+USx5Odk850t9jctLvjMwkoI2kXYChwBMR8U66B90TfDqomZlZA1TX14A6RMTStLwM6JCWOwKLcvItTmmVpZuZWQNXtEEIERFA1Nb+JI2SNEXSlBUrVtTWbs3MrEDqOgAtT11rpL9vpfQlQOecfJ1SWmXpnxIRYyOiJCJK2rdvX+sFNzOz2lXXAWg8UDaSbQTw55z0U9NouIH
A6tRV9xgwRFLbNPhgSEqrF/zDUTOzLVflhHRbQ9LdwGBgZ0mLyUazXQ3cJ+kMYCFwQsr+KHAEMA/4gGzmVSLiHUmXA5NTvssiovzABjMza4AKFoAi4puVrDq0grwBnFXJfm4Hbq/FohVUl9ETPCmdmVkefCcEMzMrCgcgMzMrCgcgMzMrCgcgMzMrCgcgMzMrCgcgMzMrCgcgMzMrioL9DsjwXRLMzKrgFpCZmRWFA5CZmRWFA5CZmRWFA1A95GtHZtYUOACZmVlReBRcAbgFY2ZWPbeAzMysKByAzMysKByAzMysKIoSgCQtkDRT0nRJU1JaO0lPSJqb/rZN6ZJ0g6R5kmZI6luMMpuZWe0qZgvo4IjoExEl6flo4KmI6AY8lZ4DHA50S49RwE11XlIzM6t19akLbjhwR1q+AzgmJ/3OyEwC2kjapQjlMzOzWlSsABTA45KmShqV0jpExNK0vAzokJY7Aotytl2c0jYjaZSkKZKmrFixolDlNjOzWlKs3wHtHxFLJH0OeELSq7krIyIkRU12GBFjgbEAJSUlNdrWzMzqXlFaQBGxJP19C3gI6A8sL+taS3/fStmXAJ1zNu+U0hq1LqMn+AetZtao1XkAkvQZSa3LloEhwCxgPDAiZRsB/DktjwdOTaPhBgKrc7rqzMysgSpGF1wH4CFJZce/KyL+JmkycJ+kM4CFwAkp/6PAEcA84APg9LovspmZ1bY6D0AR8QawdwXpK4FDK0gP4Kw6KJqZmdWh+jQM28zMmhAHIDMzKwoHIDMzKwoHIDMzKwoHIDMzKwoHIDMzKwoHIDMzKwoHIDMzKwoHIDMzKwoHoHrONyQ1s8bKAWgL1dvAMPGq7FF+2cysninWfEBWA2XBbsHVR26+oqrgkrsud/ngCz95fvCFtVRCM7OacwuooSlr1dRGy6ayfVQWvMzMapFbQA3Audvcny1MnLH1O8sn6JR/Xr7FVL5FlZvPrSozy5MDUD23KfgUSk1aOBXlrSpwlQ9OlaWVpbtr0KxJcQCqhwoedOpKZQGrogBTWYuron3kG6AaSkBzy9GaKAegeqS6wHPdU69z7qG711FpCijfVlc+3YWVBamq9lldAKxIVQGiskBXXQCs6fW1yvI7eFkDpWzC0fpP0jDgeqA58LuIuLqyvCUlJTFlypSClqcQw7Br0vJpFIGoqaiNAJRPoHUgsq0kaWpElNTZ8RpCAJLUHHgdOAxYDEwGvhkRcyrK31ACUG11tTkYWZVyB4/UJBhW1qKrbH1tcHdkUTkAVUDSfsCYiBianl8IEBEVfiWs7QBUm62dYl3fcZCyeqGikZS1vY/aai3mU8ZGFiwdgCog6RvAsIg4Mz3/NjAgIs7OyTMKGJWe7gG8VucFrXs7A28XuxANhOsqP66n/DTWetotItrX1cEazSCEiBgLjC12OeqSpCl1+W2lIXNd5cf1lB/XU+1oKHdCWAJ0znneKaWZmVkD1VAC0GSgm6SukrYFTgLGF7lMZma2FRpEF1xElEo6G3iMbBj27RExu8jFqg+aVJfjVnJd5cf1lB/XUy1oEIMQzMys8WkoXXBmZtbIOACZmVlROAA1UJKGSXpN0jxJo4tdnmKTtEDSTEnTJU1Jae0kPSFpbvrbNqVL0g2p7mZI6lvc0heWpNslvSVpVk5ajetG0oiUf66kEcU4l0KqpJ7GSFqS3lfTJR2Rs+7CVE+vSRqak+7PZr4iwo8G9iAbiDEf+CKwLfAysFexy1XkOlkA7Fwu7X+A0Wl5NHBNWj4C+CsgYCDwYrHLX+C6ORDoC8za0roB2gFvpL9t03LbYp9bHdTTGOD8CvLulT532wFd0+exuT+bNXu4BdQw9QfmRcQbEfEhcA8wvMhlqo+GA3ek5TuAY3LS74zMJKCNpF2KUL46ERHPAe+US65p3QwFnoiIdyLiXeAJYFjBC1+HKqmnygwH7omIDRHxJjCP7HPpz2YNOAA1TB2BRTnPF6e0piyAxyVNTbdlAugQEUvT8jKgQ1p2/dW8bppynZ2duiNvL+uqxPVUKxyArLHYPyL6AocDZ0k6MHdlZP0m/s1BBVw3VboJ+BLQB1gK/LKopWlkHIAaJt+aqJyIWJL+vgU8RNYVsrysay39fStld/3VvG6aZJ1FxPKI2BgRHwO3kr2vwPVUKxyAGibfmiiHpM9Ial22DAwBZpHVSdlorRHAn9PyeODUNOJrILA6pzuqqahp3TwGDJHUNnVDDUlpjVq5a4PHkr2vIKunkyRtJ6kr0A14CX82a6RB3IrHNhe+NVF5HYCHJEH2nr4rIv4maTJwn6QzgIXACSn/o2SjveYBHwCn132R646ku4HBwM6SFgOXAldTg7qJiHckXU72DxbgsojI94J9g1BJPQ2W1Iesi3IB8F2AiJgt6T5gDlAKnBURG9N+/NnMk2/FY2ZmReEuODMzKwoHIDMzKwoHIDMzKwoHIDMzKwoHIDMzKwoHILNqSOog6S5Jb6Rb/fxT0rFbsb8xks5Py5dJ+uoW7qdP7t2ZzRoaByCzKij7cdHDwHMR8cWI2Jfsx4WdyuXbot/URcQlEfHkFhavD9lvdswaJAcgs6odAnwYETeXJUTEwoj4jaTTJI2X9DTwlKRWkp6SNC3NTbTpLsiS/kvS65KeB/bISR8n6RtpeV9Jz6ZW1mM5t8p5RtI1kl5K+zgg/cr+MuDENE/NiXVUH2a1xndCMKtaD2BaFev7Ar3TnQK2AY6NiPck7QxMkjQ+5TmJrMWyTdrf1NydSGoB/AYYHhErUkC5EvhOyrJNRPRPXW6XRsRXJV0ClETE2bV2tmZ1yAHIrAYk3QjsD3wI3EiaI6dsNfDzdCfuj8luw98BOAB4KCI+SPuo6N5gewA9gSfSLYWak919ucyD6e9UoEstnpJZ0TgAmVVtNvD1sicRcVZq3UxJSe/n5D0ZaA/sGxEfSVoAtMzzOAJmR8R+lazfkP5uxJ9bayR8Dcisak8DLSV9Pydth0ry7gi8lYLPwcBuKf054BhJ26e7dn+tgm1fA9pL2g+yLjlJPaop2xqgdb4nYlbfOACZVSFN1nYMcJCkNyW9RDaF9U8ryP5HoETSTOBU4NW0j2nAvcDLwF/55I7Sucf5EPgGcI2kl4HpwFeqKd5EYC8PQrCGynfDNjOzonALyMzMisIByMzMisIByMzMisIByMzMisIByMzMisIByMzMisIByMzMiuL/A6xU1omOs57dAAAAAElFTkSuQmCC", "text/plain": [ "
" ] @@ -1556,20 +1839,148 @@ "calibrated_gradient" ] }, + { + "cell_type": "code", + "execution_count": 19, + "id": "2d68f427", + "metadata": {}, + "outputs": [], + "source": [ + "ktr = None" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "de75c36d", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([3, 32, 32])\n" + ] + } + ], + "source": [ + "for batch in loaders['train']:\n", + " print(batch[0][0].size())\n", + " ktr = batch[0][0]\n", + " break" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "id": "e3e37e64", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: pillow in c:\\users\\21520\\anaconda3\\lib\\site-packages (10.2.0)\n" + ] + } + ], + "source": [ + "!pip install pillow" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "be3b5ad9", + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "from PIL import Image\n", + "import numpy as np" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "id": "afd221a7", + "metadata": {}, + "outputs": [], + "source": [ + "np_array = ktr.numpy()\n", + "np_array = np.transpose(np_array, (1, 2, 0))\n", + "image = Image.fromarray(np.uint8(np_array * 255))\n", + "image.save('output_image.png')\n", + "image.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "dc95577e", + "metadata": {}, + "outputs": [], + "source": [ + "print(np_array.shape)" + ] + }, { "cell_type": "code", "execution_count": null, "id": "ecd27488", "metadata": {}, "outputs": [], - "source": [] + "source": [ + " def __call__(self, X1, X2):\n", + " print(1)\n", + " _orig_device = X1.device\n", + " device = process_device_arg(self.device)\n", + " #print(\"Device call: \", device)\n", + " #print(\"Self Device call: \", self.device)\n", + " if self.src_emb is not None:\n", + " B1, N1, D1 = self._get_batch_shape(X1)\n", + " print(self.src_emb)\n", + " print(B1, N1, D1)\n", + " print(X1.shape)\n", + " #try:\n", + " self.src_emb.to(device)\n", + " print(1)\n", + " #X1 = self.src_emb(X1.view(-1,*self.src_dim)).reshape(B2, N2, -1)\n", + " X1 = X1.view(1, 50, 1, 1024) # Reshape with dummy channel\n", + " flattened_X1 = self.src_emb(X1.view(-1,*self.src_dim)).reshape(B1, N1, -1)\n", + " print(flattened_X1.shape)\n", + " #X1 = self.src_emb(X1.view(-1,*self.src_dim).to(self.device)).reshape(B1, N1, -1)\n", + " #except: # Memory error?\n", + " #print('Batchifying feature distance computation')\n", + " #X1 = self._batchify_computation(X1.view(-1,*self.src_dim).to(self.device), 'x').reshape(B1, N1, -1)\n", + " if self.tgt_emb is not None:\n", + " B2, N2, D2 = self._get_batch_shape(X2)\n", + " X2 = self.tgt_emb(X2.view(-1,*self.tgt_dim))\n", + " print(X2)\n", + " #try:\n", + " #X2 = self.tgt_emb(X2.view(-1,*self.tgt_dim).to(self.device)).reshape(B2, N2, -1)\n", + " #except:\n", + " # print('Batchifying feature distance computation')\n", + " # X2 = self._batchify_computation(X2.view(-1,*self.tgt_dim).to(self.device), 'y').reshape(B2, N2, -1)\n", + " if self.p == 1:\n", + " print(1)\n", + " print(X1.shape, X2.shape)\n", + " c = geomloss.utils.distances(X1, X2)\n", + " elif self.p == 2:\n", + " print(1)\n", + " print(X1.shape, X2.shape)\n", + " c = geomloss.utils.squared_distances(X1, X2) / 2\n", + " #else:\n", + " # raise ValueError()\n", + " return c.to(_orig_device)" + ] } ], "metadata": { "kernelspec": { - "display_name": "otdd", + "display_name": "base", 
"language": "python", - "name": "otdd" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -1581,7 +1992,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.12" + "version": "3.11.7" } }, "nbformat": 4, diff --git a/example-stl10.ipynb b/example-stl10.ipynb index fe89bfb..e34aa8e 100644 --- a/example-stl10.ipynb +++ b/example-stl10.ipynb @@ -14,17 +14,16 @@ "execution_count": 1, "id": "dcf3bcff", "metadata": {}, - "outputs": [], "source": [ "import lava" - ] + ], + "outputs": [] }, { "cell_type": "code", "execution_count": 2, "id": "4e34f958", "metadata": {}, - "outputs": [], "source": [ "import torch\n", "import torchvision\n", @@ -46,7 +45,8 @@ "import numpy as n\n", "\n", "from torch.utils.data import Dataset, TensorDataset, DataLoader" - ] + ], + "outputs": [] }, { "cell_type": "code", @@ -55,17 +55,16 @@ "metadata": { "scrolled": true }, - "outputs": [], "source": [ "! nvidia-smi" - ] + ], + "outputs": [] }, { "cell_type": "code", "execution_count": null, "id": "a54003f7", "metadata": {}, - "outputs": [], "source": [ "cuda_num = 0\n", "import torchvision\n", @@ -79,7 +78,8 @@ "print(\"Cuda device: \", torch.cuda.current_device())\n", "print(\"cude devices: \", torch.cuda.device_count())\n", "device = torch.device('cuda:' + str(cuda_num) if torch.cuda.is_available() else 'cpu')" - ] + ], + "outputs": [] }, { "cell_type": "markdown", @@ -94,13 +94,13 @@ "execution_count": 5, "id": "23633651", "metadata": {}, - "outputs": [], "source": [ "training_size = 5000\n", "valid_size = 2000\n", "resize = 32\n", "portion = 0.3" - ] + ], + "outputs": [] }, { "cell_type": "markdown", @@ -118,71 +118,11 @@ "metadata": { "scrolled": true }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "seeed\n", - "Files already downloaded and verified\n", - "Files already downloaded and verified\n", - "Train Type: Train: Dataset STL10\n", - " Number of datapoints: 5000\n", - " Root location: /home/just/Programming/otdd-main/data\n", - " Split: train\n", - " StandardTransform\n", - "Transform: Compose(\n", - " Resize(size=(32, 32), interpolation=bilinear, max_size=None, antialias=None)\n", - " ToTensor()\n", - " Normalize(mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225))\n", - " )\n", - "Teest Type: Train: Dataset STL10\n", - " Number of datapoints: 8000\n", - " Root location: /home/just/Programming/otdd-main/data\n", - " Split: test\n", - " StandardTransform\n", - "Transform: Compose(\n", - " Resize(size=(32, 32), interpolation=bilinear, max_size=None, antialias=None)\n", - " ToTensor()\n", - " Normalize(mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225))\n", - " )\n", - "FOLD IDSSS: [ 0 1 2 ... 4997 4998 4999]\n", - "K: train IDXS: [ 0 1 2 ... 4997 4998 4999]\n", - "SAMPLER: \n", - "\n", - "FOLD SAMPLER: {'train': } \n", - "\n", - "TRAIN: Dataset STL10\n", - " Number of datapoints: 5000\n", - " Root location: /home/just/Programming/otdd-main/data\n", - " Split: train\n", - " StandardTransform\n", - "Transform: Compose(\n", - " Resize(size=(32, 32), interpolation=bilinear, max_size=None, antialias=None)\n", - " ToTensor()\n", - " Normalize(mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225))\n", - " ) \n", - "\n", - "IDXS: [ 0 1 2 ... 
4997 4998 4999] \n", - "\n", - "\n", - "STL11\n", - "len classes: 10\n", - "len train: 5000\n", - "i = 0 \n", - "i = 1000 \n", - "i = 2000 \n", - "i = 3000 \n", - "i = 4000 \n", - "MAX TEST: 2000\n", - "Fold Sizes: 5000/8000 (train/test)\n" - ] - } - ], "source": [ "loaders, shuffle_ind = lava.load_data_corrupted(corrupt_type='shuffle', dataname='STL10', resize=resize,\n", " training_size=training_size, test_size=valid_size, currupt_por=portion)" - ] + ], + "outputs": [] }, { "cell_type": "markdown", @@ -198,10 +138,10 @@ "execution_count": 7, "id": "58be73c8", "metadata": {}, - "outputs": [], "source": [ "feature_extractor = lava.load_pretrained_feature_extractor('preresnet18_test_stl10.pth', device)" - ] + ], + "outputs": [] }, { "cell_type": "markdown", @@ -216,129 +156,11 @@ "execution_count": 8, "id": "5abb7145", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "WROGNG: unexpected line 659\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - " 0%| | 0/79 [00:00" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXgAAAEWCAYAAABsY4yMAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8qNh9FAAAACXBIWXMAAAsTAAALEwEAmpwYAAAazUlEQVR4nO3de5wcZZ3v8c93JhkSEiBAIhACCZHAGnSFkJU7yhrA6wFXPNxUXNcb4upZlV28rIt42aMeOa/FZUVxWUEFL7CsgYXFwHIJysWEiyFBCASQW8JEAiEQJpf57R/1TNKZdE9q0l1zefi+X69+TVd1ddWvq5/6TvXT1VWKCMzMLD9tg12AmZlVwwFvZpYpB7yZWaYc8GZmmXLAm5llygFvZpYpB7wNOZIelTQr3f+8pB8Mdk1Vk7SnpFWS2ge7lh6174MNTw546xdJJ0m6Q9KLkp5J9z8uSVUsLyK+HhEfanY+kqZICkkjWlFXieWsSrdHJZ21pedFxB8iYmxErK+yPntlccBbaZI+A/wT8C1gV2AX4GPAYUBHg+cMmT3SATYuIsYCJwNfkvSWwS7IXnkc8FaKpB2Ac4CPR8TlEfFCFO6OiFMjoitN90NJ35V0jaQXgaMkvV3S3ZJWSnpc0tm95v0+SY9J+qOkL/R67GxJP64ZPljSbyQ9J+leSW+qeewmSV+R9GtJL0j6laTx6eFb0t/n0p71Ib2WM1HSakk71Yw7QNJySSMl7S3pZknPp3E/K7PeIuI2YCHwWkltkr6YXuszki5J63WzTxiSPiBpSXodj0g6NY0vM4/TJP0h1blhfabnniXp4bSuf97r9TZ8H2x4csBbWYcA2wC/LDHtKcDXgO2AW4EXgfcD44C3A6dLOh5A0nTgu8D7gInAzsCkejOVtDvwn8BXgZ2AzwJXSJrQa9l/CbyK4lPFZ9P4I9Pfcakr5LbaeUfEU8BtwLt7zevyiFgLfAX4FbBjqu87W1oJKhwG7AfcDXwg3Y4CpgJjgX+u87wxwHnAWyNiO+BQ4J70cJl5HA7sC7yZ4tPDa9L4vwaOB95Isa5XAOenZZZ+H2z4cMBbWeOB5RGxrmdEzZ70aklH1kz7y4j4dUR0R8TLEXFTRCxIw78DLqMIGYATgKsj4pb0KeDvge4GNbwXuCYirknzmgPMA95WM82/RcSDEbEa+Dmwfz9e46UUXSqk7xROSuMA1gKTgYnpNd26hXktB54FfgCcFRE3AKcC50bEkohYBXwOOKnB9wLdFHv9oyPi6YhYmMaXmceXI2J1RNwL3Au8Po3/GPCFiHgireuzgRPSc/vzPtgw4YC3sv4IjK8Nkog4NCLGpcdq29LjtU+UdJCkGyV1SnqeImh6uk4m1k4fES+m+dUzGXhP+qfynKTnKPZWd6uZZmnN/Zco9nDLugI4RNJuFHv83cDc9NjfAgLulLRQ0ge3MK/xEbFjRLwmIs5L4yYCj9VM8xgwguK7jA3SOjiRYj09Lek/Jf1JP+bRaB1MBq6sWXf3A+vTc/vzPtgw4YC3sm4DuoDjSkzb+xSllwKzgT0iYgfgAoqwBHga2KNnQknbUnQP1PM48KOIGFdzGxMR/3cratp8gogVFN0wJ1J0z/w00ulWI2JpRHw4IiYCHwX+RdLeJZZb6ymKkO2xJ7AOWFanlusi4miKf16/By7s7zzqeJyi26d2/Y2KiCfp3/tgw4QD3kqJiOeAL1ME2wmStktf2u0PjNnC07cDno2IlyW9gSI8e1wOvEPS4ZI6KL7IbdQufwy8U9KxktoljZL0Jkll+oo7KfbIp25hukspvi84gY3dM0h6T81yVlD8w+hvF8ZlwN9I2kvSWODrwM9qu73SsnaRdFzqi+8CVtUsq9Q8GrgA+JqkyWk5EyT1/MPuz/tgw4TfQCstIr4JfJqiu2JZun0P+DvgN3089ePAOZJeAL5E0TfeM8+FwBkUYfo0RXg+0WD5j1N8gvg8RWA/DpxJiXYcES9RfPH769RFcXCDSWcD04ClqQ+7x58Bd0halab5VEQs2dJye7kI+BHFET2PAC9TfPHZWxvFen6Koh//jcDp/ZxHPf+Uav9Vei9uBw6C/r0PNnzIF/wwM8uT9+DNzDLlgDczy5QD3swsUw54M7NMVXpmvf4aP358TJkyZbDLMDMbNubPn788IibUe2xIBfyUKVOYN2/eYJdhZjZsSHqs0WPuojEzy5QD3swsUw54M7NMOeDNzDLlgDczy1RlAS/ponRJsfuqWoaZmTVW5WGSP6S4lNglFS4je3MWLWPu4k6O
mDaBo6dvcl0IvnXdA1y/aCmzpu/K/nuM49I7iqOlpk/cgUVPPc/yVV2MH7sNK19ex++fXklbm4iA1WvW0R0wamQb7W1trFvfzZr13XT7vHOvOO2C7th4svy2NNwsASPaRHcE3QHbjRrBuG07WNW1lnXrg1VdRRsUMKajndMO22tDG36480VWda1l93Gj+eSb9wHYMP7ZF9cAwc5jOpg6YSynHFScGn/u4k4WPrVyQzvffpsRHD9jEmceuy9zFi3bsG2cctBkjp6+y2bb1cd/chc3P/AM23a0s+sOoxg/dhtOOWgyV979JDc/8Azjx3bwxXfst2EbrN32zjx236bWVV/beLMqPZukpCkUlwF7bZnpZ86cGT4OfqM5i5bxycvuZvXa9Ywe2c55Jx+wSQM7/8aHNkzbqg3TbLDUa8NtgjaJdQ0a94g20SaxZn39U/O/7XW7cf2iZRse72hv48NHTuWiWx/ZsF1Nn7g98x9bsXk9bHrC/zbB9943k3sef26Tbe+Mo/be6pDvaxsvS9L8iJhZ77FB74OX9BFJ8yTN6+zsHOxyhpS5iztZvXY9AKvXrmfu4o3r5/pFSzeZ1uFuw129NtwdNAx3KB5rFO4Av3moc5PH16zv5vpFSzfZrhY88Vz9eurUMndx52bbXu/h/uhrG2+FQQ/4iPh+RMyMiJkTJtT9te0r1hHTJjB6ZDsAo0e2c8S0jetn1vRdN5m2TZgNa/XacJuKvfRGRrSJjvbGMXbo3hM2ebyjvY1Z03fdZLt63aRx9eupU8sR0yZstu31Hu6PvrbxVhhSpyqwTR09fRfOO/mAuv1zPR8J3QdvzXil9sHvv8e4re6D79kOW9EH39c23grugzczG8YGpQ9e0mXAbcC+kp6Q9FdVLcvMzDZXWRdNRJxc1bzNzGzLBv1LVjMzq4YD3swsUw54M7NMOeDNzDLlgDczy5QD3swsUw54M7NMOeDNzDLlgDczy5QD3swsUw54M7NMOeDNzDLlgDczy5QD3swsUw54M7NMOeDNzDLlgDczy5QD3swsUw54M7NMOeDNzDLlgDczy5QD3swsUw54M7NMOeDNzDLlgDczy5QD3swsUw54M7NMOeDNzDLlgDczy5QD3swsUw54M7NMOeDNzDLlgDczy5QD3swsUw54M7NMOeDNzDLlgDczy5QD3swsUw54M7NMOeDNzDLlgDczy5QD3swsUw54M7NMOeDNzDLlgDczy5QD3swsUw54M7NMOeDNzDLlgDczy5QD3swsUw54M7NMOeDNzDLlgDczy5QD3swsUw54M7NMlQp4SZMlzUr3R0vartqyzMysWVsMeEkfBi4HvpdGTQL+o8KazMysBcrswZ8BHAasBIiIxcCrqizKzMyaVybguyJiTc+ApBFAVFeSmZm1QpmAv1nS54HRko4GfgFcVW1ZZmbWrDIBfxbQCSwAPgpcA3yxyqLMzKx5I7Y0QUR0Axemm5mZDRNbDHhJC9i8z/15YB7w1Yj4YxWFmZlZc7YY8MC1wHrg0jR8ErAtsBT4IfDOSiozM7OmlAn4WRExo2Z4gaS7ImKGpPdWVZiZmTWnzJes7ZLe0DMg6c+A9jS4rpKqzMysaWX24D8EXCRpLCCKHzx9SNIY4B+rLM7MzLZemaNofgu8TtIOafj5mod/XlVhZmbWnDJ78Eh6O7AfMEoSABFxToV1mZlZk8qcbOwC4ETgrym6aN4DTK64LjMza1KZL1kPjYj3Aysi4svAIcA+1ZZlZmbNKhPwq9PflyRNBNYCu1VXkpmZtUKZPvirJY0DvgXcRfGr1h9UWZSZmTWvzFE0X0l3r5B0NTCq15E0ZmY2BJU5F0078HZgSs/0koiIc6stzczMmlGmi+Yq4GWK0wV3V1uOmZm1SpmAnxQRf1p5JWZm1lJljqK5VtIxlVdiZmYtVWYP/nbgSkltFIdICoiI2L7SyszMrCllAv5cih83LYgIX2zbzGyYKNNF8zhwn8PdzGx4KbMHvwS4SdK1QFfPSB8maWY2tJUJ+EfSrSPdzMxsGCjzS9YvD0QhZmbWWg0DXtJVFOedqSsi/lclFZmZWUv0tQf//wasCjMza7mGAR8RNw9kIWZm1lplDpM0M7NhyAFvZpYpB7yZWaZ8FI2ZWabKHEXzF8CuwI/T8MnAsiqLMjOz5m3xKBpJ346ImTUPXSVpXuWVmZlZU8r0wY+RNLVnQNJewJjqSjIzs1Yocy6av6E42dgSinPBTwY+WmlVZmbWtDLnovkvSdOAP0mjfh8RXX09x8zMBt8Wu2gkbQucCXwiIu4F9pT0jsorMzOzppTpg/83YA3FVZ0AngS+WllFZmbWEmUC/tUR8U2K67ESES9R9MWbmdkQVibg10gaTfrRk6RXU3NlJzMzG5rKHEXzD8B/AXtI+glwGPCBKosyM7PmlTmKZo6ku4CDKbpmPhURyyuvzMzMmlJmDx5gFLAiTT9dEhFxS3VlmZlZs7YY8JK+AZwILAS60+gAHPBmZkNYmT3444F9/eMmM7PhpcxRNEuAkVUXYmZmrVVmD/4l4B5JN1BzeGREfLKyqszMrGllAn52upmZ2TBS5jDJiyV1APukUQ9ExNpqyzIzs2aVOYrmTcDFwKMUx8HvIek0HyZpZja0lemi+TZwTEQ8ACBpH+Ay4MAqCzMzs+aUOYpmZE+4A0TEg/ioGjOzIa/MHvw8ST9g40W33wv4mqxmZkNcmYA/HTgD6Dksci7wL5VVZGZmLVHmKJou4FzgXEk7AZP8q1Yzs6GvzCX7bpK0fQr3+cCFkv5/9aWZmVkzynzJukNErAT+ArgkIg4C3lxtWWZm1qwyAT9C0m7A/waurrgeMzNrkTIBfw5wHfBQRPxW0lRgcbVlmZlZs8p8yfoL4Bc1w0uAd1dZlJmZNa9hwEv624j4pqTvkC64XctnkzQzG9r62oO/P/31j5rMzIahhgEfEVelvxcDSBqbhlcNTGlmZtaMMsfBv1bS3RTXZF0kab6k/aovzczMmlHmKJrvA5+OiMkRsSfwGeDCassyM7NmlQn4MRFxY89ARNwEjKmsIjMza4kyJxtbIunvgR+l4fdSXIjbzMyGsDJ78B8EJgD/DlwBjE/jzMxsCOvrOPhRwMeAvYEFwGd8LVYzs+Gjrz34i4GZFOH+VuBbA1KRmZm1RF998NMj4nUAkv4VuHNgSjIzs1boaw9+Q3dMRKwbgFrMzKyF+tqDf72klem+gNFpWEBExPaVV2dmZlutr1MVtA9kIWZm1lplDpM0M7NhyAFvZpYpB7yZWaYc8GZmmXLAm5llygFvZpYpB7yZWaYc8GZmmXLAm5llygFvZpYpB7yZWaYc8GZmmXLAm5llygFvZpYpB7yZWaYc8GZmmXLAm5llygFvZpYpB7yZWaYc8GZmmXLAm5llygFvZpYpB7yZWaYc8GZmmXLAm5llygFvZpYpB7yZWaYc8GZmmXLAm5llygFvZpYpB7yZWaYc8GZmmXLAm5llygFvZpYpB7yZWaYc8GZmmXLAm5llygFvZpYpB7yZWaYc8GZmmXLAm5llygF
vZpYpB7yZWaYc8GZmmXLAm5llygFvZpYpB7yZWaYc8GZmmXLAm5llygFvZpYpB7yZWaYc8GZmmXLAm5llygFvZpYpB7yZWaYqDXhJb5H0gKSHJJ1V5bLMzGxTI6qasaR24HzgaOAJ4LeSZkfEolYva86iZcxd3MkR0yZw9PRdSk9Xb/jSOx4D4JSDJm8ybvmqLsaP3YbpE3fg5gc7eXT5i3S0i65163lxTTdtgu5o9SuzZowe2UbX2m66e40f0Va8VzuP6WDPnccw/7EVmz13bEc7R+77KhY++TzLV73MyPY2VnWtY2R7G+1tbXS0i4NfPZ4/PPsSK1Z18fo9d2Thk8+zdOXL7Lr9Nrzj9btz84OdrFjVxfEzJrH/HuOYu7iT7UaN5IWX126xDfa0t5522KNn+uWr1vDwMy8wa/qunHnsvqXXSdltZTjJ8TW1iiKqSSVJhwBnR8SxafhzABHxj42eM3PmzJg3b16/ljNn0TI+edndrF67ntEj2znv5APqvsm9p/vg4Xtx0a2PbDJ84S1LWLO+iIOO9jY+fOTUTcaZba0RbWJdzR5AmTYIRTs8/9QZG8K/pw3XOuOovUuFfNltZTjJ8TX1l6T5ETGz3mNVdtHsDjxeM/xEGrcJSR+RNE/SvM7Ozn4vZO7izg0NfvXa9cxdXH8evae7ftHSzYZrN6w167s3G2e2tdb1+nhXpg1C0Q572nRtG651/aKlpWoou60MJzm+plYa9C9ZI+L7ETEzImZOmDCh388/YtoERo9sB4q9oiOm1Z9H7+lmTd91s+GO9o2ro6O9bbNxZltrRJs2GS7TBqFohz1turYN15o1fddSNZTdVoaTHF9TKw37LhpwH7zV5z74zeXYX53ja+qPvrpoqgz4EcCDwJuBJ4HfAqdExMJGz9nagDcze6XqK+ArO4omItZJ+gRwHdAOXNRXuJuZWWtVFvAAEXENcE2VyzAzs/r8DaKZWaYc8GZmmXLAm5llygFvZpapyg6T3BqSOoHHgPHA8kEupx7X1X9DtTbX1T+uq38Gsq7JEVH3F15DKuB7SJrX6LjOweS6+m+o1ua6+sd19c9QqctdNGZmmXLAm5llaqgG/PcHu4AGXFf/DdXaXFf/uK7+GRJ1Dck+eDMza95Q3YM3M7MmOeDNzDI1KAEv6T2SFkrqljSzZvwUSasl3ZNuF9Q8dqCkBekC3udJUhq/k6Q5khanvzu2uq702OfSsh+QdGzN+LoXFpe0l6Q70vifSerY2rp61XG2pCdr1tHbtrbGKg32BdclPZrayz2S5qVxdduKCuelWn8naUYL67hI0jOS7qsZ1+86JJ2Wpl8s6bSK6hr0tiVpD0k3SlqUtsVPpfGDus76qGvQ11mfImLAb8BrgH2Bm4CZNeOnAPc1eM6dwMGAgGuBt6bx3wTOSvfPAr5RQV3TgXuBbYC9gIcpToHcnu5PBTrSNNPTc34OnJTuXwCc3qJ1dzbw2Trj+11jhe/vgC+zTg2PAuN7javbVoC3pTal1MbuaGEdRwIzatt1f+sAdgKWpL87pvs7VlDXoLctYDdgRrq/HcU1JaYP9jrro65BX2d93QZlDz4i7o+IB8pOL2k3YPuIuD2KtXcJcHx6+Djg4nT/4prxrazrOOCnEdEVEY8ADwFvSLeHImJJRKwBfgocJ0nAnwOXt6KukvpVY8W1DMYyy2jUVo4DLonC7cC41OaaFhG3AM82WcexwJyIeDYiVgBzgLdUUFcjA9a2IuLpiLgr3X8BuJ/iWs6Dus76qKuRIbE9DsU++L0k3S3pZklHpHG7U1y0u0ftBbx3iYin0/2lQBXX7Gp0AfFG43cGnouIdXXqbYVPpI+jF2ljl1R/a6zSYCyztwB+JWm+pI+kcY3aykDX2986BrK+IdO2JE0BDgDuYAits151wRBaZ71VFvCSrpd0X51bX/+tngb2jIgDgE8Dl0ravuwy0959n8d9bmVdA2oLNX4XeDWwP8X6+vZg1jqEHR4RM4C3AmdIOrL2wTJtZSAMlTqSIdO2JI0FrgD+T0SsrH1sMNdZnbqGzDqrp8pL9s3aiud0AV3p/nxJDwP7UFzTdVLNpJPSOIBlknaLiKfTR7NnWl1XWtYeDZZfb/wfKT4qjkh78bXTb1HZGiVdCFy9lTVWqa9aBkREPJn+PiPpSoqPxo3aykDX2986ngTe1Gv8Ta0uKiKW9dwfzLYlaSRFiP4kIv49jR70dVavrqGyzhoZUl00kiZIak/3pwLTgCXpo9lKSQen/u33A79MT5sN9HxDflrN+FaaDZwkaRtJe6W67qS4kPg0FUfMdAAnAbPTHsaNwAmtrqtX3/C7gJ6jIPpVYytq6cNgLHMDSWMkbddzHziGYj01aiuzgfenIzIOBp6v6Q6oQn/ruA44RtKOqQvgmDSupYZC20rb978C90fEuTUPDeo6a1TXUFhnfarq29u+bmlFPEGxt74MuC6NfzewELgHuAt4Z81zZqaV9zDwz2z8Fe7OwA3AYuB6YKdW15Ue+0Ja9gOkI3hi47f4D6bHvlAzfirFG/oQ8Atgmxatux8BC4DfUTSM3ba2xorf4wFfZq91f2+6LexZfqO2QnEExvmp1gXUHEHVglouo/jovja1rb/amjqAD6a29BDwlxXVNehtCzicovvldykH7knLGNR11kddg77O+rr5VAVmZpkaUl00ZmbWOg54M7NMOeDNzDLlgDczy5QD3swsUw54y5KkXSRdKmlJOl3BbZLe1cT8zpb02XT/HElb84M5JO2vmjMOmlXJAW/ZST9K+Q/gloiYGhEHUvygZFKv6bbql9wR8aWIuH4ry9uf4jhos8o54C1Hfw6siYgN1xOIiMci4juSPiBptqT/Bm6QNFbSDZLuUnH++A3nJJL0BUkPSrqV4jTSPeN/KOmEdP9AFSfGmy/pup5fNkq6SdI3JN2Z5nFE+uXiOcCJKs4dfuIArQ97harsXDRmg2g/il9CNzID+NOIeDbtxb8rIlZKGg/cLml2muYkij3uEWl+82tnks5N8h3guIjoTIH9NYpfUAKMiIg3pC6Zf4iIWZK+RPFry0+07NWaNeCAt+xJOp/ip+ZrKH7WPicies6FLuDrKs422U1x6tZdgCOAKyPipTSPeucL2Rd4LTCn6BWineLn/z16TpQ1n+JiNmYDygFvOVpIcV4jACLijLR3Pi+NerFm2lOBCcCBEbFW0qPAqJLLEbAwIg5p8HhX+rseb2s2CNwHbzn6b2CUpNNrxm3bYNodgGdSuB8FTE7jbwGOlzQ6nZnynXWe+wAwQdIhUHTZSNpvC7W9QHHJN7PKOeAtO1GcQe944I2SHpF0J8Vl3v6uzuQ/AWZKWkBxGurfp3ncBfyM4oyU11Kc5rX3ctZQnBL6G5LupTjD4KFbKO9GYLq/ZLWB4LNJmpllynvwZmaZcsCbmWXKAW9mlikHvJlZphzwZmaZcsCbmWXKAW9mlqn/AULmtO+HRN3nAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Number of poisoned images: 1499 out of 10000.\n", - "last index of poison 4675\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZkAAAEWCAYAAAC0Q+rDAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8qNh9FAAAACXBIWXMAAAsTAAALEwEAmpwYAAAyKElEQVR4nO3de7xUZd338c+Xk4igCBKZoBtLBRFEDoo3YSolKCqIafZo4rlSy0OlZKbcZqVlhvpYiOmj3rfh+UBieQo0K4xDiHgEFQUSRBQFRBT9PX+stXHY7sPsvWfN7D1836/XvPaadfytNWvPb65rXetaigjMzMyy0KLUAZiZWflykjEzs8w4yZiZWWacZMzMLDNOMmZmlhknGTMzy0xZJhlJiyR9NR2+QNIfSh0TgKTvSlouaY2kzgVa547p+loWYn2FkHv8M97O/pKWZL2drEl6VtL+pY6jkqSbJF1a6jiaM0njJf1vPebP5JhL+rakCRmst6uk5yVtUde8RU8yko6R9JSktZLeTIdPl6QsthcRv4iIUxq7HkkVkkJSqwYu3xq4EjgoItpHxMoa1r8mfS2SNK6u9UbE6+n6Pm5IXOUsPZ7PSGqRM+5SSTdltL1Fktaln9/y9IujfV3LRUTviJieRUyFJukESU+WOo6Gau7x14ekNsCFwK9zxk2S9KKkTySdUMfyR0v6h6T3JU3PnRYRy4FpwGl1xVHUJCPpB8BVJDv9eaAr8B1gCNCmhmWazC/0RuoKtAWerWO+jhHRHvgmcJGkEZlHVt6+ABxTxO0dln5+/YGBJP/kZqUwCnghIpbmjHsaOB2Yk8fybwMTgMtqmH4r8O26VlK0JCNpG+AS4PSIuCsiVkfi3xFxbESsT+e7SdLvJT0oaS1wgKSRkv4t6T1JiyWNr7Lub0l6TdJKST+pMm2TYqukwWl2XiXp6dxqCknTJf1M0t8lrZb0sKTt0slPpH9Xpb9U961mH7eQNEHSf9LXhHTcrsCLOcv/ta7jFRH/JElIe0hqIenCdB/flHRLejw/U8JKf6m9ksb/qqRj0/H5rGOspNclvZV7HNNlx0l6OT3Gd0jqlM/xr+YY1fhZ5hHHlun58Y6k54BBdR1H4FfAf6uGEqikw5VUV61KP/9eOdMWSfqhpHmS3pV0u6S2eWyT9B/7z8AeeW6nsnp3b0mz0uOzXNKVhYhV0qGS5qbL/kNS35xpe0mak54zt5P8GMpLut0fpdtdK+kGJVUpf07X96ikbXPmv1PSsjTGJyT1zpnWWdKf0n2fqaTU+WTO9J6SHpH0tpJf40fnTDtE0nPpNpdK+mG++5CzjhOVVAGtTv+Hvp0zbX9JSySdl/7/vCFpdLrdl9KYLqiyyrbp57A6Pb575qyvxmMuaVtJD0hakZ7rD0jqVt/9AQ4GHs8dERHXRsRjwAd1LRwRj0bEHcB/apjlKWBnSTvVtaKivIARwAagVR3z3QS8S1K6aUFy8PcH+qTv+wLLgdHp/LsDa4D9gC1IqqQ2AF9Np48H/jcd3gFYCRySrutr6fsu6fTpwMvArsCW6fvL0mkVQNQWP0kSnQF8DugC/AP4WT7L504HlO7/+8Aw4CRgIbAz0B64B/ifapbbCngP2C2dtj3QOx3OZx3Xp/u9J7Ae6JVOPyvdr27pMb4OmJzP8a9mP2v7LOuK4zLgb0AnoDswH1hSy+cRwC7AbOCUdNylwE3p8K7A2vQ8aA2clx6jNun0RcC/SEpDnYDnge/Usr1FfHredSf5kfCzPLdTudw/gW+lw+2BwY2NFdgLeBPYB2gJjE3n34KkBuE14Jx0vV8HPgIurWEfTwCerLLPM0hK6juk25mTbrMt8Ffg4pz5TwI6pNueAMzNmXZb+mpHcl4trtwWybm9GDiR5FzfC3gL2D2d/gYwNB3eFuifT/xVpo0Evkjy//cVkv+//jnn7QbgovQ4nQqsAP6Y7k9vYB3QI+d756P0eLYGfgi8mg7XesyBzsCR6XHoANwJ3JcT5++AVTW85uXMNxM4qoZ9fRI4Ic/v7lOA6TVMmwccXuvy9UkUjXkBxwHLqoz7R3pg1gH7peNuAm6pY10TgN+mwxcBt+VM2wr4kOqTzPmkX6w58z8EjE2HpwMX5kw7HfhLOlxB3UnmZeCQnPfDgUX5LJ8zfRXwDsmXxPfTaY+RlAAr590tPSlb8dkksyo9Qbessv581tEtZ/q/gGPS4eeBYTnTts9Zttbjn8d5kftZ1hXHK8CInGmnUXeS+RLJj4rXSP65c5PMT4E7cuZvASwF9k/fLwKOy5n+K2BiLdtbRJJwV6Xb+x1JssxnO5Xn6xPAfwPbVVl3g2MFfk/6Yydn+oskX6T7kfxSVZX/y/okmWNz3t8N/D7n/ffI+YKssq6O6We0DUny+4j0B1I6/VI+TTLfAP5WZfnrSBMY8DpJ1c3WdZxvm8Rfx7z3AWelw/uTfE+1TN93SGPfJ2f+2Xz6g2k8MKPK5/UGMLQBx7wf8E4+MVdZbgE5/y9VphUqyfwdOL625Yt5TWYlsF1utUVE/FdEdEyn5cayOHdBSftImpYWH98luY5TWY31hdz5I2Jtur7q7AQclVYZrJK0CvgyyZdmpWU5w++T/JrM1xdIvlwqvZaOq4/tImLbiOgVEVfXst5WJL8eN0r3/Rskx+cNSVMl9azHOmra952Ae3OO2fPAx+my9Tn+dX2WdcWxybaq7E+NIuJBYAmfrT/e5JhExCfp+neoK5a0OqiykcaxOfOMjoiOEbFTRJweEevy3E6lk0lKLS+kVUaHNjZWks/vB1XO++7pOr8ALI30GyOV13HNsTxneF017yuPWUtJlympdn2PJEFB8vl3ITkfcz/f3OGdgH2q7MOxJNd2IflhdQjwmqTHVU11dl0kHSxpRlr1tSpdX+65uTI+bWCzLv1b7b5WjT/9vJaQxzGX1E7SdUqqoN8j+eHRUfW/Pv0OSTLMUgeSH1U1KmaS+SdJ1ceoPOaNKu//CEwBukfENsBEkiItJL8OulfOKKkdSXGzOotJSjIdc15bRURNF7Zqi6k6/yH5Z6i0IzXXZ9ZHdevdwKYnOAAR8VBEfI0kcb5AUvVUr3VUYzFwcJXj1jaS6w71Of5Q+2dZl022le5Dvn4CXEBSBVFpk2MiSen6l1KHiDg4klZ97SPi1jpmz3s7EbEgIr5JUuV6OXCXpK0aEyvJ5/fzKp9fu4iYTHJMd0jXV6k+x7U+/g/J//9XSUovFel4kVQ9bSCpkq2U+1kvBh6vsg/tI+K
7ABExMyJGkRy3+4A76hOYkqa4dwNXAF3TH78Pkv+5WZ3c/4sWJPv2H+o+5j8gqWnYJyK2Jin5UBmLpIk5P3CqvnIbFs0j+cGSibTA8CWSxgQ1KlqSiYhVJNUAv5P0dUkdlFxQ7kdSxVKbDsDbEfGBpL1JTtZKdwGHSvqykiZ7l1Dzfv0vcJik4emvqrbpBb18LqqtAD4huaZRk8nAhZK6KGkwcFG6zcaaDJwjqYeSJrG/AG6PiA25Mym54Doq/VJaT1J180l91lGDicDPKy/wpftX+WOhPscfav8s63IH8OP0wmg3kqqYvETSRHg+yfWI3PWNlDRMSRPzH5Act3/UI6Z85L0dScdJ6pL+8l2Vjv6kkbFeD3wnLUVK0lZKGmB0IPnxtwH4vqTWksYAezdud2vUIY15JUmy/0XlhLSEcA8wPv0l3xM4PmfZB4BdlTQyaZ2+BknqJamNpGMlbRMRH5Fcl/yEmin939/4IqlK3YI02Uk6GDiokfs7QNKY9Mv47HTfZ1D3Me9AUipapaSBzcW5K42I7+T8wKn66p0z64MkVaK5O94m3V8BrdP9b5FO219S5MzbMp23FdAinbd1zur2JrkcUGvJt6hNmCPiV8C5JBctl6ev60iuldT2z3I6cImk1SRf3Bt/pUTEs8AZJL+Q3yApIlZ7g15ELCb5JXUBycm0GPgReRyHiHgf+Dnw97S4Pria2S4FZpH8gniG5AJoIW6wuhH4H5Ji86skLUOq+4JtQXJ8/0PS/PArwHfruY7qXEVS+ng4/QxmkFxErtfxT9X4Webhv0mqFV4FHk73pz4uJLkoDkBEvEhyrfAakovIh5E0Qf6wnuutVT23MwJ4VtIakuN+TESsa0ysETGL5EL1/yX5fBaSXJsgXX5M+v5tkurWexq6r3W4heTzWwo8R3Ie5TqTpISzjOSznUzyxUxErCb50j+G5PxeRlLSq7wZ8FvAorR66TskVWk1+S+SL/Gqr++TnI/vkPz4mdLgPU3cT3I830njGxMRH+VxzCeQXMt7i+QY/aWB2/8T0FNSbpX9wyT7+l/ApHS4sqTUnU2/h7+VTv89ybWkdXxaMwLJMZ5YVxDatFrQzKxpkHQ58PmIGFvnzFYtSaeRtMA7O495/wDcGREP5THv50iaR+8VEbU2h3aSMbMmIa0ia0NSCzCIpLrnlIi4r5RxWeM0qIsUM7MMdCCpIvsCSVX6b0iqnKwZc0nGzMwyU5a9MJuZWdPQrKvLtttuu6ioqCh1GGZmzcrs2bPfioguxdhWs04yFRUVzJo1q9RhmJk1K5Lq26tDg7m6zMzMMuMkY2ZmmXGSMTOzzDTrazLV+eijj1iyZAkffFDnM3msmWnbti3dunWjdevWdc9sZk1C2SWZJUuW0KFDByoqKti0k1NrziKClStXsmTJEnr06FHqcMwsT2VXXfbBBx/QuXNnJ5gyI4nOnTu7hGrWzJRdkgGcYMqUP1ez5qcsk4yZmTUNZXdNpqqKcVMLur5Fl42sc55ly5Zx9tlnM3PmTDp27EjXrl2ZMGECbdq04dBDD2X+/PkFjSnX+PHjad++PT/84Q8z24aZWb7KPskUW0RwxBFHMHbsWG677TYAnn76aZYvX0737t3rWLq8zFuyCoC+3TqWNA4zKx1XlxXYtGnTaN26Nd/5znc2jttzzz0ZOnToJvN9/PHH/OhHP2LQoEH07duX6667DoA1a9YwbNgw+vfvT58+fbj//qSn80WLFtGrVy9OPfVUevfuzUEHHcS6detqjWX//ffnnHPOYeDAgfTq1YuZM2cyZswYdtllFy688MKN840ePZoBAwbQu3dvJk2atHH8DTfcwK677sree+/NqaeeyplnngnAihUrOPLIIxk0aBCDBg3i73//OwCPP/44/fr1o1+/fuy1116sXbO6EUfSzMqBSzIFNn/+fAYMGFDnfDfccAPbbLMNM2fOZP369QwZMoSDDjqI7t27c++997L11lvz1ltvMXjwYA4//HAAFixYwOTJk7n++us5+uijufvuuznuuONq3U6bNm2YNWsWV111FaNGjWL27Nl06tSJL37xi5xzzjl07tyZG2+8kU6dOrFu3ToGDRrEkUceyfr16/nZz37GnDlz6NChAwceeCB77rknAGeddRbnnHMOX/7yl3n99dcZPnw4zz//PFdccQXXXnstQ4YMYc2aNbz0lluCmW3unGRK5OGHH2bevHncddddALz77rssWLCAbt26ccEFF/DEE0/QokULli5dyvLlywHo0aMH/fr1A2DAgAEsWrSozu1UJqg+ffrQu3dvtt9+ewB23nlnFi9eTOfOnbn66qu59957AVi8eDELFixg2bJlfOUrX6FTp04AHHXUUbz00ksAPProozz33HMbt/Hee++xZs0ahgwZwrnnnsuxxx7LmDFjaNWqfeMPlJk1a04yBda7d++NiaM2EcE111zD8OHDNxl/0003sWLFCmbPnk3r1q2pqKjYeG/IFltssXG+li1b1lldlrtMixYtNlm+RYsWbNiwgenTp/Poo4/yz3/+k3bt2rH//vvXeS/KJ598wowZM2jbtu0m48eNG8fIkSN58MEHGTJkCFfffCc9vrRrnTGaWfnyNZkCO/DAA1m/fv0m1zbmzZvH3/72t03mGz58OL///e/56KOPAHjppZdYu3Yt7777Lp/73Odo3bo106ZN47XXsu2R+91332XbbbelXbt2vPDCC8yYMQOAQYMG8fjjj/POO++wYcMG7r777o3LHHTQQVxzzTUb38+dOxeAl19+mT59+nD++eczaNAgXl24INPYzazpy6wkI+lG4FDgzYjYo8q0HwBXAF0i4i0ld9ldBRwCvA+cEBFzChFHPk2OC0kS9957L2effTaXX345bdu2paKiggkTJmwy3ymnnMKiRYvo378/EUGXLl247777OPbYYznssMPo06cPAwcOpGfPnpnGO2LECCZOnEivXr3YbbfdGDx4MAA77LADF1xwAXvvvTedOnWiZ8+ebLPNNgBcffXVnHHGGfTt25cNGzaw3377MXHiRCZMmMC0adNo0aIFvXv35ssHfDXT2M2s6VNEZLNiaT9gDXBLbpKR1B34A9ATGJAmmUOA75EkmX2AqyJin7q2MXDgwKj60LLnn3+eXr16FW5HNmNr1qyhffv2bNiwgSOOOIKTTjqJI444Iu/ls2jC7M/XrPEkzY6IgcXYVmbVZRHxBPB2NZN+C5wH5Ga3USTJKCJiBtBR0vZZxWb5GT9+PP369WOPPfagR48ejB49utQhmVkzU9QL/5JGAUsj4ukq/VDtACzOeb8kHfdGEcOzKq644opSh2BmzVzRkoykdsAFwEGNXM9pwGkAO+64YwEiMzOzrBSzddkXgR7A05IWAd2AOZI+DywFcvtc6ZaO+4yImBQRAyNiYJcuXTIO2czMGqNoSSYinomIz0VERURUkFSJ9Y+IZcAU4HglBgPvRoSryszMmrnMkoykycA/gd0kLZF0ci2zPwi8AiwErgdOzyouMzMrnsyuyUTEN+uYXpEzHMAZmQQy7Z
eFXd8BP65zlpYtW9KnTx82bNhAr169uPnmm2nXrl21806ZMoXnnnuOcePGFTbOOkyfPp0rrriCBx54IK/xZmYN4Tv+M7Dlllsyd+5c5s+fT5s2bZg4cWKN8x5++OFFTzClMG/Jqo33zZjZ5sNJJmNDhw5l4cKFvP3224wePZq+ffsyePBg5s2bByR9lVV2oX/nnXeyxx57sOeee7LffvsB8MEHH3DiiSfSp08f9tprL6ZNm7ZxuTFjxjBixAh22WUXzjvvvI3bfPjhh9l3333p378/Rx11FGvWrAHgL3/5Cz179qR///7cc889dcY+fvx4xo4dy9ChQ9lpp5245557OO+88+jTpw8jRozY2CXOJZdcwqBBg9hjjz047bTTqLzBd/7cOXz9a0Po168fV176U8YM2xeo+TEHb7zxBvvtt9/Ge3OqdsVjZs2Pk0yGNmzYwJ///Gf69OnDxRdfzF577cW8efP4xS9+wfHHH/+Z+S+55BIeeughnn76aaZMmQLAtddeiySeeeYZJk+ezNixYzd2YDl37lxuv/12nnnmGW6//XYWL17MW2+9xaWXXsqjjz7KnDlzGDhwIFdeeSUffPABp556Kn/605+YPXs2y5Yty2sfXn75Zf76178yZcoUjjvuOA444ACeeeYZttxyS6ZOTZ46euaZZzJz5kzmz5/PunXreOCBB5i3ZBUX/eAMfnrZb5k7dy4tWrbcuM7cxxzMnDmT66+/nldffZU//vGPDB8+nLlz5/L0009v7HHazJov98KcgXXr1m38ghw6dCgnn3wy++yzz8ZOJg888EBWrlzJe++9t8lyQ4YM4YQTTuDoo49mzJgxADz55JN873vfA6Bnz57stNNOG7vcHzZs2Mb+xHbffXdee+01Vq1axXPPPceQIUMA+PDDD9l333154YUX6NGjB7vssgsAxx133CadeNbk4IMPpnXr1vTp04ePP/6YESNGAMmjAyofNTBt2jR+9atf8f777/P222/Tu3dvtt25L++vXcOeA/YG4JDRX+eJRx8Can7MwaBBgzjppJP46KOPGD16tJOMWRlwkslA5TWZ+po4cSJPPfUUU6dOZcCAAcyePbvW+at2/b9hwwYigq997WtMnjx5k3kbEk/uNlq0aEHr1q2p7Kmh8lEBH3zwAaeffjqzZs2ie/fujB8/vs5HBdT0mAOAJ554gqlTp3LCCSdw7rnnVlviM7Pmw9VlRTJ06FBuvfVWIGnBtd1227H11ltvMs/LL7/MPvvswyWXXEKXLl1YvHjxJsu99NJLvP766+y22241bmfw4MH8/e9/Z+HChQCsXbuWl156iZ49e7Jo0SJefvllgM8koYaqTCjbbbcda9as2Vg62XqbbWi3VXvm/TvpwPQv9396Daimxxy89tprdO3alVNPPZVTTjmFOXMK0hG3mZVQ+Zdk8mhyXAzjx4/npJNOom/fvrRr146bb775M/P86Ec/YsGCBUQEw4YNY88996Rnz55897vfpU+fPrRq1YqbbrppkxJMVV26dOGmm27im9/8JuvXrwfg0ksvZdddd2XSpEmMHDmSdu3aMXToUFavXt3o/erYsSOnnnoqe+yxB5///OcZNGjQp/t8xTVcct5ZXPHT1vQeMJgOaVKt6TEH06dP59e//jWtW7emffv23HLLLY2Oz8xKK7Ou/ovBXf03XfOWrOL9tWtot1V7+nbryFk/vpi33lzOrTfU3Jw7H/58zRqvmF39l39Jxkrmicce5sZrf0srBZ26foFLrvxdqUMysyJzkrHMjDh8DCMOH0Pfbh19I6bZZqosL/w35ypAq5k/V7Pmp+ySTNu2bVm5cqW/kMpMRLBy5Uratm1b6lDMrB7KrrqsW7duLFmyhBUrVpQ6lM3a8nfWbRx+fvWWG98/v3rLBq+zbdu2dOvWrdGxmVnxlF2Sad26NT169Ch1GJu9g8dN3Ti86LKRG98vumxkqUIysxIou+oyMzNrOpxkzMwsM04ylrmKnKozM9u8OMmYmVlmnGTMzCwzTjJWUhXjpro6zayMZZZkJN0o6U1J83PG/VrSC5LmSbpXUsecaT+WtFDSi5I++6ARaxacNMwsV5YlmZuAEVXGPQLsERF9gZeAHwNI2h04BuidLvM7SS0xM7NmLbMkExFPAG9XGfdwRGxI384AKm/fHgXcFhHrI+JVYCGwd1axmZlZcZTyjv+TgNvT4R1Ikk6lJem4z5B0GnAawI477phlfJYRV6eZbT5KcuFf0k+ADcCt9V02IiZFxMCIGNilS5fCB2dmZgVT9JKMpBOAQ4Fh8WlXyUuB7jmzdUvHmZlZM1bUkoykEcB5wOER8X7OpCnAMZK2kNQD2AX4VzFjMzOzwsusJCNpMrA/sJ2kJcDFJK3JtgAekQQwIyK+ExHPSroDeI6kGu2MiPg4q9issCrcw7KZ1SCzJBMR36xm9A21zP9z4OdZxWNmZsXnO/7NzCwzTjJmZpYZJxkzM8uMk4yZmWXGScYKxnfym1lVTjJWVE5EZpsXJxkzM8uMk4yZmWXGScbMzDLjJGNmZpmpM8lIOkpSh3T4Qkn3SOqffWhmZtbc5dN32U8j4k5JXwa+Cvwa+D2wT6aRWZPnlmJmVpd8qssqe0MeCUyKiKlAm+xCMjOzcpFPklkq6TrgG8CDkrbIczkzM9vM5ZMsjgYeAoZHxCqgE/CjLIMyM7PyUGeSSZ9g+Sbw5XTUBmBBlkFZ0+frMWaWj3xal10MnE/yVEuA1sD/ZhmUmZmVh3yqy44ADgfWAkTEf4AOWQZlZmblIZ8k82FEBBAAkrbKNiQzMysX+SSZO9LWZR0lnQo8Clxf10KSbpT0pqT5OeM6SXpE0oL077bpeEm6WtJCSfN8s6eZWXnI58L/FcBdwN3AbsBFEXFNHuu+CRhRZdw44LGI2AV4LH0PcDCwS/o6jeRmTzMza+byueOfiHgEeKQ+K46IJyRVVBk9Ctg/Hb4ZmE7SqGAUcEtaLTdDUkdJ20fEG/XZppmZNS35tC5bLem9Kq/Fku6VtHM9t9c1J3EsA7qmwzsAi3PmW5KOqy6e0yTNkjRrxYoV9dy8mZkVUz4lmQkkX/p/BAQcA3wRmAPcyKclk3qJiJAUDVhuEjAJYODAgfVe3szMiiefC/+HR8R1EbE6It5Lv+SHR8TtwLb13N5ySdsDpH/fTMcvBbrnzNctHWdmZs1YPknmfUlHS2qRvo4GPkin1bckMQUYmw6PBe7PGX982spsMPCur8eYmTV/+SSZY4FvkZQ6lqfDx0naEjizpoUkTQb+CewmaYmkk4HLgK9JWkDy2IDL0tkfBF4BFpI0jz69YbtjZmZNSZ3XZCLiFeCwGiY/Wcty36xh0rBq5g3gjLpiMTOz5qXOJCOpLXAy0BtoWzk+Ik7KMC4zMysD+VSX/Q/weWA48DjJRfnVWQZlZmblIZ8k86WI+CmwNiJuJnlCph+9bGZmdconyXyU/l0laQ9gG+Bz2YVkZmblIp+bMSelHVn+lKSpcXvgokyjMjOzspBP67I/pIOPA/XtRsbMzDZj+bQu6wgcD
", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], "source": [ "%matplotlib inline\n", "calibrated_gradient = lava.compute_values_and_visualize(dual_sol, trained_with_flag, training_size, portion)" - ] + ], + "outputs": [] }, { "cell_type": "code", "execution_count": 10, "id": "a72963e8", "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[2663.899,\n", - " 2418.3823,\n", - " 2408.9248,\n", - " 2350.5876,\n", - " 2338.0808,\n", - " 2329.4268,\n", - " 2326.6265,\n", - " 2297.5244,\n", - " 2287.1013,\n", - " 2263.9346,\n", - " 2258.9753,\n", - " 2253.043,\n", - " 2252.0122,\n", - " 2201.0159,\n", - " 2147.6462,\n", - " 2133.2493,\n", - " 2081.9287,\n", - " 2079.318,\n", - " 2073.779,\n", - " 2061.6963,\n", - " 2053.6355,\n", - " 2051.4255,\n", - " 2036.6858,\n", - " 2031.4175,\n", - " 1995.8184,\n", - " 1971.8591,\n", - " 1971.0344,\n", - " 1963.4426,\n", - " 1947.4111,\n", - " 1947.0498,\n", - " 1945.0283,\n", - " 1925.0142,\n", - " 1884.5908,\n", - " 1874.2131,\n", - " 1867.8916,\n", - " 1858.4092,\n", - " 1850.8071,\n", - " 1813.2207,\n", - " 1803.3213,\n", - " 1799.1411,\n", - " 1789.0459,\n", - " 1788.5667,\n", - " 1784.4619,\n", - " 1783.9531,\n", - " 1781.6553,\n", - " 1767.4937,\n", - " 1763.6116,\n", - " 1762.5593,\n", - " 1750.5083,\n", - " 1738.9062,\n", - " 1736.5793,\n", - " 1727.5774,\n", - " 1714.363,\n", - " 1705.2625,\n", - " 1704.6936,\n", - " 1701.2852,\n", - " 1697.2239,\n", - " 1694.096,\n", - " 1687.8237,\n", - " 1676.8035,\n", - " 1669.9917,\n", - " 1666.7358,\n", - " 1662.8354,\n", - " 1661.73,\n", - " 1652.4321,\n", - " 1650.4236,\n", - " 1646.2275,\n", - " 1641.9666,\n", - " 1638.2361,\n", - " 1631.4382,\n", - " 1624.0493,\n", - " 1622.6262,\n", - " 1622.4265,\n", - " 1622.2219,\n", - " 1621.0908,\n", - " 1619.897,\n", - " 1619.4084,\n", - " 1615.0515,\n", - " 1612.9219,\n", - " 1610.9905,\n", - " 1609.1643,\n", - " 1608.0625,\n", - " 1597.667,\n", - " 1595.4299,\n", - " 1595.0217,\n", - " 1594.2593,\n", - " 1591.8801,\n", - " 1590.6313,\n", - " 1586.8433,\n", - " 1585.0679,\n", - " 1577.3813,\n", - " 1576.6333,\n", - " 1571.4861,\n", - " 1569.4502,\n", - " 1569.3916,\n", - " 1564.5869,\n", - " 1563.0879,\n", - " 1561.2795,\n", - " 1552.4768,\n", - " 1547.9126,\n", - " 1544.9043,\n", - " 1544.02,\n", - " 1543.3743,\n", - " 1542.5935,\n", - " 1540.04,\n", - " 1535.8984,\n", - " 1535.1047,\n", - " 1529.2646,\n", - " 1528.8088,\n", - " 1525.8662,\n", - " 1520.9766,\n", - " 1518.9983,\n", - " 1517.73,\n", - " 1516.8323,\n", - " 1512.9878,\n", - " 1509.8906,\n", - " 1508.8042,\n", - " 1505.9153,\n", - " 1499.7556,\n", - " 1495.5806,\n", - " 1493.7957,\n", - " 1492.8123,\n", - " 1490.3396,\n", - " 1487.2336,\n", - " 1480.8591,\n", - " 1474.2617,\n", - " 1472.8625,\n", - " 1471.8333,\n", - " 1470.8904,\n", - " 1465.5349,\n", - " 1464.8186,\n", - " 1463.8066,\n", - " 1461.0259,\n", - " 1460.2793,\n", - " 1459.6179,\n", - " 1458.4612,\n", - " 1454.7158,\n", - " 1453.0215,\n", - " 1448.5115,\n", - " 1447.9233,\n", - " 1443.7229,\n", - " 1435.6265,\n", - " 1433.5774,\n", - " 1432.6968,\n", - " 1424.9639,\n", - " 1415.76,\n", - " 1412.5178,\n", - " 1410.4724,\n", - " 1409.936,\n", - " 1407.4138,\n", - " 1406.1782,\n", - " 1399.9534,\n", - " 1398.0647,\n", - " 1385.959,\n", - " 1385.9431,\n", - " 1383.1135,\n", - " 1370.4912,\n", - " 1368.9897,\n", - " 1364.6028,\n", - " 1355.8042,\n", - " 1353.8494,\n", - " 1352.5593,\n", - " 1345.5139,\n", - " 1344.8209,\n", - " 1344.8202,\n", - " 1344.1777,\n", - " 
1333.495,\n", - " 1333.2297,\n", - " 1330.2782,\n", - " 1327.4403,\n", - " 1321.0449,\n", - " 1320.9287,\n", - " 1318.9495,\n", - " 1311.6697,\n", - " 1309.6028,\n", - " 1302.8479,\n", - " 1300.1674,\n", - " 1296.874,\n", - " 1296.7354,\n", - " 1294.0991,\n", - " 1290.5173,\n", - " 1288.8652,\n", - " 1287.6316,\n", - " 1287.4701,\n", - " 1281.884,\n", - " 1281.022,\n", - " 1279.3862,\n", - " 1278.9757,\n", - " 1276.1699,\n", - " 1274.2076,\n", - " 1270.3828,\n", - " 1270.0415,\n", - " 1265.3506,\n", - " 1258.82,\n", - " 1257.717,\n", - " 1253.6329,\n", - " 1251.3843,\n", - " 1251.0995,\n", - " 1250.0282,\n", - " 1248.6414,\n", - " 1245.4634,\n", - " 1241.8962,\n", - " 1241.1239,\n", - " 1239.3059,\n", - " 1234.169,\n", - " 1233.7899,\n", - " 1231.5303,\n", - " 1230.698,\n", - " 1227.1699,\n", - " 1226.6295,\n", - " 1225.2931,\n", - " 1218.2384,\n", - " 1217.7433,\n", - " 1217.6711,\n", - " 1211.0923,\n", - " 1210.9381,\n", - " 1210.8464,\n", - " 1210.0364,\n", - " 1205.9957,\n", - " 1205.6212,\n", - " 1203.9235,\n", - " 1202.8687,\n", - " 1191.6522,\n", - " 1184.575,\n", - " 1184.1488,\n", - " 1181.2472,\n", - " 1175.4486,\n", - " 1174.0198,\n", - " 1172.7352,\n", - " 1172.1118,\n", - " 1170.4711,\n", - " 1170.2284,\n", - " 1167.6708,\n", - " 1166.517,\n", - " 1165.8794,\n", - " 1155.8302,\n", - " 1154.6438,\n", - " 1153.9292,\n", - " 1153.8081,\n", - " 1153.1477,\n", - " 1152.7694,\n", - " 1150.071,\n", - " 1148.7996,\n", - " 1144.2443,\n", - " 1135.2875,\n", - " 1133.6198,\n", - " 1131.0701,\n", - " 1130.8463,\n", - " 1129.9513,\n", - " 1124.8054,\n", - " 1122.7885,\n", - " 1119.658,\n", - " 1118.1664,\n", - " 1118.0905,\n", - " 1116.0891,\n", - " 1115.1405,\n", - " 1114.9282,\n", - " 1107.61,\n", - " 1105.5348,\n", - " 1104.3372,\n", - " 1103.6382,\n", - " 1102.7383,\n", - " 1099.6337,\n", - " 1098.4093,\n", - " 1095.632,\n", - " 1090.8948,\n", - " 1089.1511,\n", - " 1089.1212,\n", - " 1088.9426,\n", - " 1083.7148,\n", - " 1083.5466,\n", - " 1081.3716,\n", - " 1080.5405,\n", - " 1078.1995,\n", - " 1074.0426,\n", - " 1073.9324,\n", - " 1072.5017,\n", - " 1072.2646,\n", - " 1065.5371,\n", - " 1063.801,\n", - " 1062.5417,\n", - " 1058.3298,\n", - " 1054.4844,\n", - " 1052.3193,\n", - " 1051.7814,\n", - " 1049.7252,\n", - " 1049.1086,\n", - " 1048.0952,\n", - " 1048.0922,\n", - " 1046.658,\n", - " 1046.6533,\n", - " 1044.6914,\n", - " 1044.2473,\n", - " 1043.7323,\n", - " 1043.3058,\n", - " 1041.1985,\n", - " 1040.8118,\n", - " 1038.8436,\n", - " 1036.8083,\n", - " 1031.0182,\n", - " 1030.3387,\n", - " 1030.2136,\n", - " 1028.2576,\n", - " 1026.1758,\n", - " 1025.5194,\n", - " 1022.5399,\n", - " 1019.57056,\n", - " 1018.1738,\n", - " 1017.5022,\n", - " 1015.63184,\n", - " 1014.75415,\n", - " 1010.4082,\n", - " 1010.152,\n", - " 1009.6083,\n", - " 1003.38586,\n", - " 997.4502,\n", - " 994.38,\n", - " 994.2002,\n", - " 988.0239,\n", - " 987.02954,\n", - " 986.24023,\n", - " 984.73193,\n", - " 983.48206,\n", - " 980.8484,\n", - " 980.63464,\n", - " 979.2776,\n", - " 979.1256,\n", - " 978.44055,\n", - " 978.10364,\n", - " 976.5177,\n", - " 976.45715,\n", - " 974.9298,\n", - " 973.6881,\n", - " 971.1615,\n", - " 970.56726,\n", - " 970.4335,\n", - " 970.27515,\n", - " 969.8839,\n", - " 969.79126,\n", - " 968.8131,\n", - " 966.6482,\n", - " 966.3568,\n", - " 963.7926,\n", - " 963.38513,\n", - " 963.19775,\n", - " 959.1194,\n", - " 958.8291,\n", - " 958.1738,\n", - " 957.5725,\n", - " 954.4133,\n", - " 954.2838,\n", - " 951.0459,\n", - " 945.67456,\n", - " 943.8081,\n", - " 940.7892,\n", - " 
938.8722,\n", - " 938.2727,\n", - " 937.3617,\n", - " 936.63916,\n", - " 934.35815,\n", - " 933.1559,\n", - " 928.67456,\n", - " 924.5698,\n", - " 923.6261,\n", - " 921.98145,\n", - " 921.96533,\n", - " 918.8264,\n", - " 916.7001,\n", - " 914.916,\n", - " 912.0117,\n", - " 909.01465,\n", - " 907.6251,\n", - " 907.5635,\n", - " 907.49097,\n", - " 905.8613,\n", - " 905.397,\n", - " 903.6316,\n", - " 900.91003,\n", - " 900.8938,\n", - " 900.4967,\n", - " 900.04724,\n", - " 892.74414,\n", - " 891.6416,\n", - " 888.9929,\n", - " 888.98645,\n", - " 886.7097,\n", - " 882.2129,\n", - " 878.6483,\n", - " 878.6194,\n", - " 873.71643,\n", - " 873.4133,\n", - " 871.37915,\n", - " 868.23535,\n", - " 867.67126,\n", - " 865.4374,\n", - " 862.3972,\n", - " 860.3264,\n", - " 857.9928,\n", - " 857.81067,\n", - " 857.4342,\n", - " 846.0005,\n", - " 845.1819,\n", - " 843.8137,\n", - " 843.1338,\n", - " 842.6821,\n", - " 842.1737,\n", - " 841.4591,\n", - " 840.25903,\n", - " 839.78406,\n", - " 839.14075,\n", - " 835.422,\n", - " 833.8324,\n", - " 829.91614,\n", - " 828.2988,\n", - " 826.2992,\n", - " 819.4398,\n", - " 817.1433,\n", - " 815.6626,\n", - " 813.17065,\n", - " 812.2661,\n", - " 811.9354,\n", - " 810.39075,\n", - " 808.81726,\n", - " 802.2743,\n", - " 801.3441,\n", - " 799.6803,\n", - " 799.4315,\n", - " 799.35986,\n", - " 794.19495,\n", - " 793.3876,\n", - " 792.6616,\n", - " 791.99304,\n", - " 791.6411,\n", - " 791.32544,\n", - " 787.0282,\n", - " 784.6156,\n", - " 783.5984,\n", - " 782.27625,\n", - " 782.18994,\n", - " 781.9271,\n", - " 781.43274,\n", - " 778.0597,\n", - " 777.9508,\n", - " 777.6033,\n", - " 777.05054,\n", - " 774.8196,\n", - " 774.7185,\n", - " 773.04065,\n", - " 772.5045,\n", - " 770.7899,\n", - " 770.5547,\n", - " 770.43164,\n", - " 765.58936,\n", - " 765.5487,\n", - " 763.39636,\n", - " 763.2743,\n", - " 763.1903,\n", - " 761.5967,\n", - " 759.9712,\n", - " 757.56555,\n", - " 755.76355,\n", - " 755.7063,\n", - " 754.1964,\n", - " 753.5576,\n", - " 751.31287,\n", - " 748.396,\n", - " 744.75903,\n", - " 744.1644,\n", - " 742.72864,\n", - " 739.51306,\n", - " 737.8944,\n", - " 737.6376,\n", - " 736.9938,\n", - " 735.31116,\n", - " 730.3843,\n", - " 730.08167,\n", - " 729.5432,\n", - " 728.95593,\n", - " 727.3773,\n", - " 726.7732,\n", - " 726.3849,\n", - " 722.93823,\n", - " 720.23645,\n", - " 719.4729,\n", - " 717.0386,\n", - " 716.9343,\n", - " 716.6621,\n", - " 715.8983,\n", - " 714.1494,\n", - " 713.7262,\n", - " 712.2512,\n", - " 711.47217,\n", - " 710.7146,\n", - " 709.48486,\n", - " 709.041,\n", - " 708.6201,\n", - " 705.65063,\n", - " 702.7783,\n", - " 702.2532,\n", - " 701.1804,\n", - " 700.7239,\n", - " 699.22546,\n", - " 698.64795,\n", - " 695.7804,\n", - " 694.63965,\n", - " 694.46277,\n", - " 694.0165,\n", - " 693.44763,\n", - " 692.7385,\n", - " 692.62354,\n", - " 691.5741,\n", - " 690.6748,\n", - " 689.5603,\n", - " 688.76404,\n", - " 687.933,\n", - " 687.60876,\n", - " 687.13684,\n", - " 686.34937,\n", - " 686.26624,\n", - " 686.0852,\n", - " 684.63257,\n", - " 684.4928,\n", - " 683.38745,\n", - " 683.2269,\n", - " 683.20764,\n", - " 682.7495,\n", - " 682.0989,\n", - " 680.2654,\n", - " 679.93945,\n", - " 679.8119,\n", - " 679.1531,\n", - " 679.1106,\n", - " 677.42615,\n", - " 676.3812,\n", - " 673.9763,\n", - " 673.2666,\n", - " 671.7792,\n", - " 667.74194,\n", - " 665.0824,\n", - " 664.0762,\n", - " 662.75256,\n", - " 658.984,\n", - " 658.3624,\n", - " 657.4253,\n", - " 656.458,\n", - " 656.0917,\n", - " 654.2572,\n", - " 653.96497,\n", - " 653.9602,\n", - " 
653.8727,\n", - " 653.5408,\n", - " 652.8146,\n", - " 652.1887,\n", - " 650.6255,\n", - " 648.53906,\n", - " 648.4885,\n", - " 648.47546,\n", - " 648.1378,\n", - " 648.0383,\n", - " 646.65967,\n", - " 646.2826,\n", - " 646.03687,\n", - " 645.14185,\n", - " 642.5685,\n", - " 642.39575,\n", - " 641.72095,\n", - " 640.81604,\n", - " 637.21216,\n", - " 635.73376,\n", - " 635.16895,\n", - " 631.5996,\n", - " 630.80066,\n", - " 630.2853,\n", - " 627.7517,\n", - " 627.5592,\n", - " 625.7689,\n", - " 623.8508,\n", - " 623.32263,\n", - " 623.1472,\n", - " 621.6372,\n", - " 619.31885,\n", - " 618.0785,\n", - " 615.4812,\n", - " 614.31067,\n", - " 613.3816,\n", - " 612.4663,\n", - " 612.3229,\n", - " 610.90076,\n", - " 610.0753,\n", - " 608.0624,\n", - " 603.4142,\n", - " 603.13513,\n", - " 601.7355,\n", - " 600.5969,\n", - " 599.2189,\n", - " 598.7318,\n", - " 598.4326,\n", - " 596.08484,\n", - " 595.56213,\n", - " 591.7235,\n", - " 591.06274,\n", - " 589.646,\n", - " 589.32837,\n", - " 589.08044,\n", - " 588.5332,\n", - " 587.86194,\n", - " 584.71375,\n", - " 583.97595,\n", - " 581.95105,\n", - " 581.60376,\n", - " 581.2826,\n", - " 577.5139,\n", - " 575.5558,\n", - " 574.7926,\n", - " 574.1311,\n", - " 573.44556,\n", - " 572.34314,\n", - " 569.87317,\n", - " 569.5261,\n", - " 568.6161,\n", - " 566.95593,\n", - " 566.7949,\n", - " 565.948,\n", - " 565.59436,\n", - " 564.5049,\n", - " 563.5563,\n", - " 562.8616,\n", - " 562.6483,\n", - " 562.23706,\n", - " 560.4159,\n", - " 559.86206,\n", - " 559.4098,\n", - " 558.7633,\n", - " 558.2865,\n", - " 557.7363,\n", - " 557.71106,\n", - " 556.2733,\n", - " 555.7036,\n", - " 555.5093,\n", - " 555.04565,\n", - " 554.88464,\n", - " 553.10315,\n", - " 551.3375,\n", - " 551.25366,\n", - " 550.7135,\n", - " 550.30164,\n", - " 550.26404,\n", - " 550.1747,\n", - " 548.8999,\n", - " 548.1113,\n", - " 546.9938,\n", - " 544.6559,\n", - " 540.6465,\n", - " 540.1765,\n", - " 540.073,\n", - " 539.96875,\n", - " 538.95435,\n", - " 538.14514,\n", - " 537.9354,\n", - " 533.25916,\n", - " 530.5409,\n", - " 529.3231,\n", - " 526.3428,\n", - " 525.3042,\n", - " 525.15405,\n", - " 524.4342,\n", - " 524.2446,\n", - " 522.13025,\n", - " 521.9098,\n", - " 521.0045,\n", - " 520.4662,\n", - " 519.87195,\n", - " 519.2246,\n", - " 519.01855,\n", - " 517.7189,\n", - " 517.02026,\n", - " 515.09265,\n", - " 514.9236,\n", - " 513.89343,\n", - " 513.4707,\n", - " 511.5686,\n", - " 510.8064,\n", - " 509.4254,\n", - " 507.86646,\n", - " 507.70605,\n", - " 507.11804,\n", - " 506.5431,\n", - " 505.91467,\n", - " 505.06555,\n", - " 504.1991,\n", - " 501.93506,\n", - " 498.88147,\n", - " 496.53088,\n", - " 495.11487,\n", - " 494.34033,\n", - " 494.1726,\n", - " 492.1853,\n", - " 491.9485,\n", - " 491.7262,\n", - " 490.96814,\n", - " 489.7749,\n", - " 486.81287,\n", - " 485.61536,\n", - " 485.099,\n", - " 482.97546,\n", - " 482.04175,\n", - " 481.94226,\n", - " 481.40442,\n", - " 480.8357,\n", - " 480.79602,\n", - " 479.09937,\n", - " 478.80933,\n", - " 478.48303,\n", - " 478.38318,\n", - " 477.73633,\n", - " 477.71228,\n", - " 477.60522,\n", - " 477.2782,\n", - " 474.65857,\n", - " 474.6371,\n", - " 474.04565,\n", - " 473.6681,\n", - " 473.25647,\n", - " 472.99268,\n", - " 472.75806,\n", - " 472.5796,\n", - " 472.0204,\n", - " 471.5293,\n", - " 470.45996,\n", - " 470.13538,\n", - " 469.19397,\n", - " 468.94934,\n", - " 467.78516,\n", - " 467.33997,\n", - " 466.0327,\n", - " 465.30383,\n", - " 464.93225,\n", - " 463.8734,\n", - " 460.0808,\n", - " 459.31543,\n", - " 457.22644,\n", - " 
457.01392,\n", - " 456.22205,\n", - " 454.33362,\n", - " 454.25403,\n", - " 454.10767,\n", - " 453.6958,\n", - " 452.57166,\n", - " 449.66162,\n", - " 448.8379,\n", - " 448.3828,\n", - " 447.54163,\n", - " 447.0149,\n", - " 446.58142,\n", - " 446.25977,\n", - " 445.72998,\n", - " 443.0995,\n", - " 439.76404,\n", - " 438.58826,\n", - " 438.5769,\n", - " 437.03174,\n", - " 435.46924,\n", - " 434.56592,\n", - " 433.99023,\n", - " 432.6433,\n", - " 432.54077,\n", - " 428.06213,\n", - " 427.67358,\n", - " 427.13196,\n", - " 425.7201,\n", - " 425.14978,\n", - " 425.12878,\n", - " 421.5818,\n", - " 421.56006,\n", - " 420.3003,\n", - " 419.83557,\n", - " 416.7318,\n", - " 416.58435,\n", - " 415.95312,\n", - " 414.86426,\n", - " 412.76526,\n", - " 412.64,\n", - " 410.9087,\n", - " 410.55054,\n", - " 409.7743,\n", - " 407.6128,\n", - " 406.75745,\n", - " 406.74585,\n", - " 405.0327,\n", - " 403.3551,\n", - " 402.48315,\n", - " 400.2633,\n", - " 399.84863,\n", - " 397.89954,\n", - " 394.60864,\n", - " 392.89185,\n", - " 392.41113,\n", - " 392.3109,\n", - " 392.07104,\n", - " 389.07495,\n", - " 388.35864,\n", - " 387.00684,\n", - " 385.9939,\n", - " 385.1714,\n", - " 384.56128,\n", - " 383.98572,\n", - " 383.91724,\n", - " 382.68298,\n", - " 382.53052,\n", - " 382.406,\n", - " 381.53906,\n", - " 381.09302,\n", - " 380.71143,\n", - " 379.615,\n", - " 376.6366,\n", - " 375.92566,\n", - " 373.2926,\n", - " 373.27844,\n", - " 371.25806,\n", - " 370.5487,\n", - " 369.90027,\n", - " 369.72803,\n", - " 368.6975,\n", - " 367.6018,\n", - " 365.85632,\n", - " 365.7661,\n", - " 362.56873,\n", - " 360.64136,\n", - " 359.68872,\n", - " 359.5404,\n", - " 358.33325,\n", - " 356.1422,\n", - " 354.96814,\n", - " 354.42297,\n", - " 353.48132,\n", - " 352.83557,\n", - " 352.06653,\n", - " 350.50586,\n", - " 350.42468,\n", - " 348.88245,\n", - " 348.05493,\n", - " 347.9204,\n", - " 347.88354,\n", - " 347.60535,\n", - " 346.10156,\n", - " 345.8523,\n", - " 344.4359,\n", - " 344.1842,\n", - " 344.16272,\n", - " 344.01282,\n", - " 343.59143,\n", - " 342.77124,\n", - " 342.45605,\n", - " 341.906,\n", - " 341.4043,\n", - " 341.36462,\n", - " 339.14197,\n", - " 338.69714,\n", - " 337.4574,\n", - " 337.08423,\n", - " 336.91528,\n", - " 336.56458,\n", - " 335.85986,\n", - " 335.43152,\n", - " 335.37378,\n", - " 335.29016,\n", - " 335.00513,\n", - " 334.8269,\n", - " 333.7915,\n", - " 333.05408,\n", - " 332.83496,\n", - " 332.1455,\n", - " 330.85974,\n", - " 330.511,\n", - " 330.38086,\n", - " 329.0714,\n", - " 328.78003,\n", - " 328.30896,\n", - " 328.15186,\n", - " 327.828,\n", - " 327.5979,\n", - " 327.33423,\n", - " 326.9364,\n", - " 326.2605,\n", - " 325.2495,\n", - " 325.02515,\n", - " 324.2024,\n", - " 322.58435,\n", - " 321.7118,\n", - " 321.3352,\n", - " 319.06165,\n", - " 317.64685,\n", - " 317.08765,\n", - " 316.30902,\n", - " 315.97333,\n", - " 315.81213,\n", - " 315.69775,\n", - " 313.80347,\n", - " 313.58484,\n", - " 313.28076,\n", - " 311.76917,\n", - " 309.979,\n", - " 309.17218,\n", - " 308.80945,\n", - " 308.16736,\n", - " 308.01764,\n", - " 306.19965,\n", - " 306.19403,\n", - " 304.28058,\n", - " 302.72925,\n", - " 302.0005,\n", - " 301.77252,\n", - " 300.44763,\n", - " 300.1792,\n", - " 299.4417,\n", - " 298.52124,\n", - " 296.39307,\n", - " 296.23798,\n", - " 296.18164,\n", - " 296.0865,\n", - " 295.99585,\n", - " 295.8177,\n", - " 294.30017,\n", - " 294.12177,\n", - " 294.0819,\n", - " 292.4134,\n", - " 291.16833,\n", - " 290.9162,\n", - " 290.87225,\n", - " 290.18152,\n", - " 288.6944,\n", - " 288.1938,\n", 
- " 288.07074,\n", - " 287.96777,\n", - " 287.85815,\n", - " 287.3664,\n", - " 286.9831,\n", - " 286.89746,\n", - " 286.01727,\n", - " 285.7337,\n", - " 285.61792,\n", - " 285.4837,\n", - " 285.3824,\n", - " 285.15393,\n", - " 284.34216,\n", - " 283.88092,\n", - " 282.74548,\n", - " 282.4707,\n", - " 282.372,\n", - " 281.91382,\n", - " 281.29742,\n", - " 280.46655,\n", - " 279.85492,\n", - " 279.49304,\n", - " 278.26117,\n", - " 277.61047,\n", - " 276.98572,\n", - " 276.62183,\n", - " 276.0066,\n", - " 275.74707,\n", - " 274.3576,\n", - " 274.1809,\n", - " 274.01062,\n", - " 271.5113,\n", - " 271.1494,\n", - " 270.8523,\n", - " 268.24683,\n", - " 267.78088,\n", - " 267.68256,\n", - " 267.64142,\n", - " 265.69427,\n", - " 262.614,\n", - " 262.16418,\n", - " 262.0078,\n", - " 261.95074,\n", - " 261.0147,\n", - " 260.21124,\n", - " 259.22906,\n", - " 259.16864,\n", - " 258.16553,\n", - " 257.21112,\n", - " 256.11285,\n", - " 255.578,\n", - " 255.12506,\n", - " 254.97644,\n", - " 254.54468,\n", - " 253.79132,\n", - " 253.34753,\n", - " 253.19922,\n", - " 252.89435,\n", - " 251.67499,\n", - " 250.41339,\n", - " 249.07642,\n", - " 247.24133,\n", - " 247.00641,\n", - " 246.79785,\n", - " 245.62463,\n", - " 244.67957,\n", - " 244.46143,\n", - " 244.25385,\n", - " ...]" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], "source": [ "calibrated_gradient" - ] + ], + "outputs": [] }, { "cell_type": "code", "execution_count": null, "id": "5b4759d8", "metadata": {}, - "outputs": [], - "source": [] + "source": [], + "outputs": [] } ], "metadata": { diff --git a/imbalanced_settings.ipynb b/imbalanced_settings.ipynb new file mode 100644 index 0000000..cb15e44 --- /dev/null +++ b/imbalanced_settings.ipynb @@ -0,0 +1,2638 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "c:\\Users\\21520\\anaconda3\\Lib\\site-packages\\torchtext\\data\\__init__.py:4: UserWarning: \n", + "/!\\ IMPORTANT WARNING ABOUT TORCHTEXT STATUS /!\\ \n", + "Torchtext is deprecated and the last released version will be 0.18 (this one). You can silence this warning by calling the following at the beginnign of your scripts: `import torchtext; torchtext.disable_torchtext_deprecation_warning()`\n", + " warnings.warn(torchtext._TORCHTEXT_DEPRECATION_MSG)\n", + "c:\\Users\\21520\\PycharmProjects\\LAVA\\LAVA\\otdd\\pytorch\\utils.py:7: TqdmExperimentalWarning: Using `tqdm.autonotebook.tqdm` in notebook mode. Use `tqdm.tqdm` instead to force console mode (e.g. 
in jupyter console)\n", + " from tqdm.autonotebook import tqdm\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "True\n" + ] + } + ], + "source": [ + "import lava\n", + "import torch\n", + "print(torch.cuda.is_available()) # Should return True if GPU is available\n", + "import os\n", + "import torch.nn as nn\n", + "import torch.nn.functional as F\n", + "import torch.optim as optim\n", + "import torchvision.models as models\n", + "from torch.autograd import Variable\n", + "\n", + "import matplotlib.pyplot as plt\n", + "from torch import tensor\n", + "from torchvision import datasets, transforms\n", + "import pandas as pd\n", + "import numpy as n\n", + "\n", + "from torch.utils.data import Dataset, TensorDataset, DataLoader" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "%load_ext autoreload\n", + "\n", + "%autoreload 2" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "import pickle\n", + "with open('imbalanced_train.pkl', 'rb') as f:\n", + " X_train_imbalanced, y_train_imbalanced = pickle.load(f)\n", + "with open('imbalanced_dev.pkl', 'rb') as f:\n", + " X_dev_imbalanced, y_dev_imbalanced = pickle.load(f)\n", + "with open('imbalanced_test.pkl', 'rb') as f:\n", + " X_test_imbalanced, y_test_imbalanced = pickle.load(f)\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(1000, 28, 28) (1000,)\n", + "\n" + ] + } + ], + "source": [ + "print(X_train_imbalanced.shape, y_train_imbalanced.shape)\n", + "print(type(X_train_imbalanced))" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) 
torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + 
"torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) 
torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + 
"torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) 
torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + 
"torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) 
torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + 
"torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n", + "torch.Size([8, 28, 28]) torch.Size([8])\n" + ] + } + ], + "source": [ + "# Chuyển dữ liệu NumPy thành tensor\n", + "X_tensor = torch.tensor(X_train_imbalanced, dtype=torch.float32)\n", + "y_tensor = torch.tensor(y_train_imbalanced, dtype=torch.long)\n", + "\n", + "# Tạo DataLoader với batch size mong muốn\n", + "batch_size = 8\n", + "dataset = TensorDataset(X_tensor, y_tensor)\n", + "dataloader = {}\n", + "dataloader['train'] = DataLoader(dataset, batch_size=batch_size, shuffle=True)\n", + "\n", + "# Duyệt qua từng batch\n", + "for X_batch, y_batch in dataloader['train']:\n", + " print(X_batch.shape, y_batch.shape)\n", + "X_tensor = torch.tensor(X_test_imbalanced, dtype= torch.float32)\n", + "y_tensor = 
torch.tensor(y_test_imbalanced, dtype = torch.long)\n", + "dataset = TensorDataset(X_tensor, y_tensor)\n", + "dataloader['test'] = DataLoader(dataset, batch_size=batch_size, shuffle=True)\n", + "for X_batch, y_batch in dataloader['test']:\n", + " print(X_batch.shape, y_batch.shape)\n", + "#print(dataloader)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "PreActResNet(\n", + " (conv1): Conv2d(3, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (layer1): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential()\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential()\n", + " )\n", + " )\n", + " (layer2): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(64, 128, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential()\n", + " )\n", + " )\n", + " (layer3): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(128, 256, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential()\n", + " )\n", + " 
)\n", + " (layer4): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(256, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential()\n", + " )\n", + " )\n", + " (linear): Linear(in_features=512, out_features=100, bias=True)\n", + ")" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import torch\n", + "import torch.nn as nn\n", + "import torch.nn.functional as F\n", + "from torchvision import transforms, datasets\n", + "from torch.utils.data import DataLoader, TensorDataset\n", + "import pickle\n", + "\n", + "# Định nghĩa mô hình PreActResNet18 như đã thực hiện trước đó\n", + "class PreActBlock(nn.Module):\n", + " expansion = 1\n", + "\n", + " def __init__(self, in_planes, planes, stride=1):\n", + " super(PreActBlock, self).__init__()\n", + " self.bn1 = nn.BatchNorm2d(in_planes)\n", + " self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)\n", + " self.bn2 = nn.BatchNorm2d(planes)\n", + " self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False)\n", + "\n", + " self.shortcut = nn.Sequential()\n", + " if stride != 1 or in_planes != self.expansion * planes:\n", + " self.shortcut = nn.Sequential(\n", + " nn.Conv2d(in_planes, self.expansion * planes, kernel_size=1, stride=stride, bias=False)\n", + " )\n", + "\n", + " def forward(self, x):\n", + " out = F.relu(self.bn1(x))\n", + " shortcut = self.shortcut(out)\n", + " out = self.conv1(out)\n", + " out = self.conv2(F.relu(self.bn2(out)))\n", + " out += shortcut\n", + " return out\n", + "\n", + "class PreActResNet(nn.Module):\n", + " def __init__(self, block, num_blocks, num_classes=100):\n", + " super(PreActResNet, self).__init__()\n", + " self.in_planes = 64\n", + "\n", + " self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False)\n", + " self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)\n", + " self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)\n", + " self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)\n", + " self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)\n", + " self.linear = nn.Linear(512*block.expansion, num_classes)\n", + "\n", + " def _make_layer(self, block, planes, num_blocks, stride):\n", + " strides = [stride] + [1]*(num_blocks-1)\n", + " layers = []\n", + " for stride in strides:\n", + " layers.append(block(self.in_planes, planes, stride))\n", + " self.in_planes = planes * block.expansion\n", + " return nn.Sequential(*layers)\n", + "\n", + " def forward(self, x):\n", + " out = self.conv1(x)\n", + " out = self.layer1(out)\n", + " out = 
self.layer2(out)\n", + " out = self.layer3(out)\n", + " out = self.layer4(out)\n", + " out = F.avg_pool2d(out, 4)\n", + " out = out.view(out.size(0), -1)\n", + " out = self.linear(out)\n", + " return out\n", + "\n", + "def PreActResNet18():\n", + " return PreActResNet(PreActBlock, [2,2,2,2])\n", + "\n", + "# Khởi tạo mô hình\n", + "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n", + "net_test = PreActResNet18().to(device)\n", + "feature_extractor_name = 'preact_resnet18_test_mnist.pth'\n", + "net_test.load_state_dict(torch.load('checkpoint/' + feature_extractor_name, map_location=torch.device('cpu')))\n", + "net_test.eval()\n" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "PreActResNet(\n", + " (conv1): Conv2d(3, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (layer1): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential()\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential()\n", + " )\n", + " )\n", + " (layer2): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(64, 128, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential()\n", + " )\n", + " )\n", + " (layer3): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(128, 256, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, 
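+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Two things are worth noting about `net_test` before it is used as an embedder. First, the classification head of this `PreActResNet` is the module named `linear`, so the later assignment `embedder.fc = torch.nn.Identity()` adds a module that `forward()` never calls; the embedding therefore stays the 100-dimensional output of `linear`, which is consistent with the 100-wide embedded batches and the `(1000, 100)` feature shapes further down. Second, with the 28x28 inputs used later the spatial size after `layer4` is 4x4, so `F.avg_pool2d(out, 4)` collapses it to 1x1 before the head. The cell below is only an optional shape check added for illustration (the dummy tensor and its batch size of 2 are assumptions, not part of the original run)."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Optional shape check for 28x28 inputs (assumes net_test and device from the cell above).\n",
+    "dummy = torch.randn(2, 3, 28, 28, device=device)\n",
+    "with torch.no_grad():\n",
+    "    out = net_test(dummy)\n",
+    "print(out.shape)  # expected: torch.Size([2, 100])"
+   ]
+  },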
track_running_stats=True)\n", + " (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential()\n", + " )\n", + " )\n", + " (layer4): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(256, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential()\n", + " )\n", + " )\n", + " (linear): Linear(in_features=512, out_features=100, bias=True)\n", + " (fc): Identity()\n", + ")" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "embedder = net_test.to(device)\n", + "embedder.fc = torch.nn.Identity()\n", + "for p in embedder.parameters():\n", + " p.requires_grad = False\n", + "embedder.to(device)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0.18.0\n", + "2.3.0\n", + "Cuda device: 0\n", + "cude devices: 1\n", + "cuda:0\n" + ] + } + ], + "source": [ + "cuda_num = 0\n", + "import torchvision\n", + "print(torchvision.__version__)\n", + "import torch\n", + "print(torch.__version__)\n", + "import os\n", + "#os.environ[\"CUDA_VISIBLE_DEVICES\"]=str(cuda_num)\n", + "#print(os.environ[\"CUDA_VISIBLE_DEVICES\"])\n", + "#torch.cuda.set_device(cuda_num)\n", + "print(\"Cuda device: \", torch.cuda.current_device())\n", + "print(\"cude devices: \", torch.cuda.device_count())\n", + "device = torch.device('cuda:' + str(cuda_num) if torch.cuda.is_available() else 'cpu')\n", + "print(device)\n", + "training_size = 1000\n", + "valid_size = 200\n", + "resize = 32\n", + "portion = 0.3" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "transfer to batch" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": {}, + "outputs": [], + "source": [ + "from otdd.pytorch.distance_fast import DatasetDistance, FeatureCost, batch_augmented_cost\n", + "from otdd.pytorch.wasserstein import pwdist_exact\n", + "from functools import partial\n", + "from lava import train_with_corrupt_flag, get_indices, values, sort_and_keep_indices\n", + "resize = 28" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [], + "source": [ + "feature_cost = FeatureCost(src_embedding = embedder,\n", + " src_dim = (1, resize,resize),\n", + " tgt_embedding = embedder,\n", + " tgt_dim = (1, resize,resize),\n", + " p = 2,\n", + " device='cuda')\n", + "dist = 
DatasetDistance(dataloader['train'], dataloader['test'],\n", + " inner_ot_method = 'exact',\n", + " debiased_loss = True,\n", + " feature_cost = feature_cost,\n", + " λ_x=1.0, λ_y=1.0,\n", + " sqrt_method = 'spectral',\n", + " sqrt_niters=10,\n", + " precision='single',\n", + " p = 2, entreg = 1e-1,\n", + " device='cuda')" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "81d4f1664dcd4a6b843dcf02815a9bbf", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/125 [00:00 maxsamples:\n", + " idxs_1 = sorted(np.random.choice(\n", + " dist.X1.shape[0], maxsamples, replace=False))\n", + "else:\n", + " idxs_1 = np.s_[:] # hack to get a full slice\n", + "\n", + "if maxsamples and dist.X2.shape[0] > maxsamples:\n", + " idxs_2 = sorted(np.random.choice(\n", + " dist.X2.shape[0], maxsamples, replace=False))\n", + "else:\n", + " idxs_2 = np.s_[:] # hack to get a full slice\n", + "Z1 = torch.cat((dist.X1[idxs_1],\n", + " dist.Y1[idxs_1].type(dist.X1.dtype).unsqueeze(1)), -1)\n", + "Z2 = torch.cat((dist.X2[idxs_2],\n", + " dist.Y2[idxs_2].type(dist.X2.dtype).unsqueeze(1)), -1)\n", + "Z1 = Z1.to(device)\n", + "Z2 = Z2.to(device)" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([1000, 785]) torch.Size([4000, 785])\n" + ] + } + ], + "source": [ + "print(Z1.shape, Z2.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([1000, 785]) torch.Size([4000, 785])\n", + "Z1 shape in batch: torch.Size([1, 1000, 785])\n", + "Z2 shape in batch: torch.Size([1, 4000, 785])\n", + "1 1000 784\n", + "torch.Size([1, 1000, 100])\n", + "1 4000 784\n", + "torch.Size([1, 4000, 100])\n", + "torch.Size([1, 1000, 4000])\n", + "torch.Size([1, 1000, 4000])\n", + "Gia tri M: tensor([[[3, 2, 2, ..., 2, 2, 2],\n", + " [7, 6, 6, ..., 6, 6, 6],\n", + " [7, 6, 6, ..., 6, 6, 6],\n", + " ...,\n", + " [7, 6, 6, ..., 6, 6, 6],\n", + " [3, 2, 2, ..., 2, 2, 2],\n", + " [3, 2, 2, ..., 2, 2, 2]]], device='cuda:0')\n", + "torch.Size([1, 1000, 4000])\n", + "torch.Size([1, 1000, 4000])\n", + "gia tri D: tensor([[[6.1884e+11, 4.5241e+12, 8.3257e+10, ..., 1.4598e+12,\n", + " 2.0739e+12, 1.4346e+11],\n", + " [6.6336e+10, 1.2097e+12, 1.7407e+12, ..., 3.9091e+10,\n", + " 1.7530e+11, 4.3406e+11],\n", + " [1.2148e+12, 6.5539e+10, 4.7208e+12, ..., 4.6557e+11,\n", + " 2.0537e+11, 2.2803e+12],\n", + " ...,\n", + " [1.5724e+12, 1.5813e+10, 5.4037e+12, ..., 6.9498e+11,\n", + " 3.6443e+11, 2.7618e+12],\n", + " [4.8911e+11, 4.1545e+11, 3.1323e+12, ..., 7.7165e+10,\n", + " 3.2155e+09, 1.2247e+12],\n", + " [4.4865e+12, 6.0413e+11, 1.0172e+13, ..., 2.8753e+12,\n", + " 2.1430e+12, 6.3812e+12]]], device='cuda:0')\n", + "torch.Size([1, 1000, 4000])\n", + "Z1 shape in batch: torch.Size([1, 4000, 785])\n", + "Z2 shape in batch: torch.Size([1, 1000, 785])\n", + "1 4000 784\n", + "torch.Size([1, 4000, 100])\n", + "1 1000 784\n", + "torch.Size([1, 1000, 100])\n", + "torch.Size([1, 4000, 1000])\n", + "torch.Size([1, 4000, 1000])\n", + "Gia tri M: tensor([[[12, 13, 13, ..., 13, 12, 12],\n", + " [ 8, 9, 9, ..., 9, 8, 8],\n", + " [ 8, 9, 9, ..., 9, 8, 8],\n", + " ...,\n", + " [ 8, 9, 9, ..., 9, 8, 8],\n", + " [ 8, 9, 9, ..., 9, 8, 8],\n", + " [ 8, 9, 9, ..., 9, 8, 8]]], 
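+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As noted earlier, `embedder.fc = torch.nn.Identity()` leaves the `linear` head in place, so the feature cost above is computed on 100-dimensional logits. If 512-dimensional penultimate features were preferred for `FeatureCost`, the module the forward pass actually uses would have to be replaced instead. The cell below is only a sketch of that alternative (the name `embedder_512` is hypothetical); it was not executed in this notebook and the results shown here all use the 100-dimensional embedding."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Alternative sketch (not executed here): expose 512-d penultimate features\n",
+    "# by replacing the module the forward pass actually uses ('linear').\n",
+    "import torch.nn as nn\n",
+    "\n",
+    "embedder_512 = PreActResNet18().to(device)\n",
+    "embedder_512.load_state_dict(torch.load('checkpoint/' + feature_extractor_name, map_location='cpu'))\n",
+    "embedder_512.linear = nn.Identity()\n",
+    "for p in embedder_512.parameters():\n",
+    "    p.requires_grad = False\n",
+    "embedder_512.eval()"
+   ]
+  },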
device='cuda:0')\n", + "torch.Size([1, 4000, 1000])\n", + "torch.Size([1, 4000, 1000])\n", + "gia tri D: tensor([[[6.1884e+11, 6.6336e+10, 1.2148e+12, ..., 1.5724e+12,\n", + " 4.8911e+11, 4.4865e+12],\n", + " [4.5241e+12, 1.2097e+12, 6.5539e+10, ..., 1.5813e+10,\n", + " 4.1545e+11, 6.0413e+11],\n", + " [8.3258e+10, 1.7407e+12, 4.7208e+12, ..., 5.4037e+12,\n", + " 3.1323e+12, 1.0172e+13],\n", + " ...,\n", + " [1.4598e+12, 3.9091e+10, 4.6557e+11, ..., 6.9498e+11,\n", + " 7.7165e+10, 2.8753e+12],\n", + " [2.0739e+12, 1.7530e+11, 2.0537e+11, ..., 3.6443e+11,\n", + " 3.2155e+09, 2.1430e+12],\n", + " [1.4346e+11, 4.3406e+11, 2.2803e+12, ..., 2.7618e+12,\n", + " 1.2247e+12, 6.3812e+12]]], device='cuda:0')\n", + "torch.Size([1, 4000, 1000])\n" + ] + } + ], + "source": [ + "with torch.no_grad():\n", + " loss.debias = False\n", + " loss.potentials = True\n", + " print(Z1.shape, Z2.shape)\n", + " F_i, G_j = loss(Z1, Z2)" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [], + "source": [ + "π = [F_i, G_j]\n", + "dual_sol = π\n", + "for i in range(len(dual_sol)):\n", + " dual_sol[i] = dual_sol[i].to('cpu')" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "1000\n" + ] + } + ], + "source": [ + "print(training_size)" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [], + "source": [ + "calibrated_gradient = values(dual_sol, training_size)" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [], + "source": [ + "with open('calibrated_gradient.pkl', 'wb') as f:\n", + " pickle.dump(calibrated_gradient, f)" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [], + "source": [ + "with open('calibrated_gradient.pkl', 'rb') as f:\n", + " loaded = pickle.load(f)" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[-787359700.0, 3658300000.0, 3414521900.0, -761628860.0, 3264091600.0, -787799100.0, -799351500.0, -777636860.0, -670477000.0, -778479740.0, -766360770.0, -694445500.0, -773709600.0, -771062900.0, -777780000.0, -697117700.0, 2670000000.0, -768481000.0, 3159559700.0, -721409700.0, 3570951700.0, -774206140.0, -741824260.0, -734327100.0, -734817300.0, -789919040.0, -749694140.0, -690686140.0, -741808060.0, -777462140.0, -720458100.0, 3955789300.0, -720562940.0, -713150600.0, 3360567800.0, -726425800.0, -761662400.0, -774631940.0, -782463400.0, 3692124200.0, -790641300.0, -755597250.0, -775275900.0, -768129600.0, -748097660.0, 2961792000.0, -723380000.0, 4443694000.0, -792601600.0, -742540000.0, -724486100.0, -788722900.0, -476425100.0, -713548740.0, -740720300.0, -737659100.0, -724926200.0, 3719465000.0, -778126140.0, -762900900.0, -769574600.0, -802372500.0, -805926600.0, -816397100.0, -794836700.0, -738564900.0, -803942000.0, -769908900.0, -741420200.0, -737920960.0, -761706240.0, -759030850.0, -786805440.0, -756763840.0, -791396100.0, -773516350.0, -746544900.0, -759255700.0, -722296640.0, -768681300.0, -771222600.0, -814797800.0, 2917576200.0, -778363970.0, 3436430300.0, -787790200.0, -789222660.0, -804739260.0, -766082750.0, -811085440.0, -775014500.0, 3204250000.0, 3249477600.0, -603555840.0, -787188160.0, -751149000.0, -726633600.0, -774776770.0, -776355300.0, -668185400.0, -695152800.0, -796312260.0, -748100500.0, 
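+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The 785-column shapes printed for `Z1` and `Z2` are the flattened 28x28 images (784 values) with the class label appended as one extra coordinate. That label column is what lets the augmented OTDD cost combine the embedded-feature distance with a label-to-label term when `loss(Z1, Z2)` is evaluated with `loss.potentials = True` to obtain `F_i` and `G_j`. The cell below is an optional check of that layout; it assumes `Z1` and `Z2` from the cells above are still in memory."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# The last column of Z1/Z2 is the label appended to each flattened 28x28 image (784 + 1 = 785).\n",
+    "# Optional check; values come back as floats because Z1/Z2 are float tensors.\n",
+    "print(Z1.shape, Z2.shape)\n",
+    "print('labels carried in Z1:', torch.unique(Z1[:, -1]).tolist())\n",
+    "print('labels carried in Z2:', torch.unique(Z2[:, -1]).tolist())"
+   ]
+  },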
3356590000.0, -695138600.0, 3398654000.0, 3394100200.0, -822337660.0, -762170100.0, -800497540.0, -780250900.0, 3409910800.0, 3125612000.0, 2102406800.0, -795490560.0, -760655000.0, -791026500.0, -775446340.0, 3358762000.0, -767640260.0, 2926049300.0, -769811900.0, -785371500.0, -757402300.0, -739285760.0, -771198700.0, -767744400.0, -779869440.0, 3019980300.0, -744033660.0, -762641340.0, -795135400.0, 3203787300.0, -762056450.0, -800607100.0, -749294700.0, -798862000.0, -741501250.0, -785978500.0, -777791000.0, -758481800.0, -785873900.0, -713235900.0, -741267650.0, -770043900.0, -733734700.0, 3845601300.0, -779973000.0, 3172292000.0, 3470401500.0, -746640600.0, 3167988700.0, -775923100.0, -711902460.0, -738577200.0, -800678340.0, -783505500.0, 469208960.0, 3388788700.0, -760174400.0, -737957950.0, -785780000.0, -703334140.0, 3533981700.0, -785152400.0, -781162940.0, -733231550.0, -757094660.0, -793743800.0, -792304500.0, -791885250.0, -778032600.0, -786010750.0, -827333600.0, -809626200.0, -763983740.0, -766007700.0, -809264960.0, 3372189200.0, -783462500.0, -816558000.0, -768052030.0, 3363842000.0, 583064600.0, -816097000.0, -812505200.0, 3281921500.0, -784388540.0, -772710500.0, 3207329800.0, -767506370.0, -610967100.0, 3207731700.0, -766162800.0, -804511800.0, -769145400.0, 3205061600.0, -797490800.0, -767388100.0, -781967940.0, -810055200.0, -748630400.0, -789771200.0, 3214306800.0, -740088770.0, -761012600.0, 2783895000.0, -774420700.0, -789345000.0, -809184400.0, -708009660.0, -772730050.0, -763615400.0, -790154430.0, -620266000.0, -714880830.0, -770231600.0, 3341227500.0, 3234430000.0, -714173100.0, -795511500.0, -733877600.0, -787657600.0, -752637100.0, -786157700.0, -707612500.0, -753242000.0, -784294700.0, -767343000.0, -776754200.0, -752152640.0, -747148500.0, -798551200.0, -796140540.0, -764117500.0, -806536500.0, -749119200.0, -677619900.0, -766573950.0, -709444100.0, -777187650.0, -804929100.0, -677838000.0, -779561660.0, -810288450.0, -773969540.0, -707748300.0, -601450430.0, -774158000.0, 3005683200.0, -726332800.0, 4033518000.0, -795642800.0, -741373900.0, -759996700.0, -794123300.0, -749822800.0, 3064079400.0, 3111690800.0, -675506200.0, -495457660.0, -777631940.0, -779382700.0, -690646700.0, -791014600.0, -761498300.0, -790799800.0, -789378240.0, -758607600.0, -734285060.0, -766725900.0, 3407102500.0, -819550340.0, -781618300.0, -751145540.0, -792030600.0, -764887800.0, -785344400.0, -770173900.0, -734691200.0, -755942400.0, -786970050.0, -783872960.0, -767758800.0, 2984214000.0, -766299100.0, -764262600.0, -772730700.0, -784814400.0, -804950700.0, 3374380000.0, -806371400.0, -773415550.0, 3466176000.0, 3481033700.0, -811053760.0, -781009660.0, 3316967400.0, -743001600.0, -747240200.0, -767646900.0, -784332400.0, -716145300.0, -742686800.0, -760593340.0, -767762900.0, -783814460.0, 3412202000.0, -783491140.0, -773577340.0, -785525800.0, -769810000.0, -751548540.0, -628690560.0, 2786166300.0, -739922700.0, -760519940.0, -782976700.0, -771426100.0, -723411700.0, -777519040.0, -643399500.0, -798318600.0, -799059800.0, -764537900.0, 3364233700.0, -776606140.0, -798844740.0, -767700300.0, -749618800.0, 3335825400.0, 3281943600.0, -754301400.0, 3460058600.0, -800644700.0, 3338201600.0, -777931140.0, -717837400.0, -748042300.0, -721159740.0, -763386940.0, 3351648300.0, -751022400.0, -729053060.0, -787836100.0, -233768960.0, -756961540.0, -771731000.0, -786014850.0, -738729860.0, -794808700.0, -773906370.0, -766673400.0, -556486340.0, -747905860.0, -790911800.0, -782524300.0, 
-745749950.0, -799583740.0, -793348030.0, -769783360.0, -809692700.0, 3190721500.0, -728146940.0, -737553340.0, -763954940.0, -630119500.0, -787927040.0, -734922500.0, -789348100.0, -757146300.0, -790161500.0, -793091260.0, -731241300.0, -766226100.0, 2750578700.0, -758229760.0, -761041900.0, -770021250.0, -782379970.0, -751012800.0, -780222340.0, -769589250.0, -772904770.0, -761910400.0, -744832200.0, -794257100.0, -728107300.0, -784072450.0, -808745540.0, -766172000.0, -722933200.0, -742125300.0, 3521223700.0, -794760900.0, -781894900.0, 3416678400.0, -776853950.0, -769997300.0, -748232700.0, -799779260.0, -735059300.0, -729633500.0, -627110100.0, -788612030.0, -764064200.0, -758603650.0, -769202400.0, -764085000.0, -747751800.0, -776163700.0, -653762700.0, -789831300.0, -790608640.0, -808080450.0, 2946709500.0, -732840500.0, -684897150.0, -762108860.0, -687687300.0, -760193700.0, -755622340.0, -753416900.0, -796462300.0, 3463348200.0, -713287550.0, -734310800.0, -779357950.0, 2634532900.0, -765715100.0, 122052480.0, 3412327400.0, 69672180000.0, -776252400.0, -766967230.0, -720294850.0, -628076160.0, -789848200.0, -762756900.0, -793098940.0, -775504000.0, -811814100.0, 3252923000.0, -753781950.0, 3296692700.0, -765369100.0, 3901150200.0, -780102660.0, -601797100.0, -691885250.0, 3209568300.0, -776703800.0, -793260860.0, -768136770.0, -746152770.0, -687124400.0, -794086300.0, -782395300.0, -748316740.0, -815068160.0, -794713860.0, 3511636000.0, 3369763300.0, -777628600.0, -738607800.0, -757673400.0, -754132600.0, 3338899500.0, -830580030.0, -761182800.0, -749472450.0, -693929150.0, -775314750.0, -750280800.0, -749749400.0, -740619840.0, -783499460.0, 3584875000.0, 3629163000.0, -794204900.0, -653158460.0, -752769100.0, -746015900.0, -745269600.0, -684810940.0, 3468031000.0, -729811400.0, -785331300.0, -721960400.0, -760213950.0, -790740100.0, -756772900.0, -805308900.0, -751961100.0, -799707500.0, -742697400.0, -785249300.0, -794573600.0, -784661200.0, -784054900.0, 3693731300.0, -800496500.0, -793856260.0, -630923200.0, -718876860.0, 3103576600.0, -649227900.0, -784249800.0, -794297700.0, -735385540.0, -780581570.0, 3355611100.0, -814375900.0, -762177800.0, -794555260.0, -797891840.0, -770163140.0, -763796350.0, -801660600.0, 3462867500.0, -767750340.0, 3236338200.0, -739378050.0, -662883140.0, -766892200.0, -730766800.0, -802445100.0, -784833800.0, -794095500.0, -723189600.0, -768504770.0, -772138050.0, -765241700.0, -787394050.0, -712206100.0, -750494900.0, -749122940.0, -713537150.0, -776630400.0, -682469300.0, -750219600.0, -695611900.0, -784993000.0, -763495300.0, -793074240.0, -703204600.0, -798876740.0, 3151817700.0, -773084160.0, 3176090600.0, -764917000.0, -779100600.0, -806232200.0, -737166500.0, 2606588400.0, -783087000.0, -779106940.0, -762313500.0, -803242800.0, -665074900.0, 3630165000.0, -728130300.0, -796392200.0, -601053950.0, -776850370.0, -798946560.0, -768052400.0, -730623300.0, -711024000.0, -765403260.0, -760491100.0, -782927900.0, -699635140.0, -798237250.0, 3133651000.0, -801919040.0, -701924500.0, 3141167000.0, -750650700.0, -646487700.0, -744309300.0, -766098900.0, -698737660.0, -762163700.0, -635191300.0, -808477500.0, -785499400.0, -765840700.0, -777770560.0, -779253060.0, -763160640.0, -781173300.0, 3197474300.0, -755671940.0, -713631040.0, 3218118100.0, 3362132000.0, 3480057300.0, -778464500.0, -762353400.0, -764225400.0, -782915460.0, -702299650.0, -771535550.0, -660409700.0, 3597165600.0, -799857700.0, -782984260.0, -795406400.0, -771817200.0, 
-795112960.0, -712181200.0, 3464757800.0, -742500740.0, -771986900.0, -811167400.0, -763349760.0, -771394700.0, -802726600.0, 3458549800.0, -658577100.0, -783299460.0, -759677630.0, -775203800.0, -726570700.0, 3182888000.0, -746408900.0, 3366912500.0, -670285600.0, -763449340.0, -801789400.0, -776754050.0, -804271800.0, -756715900.0, -758302300.0, 3576539600.0, -787813500.0, -786499460.0, -786069570.0, -735085200.0, -775917300.0, -719037600.0, -797958700.0, -761517100.0, 2930614300.0, -735057400.0, -811641800.0, -690137700.0, -715567940.0, 3012814300.0, -778959800.0, -779867400.0, -753965400.0, -727270900.0, -706960600.0, -715314750.0, -740813200.0, -770734340.0, -771386900.0, -734228030.0, 2997066800.0, -770649100.0, -781748350.0, -786283400.0, -715471500.0, -586971140.0, -730052300.0, -653110400.0, -751588350.0, -746242500.0, -818856640.0, -787678340.0, -697275900.0, -790514940.0, -764325300.0, -779700200.0, -713285250.0, -732799900.0, -752271940.0, -746040260.0, 3711263200.0, -773801300.0, -619881700.0, -791002400.0, -774266800.0, 3843897300.0, -781095230.0, -756357060.0, -745390900.0, -791032100.0, -734482000.0, -725273900.0, -780471700.0, -499381570.0, -799480000.0, -799408600.0, -729357000.0, -673492030.0, -767781400.0, -742977540.0, -785896960.0, -776892860.0, -673278500.0, -764835400.0, 3089685000.0, -783735000.0, 3603912200.0, -798654850.0, -807194430.0, -718154900.0, -791730940.0, 3409813500.0, -633524740.0, -720069000.0, -756572300.0, 2525629400.0, 3284020700.0, -753256200.0, -783176300.0, 3411553300.0, -769951400.0, -772119500.0, -803140350.0, -746059650.0, -745125760.0, -320520200.0, -736806300.0, 3967600000.0, -736976260.0, 3316149200.0, -774824300.0, -785079550.0, -728556900.0, -799627600.0, -769708740.0, -798591170.0, -780362750.0, 2910360600.0, -763062300.0, -717082430.0, -751904700.0, -716079700.0, -813714370.0, -797642900.0, -754210200.0, -711639040.0, -768609150.0, 3767735800.0, -773858200.0, -777911900.0, 3075124700.0, 3071200800.0, -795336260.0, -737164500.0, -731844000.0, -758744600.0, 3435028000.0, -714037600.0, 2387837400.0, 3193111600.0, -775648640.0, 3381067800.0, 4644369000.0, -794228800.0, -739821700.0, -775057340.0, -731925500.0, -808966100.0, -806118400.0, -766507200.0, -782444700.0, -750515840.0, -755034700.0, -743473600.0, -796890240.0, -639111100.0, 1542991500.0, -788517570.0, -781795600.0, -796887550.0, -790788000.0, 3806365200.0, -781641200.0, -740913800.0, 3250834400.0, -752935740.0, -504565570.0, -773233000.0, -788815100.0, -814723900.0, -637966300.0, -759826500.0, -725092500.0, -763498500.0, 3268506600.0, -768479940.0, -809098800.0, -765564200.0, -786814400.0, -810046100.0, -754673340.0, 3467080700.0, -750972100.0, 3108231700.0, -754895200.0, -791734800.0, -762025300.0, -735766340.0, 3236222000.0, -775347460.0, -742843600.0, -761550600.0, -772784060.0, -785089100.0, -686981440.0, -792085950.0, -807429440.0, -787335600.0, -700722800.0, -714598400.0, -761768800.0, 3128989700.0, -769798100.0, -802648060.0, -780654200.0, 1645795700.0, -789858240.0, -761389630.0, -759684540.0, -775882900.0, -778593340.0, 4280734700.0, -755847230.0, -748347800.0, -699001100.0, 3462027800.0, 3374563300.0, -789734660.0, -821355840.0, -585160300.0, 3391676000.0, -764010900.0, -791578050.0, -744262800.0, -773533100.0, -645852540.0, -758789200.0, -772553860.0, -792068300.0, -803699650.0, -741688450.0, 2910360600.0, -758267900.0, 3594157000.0, -744397700.0, -772379700.0, -781171400.0, -722459460.0, -759161150.0, -778931600.0, -736004800.0, -797484740.0, -209095680.0, 
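+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The long list printed in this cell is `loaded`, one calibrated gradient per training point: most entries sit in a tight negative band around -7e8 to -8e8, while a clearly separated group reaches into the billions, and it is that high-value group that LAVA singles out as likely low-quality examples. The cell below is an optional sketch for pulling out the top `portion` fraction by value (it assumes `loaded`, `portion` and `training_size` from earlier cells, and `flagged` is just an illustrative name); the notebook itself does the ranking with `sort_and_keep_indices` in the next cell."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Optional: indices of the largest calibrated gradients (top `portion` fraction).\n",
+    "# Assumes `loaded`, `portion` and `training_size` from earlier cells.\n",
+    "import numpy as np\n",
+    "\n",
+    "vals = np.asarray(loaded)\n",
+    "k = int(portion * training_size)\n",
+    "flagged = np.argsort(vals)[-k:][::-1]\n",
+    "print(k, 'flagged indices, first 10 by value:', flagged[:10].tolist())"
+   ]
+  },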
-542500400.0, -775238400.0, -753518460.0, 3502064600.0, 3633917000.0, -765492860.0, -800881540.0, 3431393800.0, 3598826500.0, -786964030.0, -772524860.0, -786941760.0, 3391915500.0, -773058940.0, -793895230.0, 3485116000.0, -669029000.0, -719075200.0, 3340158000.0, 3397297200.0, -744358300.0, 2690558500.0, 3364120600.0, -785707840.0, -703828540.0, -687792960.0, -784414500.0, -791072450.0, 3434736000.0, -757821950.0, -664891800.0, 3349571600.0, -761962500.0, -634457340.0, 3591550500.0, -784689700.0, 3401236000.0, -720094100.0, -774062800.0, -763613300.0, -691229630.0, -768489100.0, 3139972000.0, -792325440.0, -776964540.0, -747465200.0, -771522560.0, -774430140.0, 3331874300.0, -773588600.0, -788004300.0, -657228700.0, 3297213400.0, -741595800.0, -733799600.0, -686103100.0, 3295691300.0, 3525328000.0, 3379195400.0, -739151600.0, -794746750.0, 2896210000.0, -773379000.0, 3119820800.0, 3369477600.0, -755491260.0, -762349300.0, -806311100.0, -791964740.0, 2412174300.0, -604918800.0, -585776060.0, 3217965600.0, -784737400.0, -752619800.0, -664709700.0, -730630100.0, -769878500.0, -703661100.0, -775096960.0, -758645900.0, -753918800.0, -743835100.0, -727433800.0, -788348860.0, -575917900.0, -786001540.0, -767915000.0, -789927230.0, -777224900.0, -793435650.0, 3388377000.0, 3458445800.0, 3142306800.0, 3488222200.0, -768346750.0, -738638700.0, -789971100.0, -761866940.0, -801682240.0, -558078000.0, -762106940.0, -779215360.0, -744430600.0, -759799600.0, -773319300.0, -808841150.0, -767605700.0, -769629950.0, -784393660.0, -763801660.0, -747908100.0, -797578050.0, -775320000.0, -795814850.0, -781433800.0, -743918700.0, 3306087000.0, -787538240.0, 3155844000.0, -801296830.0, -811297300.0, -763881700.0, -778656800.0, -790225660.0, -758682100.0, -752401200.0, -787870500.0, -703431550.0, -789570560.0, -765128300.0, -766432640.0, -769569600.0, -763499140.0, 3445255700.0, -808960450.0, -796493060.0]\n" + ] + } + ], + "source": [ + "print(loaded)" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[array([432], dtype=int64), array([762], dtype=int64), array([47], dtype=int64), array([831], dtype=int64), array([251], dtype=int64), array([727], dtype=int64), array([31], dtype=int64), array([446], dtype=int64), array([146], dtype=int64), array([685], dtype=int64), array([781], dtype=int64), array([747], dtype=int64), array([57], dtype=int64), array([680], dtype=int64), array([500], dtype=int64), array([39], dtype=int64), array([1], dtype=int64), array([867], dtype=int64), array([560], dtype=int64), array([478], dtype=int64), array([706], dtype=int64), array([871], dtype=int64), array([605], dtype=int64), array([853], dtype=int64), array([897], dtype=int64), array([477], dtype=int64), array([635], dtype=int64), array([20], dtype=int64), array([163], dtype=int64), array([920], dtype=int64), array([393], dtype=int64), array([461], dtype=int64), array([866], dtype=int64), array([957], dtype=int64), array([878], dtype=int64), array([294], dtype=int64), array([597], dtype=int64), array([149], dtype=int64), array([485], dtype=int64), array([801], dtype=int64), array([293], dtype=int64), array([612], dtype=int64), array([424], dtype=int64), array([519], dtype=int64), array([835], dtype=int64), array([333], dtype=int64), array([619], dtype=int64), array([955], dtype=int64), array([997], dtype=int64), array([84], dtype=int64), array([756], dtype=int64), array([891], dtype=int64), array([870], dtype=int64), 
array([396], dtype=int64), array([2], dtype=int64), array([431], dtype=int64), array([307], dtype=int64), array([719], dtype=int64), array([111], dtype=int64), array([711], dtype=int64), array([271], dtype=int64), array([899], dtype=int64), array([105], dtype=int64), array([882], dtype=int64), array([106], dtype=int64), array([875], dtype=int64), array([840], dtype=int64), array([158], dtype=int64), array([954], dtype=int64), array([761], dtype=int64), array([921], dtype=int64), array([836], dtype=int64), array([290], dtype=int64), array([178], dtype=int64), array([462], dtype=int64), array([927], dtype=int64), array([627], dtype=int64), array([325], dtype=int64), array([885], dtype=int64), array([182], dtype=int64), array([596], dtype=int64), array([34], dtype=int64), array([118], dtype=int64), array([103], dtype=int64), array([511], dtype=int64), array([341], dtype=int64), array([894], dtype=int64), array([217], dtype=int64), array([881], dtype=int64), array([467], dtype=int64), array([335], dtype=int64), array([330], dtype=int64), array([911], dtype=int64), array([297], dtype=int64), array([729], dtype=int64), array([980], dtype=int64), array([915], dtype=int64), array([444], dtype=int64), array([919], dtype=int64), array([716], dtype=int64), array([331], dtype=int64), array([186], dtype=int64), array([794], dtype=int64), array([4], dtype=int64), array([442], dtype=int64), array([784], dtype=int64), array([92], dtype=int64), array([521], dtype=int64), array([808], dtype=int64), array([218], dtype=int64), array([595], dtype=int64), array([935], dtype=int64), array([203], dtype=int64), array([450], dtype=int64), array([192], dtype=int64), array([189], dtype=int64), array([196], dtype=int64), array([91], dtype=int64), array([132], dtype=int64), array([592], dtype=int64), array([759], dtype=int64), array([362], dtype=int64), array([625], dtype=int64), array([549], dtype=int64), array([148], dtype=int64), array([151], dtype=int64), array([18], dtype=int64), array([982], dtype=int64), array([547], dtype=int64), array([956], dtype=int64), array([577], dtype=int64), array([905], dtype=int64), array([574], dtype=int64), array([821], dtype=int64), array([112], dtype=int64), array([926], dtype=int64), array([258], dtype=int64), array([803], dtype=int64), array([505], dtype=int64), array([704], dtype=int64), array([750], dtype=int64), array([751], dtype=int64), array([257], dtype=int64), array([128], dtype=int64), array([649], dtype=int64), array([249], dtype=int64), array([660], dtype=int64), array([284], dtype=int64), array([45], dtype=int64), array([415], dtype=int64), array([644], dtype=int64), array([120], dtype=int64), array([82], dtype=int64), array([737, 851], dtype=int64), array([737, 851], dtype=int64), array([924], dtype=int64), array([314], dtype=int64), array([206], dtype=int64), array([375], dtype=int64), array([884], dtype=int64), array([16], dtype=int64), array([428], dtype=int64), array([554], dtype=int64), array([715], dtype=int64), array([932], dtype=int64), array([758], dtype=int64), array([113], dtype=int64), array([825], dtype=int64), array([776], dtype=int64), array([183], dtype=int64), array([157], dtype=int64), array([430], dtype=int64), array([862], dtype=int64), array([345], dtype=int64), array([725], dtype=int64), array([52], dtype=int64), array([260], dtype=int64), array([693], dtype=int64), array([786], dtype=int64), array([863], dtype=int64), array([353], dtype=int64), array([963], dtype=int64), array([948], dtype=int64), array([839], dtype=int64), array([934], 
dtype=int64), array([665], dtype=int64), array([563], dtype=int64), array([247], dtype=int64), array([448], dtype=int64), array([93], dtype=int64), array([933], dtype=int64), array([191], dtype=int64), array([682], dtype=int64), array([214], dtype=int64), array([403], dtype=int64), array([436], dtype=int64), array([313], dtype=int64), array([366], dtype=int64), array([503], dtype=int64), array([712], dtype=int64), array([896], dtype=int64), array([584], dtype=int64), array([790], dtype=int64), array([775], dtype=int64), array([321], dtype=int64), array([845], dtype=int64), array([579], dtype=int64), array([506], dtype=int64), array([667], dtype=int64), array([480], dtype=int64), array([411], dtype=int64), array([914], dtype=int64), array([620], dtype=int64), array([604], dtype=int64), array([523], dtype=int64), array([938], dtype=int64), array([893], dtype=int64), array([559], dtype=int64), array([99], dtype=int64), array([879], dtype=int64), array([628], dtype=int64), array([8], dtype=int64), array([702], dtype=int64), array([697], dtype=int64), array([259], dtype=int64), array([237], dtype=int64), array([242], dtype=int64), array([539], dtype=int64), array([484], dtype=int64), array([417], dtype=int64), array([918], dtype=int64), array([814], dtype=int64), array([455], dtype=int64), array([419], dtype=int64), array([888], dtype=int64), array([647], dtype=int64), array([263], dtype=int64), array([27], dtype=int64), array([903], dtype=int64), array([449], dtype=int64), array([471], dtype=int64), array([11], dtype=int64), array([104], dtype=int64), array([100], dtype=int64), array([541], dtype=int64), array([15], dtype=int64), array([672], dtype=int64), array([582], dtype=int64), array([834], dtype=int64), array([572], dtype=int64), array([818], dtype=int64), array([576], dtype=int64), array([602], dtype=int64), array([545], dtype=int64), array([162], dtype=int64), array([991], dtype=int64), array([941], dtype=int64), array([887], dtype=int64), array([654], dtype=int64), array([225], dtype=int64), array([246], dtype=int64), array([210], dtype=int64), array([239], dtype=int64), array([568], dtype=int64), array([745], dtype=int64), array([153], dtype=int64), array([611], dtype=int64), array([534], dtype=int64), array([33], dtype=int64), array([142], dtype=int64), array([676], dtype=int64), array([425], dtype=int64), array([537], dtype=int64), array([53], dtype=int64), array([594], dtype=int64), array([757], dtype=int64), array([219], dtype=int64), array([819], dtype=int64), array([215], dtype=int64), array([655], dtype=int64), array([664], dtype=int64), array([648], dtype=int64), array([741], dtype=int64), array([302], dtype=int64), array([739], dtype=int64), array([337], dtype=int64), array([709], dtype=int64), array([504], dtype=int64), array([641], dtype=int64), array([880], dtype=int64), array([713], dtype=int64), array([900], dtype=int64), array([435], dtype=int64), array([30], dtype=int64), array([32], dtype=int64), array([339], dtype=int64), array([19], dtype=int64), array([488], dtype=int64), array([78], dtype=int64), array([857], dtype=int64), array([391], dtype=int64), array([529], dtype=int64), array([46], dtype=int64), array([319], dtype=int64), array([50], dtype=int64), array([56], dtype=int64), array([792], dtype=int64), array([691], dtype=int64), array([250], dtype=int64), array([35], dtype=int64), array([624], dtype=int64), array([96], dtype=int64), array([653], dtype=int64), array([946], dtype=int64), array([387], dtype=int64), array([561], dtype=int64), array([363], 
dtype=int64), array([732], dtype=int64), array([343], dtype=int64), array([696], dtype=int64), array([402], dtype=int64), array([486], dtype=int64), array([666], dtype=int64), array([567], dtype=int64), array([939], dtype=int64), array([525], dtype=int64), array([373], dtype=int64), array([754], dtype=int64), array([766], dtype=int64), array([677], dtype=int64), array([416], dtype=int64), array([166], dtype=int64), array([145], dtype=int64), array([917], dtype=int64), array([221], dtype=int64), array([659], dtype=int64), array([269], dtype=int64), array([426], dtype=int64), array([23], dtype=int64), array([690], dtype=int64), array([279], dtype=int64), array([24], dtype=int64), array([368], dtype=int64), array([645], dtype=int64), array([401], dtype=int64), array([639], dtype=int64), array([509], dtype=int64), array([807], dtype=int64), array([860], dtype=int64), array([726], dtype=int64), array([728], dtype=int64), array([753], dtype=int64), array([553], dtype=int64), array([364], dtype=int64), array([55], dtype=int64), array([69], dtype=int64), array([160], dtype=int64), array([65], dtype=int64), array([154], dtype=int64), array([464], dtype=int64), array([959], dtype=int64), array([349], dtype=int64), array([922], dtype=int64), array([124], dtype=int64), array([522], dtype=int64), array([764], dtype=int64), array([315], dtype=int64), array([204], dtype=int64), array([475], dtype=int64), array([54], dtype=int64), array([656], dtype=int64), array([783], dtype=int64), array([143], dtype=int64), array([253], dtype=int64), array([68], dtype=int64), array([137], dtype=int64), array([916], dtype=int64), array([850], dtype=int64), array([28], dtype=int64), array([22], dtype=int64), array([392], dtype=int64), array([613], dtype=int64), array([49], dtype=int64), array([303], dtype=int64), array([495], dtype=int64), array([810], dtype=int64), array([699], dtype=int64), array([298], dtype=int64), array([773], dtype=int64), array([945], dtype=int64), array([979], dtype=int64), array([129], dtype=int64), array([843], dtype=int64), array([580], dtype=int64), array([883], dtype=int64), array([854], dtype=int64), array([966], dtype=int64), array([385], dtype=int64), array([724], dtype=int64), array([483], dtype=int64), array([688], dtype=int64), array([357], dtype=int64), array([482], dtype=int64), array([679], dtype=int64), array([723], dtype=int64), array([454], dtype=int64), array([669], dtype=int64), array([626], dtype=int64), array([76], dtype=int64), array([150], dtype=int64), array([231], dtype=int64), array([299], dtype=int64), array([908], dtype=int64), array([409], dtype=int64), array([354], dtype=int64), array([974], dtype=int64), array([338], dtype=int64), array([44], dtype=int64), array([102], dtype=int64), array([399], dtype=int64), array([458], dtype=int64), array([833], dtype=int64), array([201], dtype=int64), array([236], dtype=int64), array([536], dtype=int64), array([135], dtype=int64), array([470], dtype=int64), array([329], dtype=int64), array([26], dtype=int64), array([474], dtype=int64), array([256], dtype=int64), array([540], dtype=int64), array([473], dtype=int64), array([535], dtype=int64), array([771], dtype=int64), array([578], dtype=int64), array([802], dtype=int64), array([380], dtype=int64), array([342], dtype=int64), array([274], dtype=int64), array([95], dtype=int64), array([312], dtype=int64), array([668], dtype=int64), array([740], dtype=int64), array([493], dtype=int64), array([230], dtype=int64), array([678], dtype=int64), array([989], dtype=int64), array([937], 
dtype=int64), array([223], dtype=int64), array([481], dtype=int64), array([785], dtype=int64), array([226], dtype=int64), array([717], dtype=int64), array([422], dtype=int64), array([865], dtype=int64), array([443], dtype=int64), array([944], dtype=int64), array([652], dtype=int64), array([466], dtype=int64), array([744], dtype=int64), array([332], dtype=int64), array([800], dtype=int64), array([804], dtype=int64), array([772], dtype=int64), array([928], dtype=int64), array([41], dtype=int64), array([421], dtype=int64), array([593], dtype=int64), array([832], dtype=int64), array([280], dtype=int64), array([687], dtype=int64), array([714], dtype=int64), array([633], dtype=int64), array([73], dtype=int64), array([491], dtype=int64), array([346], dtype=int64), array([167], dtype=int64), array([370], dtype=int64), array([123], dtype=int64), array([465], dtype=int64), array([892], dtype=int64), array([376], dtype=int64), array([852], dtype=int64), array([634], dtype=int64), array([140], dtype=int64), array([406], dtype=int64), array([268], dtype=int64), array([943], dtype=int64), array([988], dtype=int64), array([755], dtype=int64), array([846], dtype=int64), array([71], dtype=int64), array([858], dtype=int64), array([77], dtype=int64), array([622], dtype=int64), array([828], dtype=int64), array([967], dtype=int64), array([791], dtype=int64), array([254], dtype=int64), array([159], dtype=int64), array([420], dtype=int64), array([489], dtype=int64), array([570], dtype=int64), array([316], dtype=int64), array([304], dtype=int64), array([115], dtype=int64), array([205], dtype=int64), array([377], dtype=int64), array([469], dtype=int64), array([827], dtype=int64), array([265], dtype=int64), array([643], dtype=int64), array([811], dtype=int64), array([3], dtype=int64), array([36], dtype=int64), array([70], dtype=int64), array([820], dtype=int64), array([961], dtype=int64), array([384], dtype=int64), array([895], dtype=int64), array([806], dtype=int64), array([133], dtype=int64), array([964], dtype=int64), array([418], dtype=int64), array([583], dtype=int64), array([108], dtype=int64), array([513], dtype=int64), array([557], dtype=int64), array([929], dtype=int64), array([599], dtype=int64), array([130], dtype=int64), array([438], dtype=int64), array([59], dtype=int64), array([738], dtype=int64), array([590], dtype=int64), array([616], dtype=int64), array([340], dtype=int64), array([629], dtype=int64), array([543], dtype=int64), array([793], dtype=int64), array([996], dtype=int64), array([902], dtype=int64), array([212], dtype=int64), array([517], dtype=int64), array([973], dtype=int64), array([985], dtype=int64), array([365], dtype=int64), array([175], dtype=int64), array([841], dtype=int64), array([405], dtype=int64), array([408], dtype=int64), array([234], dtype=int64), array([600], dtype=int64), array([286], dtype=int64), array([674], dtype=int64), array([324], dtype=int64), array([703], dtype=int64), array([276], dtype=int64), array([550], dtype=int64), array([993], dtype=int64), array([532], dtype=int64), array([445], dtype=int64), array([569], dtype=int64), array([868], dtype=int64), array([797], dtype=int64), array([429], dtype=int64), array([587], dtype=int64), array([176], dtype=int64), array([88], dtype=int64), array([581], dtype=int64), array([193], dtype=int64), array([390], dtype=int64), array([374], dtype=int64), array([285], dtype=int64), array([10], dtype=int64), array([994], dtype=int64), array([769], dtype=int64), array([238], dtype=int64), array([352], dtype=int64), array([270], 
dtype=int64), array([524], dtype=int64), array([434], dtype=int64), array([228], dtype=int64), array([198], dtype=int64), array([190], dtype=int64), array([970], dtype=int64), array([119], dtype=int64), array([300], dtype=int64), array([328], dtype=int64), array([126], dtype=int64), array([520], dtype=int64), array([283], dtype=int64), array([305], dtype=int64), array([698], dtype=int64), array([950], dtype=int64), array([181], dtype=int64), array([566], dtype=int64), array([43], dtype=int64), array([453], dtype=int64), array([958], dtype=int64), array([795], dtype=int64), array([17], dtype=int64), array([904], dtype=int64), array([530], dtype=int64), array([746], dtype=int64), array([79], dtype=int64), array([195], dtype=int64), array([407], dtype=int64), array([995], dtype=int64), array([60], dtype=int64), array([382], dtype=int64), array([971], dtype=int64), array([734], dtype=int64), array([360], dtype=int64), array([822], dtype=int64), array([311], dtype=int64), array([121], dtype=int64), array([940], dtype=int64), array([67], dtype=int64), array([720], dtype=int64), array([398], dtype=int64), array([378], dtype=int64), array([144], dtype=int64), array([516], dtype=int64), array([278], dtype=int64), array([216], dtype=int64), array([661], dtype=int64), array([657], dtype=int64), array([13], dtype=int64), array([125], dtype=int64), array([80], dtype=int64), array([658], dtype=int64), array([617], dtype=int64), array([318], dtype=int64), array([909], dtype=int64), array([603], dtype=int64), array([347], dtype=int64), array([609], dtype=int64), array([614], dtype=int64), array([721], dtype=int64), array([531], dtype=int64), array([855], dtype=int64), array([873], dtype=int64), array([847], dtype=int64), array([188], dtype=int64), array([211], dtype=int64), array([287], dtype=int64), array([812], dtype=int64), array([383], dtype=int64), array([876], dtype=int64), array([548], dtype=int64), array([787], dtype=int64), array([968], dtype=int64), array([925], dtype=int64), array([292], dtype=int64), array([75], dtype=int64), array([844], dtype=int64), array([309], dtype=int64), array([912], dtype=int64), array([12], dtype=int64), array([681], dtype=int64), array([748], dtype=int64), array([351], dtype=int64), array([245], dtype=int64), array([901], dtype=int64), array([248], dtype=int64), array([21], dtype=int64), array([684], dtype=int64), array([207], dtype=int64), array([910], dtype=int64), array([37], dtype=int64), array([97], dtype=int64), array([730], dtype=int64), array([90], dtype=int64), array([765], dtype=int64), array([942], dtype=int64), array([623], dtype=int64), array([864], dtype=int64), array([42], dtype=int64), array([472], dtype=int64), array([976], dtype=int64), array([809], dtype=int64), array([117], dtype=int64), array([440], dtype=int64), array([760], dtype=int64), array([829], dtype=int64), array([640], dtype=int64), array([152], dtype=int64), array([410], dtype=int64), array([433], dtype=int64), array([98], dtype=int64), array([326], dtype=int64), array([538], dtype=int64), array([451], dtype=int64), array([631], dtype=int64), array([229], dtype=int64), array([564], dtype=int64), array([397], dtype=int64), array([701], dtype=int64), array([907], dtype=int64), array([240], dtype=int64), array([952], dtype=int64), array([29], dtype=int64), array([320], dtype=int64), array([463], dtype=int64), array([261], dtype=int64), array([7], dtype=int64), array([588], dtype=int64), array([14], dtype=int64), array([139], dtype=int64), array([749], dtype=int64), array([336], 
dtype=int64), array([171], dtype=int64), array([58], dtype=int64), array([83], dtype=int64), array([598], dtype=int64), array([9], dtype=int64), array([830], dtype=int64), array([986], dtype=int64), array([859], dtype=int64), array([650], dtype=int64), array([551], dtype=int64), array([556], dtype=int64), array([965], dtype=int64), array([589], dtype=int64), array([427], dtype=int64), array([262], dtype=int64), array([243], dtype=int64), array([675], dtype=int64), array([651], dtype=int64), array([127], dtype=int64), array([147], dtype=int64), array([447], dtype=int64), array([381], dtype=int64), array([110], dtype=int64), array([736], dtype=int64), array([692], dtype=int64), array([510], dtype=int64), array([824], dtype=int64), array([296], dtype=int64), array([686], dtype=int64), array([165], dtype=int64), array([856], dtype=int64), array([591], dtype=int64), array([978], dtype=int64), array([273], dtype=int64), array([782], dtype=int64), array([662], dtype=int64), array([778], dtype=int64), array([395], dtype=int64), array([199], dtype=int64), array([379], dtype=int64), array([457], dtype=int64), array([770], dtype=int64), array([38], dtype=int64), array([356], dtype=int64), array([601], dtype=int64), array([571], dtype=int64), array([317], dtype=int64), array([607], dtype=int64), array([555], dtype=int64), array([718], dtype=int64), array([621], dtype=int64), array([179], dtype=int64), array([308], dtype=int64), array([476], dtype=int64), array([156], dtype=int64), array([705], dtype=int64), array([306], dtype=int64), array([282], dtype=int64), array([499], dtype=int64), array([388], dtype=int64), array([507], dtype=int64), array([227], dtype=int64), array([301], dtype=int64), array([187], dtype=int64), array([972], dtype=int64), array([889], dtype=int64), array([498], dtype=int64), array([898], dtype=int64), array([936], dtype=int64), array([288], dtype=int64), array([527], dtype=int64), array([542], dtype=int64), array([731], dtype=int64), array([813], dtype=int64), array([164], dtype=int64), array([496], dtype=int64), array([487], dtype=int64), array([277], dtype=int64), array([122], dtype=int64), array([586], dtype=int64), array([310], dtype=int64), array([886], dtype=int64), array([161], dtype=int64), array([141], dtype=int64), array([700], dtype=int64), array([138], dtype=int64), array([949], dtype=int64), array([172], dtype=int64), array([348], dtype=int64), array([638], dtype=int64), array([224], dtype=int64), array([663], dtype=int64), array([637], dtype=int64), array([72], dtype=int64), array([798], dtype=int64), array([874], dtype=int64), array([872], dtype=int64), array([281], dtype=int64), array([94], dtype=int64), array([817], dtype=int64), array([0], dtype=int64), array([533], dtype=int64), array([981], dtype=int64), array([222], dtype=int64), array([671], dtype=int64), array([85], dtype=int64), array([5], dtype=int64), array([636], dtype=int64), array([344], dtype=int64), array([990], dtype=int64), array([367], dtype=int64), array([913], dtype=int64), array([947], dtype=int64), array([777], dtype=int64), array([404], dtype=int64), array([51], dtype=int64), array([788], dtype=int64), array([86], dtype=int64), array([208], dtype=int64), array([369], dtype=int64), array([267], dtype=int64), array([992], dtype=int64), array([837], dtype=int64), array([202], dtype=int64), array([412], dtype=int64), array([437], dtype=int64), array([826], dtype=int64), array([25], dtype=int64), array([951], dtype=int64), array([960], dtype=int64), array([213], dtype=int64), array([371], 
dtype=int64), array([987], dtype=int64), array([673], dtype=int64), array([413], dtype=int64), array([40], dtype=int64), array([490], dtype=int64), array([780], dtype=int64), array([266], dtype=int64), array([355], dtype=int64), array([683], dtype=int64), array([264], dtype=int64), array([116], dtype=int64), array([689], dtype=int64), array([890], dtype=int64), array([74], dtype=int64), array([842], dtype=int64), array([710], dtype=int64), array([805], dtype=int64), array([170], dtype=int64), array([931], dtype=int64), array([275], dtype=int64), array([848], dtype=int64), array([815], dtype=int64), array([169], dtype=int64), array([906], dtype=int64), array([48], dtype=int64), array([544], dtype=int64), array([372], dtype=int64), array([439], dtype=int64), array([452], dtype=int64), array([359], dtype=int64), array([953], dtype=int64), array([168], dtype=int64), array([502], dtype=int64), array([877], dtype=int64), array([456], dtype=int64), array([528], dtype=int64), array([255], dtype=int64), array([479], dtype=int64), array([763], dtype=int64), array([386], dtype=int64), array([508], dtype=int64), array([514], dtype=int64), array([497], dtype=int64), array([460], dtype=int64), array([923], dtype=int64), array([394], dtype=int64), array([350], dtype=int64), array([64], dtype=int64), array([610], dtype=int64), array([131], dtype=int64), array([752], dtype=int64), array([608], dtype=int64), array([114], dtype=int64), array([220], dtype=int64), array([252], dtype=int64), array([977], dtype=int64), array([233], dtype=int64), array([101], dtype=int64), array([562], dtype=int64), array([423], dtype=int64), array([999], dtype=int64), array([779], dtype=int64), array([774], dtype=int64), array([861], dtype=int64), array([197], dtype=int64), array([975], dtype=int64), array([743], dtype=int64), array([515], dtype=int64), array([642], dtype=int64), array([573], dtype=int64), array([322], dtype=int64), array([232], dtype=int64), array([735], dtype=int64), array([707], dtype=int64), array([327], dtype=int64), array([136], dtype=int64), array([546], dtype=int64), array([565], dtype=int64), array([323], dtype=int64), array([6], dtype=int64), array([695], dtype=int64), array([694], dtype=int64), array([358], dtype=int64), array([733], dtype=int64), array([494], dtype=int64), array([400], dtype=int64), array([606], dtype=int64), array([501], dtype=int64), array([109], dtype=int64), array([134], dtype=int64), array([334], dtype=int64), array([155], dtype=int64), array([869], dtype=int64), array([983], dtype=int64), array([518], dtype=int64), array([962], dtype=int64), array([630], dtype=int64), array([575], dtype=int64), array([61], dtype=int64), array([526], dtype=int64), array([823], dtype=int64), array([618], dtype=int64), array([722], dtype=int64), array([558], dtype=int64), array([849], dtype=int64), array([66], dtype=int64), array([632], dtype=int64), array([194], dtype=int64), array([87], dtype=int64), array([241], dtype=int64), array([289], dtype=int64), array([492], dtype=int64), array([62], dtype=int64), array([768], dtype=int64), array([552], dtype=int64), array([930], dtype=int64), array([291], dtype=int64), array([235], dtype=int64), array([708], dtype=int64), array([816], dtype=int64), array([414], dtype=int64), array([585], dtype=int64), array([389], dtype=int64), array([969], dtype=int64), array([998], dtype=int64), array([767], dtype=int64), array([796], dtype=int64), array([209], dtype=int64), array([177], dtype=int64), array([174], dtype=int64), array([361], dtype=int64), array([799], 
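+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The output printed in this cell is `sorted_gradient_ind`: for every rank (largest calibrated gradient first) `sort_and_keep_indices` keeps the matching original training index as a one-element `int64` array, with ties collapsing into a shared array such as `array([737, 851])`. For downstream indexing a flat array is usually more convenient; the cell below is an optional conversion sketch (the name `ranked` is illustrative) that assumes `sorted_gradient_ind` from the cell producing this output."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Optional: flatten the ranked indices into a single 1-D array, keeping rank order\n",
+    "# and dropping the duplicate entries created by tied values.\n",
+    "import numpy as np\n",
+    "\n",
+    "ranked = np.concatenate(sorted_gradient_ind)\n",
+    "first_seen = np.sort(np.unique(ranked, return_index=True)[1])\n",
+    "ranked = ranked[first_seen]\n",
+    "print(ranked.shape, ranked[:10].tolist())"
+   ]
+  },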
dtype=int64), array([200], dtype=int64), array([244], dtype=int64), array([295], dtype=int64), array([89], dtype=int64), array([615], dtype=int64), array([984], dtype=int64), array([646], dtype=int64), array([441], dtype=int64), array([185], dtype=int64), array([742], dtype=int64), array([512], dtype=int64), array([789], dtype=int64), array([81], dtype=int64), array([459], dtype=int64), array([184], dtype=int64), array([63], dtype=int64), array([180], dtype=int64), array([670], dtype=int64), array([272], dtype=int64), array([838], dtype=int64), array([107], dtype=int64), array([173], dtype=int64), array([468], dtype=int64)]\n"
+     ]
+    }
+   ],
+   "source": [
+    "sorted_gradient_ind = sort_and_keep_indices(loaded, training_size)\n",
+    "print(sorted_gradient_ind)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 33,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Extract features from the datasets\n",
+    "# Helper to extract features from X\n",
+    "def extract_features(net, X):\n",
+    "    X = torch.tensor(X).float().unsqueeze(1) # Add a channel dimension: [N, 1, 28, 28]\n",
+    "    X = X.repeat(1, 3, 1, 1).to(device) # Repeat to three channels: [N, 3, 28, 28]\n",
+    "    with torch.no_grad():\n",
+    "        features = net(X)\n",
+    "    return features.cpu().numpy()\n",
+    "X_train_features = extract_features(net_test, X_train_imbalanced)\n",
+    "X_test_features = extract_features(net_test, X_test_imbalanced)\n",
+    "\n",
+    "# Save the extracted features\n",
+    "with open('balanced_train_features.pkl', 'wb') as f:\n",
+    "    pickle.dump((X_train_features, y_train_imbalanced), f)\n",
+    "with open('balanced_test_features.pkl', 'wb') as f:\n",
+    "    pickle.dump((X_test_features, y_test_imbalanced), f)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 34,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "X_train_features_loaded shape: (1000, 100)\n",
+      "y_train_loaded shape: (1000,)\n",
+      "X_test_features_loaded shape: (4000, 100)\n",
+      "y_test_loaded shape: (4000,)\n"
+     ]
+    }
+   ],
+   "source": [
+    "import pickle\n",
+    "\n",
+    "# Reload the saved features for the training set\n",
+    "with open('balanced_train_features.pkl', 'rb') as f:\n",
+    "    X_train_features_loaded, y_train_loaded = pickle.load(f)\n",
+    "\n",
+    "\n",
+    "# Reload the saved features for the test set\n",
+    "with open('balanced_test_features.pkl', 'rb') as f:\n",
+    "    X_test_features_loaded, y_test_loaded = pickle.load(f)\n",
+    "\n",
+    "# Check the shapes to make sure the data was loaded correctly\n",
+    "print(f\"X_train_features_loaded shape: {X_train_features_loaded.shape}\")\n",
+    "print(f\"y_train_loaded shape: {y_train_loaded.shape}\")\n",
+    "print(f\"X_test_features_loaded shape: {X_test_features_loaded.shape}\")\n",
+    "print(f\"y_test_loaded shape: {y_test_loaded.shape}\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 35,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from sklearn.preprocessing import StandardScaler\n",
+    "\n",
+    "scaler = StandardScaler()\n",
+    "X_train_scaled = scaler.fit_transform(X_train_features_loaded)\n",
+    "X_test_scaled = scaler.transform(X_test_features_loaded)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 38,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "{0: 69672180000.0, 1: 4644369000.0, 2: 4443694000.0, 3: 4280734700.0, 4: 4033518000.0, 5: 3967600000.0, 6: 
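+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "`extract_features` above repeats the single MNIST channel three times so the 28x28 images match the 3-channel `conv1`, then runs the whole array through the network in one call, which is fine at this scale (1000 and 4000 images). For larger splits a mini-batched variant keeps GPU memory bounded; the cell below is an optional sketch of such a variant (`extract_features_batched` is a hypothetical helper name and is not used elsewhere in this notebook)."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Optional batched variant of extract_features (hypothetical helper, not used above).\n",
+    "# Assumes `net_test` and `device` from earlier cells.\n",
+    "import torch\n",
+    "\n",
+    "def extract_features_batched(net, X, batch_size=256):\n",
+    "    feats = []\n",
+    "    with torch.no_grad():\n",
+    "        for start in range(0, len(X), batch_size):\n",
+    "            xb = torch.tensor(X[start:start + batch_size]).float().unsqueeze(1)  # [B, 1, 28, 28]\n",
+    "            xb = xb.repeat(1, 3, 1, 1).to(device)                                # [B, 3, 28, 28]\n",
+    "            feats.append(net(xb).cpu())\n",
+    "    return torch.cat(feats).numpy()\n",
+    "\n",
+    "# e.g. X_train_features = extract_features_batched(net_test, X_train_imbalanced)"
+   ]
+  },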
3955789300.0, 7: 3901150200.0, 8: 3845601300.0, 9: 3843897300.0, 10: 3806365200.0, 11: 3767735800.0, 12: 3719465000.0, 13: 3711263200.0, 14: 3693731300.0, 15: 3692124200.0, 16: 3658300000.0, 17: 3633917000.0, 18: 3630165000.0, 19: 3629163000.0, 20: 3603912200.0, 21: 3598826500.0, 22: 3597165600.0, 23: 3594157000.0, 24: 3591550500.0, 25: 3584875000.0, 26: 3576539600.0, 27: 3570951700.0, 28: 3533981700.0, 29: 3525328000.0, 30: 3521223700.0, 31: 3511636000.0, 32: 3502064600.0, 33: 3488222200.0, 34: 3485116000.0, 35: 3481033700.0, 36: 3480057300.0, 37: 3470401500.0, 38: 3468031000.0, 39: 3467080700.0, 40: 3466176000.0, 41: 3464757800.0, 42: 3463348200.0, 43: 3462867500.0, 44: 3462027800.0, 45: 3460058600.0, 46: 3458549800.0, 47: 3458445800.0, 48: 3445255700.0, 49: 3436430300.0, 50: 3435028000.0, 51: 3434736000.0, 52: 3431393800.0, 53: 3416678400.0, 54: 3414521900.0, 55: 3412327400.0, 56: 3412202000.0, 57: 3411553300.0, 58: 3409910800.0, 59: 3409813500.0, 60: 3407102500.0, 61: 3401236000.0, 62: 3398654000.0, 63: 3397297200.0, 64: 3394100200.0, 65: 3391915500.0, 66: 3391676000.0, 67: 3388788700.0, 68: 3388377000.0, 69: 3381067800.0, 70: 3379195400.0, 71: 3374563300.0, 72: 3374380000.0, 73: 3372189200.0, 74: 3369763300.0, 75: 3369477600.0, 76: 3366912500.0, 77: 3364233700.0, 78: 3364120600.0, 79: 3363842000.0, 80: 3362132000.0, 81: 3360567800.0, 82: 3358762000.0, 83: 3356590000.0, 84: 3355611100.0, 85: 3351648300.0, 86: 3349571600.0, 87: 3341227500.0, 88: 3340158000.0, 89: 3338899500.0, 90: 3338201600.0, 91: 3335825400.0, 92: 3331874300.0, 93: 3316967400.0, 94: 3316149200.0, 95: 3306087000.0, 96: 3297213400.0, 97: 3296692700.0, 98: 3295691300.0, 99: 3284020700.0, 100: 3281943600.0, 101: 3281921500.0, 102: 3268506600.0, 103: 3264091600.0, 104: 3252923000.0, 105: 3250834400.0, 106: 3249477600.0, 107: 3236338200.0, 108: 3236222000.0, 109: 3234430000.0, 110: 3218118100.0, 111: 3217965600.0, 112: 3214306800.0, 113: 3209568300.0, 114: 3207731700.0, 115: 3207329800.0, 116: 3205061600.0, 117: 3204250000.0, 118: 3203787300.0, 119: 3197474300.0, 120: 3193111600.0, 121: 3190721500.0, 122: 3182888000.0, 123: 3176090600.0, 124: 3172292000.0, 125: 3167988700.0, 126: 3159559700.0, 127: 3155844000.0, 128: 3151817700.0, 129: 3142306800.0, 130: 3141167000.0, 131: 3139972000.0, 132: 3133651000.0, 133: 3128989700.0, 134: 3125612000.0, 135: 3119820800.0, 136: 3111690800.0, 137: 3108231700.0, 138: 3103576600.0, 139: 3089685000.0, 140: 3075124700.0, 141: 3071200800.0, 142: 3064079400.0, 143: 3019980300.0, 144: 3012814300.0, 145: 3005683200.0, 146: 2997066800.0, 147: 2984214000.0, 148: 2961792000.0, 149: 2946709500.0, 150: 2930614300.0, 151: 2926049300.0, 152: 2917576200.0, 153: 2910360600.0, 154: 2910360600.0, 155: 2896210000.0, 156: 2786166300.0, 157: 2783895000.0, 158: 2750578700.0, 159: 2690558500.0, 160: 2670000000.0, 161: 2634532900.0, 162: 2606588400.0, 163: 2525629400.0, 164: 2412174300.0, 165: 2387837400.0, 166: 2102406800.0, 167: 1645795700.0, 168: 1542991500.0, 169: 583064600.0, 170: 469208960.0, 171: 122052480.0, 172: -209095680.0, 173: -233768960.0, 174: -320520200.0, 175: -476425100.0, 176: -495457660.0, 177: -499381570.0, 178: -504565570.0, 179: -542500400.0, 180: -556486340.0, 181: -558078000.0, 182: -575917900.0, 183: -585160300.0, 184: -585776060.0, 185: -586971140.0, 186: -601053950.0, 187: -601450430.0, 188: -601797100.0, 189: -603555840.0, 190: -604918800.0, 191: -610967100.0, 192: -619881700.0, 193: -620266000.0, 194: -627110100.0, 195: -628076160.0, 196: -628690560.0, 197: -630119500.0, 198: 
-630923200.0, 199: -633524740.0, 200: -634457340.0, 201: -635191300.0, 202: -637966300.0, 203: -639111100.0, 204: -643399500.0, 205: -645852540.0, 206: -646487700.0, 207: -649227900.0, 208: -653110400.0, 209: -653158460.0, 210: -653762700.0, 211: -657228700.0, 212: -658577100.0, 213: -660409700.0, 214: -662883140.0, 215: -664709700.0, 216: -664891800.0, 217: -665074900.0, 218: -668185400.0, 219: -669029000.0, 220: -670285600.0, 221: -670477000.0, 222: -673278500.0, 223: -673492030.0, 224: -675506200.0, 225: -677619900.0, 226: -677838000.0, 227: -682469300.0, 228: -684810940.0, 229: -684897150.0, 230: -686103100.0, 231: -686981440.0, 232: -687124400.0, 233: -687687300.0, 234: -687792960.0, 235: -690137700.0, 236: -690646700.0, 237: -690686140.0, 238: -691229630.0, 239: -691885250.0, 240: -693929150.0, 241: -694445500.0, 242: -695138600.0, 243: -695152800.0, 244: -695611900.0, 245: -697117700.0, 246: -697275900.0, 247: -698737660.0, 248: -699001100.0, 249: -699635140.0, 250: -700722800.0, 251: -701924500.0, 252: -702299650.0, 253: -703204600.0, 254: -703334140.0, 255: -703431550.0, 256: -703661100.0, 257: -703828540.0, 258: -706960600.0, 259: -707612500.0, 260: -707748300.0, 261: -708009660.0, 262: -709444100.0, 263: -711024000.0, 264: -711639040.0, 265: -711902460.0, 266: -712181200.0, 267: -712206100.0, 268: -713150600.0, 269: -713235900.0, 270: -713285250.0, 271: -713287550.0, 272: -713537150.0, 273: -713548740.0, 274: -713631040.0, 275: -714037600.0, 276: -714173100.0, 277: -714598400.0, 278: -714880830.0, 279: -715314750.0, 280: -715471500.0, 281: -715567940.0, 282: -716079700.0, 283: -716145300.0, 284: -717082430.0, 285: -717837400.0, 286: -718154900.0, 287: -718876860.0, 288: -719037600.0, 289: -719075200.0, 290: -720069000.0, 291: -720094100.0, 292: -720294850.0, 293: -720458100.0, 294: -720562940.0, 295: -721159740.0, 296: -721409700.0, 297: -721960400.0, 298: -722296640.0, 299: -722459460.0, 300: -722933200.0, 301: -723189600.0, 302: -723380000.0, 303: -723411700.0, 304: -724486100.0, 305: -724926200.0, 306: -725092500.0, 307: -725273900.0, 308: -726332800.0, 309: -726425800.0, 310: -726570700.0, 311: -726633600.0, 312: -727270900.0, 313: -727433800.0, 314: -728107300.0, 315: -728130300.0, 316: -728146940.0, 317: -728556900.0, 318: -729053060.0, 319: -729357000.0, 320: -729633500.0, 321: -729811400.0, 322: -730052300.0, 323: -730623300.0, 324: -730630100.0, 325: -730766800.0, 326: -731241300.0, 327: -731844000.0, 328: -731925500.0, 329: -732799900.0, 330: -732840500.0, 331: -733231550.0, 332: -733734700.0, 333: -733799600.0, 334: -733877600.0, 335: -734228030.0, 336: -734285060.0, 337: -734310800.0, 338: -734327100.0, 339: -734482000.0, 340: -734691200.0, 341: -734817300.0, 342: -734922500.0, 343: -735057400.0, 344: -735059300.0, 345: -735085200.0, 346: -735385540.0, 347: -735766340.0, 348: -736004800.0, 349: -736806300.0, 350: -736976260.0, 351: -737164500.0, 352: -737166500.0, 353: -737553340.0, 354: -737659100.0, 355: -737920960.0, 356: -737957950.0, 357: -738564900.0, 358: -738577200.0, 359: -738607800.0, 360: -738638700.0, 361: -738729860.0, 362: -739151600.0, 363: -739285760.0, 364: -739378050.0, 365: -739821700.0, 366: -739922700.0, 367: -740088770.0, 368: -740619840.0, 369: -740720300.0, 370: -740813200.0, 371: -740913800.0, 372: -741267650.0, 373: -741373900.0, 374: -741420200.0, 375: -741501250.0, 376: -741595800.0, 377: -741688450.0, 378: -741808060.0, 379: -741824260.0, 380: -742125300.0, 381: -742500740.0, 382: -742540000.0, 383: -742686800.0, 384: -742697400.0, 385: 
-742843600.0, 386: -742977540.0, 387: -743001600.0, 388: -743473600.0, 389: -743835100.0, 390: -743918700.0, 391: -744033660.0, 392: -744262800.0, 393: -744309300.0, 394: -744358300.0, 395: -744397700.0, 396: -744430600.0, 397: -744832200.0, 398: -745125760.0, 399: -745269600.0, 400: -745390900.0, 401: -745749950.0, 402: -746015900.0, 403: -746040260.0, 404: -746059650.0, 405: -746152770.0, 406: -746242500.0, 407: -746408900.0, 408: -746544900.0, 409: -746640600.0, 410: -747148500.0, 411: -747240200.0, 412: -747465200.0, 413: -747751800.0, 414: -747905860.0, 415: -747908100.0, 416: -748042300.0, 417: -748097660.0, 418: -748100500.0, 419: -748232700.0, 420: -748316740.0, 421: -748347800.0, 422: -748630400.0, 423: -749119200.0, 424: -749122940.0, 425: -749294700.0, 426: -749472450.0, 427: -749618800.0, 428: -749694140.0, 429: -749749400.0, 430: -749822800.0, 431: -750219600.0, 432: -750280800.0, 433: -750494900.0, 434: -750515840.0, 435: -750650700.0, 436: -750972100.0, 437: -751012800.0, 438: -751022400.0, 439: -751145540.0, 440: -751149000.0, 441: -751548540.0, 442: -751588350.0, 443: -751904700.0, 444: -751961100.0, 445: -752152640.0, 446: -752271940.0, 447: -752401200.0, 448: -752619800.0, 449: -752637100.0, 450: -752769100.0, 451: -752935740.0, 452: -753242000.0, 453: -753256200.0, 454: -753416900.0, 455: -753518460.0, 456: -753781950.0, 457: -753918800.0, 458: -753965400.0, 459: -754132600.0, 460: -754210200.0, 461: -754301400.0, 462: -754673340.0, 463: -754895200.0, 464: -755034700.0, 465: -755491260.0, 466: -755597250.0, 467: -755622340.0, 468: -755671940.0, 469: -755847230.0, 470: -755942400.0, 471: -756357060.0, 472: -756572300.0, 473: -756715900.0, 474: -756763840.0, 475: -756772900.0, 476: -756961540.0, 477: -757094660.0, 478: -757146300.0, 479: -757402300.0, 480: -757673400.0, 481: -757821950.0, 482: -758229760.0, 483: -758267900.0, 484: -758302300.0, 485: -758481800.0, 486: -758603650.0, 487: -758607600.0, 488: -758645900.0, 489: -758682100.0, 490: -758744600.0, 491: -758789200.0, 492: -759030850.0, 493: -759161150.0, 494: -759255700.0, 495: -759677630.0, 496: -759684540.0, 497: -759799600.0, 498: -759826500.0, 499: -759996700.0, 500: -760174400.0, 501: -760193700.0, 502: -760213950.0, 503: -760491100.0, 504: -760519940.0, 505: -760593340.0, 506: -760655000.0, 507: -761012600.0, 508: -761041900.0, 509: -761182800.0, 510: -761389630.0, 511: -761498300.0, 512: -761517100.0, 513: -761550600.0, 514: -761628860.0, 515: -761662400.0, 516: -761706240.0, 517: -761768800.0, 518: -761866940.0, 519: -761910400.0, 520: -761962500.0, 521: -762025300.0, 522: -762056450.0, 523: -762106940.0, 524: -762108860.0, 525: -762163700.0, 526: -762170100.0, 527: -762177800.0, 528: -762313500.0, 529: -762349300.0, 530: -762353400.0, 531: -762641340.0, 532: -762756900.0, 533: -762900900.0, 534: -763062300.0, 535: -763160640.0, 536: -763349760.0, 537: -763386940.0, 538: -763449340.0, 539: -763495300.0, 540: -763498500.0, 541: -763499140.0, 542: -763613300.0, 543: -763615400.0, 544: -763796350.0, 545: -763801660.0, 546: -763881700.0, 547: -763954940.0, 548: -763983740.0, 549: -764010900.0, 550: -764064200.0, 551: -764085000.0, 552: -764117500.0, 553: -764225400.0, 554: -764262600.0, 555: -764325300.0, 556: -764537900.0, 557: -764835400.0, 558: -764887800.0, 559: -764917000.0, 560: -765128300.0, 561: -765241700.0, 562: -765369100.0, 563: -765403260.0, 564: -765492860.0, 565: -765564200.0, 566: -765715100.0, 567: -765840700.0, 568: -766007700.0, 569: -766082750.0, 570: -766098900.0, 571: -766162800.0, 572: 
-766172000.0, 573: -766226100.0, 574: -766299100.0, 575: -766360770.0, 576: -766432640.0, 577: -766507200.0, 578: -766573950.0, 579: -766673400.0, 580: -766725900.0, 581: -766892200.0, 582: -766967230.0, 583: -767343000.0, 584: -767388100.0, 585: -767506370.0, 586: -767605700.0, 587: -767640260.0, 588: -767646900.0, 589: -767700300.0, 590: -767744400.0, 591: -767750340.0, 592: -767758800.0, 593: -767762900.0, 594: -767781400.0, 595: -767915000.0, 596: -768052030.0, 597: -768052400.0, 598: -768129600.0, 599: -768136770.0, 600: -768346750.0, 601: -768479940.0, 602: -768481000.0, 603: -768489100.0, 604: -768504770.0, 605: -768609150.0, 606: -768681300.0, 607: -769145400.0, 608: -769202400.0, 609: -769569600.0, 610: -769574600.0, 611: -769589250.0, 612: -769629950.0, 613: -769708740.0, 614: -769783360.0, 615: -769798100.0, 616: -769810000.0, 617: -769811900.0, 618: -769878500.0, 619: -769908900.0, 620: -769951400.0, 621: -769997300.0, 622: -770021250.0, 623: -770043900.0, 624: -770163140.0, 625: -770173900.0, 626: -770231600.0, 627: -770649100.0, 628: -770734340.0, 629: -771062900.0, 630: -771198700.0, 631: -771222600.0, 632: -771386900.0, 633: -771394700.0, 634: -771426100.0, 635: -771522560.0, 636: -771535550.0, 637: -771731000.0, 638: -771817200.0, 639: -771986900.0, 640: -772119500.0, 641: -772138050.0, 642: -772379700.0, 643: -772524860.0, 644: -772553860.0, 645: -772710500.0, 646: -772730050.0, 647: -772730700.0, 648: -772784060.0, 649: -772904770.0, 650: -773058940.0, 651: -773084160.0, 652: -773233000.0, 653: -773319300.0, 654: -773379000.0, 655: -773415550.0, 656: -773516350.0, 657: -773533100.0, 658: -773577340.0, 659: -773588600.0, 660: -773709600.0, 661: -773801300.0, 662: -773858200.0, 663: -773906370.0, 664: -773969540.0, 665: -774062800.0, 666: -774158000.0, 667: -774206140.0, 668: -774266800.0, 669: -774420700.0, 670: -774430140.0, 671: -774631940.0, 672: -774776770.0, 673: -774824300.0, 674: -775014500.0, 675: -775057340.0, 676: -775096960.0, 677: -775203800.0, 678: -775238400.0, 679: -775275900.0, 680: -775314750.0, 681: -775320000.0, 682: -775347460.0, 683: -775446340.0, 684: -775504000.0, 685: -775648640.0, 686: -775882900.0, 687: -775917300.0, 688: -775923100.0, 689: -776163700.0, 690: -776252400.0, 691: -776355300.0, 692: -776606140.0, 693: -776630400.0, 694: -776703800.0, 695: -776754050.0, 696: -776754200.0, 697: -776850370.0, 698: -776853950.0, 699: -776892860.0, 700: -776964540.0, 701: -777187650.0, 702: -777224900.0, 703: -777462140.0, 704: -777519040.0, 705: -777628600.0, 706: -777631940.0, 707: -777636860.0, 708: -777770560.0, 709: -777780000.0, 710: -777791000.0, 711: -777911900.0, 712: -777931140.0, 713: -778032600.0, 714: -778126140.0, 715: -778363970.0, 716: -778464500.0, 717: -778479740.0, 718: -778593340.0, 719: -778656800.0, 720: -778931600.0, 721: -778959800.0, 722: -779100600.0, 723: -779106940.0, 724: -779215360.0, 725: -779253060.0, 726: -779357950.0, 727: -779382700.0, 728: -779561660.0, 729: -779700200.0, 730: -779867400.0, 731: -779869440.0, 732: -779973000.0, 733: -780102660.0, 734: -780222340.0, 735: -780250900.0, 736: -780362750.0, 737: -780471700.0, 738: -780581570.0, 739: -780654200.0, 740: -781009660.0, 741: -781095230.0, 742: -781162940.0, 743: -781171400.0, 744: -781173300.0, 745: -781433800.0, 746: -781618300.0, 747: -781641200.0, 748: -781748350.0, 749: -781795600.0, 750: -781894900.0, 751: -781967940.0, 752: -782379970.0, 753: -782395300.0, 754: -782444700.0, 755: -782463400.0, 756: -782524300.0, 757: -782915460.0, 758: -782927900.0, 759: 
-782976700.0, 760: -782984260.0, 761: -783087000.0, 762: -783176300.0, 763: -783299460.0, 764: -783462500.0, 765: -783491140.0, 766: -783499460.0, 767: -783505500.0, 768: -783735000.0, 769: -783814460.0, 770: -783872960.0, 771: -784054900.0, 772: -784072450.0, 773: -784249800.0, 774: -784294700.0, 775: -784332400.0, 776: -784388540.0, 777: -784393660.0, 778: -784414500.0, 779: -784661200.0, 780: -784689700.0, 781: -784737400.0, 782: -784814400.0, 783: -784833800.0, 784: -784993000.0, 785: -785079550.0, 786: -785089100.0, 787: -785152400.0, 788: -785249300.0, 789: -785331300.0, 790: -785344400.0, 791: -785371500.0, 792: -785499400.0, 793: -785525800.0, 794: -785707840.0, 795: -785780000.0, 796: -785873900.0, 797: -785896960.0, 798: -785978500.0, 799: -786001540.0, 800: -786010750.0, 801: -786014850.0, 802: -786069570.0, 803: -786157700.0, 804: -786283400.0, 805: -786499460.0, 806: -786805440.0, 807: -786814400.0, 808: -786941760.0, 809: -786964030.0, 810: -786970050.0, 811: -787188160.0, 812: -787335600.0, 813: -787359700.0, 814: -787394050.0, 815: -787538240.0, 816: -787657600.0, 817: -787678340.0, 818: -787790200.0, 819: -787799100.0, 820: -787813500.0, 821: -787836100.0, 822: -787870500.0, 823: -787927040.0, 824: -788004300.0, 825: -788348860.0, 826: -788517570.0, 827: -788612030.0, 828: -788722900.0, 829: -788815100.0, 830: -789222660.0, 831: -789345000.0, 832: -789348100.0, 833: -789378240.0, 834: -789570560.0, 835: -789734660.0, 836: -789771200.0, 837: -789831300.0, 838: -789848200.0, 839: -789858240.0, 840: -789919040.0, 841: -789927230.0, 842: -789971100.0, 843: -790154430.0, 844: -790161500.0, 845: -790225660.0, 846: -790514940.0, 847: -790608640.0, 848: -790641300.0, 849: -790740100.0, 850: -790788000.0, 851: -790799800.0, 852: -790911800.0, 853: -791002400.0, 854: -791014600.0, 855: -791026500.0, 856: -791032100.0, 857: -791072450.0, 858: -791396100.0, 859: -791578050.0, 860: -791730940.0, 861: -791734800.0, 862: -791885250.0, 863: -791964740.0, 864: -792030600.0, 865: -792068300.0, 866: -792085950.0, 867: -792304500.0, 868: -792325440.0, 869: -792601600.0, 870: -793074240.0, 871: -793091260.0, 872: -793098940.0, 873: -793260860.0, 874: -793348030.0, 875: -793435650.0, 876: -793743800.0, 877: -793856260.0, 878: -793895230.0, 879: -794086300.0, 880: -794095500.0, 881: -794123300.0, 882: -794204900.0, 883: -794228800.0, 884: -794257100.0, 885: -794297700.0, 886: -794555260.0, 887: -794573600.0, 888: -794713860.0, 889: -794746750.0, 890: -794760900.0, 891: -794808700.0, 892: -794836700.0, 893: -795112960.0, 894: -795135400.0, 895: -795336260.0, 896: -795406400.0, 897: -795490560.0, 898: -795511500.0, 899: -795642800.0, 900: -795814850.0, 901: -796140540.0, 902: -796312260.0, 903: -796392200.0, 904: -796462300.0, 905: -796493060.0, 906: -796887550.0, 907: -796890240.0, 908: -797484740.0, 909: -797490800.0, 910: -797578050.0, 911: -797642900.0, 912: -797891840.0, 913: -797958700.0, 914: -798237250.0, 915: -798318600.0, 916: -798551200.0, 917: -798591170.0, 918: -798654850.0, 919: -798844740.0, 920: -798862000.0, 921: -798876740.0, 922: -798946560.0, 923: -799059800.0, 924: -799351500.0, 925: -799408600.0, 926: -799480000.0, 927: -799583740.0, 928: -799627600.0, 929: -799707500.0, 930: -799779260.0, 931: -799857700.0, 932: -800496500.0, 933: -800497540.0, 934: -800607100.0, 935: -800644700.0, 936: -800678340.0, 937: -800881540.0, 938: -801296830.0, 939: -801660600.0, 940: -801682240.0, 941: -801789400.0, 942: -801919040.0, 943: -802372500.0, 944: -802445100.0, 945: -802648060.0, 946: 
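+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The dictionary of LAVA values printed here is consumed a few cells further down by the project-local helpers `data_removal_f1` (from `eval.py`) and `weighted_acc_drop` (from `metrics.py`). A minimal sketch of the idea behind such a removal experiment, assuming a scikit-learn logistic-regression probe and removal of the highest-valued points first (these are assumptions for illustration, not the actual implementation):"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Hedged sketch only: approximates what data_removal_f1 / weighted_acc_drop are assumed to do.\n",
+    "import numpy as np\n",
+    "from sklearn.linear_model import LogisticRegression\n",
+    "from sklearn.metrics import f1_score\n",
+    "\n",
+    "def data_removal_f1_sketch(values_dict, X_tr, y_tr, X_te, y_te, step=50):\n",
+    "    # Remove training points in order of decreasing value, refit a probe, track test F1.\n",
+    "    order = sorted(values_dict, key=values_dict.get, reverse=True)\n",
+    "    scores = []\n",
+    "    for k in range(0, len(order), step):\n",
+    "        keep = np.setdiff1d(np.arange(len(y_tr)), order[:k])\n",
+    "        clf = LogisticRegression(max_iter=1000).fit(X_tr[keep], y_tr[keep])\n",
+    "        scores.append(f1_score(y_te, clf.predict(X_te), average='macro'))\n",
+    "    return scores\n",
+    "\n",
+    "def weighted_acc_drop_sketch(scores):\n",
+    "    # One plausible summary: the average drop relative to the no-removal score.\n",
+    "    scores = np.asarray(scores, dtype=float)\n",
+    "    return float(np.mean(scores[0] - scores))\n",
+    "\n",
+    "# Hypothetical usage, mirroring the real call made below:\n",
+    "# acc_sketch = data_removal_f1_sketch(dic_loaded, X_train_scaled, y_train_imbalanced, X_test_scaled, y_test_imbalanced)"
+   ]
+  },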
-802726600.0, 947: -803140350.0, 948: -803242800.0, 949: -803699650.0, 950: -803942000.0, 951: -804271800.0, 952: -804511800.0, 953: -804739260.0, 954: -804929100.0, 955: -804950700.0, 956: -805308900.0, 957: -805926600.0, 958: -806118400.0, 959: -806232200.0, 960: -806311100.0, 961: -806371400.0, 962: -806536500.0, 963: -807194430.0, 964: -807429440.0, 965: -808080450.0, 966: -808477500.0, 967: -808745540.0, 968: -808841150.0, 969: -808960450.0, 970: -808966100.0, 971: -809098800.0, 972: -809184400.0, 973: -809264960.0, 974: -809626200.0, 975: -809692700.0, 976: -810046100.0, 977: -810055200.0, 978: -810288450.0, 979: -811053760.0, 980: -811085440.0, 981: -811167400.0, 982: -811297300.0, 983: -811641800.0, 984: -811814100.0, 985: -812505200.0, 986: -813714370.0, 987: -814375900.0, 988: -814723900.0, 989: -814797800.0, 990: -815068160.0, 991: -816097000.0, 992: -816397100.0, 993: -816558000.0, 994: -818856640.0, 995: -819550340.0, 996: -821355840.0, 997: -822337660.0, 998: -827333600.0, 999: -830580030.0}\n" + ] + } + ], + "source": [ + "dic_loaded = {}\n", + "for i, l in enumerate(loaded):\n", + " dic_loaded[i] = l\n", + "print(dic_loaded)" + ] + }, + { + "cell_type": "code", + "execution_count": 40, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The weighted accuracy drop is 0.024\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAiMAAAGdCAYAAADAAnMpAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAyjUlEQVR4nO3de3zU1Z3/8fdcMjO5TkhCbiSEoCJIBDWIglLrLRVRt5etVK1old1Sr0hrW2R/1bK2cbstS90K1nrbrlZZq+1ql1qjtQiCojFYFBQUJIEkhASYhFxmkpnz+yNkZEgCucF3wryej8c8Ct8535kzJ6nz5pzP93xtxhgjAAAAi9it7gAAAIhthBEAAGApwggAALAUYQQAAFiKMAIAACxFGAEAAJYijAAAAEsRRgAAgKWcVnegL0KhkKqrq5WcnCybzWZ1dwAAQB8YY9TU1KTc3FzZ7b3PfwyLMFJdXa38/HyruwEAAAagqqpKeXl5vT4/LMJIcnKypM4Pk5KSYnFvAABAXzQ2Nio/Pz/8Pd6bYRFGupZmUlJSCCMAAAwzRyuxoIAVAABYijACAAAsRRgBAACWIowAAABLEUYAAIClCCMAAMBShBEAAGApwggAALAUYQQAAFiKMAIAACxFGAEAAJYijAAAAEsNixvlHQ8tgQ5V72+VzWaTTdL2+mat/2yvUuNd+lrxKGUmewb82m3tQYWMkSTZZFO8y6FQyMhmO/LNg3yt7UpwORTnOHJmNMaorT2keJdjwH0EAMAqMR1Gvvfc+/q/v9dIkto6gjqYF7r5t5c/kifOrgSXU/FxnV/4nUGi83mbbOE/72nyq7U9qBRPnBx2m1oDQbW2ByNez2aTjJGS3E6lJsTJZpPsNpua/UE1NPvlctiV6HZqb3NA8XEOTRkzQk575xtU729T5d4WZXs9SomP077mgKr3typojM4/OUNup0PN/o6I9xuRGKd9ze1KS3IpK9mjwpGJ2tPYppCRTs5MktNh0/6Wdm3b06zPGprlOhh+Gpr93fqe4olTjjdeUwtHyO10aE+TX/tbA8pNjQ8Hp1Gp8Tpz9IgB/1wOFQwZOQ5+9v0tAXnj445690cAwPAS02Ek0BGK+LLt+jINGSO7zabzTk7Xp3XN+nh3k9raQ2prD/T5tX2t7b0+1xV6Dvg7dOCw4CBJ/o6Q/B2d79XaHtTqrfXd2myvb+52rKd2x8rz7+084vNfOXOUzshPld1u0+i0BH3hlIyIENEeDOnj2ibtb2nXmIwE5Xjjw6FD6vzZfP/37+v/NtaoPWiU6HKoORBUgsuhEQkuBYIhuRx2Tc736qzRI5Tkdmr1J/X6tO6AQsbIYbcrzmFTRpJbF0/IVOiwoGmTVJCeIKfdrswUt04amTSUwwMA6AebMb3NB0SPxsZGeb1e+Xw+paSkDNnrNhzwqyXQGUYcdpuyUzyy2yP/1W2MUUNzQK2BoFoCQbW1B2UOHu/8X0kyB9tKiW6n0pNc2tPkl9NulyfOrtR4l+Kcna9b3xTQR7WNKsxIlL8jpI6QUcgYGWNks9k0KjVetb422W02jRoRr/XbG9TU9nlgiXPYdUpWkvY0+dUe7DwvPcktX2tAH+xqlN0mjU5PVNen6AiFtKfJr8xkj6p9rfqsvln7WtqV4olTMBRSbWNb+POPzUhS3ojOGQ5/R0iJbqeS3E4lezozq5FU62vTxl0+fbL7gPa1BOR02FW9v1WT81NljNGaT+p7nGGKc9g0Jj0xHDj2NPnV0Px5uLPZpC+OG6ktuw+orT2oQDAU8bmPtawUty48NVO5qfFqbG2Xr7VdTodN10wdrfQkt9ISXLLZpNZAUK9sqtVlRTnyxscN+P0+qWvSf6/boVOzU3TtOaOH8JMAx0egIxRefrbbbIpz2LR6a7227Tkgf0dIcQ67rpiUoyfXfqZVW/Zoy+4meZwO/eLqySqZmG1x709c2+ubdf1jb+ufvzBWc6aNsbo7ff7+jukwgqHX7O/Q42u2q9rXKl9ru2p9bXqvcn+PbT1xdrW1h3p9LU+cXfddOVGZKW7t2teqc8amqz0Y0o6GFvk7gkp0OfXG1j3aUntA8S6HMpPduuS0LCW7neoIGbW2B/X6R3Xaua9VSW6nDl3dCXSEtK2+WdX7W+Xv6L0PvclKcev
ROWdr1Ij4Pp/ja23X3ma/Kir3q/TPHyl4cLqmc1krVQXpCTKms2+7m/yaWpimWafnaNe+VpVt3q34OIeSPU6Ny0pWWmKcHnr9U31U26QJ2cmanJ+qrBSPvPFxOndsGktZOKb+9U+b9OTaz8K/wzZb52zj4TOQPclPi9dvbzpHY9IT+D2V9Fl9s55/b6e+Xpyv0ekJg369a3/zltZ+2tD52g/MGvTrDRZhBFFjT5Nfe5r82rmvRXFOu2p9beoIhnRZUY4k6bXNu7WvpV1Nbe0qSE/QGfkjZLNJWckeeRMGPvvQF+3BkB55Y5vagyF1BI3qmtrkdjrkjY/Tr17/5Ji+97HictiV7HHqpMwkfb04T8kep6r3t8nfEVJBeoJmFmXzJWCBQEdIb35aL298nFLj4xTnsMvltGtEgksOu011TW2Kc9iVnuhS/YGAGpr9envbXpVMzFKON17twZCq97cqNzX+qEXtHcHOgL23JaAEV+cMZ6AjpKWvbtGnew4oGOqcNa0/4Fd7h1Gi26FJealKS3RpRKJLJ2Uk6pyx6drd2Kb/3VCtZn+HPqpt1EmZSbro1EzNfuStHt83zmHT1MI0pSW69beP6tR0cBn6hmkFmn32aF3+4Opw2/NOTleON15TCkboG1Njc3awxteqaaV/lSTZbdKvr5+iS0/LUmsgqC27mzQ5P7Xfr/nFf39dnzW0SCKMDDnCCKzwcW2T1m9v0Ol5qTo5s7OmpMXfoWV/+1QVlfu0cZevT/8S7OKw25Sb6pHd1rkkeO+VE/Xi+9Xyxsdp6+4mtYeMbJKyvR7919rPwjM2TrtNRaO8qtrbErG0VZCeoHML07Xmk3rt2t8qh90W/pfq0RSNStHZY9J0Rn6qQsbo/SqfPq5t0vb6ZqUmxCklPk4uR2fdjTc+TpkpHm3bc0CT81I1Oj1BrYGg9rYEVNfo1+i0BJ2e59X47GQFOkJqCXQWcA9lkGwPhmS32SLqio7E3xGU027vc/tjaU+TXynxTt305Dt685OGbs93FbQfSd6IeNU1+RXoCMlmk1Lj4/Tv/zhZl5yWFW7zP+9W6ZE3tqmxtV17DviP+pqDNbUwTY/feLYkqa6xTXVNfk3MTVGyp/Pn3h7sXCJOdDnDvwu3P1Ohl96v7vZaT9x4ti4cnylJeu7dKiV7nLpwfKbczhP3CsFAR0hX/WqNPqptijg+45SMcP1fdopHd15yiq45Slj773Wfaef+Vs2/eJym/uTVcAgkjAwxwgiiUag/SeSgw2uS+vr6drtNXf9X/bC6UTabNC4rWXEOu4wx2rW/VZnJHjW1tav+QEC79rfotc112rq7s6A3JT5ONkmvfVTX7z4P1Mhkt5LdTp2e55XDZlN7yCgYCumM/FS1B43WflqvA20dam0PqmiUV3ua/PqwulH+9qDsNlu4uNxmkzoOjkWc3R6++izZ03k1mstpV/X+NmUmu+WOc6i+ya9d+1vD5zrtNnmcDjkcts4asYPDOiEnWckHr3rrepySmaQvnpqpprZ2tQc767k6QkahUOcYb6pu1ClZSUpLdGlUaryM6VyjDxmj5kBQ/vag/B0h1TW1KSvFo+31zfrbx3sixmVEQpw6QkbtwZDag6bPAbInSe7Pr0HoqRj+cC6HXZPyvNrbHFB+WoKumTpazf4OVVTtk9NuV7O/Qzv2tuj9qv3hMHxKZpLOHZuuDVX7tftg6Ej2OLV09hm6eELWUd4xUsMBv/78Qa2eemtHty9hT5xdaQkuVfvawsdGpyXo8RvPVq2vTeeOTZPz4IxQV43dcPXkm9t130ubwn+/5/LxWvrq1nAN4+Feuu18nZ7n7fG5t7Y16Bu9zFQRRoYYYQQYGq9t3q3q/a1qbOvQ7sY2vV+1Xx0ho1Ozk3VaTopSDhblJrqcag+GtKFqv/7092qNSo3X+zt94dmdU7OS5Y5zaERCnDbXNIVfB0d2x8WnaMGl48J/N8bo0z0H5IlzqKmtQ9989G01NAf006+crqun5Gn9Z3u1emu96pv8uuac0TotJ0Wbaxp1zW/e6lZvZbNJ/zxjrK46I1dJbqfcToeMjOb+17ualJeqW754kkYmu+WJO/psQ1t7UL7WdtU1+jUuOyk8Q2GMkb8jJLfTPqgwYIxRSyCo9yr36frH1vf5vIvHZyrL69Fz71bptFyvFl81cUBLGVabVvqaag6GrnsuH69//sJJ+n35Tr3+UZ1GpyfI43Rob7Nf/7VuhyTpx1dN1A3Tx3R7nQf+/JEeXvVpj+/hdtr18f0zj9ln6CvCCIDjpu3gLIbL0TlzsXGXT83+oD5raNbe5oDiHDY57XbV+Fq1emu9bDab3E67zh2bLptN4eLcsSOTNDotQcFQ59VcNtlkZOS0d/6LOBAMyRijjqDR7sY2tQeN9rcG5Dx4KbcxksNh04TsFLmddnWEjDpCITX7gwqGOmsj2oOdV30lxDnCS1vBkNHO/a16+q0d8neENHZkouIcdjlsNtntCi/55KZ61BoIal9Lu/a1dC6ZJXucGpnkVrInTp64zjoQu82mQDCkxtYOtQdD+sbZ+UpPcvepaLOprT281NGblkCH6hr9EccS3U6NTHYP9EdomZ37WvQ/71Spomq/ZhblaGphmj6pO6A3P6nXf7+144jnup12LbvurB5naNrag3p7+16dlpOi9ERXv2Ylj6UD/g4V3fsXSdJ3vniS7i45tde+LSnbogdf26qvF+fp378+OeK5isp9+sqytd3OmVIwQu/u2KcUj1N/v+9LQ/8B+qmv398xvc8IgKFx+L+2J+WlSpKmnZR+zN5zTEbigM8t7OXc2y86WcZILqd1d8o4WhCRpASXU2MyToz/fOeNSNCCklMjjp2cmaTLirL1g5njtWV3k8o/26dqX+cymSSdMTpV72zfq/cq92vhCxv19j2ZenVzncp37NOOhmZtqmlUY2u79rV07vfksHfWPi25erJ2N7bptByvikal6M1PGnTfSx/K5bArPy1eb2yp16Q8r56ae85Ri4QHompvi/7hoTclSRlJLv3gsvFHbD86rfPqmp37Oq9ONMaoqa1DI5Pd4b2eUjxO/e3uC9Xs79D67Xs1tTBNM372+hGvVIxGJ8ZvMwAMgWPxBYSBS3I7ddboETqrhx2dm/0dmnjvX1TX5Nc/PPSm/r7T1+vrBENGe5sDuvGJd8LH3E57xGX9m2o6g87b2/fqqbd26FvnFQ7hJ+n00OufaO/BIvQrJ+cetX3XTNe6bQ2a/ONXemzzy2+cqbREl9ISXcpPS9D+gzN2gWBIHcFQuM4m2g2PXgIAcIhEt1MXHbwCpyuIZKW4dcO0An1h3Ej904xCvbPoEpX/yyWaNSmn2/mHBpHMw5a3fvzSpvDS41D6dM8BSdL9Xy7SvVdOPGr7kUlHXnY7e8wIXTBuZMSxQ2cpD7+dhzFG/7thl9Zv39vXLh83zIwAAIalW754kv56yBVif/vehT3eMLT0q6crLzVeXyvOkyS99H619re0KyXeqaJcry4rylZdk19vb9+rO56pkCS9smm3rjg9Z0hrTXbu67zKa2
Ju32ofM1M+DyOjUuP106+ervNPzlD1/tZD7m0W2b/O4uLOy8Xf2FKvM0enakSCS/Euh158v1p3PrtBcQ6btv7k8iH7XEOBMAIAGJamjEnTNVNH640te3TfVRN7vXN5iidOCy+fEP77dw+rUZGkrBSPrpqcq/95p0prPqnXHc9U6IGVm/XHW89TZsqR79pujNFP/m+znA67vv+lUxUy5uAW+Ac0Mtmti8dn6pevbQ1fQZOf1redVtMSXOE/l0zMCs+CHOl8m63zUvbW9qBu/d17kjrrU15dcIH+WLFLksK3Eommy6MJIwCAYav0q6cP6etdMSlHaz7p3HSs2temNZ/U66tn5YWf/6y+WUtf3SJvfJxGJrs1akS8Tsvx6tE12yVJ47KS1Ozv0P/73w/D5xSkJ2jHwV1RJ+V5lZ7oUl/Y7TY980/nav32vbru3L7vUhsIRhav1h8I6Kx/LYu4fUVbe6jX8GYFwggAAAd9Y+ponZKVrF++tlVvbNmjl96v1s59rbpiUo7GjkzSd55+T5sPFrv25Gcvf6ymtsi7tncFkUsmZGr5N4v7NSMx7aT0fl+V1tOrh4xUtbc1/PcD/o6oCiMUsAIAcIjighG67OCdhV//eI+WlG3RRb9YpQ+rfeEgMiEnRVkp3QtMaxvb1HxwJ9Wff31yxA0677l8wnG5YqsvdS7zV1Qc8370BzMjAAAc5orJOdpQtU/b65v1zmf7JEmzHlwjqTOI/PnOGZKk1Vv3ROwi23WbgtNHefUPZ+QqK8Wt//fHD/SVM/M0dmTScem7025T4ChterpPkpUIIwAAHCbFE6ef/WPnrqeHbrtus0nfODs/3G5KQVr4zy/cMr3bnigzThmpv9194XHo8ecWXDpO9//f5uP6noNFGAEA4Ai+/6VTdeP0MfK1tqsgPSFiL494l0PLrztLjW3tPW7OZoVvnVeo9dv36pVNu63uSp9RMwIAwBHY7TZlez06NTu5xxsNzjw9R7PP7vvVLseaw27TlDHdg9G0scfu9gyDRRgBAOAE03VzyUM988/n6h+L83pobb0BhZFly5apsLBQHo9HxcXFWr169RHbP/3005o8ebISEhKUk5Ojb33rW2poiK7iGQAAThROR89X1NxzyOZvwZA5Xt05qn6HkRUrVmj+/PlatGiRKioqNGPGDM2cOVOVlZU9tl+zZo3mzJmjm2++WR9++KGee+45vfPOO5o7d+6gOw8AALpzHHZ576lZyZIkT9znX/uBjui5s2+/w8iSJUt08803a+7cuZowYYKWLl2q/Px8LV++vMf2b731lsaMGaM77rhDhYWFOv/88/Xtb39b77777qA7DwAAunMeFkb+Y/YZkiTXIfucHIubAQ5Uv8JIIBBQeXm5SkpKIo6XlJRo7dq1PZ4zffp07dy5UytXrpQxRrt379bvf/97zZo1q9f38fv9amxsjHgAAIC+ObRm5MJTR+q0gzfnczrs4aDiH64zI/X19QoGg8rKyoo4npWVpdra2h7PmT59up5++mnNnj1bLpdL2dnZSk1N1X/+53/2+j6lpaXyer3hR35+fq9tAQBApENrRpI8cRHPdV0R5O8YpjMjXQ7fV/9Id//btGmT7rjjDv3oRz9SeXm5Xn75ZW3fvl3z5s3r9fUXLlwon88XflRVVQ2kmwAAxKRDa0biDluycTs7v/qff2/Xce3TkfRr07OMjAw5HI5usyB1dXXdZku6lJaW6rzzztPdd98tSZo0aZISExM1Y8YM3X///crJyel2jtvtltvdfc9/AABwdIcu0xxezNoVRh58basumZCpSXmpx7NrPerXzIjL5VJxcbHKysoijpeVlWn69Ok9ntPS0iL7Ydc7OxydU0TGRM9lRQAAnCgOLWB1HnZzPvchG7d13VHYav1eplmwYIEeffRRPf7449q8ebPuuusuVVZWhpddFi5cqDlz5oTbX3nllXrhhRe0fPlybdu2TW+++abuuOMOTZ06Vbm5uUP3SQAAgCTJcUjNyOFX1hy6v0hPO8paod/3ppk9e7YaGhq0ePFi1dTUqKioSCtXrlRBQYEkqaamJmLPkRtvvFFNTU361a9+pe9+97tKTU3VRRddpH/7t38buk8BAADCDg0ghy/ThKJwVWJAN8q75ZZbdMstt/T43JNPPtnt2O23367bb799IG8FAAD66dCakbjDdmNNcn/+1R8te41wbxoAAE4wh17a6zisbjPxkDDSShgBAADHQsSlvYfNjBx6R19mRgAAwDERd4RLe+dfPC7859YAYQQAABwDkTMjkV/18S6HvnnuaEks0wAAgGMksmak+w7p8Qcv6SWMAACAYyJi07MjhJE2lmkAAMCxcOilvS5n9696j6szjLQQRgAAwLEQ5/x8NmRqYVq35zMSO+//9lz5Tv3qr1uPW796QxgBAOAEk53i0Y3Tx2j+JadofHZKt+fH5ySH//zzV7Ycz671aEA7sAIAgOhls9l031UTe31+XFZyxN+NMbLZuteWHC/MjAAAEGMOv0HeoTfPswJhBACAGHTrhSeF/2xxFiGMAAAQi759waFhhJkRAABwnB26/wjLNAAA4LizH1KwyswIAAA47iLCSMjCjogwAgBATDr0njVBZkYAAMDxdugta1imAQAAx53NZlPXSk2IAlYAAGAFx8E0wjINAACwhP3gWg2bngEAAEvYWaYBAABW6lqmoYAVAABYomuZhh1YAQCAJezMjAAAACs5KGAFAABW6poZYZkGAABYoutqGsIIAACwRNcyjcUlI4QRAABilZ0dWAEAgJXsB1MAyzQAAMASXZueGWZGAACAFdj0DAAAWOrzTc8s7oe1bw8AAKzCvWkAAIClWKYBAACW6tr0jJkRAABgic/vTUMYAQAAFvj83jQW98PatwcAAFZhmQYAAFgqvExDASsAALAC96YBAACWYtMzAABgKZZpAACApexc2gsAAKzUdTUNO7ACAABLcG8aAABgqc+XaSzuh7VvDwAArMIyDQAAsBT3pgEAAJYK7zPCzAgAALDC5zuwWtwPa98eAABYpatmxLBMAwAAYhlhBAAAWIowAgBAjLIdrBmxGmEEAIAYZ3HJCGEEAIBYFR3zIoQRAABinhFX0wAAACtEydQIYQQAgBhHzQgAALCELUqmRggjAADEOIsnRggjAADEqijZZoQwAgBArKNmBAAAWCJKJkYGFkaWLVumwsJCeTweFRcXa/Xq1Uds7/f7tWjRIhUUFMjtduukk07S448/PqAOAwCAE4uzvyesWLFC8+fP17Jly3Teeefp17/+tWbOnKlNmzZp9OjRPZ5z9dVXa/fu3Xrsscd08sknq66uTh0dHYPuPAAAGDyrNz3rdxhZsmSJbr75Zs2dO1eStHTpUv3lL3/R8uXLVVpa2q39yy+/rFWrVmnbtm1KS0uTJI0ZM2ZwvQYAAIM2LAtYA4GAysvLVVJSEnG8pKREa9eu7fGcF198UVOmTNHPfvYzjRo1SuPGjdP3vvc9tba29vo+fr9fjY2NEQ8AAHBsWF3A2q+Zkfr6egWDQWVlZUUcz8rKU
m1tbY/nbNu2TWvWrJHH49Ef/vAH1dfX65ZbbtHevXt7rRspLS3Vj3/84/50DQAA9NOw3vTMdti8jjGm27EuoVBINptNTz/9tKZOnarLL79cS5Ys0ZNPPtnr7MjChQvl8/nCj6qqqoF0EwAADAP9mhnJyMiQw+HoNgtSV1fXbbakS05OjkaNGiWv1xs+NmHCBBljtHPnTp1yyindznG73XK73f3pGgAA6KdhWTPicrlUXFyssrKyiONlZWWaPn16j+ecd955qq6u1oEDB8LHtmzZIrvdrry8vAF0GQAADCVjcdFIv5dpFixYoEcffVSPP/64Nm/erLvuukuVlZWaN2+epM4lljlz5oTbX3vttUpPT9e3vvUtbdq0SW+88Ybuvvtu3XTTTYqPjx+6TwIAAPolWmZG+n1p7+zZs9XQ0KDFixerpqZGRUVFWrlypQoKCiRJNTU1qqysDLdPSkpSWVmZbr/9dk2ZMkXp6em6+uqrdf/99w/dpwAAAANm9dU0NmP13EwfNDY2yuv1yufzKSUlxeruAABwQlj4wkY9s75S3710nG6/uHsN52D19fube9MAAABLEUYAAIhxVi+REEYAAIhR0VLAShgBACDGWV09ShgBACBGRcnECGEEAIBYZyyuGiGMAAAQo6gZAQAAUYGaEQAAYAlblFSNEEYAAIhx7DMCAAAsQc0IAACACCMAAMDiClbCCAAAMSpKVmkIIwAAxDoKWAEAgCVsUVLBShgBACDGsekZAACIaYQRAABiHDfKAwAAloiSkhHCCAAAsY6aEQAAYAlulAcAAKIC+4wAAABLUDMCAAAgwggAADGPAlYAAGCJKFmlIYwAABDr2PQMAABYggJWAAAQHagZAQAAVrBFydQIYQQAgBjHpmcAAMAS0TEvQhgBACDmGYs3GiGMAAAQq6JkaoQwAgAALEUYAQAgxrEdPAAAsIQtStZpCCMAAMQ4Lu0FAACWiJI9zwgjAADEOmpGAACAJaJkYoQwAgBArDMWV40QRgAAiFHUjAAAgKhAzQgAALAE+4wAAACIMAIAACxGGAEAIEZRwAoAAKKCsbiClTACAECMipKJEcIIAACxjhvlAQAAa0RJ0QhhBACAGMemZwAAwBLRMS9CGAEAIOZxozwAAGCJKCkZIYwAABDrqBkBAACW4EZ5AAAAIowAABDz2PQMAABYggJWAAAQFShgBQAAloiSiRHCCAAAYNMzAABgAWpGAABAVBiWNSPLli1TYWGhPB6PiouLtXr16j6d9+abb8rpdOqMM84YyNsCAIAhZIuSqZF+h5EVK1Zo/vz5WrRokSoqKjRjxgzNnDlTlZWVRzzP5/Npzpw5uvjiiwfcWQAAMPSG3czIkiVLdPPNN2vu3LmaMGGCli5dqvz8fC1fvvyI533729/Wtddeq2nTpg24swAA4MTTrzASCARUXl6ukpKSiOMlJSVau3Ztr+c98cQT+vTTT3Xvvff26X38fr8aGxsjHgAA4MTUrzBSX1+vYDCorKysiONZWVmqra3t8ZytW7fqhz/8oZ5++mk5nc4+vU9paam8Xm/4kZ+f359uAgCAfjDD8dLewwtejDE9FsEEg0Fde+21+vGPf6xx48b1+fUXLlwon88XflRVVQ2kmwAA4AiipH5VfZuqOCgjI0MOh6PbLEhdXV232RJJampq0rvvvquKigrddtttkqRQKCRjjJxOp1555RVddNFF3c5zu91yu9396RoAABigYVXA6nK5VFxcrLKysojjZWVlmj59erf2KSkp2rhxozZs2BB+zJs3T6eeeqo2bNigc845Z3C9BwAAA2aLkg3h+zUzIkkLFizQ9ddfrylTpmjatGl65JFHVFlZqXnz5knqXGLZtWuXfvvb38put6uoqCji/MzMTHk8nm7HAQCANSyeGOl/GJk9e7YaGhq0ePFi1dTUqKioSCtXrlRBQYEkqaam5qh7jgAAAOtFS82IzRirV4qOrrGxUV6vVz6fTykpKVZ3BwCAE8LDqz7VA3/+SF87K0+/uHrykL9+X7+/uTcNAAAxKkomRggjAADEumG5zwgAABj+oqVmhDACAAAsRRgBACDWDadNzwAAwIkjWjY9I4wAABDjrN7jgzACAECMooAVAABEBav3PyWMAAAASxFGAACIcdSMAAAAS9iipGiEMAIAQIyz+pa5hBEAAGJUdMyLEEYAAIh51IwAAABLREnJCGEEAABYizACAECMY9MzAABgiShZpSGMAAAQ6yhgBQAAlmDTMwAAEB3Y9AwAAFghSiZGCCMAAMQ6Y/HUCGEEAIAYFSUTI4QRAABiHTfKAwAA1oiSohHCCAAAsBRhBACAGMcyDQAAsER0LNIQRgAAiHlc2gsAACwRJfWrhBEAAGIdNSMAAMAStiipGiGMAAAQ4yyeGCGMAAAQq6gZAQAAUYGaEQAAYIkomRghjAAAAGsRRgAAiHlsegYAACxAASsAAIgKFLACAABLsOkZAACICmx6BgAArBEdEyOEEQAAYp2xuGiEMAIAQIyKkokRwggAALGOmhEAAGAJW5RsNEIYAQAAliKMAAAQ49j0DAAAWCI6FmkIIwAAxDwKWAEAgCWipH6VMAIAQKxj0zMAAGAJZkYAAABEGAEAIGbZouR6GsIIAAAxjn1GAACAJagZAQAAUcFYvNMIYQQAAFiKMAIAACxFGAEAIMZRwAoAACxhi5IKVsIIAAAxjpkRAABgieiYFxlgGFm2bJkKCwvl8XhUXFys1atX99r2hRde0KWXXqqRI0cqJSVF06ZN01/+8pcBdxgAAAytYXdp74oVKzR//nwtWrRIFRUVmjFjhmbOnKnKysoe27/xxhu69NJLtXLlSpWXl+vCCy/UlVdeqYqKikF3HgAADFyUlIzIZvp53+BzzjlHZ511lpYvXx4+NmHCBH35y19WaWlpn15j4sSJmj17tn70ox/1qX1jY6O8Xq98Pp9SUlL6010AANCLP/29Wrf9rkLnFKZpxbenDfnr9/X7u18zI4FAQOXl5SopKYk4XlJSorVr1/bpNUKhkJqampSWltZrG7/fr8bGxogHAAAYWsPyRnn19fUKBoPKysqKOJ6VlaXa2to+vcYvfvELNTc36+qrr+61TWlpqbxeb/iRn5/fn24CAIB+sPhimoEVsB5+XbIxpk/XKj/zzDO67777tGLFCmVmZvbabuHChfL5fOFHVVXVQLoJAACOIFpqRpz9aZyRkSGHw9FtFqSurq7bbMnhVqxYoZtvvlnPPfecLrnkkiO2dbvdcrvd/ekaAAAYpvo1M+JyuVRcXKyysrKI42VlZZo+fXqv5z3zzDO68cYb9bvf/U6zZs0aWE8BAMCxYfE6Tb9mRiRpwYIFuv766zVlyhRNmzZNjzzyiCorKzVv3jxJnUssu3bt0m9/+1tJnUFkzpw5+uUvf6lzzz03PKsSHx8vr9c7hB8FAAD0R5Ss0vQ/jMyePVsNDQ1avHixampqVFRUpJUrV6qgoECSVFNTE7HnyK9//Wt1dHTo1ltv1a233ho+fsMNN+jJJ58c/CcAAACDYvWmZ/3e
Z8QK7DMCAMDQe/mDGs176j2dPWaEnpvXe7nFQB2TfUYAAMCJx+ppCcIIAAAxKzqqRggjAADEOKvrNQgjAADEqGjZ9IwwAgBAjLP6WhbCCAAAMSpKJkYIIwAAwFqEEQAAYhwFrAAAwBK2KKlgJYwAABDj2PQMAABYIjrmRQgjAADEPGpGAACAJaKkZIQwAgBAzGPTMwAAYAVmRgAAQFSgZgQAAFjCFiXX0xBGAACIcewzAgAArBEdEyOEEQAAYC3CCAAAMc5YXMJKGAEAIEZFySoNYQQAgFhHASsAALCELUp2PSOMAAAQ45gZAQAAloiOeRHCCAAAMY/t4AEAgCWipGSEMAIAQKwzFheNEEYAAIhR3CgPAABAhBEAAGAxwggAADGKAlYAABAV2PQMAABYIkomRggjAADEOmPxtmeEEQAAYlWUTI0QRgAAiHHUjAAAAEuw6RkAAIgK3CgPAABYgn1GAAAARBgBACDmcddeAABgiShZpSGMAAAQ6yhgBQAAlrBFSQUrYQQAgFjHpmcAAMAKUTIxQhgBACDWUTMCAAAsESUTI4QRAABiHfuMAAAAS1AzAgAAogI1IwAAwCLRMTVCGAEAAJYijAAAEOMsrl8ljAAAEKsoYAUAAFHBWFzCShgBACBGRcnECGEEAIBYR80IAACwhC1KikYIIwAAxDhmRgAAgCWiY16EMAIAACxGGAEAIEZFSckIYQQAAFhrQGFk2bJlKiwslMfjUXFxsVavXn3E9qtWrVJxcbE8Ho/Gjh2rhx9+eECdBQAAQ89YXMHa7zCyYsUKzZ8/X4sWLVJFRYVmzJihmTNnqrKyssf227dv1+WXX64ZM2aooqJC99xzj+644w49//zzg+48AAAYOFuUlLD2O4wsWbJEN998s+bOnasJEyZo6dKlys/P1/Lly3ts//DDD2v06NFaunSpJkyYoLlz5+qmm27Sz3/+80F3HgAADJ7FV/b2L4wEAgGVl5erpKQk4nhJSYnWrl3b4znr1q3r1v5LX/qS3n33XbW3t/d4jt/vV2NjY8QDAAAMra4C1hpfmz7Y5bOsH/0KI/X19QoGg8rKyoo4npWVpdra2h7Pqa2t7bF9R0eH6uvrezyntLRUXq83/MjPz+9PNwEAQB8kuZ3hP2+rb7asHwMqYD18+1hjzBG3lO2pfU/HuyxcuFA+ny/8qKqqGkg3AQDAEYzJSNR/zJ6sWy88SadkJlnWD+fRm3wuIyNDDoej2yxIXV1dt9mPLtnZ2T22dzqdSk9P7/Ect9stt9vdn64BAIAB+MqZeVZ3oX8zIy6XS8XFxSorK4s4XlZWpunTp/d4zrRp07q1f+WVVzRlyhTFxcX1s7sAAOBE0+9lmgULFujRRx/V448/rs2bN+uuu+5SZWWl5s2bJ6lziWXOnDnh9vPmzdOOHTu0YMECbd68WY8//rgee+wxfe973xu6TwEAAIatfi3TSNLs2bPV0NCgxYsXq6amRkVFRVq5cqUKCgokSTU1NRF7jhQWFmrlypW666679NBDDyk3N1cPPvigvva1rw3dpwAAAMOWzVi97VofNDY2yuv1yufzKSUlxeruAACAPujr9zf3pgEAAJYijAAAAEsRRgAAgKUIIwAAwFKEEQAAYCnCCAAAsBRhBAAAWIowAgAALEUYAQAAlur3dvBW6NoktrGx0eKeAACAvur63j7aZu/DIow0NTVJkvLz8y3uCQAA6K+mpiZ5vd5enx8W96YJhUKqrq5WcnKybDbbkL1uY2Oj8vPzVVVVxT1vjjHG+vhgnI8Pxvn4YJyPj2M5zsYYNTU1KTc3V3Z775Uhw2JmxG63Ky8v75i9fkpKCr/oxwljfXwwzscH43x8MM7Hx7Ea5yPNiHShgBUAAFiKMAIAACwV02HE7Xbr3nvvldvttrorJzzG+vhgnI8Pxvn4YJyPj2gY52FRwAoAAE5cMT0zAgAArEcYAQAAliKMAAAASxFGAACApWI6jCxbtkyFhYXyeDwqLi7W6tWrre7SsFFaWqqzzz5bycnJyszM1Je//GV9/PHHEW2MMbrvvvuUm5ur+Ph4ffGLX9SHH34Y0cbv9+v2229XRkaGEhMTddVVV2nnzp3H86MMK6WlpbLZbJo/f374GOM8dHbt2qVvfvObSk9PV0JCgs444wyVl5eHn2esB6+jo0P/8i//osLCQsXHx2vs2LFavHixQqFQuA3j3H9vvPGGrrzySuXm5spms+mPf/xjxPNDNab79u3T9ddfL6/XK6/Xq+uvv1779+8f/AcwMerZZ581cXFx5je/+Y3ZtGmTufPOO01iYqLZsWOH1V0bFr70pS+ZJ554wnzwwQdmw4YNZtasWWb06NHmwIED4TYPPPCASU5ONs8//7zZuHGjmT17tsnJyTGNjY3hNvPmzTOjRo0yZWVl5r333jMXXnihmTx5suno6LDiY0W19evXmzFjxphJkyaZO++8M3yccR4ae/fuNQUFBebGG280b7/9ttm+fbt59dVXzSeffBJuw1gP3v3332/S09PNn/70J7N9+3bz3HPPmaSkJLN06dJwG8a5/1auXGkWLVpknn/+eSPJ/OEPf4h4fqjG9LLLLjNFRUVm7dq1Zu3ataaoqMhcccUVg+5/zIaRqVOnmnnz5kUcGz9+vPnhD39oUY+Gt7q6OiPJrFq1yhhjTCgUMtnZ2eaBBx4It2lrazNer9c8/PDDxhhj9u/fb+Li4syzzz4bbrNr1y5jt9vNyy+/fHw/QJRramoyp5xyiikrKzMXXHBBOIwwzkPnBz/4gTn//PN7fZ6xHhqzZs0yN910U8Sxr371q+ab3/ymMYZxHgqHh5GhGtNNmzYZSeatt94Kt1m3bp2RZD766KNB9Tkml2kCgYDKy8tVUlIScbykpERr1661qFfDm8/nkySlpaVJkrZv367a2tqIMXa73brgggvCY1xeXq729vaINrm5uSoqKuLncJhbb71Vs2bN0iWXXBJxnHEeOi+++KKmTJmir3/968rMzNSZZ56p3/zmN+HnGeuhcf755+u1117Tli1bJEnvv/++1qxZo8svv1wS43wsDNWYrlu3Tl6vV+ecc064zbnnniuv1zvocR8WN8obavX19QoGg8rKyoo4npWVpdraWot6NXwZY7RgwQKdf/75KioqkqTwOPY0xjt27Ai3cblcGjFiRLc2/Bw+9+yzz+q9997TO++80+05xnnobNu2TcuXL9eCBQt0zz33aP369brjjjvkdrs1Z84cxnqI/OAHP5DP59P48ePlcDgUDAb1k5/8RNdcc40kfqePhaEa09raWmVmZnZ7/czMzEGPe0yGkS42my3i78aYbsdwdLfddpv+/ve/a82aNd2eG8gY83P4XFVVle6880698sor8ng8vbZjnAcvFAppypQp+ulPfypJOvPMM/Xhhx9q+fLlmjNnTrgdYz04K1as0FNPPaXf/e53mjhxojZs2KD58+crNzdXN9xwQ7gd4zz0hmJMe2o/FOMek8s0GRkZcjgc3ZJcXV1dt+SII7v99tv14osv6vXXX1deXl74eHZ2tiQ
dcYyzs7MVCAS0b9++XtvEuvLyctXV1am4uFhOp1NOp1OrVq3Sgw8+KKfTGR4nxnnwcnJydNppp0UcmzBhgiorKyXxOz1U7r77bv3whz/UN77xDZ1++um6/vrrddddd6m0tFQS43wsDNWYZmdna/fu3d1ef8+ePYMe95gMIy6XS8XFxSorK4s4XlZWpunTp1vUq+HFGKPbbrtNL7zwgv7617+qsLAw4vnCwkJlZ2dHjHEgENCqVavCY1xcXKy4uLiINjU1Nfrggw/4ORx08cUXa+PGjdqwYUP4MWXKFF133XXasGGDxo4dyzgPkfPOO6/b5elbtmxRQUGBJH6nh0pLS4vs9sivHofDEb60l3EeekM1ptOmTZPP59P69evDbd5++235fL7Bj/ugyl+Hsa5Lex977DGzadMmM3/+fJOYmGg+++wzq7s2LHznO98xXq/X/O1vfzM1NTXhR0tLS7jNAw88YLxer3nhhRfMxo0bzTXXXNPjpWR5eXnm1VdfNe+995656KKLYvryvL449GoaYxjnobJ+/XrjdDrNT37yE7N161bz9NNPm4SEBPPUU0+F2zDWg3fDDTeYUaNGhS/tfeGFF0xGRob5/ve/H27DOPdfU1OTqaioMBUVFUaSWbJkiamoqAhvVzFUY3rZZZeZSZMmmXXr1pl169aZ008/nUt7B+uhhx4yBQUFxuVymbPOOit8WSqOTlKPjyeeeCLcJhQKmXvvvddkZ2cbt9ttvvCFL5iNGzdGvE5ra6u57bbbTFpamomPjzdXXHGFqaysPM6fZng5PIwwzkPnpZdeMkVFRcbtdpvx48ebRx55JOJ5xnrwGhsbzZ133mlGjx5tPB6PGTt2rFm0aJHx+/3hNoxz/73++us9/jf5hhtuMMYM3Zg2NDSY6667ziQnJ5vk5GRz3XXXmX379g26/zZjjBnc3AoAAMDAxWTNCAAAiB6EEQAAYCnCCAAAsBRhBAAAWIowAgAALEUYAQAAliKMAAAASxFGAACApQgjAADAUoQRAABgKcIIAACwFGEEAABY6v8DXYHj/KCvDhYAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "from eval import data_removal_f1\n", + "from metrics import weighted_acc_drop\n", + "acc = data_removal_f1(dic_loaded, X_train_scaled, y_train_imbalanced, X_test_scaled, y_test_imbalanced)\n", + "plt.plot(range(len(acc)), acc)\n", + "res = weighted_acc_drop(acc)\n", + "print(\"The weighted accuracy drop is {:.3f}\".format(res))" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'loaders' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mNameError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[1;32mIn[19], line 1\u001b[0m\n\u001b[1;32m----> 1\u001b[0m train_indices \u001b[38;5;241m=\u001b[39m get_indices(loaders[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtrain\u001b[39m\u001b[38;5;124m'\u001b[39m])\n\u001b[0;32m 2\u001b[0m trained_with_flag \u001b[38;5;241m=\u001b[39m train_with_corrupt_flag(loaders[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtrain\u001b[39m\u001b[38;5;124m'\u001b[39m], shuffle_ind, train_indices)\n", + "\u001b[1;31mNameError\u001b[0m: name 'loaders' is not defined" + ] + } + ], + "source": [ + "train_indices = get_indices(dataloader['train'])\n", + "trained_with_flag = train_with_corrupt_flag(dataloader['train'], train_indices)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%matplotlib inline\n", + "calibrated_gradient = lava.compute_values_and_visualize(dual_sol, trained_with_flag, training_size, portion)" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(1000, 28, 28) (1000,)\n", + "(4000, 28, 28) (4000,)\n" + ] + } + ], + "source": [ + "import pickle\n", + "with open('balanced_train.pkl', 'rb') as f:\n", + " X_train_balanced, y_train_balanced = pickle.load(f)\n", + "with open('balanced_test.pkl', 'rb') as f:\n", + " X_test_balanced, y_test_balanced = pickle.load(f)\n", + "print(X_train_balanced.shape, y_train_balanced.shape)\n", + "print(X_test_balanced.shape, y_test_balanced.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "c:\\Users\\21520\\anaconda3\\Lib\\site-packages\\torchtext\\data\\__init__.py:4: UserWarning: \n", + "/!\\ IMPORTANT WARNING ABOUT TORCHTEXT STATUS /!\\ \n", + "Torchtext is deprecated and the last released version will be 0.18 (this one). You can silence this warning by calling the following at the beginnign of your scripts: `import torchtext; torchtext.disable_torchtext_deprecation_warning()`\n", + " warnings.warn(torchtext._TORCHTEXT_DEPRECATION_MSG)\n", + "c:\\Users\\21520\\PycharmProjects\\LAVA\\LAVA\\otdd\\pytorch\\utils.py:7: TqdmExperimentalWarning: Using `tqdm.autonotebook.tqdm` in notebook mode. Use `tqdm.tqdm` instead to force console mode (e.g. 
in jupyter console)\n", + " from tqdm.autonotebook import tqdm\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "True\n" + ] + } + ], + "source": [ + "import lava\n", + "import torch\n", + "print(torch.cuda.is_available()) # Should return True if GPU is available\n", + "import os\n", + "import torch.nn as nn\n", + "import torch.nn.functional as F\n", + "import torch.optim as optim\n", + "import torchvision.models as models\n", + "from torch.autograd import Variable\n", + "\n", + "import matplotlib.pyplot as plt\n", + "from torch import tensor\n", + "from torchvision import datasets, transforms\n", + "import pandas as pd\n", + "import numpy as n\n", + "\n", + "from torch.utils.data import Dataset, TensorDataset, DataLoader" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "X_tensor = torch.tensor(X_train_balanced, dtype=torch.float32)\n", + "y_tensor = torch.tensor(y_train_balanced, dtype=torch.long)\n", + "batch_size = 8\n", + "dataset = TensorDataset(X_tensor, y_tensor)\n", + "dataloader = {}\n", + "dataloader['train'] = DataLoader(dataset, batch_size=batch_size, shuffle=True)\n", + "X_tensor = torch.tensor(X_test_balanced, dtype= torch.float32)\n", + "y_tensor = torch.tensor(y_test_balanced, dtype = torch.long)\n", + "dataset = TensorDataset(X_tensor, y_tensor) \n", + "dataloader['test'] = DataLoader(dataset, batch_size=batch_size, shuffle=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "PreActResNet(\n", + " (conv1): Conv2d(3, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (layer1): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential()\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential()\n", + " )\n", + " )\n", + " (layer2): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(64, 128, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(128, 128, 
kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential()\n", + " )\n", + " )\n", + " (layer3): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(128, 256, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential()\n", + " )\n", + " )\n", + " (layer4): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(256, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential()\n", + " )\n", + " )\n", + " (linear): Linear(in_features=512, out_features=100, bias=True)\n", + ")" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import torch\n", + "import torch.nn as nn\n", + "import torch.nn.functional as F\n", + "from torchvision import transforms, datasets\n", + "from torch.utils.data import DataLoader, TensorDataset\n", + "import pickle\n", + "\n", + "# Định nghĩa mô hình PreActResNet18 như đã thực hiện trước đó\n", + "class PreActBlock(nn.Module):\n", + " expansion = 1\n", + "\n", + " def __init__(self, in_planes, planes, stride=1):\n", + " super(PreActBlock, self).__init__()\n", + " self.bn1 = nn.BatchNorm2d(in_planes)\n", + " self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)\n", + " self.bn2 = nn.BatchNorm2d(planes)\n", + " self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False)\n", + "\n", + " self.shortcut = nn.Sequential()\n", + " if stride != 1 or in_planes != self.expansion * planes:\n", + " self.shortcut = nn.Sequential(\n", + " nn.Conv2d(in_planes, self.expansion * planes, kernel_size=1, stride=stride, bias=False)\n", + " )\n", + "\n", + " def forward(self, x):\n", + " out = F.relu(self.bn1(x))\n", + " shortcut = self.shortcut(out)\n", + " out = self.conv1(out)\n", + " out = 
self.conv2(F.relu(self.bn2(out)))\n", + "        out += shortcut\n", + "        return out\n", + "\n", + "class PreActResNet(nn.Module):\n", + "    def __init__(self, block, num_blocks, num_classes=100):\n", + "        super(PreActResNet, self).__init__()\n", + "        self.in_planes = 64\n", + "\n", + "        self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False)\n", + "        self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)\n", + "        self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)\n", + "        self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)\n", + "        self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)\n", + "        self.linear = nn.Linear(512*block.expansion, num_classes)\n", + "\n", + "    def _make_layer(self, block, planes, num_blocks, stride):\n", + "        strides = [stride] + [1]*(num_blocks-1)\n", + "        layers = []\n", + "        for stride in strides:\n", + "            layers.append(block(self.in_planes, planes, stride))\n", + "            self.in_planes = planes * block.expansion\n", + "        return nn.Sequential(*layers)\n", + "\n", + "    def forward(self, x):\n", + "        out = self.conv1(x)\n", + "        out = self.layer1(out)\n", + "        out = self.layer2(out)\n", + "        out = self.layer3(out)\n", + "        out = self.layer4(out)\n", + "        out = F.avg_pool2d(out, 4)\n", + "        out = out.view(out.size(0), -1)\n", + "        out = self.linear(out)\n", + "        return out\n", + "\n", + "def PreActResNet18():\n", + "    return PreActResNet(PreActBlock, [2,2,2,2])\n", + "\n", + "# Initialize the model and load the pretrained MNIST checkpoint\n", + "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n", + "net_test = PreActResNet18().to(device)\n", + "feature_extractor_name = 'preact_resnet18_test_mnist.pth'\n", + "net_test.load_state_dict(torch.load('checkpoint/' + feature_extractor_name, map_location=torch.device('cpu')))\n", + "net_test.eval()\n" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0.18.0\n", + "2.3.0\n", + "Cuda device: 0\n", + "cuda devices: 1\n", + "cuda:0\n" + ] + } + ], + "source": [ + "embedder = net_test.to(device)\n", + "embedder.fc = torch.nn.Identity()  # note: this PreActResNet has no .fc attribute (its head is .linear), so this line does not strip the head; the 100-dim linear output is what serves as the embedding\n", + "for p in embedder.parameters():\n", + "    p.requires_grad = False\n", + "embedder.to(device)\n", + "cuda_num = 0\n", + "import torchvision\n", + "print(torchvision.__version__)\n", + "import torch\n", + "print(torch.__version__)\n", + "import os\n", + "#os.environ[\"CUDA_VISIBLE_DEVICES\"]=str(cuda_num)\n", + "#print(os.environ[\"CUDA_VISIBLE_DEVICES\"])\n", + "#torch.cuda.set_device(cuda_num)\n", + "print(\"Cuda device: \", torch.cuda.current_device())\n", + "print(\"cuda devices: \", torch.cuda.device_count())\n", + "device = torch.device('cuda:' + str(cuda_num) if torch.cuda.is_available() else 'cpu')\n", + "print(device)\n", + "training_size = 1000\n", + "valid_size = 200\n", + "resize = 32\n", + "portion = 0.3" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "from otdd.pytorch.distance_fast import DatasetDistance, FeatureCost, batch_augmented_cost\n", + "from otdd.pytorch.wasserstein import pwdist_exact\n", + "from functools import partial\n", + "from lava import train_with_corrupt_flag, get_indices, values, sort_and_keep_indices\n", + "resize = 28\n", + "feature_cost = FeatureCost(src_embedding = embedder,\n", + "                           src_dim = (1, resize, resize),\n", + "                           tgt_embedding = embedder,\n", + "                           tgt_dim = (1, resize, resize),\n", + "                           p = 2,\n", + "                           device='cuda')\n", + "dist = DatasetDistance(dataloader['train'], 
dataloader['test'],\n", + " inner_ot_method = 'exact',\n", + " debiased_loss = True,\n", + " feature_cost = feature_cost,\n", + " λ_x=1.0, λ_y=1.0,\n", + " sqrt_method = 'spectral',\n", + " sqrt_niters=10,\n", + " precision='single',\n", + " p = 2, entreg = 1e-1,\n", + " device='cuda')" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "8c8fbe7df2984b1ca9e7290b2920d1b8", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/125 [00:00 maxsamples:\n", + " idxs_1 = sorted(np.random.choice(\n", + " dist.X1.shape[0], maxsamples, replace=False))\n", + "else:\n", + " idxs_1 = np.s_[:] # hack to get a full slice\n", + "\n", + "if maxsamples and dist.X2.shape[0] > maxsamples:\n", + " idxs_2 = sorted(np.random.choice(\n", + " dist.X2.shape[0], maxsamples, replace=False))\n", + "else:\n", + " idxs_2 = np.s_[:] # hack to get a full slice\n", + "Z1 = torch.cat((dist.X1[idxs_1],\n", + " dist.Y1[idxs_1].type(dist.X1.dtype).unsqueeze(1)), -1)\n", + "Z2 = torch.cat((dist.X2[idxs_2],\n", + " dist.Y2[idxs_2].type(dist.X2.dtype).unsqueeze(1)), -1)\n", + "Z1 = Z1.to(device)\n", + "Z2 = Z2.to(device)\n", + "print(Z1.shape, Z2.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([1000, 785]) torch.Size([4000, 785])\n", + "Z1 shape in batch: torch.Size([1, 1000, 785])\n", + "Z2 shape in batch: torch.Size([1, 4000, 785])\n", + "1 1000 784\n", + "torch.Size([1, 1000, 100])\n", + "1 4000 784\n", + "torch.Size([1, 4000, 100])\n", + "torch.Size([1, 1000, 4000])\n", + "torch.Size([1, 1000, 4000])\n", + "Gia tri M: tensor([[[6, 6, 7, ..., 7, 7, 7],\n", + " [2, 2, 3, ..., 3, 3, 3],\n", + " [6, 6, 7, ..., 7, 7, 7],\n", + " ...,\n", + " [6, 6, 7, ..., 7, 7, 7],\n", + " [2, 2, 3, ..., 3, 3, 3],\n", + " [6, 6, 7, ..., 7, 7, 7]]], device='cuda:0')\n", + "torch.Size([1, 1000, 4000])\n", + "torch.Size([1, 1000, 4000])\n", + "gia tri D: tensor([[[8.1817e+10, 2.5932e+12, 2.9280e+13, ..., 1.6112e+13,\n", + " 1.1389e+13, 1.6589e+13],\n", + " [9.0699e+12, 1.2609e+12, 7.1840e+12, ..., 1.6479e+12,\n", + " 4.1574e+11, 1.8018e+12],\n", + " [2.9469e+12, 3.3708e+10, 1.5806e+13, ..., 6.6494e+12,\n", + " 3.7614e+12, 6.9566e+12],\n", + " ...,\n", + " [3.2776e+13, 1.4712e+13, 2.0984e+09, ..., 2.0478e+12,\n", + " 4.2854e+12, 1.8837e+12],\n", + " [5.3718e+12, 1.8541e+11, 1.1385e+13, ..., 3.9096e+12,\n", + " 1.7906e+12, 4.1460e+12],\n", + " [3.3070e+12, 8.7188e+09, 1.5006e+13, ..., 6.1343e+12,\n", + " 3.3765e+12, 6.4294e+12]]], device='cuda:0')\n", + "torch.Size([1, 1000, 4000])\n", + "Z1 shape in batch: torch.Size([1, 4000, 785])\n", + "Z2 shape in batch: torch.Size([1, 1000, 785])\n", + "1 4000 784\n", + "torch.Size([1, 4000, 100])\n", + "1 1000 784\n", + "torch.Size([1, 1000, 100])\n", + "torch.Size([1, 4000, 1000])\n", + "torch.Size([1, 4000, 1000])\n", + "Gia tri M: tensor([[[ 9, 8, 9, ..., 9, 8, 9],\n", + " [ 9, 8, 9, ..., 9, 8, 9],\n", + " [13, 12, 13, ..., 13, 12, 13],\n", + " ...,\n", + " [13, 12, 13, ..., 13, 12, 13],\n", + " [13, 12, 13, ..., 13, 12, 13],\n", + " [13, 12, 13, ..., 13, 12, 13]]], device='cuda:0')\n", + "torch.Size([1, 4000, 1000])\n", + "torch.Size([1, 4000, 1000])\n", + "gia tri D: tensor([[[8.1817e+10, 9.0699e+12, 2.9469e+12, ..., 3.2776e+13,\n", + " 5.3718e+12, 3.3070e+12],\n", + " [2.5932e+12, 1.2609e+12, 3.3708e+10, ..., 1.4712e+13,\n", + " 
1.8541e+11, 8.7188e+09],\n", + " [2.9280e+13, 7.1840e+12, 1.5806e+13, ..., 2.0984e+09,\n", + " 1.1385e+13, 1.5006e+13],\n", + " ...,\n", + " [1.6112e+13, 1.6479e+12, 6.6494e+12, ..., 2.0478e+12,\n", + " 3.9096e+12, 6.1343e+12],\n", + " [1.1389e+13, 4.1574e+11, 3.7614e+12, ..., 4.2854e+12,\n", + " 1.7906e+12, 3.3765e+12],\n", + " [1.6589e+13, 1.8018e+12, 6.9566e+12, ..., 1.8837e+12,\n", + " 4.1460e+12, 6.4294e+12]]], device='cuda:0')\n", + "torch.Size([1, 4000, 1000])\n" + ] + } + ], + "source": [ + "with torch.no_grad():\n", + " loss.debias = False\n", + " loss.potentials = True\n", + " print(Z1.shape, Z2.shape)\n", + " F_i, G_j = loss(Z1, Z2)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "π = [F_i, G_j]\n", + "dual_sol = π\n", + "for i in range(len(dual_sol)):\n", + " dual_sol[i] = dual_sol[i].to('cpu')\n", + "calibrated_gradient = values(dual_sol, training_size)\n", + "with open('calibrated_gradient.pkl', 'wb') as f:\n", + " pickle.dump(calibrated_gradient, f)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[-105436540.0, 25310208.0, -153372000.0, 11471168.0, 206820480.0, 55940290.0, -161260160.0, 34693250.0, 10488832.0, 1445568.0, -142162110.0, -104090620.0, -9019072.0, 35934464.0, 27531392.0, -186393250.0, -124985380.0, 158693310.0, 77251580.0, 87436930.0, -128984510.0, -10882752.0, 83981440.0, -61718910.0, 70794430.0, 139254720.0, -127395710.0, 123487420.0, 28170496.0, -196940260.0, -79057860.0, 47008000.0, 13428416.0, 104150460.0, 51729470.0, 43572736.0, 40529790.0, -110372480.0, -177694200.0, -188921500.0, 25147648.0, 389410370.0, -187263740.0, -157860900.0, 33210368.0, 12548672.0, -179210720.0, 20706112.0, 52150144.0, -170180510.0, 33033600.0, -113388670.0, -62483200.0, 222385150.0, -78486590.0, -145819100.0, -181676740.0, -152524640.0, -150749380.0, 39723390.0, 377143870.0, -126252990.0, 919648400.0, -126964420.0, 48309056.0, 143271800.0, 130538560.0, 20231488.0, 27271552.0, -131344800.0, -125991940.0, 21103808.0, -160133380.0, 69509440.0, -150001700.0, -162275170.0, 471608830.0, -141778240.0, 77563070.0, -153614050.0, 42698690.0, -169892800.0, 42761280.0, -128875740.0, -188234620.0, 359141700.0, 105145980.0, 110170880.0, -181409920.0, -146193980.0, -142682530.0, 183327550.0, -123235230.0, -123979390.0, -128961280.0, -129695260.0, -159095650.0, -92901250.0, 92939710.0, 33198848.0, -122026430.0, 23838976.0, 58595520.0, 10195456.0, 15513664.0, -31581888.0, 31589504.0, 229996930.0, -155986560.0, 47885824.0, 12333440.0, -200388450.0, 152447490.0, 298914560.0, 259317310.0, -113662210.0, 85856700.0, -173729760.0, 19407360.0, -149080900.0, -180593800.0, -112647230.0, -123800260.0, -139598620.0, 1429824.0, 111904380.0, -138749310.0, 135477120.0, 40303550.0, -175006500.0, 17752320.0, 34700736.0, -176519620.0, 41657280.0, 128411650.0, 104448260.0, 57668670.0, 199019140.0, 28866304.0, 42668990.0, 34007360.0, -187329950.0, 62934016.0, -164499460.0, 77617540.0, 187762370.0, 35830336.0, 10875776.0, 27483328.0, -120361570.0, 71918980.0, -179685400.0, 18214464.0, 13789248.0, 24159424.0, -183254880.0, -36544000.0, -123284670.0, 95640700.0, -145846530.0, 29756416.0, 1502976.0, -179623400.0, -18647488.0, -151561540.0, -122196670.0, -163186500.0, -146231650.0, 26124032.0, 158798660.0, 381835070.0, -185279800.0, -164436320.0, 13841664.0, -108295620.0, 73824320.0, -101751170.0, 82816190.0, -97322750.0, -167615230.0, 
90319620.0, 88899330.0, 7820032.0, 64293630.0, 102976320.0, -177478430.0, -142107490.0, -165140600.0, -164748130.0, 98879810.0, -164922820.0, 57625920.0, -141808900.0, 246292220.0, -151043970.0, -102149630.0, -179980930.0, -147475940.0, 65973824.0, -135993950.0, 291801280.0, 79646460.0, -138815840.0, 152325180.0, -167697950.0, 54243264.0, 81447620.0, 8842176.0, -167793120.0, 22382016.0, -116742460.0, 85556030.0, -150783100.0, -166977250.0, 34643710.0, -166648900.0, 2957312.0, 120129090.0, 3603904.0, 29692480.0, -85495940.0, 113421700.0, 20701696.0, -161681060.0, -131893150.0, 393472200.0, -179139680.0, 16169920.0, 65806144.0, -134623000.0, 8837696.0, -81313470.0, -113467650.0, -166130530.0, 29148608.0, -93080320.0, -146137090.0, -148509090.0, 18745600.0, 255080130.0, 13902336.0, -163407900.0, 47625856.0, 19824576.0, -146740380.0, -154847000.0, 583571840.0, 71235710.0, 42275970.0, 48863550.0, -131470780.0, 80499070.0, -145859070.0, 16077440.0, 75109700.0, -162238460.0, -1083328.0, 25645056.0, -129573120.0, -15313280.0, -179404600.0, -159118780.0, 82954180.0, 119396160.0, -151362850.0, 402850000.0, 109045380.0, 27849664.0, 15672192.0, -110697410.0, 16786944.0, 40853310.0, 76939970.0, -167076800.0, -169572160.0, -122637890.0, -157292960.0, -152183300.0, 190012930.0, 173469890.0, 4731456.0, 12204736.0, 75019840.0, 9315712.0, -117480740.0, -81100160.0, 237856640.0, 77414590.0, -159898430.0, -127137760.0, 29112960.0, -181006400.0, -104499970.0, -157638820.0, -114703420.0, -88998530.0, 139807230.0, 254174780.0, 206580220.0, 81337540.0, 29211968.0, 31721024.0, -136136450.0, -127150300.0, -158066500.0, 40304960.0, 22457216.0, -174945440.0, -157490690.0, 274029950.0, -142837280.0, -3583744.0, -166956960.0, -157959580.0, 181936380.0, -167267840.0, 82924990.0, 7892736.0, 71931070.0, -163583040.0, -149557150.0, 14579072.0, -123265790.0, -141898880.0, -139194460.0, 1361434100.0, -142858200.0, 27949312.0, -163410880.0, 573303200.0, -159615940.0, -64343104.0, -196114240.0, 254202370.0, 1192154400.0, -138015330.0, -151761660.0, 271410800.0, -171391420.0, -139394050.0, -146748960.0, -181564860.0, 29978496.0, 535015040.0, -194384580.0, 51789950.0, -148554690.0, 26894144.0, -135309470.0, -164677250.0, -173124130.0, -132806210.0, 19766144.0, 36366400.0, 26847808.0, -104739900.0, 52110464.0, 48994750.0, -138414880.0, -157275040.0, -167283170.0, -3507328.0, 63614336.0, -42603970.0, -157432960.0, 190520130.0, 111832190.0, -148381470.0, -102007360.0, 41840256.0, 114253950.0, -196465920.0, 56752510.0, 104133060.0, -148942660.0, 36387136.0, -142187230.0, 25324224.0, 5471552.0, -135803400.0, -82326210.0, 587190400.0, -120827780.0, 79996990.0, -61014656.0, -115503550.0, -148580320.0, 76146180.0, -160926530.0, 32427264.0, -8885568.0, 114829700.0, 37811390.0, 89331710.0, 30541568.0, 13457984.0, 129196540.0, 96979390.0, -139689250.0, -167032770.0, -170588830.0, -160275780.0, -157447900.0, -120307740.0, 146358780.0, -142175040.0, 28813696.0, -175810050.0, 73236930.0, -166610080.0, -134066910.0, 94720450.0, 24654464.0, -149907360.0, -133048320.0, -135240060.0, -13811648.0, 41401344.0, 61702720.0, -136084130.0, 56505470.0, -156726140.0, -155232640.0, -157322600.0, -153192100.0, 160014140.0, 12652480.0, -161461120.0, 435132160.0, 134709630.0, -181603900.0, 54689150.0, 29144256.0, 182421180.0, -142488030.0, 75102140.0, 36126400.0, -140461800.0, 48793790.0, -104576900.0, -127452450.0, 68271740.0, 131004930.0, 9395840.0, 284226370.0, 12666176.0, -123474370.0, 40691776.0, 181253380.0, -150378560.0, 89782910.0, 47234750.0, 
55221310.0, -182427580.0, 50538176.0, 44105024.0, 208187460.0, -113251780.0, -136159360.0, 47463744.0, -106456700.0, 206900860.0, -151275800.0, -125987140.0, 35482176.0, -151661540.0, -166703200.0, -82217220.0, -163146530.0, 216782270.0, 260055550.0, 6918848.0, -126823040.0, -5037056.0, 13935040.0, 47277890.0, 41433664.0, 24461120.0, 181642180.0, 112168320.0, -114246530.0, 26177856.0, 94848960.0, 10621632.0, 36205504.0, -139866600.0, -102865660.0, 54464.0, -150853820.0, -157298400.0, 14741824.0, 106525890.0, 30983552.0, -167313150.0, -127361630.0, 119404290.0, 26044992.0, 32830080.0, -27947520.0, 154886530.0, 59532544.0, -187872030.0, -70905020.0, -162493500.0, -9198400.0, -129511970.0, 441704580.0, -140206880.0, 45134976.0, -158956290.0, -148620580.0, -166675680.0, 309764860.0, 23177408.0, 39023040.0, -158148350.0, -123036700.0, 28129536.0, -102584960.0, -150355070.0, -101154370.0, -184246020.0, 37671490.0, -151297000.0, 125284290.0, 81020220.0, -171555170.0, 302172540.0, 13414464.0, -72741250.0, 194992830.0, 39678336.0, 228550780.0, 269212540.0, 53658240.0, -129973440.0, 20614848.0, 87077500.0, -164944000.0, 19043520.0, -131614110.0, 42186880.0, -164155360.0, -165734050.0, 176057860.0, 170905280.0, -135682940.0, 413354000.0, 138389380.0, 42277310.0, -146732100.0, -129083740.0, -184217820.0, 300880830.0, -136632060.0, -10462272.0, -114176130.0, 332948160.0, 45641410.0, 118537920.0, 648020860.0, -131701950.0, 282598980.0, -145057600.0, -1680256.0, -161299680.0, -166706780.0, 92227580.0, 32790272.0, -276160.0, 469521920.0, -172256600.0, -7244928.0, -175078020.0, -70436930.0, 67448960.0, 222217980.0, -9103936.0, -181015400.0, -177534300.0, 7202432.0, -97817790.0, -160501250.0, -70135940.0, -86245440.0, 21449856.0, 163574270.0, -154112380.0, -67352260.0, 79834180.0, -149403520.0, -160743200.0, -62941376.0, -61049536.0, -174346370.0, -171447100.0, -173961150.0, 11864576.0, 16953472.0, 125604800.0, 220013700.0, 194900860.0, 95823490.0, 93310020.0, -99526910.0, 94077060.0, 15495872.0, -115680770.0, 72143870.0, 118061310.0, -149453820.0, -80953280.0, 56078400.0, 85120060.0, -187627710.0, -122700900.0, 19746048.0, 120101760.0, -129156700.0, -169380380.0, -98068160.0, -178026560.0, -94785090.0, -7319936.0, 42707584.0, 6079616.0, -141165980.0, -89817600.0, -108735170.0, -175204510.0, 95262720.0, 25536384.0, 62789504.0, 612968800.0, 312339400.0, -160542460.0, 33838210.0, -163195800.0, 302970300.0, 176153730.0, 110369730.0, 27444608.0, 552971000.0, -110344450.0, 418872130.0, -120589440.0, -105468480.0, -159461310.0, 61962496.0, 4322112.0, -95354560.0, -174898020.0, -172785150.0, -94755260.0, -147118110.0, -138030850.0, 236040260.0, 20823936.0, -183558050.0, 12200640.0, -136523550.0, -168611550.0, -134605540.0, -176429400.0, -171105180.0, 26802112.0, 75059780.0, -169097730.0, 29900224.0, 15380416.0, -163183260.0, -3563072.0, 593849200.0, -12309184.0, -172586850.0, -156572960.0, 87021700.0, 85761340.0, -153379680.0, -121192670.0, -157142270.0, -164564130.0, 88192260.0, -56039616.0, -157069000.0, -137423100.0, 24342848.0, 151329220.0, 7416444000.0, 180343680.0, 65007870.0, 5647680.0, 16360192.0, -184791680.0, 57953470.0, -18851456.0, 15447872.0, -113035580.0, 583976700.0, -113598210.0, -166657000.0, 48474430.0, 46182080.0, -131012930.0, 33858176.0, -186642720.0, 73466750.0, 65240704.0, 186084860.0, 54263360.0, -132427740.0, -144956640.0, -139105280.0, -147831710.0, 35351616.0, 217233090.0, 162277440.0, -93778750.0, -8457536.0, -140756380.0, 155512380.0, -106070140.0, -153653760.0, -172019230.0, 
111210690.0, 64943870.0, -117642660.0, 264403400.0, 60374910.0, -171123620.0, -42832960.0, 65339520.0, 45359170.0, 105630140.0, 35522370.0, 82380350.0, -145192600.0, 119052100.0, 31904704.0, 22238528.0, 82767620.0, -157371870.0, -118062750.0, 229934780.0, -62605376.0, -5312768.0, -155275260.0, -144662180.0, 100494400.0, 397199170.0, 27036864.0, 384530240.0, 72132160.0, -111069820.0, -172639100.0, 33914110.0, 24125568.0, -183403580.0, -127236420.0, 22430784.0, -84256260.0, -109615100.0, -147052960.0, 13408320.0, -94718340.0, -149915940.0, 20614848.0, -75934720.0, -138632800.0, 4382592.0, 57995650.0, -142490750.0, -161317730.0, -10952576.0, 57242240.0, 194401730.0, 212608.0, 36948990.0, 37330944.0, 87272260.0, 35533760.0, 37083330.0, 383166200.0, 159677120.0, -145186910.0, 1154944.0, -182838500.0, 12810176.0, 407853630.0, -106964290.0, -7035456.0, -150844200.0, -94222530.0, 693147800.0, -162462850.0, 157739520.0, 31074240.0, -141286690.0, 100305790.0, -126503230.0, 47217856.0, 207988600.0, -150474050.0, 334077000.0, 80324610.0, 417549630.0, -146822180.0, -102370690.0, 435987700.0, -180058430.0, -187483400.0, -165042050.0, -168173660.0, 74717950.0, -146260100.0, 12693632.0, -158667360.0, -70242110.0, 95687870.0, -182065310.0, 6802624.0, 150674240.0, -156284510.0, -195655360.0, 117355780.0, 92711040.0, 57247936.0, -184487460.0, 28071104.0, 58995070.0, 50180800.0, -86725250.0, -171461730.0, 508673540.0, 5368000.0, 49009470.0, -136430270.0, -161994460.0, 18253568.0, 16272384.0, 9867968.0, 33275328.0, 15230592.0, 72636290.0, -176323740.0, 30820416.0, -147241120.0, 47960830.0, -154375140.0, -162193440.0, -161411680.0, -156969980.0, -64491456.0, 976954600.0, 240256580.0, -84653570.0, 17145472.0, -148150340.0, -129646240.0, -156502940.0, 75274880.0, 40597056.0, 415857540.0, -158474980.0, -104335620.0, -68464380.0, 168742460.0, -171810560.0, -184379620.0, -175411550.0, 174730430.0, -146330850.0, -172538240.0, -186134270.0, 152915840.0, -128964160.0, -158427680.0, -155430240.0, -154407260.0, 18076096.0, 27633472.0, 990569600.0, -138224380.0, -99988930.0, -168989020.0, 20126528.0, -134318460.0, -53531650.0, -169030530.0, -137066110.0, 13056320.0, -116234820.0, 213904830.0, -148829700.0, 78830270.0, 83324860.0, -122018240.0, -158642400.0, 88980420.0, -118283550.0, 381857470.0, 181611650.0, -155633700.0, 350674300.0, -165853700.0, -169369660.0, 41267070.0, -161504860.0, -125857470.0, 45834560.0, -159892420.0, 38794880.0, 153371400.0, 24458880.0, 163215680.0, -2237504.0, 72383420.0, 41773440.0, -88613060.0, 1998528.0, -21368576.0, -184759000.0, 79550720.0, -186660510.0, -160116960.0, -125802300.0, 38955010.0, 219538050.0, 6836480.0, 39428160.0, 37924670.0, -134457380.0, -94544130.0, 184303550.0, 427278340.0, -131396190.0, -168339800.0, -72398590.0, 36213250.0, -174477920.0, 6703232.0, -118244190.0, -147436320.0, -175172030.0, 90930240.0, -87699900.0, -137057860.0, 81356990.0, 26318464.0, 334902080.0, -145697800.0, 53075710.0, -144975040.0, -128048960.0, -127546080.0, 322528500.0, -180030240.0, 6855936.0, 204020100.0, 27333568.0, -168003000.0, 22575104.0, 68607300.0, -166790240.0, -13429952.0, 34976256.0, 178943300.0, -151017730.0, -152262200.0, -157851360.0, 41008770.0, -124287840.0, 31738816.0, 77604480.0, 100084420.0, -170965090.0, -128589860.0, 35584704.0, 42443776.0, -80297600.0, -101072320.0, -25139008.0, 643953000.0, 404180600.0, 191040.0, -114964290.0, 24350464.0, 7891584.0, 111036610.0, 70855100.0, -145057000.0, -168113180.0, 30126272.0, -155948640.0, -128049920.0, -150456220.0, -157291420.0, 
-131550980.0, 55829250.0, -114528160.0]\n" + ] + } + ], + "source": [ + "with open('calibrated_gradient.pkl', 'rb') as f:\n", + " loaded = pickle.load(f)\n", + "print(loaded)" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "# Trích xuất đặc trưng từ các tập dữ liệu\n", + "# Hàm để trích xuất đặc trưng từ X\n", + "def extract_features(net, X):\n", + " X = torch.tensor(X).float().unsqueeze(1) # Thêm chiều để có kích thước [N, 1, 28, 28]\n", + " X = X.repeat(1, 3, 1, 1).to(device) # Chuyển thành [N, 3, 28, 28]\n", + " with torch.no_grad():\n", + " features = net(X)\n", + " return features.cpu().numpy()\n", + "X_train_features = extract_features(net_test, X_train_balanced)\n", + "X_test_features = extract_features(net_test, X_test_balanced)\n", + "\n", + "# Lưu các đặc trưng đã trích xuất\n", + "with open('balanced_train_features.pkl', 'wb') as f:\n", + " pickle.dump((X_train_features, y_train_balanced), f)\n", + "with open('balanced_test_features.pkl', 'wb') as f:\n", + " pickle.dump((X_test_features, y_test_balanced), f)" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "X_train_features_loaded shape: (1000, 100)\n", + "y_train_loaded shape: (1000,)\n", + "X_test_features_loaded shape: (4000, 100)\n", + "y_test_loaded shape: (4000,)\n" + ] + } + ], + "source": [ + "import pickle\n", + "\n", + "# Load lại các đặc trưng đã lưu cho tập huấn luyện\n", + "with open('balanced_train_features.pkl', 'rb') as f:\n", + " X_train_features_loaded, y_train_loaded = pickle.load(f)\n", + "\n", + "\n", + "# Load lại các đặc trưng đã lưu cho tập kiểm tra (test)\n", + "with open('balanced_test_features.pkl', 'rb') as f:\n", + " X_test_features_loaded, y_test_loaded = pickle.load(f)\n", + "\n", + "# Kiểm tra kích thước để đảm bảo rằng dữ liệu đã được tải đúng\n", + "print(f\"X_train_features_loaded shape: {X_train_features_loaded.shape}\")\n", + "print(f\"y_train_loaded shape: {y_train_loaded.shape}\")\n", + "print(f\"X_test_features_loaded shape: {X_test_features_loaded.shape}\")\n", + "print(f\"y_test_loaded shape: {y_test_loaded.shape}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [], + "source": [ + "from sklearn.preprocessing import StandardScaler\n", + "\n", + "scaler = StandardScaler()\n", + "X_train_scaled = scaler.fit_transform(X_train_features_loaded)\n", + "X_test_scaled = scaler.transform(X_test_features_loaded)" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{0: -105436540.0, 1: 25310208.0, 2: -153372000.0, 3: 11471168.0, 4: 206820480.0, 5: 55940290.0, 6: -161260160.0, 7: 34693250.0, 8: 10488832.0, 9: 1445568.0, 10: -142162110.0, 11: -104090620.0, 12: -9019072.0, 13: 35934464.0, 14: 27531392.0, 15: -186393250.0, 16: -124985380.0, 17: 158693310.0, 18: 77251580.0, 19: 87436930.0, 20: -128984510.0, 21: -10882752.0, 22: 83981440.0, 23: -61718910.0, 24: 70794430.0, 25: 139254720.0, 26: -127395710.0, 27: 123487420.0, 28: 28170496.0, 29: -196940260.0, 30: -79057860.0, 31: 47008000.0, 32: 13428416.0, 33: 104150460.0, 34: 51729470.0, 35: 43572736.0, 36: 40529790.0, 37: -110372480.0, 38: -177694200.0, 39: -188921500.0, 40: 25147648.0, 41: 389410370.0, 42: -187263740.0, 43: -157860900.0, 44: 33210368.0, 45: 12548672.0, 46: -179210720.0, 47: 20706112.0, 48: 52150144.0, 49: 
-170180510.0, 50: 33033600.0, 51: -113388670.0, 52: -62483200.0, 53: 222385150.0, 54: -78486590.0, 55: -145819100.0, 56: -181676740.0, 57: -152524640.0, 58: -150749380.0, 59: 39723390.0, 60: 377143870.0, 61: -126252990.0, 62: 919648400.0, 63: -126964420.0, 64: 48309056.0, 65: 143271800.0, 66: 130538560.0, 67: 20231488.0, 68: 27271552.0, 69: -131344800.0, 70: -125991940.0, 71: 21103808.0, 72: -160133380.0, 73: 69509440.0, 74: -150001700.0, 75: -162275170.0, 76: 471608830.0, 77: -141778240.0, 78: 77563070.0, 79: -153614050.0, 80: 42698690.0, 81: -169892800.0, 82: 42761280.0, 83: -128875740.0, 84: -188234620.0, 85: 359141700.0, 86: 105145980.0, 87: 110170880.0, 88: -181409920.0, 89: -146193980.0, 90: -142682530.0, 91: 183327550.0, 92: -123235230.0, 93: -123979390.0, 94: -128961280.0, 95: -129695260.0, 96: -159095650.0, 97: -92901250.0, 98: 92939710.0, 99: 33198848.0, 100: -122026430.0, 101: 23838976.0, 102: 58595520.0, 103: 10195456.0, 104: 15513664.0, 105: -31581888.0, 106: 31589504.0, 107: 229996930.0, 108: -155986560.0, 109: 47885824.0, 110: 12333440.0, 111: -200388450.0, 112: 152447490.0, 113: 298914560.0, 114: 259317310.0, 115: -113662210.0, 116: 85856700.0, 117: -173729760.0, 118: 19407360.0, 119: -149080900.0, 120: -180593800.0, 121: -112647230.0, 122: -123800260.0, 123: -139598620.0, 124: 1429824.0, 125: 111904380.0, 126: -138749310.0, 127: 135477120.0, 128: 40303550.0, 129: -175006500.0, 130: 17752320.0, 131: 34700736.0, 132: -176519620.0, 133: 41657280.0, 134: 128411650.0, 135: 104448260.0, 136: 57668670.0, 137: 199019140.0, 138: 28866304.0, 139: 42668990.0, 140: 34007360.0, 141: -187329950.0, 142: 62934016.0, 143: -164499460.0, 144: 77617540.0, 145: 187762370.0, 146: 35830336.0, 147: 10875776.0, 148: 27483328.0, 149: -120361570.0, 150: 71918980.0, 151: -179685400.0, 152: 18214464.0, 153: 13789248.0, 154: 24159424.0, 155: -183254880.0, 156: -36544000.0, 157: -123284670.0, 158: 95640700.0, 159: -145846530.0, 160: 29756416.0, 161: 1502976.0, 162: -179623400.0, 163: -18647488.0, 164: -151561540.0, 165: -122196670.0, 166: -163186500.0, 167: -146231650.0, 168: 26124032.0, 169: 158798660.0, 170: 381835070.0, 171: -185279800.0, 172: -164436320.0, 173: 13841664.0, 174: -108295620.0, 175: 73824320.0, 176: -101751170.0, 177: 82816190.0, 178: -97322750.0, 179: -167615230.0, 180: 90319620.0, 181: 88899330.0, 182: 7820032.0, 183: 64293630.0, 184: 102976320.0, 185: -177478430.0, 186: -142107490.0, 187: -165140600.0, 188: -164748130.0, 189: 98879810.0, 190: -164922820.0, 191: 57625920.0, 192: -141808900.0, 193: 246292220.0, 194: -151043970.0, 195: -102149630.0, 196: -179980930.0, 197: -147475940.0, 198: 65973824.0, 199: -135993950.0, 200: 291801280.0, 201: 79646460.0, 202: -138815840.0, 203: 152325180.0, 204: -167697950.0, 205: 54243264.0, 206: 81447620.0, 207: 8842176.0, 208: -167793120.0, 209: 22382016.0, 210: -116742460.0, 211: 85556030.0, 212: -150783100.0, 213: -166977250.0, 214: 34643710.0, 215: -166648900.0, 216: 2957312.0, 217: 120129090.0, 218: 3603904.0, 219: 29692480.0, 220: -85495940.0, 221: 113421700.0, 222: 20701696.0, 223: -161681060.0, 224: -131893150.0, 225: 393472200.0, 226: -179139680.0, 227: 16169920.0, 228: 65806144.0, 229: -134623000.0, 230: 8837696.0, 231: -81313470.0, 232: -113467650.0, 233: -166130530.0, 234: 29148608.0, 235: -93080320.0, 236: -146137090.0, 237: -148509090.0, 238: 18745600.0, 239: 255080130.0, 240: 13902336.0, 241: -163407900.0, 242: 47625856.0, 243: 19824576.0, 244: -146740380.0, 245: -154847000.0, 246: 583571840.0, 247: 71235710.0, 248: 42275970.0, 249: 
48863550.0, 250: -131470780.0, 251: 80499070.0, 252: -145859070.0, 253: 16077440.0, 254: 75109700.0, 255: -162238460.0, 256: -1083328.0, 257: 25645056.0, 258: -129573120.0, 259: -15313280.0, 260: -179404600.0, 261: -159118780.0, 262: 82954180.0, 263: 119396160.0, 264: -151362850.0, 265: 402850000.0, 266: 109045380.0, 267: 27849664.0, 268: 15672192.0, 269: -110697410.0, 270: 16786944.0, 271: 40853310.0, 272: 76939970.0, 273: -167076800.0, 274: -169572160.0, 275: -122637890.0, 276: -157292960.0, 277: -152183300.0, 278: 190012930.0, 279: 173469890.0, 280: 4731456.0, 281: 12204736.0, 282: 75019840.0, 283: 9315712.0, 284: -117480740.0, 285: -81100160.0, 286: 237856640.0, 287: 77414590.0, 288: -159898430.0, 289: -127137760.0, 290: 29112960.0, 291: -181006400.0, 292: -104499970.0, 293: -157638820.0, 294: -114703420.0, 295: -88998530.0, 296: 139807230.0, 297: 254174780.0, 298: 206580220.0, 299: 81337540.0, 300: 29211968.0, 301: 31721024.0, 302: -136136450.0, 303: -127150300.0, 304: -158066500.0, 305: 40304960.0, 306: 22457216.0, 307: -174945440.0, 308: -157490690.0, 309: 274029950.0, 310: -142837280.0, 311: -3583744.0, 312: -166956960.0, 313: -157959580.0, 314: 181936380.0, 315: -167267840.0, 316: 82924990.0, 317: 7892736.0, 318: 71931070.0, 319: -163583040.0, 320: -149557150.0, 321: 14579072.0, 322: -123265790.0, 323: -141898880.0, 324: -139194460.0, 325: 1361434100.0, 326: -142858200.0, 327: 27949312.0, 328: -163410880.0, 329: 573303200.0, 330: -159615940.0, 331: -64343104.0, 332: -196114240.0, 333: 254202370.0, 334: 1192154400.0, 335: -138015330.0, 336: -151761660.0, 337: 271410800.0, 338: -171391420.0, 339: -139394050.0, 340: -146748960.0, 341: -181564860.0, 342: 29978496.0, 343: 535015040.0, 344: -194384580.0, 345: 51789950.0, 346: -148554690.0, 347: 26894144.0, 348: -135309470.0, 349: -164677250.0, 350: -173124130.0, 351: -132806210.0, 352: 19766144.0, 353: 36366400.0, 354: 26847808.0, 355: -104739900.0, 356: 52110464.0, 357: 48994750.0, 358: -138414880.0, 359: -157275040.0, 360: -167283170.0, 361: -3507328.0, 362: 63614336.0, 363: -42603970.0, 364: -157432960.0, 365: 190520130.0, 366: 111832190.0, 367: -148381470.0, 368: -102007360.0, 369: 41840256.0, 370: 114253950.0, 371: -196465920.0, 372: 56752510.0, 373: 104133060.0, 374: -148942660.0, 375: 36387136.0, 376: -142187230.0, 377: 25324224.0, 378: 5471552.0, 379: -135803400.0, 380: -82326210.0, 381: 587190400.0, 382: -120827780.0, 383: 79996990.0, 384: -61014656.0, 385: -115503550.0, 386: -148580320.0, 387: 76146180.0, 388: -160926530.0, 389: 32427264.0, 390: -8885568.0, 391: 114829700.0, 392: 37811390.0, 393: 89331710.0, 394: 30541568.0, 395: 13457984.0, 396: 129196540.0, 397: 96979390.0, 398: -139689250.0, 399: -167032770.0, 400: -170588830.0, 401: -160275780.0, 402: -157447900.0, 403: -120307740.0, 404: 146358780.0, 405: -142175040.0, 406: 28813696.0, 407: -175810050.0, 408: 73236930.0, 409: -166610080.0, 410: -134066910.0, 411: 94720450.0, 412: 24654464.0, 413: -149907360.0, 414: -133048320.0, 415: -135240060.0, 416: -13811648.0, 417: 41401344.0, 418: 61702720.0, 419: -136084130.0, 420: 56505470.0, 421: -156726140.0, 422: -155232640.0, 423: -157322600.0, 424: -153192100.0, 425: 160014140.0, 426: 12652480.0, 427: -161461120.0, 428: 435132160.0, 429: 134709630.0, 430: -181603900.0, 431: 54689150.0, 432: 29144256.0, 433: 182421180.0, 434: -142488030.0, 435: 75102140.0, 436: 36126400.0, 437: -140461800.0, 438: 48793790.0, 439: -104576900.0, 440: -127452450.0, 441: 68271740.0, 442: 131004930.0, 443: 9395840.0, 444: 284226370.0, 445: 
12666176.0, 446: -123474370.0, 447: 40691776.0, 448: 181253380.0, 449: -150378560.0, 450: 89782910.0, 451: 47234750.0, 452: 55221310.0, 453: -182427580.0, 454: 50538176.0, 455: 44105024.0, 456: 208187460.0, 457: -113251780.0, 458: -136159360.0, 459: 47463744.0, 460: -106456700.0, 461: 206900860.0, 462: -151275800.0, 463: -125987140.0, 464: 35482176.0, 465: -151661540.0, 466: -166703200.0, 467: -82217220.0, 468: -163146530.0, 469: 216782270.0, 470: 260055550.0, 471: 6918848.0, 472: -126823040.0, 473: -5037056.0, 474: 13935040.0, 475: 47277890.0, 476: 41433664.0, 477: 24461120.0, 478: 181642180.0, 479: 112168320.0, 480: -114246530.0, 481: 26177856.0, 482: 94848960.0, 483: 10621632.0, 484: 36205504.0, 485: -139866600.0, 486: -102865660.0, 487: 54464.0, 488: -150853820.0, 489: -157298400.0, 490: 14741824.0, 491: 106525890.0, 492: 30983552.0, 493: -167313150.0, 494: -127361630.0, 495: 119404290.0, 496: 26044992.0, 497: 32830080.0, 498: -27947520.0, 499: 154886530.0, 500: 59532544.0, 501: -187872030.0, 502: -70905020.0, 503: -162493500.0, 504: -9198400.0, 505: -129511970.0, 506: 441704580.0, 507: -140206880.0, 508: 45134976.0, 509: -158956290.0, 510: -148620580.0, 511: -166675680.0, 512: 309764860.0, 513: 23177408.0, 514: 39023040.0, 515: -158148350.0, 516: -123036700.0, 517: 28129536.0, 518: -102584960.0, 519: -150355070.0, 520: -101154370.0, 521: -184246020.0, 522: 37671490.0, 523: -151297000.0, 524: 125284290.0, 525: 81020220.0, 526: -171555170.0, 527: 302172540.0, 528: 13414464.0, 529: -72741250.0, 530: 194992830.0, 531: 39678336.0, 532: 228550780.0, 533: 269212540.0, 534: 53658240.0, 535: -129973440.0, 536: 20614848.0, 537: 87077500.0, 538: -164944000.0, 539: 19043520.0, 540: -131614110.0, 541: 42186880.0, 542: -164155360.0, 543: -165734050.0, 544: 176057860.0, 545: 170905280.0, 546: -135682940.0, 547: 413354000.0, 548: 138389380.0, 549: 42277310.0, 550: -146732100.0, 551: -129083740.0, 552: -184217820.0, 553: 300880830.0, 554: -136632060.0, 555: -10462272.0, 556: -114176130.0, 557: 332948160.0, 558: 45641410.0, 559: 118537920.0, 560: 648020860.0, 561: -131701950.0, 562: 282598980.0, 563: -145057600.0, 564: -1680256.0, 565: -161299680.0, 566: -166706780.0, 567: 92227580.0, 568: 32790272.0, 569: -276160.0, 570: 469521920.0, 571: -172256600.0, 572: -7244928.0, 573: -175078020.0, 574: -70436930.0, 575: 67448960.0, 576: 222217980.0, 577: -9103936.0, 578: -181015400.0, 579: -177534300.0, 580: 7202432.0, 581: -97817790.0, 582: -160501250.0, 583: -70135940.0, 584: -86245440.0, 585: 21449856.0, 586: 163574270.0, 587: -154112380.0, 588: -67352260.0, 589: 79834180.0, 590: -149403520.0, 591: -160743200.0, 592: -62941376.0, 593: -61049536.0, 594: -174346370.0, 595: -171447100.0, 596: -173961150.0, 597: 11864576.0, 598: 16953472.0, 599: 125604800.0, 600: 220013700.0, 601: 194900860.0, 602: 95823490.0, 603: 93310020.0, 604: -99526910.0, 605: 94077060.0, 606: 15495872.0, 607: -115680770.0, 608: 72143870.0, 609: 118061310.0, 610: -149453820.0, 611: -80953280.0, 612: 56078400.0, 613: 85120060.0, 614: -187627710.0, 615: -122700900.0, 616: 19746048.0, 617: 120101760.0, 618: -129156700.0, 619: -169380380.0, 620: -98068160.0, 621: -178026560.0, 622: -94785090.0, 623: -7319936.0, 624: 42707584.0, 625: 6079616.0, 626: -141165980.0, 627: -89817600.0, 628: -108735170.0, 629: -175204510.0, 630: 95262720.0, 631: 25536384.0, 632: 62789504.0, 633: 612968800.0, 634: 312339400.0, 635: -160542460.0, 636: 33838210.0, 637: -163195800.0, 638: 302970300.0, 639: 176153730.0, 640: 110369730.0, 641: 27444608.0, 642: 552971000.0, 
643: -110344450.0, 644: 418872130.0, 645: -120589440.0, 646: -105468480.0, 647: -159461310.0, 648: 61962496.0, 649: 4322112.0, 650: -95354560.0, 651: -174898020.0, 652: -172785150.0, 653: -94755260.0, 654: -147118110.0, 655: -138030850.0, 656: 236040260.0, 657: 20823936.0, 658: -183558050.0, 659: 12200640.0, 660: -136523550.0, 661: -168611550.0, 662: -134605540.0, 663: -176429400.0, 664: -171105180.0, 665: 26802112.0, 666: 75059780.0, 667: -169097730.0, 668: 29900224.0, 669: 15380416.0, 670: -163183260.0, 671: -3563072.0, 672: 593849200.0, 673: -12309184.0, 674: -172586850.0, 675: -156572960.0, 676: 87021700.0, 677: 85761340.0, 678: -153379680.0, 679: -121192670.0, 680: -157142270.0, 681: -164564130.0, 682: 88192260.0, 683: -56039616.0, 684: -157069000.0, 685: -137423100.0, 686: 24342848.0, 687: 151329220.0, 688: 7416444000.0, 689: 180343680.0, 690: 65007870.0, 691: 5647680.0, 692: 16360192.0, 693: -184791680.0, 694: 57953470.0, 695: -18851456.0, 696: 15447872.0, 697: -113035580.0, 698: 583976700.0, 699: -113598210.0, 700: -166657000.0, 701: 48474430.0, 702: 46182080.0, 703: -131012930.0, 704: 33858176.0, 705: -186642720.0, 706: 73466750.0, 707: 65240704.0, 708: 186084860.0, 709: 54263360.0, 710: -132427740.0, 711: -144956640.0, 712: -139105280.0, 713: -147831710.0, 714: 35351616.0, 715: 217233090.0, 716: 162277440.0, 717: -93778750.0, 718: -8457536.0, 719: -140756380.0, 720: 155512380.0, 721: -106070140.0, 722: -153653760.0, 723: -172019230.0, 724: 111210690.0, 725: 64943870.0, 726: -117642660.0, 727: 264403400.0, 728: 60374910.0, 729: -171123620.0, 730: -42832960.0, 731: 65339520.0, 732: 45359170.0, 733: 105630140.0, 734: 35522370.0, 735: 82380350.0, 736: -145192600.0, 737: 119052100.0, 738: 31904704.0, 739: 22238528.0, 740: 82767620.0, 741: -157371870.0, 742: -118062750.0, 743: 229934780.0, 744: -62605376.0, 745: -5312768.0, 746: -155275260.0, 747: -144662180.0, 748: 100494400.0, 749: 397199170.0, 750: 27036864.0, 751: 384530240.0, 752: 72132160.0, 753: -111069820.0, 754: -172639100.0, 755: 33914110.0, 756: 24125568.0, 757: -183403580.0, 758: -127236420.0, 759: 22430784.0, 760: -84256260.0, 761: -109615100.0, 762: -147052960.0, 763: 13408320.0, 764: -94718340.0, 765: -149915940.0, 766: 20614848.0, 767: -75934720.0, 768: -138632800.0, 769: 4382592.0, 770: 57995650.0, 771: -142490750.0, 772: -161317730.0, 773: -10952576.0, 774: 57242240.0, 775: 194401730.0, 776: 212608.0, 777: 36948990.0, 778: 37330944.0, 779: 87272260.0, 780: 35533760.0, 781: 37083330.0, 782: 383166200.0, 783: 159677120.0, 784: -145186910.0, 785: 1154944.0, 786: -182838500.0, 787: 12810176.0, 788: 407853630.0, 789: -106964290.0, 790: -7035456.0, 791: -150844200.0, 792: -94222530.0, 793: 693147800.0, 794: -162462850.0, 795: 157739520.0, 796: 31074240.0, 797: -141286690.0, 798: 100305790.0, 799: -126503230.0, 800: 47217856.0, 801: 207988600.0, 802: -150474050.0, 803: 334077000.0, 804: 80324610.0, 805: 417549630.0, 806: -146822180.0, 807: -102370690.0, 808: 435987700.0, 809: -180058430.0, 810: -187483400.0, 811: -165042050.0, 812: -168173660.0, 813: 74717950.0, 814: -146260100.0, 815: 12693632.0, 816: -158667360.0, 817: -70242110.0, 818: 95687870.0, 819: -182065310.0, 820: 6802624.0, 821: 150674240.0, 822: -156284510.0, 823: -195655360.0, 824: 117355780.0, 825: 92711040.0, 826: 57247936.0, 827: -184487460.0, 828: 28071104.0, 829: 58995070.0, 830: 50180800.0, 831: -86725250.0, 832: -171461730.0, 833: 508673540.0, 834: 5368000.0, 835: 49009470.0, 836: -136430270.0, 837: -161994460.0, 838: 18253568.0, 839: 16272384.0, 840: 
9867968.0, 841: 33275328.0, 842: 15230592.0, 843: 72636290.0, 844: -176323740.0, 845: 30820416.0, 846: -147241120.0, 847: 47960830.0, 848: -154375140.0, 849: -162193440.0, 850: -161411680.0, 851: -156969980.0, 852: -64491456.0, 853: 976954600.0, 854: 240256580.0, 855: -84653570.0, 856: 17145472.0, 857: -148150340.0, 858: -129646240.0, 859: -156502940.0, 860: 75274880.0, 861: 40597056.0, 862: 415857540.0, 863: -158474980.0, 864: -104335620.0, 865: -68464380.0, 866: 168742460.0, 867: -171810560.0, 868: -184379620.0, 869: -175411550.0, 870: 174730430.0, 871: -146330850.0, 872: -172538240.0, 873: -186134270.0, 874: 152915840.0, 875: -128964160.0, 876: -158427680.0, 877: -155430240.0, 878: -154407260.0, 879: 18076096.0, 880: 27633472.0, 881: 990569600.0, 882: -138224380.0, 883: -99988930.0, 884: -168989020.0, 885: 20126528.0, 886: -134318460.0, 887: -53531650.0, 888: -169030530.0, 889: -137066110.0, 890: 13056320.0, 891: -116234820.0, 892: 213904830.0, 893: -148829700.0, 894: 78830270.0, 895: 83324860.0, 896: -122018240.0, 897: -158642400.0, 898: 88980420.0, 899: -118283550.0, 900: 381857470.0, 901: 181611650.0, 902: -155633700.0, 903: 350674300.0, 904: -165853700.0, 905: -169369660.0, 906: 41267070.0, 907: -161504860.0, 908: -125857470.0, 909: 45834560.0, 910: -159892420.0, 911: 38794880.0, 912: 153371400.0, 913: 24458880.0, 914: 163215680.0, 915: -2237504.0, 916: 72383420.0, 917: 41773440.0, 918: -88613060.0, 919: 1998528.0, 920: -21368576.0, 921: -184759000.0, 922: 79550720.0, 923: -186660510.0, 924: -160116960.0, 925: -125802300.0, 926: 38955010.0, 927: 219538050.0, 928: 6836480.0, 929: 39428160.0, 930: 37924670.0, 931: -134457380.0, 932: -94544130.0, 933: 184303550.0, 934: 427278340.0, 935: -131396190.0, 936: -168339800.0, 937: -72398590.0, 938: 36213250.0, 939: -174477920.0, 940: 6703232.0, 941: -118244190.0, 942: -147436320.0, 943: -175172030.0, 944: 90930240.0, 945: -87699900.0, 946: -137057860.0, 947: 81356990.0, 948: 26318464.0, 949: 334902080.0, 950: -145697800.0, 951: 53075710.0, 952: -144975040.0, 953: -128048960.0, 954: -127546080.0, 955: 322528500.0, 956: -180030240.0, 957: 6855936.0, 958: 204020100.0, 959: 27333568.0, 960: -168003000.0, 961: 22575104.0, 962: 68607300.0, 963: -166790240.0, 964: -13429952.0, 965: 34976256.0, 966: 178943300.0, 967: -151017730.0, 968: -152262200.0, 969: -157851360.0, 970: 41008770.0, 971: -124287840.0, 972: 31738816.0, 973: 77604480.0, 974: 100084420.0, 975: -170965090.0, 976: -128589860.0, 977: 35584704.0, 978: 42443776.0, 979: -80297600.0, 980: -101072320.0, 981: -25139008.0, 982: 643953000.0, 983: 404180600.0, 984: 191040.0, 985: -114964290.0, 986: 24350464.0, 987: 7891584.0, 988: 111036610.0, 989: 70855100.0, 990: -145057000.0, 991: -168113180.0, 992: 30126272.0, 993: -155948640.0, 994: -128049920.0, 995: -150456220.0, 996: -157291420.0, 997: -131550980.0, 998: 55829250.0, 999: -114528160.0}\n" + ] + } + ], + "source": [ + "dic_loaded = {}\n", + "for i, l in enumerate(loaded):\n", + " dic_loaded[i] = l\n", + "print(dic_loaded)" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[(688, 7416444000.0), (325, 1361434100.0), (334, 1192154400.0), (881, 990569600.0), (853, 976954600.0), (62, 919648400.0), (793, 693147800.0), (560, 648020860.0), (982, 643953000.0), (633, 612968800.0), (672, 593849200.0), (381, 587190400.0), (698, 583976700.0), (246, 583571840.0), (329, 573303200.0), (642, 552971000.0), (343, 535015040.0), (833, 508673540.0), (76, 
471608830.0), (570, 469521920.0), (506, 441704580.0), (808, 435987700.0), (428, 435132160.0), (934, 427278340.0), (644, 418872130.0), (805, 417549630.0), (862, 415857540.0), (547, 413354000.0), (788, 407853630.0), (983, 404180600.0), (265, 402850000.0), (749, 397199170.0), (225, 393472200.0), (41, 389410370.0), (751, 384530240.0), (782, 383166200.0), (900, 381857470.0), (170, 381835070.0), (60, 377143870.0), (85, 359141700.0), (903, 350674300.0), (949, 334902080.0), (803, 334077000.0), (557, 332948160.0), (955, 322528500.0), (634, 312339400.0), (512, 309764860.0), (638, 302970300.0), (527, 302172540.0), (553, 300880830.0), (113, 298914560.0), (200, 291801280.0), (444, 284226370.0), (562, 282598980.0), (309, 274029950.0), (337, 271410800.0), (533, 269212540.0), (727, 264403400.0), (470, 260055550.0), (114, 259317310.0), (239, 255080130.0), (333, 254202370.0), (297, 254174780.0), (193, 246292220.0), (854, 240256580.0), (286, 237856640.0), (656, 236040260.0), (107, 229996930.0), (743, 229934780.0), (532, 228550780.0), (53, 222385150.0), (576, 222217980.0), (600, 220013700.0), (927, 219538050.0), (715, 217233090.0), (469, 216782270.0), (892, 213904830.0), (456, 208187460.0), (801, 207988600.0), (461, 206900860.0), (4, 206820480.0), (298, 206580220.0), (958, 204020100.0), (137, 199019140.0), (530, 194992830.0), (601, 194900860.0), (775, 194401730.0), (365, 190520130.0), (278, 190012930.0), (145, 187762370.0), (708, 186084860.0), (933, 184303550.0), (91, 183327550.0), (433, 182421180.0), (314, 181936380.0), (478, 181642180.0), (901, 181611650.0), (448, 181253380.0), (689, 180343680.0), (966, 178943300.0), (639, 176153730.0), (544, 176057860.0), (870, 174730430.0), (279, 173469890.0), (545, 170905280.0), (866, 168742460.0), (586, 163574270.0), (914, 163215680.0), (716, 162277440.0), (425, 160014140.0), (783, 159677120.0), (169, 158798660.0), (17, 158693310.0), (795, 157739520.0), (720, 155512380.0), (499, 154886530.0), (912, 153371400.0), (874, 152915840.0), (112, 152447490.0), (203, 152325180.0), (687, 151329220.0), (821, 150674240.0), (404, 146358780.0), (65, 143271800.0), (296, 139807230.0), (25, 139254720.0), (548, 138389380.0), (127, 135477120.0), (429, 134709630.0), (442, 131004930.0), (66, 130538560.0), (396, 129196540.0), (134, 128411650.0), (599, 125604800.0), (524, 125284290.0), (27, 123487420.0), (217, 120129090.0), (617, 120101760.0), (495, 119404290.0), (263, 119396160.0), (737, 119052100.0), (559, 118537920.0), (609, 118061310.0), (824, 117355780.0), (391, 114829700.0), (370, 114253950.0), (221, 113421700.0), (479, 112168320.0), (125, 111904380.0), (366, 111832190.0), (724, 111210690.0), (988, 111036610.0), (640, 110369730.0), (87, 110170880.0), (266, 109045380.0), (491, 106525890.0), (733, 105630140.0), (86, 105145980.0), (135, 104448260.0), (33, 104150460.0), (373, 104133060.0), (184, 102976320.0), (748, 100494400.0), (798, 100305790.0), (974, 100084420.0), (189, 98879810.0), (397, 96979390.0), (602, 95823490.0), (818, 95687870.0), (158, 95640700.0), (630, 95262720.0), (482, 94848960.0), (411, 94720450.0), (605, 94077060.0), (603, 93310020.0), (98, 92939710.0), (825, 92711040.0), (567, 92227580.0), (944, 90930240.0), (180, 90319620.0), (450, 89782910.0), (393, 89331710.0), (898, 88980420.0), (181, 88899330.0), (682, 88192260.0), (19, 87436930.0), (779, 87272260.0), (537, 87077500.0), (676, 87021700.0), (116, 85856700.0), (677, 85761340.0), (211, 85556030.0), (613, 85120060.0), (22, 83981440.0), (895, 83324860.0), (262, 82954180.0), (316, 82924990.0), (177, 82816190.0), (740, 
82767620.0), (735, 82380350.0), (206, 81447620.0), (947, 81356990.0), (299, 81337540.0), (525, 81020220.0), (251, 80499070.0), (804, 80324610.0), (383, 79996990.0), (589, 79834180.0), (201, 79646460.0), (922, 79550720.0), (894, 78830270.0), (144, 77617540.0), (973, 77604480.0), (78, 77563070.0), (287, 77414590.0), (18, 77251580.0), (272, 76939970.0), (387, 76146180.0), (860, 75274880.0), (254, 75109700.0), (435, 75102140.0), (666, 75059780.0), (282, 75019840.0), (813, 74717950.0), (175, 73824320.0), (706, 73466750.0), (408, 73236930.0), (843, 72636290.0), (916, 72383420.0), (608, 72143870.0), (752, 72132160.0), (318, 71931070.0), (150, 71918980.0), (247, 71235710.0), (989, 70855100.0), (24, 70794430.0), (73, 69509440.0), (962, 68607300.0), (441, 68271740.0), (575, 67448960.0), (198, 65973824.0), (228, 65806144.0), (731, 65339520.0), (707, 65240704.0), (690, 65007870.0), (725, 64943870.0), (183, 64293630.0), (362, 63614336.0), (142, 62934016.0), (632, 62789504.0), (648, 61962496.0), (418, 61702720.0), (728, 60374910.0), (500, 59532544.0), (829, 58995070.0), (102, 58595520.0), (770, 57995650.0), (694, 57953470.0), (136, 57668670.0), (191, 57625920.0), (826, 57247936.0), (774, 57242240.0), (372, 56752510.0), (420, 56505470.0), (612, 56078400.0), (5, 55940290.0), (998, 55829250.0), (452, 55221310.0), (431, 54689150.0), (709, 54263360.0), (205, 54243264.0), (534, 53658240.0), (951, 53075710.0), (48, 52150144.0), (356, 52110464.0), (345, 51789950.0), (34, 51729470.0), (454, 50538176.0), (830, 50180800.0), (835, 49009470.0), (357, 48994750.0), (249, 48863550.0), (438, 48793790.0), (701, 48474430.0), (64, 48309056.0), (847, 47960830.0), (109, 47885824.0), (242, 47625856.0), (459, 47463744.0), (475, 47277890.0), (451, 47234750.0), (800, 47217856.0), (31, 47008000.0), (702, 46182080.0), (909, 45834560.0), (558, 45641410.0), (732, 45359170.0), (508, 45134976.0), (455, 44105024.0), (35, 43572736.0), (82, 42761280.0), (624, 42707584.0), (80, 42698690.0), (139, 42668990.0), (978, 42443776.0), (549, 42277310.0), (248, 42275970.0), (541, 42186880.0), (369, 41840256.0), (917, 41773440.0), (133, 41657280.0), (476, 41433664.0), (417, 41401344.0), (906, 41267070.0), (970, 41008770.0), (271, 40853310.0), (447, 40691776.0), (861, 40597056.0), (36, 40529790.0), (305, 40304960.0), (128, 40303550.0), (59, 39723390.0), (531, 39678336.0), (929, 39428160.0), (514, 39023040.0), (926, 38955010.0), (911, 38794880.0), (930, 37924670.0), (392, 37811390.0), (522, 37671490.0), (778, 37330944.0), (781, 37083330.0), (777, 36948990.0), (375, 36387136.0), (353, 36366400.0), (938, 36213250.0), (484, 36205504.0), (436, 36126400.0), (13, 35934464.0), (146, 35830336.0), (977, 35584704.0), (780, 35533760.0), (734, 35522370.0), (464, 35482176.0), (714, 35351616.0), (965, 34976256.0), (131, 34700736.0), (7, 34693250.0), (214, 34643710.0), (140, 34007360.0), (755, 33914110.0), (704, 33858176.0), (636, 33838210.0), (841, 33275328.0), (44, 33210368.0), (99, 33198848.0), (50, 33033600.0), (497, 32830080.0), (568, 32790272.0), (389, 32427264.0), (738, 31904704.0), (972, 31738816.0), (301, 31721024.0), (106, 31589504.0), (796, 31074240.0), (492, 30983552.0), (845, 30820416.0), (394, 30541568.0), (992, 30126272.0), (342, 29978496.0), (668, 29900224.0), (160, 29756416.0), (219, 29692480.0), (300, 29211968.0), (234, 29148608.0), (432, 29144256.0), (290, 29112960.0), (138, 28866304.0), (406, 28813696.0), (28, 28170496.0), (517, 28129536.0), (828, 28071104.0), (327, 27949312.0), (267, 27849664.0), (880, 27633472.0), (14, 27531392.0), (148, 
27483328.0), (641, 27444608.0), (959, 27333568.0), (68, 27271552.0), (750, 27036864.0), (347, 26894144.0), (354, 26847808.0), (665, 26802112.0), (948, 26318464.0), (481, 26177856.0), (168, 26124032.0), (496, 26044992.0), (257, 25645056.0), (631, 25536384.0), (377, 25324224.0), (1, 25310208.0), (40, 25147648.0), (412, 24654464.0), (477, 24461120.0), (913, 24458880.0), (986, 24350464.0), (686, 24342848.0), (154, 24159424.0), (756, 24125568.0), (101, 23838976.0), (513, 23177408.0), (961, 22575104.0), (306, 22457216.0), (759, 22430784.0), (209, 22382016.0), (739, 22238528.0), (585, 21449856.0), (71, 21103808.0), (657, 20823936.0), (47, 20706112.0), (222, 20701696.0), (536, 20614848.0), (766, 20614848.0), (67, 20231488.0), (885, 20126528.0), (243, 19824576.0), (352, 19766144.0), (616, 19746048.0), (118, 19407360.0), (539, 19043520.0), (238, 18745600.0), (838, 18253568.0), (152, 18214464.0), (879, 18076096.0), (130, 17752320.0), (856, 17145472.0), (598, 16953472.0), (270, 16786944.0), (692, 16360192.0), (839, 16272384.0), (227, 16169920.0), (253, 16077440.0), (268, 15672192.0), (104, 15513664.0), (606, 15495872.0), (696, 15447872.0), (669, 15380416.0), (842, 15230592.0), (490, 14741824.0), (321, 14579072.0), (474, 13935040.0), (240, 13902336.0), (173, 13841664.0), (153, 13789248.0), (395, 13457984.0), (32, 13428416.0), (528, 13414464.0), (763, 13408320.0), (890, 13056320.0), (787, 12810176.0), (815, 12693632.0), (445, 12666176.0), (426, 12652480.0), (45, 12548672.0), (110, 12333440.0), (281, 12204736.0), (659, 12200640.0), (597, 11864576.0), (3, 11471168.0), (147, 10875776.0), (483, 10621632.0), (8, 10488832.0), (103, 10195456.0), (840, 9867968.0), (443, 9395840.0), (283, 9315712.0), (207, 8842176.0), (230, 8837696.0), (317, 7892736.0), (987, 7891584.0), (182, 7820032.0), (580, 7202432.0), (471, 6918848.0), (957, 6855936.0), (928, 6836480.0), (820, 6802624.0), (940, 6703232.0), (625, 6079616.0), (691, 5647680.0), (378, 5471552.0), (834, 5368000.0), (280, 4731456.0), (769, 4382592.0), (649, 4322112.0), (218, 3603904.0), (216, 2957312.0), (919, 1998528.0), (161, 1502976.0), (9, 1445568.0), (124, 1429824.0), (785, 1154944.0), (776, 212608.0), (984, 191040.0), (487, 54464.0), (569, -276160.0), (256, -1083328.0), (564, -1680256.0), (915, -2237504.0), (361, -3507328.0), (671, -3563072.0), (311, -3583744.0), (473, -5037056.0), (745, -5312768.0), (790, -7035456.0), (572, -7244928.0), (623, -7319936.0), (718, -8457536.0), (390, -8885568.0), (12, -9019072.0), (577, -9103936.0), (504, -9198400.0), (555, -10462272.0), (21, -10882752.0), (773, -10952576.0), (673, -12309184.0), (964, -13429952.0), (416, -13811648.0), (259, -15313280.0), (163, -18647488.0), (695, -18851456.0), (920, -21368576.0), (981, -25139008.0), (498, -27947520.0), (105, -31581888.0), (156, -36544000.0), (363, -42603970.0), (730, -42832960.0), (887, -53531650.0), (683, -56039616.0), (384, -61014656.0), (593, -61049536.0), (23, -61718910.0), (52, -62483200.0), (744, -62605376.0), (592, -62941376.0), (331, -64343104.0), (852, -64491456.0), (588, -67352260.0), (865, -68464380.0), (583, -70135940.0), (817, -70242110.0), (574, -70436930.0), (502, -70905020.0), (937, -72398590.0), (529, -72741250.0), (767, -75934720.0), (54, -78486590.0), (30, -79057860.0), (979, -80297600.0), (611, -80953280.0), (285, -81100160.0), (231, -81313470.0), (467, -82217220.0), (380, -82326210.0), (760, -84256260.0), (855, -84653570.0), (220, -85495940.0), (584, -86245440.0), (831, -86725250.0), (945, -87699900.0), (918, -88613060.0), (295, -88998530.0), (627, 
-89817600.0), (97, -92901250.0), (235, -93080320.0), (717, -93778750.0), (792, -94222530.0), (932, -94544130.0), (764, -94718340.0), (653, -94755260.0), (622, -94785090.0), (650, -95354560.0), (178, -97322750.0), (581, -97817790.0), (620, -98068160.0), (604, -99526910.0), (883, -99988930.0), (980, -101072320.0), (520, -101154370.0), (176, -101751170.0), (368, -102007360.0), (195, -102149630.0), (807, -102370690.0), (518, -102584960.0), (486, -102865660.0), (11, -104090620.0), (864, -104335620.0), (292, -104499970.0), (439, -104576900.0), (355, -104739900.0), (0, -105436540.0), (646, -105468480.0), (721, -106070140.0), (460, -106456700.0), (789, -106964290.0), (174, -108295620.0), (628, -108735170.0), (761, -109615100.0), (643, -110344450.0), (37, -110372480.0), (269, -110697410.0), (753, -111069820.0), (121, -112647230.0), (697, -113035580.0), (457, -113251780.0), (51, -113388670.0), (232, -113467650.0), (699, -113598210.0), (115, -113662210.0), (556, -114176130.0), (480, -114246530.0), (999, -114528160.0), (294, -114703420.0), (985, -114964290.0), (385, -115503550.0), (607, -115680770.0), (891, -116234820.0), (210, -116742460.0), (284, -117480740.0), (726, -117642660.0), (742, -118062750.0), (941, -118244190.0), (899, -118283550.0), (403, -120307740.0), (149, -120361570.0), (645, -120589440.0), (382, -120827780.0), (679, -121192670.0), (896, -122018240.0), (100, -122026430.0), (165, -122196670.0), (275, -122637890.0), (615, -122700900.0), (516, -123036700.0), (92, -123235230.0), (322, -123265790.0), (157, -123284670.0), (446, -123474370.0), (122, -123800260.0), (93, -123979390.0), (971, -124287840.0), (16, -124985380.0), (925, -125802300.0), (908, -125857470.0), (463, -125987140.0), (70, -125991940.0), (61, -126252990.0), (799, -126503230.0), (472, -126823040.0), (63, -126964420.0), (289, -127137760.0), (303, -127150300.0), (758, -127236420.0), (494, -127361630.0), (26, -127395710.0), (440, -127452450.0), (954, -127546080.0), (953, -128048960.0), (994, -128049920.0), (976, -128589860.0), (83, -128875740.0), (94, -128961280.0), (875, -128964160.0), (20, -128984510.0), (551, -129083740.0), (618, -129156700.0), (505, -129511970.0), (258, -129573120.0), (858, -129646240.0), (95, -129695260.0), (535, -129973440.0), (703, -131012930.0), (69, -131344800.0), (935, -131396190.0), (250, -131470780.0), (997, -131550980.0), (540, -131614110.0), (561, -131701950.0), (224, -131893150.0), (710, -132427740.0), (351, -132806210.0), (414, -133048320.0), (410, -134066910.0), (886, -134318460.0), (931, -134457380.0), (662, -134605540.0), (229, -134623000.0), (415, -135240060.0), (348, -135309470.0), (546, -135682940.0), (379, -135803400.0), (199, -135993950.0), (419, -136084130.0), (302, -136136450.0), (458, -136159360.0), (836, -136430270.0), (660, -136523550.0), (554, -136632060.0), (946, -137057860.0), (889, -137066110.0), (685, -137423100.0), (335, -138015330.0), (655, -138030850.0), (882, -138224380.0), (358, -138414880.0), (768, -138632800.0), (126, -138749310.0), (202, -138815840.0), (712, -139105280.0), (324, -139194460.0), (339, -139394050.0), (123, -139598620.0), (398, -139689250.0), (485, -139866600.0), (507, -140206880.0), (437, -140461800.0), (719, -140756380.0), (626, -141165980.0), (797, -141286690.0), (77, -141778240.0), (192, -141808900.0), (323, -141898880.0), (186, -142107490.0), (10, -142162110.0), (405, -142175040.0), (376, -142187230.0), (434, -142488030.0), (771, -142490750.0), (90, -142682530.0), (310, -142837280.0), (326, -142858200.0), (747, -144662180.0), (711, -144956640.0), (952, 
-144975040.0), (990, -145057000.0), (563, -145057600.0), (784, -145186910.0), (736, -145192600.0), (950, -145697800.0), (55, -145819100.0), (159, -145846530.0), (252, -145859070.0), (236, -146137090.0), (89, -146193980.0), (167, -146231650.0), (814, -146260100.0), (871, -146330850.0), (550, -146732100.0), (244, -146740380.0), (340, -146748960.0), (806, -146822180.0), (762, -147052960.0), (654, -147118110.0), (846, -147241120.0), (942, -147436320.0), (197, -147475940.0), (713, -147831710.0), (857, -148150340.0), (367, -148381470.0), (237, -148509090.0), (346, -148554690.0), (386, -148580320.0), (510, -148620580.0), (893, -148829700.0), (374, -148942660.0), (119, -149080900.0), (590, -149403520.0), (610, -149453820.0), (320, -149557150.0), (413, -149907360.0), (765, -149915940.0), (74, -150001700.0), (519, -150355070.0), (449, -150378560.0), (995, -150456220.0), (802, -150474050.0), (58, -150749380.0), (212, -150783100.0), (791, -150844200.0), (488, -150853820.0), (967, -151017730.0), (194, -151043970.0), (462, -151275800.0), (523, -151297000.0), (264, -151362850.0), (164, -151561540.0), (465, -151661540.0), (336, -151761660.0), (277, -152183300.0), (968, -152262200.0), (57, -152524640.0), (424, -153192100.0), (2, -153372000.0), (678, -153379680.0), (79, -153614050.0), (722, -153653760.0), (587, -154112380.0), (848, -154375140.0), (878, -154407260.0), (245, -154847000.0), (422, -155232640.0), (746, -155275260.0), (877, -155430240.0), (902, -155633700.0), (993, -155948640.0), (108, -155986560.0), (822, -156284510.0), (859, -156502940.0), (675, -156572960.0), (421, -156726140.0), (851, -156969980.0), (684, -157069000.0), (680, -157142270.0), (359, -157275040.0), (996, -157291420.0), (276, -157292960.0), (489, -157298400.0), (423, -157322600.0), (741, -157371870.0), (364, -157432960.0), (402, -157447900.0), (308, -157490690.0), (293, -157638820.0), (969, -157851360.0), (43, -157860900.0), (313, -157959580.0), (304, -158066500.0), (515, -158148350.0), (876, -158427680.0), (863, -158474980.0), (897, -158642400.0), (816, -158667360.0), (509, -158956290.0), (96, -159095650.0), (261, -159118780.0), (647, -159461310.0), (330, -159615940.0), (910, -159892420.0), (288, -159898430.0), (924, -160116960.0), (72, -160133380.0), (401, -160275780.0), (582, -160501250.0), (635, -160542460.0), (591, -160743200.0), (388, -160926530.0), (6, -161260160.0), (565, -161299680.0), (772, -161317730.0), (850, -161411680.0), (427, -161461120.0), (907, -161504860.0), (223, -161681060.0), (837, -161994460.0), (849, -162193440.0), (255, -162238460.0), (75, -162275170.0), (794, -162462850.0), (503, -162493500.0), (468, -163146530.0), (670, -163183260.0), (166, -163186500.0), (637, -163195800.0), (241, -163407900.0), (328, -163410880.0), (319, -163583040.0), (542, -164155360.0), (172, -164436320.0), (143, -164499460.0), (681, -164564130.0), (349, -164677250.0), (188, -164748130.0), (190, -164922820.0), (538, -164944000.0), (811, -165042050.0), (187, -165140600.0), (543, -165734050.0), (904, -165853700.0), (233, -166130530.0), (409, -166610080.0), (215, -166648900.0), (700, -166657000.0), (511, -166675680.0), (466, -166703200.0), (566, -166706780.0), (963, -166790240.0), (312, -166956960.0), (213, -166977250.0), (399, -167032770.0), (273, -167076800.0), (315, -167267840.0), (360, -167283170.0), (493, -167313150.0), (179, -167615230.0), (204, -167697950.0), (208, -167793120.0), (960, -168003000.0), (991, -168113180.0), (812, -168173660.0), (936, -168339800.0), (661, -168611550.0), (884, -168989020.0), (888, -169030530.0), 
(667, -169097730.0), (905, -169369660.0), (619, -169380380.0), (274, -169572160.0), (81, -169892800.0), (49, -170180510.0), (400, -170588830.0), (975, -170965090.0), (664, -171105180.0), (729, -171123620.0), (338, -171391420.0), (595, -171447100.0), (832, -171461730.0), (526, -171555170.0), (867, -171810560.0), (723, -172019230.0), (571, -172256600.0), (872, -172538240.0), (674, -172586850.0), (754, -172639100.0), (652, -172785150.0), (350, -173124130.0), (117, -173729760.0), (596, -173961150.0), (594, -174346370.0), (939, -174477920.0), (651, -174898020.0), (307, -174945440.0), (129, -175006500.0), (573, -175078020.0), (943, -175172030.0), (629, -175204510.0), (869, -175411550.0), (407, -175810050.0), (844, -176323740.0), (663, -176429400.0), (132, -176519620.0), (185, -177478430.0), (579, -177534300.0), (38, -177694200.0), (621, -178026560.0), (226, -179139680.0), (46, -179210720.0), (260, -179404600.0), (162, -179623400.0), (151, -179685400.0), (196, -179980930.0), (956, -180030240.0), (809, -180058430.0), (120, -180593800.0), (291, -181006400.0), (578, -181015400.0), (88, -181409920.0), (341, -181564860.0), (430, -181603900.0), (56, -181676740.0), (819, -182065310.0), (453, -182427580.0), (786, -182838500.0), (155, -183254880.0), (757, -183403580.0), (658, -183558050.0), (552, -184217820.0), (521, -184246020.0), (868, -184379620.0), (827, -184487460.0), (921, -184759000.0), (693, -184791680.0), (171, -185279800.0), (873, -186134270.0), (15, -186393250.0), (705, -186642720.0), (923, -186660510.0), (42, -187263740.0), (141, -187329950.0), (810, -187483400.0), (614, -187627710.0), (501, -187872030.0), (84, -188234620.0), (39, -188921500.0), (344, -194384580.0), (823, -195655360.0), (332, -196114240.0), (371, -196465920.0), (29, -196940260.0), (111, -200388450.0)]\n" + ] + } + ], + "source": [ + "import operator\n", + "sorted_dct = sorted(dic_loaded.items(), key=operator.itemgetter(1), reverse=True)\n", + "print(sorted_dct)" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The weighted accuracy drop is 0.067\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAiMAAAGdCAYAAADAAnMpAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA/KUlEQVR4nO3de3yU5Z3///cckpkcB5JAIJCEgMopHjBRDIjWUyxqux5aqVjQCq3UQ0V+upXSXZVtG7frWuxuQVHRulZlXWh/tku1cVXEgqIRFAREAUmAhJAQZkIOM5mZ+/tHyJDJATIhcE+Y1/PxmMeX3HPdk2vu+t28H9f1uT+3xTAMQwAAACaxmj0BAAAQ2wgjAADAVIQRAABgKsIIAAAwFWEEAACYijACAABMRRgBAACmIowAAABT2c2eQE8Eg0Ht27dPKSkpslgsZk8HAAD0gGEYqq+vV1ZWlqzW7tc/+kUY2bdvn7Kzs82eBgAA6IWKigoNHz682/f7RRhJSUmR1PplUlNTTZ4NAADoCY/Ho+zs7NDf8e70izDStjWTmppKGAEAoJ85XokFBawAAMBUhBEAAGAqwggAADAVYQQAAJiKMAIAAExFGAEAAKYijAAAAFMRRgAAgKkIIwAAwFSEEQAAYCrCCAAAMBVhBAAAmKpfPCjvZHljc5W+3F8vSRqQFK8J2QOU7LBrv6dZ8XarPik/pAavXw57a2ZrCQQlSbdOzNXApPjQ5wSChnbVNGjUoKTjPgwIAACEi+kwsmpTpV7/dF/E5z393k5dMCJNjT6/Gn0B7appUH2zX5mpDuWmJcnd1KK6Rp/8QUMDEuNU7fFqQs4ADRuQoEZfQMMGJmjUoGSNz0qVzx/UEJdTe+qaJBnKy0hWWrugAwDA6S6mw8ikUelKctglGdpSWa9PKw5JkoYNSFB9c4syU53KTU/SwQavstMSZZH01tZq1Tf79fa26k6ft9/j1X6PN+zYwQafJGnNlzU9nld2WoJGZiTr69oG7a5t1IDEOA1MjNeI9EQNSIxXelK8BqU4lJHskCshTonxNqU445SWHK8hqU5ZJFmtFgWDhmoOe2W1WpSR7Oj0e4JBQ+6mFqU47bLberZjFwwaMiTZrEdXgA41+mS3WZXsiOn/nAAAvWQxDMMwexLH4/F45HK55Ha7lZqaetJ+j9cf0OFmv9K7+MPdptrTrHe3H9CBeq8ykltDgTPOJpvFogOHvbJaLEqIs8likVKccWr0+RVns+qDnbWyWSyq9/q1q6ZBe+uaVOlukqfZr0DQ0OAUh5pbAvI0+/vs+8TbrfL5W7eWrhqXKWecTQcbvPqiql6eJr+sVqm5JRgaP2ZIii4ama5DjT7trGnQGYOSVV3vVYrTLq8/qN21DdpxoEGSZLVIQePo/ytJZwxOltcfUF1Di1oCQWWnJep7F2Trwrw0eZpaV45GDUqWxSIdqPdq8z63NpYf0kWj0jU+yyUZ0lcH6rXvULMuHT1IRlBqCQZDQaq5JaC6Rp8yU5yyHglDXn9AX9c0yt3UorFDU5TijOuz6wcAODE9/ftNGDGZzx/U3kNNGpGeKEnavv+wtu+v18aKQ0pLildeRpLOHJysA/VelR9s/aN7oN6r7dWH5fMH5Gnyy93UIndTiw57+y7InCyuhDi5m1qOOy7eblUwaMh/JKgV5A7UO19Uq7klqIzkeAWChnz+oLz+oPxH0pArIU6FuQPl9Qc1KMWh3PREWS0WpSfHKynerr2HmjQuK1VNvoD2HWrSZ3vcGupyqnBEmj7YWauzMpN1+ZhMDUrpPowCAHqOMBKDGn1+/XVTlTbtdSsjOV7XnpOlFKddi97aruaWoEYNSpbPH1TeoCQNdTk1MDFeKU679tQ1yWqRdh5o0PbqesmQBqc6tWWfR4NTHWr0+jUoxaHhAxN1xuBkOexWeZr92r6/XkHD0LnDBygh3qbVXxyQM86mC0YM1KsfVehws1+f7XVra6Unou+RmerotN11LClOu6wWS49CTk8MTIzTmCGpGpeVqrSkeJXXNur83AH6h/OGaU9do2oO+5SdlqhhAxL65PcBwOmKMIKo0ejzy261aldNg2oOt277pCXFa/jARFUcbJRhSO9ur1aWK0FnZiYrJy1RO2tat4P21DVpT12j9tQ1aUL2ABWNSlfZ7jr5A4accTZlpyUoJy1RgaChv26u0iflddpV06D0JIcq6hq1t65JGSkOOexW7ag+rLpGnwalOFTf7NflYwZrd22jtlZ6QqsrkRiflaorx2YqxWnXgXqvvP6g6hp92lrpUdaABCU57Jo0Kl12q0V765p0zvABGpeVqiGprdtMh71+1R72akBivFwJcTIMQ3vqmjTU5dSne9z6y2f7NCI9ScMHJig3PVFDXAmd6nIO1HtltUhpSfHcyQUg6hBGgC54/QE57LZu3/+iql7bqjz6748rVHvYp7pGX0SrNJGwWS0KBA3ZrRZdPmawDhz2akP5oWOec/ukEdpW5VFdQ4tqG7yqOewLvXfT+cN19rBU5Q1K1vaqeo0clCSptZYne2CiLBYRWACcUoQRoA/VNfhkt1mUGG9XSyConQca9MbmSr3yUYUavX6NHJQsq0UaNShZnma/Up127axpUGK8TdX1XgWChvYeagoVFLdpXwB8ssXZLJqQM1CFuQN1xuBkJcbbtK2qXhZZ5PUHNGpQsr59XpbienhnFQAcD2EEiDKGYejtbdX6YGetLBaLrj17qM4Z7tLb26q1rapeqQlxmpA9QDarRSPSk+SwW7W/vln7PV6t2lSplZ/sVc1hr+LtVl179lBJrVtF5QcbtfNAgxp9rXdmbd7nUeBIwsnLSNLXtQ3q6f8vHz4wQf9wXpauOydLZ2WmyGqRvP6gnHHdryYBQHcII8BpqLklcNxg0BIIyt62BWSzat+hJlXXe3Wwwav/KdujVZuq5EqIU4rTrsEpDg1IjNf7X9V0WrVpE2+3avqFORo9JEXeloCyBiQo2WlXepJDo4eknIyvCeA0QRgB0GOGYei25z/S2q9qelzMa7FINxdka399sy45c5AmjkzTfk+zmluCmjQqXQ67TXabRXarRRaLRVXuZh1q8ml0Zgq1K0CMIIwA6JVA0NDKT/bI3dSi1IQ4uRtbtG5nrQzDULzdqrU7alUfYXO+hDibmloCklprV5bOKNTFZ2aorsEnh92m1AS7WgKtnx+ptlvHM1OdcsZZlRgffseRp7lFyfH2UKM8AKcOYQTASREIGgoEDT21eof+852vut3e6SmrRUpy2NXg9SsnLVFFo9I1e8pI7a5t0BdVh/Xl/nrtPdSk0UNSFG+zylBrQXFL0NDeukZ90u4OpAGJcbp1Yo7e2FylQSkO2awWrdtRqxEZSfrl9WeraFT6iX15ABEhjAA46Xz+oOJsFm3a69ZZmSlq8PplsVi0rdKjTXvdGuJyaseBBg1JdSo9OV4VBxv1m9LtavAFTvlcrRbp5R9epItGEkiAU4UwAiBqNbcE9El5nawWi7JcCTrU5Gt9pEGzXz//02bVNviUmdr6IMgxQ1I11OVUS8
BQXaNPcTaLBibFy2m3aXCqQ5eeNUhfVR/Wmi9r9N72Axricqr2sE9jhqbo7GEu5aQl6uHXP9eeuiadnzNAK++arAavX3WNPn1ZfVh56UmKs1vV6PVrREYStzYDfYgwAqBf8voDCgalhPi+u514W5VH31y0RpJ0XvYAba30yNvF9tKUMzM0fGCCBibG65YLc+Q/8uTrYNDQBSPSVNPglTPOplQeyAj0CGEEAI4wDEPTn/lQ63bWntDnWCySYUhnZSarcESappyRoa9rG7V5n1uXnjlI3ykYTqEs0A5hBADaafT5Nf2ZD7WtyqOZRSN0+6QRqq736pk1OzU+K1Vvfr5fn1YckiRlJDvkaW6JuDjXZrXon64dq+kTc3t1ZxBwuiGMAEAE/IGgGlsCYVsw/kBQNqtFf91cpQP1Xp0xOFlbKz1at6NWb39RrRHpSUp22DVsQILe+LwqdN7wgQl6/Lvn6sIRaayUIKYRRgDgFHI3tejf3tymlz4oDzt+y4U5KswdqM373EqMt2neVaNlI6AgRhBGAMAEFQcbtfjdHXplfXmX72e5nBqYFK+bzh+uJIdN63bU6qwhKbrlghwNTIo/xbMFTi7CCACYqOJgo/782T59srtOdY0tKttdd9xzbpgwTJJUPC5TU488DBHozwgjABBlDtR79ebnVXr3iwN6a+v+447/weQR+uk3x8hutWj3wUZt2uPWmKEpykh2KCPZcQpmDJyYkxpGFi9erH/7t39TZWWlxo8fr0WLFmnKlCldjr399tv1+9//vtPxcePG6fPPP+/R7yOMADjd1B72qtkflLcloO37D+vHfyiTYUiDUhw6UO895rlJ8TY9NaNAQ1KdOjOz85OT2/7POg8khNlOWhhZvny5ZsyYocWLF2vy5Ml6+umn9eyzz2rLli3KycnpNN7tdqupqSn0s9/v17nnnqt7771XjzzySJ9+GQDorxp9fiXE2RQ0pCff2q6n39vZZWO2jhbfer4mn5EhSXIlxGndjlr9/E+btONAgyRpav4QLfl+wUmdO9CdkxZGJk6cqPPPP19LliwJHRs7dqyuv/56lZSUHPf8P/3pT7rxxhu1a9cu5ebm9uh3EkYAxKLq+mZ9UVUvf9BQYpxN++u9slqkZ9fs0sYjPVHaGzkoSXvqmjr1R/nLvRcrf5jrFM0aOOqkhBGfz6fExES99tpruuGGG0LH77vvPm3cuFGrV68+7md861vfktfr1d/+9rdux3i9Xnm9R5cpPR6PsrOzCSMAcMR+T7PueOEjfb7P0+m9gtyB+rqmQbUNvtCxH39jlH78jVG0sscp1dMwYo/kQ2tqahQIBJSZmRl2PDMzU1VVVd2cdVRlZaX++te/6uWXXz7muJKSEj366KORTA0AYkpmqlP/9p1z9aP/+lhWi0XlBxslSQ9ePVpzLh0lfzCoT3Yf0i3PfCBJWvLuDi15d4cWTTtP/qCh7IEJqmv0adxQlw42+vS/n+1T2e463XP5GTp72AANSqFAFqdORGGkTceiKMMwelQo9cILL2jAgAG6/vrrjzlu/vz5mjdvXujntpURAMBR47JS9f5PL5ckBYKGag97NTjVKUmyWW0qGpWu/76zSDc/vS50ztzlG4/5mXe88HHo32lJ8fpOwXB9Y/QgvfxhuYpGpeuWC3LoKos+F9HDEzIyMmSz2TqtglRXV3daLenIMAwtW7ZMM2bMUHz8sRv7OBwOpaamhr0AAN2zWS2hINLehXlpWvvQ5bps9KBuz7VbLUrs4inJBxt8WvreTk1/5kP95bNKLfjjZo382So9+daX6gddIdCPRBRG4uPjVVBQoNLS0rDjpaWlmjRp0jHPXb16tb766ivNmjUr8lkCAHota0CCnv/Bhfqn68bJGWdVdlqCnp5RoHce+IY2P3q1vvzlVH3+6NWaWdSzmwp+89Z2vf7pvpM86+j2102V+ujrg5KkLfs8uveVDXrt4wqTZ9W1r6oPq6jk/3TbsvUKBKMzREa8TTNv3jzNmDFDhYWFKioq0tKlS1VeXq45c+ZIat1i2bt3r1588cWw85577jlNnDhR+fn5fTNzAEBEZl2cp1kX53X7/vypYzUhZ4AuH5Op+uYWDRuQoGV//1qHGn360SUjtbu2Udf9x/uSpJ+u+ExTzhyktHYt7H3+4Gn1tGKfP6i/bq5Uoy+gioON2lPXpJrDXq3dUdvl+D9/uk/js1walxVdq/l//6pGle5mVbqb9XVtg0YNSjZ7Sp1EHEamTZum2tpaLVy4UJWVlcrPz9eqVatCt+lWVlaqvDz8mQxut1srVqzQk08+2TezBgD0uYR4m26YMFxSa88SSWHhJX+YS1/84pu65sk12nGgQT988WP98oZ8/de63frDh+WyWqSXZk/UpFEZpsy/L1R7mrX43R16Ye3XvTr/jxv2aFzWuG4/+/VP92n6xBwlxveqZLNXvP5A6N+HGltO2e+NBO3gAQAR+ftXNbpt2Xr5u1ny//zRq5XkOP4f29c/3Sd3U4sGpzj0wt+/VrM/oJ9ccaYuGz24r6d8TF9V16u5Jaiaw179bOUm7XM39+i8eVedpWEDEtTUEpDDbtWD//OZxgxJ0RtzL+k01jAMXfPb97W10qMJOQP07MxC+YOGnv/71/rhlDylJzu039Os+ma/zhjctysX//n2l3r8b9slSctuL9TlY45d49mXTsqtvQAATD4jQ/OvGavH3/xCTS2BTu//+dN9SnbaNTIjucstiwavXyV/3aqXPuj8ZONFb30ZCiPBoCFDrcW5bbZVefTI65/rwavHqCB3YK+/wyvry/VV9WFdc/ZQ3bRkbZdjhrqc+u87i/R1bYMm5AxUQpxND772qWoafHpmZoEc9qNFv3UNPlks0raqeu33NCuzQzFx2e46ba1s7QmzofyQZi5bL4fdqk/KD2nT3kP6w+yLNPFX/ydJ+mjBlX16a3X7JnjRujJCGAEARKyt/mTZ+7u0q6ZB/3TdOE1buk4byg/poZWbQuOuHDtYz952gSSpuSUgu9WiH7zwkdbvOtjl5365v15S6x/Qmcs+bH1uz6WjdPvkEYqzWbXwz1v0wc6DumnJWn392LW9mrthGJp/ZI7Pvb8r7L2Rg5L0rzedowtGpIWOZaclhv79xLTzuvzMgUnxOmeYS5/ucevdL6o17YLwx6Ms/yi8uLV9s7q/f1WrmcvWh37evr++T8OIN0AYAQCcxu5oV1Py3YJsbSg/FPb+W1ur9eRbX+qCvIH64e8/ltVqUX2zX5L0z9eN0/SJOdpWVS/DMHTD4rVq9AX01pb9end7tT7Y2RpYfrlqq2xWi+64OE+1h492lf2iql6jh4Q/KHDzXre+qKrXjecP67b/VXUXDyKcmJem539wwQnVclwxNlOf7nGrdEt4GAkGDf11c2tLjNfmFOnxN7/Qhx3C2HvbD4T+fduy9dr+i6l91s8lbGWkKTrDyOlT9gwAMFXx+EwNdTllt1p05dijdR+/eWu7pj/zoRp8gVAQKRqZrjsuzpMzzqbzsgdoQs5AffvcLEnS7Bc/7rSFs25nrT7++
qC+OLJyIklXL3pPD772qdxH/sB+UVWvGxev1f/32qf6v63V3c5zd21j2M+ZqQ69/MOLTriotO2BhZv3usOOV9Q16rDXr3i7VefnDFTGcVY9/EFDL324+5hjPthZq5+8skHV9V3Xtyz/qFy/P1KE2z6MuBt9XY43GysjAIA+kZHs0DsPfEONvoDSkuK139OsV9dX6Ddvbe809spxnYso/+X6fG3a69aumtYnDp85OFn/cn2+vrf0A5Vu2a/SLftDY+NtVvkCQb1Wtkevle3Rqp9M0cOvb5bvyJbEf77zlS4fMzi0uvDrN7bpjxv2SpIqOxSoPlA8OqwupbfOOHLLbJWnWQ1ef6iId2tlfeh9m9WijKRjN/6UpCff+lIzi0Z0+/73lra2+U+Mt+mxm84Je68lENRPV7RuQ03NHxL29GdWRgAApz1nnC3UeyQz1an7rjxTF+alKcVh1/XnZYXGtV85aeNKiNOLd1wY+vln14zVOcO7ftrwG3On6PZJI0I/z3mpTB99XRf6eWPFIf19R42k1ocKLn53R6jXRpuhLqd+N/18fbewbx434kqMU/qR794WqCTp1Y9aV3navovddvw/vfVef7fvtb8Jtu2ZRO01tysq9jS39IsCVsIIAOCk+sPsiVo7/3I9+M0xSoq36YIRA5WbntTl2Oy0RG16pFj//92TddmYwUqMt+vBq0eHjbm5cLhGDkrWI98er6n5QyQd/aM8MS8t1El2+UcVavD69cBrn3b5u6ZfmKNrzxnaV19TkkINxXYcOCypNTi0Fet+/6LWeXXVBTU7LUE7fnWN1i+4QlLr1kqwm1un93uO1rwkd3ELdXPL0fDh9QepGQEAIM5mVYozTsMGJGj1P16mF35w4THHpzjjdG72gNDPV43LlMXS+uC+FT8u0iPfHh96b/Gt54edOzEvTTcfWen4y2eVGv/wm1rzZU2XvyexB71QIjVyUGvIuu/Vjdq8160D9V41+gKyWqSzMluLbe+YnBcKEf9xywT9/Nqxev3ui2WzWsLqVtpvr7TXdouw1Pr8oI7ar4w0+gKhrStJ+rTikOa+uqHL88xEzQgA4JTJSI78ltWzMlO08seTNNSVoCGu8P4dFotFP/7GKC15d4ck6QeT8zQgMa7TZ+SkJar8YKMe+dY4uZv8+tuWKn3n/OG9+xLH0BZGJOmhlZ/pn65t7cY6fGBiqFV+TnqiNj96dZfnO9u1029uCSihwwMMff6gfvDCR6Gf93dRwNq+4+phrz9sZUSS/rRxn5pbgnpqRkFPv9ZJRxgBAES9CTndNzi774ozdX7OQH1j9CDFdVGPseonUzR2aIoOHPZqcEprmLnvyjNPyjyvGJupX63aJkmqcntV5WkNC1kDOj9RuSt2m1V2q0X+oNFpZeRQoy8siEjSoYbO2y7tt2kauggjkvT2F93fbWQGtmkAAP2aM86mq8ZlhgWRh6aOkST9+jvnaFxWqiwWSyiInEyjBiVr3lVnSWq9ZfgX/7tVkjQg4fh30LRxxrWuhjR36G67+N0dnfq41Hv9agmEh4325zV4/WErJW0S4mydjpmJMAIAOO3MvjhP7zzwDX23oO+3Yo7n0rMGSWrtsnrgSIO11ISeb0Q441r/NDd3CBHt79Bpz92hKLX9yshhbyC0wvL7Oy7UPZedIan1LpuOIcZMhBEAwGnHbrMqLyOp2y6sJ1NaF31EUp2d61i60/bMm/ahQgpfzfjZNWOUcqQI9lCHRmbtV0YON/tDBawOu1X3X3WWbFaLDKPr4lezEEYAAOhDXT1XJpLW7qGVkQ7bNG31J9edM1Q/umSU0pJbQ09dh94h7WtNGlv8ajjSsyQp3i6b1RIKSxUHG7VuR61mvfCRKrroV3IqUcAKAEAfcsbZ9MMpeXpmzdGH8Hm7eLpxd46ujBw951CjT59WHJIk/eiSkZKkAYnx2l3b2KmRWfvzvC3B0ApIW3gZ6nLqQL1XP3zx41CQSXTY9R+3TOjxHPsaKyMAAPSxBdeO0/qfXRH6ubueIV1pWxlpe46PJD365y3y+oPKcjmVn9XayXVAQuvWT13HbZp2tSa1DT61BFqbp7V1h32geLSslvAVFb/J9SOEEQAAToLBqU5ddeQZPG3dV3tiw5EVkHtf2RA61tbRdfrEnNCWz8Aj/VTcnVZGjgaLfYeaJLU+w6btLp1LzhqkkhvPDvU9kdTp6cenGmEEAICT5KnvF+ijBVcqf1jXz9jpysDEowWwz7y3U9LRZ8oUjUoPvTcgsa1m5OjKyIF6r/7lL1tCP1cdeRZP+8+UpGkX5Ojjn18Z+rmrtvKnEmEEAICTxGa1dFnQeiwlN54d+vcvV22VYRihwDGgXaho6zTbfrtl9fYDYZ+1z926MpKe3PUdPjdMGBbR3E4WwggAAFFkQrvn8kjSjgMNofqRtjoR6ehqh7vp6MpIx9qPtgf8dnW7cVfjzEIYAQAgiiQ7w7dMvqo+HPq3q10YCa2MtGsJH+gmVXQXRk59F5auEUYAAIgiHVu1t/UASXHaZW/X8r7tCb+N7W7lDQRbw0j74lRJSks8zsqIzF0aIYwAABBFOnaNbVsZ6fjE47ZbgL1dhJGkDk/7TeuiZqT1l53QVPsMYQQAgCi2bX+9pM5P/m27Vbd9D5O2MNK2atIm/Tg1I2YjjAAAEMXaOq9muRLCjjvsndvG+9tWRhzhKyPta026QgErAAAI8+ubzul0LGtAeBhpWxlp7mKbpuPKSMcakjaWKNmnIYwAABBlbr4gWzd26AHSsVeIs4un+wa6WRmJsx37z73JCyOEEQAAotFtk0aE/ZzS4Zbf0NN9/QEZR/ZZ2rZpEuLCx3YXRizRsTBCGAEAIBqdmz1A/3rT0W6syY7wug/HkW0aw1DoYXjBI2GkLai0Od7KiNmie3YAAMSw9gGk4/NjHO3qQNqe1Ht0ZSR8myb+eNs0FLACAICutK/96LhN47BbQ9ssbUWsQaNtZaRDzYi96/2YKNmlIYwAABCt2q+GdAwjFosltDriPVLE6g/0bpuGDqwAAKBLCe06qXbcppHaNz5rXRkJBFtDicPes22afl3AunjxYuXl5cnpdKqgoEBr1qw55niv16sFCxYoNzdXDodDo0aN0rJly3o1YQAAYkX7PiAdH6AntW981hpC2h6UZ7NawgJId31GokXnb3Ycy5cv19y5c7V48WJNnjxZTz/9tKZOnaotW7YoJyeny3Nuvvlm7d+/X88995zOOOMMVVdXy+/3n/DkAQA4nY0clKQUp10DEuM6rXZInRuftfUZsVstslolHemHdtxtGpMLWCMOI0888YRmzZql2bNnS5IWLVqkN998U0uWLFFJSUmn8W+88YZWr16tnTt3Ki0tTZI0YsSIE5s1AAAxwBln00cLrpS1m/2Ujo3P2mpGbLbw8XG27gpYo2OfJqJ1G5/Pp7KyMhUXF4cdLy4u1tq1a7s85/XXX1dhYaF+/etfa9iwYTrrrLP0wAMPqKmpqdvf4/V65fF4wl4AAMQiZ5yt222W0JN722pG2rZpLB3DyGm0TVNTU6NAIKDM
zMyw45mZmaqqqurynJ07d+r999+X0+nUH//4R9XU1Oiuu+7SwYMHu60bKSkp0aOPPhrJ1AAAiDmOuPCVkbZtGpvVErb1clp2YLV0mL1hGJ2OtQkGg7JYLPrDH/6gCy+8UNdcc42eeOIJvfDCC92ujsyfP19utzv0qqio6M00AQA4rXV8cm/7mpH2bNYoSR3diGhlJCMjQzabrdMqSHV1dafVkjZDhw7VsGHD5HK5QsfGjh0rwzC0Z88enXnmmZ3OcTgccjgckUwNAICYEypg9YeHkUjDh2FyBWtEKyPx8fEqKChQaWlp2PHS0lJNmjSpy3MmT56sffv26fDhw6Fj27dvl9Vq1fDhw3sxZQAAILW/m+ZIAWsojFh71Mas327TzJs3T88++6yWLVumrVu36v7771d5ebnmzJkjqXWLZebMmaHx06dPV3p6un7wgx9oy5Yteu+99/Tggw/qjjvuUEJCQt99EwAAYozTHl7AGuxmm+Z4+t2tvdOmTVNtba0WLlyoyspK5efna9WqVcrNzZUkVVZWqry8PDQ+OTlZpaWluvfee1VYWKj09HTdfPPN+sUvftF33wIAgBjkiAtveta2MmLtcRiJjqWRiMOIJN1111266667unzvhRde6HRszJgxnbZ2AADAiWnrM+LtooDV5w+aNq9IRfeNxwAAoFvddWCNuIC1b6cVMcIIAAD91NGmZ0G9vW2/1u2sldTzMBItBay92qYBAADma1sZKdtdp1c/OtqTq30YcSXEHfdzzC5gZWUEAIB+qq3p2ZfVh8OO260WZbmckqRrzh7S7flRsjDCyggAAP1VWzv4jqxWi5bfWaQ3Nldp+sScUzyryBFGAADop5zdhBGbxaLstET98JKRPfocw+QSVrZpAADop5zdPM33/NyBPTo/WgpYCSMAAPRTXW3TrLxrkpIdkW18UMAKAAB6pauVkXhbz/+0W6KkhJUwAgBAP9VVzYijm62baNb/ZgwAACR1HUbiexFG6MAKAAB6pa0Da3uRhBEKWAEAwAlx2LtYGYmgZiTE5ApWwggAAP3UCa+M9OVkTgBhBACAfqrrAtauG6FFM8IIAAD9VFd3zsTZIl/voIAVAAD0isVi6bRVY4mgKjWSsScTYQQAgH4sxRl3wp9BB1YAANBrKc7+/8xbwggAAP1YYvzRgtXnbis0cSa9RxgBAKAfc7a7e2bSqIxefYZhcgkrYQQAgH7Maj1ahBrpnTRRUr9KGAEAoD/bc7Ax9G97b7qvigJWAABwAgYmxff6XEuU9GAljAAA0I/9603nqGhkulb8eJLZU+m1/n8/EAAAMSx/mEuv/OiiE/oMOrACAABTUMAKAACiAgWsAADAFFGyMEIYAQAA5iKMAAAQ4+jACgAATEEBKwAAiA79sYB18eLFysvLk9PpVEFBgdasWdPt2HfffVcWi6XTa9u2bb2eNAAAOHGWKFkaiTiMLF++XHPnztWCBQu0YcMGTZkyRVOnTlV5efkxz/viiy9UWVkZep155pm9njQAADh9RBxGnnjiCc2aNUuzZ8/W2LFjtWjRImVnZ2vJkiXHPG/w4MEaMmRI6GWz2Y45HgAAnBr9qgOrz+dTWVmZiouLw44XFxdr7dq1xzx3woQJGjp0qK644gq98847xxzr9Xrl8XjCXgAAoG9FxyZNhGGkpqZGgUBAmZmZYcczMzNVVVXV5TlDhw7V0qVLtWLFCq1cuVKjR4/WFVdcoffee6/b31NSUiKXyxV6ZWdnRzJNAAAQAcPkFqy9elBex4IXwzC6LYIZPXq0Ro8eHfq5qKhIFRUVevzxx3XJJZd0ec78+fM1b9680M8ej4dAAgBAX4uSpZGIVkYyMjJks9k6rYJUV1d3Wi05losuukhffvllt+87HA6lpqaGvQAAwOkpojASHx+vgoIClZaWhh0vLS3VpEmTevw5GzZs0NChQyP51QAA4CQx+0F5EW/TzJs3TzNmzFBhYaGKioq0dOlSlZeXa86cOZJat1j27t2rF198UZK0aNEijRgxQuPHj5fP59NLL72kFStWaMWKFX37TQAAQEQsUbJPE3EYmTZtmmpra7Vw4UJVVlYqPz9fq1atUm5uriSpsrIyrOeIz+fTAw88oL179yohIUHjx4/X//7v/+qaa67pu28BAAB6zexbey2G2SW0PeDxeORyueR2u6kfAQCgj/zrG9u05N0dmnVxnv7punF9/vk9/fvNs2kAAICpCCMAAMQ4s/dICCMAAMSo6ChfJYwAABDzDJNLWAkjAADEqG6ap59yhBEAAGAqwggAADGOAlYAAGCKaOnAShgBAACmIowAABCjKGAFAAAQYQQAgJhn9mPqCCMAAMSoKNmlIYwAABDrTL6zlzACAEDMipIKVsIIAAAwFWEEAIAYRwdWAABgiujYpCGMAAAQ8wyTS1gJIwAAxKgoqV8ljAAAAHMRRgAAiHEUsAIAAFNYoqSElTACAECMowMrAAAwBQWsAAAAIowAABDzKGAFAACmiJJdGsIIAAAwF2EEAICYRzt4AABgAu6mAQAAUYECVgAAYApLlCyN9CqMLF68WHl5eXI6nSooKNCaNWt6dN7f//532e12nXfeeb35tQAA4DQUcRhZvny55s6dqwULFmjDhg2aMmWKpk6dqvLy8mOe53a7NXPmTF1xxRW9niwAAOh7/W6b5oknntCsWbM0e/ZsjR07VosWLVJ2draWLFlyzPPuvPNOTZ8+XUVFRb2eLAAAOP1EFEZ8Pp/KyspUXFwcdry4uFhr167t9rznn39eO3bs0MMPP9y7WQIAgJPGMPnWXnskg2tqahQIBJSZmRl2PDMzU1VVVV2e8+WXX+qhhx7SmjVrZLf37Nd5vV55vd7Qzx6PJ5JpAgCAHoiS+tXeFbB2rL41DKPLitxAIKDp06fr0Ucf1VlnndXjzy8pKZHL5Qq9srOzezNNAADQD0QURjIyMmSz2TqtglRXV3daLZGk+vp6ffzxx7rnnntkt9tlt9u1cOFCffrpp7Lb7Xr77be7/D3z58+X2+0OvSoqKiKZJgAAiIDZBawRbdPEx8eroKBApaWluuGGG0LHS0tL9Q//8A+dxqempmrTpk1hxxYvXqy3335b//M//6O8vLwuf4/D4ZDD4YhkagAAIEKWKHlUXkRhRJLmzZunGTNmqLCwUEVFRVq6dKnKy8s1Z84cSa2rGnv37tWLL74oq9Wq/Pz8sPMHDx4sp9PZ6TgAADCHyQsjkYeRadOmqba2VgsXLlRlZaXy8/O1atUq5ebmSpIqKyuP23MEAACYL1oKWC2GYfZO0fF5PB65XC653W6lpqaaPR0AAE4LT63eocf+uk3fKRiux797bp9/fk//fvNsGgAAYpzZyxKEEQAAYlSU7NIQRgAAiHVmd2AljAAAEKOipYCVMAIAAExFGAEAINZRwAoAAMwQLR1YCSMAAMQ4sxuOEUYAAIhRFLACAACIMAIAQMwz+8kwhBEAAGAqwggAADGOAlYAAGAKS5RUsBJGAACAqQgjAADEOJPrVwkjAADEqujYpCGMAAAQ8yhgBQA
ApoiS+lXCCAAAMBdhBACAGEcHVgAAYIoo2aUhjAAAEOsoYAUAAKagAysAAIAIIwAAgA6sAADADFGyS0MYAQAg1hkmL40QRgAAiFFRsjBCGAEAAOYijAAAEONMbsBKGAEAIGZFSQUrYQQAgBjHyggAADBFdKyL9DKMLF68WHl5eXI6nSooKNCaNWu6Hfv+++9r8uTJSk9PV0JCgsaMGaPf/OY3vZ4wAAA4vdgjPWH58uWaO3euFi9erMmTJ+vpp5/W1KlTtWXLFuXk5HQan5SUpHvuuUfnnHOOkpKS9P777+vOO+9UUlKSfvSjH/XJlwAAAL1ndp8Ri2FEtlM0ceJEnX/++VqyZEno2NixY3X99derpKSkR59x4403KikpSf/1X//Vo/Eej0cul0tut1upqamRTBcAAHTjDx/u1oI/btbV4zP19IzCPv/8nv79jmibxufzqaysTMXFxWHHi4uLtXbt2h59xoYNG7R27Vpdeuml3Y7xer3yeDxhLwAAcHL0qwLWmpoaBQIBZWZmhh3PzMxUVVXVMc8dPny4HA6HCgsLdffdd2v27Nndji0pKZHL5Qq9srOzI5kmAADoAUuUlLD2qoDV0uG+ZMMwOh3raM2aNfr444/11FNPadGiRXrllVe6HTt//ny53e7Qq6KiojfTBAAA/UBEBawZGRmy2WydVkGqq6s7rZZ0lJeXJ0k6++yztX//fj3yyCO65ZZbuhzrcDjkcDgimRoAAOglk3dpIlsZiY+PV0FBgUpLS8OOl5aWatKkST3+HMMw5PV6I/nVAACgj0VJA9bIb+2dN2+eZsyYocLCQhUVFWnp0qUqLy/XnDlzJLVusezdu1cvvviiJOl3v/udcnJyNGbMGEmtfUcef/xx3XvvvX34NQAAQG+ZXcAacRiZNm2aamtrtXDhQlVWVio/P1+rVq1Sbm6uJKmyslLl5eWh8cFgUPPnz9euXbtkt9s1atQoPfbYY7rzzjv77lsAAICIRcnCSOR9RsxAnxEAAPreq+vL9dDKTbpybKaeva2f9BkBAACnI3PXJQgjAADEqGgpYCWMAAAQ48wu2CCMAAAQo/p1B1YAAIC+QhgBACDGmX1bLWEEAIBYFR27NIQRAABindktxwgjAADEqChZGCGMAAAAcxFGAACIcRSwAgAAU1iipAUrYQQAgBhHB1YAAGCK6FgXIYwAAACTEUYAAIhxFLACAABTREn9KmEEAIBYRwdWAABgClZGAAAARBgBAAAmI4wAABCjLFHSaYQwAgBAjKMDKwAAMAUFrAAAACKMAAAQ8wyTe7ASRgAAgKkIIwAAxDgKWAEAgCksUVLBShgBAACmIowAABDj2KYBAACmiI5NGsIIAAAxr1/e2rt48WLl5eXJ6XSqoKBAa9as6XbsypUrddVVV2nQoEFKTU1VUVGR3nzzzV5PGAAA9I0oqV+NPIwsX75cc+fO1YIFC7RhwwZNmTJFU6dOVXl5eZfj33vvPV111VVatWqVysrKdNlll+lb3/qWNmzYcMKTBwAA/Z/FMCIrW5k4caLOP/98LVmyJHRs7Nixuv7661VSUtKjzxg/frymTZumf/7nf+7ReI/HI5fLJbfbrdTU1EimCwAAuvGXz/bpnpc3aGJempbfWdTnn9/Tv98RrYz4fD6VlZWpuLg47HhxcbHWrl3bo88IBoOqr69XWlpat2O8Xq88Hk/YCwAA9C1LlJSwRhRGampqFAgElJmZGXY8MzNTVVVVPfqMf//3f1dDQ4NuvvnmbseUlJTI5XKFXtnZ2ZFMEwAARMDkO3t7V8DasWObYRg96uL2yiuv6JFHHtHy5cs1ePDgbsfNnz9fbrc79KqoqOjNNAEAwDFESwGrPZLBGRkZstlsnVZBqqurO62WdLR8+XLNmjVLr732mq688spjjnU4HHI4HJFMDQAA9FMRrYzEx8eroKBApaWlYcdLS0s1adKkbs975ZVXdPvtt+vll1/Wtdde27uZAgCAk8PkfZqIVkYkad68eZoxY4YKCwtVVFSkpUuXqry8XHPmzJHUusWyd+9evfjii5Jag8jMmTP15JNP6qKLLgqtqiQkJMjlcvXhVwEAAJGIkl2ayMPItGnTVFtbq4ULF6qyslL5+flatWqVcnNzJUmVlZVhPUeefvpp+f1+3X333br77rtDx2+77Ta98MILJ/4NAADACTG7A2vEfUbMQJ8RAAD63hubKzXnpU90wYiBem1O9+UWvXVS+owAAAD0NcIIAAAxzuw9EsIIAAAxKzpKWAkjAADEOLOLRwkjAADEqGjpwEoYAQAApiKMAAAQ48zu8kEYAQAgRkXJLg1hBACAWEcBKwAAMIUlSipYCSMAAMBUhBEAAGIcHVgBAIApomOThjACAEDMo4AVAACYIkrqVwkjAADAXIQRAABiHR1YAQCAGdimAQAAUYECVgAAYApLlNzcSxgBAACmIowAABDj6MAKAADMER27NIQRAABinWFyCSthBACAGBUlCyOEEQAAYC7CCAAAMY4CVgAAYApLlLRgJYwAABDjWBkBAACmiI51EcIIAAAwGWEEAIAYx4PyAACAKaKkfrV3YWTx4sXKy8uT0+lUQUGB1qxZ0+3YyspKTZ8+XaNHj5bVatXcuXN7O1cAAHASGCZXsEYcRpYvX665c+dqwYIF2rBhg6ZMmaKpU6eqvLy8y/Fer1eDBg3SggULdO65557whAEAQN+wREkJa8Rh5IknntCsWbM0e/ZsjR07VosWLVJ2draWLFnS5fgRI0boySef1MyZM+VyuU54wgAA4PQSURjx+XwqKytTcXFx2PHi4mKtXbu2zybl9Xrl8XjCXgAA4PQUURipqalRIBBQZmZm2PHMzExVVVX12aRKSkrkcrlCr+zs7D77bAAA0KpfF7B2bB9rGEaftpSdP3++3G536FVRUdFnnw0AAMKZ3YHVHsngjIwM2Wy2Tqsg1dXVnVZLToTD4ZDD4eizzwMAAJ1FycJIZCsj8fHxKigoUGlpadjx0tJSTZo0qU8nBgAAYkNEKyOSNG/ePM2YMUOFhYUqKirS0qVLVV5erjlz5khq3WLZu3evXnzxxdA5GzdulCQdPnxYBw4c0MaNGxUfH69x48b1zbcAAAC9ZpjcgzXiMDJt2jTV1tZq4cKFqqysVH5+vlatWqXc3FxJrU3OOvYcmTBhQujfZWVlevnll5Wbm6uvv/76xGYPAAB6L0r2aSIOI5J011136a677uryvRdeeKHTMbM7uwEAgO6Z/WeaZ9MAABCj+m0HVgAAgL5EGAEAIMaZXUxBGAEAIEb16w6sAADg9GH2jSaEEQAAYlSULIwQRgAAgLkIIwAAxDgKWAEAgCksUVLBShgBACDW0YEVAACYIUoWRggjAADAXIQRAABiHAWsAADAFFGyS0MYAQAg1tGBFQAAmIICVgAAABFGAACIeRSwAgAAk0THPg1hBACAGGdy/SphBACAWEUBKwAAgAgjAADEPMPkElbCCAAAMSpKdmkIIwAAxDoKWAEAgCksUVLBShgBAACmIowAABDj2KYBAACmiI5NGs
IIAAAwGWEEAIAYFSX1q4QRAABgLsIIAAAxzjC5grVXYWTx4sXKy8uT0+lUQUGB1qxZc8zxq1evVkFBgZxOp0aOHKmnnnqqV5MFAAB9xxIlJawRh5Hly5dr7ty5WrBggTZs2KApU6Zo6tSpKi8v73L8rl27dM0112jKlCnasGGDfvazn+knP/mJVqxYccKTBwAAJ87kO3sjDyNPPPGEZs2apdmzZ2vs2LFatGiRsrOztWTJki7HP/XUU8rJydGiRYs0duxYzZ49W3fccYcef/zxE548AADovX5ZwOrz+VRWVqbi4uKw48XFxVq7dm2X56xbt67T+Kuvvloff/yxWlpaujzH6/XK4/GEvQAAwMlR6W7W5r1u035/RGGkpqZGgUBAmZmZYcczMzNVVVXV5TlVVVVdjvf7/aqpqenynJKSErlcrtArOzs7kmkCAIAeSHbYQ//eWdNg2jx6VcDa8cE6hmEc82E7XY3v6nib+fPny+12h14VFRW9mSYAADiGERlJ+s20c3X3ZaN05uBk0+ZhP/6QozIyMmSz2TqtglRXV3da/WgzZMiQLsfb7Xalp6d3eY7D4ZDD4YhkagAAoBdumDDc7ClEtjISHx+vgoIClZaWhh0vLS3VpEmTujynqKio0/i//e1vKiwsVFxcXITTBQAAp5uIt2nmzZunZ599VsuWLdPWrVt1//33q7y8XHPmzJHUusUyc+bM0Pg5c+Zo9+7dmjdvnrZu3aply5bpueee0wMPPNB33wIAAPRbEW3TSNK0adNUW1urhQsXqrKyUvn5+Vq1apVyc3MlSZWVlWE9R/Ly8rRq1Srdf//9+t3vfqesrCz99re/1U033dR33wIAAPRbFsPsHrA94PF45HK55Ha7lZqaavZ0AABAD/T07zfPpgEAAKYijAAAAFMRRgAAgKkIIwAAwFSEEQAAYCrCCAAAMBVhBAAAmIowAgAATEUYAQAApoq4HbwZ2prEejwek2cCAAB6qu3v9vGavfeLMFJfXy9Jys7ONnkmAAAgUvX19XK5XN2+3y+eTRMMBrVv3z6lpKTIYrH02ed6PB5lZ2eroqKCZ96cZFzrU4PrfGpwnU8NrvOpcTKvs2EYqq+vV1ZWlqzW7itD+sXKiNVq1fDhw0/a56empvIf+inCtT41uM6nBtf51OA6nxon6zofa0WkDQWsAADAVIQRAABgqpgOIw6HQw8//LAcDofZUzntca1PDa7zqcF1PjW4zqdGNFznflHACgAATl8xvTICAADMRxgBAACmIowAAABTEUYAAICpYjqMLF68WHl5eXI6nSooKNCaNWvMnlK/UVJSogsuuEApKSkaPHiwrr/+en3xxRdhYwzD0COPPKKsrCwlJCToG9/4hj7//POwMV6vV/fee68yMjKUlJSkb3/729qzZ8+p/Cr9SklJiSwWi+bOnRs6xnXuO3v37tX3v/99paenKzExUeedd57KyspC73OtT5zf79fPf/5z5eXlKSEhQSNHjtTChQsVDAZDY7jOkXvvvff0rW99S1lZWbJYLPrTn/4U9n5fXdO6ujrNmDFDLpdLLpdLM2bM0KFDh078Cxgx6tVXXzXi4uKMZ555xtiyZYtx3333GUlJScbu3bvNnlq/cPXVVxvPP/+8sXnzZmPjxo3Gtddea+Tk5BiHDx8OjXnssceMlJQUY8WKFcamTZuMadOmGUOHDjU8Hk9ozJw5c4xhw4YZpaWlxieffGJcdtllxrnnnmv4/X4zvlZUW79+vTFixAjjnHPOMe67777Qca5z3zh48KCRm5tr3H777caHH35o7Nq1y3jrrbeMr776KjSGa33ifvGLXxjp6enGX/7yF2PXrl3Ga6+9ZiQnJxuLFi0KjeE6R27VqlXGggULjBUrVhiSjD/+8Y9h7/fVNf3mN79p5OfnG2vXrjXWrl1r5OfnG9ddd90Jzz9mw8iFF15ozJkzJ+zYmDFjjIceesikGfVv1dXVhiRj9erVhmEYRjAYNIYMGWI89thjoTHNzc2Gy+UynnrqKcMwDOPQoUNGXFyc8eqrr4bG7N2717BarcYbb7xxar9AlKuvrzfOPPNMo7S01Lj00ktDYYTr3Hd++tOfGhdffHG373Ot+8a1115r3HHHHWHHbrzxRuP73/++YRhc577QMYz01TXdsmWLIcn44IMPQmPWrVtnSDK2bdt2QnOOyW0an8+nsrIyFRcXhx0vLi7W2rVrTZpV/+Z2uyVJaWlpkqRdu3apqqoq7Bo7HA5deumloWtcVlamlpaWsDFZWVnKz8/nf4cO7r77bl177bW68sorw45znfvO66+/rsLCQn33u9/V4MGDNWHCBD3zzDOh97nWfePiiy/W//3f/2n79u2SpE8//VTvv/++rrnmGklc55Ohr67punXr5HK5NHHixNCYiy66SC6X64Sve794UF5fq6mpUSAQUGZmZtjxzMxMVVVVmTSr/sswDM2bN08XX3yx8vPzJSl0Hbu6xrt37w6NiY+P18CBAzuN4X+Ho1599VV98skn+uijjzq9x3XuOzt37tSSJUs0b948/exnP9P69ev1k5/8RA6HQzNnzuRa95Gf/vSncrvdGjNmjGw2mwKBgH75y1/qlltukcR/0ydDX13TqqoqDR48uNPnDx48+ISve0yGkTYWiyXsZ8MwOh3D8d1zzz367LPP9P7773d6rzfXmP8djqqoqNB9992nv/3tb3I6nd2O4zqfuGAwqMLCQv3qV7+SJE2YMEGff/65lixZopkzZ4bGca1PzPLly/XSSy/p5Zdf1vjx47Vx40bNnTtXWVlZuu2220LjuM59ry+uaVfj++K6x+Q2TUZGhmw2W6ckV11d3Sk54tjuvfdevf7663rnnXc0fPjw0PEhQ4ZI0jGv8ZAhQ+Tz+VRXV9ftmFhXVlam6upqFRQUyG63y263a/Xq1frtb38ru90euk5c5xM3dOhQjRs3LuzY2LFjVV5eLon/pvvKgw8+qIceekjf+973dPbZZ2vGjBm6//77VVJSIonrfDL01TUdMmSI9u/f3+nzDxw4cMLXPSbDSHx8vAoKClRaWhp2vLS0VJMmTTJpVv2LYRi65557tHLlSr399tvKy8sLez8vL09DhgwJu8Y+n0+rV68OXeOCggLFxcWFjamsrNTmzZv53+GIK664Qps2bdLGjRtDr8LCQt16663auHGjRo4cyXXuI5MnT+50e/r27duVm5srif+m+0pjY6Os1vA/PTabLXRrL9e57/XVNS0qKpLb7db69etDYz788EO53e4Tv+4nVP7aj7Xd2vvcc88ZW7ZsMebOnWskJSUZX3/9tdlT6xd+/OMfGy6Xy3j33XeNysrK0KuxsTE05rHHHjNcLpexcuVKY9OmTcYtt9zS5a1kw4cPN9566y3jk08+MS6//PKYvj2vJ9rfTWMYXOe+sn79esNutxu//OUvjS+//NL4wx/+YCQmJhovvfRSaAzX+sTddtttxrBhw0K39q5cudLIyMgw/vEf/zE0huscufr6emPDhg3Ghg0bDEnGE088YWzYsCHUrqKvruk3v
/lN45xzzjHWrVtnrFu3zjj77LO5tfdE/e53vzNyc3ON+Ph44/zzzw/dlorjk9Tl6/nnnw+NCQaDxsMPP2wMGTLEcDgcxiWXXGJs2rQp7HOampqMe+65x0hLSzMSEhKM6667zigvLz/F36Z/6RhGuM59589//rORn59vOBwOY8yYMcbSpUvD3udanziPx2Pcd999Rk5OjuF0Oo2RI0caCxYsMLxeb2gM1zly77zzTpf/N/m2224zDKPvrmltba1x6623GikpKUZKSopx6623GnV1dSc8f4thGMaJra0AAAD0XkzWjAAAgOhBGAEAAKYijAAAAFMRRgAAgKkIIwAAwFSEEQAAYCrCCAAAMBVhBAAAmIowAgAATEUYAQAApiKMAAAAUxFGAACAqf4fYjN7KPk92KAAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "from eval import data_removal_f1\n", + "from metrics import weighted_acc_drop\n", + "acc = data_removal_f1(dic_loaded, X_train_scaled, y_train_balanced, X_test_scaled, y_test_balanced)\n", + "plt.plot(range(len(acc)), acc)\n", + "res = weighted_acc_drop(acc)\n", + "print(\"The weighted accuracy drop is {:.3f}\".format(res))" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{0: -0.0004520000000000002, 1: -0.002193999999999999, 2: 0.0009899999999999993, 3: 0.000688, 4: 0.001975999999999998, 5: 0.001773999999999998, 6: -0.0007680000000000006, 7: 0.0007699999999999997, 8: 0.0004660000000000004, 9: 0.0019260000000000006, 10: 0.002362000000000002, 11: 0.0007120000000000026, 12: 0.0012820000000000002, 13: -0.0003839999999999993, 14: 0.0005640000000000003, 15: 0.0017879999999999975, 16: 0.0010200000000000007, 17: 0.0007939999999999988, 18: 0.0009019999999999995, 19: 0.0015500000000000008, 20: 0.0001859999999999987, 21: 0.0009160000000000006, 22: 0.00048200000000000033, 23: -0.0008460000000000009, 24: 0.0017059999999999996, 25: 0.0016219999999999993, 26: 0.0006419999999999998, 27: 0.0011360000000000025, 28: 0.00029400000000000064, 29: 4.199999999999945e-05, 30: 0.00041400000000000106, 31: -0.001978000000000001, 32: -0.0005200000000000003, 33: 0.0016099999999999999, 34: 0.0008940000000000005, 35: 0.0015059999999999993, 36: 0.0008240000000000006, 37: 0.0016019999999999988, 38: -0.0001720000000000005, 39: 0.00020799999999999963, 40: -0.0003959999999999986, 41: 0.0016740000000000004, 42: 0.0011020000000000005, 43: 0.0005680000000000001, 44: 0.0013999999999999993, 45: 0.0017439999999999986, 46: -0.0005060000000000002, 47: 0.0016620000000000016, 48: 0.0010239999999999984, 49: 0.0015139999999999989, 50: 0.0010160000000000002, 51: 0.0018699999999999993, 52: 0.0015800000000000022, 53: -0.000551999999999999, 54: 3.1999999999998637e-05, 55: 0.0007820000000000007, 56: 0.0009839999999999985, 57: 0.0007499999999999998, 58: 0.0018160000000000006, 59: 0.0018379999999999978, 60: 0.0007080000000000011, 61: 0.0017319999999999994, 62: 0.0004099999999999996, 63: 0.0007499999999999996, 64: -0.0004900000000000003, 65: -0.00023600000000000018, 66: 0.0008959999999999991, 67: 0.0007580000000000009, 68: 0.0013739999999999987, 69: -0.0003979999999999992, 70: 0.0026360000000000008, 71: 0.001387999999999999, 72: 0.0019279999999999976, 73: -9.599999999999892e-05, 74: 0.001933999999999998, 75: -0.0003660000000000005, 76: 0.0018020000000000009, 77: 0.0017920000000000028, 78: -0.0011700000000000016, 79: 0.0014599999999999993, 80: 0.0016560000000000008, 81: 0.0020899999999999994, 82: 0.00014200000000000112, 83: 0.000800000000000001, 84: 0.0006459999999999992, 85: 0.002243999999999999, 86: 0.0007259999999999994, 87: 0.0014899999999999991, 88: -0.0015699999999999987, 89: 0.002871999999999996, 90: -6.999999999999944e-05, 91: 0.0021039999999999987, 92: 0.0020579999999999956, 93: 0.0008380000000000001, 94: -0.00014399999999999778, 95: 0.002217999999999997, 96: -0.00024999999999999984, 97: -5.3999999999998914e-05, 98: -0.0008380000000000002, 99: 0.0012400000000000015, 100: 0.00031799999999999927, 101: 0.0016879999999999996, 102: -0.0009040000000000006, 103: 0.001620000000000001, 104: 0.00011799999999999964, 105: -2.1999999999999593e-05, 106: 0.002144000000000001, 107: 0.0008580000000000024, 108: 0.0003100000000000004, 109: 0.000915999999999997, 
110: 0.0005619999999999991, 111: 0.001746000000000004, 112: 0.000458, 113: 0.0010119999999999992, 114: 0.0007259999999999992, 115: 0.0006980000000000005, 116: 0.0006639999999999983, 117: 0.0007639999999999995, 118: 0.0008380000000000001, 119: -0.00024800000000000034, 120: 0.00028000000000000095, 121: 0.0004539999999999992, 122: 0.0006439999999999997, 123: -7.1999999999999e-05, 124: 0.00039399999999999955, 125: -0.0005720000000000002, 126: 0.0014639999999999994, 127: 0.0005459999999999998, 128: -0.001205999999999997, 129: -3.800000000000069e-05, 130: 0.0007260000000000004, 131: 0.0006279999999999999, 132: 0.0006040000000000007, 133: 0.0008240000000000014, 134: 0.0010339999999999995, 135: 0.0006560000000000001, 136: 0.0018840000000000005, 137: -0.0009139999999999979, 138: -4.19999999999991e-05, 139: 0.00044599999999999994, 140: 0.0010280000000000003, 141: 0.0009440000000000017, 142: -0.0005759999999999999, 143: -0.00041000000000000026, 144: 0.0016019999999999993, 145: 0.0013920000000000007, 146: 0.0011920000000000001, 147: 0.0005140000000000021, 148: 0.0006419999999999995, 149: 0.003, 150: 0.002352, 151: 0.0016539999999999988, 152: 0.0015339999999999993, 153: 0.0010060000000000021, 154: 0.0002959999999999995, 155: 0.00026600000000000034, 156: 0.0004279999999999999, 157: 0.000416, 158: 0.0016360000000000003, 159: 0.0007859999999999996, 160: 0.0020519999999999974, 161: -0.0004600000000000003, 162: 0.0019899999999999983, 163: 0.001548, 164: -0.0006880000000000007, 165: 0.0016499999999999998, 166: 0.002816, 167: 0.0009599999999999985, 168: -0.0005279999999999987, 169: 0.0014980000000000002, 170: -0.0005739999999999971, 171: 0.0010399999999999997, 172: 0.001078000000000001, 173: 0.0007379999999999997, 174: -0.00045999999999999985, 175: 0.002314000000000002, 176: 0.0009720000000000007, 177: 0.0009039999999999994, 178: 0.0030559999999999993, 179: 0.00046599999999999946, 180: 0.0006119999999999999, 181: 0.0003060000000000011, 182: 0.0003040000000000001, 183: 0.001686000000000001, 184: -0.0004020000000000002, 185: 0.0001759999999999991, 186: -0.00034999999999999935, 187: 0.001382, 188: -0.0008119999999999998, 189: 0.00039799999999999965, 190: 0.0003540000000000002, 191: 0.00018999999999999969, 192: 0.0006200000000000007, 193: 0.001957999999999999, 194: 0.0006819999999999993, 195: 0.001074000000000001, 196: 0.001276, 197: -0.000344, 198: -0.0011279999999999994, 199: 0.0019679999999999997, 200: 0.0015880000000000022, 201: 0.0020460000000000035, 202: 0.00031999999999999976, 203: -0.00022599999999999918, 204: 0.0014199999999999992, 205: 0.0005820000000000011, 206: 0.002261999999999998, 207: -0.0001160000000000008, 208: 0.0015220000000000025, 209: 0.0007359999999999991, 210: 0.0003099999999999997, 211: 0.0002980000000000013, 212: 0.0005759999999999989, 213: 0.002654, 214: 0.0014780000000000004, 215: 0.0010879999999999998, 216: 0.0025120000000000025, 217: 0.0006759999999999994, 218: -0.0006599999999999984, 219: 0.000858000000000001, 220: 0.0008439999999999989, 221: 0.00030199999999999953, 222: -0.00037600000000000025, 223: 0.0006119999999999999, 224: 0.0021639999999999962, 225: 0.0014300000000000011, 226: 0.002569999999999996, 227: 0.0016859999999999978, 228: 0.0005640000000000007, 229: 0.0007539999999999998, 230: 0.0030720000000000022, 231: -0.0009379999999999992, 232: -0.0012600000000000016, 233: 0.0007300000000000002, 234: 0.0005259999999999999, 235: 0.002046000000000003, 236: 0.0004700000000000005, 237: -0.00010199999999999939, 238: 0.0010560000000000003, 239: 0.00036400000000000007, 240: 
0.0011159999999999985, 241: 0.0001740000000000001, 242: -0.00021599999999999885, 243: 0.0002859999999999991, 244: 0.0008499999999999993, 245: -0.0001240000000000004, 246: 0.0001640000000000006, 247: -0.0008579999999999994, 248: 0.00041200000000000167, 249: 0.0029219999999999966, 250: 0.0008799999999999998, 251: -0.00011600000000000022, 252: -0.0009479999999999995, 253: 0.001976000000000002, 254: 0.0017259999999999986, 255: -5.9999999999999365e-05, 256: 0.0017580000000000017, 257: 0.0020499999999999993, 258: -0.00024399999999999926, 259: -3.199999999999978e-05, 260: 0.0009340000000000007, 261: 0.00032000000000000084, 262: 0.0016500000000000006, 263: 0.0018340000000000008, 264: 0.0011340000000000011, 265: 0.0017260000000000005, 266: 0.002173999999999999, 267: -0.0002860000000000005, 268: 0.0011800000000000005, 269: 0.0007300000000000007, 270: 0.0032039999999999985, 271: 0.000534, 272: -0.00013799999999999937, 273: 0.0006559999999999999, 274: 0.0006220000000000008, 275: 0.0006640000000000011, 276: -0.000326, 277: 0.0018779999999999984, 278: -0.0005420000000000005, 279: 0.0016859999999999985, 280: 0.0005799999999999989, 281: -0.0011019999999999992, 282: 0.00042999999999999934, 283: -0.0008919999999999984, 284: 0.0004539999999999998, 285: -0.0007379999999999997, 286: -0.00016600000000000005, 287: 0.0044579999999999976, 288: -0.0006020000000000001, 289: 0.001784, 290: 0.0010920000000000018, 291: 0.00031400000000000004, 292: 0.0004579999999999986, 293: 0.0006679999999999988, 294: 0.0003499999999999996, 295: -0.0013839999999999996, 296: -5.400000000000008e-05, 297: 0.002516, 298: 0.0011560000000000008, 299: -0.0009840000000000005, 300: 0.002221999999999999, 301: 0.0008440000000000011, 302: 0.00034600000000000066, 303: 0.0020240000000000032, 304: 0.0002880000000000005, 305: 0.0014000000000000004, 306: 0.001524000000000001, 307: 0.003934000000000007, 308: 0.0010220000000000003, 309: 0.001174000000000001, 310: 0.0013779999999999995, 311: 0.002035999999999998, 312: 0.0005519999999999991, 313: 0.0007859999999999998, 314: 0.0003720000000000017, 315: 0.0009839999999999988, 316: 0.0001519999999999996, 317: -5.999999999998748e-06, 318: 0.002006, 319: -6.0000000000001645e-06, 320: -0.00019999999999999968, 321: 0.0006539999999999999, 322: 0.00017800000000000067, 323: 0.00048800000000000075, 324: 0.00020200000000000009, 325: 0.0013659999999999998, 326: 0.0004499999999999992, 327: 0.0005599999999999986, 328: -0.0004959999999999997, 329: -0.00015199999999999966, 330: -0.00011400000000000045, 331: 0.002618, 332: 0.0021700000000000014, 333: 0.0006179999999999994, 334: 0.0016659999999999995, 335: -0.00022599999999999986, 336: -0.0005719999999999988, 337: 0.00036200000000000007, 338: -0.00018999999999999993, 339: -0.0007419999999999997, 340: 0.0016760000000000006, 341: 0.0021960000000000013, 342: -0.000479999999999999, 343: 0.002572, 344: -1.999999999999815e-06, 345: 0.0007340000000000005, 346: 0.0003000000000000004, 347: 0.0004559999999999984, 348: -0.0005140000000000004, 349: 0.0009320000000000006, 350: 0.0021559999999999978, 351: 4.000000000000923e-06, 352: 0.0005199999999999996, 353: -0.0004420000000000017, 354: 5.799999999999989e-05, 355: 0.0005579999999999997, 356: 0.00016799999999999915, 357: 0.00020600000000000056, 358: 0.0004520000000000004, 359: 0.0016060000000000017, 360: 6.200000000000007e-05, 361: -0.00016199999999999976, 362: 0.0009359999999999991, 363: 0.0006120000000000001, 364: 0.0017500000000000018, 365: 0.0013259999999999997, 366: 0.0003240000000000001, 367: -0.00046199999999999876, 368: 
0.0010839999999999997, 369: 0.0023219999999999985, 370: 0.0010140000000000017, 371: 0.0005819999999999997, 372: 0.0075200000000000015, 373: 0.0007119999999999991, 374: -0.0013439999999999997, 375: -0.0004059999999999995, 376: 0.0013739999999999996, 377: 0.0006420000000000008, 378: 0.0013979999999999989, 379: 0.0008700000000000016, 380: 0.00043600000000000095, 381: 0.000597999999999999, 382: 0.0018020000000000004, 383: 0.001669999999999998, 384: 0.0009239999999999997, 385: 0.0005499999999999988, 386: 0.0007980000000000002, 387: 0.0009400000000000016, 388: -0.0004599999999999988, 389: 0.0005259999999999999, 390: -0.001255999999999997, 391: 0.003610000000000004, 392: 0.0012, 393: 0.0007460000000000004, 394: 0.003530000000000004, 395: 0.0008659999999999997, 396: 0.001234000000000001, 397: 0.0004799999999999986, 398: 0.0009160000000000012, 399: 0.0005220000000000002, 400: 0.0011839999999999993, 401: 0.0005260000000000005, 402: 0.0007159999999999998, 403: 0.0017199999999999958, 404: 0.0014899999999999987, 405: 0.0008440000000000003, 406: 0.00033600000000000134, 407: -0.0003320000000000001, 408: 0.0006980000000000028, 409: 0.0011299999999999988, 410: 0.0008500000000000001, 411: 0.002822000000000002, 412: 0.0009999999999999994, 413: 0.0014319999999999992, 414: 0.0004460000000000003, 415: 0.0017299999999999991, 416: 0.0006979999999999996, 417: 0.0013620000000000017, 418: -0.0010059999999999995, 419: 0.0006860000000000019, 420: 0.0005480000000000012, 421: 4.799999999999975e-05, 422: -0.0008000000000000004, 423: -7.399999999999983e-05, 424: 0.0012659999999999998, 425: 0.001389999999999999, 426: -1.1999999999999387e-05, 427: 0.0003320000000000015, 428: 0.0015460000000000003, 429: 0.0021919999999999995, 430: -0.000639999999999999, 431: 0.0007819999999999996, 432: 0.0025279999999999994, 433: -0.000717999999999998, 434: 0.0014640000000000015, 435: 0.0023100000000000004, 436: 0.0006899999999999988, 437: 0.002360000000000003, 438: 0.00017000000000000017, 439: 0.001976, 440: 0.0011439999999999998, 441: 0.0007999999999999996, 442: 0.0005400000000000003, 443: 0.00148, 444: 0.0023640000000000037, 445: -0.0008540000000000008, 446: -0.00038599999999999984, 447: 0.0011240000000000002, 448: 4.000000000000026e-05, 449: 0.0011659999999999986, 450: 0.002243999999999999, 451: -3.799999999999912e-05, 452: 0.0016359999999999997, 453: -0.00099, 454: -0.0002340000000000008, 455: 0.0011600000000000013, 456: 0.00025000000000000076, 457: 0.000977999999999996, 458: 0.00086, 459: 0.00027800000000000107, 460: 0.0008380000000000005, 461: -0.0018999999999999987, 462: 0.00207, 463: -2.4000000000000038e-05, 464: 0.00045600000000000073, 465: -7.000000000000005e-05, 466: 0.00219, 467: -0.00043000000000000015, 468: 0.0006840000000000007, 469: 0.0009400000000000016, 470: 0.0018879999999999997, 471: -0.0004959999999999993, 472: 0.0008799999999999999, 473: 0.002205999999999996, 474: 0.0019500000000000032, 475: -0.0005760000000000001, 476: 0.000423999999999999, 477: 0.0008759999999999999, 478: -0.001375999999999999, 479: 0.0020220000000000012, 480: 0.001691999999999998, 481: 0.0004000000000000002, 482: 0.0004840000000000002, 483: 0.0016920000000000019, 484: 0.002316000000000002, 485: 0.0006540000000000014, 486: 0.0008580000000000001, 487: -0.000572, 488: 0.0012839999999999987, 489: -0.0007339999999999998, 490: -3.999999999999562e-06, 491: 0.0010479999999999988, 492: 0.0009699999999999997, 493: 0.0007560000000000008, 494: 0.0014100000000000026, 495: 0.0018180000000000006, 496: 0.001740000000000002, 497: 0.0005620000000000007, 498: 
0.0009979999999999993, 499: -0.001475999999999999, 500: 0.0006380000000000005, 501: -0.00022200000000000035, 502: 0.001120000000000001, 503: 0.001862000000000002, 504: 0.0009200000000000001, 505: 0.0015559999999999994, 506: 0.00164, 507: 0.0014120000000000018, 508: 0.0003079999999999986, 509: 0.0004720000000000012, 510: 0.0025099999999999966, 511: 0.0017800000000000008, 512: -0.0008179999999999985, 513: 0.0007419999999999991, 514: 0.00044799999999999994, 515: -1.0000000000000207e-05, 516: 0.0005519999999999989, 517: -0.0001419999999999992, 518: -0.00044999999999999966, 519: 0.000498, 520: 0.0004960000000000005, 521: 0.0007179999999999982, 522: -0.0006599999999999997, 523: -0.0001339999999999996, 524: -3.599999999999952e-05, 525: 0.0010700000000000002, 526: 9.800000000000051e-05, 527: 0.0003899999999999999, 528: 0.0009999999999999998, 529: 0.002786000000000002, 530: 8.200000000000067e-05, 531: 0.001379999999999998, 532: 0.001246, 533: 0.0009639999999999988, 534: 0.0020240000000000015, 535: -7.200000000000007e-05, 536: -0.0009539999999999976, 537: 0.001168, 538: 0.000831999999999999, 539: 0.00028599999999999974, 540: -0.00016599999999999945, 541: 0.0015559999999999997, 542: 0.0011520000000000007, 543: 0.00075, 544: 0.0005859999999999998, 545: 0.0017940000000000002, 546: 0.0018480000000000003, 547: -0.0006919999999999991, 548: 0.001533999999999998, 549: 0.0007939999999999959, 550: 0.00029600000000000015, 551: 0.0006539999999999985, 552: 0.0006139999999999989, 553: 0.0011419999999999972, 554: 0.0001019999999999995, 555: 0.001031999999999999, 556: 0.0007199999999999981, 557: 0.0036379999999999984, 558: -0.0012380000000000017, 559: 0.0010239999999999993, 560: -0.00048800000000000064, 561: 0.0004900000000000006, 562: 0.0019899999999999974, 563: 0.0008119999999999998, 564: -0.0003960000000000003, 565: 0.00043600000000000035, 566: -0.00021999999999999995, 567: 0.003, 568: 0.0018140000000000003, 569: 0.0006160000000000006, 570: 0.0008040000000000002, 571: -0.00024200000000000038, 572: -0.000807999999999998, 573: -0.00015200000000000014, 574: -0.0014019999999999998, 575: -0.0005740000000000003, 576: -0.0007299999999999987, 577: 0.001105999999999999, 578: -0.00020799999999999982, 579: 0.0007200000000000003, 580: 9.999999999999961e-05, 581: 0.0012740000000000002, 582: 0.0017820000000000023, 583: -0.0002579999999999998, 584: 0.001974000000000002, 585: 0.001978000000000002, 586: 0.0007300000000000002, 587: -7.800000000000074e-05, 588: 0.000712000000000003, 589: 0.000984, 590: 0.0003060000000000003, 591: -0.0006379999999999997, 592: 5.60000000000008e-05, 593: 0.0012120000000000026, 594: 0.00020599999999999953, 595: 0.000639999999999999, 596: 0.0025460000000000005, 597: 0.00030400000000000045, 598: 0.0007500000000000006, 599: 0.0017660000000000013, 600: 2.800000000000034e-05, 601: 0.0006779999999999998, 602: 0.0003440000000000001, 603: 0.0007979999999999984, 604: 0.00027799999999999966, 605: 0.0012679999999999992, 606: 0.0005979999999999997, 607: 0.0017760000000000005, 608: 0.00017000000000000072, 609: 6.400000000000105e-05, 610: 0.0020340000000000002, 611: 0.0015980000000000007, 612: 0.002334, 613: 0.0011959999999999996, 614: 0.0004500000000000013, 615: 0.00011999999999999973, 616: 0.0013039999999999996, 617: 0.0008360000000000004, 618: 0.0004479999999999997, 619: 0.0005939999999999988, 620: 0.0019740000000000005, 621: 0.0006720000000000006, 622: 0.0017840000000000028, 623: -0.0010859999999999993, 624: 0.0005420000000000007, 625: 0.0012239999999999994, 626: 0.003086000000000001, 627: 
0.00020999999999999936, 628: 0.0008500000000000002, 629: 0.000607999999999999, 630: 0.0007319999999999984, 631: 0.002672000000000001, 632: 3.000000000000028e-05, 633: 0.001577999999999999, 634: 0.0009679999999999995, 635: 0.00039000000000000075, 636: -0.00027600000000000026, 637: 0.0012960000000000029, 638: 0.0010099999999999994, 639: 0.00047400000000000084, 640: 0.0007279999999999982, 641: 0.0009540000000000012, 642: -0.0006999999999999989, 643: 0.0010219999999999988, 644: 0.0008680000000000004, 645: 0.002398000000000001, 646: 4.6000000000000535e-05, 647: 0.0007459999999999987, 648: 0.00020599999999999885, 649: 0.0018059999999999977, 650: 0.002792000000000001, 651: 0.0001720000000000008, 652: -6.000000000000058e-05, 653: 0.0006260000000000003, 654: -0.0004779999999999992, 655: 0.0018339999999999984, 656: 0.001177999999999998, 657: -0.00037199999999999977, 658: 0.0012879999999999988, 659: -0.00025199999999999946, 660: 0.0014300000000000003, 661: -0.00014200000000000033, 662: 0.001038000000000001, 663: 0.0009479999999999992, 664: 0.0014259999999999998, 665: 0.0007879999999999989, 666: 0.0006199999999999991, 667: 0.0021919999999999986, 668: -0.0008560000000000027, 669: 0.0003179999999999989, 670: -0.000597999999999999, 671: -0.0008500000000000001, 672: 0.0004100000000000006, 673: 0.0015659999999999988, 674: -0.0017699999999999994, 675: 0.0004159999999999996, 676: 0.000738, 677: -7.79999999999991e-05, 678: -0.0005759999999999991, 679: -3.999999999999735e-06, 680: -0.0008839999999999981, 681: 0.0021040000000000013, 682: 0.0011120000000000001, 683: 0.0008420000000000003, 684: 0.003653999999999998, 685: 0.0010739999999999997, 686: 0.0021759999999999982, 687: 0.0017120000000000013, 688: 0.0005939999999999987, 689: 0.0017119999999999982, 690: 0.0015900000000000003, 691: 0.0015379999999999984, 692: 0.0017439999999999995, 693: 0.0008380000000000006, 694: 0.0010239999999999987, 695: 0.0009160000000000004, 696: 0.0015020000000000003, 697: 0.0012800000000000016, 698: 0.00023800000000000118, 699: 0.00078, 700: 1.1999999999999741e-05, 701: -0.00033600000000000134, 702: 0.0006400000000000004, 703: 0.0017820000000000023, 704: 0.000640000000000001, 705: 0.00029200000000000016, 706: -0.0005640000000000002, 707: -0.00018199999999999927, 708: 0.0009819999999999978, 709: -0.0005599999999999997, 710: 0.002087999999999997, 711: 0.0016939999999999998, 712: 0.0003040000000000005, 713: 0.0005039999999999992, 714: 0.001268000000000001, 715: 0.0005040000000000006, 716: 0.0009780000000000014, 717: 0.0016279999999999995, 718: -0.00038799999999999935, 719: -0.0013640000000000006, 720: 0.0016260000000000005, 721: 0.0006519999999999997, 722: 0.0015760000000000019, 723: 0.0010719999999999992, 724: 0.0009980000000000006, 725: 0.0011200000000000016, 726: 0.00044399999999999925, 727: -0.0003940000000000002, 728: 0.0005560000000000008, 729: -0.0007339999999999995, 730: 0.0015060000000000004, 731: 0.0011599999999999994, 732: 0.0008579999999999997, 733: -0.0011439999999999988, 734: 0.0018580000000000016, 735: 0.00040599999999999984, 736: 0.000888, 737: 0.0015799999999999985, 738: 0.0013260000000000004, 739: 0.003487999999999998, 740: 0.003684, 741: 0.0011299999999999967, 742: 0.002544000000000001, 743: 0.000925999999999999, 744: 0.001048, 745: 0.0010160000000000008, 746: 0.0007279999999999984, 747: 9.00000000000007e-05, 748: 0.0006640000000000007, 749: -7.000000000000001e-05, 750: 0.0006640000000000006, 751: 0.00019599999999999972, 752: -0.0004980000000000006, 753: 0.001224, 754: 0.0017640000000000006, 755: 0.0015940000000000006, 
756: 0.0009179999999999984, 757: -0.0009439999999999986, 758: 0.0025720000000000022, 759: 0.0018559999999999985, 760: 0.001088000000000001, 761: 0.00014400000000000063, 762: 0.0008559999999999996, 763: -0.00041600000000000025, 764: 0.002273999999999999, 765: -0.0011119999999999986, 766: -0.001316000000000001, 767: 0.0020300000000000014, 768: 0.0004680000000000013, 769: 0.0016299999999999986, 770: 0.002243999999999995, 771: 0.0004480000000000003, 772: 0.0008520000000000018, 773: 0.0003440000000000013, 774: -0.00028, 775: 0.0004819999999999998, 776: -9.999999999999948e-05, 777: 0.000208, 778: 0.0017200000000000023, 779: 0.001769999999999997, 780: 0.0002580000000000006, 781: 0.000501999999999999, 782: 0.001941999999999999, 783: 0.0018459999999999974, 784: 0.0011260000000000007, 785: 0.0007959999999999993, 786: -7.400000000000074e-05, 787: 0.0018499999999999975, 788: -0.0003520000000000005, 789: -0.0008059999999999993, 790: 0.0004999999999999988, 791: 0.0009680000000000005, 792: -0.0005880000000000006, 793: -0.0004279999999999995, 794: 0.001003999999999999, 795: 0.0003959999999999995, 796: 0.0007359999999999999, 797: 0.0007680000000000014, 798: -0.0002599999999999999, 799: 0.001697999999999999, 800: -0.0002640000000000009, 801: 0.000964, 802: 0.0009620000000000042, 803: 0.0010140000000000014, 804: 0.0018179999999999997, 805: 0.0010400000000000001, 806: 0.0005280000000000011, 807: -0.00014999999999999896, 808: 0.0009640000000000002, 809: 0.001754, 810: 0.0012700000000000014, 811: -0.0007859999999999996, 812: -0.00048599999999999804, 813: 0.0007980000000000006, 814: 0.0014380000000000007, 815: 0.001971999999999999, 816: 0.0013220000000000005, 817: -0.00017599999999999975, 818: -0.00024399999999999964, 819: 0.0007479999999999996, 820: 0.0015219999999999984, 821: -0.0004699999999999994, 822: 0.000140000000000001, 823: 0.0004900000000000018, 824: 0.001022000000000004, 825: 0.000961999999999998, 826: 0.00022600000000000015, 827: -0.000728, 828: 0.003075999999999998, 829: 0.001, 830: 0.0006159999999999991, 831: 0.0009420000000000003, 832: 0.0007580000000000005, 833: -2.9999999999999997e-05, 834: 0.0020280000000000025, 835: 0.0011359999999999988, 836: 0.00015199999999999868, 837: 0.0014560000000000003, 838: 0.000272000000000001, 839: 0.00036800000000000027, 840: -6.199999999999973e-05, 841: 6.799999999999969e-05, 842: -0.0010960000000000002, 843: 6.0000000000000056e-05, 844: -0.0008720000000000005, 845: 0.0005719999999999989, 846: 0.0001319999999999983, 847: 0.0006140000000000001, 848: -0.00010399999999999934, 849: -0.00013600000000000013, 850: -2.200000000000001e-05, 851: 0.0023159999999999973, 852: 0.0007880000000000005, 853: -0.001234, 854: -0.0005379999999999987, 855: 0.0014199999999999985, 856: 0.0013620000000000006, 857: 0.001108000000000001, 858: 0.0005100000000000016, 859: 0.0006779999999999997, 860: 0.0013499999999999994, 861: 2.4000000000001024e-05, 862: 0.0015720000000000013, 863: 0.0009800000000000006, 864: 0.0011659999999999997, 865: 0.0011639999999999997, 866: 0.0009100000000000004, 867: -0.00039600000000000144, 868: 0.00043800000000000067, 869: -0.0005579999999999991, 870: 0.000501999999999999, 871: 0.001598, 872: 0.0018159999999999993, 873: 0.0006380000000000003, 874: 0.0005440000000000001, 875: 0.0020880000000000057, 876: -0.0007379999999999983, 877: 0.001407999999999999, 878: 0.002513999999999999, 879: -0.0014159999999999976, 880: -0.0005819999999999999, 881: 0.001933999999999998, 882: -0.001285999999999998, 883: 0.0013380000000000026, 884: -0.0007199999999999996, 885: 
0.0027259999999999984, 886: 0.001004000000000001, 887: 0.0009700000000000008, 888: 0.00024000000000000017, 889: 0.002374000000000001, 890: 0.0005280000000000009, 891: 0.0007139999999999997, 892: 0.0006980000000000006, 893: 0.0007579999999999991, 894: 0.001130000000000002, 895: -0.002116, 896: -0.000903999999999999, 897: -0.00042799999999999994, 898: 0.0039480000000000045, 899: 0.00025799999999999993, 900: -0.00040199999999999915, 901: -7.599999999999994e-05, 902: 0.000645999999999999, 903: 0.0008079999999999988, 904: 0.00185, 905: 0.001268000000000002, 906: 0.0025960000000000015, 907: -0.00018999999999999906, 908: 0.0008220000000000006, 909: 0.001427999999999999, 910: 0.0016900000000000018, 911: -0.0004199999999999997, 912: 0.0007659999999999994, 913: 0.0016340000000000011, 914: 0.0004959999999999985, 915: 0.0015979999999999996, 916: 0.0011320000000000002, 917: 0.0018499999999999999, 918: 0.0011739999999999993, 919: 9.600000000000002e-05, 920: 0.00022400000000000065, 921: -0.00011599999999999999, 922: 0.002907999999999998, 923: 0.0012639999999999993, 924: -0.0001279999999999994, 925: 0.000248, 926: 0.0006600000000000014, 927: 0.0006319999999999987, 928: -0.0006299999999999988, 929: 0.0010579999999999997, 930: 0.00017400000000000136, 931: 0.0025979999999999996, 932: -0.0008760000000000005, 933: 0.001652000000000001, 934: -0.00021399999999999924, 935: 0.0018540000000000008, 936: -0.0009840000000000005, 937: 0.0011600000000000009, 938: 0.00015199999999999933, 939: 0.0015380000000000005, 940: 0.0006640000000000012, 941: 0.0004980000000000006, 942: 0.0008199999999999999, 943: -0.00022600000000000002, 944: 7.600000000000103e-05, 945: 0.0024339999999999987, 946: 0.000646, 947: 0.0007839999999999988, 948: 0.00038400000000000104, 949: 0.0002539999999999999, 950: 0.00040800000000000114, 951: 0.0009000000000000013, 952: 0.0018819999999999991, 953: 0.0005399999999999994, 954: -0.0003879999999999994, 955: 0.0019160000000000008, 956: 0.001254, 957: 0.002186000000000002, 958: -9.599999999999831e-05, 959: 0.0008919999999999989, 960: 0.0008340000000000001, 961: 0.0022600000000000003, 962: 0.0007980000000000001, 963: 0.0014899999999999998, 964: 0.00026199999999999997, 965: -0.0003139999999999991, 966: 0.00026999999999999957, 967: 0.0008439999999999981, 968: 0.0016960000000000005, 969: -0.0013439999999999984, 970: 0.0016379999999999997, 971: 0.0014999999999999985, 972: 0.0011959999999999994, 973: -6.199999999999993e-05, 974: 0.0007640000000000017, 975: -0.00033200000000000026, 976: 0.00048400000000000054, 977: 0.0007939999999999988, 978: -0.0007219999999999989, 979: 0.0016919999999999997, 980: 0.0002779999999999987, 981: -0.0014939999999999997, 982: 0.0002559999999999999, 983: 0.0019819999999999994, 984: 0.0014739999999999987, 985: 0.00038000000000000094, 986: -0.0002659999999999994, 987: 0.0020919999999999975, 988: 0.0004519999999999996, 989: 0.0011700000000000013, 990: 0.0018619999999999995, 991: 0.0005899999999999999, 992: 0.002895999999999999, 993: 0.0006299999999999994, 994: 0.0021259999999999972, 995: -0.000652000000000001, 996: -0.0004300000000000004, 997: 0.0007259999999999999, 998: -0.0002960000000000007, 999: 0.001158}\n" + ] + } + ], + "source": [ + "with open('vals_mt.pkl', 'rb') as f:\n", + " accs = pickle.load(f)\n", + "print(accs)" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[(372, 0.0075200000000000015), (287, 0.0044579999999999976), (898, 0.0039480000000000045), (307, 
0.003934000000000007), (740, 0.003684), (684, 0.003653999999999998), (557, 0.0036379999999999984), (391, 0.003610000000000004), (394, 0.003530000000000004), (739, 0.003487999999999998), (270, 0.0032039999999999985), (626, 0.003086000000000001), (828, 0.003075999999999998), (230, 0.0030720000000000022), (178, 0.0030559999999999993), (149, 0.003), (567, 0.003), (249, 0.0029219999999999966), (922, 0.002907999999999998), (992, 0.002895999999999999), (89, 0.002871999999999996), (411, 0.002822000000000002), (166, 0.002816), (650, 0.002792000000000001), (529, 0.002786000000000002), (885, 0.0027259999999999984), (631, 0.002672000000000001), (213, 0.002654), (70, 0.0026360000000000008), (331, 0.002618), (931, 0.0025979999999999996), (906, 0.0025960000000000015), (758, 0.0025720000000000022), (343, 0.002572), (226, 0.002569999999999996), (596, 0.0025460000000000005), (742, 0.002544000000000001), (432, 0.0025279999999999994), (297, 0.002516), (878, 0.002513999999999999), (216, 0.0025120000000000025), (510, 0.0025099999999999966), (945, 0.0024339999999999987), (645, 0.002398000000000001), (889, 0.002374000000000001), (444, 0.0023640000000000037), (10, 0.002362000000000002), (437, 0.002360000000000003), (150, 0.002352), (612, 0.002334), (369, 0.0023219999999999985), (484, 0.002316000000000002), (851, 0.0023159999999999973), (175, 0.002314000000000002), (435, 0.0023100000000000004), (764, 0.002273999999999999), (206, 0.002261999999999998), (961, 0.0022600000000000003), (85, 0.002243999999999999), (450, 0.002243999999999999), (770, 0.002243999999999995), (300, 0.002221999999999999), (95, 0.002217999999999997), (473, 0.002205999999999996), (341, 0.0021960000000000013), (429, 0.0021919999999999995), (667, 0.0021919999999999986), (466, 0.00219), (957, 0.002186000000000002), (686, 0.0021759999999999982), (266, 0.002173999999999999), (332, 0.0021700000000000014), (224, 0.0021639999999999962), (350, 0.0021559999999999978), (106, 0.002144000000000001), (994, 0.0021259999999999972), (681, 0.0021040000000000013), (91, 0.0021039999999999987), (987, 0.0020919999999999975), (81, 0.0020899999999999994), (875, 0.0020880000000000057), (710, 0.002087999999999997), (462, 0.00207), (92, 0.0020579999999999956), (160, 0.0020519999999999974), (257, 0.0020499999999999993), (201, 0.0020460000000000035), (235, 0.002046000000000003), (311, 0.002035999999999998), (610, 0.0020340000000000002), (767, 0.0020300000000000014), (834, 0.0020280000000000025), (303, 0.0020240000000000032), (534, 0.0020240000000000015), (479, 0.0020220000000000012), (318, 0.002006), (162, 0.0019899999999999983), (562, 0.0019899999999999974), (983, 0.0019819999999999994), (585, 0.001978000000000002), (253, 0.001976000000000002), (439, 0.001976), (4, 0.001975999999999998), (584, 0.001974000000000002), (620, 0.0019740000000000005), (815, 0.001971999999999999), (199, 0.0019679999999999997), (193, 0.001957999999999999), (474, 0.0019500000000000032), (782, 0.001941999999999999), (74, 0.001933999999999998), (881, 0.001933999999999998), (72, 0.0019279999999999976), (9, 0.0019260000000000006), (955, 0.0019160000000000008), (470, 0.0018879999999999997), (136, 0.0018840000000000005), (952, 0.0018819999999999991), (277, 0.0018779999999999984), (51, 0.0018699999999999993), (503, 0.001862000000000002), (990, 0.0018619999999999995), (734, 0.0018580000000000016), (759, 0.0018559999999999985), (935, 0.0018540000000000008), (904, 0.00185), (917, 0.0018499999999999999), (787, 0.0018499999999999975), (546, 0.0018480000000000003), (783, 0.0018459999999999974), (59, 
0.0018379999999999978), (263, 0.0018340000000000008), (655, 0.0018339999999999984), (495, 0.0018180000000000006), (804, 0.0018179999999999997), (58, 0.0018160000000000006), (872, 0.0018159999999999993), (568, 0.0018140000000000003), (649, 0.0018059999999999977), (76, 0.0018020000000000009), (382, 0.0018020000000000004), (545, 0.0017940000000000002), (77, 0.0017920000000000028), (15, 0.0017879999999999975), (622, 0.0017840000000000028), (289, 0.001784), (582, 0.0017820000000000023), (703, 0.0017820000000000023), (511, 0.0017800000000000008), (607, 0.0017760000000000005), (5, 0.001773999999999998), (779, 0.001769999999999997), (599, 0.0017660000000000013), (754, 0.0017640000000000006), (256, 0.0017580000000000017), (809, 0.001754), (364, 0.0017500000000000018), (111, 0.001746000000000004), (692, 0.0017439999999999995), (45, 0.0017439999999999986), (496, 0.001740000000000002), (61, 0.0017319999999999994), (415, 0.0017299999999999991), (265, 0.0017260000000000005), (254, 0.0017259999999999986), (778, 0.0017200000000000023), (403, 0.0017199999999999958), (687, 0.0017120000000000013), (689, 0.0017119999999999982), (24, 0.0017059999999999996), (799, 0.001697999999999999), (968, 0.0016960000000000005), (711, 0.0016939999999999998), (483, 0.0016920000000000019), (979, 0.0016919999999999997), (480, 0.001691999999999998), (910, 0.0016900000000000018), (101, 0.0016879999999999996), (183, 0.001686000000000001), (279, 0.0016859999999999985), (227, 0.0016859999999999978), (340, 0.0016760000000000006), (41, 0.0016740000000000004), (383, 0.001669999999999998), (334, 0.0016659999999999995), (47, 0.0016620000000000016), (80, 0.0016560000000000008), (151, 0.0016539999999999988), (933, 0.001652000000000001), (262, 0.0016500000000000006), (165, 0.0016499999999999998), (506, 0.00164), (970, 0.0016379999999999997), (158, 0.0016360000000000003), (452, 0.0016359999999999997), (913, 0.0016340000000000011), (769, 0.0016299999999999986), (717, 0.0016279999999999995), (720, 0.0016260000000000005), (25, 0.0016219999999999993), (103, 0.001620000000000001), (33, 0.0016099999999999999), (359, 0.0016060000000000017), (144, 0.0016019999999999993), (37, 0.0016019999999999988), (611, 0.0015980000000000007), (871, 0.001598), (915, 0.0015979999999999996), (755, 0.0015940000000000006), (690, 0.0015900000000000003), (200, 0.0015880000000000022), (52, 0.0015800000000000022), (737, 0.0015799999999999985), (633, 0.001577999999999999), (722, 0.0015760000000000019), (862, 0.0015720000000000013), (673, 0.0015659999999999988), (541, 0.0015559999999999997), (505, 0.0015559999999999994), (19, 0.0015500000000000008), (163, 0.001548), (428, 0.0015460000000000003), (939, 0.0015380000000000005), (691, 0.0015379999999999984), (152, 0.0015339999999999993), (548, 0.001533999999999998), (306, 0.001524000000000001), (208, 0.0015220000000000025), (820, 0.0015219999999999984), (49, 0.0015139999999999989), (730, 0.0015060000000000004), (35, 0.0015059999999999993), (696, 0.0015020000000000003), (971, 0.0014999999999999985), (169, 0.0014980000000000002), (963, 0.0014899999999999998), (87, 0.0014899999999999991), (404, 0.0014899999999999987), (443, 0.00148), (214, 0.0014780000000000004), (984, 0.0014739999999999987), (434, 0.0014640000000000015), (126, 0.0014639999999999994), (79, 0.0014599999999999993), (837, 0.0014560000000000003), (814, 0.0014380000000000007), (413, 0.0014319999999999992), (225, 0.0014300000000000011), (660, 0.0014300000000000003), (909, 0.001427999999999999), (664, 0.0014259999999999998), (204, 0.0014199999999999992), (855, 
... (several hundred intermediate (index, value) pairs of the printed sorted_dct output omitted for readability; the listing resumes with the lowest-valued points below) ...
(299, -0.0009840000000000005), (936, -0.0009840000000000005), (453, -0.00099), (418, -0.0010059999999999995), (623, -0.0010859999999999993), (842, -0.0010960000000000002), (281, -0.0011019999999999992), (765, -0.0011119999999999986), (198, -0.0011279999999999994), (733, -0.0011439999999999988), (78, -0.0011700000000000016), (128, -0.001205999999999997), (853, -0.001234), (558, -0.0012380000000000017), (390, -0.001255999999999997), (232, -0.0012600000000000016), (882, -0.001285999999999998), (766, -0.001316000000000001), (969, -0.0013439999999999984), (374, -0.0013439999999999997), (719, -0.0013640000000000006), (478, -0.001375999999999999), (295, -0.0013839999999999996), (574, -0.0014019999999999998), (879, -0.0014159999999999976), (499, -0.001475999999999999), (981, -0.0014939999999999997), (88, -0.0015699999999999987), (674, -0.0017699999999999994), (461, -0.0018999999999999987), (31, -0.001978000000000001), (895, -0.002116), (1, -0.002193999999999999)]\n" + ] + } + ], + "source": [ + "import operator\n", + "sorted_dct = sorted(accs.items(), key=operator.itemgetter(1), reverse=True)\n", + "print(sorted_dct)" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The weighted accuracy drop is 0.452\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAiMAAAGdCAYAAADAAnMpAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA6g0lEQVR4nO3de1yUdf7//+cwwIAI4wEBD4BopihlCpunrG0tSjus27ZrZVqbfTa3w2p+asvc7eBnW/ruwbU+m5pl9e3s9rP2125uRWt5CLMiLFPzrBCCCOoMiA4wXN8/yLEJDwwCF9fF4367ze3GvOd9Da95Z87T9/W+3pfDMAxDAAAAJgkzuwAAANCxEUYAAICpCCMAAMBUhBEAAGAqwggAADAVYQQAAJiKMAIAAExFGAEAAKYKN7uApqivr9fevXsVGxsrh8NhdjkAAKAJDMNQZWWlevXqpbCwk89/WCKM7N27V8nJyWaXAQAAmqGoqEh9+vQ56euWCCOxsbGSGj5MXFycydUAAICm8Hq9Sk5ODnyPn4wlwsixUzNxcXGEEQAALOZ0SyxYwAoAAExFGAEAAKYijAAAAFMRRgAAgKkIIwAAwFSEEQAAYCrCCAAAMBVhBAAAmIowAgAATEUYAQAApiKMAAAAUxFGAACAqSxxo7y2UHm0Vlv3VWrNtgptKvHo6qG9lRYfo4FJsXKGnfoGPwAAoPk6dBjJ+fdmrdhcpnrD0I79h4Nee3fjvsDPiXEuXTwwQZK0p6JaPxneW8NTuuhobb3S4mMU4+rQwwgAwBnp0N+iO8qqtK2sKvC8a6cIde0UqZ3lh9Up0qk6v6Eaf732eX167dOiQL+1OysCP8e6wvW7KwcruVsnjezXTb66em3/9j379+gsV3iYduyv0o79h9U3vpP6do9RVISz7T4kAADtnMMwDMPsIk7H6/XK7XbL4/EoLi6uxd636EC19lRU60itX8NTuqh7Z1fQ654jtSo6UK1d5Yf12e4DkqRPdh9UeZVPtf56HaqubdLvcTikY6PcKdKpfj1iNG5Qovr1iNHGvV4VVlTraJ1fV57bS84w6UhNvWKjwtXTHaWhyV0U4WRpDwDAepr6/d2hw8iZKig8qP964TMd9vl1pNYfaI+KCFN9vVTjrw+0xUWFy3u0LuTfERPp1P3jByk6Mlzn9+2mlO6dWqR2AABaG2GkjR08XKMd+6tUebROF53dQ4akDcUe7dxfpeEpXdU3Pka1/nq9/WWJPtxSpq37qlRvGPIcqdX5ad20u6JaBw77FBcVoaS4KBUUHdKBwzWNfs+krGT1jY/R9Iv6yeFgYS0AoP0ijFicYRi6f9kGrdy6Xw6HVOI5GvT6lJGp+tUP+6tXl2iTKgQA4NQIIzbzzy/26u0vS/TOxtKg9vjOkbry3F66ZUyaEuJcinCGcSkyAKBdIIzYlOdIra5f/LE2lXhP+Lo7OkI/HNhDQ3rF6afD+wQW5frrDR2t9cuQ1JlLkQEAbYAwYnNllUdV7fPr1U8L9ebnxSqr9DXqc1ZCZy26cbje3bhPS9bsCqxBSYqLUvaQRP3ywn7q3SVahiHVG4acYQ7WoQAAWgxhpIPx1fnlq6vXkx9s11MrdzbpmDCHFBMZrkpfw1U+sa5wRUU61T0mUlec01NXDe2lvvExrVk2AMDGCCMd2N5DR1TqPao7Xv5cpd6jykzpqhtGpGjcoETV1dfrk10H9OxHu/Tp7oNNer97Lxuom0b35fQOACAkhBGovt7QkVr/Sber/3hnhUo9R3VechcZko7UNOyX8v/lF+nr0koVFB4K6t+vR4zSk+I06QfJOj+tGzvJAgBOiTCCM/ZVsUd3vVqgwgPV8tc3/mMydVSqHrl6COtMAAAn1NTvb+bdcVIZvd364J4fSmpYMLtkza6g9SgvrN0jZ5hDv71iMJcTAwCajZkRhMQwDL3zVaneLCjWe5sa7mwc3zlSl2ck6V9flqhThFPdOkdqRFp33XZhPyXERZlcMQDALJymQav7+6dFuu+NL3WqP0GDkmJ10dk9tK2sSl8UHVJkeJh6uqN01dBeumlUXx2t86tTJBN0AGBHhBG0ibe/LNGsv6+XKzxMyd06acI5PRUXHaG/5m494b11Tua3V6Rr2gVprD8BABshjMBUvjq/tu2r0oZij7aUViq+c6SGpXRV5dE6fVXs0VO
rdqjWH/xHzxnm0NRRqbry3J4antKVYAIAFkcYQbtWdKBa/ygo1r7Ko3rp48JGr//2inTdMiZNYSyMBQDLIozAUrbuq9Rb6/fqbx9sD7QNSorVg1cN1tmJsYr/9h47AADrIIzAko7U+PXzp9ZqQ7EnqP1HgxK0YPJwNloDAAtp6vd3WBvWBJxWdKRTb94+Wo9cPSSofcXXZfrLe1tMqgoA0JqYGUG7VfXtDfz+sHyzXlnXsK7k+vNT9PDVg+UKZ4YEANq7Vp0ZWbBggdLS0hQVFaXMzEytXr36pH1vvvlmORyORo8hQ4ac9BhAkjq7wtXZFa5HJ2bo3ssGyuGQXv2kUAN/+45+9VK+/pq7VV+XeiVJn+4+oM0lXpMrBgA0R8gzI0uXLtWUKVO0YMECjRkzRk899ZSeeeYZbdq0SSkpKY36ezweHTlyJPC8rq5OQ4cO1V133aWHH364Sb+TmRFI0odbynTbi/ny1dWf8HWHQ7pxRKrS4mN0pNavn2X2UXxnl7bsq1RafAzrTQCgjbXaAtYRI0Zo+PDhWrhwYaAtPT1dEydOVE5OzmmP/8c//qFrrrlGu3btUmpqapN+J2EEx6wvOqRZS9erylenCGeYig8dOf1B3xqYGKtHfjxEI/t1b8UKAQDHtEoYqampUadOnfT666/rJz/5SaB9xowZWr9+vVauXHna97jqqqvk8/n03nvvnbSPz+eTz+cLPPd6vUpOTiaMoJHcTfu0dV+lxg6I1z6vT/9YX6xt+yq1dV/VCfv3ckdp9X0/4sZ+ANAGWuWuveXl5fL7/UpMTAxqT0xMVGlp6WmPLykp0b///W+98sorp+yXk5OjRx55JJTS0EFdOjhRlw5ODHouNSx+XbV1v2KjwlXm9em/X/9CkrTXc1S3PP+pFkwerhgX98QBgPagWQtYv79Nt2EYTdq6+/nnn1eXLl00ceLEU/abPXu2PB5P4FFUVNScMtGBdXaFa8I5PTV2QA/9NLOP8n97iSZlJUuSVm7dr+sWf6zD316tAwAwV0j/NIyPj5fT6Ww0C1JWVtZotuT7DMPQs88+qylTpigyMvKUfV0ul1wudtxEy+ne2aUHrxqsH6R106Nvb9KGYo+GPPSukrtFy+83NOOSAbpmeB+tLzqkw746XXBWvMKdbMMDAG0hpDASGRmpzMxM5ebmBq0Zyc3N1Y9//ONTHrty5Upt375d06ZNa16lwBmKcYXr2sw+io0K120v5kuSig40LIC9b9kG3bdsQ6DvOb3dmnNFOotdAaANNPvS3kWLFmnUqFFavHixnn76aW3cuFGpqamaPXu2iouL9cILLwQdN2XKFG3btk0ff/xxyEVyNQ1aWonniAorqvXZnoNasmaXjtb6VV3jb9TvpWkjdMGAeBMqBADra5UFrJI0adIkVVRUaO7cuSopKVFGRoaWL18euEy3pKREhYXBd2H1eDxatmyZHn/88VB/HdAqerqj1dMdrRH9uuuOi8+Sr86vj7aXq7yqRindOmnKknWq9Ru6cck6RYaH6Qd9u2rGuLN1flo3s0sHANthO3jgBMqrfPrZorXaVX440BYd4dR7d1+o5G6dTKwMAKyDG+UBZyC+s0vvzByra4b1Vi93lCTpSK1fY//4gR7550bV+k+8CywAIHRstACchCvcqXmTzpMkvfH5N5r194a9Sp77aLcO++r0x2uHmlgdANgHMyNAE/z4vN66/Yf9Azu3/v2zb/Tyuj0mVwUA9kAYAZrAGebQby4fpB1/mKDhKV0kSXPe/EoVVb5THwgAOC3CCBCie7IHBn5+9ZPCU/QEADQFYQQI0eiz4vWna8+VJD3xn+3K33Mg8BoLWwEgdCxgBZph4rDe+vdXpVrxdZl+unCtfjQoQYUHqrXPc1Qv3TpCQ5O7SGq4q/A3B6t106i+CuNOwQBwQoQRoBkinGH63+uHaeKTH2lbWZVWfF0WeO2+ZV/q6alZenr1Tr2wtmGRq2FIt1yQZla5ANCusekZcAYKK6r1+H+2qehgtQ5V12jrvqqT9v31uAG6dngfvbuxVN8crNb1I1I0MDG2SXe8BgAraur3N2EEaEF/W7FNL31cqFLvUUnSzaP76vm83Sft/+txAzTr0rPbqDoAaFuEEcBENXX1qjxaq+6dXfrmYLWuXbg2EFC+7w8/OUeTfpAc2MMEAOyi1W6UB+D0IsPD1L2zS5LUp2snffzAOHmO1CrWFS5fXb1q6+t16byV2uf16YE3N0iSbhiRYmbJAGAaLu0F2og7OkJhYQ5FRzoVFxWhp6dmBV6b//5WHTxcY2J1AGAeTtMAJtrnParL5q/Soepa9esRo+4xkaqoqtHI/t31uysGKzrSaXaJANBs3LUXsIDEuCj99efnSZJ27j+sT3cf1M7yw3plXaGGzn1PXxQdMrU+AGgLhBHAZBcPStCDVw7W0D5u9XRHBdpr6ur14yc/0sNvbZQFJjABoNk4TQO0Q+uLDmnikx8Fni+6MVOXZySZWBEAhI7TNICFnZfcRXn3/yjw/MkPtjM7AsC2CCNAO9WrS7Q+/92lcoWHaUOxR8s3lEqSqmvqTK4MAFoWYQRox7rFROr68xv2H7njlc81Zck6DX7wXd324meqPFprcnUA0DIII0A7N3vCIPXvESNJWr2tXJL07sZ9Oufh91RQeNDM0gCgRRBGgHbOFe7UMzf9QJNHpOjSwYnq271T4LV7Xv9Cdf56E6sDgDPH1TSABX1zsFpj//iBDEMaOyBev7lskIb0ilMY97cB0I5wNQ1gY326dtKdF58lqeHUzVV/W6P0B9/R55y2AWBBhBHAov47e2DQc19dvaa/mM8lwAAshzACWNjvrhwc9Lys0qdd5YdNqgYAmocwAljYLWP66sVp52vVvRfrrITOkqSnV+80uSoACA1hBLAwh8OhsQN6KKV7J108sIck6dVPivRVscfkygCg6QgjgE1ceHaPwM9rd1SYWAkAhIYwAtjEBWfFa3DPhkvnHl2+Wc9/tIvFrAAsgTAC2ITD4dDDVw8JPH/4n5v07Ee7zSsIAJqIMALYyPCULorvHBl4/vu3N7H3CIB2jzAC2Ei4M0wv3zpSL00bofEZSTIM6aW1e8wuCwBOqVlhZMGCBUpLS1NUVJQyMzO1evXqU/b3+XyaM2eOUlNT5XK51L9/fz377LPNKhjAqQ1MitUFA+J12ZAkSdIbBcVat5MFrQDar5DDyNKlSzVz5kzNmTNHBQUFGjt2rMaPH6/CwsKTHvPzn/9c//nPf7RkyRJt2bJFr776qgYNGnRGhQM4tYsHJahTpFOS9MCbG3TgcI2qfHUmVwUAjYV8o7wRI0Zo+PDhWrhwYaAtPT1dEydOVE5OTqP+77zzjq677jrt3LlT3bp1a1aR3CgPaJ6KKp9GPbZCNXXH7+x7xTk9dd/lg5Tynbv/AkBraJUb5dXU1Cg/P1/Z2dlB7dnZ2crLyzvhMW+99ZaysrL0xz/+Ub1799bZZ5+te+65R0eOHAnlVwNohu6dXZoxbkBQ29sbSnTz85+ovp7LfgG0D+GhdC4vL5ff71diYm
JQe2JiokpLS094zM6dO7VmzRpFRUXpzTffVHl5uW6//XYdOHDgpOtGfD6ffD5f4LnX6w2lTADfccfFZ6nKV6eFH+4ItO3cf1if7D6gkf26m1gZADRo1gJWh8MR9NwwjEZtx9TX18vhcOjll1/W+eefrwkTJmjevHl6/vnnTzo7kpOTI7fbHXgkJyc3p0wA37rv8kFa9qvRenfmhRqf0bCwtaDwkLlFAcC3Qgoj8fHxcjqdjWZBysrKGs2WHNOzZ0/17t1bbrc70Jaeni7DMPTNN9+c8JjZs2fL4/EEHkVFRaGUCeAEMlO7amBSrDJ6N/y/+H/e+Vrby6pMrgoAQgwjkZGRyszMVG5ublB7bm6uRo8efcJjxowZo71796qq6vhfelu3blVYWJj69OlzwmNcLpfi4uKCHgBaxmVDktSlU4Qk6ZJ5K7Vq636TKwLQ0YV8mmbWrFl65pln9Oyzz2rz5s26++67VVhYqOnTp0tqmNWYOnVqoP8NN9yg7t276xe/+IU2bdqkVatW6d5779Utt9yi6OjolvskAJrkrITOeuuOCwLPpz77iTbu5S6/AMwTchiZNGmS5s+fr7lz5+q8887TqlWrtHz5cqWmpkqSSkpKgvYc6dy5s3Jzc3Xo0CFlZWVp8uTJuuqqq/TEE0+03KcAEJKU7p30p2vPDTx/d+M+E6sB0NGFvM+IGdhnBGgdj/37ay1auUNXD+2lJ64fZnY5AGymVfYZAWAv5yV3kSTtrjgcaPMerdXsNzboh3/6QMWH2A8IQOsLaZ8RAPYyKClWkrS5xCvv0VodqKrRhCdWq7rGL0ka89gKDU/polf+a6SiIpxmlgrAxggjQAfWNz5G/XvEaMf+wzr34fdO2OfzwkN656tSTRzWu42rA9BRcJoG6OB+eWG/oOfdYyK19JcjlXv3hYHTOKu27tf6okN68eM9qvPXq8pXJwssNwNgESxgBTo4wzC04MMd+tO7W3T9+cnKueb4VTYrvt6nW57/TK7wMPm+c7M9SRqR1k2v/XLkSXdfBoCmfn9zmgbo4BwOh+64+Cz9dHgfxXeODHptaJ8ucjjUKIhI0rpdB7Sr/LD69ejcVqUCsClO0wCQJCW5oxTuDP4roXtnl8YNOn6rhzkT0vX320bprISGAPLyukL56vwqKDyoMu9RHa31t2nNAOyB0zQATqmiyqdFK3doaHIXXXluL0nSv77cqztfKWjUt5c7Sn/62VCNOSu+rcsE0A419fubMAIgZIZh6JF/btLzebtP+PqcCeka1b+7BveMU1gYa0qAjopNzwC0GofDoYeuGqwesa5A26/HDdA5394R+NHlm3Xl/67RM2t2mlUiAAshjABoFofDoffvvkg3j+6rf88Yq1mXnq0Fk4cro/fxf/08tXKnav2NF78CwHdxmgZAi6vz12tkzgqVV/n03M0/0MWDEswuCYAJOE0DwDThzjBdOrjhKpy7Xi3QJ7sOyFfnV972ctXXt/t//wBoY4QRAK1ieEoXSVKVr04/f2qtxj++Wjc8s06LV7OOBEAwwgiAVnHBgHhFf+fmejv3N9wZ+LF/f633NpZKargq58W1u5W3o9yUGgG0D6wZAdBqPEdqtffQEV3/9Mc6VF0b9Np9lw9Sry5RmvHaeknSjj9MkJPLgAFbYZ8RAO3GgcM1WrV1v8YOiFfm798PtGemdlX+noOSGjZM+/eMC+XuFGFWmQBaGAtYAbQb3WIiNXFYb3Xv7NKMcQMC7ceCiCTt9RzVix/vNqE6AGYjjABoUzMvGaBY1/F7dD4wYZBuGZMmSXpixXaVVR41qzQAJuGuvQDalMPh0ENXD9GX3xzSLy/spz5dO6m+3tC6XRXauNert9bv1a1j+5ldJoA2xMwIgDZ3bWYfzf1xhvp07SRJCgtz6CfDekuS5uVu1d5DR8wsD0AbI4wAaBduGt1XmaldVV3j1+jHVshzpPb0BwGwBcIIgHYhwhmmey8bGHi+hM3RgA6DMAKg3RjZr7suPLuHJOlfX5bIAjsPAGgBhBEA7cqCycPlCg/TzvLD2lDsMbscAG2AMAKgXensCte49Ia7/F79t4+0ZM0ukysC0NoIIwDanZ9lJgd+fuuLvSZWAqAtEEYAtDsXD0rQ49edJ0naXOJVrb/e3IIAtCrCCIB26apze8kdHaGaunrWjgA2RxgB0C6FhTk0un93SdI7X5WaXA2A1kQYAdBuTfx2V9bFq3Zq3c4K+eu51BewI8IIgHYre3CikuKiJEmTFn+smUvXm1sQgFZBGAHQbjkcDl13/vEra1Zt3c9GaIANEUYAtGszxg1Q3v0/kjPMIc+RWu3z+swuCUALa1YYWbBggdLS0hQVFaXMzEytXr36pH0//PBDORyORo+vv/662UUD6DgcDod6dYlWWnyMJGnLvkqTKwLQ0kIOI0uXLtXMmTM1Z84cFRQUaOzYsRo/frwKCwtPedyWLVtUUlISeAwYMKDZRQPoeAYmxkqStpR6Ta4EQEsLOYzMmzdP06ZN06233qr09HTNnz9fycnJWrhw4SmPS0hIUFJSUuDhdDqbXTSAjmdgUkMY+br0+MwI60cAewgpjNTU1Cg/P1/Z2dlB7dnZ2crLyzvlscOGDVPPnj01btw4ffDBB6fs6/P55PV6gx4AOrZjYWTrt6dpig5UK/P37+tP73LKF7C6kMJIeXm5/H6/EhMTg9oTExNVWnriTYl69uypxYsXa9myZXrjjTc0cOBAjRs3TqtWrTrp78nJyZHb7Q48kpOTT9oXQMdw7DTNV8UN28O/tG6PDhyu0ZMf7DC5MgBnKrw5BzkcjqDnhmE0ajtm4MCBGjhwYOD5qFGjVFRUpD//+c+68MILT3jM7NmzNWvWrMBzr9dLIAE6uJRundQp0qnqGr/m5W5VVPjxU737vEeV+O1+JACsJ6SZkfj4eDmdzkazIGVlZY1mS05l5MiR2rZt20lfd7lciouLC3oA6NjCwhy6cEAPSdK2fZWqrqkLvPb/ry82qywALSCkMBIZGanMzEzl5uYGtefm5mr06NFNfp+CggL17NkzlF8NAJr07QZoxYeOqryqJtD+/qYys0oC0AJCPk0za9YsTZkyRVlZWRo1apQWL16swsJCTZ8+XVLDKZbi4mK98MILkqT58+erb9++GjJkiGpqavTSSy9p2bJlWrZsWct+EgC218sdLUnaXOLV5pLjC9u/+OaQaurqFRnOPo6AFYUcRiZNmqSKigrNnTtXJSUlysjI0PLly5WamipJKikpCdpzpKamRvfcc4+Ki4sVHR2tIUOG6O2339aECRNa7lMA6BB6dQleFxIT6dThGr98dfV6ed0e/WJMmkmVATgTDsMCF+p7vV653W55PB7WjwAd3Pz3t2rBBzuUEOfS8784Xx9tL9dDb21UdIRTBQ9eqqgI9jAC2oumfn8362oaADDLzEvO1vSL+is8zKFwZ5j6xcfoz+9tUeXROm0vq1JGb7fZJQIIESdYAVhOVIRT4c6Gv77Cwhwa0qvhX1zz39+qA4drTnUogHaIMALA8i44K16S9P7mMt3+c
r7J1QAIFWEEgOVNv6i/Jo9IkdSwQ6sFlsIB+A7CCADLC3eG6XdXDpbDIVX56lTBqRrAUggjAGwhKsIZ2Ick6/fv67Cv7jRHAGgvCCMAbOO7+5Cs3lZuYiUAQkEYAWAb/Xt0Dvz84Ra2iAesgjACwDb+O/v4HcJf+7RIJZ4jJlYDoKkIIwBso0esSx/d/6PA81fXFZ6iN4D2gjACwFZ6d4nW49edJ0n615cl5hYDoEkIIwBs5+JBCZKkneWHdaiay3yB9o4wAsB24qIilBTXcGXN9rIqk6sBcDqEEQC21D8hRpJ07aK1XFkDtHOEEQC2dNmQpMDPNz/3qYoOVJtYDYBTIYwAsKWpo/oGPR/7xw9UXVPHfWuAdogwAsC2Fk4eHvR88IPvavIz6+SvJ5AA7QlhBIBtjT+np+69bGBQW96OCr29gUt+gfaEMALA1u64+Cxtf3S8tj86XjeOTJEkvfNVieqZHQHaDcIIANsLd4Yp3Bmmy4f0lCQt31Cq9AffUVnlUZMrAyARRgB0IOf0dgd+9tXV65K/rNQT/9mmFV/vM7EqAOFmFwAAbcXdKSLoufdoneblbpUk7fjDBDnDHGaUBXR4zIwA6FD+Z2KGRqR10zXDege1v795n6Y9/6n+b95ucwoDOjCHYYGL7r1er9xutzwej+Li4swuB4BN9L3/7cDPYQ7p2JrWT+dcoh6xLpOqAuyjqd/fzIwAgI4HEUnauNdjXiFAB0QYAdBhLf3lSHWLiWzUvnLrfhOqATouTtMA6PA+3lmhbw4eUa2/XrPf2KBOkU599fBlCmNBK3BGmvr9zdU0ADq8kf26S5Jq/fWa8+YGVdf4VV7lU0JclMmVAR0Dp2kA4FsRzjD1dEdLkooOcpdfoK0QRgDgO/p0/TaMHDhiciVAx0EYAYDvSO7WSZI0c+l6PfGfbSZXA3QMhBEA+I7krp0CP8/L3aqvS70mVgN0DIQRAPiO5G7RQc+3l1WZVAnQcTQrjCxYsEBpaWmKiopSZmamVq9e3aTjPvroI4WHh+u8885rzq8FgFZ37DTNMd8cZO0I0NpCDiNLly7VzJkzNWfOHBUUFGjs2LEaP368CgsLT3mcx+PR1KlTNW7cuGYXCwCtLel7l/N+w1U1QKsLOYzMmzdP06ZN06233qr09HTNnz9fycnJWrhw4SmPu+2223TDDTdo1KhRzS4WAFrb9+9Jw8wI0PpCCiM1NTXKz89XdnZ2UHt2drby8vJOetxzzz2nHTt26KGHHmrS7/H5fPJ6vUEPAGgLURHOoOeEEaD1hRRGysvL5ff7lZiYGNSemJio0tLSEx6zbds23X///Xr55ZcVHt60DV9zcnLkdrsDj+Tk5FDKBIAW883BalngrhmApTVrAavDEXy/BsMwGrVJkt/v1w033KBHHnlEZ599dpPff/bs2fJ4PIFHUVFRc8oEgGa57aJ+iopo+OvxaG29Nu5ldhZoTSGFkfj4eDmdzkazIGVlZY1mSySpsrJSn332me68806Fh4crPDxcc+fO1RdffKHw8HCtWLHihL/H5XIpLi4u6AEAbWX2+HR99fBlyh7c8PfaX3O3mlwRYG8hhZHIyEhlZmYqNzc3qD03N1ejR49u1D8uLk4bNmzQ+vXrA4/p06dr4MCBWr9+vUaMGHFm1QNAKwl3huk3lw+UJK3YUqYjNX6TKwLsK+S79s6aNUtTpkxRVlaWRo0apcWLF6uwsFDTp0+X1HCKpbi4WC+88ILCwsKUkZERdHxCQoKioqIatQNAe9O/R2fFRDp1uMavEs8R9evR2eySAFsKOYxMmjRJFRUVmjt3rkpKSpSRkaHly5crNTVVklRSUnLaPUcAwAocDod6donW9rIq7T10lDACtBKHYYFl4l6vV263Wx6Ph/UjANrUlCXrtHpbuf547bn6eRZX9gGhaOr3N/emAYBT6Olu2JF1n+eoyZUA9kUYAYBTiO/csCNrxeEakysB7IswAgCncCyM7K/ymVwJYF+EEQA4he6dIyVJFYQRoNUQRgDgFHocO01TxWkaoLUQRgDgFI7dxXfvoSPy17f7iw8BSyKMAMAp9PvOxmf9H1iuD7eUmV0SYDuEEQA4BWeYQ5dn9Aw8/z/vbDGxGsCeCCMAcBr/M3GInp6aJUnaXOLVpfNWqqau3uSqAPsgjADAaXSKDNelgxP1g75dJUnbyqr02e4DJlcF2AdhBACa6JX/GqmR/bpJkvJ2VJhcDWAfhBEAaKIIZ5guH5IkSdpWVmlyNYB9EEYAIAT9Exru3Ltj/2GTKwHsgzACACHo36MhjOypOKxaf8Mi1m8OVmv++1t1gPvXAM0SbnYBAGAlSXFRio5w6kitX0UHqtWvR2fd8vyn2rqvSjv2H9b/Xj/M7BIBy2FmBABCEBbmUL8eMZKOn6rZuq9KkpS7qdS0ugArY2YEAEJ0dmKsNu716ouiQ/rmYHWgPTrCaWJVgHUxMwIAIbrgrHhJ0oqvy7Ro5Y5A+8HqWu09dMSssgDLYmYEAEL0w4E95HBIm0q8kqQIp0OdXeE6WF2r0Y+tUP8eMTqnt1v/MzFDsVERJlcLtH/MjABAiLp3dikzpWvg+UVnJ+iP1w4NPN+x/7D+sX6v/vbBdjPKAyyHmREAaIY//Wyo3v5yrxwOhyYO663eXaK1YPJwvbexVK5wp5Z+VqT/m7db4wYl6vy0hl1bDcPQ+5vLtLv8sNLiYzQuPUEOh8PkTwKYz2EYhmF2Eafj9Xrldrvl8XgUFxdndjkAcEqHfXUa+8cPAvuORIaHqWunCJVX1chff/yv3IRYl9J7xok8Eppz+3TRrEvPNrsMNEFTv78JIwDQCjaXePW3Fdv1zsbSoAASHuZQcrdOKjxQHdSO0Kx7YJwS46LMLgOn0dTvb07TAEArSO8ZpycnD5fnSK0qqnzaXFKpbjGRGtwzTu5OESqrPKp1Ow/IV1dvdqmWMvuNL1XrN1TDuNkKYQQAWpE7OkLu6Aj1+3Yb+WMSYqN01dBeJlVlXb/7x1eq9fvNLgMtjKtpAACWwfoaeyKMAAAs41gWaf+rHREKwggAwHIMkUbshDACALCMY/uyMDNiL4QRAIBlBE7TmFoFWhphBABgHSxgtSXCCADAciywXydCQBgBAFgGp2nsqVlhZMGCBUpLS1NUVJQyMzO1evXqk/Zds2aNxowZo+7duys6OlqDBg3SX//612YXDADouFjAak8h78C6dOlSzZw5UwsWLNCYMWP01FNPafz48dq0aZNSUlIa9Y+JidGdd96pc889VzExMVqzZo1uu+02xcTE6Je//GWLfAgAQMfApmf2FPKN8kaMGKHhw4dr4cKFgbb09HRNnDhROTk5TXqPa665RjExMXrxxReb1J8b5QEAJGnY3Pd0sLpW78+6UGclxJpdDk6jqd/fIZ2mqampUX5+vrKzs4Pas7OzlZeX16T3KCgoUF5eni666KKT9vH5fPJ6vUEPAACO4TSNvYQURsrLy+X3+5WYmBjUnpiYqNLS0lMe26dPH7lcLmVlZemOO+7QrbfeetK+OTk5crvdgUdycnIo
ZQIAbCqwZsTkOtCymrWA1fG9k3aGYTRq+77Vq1frs88+06JFizR//ny9+uqrJ+07e/ZseTyewKOoqKg5ZQIAbIZ709hTSAtY4+Pj5XQ6G82ClJWVNZot+b60tDRJ0jnnnKN9+/bp4Ycf1vXXX3/Cvi6XSy6XK5TSAAAdAAtY7SmkmZHIyEhlZmYqNzc3qD03N1ejR49u8vsYhiGfzxfKrwYAIIAb5dlLyJf2zpo1S1OmTFFWVpZGjRqlxYsXq7CwUNOnT5fUcIqluLhYL7zwgiTpySefVEpKigYNGiSpYd+RP//5z7rrrrta8GMAADoG9hmxo5DDyKRJk1RRUaG5c+eqpKREGRkZWr58uVJTUyVJJSUlKiwsDPSvr6/X7NmztWvXLoWHh6t///567LHHdNttt7XcpwAAdAjHTtMQRuwl5H1GzMA+IwAASTr/0fdVVunT27++QEN6uc0uB6fRKvuMAABgJhaw2hNhBABgOe1/Th+hIIwAACzDIaZG7IgwAgCwDBaw2hNhBABgGcyL2BNhBABgOWx6Zi+EEQCAZQRulEcWsRXCCADAcsgi9kIYAQBYxvEFrMQROyGMAAAsg03P7IkwAgCwHOZF7IUwAgCwDAd37bUlwggAwDKOn6YhjdgJYQQAYBnHsggzI/ZCGAEAWIaDFay2RBgBAFgOEyP2QhgBAFgGp2nsiTACALAONj2zJcIIAMAyAjMjplaBlkYYAQAApiKMAAAsg7v22hNhBABgGcdP05BG7IQwAgCwDAeLRmyJMAIAsAyH2PTMjggjAADLYWLEXggjAADLcAT2GTG3DrQswggAwHJYwGovhBEAgGVwaa89EUYAAICpCCMAAMvgyl57IowAACzDwY3ybIkwAgCwjEAYMbcMtDDCCADAMgKbnpFGbKVZYWTBggVKS0tTVFSUMjMztXr16pP2feONN3TppZeqR48eiouL06hRo/Tuu+82u2AAAGAvIYeRpUuXaubMmZozZ44KCgo0duxYjR8/XoWFhSfsv2rVKl166aVavny58vPzdfHFF+uqq65SQUHBGRcPAOhYjp+mYWrEThxGiKuARowYoeHDh2vhwoWBtvT0dE2cOFE5OTlNeo8hQ4Zo0qRJevDBB5vU3+v1yu12y+PxKC4uLpRyAQA28uO/rdEX33i05KYsjUtPNLscnEZTv79DmhmpqalRfn6+srOzg9qzs7OVl5fXpPeor69XZWWlunXrdtI+Pp9PXq836AEAgNj0zJZCCiPl5eXy+/1KTAxOo4mJiSotLW3Se/zlL3/R4cOH9fOf//ykfXJycuR2uwOP5OTkUMoEAAAW0qwFrMe24z3GMIxGbSfy6quv6uGHH9bSpUuVkJBw0n6zZ8+Wx+MJPIqKippTJgDAZtj0zJ7CQ+kcHx8vp9PZaBakrKys0WzJ9y1dulTTpk3T66+/rksuueSUfV0ul1wuVyilAQA6ADY9s6eQZkYiIyOVmZmp3NzcoPbc3FyNHj36pMe9+uqruvnmm/XKK6/oiiuuaF6lAIAOj5kRewppZkSSZs2apSlTpigrK0ujRo3S4sWLVVhYqOnTp0tqOMVSXFysF154QVJDEJk6daoef/xxjRw5MjCrEh0dLbfb3YIfBQBgd9y1155CDiOTJk1SRUWF5s6dq5KSEmVkZGj58uVKTU2VJJWUlATtOfLUU0+prq5Od9xxh+64445A+0033aTnn3/+zD8BAACwtJD3GTED+4wAACTp2oV5+mzPQS26cbguz+hpdjk4jVbZZwQAADMdX8Bqbh1oWYQRAIBlHLtRHlnEXggjAADLYWbEXggjAADrOP3+mrAgwggAwDKO7zPC1IidEEYAAJbBAlZ7IowAACyDBaz2RBgBAACmIowAACyDG+XZE2EEAGAZDq6msSXCCADAMgJrRpgYsRXCCADAMgKnaVjCaiuEEQAAYCrCCADAcjhNYy+EEQCAZTgcrBmxI8IIAMAyjm8HDzshjAAALId9RuyFMAIAsAz2GbEnwggAwDI4TWNPhBEAgGU4jm80AhshjAAALOP4zAhpxE4IIwAAy2H9qr0QRgAAlsECVnsijAAALOTbTc9MrgItizACALCMwPpV0oitEEYAAJbDAlZ7IYwAACyDJSP2RBgBAFgGp2nsiTACALAMBwtYbYkwAgCwjMClvUyN2AphBABgOUQReyGMAAAsg03P7IkwAgCwjMCaEaZGbKVZYWTBggVKS0tTVFSUMjMztXr16pP2LSkp0Q033KCBAwcqLCxMM2fObG6tAICOLnA1DWnETkIOI0uXLtXMmTM1Z84cFRQUaOzYsRo/frwKCwtP2N/n86lHjx6aM2eOhg4desYFAwBAFLGXkMPIvHnzNG3aNN16661KT0/X/PnzlZycrIULF56wf9++ffX4449r6tSpcrvdZ1wwAKDj4mIaewopjNTU1Cg/P1/Z2dlB7dnZ2crLy2uxonw+n7xeb9ADAAAHK1htKaQwUl5eLr/fr8TExKD2xMRElZaWtlhROTk5crvdgUdycnKLvTcAwLoCMyOmVoGW1qwFrN9PpoZhtGhanT17tjweT+BRVFTUYu8NALAuBwtYbSk8lM7x8fFyOp2NZkHKysoazZacCZfLJZfL1WLvBwAA2q+QZkYiIyOVmZmp3NzcoPbc3FyNHj26RQsDAOD7WDFiTyHNjEjSrFmzNGXKFGVlZWnUqFFavHixCgsLNX36dEkNp1iKi4v1wgsvBI5Zv369JKmqqkr79+/X+vXrFRkZqcGDB7fMpwAAdAjHlgRwlsZeQg4jkyZNUkVFhebOnauSkhJlZGRo+fLlSk1NldSwydn39xwZNmxY4Of8/Hy98sorSk1N1e7du8+segBAh3J8AStpxE5CDiOSdPvtt+v2228/4WvPP/98ozYWGgEAWhJfK/bCvWkAANZx7Goac6tACyOMAAAsw8ESVlsijAAALOP4PiPm1oGWRRgBAFgGC1jtiTACALAcZkbshTACALAM7pNnT4QRAIBlsIDVnggjAADL4EZ59kQYAQBYDlnEXggjAADLcLDpmS0RRgAAFsKaETsijAAALINNz+yJMAIAsAw2PbMnwggAwHKYGbEXwggAwDJYwGpPhBEAgGWw6Zk9EUYAAJYR2A6e8zS2QhgBAFjG8QWssBPCCADAcpgYsRfCCADAMhzfnqfh0l57IYwAAABTEUYAAJbBDqz2RBgBAFgOWcReCCMAAMs4ts8IMyP2QhgBAFiGgz3PbIkwAgCwDG6UZ0+EEQCAZTjY9cyWCCMAAMshi9gLYQQAYBmBTc9YwWorhBEAgGWwftWeCCMAAOtg0zNbIowAACyHLGIvhBEAgGWw6Zk9NSuMLFiwQGlpaYqKilJmZqZWr159yv4rV65UZmamoqKi1K9fPy1atKhZxQIAOrbAvWmYG7GVkMPI0qVLNXPmTM2ZM0cFBQUaO3asxo8fr8LCwhP237VrlyZMmKCxY8eqoKBADzzwgH79619r2bJlZ1w8AKBjYQGrPYUcRubNm6dp06bp1ltvVXp6uubPn6/k5GQtXLjwhP0
XLVqklJQUzZ8/X+np6br11lt1yy236M9//vMZFw8A6Fi4a689hYfSuaamRvn5+br//vuD2rOzs5WXl3fCY9auXavs7Oygtssuu0xLlixRbW2tIiIiGh3j8/nk8/kCz71ebyhlAgBsbt2uA3rknxvNLsNWfjq8jzJ6u0353SGFkfLycvn9fiUmJga1JyYmqrS09ITHlJaWnrB/XV2dysvL1bNnz0bH5OTk6JFHHgmlNABABxAX1fAP2M0lXm0u4R+qLWlYSldrhJFjHN+7baJhGI3aTtf/RO3HzJ49W7NmzQo893q9Sk5Obk6pAAAbue78FBmSKo/Wml2K7QxI6Gza7w4pjMTHx8vpdDaaBSkrK2s0+3FMUlLSCfuHh4ere/fuJzzG5XLJ5XKFUhoAoANwR0do+kX9zS4DLSykBayRkZHKzMxUbm5uUHtubq5Gjx59wmNGjRrVqP97772nrKysE64XAQAAHUvIV9PMmjVLzzzzjJ599llt3rxZd999twoLCzV9+nRJDadYpk6dGug/ffp07dmzR7NmzdLmzZv17LPPasmSJbrnnnta7lMAAADLCnnNyKRJk1RRUaG5c+eqpKREGRkZWr58uVJTUyVJJSUlQXuOpKWlafny5br77rv15JNPqlevXnriiSf005/+tOU+BQAAsCyHYYH7MHu9Xrndbnk8HsXFxZldDgAAaIKmfn9zbxoAAGAqwggAADAVYQQAAJiKMAIAAExFGAEAAKYijAAAAFMRRgAAgKkIIwAAwFSEEQAAYKqQt4M3w7FNYr1er8mVAACApjr2vX26zd4tEUYqKyslScnJySZXAgAAQlVZWSm3233S1y1xb5r6+nrt3btXsbGxcjgcLfa+Xq9XycnJKioq4p43rYyxbhuMc9tgnNsG49w2WnOcDcNQZWWlevXqpbCwk68MscTMSFhYmPr06dNq7x8XF8cf9DbCWLcNxrltMM5tg3FuG601zqeaETmGBawAAMBUhBEAAGCqDh1GXC6XHnroIblcLrNLsT3Gum0wzm2DcW4bjHPbaA/jbIkFrAAAwL469MwIAAAwH2EEAACYijACAABMRRgBAACm6tBhZMGCBUpLS1NUVJQyMzO1evVqs0uyjJycHP3gBz9QbGysEhISNHHiRG3ZsiWoj2EYevjhh9WrVy9FR0frhz/8oTZu3BjUx+fz6a677lJ8fLxiYmJ09dVX65tvvmnLj2IpOTk5cjgcmjlzZqCNcW45xcXFuvHGG9W9e3d16tRJ5513nvLz8wOvM9Znrq6uTr/97W+Vlpam6Oho9evXT3PnzlV9fX2gD+MculWrVumqq65Sr1695HA49I9//CPo9ZYa04MHD2rKlClyu91yu92aMmWKDh06dOYfwOigXnvtNSMiIsJ4+umnjU2bNhkzZswwYmJijD179phdmiVcdtllxnPPPWd89dVXxvr1640rrrjCSElJMaqqqgJ9HnvsMSM2NtZYtmyZsWHDBmPSpElGz549Da/XG+gzffp0o3fv3kZubq7x+eefGxdffLExdOhQo66uzoyP1a598sknRt++fY1zzz3XmDFjRqCdcW4ZBw4cMFJTU42bb77ZWLdunbFr1y7j/fffN7Zv3x7ow1ifud///vdG9+7djX/961/Grl27jNdff93o3LmzMX/+/EAfxjl0y5cvN+bMmWMsW7bMkGS8+eabQa+31JhefvnlRkZGhpGXl2fk5eUZGRkZxpVXXnnG9XfYMHL++ecb06dPD2obNGiQcf/995tUkbWVlZUZkoyVK1cahmEY9fX1RlJSkvHYY48F+hw9etRwu93GokWLDMMwjEOHDhkRERHGa6+9FuhTXFxshIWFGe+8807bfoB2rrKy0hgwYICRm5trXHTRRYEwwji3nPvuu8+44IILTvo6Y90yrrjiCuOWW24JarvmmmuMG2+80TAMxrklfD+MtNSYbtq0yZBkfPzxx4E+a9euNSQZX3/99RnV3CFP09TU1Cg/P1/Z2dlB7dnZ2crLyzOpKmvzeDySpG7dukmSdu3apdLS0qAxdrlcuuiiiwJjnJ+fr9ra2qA+vXr1UkZGBv8dvueOO+7QFVdcoUsuuSSonXFuOW+99ZaysrL0s5/9TAkJCRo2bJiefvrpwOuMdcu44IIL9J///Edbt26VJH3xxRdas2aNJkyYIIlxbg0tNaZr166V2+3WiBEjAn1Gjhwpt9t9xuNuiRvltbTy8nL5/X4lJiYGtScmJqq0tNSkqqzLMAzNmjVLF1xwgTIyMiQpMI4nGuM9e/YE+kRGRqpr166N+vDf4bjXXntNn3/+uT799NNGrzHOLWfnzp1auHChZs2apQceeECffPKJfv3rX8vlcmnq1KmMdQu577775PF4NGjQIDmdTvn9fj366KO6/vrrJfFnujW01JiWlpYqISGh0fsnJCSc8bh3yDByjMPhCHpuGEajNpzenXfeqS+//FJr1qxp9Fpzxpj/DscVFRVpxowZeu+99xQVFXXSfozzmauvr1dWVpb+8Ic/SJKGDRumjRs3auHChZo6dWqgH2N9ZpYuXaqXXnpJr7zyioYMGaL169dr5syZ6tWrl2666aZAP8a55bXEmJ6of0uMe4c8TRMfHy+n09koyZWVlTVKjji1u+66S2+99ZY++OAD9enTJ9CelJQkSacc46SkJNXU1OjgwYMn7dPR5efnq6ysTJmZmQoPD1d4eLhWrlypJ554QuHh4YFxYpzPXM+ePTV48OCgtvT0dBUWFkriz3RLuffee3X//ffruuuu0znnnKMpU6bo7rvvVk5OjiTGuTW01JgmJSVp3759jd5///79ZzzuHTKMREZGKjMzU7m5uUHtubm5Gj16tElVWYthGLrzzjv1xhtvaMWKFUpLSwt6PS0tTUlJSUFjXFNTo5UrVwbGODMzUxEREUF9SkpK9NVXX/Hf4Vvjxo3Thg0btH79+sAjKytLkydP1vr169WvXz/GuYWMGTOm0eXpW7duVWpqqiT+TLeU6upqhYUFf/U4nc7Apb2Mc8trqTEdNWqUPB6PPvnkk0CfdevWyePxnPm4n9HyVws7dmnvkiVLjE2bNhkzZ840YmJijN27d5tdmiX86le/Mtxut/Hhhx8aJSUlgUd1dXWgz2OPPWa43W7jjTfeMDZs2GBcf/31J7yUrE+fPsb7779vfP7558aPfvSjDn15XlN892oaw2CcW8onn3xihIeHG48++qixbds24+WXXzY6depkvPTSS4E+jPWZu+mmm4zevXsHLu194403jPj4eOM3v/lNoA/jHLrKykqjoKDAKCgoMCQZ8+bNMwoKCgLbVbTUmF5++eXGueeea6xdu9ZYu3atcc4553Bp75l68sknjdTUVCMyMtIYPnx44LJUnJ6kEz6ee+65QJ/6+nrjoYceMpKSkgyXy2VceOGFxoYNG4Le58iRI8add95pdOvWzYiOjjauvPJKo7CwsI0/jbV8P4wwzi3nn//8p5GRkWG4XC5j0KBBxuLFi4NeZ6zPnNfrNWbMmGGkpKQYUVFRRr9+/Yw5c+YYPp
8v0IdxDt0HH3xwwr+Tb7rpJsMwWm5MKyoqjMmTJxuxsbFGbGysMXnyZOPgwYNnXL/DMAzjzOZWAAAAmq9DrhkBAADtB2EEAACYijACAABMRRgBAACmIowAAABTEUYAAICpCCMAAMBUhBEAAGAqwggAADAVYQQAAJiKMAIAAExFGAEAAKb6f0PY4UZdM3ijAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "from eval import data_removal_f1\n", + "from metrics import weighted_acc_drop\n", + "acc = data_removal_f1(accs, X_train_scaled, y_train_balanced, X_test_scaled, y_test_balanced)\n", + "plt.plot(range(len(acc)), acc)\n", + "res = weighted_acc_drop(acc)\n", + "print(\"The weighted accuracy drop is {:.3f}\".format(res))" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[0.7594814520847701, 0.7592399840937211, 0.7592399840937211, 0.7595227571735715, 0.760046797169232, 0.760046797169232, 0.760046797169232, 0.7603690115851012, 0.760872438031351, 0.760872438031351, 0.7611340569662554, 0.7602057097838649, 0.7609509020832248, 0.7601862796480878, 0.760428169218241, 0.760428169218241, 0.7601862796480878, 0.759499033467873, 0.7597218157306604, 0.7600021735652206, 0.7592570173466388, 0.7592760320992962, 0.7590339626286324, 0.7585871918937603, 0.7588479267742609, 0.7589028546316828, 0.7586605054704876, 0.7593875167754609, 0.7586785235952075, 0.7579512724241463, 0.7579512724241463, 0.7579512724241463, 0.7577088303569721, 0.7579512724241463, 0.7582291921914562, 0.7575190302017586, 0.7555607879559549, 0.7555607879559549, 0.7558034368405185, 0.7562886938807608, 0.7555778583433014, 0.7550924290592252, 0.7553520506597081, 0.7553520506597081, 0.7546237074813209, 0.7553520506597081, 0.7555948035637191, 0.755837542540033, 0.7556116236687757, 0.7558544078452116, 0.7556116236687757, 0.7551754497474069, 0.755482747098617, 0.7545257125531234, 0.7550277049609901, 0.7550121477326952, 0.755482747098617, 0.7562437590023969, 0.7560316897767239, 0.7560161963075611, 0.755529697368857, 0.7558037241688872, 0.7550431372549019, 0.7548150946317744, 0.7540698306168506, 0.7545868175480966, 0.7543433889232831, 0.7535372549019608, 0.7535679323119868, 0.7538564836516858, 0.7538264645960863, 0.7533396846900791, 0.7541441758762251, 0.7544023042031011, 0.7549184228912715, 0.7549184228912715, 0.7537137928552422, 0.7534699635565657, 0.7534699635565657, 0.7539853711576436, 0.7537276900359527, 0.7539853711576436, 0.7539990667762764, 0.7539990667762764, 0.7540260833900472, 0.7537820283947992, 0.7532805661308375, 0.7535772504924225, 0.7511707098607251, 0.7514152665607676, 0.7519043222777889, 0.7512178394096122, 0.7514855128830461, 0.7515079235656408, 0.7517639382763914, 0.7493130369554782, 0.7495686946844203, 0.7500798842854839, 0.7498345846925436, 0.7500798842854839, 0.7478811343421135, 0.7466531681872483, 0.7471813853408757, 0.7459519858013439, 0.7466896948762235, 0.7459519858013439, 0.7462236375412824, 0.745739637388686, 0.7447778929613496, 0.7450243305965987, 0.7442849436375993, 0.7440529047399563, 0.7440599485155195, 0.7440736507499698, 0.744093241116011, 0.7440803092500521, 0.7433521708504099, 0.7426108650857579, 0.7413859512044813, 0.7428639750427977, 0.7428698278503494, 0.7418861718017647, 0.740401287589606, 0.7396679791426787, 0.7394246937364898, 0.7396725363090856, 0.7396679791426787, 0.740424982820035, 0.7409377602969112, 0.7416938022578536, 0.741949422086729, 0.741701165376579, 0.741701165376579, 0.741949422086729, 0.741701165376579, 0.7414563061157335, 0.7414687177148435, 0.740720036959271, 0.7402301113679016, 0.7399812136426857, 0.7392362936076827, 0.7387397945232236, 0.7387380962545106, 0.7394889934099715, 0.7394958319333109, 0.7392480280632123, 0.73924633315156, 0.7397441279768875, 0.73924633315156, 0.7392486799464423, 
... (several hundred intermediate accuracy values of the printed acc list omitted for readability; the values continue to decline as more points are removed, ending in the run of 0.333... and 0.0 entries below) ...
0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]\n" + 
] + } + ], + "source": [ + "print(acc)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "base", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.7" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/inference.py b/inference.py new file mode 100644 index 0000000..e86c776 --- /dev/null +++ b/inference.py @@ -0,0 +1,53 @@ +import lava +from preact_resnet import PreActResNet18 +import torch +print(torch.cuda.is_available()) # Should return True if GPU is available +import os +import torch.nn as nn +import torch.nn.functional as F +import torch.optim as optim +import torchvision.models as models +from torch.autograd import Variable + +import matplotlib.pyplot as plt +from torch import tensor +from torchvision import datasets, transforms +import pandas as pd +import numpy as n + +from torch.utils.data import Dataset, TensorDataset, DataLoader +cuda_num = 0 +import torchvision +print(torchvision.__version__) +import torch +print(torch.__version__) +print(1) +import os +#os.environ["CUDA_VISIBLE_DEVICES"]=str(cuda_num) +#print(os.environ["CUDA_VISIBLE_DEVICES"]) +#torch.cuda.set_device(cuda_num) +#print("Cuda device: ", torch.cuda.current_device()) +#print("cude devices: ", torch.cuda.device_count()) +device = torch.device('cuda:' + str(cuda_num) if torch.cuda.is_available() else 'cpu') +print(device) +training_size = 5000 +valid_size = 2000 +resize = 32 +portion = 0.3 +net_test = PreActResNet18() +net_test = net_test.to(device) +feature_extractor_name = 'preact_resnet18_test_mnist.pth' +net_test.load_state_dict(torch.load('checkpoint/'+feature_extractor_name, map_location=torch.device('cpu'))) +net_test.eval() +def modify_for_mnist(model): + model.linear = nn.Linear(512, 10) +modify_for_mnist(net_test) +net_test.eval() +print(net_test) +#feature_extractor = lava.load_pretrained_feature_extractor('preact_resnet18_test_mnist.pth', device) +loaders, shuffle_ind = lava.load_data_corrupted(corrupt_type='shuffle', dataname='MNIST', resize=resize, + training_size=training_size, test_size=valid_size, currupt_por=portion) +#loaders, shuffle_ind +print(shuffle_ind) +#dual_sol, trained_with_flag = lava.compute_dual(feature_extractor, loaders['train'], loaders['test'], +# training_size, shuffle_ind, resize=resize) \ No newline at end of file diff --git a/inference_np.ipynb b/inference_np.ipynb new file mode 100644 index 0000000..c8c1269 --- /dev/null +++ b/inference_np.ipynb @@ -0,0 +1,5627 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "2fc3e35bbb4864fb", + "metadata": { + "ExecuteTime": { + "end_time": "2024-05-15T18:44:00.657220Z", + "start_time": "2024-05-15T18:44:00.651415Z" + } + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "c:\\Users\\21520\\anaconda3\\Lib\\site-packages\\torchtext\\data\\__init__.py:4: UserWarning: \n", + "/!\\ IMPORTANT WARNING ABOUT TORCHTEXT STATUS /!\\ \n", + "Torchtext is deprecated and the last released version will be 0.18 (this one). 
You can silence this warning by calling the following at the beginnign of your scripts: `import torchtext; torchtext.disable_torchtext_deprecation_warning()`\n", + " warnings.warn(torchtext._TORCHTEXT_DEPRECATION_MSG)\n", + "c:\\Users\\21520\\PycharmProjects\\LAVA\\LAVA\\otdd\\pytorch\\utils.py:7: TqdmExperimentalWarning: Using `tqdm.autonotebook.tqdm` in notebook mode. Use `tqdm.tqdm` instead to force console mode (e.g. in jupyter console)\n", + " from tqdm.autonotebook import tqdm\n" + ] + } + ], + "source": [ + "import lava" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "initial_id", + "metadata": { + "ExecuteTime": { + "end_time": "2024-05-15T20:54:06.004457Z", + "start_time": "2024-05-15T20:54:05.931845Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "True\n" + ] + } + ], + "source": [ + "from preact_resnet import PreActResNet18\n", + "import torch\n", + "print(torch.cuda.is_available()) # Should return True if GPU is available\n", + "import os\n", + "import torch.nn as nn\n", + "import torch.nn.functional as F\n", + "import torch.optim as optim\n", + "import torchvision.models as models\n", + "from torch.autograd import Variable\n", + "\n", + "import matplotlib.pyplot as plt\n", + "from torch import tensor\n", + "from torchvision import datasets, transforms\n", + "import pandas as pd\n", + "import numpy as n\n", + "\n", + "from torch.utils.data import Dataset, TensorDataset, DataLoader" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "537970a917d28b24", + "metadata": { + "ExecuteTime": { + "end_time": "2024-05-16T00:14:46.804668Z", + "start_time": "2024-05-16T00:14:45.374590Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "True\n" + ] + } + ], + "source": [ + "#from preact_resnet import PreActResNet18\n", + "import torch\n", + "print(torch.cuda.is_available()) # Should return True if GPU is available" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "56be66a2f085e903", + "metadata": { + "ExecuteTime": { + "end_time": "2024-05-16T00:16:35.514864Z", + "start_time": "2024-05-16T00:16:35.500286Z" + } + }, + "outputs": [], + "source": [ + "import torch" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "fe997c54393baffe", + "metadata": { + "ExecuteTime": { + "end_time": "2024-05-16T00:16:41.576883Z", + "start_time": "2024-05-16T00:16:41.520494Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "2.3.0\n" + ] + } + ], + "source": [ + "print(torch.__version__)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "55c712589afa2ff1", + "metadata": { + "ExecuteTime": { + "end_time": "2024-05-15T20:52:00.418149Z", + "start_time": "2024-05-15T20:51:59.938637Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0.18.0\n", + "2.3.0\n", + "Cuda device: 0\n", + "cude devices: 1\n" + ] + } + ], + "source": [ + "cuda_num = 0\n", + "import torchvision\n", + "print(torchvision.__version__)\n", + "import torch\n", + "print(torch.__version__)\n", + "import os\n", + "#os.environ[\"CUDA_VISIBLE_DEVICES\"]=str(cuda_num)\n", + "#print(os.environ[\"CUDA_VISIBLE_DEVICES\"])\n", + "#torch.cuda.set_device(cuda_num)\n", + "print(\"Cuda device: \", torch.cuda.current_device())\n", + "print(\"cude devices: \", torch.cuda.device_count())" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "825a489cd5206aef", + "metadata": { + "ExecuteTime": { + 
"end_time": "2024-05-15T18:35:03.922180Z", + "start_time": "2024-05-15T18:35:03.914980Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "cuda:0\n" + ] + } + ], + "source": [ + "device = torch.device('cuda:' + str(cuda_num) if torch.cuda.is_available() else 'cpu')\n", + "print(device)\n", + "training_size = 50\n", + "valid_size = 20\n", + "resize = 32\n", + "portion = 0.3" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "5d3c8b14127bb226", + "metadata": { + "ExecuteTime": { + "end_time": "2024-05-15T18:35:19.946464Z", + "start_time": "2024-05-15T18:35:19.690173Z" + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "PreActResNet(\n", + " (conv1): Conv2d(3, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (layer1): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (layer2): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(64, 128, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (layer3): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(128, 256, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, 
track_running_stats=True)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (layer4): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(256, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (linear): Linear(in_features=512, out_features=100, bias=True)\n", + ")" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "net_test = PreActResNet18()\n", + "net_test = net_test.to(device)\n", + "feature_extractor_name = 'preact_resnet18_test_mnist.pth'\n", + "net_test.load_state_dict(torch.load('checkpoint/'+feature_extractor_name, map_location=torch.device('cpu')))\n", + "net_test.eval()" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "b5e6439994689b2", + "metadata": { + "ExecuteTime": { + "end_time": "2024-05-15T18:35:33.478036Z", + "start_time": "2024-05-15T18:35:33.470130Z" + } + }, + "outputs": [], + "source": [ + "def modify_for_mnist(model):\n", + " model.linear = nn.Linear(512, 10)" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "f68232230f8b6740", + "metadata": { + "ExecuteTime": { + "end_time": "2024-05-15T18:35:42.670701Z", + "start_time": "2024-05-15T18:35:42.652868Z" + } + }, + "outputs": [], + "source": [ + "modify_for_mnist(net_test)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "f33dc85661655622", + "metadata": { + "ExecuteTime": { + "end_time": "2024-05-15T18:35:55.181194Z", + "start_time": "2024-05-15T18:35:55.168070Z" + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "PreActResNet(\n", + " (conv1): Conv2d(3, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (layer1): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (layer2): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): 
BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(64, 128, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (layer3): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(128, 256, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (layer4): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(256, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (linear): Linear(in_features=512, out_features=10, bias=True)\n", + ")" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "net_test.eval()\n" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "66c8935a2c67bc29", + "metadata": { + "ExecuteTime": { + "end_time": "2024-05-15T18:36:35.229992Z", + "start_time": "2024-05-15T18:36:32.404108Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "MNIST\n", + "Currrent label: 9\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: 
(tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 9\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 7\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 6\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 6\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 3\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 5\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " 
[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 5\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 6\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 3\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 5\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 3\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 6\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 1\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, 
-0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n" + ] + } + ], + "source": [ + "loaders, shuffle_ind = lava.load_data_corrupted(corrupt_type='shuffle', dataname='MNIST', resize=resize,\n", + " training_size=training_size, test_size=valid_size, currupt_por=portion)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "0d3b1312", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "print(loaders['train'])" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "c4799164", + "metadata": {}, + "outputs": [], + "source": [ + "ktr = None" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "3621d447", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([1, 32, 32])\n" + ] + } + ], + "source": [ + "for batch in loaders['train']:\n", + " print(batch[0][0].size())\n", + " ktr = batch[0][0]\n", + " break" + ] + }, + { + "cell_type": "code", + "execution_count": 36, + "id": "2a3bd5d5", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: pillow in c:\\users\\21520\\anaconda3\\lib\\site-packages (10.2.0)\n" + ] + } + ], + "source": [ + "!pip install pillow" + ] + }, + { + "cell_type": "code", + "execution_count": 38, + "id": "a525b969", + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "from PIL import Image\n", + "import numpy as np" + ] + }, + { + "cell_type": "code", + "execution_count": 39, + "id": "c67357fe", + "metadata": {}, + "outputs": [], + "source": [ + "np_array = ktr.numpy()\n", + "np_array = np_array.squeeze()\n", + "image = Image.fromarray(np.uint8(np_array * 255), 'L')\n", + "image.save('output_image.png')\n", + "image.show()" + ] + }, + { + "cell_type": "code", + "execution_count": 40, + "id": "e4fa7fda", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(32, 32)\n" + ] + } + ], + "source": [ + "print(np_array.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 41, + "id": "748b12bf", + "metadata": {}, + "outputs": [], + "source": [ + "rgb = ktr.repeat(3,1,1)" + ] + }, + { + "cell_type": "code", + "execution_count": 42, + "id": "6abe713f", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([3, 32, 32])\n" + ] + } + ], + "source": [ + "print(rgb.size())" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "id": "30b1eaa4", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor(1024)\n" + ] + } + ], + "source": [ + "count = 0 \n", + "for i in range(0,32):\n", + " for j in range(0,32):\n", + " #print(ktr[0,i,j])\n", + " 
count = count + (ktr[0,i,j]!=tensor(-0.4242))\n", + "print(count)" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "f6c00aea", + "metadata": {}, + "outputs": [], + "source": [ + "net_test = None" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "604b8fcb", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "PreActResNet(\n", + " (conv1): Conv2d(3, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (layer1): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (layer2): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(64, 128, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (layer3): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(128, 256, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (layer4): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(256, 512, kernel_size=(3, 
3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (linear): Linear(in_features=512, out_features=100, bias=True)\n", + ")" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "net_test = PreActResNet18()\n", + "net_test = net_test.to(device)\n", + "net_test.load_state_dict(torch.load('checkpoint/'+'preact_resnet18_test_mnist.pth', map_location='cuda:0'))\n", + "net_test.eval()" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "ebd5d411", + "metadata": {}, + "outputs": [], + "source": [ + "embedder = net_test.to(device)\n", + "embedder.fc = torch.nn.Identity()" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "a9abc5d0", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "PreActResNet(\n", + " (conv1): Conv2d(3, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (layer1): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (layer2): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(64, 128, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " 
)\n", + " )\n", + " (layer3): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(128, 256, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (layer4): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(256, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (linear): Linear(in_features=512, out_features=100, bias=True)\n", + " (fc): Identity()\n", + ")\n" + ] + } + ], + "source": [ + "print(embedder)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "18c9b2d4", + "metadata": {}, + "outputs": [], + "source": [ + "for p in embedder.parameters():\n", + " p.requires_grad = False" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "f2481c80", + "metadata": {}, + "outputs": [], + "source": [ + "from otdd.pytorch.distance_fast import DatasetDistance, FeatureCost" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "ab8cfeb4", + "metadata": {}, + "outputs": [], + "source": [ + "feature_cost = FeatureCost(src_embedding = embedder,\n", + " src_dim = (1, resize,resize),\n", + " tgt_embedding = embedder,\n", + " tgt_dim = (1, resize,resize),\n", + " p = 2,\n", + " device='cuda')" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "92d04203", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([50, 1024])\n" + ] + } + ], + "source": [ + "import pickle\n", + "with open(\"save_x1y1x2y2.txt\", \"rb\") as f:\n", + " loaded_data = pickle.load(f)\n", + "X1, Y1, X2, Y2 = loaded_data\n", + "\n", + "# Now you can use X1, Y1, X2, and Y2 in your code\n", + "print(X1.shape) # Example usage" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "50540c63", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", 
+ "text": [ + "tensor(-0.4242)\n" + ] + } + ], + "source": [ + "print(X1[0][0])" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "4bfe3f5f", + "metadata": {}, + "outputs": [], + "source": [ + "def _get_batch_shape(b):\n", + " if b.ndim == 3: return b.shape\n", + " elif b.ndim == 2: return (1,*b.shape)\n", + " elif b.ndim == 1: return (1,1,b.shape[0])" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "8e54879a", + "metadata": {}, + "outputs": [], + "source": [ + "B1, N1, D1 = _get_batch_shape(X1)" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "id": "da47d4dd", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "1 50 1024\n" + ] + } + ], + "source": [ + "print(B1, N1, D1)" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "521d88fc", + "metadata": {}, + "outputs": [], + "source": [ + "src_dim = (1, 32, 32)" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "id": "91d5249c", + "metadata": {}, + "outputs": [], + "source": [ + "X_test = X1.view(-1, *src_dim)" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "2b14697a", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([50, 1, 32, 32])\n" + ] + } + ], + "source": [ + "print(X_test.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "id": "cd264433", + "metadata": {}, + "outputs": [], + "source": [ + "X_test_rgb = X_test.repeat(1, 3, 1, 1)" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "05451076", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([50, 3, 32, 32])\n" + ] + } + ], + "source": [ + "print(X_test_rgb.size())" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "id": "ccb69085", + "metadata": {}, + "outputs": [], + "source": [ + "X_test_rgb = X_test_rgb.to(device)" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "id": "6e37de6c", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([50, 3, 32, 32])" + ] + }, + "execution_count": 26, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "X_test_rgb.size()" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "id": "b1c4520f", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "PreActResNet(\n", + " (conv1): Conv2d(3, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (layer1): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (layer2): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, 
track_running_stats=True)\n", + " (conv1): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(64, 128, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (layer3): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(128, 256, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (layer4): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(256, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (linear): Linear(in_features=512, out_features=100, bias=True)\n", + " (fc): Identity()\n", + ")" + ] + }, + "execution_count": 27, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "embedder.to(device)" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "id": "08ec46d5", + "metadata": {}, + "outputs": [], + "source": [ + "X_test_emb = embedder(X_test_rgb)" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "id": "8fbf2d92", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([50, 100])\n" + ] + } + ], + "source": [ + "print(X_test_emb.size())" + ] + 
}, + { + "cell_type": "code", + "execution_count": 30, + "id": "97f11e47", + "metadata": {}, + "outputs": [], + "source": [ + "flattened_X1 = X_test_emb.reshape(B1, N1, -1)" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "id": "4953bab9", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor([[[ 4.5680, 35.7887, -0.4968, ..., -24.5336, -30.4800, -24.2358],\n", + " [ -1.0291, 14.3661, 0.8592, ..., -15.6310, -18.3019, -15.5440],\n", + " [ 1.1676, 18.7959, -5.3507, ..., -21.4980, -27.0970, -21.4995],\n", + " ...,\n", + " [ 0.4916, 14.6977, -0.4040, ..., -14.9157, -17.9280, -14.8729],\n", + " [ -1.3570, 20.5337, 2.7240, ..., -18.8162, -21.8036, -18.5932],\n", + " [ -9.1459, 11.4589, 17.1523, ..., -9.8075, -6.5931, -9.3166]]],\n", + " device='cuda:0')\n" + ] + } + ], + "source": [ + "print(flattened_X1)" + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "id": "469c0f5c", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([1, 50, 100])\n" + ] + } + ], + "source": [ + "print(flattened_X1.size())" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "id": "5ac46d3c", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([17, 1024])\n" + ] + } + ], + "source": [ + "print(X2.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "id": "ee69c666", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "1 17 1024\n", + "torch.Size([17, 1, 32, 32])\n", + "torch.Size([17, 3, 32, 32])\n", + "torch.Size([17, 3, 32, 32])\n", + "torch.Size([17, 100])\n", + "torch.Size([1, 17, 100])\n" + ] + } + ], + "source": [ + "B2, N2, D2 = _get_batch_shape(X2)\n", + "print(B2, N2, D2)\n", + "tgt_dim = (1, 32, 32)\n", + "X_test_1 = X2.view(-1, *tgt_dim)\n", + "print(X_test_1.shape)\n", + "X_test_rgb_1 = X_test_1.repeat(1, 3, 1, 1)\n", + "print(X_test_rgb_1.size())\n", + "X_test_rgb_1 = X_test_rgb_1.to(device)\n", + "print(X_test_rgb_1.size())\n", + "X_test_emb_1 = embedder(X_test_rgb_1)\n", + "print(X_test_emb_1.size())\n", + "flattened_X2 = X_test_emb_1.reshape(B2, N2, -1)\n", + "print(flattened_X2.size())" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "id": "826855f9", + "metadata": {}, + "outputs": [], + "source": [ + "import geomloss\n", + "c = geomloss.utils.squared_distances(flattened_X1, flattened_X2) / 2" + ] + }, + { + "cell_type": "code", + "execution_count": 36, + "id": "8062eec3", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[[1.2867e+03, 1.0123e+04, 3.3894e+03, 2.2643e+04, 1.8316e+04,\n", + " 1.6929e+03, 1.5964e+03, 1.4539e+04, 5.8768e+03, 6.0583e+03,\n", + " 1.0668e+04, 9.8217e+03, 2.2888e+03, 1.4502e+04, 3.2149e+03,\n", + " 2.4505e+03, 2.2549e+04],\n", + " [1.1393e+04, 8.6758e+00, 2.4432e+03, 2.4312e+03, 1.7133e+03,\n", + " 4.2093e+03, 6.8048e+03, 3.6053e+02, 1.0514e+03, 9.3430e+02,\n", + " 6.5640e+02, 1.1870e+02, 5.7243e+03, 3.0782e+03, 6.9086e+03,\n", + " 3.9213e+03, 3.0672e+03],\n", + " [4.9633e+03, 3.0817e+03, 1.3474e+03, 1.1285e+04, 9.3767e+03,\n", + " 1.2645e+02, 1.4237e+03, 5.6676e+03, 1.5117e+03, 1.6831e+03,\n", + " 4.5970e+03, 3.3720e+03, 5.6225e+02, 8.8407e+03, 1.1159e+03,\n", + " 1.7912e+02, 1.2387e+04],\n", + " [5.2531e+02, 1.4524e+04, 5.0961e+03, 2.6884e+04, 2.0289e+04,\n", + " 6.0954e+03, 5.6210e+03, 1.9096e+04, 9.3927e+03, 9.0594e+03,\n", + " 1.2565e+04, 1.3099e+04, 8.2856e+03, 
1.3663e+04, 1.0490e+04,\n", + " 7.6179e+03, 2.4402e+04],\n", + " [1.2070e+03, 1.9307e+04, 8.1230e+03, 3.4301e+04, 2.7247e+04,\n", + " 7.6277e+03, 6.9201e+03, 2.4853e+04, 1.3225e+04, 1.2738e+04,\n", + " 1.7761e+04, 1.7965e+04, 9.4412e+03, 1.9753e+04, 1.1302e+04,\n", + " 9.4261e+03, 3.2035e+04],\n", + " [1.1570e+03, 5.5217e+03, 7.0455e+02, 1.4841e+04, 1.0829e+04,\n", + " 1.0254e+03, 1.8253e+03, 8.7075e+03, 2.7213e+03, 2.4557e+03,\n", + " 5.1649e+03, 4.9254e+03, 2.5725e+03, 7.5647e+03, 4.0291e+03,\n", + " 1.8139e+03, 1.4077e+04],\n", + " [4.8594e+03, 5.9823e+03, 2.7054e+03, 1.5536e+04, 1.3111e+04,\n", + " 1.3160e+03, 2.5164e+01, 9.3540e+03, 2.3649e+03, 4.5292e+03,\n", + " 7.9105e+03, 6.4558e+03, 5.4466e+02, 1.2515e+04, 1.1528e+03,\n", + " 5.5979e+02, 1.7034e+04],\n", + " [1.6105e+04, 3.6851e+03, 6.2770e+03, 2.6880e+03, 6.9095e+02,\n", + " 1.1958e+04, 1.5877e+04, 2.8058e+03, 5.9928e+03, 4.4634e+03,\n", + " 1.4917e+03, 2.6201e+03, 1.5914e+04, 4.3482e+02, 1.8530e+04,\n", + " 1.2530e+04, 7.8442e+02],\n", + " [2.3353e+04, 1.3619e+04, 1.4768e+04, 1.1020e+04, 7.0062e+03,\n", + " 2.3692e+04, 2.9704e+04, 1.2111e+04, 1.6961e+04, 1.2932e+04,\n", + " 8.2063e+03, 1.1116e+04, 3.0617e+04, 3.7019e+03, 3.4625e+04,\n", + " 2.5891e+04, 6.0525e+03],\n", + " [7.9460e+03, 1.2250e+03, 1.4591e+03, 6.2360e+03, 4.8835e+03,\n", + " 2.1979e+03, 2.5055e+03, 2.7686e+03, 4.7787e+01, 1.5269e+03,\n", + " 2.5032e+03, 1.5597e+03, 2.4576e+03, 5.7827e+03, 3.4690e+03,\n", + " 1.3673e+03, 7.3773e+03],\n", + " [1.4518e+04, 5.6414e+03, 6.5478e+03, 5.4235e+03, 2.2785e+03,\n", + " 1.2871e+04, 1.7099e+04, 5.0859e+03, 7.5813e+03, 5.2892e+03,\n", + " 2.3626e+03, 4.0450e+03, 1.7683e+04, 3.3618e+02, 2.0734e+04,\n", + " 1.4107e+04, 2.4059e+03],\n", + " [1.9868e+03, 4.0631e+03, 2.6373e+02, 1.1680e+04, 7.7702e+03,\n", + " 1.6596e+03, 3.5463e+03, 6.5340e+03, 2.3761e+03, 1.3100e+03,\n", + " 3.0275e+03, 3.2074e+03, 4.0665e+03, 4.5300e+03, 5.8210e+03,\n", + " 2.7550e+03, 1.0344e+04],\n", + " [1.4242e+03, 1.5820e+04, 6.5825e+03, 3.0760e+04, 2.5228e+04,\n", + " 4.3417e+03, 4.4063e+03, 2.1149e+04, 1.0683e+04, 1.0096e+04,\n", + " 1.5794e+04, 1.5156e+04, 5.3804e+03, 1.9487e+04, 6.4031e+03,\n", + " 5.8388e+03, 2.9934e+04],\n", + " [1.3714e+04, 4.1768e+03, 5.4367e+03, 4.2029e+03, 1.4694e+03,\n", + " 1.1141e+04, 1.5121e+04, 3.7014e+03, 6.0333e+03, 4.0992e+03,\n", + " 1.4900e+03, 2.8440e+03, 1.5456e+04, 1.0554e+02, 1.8236e+04,\n", + " 1.2117e+04, 1.7474e+03],\n", + " [1.9276e+04, 5.2338e+03, 8.5280e+03, 3.0358e+03, 1.1259e+03,\n", + " 1.4981e+04, 1.9397e+04, 3.8781e+03, 8.1703e+03, 6.3200e+03,\n", + " 2.6343e+03, 4.0242e+03, 1.9398e+04, 9.8788e+02, 2.2225e+04,\n", + " 1.5647e+04, 8.0939e+02],\n", + " [3.7154e+03, 8.9819e+03, 3.7874e+03, 2.0321e+04, 1.7019e+04,\n", + " 2.0613e+03, 2.2074e+02, 1.3098e+04, 4.3112e+03, 6.5025e+03,\n", + " 1.0640e+04, 9.2697e+03, 1.2752e+03, 1.5217e+04, 1.9700e+03,\n", + " 1.5574e+03, 2.1450e+04],\n", + " [8.0948e+03, 3.5465e+02, 1.1155e+03, 4.6314e+03, 3.2951e+03,\n", + " 2.2364e+03, 3.9889e+03, 1.4869e+03, 2.3166e+02, 4.9382e+02,\n", + " 1.1056e+03, 4.5151e+02, 3.3748e+03, 3.8723e+03, 4.4987e+03,\n", + " 1.9714e+03, 5.2640e+03],\n", + " [1.0498e+04, 3.2444e+03, 3.4972e+03, 4.7282e+03, 1.7509e+03,\n", + " 8.4075e+03, 1.1871e+04, 3.3485e+03, 4.3467e+03, 2.6735e+03,\n", + " 8.7857e+02, 2.0074e+03, 1.2313e+04, 3.8236e+01, 1.4909e+04,\n", + " 9.3642e+03, 2.4957e+03],\n", + " [8.9926e+03, 3.7149e+03, 3.6528e+03, 1.1591e+04, 1.0988e+04,\n", + " 1.2649e+03, 2.2517e+03, 6.1220e+03, 2.4457e+03, 3.4085e+03,\n", + " 6.6417e+03, 
4.6661e+03, 6.1797e+02, 1.2284e+04, 5.4797e+02,\n", + " 6.1966e+02, 1.4088e+04],\n", + " [3.3169e+03, 4.3797e+03, 1.2942e+03, 1.3694e+04, 1.1066e+04,\n", + " 1.0902e+01, 1.3705e+03, 7.3986e+03, 2.3422e+03, 2.1078e+03,\n", + " 5.4489e+03, 4.4202e+03, 7.7270e+02, 9.4317e+03, 1.4616e+03,\n", + " 4.6727e+02, 1.4283e+04],\n", + " [9.3901e+03, 2.8532e+02, 1.7671e+03, 4.4863e+03, 3.6774e+03,\n", + " 2.3075e+03, 4.5027e+03, 1.3037e+03, 6.0868e+02, 6.8533e+02,\n", + " 1.5071e+03, 5.9031e+02, 3.2782e+03, 4.8153e+03, 4.1193e+03,\n", + " 2.0099e+03, 5.5616e+03],\n", + " [5.3112e+03, 1.5104e+03, 3.6620e+02, 6.3460e+03, 3.5772e+03,\n", + " 2.5955e+03, 4.8454e+03, 2.8914e+03, 1.1000e+03, 3.4129e+02,\n", + " 7.4604e+02, 8.8595e+02, 4.9537e+03, 2.0700e+03, 6.7116e+03,\n", + " 3.1783e+03, 5.4592e+03],\n", + " [5.7487e+03, 9.7007e+02, 3.6482e+02, 6.3360e+03, 4.2637e+03,\n", + " 1.4631e+03, 3.2356e+03, 2.5335e+03, 3.5483e+02, 2.4395e+02,\n", + " 1.2586e+03, 8.1135e+02, 2.9237e+03, 3.7376e+03, 4.1980e+03,\n", + " 1.5886e+03, 6.4432e+03],\n", + " [8.1763e+03, 1.1558e+03, 1.6981e+03, 6.8193e+03, 5.8743e+03,\n", + " 1.3756e+03, 2.5761e+03, 2.7987e+03, 4.7145e+02, 1.2715e+03,\n", + " 2.9199e+03, 1.6664e+03, 1.6144e+03, 6.9479e+03, 2.2275e+03,\n", + " 8.0800e+02, 8.3483e+03],\n", + " [1.2715e+04, 2.4658e+02, 3.1463e+03, 1.6732e+03, 9.0599e+02,\n", + " 5.6624e+03, 8.8996e+03, 1.8781e+02, 1.9105e+03, 1.2784e+03,\n", + " 3.7406e+02, 1.4880e+02, 7.7673e+03, 2.1412e+03, 9.1850e+03,\n", + " 5.6017e+03, 1.8561e+03],\n", + " [1.0204e+04, 6.0455e+03, 4.7656e+03, 7.9218e+03, 3.8134e+03,\n", + " 1.0635e+04, 1.4087e+04, 6.3726e+03, 6.6917e+03, 4.5727e+03,\n", + " 2.5511e+03, 4.2798e+03, 1.5314e+04, 5.8370e+02, 1.8435e+04,\n", + " 1.2083e+04, 4.5737e+03],\n", + " [7.2003e+03, 4.0103e+03, 2.9887e+03, 1.2586e+04, 1.1424e+04,\n", + " 6.6226e+02, 1.8393e+03, 6.6819e+03, 2.5039e+03, 3.0378e+03,\n", + " 6.5065e+03, 4.7331e+03, 3.7738e+02, 1.1845e+04, 4.4528e+02,\n", + " 3.8040e+02, 1.4611e+04],\n", + " [2.2486e+03, 2.3471e+04, 1.0838e+04, 3.9566e+04, 3.1713e+04,\n", + " 1.0384e+04, 9.6625e+03, 2.9461e+04, 1.6854e+04, 1.6000e+04,\n", + " 2.1404e+04, 2.1848e+04, 1.2548e+04, 2.3045e+04, 1.4592e+04,\n", + " 1.2585e+04, 3.6719e+04],\n", + " [5.0328e+03, 4.3195e+03, 2.0304e+03, 1.3365e+04, 1.1410e+04,\n", + " 3.2512e+02, 6.3590e+02, 7.3072e+03, 1.9319e+03, 2.8967e+03,\n", + " 6.2683e+03, 4.7915e+03, 1.0913e+02, 1.1003e+04, 5.2755e+02,\n", + " 3.0605e+01, 1.4860e+04],\n", + " [2.1667e+03, 3.9340e+03, 2.1377e+02, 1.1465e+04, 7.6849e+03,\n", + " 1.6379e+03, 2.6011e+03, 6.4647e+03, 1.7577e+03, 1.5614e+03,\n", + " 3.2067e+03, 3.2403e+03, 3.5049e+03, 4.9044e+03, 5.2380e+03,\n", + " 2.2745e+03, 1.0467e+04],\n", + " [2.2469e+03, 2.3241e+04, 1.0639e+04, 3.8824e+04, 3.0829e+04,\n", + " 1.0764e+04, 9.8306e+03, 2.9086e+04, 1.6584e+04, 1.5898e+04,\n", + " 2.0889e+04, 2.1513e+04, 1.3051e+04, 2.2105e+04, 1.5296e+04,\n", + " 1.2896e+04, 3.5747e+04],\n", + " [1.7828e+04, 1.1633e+03, 5.9568e+03, 4.3416e+02, 3.8714e+02,\n", + " 9.2715e+03, 1.3056e+04, 2.4376e+02, 3.9243e+03, 3.2341e+03,\n", + " 1.2625e+03, 1.1312e+03, 1.1597e+04, 2.6713e+03, 1.3098e+04,\n", + " 8.9963e+03, 7.2548e+02],\n", + " [3.5606e+02, 7.8961e+03, 1.5884e+03, 1.8283e+04, 1.3439e+04,\n", + " 2.0286e+03, 2.6848e+03, 1.1530e+04, 4.4820e+03, 3.9532e+03,\n", + " 6.9975e+03, 7.0123e+03, 3.8949e+03, 9.0005e+03, 5.5636e+03,\n", + " 3.1747e+03, 1.6924e+04],\n", + " [1.6209e+04, 8.0126e+03, 8.5351e+03, 7.3566e+03, 3.6928e+03,\n", + " 1.5736e+04, 2.0306e+04, 7.3077e+03, 1.0108e+04, 7.3483e+03,\n", 
+ " 3.9380e+03, 6.0661e+03, 2.1220e+04, 1.0294e+03, 2.4641e+04,\n", + " 1.7304e+04, 3.5912e+03],\n", + " [1.1837e+03, 5.8566e+03, 9.8074e+02, 1.5113e+04, 1.0875e+04,\n", + " 1.4317e+03, 3.7095e+03, 8.8785e+03, 3.8849e+03, 2.2343e+03,\n", + " 4.9193e+03, 4.9698e+03, 3.9036e+03, 6.8773e+03, 5.4500e+03,\n", + " 2.9477e+03, 1.3738e+04],\n", + " [1.6704e+04, 1.4351e+03, 5.4628e+03, 6.7661e+02, 4.5867e+01,\n", + " 9.5245e+03, 1.2673e+04, 6.1803e+02, 3.6437e+03, 3.2954e+03,\n", + " 1.0058e+03, 1.1768e+03, 1.1990e+04, 1.7681e+03, 1.3834e+04,\n", + " 9.2143e+03, 4.8446e+02],\n", + " [1.4248e+04, 5.2168e+02, 3.9357e+03, 1.0931e+03, 4.5415e+02,\n", + " 6.9890e+03, 1.0054e+04, 1.7521e+02, 2.3399e+03, 1.9685e+03,\n", + " 5.4336e+02, 4.1474e+02, 9.1089e+03, 2.0189e+03, 1.0657e+04,\n", + " 6.7403e+03, 1.2423e+03],\n", + " [1.2558e+04, 3.0229e+01, 3.0136e+03, 2.1478e+03, 1.7670e+03,\n", + " 4.6314e+03, 7.2956e+03, 2.4342e+02, 1.2956e+03, 1.2670e+03,\n", + " 9.3813e+02, 2.6969e+02, 5.9923e+03, 3.5886e+03, 7.0610e+03,\n", + " 4.2118e+03, 3.0540e+03],\n", + " [2.1474e+04, 3.8060e+03, 8.8564e+03, 9.9654e+02, 3.4846e+02,\n", + " 1.4402e+04, 1.8580e+04, 2.1684e+03, 7.1455e+03, 6.0263e+03,\n", + " 2.4600e+03, 3.1958e+03, 1.7872e+04, 2.0013e+03, 2.0161e+04,\n", + " 1.4399e+04, 3.1182e+01],\n", + " [5.4692e+03, 1.1585e+03, 3.7383e+02, 7.0039e+03, 4.9685e+03,\n", + " 1.0695e+03, 2.7654e+03, 2.8937e+03, 3.8691e+02, 3.2394e+02,\n", + " 1.6637e+03, 1.0799e+03, 2.3518e+03, 4.4514e+03, 3.4993e+03,\n", + " 1.1806e+03, 7.2881e+03],\n", + " [1.7009e+04, 8.8311e+02, 5.3685e+03, 5.9193e+02, 5.8169e+02,\n", + " 8.4495e+03, 1.0973e+04, 2.4833e+02, 2.7913e+03, 3.1967e+03,\n", + " 1.4316e+03, 1.0620e+03, 1.0017e+04, 3.2392e+03, 1.1438e+04,\n", + " 7.6453e+03, 1.3316e+03],\n", + " [1.8133e+03, 4.1855e+03, 2.8478e+02, 1.2575e+04, 8.9476e+03,\n", + " 8.5752e+02, 1.9782e+03, 6.9853e+03, 1.9320e+03, 1.5944e+03,\n", + " 3.8817e+03, 3.6454e+03, 2.4852e+03, 6.2067e+03, 3.9347e+03,\n", + " 1.5595e+03, 1.1917e+04],\n", + " [3.3097e+03, 2.9371e+03, 1.8155e+02, 9.1312e+03, 5.5834e+03,\n", + " 2.2965e+03, 3.9747e+03, 4.9096e+03, 1.5995e+03, 9.5451e+02,\n", + " 1.8751e+03, 2.1326e+03, 4.6526e+03, 3.0614e+03, 6.5415e+03,\n", + " 3.0511e+03, 7.8805e+03],\n", + " [6.9148e+03, 1.1478e+03, 9.5200e+02, 6.4109e+03, 4.7593e+03,\n", + " 1.7913e+03, 2.4307e+03, 2.7657e+03, 1.2160e+01, 1.0676e+03,\n", + " 2.1072e+03, 1.3207e+03, 2.3877e+03, 5.1366e+03, 3.4933e+03,\n", + " 1.2458e+03, 7.2144e+03],\n", + " [1.2104e+04, 2.3742e+02, 2.8292e+03, 1.8254e+03, 8.9231e+02,\n", + " 5.4401e+03, 8.3782e+03, 2.7935e+02, 1.6305e+03, 1.1882e+03,\n", + " 2.9594e+02, 1.0760e+02, 7.4860e+03, 1.9818e+03, 8.9664e+03,\n", + " 5.3303e+03, 1.9412e+03],\n", + " [5.7397e+03, 4.1148e+03, 1.8115e+03, 8.3351e+03, 4.3135e+03,\n", + " 5.8496e+03, 8.6486e+03, 5.3182e+03, 3.8020e+03, 2.1272e+03,\n", + " 1.6337e+03, 2.7252e+03, 9.5891e+03, 1.0913e+03, 1.2149e+04,\n", + " 7.1453e+03, 5.8180e+03],\n", + " [5.7689e+03, 1.6306e+04, 9.0283e+03, 3.1727e+04, 2.8128e+04,\n", + " 4.3921e+03, 3.3217e+03, 2.1777e+04, 1.0965e+04, 1.2077e+04,\n", + " 1.8831e+04, 1.6803e+04, 3.2660e+03, 2.5179e+04, 3.1668e+03,\n", + " 4.6324e+03, 3.3367e+04],\n", + " [1.4619e+04, 2.1986e+02, 4.0482e+03, 1.5787e+03, 1.6333e+03,\n", + " 5.7810e+03, 8.3182e+03, 1.5071e+02, 1.7368e+03, 2.0385e+03,\n", + " 1.3872e+03, 6.1115e+02, 6.9205e+03, 4.1574e+03, 7.9300e+03,\n", + " 5.0726e+03, 2.7947e+03],\n", + " [3.8991e+03, 1.9507e+03, 5.6479e+01, 8.4063e+03, 5.6486e+03,\n", + " 1.0597e+03, 2.9861e+03, 3.9273e+03, 9.3858e+02, 
3.5574e+02,\n", + " 1.8055e+03, 1.5468e+03, 2.8703e+03, 4.0443e+03, 4.2631e+03,\n", + " 1.6126e+03, 8.0109e+03],\n", + " [1.7658e+04, 5.6276e+03, 7.9834e+03, 4.0774e+03, 1.6215e+03,\n", + " 1.4540e+04, 1.8999e+04, 4.5332e+03, 8.2564e+03, 6.1160e+03,\n", + " 2.6259e+03, 4.2076e+03, 1.9248e+04, 6.7726e+02, 2.2221e+04,\n", + " 1.5497e+04, 1.4148e+03]]], device='cuda:0')" + ] + }, + "execution_count": 36, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "_orig_device = flattened_X1.device\n", + "c.to(_orig_device)" + ] + }, + { + "cell_type": "code", + "execution_count": 37, + "id": "f244e8a6", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([1, 50, 17])\n" + ] + } + ], + "source": [ + "print(c.size())" + ] + }, + { + "cell_type": "code", + "execution_count": 76, + "id": "c503bf3c", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([50, 1, 32, 32])\n" + ] + } + ], + "source": [ + "for batch in loaders['train']:\n", + " print(batch[0].size())\n", + " break" + ] + }, + { + "cell_type": "code", + "execution_count": 77, + "id": "dd7e679c", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([20, 1, 32, 32])\n" + ] + } + ], + "source": [ + "for batch in loaders['test']:\n", + " print(batch[0].size())\n", + " break" + ] + }, + { + "cell_type": "code", + "execution_count": 39, + "id": "c231b2f9", + "metadata": {}, + "outputs": [], + "source": [ + "dist = DatasetDistance(loaders['train'], loaders['test'], X1=X1, X2=X2, Y1=Y1, Y2=Y2,\n", + " inner_ot_method = 'exact',\n", + " debiased_loss = True,\n", + " feature_cost = c,\n", + " λ_x=1.0, λ_y=1.0,\n", + " sqrt_method = 'spectral',\n", + " sqrt_niters=10,\n", + " precision='single',\n", + " p = 2, entreg = 1e-1,\n", + " device='cuda')" + ] + }, + { + "cell_type": "code", + "execution_count": 52, + "id": "e01dca3d", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The autoreload extension is already loaded. 
To reload it, use:\n", + " %reload_ext autoreload\n" + ] + } + ], + "source": [ + "%load_ext autoreload\n", + "\n", + "%autoreload 2" + ] + }, + { + "cell_type": "code", + "execution_count": 41, + "id": "d8f36d8c", + "metadata": {}, + "outputs": [], + "source": [ + "from otdd.pytorch.wasserstein import pwdist_exact" + ] + }, + { + "cell_type": "code", + "execution_count": 42, + "id": "f18a1bd7", + "metadata": {}, + "outputs": [], + "source": [ + "symmetric_tasks = False\n", + "inner_ot_p = 2\n", + "inner_ot_loss='sinkhorn'\n", + "inner_ot_debiased = False\n", + "inner_ot_entreg = 0.1" + ] + }, + { + "cell_type": "code", + "execution_count": 43, + "id": "41a1c764", + "metadata": {}, + "outputs": [], + "source": [ + "from functools import partial" + ] + }, + { + "cell_type": "code", + "execution_count": 44, + "id": "91645b39", + "metadata": {}, + "outputs": [], + "source": [ + "pwdist = partial(pwdist_exact,\n", + " symmetric=symmetric_tasks,\n", + " p = inner_ot_p,\n", + " loss = inner_ot_loss,\n", + " debias=inner_ot_debiased,\n", + " entreg = inner_ot_entreg,\n", + " cost_function = c,\n", + " device=device)" + ] + }, + { + "cell_type": "code", + "execution_count": 66, + "id": "d4e94d37", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor([14, 13, 15, 12, 12, 14, 11, 12, 11, 14, 15, 13, 10, 15, 10, 10, 12])\n" + ] + } + ], + "source": [ + "print(Y2)" + ] + }, + { + "cell_type": "code", + "execution_count": 70, + "id": "51b7ec10", + "metadata": {}, + "outputs": [], + "source": [ + "c1 = torch.unique(Y1)\n", + "c2 = torch.unique(Y1)\n", + "n1, n2 = len(c1), len(c2)" + ] + }, + { + "cell_type": "code", + "execution_count": 58, + "id": "9455b756", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])\n" + ] + } + ], + "source": [ + "print(c1)" + ] + }, + { + "cell_type": "code", + "execution_count": 65, + "id": "afa1340b", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor([10, 11, 12, 13, 14, 15])\n" + ] + } + ], + "source": [ + "print(c2)" + ] + }, + { + "cell_type": "code", + "execution_count": 64, + "id": "183d1502", + "metadata": {}, + "outputs": [], + "source": [ + "X_label = X1[Y1 == c1[0]]" + ] + }, + { + "cell_type": "code", + "execution_count": 95, + "id": "2c0fe000", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor([[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]])\n" + ] + } + ], + "source": [ + "print(X_label)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e6108d2b", + "metadata": {}, + "outputs": [], + "source": [ + "X_label1 = X2[Y2 == c2[0]]" + ] + }, + { + "cell_type": "code", + "execution_count": 62, + "id": "934b4979", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([4, 1024])\n" + ] + } + ], + "source": [ + "print(X_label.size())" + ] + }, + { + "cell_type": "code", + "execution_count": 63, + "id": "9124f539", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([4, 1024])\n", + "torch.Size([3, 1024])\n", + "torch.Size([7, 1024])\n", + "torch.Size([5, 1024])\n", + 
"torch.Size([9, 1024])\n", + "torch.Size([7, 1024])\n", + "torch.Size([3, 1024])\n", + "torch.Size([2, 1024])\n", + "torch.Size([5, 1024])\n", + "torch.Size([5, 1024])\n" + ] + } + ], + "source": [ + "for i in range(10):\n", + " X_label = X1[Y1 == c1[i]]\n", + " print(X_label.size())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2c9db360", + "metadata": {}, + "outputs": [], + "source": [ + "self.feature_cost = partial(FeatureCost,\n", + " src_emb = self.src_embedding,\n", + " src_dim = (3,32,32),\n", + " tgt_emb = self.tgt_embedding,\n", + " tgt_dim = (3,32,32),\n", + " p = self.p, device=self.device)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "56b9b220", + "metadata": {}, + "outputs": [], + "source": [ + "pwdist = partial(pwdist_exact,\n", + " symmetric=self.symmetric_tasks,\n", + " p = self.inner_ot_p,\n", + " loss = self.inner_ot_loss,\n", + " debias=self.inner_ot_debiased,\n", + " entreg = self.inner_ot_entreg,\n", + " cost_function = self.feature_cost,\n", + " device=self.device)" + ] + }, + { + "cell_type": "code", + "execution_count": 99, + "id": "128a01de", + "metadata": {}, + "outputs": [], + "source": [ + "distance = geomloss.SamplesLoss(\n", + " loss='sinkhorn', p=2,\n", + " cost=c,\n", + " debias=False,\n", + " blur=0.1**(1 / p),\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": 68, + "id": "94be7fd6", + "metadata": {}, + "outputs": [], + "source": [ + "import itertools\n", + "pairs = list(itertools.combinations(range(n1), 2))" + ] + }, + { + "cell_type": "code", + "execution_count": 72, + "id": "e55d88ff", + "metadata": {}, + "outputs": [], + "source": [ + "from tqdm.autonotebook import tqdm\n" + ] + }, + { + "cell_type": "code", + "execution_count": 100, + "id": "f6004a9e", + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "7dc9fcd0e773426ba6e436aae1b58d37", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/45 [00:00 1\u001b[0m co \u001b[38;5;241m=\u001b[39m distance(x_tensor_cuda\u001b[38;5;241m.\u001b[39mto(device),y_tensor_cuda\u001b[38;5;241m.\u001b[39mto(device))\n", + "File \u001b[1;32mc:\\Users\\21520\\anaconda3\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1532\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1530\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m 1531\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1532\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n", + "File \u001b[1;32mc:\\Users\\21520\\anaconda3\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1541\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1536\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[0;32m 1537\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[0;32m 1538\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m 
(self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_pre_hooks or _global_backward_hooks or _global_forward_hooks or _global_forward_pre_hooks): return forward_call(*args, **kwargs)\n",
+      "    [... geomloss frames condensed: SamplesLoss.forward -> sinkhorn_tensorized -> scaling_parameters -> epsilon_schedule, where np.arange(p * np.log(diameter), p * np.log(blur), p * np.log(scaling)) triggers Tensor.__array__ on a cuda:0 tensor ...]\n",
+      "\u001b[1;31mTypeError\u001b[0m: can't convert cuda:0 device type tensor to numpy. Use Tensor.cpu() to copy the tensor to host memory first."
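+  {
+   "cell_type": "markdown",
+   "id": "note-cuda-scalars",
+   "metadata": {},
+   "source": [
+    "The `TypeError` above is raised inside geomloss's `epsilon_schedule`, which evaluates `np.arange(p * np.log(diameter), p * np.log(blur), p * np.log(scaling))`; NumPy cannot consume a tensor that lives on `cuda:0`, so at least one of these scalar parameters reached the solver as a CUDA tensor instead of a plain Python float. A minimal guard under that assumption is sketched below (the helper `as_python_float` and the variable names are invented for this note); alternatively the inputs can be moved to the CPU first, as the error message itself suggests."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "note-cuda-scalars-sketch",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import torch\n",
+    "\n",
+    "def as_python_float(v):\n",
+    "    # Tensor-valued scalars -> host floats; plain numbers pass through unchanged.\n",
+    "    return v.item() if torch.is_tensor(v) else float(v)\n",
+    "\n",
+    "p_exp = 2\n",
+    "blur_value = as_python_float(0.1 ** (1 / p_exp))\n",
+    "\n",
+    "# distance = geomloss.SamplesLoss(loss='sinkhorn', p=p_exp,\n",
+    "#                                 cost=pairwise_sq_cost,   # callable cost, see the sketch above\n",
+    "#                                 debias=False, blur=blur_value)"
+   ]
+  },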
+ ] + } + ], + "source": [ + "co = distance(x_tensor_cuda.to(device),y_tensor_cuda.to(device))" + ] + }, + { + "cell_type": "code", + "execution_count": 102, + "id": "fb1b2c69", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0 1\n" + ] + }, + { + "ename": "TypeError", + "evalue": "'Tensor' object is not callable", + "output_type": "error", + "traceback": [ + "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mTypeError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[1;32mIn[102], line 3\u001b[0m\n\u001b[0;32m 1\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m i, j \u001b[38;5;129;01min\u001b[39;00m pbar:\n\u001b[0;32m 2\u001b[0m \u001b[38;5;28mprint\u001b[39m(i,j)\n\u001b[1;32m----> 3\u001b[0m D[i, j] \u001b[38;5;241m=\u001b[39m distance(X1[Y1\u001b[38;5;241m==\u001b[39mc1[i]]\u001b[38;5;241m.\u001b[39mto(device), X1[Y1\u001b[38;5;241m==\u001b[39mc2[j]]\u001b[38;5;241m.\u001b[39mto(device))\u001b[38;5;241m.\u001b[39mitem()\n", + "File \u001b[1;32mc:\\Users\\21520\\anaconda3\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1532\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1530\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m 1531\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1532\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n", + "File \u001b[1;32mc:\\Users\\21520\\anaconda3\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1541\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1536\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[0;32m 1537\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[0;32m 1538\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[0;32m 1539\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[0;32m 1540\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[1;32m-> 1541\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m forward_call(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m 1543\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m 1544\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n", + "File \u001b[1;32mc:\\Users\\21520\\anaconda3\\Lib\\site-packages\\geomloss\\samples_loss.py:265\u001b[0m, in 
\u001b[0;36mSamplesLoss.forward\u001b[1;34m(self, *args)\u001b[0m\n\u001b[0;32m 262\u001b[0m α, x, β, y \u001b[38;5;241m=\u001b[39m α\u001b[38;5;241m.\u001b[39munsqueeze(\u001b[38;5;241m0\u001b[39m), x\u001b[38;5;241m.\u001b[39munsqueeze(\u001b[38;5;241m0\u001b[39m), β\u001b[38;5;241m.\u001b[39munsqueeze(\u001b[38;5;241m0\u001b[39m), y\u001b[38;5;241m.\u001b[39munsqueeze(\u001b[38;5;241m0\u001b[39m)\n\u001b[0;32m 264\u001b[0m \u001b[38;5;66;03m# Run --------------------------------------------------------------------------------\u001b[39;00m\n\u001b[1;32m--> 265\u001b[0m values \u001b[38;5;241m=\u001b[39m routines[\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mloss][backend](\n\u001b[0;32m 266\u001b[0m α,\n\u001b[0;32m 267\u001b[0m x,\n\u001b[0;32m 268\u001b[0m β,\n\u001b[0;32m 269\u001b[0m y,\n\u001b[0;32m 270\u001b[0m p\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mp,\n\u001b[0;32m 271\u001b[0m blur\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mblur,\n\u001b[0;32m 272\u001b[0m reach\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mreach,\n\u001b[0;32m 273\u001b[0m diameter\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdiameter,\n\u001b[0;32m 274\u001b[0m scaling\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mscaling,\n\u001b[0;32m 275\u001b[0m truncate\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtruncate,\n\u001b[0;32m 276\u001b[0m cost\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcost,\n\u001b[0;32m 277\u001b[0m kernel\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mkernel,\n\u001b[0;32m 278\u001b[0m cluster_scale\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcluster_scale,\n\u001b[0;32m 279\u001b[0m debias\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdebias,\n\u001b[0;32m 280\u001b[0m potentials\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpotentials,\n\u001b[0;32m 281\u001b[0m labels_x\u001b[38;5;241m=\u001b[39ml_x,\n\u001b[0;32m 282\u001b[0m labels_y\u001b[38;5;241m=\u001b[39ml_y,\n\u001b[0;32m 283\u001b[0m verbose\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mverbose,\n\u001b[0;32m 284\u001b[0m )\n\u001b[0;32m 286\u001b[0m \u001b[38;5;66;03m# Make sure that the output has the correct shape ------------------------------------\u001b[39;00m\n\u001b[0;32m 287\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m (\n\u001b[0;32m 288\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpotentials\n\u001b[0;32m 289\u001b[0m ): \u001b[38;5;66;03m# Return some dual potentials (= test functions) sampled on the input measures\u001b[39;00m\n", + "File \u001b[1;32mc:\\Users\\21520\\anaconda3\\Lib\\site-packages\\geomloss\\sinkhorn_samples.py:180\u001b[0m, in \u001b[0;36msinkhorn_tensorized\u001b[1;34m(a, x, b, y, p, blur, reach, diameter, scaling, cost, debias, potentials, **kwargs)\u001b[0m\n\u001b[0;32m 173\u001b[0m cost \u001b[38;5;241m=\u001b[39m cost_routines[p]\n\u001b[0;32m 175\u001b[0m \u001b[38;5;66;03m# Compute the relevant cost matrices C(x_i, y_j), C(y_j, x_i), etc.\u001b[39;00m\n\u001b[0;32m 176\u001b[0m \u001b[38;5;66;03m# Note that we \"detach\" the gradients of the \"right-hand sides\":\u001b[39;00m\n\u001b[0;32m 177\u001b[0m \u001b[38;5;66;03m# 
this is coherent with the way we compute our gradients\u001b[39;00m\n\u001b[0;32m 178\u001b[0m \u001b[38;5;66;03m# in the `sinkhorn_loop(...)` routine, in the `sinkhorn_divergence.py` file.\u001b[39;00m\n\u001b[0;32m 179\u001b[0m \u001b[38;5;66;03m# Please refer to the comments in this file for more details.\u001b[39;00m\n\u001b[1;32m--> 180\u001b[0m C_xy \u001b[38;5;241m=\u001b[39m cost(x, y\u001b[38;5;241m.\u001b[39mdetach()) \u001b[38;5;66;03m# (B,N,M) torch Tensor\u001b[39;00m\n\u001b[0;32m 181\u001b[0m C_yx \u001b[38;5;241m=\u001b[39m cost(y, x\u001b[38;5;241m.\u001b[39mdetach()) \u001b[38;5;66;03m# (B,M,N) torch Tensor\u001b[39;00m\n\u001b[0;32m 183\u001b[0m \u001b[38;5;66;03m# N.B.: The \"auto-correlation\" matrices C(x_i, x_j) and C(y_i, y_j)\u001b[39;00m\n\u001b[0;32m 184\u001b[0m \u001b[38;5;66;03m# are only used by the \"debiased\" Sinkhorn algorithm.\u001b[39;00m\n", + "\u001b[1;31mTypeError\u001b[0m: 'Tensor' object is not callable" + ] + } + ], + "source": [ + "for i, j in pbar:\n", + " print(i,j)\n", + " D[i, j] = distance(X1[Y1==c1[i]].to(device), X1[Y1==c2[j]].to(device)).item()" + ] + }, + { + "cell_type": "code", + "execution_count": 77, + "id": "53cdb7d9", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([4, 1024]) torch.Size([3, 1024])\n" + ] + } + ], + "source": [ + "print(X1[Y1==c1[0]].size(), X1[Y1==c2[1]].size())" + ] + }, + { + "cell_type": "code", + "execution_count": 76, + "id": "dee0b8db", + "metadata": {}, + "outputs": [ + { + "ename": "TypeError", + "evalue": "'Tensor' object is not callable", + "output_type": "error", + "traceback": [ + "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mTypeError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[1;32mIn[76], line 1\u001b[0m\n\u001b[1;32m----> 1\u001b[0m distance(X1[Y1\u001b[38;5;241m==\u001b[39mc1[\u001b[38;5;241m0\u001b[39m]]\u001b[38;5;241m.\u001b[39mto(device), X1[Y1\u001b[38;5;241m==\u001b[39mc2[\u001b[38;5;241m1\u001b[39m]]\u001b[38;5;241m.\u001b[39mto(device))\n", + "File \u001b[1;32mc:\\Users\\21520\\anaconda3\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1532\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1530\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m 1531\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1532\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n", + "File \u001b[1;32mc:\\Users\\21520\\anaconda3\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1541\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1536\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[0;32m 1537\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[0;32m 1538\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[0;32m 1539\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[0;32m 1540\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[1;32m-> 1541\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m forward_call(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m 1543\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m 1544\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n", + "File \u001b[1;32mc:\\Users\\21520\\anaconda3\\Lib\\site-packages\\geomloss\\samples_loss.py:265\u001b[0m, in \u001b[0;36mSamplesLoss.forward\u001b[1;34m(self, *args)\u001b[0m\n\u001b[0;32m 262\u001b[0m α, x, β, y \u001b[38;5;241m=\u001b[39m α\u001b[38;5;241m.\u001b[39munsqueeze(\u001b[38;5;241m0\u001b[39m), x\u001b[38;5;241m.\u001b[39munsqueeze(\u001b[38;5;241m0\u001b[39m), β\u001b[38;5;241m.\u001b[39munsqueeze(\u001b[38;5;241m0\u001b[39m), y\u001b[38;5;241m.\u001b[39munsqueeze(\u001b[38;5;241m0\u001b[39m)\n\u001b[0;32m 264\u001b[0m \u001b[38;5;66;03m# Run --------------------------------------------------------------------------------\u001b[39;00m\n\u001b[1;32m--> 265\u001b[0m values \u001b[38;5;241m=\u001b[39m routines[\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mloss][backend](\n\u001b[0;32m 266\u001b[0m α,\n\u001b[0;32m 267\u001b[0m x,\n\u001b[0;32m 268\u001b[0m β,\n\u001b[0;32m 269\u001b[0m y,\n\u001b[0;32m 270\u001b[0m p\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mp,\n\u001b[0;32m 271\u001b[0m blur\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mblur,\n\u001b[0;32m 272\u001b[0m reach\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mreach,\n\u001b[0;32m 273\u001b[0m diameter\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdiameter,\n\u001b[0;32m 274\u001b[0m scaling\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mscaling,\n\u001b[0;32m 275\u001b[0m truncate\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtruncate,\n\u001b[0;32m 276\u001b[0m cost\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcost,\n\u001b[0;32m 277\u001b[0m kernel\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mkernel,\n\u001b[0;32m 278\u001b[0m cluster_scale\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcluster_scale,\n\u001b[0;32m 279\u001b[0m debias\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdebias,\n\u001b[0;32m 280\u001b[0m potentials\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpotentials,\n\u001b[0;32m 281\u001b[0m labels_x\u001b[38;5;241m=\u001b[39ml_x,\n\u001b[0;32m 282\u001b[0m labels_y\u001b[38;5;241m=\u001b[39ml_y,\n\u001b[0;32m 283\u001b[0m verbose\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mverbose,\n\u001b[0;32m 284\u001b[0m )\n\u001b[0;32m 286\u001b[0m 
\u001b[38;5;66;03m# Make sure that the output has the correct shape ------------------------------------\u001b[39;00m\n\u001b[0;32m 287\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m (\n\u001b[0;32m 288\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpotentials\n\u001b[0;32m 289\u001b[0m ): \u001b[38;5;66;03m# Return some dual potentials (= test functions) sampled on the input measures\u001b[39;00m\n", + "File \u001b[1;32mc:\\Users\\21520\\anaconda3\\Lib\\site-packages\\geomloss\\sinkhorn_samples.py:180\u001b[0m, in \u001b[0;36msinkhorn_tensorized\u001b[1;34m(a, x, b, y, p, blur, reach, diameter, scaling, cost, debias, potentials, **kwargs)\u001b[0m\n\u001b[0;32m 173\u001b[0m cost \u001b[38;5;241m=\u001b[39m cost_routines[p]\n\u001b[0;32m 175\u001b[0m \u001b[38;5;66;03m# Compute the relevant cost matrices C(x_i, y_j), C(y_j, x_i), etc.\u001b[39;00m\n\u001b[0;32m 176\u001b[0m \u001b[38;5;66;03m# Note that we \"detach\" the gradients of the \"right-hand sides\":\u001b[39;00m\n\u001b[0;32m 177\u001b[0m \u001b[38;5;66;03m# this is coherent with the way we compute our gradients\u001b[39;00m\n\u001b[0;32m 178\u001b[0m \u001b[38;5;66;03m# in the `sinkhorn_loop(...)` routine, in the `sinkhorn_divergence.py` file.\u001b[39;00m\n\u001b[0;32m 179\u001b[0m \u001b[38;5;66;03m# Please refer to the comments in this file for more details.\u001b[39;00m\n\u001b[1;32m--> 180\u001b[0m C_xy \u001b[38;5;241m=\u001b[39m cost(x, y\u001b[38;5;241m.\u001b[39mdetach()) \u001b[38;5;66;03m# (B,N,M) torch Tensor\u001b[39;00m\n\u001b[0;32m 181\u001b[0m C_yx \u001b[38;5;241m=\u001b[39m cost(y, x\u001b[38;5;241m.\u001b[39mdetach()) \u001b[38;5;66;03m# (B,M,N) torch Tensor\u001b[39;00m\n\u001b[0;32m 183\u001b[0m \u001b[38;5;66;03m# N.B.: The \"auto-correlation\" matrices C(x_i, x_j) and C(y_i, y_j)\u001b[39;00m\n\u001b[0;32m 184\u001b[0m \u001b[38;5;66;03m# are only used by the \"debiased\" Sinkhorn algorithm.\u001b[39;00m\n", + "\u001b[1;31mTypeError\u001b[0m: 'Tensor' object is not callable" + ] + } + ], + "source": [ + "distance(X1[Y1==c1[0]].to(device), X1[Y1==c2[1]].to(device))" + ] + }, + { + "cell_type": "code", + "execution_count": 54, + "id": "9480c4ab", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "10 10\n", + "[(0, 1), (0, 2), (0, 3), (0, 4), (0, 5), (0, 6), (0, 7), (0, 8), (0, 9), (1, 2), (1, 3), (1, 4), (1, 5), (1, 6), (1, 7), (1, 8), (1, 9), (2, 3), (2, 4), (2, 5), (2, 6), (2, 7), (2, 8), (2, 9), (3, 4), (3, 5), (3, 6), (3, 7), (3, 8), (3, 9), (4, 5), (4, 6), (4, 7), (4, 8), (4, 9), (5, 6), (5, 7), (5, 8), (5, 9), (6, 7), (6, 8), (6, 9), (7, 8), (7, 9), (8, 9)]\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "0a9f4854faa7471bb1573125b2209f4a", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/45 [00:00 337\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m:\n\u001b[0;32m 338\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mThis is awkward. Distance computation failed. Geomloss is hard to debug\u001b[39m\u001b[38;5;124m\"\u001b[39m \\\n\u001b[0;32m 339\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mBut here\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124ms a few things that might be happening: \u001b[39m\u001b[38;5;124m\"\u001b[39m\\\n\u001b[0;32m 340\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m 1. 
Too many samples with this label, causing memory issues\u001b[39m\u001b[38;5;124m\"\u001b[39m \\\n\u001b[0;32m 341\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m 2. Datatype errors, e.g., if the two datasets have different type\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n", + "File \u001b[1;32mc:\\Users\\21520\\anaconda3\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1532\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1531\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1532\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n", + "File \u001b[1;32mc:\\Users\\21520\\anaconda3\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1541\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1538\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[0;32m 1539\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[0;32m 1540\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[1;32m-> 1541\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m forward_call(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m 1543\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n", + "File \u001b[1;32mc:\\Users\\21520\\anaconda3\\Lib\\site-packages\\geomloss\\samples_loss.py:265\u001b[0m, in \u001b[0;36mSamplesLoss.forward\u001b[1;34m(self, *args)\u001b[0m\n\u001b[0;32m 264\u001b[0m \u001b[38;5;66;03m# Run --------------------------------------------------------------------------------\u001b[39;00m\n\u001b[1;32m--> 265\u001b[0m values \u001b[38;5;241m=\u001b[39m routines[\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mloss][backend](\n\u001b[0;32m 266\u001b[0m α,\n\u001b[0;32m 267\u001b[0m x,\n\u001b[0;32m 268\u001b[0m β,\n\u001b[0;32m 269\u001b[0m y,\n\u001b[0;32m 270\u001b[0m p\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mp,\n\u001b[0;32m 271\u001b[0m blur\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mblur,\n\u001b[0;32m 272\u001b[0m reach\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mreach,\n\u001b[0;32m 273\u001b[0m diameter\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdiameter,\n\u001b[0;32m 274\u001b[0m scaling\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mscaling,\n\u001b[0;32m 275\u001b[0m truncate\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtruncate,\n\u001b[0;32m 276\u001b[0m cost\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcost,\n\u001b[0;32m 277\u001b[0m 
kernel\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mkernel,\n\u001b[0;32m 278\u001b[0m cluster_scale\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcluster_scale,\n\u001b[0;32m 279\u001b[0m debias\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdebias,\n\u001b[0;32m 280\u001b[0m potentials\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpotentials,\n\u001b[0;32m 281\u001b[0m labels_x\u001b[38;5;241m=\u001b[39ml_x,\n\u001b[0;32m 282\u001b[0m labels_y\u001b[38;5;241m=\u001b[39ml_y,\n\u001b[0;32m 283\u001b[0m verbose\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mverbose,\n\u001b[0;32m 284\u001b[0m )\n\u001b[0;32m 286\u001b[0m \u001b[38;5;66;03m# Make sure that the output has the correct shape ------------------------------------\u001b[39;00m\n", + "File \u001b[1;32mc:\\Users\\21520\\anaconda3\\Lib\\site-packages\\geomloss\\sinkhorn_samples.py:180\u001b[0m, in \u001b[0;36msinkhorn_tensorized\u001b[1;34m(a, x, b, y, p, blur, reach, diameter, scaling, cost, debias, potentials, **kwargs)\u001b[0m\n\u001b[0;32m 175\u001b[0m \u001b[38;5;66;03m# Compute the relevant cost matrices C(x_i, y_j), C(y_j, x_i), etc.\u001b[39;00m\n\u001b[0;32m 176\u001b[0m \u001b[38;5;66;03m# Note that we \"detach\" the gradients of the \"right-hand sides\":\u001b[39;00m\n\u001b[0;32m 177\u001b[0m \u001b[38;5;66;03m# this is coherent with the way we compute our gradients\u001b[39;00m\n\u001b[0;32m 178\u001b[0m \u001b[38;5;66;03m# in the `sinkhorn_loop(...)` routine, in the `sinkhorn_divergence.py` file.\u001b[39;00m\n\u001b[0;32m 179\u001b[0m \u001b[38;5;66;03m# Please refer to the comments in this file for more details.\u001b[39;00m\n\u001b[1;32m--> 180\u001b[0m C_xy \u001b[38;5;241m=\u001b[39m cost(x, y\u001b[38;5;241m.\u001b[39mdetach()) \u001b[38;5;66;03m# (B,N,M) torch Tensor\u001b[39;00m\n\u001b[0;32m 181\u001b[0m C_yx \u001b[38;5;241m=\u001b[39m cost(y, x\u001b[38;5;241m.\u001b[39mdetach()) \u001b[38;5;66;03m# (B,M,N) torch Tensor\u001b[39;00m\n", + "\u001b[1;31mTypeError\u001b[0m: 'Tensor' object is not callable", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[1;31mSystemExit\u001b[0m Traceback (most recent call last)", + " \u001b[1;31m[... skipping hidden 1 frame]\u001b[0m\n", + "Cell \u001b[1;32mIn[54], line 1\u001b[0m\n\u001b[1;32m----> 1\u001b[0m DYY1 \u001b[38;5;241m=\u001b[39m pwdist(X1, Y1)\n", + "File \u001b[1;32mc:\\Users\\21520\\PycharmProjects\\LAVA\\LAVA\\otdd\\pytorch\\wasserstein.py:343\u001b[0m, in \u001b[0;36mpwdist_exact\u001b[1;34m(X1, Y1, X2, Y2, symmetric, loss, cost_function, p, debias, entreg, device)\u001b[0m\n\u001b[0;32m 342\u001b[0m sys\u001b[38;5;241m.\u001b[39mexit(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mDistance computation failed. Aborting.\u001b[39m\u001b[38;5;124m'\u001b[39m)\n\u001b[1;32m--> 343\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m symmetric:\n\u001b[0;32m 344\u001b[0m D[j, i] \u001b[38;5;241m=\u001b[39m D[i, j]\n", + "\u001b[1;31mSystemExit\u001b[0m: Distance computation failed. Aborting.", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[1;31mAttributeError\u001b[0m Traceback (most recent call last)", + " \u001b[1;31m[... 
skipping hidden 1 frame]\u001b[0m\n", + "File \u001b[1;32mc:\\Users\\21520\\anaconda3\\Lib\\site-packages\\IPython\\core\\interactiveshell.py:2121\u001b[0m, in \u001b[0;36mInteractiveShell.showtraceback\u001b[1;34m(self, exc_tuple, filename, tb_offset, exception_only, running_compiled_code)\u001b[0m\n\u001b[0;32m 2118\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m exception_only:\n\u001b[0;32m 2119\u001b[0m stb \u001b[38;5;241m=\u001b[39m [\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mAn exception has occurred, use \u001b[39m\u001b[38;5;124m%\u001b[39m\u001b[38;5;124mtb to see \u001b[39m\u001b[38;5;124m'\u001b[39m\n\u001b[0;32m 2120\u001b[0m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mthe full traceback.\u001b[39m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[38;5;124m'\u001b[39m]\n\u001b[1;32m-> 2121\u001b[0m stb\u001b[38;5;241m.\u001b[39mextend(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mInteractiveTB\u001b[38;5;241m.\u001b[39mget_exception_only(etype,\n\u001b[0;32m 2122\u001b[0m value))\n\u001b[0;32m 2123\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 2125\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mcontains_exceptiongroup\u001b[39m(val):\n", + "File \u001b[1;32mc:\\Users\\21520\\anaconda3\\Lib\\site-packages\\IPython\\core\\ultratb.py:710\u001b[0m, in \u001b[0;36mListTB.get_exception_only\u001b[1;34m(self, etype, value)\u001b[0m\n\u001b[0;32m 702\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mget_exception_only\u001b[39m(\u001b[38;5;28mself\u001b[39m, etype, value):\n\u001b[0;32m 703\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"Only print the exception type and message, without a traceback.\u001b[39;00m\n\u001b[0;32m 704\u001b[0m \n\u001b[0;32m 705\u001b[0m \u001b[38;5;124;03m Parameters\u001b[39;00m\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 708\u001b[0m \u001b[38;5;124;03m value : exception value\u001b[39;00m\n\u001b[0;32m 709\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[1;32m--> 710\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m ListTB\u001b[38;5;241m.\u001b[39mstructured_traceback(\u001b[38;5;28mself\u001b[39m, etype, value)\n", + "File \u001b[1;32mc:\\Users\\21520\\anaconda3\\Lib\\site-packages\\IPython\\core\\ultratb.py:568\u001b[0m, in \u001b[0;36mListTB.structured_traceback\u001b[1;34m(self, etype, evalue, etb, tb_offset, context)\u001b[0m\n\u001b[0;32m 565\u001b[0m chained_exc_ids\u001b[38;5;241m.\u001b[39madd(\u001b[38;5;28mid\u001b[39m(exception[\u001b[38;5;241m1\u001b[39m]))\n\u001b[0;32m 566\u001b[0m chained_exceptions_tb_offset \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m0\u001b[39m\n\u001b[0;32m 567\u001b[0m out_list \u001b[38;5;241m=\u001b[39m (\n\u001b[1;32m--> 568\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mstructured_traceback(\n\u001b[0;32m 569\u001b[0m etype,\n\u001b[0;32m 570\u001b[0m evalue,\n\u001b[0;32m 571\u001b[0m (etb, chained_exc_ids), \u001b[38;5;66;03m# type: ignore\u001b[39;00m\n\u001b[0;32m 572\u001b[0m chained_exceptions_tb_offset,\n\u001b[0;32m 573\u001b[0m context,\n\u001b[0;32m 574\u001b[0m )\n\u001b[0;32m 575\u001b[0m \u001b[38;5;241m+\u001b[39m chained_exception_message\n\u001b[0;32m 576\u001b[0m \u001b[38;5;241m+\u001b[39m out_list)\n\u001b[0;32m 578\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m out_list\n", + "File \u001b[1;32mc:\\Users\\21520\\anaconda3\\Lib\\site-packages\\IPython\\core\\ultratb.py:1435\u001b[0m, in \u001b[0;36mAutoFormattedTB.structured_traceback\u001b[1;34m(self, etype, evalue, etb, tb_offset, 
number_of_lines_of_context)\u001b[0m\n\u001b[0;32m 1433\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 1434\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtb \u001b[38;5;241m=\u001b[39m etb\n\u001b[1;32m-> 1435\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m FormattedTB\u001b[38;5;241m.\u001b[39mstructured_traceback(\n\u001b[0;32m 1436\u001b[0m \u001b[38;5;28mself\u001b[39m, etype, evalue, etb, tb_offset, number_of_lines_of_context\n\u001b[0;32m 1437\u001b[0m )\n", + "File \u001b[1;32mc:\\Users\\21520\\anaconda3\\Lib\\site-packages\\IPython\\core\\ultratb.py:1326\u001b[0m, in \u001b[0;36mFormattedTB.structured_traceback\u001b[1;34m(self, etype, value, tb, tb_offset, number_of_lines_of_context)\u001b[0m\n\u001b[0;32m 1323\u001b[0m mode \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mmode\n\u001b[0;32m 1324\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m mode \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mverbose_modes:\n\u001b[0;32m 1325\u001b[0m \u001b[38;5;66;03m# Verbose modes need a full traceback\u001b[39;00m\n\u001b[1;32m-> 1326\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m VerboseTB\u001b[38;5;241m.\u001b[39mstructured_traceback(\n\u001b[0;32m 1327\u001b[0m \u001b[38;5;28mself\u001b[39m, etype, value, tb, tb_offset, number_of_lines_of_context\n\u001b[0;32m 1328\u001b[0m )\n\u001b[0;32m 1329\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m mode \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mMinimal\u001b[39m\u001b[38;5;124m'\u001b[39m:\n\u001b[0;32m 1330\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m ListTB\u001b[38;5;241m.\u001b[39mget_exception_only(\u001b[38;5;28mself\u001b[39m, etype, value)\n", + "File \u001b[1;32mc:\\Users\\21520\\anaconda3\\Lib\\site-packages\\IPython\\core\\ultratb.py:1173\u001b[0m, in \u001b[0;36mVerboseTB.structured_traceback\u001b[1;34m(self, etype, evalue, etb, tb_offset, number_of_lines_of_context)\u001b[0m\n\u001b[0;32m 1164\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mstructured_traceback\u001b[39m(\n\u001b[0;32m 1165\u001b[0m \u001b[38;5;28mself\u001b[39m,\n\u001b[0;32m 1166\u001b[0m etype: \u001b[38;5;28mtype\u001b[39m,\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 1170\u001b[0m number_of_lines_of_context: \u001b[38;5;28mint\u001b[39m \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m5\u001b[39m,\n\u001b[0;32m 1171\u001b[0m ):\n\u001b[0;32m 1172\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"Return a nice text document describing the traceback.\"\"\"\u001b[39;00m\n\u001b[1;32m-> 1173\u001b[0m formatted_exception \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mformat_exception_as_a_whole(etype, evalue, etb, number_of_lines_of_context,\n\u001b[0;32m 1174\u001b[0m tb_offset)\n\u001b[0;32m 1176\u001b[0m colors \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mColors \u001b[38;5;66;03m# just a shorthand + quicker name lookup\u001b[39;00m\n\u001b[0;32m 1177\u001b[0m colorsnormal \u001b[38;5;241m=\u001b[39m colors\u001b[38;5;241m.\u001b[39mNormal \u001b[38;5;66;03m# used a lot\u001b[39;00m\n", + "File \u001b[1;32mc:\\Users\\21520\\anaconda3\\Lib\\site-packages\\IPython\\core\\ultratb.py:1063\u001b[0m, in \u001b[0;36mVerboseTB.format_exception_as_a_whole\u001b[1;34m(self, etype, evalue, etb, number_of_lines_of_context, tb_offset)\u001b[0m\n\u001b[0;32m 1060\u001b[0m \u001b[38;5;28;01massert\u001b[39;00m 
\u001b[38;5;28misinstance\u001b[39m(tb_offset, \u001b[38;5;28mint\u001b[39m)\n\u001b[0;32m 1061\u001b[0m head \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mprepare_header(\u001b[38;5;28mstr\u001b[39m(etype), \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mlong_header)\n\u001b[0;32m 1062\u001b[0m records \u001b[38;5;241m=\u001b[39m (\n\u001b[1;32m-> 1063\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mget_records(etb, number_of_lines_of_context, tb_offset) \u001b[38;5;28;01mif\u001b[39;00m etb \u001b[38;5;28;01melse\u001b[39;00m []\n\u001b[0;32m 1064\u001b[0m )\n\u001b[0;32m 1066\u001b[0m frames \u001b[38;5;241m=\u001b[39m []\n\u001b[0;32m 1067\u001b[0m skipped \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m0\u001b[39m\n", + "File \u001b[1;32mc:\\Users\\21520\\anaconda3\\Lib\\site-packages\\IPython\\core\\ultratb.py:1131\u001b[0m, in \u001b[0;36mVerboseTB.get_records\u001b[1;34m(self, etb, number_of_lines_of_context, tb_offset)\u001b[0m\n\u001b[0;32m 1129\u001b[0m \u001b[38;5;28;01mwhile\u001b[39;00m cf \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m 1130\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m-> 1131\u001b[0m mod \u001b[38;5;241m=\u001b[39m inspect\u001b[38;5;241m.\u001b[39mgetmodule(cf\u001b[38;5;241m.\u001b[39mtb_frame)\n\u001b[0;32m 1132\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m mod \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m 1133\u001b[0m mod_name \u001b[38;5;241m=\u001b[39m mod\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__name__\u001b[39m\n", + "\u001b[1;31mAttributeError\u001b[0m: 'tuple' object has no attribute 'tb_frame'" + ] + } + ], + "source": [ + "DYY1 = pwdist(X1, Y1)" + ] + }, + { + "cell_type": "code", + "execution_count": 40, + "id": "5c0e4294", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Calculate the same thing again...\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "652cfe77ed10430ea68915533f824f98", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/45 [00:00 336\u001b[0m D[i, j] \u001b[38;5;241m=\u001b[39m distance(X1[Y1\u001b[38;5;241m==\u001b[39mc1[i]]\u001b[38;5;241m.\u001b[39mto(device), X2[Y2\u001b[38;5;241m==\u001b[39mc2[j]]\u001b[38;5;241m.\u001b[39mto(device))\u001b[38;5;241m.\u001b[39mitem()\n\u001b[0;32m 337\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m:\n", + "File \u001b[1;32mc:\\Users\\21520\\anaconda3\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1532\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1531\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1532\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n", + "File \u001b[1;32mc:\\Users\\21520\\anaconda3\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1541\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1538\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks 
\u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[0;32m 1539\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[0;32m 1540\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[1;32m-> 1541\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m forward_call(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m 1543\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n", + "File \u001b[1;32mc:\\Users\\21520\\anaconda3\\Lib\\site-packages\\geomloss\\samples_loss.py:265\u001b[0m, in \u001b[0;36mSamplesLoss.forward\u001b[1;34m(self, *args)\u001b[0m\n\u001b[0;32m 264\u001b[0m \u001b[38;5;66;03m# Run --------------------------------------------------------------------------------\u001b[39;00m\n\u001b[1;32m--> 265\u001b[0m values \u001b[38;5;241m=\u001b[39m routines[\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mloss][backend](\n\u001b[0;32m 266\u001b[0m α,\n\u001b[0;32m 267\u001b[0m x,\n\u001b[0;32m 268\u001b[0m β,\n\u001b[0;32m 269\u001b[0m y,\n\u001b[0;32m 270\u001b[0m p\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mp,\n\u001b[0;32m 271\u001b[0m blur\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mblur,\n\u001b[0;32m 272\u001b[0m reach\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mreach,\n\u001b[0;32m 273\u001b[0m diameter\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdiameter,\n\u001b[0;32m 274\u001b[0m scaling\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mscaling,\n\u001b[0;32m 275\u001b[0m truncate\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtruncate,\n\u001b[0;32m 276\u001b[0m cost\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcost,\n\u001b[0;32m 277\u001b[0m kernel\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mkernel,\n\u001b[0;32m 278\u001b[0m cluster_scale\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcluster_scale,\n\u001b[0;32m 279\u001b[0m debias\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdebias,\n\u001b[0;32m 280\u001b[0m potentials\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpotentials,\n\u001b[0;32m 281\u001b[0m labels_x\u001b[38;5;241m=\u001b[39ml_x,\n\u001b[0;32m 282\u001b[0m labels_y\u001b[38;5;241m=\u001b[39ml_y,\n\u001b[0;32m 283\u001b[0m verbose\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mverbose,\n\u001b[0;32m 284\u001b[0m )\n\u001b[0;32m 286\u001b[0m \u001b[38;5;66;03m# Make sure that the output has the correct shape ------------------------------------\u001b[39;00m\n", + "File \u001b[1;32mc:\\Users\\21520\\anaconda3\\Lib\\site-packages\\geomloss\\sinkhorn_samples.py:180\u001b[0m, in \u001b[0;36msinkhorn_tensorized\u001b[1;34m(a, x, b, y, p, blur, reach, diameter, scaling, cost, debias, potentials, **kwargs)\u001b[0m\n\u001b[0;32m 175\u001b[0m \u001b[38;5;66;03m# Compute the relevant cost matrices C(x_i, y_j), C(y_j, x_i), etc.\u001b[39;00m\n\u001b[0;32m 
176\u001b[0m \u001b[38;5;66;03m# Note that we \"detach\" the gradients of the \"right-hand sides\":\u001b[39;00m\n\u001b[0;32m 177\u001b[0m \u001b[38;5;66;03m# this is coherent with the way we compute our gradients\u001b[39;00m\n\u001b[0;32m 178\u001b[0m \u001b[38;5;66;03m# in the `sinkhorn_loop(...)` routine, in the `sinkhorn_divergence.py` file.\u001b[39;00m\n\u001b[0;32m 179\u001b[0m \u001b[38;5;66;03m# Please refer to the comments in this file for more details.\u001b[39;00m\n\u001b[1;32m--> 180\u001b[0m C_xy \u001b[38;5;241m=\u001b[39m cost(x, y\u001b[38;5;241m.\u001b[39mdetach()) \u001b[38;5;66;03m# (B,N,M) torch Tensor\u001b[39;00m\n\u001b[0;32m 181\u001b[0m C_yx \u001b[38;5;241m=\u001b[39m cost(y, x\u001b[38;5;241m.\u001b[39mdetach()) \u001b[38;5;66;03m# (B,M,N) torch Tensor\u001b[39;00m\n", + "\u001b[1;31mTypeError\u001b[0m: 'Tensor' object is not callable", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[1;31mSystemExit\u001b[0m Traceback (most recent call last)", + " \u001b[1;31m[... skipping hidden 1 frame]\u001b[0m\n", + "Cell \u001b[1;32mIn[40], line 1\u001b[0m\n\u001b[1;32m----> 1\u001b[0m dual_sol \u001b[38;5;241m=\u001b[39m dist\u001b[38;5;241m.\u001b[39mdual_sol(maxsamples \u001b[38;5;241m=\u001b[39m training_size, return_coupling \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mTrue\u001b[39;00m)\n", + "File \u001b[1;32mc:\\Users\\21520\\PycharmProjects\\LAVA\\LAVA\\otdd\\pytorch\\distance_fast.py:918\u001b[0m, in \u001b[0;36mDatasetDistance.dual_sol\u001b[1;34m(self, maxsamples, return_coupling)\u001b[0m\n\u001b[0;32m 917\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mCalculate the same thing again...\u001b[39m\u001b[38;5;124m'\u001b[39m)\n\u001b[1;32m--> 918\u001b[0m W \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_get_label_distances()\u001b[38;5;241m.\u001b[39mto(torch\u001b[38;5;241m.\u001b[39mdevice(device_dists))\n\u001b[0;32m 920\u001b[0m to_save \u001b[38;5;241m=\u001b[39m [W]\n", + "File \u001b[1;32mc:\\Users\\21520\\PycharmProjects\\LAVA\\LAVA\\otdd\\pytorch\\distance_fast.py:551\u001b[0m, in \u001b[0;36mDatasetDistance._get_label_distances\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 550\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m: \u001b[38;5;66;03m# Exact\u001b[39;00m\n\u001b[1;32m--> 551\u001b[0m DYY1 \u001b[38;5;241m=\u001b[39m pwdist(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mX1, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mY1)\n\u001b[0;32m 552\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n", + "File \u001b[1;32mc:\\Users\\21520\\PycharmProjects\\LAVA\\LAVA\\otdd\\pytorch\\wasserstein.py:342\u001b[0m, in \u001b[0;36mpwdist_exact\u001b[1;34m(X1, Y1, X2, Y2, symmetric, loss, cost_function, p, debias, entreg, device)\u001b[0m\n\u001b[0;32m 338\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mThis is awkward. Distance computation failed. Geomloss is hard to debug\u001b[39m\u001b[38;5;124m\"\u001b[39m \\\n\u001b[0;32m 339\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mBut here\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124ms a few things that might be happening: \u001b[39m\u001b[38;5;124m\"\u001b[39m\\\n\u001b[0;32m 340\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m 1. Too many samples with this label, causing memory issues\u001b[39m\u001b[38;5;124m\"\u001b[39m \\\n\u001b[0;32m 341\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m 2. 
Datatype errors, e.g., if the two datasets have different type\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m--> 342\u001b[0m sys\u001b[38;5;241m.\u001b[39mexit(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mDistance computation failed. Aborting.\u001b[39m\u001b[38;5;124m'\u001b[39m)\n",
+      "\u001b[1;31mSystemExit\u001b[0m: Distance computation failed. Aborting.",
+      "\nDuring handling of the above exception, another exception occurred:\n",
+      "\u001b[1;31mAttributeError\u001b[0m: 'tuple' object has no attribute 'tb_frame'"
+     ]
+    }
+   ],
+   "source": [
+    "dual_sol = dist.dual_sol(maxsamples = training_size, return_coupling = True)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 104,
+   "id": "974313d4b1133b94",
+   "metadata": {
+    "ExecuteTime": {
+     "end_time": "2024-05-15T18:51:11.300391Z",
+     "start_time": "2024-05-15T18:51:00.814095Z"
+    }
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "\n",
+      "cost function: \n"
+     ]
+    },
+    {
+     "ename": "AttributeError",
+     "evalue": "'tuple' object has no attribute 'tb_frame'",
+     "output_type": "error",
+     "traceback": [
+      "\u001b[1;31mRuntimeError\u001b[0m: shape '[-1, 3, 32, 32]' is invalid for input of size 4096",
+      "\nDuring handling of the above exception, another exception occurred:\n",
+      "\u001b[1;31mSystemExit\u001b[0m: Distance computation failed. Aborting.",
+      "\nDuring handling of the above exception, another exception occurred:\n",
+      "\u001b[1;31mAttributeError\u001b[0m: 'tuple' object has no attribute 'tb_frame'"
+     ]
+    }
+   ],
+   "source": [
+    "dual_sol, trained_with_flag = lava.compute_dual(net_test, loaders['train'], loaders['test'],\n",
+    "                                    training_size, shuffle_ind, resize=resize, device = device)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 18,
+   "id": "2b0e35b2a233fc80",
+   "metadata": {
+    "ExecuteTime": {
+     "end_time": "2024-05-15T18:50:07.982095Z",
+     "start_time": "2024-05-15T18:50:07.952972Z"
+    }
+   },
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "'C:\\\\Users\\\\21520\\\\PycharmProjects\\\\LAVA'"
+      ]
+     },
+     "execution_count": 18,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "import os\n",
+    "os.getcwd()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 19,
+   "id": "cf7ab8aa67755222",
+   "metadata": {
+    "ExecuteTime": {
+     "end_time": "2024-05-15T18:50:51.336347Z",
+     "start_time": "2024-05-15T18:50:51.191860Z"
+    }
+   },
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       ""
+      ]
+     },
+     "execution_count": 19,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "import importlib\n",
+    "import lava\n",
+    "importlib.reload(lava)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 16,
+   "id": "8f8ac057cb273194",
+   "metadata": {
+    "ExecuteTime": {
+     "end_time": "2024-05-15T18:46:37.843738Z",
+     "start_time": "2024-05-15T18:46:37.623845Z"
+    }
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      " Volume in drive C has no label.\n",
+      " Volume Serial Number is BB40-D6E0\n",
+      "\n",
+      " Directory of C:\\Users\\21520\\PycharmProjects\\LAVA\n",
+      "\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "File Not Found\n"
+     ]
+    }
+   ],
+   "source": []
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": 
"8140bf08cee7487c", + "metadata": {}, + "outputs": [], + "source": [ + "def evaluate_model(model, test_loader):\n", + " model.eval()\n", + " correct = 0\n", + " total = 0\n", + " with torch.no_grad():\n", + " for data, target in test_loader:\n", + " data = data.repeat(1, 3, 1, 1) # Convert grayscale to RGB\n", + " outputs = model(data)\n", + " _, predicted = torch.max(outputs.data, 1)\n", + " total += target.size(0)\n", + " correct += (predicted == target).sum().item()\n", + " print(f'Accuracy: {100 * correct / total:.2f}%')" + ] + }, + { + "cell_type": "code", + "execution_count": 105, + "id": "c071dfef", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([50, 1024])\n", + "torch.Size([17, 1024])\n", + "torch.Size([50])\n", + "torch.Size([17])\n", + "10 6\n" + ] + } + ], + "source": [ + "import pickle\n", + "with open(\"save_x1y1x2y2.txt\", \"rb\") as f:\n", + " loaded_data = pickle.load(f)\n", + "X1, Y1, X2, Y2 = loaded_data\n", + "\n", + "# Now you can use X1, Y1, X2, and Y2 in your code\n", + "print(X1.shape) # Example usage\n", + "print(X2.shape)\n", + "print(Y1.shape)\n", + "print(Y2.shape)\n", + "c1 = torch.unique(Y1)\n", + "c2 = torch.unique(Y2)\n", + "n1, n2 = len(c1), len(c2)\n", + "print(n1, n2)" + ] + }, + { + "cell_type": "code", + "execution_count": 108, + "id": "e4d787d3", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "MNIST\n", + "Currrent label: 9\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 9\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 7\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 6\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, 
-0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n",
+      "Currrent label: 6\n",
+      "New label: 2 \n",
+      "TRAINNNN label: tensor(2)\n",
+      "Currrent label: 3\n",
+      "New label: 2 \n",
+      "TRAINNNN label: tensor(2)\n",
+      "Currrent label: 5\n",
+      "New label: 8 \n",
+      "TRAINNNN label: tensor(8)\n",
+      "Currrent label: 5\n",
+      "New label: 4 \n",
+      "TRAINNNN label: tensor(4)\n",
+      "Currrent label: 6\n",
+      "New label: 9 \n",
+      "TRAINNNN label: tensor(9)\n",
+      "Currrent label: 3\n",
+      "New label: 7 \n",
+      "TRAINNNN label: tensor(7)\n",
+      "Currrent label: 5\n",
+      "New label: 4 \n",
+      "TRAINNNN label: tensor(4)\n",
+      "Currrent label: 3\n",
+      "New label: 2 \n",
+      "TRAINNNN label: tensor(2)\n",
+      "Currrent label: 6\n",
+      "New label: 4 \n",
+      "TRAINNNN label: tensor(4)\n",
+      "Currrent label: 1\n",
+      "New label: 8 \n",
+      "TRAINNNN label: tensor(8)\n",
+      "Currrent label: 0\n",
+      "New label: 3 \n",
+      "TRAINNNN label: tensor(3)\n"
+     ]
+    }
+   ],
+   "source": [
+    "loaders, shuffle_ind = lava.load_data_corrupted(corrupt_type='shuffle', dataname='MNIST', resize=resize,\n",
+    "                                                training_size=training_size, test_size=valid_size, currupt_por=portion)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 114,
+   "id": "f52872ad",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "50\n"
+     ]
+    }
+   ],
+   "source": [
+    "for batch in loaders['train']:\n",
+    "    print(len(batch[1]))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 115,
+   "id": "ba596ac0",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "20\n"
+     ]
+    }
+   ],
+   "source": [
+    "for batch in loaders['test']:\n",
+    "    print(len(batch[1]))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 103,
+   "id": "e167629e",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "feature_cost = FeatureCost(src_embedding = embedder,\n",
+    "                           src_dim = (1, resize,resize),\n",
+    "                           tgt_embedding = embedder,\n",
+    "                           tgt_dim = (1, resize,resize),\n",
+    "                           p = 2,\n",
+    "                           device='cuda')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 134,
+   "id": "22d2a969",
+   "metadata": {},
+   
"outputs": [], + "source": [ + "dist = DatasetDistance(loaders['train'], loaders['test'],\n", + " inner_ot_method = 'exact',\n", + " debiased_loss = True,\n", + " feature_cost = feature_cost,\n", + " λ_x=1.0, λ_y=1.0,\n", + " sqrt_method = 'spectral',\n", + " sqrt_niters=10,\n", + " precision='single',\n", + " p = 2, entreg = 1e-1,\n", + " device='cuda')" + ] + }, + { + "cell_type": "code", + "execution_count": 135, + "id": "c0124f23", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]) tensor([0, 1, 2, 4, 6, 8])\n" + ] + } + ], + "source": [ + "print(dist.V1, dist.V2)" + ] + }, + { + "cell_type": "code", + "execution_count": 128, + "id": "dbb6ddd5", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The autoreload extension is already loaded. To reload it, use:\n", + " %reload_ext autoreload\n" + ] + } + ], + "source": [ + "%load_ext autoreload\n", + "\n", + "%autoreload 2" + ] + }, + { + "cell_type": "code", + "execution_count": 136, + "id": "9dc78168", + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "0566fe75836e47d9ba3ae26f7533aeb1", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/1 [00:00 maxsamples:\n", + " idxs_1 = sorted(np.random.choice(\n", + " dist.X1.shape[0], maxsamples, replace=False))\n", + "else:\n", + " idxs_1 = np.s_[:] # hack to get a full slice\n", + "\n", + "if maxsamples and dist.X2.shape[0] > maxsamples:\n", + " idxs_2 = sorted(np.random.choice(\n", + " dist.X2.shape[0], maxsamples, replace=False))\n", + "else:\n", + " idxs_2 = np.s_[:] # hack to get a full slice\n", + "Z1 = torch.cat((dist.X1[idxs_1],\n", + " dist.Y1[idxs_1].type(dist.X1.dtype).unsqueeze(1)), -1)\n", + "Z2 = torch.cat((dist.X2[idxs_2],\n", + " dist.Y2[idxs_2].type(dist.X2.dtype).unsqueeze(1)), -1)\n", + "Z1 = Z1.to(device)\n", + "Z2 = Z2.to(device)" + ] + }, + { + "cell_type": "code", + "execution_count": 155, + "id": "2f72394f", + "metadata": {}, + "outputs": [], + "source": [ + "with torch.no_grad():\n", + " loss.debias = False\n", + " loss.potentials = True\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": 154, + "id": "04f6ce11", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([50, 1025]) torch.Size([17, 1025])\n" + ] + } + ], + "source": [ + "print(Z1.shape, Z2.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 157, + "id": "8f728ea6", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Z1 shape in batch: torch.Size([1, 50, 1025])\n", + "Z2 shape in batch: torch.Size([1, 17, 1025])\n", + "1 50 1024\n", + "torch.Size([1, 50, 100])\n", + "1 17 1024\n", + "torch.Size([1, 17, 100])\n", + "torch.Size([1, 50, 17])\n", + "torch.Size([1, 50, 17])\n", + "Gia tri M: tensor([[[110, 109, 111, 108, 108, 110, 107, 108, 107, 110, 111, 109, 106, 111,\n", + " 106, 106, 108],\n", + " [158, 157, 159, 156, 156, 158, 155, 156, 155, 158, 159, 157, 154, 159,\n", + " 154, 154, 156],\n", + " [ 62, 61, 63, 60, 60, 62, 59, 60, 59, 62, 63, 61, 58, 63,\n", + " 58, 58, 60],\n", + " [ 78, 77, 79, 76, 76, 78, 75, 76, 75, 78, 79, 77, 74, 79,\n", + " 74, 74, 76],\n", + " [ 78, 77, 79, 76, 76, 78, 75, 76, 75, 78, 79, 77, 74, 79,\n", + " 74, 74, 76],\n", + " [ 94, 93, 95, 92, 92, 94, 91, 92, 91, 94, 95, 93, 90, 95,\n", + " 90, 90, 92],\n", + " [ 30, 29, 31, 28, 28, 30, 
27, 28, 27, 30, 31, 29, 26, 31,\n",
+      "           26, 26, 28],\n",
+      "          ...,\n",
+      "          [126, 125, 127, 124, 124, 126, 123, 124, 123, 126, 127, 125, 122, 127,\n",
+      "           122, 122, 124]]], device='cuda:0')\n",
+      "torch.Size([1, 50, 17])\n",
+      "torch.Size([1, 50, 17])\n",
+      "gia tri D: tensor([[[ 2584.6584, 15209.9414,  8176.5547, 32161.0352, 27834.1055,\n",
+      "           2990.8381,  3083.6206, 24056.8398,  7364.0854,  7356.2441,\n",
+      "          15455.3613, 14908.4277,  3276.7131, 19288.6309,  4202.7832,\n",
+      "           3438.3303, 32066.8223],\n",
+      "          ...,\n",
+      "          [22778.0156,  6900.0283,  9241.6602,  4561.9268,  2106.0647,\n",
+      "          19660.1445, 23806.6230,  5017.7271, 13064.0859, 11235.6807,\n",
+      "           3884.1196,  5480.0996, 26156.9023,  1935.4945, 29130.4727,\n",
+      "          22405.7891,  1899.3651]]], device='cuda:0')\n",
+      "torch.Size([1, 50, 17])\n",
+      "Z1 shape in batch: torch.Size([1, 17, 1025])\n",
+      "Z2 shape in batch: torch.Size([1, 50, 1025])\n",
+      "1 17 1024\n",
+      "torch.Size([1, 17, 100])\n",
+      "1 50 1024\n",
+      "torch.Size([1, 50, 100])\n",
+      "torch.Size([1, 17, 50])\n",
+      "torch.Size([1, 17, 50])\n",
+      "Gia tri M: tensor([[[230, 233, 227, 228, 228, 229, 225, 228, 227, 225, 227, 229, 233, 226,\n",
+      "          227, 232, 224, 226, 224, 230, 228, 232, 232, 228, 233, 226, 229, 229,\n",
+      "          224, 232, 228, 228, 229, 227, 226, 226, 228, 226, 226, 224, 231, 229,\n",
+      "          233, 225, 233, 229, 230, 228, 232, 231],\n",
+      "          ...,\n",
+      "          [198, 201, 195, 196, 196, 197, 193, 196, 195, 193, 195, 197, 201, 194,\n",
+      "          195, 200, 192, 194, 192, 198, 196, 200, 200, 196, 201, 194, 197, 197,\n",
+      "          192, 200, 196, 196, 197, 195, 194, 194, 196, 194, 194, 192, 199, 197,\n",
+      "          201, 193, 201, 197, 198, 196, 200, 199]]], device='cuda:0')\n",
+      "torch.Size([1, 17, 50])\n",
+      "torch.Size([1, 17, 50])\n",
+      "gia tri D: tensor([[[ 2584.6584, 12525.3613,  9937.3975,  1771.5786,  2453.3091,\n",
+      "           1755.0695,  6191.5508, 17351.4395, 28327.1328,  9278.1426,\n",
+      "          19492.3359,  2584.8682,  2556.3853, 16640.1465, 24250.0664,\n",
+      "           4442.0425,  9180.2500, 13424.5615, 10078.0039,  4614.8691,\n",
+      "          10636.3691,  6037.8608,  6475.3940,  9422.5234, 13846.9131,\n",
+      "          13130.0986,  7798.4287,  2846.6514,  6118.2661,  2893.3823,\n",
+      "           3493.1528, 19074.5859,   954.1554, 21182.8047,  4110.1572,\n",
+      "          19630.2949, 15493.8242, 15484.5186, 24400.7305,  6554.6704,\n",
+      "          22128.5820,  2411.3779,  4441.8950,  8246.8828, 
13235.9736,\n", + " 6337.7861, 7066.8223, 15864.7949, 4625.7944, 22778.0156],\n", + " [15209.9395, 1772.0425, 6097.2256, 17763.0684, 22546.2363,\n", + " 9009.6211, 7430.3574, 6924.3169, 16634.0625, 2673.0000,\n", + " 8656.9668, 7551.0840, 17583.6016, 5588.0601, 8249.2734,\n", + " 10559.1699, 1551.8618, 4655.7212, 4912.1548, 9466.4277,\n", + " 3524.5386, 3087.7153, 2547.3853, 4395.0601, 2009.9438,\n", + " 7456.7974, 7498.2539, 26959.3164, 5516.6899, 5511.3423,\n", + " 26480.1934, 4402.5728, 11384.0195, 11028.1465, 7267.8784,\n", + " 2846.3940, 3760.9009, 1441.5181, 5217.3076, 2355.7114,\n", + " 2155.5857, 7673.4883, 4700.4429, 2595.7871, 2000.7896,\n", + " 7602.7324, 21392.3730, 3459.0825, 3528.0005, 6900.0283],\n", + " [ 8176.5547, 3562.7817, 2583.4045, 6989.6353, 10016.5527,\n", + " 2967.5854, 4428.4941, 8170.5981, 16003.4795, 3182.1680,\n", + " 7783.8047, 2526.7671, 7702.0679, 5983.5732, 9763.9512,\n", + " 4915.7886, 2876.6479, 4044.1211, 5413.9839, 6081.3359,\n", + " 3660.6919, 1494.5912, 1493.2181, 3591.7212, 4265.8608,\n", + " 5312.4512, 5251.7495, 13100.7109, 3791.6206, 1342.1674,\n", + " 12532.3027, 7850.3931, 3851.3979, 9771.0371, 1527.6387,\n", + " 6009.7393, 5829.2563, 3560.5078, 9403.2988, 2135.0132,\n", + " 6626.7261, 2547.8159, 1301.1088, 2675.0527, 3948.7378,\n", + " 4074.4937, 13815.4297, 5941.7427, 1184.8724, 9241.6602],\n", + " [32161.0352, 5578.2056, 13423.9473, 31628.3164, 39045.8086,\n", + " 21385.2734, 19306.7422, 7432.8252, 13159.0098, 10007.0967,\n", + " 7562.4551, 18224.3047, 33906.6602, 5494.4238, 5174.7427,\n", + " 24206.7031, 8243.1699, 6019.7554, 15202.8730, 23211.9590,\n", + " 9231.1230, 10232.1533, 10222.1377, 11564.1074, 4820.2261,\n", + " 9213.3535, 19130.2500, 46109.6953, 16976.8730, 15351.5381,\n", + " 43568.4023, 5178.9380, 24826.7109, 9495.5684, 16404.3906,\n", + " 1968.1357, 5837.8345, 3439.3218, 2288.0737, 10615.7031,\n", + " 1076.4935, 19119.2539, 12278.1328, 10181.9971, 4972.3647,\n", + " 14879.0645, 41244.7266, 6323.4849, 12292.4189, 4561.9268],\n", + " [27834.1055, 4860.2495, 11515.6348, 25033.7852, 31991.3867,\n", + " 17372.6621, 16882.5078, 5435.7241, 9145.0977, 8654.5850,\n", + " 4417.4458, 14314.1094, 28375.3848, 2760.9097, 3264.8477,\n", + " 20904.7383, 6906.8711, 3042.4321, 14599.6875, 20583.7148,\n", + " 8422.1270, 7463.3701, 8149.8369, 10619.0703, 4052.9712,\n", + " 5104.9067, 17967.6387, 38257.2500, 15021.6562, 11571.0674,\n", + " 35573.5430, 5131.9214, 19982.9629, 5831.7603, 12166.0371,\n", + " 1337.3960, 5198.9292, 3058.4790, 1639.9854, 8580.2207,\n", + " 1066.2504, 15491.5332, 8730.3438, 8530.3877, 4039.2876,\n", + " 10857.4336, 37645.5781, 6378.0776, 9534.7412, 2106.0647],\n", + " [ 2990.8381, 5341.5210, 5100.4990, 7341.6567, 8873.9180,\n", + " 1623.4757, 2648.0979, 13203.8340, 28666.1406, 3530.0295,\n", + " 17845.0508, 2257.7432, 5473.9399, 14067.7051, 19955.4102,\n", + " 2788.0112, 3321.8364, 11334.0068, 2350.3481, 1308.8616,\n", + " 3553.7212, 3322.1968, 2189.7261, 2621.8560, 6794.5767,\n", + " 13561.8994, 1260.3585, 10982.1631, 1410.5474, 2364.5894,\n", + " 12009.8008, 10517.7812, 2626.6475, 20709.6562, 4358.1416,\n", + " 12450.9980, 8235.2734, 7557.8623, 17328.9570, 2154.9673,\n", + " 13569.1836, 1455.6124, 3428.6841, 3123.4534, 6572.3442,\n", + " 6447.7393, 5690.0098, 7027.2290, 1786.3724, 19660.1445],\n", + " [ 3083.6206, 8240.2461, 6494.7739, 7880.9888, 9180.0156,\n", + " 3562.0535, 83.2082, 18136.5137, 34775.5703, 2563.5422,\n", + " 22170.5332, 5283.0322, 5841.7661, 18420.3320, 24468.1250,\n", + " 785.7383, 4427.2153, 
15170.3047, 2690.0200, 2857.7690,\n", + " 6762.6938, 5410.3535, 3800.5703, 4836.0903, 10335.0859,\n", + " 17386.3418, 3576.0535, 11399.2725, 1074.2192, 3166.1250,\n", + " 12090.5625, 15315.5195, 4421.4834, 25376.9316, 7008.7285,\n", + " 15971.7959, 12314.0391, 10594.8164, 21879.3828, 3203.7329,\n", + " 15780.3271, 3714.9714, 5410.1313, 2488.7356, 9813.6426,\n", + " 10385.3486, 4808.9956, 10578.1426, 3551.0840, 23806.6230],\n", + " [24056.8398, 3507.5161, 7806.5405, 23841.1484, 29597.5586,\n", + " 15251.4629, 13125.0596, 7550.5845, 14250.3691, 6539.6865,\n", + " 7224.8218, 13077.9609, 24295.5762, 4992.9595, 6016.9746,\n", + " 16984.2617, 5098.6797, 4640.0308, 9733.7617, 16916.5781,\n", + " 6048.5161, 6777.5068, 6419.6689, 7543.4497, 3334.7944,\n", + " 7664.1343, 13225.8262, 36004.5000, 10918.9180, 10350.8643,\n", + " 33831.0703, 4988.5396, 18074.2129, 9446.6289, 10169.9902,\n", + " 1909.5610, 4919.9907, 1534.9526, 3459.8926, 6505.4277,\n", + " 732.8959, 13529.2637, 8056.5405, 6536.8154, 3426.3315,\n", + " 11862.1621, 31294.8203, 4895.4829, 7813.4053, 5017.7271],\n", + " [ 7364.0854, 2486.8315, 6582.7778, 11652.6328, 15484.5742,\n", + " 4458.0576, 2422.9231, 8252.7480, 22031.7188, 105.8312,\n", + " 12652.4551, 4112.8213, 12118.2383, 9332.5068, 13241.3916,\n", + " 4876.1641, 669.9810, 7645.9473, 2883.9692, 3829.4175,\n", + " 2868.6392, 1664.9824, 919.8281, 2731.4146, 3345.9106,\n", + " 9990.9590, 4240.6357, 18591.1738, 2370.2192, 2322.6641,\n", + " 18844.0273, 6184.2427, 6218.7061, 15178.9570, 7184.1191,\n", + " 6942.9326, 4599.9106, 4594.8359, 10444.7705, 825.2290,\n", + " 7599.0537, 3668.7253, 3034.9282, 70.2043, 3065.9517,\n", + " 5538.6768, 12451.9375, 3996.7681, 1503.5801, 13064.0859],\n", + " [ 7356.2441, 2066.5132, 6657.1553, 10305.6367, 13984.1641,\n", + " 3053.7490, 5861.3047, 5709.6255, 17906.5508, 2859.0452,\n", + " 10263.2861, 1908.1144, 11228.1084, 7025.7168, 11294.0742,\n", + " 7229.2144, 1579.2388, 5599.9678, 4493.9458, 3405.7288,\n", + " 1931.6001, 1067.9584, 970.6166, 2517.7271, 2410.6274,\n", + " 7499.2354, 3635.8857, 16598.4492, 3982.0981, 2288.1011,\n", + " 17144.6953, 4480.3931, 4551.3467, 12322.3916, 5160.8096,\n", + " 6221.9316, 3214.7427, 4193.5342, 8952.8027, 1409.3657,\n", + " 8316.4395, 2192.5459, 2086.7153, 2399.6995, 2320.4575,\n", + " 2725.2822, 13375.3652, 3284.8149, 1082.4154, 11235.6807],\n", + " [15455.3633, 1775.9574, 5832.9492, 14458.3262, 19654.3145,\n", + " 7427.9702, 9633.5312, 3385.2466, 9442.2881, 4226.2363,\n", + " 3598.5706, 5290.5073, 16913.6035, 2036.8564, 3870.2546,\n", + " 11768.3311, 2866.7983, 1425.4746, 8402.9199, 10236.0488,\n", + " 3400.6470, 1874.4369, 2386.9780, 4813.5278, 1493.6166,\n", + " 3098.0039, 8769.5332, 23667.2773, 8029.4937, 4335.1382,\n", + " 22782.1816, 3156.0571, 9260.5352, 5174.0137, 5466.1836,\n", + " 1552.6533, 2436.9360, 1485.0293, 3006.8599, 3424.9136,\n", + " 2689.8628, 6144.7280, 2994.6968, 3830.2852, 1415.4916,\n", + " 3896.7026, 23618.5117, 3280.8188, 2933.9214, 3884.1196],\n", + " [14908.4277, 1882.0630, 6387.5264, 16338.4395, 21204.5449,\n", + " 8413.3594, 7903.8496, 5859.3101, 14131.7715, 3007.7227,\n", + " 7060.5127, 6695.3535, 16919.1562, 4255.3306, 7039.7241,\n", + " 10846.9746, 1648.7251, 3418.6401, 5863.2954, 9506.8340,\n", + " 3829.5308, 2463.2661, 2388.6665, 4905.5952, 1912.1655,\n", + " 5691.1206, 8221.0234, 25335.7422, 5988.6704, 4817.6587,\n", + " 24752.4902, 4370.3784, 10500.2930, 9081.6260, 6381.0620,\n", + " 2588.0825, 3653.9624, 1680.9780, 4607.1270, 2277.0708,\n", + " 2334.4910, 7133.3867, 
3895.9312, 2768.6699, 1870.9663,\n", + " 6213.1270, 21889.6191, 3850.3784, 3124.0786, 5480.0996],\n", + " [ 3276.7131, 8617.3652, 8869.7158, 12358.1348, 13513.6465,\n", + " 4930.1172, 1341.9628, 19986.4766, 38924.5352, 3254.9531,\n", + " 25990.7148, 6424.0566, 8273.4912, 21019.7812, 27705.6797,\n", + " 2696.7612, 3941.8179, 17876.4688, 1185.0327, 1760.5725,\n", + " 7350.7153, 6375.2983, 4345.3237, 5686.8540, 10660.3857,\n", + " 20877.4121, 2734.9453, 14905.6797, 676.1929, 4926.4995,\n", + " 17123.7090, 15669.9082, 6252.4219, 29527.0547, 9467.3145,\n", + " 17553.4062, 13181.3730, 11556.0078, 23436.0742, 2918.9097,\n", + " 16925.9668, 4842.7539, 7545.6455, 3185.0254, 10379.0986,\n", + " 11946.7051, 4253.8770, 10992.9531, 4291.9038, 26156.9023],\n", + " [19288.6289, 4197.7856, 10076.7148, 15556.6074, 21646.5566,\n", + " 9827.7012, 14238.0234, 2328.4048, 4937.8232, 7505.7109,\n", + " 1572.1506, 6793.0151, 20606.5488, 652.4385, 2223.8523,\n", + " 16345.7354, 5633.4976, 585.1367, 14045.5098, 14218.7949,\n", + " 6708.8833, 3198.3511, 4865.9546, 8841.4395, 3260.7085,\n", + " 1130.6016, 14107.9668, 25307.6133, 12764.2559, 6032.7710,\n", + " 23998.8535, 4564.8931, 11263.5566, 2265.3655, 7424.2305,\n", + " 2315.0107, 3912.5044, 4135.4961, 2548.2134, 6212.5620,\n", + " 4497.4624, 8469.7637, 4180.9272, 6859.6680, 3101.3843,\n", + " 3354.3159, 29965.7695, 6050.9712, 5172.6714, 1935.4945],\n", + " [ 4202.7832, 9801.7012, 9423.3174, 14562.9629, 15374.8418,\n", + " 6386.6875, 1950.0800, 22602.2266, 42932.8984, 4266.2812,\n", + " 29041.7578, 8178.5723, 9296.2100, 23799.8613, 30532.0781,\n", + " 3391.5425, 5065.7437, 20472.9414, 1115.0327, 2449.5022,\n", + " 8191.7466, 8133.1499, 5619.6089, 6299.9517, 12078.0771,\n", + " 23998.4434, 2802.8516, 16949.3984, 1094.6147, 6659.6011,\n", + " 19368.0918, 17170.9238, 7921.2109, 32948.5430, 11013.7598,\n", + " 19398.0312, 14729.8965, 12624.8008, 25724.9062, 4066.3433,\n", + " 18347.3145, 6292.2773, 9434.6221, 4290.5840, 11859.5049,\n", + " 14507.0410, 4154.6973, 12002.5234, 5684.7046, 29130.4727],\n", + " [ 3438.3303, 6814.3887, 8486.5830, 11690.4238, 13498.5605,\n", + " 4171.5156, 1357.0956, 16602.1641, 34198.4219, 2164.6211,\n", + " 22414.8711, 5112.5215, 8731.8818, 17680.7539, 23954.5195,\n", + " 2979.0269, 2538.4624, 14927.9883, 1186.7280, 1455.1428,\n", + " 6082.3794, 4599.9155, 3010.1558, 4880.4868, 8494.7529,\n", + " 17647.1309, 2737.9688, 14942.3906, 597.6694, 3696.0933,\n", + " 16968.3105, 13068.7988, 5532.3125, 25611.1172, 8511.4160,\n", + " 14778.0898, 10812.8105, 9775.5664, 19963.1348, 1747.6206,\n", + " 14554.4980, 3917.0625, 5944.1494, 2043.0839, 8223.3721,\n", + " 9502.8379, 5620.2598, 9145.0508, 3034.1929, 22405.7891],\n", + " [32066.8223, 6214.1685, 14525.5605, 29146.6094, 36779.7773,\n", + " 20620.5156, 20804.8828, 5529.1958, 8191.4414, 11148.3975,\n", + " 4544.8433, 16888.0996, 33080.7031, 3038.9263, 2948.3062,\n", + " 25335.7402, 8875.7188, 3787.2026, 17700.0254, 23800.8672,\n", + " 10306.3438, 9345.3701, 10329.3721, 13093.1230, 5003.0601,\n", + " 5865.2300, 21154.6582, 43262.5469, 18472.0371, 14353.1416,\n", + " 40491.3008, 5470.2539, 23467.8340, 5730.0991, 15029.6230,\n", + " 1775.9907, 5987.0923, 4345.5464, 1322.7104, 10899.8965,\n", + " 1816.1478, 18460.8711, 11027.4648, 10985.4932, 5088.2026,\n", + " 12361.9746, 42885.4297, 7539.4712, 11897.0439, 1899.3651]]],\n", + " device='cuda:0')\n", + "torch.Size([1, 17, 50])\n" + ] + } + ], + "source": [ + "F_i, G_j = loss(Z1, Z2)" + ] + }, + { + "cell_type": "code", + "execution_count": 163, + "id": 
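+  {
+   "cell_type": "markdown",
+   "id": "dual-potentials-note",
+   "metadata": {},
+   "source": [
+    "The two tensors returned above are the OT dual potentials, one per side of the transport problem: `F_i` pairs with `Z1` and `G_j` with `Z2` (the 50 embedded training points). The next cells package them as `dual_sol`, move them to the CPU, and use them to rank training points. The sketch below is an illustrative assumption, not part of the `lava` API: it shows the calibrated-gradient idea behind that ranking, where each point's potential is compared against the mean of the remaining potentials and the largest values are inspected first.\n",
+    "\n",
+    "```python\n",
+    "import torch\n",
+    "\n",
+    "# Illustrative sketch only. Assumption: G_j is the potential paired with Z2,\n",
+    "# i.e. one entry per embedded training point (50 entries here).\n",
+    "g = G_j.squeeze().detach().cpu().flatten()\n",
+    "n = g.numel()\n",
+    "# Calibrated gradient: own potential minus the mean of the other potentials.\n",
+    "calibrated = g * (1 + 1 / (n - 1)) - g.sum() / (n - 1)\n",
+    "suspect_order = torch.argsort(calibrated, descending=True)  # most suspicious first\n",
+    "```"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 163,
+   "id": 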
"84fd1daf", + "metadata": {}, + "outputs": [], + "source": [ + "π = [F_i, G_j]\n", + "dual_sol = π\n", + "for i in range(len(dual_sol)):\n", + " dual_sol[i] = dual_sol[i].to('cpu')" + ] + }, + { + "cell_type": "code", + "execution_count": 159, + "id": "ddbc1e66", + "metadata": {}, + "outputs": [], + "source": [ + "from lava import train_with_corrupt_flag, get_indices" + ] + }, + { + "cell_type": "code", + "execution_count": 160, + "id": "b6ce14ad", + "metadata": {}, + "outputs": [], + "source": [ + "train_indices = get_indices(loaders['train'])\n", + "trained_with_flag = train_with_corrupt_flag(loaders['train'], shuffle_ind, train_indices)" + ] + }, + { + "cell_type": "code", + "execution_count": 164, + "id": "42a4eb70", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "inspected: 10, found: 6 detection rate: 0.40 baseline: 1.8\n", + "inspected: 20, found: 8 detection rate: 0.53 baseline: 3.6\n", + "inspected: 30, found: 11 detection rate: 0.73 baseline: 5.4\n", + "inspected: 40, found: 14 detection rate: 0.93 baseline: 7.2\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAioAAAHFCAYAAADcytJ5AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA5PElEQVR4nO3dd3QU9f7/8deSDoEAgSRAGr1IC4QmaKQJubSINPEKEQERkAvRq4JfLmChqYDlKgpHuCoXEC5wVYqEFlRACdK+EvhaQpNA6GBoJvn8/uCXlSWFLCbshDwf5+w5zmdmZ977yaz7YuYzMzZjjBEAAIAFlXB1AQAAALkhqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqMCl5s+fL5vNZn95e3srKChIbdu21ZQpU5Samnrb6963b58mTpyogwcPFlzBTm4nNjZW4eHhhbp9Vzpx4oTGjRunxo0bq0yZMvL09FRwcLB69uypzz77TBkZGXekjk2bNslms2nTpk32tjvR98eOHdPEiRO1a9eufC2fVefSpUsLta47La9+mDhxomw2250vCncNggosYd68edq6davi4+P1z3/+U40bN9a0adNUt25drVu37rbWuW/fPk2aNOmOBJXctjN+/HgtX768ULfvKtu2bVODBg00Z84cde/eXYsWLdK6des0depUeXh4qGfPnpo/f77L6rsTfX/s2DFNmjQp30HlbpVXPwwePFhbt26980XhruHu6gIASapfv74iIyPt0w8//LDGjBmjNm3aqGfPnvrxxx8VGBjowgpvT/Xq1V1dQqE4d+6cYmJi5Ovrq2+++UaVKlVymP/Xv/5Ve/bs0enTp/Ncz+XLl+Xt7V0o/+K+W/u+qAkODlZwcLCry0ARxhEVWFZoaKjeeOMNXbx4Ue+//77DvMTERHXv3l3ly5eXt7e3IiIi9Omnn9rnz58/X71795YktW3b1n5q6cZ/4a9bt07t27dXmTJlVLJkSbVu3Vrr16/PVsf+/fv1yCOPKDAwUF5eXgoNDdWAAQN09erVW24np9MPV65c0dixY1W1alV5enqqSpUqGjFihM6dO+ewXHh4uLp27ao1a9aoSZMm8vHxUZ06dfThhx/m2W+///67AgIC9Nhjj2Wbd+7cOfn4+CguLk6SlJmZqVdeeUW1a9eWj4+PypYtq4YNG+rNN9/Mcxtz5szRiRMnNH369GwhJUvDhg3Vtm1b+3TWab61a9dq0KBBqlixokqWLKmrV6/qp59+0uOPP66aNWuqZMmSqlKlirp166a9e/dmW+/+/fvVuXNnlSxZUhUqVNCwYcN08eLFbMvl1PfGGL377rtq3LixfHx8VK5cOfXq1Uu//PKLw3IPPPCA6tevr+3bt+u+++5TyZIlVa1aNU2dOlWZmZmSrp/GadasmSTp8ccft//tJ06cmGff3Szr1MgPP/ygRx55RH5+fgoMDNSgQYN0/vx5h2WXLFmiFi1ayM/Pz17ToEGD7POzTi198skniouLU1BQkHx8fBQVFaWdO3dm2/atvkdZfv31Vw0dOlQhISHy9PRU5cqV1atXL504ceKW/ZDTqZ/MzExNnz5dderUkZeXlwICAjRgwAAdPXrU6b8DigEDuNC8efOMJLN9+/Yc5//222/Gzc3NtG/f3t62YcMG4+npae677z6zePFis2bNGhMbG2skmXnz5hljjElNTTWTJ082ksw///lPs3XrVrN161aTmppqjDHm448/NjabzcTExJhly5aZzz//3HTt2tW4ubmZdevW2be1a9cu4+vra8LDw83s2bPN+vXrzSeffGL69OljLly4cMvtDBw40ISFhdnXl5mZaTp16mTc3d3N+PHjzdq1a83rr79uSpUqZSIiIsyVK1fsy4aFhZng4GBTr14989FHH5kvv/zS9O7d20gyCQkJefbrmDFjjI+Pjzl//rxD+7vvvmskmT179hhjjJkyZYpxc3MzEyZMMOvXrzdr1qwxs2bNMhMnTsxz/R07djRubm4mLS0tz+VulPW3rlKlihk6dKhZvXq1Wbp0qUlPTzcJCQnmmWeeMUuXLjUJCQlm+fLlJiYmxvj4+Jj9+/fb13H8+HETEBBgqlSpYubNm2dWrVplHn30URMaGmokmY0bN9qXvbnvjTFmyJAhxsPDwzzzzDNmzZo15t///repU6eOCQwMNMePH7cvFxUVZfz9/U3NmjXN7NmzTXx8vBk+fLiRZP71r38ZY4w5f/68/TP9z//8j/1vf+TIkVz7YOPGjUaSWbJkib1twoQJRpKpXbu2+cc//mHi4+PNjBkzjJeXl3n88cfty23ZssXYbDbTr18/s2rVKrNhwwYzb94889hjj2Vbf0hIiOnRo4f5/PPPzSeffGJq1Khhyp
QpY37++Wf7svn5HhljzNGjR02lSpVMhQoVzIwZM8y6devM4sWLzaBBg0xSUtIt+yHr891o6NChRpIZOXKkWbNmjZk9e7apWLGiCQkJMSdPnnTq74C7H0EFLnWroGKMMYGBgaZu3br26Tp16piIiAjz+++/OyzXtWtXU6lSJZORkWGMMWbJkiXZfryMMSYtLc2UL1/edOvWzaE9IyPDNGrUyDRv3tze1q5dO1O2bFl78MhJbtsxJvuP5Zo1a4wkM336dIflFi9ebCSZDz74wN4WFhZmvL29zaFDh+xtly9fNuXLlzdPPvlkrvUYY8yePXuyrc8YY5o3b26aNm1qn+7atatp3LhxnuvKSZ06dUxQUFC29oyMDPP777/bX1l/C2P++FsPGDDglutPT083165dMzVr1jRjxoyxtz///PPGZrOZXbt2OSzfsWPHWwaVrVu3GknmjTfecHjvkSNHjI+Pj3nuuefsbVFRUUaS+fbbbx2WrVevnunUqZN9evv27dl+2POSV1C5eZ8YPny48fb2NpmZmcYYY15//XUjyZw7d+6W62/SpIn9fcYYc/DgQePh4WEGDx5sb8vv92jQoEHGw8PD7Nu3L9ft5tUPNweVpKQkI8kMHz7cYblvv/3WSDLjxo2zt+X374C7G6d+YHnGGPt///TTT9q/f78effRRSVJ6err99Ze//EUpKSk6cOBAnuvbsmWLzpw5o4EDBzq8PzMzU507d9b27duVlpamS5cuKSEhQX369FHFihUL5LNs2LBB0vXTEjfq3bu3SpUqle3UU+PGjRUaGmqf9vb2Vq1atXTo0KE8t9OgQQM1bdpU8+bNs7clJSXpu+++czhV0Lx5c+3evVvDhw/Xl19+qQsXLtzuR5MkxcXFycPDw/7q3r17tmUefvjhbG3p6emaPHmy6tWrJ09PT7m7u8vT01M//vijkpKS7Mtt3LhR99xzjxo1auTw/v79+9+yti+++EI2m01//etfHf7uQUFBatSokcMVQ5IUFBSk5s2bO7Q1bNjwln1/u27uq4YNG+rKlSv2K9+yTq/06dNHn376qX799ddc19W/f3+H0y1hYWG69957tXHjRknOfY9Wr16ttm3bqm7dugXyObNquPk70Lx5c9WtWzfbd+BO/x1gPQQVWFpaWppOnz6typUrS7p+OawkPfvssw4/iB4eHho+fLgk6dSpU3muM2sdvXr1yraOadOmyRijM2fO6OzZs8rIyCjQgYCnT5+Wu7t7tuBjs9kUFBSUbfCpv79/tnV4eXnp8uXLt9zWoEGDtHXrVu3fv1/S9SurvLy89Mgjj9iXGTt2rF5//XVt27ZN0dHR8vf3V/v27ZWYmJjnukNDQ3Xy5EldunTJof2ZZ57R9u3btX379lzHruTUHhcXp/HjxysmJkaff/65vv32W23fvl2NGjVy+KynT59WUFBQtvfn1HazEydOyBijwMDAbH/3bdu2Zdtv/kzf346bt+fl5SVJ9u3df//9WrFihdLT0zVgwAAFBwerfv36WrhwYbZ15dZHWfuXM9+jkydPFvh3QMp5P6hcuXKBfgdwd+CqH1jaypUrlZGRoQceeECSVKFCBUnXf2B79uyZ43tq166d5zqz1vH222+rZcuWOS4TGBiojIwMubm5ZRvg92f4+/srPT1dJ0+edAgrxhgdP37c/q/mgvDII48oLi5O8+fP16uvvqqPP/5YMTExKleunH0Zd3d3xcXFKS4uTufOndO6des0btw4derUSUeOHFHJkiVzXHfHjh21du1arVq1Sr169bK3h4SEKCQkRJLk6emZ43tzusLnk08+0YABAzR58mSH9lOnTqls2bL2aX9/fx0/fjzb+3Nqu1mFChVks9n01Vdf2UPAjXJqs5oePXqoR48eunr1qrZt26YpU6aof//+Cg8PV6tWrezL5dZHWT/6znyPKlasWODfAUlKSUnJFoCOHTtmrw3IwhEVWNbhw4f17LPPys/PT08++aSk6//zrFmzpnbv3q3IyMgcX6VLl5aU/V+kWVq3bq2yZctq3759ua7D09PTfrXEkiVL8jxKk9t2ctK+fXtJ13+Yb/Sf//xHaWlp9vkFoVy5coqJidFHH32kL774QsePH3c47XOzsmXLqlevXhoxYoTOnDmT5/1nBg8erMDAQD333HNKSUn507XabLZsQWHlypXZTm+0bdtWP/zwg3bv3u3Q/u9///uW2+jatauMMfr1119z/Js3aNDA6bqd+dsXJC8vL0VFRWnatGmSlO2KnoULFzqcMj106JC2bNliD/zOfI+io6O1cePGPE+pOtMP7dq1k5T9O7B9+3YlJSUV6HcAdweOqMAS/vd//9d+jjw1NVVfffWV5s2bJzc3Ny1fvtzh6MP777+v6OhoderUSbGxsapSpYrOnDmjpKQkff/991qyZImk6/dmkaQPPvhApUuXlre3t6pWrSp/f3+9/fbbGjhwoM6cOaNevXopICBAJ0+e1O7du3Xy5Em99957kqQZM2aoTZs2atGihV544QXVqFFDJ06c0Geffab3339fpUuXznM7N+vYsaM6deqk559/XhcuXFDr1q21Z88eTZgwQRERETleUvxnDBo0SIsXL9bIkSMVHBysDh06OMzv1q2b/R42FStW1KFDhzRr1iyFhYWpZs2aua63bNmyWrFihbp166ZGjRrpqaeeUsuWLeXr66vTp09r8+bNOn78uO6999581dm1a1fNnz9fderUUcOGDbVjxw699tpr2f7FPXr0aH344Yfq0qWLXnnlFQUGBmrBggX201t5ad26tYYOHarHH39ciYmJuv/++1WqVCmlpKTo66+/VoMGDfTUU0/lq94s1atXl4+PjxYsWKC6devK19dXlStXtp+qLEj/+Mc/dPToUbVv317BwcE6d+6c3nzzTXl4eCgqKsph2dTUVD300EMaMmSIzp8/rwkTJsjb21tjx461L5Pf79FLL72k1atX6/7779e4cePUoEEDnTt3TmvWrFFcXJzq1KnjVD/Url1bQ4cO1dtvv60SJUooOjpaBw8e1Pjx4xUSEqIxY8YUeN+hiHPlSF4g60qQrJenp6cJCAgwUVFRZvLkyblebbN7927Tp08fExAQYDw8PExQUJBp166dmT17tsNys2bNMlWrVjVubm7ZrkpISEgwXbp0MeXLlzceHh6mSpUqpkuXLg5XZBhjzL59+0zv3r2Nv7+/8fT0NKGhoSY2NtbhUuLctpPTJbKXL182zz//vAkLCzMeHh6mUqVK5qmnnjJnz551WC4sLMx06dIl22ePiooyUVFReXfs/5eRkWFCQkKMJPPiiy9mm//GG2+Ye++911SoUMH+2Z544glz8ODBfK3/+PHjZuzYsaZhw4amVKlSxsPDw1SuXNl069bNfPTRRw5XlOR1hdfZs2fNE088YQICAkzJkiVNmzZtzFdffZXjZ923b5/p2LGj8fb2NuXLlzdPPPGE+e9//5uvy5ONMebDDz80LVq0MKVKlTI+Pj6mevXqZsCAASYxMdG+TFRUlLnnnnuyvTendS5cuNDUq
VPHeHh4GElmwoQJufZXXlf93HhZ7o39lZycbIwx5osvvjDR0dGmSpUq9u/JX/7yF/PVV19lW//HH39sRo0aZSpWrGi8vLzMfffd5/D5suT3e3TkyBEzaNAgExQUZP8b9+nTx5w4ceKW/ZDT5ckZGRlm2rRpplatWsbDw8NUqFDB/PWvf812abczfwfcvWzG3HB8EABQZG3atElt27bVkiVLHMYOAUUZY1QAAIBlEVQAAIBlceoHAABYFkdUAACAZRFUAACAZRFUAACAZRXpG75lZmbq2LFjKl26dI635QYAANZjjNHFixdVuXJllSiR9zGTIh1Ujh07Zn+uCAAAKFqOHDlyy4deFumgkvUsiiNHjqhMmTIurgYAAOTHhQsXFBISYv8dz0uRDipZp3vKlClDUAEAoIjJz7ANBtMCAADLIqgAAADLIqgAAADLIqgAAADLIqgAAADLIqgAAADLIqgAAADLIqgAAADLKtI3fAMAAIVn5+GzSj6VpqoVSikitJxLaiCoAACAbKauTtLshF/s08OiqumF6Lp3vA5O/QAAAAc7D591CCmSNDvhF+08fPaO10JQAQAADpJPpTnVXpgIKgAAwEHVCqWcai9MBBUAAOAgIrSchkVVc2h7KqqaSwbUMpgWAABk80J0XXW6J4irfgAAgDVFhJZzWUDJwqkfAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWQQVAABgWe6uLgAAAFjU0UTp9E+Sfw0pONIlJRBUAABAdvETpG9m/THderTUcdIdL4NTPwAAwNHRRMeQIl2fPpp4x0shqAAAAEenf3KuvRARVAAAgCP/Gs61FyKCCgAAcBQceX1Myo1aj3HJgFoG0wIAgOw6TpLqduOqHwAAYFHBkS4LKFk49QMAACyLoAIAACyLoAIAACyLoAIAACyLoAIAACyLoAIAACyLoAIAACyLoAIAACyLoAIAACyLoAIAACyLoAIAACyLoAIAACyLoAIAACyLoAIAACyLoAIAACyLoAIAACyLoAIAACzL3dUFAAD+nJ2Hzyr5VJqqViiliNByri4HKFAEFQAowqauTtLshF/s08OiqumF6LourAgoWJz6AYAiaufhsw4hRZJmJ/yinYfPuqgioOARVACgiEo+leZUO1AUEVQAoIiqWqGUU+1AUURQAYAiKiK0nIZFVXNoeyqqGgNqcVdhMC0AFGEvRNdVp3uCuOoHdy2CCgAUcRGh5QgouGtx6gcAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFhWgQSVc+fOFcRqAAAAHDgdVKZNm6bFixfbp/v06SN/f39VqVJFu3fvLtDiAABA8eZ0UHn//fcVEhIiSYqPj1d8fLxWr16t6Oho/f3vfy/wAgEAQPHl7uwbUlJS7EHliy++UJ8+ffTggw8qPDxcLVq0KPACAQBA8eX0EZVy5crpyJEjkqQ1a9aoQ4cOkiRjjDIyMgq2OgAAUKw5fUSlZ8+e6t+/v2rWrKnTp08rOjpakrRr1y7VqFGjwAsEAADFl9NBZebMmQoPD9eRI0c0ffp0+fr6Srp+Smj48OEFXiAAACi+bMYY4+oibteFCxfk5+en8+fPq0yZMq4uBwAA5IMzv9+3dR+Vjz/+WG3atFHlypV16NAhSdKsWbP03//+93ZWBwAAkCOng8p7772nuLg4RUdH69y5c/YBtGXLltWsWbMKuj4AAFCMOR1U3n77bc2ZM0cvvvii3Nzc7O2RkZHau3dvgRYHAACKN6cH0yYnJysiIiJbu5eXl9LS0gqkKACAE44mSqd/kvxrSMGRrq4GKFBOB5WqVatq165dCgsLc2hfvXq16tWrV2CFAQDyIX6C9M2sP6Zbj5Y6TnJVNUCBczqo/P3vf9eIESN05coVGWP03XffaeHChZoyZYrmzp1bGDUCAHJyNNExpEjXp+t248gK7hpOB5XHH39c6enpeu6553Tp0iX1799fVapU0Ztvvql+/foVRo0AgJyc/in3doIK7hJOBxVJGjJkiIYMGaJTp04pMzNTAQEBBV0XAOBW/HO5G3hu7UARdFv3UclSoUIFQgoAuEpw5PUxKTdqPYajKbirOH1EJSIiQjabLVu7zWaTt7e3atSoodjYWLVt27ZACgQA5KHjpOtjUrjqB3cpp4+odO7cWb/88otKlSqltm3b6oEHHpCvr69+/vlnNWvWTCkpKerQoQN3qQWAOyU4UmrUj5CCu5LTR1ROnTqlZ555RuPHj3dof+WVV3To0CGtXbtWEyZM0Msvv6wePXoUWKEAAKD4cfqhhH5+ftqxY4dq1HAcrPXTTz+padOmOn/+vPbv369mzZrp4sWLBVrszXgoIQAARU+hPpTQ29tbW7Zsyda+ZcsWeXt7S5IyMzPl5eXl7KoBAAAcOH3q5+mnn9awYcO0Y8cONWvWTDabTd99953mzp2rcePGSZK+/PLLHG+zDwAA4AynT/1I0oIFC/TOO+/owIEDkqTatWvr6aefVv/+/SVJly9ftl8FVJg49QMAQNHjzO/3bQUVqyCoAABQ9BTqGBUAAIA7xekxKhkZGZo5c6Y+/fRTHT58WNeuXXOYf+bMmQIrDgAAFG9OH1GZNGmSZsyYoT59+uj8+fOKi4tTz549VaJECU2cOLEQSgQAAMWV00FlwYIFmjNnjp599lm5u7vrkUce0dy5c/WPf/xD27ZtK4waAQBAMeV0UDl+/LgaNGggSfL19dX58+clSV27dtXKlSsLtjoAAFCsOR1UgoODlZKSIkmqUaOG1q5dK0navn07N3kDAAAFyumg8tBDD2n9+vWSpL/97W8aP368atasqQEDBmjQoEEFXiAAACi+/vR9VLZt26YtW7aoRo0a6t69e0HVlS/cRwUAgKLHmd9vpy9PvlnLli3VsmXLP7saAACAbG4rqPz666/65ptvlJqaqszMTId5o0aNKpDCAAAAnA4q8+bN07Bhw+Tp6Sl/f3/ZbDb7PJvNRlABAAAFxukxKiEhIRo2bJjGjh2rEiVcewd+xqgAAFD0FOoYlUuX
Lqlfv34uDylAUbTz8Fkln0pT1QqlFBFaztXlAIDlOZ02nnjiCS1ZsqQwagHualNXJ+mhd7co7tPdeujdLZq6OsnVJQGA5Tl96icjI0Ndu3bV5cuX1aBBA3l4eDjMnzFjRoEWmBdO/aCo2Hn4rB56d0u29uXD7+XICoBip1BP/UyePFlffvmlateuLUnZBtMCyC75VFqu7QQVAMid00FlxowZ+vDDDxUbG1sI5QB3p6oVSjnVDgC4zukxKl5eXmrdunVh1ALctSJCy2lYVDWHtqeiqnE0BQBuwekxKlOmTFFKSoreeuutwqop3xijgqKGq34AoJDHqHz33XfasGGDvvjiC91zzz3ZBtMuW7bM2VUCxUZEaDkCCgA4wemgUrZsWfXs2bMwagEAAHBwW7fQBwAAuBO4vSwAALCsfB9RiYiIyNd9Ur7//vs/VRAAAECWfAeVmJiYQiwDAAAgO6cvT7YSLk8GAKDoceb3mzEqAADAsggqAADAsggqAADAsggqAADAsggqAADAsvJ1ebIzDyAcNWrUbRcDAABwo3xdnly1alWH6ZMnT+rSpUsqW7asJOncuXMqWbKkAgIC9MsvvxRKoTnh8mQAAIqeAr88OTk52f569dVX1bhxYyUlJenMmTM6c+aMkpKS1KRJE7388ssF8gEAAACk27jhW/Xq1bV06VJFREQ4tO/YsUO9evVScnJygRaYF46oAABQ9BTqDd9SUlL0+++/Z2vPyMjQiRMnnF0dAABArpwOKu3bt9eQIUOUmJiorIMxiYmJevLJJ9WhQ4cCLxAAABRfTgeVDz/8UFWqVFHz5s3l7e0tLy8vtWjRQpUqVdLcuXMLo0YAAFBM5fvpyVkqVqyoVatW6f/+7/+0f/9+GWNUt25d1apVqzDqAwAAxZjTQSVLeHi4jDGqXr263N1vezUAAAC5cvrUz6VLl/TEE0+oZMmSuueee3T48GFJ12/0NnXq1AIvEAAAFF9OB5WxY8dq9+7d2rRpk7y9ve3tHTp00OLFiwu0OAAAULw5fc5mxYoVWrx4sVq2bCmbzWZvr1evnn7++ecCLQ4AABRvTh9ROXnypAICArK1p6WlOQQXAACAP8vpoNKsWTOtXLnSPp0VTubMmaNWrVoVXGUAAKDYc/rUz5QpU9S5c2ft27dP6enpevPNN/XDDz9o69atSkhIKIwaAQBAMeX0EZV7771X33zzjS5duqTq1atr7dq1CgwM1NatW9W0adPCqBEAABRTTj+U0Ep4KCEAAEVPoT6U0M3NTampqdnaT58+LTc3N2dXBwAAkCung0puB2CuXr0qT0/PP10QAABAlnwPpn3rrbckXb/KZ+7cufL19bXPy8jI0ObNm1WnTp2CrxAAABRb+Q4qM2fOlHT9iMrs2bMdTvN4enoqPDxcs2fPLvgKAQBAsZXvoJKcnCxJatu2rZYtW6Zy5coVWlEAAADSbdxHZePGjYVRBwAAQDZOD6bt1atXjk9Jfu2119S7d+8CKQoAAEC6jaCSkJCgLl26ZGvv3LmzNm/eXCBFAQAASLcRVH777bccL0P28PDQhQsXCqQoAAAA6TaCSv369bV48eJs7YsWLVK9evUKpCgAAADpNgbTjh8/Xg8//LB+/vlntWvXTpK0fv16LVy4UEuWLCnwAgEAQPHldFDp3r27VqxYocmTJ2vp0qXy8fFRw4YNtW7dOkVFRRVGjQAAoJjioYQAAOCOKtSHEkrSuXPnNHfuXI0bN05nzpyRJH3//ff69ddfb2d1AAAAOXL61M+ePXvUoUMH+fn56eDBgxo8eLDKly+v5cuX69ChQ/roo48Ko847bufhs0o+laaqFUopIpS78KKAHE2UTv8k+deQgiNdXQ0AWJ7TQSUuLk6xsbGaPn26SpcubW+Pjo5W//79C7Q4V5m6OkmzE36xTw+LqqYXouu6sCLcFeInSN/M+mO69Wip4yRXVQMARYLTp362b9+uJ598Mlt7lSpVdPz48QIpypV2Hj7rEFIkaXbCL9p5+KyLKsJd4WiiY0iRrk8fTXRFNQBQZDgdVLy9vXO8sduBAwdUsWLFAinKlZJPpTnVDuTL6Z+cawcASLqNoNKjRw+99NJL+v333yVJNptNhw8f1gsvvKCHH364wAu806pWKOVUO5Av/jWcawcASLqNoPL666/r5MmTCggI0OXLlxUVFaUaNWqodOnSevXVVwujxjsqIrSchkVVc2h7KqoaA2rx5wRHXh+TcqPWYxhQCwC3cNv3UdmwYYO+//57ZWZmqkmTJurQoUNB13ZLhXkfFa76QaHgqh8AcOr32+mg8tFHH6lv377y8vJyaL927ZoWLVqkAQMGOF/xbeKGbwAAFD2FGlTc3NyUkpKigIAAh/bTp08rICBAGRkZzld8mwgqAAAUPYV6Z1pjjGw2W7b2o0ePys/Pz9nVAQAA5CrfN3yLiIiQzWaTzWZT+/bt5e7+x1szMjKUnJyszp07F0qRAACgeMp3UImJiZEk7dq1S506dZKvr699nqenp8LDw++Ky5MBAIB15DuoTJgwQZIUHh6uvn37ytvbu9CKAgAAkG5jjMrAgQN15coVzZ07V2PHjuXpyQAAoND86acnDxky5K58ejIAAHA9p4+ojBkzRrGxsfrxxx8dTv9ER0dr8+bNBVocAAAo3pw+opKYmKgPPvggW/vd8vRkAABgHTw9GQAAWBZPTwYAAJbF05MBAIBlOT1GpUyZMvr6668t8fRkAABwd3P6oYRWwkMJAQAoepz5/XbqiEpmZqbmz5+vZcuW6eDBg7LZbKpatap69eqlxx57LMeHFQIAANyufI9RMcaoe/fuGjx4sH799Vc1aNBA99xzjw4dOqTY2Fg99NBDhVknAAAohvJ9RGX+/PnavHmz1q9fr7Zt2zrM27Bhg2JiYvTRRx9pwIABBV4kAAAonvJ9RGXhwoUaN25ctpAiSe3atdMLL7ygBQsWFGhxAACgeMt3UNmzZ486d+6c6/zo6Gjt3r27QIoCAACQnAgqZ86cUWBgYK7zAwMDdfbs2QIpCgAAQHIiqGRkZMjdPfchLW5ubkpPTy+QogAAACQnBtMaYxQbGysvL68c51+9erXAigIAAJCcCCoDBw685TJc8QMAAApSvoPKvHnzCrMOAACAbJx+KCEAAMCdQlABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACW5e7qAizraKJ0+ifJv4YUHOnqagAAKJYIKjmJnyB9M+u
P6dajpY6TXFUNAADFFqd+bnY00TGkSNenjya6ohoAAIo1gsrNTv/kXDsAACg0BJWb+ddwrh0AABQagsrNgiOvj0m5UesxDKgFAMAFGEybk46TpLrduOoHAAAXI6jkJjiSgAIAgItx6gcAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFgWQQUAAFiWS4PK5s2b1a1bN1WuXFk2m00rVqxwZTkAAMBiXBpU0tLS1KhRI73zzjuuLAMAAFiUuys3Hh0drejoaFeWAAAALMylQcVZV69e1dWrV+3TFy5ccGE1AACgsBWpwbRTpkyRn5+f/RUSEuLqkgAAQCEqUkFl7NixOn/+vP115MgRV5cEAAAKUZE69ePl5SUvLy9XlwEAAO6QInVEBQAAFC8uPaLy22+/6aeffrJPJycna9euXSpfvrxCQ0NdWBkAALAClwaVxMREtW3b1j4dFxcnSRo4cKDmz5/voqoAAIBVuDSoPPDAAzLGuLIEAABgYYxRAQAAlkVQAQAAlkVQAQAAlkVQAQAAlkVQAQAAlkVQAQAAlkVQAQAAlkVQAQAAlkVQAQAAlkVQAQAAlkVQAQAAlkVQAQAAlkVQAQAAlkVQAQAAluXyoPLuu++qatWq8vb2VtOmTfXVV1+5uiQAAGARLg0qixcv1ujRo/Xiiy9q586duu+++xQdHa3Dhw+7siwAAGARNmOMcdXGW7RooSZNmui9996zt9WtW1cxMTGaMmXKLd9/4cIF+fn56fz58ypTpkxhlgoAAAqIM7/fLjuicu3aNe3YsUMPPvigQ/uDDz6oLVu2uKgqAABgJe6u2vCpU6eUkZGhwMBAh/bAwEAdP348x/dcvXpVV69etU+fP39e0vVkBgAAioas3+38nNRxWVDJYrPZHKaNMdnaskyZMkWTJk3K1h4SElIotQEAgMJz8eJF+fn55bmMy4JKhQoV5Obmlu3oSWpqarajLFnGjh2ruLg4+3RmZqbOnDkjf3//XMPN7bpw4YJCQkJ05MgRxr/cAn2Vf/RV/tFX+Udf5R995ZzC6i9jjC5evKjKlSvfclmXBRVPT081bdpU8fHxeuihh+zt8fHx6tGjR47v8fLykpeXl0Nb2bJlC7NMlSlThp05n+ir/KOv8o++yj/6Kv/oK+cURn/d6khKFpee+omLi9Njjz2myMhItWrVSh988IEOHz6sYcOGubIsAABgES4NKn379tXp06f10ksvKSUlRfXr19eqVasUFhbmyrIAAIBFuHww7fDhwzV8+HBXl5GNl5eXJkyYkO1UE7Kjr/KPvso/+ir/6Kv8o6+cY4X+cukN3wAAAPLi8mf9AAAA5IagAgAALIugAgAALIugAgAALKtYB5XNmzerW7duqly5smw2m1asWOEw3xijiRMnqnLlyvLx8dEDDzygH374wTXFWsCt+is2NlY2m83h1bJlS9cU60JTpkxRs2bNVLp0aQUEBCgmJkYHDhxwWIZ967r89BX71XXvvfeeGjZsaL/xVqtWrbR69Wr7fPYpR7fqL/arnE2ZMkU2m02jR4+2t7l63yrWQSUtLU2NGjXSO++8k+P86dOna8aMGXrnnXe0fft2BQUFqWPHjrp48eIdrtQabtVfktS5c2elpKTYX6tWrbqDFVpDQkKCRowYoW3btik+Pl7p6el68MEHlZaWZl+Gfeu6/PSVxH4lScHBwZo6daoSExOVmJiodu3aqUePHvYfDPYpR7fqL4n96mbbt2/XBx98oIYNGzq0u3zfMjDGGCPJLF++3D6dmZlpgoKCzNSpU+1tV65cMX5+fmb27NkuqNBabu4vY4wZOHCg6dGjh0vqsbLU1FQjySQkJBhj2LfycnNfGcN+lZdy5cqZuXPnsk/lU1Z/GcN+dbOLFy+amjVrmvj4eBMVFWX+9re/GWOs8f+rYn1EJS/Jyck6fvy4HnzwQXubl5eXoqKitGXLFhdWZm2bNm1SQECAatWqpSFDhig1NdXVJbnc+fPnJUnly5eXxL6Vl5v7Kgv7laOMjAwtWrRIaWlpatWqFfvULdzcX1nYr/4wYsQIdenSRR06dHBot8K+5fI701pV1lOdb36Sc2BgoA4dOuSKkiwvOjpavXv3VlhYmJKTkzV+/Hi1a9dOO3bsKLZ3gTTGKC4uTm3atFH9+vUlsW/lJqe+ktivbrR37161atVKV65cka+vr5YvX6569erZfzDYpxzl1l8S+9WNFi1apO+//17bt2/PNs8K/78iqNyCzWZzmDbGZGvDdX379rX/d/369RUZGamwsDCtXLlSPXv2dGFlrjNy5Ejt2bNHX3/9dbZ57FuOcusr9qs/1K5dW7t27dK5c+f0n//8RwMHDlRCQoJ9PvuUo9z6q169euxX/9+RI0f0t7/9TWvXrpW3t3euy7ly3+LUTy6CgoIk/ZEms6SmpmZLlshZpUqVFBYWph9//NHVpbjE008/rc8++0wbN25UcHCwvZ19K7vc+ionxXm/8vT0VI0aNRQZGakpU6aoUaNGevPNN9mncpFbf+WkuO5XO3bsUGpqqpo2bSp3d3e5u7srISFBb731ltzd3e37jyv3LYJKLqpWraqgoCDFx8fb265du6aEhATde++9Lqys6Dh9+rSOHDmiSpUqubqUO8oYo5EjR2rZsmXasGGDqlat6jCffesPt+qrnBTX/SonxhhdvXqVfSqfsvorJ8V1v2rfvr327t2rXbt22V+RkZF69NFHtWvXLlWrVs31+9YdGbJrURcvXjQ7d+40O3fuNJLMjBkzzM6dO82hQ4eMMcZMnTrV+Pn5mWXLlpm9e/eaRx55xFSqVMlcuHDBxZW7Rl79dfHiRfPMM8+YLVu2mOTkZLNx40bTqlUrU6VKlWLXX0899ZTx8/MzmzZtMikpKfbXpUuX7Muwb113q75iv/rD2LFjzebNm01ycrLZs2ePGTdunClRooRZu3atMYZ96mZ59Rf7Vd5uvOrHGNfvW8U6qGzcuNFIyvYaOHCgMeb6ZVkTJkwwQUFBxsvLy9x///1m7969ri3ahfLqr0uXLpkHH3zQVKxY0Xh4eJjQ0FAzcOBAc/jwYVeXfcfl1EeSzLx58+zLsG9dd6u+Yr/6w6BBg0xYWJjx9PQ0FStWNO3bt7eHFGPYp26WV3+xX+Xt5qDi6n3LZowxd+bYDQAAgHMYowIAACyLoAIAACyLoAIAACyLoAIAACyLoAIAACyLoAIAACyLoAIAACyLoAKgyAgPD9esWbNcXQaAO4igAhQhsbGxiomJcXUZTrmT4W
LixIlq3LjxHdkWgDuDoAIAACyLoAIUYQ888IBGjRql5557TuXLl1dQUJAmTpzosMzEiRMVGhoqLy8vVa5cWaNGjbLPCw8P18svv6z+/fvL19dXlStX1ttvv+3w/vPnz2vo0KEKCAhQmTJl1K5dO+3evdthmc8++0yRkZHy9vZWhQoV1LNnT3t9hw4d0pgxY2Sz2WSz2ezv2bJli+6//375+PgoJCREo0aNUlpamn1+amqqunXrJh8fH1WtWlULFixwun+yjkBNnjxZgYGBKlu2rCZNmqT09HT9/e9/V/ny5RUcHKwPP/zQ4X3PP/+8atWqpZIlS6patWoaP368fv/9d4dlXnnlFQUEBKh06dIaPHiwXnjhhWxHc+bNm6e6devK29tbderU0bvvvmufd+3aNY0cOVKVKlWSt7e3wsPDNWXKFKc/I3C3I6gARdy//vUvlSpVSt9++62mT5+ul156yf5I9qVLl2rmzJl6//339eOPP2rFihVq0KCBw/tfe+01NWzYUN9//73Gjh2rMWPG2N9vjFGXLl10/PhxrVq1Sjt27FCTJk3Uvn17nTlzRpK0cuVK9ezZU126dNHOnTu1fv16RUZGSpKWLVum4OBgvfTSS0pJSVFKSookae/everUqZN69uypPXv2aPHixfr66681cuRIe12xsbE6ePCgNmzYoKVLl+rdd99Vamqq0/2zYcMGHTt2TJs3b9aMGTM0ceJEde3aVeXKldO3336rYcOGadiwYTpy5Ij9PaVLl9b8+fO1b98+vfnmm5ozZ45mzpxpn79gwQK9+uqrmjZtmnbs2KHQ0FC99957DtudM2eOXnzxRb366qtKSkrS5MmTNX78eP3rX/+SJL311lv67LPP9Omnn+rAgQP65JNPFB4e7vTnA+56d+zxhwD+tIEDB5oePXrYp6OiokybNm0clmnWrJl5/vnnjTHGvPHGG6ZWrVrm2rVrOa4vLCzMdO7c2aGtb9++Jjo62hhjzPr1602ZMmXMlStXHJapXr26ef/9940xxrRq1co8+uijudYcFhZmZs6c6dD22GOPmaFDhzq0ffXVV6ZEiRLm8uXL5sCBA0aS2bZtm31+UlKSkZRtXTeaMGGCadSokX164MCBJiwszGRkZNjbateube677z77dHp6uilVqpRZuHBhruudPn26adq0qX26RYsWZsSIEQ7LtG7d2mHbISEh5t///rfDMi+//LJp1aqVMcaYp59+2rRr185kZmbmul0AxnBEBSjiGjZs6DBdqVIl+5GH3r176/Lly6pWrZqGDBmi5cuXKz093WH5Vq1aZZtOSkqSJO3YsUO//fab/P395evra38lJyfr559/liTt2rVL7du3d6rmHTt2aP78+Q7r7NSpkzIzM5WcnKykpCS5u7vbj8xIUp06dVS2bFmntiNJ99xzj0qU+ON/dYGBgQ5Hldzc3OTv7+9wtGbp0qVq06aNgoKC5Ovrq/Hjx+vw4cP2+QcOHFDz5s0dtnPj9MmTJ3XkyBE98cQTDp/xlVdesfdbbGysdu3apdq1a2vUqFFau3at058NKA7cXV0AgD/Hw8PDYdpmsykzM1OSFBISogMHDig+Pl7r1q3T8OHD9dprrykhISHb+25ehyRlZmaqUqVK2rRpU7ZlskKDj4+P0zVnZmbqySefdBgvkyU0NFQHDhxwqOPPyKl/8uqzbdu2qV+/fpo0aZI6deokPz8/LVq0SG+88Ua299zIGGP/76x1zZkzRy1atHBYzs3NTZLUpEkTJScna/Xq1Vq3bp369OmjDh06aOnSpX/i0wJ3H4IKcJfz8fFR9+7d1b17d40YMUJ16tTR3r171aRJE0nXf5hvtG3bNtWpU0fS9R/T48ePy93dPdfxEw0bNtT69ev1+OOP5zjf09NTGRkZDm1NmjTRDz/8oBo1auT4nrp16yo9PV2JiYn2IxUHDhzQuXPn8vuxb9s333yjsLAwvfjii/a2Q4cOOSxTu3Ztfffdd3rsscfsbYmJifb/DgwMVJUqVfTLL7/o0UcfzXVbZcqUUd++fdW3b1/16tVLnTt31pkzZ1S+fPkC/ERA0UZQAe5i8+fPV0ZGhlq0aKGSJUvq448/lo+Pj8LCwuzLfPPNN5o+fbpiYmIUHx+vJUuWaOXKlZKkDh06qFWrVoqJidG0adNUu3ZtHTt2TKtWrVJMTIwiIyM1YcIEtW/fXtWrV1e/fv2Unp6u1atX67nnnpN0/cqizZs3q1+/fvLy8lKFChX0/PPPq2XLlhoxYoSGDBmiUqVKKSkpSfHx8Xr77bdVu3Ztde7cWUOGDNEHH3wgd3d3jR49+raO3jirRo0aOnz4sBYtWqRmzZpp5cqVWr58ucMyTz/9tIYMGaLIyEjde++9Wrx4sfbs2aNq1arZl5k4caJGjRqlMmXKKDo6WlevXlViYqLOnj2ruLg4zZw5U5UqVVLjxo1VokQJLVmyREFBQbd1egu4mzFGBbiLlS1bVnPmzFHr1q3tRz4+//xz+fv725d55plntGPHDkVEROjll1/WG2+8oU6dOkm6fnpj1apVuv/++zVo0CDVqlVL/fr108GDBxUYGCjp+iXIS5Ys0WeffabGjRurXbt2+vbbb+3rf+mll3Tw4EFVr15dFStWlHT9KExCQoJ+/PFH3XfffYqIiND48eNVqVIl+/vmzZunkJAQRUVFqWfPnvZLpAtbjx49NGbMGI0cOVKNGzfWli1bNH78eIdlHn30UY0dO1bPPvus/RRObGysvL297csMHjxYc+fO1fz589WgQQNFRUVp/vz5qlq1qiTJ19dX06ZNU2RkpJo1a6aDBw9q1apVDuNpAEg2c+OJVQDFSnh4uEaPHq3Ro0e7upQir2PHjgoKCtLHH3/s6lKAuwqnfgDASZcuXdLs2bPVqVMnubm5aeHChVq3bp39/jMACg5BBQCclHVK7JVXXtHVq1dVu3Zt/ec//1GHDh1cXRpw1+HUDwAAsCxGbQEAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMv6f7ewETv1m7IbAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAioAAAHFCAYAAADcytJ5AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA0MElEQVR4nO3dd3hU1b7G8XcCaYQwBJBACCUUAxwIoBGkKKACIlVREVEBxUNV6lHwqiAWIqjneFREPJSrXopXwIoFFEQBlaZEEJSaQCgCIcGEUJJ1//BmZNLIhBmyQr6f55nnYdasvea3955kXvZee8dhjDECAACwkF9xFwAAAJAfggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCuAlW7Zs0QMPPKB69eopODhYwcHBatCggYYMGaINGzZcsjomT54sh8Ph1lanTh0NHDjQp++7du1aTZ48WSdOnPDp+xTWwIED5XA4XI/AwEBFR0dr0qRJysjI8Hg8h8OhyZMne7/QS6xDhw7q0KFDcZcBFFrZ4i4AuBy88cYbGjlypKKjozVq1Cj97W9/k8Ph0C+//KIFCxbommuu0c6dO1WvXr1iqW/p0qWqUKGCT99j7dq1euqppzRw4EBVrFjRp+9VWMHBwfrqq68kScnJyVqwYIGmTJmi7du3a9GiRR6NtW7dOkVGRvqiTAAFIKgAF2nNmjUaPny4unXrpvfee08BAQGu12644QaNGDFC//u//6vg4OACx0lPT1e5cuV8UmOLFi18Mq7t/Pz8dO2117qed+3aVXv37tW7776rl156STVq1Cj0WOePA+DS4dQPcJGee+45lSlTRm+88YZbSDnfHXfcoYiICNfzgQMHqnz58oqPj1fnzp0VGhqqG2+8UZK0fPly9erVS5GRkQoKClL9+vU1ZMgQHT16NNe4n3zyiZo3b67AwEBFRUXphRdeyPP98zr1k5qaqvHjxysqKkoBAQGqUaOGRo8erbS0NLd+DodDI0eO1Ntvv61GjRqpXLlyatasmT7++GNXn8mTJ+sf//iHJCkqKsp1umXVqlV51vOvf/1LDodDO3fuzPXao48+qoCAANf6bt68Wd27d1fVqlUVGBioiIgIdevWTfv3789z7AvJDhz79u2TJCUkJOiee+5xjd+oUSO9+OKLysrKyrUdzj/1k56e7tp+QUFBqlSpkmJjY7VgwQK35T788EO1bt1a5cqVU2hoqDp16qR169a59ck+Xbd161b169dPTqdT4eHhuv/++5WSkuLW1xijGTNmqHnz5goODlZYWJhuv/127d69O1e/adOmqXbt2goKCtJVV12lTz/9tEjbDChOHFEBLkJmZqZWrlyp2NhYVa9e3aNlz5w5o549e2rIkCGaMGGCzp07J0natWuXWrdurcGDB8vpdGrv3r166aWX1K5dO8XHx8vf31+S9OWXX6pXr15q3bq1Fi5cqMzMTE2bNk2HDx++4Hunp6erffv22r9/vx577DHFxMRo69atevLJJxUfH68VK1a4zXP55JNPtH79ek2ZMkXly5fXtGnTdOutt2rHjh2qW7euBg8erOPHj+uVV17RkiVLXNuicePGeb7/Pffco0cffVTz5s3TM88847Y933nnHfXo0UNVqlRRWlqaOnXqpKioKL322msKDw/XoUOHtHLlSp08edKj7Z0tOxxdccUV+v3339WmTRudOXNGTz/9tOrUqaOPP/5Y48eP165duzRjxox8xxk7dqzefvttPfPMM2rRooXS0tL0888/69ixY64+8+fPV//+/dW5c2ctWLBAp0+f1rRp09ShQwd9+eWXateunduYffr0Ud++ffXAAw8oPj5eEydOlCTNmTPH1WfIkCGaN2+eHn74YT3//PM6fvy4pkyZojZt2uinn35SeHi4JOmpp57SU089pQceeEC33367EhMT9eCDDyozM1PR0dFF2nZAsTAAiuzQoUNGkrnrrrtyvXbu3Dlz9uxZ1yMrK8v12oABA4wkM2fOnALHz8rKMmfPnjX79u0zkswHH3zgeq1Vq1YmIiLCnDp1ytWWmppqKlWqZHL+aNeuXdsMGDDA9Xzq1KnGz8/PrF+/3q3fe++9ZySZZcuWudokmfDwcJOamuq23n5+fmbq1KmutunTpxtJZs+ePQWuU7bbbrvNREZGmszMTFfbsmXLjCTz0UcfGWOM2bBhg5Fk3n///UKNeb4BAwaYkJAQ1/b//fffzcsvv2wcDoe55pprjDHGTJgwwUgy33//vduyw4YNMw6Hw+zYscNtO0yaNMn1vEmTJqZ37975vn9mZqaJiIgwTZs2dVvHkydPmqpVq5o2bdq42iZNmmQkmWnTprmNMXz4cBMUFOT67Kxbt85IMi+++KJbv8TERBMcHGweeeQRY4wxycnJJigoyNx6661u/dasWWMkmfbt2+dbN2AbTv0APnL11VfL39/f9XjxxRdz9enTp0+utiNHjmjo0KGqWbOmypYtK39/f9WuXVuS9Msvv0iS0tLStH79et12220KCgpyLRsaGqoePXpcsLaPP/5YTZo0UfPmzXXu3DnXo0uXLnmesunYsaNCQ0Ndz8PDw1W1alXX6ZOiGDRokPbv368VK1a42ubOnatq1aqpa9eukqT69esrLCxMjz76qGbOnKlt27Z59B5paWmu7X/FFVdo9OjR6tq1q5YuXSpJ+uqrr9S4cWO1bNnSbbmBAwfKGOOaiJuXli1b6tNPP9WECRO0atUqnTp1yu31HTt2KCkpSffee6/8/P76VVu+fHn16dNH3333ndLT092W6dmzp9vzmJgYZWRk6MiRI5L+3G8Oh0P33HOP236rVq2amjVr5tpv69atU0ZGhvr37+82Xps2bVyfJaCk4NQPcBGqVKmi4ODgPL+w58+fr/T0dB08eDDXF5AklStXLteVOFlZWercubOSkpL0xBNPqGnTpgoJCVFWVpauvfZa15dhcnKysrKyVK1atVzj5tWW0+HDh7Vz507XaaSccs6HqVy5cq4+gYGBub6cPdG1a1dVr15dc+fOVefOnZWcnKwPP/xQo0aNUpkyZSRJTqdTX3/9tZ599lk99thjSk5OVvXq1fXggw/q8ccfz7f+bMHBwVq9erWr3tq1a7tt82PHjqlOnTq5lsueT3T+aZyc/v3vfysyMlKLFi3S888/r6CgIHXp0kXTp09XgwYNXMvmdUowIiJCWVlZSk5OdptAnXM7BwYGSpJrOx8+fFjGGNfpnZzq1q3rVndRPx+ATQgqwEUoU6aMbrjhBn3xxRc6ePCg25dS9vyMvXv35rlsznudSNLPP/+sn376SfPmzdOAAQNc7TknnYaFhcnhcOjQoUO5xsirLafsgHX+3Iecr/tamTJldO+99+rf//63Tpw4ofnz5+v06dMaNGiQW7+mTZtq4cKFMsZoy5Ytm
jdvnqZMmaLg4GBNmDChwPfw8/NTbGxsvq9XrlxZBw8ezNWelJQkqeDtEBIS4poHcvjwYdfRlR49emj79u2u0JHf+H5+fgoLCyuw/pyqVKkih8Ohb775xhVizpfdlv3e+X0+8gpngK049QNcpIkTJyozM1NDhw7V2bNnL2qs7PCS80vojTfecHseEhKili1basmSJW43Lzt58qQ++uijC75P9+7dtWvXLlWuXFmxsbG5HkX5Isv5v//CGDRokDIyMrRgwQLNmzdPrVu3VsOGDfPs63A41KxZM/3zn/9UxYoVtWnTJo9rzOnGG2/Utm3bco311ltvyeFwqGPHjoUaJzw8XAMHDlS/fv20Y8cOpaenKzo6WjVq1ND8+fNljHH1TUtL0+LFi11XAnmie/fuMsbowIEDee63pk2bSvrzyqagoCD9z//8j9vya9euvajTdUBx4IgKcJHatm2r1157TQ899JCuuuoq/f3vf9ff/vY3+fn56eDBg1q8eLEkFeqGaw0bNlS9evU0YcIEGWNUqVIlffTRR1q+fHmuvk8//bRuvvlmderUSePGjVNmZqaef/55hYSE6Pjx4wW+z+jRo7V48WJdf/31GjNmjGJiYpSVlaWEhAR98cUXGjdunFq1auXRdsj+knz55Zc1YMAA+fv7Kzo62m1uS17r27p1a02dOlWJiYmaNWuW2+sff/yxZsyYod69e6tu3boyxmjJkiU6ceKEOnXq5FF9eRkzZozeeustdevWTVOmTFHt2rX1ySefaMaMGRo2bJiuvPLKfJdt1aqVunfvrpiYGIWFhemXX37R22+/7RZApk2bpv79+6t79+4aMmSITp8+renTp+vEiROKi4vzuN62bdvq73//uwYNGqQNGzbo+uuvV0hIiA4ePKhvv/1WTZs21bBhwxQWFqbx48frmWee0eDBg3XHHXcoMTFRkydP5tQPSp5inMgLXFZ+/PFHM2jQIBMVFWUCAwNNUFCQqV+/vrnvvvvMl19+6dY3+4qUvGzbts106tTJhIaGmrCwMHPHHXeYhISEXFedGGPMhx9+aGJiYkxAQICpVauWiYuLc11Bcr6cV/0YY8wff/xhHn/8cRMdHW0CAgKM0+k0TZs2NWPGjDGHDh1y9ZNkRowYkavOvMacOHGiiYiIMH5+fkaSWblyZcEbzRgza9YsI8kEBweblJQUt9e2b99u+vXrZ+rVq2eCg4ON0+k0LVu2NPPmzbvguAVt4/Pt27fP3H333aZy5crG39/fREdHm+nTp7tdqWNM7qt+JkyYYGJjY01YWJgJDAw0devWNWPGjDFHjx51W+799983rVq1MkFBQSYkJMTceOONZs2aNW59svfZ77//7tY+d+7cPK+kmjNnjmnVqpUJCQkxwcHBpl69eua+++4zGzZscPXJysoyU6dONTVr1jQBAQEmJibGfPTRR6Z9+/Zc9YMSxWHMecckAQAALMIcFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAa5XoG75lZWUpKSlJoaGhed6OHAAA2McYo5MnTyoiIsLtj3bmpUQHlaSkJNWsWbO4ywAAAEWQmJioyMjIAvuU6KCSfWvuxMTEQt2eHAAAFL/U1FTVrFmzwD+xka1EB5Xs0z0VKlQgqAAAUMIUZtoGk2kBAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFrFGlRWr16tHj16KCIiQg6HQ++//35xlgMAACxTrH/rJy0tTc2aNdOgQYPUp0+f4iyl1NickKw9R9MUVSVELWqFFXc5PpVzXS/1uuf1fvnVYMt+uZg6PFnWhvUtafXiL97aH7aNYyMb1q1Yg0rXrl3VtWvX4iyhVIn79BfN/Hq36/nQ9nU1oWujYqzId3Kua/OaTv2YmOJ67ut1z2tbS8pz+9uyXy6mDk+WtWF9S1q9+Iu39odt49jIlnUrUXNUTp8+rdTUVLcHCmdzQrLbB07680tzc0JyMVXkO3mt6/khRfLtuue3rfNqW7Q+wYr9cjGfD0+WteFzWNLqxV+8tT9sG8dGNq1biQoqU6dOldPpdD1q1qxZ3CWVGHuOpnnUXpIVdp18te6ejPtT4omLHsMbLubz4cmyNnwOS1q9+Iu39odt49jIpnUrUUFl4sSJSklJcT0SExOLu6QSI6pKiEftJVlh18lX6+7JuM1qVrzoMbzhYj4fnixrw+ewpNWLv3hrf9g2jo1sWrcSFVQCAwNVoUIFtwcKp0WtMNc8iWzD2te97CZ+SXmva/OaTrfnvlz3/LZ1Xm19r6llxX65mM+HJ8va8DksafXiL97aH7aNYyOb1s1hjDGX/F3z4HA4tHTpUvXu3bvQy6SmpsrpdColJYXQUkg2zOC+VLjqx3Nc9XPxfeF7tl2tczl/Pny1bp58fxdrUPnjjz+0c+dOSVKLFi300ksvqWPHjqpUqZJq1ap1weUJKgAAlDyefH8X6+XJGzZsUMeOHV3Px44dK0kaMGCA5s2bV0xVAQAAWxRrUOnQoYMsOfMEAAAsVKIm0wIAgNKFoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1ipyUNm5c6c+//xznTp1SpJkjPFaUQAAAFIRgsqxY8d000036corr9Qtt9yigwcPSpIGDx6scePGeb1AAABQenkcVMaMGaOyZcsqISFB5cqVc7X37dtXn332mVeLAwAApVtZTxf44osv9Pnnnysy
MtKtvUGDBtq3b5/XCgMAAPD4iEpaWprbkZRsR48eVWBgoFeKAgAAkIoQVK6//nq99dZbrucOh0NZWVmaPn26Onbs6NXiAABA6ebxqZ/p06erQ4cO2rBhg86cOaNHHnlEW7du1fHjx7VmzRpf1AgAAEopj4+oNG7cWFu2bFHLli3VqVMnpaWl6bbbbtPmzZtVr149X9QIAABKKYcpwTdASU1NldPpVEpKiipUqFDc5QAAgELw5Pvb41M/W7ZsybPd4XAoKChItWrVYlItAADwCo+DSvPmzeVwOCT9dTfa7OeS5O/vr759++qNN95QUFCQl8oEAAClkcdzVJYuXaoGDRpo1qxZ+umnn/Tjjz9q1qxZio6O1vz58zV79mx99dVXevzxx31RLwAAKEU8PqLy7LPP6uWXX1aXLl1cbTExMYqMjNQTTzyhH374QSEhIRo3bpxeeOEFrxYLAABKF4+PqMTHx6t27dq52mvXrq34+HhJf54eyv4bQAAAAEXlcVBp2LCh4uLidObMGVfb2bNnFRcXp4YNG0qSDhw4oPDwcO9VCQAASiWPT/289tpr6tmzpyIjIxUTEyOHw6EtW7YoMzNTH3/8sSRp9+7dGj58uNeLBQAApUuR7qPyxx9/6J133tGvv/4qY4waNmyou+++W6Ghob6oMV/cRwUAgJLHp/dRkaTy5ctr6NChRSoOAACgsIoUVCRp27ZtSkhIcJurIkk9e/a86KIAAACkIgSV3bt369Zbb1V8fLwcDkeum75lZmZ6t0IAAFBqeXzVz6hRoxQVFaXDhw+rXLly2rp1q1avXq3Y2FitWrXKByUCAIDSyuMjKuvWrdNXX32lK664Qn5+fvLz81O7du00depUPfzww9q8ebMv6gQAAKWQx0dUMjMzVb58eUlSlSpVlJSUJOnPG77t2LHDu9UBAIBSzeMjKk2aNNGWLVtUt25dtWrVStOmTVNAQIBmzZqlunXr+qJGAABQSnkcVB5//HGlpaVJkp555hl1795d1113nSpXrqxFixZ5vUAAAFB6FemGbzkdP35cYWFhrit/LhVu+AYAQMnj8xu+5VSpUiVvDAMAAODG46CSkZGhV155RStXrtSRI0eUlZXl9vqmTZu8VhwAACjdPA4q999/v5YvX67bb79dLVu2vOSnewAAQOnhcVD55JNPtGzZMrVt29YX9QAAALh4fB+VGjVqXPK/kgwAAEonj4PKiy++qEcffVT79u3zRT0AAAAuHp/6iY2NVUZGhurWraty5crJ39/f7fXjx497rTgAAFC6eRxU+vXrpwMHDui5555TeHg4k2kBAIDPeBxU1q5dq3Xr1qlZs2a+qAcAAMDF4zkqDRs21KlTp3xRCwAAgBuPg0pcXJzGjRunVatW6dixY0pNTXV7AAAAeIvHf+vHz+/PbJNzbooxRg6HQ5mZmd6r7gL4Wz8AAJQ8Pv1bPytXrixyYQAAAJ7wOKi0b9/eF3UAAADkUuigsmXLlkL1i4mJKXIxAAAA5yt0UGnevLkcDocKmtJyqeeoAACAy1uhg8qePXt8WQcAAEAuhQ4qtWvX9mUdAAAAuXh8HxUAAIBLhaACAACsRVABAADWIqgAAABrEVQAAIC1CnXVT4sWLXL9bZ/8bNq06aIKAgAAyFaooNK7d2/XvzMyMjRjxgw1btxYrVu3liR999132rp1q4YPH+6TIgEAQOlUqKAyadIk178HDx6shx9+WE8//XSuPomJid6tDgAAlGoOU9A98fPgdDq1YcMGNWjQwK39t99+U2xsrFJSUrxaYEE8+TPRAADADp58f3s8mTY4OFjffvttrvZvv/1WQUFBng4HAACQr0LfQj/b6NGjNWzYMG3cuFHXXnutpD/nqMyZM0dPPvmk1wsEAACll8dBZcKECapbt65efvllzZ8/X5LUqFEjzZs3T3feeafXCwQAAKWXx3NUbMIcFQAASh6fzlGRpBMnTug///mPHnvsMR0/flzSn/dPOXDgQFGGAwAAyJPHp362bNmim266SU6nU3v37tXgwYNVqVIlLV26VPv27dNbb73lizoBAEAp5PERlbFjx2rgwIH67bff3K7y6dq1q1avXu3V4gAAQOnmcVBZv369hgwZkqu9Ro0aOnTokFeKAgAAkIoQVIKCgpSampqrfceOHbriiiu8UhQAAIBUhKDSq1cvTZkyRWfPnpUkORwOJSQkaMKECerTp4/XCwQAAKWXx0HlhRde0O+//66qVavq1KlTat++verXr6/Q0FA9++yzvqgRAACUUh5f9VOhQgV9++23+uqrr7Rp0yZlZWXpqquu0k033eSL+gAAQCnGDd8AAMAl5cn3t8dHVCTpyy+/1JdffqkjR44oKyvL7bU5c+YUZUgAAIBcPA4qTz31lKZMmaLY2FhVr15dDofDF3UBAAB4HlRmzpypefPm6d577/VFPQAAAC4eX/Vz5swZtWnTxhe1AAAAuPE4qAwePFjz58/3RS0AAABuPD71k5GRoVmzZmnFihWKiYmRv7+/2+svvfSS14oDAAClW5H+enLz5s0lST///LPba0ysBQAA3uRxUFm5cqUv6gAAAMjF4zkq59u/f78OHDjgrVoAAADceBxUsrKyNGXKFDmdTtWuXVu1atVSxYoV9fTTT+e6+RsAAMDF8PjUz3/9139p9uzZiouLU9u2bWWM0Zo1azR58mRlZGTwhwkBAIDXePy3fiIiIjRz5kz17NnTrf2DDz7Q8OHDL+mpIP7WDwAAJY8n398en/o5fvy4GjZsmKu9YcOGOn78uKfDAQAA5MvjoNKsWTO9+uqrudpfffVVNWvWzCtFAQAASEWYozJt2jR169ZNK1asUOvWreVwOLR27VolJiZq2bJlvqgRAACUUh4fUWnfvr1+/fVX3XrrrTpx4oSOHz+u2267TTt27NB1113nixoBAEAp5fFkWpswmRYAgJLHp5NpP/vsM3377beu56+99pqaN2+uu+++W8nJyZ5XCwAAkA+Pg8o//vEPpaamSpLi4+M1duxY3XLLLdq9e7fGjh3r9QIBAEDp5fFk2j179qhx48aSpMWLF6tHjx567rnntGnTJt1yyy1eLxAAAJReHh9RCQgIUHp6uiRpxYoV6ty5sySpUqVKriMtAAAA3uDxEZV27dpp7Nixatu2rX744QctWrRIkvTrr78qMjLS6wUCAIDSy+MjKq+++qrKli2r9957T6+//rpq1KghSfr000918803e71AAABQenF5MgAAuKQ8+f4u1Kmf1NRU10AXmodCYAAAAN5SqKASFhamgwcPqmrVqqpYsaIcDkeuPsYYORwOZWZmer1IAABQOhUqqHz11VeqVKmSJGnlypU+LQgAACAbc1QAAMAl5fU5KjmdOHFCs2fP1i+//CKHw6HGjRvr/vvvl9PpLFLBAAAAefH48uQNGzaoXr16+uc//6njx4/r6NGjeumll1SvXj1t2rTJFzUCAIBSyuNTP9ddd53q16+vN998U2XL/nlA5ty5cxo8eLB2796t1atX+6TQvHDqBwCAkseT72+Pg0pwcLA
2b96shg0burVv27ZNsbGxrtvrXwoEFQAASh5Pvr89PvVToUIFJSQk5GpPTExUaGiop8MBAADky+Og0rdvXz3wwANatGiREhMTtX//fi1cuFCDBw9Wv379fFEjAAAopTy+6ueFF16Qw+HQfffdp3PnzkmS/P39NWzYMMXFxXm9QAAAUHoV+T4q6enp2rVrl4wxql+/vsqVK+ft2i6IOSoAAJQ8Ppmjkp6erhEjRqhGjRqqWrWqBg8erOrVqysmJqZYQgoAALj8FTqoTJo0SfPmzVO3bt101113afny5Ro2bJgvawMAAKVcoeeoLFmyRLNnz9Zdd90lSbrnnnvUtm1bZWZmqkyZMj4rEAAAlF6FPqKSmJio6667zvW8ZcuWKlu2rJKSknxSGAAAQKGDSmZmpgICAtzaypYt67ryBwAAwNsKferHGKOBAwcqMDDQ1ZaRkaGhQ4cqJCTE1bZkyRLvVggAAEqtQgeVAQMG5Gq75557vFoMAADA+QodVObOnevLOgAAAHLx+Bb6AAAAlwpBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWKvagMmPGDEVFRSkoKEhXX321vvnmm+IuCQAAWKJscb75okWLNHr0aM2YMUNt27bVG2+8oa5du2rbtm2qVatWcZamzQnJ2nM0TVFVQtSiVlix1uIpT2vPq/+FxtickKxVO45IkjpEV3XrU9Br3qw/v/fJ2S7pouopivPXQZLXPkvZ457NzJJ/Gb88x/TGZ3dzQrIW/pCg5PQzahAeqnpXlHetS1G2pSf7NK9+BS2fc5sUtG1sU5J/z8C3ivKzcLlyGGNMcb15q1atdNVVV+n11193tTVq1Ei9e/fW1KlTL7h8amqqnE6nUlJSVKFCBa/VFffpL5r59W7X86Ht62pC10ZeG9+XPK09r/6SChwj5zLn9ynoNW/Wn9/75Kw9L77en3nV5o33zm/c88f0xme3oPoLeu/CjlfYfZrfZ6qg9S1KfcWlJP+egW8V5WehpPHk+7vYTv2cOXNGGzduVOfOnd3aO3furLVr1+a5zOnTp5Wamur28LbNCcm5fvHN/Hq3Nicke/29vM3T2vPrX9AYeS2T3WfR+oR8XyvM9its/QXVUJgvWF/uz/xqu9j3Lmjc7DG98dm9UP35vbcn4xV2n+b3mSpofT2tr7iU5N8z8K2i/Cxc7ootqBw9elSZmZkKDw93aw8PD9ehQ4fyXGbq1KlyOp2uR82aNb1e156jaR6128TT2j1Zp+y+BS3zU+IJj2srTJ+c7d7YF77anxeznhezzJ6jaV7dz54oaJmL3af5faYKWl9P6isuJfn3DHyrKD8Ll7tin0zrcDjcnhtjcrVlmzhxolJSUlyPxMREr9eTfR6+sO028bR2T9Ypu29ByzSrWdHj2grTJ2e7N/aFr/bnxaznxSwTVSXEq/vZEwUtc7H7NL/PVEHr60l9xaUk/56BbxXlZ+FyV2xBpUqVKipTpkyuoydHjhzJdZQlW2BgoCpUqOD28LYWtcJccx2yDWtft0RMWvK09vz6FzRGXstk9+l7Ta18XyvM9its/QXVkFd7Uespivxqu9j3Lmjc7DG98dm9UP35vbcn4xV2n+b3mSpofT2tr7iU5N8z8K2i/Cxc7op9Mu3VV1+tGTNmuNoaN26sXr16FetkWqlkz6zmqh+u+uGqH676Qcl2uV/148n3d7EGlUWLFunee+/VzJkz1bp1a82aNUtvvvmmtm7dqtq1a19weV8GFQAA4BuefH8X631U+vbtq2PHjmnKlCk6ePCgmjRpomXLlhUqpAAAgMtfsR5RuVgcUQEAoOQpEfdRAQAAuBCCCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgrWK9hf7Fyr6pbmpqajFXAgAACiv7e7swN8cv0UHl5MmTkqSaNWsWcyUAAMBTJ0+elNPpLLBPif5bP1lZWUpKSlJoaKgcDkdxl4P/l5qaqpo1ayoxMZG/wWQh9o+92Dd2Y/94jzFGJ0+eVEREhPz8Cp6FUqKPqPj5+SkyMrK4y0A+KlSowA+zxdg/9mLf2I394x0XOpKSjcm0AADAWgQVAABgLYIKvC4wMFCTJk1SYGBgcZeCPLB/7MW+sRv7p3iU6Mm0AADg8sYRFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQQaHs3btXDzzwgKKiohQcHKx69epp0qRJOnPmjFu/hIQE9ejRQyEhIapSpYoefvjhXH3i4+PVvn17BQcHq0aNGpoyZUquv/fw9ddf6+qrr1ZQUJDq1q2rmTNn+nwdS4MZM2YoKipKQUFBuvrqq/XNN98Ud0mXlalTp+qaa65RaGioqlatqt69e2vHjh1ufYwxmjx5siIiIhQcHKwOHTpo69atbn1Onz6thx56SFWqVFFISIh69uyp/fv3u/VJTk7WvffeK6fTKafTqXvvvVcnTpzw9SpeVqZOnSqHw6HRo0e72tg/FjJAIXz66adm4MCB5vPPPze7du0yH3zwgalataoZN26cq8+5c+dMkyZNTMeOHc2mTZvM8uXLTUREhBk5cqSrT0pKigkPDzd33XWXiY+PN4sXLzahoaHmhRdecPXZvXu3KVeunBk1apTZtm2befPNN42/v7957733Luk6X24WLlxo/P39zZtvvmm2bdtmRo0aZUJCQsy+ffuKu7TLRpcuXczcuXPNzz//bH788UfTrVs3U6tWLfPHH3+4+s
TFxZnQ0FCzePFiEx8fb/r27WuqV69uUlNTXX2GDh1qatSoYZYvX242bdpkOnbsaJo1a2bOnTvn6nPzzTebJk2amLVr15q1a9eaJk2amO7du1/S9S3JfvjhB1OnTh0TExNjRo0a5Wpn/9iHoIIimzZtmomKinI9X7ZsmfHz8zMHDhxwtS1YsMAEBgaalJQUY4wxM2bMME6n02RkZLj6TJ061URERJisrCxjjDGPPPKIadiwodt7DRkyxFx77bW+XJ3LXsuWLc3QoUPd2ho2bGgmTJhQTBVd/o4cOWIkma+//toYY0xWVpapVq2aiYuLc/XJyMgwTqfTzJw50xhjzIkTJ4y/v79ZuHChq8+BAweMn5+f+eyzz4wxxmzbts1IMt99952rz7p164wks3379kuxaiXayZMnTYMGDczy5ctN+/btXUGF/WMnTv2gyFJSUlSpUiXX83Xr1qlJkyaKiIhwtXXp0kWnT5/Wxo0bXX3at2/vdsOkLl26KCkpSXv37nX16dy5s9t7denSRRs2bNDZs2d9uEaXrzNnzmjjxo25tmvnzp21du3aYqrq8peSkiJJrp+TPXv26NChQ277ITAwUO3bt3fth40bN+rs2bNufSIiItSkSRNXn3Xr1snpdKpVq1auPtdee62cTif7sxBGjBihbt266aabbnJrZ//YiaCCItm1a5deeeUVDR061NV26NAhhYeHu/ULCwtTQECADh06lG+f7OcX6nPu3DkdPXrU6+tSGhw9elSZmZl5btfs7Q7vMsZo7NixateunZo0aSLpr894Qfvh0KFDCggIUFhYWIF9qlatmus9q1atyv68gIULF2rTpk2aOnVqrtfYP3YiqJRykydPlsPhKPCxYcMGt2WSkpJ0880364477tDgwYPdXnM4HLnewxjj1p6zj/n/ibSe9oHn8tqubFPfGDlypLZs2aIFCxbkeq0o++FCP0eFHac0S0xM1KhRo/TOO+8oKCgo337sH7uULe4CULxGjhypu+66q8A+derUcf07KSlJHTt2VOvWrTVr1iy3ftWqVdP333/v1pacnKyzZ8+6/odSrVq1XP+jOHLkiCRdsE/ZsmVVuXLlwq8cXKpUqaIyZcrkuV1z/u8RF++hhx7Shx9+qNWrVysyMtLVXq1aNUl//o+7evXqrvbz90O1atV05swZJScnu/2v/ciRI2rTpo2rz+HDh3O97++//87+LMDGjRt15MgRXX311a62zMxMrV69Wq+++qrrCi32j2WKaW4MSqD9+/ebBg0amLvuusttdnu27Mm0SUlJrraFCxfmmkxbsWJFc/r0aVefuLi4XJNpGzVq5Db20KFDmUx7kVq2bGmGDRvm1taoUSMm03pRVlaWGTFihImIiDC//vprnq9Xq1bNPP/8866206dP5zlZc9GiRa4+SUlJeU7W/P777119vvvuOyZrXkBqaqqJj493e8TGxpp77rnHxMfHs38sRVBBoRw4cMDUr1/f3HDDDWb//v3m4MGDrke27MuTb7zxRrNp0yazYsUKExkZ6XZ58okTJ0x4eLjp16+fiY+PN0uWLDEVKlTI8/LkMWPGmG3btpnZs2dzebIXZF+ePHv2bLNt2zYzevRoExISYvbu3VvcpV02hg0bZpxOp1m1apXbz0h6erqrT1xcnHE6nWbJkiUmPj7e9OvXL8/LXyMjI82KFSvMpk2bzA033JDn5a8xMTFm3bp1Zt26daZp06Zc/loE51/1Ywz7x0YEFRTK3LlzjaQ8H+fbt2+f6datmwkODjaVKlUyI0eOdLsU2RhjtmzZYq677joTGBhoqlWrZiZPnuw6mpJt1apVpkWLFiYgIMDUqVPHvP766z5fx9LgtddeM7Vr1zYBAQHmqquucl02C+/I72dk7ty5rj5ZWVlm0qRJplq1aiYwMNBcf/31Jj4+3m2cU6dOmZEjR5pKlSqZ4OBg0717d5OQkODW59ixY6Z///4mNDTUhIaGmv79+5vk5ORLsJaXl5xBhf1jH4cxOW4JCgAAYAmu+gEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAsBqAwcOVO/evV3PO3TooNGjRxdbPQAuLYIKAI8cOnRIo0aNUv369RUUFKTw8HC1a9dOM2fOVHp6us/ff8mSJXr66ae9OmbOMATAHvz1ZACFtnv3brVt21YVK1bUc889p6ZNm+rcuXP69ddfNWfOHEVERKhnz565ljt79qz8/f29UkOlSpW8Mg6AkoEjKgAKbfjw4Spbtqw2bNigO++8U40aNVLTpk3Vp08fffLJJ+rRo4ckyeFwaObMmerVq5dCQkL0zDPPKDMzUw888ICioqIUHBys6Ohovfzyy27jZ2ZmauzYsapYsaIqV66sRx55RDn/ykfOUz9nzpzRI488oho1aigkJEStWrXSqlWrXK/PmzdPFStW1Oeff65GjRqpfPnyuvnmm3Xw4EFJ0uTJk/Xf//3f+uCDD+RwOORwONyWB1C8CCoACuXYsWP64osvNGLECIWEhOTZx+FwuP49adIk9erVS/Hx8br//vuVlZWlyMhIvfvuu9q2bZuefPJJPfbYY3r33Xddy7z44ouaM2eOZs+erW+//VbHjx/X0qVLC6xr0KBBWrNmjRYuXKgtW7bojjvu0M0336zffvvN1Sc9PV0vvPCC3n77ba1evVoJCQkaP368JGn8+PG68847XeHl4MGDatOmzcVsKgBexKkfAIWyc+dOGWMUHR3t1l6lShVlZGRIkkaMGKHnn39eknT33Xfr/vvvd+v71FNPuf4dFRWltWvX6t1339Wdd94pSfrXv/6liRMnqk+fPpKkmTNn6vPPP8+3pl27dmnBggXav3+/IiIiJP0ZPD777DPNnTtXzz33nKQ/Tz3NnDlT9erVkySNHDlSU6ZMkSSVL19ewcHBOn36tKpVq1a0jQPAZwgqADxy/lETSfrhhx+UlZWl/v376/Tp06722NjYXMvOnDlT//nPf7Rv3z6dOnVKZ86cUfPmzSVJKSkpOnjwoFq3bu3qX7ZsWcXGxuY6/ZNt06ZNMsboyiuvdGs/ffq0Kleu7Hperlw5V0iRpOrVq+vIkSOFX2kAxYagAqBQ6tevL4fDoe3bt7u1161bV5IUHBzs1p7z9NC7776rMWPG6MUXX1Tr1q0VGhqq6dOn6/vvvy9yTVlZWSpTpow2btyoMmXKuL1Wvnx5179zTuR1OBz5hh8AdmGOCoBCqVy5sjp16qRXX31VaWlpHi//zTffqE2bNho+fLhatGih+vXra9euXa7XnU6nqlevru+++87Vdu7cOW3cuDHfMVu0aKHMzEwdOXJE9evXd3t4chonICBAmZmZHq8TAN8jqAAotBkzZujcuXOKjY3VokWL9Msvv2jHjh165513tH379lxHNc5Xv359bdiwQZ9//rl+/fVXPfHEE1q/fr1bn1GjRikuLk5Lly7V9u3bNXz4cJ04cSLfMa+88kr1799f9913n5YsWaI9e/Zo/fr1ev7557Vs2bJCr1edOnW0ZcsW7dixQ
0ePHtXZs2cLvSwA3yKoACi0evXqafPmzbrppps0ceJENWvWTLGxsXrllVc0fvz4Am/ENnToUN12223q27evWrVqpWPHjmn48OFufcaNG6f77rtPAwcOdJ0euvXWWwusae7cubrvvvs0btw4RUdHq2fPnvr+++9Vs2bNQq/Xgw8+qOjoaMXGxuqKK67QmjVrCr0sAN9yGE7UAgAAS3FEBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABr/R+bGPLcITxOkwAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Number of poisoned images: 15 out of 10000.\n", + "last index of poison 47\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkwAAAHFCAYAAAAAM6ZOAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAABofklEQVR4nO3dd1gUZ9cG8HtBWDoKSFMEFAULNmxg7GI3xh412GOMDUWjIbFhjKixYDcalZhExQRLLBExdsGCvUeNiiIEG6AYqc/3h9/O67jAsgRcxPt3XXPpPHPm2TNldw/TViGEECAiIiKiXOnpOgEiIiKi4o4FExEREZEGLJiIiIiINGDBRERERKQBCyYiIiIiDVgwEREREWnAgomIiIhIAxZMRERERBqwYCIiIiLS4L0smC5cuIAhQ4agUqVKMDY2hrGxMSpXrozPPvsMMTExby2P6dOnQ6FQyNpcXFwwcODAIn3dqKgoTJ8+HUlJSYXed1hYGKpXrw5jY2MoFAqcO3cux7iDBw9CoVBIg76+Puzs7NCzZ09cvXpV69cdOHAgXFxc/lvyxUBoaCgUCgXu3Lmj61Qk+d0nVdty9uzZatNUy/U231+va968uWx/MzY2Rq1atRASEoLs7Gyt+rpz5w4UCgVCQ0OLJtm3SJttO2rUqKJPqJhzcXFBp06ddJ0GgP99hv72228F7kNXn5tJSUmwsbHBpk2bpLb79+9j7NixaNasGUqXLq31eyy/82dkZKBSpUoICQnROu/3rmD6/vvv4eXlhRMnTsDf3x87d+7Erl27MHbsWFy+fBn169fHrVu3dJbf1q1bMWXKlCJ9jaioKAQFBRV6wfTw4UP4+fmhUqVK2LNnD6Kjo1GlSpU855k1axaio6Nx4MABTJo0CZGRkWjcuDHi4uK0eu0pU6Zg69at/yV9KiSzZ8/GkydPdJ2GmooVKyI6OhrR0dEICwtDuXLlMG7cOAQGBmrVj4ODA6Kjo9GxY8ciypSoZAsKCoKjoyN69+4ttd28eRO//PILDA0N0aFDB637zO/8BgYGmDp1KmbMmIHHjx9r9RqltM7qHXbs2DGMGDECHTt2xG+//QZDQ0NpWsuWLTFy5Ej8+uuvMDY2zrOfFy9ewMTEpEhyrFOnTpH0+zb89ddfyMjIwCeffIJmzZrla57KlSujUaNGAICmTZuidOnSGDJkCEJDQ/H111/n+7UrVapUoJypcLVu3RoHDx7Et99+i/nz5+s6HRljY2NpXwOA9u3bw8PDA0uXLsXMmTNhYGCQr36USqWsHyLKvydPnuD777/HwoULZWdYmjZtiocPHwIAYmJisHHjRq361Wb+Pn36ICAgAN9//z2++uqrfL/Ge3WEadasWdDX18f3338vK5Ze17NnTzg6OkrjAwcOhJmZGS5evIg2bdrA3NwcrVq1AgBERkaiS5cuKF++PIyMjODm5obPPvsMjx49Uut3165dqF27NpRKJVxdXTFv3rwcXz+nQ+QpKSmYMGECXF1dYWhoiHLlymHs2LFITU2VxakOm//000+oWrUqTExMUKtWLezcuVOKmT59Or744gsAgKurq3SK4uDBg3muu99//x3e3t4wMTGBubk5fH19ER0dLVtPH3zwAQCgd+/eUCgUaN68eZ595kT1RXT37l0AQHZ2NubOnQsPDw8olUrY2tqif//+uH//vmy+nA4t//rrr2jYsCEsLS1hYmKCihUrYvDgwbKY2NhYfPLJJ7C1tYVSqUTVqlUxf/582Wka1SmYefPmYcGCBXB1dYWZmRm8vb1x/PhxtWWIiYnBhx9+CCsrKxgZGaFOnTrYvHmzWtzx48fRuHFjGBkZwdHREYGBgcjIyMjXeoqJicHHH38MFxcXGBsbw8XFBX369JHWm4rqVNiBAwfw+eefw8bGBtbW1ujWrRsePHggi83IyMDEiRNhb28PExMTfPDBBzh58mS+8lFxd3fHkCFDsGzZMrVccqJpvwL+d+r68uXL6NOnDywtLWFnZ4fBgwcjOTlZq/xeZ2BgAC8vL7x48UL6oL106RK6dOmCMmXKwMjICLVr18aPP/4omy+nU3IPHz7EsGHD4OTkBKVSibJly6Jx48bYt2+fbN61a9eiVq1aMDIygpWVFbp27ap2Clr1mXPz5k106NABZmZmcHJywvjx45GWliaLTU9Px8yZM6X3R9myZTFo0CBpeVQKY9u+TnU6aMOGDZg0aRIcHBxgZmaGzp07459//sGzZ88wbNgw2NjYwMbGBoMGDcLz589lfSxbtgxNmzaFra0tTE1N4enpiblz56q9B4QQmDVrFpydnWFkZIR69eohMjISzZs3V/uMye9nZX4+Gwoqv98Lqv36woUL6NmzJywtLWFlZYWAgABkZmbi+vXraNeuHczNzeHi4oK5c+fm+HovX75EQEAA7O3tYWxsjGbNmuHs2bNqcaGhoXB3d5c+59avX59jf0FBQWjYsCGsrKxgYWGBunXrYs2aNRBC/PeV8/95ZGZmyo4uAYCe3n8rR7SZ39DQEL1798aqVau0Wy7xnsjMzBTGxsbC29tbq/kGDBggDAwMhIuLiwgODhZ//vmniIiIEEIIsWLFChEcHCx+//13cejQIfHjjz+KWrVqCXd3d5Geni71sW/fPqGvry8++OADsWXLFvHrr7+K+vXriwoVKog3N4Gzs7MYMGCANJ6amipq164tbGxsxIIFC8S+ffvEokWLhKWlpWjZsqXIzs6WYgEIFxcX0aBBA7F582axe/du0bx5c1GqVClx69YtIYQQ9+7dE6NHjxYAxJYtW0R0dLSIjo4WycnJua6DX375RQAQbdq0Edu2bRNhYWHCy8tLGBoaiiNHjgghhLh586ZYtmyZACBmzZoloqOjxeXLl3Pt88CBAwKA+PXXX2Xt27dvFwDEV199JYQQYtiwYQKAGDVqlNizZ49YuXKlKFu2rHBychIPHz6UbSdnZ2dpPCoqSigUCvHxxx+L3bt3i/3794t169YJPz8/KSYxMVGUK1dOlC1bVqxcuVLs2bNHjBo1SgAQn3/+uRR3+/Ztad22a9dObNu2TWzbtk14enqKMmXKiKSkJCl2//79wtDQUDRp0kSEhYWJPXv2iIEDBwoAYt26dVLc5cuXhYmJiahWrZrYuHGj2L59u2jbtq20T9y+fTvXdSeEEL/++quYOnWq2Lp1qzh06JDYtGmTaNasmShbtqxsvaxbt04AEBUrVhSjR48WERER4ocffhBlypQRLVq0kPU5YMAAoVAoxBdffCH27t0rFixYIMqVKycsLCxk+2RuAIiR
I0eK+Ph4YWJiIlvXqjxOnTolteVnvxJCiGnTpgkAwt3dXUydOlVERkaKBQsWCKVSKQYNGqQxLyGEaNasmahevbpae926dUWpUqXEixcvxLVr14S5ubmoVKmSWL9+vdi1a5fo06ePACDmzJkjzaPaH17fnm3bthVly5YVq1atEgcPHhTbtm0TU6dOFZs2bZJiZs2aJQCIPn36iF27don169eLihUrCktLS/HXX39JcQMGDBCGhoaiatWqYt68eWLfvn1i6tSpQqFQiKCgICkuKytLtGvXTpiamoqgoCARGRkpfvjhB1GuXDlRrVo18eLFC1mfhbFtVVTvX2dnZzFw4EDpvWlmZiZatGghfH19xYQJE8TevXvFnDlzhL6+vhg9erSsz3HjxokVK1aIPXv2iP3794uFCxcKGxsbtW0aGBgoAIhhw4aJPXv2iNWrV4sKFSoIBwcH0axZMykuv5+V+flsyI2zs7Po2LFjnjH5/V54fb/+5ptvRGRkpJg4caL0eefh4SEWL14sIiMjxaBBgwQAER4errYNnJycRJcuXcSOHTvEzz//LNzc3ISFhYX0mS/E/95/b8Y5OTnJPjeFEGLgwIFizZo1IjIyUkRGRopvvvlGGBsby/Y9IV7tfxkZGRqHzMxM2XwtW7YUDRo0yHMdnjp1Su09po38zB8WFiYAiAsXLuS73/emYEpISBAAxMcff6w2LTMzU7aBXy9CBgwYIACItWvX5tl/dna2yMjIEHfv3hUAxPbt26VpDRs2FI6OjuLff/+V2lJSUoSVlZXGgik4OFjo6enJvmiEEOK3334TAMTu3bulNgDCzs5OpKSkyJZbT09PBAcHS23fffddvr6UhXj1pnB0dBSenp4iKytLan/27JmwtbUVPj4+UltuRVBOVLFhYWEiIyNDvHjxQhw+fFi4ubkJfX19cf78eXH16lUBQIwYMUI274kTJ2RFlRDqBdO8efMEAFkx86Yvv/xSABAnTpyQtX/++edCoVCI69evCyH+9wXp6ekpe/OfPHlSABAbN26U2jw8PESdOnVERkaGrM9OnToJBwcHaR327t1bGBsbi4SEBCkmMzNTeHh45HvbvC4zM1M8f/5cmJqaikWLFkntqg/KN9fh3LlzBQARHx8vhBDSuh43bpwsTlXUaPul+vXXXws9PT1x/vx5WR6q/Vib/Ur1xTJ37lzZ640YMUIYGRnJ3q+5URVMqvf4gwcPpO3fs2dPIYQQH3/8sVAqlSI2NlY2b/v27YWJiYm0L+VUMJmZmYmxY8fm+vpPnz4VxsbGokOHDrL22NhYoVQqRd++faU21WfO5s2bZbEdOnQQ7u7u0vjGjRvVvkSF+N+XxfLly4UQhb9thfjf+7dz586yuLFjxwoAYsyYMbL2jz76SFhZWeXav+rLd/369UJfX188efJECCHEkydPhFKpFL1795bFR0dHCwCygim/n5X5+WzITX4Kptfl9b2g2q/nz58vm6d27drSH7QqGRkZomzZsqJbt25Sm2ob1K1bV/YeuHPnjjAwMBBDhw4VQvzvvZZb3JsF0+tU22XGjBnC2to6x+9GTcPr20gIIUxMTMTw4cPzXG9vo2C6ceOGACBWrFiR737fq1NyufHy8oKBgYE05HTtRffu3dXaEhMTMXz4cDg5OaFUqVIwMDCAs7MzAEiH2VNTU3Hq1Cl069YNRkZG0rzm5ubo3Lmzxtx27tyJGjVqoHbt2sjMzJSGtm3b5ngqrUWLFjA3N5fG7ezsYGtrm6/TIzm5fv06Hjx4AD8/P9khTzMzM3Tv3h3Hjx/HixcvCtQ38Or0nYGBAUxMTNC0aVNkZWXht99+Q82aNXHgwAEAUDtF2aBBA1StWhV//vlnrv3Wr18fANCrVy9s3rw5x4vI9+/fj2rVqqFBgway9oEDB0IIgf3798vaO3bsCH19fWm8Zs2aAP53+vDmzZu4du0a+vXrBwCy7dWhQwfEx8fj+vXrAIADBw6gVatWsLOzk/rT19dXO0ydm+fPn2PSpElwc3NDqVKlUKpUKZiZmSE1NTXHuww//PBD2fibuavWtSp3lV69eqFUKe0vdZw4cSKsrKwwadKkHKcXZL/KaRlevnyJxMREAK9O376+zrOysmTxly9flt7jjo6OmD9/Pvr164fVq1cDeLU/tGrVCk5OTrL5Bg4ciBcvXqidKnxdgwYNEBoaipkzZ+L48eNqp5Wio6Px77//qu3LTk5OaNmypdq+rFAo1D4fatasKXsf79y5E6VLl0bnzp1ly127dm3Y29tLnw2FvW1f9+YdY1WrVgUAtQviq1atiidPnshOy509exYffvghrK2toa+vDwMDA/Tv3x9ZWVn466+/ALw6bZ2WloZevXrJ+mvUqJHaKfj8flbm57Phv8jP98LrclqHCoUC7du3l9pKlSoFNze3HD/H+/btK7sWyNnZGT4+PtJ2V73Xcot70/79+9G6dWtYWlpK22Xq1Kl4/Pix9F4DXp1SPHXqlMbh+++/l+ZJSkrCixcvYGtrq3E9FjVVDtps//fmom8bGxsYGxvnuMNt2LABL168QHx8vNqHMgCYmJjAwsJC1padnY02bdrgwYMHmDJlCjw9PWFqaors7Gw0atQI//77LwDg6dOnyM7Ohr29vVq/ObW96Z9//sHNmzdzvSD1zfPi1tbWajFKpVLKR1uquwgcHBzUpjk6OiI7OxtPnz4t8EXwc+bMQcuWLaGvrw8bGxvZl5Wm186rCGzatCm2bduGxYsXo3///khLS0P16tXx9ddfo0+fPlL/Od1Sq7qG7c07KN5ct0qlEgCkdfvPP/8AACZMmIAJEybkmJdqez1+/LjA+wTw6kPyzz//xJQpU1C/fn1YWFhAoVCgQ4cOOW5rTbmrlvXN1y9VqlSO+5QmFhYWmDx5MsaOHSt9cL+uIPuVpmUYPHiw7HqjZs2ayf6gqFSpEjZt2gSFQgEjIyO4urrK+n/8+HGu+byec07CwsIwc+ZM/PDDD5gyZQrMzMzQtWtXzJ07F/b29hqXNzIyUtZmYmIi+wNLtbwvX76Uxv/55x8kJSXlej3m6/saUHjb9nVWVlaycVUuubW/fPkSZmZmiI2NRZMmTeDu7o5FixbBxcUFRkZGOHnyJEaOHKm2X77+h4XKm235/azMz2dDQeX3e+F1Oa2rnLa/oaEhUlJS1ObP7XPk/PnzAHLf/qq21x9hcvLkSbRp0wbNmzfH6tWrUb58eRgaGmLbtm349ttvZflXqFAB5cuXz2NtvPJ6kaaa/81l0wVVDtp8N743BZO+vj5atmyJvXv3Ij4+XvbBVa1aNQDI9dk3bz4rCXh1cej58+cRGhqKAQMGSO03b96UxZUpUwYKhQIJCQlqfeTU9iZVobd27dpcpxcl1QdqfHy82rQHDx5AT08PZcqUKXD/FStWRL169TS+9ptvzAcPHmhc9i5duqBLly5IS0vD8ePHERwcjL59+8LFxQXe3t6wtrbOdbkA7detKj4wMBDdunXLMcbd3V1atoLuE8nJydi5cyemTZuGL7/8UmpPS0sr8O38qnW
dkJCAcuXKSe2ZmZla33qr8vnnn2PRokWYNGkSPv/88xxfrzD3q+nTp8ueFfT6kVYA0gXDufkv+4ONjQ1CQkIQEhKC2NhY/P777/jyyy+RmJiIPXv2aFzegryPVRfw79mzJ8fpquUvim37X23btg2pqanYsmWLdPQFgNpz21S5q/4YeV1CQoLsDx5tPis1fTYUVH6/FwpTbp8jqnX3+vbXNO+mTZtgYGCAnTt3yoqabdu2qc375h8ouXn9DxdVLsXhsSOqHLR5771Xp+QCAwORlZWF4cOH5/tupNyoiijVX7kqrx9+BABTU1M0aNAAW7Zskf11+OzZM+zYsUPj63Tq1Am3bt2CtbU16tWrpzYU5KFjb/5lnhd3d3eUK1cOGzZskN1NkJqaivDwcOkOp6LQsmVLAMDPP/8saz916hSuXr0q3a2oiVKpRLNmzTBnzhwAkO4gadWqFa5cuYIzZ87I4tevXw+FQoEWLVpola+7uzsqV66M8+fP57it6tWrJ32JtWjRAn/++afsiyArKwthYWEaX0ehUEAIobbv/fDDD2qnofJLdbfRL7/8ImvfvHkzMjMzC9SnoaEhZs6ciVOnTuHXX3+VTSuK/crFxUW2rlXFaX61atUK+/fvV7t7cP369TAxMcn3owQqVKiAUaNGwdfXV9q3vL29YWxsrLYv379/XzoVqK1OnTrh8ePHyMrKynFfUy1/UWzb/yqnz08hhHR6VKVhw4ZQKpVq74vjx4+rHWEuyGdlbp8NhblcgPr3QmHauHGj7D109+5dREVFSdvd3d0dDg4Ouca9TqFQoFSpUrJLD/7991/89NNPaq9bkFNyhoaGqFixok6fdajy999/A/jfAZP8eG+OMAFA48aNsWzZMowePRp169bFsGHDUL16dejp6SE+Ph7h4eEAoHb6LSceHh6oVKkSvvzySwghYGVlhR07dqgdWgeAb775Bu3atYOvry/Gjx+PrKwszJkzB6amphor7bFjxyI8PBxNmzbFuHHjULNmTWRnZyM2NhZ79+7F+PHj0bBhQ63Wg6enJwBg0aJFGDBgAAwMDODu7q72Fznw6lbNuXPnol+/fujUqRM+++wzpKWl4bvvvkNSUlKOT3UuLO7u7hg2bBiWLFkCPT09tG/fHnfu3MGUKVPg5OSEcePG5Trv1KlTcf/+fbRq1Qrly5dHUlISFi1aBAMDA+kZUePGjcP69evRsWNHzJgxA87Ozti1axeWL1+Ozz//XONDN3Py/fffo3379mjbti0GDhyIcuXK4cmTJ7h69SrOnDkjFQ6TJ0/G77//jpYtW2Lq1KkwMTHBsmXL1G5/zomFhQWaNm2K7777DjY2NnBxccGhQ4ewZs0alC5dWuucgVfXTXzyyScICQmBgYEBWrdujUuXLmHevHn5ej/kpk+fPpg3bx7++OMPWbsu96vcTJs2DTt37kSLFi0wdepUWFlZ4ZdffsGuXbswd+5cWFpa5jhfcnIyWrRogb59+8LDwwPm5uY4deoU9uzZIx1pLF26NKZMmYKvvvoK/fv3R58+ffD48WMEBQXByMgI06ZN0zrfjz/+GL/88gs6dOgAf39/NGjQAAYGBrh//z4OHDiALl26oGvXrkW2bf8LX19fGBoaok+fPpg4cSJevnyJFStW4OnTp7I41W32wcHBKFOmDLp27Yr79+8jKCgIDg4Osuvf8vtZmZ/PhrwkJCTk+HRtFxcX1KpVK9/fC4UlMTERXbt2xaeffork5GRMmzYNRkZG0gNZ9fT08M0332Do0KFSXFJSEqZPn652mq5jx45YsGAB+vbti2HDhuHx48eYN2+eWgGoWt6C/MHevHlztc8DFdV6VRUzMTExMDMzAwD06NFDips+fTqCgoJw4MAB2aMl8js/8Kro1tfXR9OmTfOffIEuQX/HnTt3TgwaNEi4uroKpVIpjIyMhJubm+jfv7/4888/ZbEDBgwQpqamOfZz5coV4evrK8zNzUWZMmVEz549RWxsrAAgpk2bJov9/fffRc2aNYWhoaGoUKGCmD17tnSXxOvevEtOCCGeP38uJk+eLNzd3YWhoaGwtLQUnp6eYty4cbK7rPDGnSx59RkYGCgcHR2Fnp6eACAOHDiQ5zrbtm2baNiwoTAyMhKmpqaiVatW4tixY7KYgtwlpyk2KytLzJkzR1SpUkUYGBgIGxsb8cknn4h79+7J4t68S27nzp2iffv2oly5csLQ0FDY2tqKDh06yG5XF0KIu3fvir59+wpra2thYGAg3N3dxXfffSe7c0t1V9R3332nll9O2/r8+fOiV69ewtbWVhgYGAh7e3vRsmVLsXLlSlncsWPHRKNGjYRSqRT29vbiiy++EKtWrcrXXXL3798X3bt3F2XKlBHm5uaiXbt24tKlS2rbOqfb+YX43/p/fbunpaWJ8ePHC1tbW2FkZCQaNWokoqOjc9x/cpLb/rd3717pjpk388jPfqV6n7z+uITXly0/dxTm9liBN128eFF07txZWFpaCkNDQ1GrVi21O23evEvu5cuXYvjw4aJmzZrCwsJCGBsbC3d3dzFt2jSRmpoqm/eHH36QPgcsLS1Fly5d1B6/kdtnTk6fFxkZGWLevHmiVq1awsjISJiZmQkPDw/x2WefiRs3bkhxhb1tc3v/5ra/5bQNd+zYIeVdrlw58cUXX4g//vhDbb/Mzs4WM2fOFOXLlxeGhoaiZs2aYufOnaJWrVqia9eustfJz2dlfj8bcuLs7Jzr3WCq9Zjf74Xc9uvctv+b+7BqG/z0009izJgxomzZskKpVIomTZqImJgYtfl/+OEHUblyZWFoaCiqVKki1q5dq/a5KYQQa9euFe7u7kKpVIqKFSuK4OBgsWbNmgLdvZuTP//8UwAQJ0+eVJuW27p9c78fP368UCgU4urVqwWaXwghmjRponaXpyaK/38RIiKid8Lt27fh4eGBadOmafWkZioeatasicaNG2PFihUFmr9BgwZwdnZWO9WfX7du3ULlypUREREBX1/ffM/HgomIiIqt8+fPY+PGjfDx8YGFhQWuX7+OuXPnIiUlBZcuXcrxDjoq3vbs2YOuXbvixo0b+brT7nUpKSkoW7Yszp07Jz3GQluDBg3C/fv3tT5V+l5dw0RERO8WU1NTxMTEYM2aNUhKSoKlpSWaN2+Ob7/9lsXSO6pdu3b47rvvcPv2ba0LJgsLC7WfCNJGZmYmKlWqpPWPbgM8wkRERESk0Xv1WAEiIiKigmDBRERERKQBCyYiIiIiDXjRdw6ys7Px4MEDmJub5/izKERERFT8CCHw7NkzODo6yh5sWhhYMOXgwYMHar9YTkRERO+Ge/fuaX0HniYsmHKg+omQe/fu6eynA4iIiEg7KSkpcHJyyvGnvv4rFkw5UJ2Gs7CwYMFERET0jimKy2l40TcRERGRBiyYiIiIiDRgwURERESkAa9h+g+ysrKQkZGh6zSIAACGhoaFfhstERG9woKpAIQQSEhIQFJSkq5TIZLo6enB1dUVhoaGuk6FiKjEYcFUAKpiydbWFiYmJny4Jemc6mGr8f
HxqFChAvdJIqJCxoJJS1lZWVKxZG1tret0iCRly5bFgwcPkJmZCQMDA12nQ0RUovCCBy2prlkyMTHRcSZEcqpTcVlZWTrOhIio5GHBVEA85UHFDfdJIqKiw4KJiIiISAOdFkzBwcGoX78+zM3NYWtri48++gjXr1/XON+hQ4fg5eUFIyMjVKxYEStXrlSLCQ8PR7Vq1aBUKlGtWjVs3bq1KBahRFIoFNi2bZuu0yAiIio2dHrR96FDhzBy5EjUr18fmZmZ+Prrr9GmTRtcuXIFpqamOc5z+/ZtdOjQAZ9++il+/vlnHDt2DCNGjEDZsmXRvXt3AEB0dDR69+6Nb775Bl27dsXWrVvRq1cvHD16FA0bNiyy5XH5cleR9Z2TO7M7aj1PQkICvv32W+zatQtxcXGwtbVF7dq1MXbsWLRq1aoIsiy45s2bo3bt2ggJCdF1KkRE9J7TacG0Z88e2fi6detga2uL06dPo2nTpjnOs3LlSlSoUEH6Eq1atSpiYmIwb948qWAKCQmBr68vAgMDAQCBgYE4dOgQQkJCsHHjxqJboGLuzp07aNy4MUqXLo25c+eiZs2ayMjIQEREBEaOHIlr167pOkUiIqJiqVhdw5ScnAwAsLKyyjUmOjoabdq0kbW1bdsWMTEx0h1sucVERUUVcsbvlhEjRkChUODkyZPo0aMHqlSpgurVqyMgIADHjx/Pdb64uDj07t0bZcqUgbW1Nbp06YI7d+5I00+dOgVfX1/Y2NjA0tISzZo1w5kzZ2R9KBQK/PDDD+jatStMTExQuXJl/P7771rl7+LigpkzZ6J///4wMzODs7Mztm/fjocPH6JLly4wMzODp6cnYmJipHkeP36MPn36oHz58jAxMYGnp6da0fzs2TP069cPpqamcHBwwMKFC9G8eXOMHTtWiklPT8fEiRNRrlw5mJqaomHDhjh48KA0/e7du+jcuTPKlCkDU1NTVK9eHbt379Zq+YiIqPgqNgWTEAIBAQH44IMPUKNGjVzjEhISYGdnJ2uzs7NDZmYmHj16lGdMQkJCjn2mpaUhJSVFNpQ0T548wZ49ezBy5MgcT3eWLl06x/levHiBFi1awMzMDIcPH8bRo0dhZmaGdu3aIT09HcCrgmPAgAE4cuQIjh8/jsqVK6NDhw549uyZrK+goCD06tULFy5cQIcOHdCvXz88efJEq+VYuHAhGjdujLNnz6Jjx47w8/ND//798cknn+DMmTNwc3ND//79IYQAALx8+RJeXl7YuXMnLl26hGHDhsHPzw8nTpyQ+gwICMCxY8fw+++/IzIyEkeOHFEr+AYNGoRjx45h06ZNuHDhAnr27Il27drhxo0bAICRI0ciLS0Nhw8fxsWLFzFnzhyYmZlptWxERFR8FZsHV44aNQoXLlzA0aNHNca+efu06svx9facYnK77To4OBhBQUHapvxOuXnzJoQQ8PDw0Gq+TZs2QU9PDz/88IO0/tatW4fSpUvj4MGDaNOmDVq2bCmb5/vvv0eZMmVw6NAhdOrUSWofOHAg+vTpAwCYNWsWlixZgpMnT6Jdu3b5zqdDhw747LPPAABTp07FihUrUL9+ffTs2RMAMGnSJHh7e+Off/6Bvb09ypUrhwkTJkjzjx49Gnv27MGvv/6Khg0b4tmzZ/jxxx+xYcMG6RqudevWwdHRUZrn1q1b2LhxI+7fv49H2SZIBTBhwgTs2bMH69atw6xZsxAbG4vu3bvD09MTAFCxYsV8L9P75vVr/VTX4anaCnJdHhHR21AsCqbRo0fj999/x+HDh1G+fPk8Y+3t7dWOFCUmJqJUqVLSk7dzi3nzqJNKYGAgAgICpPGUlBQ4OTkVZFGKrZyKyvw4ffo0bt68CXNzc1n7y5cvcevWLQCv1u3UqVOxf/9+/PPPP8jKysKLFy8QGxsrm6dmzZrS/01NTWFubo7ExESt8nm9D9X2VBUpr7clJibC3t4eWVlZmD17NsLCwhAXF4e0tDSkpaVJR9n+/vtvZGRkoEGDBlIflpaWcHd3l8bPnDkDIQSqVKmC7FerEXqKV0cmVfvcmDFj8Pnnn2Pv3r1o3bo1unfvLsuViIjebTotmIQQGD16NLZu3YqDBw/C1dVV4zze3t7YsWOHrG3v3r2oV6+e9HMQ3t7eiIyMxLhx42QxPj4+OfapVCqhVCr/w5IUf5UrV4ZCocDVq1fx0Ucf5Xu+7OxseHl54ZdfflGbVrZsWQCvjhw9fPgQISEhcHZ2hlKphLe3t3TKTuXNn+tQKBTIzs7Wajle70NV/OXUpup3/vz5WLhwIUJCQuDp6QlTU1OMHTtWyi23QlLVrupLX18fp0+fxo3EVACAh4MFAEin3YYOHYq2bdti165d2Lt3L4KDgzF//nyMHj1aq+UjIqLiSafXMI0cORI///wzNmzYAHNzcyQkJCAhIQH//vuvFBMYGIj+/ftL48OHD8fdu3cREBCAq1evYu3atVizZo3stIu/vz/27t2LOXPm4Nq1a5gzZw727dsnu4j3fWNlZYW2bdti2bJlSE1NVZuelJSU43x169bFjRs3YGtrCzc3N9lgaWkJADhy5AjGjBmDDh06oHr16lAqldL1ZLp25MgRdOnSBZ988glq1aqFihUrStcdAUClSpVgYGCAkydPSm0pKSmymDp16iArKwuJiYmo4FoRFVwrSuvA3t5einNycsLw4cOxZcsWjB8/HqtXr347C0lEREVOpwXTihUrkJycjObNm8PBwUEawsLCpJj4+HjZqR1XV1fs3r0bBw8eRO3atfHNN99g8eLF0iMFAMDHxwebNm3CunXrULNmTYSGhiIsLKxIn8H0Lli+fDmysrLQoEEDhIeH48aNG7h69SoWL14Mb2/vHOfp168fbGxs0KVLFxw5cgS3b9/GoUOH4O/vj/v37wMA3Nzc8NNPP+Hq1as4ceIE+vXrB2Nj47e5aLlyc3NDZGQkoqKicPXqVXz22Wey07Xm5uYYMGAAvvjiCxw4cACXL1/G4MGDoaenJx11qlKlCvr164f+/ftj3x87cD/2Lk6dOoU5c+ZId8KNHTsWERERuH37Ns6cOYP9+/ejatWqOllmIiIqfDo/JadJaGioWltOt62/qUePHujRo0dBUyuRXF1dcebMGXz77bcYP3484uPjUbZsWXh5eWHFihU5zmNiYoLDhw9j0qRJ6NatG549e4Zy5cqhVatWsLB4dVpq7dq1GDZsGOrUqYMKFSpg1qxZsiN+ujRlyhTcvn0bbdu2hYmJCYYNG4aPPvpIeoQFACxYsADDhw9Hp06dYGFhgYkTJ+LevXswMjKSYtatW4eZM2di/jeTkZgQDxtra3h7e6NDhw4AXv3g7ciRI3H//n1YWFigXbt2WLhw4VtfXiIiKhoKkZ+q5T2TkpICS0tLJCcnS0WBysuXL3H79m24urrKvlCp5EhNTUW5cuUwf/58DBkyRDbtwv0kAEDN8qXffmIavCv7Ju+SI6Kiktf3939VLO6SI9Kls2fP4tq1a2jQoAGSk5MxY8YMAECXLl10nBkRERUXLJiIAMybN
w/Xr1+HoaEhvLy8cOTIEdjY2Og6LSIiKiZYMNF7r06dOjh9+rSu0yAiomKs2Pw0ChEREVFxxYKJiIiISAMWTEREREQasGAiIiIi0oAFExEREZEGLJiIiIiINGDBRPl28OBBKBSKXH+ot7iYPn06ateures0iIioBOFzmArTgeC3+3otArUKHzhwIH788UcAQKlSpeDk5IRu3bohKCgIpqamGuf38fFBfHw8LC0tC5RucXHnzh24urri7NmzLKyIiChfWDC9Z9q1a4d169YhIyMDR44cwdChQ5Gamprrj+++ztDQEPb29m8hSyIiouKFp+TeM0qlEvb29nByckLfvn3Rr18/bNu2DQCQlpaGMWPGwNbWFkZGRvjggw9w6tQpad43T8ndvXsXnTt3RpkyZWBqaorq1atj9+7dUvyhQ4fQoEEDKJVKODg44Msvv0RmZqY0vXnz5hgzZgwmTpwIKysr2NvbY/r06bJ8k5OTMWzYMNja2sLCwgItW7bE+fPnZTGzZ8+GnZ0dzM3NMWTIELx8+VKrdaJaroiICNSpUwfGxsZo2bIlEhMT8ccff6Bq1aqwsLBAnz598O+/L6T59uzZgw8++AClS5eGtbU1OnXqhFu3bsn6joqKQu3atWFkZIR69eph27ZtUCgUOHfunBRz5coVdOjQAWZmZrCzs4Ofnx8ePXokTf/tt9/g6ekJY2NjWFtbo3Xr1khNTdVqGYmI6L9hwfSeMzY2RkZGBgBg4sSJCA8Px48//ogzZ87Azc0Nbdu2xZMnT3Kcd+TIkUhLS8Phw4dx8eJFzJkzB2ZmZgCAuLg4dOjQAfXr18f58+exYsUKrFmzBjNnzpT18eOPP8LU1BQnTpzA3LlzMWPGDERGRgIAhBDo2LEjEhISsHv3bpw+fRp169ZFq1atpJw2b96MadOm4dtvv0VMTAwcHBywfPnyAq2L6dOnY+nSpYiKisK9e/fQq1cvhISEYMOGDdi1axciIyOxcd0qKT41NRUBAQE4deoU/vzzT+jp6aFr167Izs4GADx79gydO3eGp6cnzpw5g2+++QaTJk2SvWZ8fDyaNWuG2rVrIyYmBnv27ME///yDXr16SdP79OmDwYMH4+rVqzh48CC6desGIUSBlpGIiAqGp+TeYydPnsSGDRvQqlUr6bRcaGgo2rdvDwBYvXo1IiMjsWbNGnzxxRdq88fGxqJ79+7w9PQEAFSsWFGatnz5cjg5OWHp0qVQKBTw8PDAgwcPMGnSJEydOhV6eq9q9Zo1a2LatGkAgMqVK2Pp0qX4888/4evriwMHDuDixYtITEyEUqkE8OpHcrdt24bffvsNw4YNQ0hICAYPHoyhQ4cCAGbOnIl9+/ZpfZRJNW/jxo0BAEOGDEFgYCBu3bolLVePHj1wKuooBo8YCwDo3r27bP41a9bA1tYWV65cQY0aNfDLL79AoVBg9erVMDIyQrVq1RAXF4dPP/1UmmfFihWoW7cuZs2aJbWtXbsWTk5O+Ouvv/D8+XNkZmaiW7ducHZ2BgBpfRMR0dvDI0zvmZ07d8LMzAxGRkbw9vZG06ZNsWTJEty6dQsZGRlSwQAABgYGaNCgAa5evZpjX2PGjJGKjGnTpuHChQvStKtXr8Lb2xsKhUJqa9y4MZ4/f4779+9LbTVr1pT16eDggMTERADA6dOn8fz5c1hbW8PMzEwabt++LZ36Ur3O694cz6/Xc7Gzs4OJiYmsCLSzs8OTxw+l8Vu3bqFv376oWLEiLCws4OrqCuBVIQkA169fR82aNWFkZCTN06BBA9lrnj59GgcOHJAtn4eHh9R/rVq10KpVK3h6eqJnz55YvXo1nj59WqDlIyKiguMRpvdMixYtsGLFChgYGMDR0REGBgYAXp36ASArcIBXp8XebFMZOnQo2rZti127dmHv3r0IDg7G/PnzMXr06BznU51Ger1d9foqCoVCOqWVnZ0NBwcHHDx4UO21S5cunf+FzqfXc1EoFDnmJv4/NwDo3LkznJycsHr1ajg6OiI7Oxs1atRAeno6gJzX3Zun0rKzs9G5c2fMmTNHLR8HBwfo6+sjMjISUVFR2Lt3L5YsWYKvv/4aJ06ckAo0IiIqejzC9J4xNTWFm5sbnJ2dZQWBm5sbDA0NcfToUaktIyMDMTExqFq1aq79OTk5Yfjw4diyZQvGjx+P1atXAwCqVauGqKgoWYEQFRUFc3NzlCtXLl+51q1bFwkJCShVqhTc3Nxkg42NDQCgatWqOH78uGy+N8eLwuPHj3H16lVMnjwZrVq1QtWqVdWO/Hh4eODChQtIS0uT2mJiYmQxdevWxeXLl+Hi4qK2jKpHPSgUCjRu3BhBQUE4e/YsDA0NsXXr1iJfRiIi+h8WTATgVSH1+eef44svvsCePXtw5coVfPrpp3jx4gWGDBmS4zxjx45FREQEbt++jTNnzmD//v1ScTVixAjcu3cPo0ePxrVr17B9+3ZMmzYNAQEB0vVLmrRu3Rre3t746KOPEBERgTt37iAqKgqTJ0+WCg9/f3+sXbsWa9euxV9//YVp06bh8uXLhbNS8lCmTBlYW1tj1apVuHnzJvbv34+AgABZTN++fZGdnY1hw4bh6tWriIiIwLx58wD87yjbyJEj8eTJE/Tp0wcnT57E33//jb1792Lw4MHIysrCiRMnMGvWLMTExCA2NhZbtmzBw4cP8yxiiYio8PGUHElmz56N7Oxs+Pn54dmzZ6hXrx4iIiJQpkyZHOOzsrIwcuRI3L9/HxYWFmjXrh0WLlwIAChXrhx2796NL774ArVq1YKVlRWGDBmCyZMn5zsfhUKB3bt34+uvv8bgwYPx8OFD2Nvbo2nTprCzswMA9O7dG7du3cKkSZPw8uVLdO/eHZ9//jkiIiL++wrJg56eHjZt2oQxY8agRo0acHd3x+LFi9G8eXMpxsLCAjt27MDnn3+O2rVrw9PTE1OnTkXfvn2l65ocHR1x7NgxTJo0CW3btkVaWhqcnZ3Rrl076OnpwcLCAocPH0ZISAhSUlLg7OyM+fPnSxfmExHR26EQvD9ZTUpKCiwtLZGcnAwLCwvZtJcvX+L27dtwdXWVXcxL74cL95MAADXLly7Q/L/88gsGDRqE5ORkGBsbF15ieHf2TZcvd0n/vzO7o6xNNU5EVBB5fX//VzzCRFSE1q9fj4oVK6JcuXI4f/48Jk2ahF69ehV6sUREREWLBRNREUpISMDUqVORkJAABwcH9OzZE99++62u0yIiIi2xYCIqQhMnTsTEiRN1nQYREf1HvEuOiIiISAMWTAXEa+WpuOE+SURUdFgwaUn1sMcXL15oiCR6u1RPGNfX19dxJkREJQ+vYdKSvr4+SpcuLf3emYmJSa4/HUIlj8h8VZQU5Md9i1J2djYePnwIExMTlCrFtzURUWHjJ2sB2NvbA4BUNNH7I/HpvwAAw3+L32MB9PT0UKFCBRbwRERFgAVTASgUCjg4OMDW1hYZGRm6TofeoqFbDgIA/hzfXKd55MTQ0DDfPztDRETaYcH0H+jr6/N6kfdM3LMsACjWT9ImIqLC
xz9HiYiIiDTQacF0+PBhdO7cGY6OjlAoFNi2bVue8QMHDoRCoVAbqlevLsWEhobmGFPcLtIlIiKid4dOC6bU1FTUqlULS5cuzVf8okWLEB8fLw337t2DlZUVevbsKYuzsLCQxcXHx/MUChERERWYTq9hat++Pdq3b5/veEtLS1haWkrj27Ztw9OnTzFo0CBZnEKhkO5kIyIiIvqv3ulrmNasWYPWrVvD2dlZ1v78+XM4OzujfPny6NSpE86ePaujDImIiKgkeGfvkouPj8cff/yBDRs2yNo9PDwQGhoKT09PpKSkYNGiRWjcuDHOnz+PypUr59hXWloa0tLSpPGUlJQizZ2IiIjeLe/sEabQ0FCULl0aH330kay9UaNG+OSTT1CrVi00adIEmzdvRpUqVbBkyZJc+woODpZO91laWsLJyamIsyciIqJ3yTtZMAkhsHbtWvj5+cHQ0DDPWD09PdSvXx83btzINSYwMBDJycnScO/evcJOmYiIiN5h7+QpuUOHDuHmzZsYMmSIxlghBM6dOwdPT89cY5RKJZRKZWGmSERERCWITgum58+f4+bNm9L47du3ce7cOVhZWaFChQoIDAxEXFwc1q9fL5tvzZo1aNiwIWrUqKHWZ1BQEBo1aoTKlSsjJSUFixcvxrlz57Bs2bIiXx4iIiIqmXRaMMXExKBFixbSeEBAAABgwIABCA0NRXx8PGJjY2XzJCcnIzw8HIsWLcqxz6SkJAwbNgwJCQmwtLREnTp1cPjwYTRo0KDoFoSIiIhKNJ0WTM2bN4cQItfpoaGham2WlpZ48eJFrvMsXLgQCxcuLIz0iIiIiAC8oxd9ExEREb1NLJiIiIiINGDBRERERKQBCyYiIiIiDVgwEREREWnAgomIiIhIAxZMRERERBqwYCIiIiLSgAUTERERkQYsmIiIiIg0YMFEREREpAELJiIiIiINWDARERERacCCiYiIiEgDFkxEREREGrBgIiIiItKABRMRERGRBiyYiIiIiDRgwURERESkAQsmIiIiIg1YMBERERFpwIKJiIiISAMWTEREREQasGAiIiIi0oAFExEREZEGLJiIiIiINGDBRERERKQBCyYiIiIiDVgwEREREWnAgomIiIhIAxZMRERERBqwYCIiIiLSgAUTERERkQY6LZgOHz6Mzp07w9HREQqFAtu2bcsz/uDBg1AoFGrDtWvXZHHh4eGoVq0alEolqlWrhq1btxbhUhAREVFJp9OCKTU1FbVq1cLSpUu1mu/69euIj4+XhsqVK0vToqOj0bt3b/j5+eH8+fPw8/NDr169cOLEicJOn4iIiN4TpXT54u3bt0f79u21ns/W1halS5fOcVpISAh8fX0RGBgIAAgMDMShQ4cQEhKCjRs3/pd0iYiI6D31Tl7DVKdOHTg4OKBVq1Y4cOCAbFp0dDTatGkja2vbti2ioqLeZopERERUguj0CJO2HBwcsGrVKnh5eSEtLQ0//fQTWrVqhYMHD6Jp06YAgISEBNjZ2cnms7OzQ0JCQq79pqWlIS0tTRpPSUkpmgUgIiKid9I7VTC5u7vD3d1dGvf29sa9e/cwb948qWACAIVCIZtPCKHW9rrg4GAEBQUVfsJERERUIryTp+Re16hRI9y4cUMat7e3VzualJiYqHbU6XWBgYFITk6Whnv37hVZvkRERPTueecLprNnz8LBwUEa9/b2RmRkpCxm79698PHxybUPpVIJCwsL2UBERESkotNTcs+fP8fNmzel8du3b+PcuXOwsrJChQoVEBgYiLi4OKxfvx7AqzvgXFxcUL16daSnp+Pnn39GeHg4wsPDpT78/f3RtGlTzJkzB126dMH27duxb98+HD169K0vHxEREZUMOi2YYmJi0KJFC2k8ICAAADBgwACEhoYiPj4esbGx0vT09HRMmDABcXFxMDY2RvXq1bFr1y506NBBivHx8cGmTZswefJkTJkyBZUqVUJYWBgaNmz49haMiIiIShSdFkzNmzeHECLX6aGhobLxiRMnYuLEiRr77dGjB3r06PFf0yMiIiICUAKuYSIiIiIqaiyYiIiIiDRgwURERESkAQsmIiIiIg1YMBERERFpwIKJiIiISAMWTEREREQasGAiIiIi0oAFExEREZEGLJiIiIiINNC6YPrxxx+xa9cuaXzixIkoXbo0fHx8cPfu3UJNjoiIiKg40LpgmjVrFoyNjQEA0dHRWLp0KebOnQsbGxuMGzeu0BMkIiIi0jWtf3z33r17cHNzAwBs27YNPXr0wLBhw9C4cWM0b968sPMjIiIi0jmtjzCZmZnh8ePHAIC9e/eidevWAAAjIyP8+++/hZsdERERUTGg9REmX19fDB06FHXq1MFff/2Fjh07AgAuX74MFxeXws6PiIiISOe0PsK0bNkyeHt74+HDhwgPD4e1tTUA4PTp0+jTp0+hJ0hERESka1ofYSpdujSWLl2q1h4UFFQoCREREREVNwV6DtORI0fwySefwMfHB3FxcQCAn376CUePHi3U5IiIiIiKA60LpvDwcLRt2xbGxsY4c+YM0tLSAADPnj3DrFmzCj1BIiIiIl3TumCaOXMmVq5cidWrV8PAwEBq9/HxwZkzZwo1OSIiIqLiQOuC6fr162jatKlau4WFBZKSkgojJyIiIqJiReuCycHBATdv3lRrP3r0KCpWrFgoSREREREVJ1oXTJ999hn8/f1x4sQJKBQKPHjwAL/88gsmTJiAESNGFEWORERERDql9WMFJk6ciOTkZLRo0QIvX75E06ZNoVQqMWHCBIwaNaoociQiIiLSKa0LJgD49ttv8fXXX+PKlSvIzs5GtWrVYGZmVti5ERERERULBSqYAMDExAT16tUrzFyIiIiIiiWtC6auXbtCoVCotSsUChgZGcHNzQ19+/aFu7t7oSRIREREpGtaX/RtaWmJ/fv348yZM1LhdPbsWezfvx+ZmZkICwtDrVq1cOzYsUJPloiIiEgXtD7CZG9vj759+2Lp0qXQ03tVb2VnZ8Pf3x/m5ubYtGkThg8fjkmTJvGnUoiIiKhE0PoI05o1azB27FipWAIAPT09jB49GqtWrYJCocCoUaNw6dKlQk2UiIiISFe0LpgyMzNx7do1tfZr164hKysLAGBkZJTjdU5ERERE7yKtT8n5+flhyJAh+Oqrr1C/fn0oFAqcPHkSs2bNQv/+/QEAhw4dQvXq1Qs9WSIiIiJd0LpgWrhwIezs7DB37lz8888/AAA7OzuMGzcOkyZNAgC0adMG7dq1K9xMiYiIiHRE61Ny+vr6+PrrrxEfH4+kpCQkJSUhPj4eX331FfT19QEAFSpUQPny5TX2dfjwYXTu3BmOjo5QKBTYtm1bnvFbtmyBr68vypYtCwsLC3h7eyMiIkIWExoaCoVCoTa8fPlS20UlIiIiAlCAgul1FhYWsLCwKPD8qampqFWrFpYuXZqv+MOHD8PX1xe7d+/G6dOn0aJFC3Tu3Blnz55Vyys+Pl42GBkZFThPIiIier8V6Enfv/32GzZv3ozY2Fikp6fLpp05cybf/bR
v3x7t27fPd3xISIhsfNasWdi+fTt27NiBOnXqSO0KhQL29vb57peIiIgoL1ofYVq8eDEGDRoEW1tbnD17Fg0aNIC1tTX+/vtvrYqfwpCdnY1nz57ByspK1v78+XM4OzujfPny6NSpk9oRKCIiIiJtaF0wLV++HKtWrcLSpUthaGiIiRMnIjIyEmPGjEFycnJR5Jir+fPnIzU1Fb169ZLaPDw8EBoait9//x0bN26EkZERGjdujBs3buTaT1paGlJSUmQDERERkYrWBVNsbCx8fHwAAMbGxnj27BmAV48b2LhxY+Fml4eNGzdi+vTpCAsLg62trdTeqFEjfPLJJ6hVqxaaNGmCzZs3o0qVKliyZEmufQUHB8PS0lIanJyc3sYiEBER0TtC64LJ3t4ejx8/BgA4Ozvj+PHjAIDbt29DCFG42eUiLCwMQ4YMwebNm9G6des8Y/X09FC/fv08jzAFBgYiOTlZGu7du1fYKRMREdE7TOuCqWXLltixYwcAYMiQIRg3bhx8fX3Ru3dvdO3atdATfNPGjRsxcOBAbNiwAR07dtQYL4TAuXPn4ODgkGuMUqmU7vj7r3f+ERERUcmj9V1yq1atQnZ2NgBg+PDhsLKywtGjR9G5c2cMHz5cq76eP3+OmzdvSuO3b9/GuXPnYGVlhQoVKiAwMBBxcXFYv349gFfFUv/+/bFo0SI0atQICQkJAF6dGrS0tAQABAUFoVGjRqhcuTJSUlKwePFinDt3DsuWLdN2UYmIiIgAFKBg0tPTk/3wbq9evWQXXWsjJiYGLVq0kMYDAgIAAAMGDEBoaCji4+MRGxsrTf/++++RmZmJkSNHYuTIkVK7Kh4AkpKSMGzYMCQkJMDS0hJ16tTB4cOH0aBBgwLlSERERFSg5zC9fPkSFy5cQGJionS0SeXDDz/Mdz/NmzfP87onVRGkcvDgQY19Lly4EAsXLsx3DkRERESaaF0w7dmzB/3798ejR4/UpikUCmRlZRVKYkRERETFhdYXfY8aNQo9e/ZEfHw8srOzZQOLJSIiIiqJtC6YEhMTERAQADs7u6LIh4iIiKjY0bpg6tGjR76uJSIiIiIqKbS+hmnp0qXo2bMnjhw5Ak9PTxgYGMimjxkzptCSIyIiIioOtC6YNmzYgIiICBgbG+PgwYNQKBTSNIVCwYKJiIiIShytC6bJkydjxowZ+PLLL2XPYyIiIiIqqbSueNLT09G7d28WS0RERPTe0LrqGTBgAMLCwooiFyIiIqJiSetTcllZWZg7dy4iIiJQs2ZNtYu+FyxYUGjJERERERUHWhdMFy9eRJ06dQAAly5dkk17/QJwIiIiopJC64LpwIEDRZEHERERUbHFK7eJiIiINMj3EaZu3brlK27Lli0FToaIiIioOMp3wWRpaVmUeRAREREVW/kumNatW1eUeRAREREVW7yGiYiIiEgDFkxEREREGrBgIiIiItKABRMRERGRBvkqmOrWrYunT58CAGbMmIEXL14UaVJERERExUm+CqarV68iNTUVABAUFITnz58XaVJERERExUm+HitQu3ZtDBo0CB988AGEEJg3bx7MzMxyjJ06dWqhJkhERESka/kqmEJDQzFt2jTs3LkTCoUCf/zxB0qVUp9VoVCwYCIiIqISJ18Fk7u7OzZt2gQA0NPTw59//glbW9siTYyIiIiouMj3k75VsrOziyIPIiIiomJL64IJAG7duoWQkBBcvXoVCoUCVatWhb+/PypVqlTY+RERERHpnNbPYYqIiEC1atVw8uRJ1KxZEzVq1MCJEydQvXp1REZGFkWORERERDql9RGmL7/8EuPGjcPs2bPV2idNmgRfX99CS46IiIioOND6CNPVq1cxZMgQtfbBgwfjypUrhZIUERERUXGidcFUtmxZnDt3Tq393LlzvHOOiIiISiStT8l9+umnGDZsGP7++2/4+PhAoVDg6NGjmDNnDsaPH18UORIRERHplNYF05QpU2Bubo758+cjMDAQAODo6Ijp06djzJgxhZ4gERERka5pfUpOoVBg3LhxuH//PpKTk5GcnIz79+/D398fCoVCq74OHz6Mzp07w9HREQqFAtu2bdM4z6FDh+Dl5QUjIyNUrFgRK1euVIsJDw9HtWrVoFQqUa1aNWzdulWrvIiIiIhep3XB9Dpzc3OYm5sXeP7U1FTUqlULS5cuzVf87du30aFDBzRp0gRnz57FV199hTFjxiA8PFyKiY6ORu/eveHn54fz58/Dz88PvXr1wokTJwqcJxEREb3fCvTgysLSvn17tG/fPt/xK1euRIUKFRASEgIAqFq1KmJiYjBv3jx0794dABASEgJfX1/pdGFgYCAOHTqEkJAQbNy4sdCXgYiIiEq+/3SE6W2Ljo5GmzZtZG1t27ZFTEwMMjIy8oyJiop6a3kSERFRyaLTI0zaSkhIgJ2dnazNzs4OmZmZePToERwcHHKNSUhIyLXftLQ0pKWlSeMpKSmFmzgRERG907QqmDIyMtCmTRt8//33qFKlSlHllKc3LywXQqi15xST1wXpwcHBCAoKKsQs8+by5S4AwJ3ZHd/aa/5X+cr5QPCrf1sE5jqfajynvl6f9mbMm9O0XXfarPO88hhb6rf/j4FsHADGzlynVU4FIS1H2wuvxiNqysYB/G/957A9NPX7ury2T0H23ZDJg/7/fz3yfF01eSyH2nbVIlbbfTG32Pzm+p9itfQufsboXGFvj//aXyHnk9/9vcj3GdVyAUWy7xclrU7JGRgY4NKlS1rfDVdY7O3t1Y4UJSYmolSpUrC2ts4z5s2jTq8LDAyU7vhLTk7GvXv3Cj95IiIiemdpfQ1T//79sWbNmqLIRSNvb2+1H/jdu3cv6tWrBwMDgzxjfHx8cu1XqVTCwsJCNhARERGpaH0NU3p6On744QdERkaiXr16MDU1lU1fsGBBvvt6/vw5bt68KY3fvn0b586dg5WVFSpUqIDAwEDExcVh/fr1AIDhw4dj6dKlCAgIwKefforo6GisWbNGdvebv78/mjZtijlz5qBLly7Yvn079u3bh6NHj2q7qEREREQAClAwXbp0CXXr1gUA/PXXX7Jp2p6qi4mJQYsWLaTxgIAAAMCAAQMQGhqK+Ph4xMbGStNdXV2xe/dujBs3DsuWLYOjoyMWL14sPVIAAHx8fLBp0yZMnjwZU6ZMQaVKlRAWFoaGDRtqu6hEREREAApQMB04cKDQXrx58+bSRds5CQ0NVWtr1qwZzpw5k2e/PXr0QI8ePfKMISIiIsqvAj+H6ebNm4iIiMC///4LAHkWPkRERETvMq0LpsePH6NVq1aoUqUKOnTogPj4eADA0KFDMX78+EJPkIiIiEjXtC6Yxo0bBwMDA8TGxsLExERq7927N/bs2VOoyREREREVB1pfw7R3715ERESgfPnysvbKlSvj7t27hZYYERERUXGh9RGm1NRU2ZEllUePHkGpVBZKUkRERETFidYFU9OmTaXnIgGvHiWQnZ2N7777TvaIACIiIqKSQutTct999x2aN2+OmJgYpKenY+LEibh8+T
KePHmCY8eOFUWORERERDql9RGmatWq4cKFC2jQoAF8fX2RmpqKbt264ezZs6hUqVJR5EhERESkU1ofYQJe/cBtUFBQYedCREREVCwVqGB6+vQp1qxZg6tXr0KhUKBq1aoYNGgQrKysCjs/IiIiIp3T+pTcoUOH4OrqisWLF+Pp06d48uQJFi9eDFdXVxw6dKgociQiIiLSKa2PMI0cORK9evXCihUroK+vDwDIysrCiBEjMHLkSFy6dKnQkyQiIiLSJa2PMN26dQvjx4+XiiUA0NfXR0BAAG7dulWoyREREREVB1oXTHXr1sXVq1fV2q9evYratWsXRk5ERERExUq+TslduHBB+v+YMWPg7++PmzdvolGjRgCA48ePY9myZZg9e3bRZElERESkQ/kqmGrXrg2FQgEhhNQ2ceJEtbi+ffuid+/ehZcdERERUTGQr4Lp9u3bRZ0HERERUbGVr4LJ2dm5qPMgIiIiKrYK9ODKuLg4HDt2DImJicjOzpZNGzNmTKEkRkRERFRcaF0wrVu3DsOHD4ehoSGsra2hUCikaQqFggUTERERlThaF0xTp07F1KlTERgYCD09rZ9KQERERPTO0briefHiBT7++GMWS0RERPTe0LrqGTJkCH799deiyIWIiIioWNL6lFxwcDA6deqEPXv2wNPTEwYGBrLpCxYsKLTkiIiIiIoDrQumWbNmISIiAu7u7gCgdtE3ERERUUmjdcG0YMECrF27FgMHDiyCdIiIiIiKH62vYVIqlWjcuHFR5EJERERULGldMPn7+2PJkiVFkQsRERFRsaT1KbmTJ09i//792LlzJ6pXr6520feWLVsKLTkiIiKi4kDrgql06dLo1q1bUeRCREREVCwV6KdRiIiIiN4nfFw3ERERkQZaH2FydXXN83lLf//9939KiIiIiKi40foI09ixY+Hv7y8NI0aMgLe3N5KTkzFs2DCtE1i+fDlcXV1hZGQELy8vHDlyJNfYgQMHQqFQqA3Vq1eXYkJDQ3OMefnypda5EREREQEFOMLk7++fY/uyZcsQExOjVV9hYWEYO3Ysli9fjsaNG+P7779H+/btceXKFVSoUEEtftGiRZg9e7Y0npmZiVq1aqFnz56yOAsLC1y/fl3WZmRkpFVuRERERCqFdg1T+/btER4ertU8CxYswJAhQzB06FBUrVoVISEhcHJywooVK3KMt7S0hL29vTTExMTg6dOnGDRokCxOoVDI4uzt7Qu8XERERESFVjD99ttvsLKyynd8eno6Tp8+jTZt2sja27Rpg6ioqHz1sWbNGrRu3RrOzs6y9ufPn8PZ2Rnly5dHp06dcPbs2XznRURERPQmrU/J1alTR3bRtxACCQkJePjwIZYvX57vfh49eoSsrCzY2dnJ2u3s7JCQkKBx/vj4ePzxxx/YsGGDrN3DwwOhoaHw9PRESkoKFi1ahMaNG+P8+fOoXLlyjn2lpaUhLS1NGk9JScn3chAREVHJp3XB9NFHH8nG9fT0ULZsWTRv3hweHh5aJ/DmHXdCiDzvwlMJDQ1F6dKl1fJp1KgRGjVqJI03btwYdevWxZIlS7B48eIc+woODkZQUJDWuRMREdH7QeuCadq0aYXywjY2NtDX11c7mpSYmKh21OlNQgisXbsWfn5+MDQ0zDNWT08P9evXx40bN3KNCQwMREBAgDSekpICJyenfCwFERERvQ909uBKQ0NDeHl5ITIyUtYeGRkJHx+fPOc9dOgQbt68iSFDhmh8HSEEzp07BwcHh1xjlEolLCwsZAMRERGRSr6PMOnp6Wk8VaZQKJCZmZnvFw8ICICfnx/q1asHb29vrFq1CrGxsRg+fDiAV0d+4uLisH79etl8a9asQcOGDVGjRg21PoOCgtCoUSNUrlwZKSkpWLx4Mc6dO4dly5blOy8iIiKi1+W7YNq6dWuu06KiorBkyRIIIbR68d69e+Px48eYMWMG4uPjUaNGDezevVu66y0+Ph6xsbGyeZKTkxEeHo5Fixbl2GdSUhKGDRuGhIQEWFpaok6dOjh8+DAaNGigVW5EREREKvkumLp06aLWdu3aNQQGBmLHjh3o168fvvnmG60TGDFiBEaMGJHjtNDQULU2S0tLvHjxItf+Fi5ciIULF2qdBxEREVFuCnQN04MHD/Dpp5+iZs2ayMzMxLlz5/Djjz/m+HRuIiIionedVgVTcnIyJk2aBDc3N1y+fBl//vknduzYkeO1REREREQlRb5Pyc2dOxdz5syBvb09Nm7cmOMpOiIiIqKSKN8F05dffgljY2O4ubnhxx9/xI8//phj3JYtWwotOSIiIqLiIN8FU//+/fP1BG4iIiKikibfBVNOd6wRERERvQ909qRvIiIioncFCyYiIiIiDVgwEREREWnAgomIiIhIAxZMRERERBqwYCIiIiLSgAUTERERkQYsmIiIiIg0YMFEREREpAELJiIiIiINWDARERERacCCiYiIiEgDFkxEREREGrBgIiIiItKABRMRERGRBiyYiIiIiDRgwURERESkAQsmIiIiIg1YMBERERFpwIKJiIiISAMWTEREREQasGAiIiIi0oAFExEREZEGLJiIiIiINGDBRERERKQBCyYiIiIiDVgwEREREWmg84Jp+fLlcHV1hZGREby8vHDkyJFcYw8ePAiFQqE2XLt2TRYXHh6OatWqQalUolq1ati6dWtRLwYRERGVYDotmMLCwjB27Fh8/fXXOHv2LJo0aYL27dsjNjY2z/muX7+O+Ph4aahcubI0LTo6Gr1794afnx/Onz8PPz8/9OrVCydOnCjqxSEiIqISSqcF04IFCzBkyBAMHToUVatWRUhICJycnLBixYo857O1tYW9vb006OvrS9NCQkLg6+uLwMBAeHh4IDAwEK1atUJISEgRLw0RERGVVDormNLT03H69Gm0adNG1t6mTRtERUXlOW+dOnXg4OCAVq1a4cCBA7Jp0dHRan22bdtWY59EREREuSmlqxd+9OgRsrKyYGdnJ2u3s7NDQkJCjvM4ODhg1apV8PLyQlpaGn766Se0atUKBw8eRNOmTQEACQkJWvUJAGlpaUhLS5PGU1JSCrpYREREVALprGBSUSgUsnEhhFqbiru7O9zd3aVxb29v3Lt3D/PmzZMKJm37BIDg4GAEBQUVJH0iIiJ6D+jslJyNjQ309fXVjvwkJiaqHSHKS6NGjXDjxg1p3N7eXus+AwMDkZycLA337t3L9+sTERFRyaezgsnQ0BBeXl6IjIyUtUdGRsLHxyff/Zw9exYODg7SuLe3t1qfe/fuzbNPpVIJCwsL2UBERESkotNTcgEBAfDz80O9evXg7e2NVatWITY2FsOHDwfw6shPXFwc1q9fD+DVHXAuLi6oXr060tPT8fPPPyM8PBzh4eFSn/7+/mjatCnmzJmDLl26YPv27di3bx+OHj2qk2UkIiKid59OC6bevXvj8ePHmDFjBuLj41GjRg3s3r0bzs7OAID4+HjZM5nS09MxYcIExMXFwdjYGNWrV8euXbvQoUMHKcbHxwebNm3C5MmTM
WXKFFSqVAlhYWFo2LDhW18+IiIiKhl0ftH3iBEjMGLEiBynhYaGysYnTpyIiRMnauyzR48e6NGjR2GkR0RERKT7n0YhIiIiKu5YMBERERFpwIKJiIiISAMWTEREREQasGAiIiIi0oAFExEREZEGLJiIiIiINGDBRERERKQBCyYiIiIiDVgwEREREWnAgomIiIhIAxZMRERERBqwYCIiIiLSgAUTERERkQYsmIiIiIg0YMFEREREpAELJiIiIiINWDARERERacCCiYiIiEgDFkxEREREGrBgIiIiItKABRMRERGRBiyYiIiIiDRgwURERESkAQsmIiIiIg1YMBERERFpwIKJiIiISAMWTEREREQasGAiIiIi0oAFExEREZEGLJiIiIiINGDBRERERKQBCyYiIiIiDXReMC1fvhyurq4wMjKCl5cXjhw5kmvsli1b4Ovri7Jly8LCwgLe3t6IiIiQxYSGhkKhUKgNL1++LOpFISIiohJKpwVTWFgYxo4di6+//hpnz55FkyZN0L59e8TGxuYYf/jwYfj6+mL37t04ffo0WrRogc6dO+Ps2bOyOAsLC8THx8sGIyOjt7FIREREVAKV0uWLL1iwAEOGDMHQoUMBACEhIYiIiMCKFSsQHBysFh8SEiIbnzVrFrZv344dO3agTp06UrtCoYC9vX2R5k5ERETvD50dYUpPT8fp06fRpk0bWXubNm0QFRWVrz6ys7Px7NkzWFlZydqfP38OZ2dnlC9fHp06dVI7AkVERESkDZ0VTI8ePUJWVhbs7Oxk7XZ2dkhISMhXH/Pnz0dqaip69eoltXl4eCA0NBS///47Nm7cCCMjIzRu3Bg3btzItZ+0tDSkpKTIBiIiIiIVnZ6SA16dPnudEEKtLScbN27E9OnTsX37dtja2krtjRo1QqNGjaTxxo0bo27duliyZAkWL16cY1/BwcEICgoq4BIQERFRSaezI0w2NjbQ19dXO5qUmJiodtTpTWFhYRgyZAg2b96M1q1b5xmrp6eH+vXr53mEKTAwEMnJydJw7969/C8IERERlXg6K5gMDQ3h5eWFyMhIWXtkZCR8fHxynW/jxo0YOHAgNmzYgI4dO2p8HSEEzp07BwcHh1xjlEolLCwsZAMRERGRik5PyQUEBMDPzw/16tWDt7c3Vq1ahdjYWAwfPhzAqyM/cXFxWL9+PYBXxVL//v2xaNEiNGrUSDo6ZWxsDEtLSwBAUFAQGjVqhMqVKyMlJQWLFy/GuXPnsGzZMt0sJBEREb3zdFow9e7dG48fP8aMGTMQHx+PGjVqYPfu3XB2dgYAxMfHy57J9P333yMzMxMjR47EyJEjpfYBAwYgNDQUAJCUlIRhw4YhISEBlpaWqFOnDg4fPowGDRq81WUjIiKikkPnF32PGDECI0aMyHGaqghSOXjwoMb+Fi5ciIULFxZCZkRERESv6PynUYiIiIiKOxZMRERERBqwYCIiIiLSgAUTERERkQYsmIiIiIg0YMFEREREpAELJiIiIiINWDARERERacCCiYiIiEgDFkxEREREGrBgIiIiItKABRMRERGRBiyYiIiIiDRgwURERESkAQsmIiIiIg1YMBERERFpwIKJiIiISAMWTEREREQasGAiIiIi0oAFExEREZEGLJiIiIiINGDBRERERKQBCyYiIiIiDVgwEREREWnAgomIiIhIAxZMRERERBqwYCIiIiLSgAUTERERkQYsmIiIiIg0YMFEREREpAELJiIiIiINWDARERERacCCiYiIiEgDnRdMy5cvh6urK4yMjODl5YUjR47kGX/o0CF4eXnByMgIFStWxMqVK9ViwsPDUa1aNSiVSlSrVg1bt24tqvSJiIjoPaDTgiksLAxjx47F119/jbNnz6JJkyZo3749YmNjc4y/ffs2OnTogCZNmuDs2bP46quvMGbMGISHh0sx0dHR6N27N/z8/HD+/Hn4+fmhV69eOHHixNtaLCIiIiphdFowLViwAEOGDMHQoUNRtWpVhISEwMnJCStWrMgxfuXKlahQoQJCQkJQtWpVDB06FIMHD8a8efOkmJCQEPj6+iIwMBAeHh4IDAxEq1atEBIS8paWioiIiEoanRVM6enpOH36NNq0aSNrb9OmDaKionKcJzo6Wi2+bdu2iImJQUZGRp4xufVJREREpEkpXb3wo0ePkJWVBTs7O1m7nZ0dEhIScpwnISEhx/jMzEw8evQIDg4Oucbk1icApKWlIS0tTRpPTk4GAKSkpGi1TPmVnfaiSPsvCvnKOfUl/j8o1/lU4zn19fq0N2PenKbtutNmneeVx8u09Fcxma9iXmalFzingpCW4//X9Zvj/5/Iq39z2B6a+n1dXtunIMv65rrLSY795rEcattVi1ht98U8c8xHrv8pVkvv4meMzhX29viv/RVyPvnd34t8n8nps6oQqfIXQhR63xA6EhcXJwCIqKgoWfvMmTOFu7t7jvNUrlxZzJo1S9Z29OhRAUDEx8cLIYQwMDAQGzZskMX8/PPPQqlU5prLtGnTBAAOHDhw4MCBQwkY7t27V5DSJE86O8JkY2MDfX19tSM/iYmJakeIVOzt7XOML1WqFKytrfOMya1PAAgMDERAQIA0np2djSdPnsDa2hoKhUKr5aKilZKSAicnJ9y7dw8WFha6Todew21TfHHbFG/cPoVHCIFnz57B0dGx0PvWWcFkaGgILy8vREZGomvXrlJ7ZGQkunTpkuM83t7e2LFjh6xt7969qFevHgwMDKSYyMhIjBs3Thbj4+OTay5KpRJKpVLWVrp0aW0Xid4iCwsLfrAUU9w2xRe3TfHG7VM4LC0ti6RfnRVMABAQEAA/Pz/Uq1cP3t7eWLVqFWJjYzF8+HAAr478xMXFYf369QCA4cOHY+nSpQgICMCnn36K6OhorFmzBhs3bpT69Pf3R9OmTTFnzhx06dIF27dvx759+3D06FGdLCMRERG9+3RaMPXu3RuPHz/GjBkzEB8fjxo1amD37t1wdnYGAMTHx8ueyeTq6ordu3dj3LhxWLZsGRwdHbF48WJ0795divHx8cGmTZswefJkTJkyBZUqVUJYWBgaNmz41pePiIiISgaFEEVxKTlR0UhLS0NwcDACAwPVTqOSbnHbFF/cNsUbt8+7gQUTERERkQY6/y05IiIiouKOBRMRERGRBiyYiIiIiDRgwURERESkAQsm0qk7d+5gyJAhcHV1hbGxMSpVqoRp06YhPT1dFhcbG4vOnTvD1NQUNjY2GDNmjFrMxYsX0axZMxgbG6NcuXKYMWOG2u8JHTp0CF5eXjAyMkLFihWxcuXKIl/G98Hy5cvh6uoKIyMjeHl54ciRI7pOqcQJDg5G/fr1YW5uDltbW3z00Ue4fv26LEYIgenTp8PR0RHGxsZo3rw5Ll++LItJS0vD6NGjYWNjA1NTU3z44Ye4f/++LObp06fw8/ODpaUlLC0t4efnh6SkpKJexBIjODgYCoUCY8eOldq4bUqAQv+xFSIt/PHHH2LgwIEiIiJC3Lp1S2zfvl3Y2tqK8ePHSzGZmZmi
Ro0aokWLFuLMmTMiMjJSODo6ilGjRkkxycnJws7OTnz88cfi4sWLIjw8XJibm4t58+ZJMX///bcwMTER/v7+4sqVK2L16tXCwMBA/Pbbb291mUuaTZs2CQMDA7F69Wpx5coV4e/vL0xNTcXdu3d1nVqJ0rZtW7Fu3Tpx6dIlce7cOdGxY0dRoUIF8fz5cylm9uzZwtzcXISHh4uLFy+K3r17CwcHB5GSkiLFDB8+XJQrV05ERkaKM2fOiBYtWohatWqJzMxMKaZdu3aiRo0aIioqSkRFRYkaNWqITp06vdXlfVedPHlSuLi4iJo1awp/f3+pndvm3ceCiYqduXPnCldXV2l89+7dQk9PT8TFxUltGzduFEqlUiQnJwshhFi+fLmwtLQUL1++lGKCg4OFo6OjyM7OFkIIMXHiROHh4SF7rc8++0w0atSoKBenxGvQoIEYPny4rM3Dw0N8+eWXOsro/ZCYmCgAiEOHDgkhhMjOzhb29vZi9uzZUszLly+FpaWlWLlypRBCiKSkJGFgYCA2bdokxcTFxQk9PT2xZ88eIYQQV65cEQDE8ePHpZjo6GgBQFy7du1tLNo769mzZ6Jy5coiMjJSNGvWTCqYuG1KBp6So2InOTkZVlZW0nh0dDRq1Kgh+zHFtm3bIi0tDadPn5ZimjVrJnvoW9u2bfHgwQPcuXNHimnTpo3stdq2bYuYmBhkZGQU4RKVXOnp6Th9+rTaem3Tpg2ioqJ0lNX7ITk5GQCk98rt27eRkJAg2xZKpRLNmjWTtsXp06eRkZEhi3F0dESNGjWkmOjoaFhaWsp+HaFRo0awtLTkNtVg5MiR6NixI1q3bi1r57YpGVgwUbFy69YtLFmyRPo9QQBISEiAnZ2dLK5MmTIwNDREQkJCrjGqcU0xmZmZePToUaEvy/vg0aNHyMrKynG9qtY7FT4hBAICAvDBBx+gRo0aAP63n+e1LRISEmBoaIgyZcrkGWNra6v2mra2ttymedi0aRPOnDmD4OBgtWncNiUDCyYqEtOnT4dCochziImJkc3z4MEDtGvXDj179sTQoUNl0xQKhdprCCFk7W/GiP+/4FvbGNJeTuuV67TojBo1ChcuXJD98LhKQbaFpvdSfvt5X927dw/+/v74+eefYWRklGsct827Tac/vksl16hRo/Dxxx/nGePi4iL9/8GDB2jRogW8vb2xatUqWZy9vT1OnDgha3v69CkyMjKkv9js7e3V/sJKTEwEAI0xpUqVgrW1df4XjiQ2NjbQ19fPcb2++dc0FY7Ro0fj999/x+HDh1G+fHmp3d7eHsCroxAODg5S++vbwt7eHunp6Xj69KnsSEZiYiJ8fHykmH/++UftdR8+fMhtmovTp08jMTERXl5eUltWVhYOHz6MpUuXSnczctu823iEiYqEjY0NPDw88hxUf4nFxcWhefPmqFu3LtatWwc9Pflu6e3tjUuXLiE+Pl5q27t3L5RKpfQB5e3tjcOHD8seNbB37144OjpKhZm3tzciIyNlfe/duxf16tWDgYFBUayGEs/Q0BBeXl5q6zUyMlL6kKfCIYTAqFGjsGXLFuzfvx+urq6y6a6urrC3t5dti/T0dBw6dEjaFl5eXjAwMJDFxMfH49KlS1KMt7c3kpOTcfLkSSnmxIkTSE5O5jbNRatWrXDx4kWcO3dOGurVq4d+/frh3LlzqFixIrdNSaCba82JXomLixNubm6iZcuW4v79+yI+Pl4aVFSPFWjVqpU4c+aM2LdvnyhfvrzssQJJSUnCzs5O9OnTR1y8eFFs2bJFWFhY5PhYgXHjxokrV66INWvW8LEChUD1WIE1a9aIK1euiLFjxwpTU1Nx584dXadWonz++efC0tJSHDx4UPY+efHihRQze/ZsYWlpKbZs2SIuXrwo+vTpk+Ot6+XLlxf79u0TZ86cES1btszx1vWaNWuK6OhoER0dLTw9PXnrupZev0tOCG6bkoAFE+nUunXrBIAch9fdvXtXdOzYURgbGwsrKysxatQo2SMEhBDiwoULokmTJkKpVAp7e3sxffp06ZECKgcPHhR16tQRhoaGwsXFRaxYsaLIl/F9sGzZMuHs7CwMDQ1F3bp1pVvdqfDk9j5Zt26dFJOdnS2mTZsm7O3thVKpFE2bNhUXL16U9fPvv/+KUaNGCSsrK2FsbCw6deokYmNjZTGPHz8W/fr1E+bm5sLc3Fz069dPPH369C0sZcnxZsHEbfPuUwjxxqOQiYiIiEiG1zARERERacCCiYiIiEgDFkxEREREGrBgIiIiItKABRMRERGRBiyYiIiIiDRgwURERESkAQsmInovDRw4EB999JE03rx5c4wdO1Zn+RBR8caCiYiKhYSEBPj7+8PNzQ1GRkaws7PDBx98gJUrV+LFixdF/vpbtmzBN998U6h9vlmUEdG7q5SuEyAi+vvvv9G4cWOULl0as2bNgqenJzIzM/HXX39h7dq1cHR0xIcffqg2X0ZGRqH9cLKVlVWh9ENEJROPMBGRzo0YMQKlSpVCTEwMevXqhapVq8LT0xPdu3fHrl270LlzZwCAQqHAypUr0aVLF5iammLmzJnIysrCkCFD4OrqCmNjY7i7u2PRokWy/rOyshAQEIDSpUvD2toaEydOxJu/CvXmKbn09HRMnDgR5cqVg6mpKRo2bIiDBw9K00NDQ1G6dGlERESgatWqMDMzQ7t27RAfHw8AmD59On788Uds374dCoUCCoVCNj8RvVtYMBGRTj1+/Bh79+7FyJEjYWpqmmOMQqGQ/j9t2jR06dIFFy9exODBg5GdnY3y5ctj8+bNuHLlCqZOnYqvvvoKmzdvluaZP38+1q5dizVr1uDo0aN48uQJtm7dmmdegwYNwrFjx7Bp0yZcuHABPXv2RLt27XDjxg0p5sWLF5g3bx5++uknHD58GLGxsZgwYQIAYMKECejVq5dURMXHx8PHx+e/rCoi0iGekiMinbp58yaEEHB3d5e129jY4OXLlwCAkSNHYs6cOQCAvn37YvDgwbLYoKAg6f+urq6IiorC5s2b0atXLwBASEgIAgMD0b17dwDAypUrERERkWtOt27dwsaNG3H//n04OjoCeFUA7dmzB+vWrcOsWbMAvDoluHLlSlSqVAkAMGrUKMyYMQMAYGZmBmNjY6SlpcHe3r5gK4eIig0WTERULLx+FAkATp48iezsbPTr1w9paWlSe7169dTmXblyJX744QfcvXsX//77L9LT01G7dm0AQHJyMuLj4+Ht7S3FlypVCvXq1VM7Lady5swZCCFQpUoVWXtaWhqsra2lcRMTE6lYAgAHBwckJibmf6GJ6J3BgomIdMrNzQ0KhQLXrl2TtVesWBEAYGxsLGt/87Td5s2bMW7cOMyfPx/e3t4wNzfHd999hxMnThQ4p+zsbOjr6+P06dPQ19eXTTMzM5P+/+YF5wqFItcijIjebbyGiYh0ytraGr6+vli6dClSU1O1nv/IkSPw8fHBiBEjUKdOHbi5ueHWrVvSdEtLSzg4OOD48eNSW2ZmJk6fPp1rn3Xq1EF
WVhYSExPh5uYmG7Q5vWZoaIisrCytl4mIih8WTESkc8uXL0dmZibq1auHsLAwXL16FdevX8fPP/+Ma9euqR3leZ2bmxtiYmIQERGBv/76C1OmTMGpU6dkMf7+/pg9eza2bt2Ka9euYcSIEUhKSsq1zypVqqBfv37o378/tmzZgtu3b+PUqVOYM2cOdu/ene/lcnFxwYULF3D9+nU8evQIGRkZ+Z6XiIoXFkxEpHOVKlXC2bNn0bp1awQGBqJWrVqoV68elixZggkTJuT5QMnhw4ejW7du6N27Nxo2bIjHjx9jxIgRspjx48ejf//+GDhwoHTarmvXrnnmtG7dOvTv3x/jx4+Hu7s7PvzwQ5w4cQJOTk75Xq5PP/0U7u7uqFevHsqWLYtjx47le14iKl4UgifciYiIiPLEI0xEREREGrBgIiIiItKABRMRERGRBiyYiIiIiDRgwURERESkAQsmIiIiIg1YMBERERFpwIKJiIiISAMWTEREREQasGAiIiIi0oAFExEREZEGLJiIiIiINPg/VPavnl0qI7QAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "%matplotlib inline\n", + "calibrated_gradient = lava.compute_values_and_visualize(dual_sol, trained_with_flag, training_size, portion)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.7" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/lava.py b/lava.py index b92d77b..9612a26 100644 --- a/lava.py +++ b/lava.py @@ -19,7 +19,7 @@ from torch.autograd import Variable import time -import imageio +# import imageio import pickle from PIL import Image, ImageOps, ImageEnhance from copy import deepcopy as dpcp @@ -30,8 +30,8 @@ from torch.utils.data import Dataset, TensorDataset, DataLoader from vgg import vgg16 -from preact_resnet import PreActResNet18 -from resnet import ResNet18 +#from preact_resnet import load_state_dict +#from resnet import ResNet18 # Load clean data @@ -163,15 +163,15 @@ def compute_dual(feature_extractor, trainloader, testloader, training_size, shuf # to return 1 # OT Dual calculation - dual_sol = get_OT_dual_sol(feature_extractor, trainloader, testloader, p=2, resize=32, device='cuda') + dual_sol = get_OT_dual_sol(feature_extractor, trainloader, testloader, p=2, resize=32, device=device) return dual_sol, trained_with_flag # Get the data values and also visualizes the detection of 'bad' data -def compute_values_and_visualize(dual_sol, trained_with_flag, training_size, portion): +def compute_values_and_visualize(dual_sol, trained_with_flag, training_size, portion, poisoned=None): calibrated_gradient = values(dual_sol, training_size) sorted_gradient_ind = sort_and_keep_indices(calibrated_gradient, training_size) - visualize_values_distr_sorted(trained_with_flag, sorted_gradient_ind, training_size, portion, calibrated_gradient) + visualize_values_distr_sorted(trained_with_flag, sorted_gradient_ind, training_size, portion, calibrated_gradient, poisoned) return calibrated_gradient @@ -187,23 +187,31 @@ def sort_and_keep_indices(trainGradient, training_size): # Visualize based on sorted values (calibrated gradient) # Prints 3 graphs, with a random baselines (explained in paper...) 
-def visualize_values_distr_sorted(tdid, tsidx, trsize, portion, trainGradient): +# dual_sol, trained_with_flag, training_size, portion, calibrated_gradient +def visualize_values_distr_sorted(tdid, tsidx, trsize, portion, trainGradient, poisoned=None): x1, y1, base = [], [], [] - poisoned = trsize * portion + if poisoned is None: + poisoned = trsize * portion for vari in range(10,trsize,10): if vari < 3000: - found = sum(tdid[tsidx[i][0]][2] for i in range(vari)) - + #found = sum(tdid[tsidx[i][0]][2] for i in range(vari)) + found = 0 + actual_found = 0 + for i in range(vari): + if tdid[tsidx[i][0]][2]: + found += tdid[tsidx[i][0]][2] + if tsidx[i][0] < 1000: + actual_found += 1 # print('inspected: '+str(vari), 'found: '+str(found), # 'detection rate: ', str(found / poisoned), 'baseline = '+str(vari*0.2*0.9)) - print(f'inspected: {vari}, found: {found} detection rate: {found / poisoned:.2f} baseline: {vari*0.2*0.9}') + print(f'inspected: {vari}, found: {found} actual found: {actual_found} synthetic found: {found - actual_found}, detection rate: {found / poisoned:.2f} baseline: {vari*0.2*0.9}') - x1.append(vari) - y1.append(sum(tdid[tsidx[i][0]][2] for i in range(vari))) - base.append(vari*portion*1.0) - plt.scatter(x1, y1, s=10) - plt.scatter(x1, base, s=10) + x1.append(vari) + y1.append(found) + base.append(vari*poisoned/trsize*1.0) + plt.scatter(x1, y1, s=10, color = 'red') + plt.scatter(x1, base, s=10, color = 'orange') # naming the x axis plt.xlabel('Inspected Images') # naming the y axis diff --git a/mnist.py b/mnist.py new file mode 100644 index 0000000..71bcd34 --- /dev/null +++ b/mnist.py @@ -0,0 +1,130 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +import torchvision.transforms as transforms +import torchvision.datasets as datasets +import torch.optim as optim + +class PreActBlock(nn.Module): + '''Pre-activation version of the BasicBlock.''' + expansion = 1 + + def __init__(self, in_planes, planes, stride=1): + super(PreActBlock, self).__init__() + self.bn1 = nn.BatchNorm2d(in_planes) + self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False) + self.bn2 = nn.BatchNorm2d(planes) + self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False) + + if stride != 1 or in_planes != self.expansion*planes: + self.shortcut = nn.Sequential( + nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False) + ) + + def forward(self, x): + out = F.relu(self.bn1(x)) + shortcut = self.shortcut(out) if hasattr(self, 'shortcut') else x + out = self.conv1(out) + out = self.conv2(F.relu(self.bn2(out))) + out += shortcut + return out + + +class PreActResNet(nn.Module): + def __init__(self, block, num_blocks, num_classes=100): + super(PreActResNet, self).__init__() + self.in_planes = 64 + + self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False) + self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1) + self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2) + self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2) + self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2) + self.linear = nn.Linear(512*block.expansion, num_classes) + + def _make_layer(self, block, planes, num_blocks, stride): + strides = [stride] + [1]*(num_blocks-1) + layers = [] + for stride in strides: + layers.append(block(self.in_planes, planes, stride)) + self.in_planes = planes * block.expansion + return nn.Sequential(*layers) + + def forward(self, x): +
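# Forward pass: 3x3 conv stem, four pre-activation residual stages, 4x4 average pooling, flatten, then the linear classifier. +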
out = self.conv1(x) + out = self.layer1(out) + out = self.layer2(out) + out = self.layer3(out) + out = self.layer4(out) + out = F.avg_pool2d(out, 4) + out = out.view(out.size(0), -1) + out = self.linear(out) + return out + + +def PreActResNet18(num_classes=100): + return PreActResNet(PreActBlock, [2, 2, 2, 2], num_classes=num_classes) + +# Load pretrained weights (example) +def load_pretrained_weights(model): + # Simulate loading pretrained weights here + # For instance, model.load_state_dict(torch.load('pretrained_model.pth')) + print("Pretrained weights loaded.") + +# Modify the final layer for MNIST +def modify_for_mnist(model): + model.linear = nn.Linear(512, 10) + +# Training the model on MNIST +def train_model(model, train_loader, criterion, optimizer, num_epochs=5): + model.train() + for epoch in range(num_epochs): + for data, target in train_loader: + data = data.repeat(1, 3, 1, 1) # Convert grayscale to RGB + optimizer.zero_grad() + output = model(data) + loss = criterion(output, target) + loss.backward() + optimizer.step() + print(f'Epoch {epoch+1}/{num_epochs}, Loss: {loss.item()}') + +# Evaluating the model on MNIST +def evaluate_model(model, test_loader): + model.eval() + correct = 0 + total = 0 + with torch.no_grad(): + for data, target in test_loader: + data = data.repeat(1, 3, 1, 1) # Convert grayscale to RGB + outputs = model(data) + _, predicted = torch.max(outputs.data, 1) + total += target.size(0) + correct += (predicted == target).sum().item() + print(f'Accuracy: {100 * correct / total:.2f}%') + + +# Main function to train and evaluate the model +if __name__ == "__main__": + # Create the model and modify for MNIST + model = PreActResNet18(num_classes=100) + load_pretrained_weights(model) + modify_for_mnist(model) + + # Load MNIST data + transform = transforms.Compose([ + transforms.Resize((32, 32)), + transforms.ToTensor(), + transforms.Normalize((0.5,), (0.5,)) + ]) + train_dataset = datasets.MNIST(root='./data', train=True, download=True, transform=transform) + train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=64, shuffle=True) + test_dataset = datasets.MNIST(root='./data', train=False, download=True, transform=transform) + test_loader = torch.utils.data.DataLoader(test_dataset, batch_size=64, shuffle=False) + + # Define loss function and optimizer + criterion = nn.CrossEntropyLoss() + optimizer = optim.Adam(model.parameters(), lr=0.001) + + # Train and evaluate the model + train_model(model, train_loader, criterion, optimizer, num_epochs=5) + evaluate_model(model, test_loader) \ No newline at end of file diff --git a/mnist_2_class.ipynb b/mnist_2_class.ipynb new file mode 100644 index 0000000..1af851d --- /dev/null +++ b/mnist_2_class.ipynb @@ -0,0 +1,26983 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "c:\\Users\\21520\\anaconda3\\Lib\\site-packages\\torchtext\\data\\__init__.py:4: UserWarning: \n", + "/!\\ IMPORTANT WARNING ABOUT TORCHTEXT STATUS /!\\ \n", + "Torchtext is deprecated and the last released version will be 0.18 (this one). You can silence this warning by calling the following at the beginnign of your scripts: `import torchtext; torchtext.disable_torchtext_deprecation_warning()`\n", + " warnings.warn(torchtext._TORCHTEXT_DEPRECATION_MSG)\n", + "c:\\Users\\21520\\PycharmProjects\\LAVA\\LAVA\\otdd\\pytorch\\utils.py:7: TqdmExperimentalWarning: Using `tqdm.autonotebook.tqdm` in notebook mode. 
Use `tqdm.tqdm` instead to force console mode (e.g. in jupyter console)\n", + " from tqdm.autonotebook import tqdm\n" + ] + } + ], + "source": [ + "import lava" + ] + }, + { + "cell_type": "code", + "execution_count": 96, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The autoreload extension is already loaded. To reload it, use:\n", + " %reload_ext autoreload\n" + ] + } + ], + "source": [ + "%load_ext autoreload\n", + "\n", + "%autoreload 2" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "True\n" + ] + } + ], + "source": [ + "from preact_resnet import PreActResNet18\n", + "import torch\n", + "print(torch.cuda.is_available()) # Should return True if GPU is available\n", + "import os\n", + "import torch.nn as nn\n", + "import torch.nn.functional as F\n", + "import torch.optim as optim\n", + "import torchvision.models as models\n", + "from torch.autograd import Variable\n", + "\n", + "import matplotlib.pyplot as plt\n", + "from torch import tensor\n", + "from torchvision import datasets, transforms\n", + "import pandas as pd\n", + "import numpy as n\n", + "\n", + "from torch.utils.data import Dataset, TensorDataset, DataLoader" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "True\n" + ] + } + ], + "source": [ + "#from preact_resnet import PreActResNet18\n", + "import torch\n", + "print(torch.cuda.is_available()) # Should return True if GPU is available" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "2.3.0\n" + ] + } + ], + "source": [ + "import torch\n", + "print(torch.__version__)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0.18.0\n", + "2.3.0\n", + "Cuda device: 0\n", + "cude devices: 1\n" + ] + } + ], + "source": [ + "cuda_num = 0\n", + "import torchvision\n", + "print(torchvision.__version__)\n", + "import torch\n", + "print(torch.__version__)\n", + "import os\n", + "#os.environ[\"CUDA_VISIBLE_DEVICES\"]=str(cuda_num)\n", + "#print(os.environ[\"CUDA_VISIBLE_DEVICES\"])\n", + "#torch.cuda.set_device(cuda_num)\n", + "print(\"Cuda device: \", torch.cuda.current_device())\n", + "print(\"cude devices: \", torch.cuda.device_count())" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "cuda:0\n" + ] + } + ], + "source": [ + "device = torch.device('cuda:' + str(cuda_num) if torch.cuda.is_available() else 'cpu')\n", + "print(device)\n", + "training_size = 1000\n", + "valid_size = 200\n", + "resize = 32\n", + "portion = 0.3" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "PreActResNet(\n", + " (conv1): Conv2d(3, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (layer1): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " 
(conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (layer2): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(64, 128, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (layer3): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(128, 256, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (layer4): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(256, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (linear): 
Linear(in_features=512, out_features=100, bias=True)\n", + ")" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "net_test = PreActResNet18()\n", + "net_test = net_test.to(device)\n", + "feature_extractor_name = 'preact_resnet18_test_mnist.pth'\n", + "net_test.load_state_dict(torch.load('checkpoint/'+feature_extractor_name, map_location=torch.device('cpu')))\n", + "net_test.eval()" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "MNIST\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, 
+    "...\n",
+    "[output truncated: the cell's stream output repeats, for each shuffled training sample, the lines 'Currrent label: <old>', 'New label: <new>', 'TRAINNNN label: tensor(<new>)', and the reassigned TRAINNNN tuple (an identical normalized image tensor plus the new label); the full dump is omitted here as it carries no additional information]\n",
..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 1\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " 
[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, 
-0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, 
-0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, 
-0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 1\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " 
[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + 
"Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, 
-0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, 
..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 1\n", + "New 
label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 1\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, 
-0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + 
" [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: 
tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 1\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, 
+            "[... roughly fifty further shuffle-corruption log entries elided: for each affected training sample the loader prints 'Currrent label', the reassigned 'New label', the matching 'TRAINNNN label', and the full normalized image tensor; only the tail of the final entry is kept below ...]\n",
-0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n" + ] + } + ], + "source": [ + "loaders, shuffle_ind = lava.load_data_corrupted(corrupt_type='shuffle', dataname='MNIST', resize=resize,\n", + " training_size=training_size, test_size=valid_size, currupt_por=portion)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([64, 1, 32, 32])\n", + "torch.Size([64, 1, 32, 32])\n", + "torch.Size([64, 1, 32, 32])\n", + "torch.Size([64, 1, 32, 32])\n", + "torch.Size([64, 1, 32, 32])\n", + "torch.Size([64, 1, 32, 32])\n", + "torch.Size([64, 1, 32, 32])\n", + "torch.Size([52, 1, 32, 32])\n" + ] + } + ], + "source": [ + "for batch in loaders['train']:\n", + " print(batch[0].size())" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "PreActResNet(\n", + " (conv1): Conv2d(3, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (layer1): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (layer2): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(64, 128, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (layer3): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(128, 256, 
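The cell above reloads the data through `lava.load_data_corrupted` with `corrupt_type='shuffle'` and `dataname='MNIST'`: a chosen fraction (`currupt_por`) of the training labels is reassigned to random other classes, and the indices of the affected samples come back as `shuffle_ind`; the verbose log summarised above is printed while this happens. As a rough standalone illustration of that corruption step (this is not `lava.load_data_corrupted` itself; `shuffle_labels` and its arguments are invented for the example):

```python
# Illustrative sketch only -- it mimics the behaviour suggested by the log
# above, not LAVA's actual loader: a fraction of the labels is replaced by a
# different random class, and the affected indices are returned so that
# detection can be scored against them later.
import torch

def shuffle_labels(labels: torch.Tensor, portion: float, num_classes: int = 10,
                   seed: int = 0):
    """Return (corrupted_labels, corrupted_indices) for a 1-D label tensor."""
    g = torch.Generator().manual_seed(seed)
    n = labels.numel()
    corrupt_idx = torch.randperm(n, generator=g)[: int(portion * n)]
    corrupted = labels.clone()
    for i in corrupt_idx.tolist():
        new_label = int(torch.randint(0, num_classes, (1,), generator=g))
        while new_label == int(labels[i]):      # force a genuinely wrong label
            new_label = int(torch.randint(0, num_classes, (1,), generator=g))
        corrupted[i] = new_label
    return corrupted, corrupt_idx

# Example: corrupt a quarter of 50 toy labels, in the spirit of the cell above.
y = torch.randint(0, 10, (50,))
y_bad, shuffled_idx = shuffle_labels(y, portion=0.25)
print(len(shuffled_idx), "labels reassigned")
```

Keeping `shuffle_ind` around is what makes the later evaluation possible: it is the ground truth that `train_with_corrupt_flag` compares the training set against.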
kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (layer4): Sequential(\n", + " (0): PreActBlock(\n", + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(256, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (shortcut): Sequential(\n", + " (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " )\n", + " )\n", + " (1): PreActBlock(\n", + " (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv1): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (linear): Linear(in_features=512, out_features=100, bias=True)\n", + " (fc): Identity()\n", + ")" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "embedder = net_test.to(device)\n", + "embedder.fc = torch.nn.Identity()\n", + "for p in embedder.parameters():\n", + " p.requires_grad = False\n", + "embedder.to(device)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [], + "source": [ + "from otdd.pytorch.distance_fast import DatasetDistance, FeatureCost, batch_augmented_cost\n", + "from otdd.pytorch.wasserstein import pwdist_exact\n", + "from functools import partial\n", + "from lava import train_with_corrupt_flag, get_indices" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# all lables" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "feature_cost = FeatureCost(src_embedding = embedder,\n", + " src_dim = (1, resize,resize),\n", + " tgt_embedding = embedder,\n", + " tgt_dim = (1, resize,resize),\n", + " p = 2,\n", + " device='cuda')\n", + "dist = DatasetDistance(loaders['train'], loaders['test'],\n", + " inner_ot_method = 'exact',\n", + " debiased_loss = True,\n", + " feature_cost = feature_cost,\n", + " λ_x=1.0, λ_y=1.0,\n", + " sqrt_method = 'spectral',\n", + " sqrt_niters=10,\n", + " precision='single',\n", + " p = 2, entreg = 1e-1,\n", + " device='cuda')" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "f7b9e7df2836490fa26bbb13eaaf09fd", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/8 [00:00 maxsamples:\n", + " idxs_1 = sorted(np.random.choice(\n", + " dist.X1.shape[0], maxsamples, replace=False))\n", + "else:\n", + " idxs_1 = np.s_[:] # hack to get a full slice\n", + "\n", + "if maxsamples and dist.X2.shape[0] > maxsamples:\n", + " idxs_2 = 
sorted(np.random.choice(\n", + " dist.X2.shape[0], maxsamples, replace=False))\n", + "else:\n", + " idxs_2 = np.s_[:] # hack to get a full slice\n", + "Z1 = torch.cat((dist.X1[idxs_1],\n", + " dist.Y1[idxs_1].type(dist.X1.dtype).unsqueeze(1)), -1)\n", + "Z2 = torch.cat((dist.X2[idxs_2],\n", + " dist.Y2[idxs_2].type(dist.X2.dtype).unsqueeze(1)), -1)\n", + "Z1 = Z1.to(device)\n", + "Z2 = Z2.to(device)" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([500, 1025]) torch.Size([100, 1025])\n", + "Z1 shape in batch: torch.Size([1, 500, 1025])\n", + "Z2 shape in batch: torch.Size([1, 100, 1025])\n", + "1 500 1024\n", + "torch.Size([1, 500, 100])\n", + "1 100 1024\n", + "torch.Size([1, 100, 100])\n", + "torch.Size([1, 500, 100])\n", + "torch.Size([1, 500, 100])\n", + "Gia tri M: tensor([[[152, 156, 153, ..., 158, 153, 153],\n", + " [132, 136, 133, ..., 138, 133, 133],\n", + " [ 12, 16, 13, ..., 18, 13, 13],\n", + " ...,\n", + " [ 72, 76, 73, ..., 78, 73, 73],\n", + " [112, 116, 113, ..., 118, 113, 113],\n", + " [192, 196, 193, ..., 198, 193, 193]]], device='cuda:0')\n", + "torch.Size([1, 500, 100])\n", + "torch.Size([1, 500, 100])\n", + "gia tri D: tensor([[[ 1804.6853, 14974.2344, 7895.7363, ..., 9058.2617,\n", + " 17243.1738, 10226.9785],\n", + " [12706.6680, 2664.0090, 26863.2051, ..., 6163.3584,\n", + " 43521.8320, 27187.5566],\n", + " [12645.3027, 1490.4263, 27969.5547, ..., 8942.0752,\n", + " 45487.8203, 29173.7930],\n", + " ...,\n", + " [ 2939.3594, 28473.1777, 2046.5125, ..., 8808.6172,\n", + " 5523.6709, 2870.5437],\n", + " [11649.2510, 20137.9180, 17066.0781, ..., 2747.4856,\n", + " 25628.7285, 15248.9375],\n", + " [ 1811.7312, 18445.1406, 5485.3979, ..., 7962.3965,\n", + " 12360.3535, 7070.6616]]], device='cuda:0')\n", + "torch.Size([1, 500, 100])\n", + "Z1 shape in batch: torch.Size([1, 100, 1025])\n", + "Z2 shape in batch: torch.Size([1, 500, 1025])\n", + "1 100 1024\n", + "torch.Size([1, 100, 100])\n", + "1 500 1024\n", + "torch.Size([1, 500, 100])\n", + "torch.Size([1, 100, 500])\n", + "torch.Size([1, 100, 500])\n", + "Gia tri M: tensor([[[247, 246, 240, ..., 243, 245, 249],\n", + " [327, 326, 320, ..., 323, 325, 329],\n", + " [267, 266, 260, ..., 263, 265, 269],\n", + " ...,\n", + " [367, 366, 360, ..., 363, 365, 369],\n", + " [267, 266, 260, ..., 263, 265, 269],\n", + " [267, 266, 260, ..., 263, 265, 269]]], device='cuda:0')\n", + "torch.Size([1, 100, 500])\n", + "torch.Size([1, 100, 500])\n", + "gia tri D: tensor([[[ 1804.6853, 12706.6680, 12645.3027, ..., 2939.3594,\n", + " 11649.2510, 1811.7312],\n", + " [14974.2344, 2664.0090, 1490.4263, ..., 28473.1777,\n", + " 20137.9180, 18445.1406],\n", + " [ 7895.7363, 26863.2051, 27969.5547, ..., 2046.5125,\n", + " 17066.0781, 5485.3979],\n", + " ...,\n", + " [ 9058.2627, 6163.3604, 8942.0771, ..., 8808.6172,\n", + " 2747.4875, 7962.3965],\n", + " [17243.1738, 43521.8320, 45487.8203, ..., 5523.6709,\n", + " 25628.7285, 12360.3535],\n", + " [10226.9785, 27187.5566, 29173.7930, ..., 2870.5437,\n", + " 15248.9395, 7070.6616]]], device='cuda:0')\n", + "torch.Size([1, 100, 500])\n" + ] + } + ], + "source": [ + "with torch.no_grad():\n", + " loss.debias = False\n", + " loss.potentials = True\n", + " print(Z1.shape, Z2.shape)\n", + " F_i, G_j = loss(Z1, Z2)" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [], + "source": [ + "π = [F_i, G_j]\n", + "dual_sol = π\n", + "for i in 
range(len(dual_sol)):\n", + " dual_sol[i] = dual_sol[i].to('cpu')" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": {}, + "outputs": [], + "source": [ + "train_indices = get_indices(loaders['train'])\n", + "trained_with_flag = train_with_corrupt_flag(loaders['train'], shuffle_ind, train_indices)" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "inspected: 10, found: 7 detection rate: 0.05 baseline: 1.8\n", + "inspected: 20, found: 11 detection rate: 0.07 baseline: 3.6\n", + "inspected: 30, found: 17 detection rate: 0.11 baseline: 5.4\n", + "inspected: 40, found: 22 detection rate: 0.15 baseline: 7.2\n", + "inspected: 50, found: 29 detection rate: 0.19 baseline: 9.0\n", + "inspected: 60, found: 35 detection rate: 0.23 baseline: 10.8\n", + "inspected: 70, found: 43 detection rate: 0.29 baseline: 12.6\n", + "inspected: 80, found: 49 detection rate: 0.33 baseline: 14.4\n", + "inspected: 90, found: 55 detection rate: 0.37 baseline: 16.2\n", + "inspected: 100, found: 58 detection rate: 0.39 baseline: 18.0\n", + "inspected: 110, found: 64 detection rate: 0.43 baseline: 19.8\n", + "inspected: 120, found: 69 detection rate: 0.46 baseline: 21.6\n", + "inspected: 130, found: 71 detection rate: 0.47 baseline: 23.400000000000002\n", + "inspected: 140, found: 75 detection rate: 0.50 baseline: 25.2\n", + "inspected: 150, found: 81 detection rate: 0.54 baseline: 27.0\n", + "inspected: 160, found: 85 detection rate: 0.57 baseline: 28.8\n", + "inspected: 170, found: 88 detection rate: 0.59 baseline: 30.6\n", + "inspected: 180, found: 90 detection rate: 0.60 baseline: 32.4\n", + "inspected: 190, found: 94 detection rate: 0.63 baseline: 34.2\n", + "inspected: 200, found: 97 detection rate: 0.65 baseline: 36.0\n", + "inspected: 210, found: 102 detection rate: 0.68 baseline: 37.800000000000004\n", + "inspected: 220, found: 103 detection rate: 0.69 baseline: 39.6\n", + "inspected: 230, found: 107 detection rate: 0.71 baseline: 41.4\n", + "inspected: 240, found: 112 detection rate: 0.75 baseline: 43.2\n", + "inspected: 250, found: 117 detection rate: 0.78 baseline: 45.0\n", + "inspected: 260, found: 121 detection rate: 0.81 baseline: 46.800000000000004\n", + "inspected: 270, found: 123 detection rate: 0.82 baseline: 48.6\n", + "inspected: 280, found: 125 detection rate: 0.83 baseline: 50.4\n", + "inspected: 290, found: 130 detection rate: 0.87 baseline: 52.2\n", + "inspected: 300, found: 133 detection rate: 0.89 baseline: 54.0\n", + "inspected: 310, found: 133 detection rate: 0.89 baseline: 55.800000000000004\n", + "inspected: 320, found: 134 detection rate: 0.89 baseline: 57.6\n", + "inspected: 330, found: 135 detection rate: 0.90 baseline: 59.4\n", + "inspected: 340, found: 136 detection rate: 0.91 baseline: 61.2\n", + "inspected: 350, found: 137 detection rate: 0.91 baseline: 63.0\n", + "inspected: 360, found: 137 detection rate: 0.91 baseline: 64.8\n", + "inspected: 370, found: 137 detection rate: 0.91 baseline: 66.60000000000001\n", + "inspected: 380, found: 137 detection rate: 0.91 baseline: 68.4\n", + "inspected: 390, found: 137 detection rate: 0.91 baseline: 70.2\n", + "inspected: 400, found: 137 detection rate: 0.91 baseline: 72.0\n", + "inspected: 410, found: 140 detection rate: 0.93 baseline: 73.8\n", + "inspected: 420, found: 140 detection rate: 0.93 baseline: 75.60000000000001\n", + "inspected: 430, found: 140 detection rate: 0.93 baseline: 77.4\n", + "inspected: 
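The `inspected / found / detection rate / baseline` lines (the report continues just below) are a cumulative audit of the ranking derived from the OT dual solution: the most suspicious training points are inspected first, `found` counts how many of the top-k really had shuffled labels, `detection rate` is the fraction of all corrupted points recovered so far, and `baseline` is roughly what random inspection would find. A hedged sketch of that bookkeeping, assuming you already have a per-point suspiciousness array and the ground-truth flags (the LAVA-specific step that turns `dual_sol` into scores is not reproduced, `detection_report`, `scores`, and `is_corrupted` are illustrative names, and the notebook's helper may define the baseline slightly differently):

```python
# Hedged sketch of the inspection report printed around this point.
# Inputs are assumed, not taken from LAVA: `scores` holds one suspiciousness
# value per training point (higher = more likely corrupted) and `is_corrupted`
# holds the ground-truth flags built from shuffle_ind.
import numpy as np

def detection_report(scores, is_corrupted, step=10):
    scores = np.asarray(scores, dtype=float)
    is_corrupted = np.asarray(is_corrupted, dtype=bool)
    order = np.argsort(-scores)            # inspect the most suspicious points first
    total_corrupted = is_corrupted.sum()
    chance_per_item = is_corrupted.mean()  # what blind inspection finds per point
    for k in range(step, len(scores), step):
        found = int(is_corrupted[order[:k]].sum())
        print(f"inspected: {k}, found: {found} "
              f"detection rate: {found / total_corrupted:.2f} "
              f"baseline: {k * chance_per_item}")

# Toy usage: 500 points, roughly 30% corrupted, noisy but informative scores.
rng = np.random.default_rng(0)
flags = rng.random(500) < 0.3
toy_scores = flags * 1.0 + rng.normal(scale=0.5, size=500)
detection_report(toy_scores, flags)
```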
440, found: 144 detection rate: 0.96 baseline: 79.2\n", + "inspected: 450, found: 145 detection rate: 0.97 baseline: 81.0\n", + "inspected: 460, found: 146 detection rate: 0.97 baseline: 82.8\n", + "inspected: 470, found: 147 detection rate: 0.98 baseline: 84.60000000000001\n", + "inspected: 480, found: 149 detection rate: 0.99 baseline: 86.4\n", + "inspected: 490, found: 149 detection rate: 0.99 baseline: 88.2\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAioAAAHFCAYAAADcytJ5AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAABGNElEQVR4nO3dd3jUVfr38c8AaQQIoSVAEkKRIlICAQTUSNGQpYiIiLrSVUREiK6CPmyw0VRsu4jCCnYQBVelSAANKqCErkR+KCUgVQhlAUGS8/zBZpYhhZlk+rxf15UL5ztnvnNyQpjbc859bosxxggAAMALlfF0BwAAAIpCoAIAALwWgQoAAPBaBCoAAMBrEagAAACvRaACAAC8FoEKAADwWgQqAADAaxGoAAAAr0WgAo+aM2eOLBaL9Ss0NFTR0dHq1KmTJk2apMOHD5f43tu2bdOECRO0e/du53XYwfcZNGiQ4uPjXfr+nnTo0CE98cQTatmypSpVqqTg4GDFxMSoT58++uyzz5Sbm+uWfnz99deyWCz6+uuvrdfcMfb79+/XhAkTtGnTJrva5/fz448/dmm/3K24cZgwYYIsFov7OwW/QaACrzB79mytWbNG6enp+uc//6mWLVtqypQpatKkiZYvX16ie27btk1PPfWUWwKVot5n/PjxWrhwoUvf31PWrl2rZs2aaebMmerVq5fmzp2r5cuXa/LkyQoKClKfPn00Z84cj/XPHWO/f/9+PfXUU3YHKv6quHEYNmyY1qxZ4/5OwW+U83QHAEm65pprlJiYaH182223acyYMbruuuvUp08f7dixQ1FRUR7sYcnUr1/f011wiePHj6t3796qUKGCvvvuO9WsWdPm+b/+9a/asmWLjh49Wux9zp49q9DQUJf8H7e/jr2viYmJUUxMjKe7AR/GjAq8VlxcnF588UWdOnVKb7zxhs1zmZmZ6tWrl6pUqaLQ0FAlJCToo48+sj4/Z84c3X777ZKkTp06WZeWLv0//OXLl6tLly6qVKmSypcvr44dO2rFihUF+vHzzz/rzjvvVFRUlEJCQhQXF6cBAwbo3LlzV3yfwpYf/vjjD40bN05169ZVcHCwateurQcffFDHjx+3aRcfH68ePXpo6dKlatWqlcLCwtS4cWO99dZbxY7bn3/+qRo1auiee+4p8Nzx48cVFham1NRUSVJeXp6effZZNWrUSGFhYapcubKaN2+uV155pdj3mDlzpg4dOqSpU6cWCFLyNW/eXJ06dbI+zl/mW7ZsmYYMGaLq1aurfPnyOnfunH755RcNHjxYV111lcqXL6/atWurZ8+e2rp1a4H7/vzzz+rWrZvKly+vatWqafjw4Tp16lSBdoWNvTFG06dPV8uWLRUWFqbIyEj17dtXO3futGl344036pprrtG6det0/fXXq3z58qpXr54mT56svLw8SReXcdq0aSNJGjx4sPVnP2HChGLH7nL5SyM//fST7rzzTkVERCgqKkpDhgzRiRMnbNrOnz9f7dq1U0REhLVPQ4YMsT6fv7T03nvvKTU1VdHR0QoLC1NSUpI2btxY4L2v9HuU77ffftN9992n2NhYBQcHq1atWurbt68OHTp0xXEobOknLy9PU6dOVePGjRUSEqIaNWpowIAB2rdvn8M/BwQAA3jQ7NmzjSSzbt26Qp//z3/+Y8qWLWu6dOlivbZy5UoTHBxsrr/+ejNv3jyzdOlSM2jQICPJzJ492xhjzOHDh83EiRONJPPPf/7TrFmzxqxZs8YcPnzYGGPMu+++aywWi+ndu7dZsGCB+fzzz02PHj1M2bJlzfLly63vtWnTJlOhQgUTHx9vZsyYYVasWGHee+89069fP3Py5Mkrvs/AgQNNnTp1rPfLy8szycnJply5cmb8+PFm2bJl5oUXXjDh4eEmISHB/PHHH9a2derUMTExMebqq68277zzjvnyyy/N7bffbiSZjIyMYsd1zJgxJiwszJw4ccLm+vTp040ks2XLFmOMMZMmTTJly5Y1aWlpZsWKFWbp0qXm5ZdfNhMmTCj2/jfddJMpW7asOX36dLHtLpX/s65du7a57777zJIlS8zHH39sLly4YDIyMswjjzxiPv74Y5ORkWEWLlxoevfubcLCwszPP/9svcfBgwdNjRo1TO3atc3s2bPN4sWLzd13323i4uKMJPPVV19Z214+9sYYc++995qgoCDzyCOPmKVLl5oPPvjANG7c2ERFRZmDBw9a2yUlJZmqVauaq666ysyYMcOkp6ebESNGGEnm7bffNsYYc+LECev39P/+3/+z/uz37t1b5Bh89dVXRpKZP3++9VpaWpqRZBo1amT+/ve/m/T0dDNt2jQTEhJiBg8ebG23evVqY7FYTP/+/c3ixYvNypUrzezZs80999xT4P6xsbHmlltuMZ9//rl57733TIMGDUylSpXMr7/+am1rz++RMcbs27fP1KxZ01SrVs1MmzbNLF++3MybN88MGTLEZGVlXXEc8r+/S913331Gkhk5cqRZunSpmTFjhqlevbqJjY01R44ccejnAP9HoAKPulKgYowxUVFRpkmTJtbHjRs3NgkJCebPP/+0adejRw9Ts2ZNk5uba4wxZv78+QU+vIwx5vTp06ZKlSqmZ8+eNtdzc3NNixYtTNu2ba3XOnfubCpXrmwNPApT1PsYU/DDcunSpUaSmTp1qk27efPmGUnmzTfftF6rU6eOCQ0NNXv27LFeO3v2rKlSpYq5//77i+yPMcZs2bKlwP2MMaZt27amdevW1sc9evQwLVu2LPZehWncuLGJjo4ucD03N9f8+eef1q/8n4Ux//tZDxgw4Ir3v3Dhgjl//ry56qqrzJgxY6zXH3/8cWOxWMymTZts2t90001XDFTWrFljJJkXX3zR5rV79+41YWFh5rHHHrNeS0pKMpLM999/b9P26quvNsnJydbH69atK/DBXpziApXL/06MGDHChIaGmry8PGOMMS+88IKRZI4fP37F+7dq1cr6OmOM2b17twkKCjLDhg2zXrP392jIkCEmKCjIbNu2rcj3LW4cLg9UsrKyjCQzYsQIm3bff/+9kWSeeOIJ6zV7
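The same cell also stores a PNG figure in its output (the base64 payload here); it is presumably the detection curve plotted against the random-inspection baseline. A minimal matplotlib sketch that would draw a similar figure from the counts above (`plot_detection` is an illustrative name; the notebook's own plotting helper is not visible in this diff):

```python
# Hedged sketch: cumulative corrupted-points-found versus points inspected,
# drawn next to the random-inspection baseline.  This only guesses at the
# figure stored in the output; the actual plotting code is not in the diff.
import numpy as np
import matplotlib.pyplot as plt

def plot_detection(is_corrupted, order, step=10):
    is_corrupted = np.asarray(is_corrupted, dtype=bool)
    ks = np.arange(step, len(order) + 1, step)
    found = [int(is_corrupted[order[:k]].sum()) for k in ks]
    baseline = ks * is_corrupted.mean()
    plt.plot(ks, found, marker=".", label="ranked inspection")
    plt.plot(ks, baseline, linestyle="--", label="random baseline")
    plt.xlabel("points inspected")
    plt.ylabel("corrupted points found")
    plt.legend()
    plt.tight_layout()
    plt.show()

# Toy usage, reusing the kind of inputs from the previous sketch:
rng = np.random.default_rng(0)
flags = rng.random(500) < 0.3
order = np.argsort(-(flags * 1.0 + rng.normal(scale=0.5, size=500)))
plot_detection(flags, order)
```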
fw7wbyz9wOsZY6z//csvv+jnn3/W3XffLUm6cOGC9esvf/mLDhw4oO3btxd7v9WrV+vYsWMaOHCgzevz8vLUrVs3rVu3TqdPn9aZM2eUkZGhfv36qXr16k75XlauXCnp4rLEpW6//XaFh4cXWHpq2bKl4uLirI9DQ0PVsGFD7dmzp9j3adasmVq3bq3Zs2dbr2VlZemHH36wWSpo27atNm/erBEjRujLL7/UyZMnS/qtSZJSU1MVFBRk/erVq1eBNrfddluBaxcuXNDEiRN19dVXKzg4WOXKlVNwcLB27NihrKwsa7uvvvpKTZs2VYsWLWxef9ddd12xb1988YUsFov++te/2vzco6Oj1aJFC5uMIUmKjo5W27Ztba41b978imNfUpePVfPmzfXHH39YM9/yl1f69eunjz76SL/99luR97rrrrtsllvq1KmjDh066KuvvpLk2O/RkiVL1KlTJzVp0sQp32d+Hy7/HWjbtq2aNGlS4HfA3T8HeB8CFXi106dP6+jRo6pVq5aki+mwkvToo4/afCAGBQVpxIgRkqTff/+92Hvm36Nv374F7jFlyhQZY3Ts2DHl5OQoNzfXqRsBjx49qnLlyhUIfCwWi6KjowtsPq1atWqBe4SEhOjs2bNXfK8hQ4ZozZo1+vnnnyVdzKwKCQnRnXfeaW0zbtw4vfDCC1q7dq1SUlJUtWpVdenSRZmZmcXeOy4uTkeOHNGZM2dsrj/yyCNat26d1q1bV+TelcKup6amavz48erdu7c+//xzff/991q3bp1atGhh870ePXpU0dHRBV5f2LXLHTp0SMYYRUVFFfi5r127tsDfm9KMfUlc/n4hISGSZH2/G264QZ9++qkuXLigAQMGKCYmRtdcc40+/PDDAvcqaozy/3458nt05MgRp/8OSIX/PahVq5ZTfwfgH8j6gVdbtGiRcnNzdeONN0qSqlWrJuniB2yfPn0KfU2jRo2KvWf+PV577TVde+21hbaJiopSbm6uypYtW2CDX2lUrVpVFy5c0JEjR2yCFWOMDh48aP2/Zme48847lZqaqjlz5ui5557Tu+++q969eysyMtLaply5ckpNTVVqaqqOHz+u5cuX64knnlBycrL27t2r8uXLF3rvm266ScuWLdPixYvVt29f6/XY2FjFxsZKkoKDgwt9bWEZPu+9954GDBigiRMn2lz//fffVblyZevjqlWr6uDBgwVeX9i1y1WrVk0Wi0XffPONNQi4VGHXvM0tt9yiW265RefOndPatWs1adIk3XXXXYqPj1f79u2t7Yoao/wPfUd+j6pXr+703wFJOnDgQIEAaP/+/da+AfmYUYHXys7O1qOPPqqIiAjdf//9ki7+43nVVVdp8+bNSkxMLPSrYsWKkgr+H2m+jh07qnLlytq2bVuR9wgODrZmS8yfP7/YWZqi3qcwXbp0kXTxg/lSn3zyiU6fPm193hkiIyPVu3dvvfPOO/riiy908OBBm2Wfy1WuXFl9+/bVgw8+qGPHjhV7/sywYcMUFRWlxx57TAcOHCh1Xy0WS4FAYdGiRQWWNzp16qSffvpJmzdvtrn+wQcfXPE9evToIWOMfvvtt0J/5s2aNXO434787J0pJCRESUlJmjJliiQVyOj58MMPbZZM9+zZo9WrV1sDfkd+j1JSUvTVV18Vu6TqyDh07txZUsHfgXXr1ikrK8upvwPwD8yowCv8+OOP1jXyw4cP65tvvtHs2bNVtmxZLVy40Gb24Y033lBKSoqSk5M1aNAg1a5dW8eOHVNWVpY2bNig+fPnS7p4Noskvfnmm6pYsaJCQ0NVt25dVa1aVa+99poGDhyoY8eOqW/fvqpRo4aOHDmizZs368iRI3r99dclSdOmTdN1112ndu3aaezYsWrQoIEOHTqkzz77TG+88YYqVqxY7Ptc7qabblJycrIef/xxnTx5Uh07dtSWLVuUlpamhISEQlOKS2PIkCGaN2+eRo4cqZiYGHXt2tXm+Z49e1rPsKlevbr27Nmjl19+WXXq1NFVV11V5H0rV66sTz/9VD179lSLFi30wAMP6Nprr1WFChV09OhRrVq1SgcPHlSHDh3s6mePHj00Z84cNW7cWM2bN9f69ev1/PPPF/g/7tGjR+utt95S9+7d9eyzzyoqKkrvv/++dXmrOB07dtR9992nwYMHKzMzUzfccIPCw8N14MABffvtt2rWrJkeeOABu/qbr379+goLC9P777+vJk2aqEKFCqpVq5Z1qdKZ/v73v2vfvn3q0qWLYmJidPz4cb3yyisKCgpSUlKSTdvDhw/r1ltv1b333qsTJ04oLS1NoaGhGjdunLWNvb9HTz/9tJYsWaIbbrhBTzzxhJo1a6bjx49r6dKlSk1NVePGjR0ah0aNGum+++7Ta6+9pjJlyiglJUW7d+/W+PHjFRsbqzFjxjh97ODjPLmTF8jPBMn/Cg4ONjVq1DBJSUlm4sSJRWbbbN682fTr18/UqFHDBAUFmejoaNO5c2czY8YMm3Yvv/yyqVu3rilbtmyBrISMjAzTvXt3U6VKFRMUFGRq165tunfvbpORYYwx27ZtM7fffrupWrWqCQ4ONnFxcWbQoEE2qcRFvU9hKbJnz541jz/+uKlTp44JCgoyNWvWNA888IDJycmxaVenTh3TvXv3At97UlKSSUpKKn5g/ys3N9fExsYaSebJJ58s8PyLL75oOnToYKpVq2b93oYOHWp2795t1/0PHjxoxo0bZ5o3b27Cw8NNUFCQqVWrlunZs6d55513bDJKisvwysnJMUOHDjU1atQw5cuXN9ddd5355ptvCv1et23bZm666SYTGhpqqlSpYoYOHWr+/e9/25WebIwxb731lmnXrp0JDw83YWFhpn79+mbAgAEmMzPT2iYpKck0bdq0wGsLu+eHH35oGjdubIKCgowkk5aWVuR4FZf1c2la7qXjtWvXLmOMMV988YVJSUkxtWvXtv6e/OUvfzHffPNNgfu/++67ZtSoUaZ69eomJCTEXH/99TbfXz57f4/27t1rhgwZYqKjo60/4379+plDhw5dcRwKS0/Ozc01U6ZMMQ0bNjRBQUGmWrVq5q9//WuB1G5Hfg7wXxZjLpkfBAD4rK+//lqdOnXS/PnzbfYOAb6MPSoAAMBrEagAAACvxdIPAADwWsyoAAAAr0WgAgAAvBaBCgAA8Fo+feBbXl6e9u/fr4oVKxZ6LDcAAPA+xhidOnVKtWrVUpkyxc+Z+HSgsn//fmtdEQAA4Fv27t17xaKXPh2o5Nei2Lt3rypVquTh3gAAAHucPHlSsbGx1s/x4vh0oJK/3FOpUiUCFQAAfIw92zbYTAsAALwWgQoAAPBaBCoAAMBrEagAAACvRaACAAC8FoEKAADwWgQqAADAaxGoAAAAr0WgAgAAvBaBCgAA8FoEKgAAwGv5dK0fAADguI3ZOdr1+2nVrRauhLjIUrdzJQIVAAACyOQlWZqRsdP6eHhSPY1NaVLidq7G0g8AAAFiY3aOTfAhSTMydmpjdk6J2rkDgQoAAAFi1++n7bpubzt
3IFABACBA1K0Wbtd1e9u5A4EKAAB+ZGN2jhZs2FfoMk1CXKSGJ9WzufZAUr0CG2Xz27W0/KJby3yjlpZfCm3nDmymBQDAT9izAXZsShMlN42+YjbP2HJzpZCX/3eh3DFJT7mg18VjRgUAAB9Q3ExJ/vP2boBNiItUn1YxRc+Q7MuUvnvZ9tp3L1+87mbMqAAA4OXsmSkpbgOsw0s2R38p+npMomP3KiVmVAAA8LDiZkvsnSlx6gbYqg0cu+5CBCoAAHjQ5CVZunX6aqV+tFm3Tl+tyUuybJ63N1XY3o2ydolJlDqOtr3WcYzbZ1Mkln4AAHCp4o6hL2q2JLlptLWtIzMl9m6U1b7Mi8s4VRsUHXzc9JTUpOeV27kYgQoAAC5ypb0l9uwryZ8pufQ+xc2UJMRFFj+Lkp5mu1G24+iLQUlhYhI9FqDkI1ABAMAFnDlbYvdMyZUUlc3TpKfHA5KisEcFAAAXsGdviSP7Sq6YUmyP4rJ5vBQzKgAAuIDbZ0vs4UXZPPZiRgUAABdw+2yJPbwom8deFmOM8XQnSurkyZOKiIjQiRMnVKlSJU93BwCAAorL+vEYe7J+XMiRz28CFQAA/ImHgxB7OPL5zR4VAAD8hSOpxz6CPSoAAPgDLyok6EzMqAAAAoa9+0Xsaed1e0+8qJCgMxGoAAACgj0ViO1tZ++93MoHU4/twdIPAMDv2VuB2J529t7L7Xww9dgezKgAAPyePTV17G1n7708wksKCToTgQoAwO/Ze0qsPe0cqWbsVPamHXtBIUFnYukHAOD37D0l1p52jpw46zTpadKsLtLC+y/+mZ7muvfyMhz4BgAIGD6Z9bMv82JwcrlhK3x25oQD3wAAJeLMD2hvvFdCXKRdQYU97ey9V6n5adqxvQhUAACSnJuW66338kl+mnZsL/aoAECA2JidowUb9hWaRuvMtFxvvZfP8tO0Y3sxowIAAeBKMw7OTMv11nv5ND9MO7YXgQoA+Lgr7c0oasYhuWm0tb0z03K99V5ezZ7UYz9LO7YXSz8A4MMmL8nSrdNXK/Wjzbp1+mpNXpJVoE1xMw75nJmW66338loBnHpsD9KTAcCLFTdbsjE7R7dOX13gNQtHdLBpa2+7K72fI228+V5exQ9Tj+1BejIA+AFn7CuR/jfjcOm9ippxcGZarrfey6sEeOqxPQhUAMALOWtfSb6xKU2U3DTa92Yc/F2Apx7bgz0qAOBkxaUB29vOWftKLpUQF6k+rWIIUrxJgKce24MZFQBwImcdYmbvbAkzJX4ggFOP7cGMCgA4iTMPMXNktoSZEi+3L1PaPPfin0WJSZRa9CdIKQQzKgDgJM48xExitsQvpKdJ3738v8cdR1+cQYHdCFQAwEmceYhZPp/MZMFF+zJtgxTp4uMmPZk5cQBLPwDgJM48xAx+oLjUY9iNGRUAcCJ7l2tY1gkApB47BYEKADjAntNPnXmIGXxYfuqxzR4VUo8dRaACAHayN/UYsCL1uNQIVADADvacFIsAY0/FYylgqx47C4EKANjB3pRiBAjSjt2GrB8AsIMjKcXwc0WlHRd3oBtKjEAFAOxASjGsSDt2K5Z+AMBOpBRDEmnHbsaMCgA4gLo6oOKxezGjAgCAo0g7dhsCFQAALmdP6jFpx25BoAIAwKVIPfYq7FEBAF080G3Bhn3amJ3j6a7Ak0g99jrMqAAIeByND6viUo9Z5vEIZlQABLSijsZnZiVAkXrsdQhUAAS04o7GRwAi9djrsPQDIKBxND4KIPXYqzCjAsDvFbdRlqPxA8y+TGnz3Ctvjo1JlFr0J0jxAsyoAPBr9myU5Wj8AEHasU9iRgWA33JkoyxH4/s50o59FoEKAL/FRllYUfHYZ7H0A8CnbczOKXLJho2ysCLt2GcxowLAZ01ekqVbp69W6kebdev01Zq8JMvmeTbKwoq0Y59lMcYYT3eipE6ePKmIiAidOHFClSpV8nR3ALjRxuwc3Tp9dYHrC0d0KBCIFDfrggBjT7FBuJwjn98s/QDwScXtP7k8GEmIiyRACQRUPPZLBCoAfBL7T2CD1GO/xR4VAD6J/SewIvXYrzGjAsBncVAbJFHx2M8RqADwaew/AanH/o2lHwCAbyP12K8xowIA8H1UPPZbBCoAvBJnn8AGqccBi0AFgNexp+IxAgipxwGNPSoAvIojFY8RAEg9DngEKgC8ChWPYYOqxwGPQAWAV+HEWdgg9TjgEagA8CqcOAsbpB4HPKonA/BKZP0ECHurGVP12K848vlNoAIA8AyyeQKWI5/fLP0AANyPbB7YiUAFAOB+ZPPATgQqAAD3I5sHdiJQAeB2G7NztGDDPg5xC2Rk88BOHKEPwK04Hh9WFBKEHQhUALhNUcfjJzeNJgXZH1FIEE5AoALAbYo7Hp9Axc+QegwnYY8KALfhePwAQeoxnIhABYDbcDx+gCD1GE7E0g8Atxqb0kTJTaM5Ht+fkXoMJ2JGBYDbJcRFqk+rGIIUf0XqMZyIGRUAgPORegwnIVABADjG3krGpB7DCQhUAAD2I+0YbsYeFQCAfUg7hgcQqABwGmr4+DnSjuEBLP0AcApq+AQA0o7hAcyoACi1omr4MLPiZ0g7hgcwowKg1KjhE0BIO4abEagAKDVq+PgRKh7Dy7D0A6DUqOHjJ9LTpFldpIX3X/wzPc3TPQJkMcYYT3eipE6ePKmIiAidOHFClSpV8nR3gIC3MTuHGj6+al/mxeDkcsNWMHsCp3Pk85ulHwBOkxAXSYDiq4pLPSZQgQex9AMAIPUYXotABYBdOMzNz5F6DC/F0g+AK+IwtwBB6jG8EIEKgGI3wRZ1mFty02j2o/gaUo/hgwhUgAB3pdkSDnPzE1Q9ho9ijwoQwOw5+p7D3PwAVY/hwwhUgABW3GxJPg5z8wNUPYYPY+kHCGD2zpaMTWmi5KbRHObmq0g9hg9zyozK8ePHnXEbAG7myGxJQlyk+rSKIUjxRaQew4c5fIT+lClTFB8frzvuuEOS1K9fP33yySeKjo7W4sWL1aJFC5d0tDAcoQ84B0ffBwh7sn4AN3Dk89vhQKVevXp677331KFDB6Wnp6tfv36aN2+ePvroI2VnZ2vZsmWl6rwjCFQAQAQg8DkurfVz4MABxcbGSpK++OIL9evXTzfffLPi4+PVrl27kvUYAFAypB3Dzzm8RyUyMlJ79+6VJC1dulRdu3aVJBljlJub69zeAQCKRtoxAoDDMyp9+vTRXXfdpauuukpHjx5VSkqKJGnTpk1q0IAd5ADgNlQ8RgBwOFB56aWXFB8fr71792rq1KmqUKGCpItLQiNGjHB6BwEARSDtGAHA4c203oTNtEDxyOYJAAX2qIyRbprgqd4AdnHpZlpJevfdd/XGG29o586dWrNmjerUqaOXX35ZdevW1S233FKiTgNwLioeBwgqHsPPObyZ9vXXX1dqaqpSUlJ0/Phx6wbaypUr6+WXX3Z2/wCUgD01fOAj9mVKm+cWv0E2JlFq0Z8gBX7J4UDltdde08yZM/Xkk0+qbNmy1uuJiYnaunWrUzsHoG
TsqeEDH5CeJs3qIi28/+Kf6Wme7hHgdg4HKrt27VJCQkKB6yEhITp9mn8EAW9AxWM/QOoxIKkEgUrdunW1adOmAteXLFmiq6++2hl9AlBKVDz2A1Q8BiSVYDPt3/72Nz344IP6448/ZIzRDz/8oA8//FCTJk3SrFmzXNFHAIW4UkYPFY99HKnHgKQSBCqDBw/WhQsX9Nhjj+nMmTO66667VLt2bb3yyivq37+/K/oI4DL2ZvQkxEUSoPiq/IrHl6ces2EWAaZU56j8/vvvysvLU40aNZzZJ7txjgoC0cbsHN06fXWB6wtHdCAo8UcUHIQfcvk5KvmqVatWmpcDKIHiMnoIVHyMPUFITCIBCgKaw4FKQkKCLBZLgesWi0WhoaFq0KCBBg0apE6dOjmlgwBskdHjJ6h6DNjF4ayfbt26aefOnQoPD1enTp104403qkKFCvr111/Vpk0bHThwQF27dtW///1vV/QXCHhk9PgBUo8Buzk8o/L777/rkUce0fjx422uP/vss9qzZ4+WLVumtLQ0PfPMMxynD7gIGT0+jqrHgN0c3kwbERGh9evXq0ED2xS5X375Ra1bt9aJEyf0888/q02bNjp16pRTO3s5NtMC8En7Mi+eNHu5YSsIVBAQHPn8dnjpJzQ0VKtXF8w4WL16tUJDQyVJeXl5CgkJcfTWABAY8lOPL0XqMVAoh5d+HnroIQ0fPlzr169XmzZtZLFY9MMPP2jWrFl64oknJElffvllocfsAwD+i6rHgF1KdI7K+++/r3/84x/avn27JKlRo0Z66KGHdNddd0mSzp49a80CciWWfuCPrnTiLHwAZ58AxXLk87tUB755GoEK/I29J87Ci5F2DFyRS/eoAHCNjdk5NkGKJM3I2KmN2Tke6hEcRtox4HQOByq5ubl64YUX1LZtW0VHR6tKlSo2XwBKprgTZ+EjqHgMOJ3DgcpTTz2ladOmqV+/fjpx4oRSU1PVp08flSlTRhMmTHBBFwH/sTE7Rws27Ct0loQTZ/0AFY8Bp3M4UHn//fc1c+ZMPfrooypXrpzuvPNOzZo1S3//+9+1du1aV/QR8AuTl2Tp1umrlfrRZt06fbUmL8myeZ4TZ/0AaceA0zm8mTY8PFxZWVmKi4tTzZo1tWjRIrVq1Uo7d+5UQkKCTpw44aq+FsBmWvgKRyoek/XjB8j6AYrl0s20MTExOnDggCSpQYMGWrZsmSRp3bp1HPIGFMGR/ScJcZHq0yqGIMVb7cuUNs8tfoNsTKLUoj9BCuAEDh/4duutt2rFihVq166dHn74Yd15553617/+pezsbI0ZM8YVfQR8HvtP/ASpx4DblfoclbVr12r16tVq0KCBevXq5ax+2YWlH/iSy89IeSCpnh7njBTfQX0ewGkc+fx2eEblctdee62uvfba0t4G8HtUPPZxVDwGPKJEgcpvv/2m7777TocPH1ZeXp7Nc6NGjXJKxwBfY88m2IS4SAIUX0XqMeARDgcqs2fP1vDhwxUcHKyqVavKYrFYn7NYLAQqCEgcfR8A8lOPbfaokHoMuJrDe1RiY2M1fPhwjRs3TmXKePYEfvaowBs4knoMP0DqMVBqLt2jcubMGfXv39/jQQrgLYpLPSZQ8SH2BiAxiQQogBs5HG0MHTpU8+fPd0VfAJ9E6rEfSE+7mNGz8P6Lf6anebpHAP7L4aWf3Nxc9ejRQ2fPnlWzZs0UFBRk8/y0adOc2sHisPQDb0HqsQ8j7RhwO5cu/UycOFFffvmlGjVqJEkFNtMCgYjUYx9G2jHg1RwOVKZNm6a33npLgwYNckF3AN9F6rGPIu0Y8GoO71EJCQlRx44dXdEXAHA/Kh4DXs3hPSqTJk3SgQMH9Oqrr7qqT3ZjjwrcgWrGfsCejB7SjgG3cekelR9++EErV67UF198oaZNmxbYTLtgwQJHbwl4LQ5y8wP2FhIk7RjwSg4HKpUrV1afPn1c0RfAq2zMzrEJUiRpRsZOJTeNZmbFV+zLtA1SpIuPm/QkKAF8RImO0AcCAQe5+QEyegCfx/GyQBE4yM0PkNED+Dy7Z1QSEhLsOidlw4YNpeoQ4C0S4iI1PKlegYPcmE3xIRQSBHye3YFK7969XdgNwDtxkJsfuOmpi3tSyOgBfJLD6cnehPRkAKQVA77HpenJAOA17E09BuCz2EwLwDcVlXq8L9MTvQHgIgQqAHxTcanHAPwGgQoA30TqMRAQCFQA+CaKCQIBwa7NtI4UIBw1alSJOwMADiH1GPB7dqUn161b1+bxkSNHdObMGVWuXFmSdPz4cZUvX141atTQzp07C7mDa5CejNKiMrIXI+0Y8FtOT0/etWuX9b8/+OADTZ8+Xf/617/UqFEjSdL27dt177336v777y9FtwH3ojKyFyPtGMB/OXzgW/369fXxxx8rISHB5vr69evVt29fm6DG1ZhRQUltzM7RrdNXF7i+cEQHZlY8bV+mNKtLwevDVjCzAvgJRz6/Hd5Me+DAAf35558Frufm5urQoUOO3g7wiOIqI8PDSDsGcAmHA5UuXbro3nvvVWZmpvInYzIzM3X//fera9euTu8g4ApURvZipB0DuITDgcpbb72l2rVrq23btgoNDVVISIjatWunmjVratasWa7oI+B0+ZWRL0VlZC9B2jGAS5S4KOH//d//6eeff5YxRk2aNFHDhg2d3bcrYo8KSousHy9G1g/gt9xSlDA+Pl7GGNWvX1/lylHbEL4pIS6SAMUT7AlCYhIJUAA4vvRz5swZDR06VOXLl1fTpk2VnZ0t6eJBb5MnT3Z6BwH4mfS0i1k9C++/+Gd6mqd7BMCLORyojBs3Tps3b9bXX3+t0NBQ6/WuXbtq3rx5Tu0cUFIbs3O0YMM+bczO8XRXcCkqHgNwkMNrNp9++qnmzZuna6+9VhaLxXr96quv1q+//urUzgElwUFuXqy41GOWeQAUwuEZlSNHjqhGjRoFrp8+fdomcAE8YWN2jk2QIkkzMnYys+ItSD0G4CCHA5U2bdpo0aJF1sf5wcnMmTPVvn175/UMKAEOcvNypB4DcJDDSz+TJk1St27dtG3bNl24cEGvvPKKfvrpJ61Zs0YZGRmu6CNgNw5y8wFUPAbgAIdnVDp06KDvvvtOZ86cUf369bVs2TJFRUVpzZo1at26tSv6CNgobqMsB7l52L5MafPcK2+OjUmUWvQnSAFwRSU+8M0bcOBb4LF3oywHuXkAFY8B2MmlRQnLli2rw4cPF7h+9OhRlS1b1tHbAXZzZKNsQlyk+rSKIUhxF9KOAbiIw4FKURMw586dU3BwcKk7BBSFjbJejIrHAFzE7s20r776qqSLWT6zZs1ShQoVrM/l5uZq1apVaty4sfN7CPwXG2W9GGnHAFzE7kDlpZdeknRxRmXGjBk2yzzBwcGKj4/XjBkznN9D4L/yN8peuvzDRlkvkZ92bLNHhbRjAKXn8GbaTp06acGCBYqM9PyHA5tpAxMbZb0YFY8B2MGRz2+yfgDYhyAEgJO4NOunb9++hVZJfv7553X77bc7ejsAvoCKxwA8x
OFAJSMjQ927dy9wvVu3blq1apVTOgXAi5B6DMCDHA5U/vOf/xSahhwUFKSTJ086pVMAvAipxwA8yOFA5ZprrtG8efMKXJ87d66uvvpqp3QKgBch9RiABzlclHD8+PG67bbb9Ouvv6pz586SpBUrVujDDz/U/Pnznd5BBBYyerwQqccAPKhEWT+LFi3SxIkTtWnTJoWFhal58+ZKS0tTUlKSK/pYJLJ+/Iu9dXzgIWT9AHAS0pPhczZm5+jW6asLXF84ogMzK+5AEALAjVyanixJx48f16xZs/TEE0/o2LFjkqQNGzbot99+K8ntAOr4eBKpxwC8mMN7VLZs2aKuXbsqIiJCu3fv1rBhw1SlShUtXLhQe/bs0TvvvOOKfsLPUcfHQ4pKPW7Sk5kVAF7B4RmV1NRUDRo0SDt27FBoaKj1ekpKCueooMTy6/hcijo+bkDqMQAv5/CMyrp16/TGG28UuF67dm0dPHjQKZ1CYBqb0kTJTaPJ+nEnUo8BeDmHZ1RCQ0MLPdht+/btql69ulM6hcCVEBepPq1iCFLcJT/1+FKkHgPwIg7PqNxyyy16+umn9dFHH0mSLBaLsrOzNXbsWN12221O7yAAF7vpqYt7Usj6AeCFHE5PPnnypP7yl7/op59+0qlTp1SrVi0dPHhQ7du31+LFixUe7r7Nj6QnA1dA2jEAL+TI57fDMyqVKlXSt99+q5UrV2rDhg3Ky8tTq1at1LVr1xJ3GIALpKdddprs6IuzJwDgQxyeUXnnnXd0xx13KCQkxOb6+fPnNXfuXA0YMMCpHSwOMypAEfZlXjwT5XLDVjCzAsDjXHrg2+DBg3XixIkC10+dOqXBgwc7ejsArkDaMQA/4fDSjzFGFoulwPV9+/YpIiLCKZ2C/6HYoJuRdgzAT9gdqCQkJMhischisahLly4qV+5/L83NzdWuXbvUrVs3l3QSvo1igx5AxWMAfsLuQKV3796SpE2bNik5OVkVKlSwPhccHKz4+HjSk1HAxuwcmyBFkmZk7FRy02hmVlyNtGMAfsDuQCUt7WKhsvj4eN1xxx02x+cDRSmu2CCBSinZk3ock0iAAsCnObxHZeDAgTp+/Ljee+89/frrr/rb3/6mKlWqaMOGDYqKilLt2rVd0U/4KIoNugipxwAChMNZP1u2bFHDhg01ZcoUvfDCCzp+/LgkaeHChRo3bpyz+wcfR7FBFyiq4vG+TE/0BgBcyuEZlTFjxmjQoEGaOnWqKlasaL2ekpKiu+66y6mdg3+g2KCTFZd6zDIPAD/jcKCSmZmpN998s8B1qicHLntSjxPiIglQnIXUYwABxOFAherJuBSpxx5A6jGAAEL1ZJQYqcceROoxgADh8GbaF154QUeOHFGNGjV09uxZJSUlqUGDBqpYsaKee+45V/QRXqq41GOUwr5MafPcK2+OjUmUWvQnSAHg16iejBIj9dgFSDsGABsOV0/2JlRP9rzL96g8kFRPj7NHpWSoeAwgQDjy+e3QjEpeXp7mzJmjBQsWaPfu3bJYLKpbt6769u2re+65p9BihfBvpB47EWnHAFCA3YGKMUa9evXS4sWL1aJFCzVr1kzGGGVlZWnQoEFasGCBPv30Uxd2Fd6K1GMnIe0YAAqwO1CZM2eOVq1apRUrVqhTp042z61cuVK9e/fWO++8owEDBji9k0BAIO0YAAqwe4/KzTffrM6dO2vs2LGFPj9x4kRlZGToyy+/dGoHi8MeFfgcewoJ2tMGAHyYI5/fdqcnb9myRd26dSvy+ZSUFG3evNn+XgKBJj3t4mbZhfdf/DM9rfB2pB0DgJXdgcqxY8cUFRVV5PNRUVHKyclxSqcAv0MhQQAoEbsDldzcXJUrV/SWlrJly+rChQtO6RTgd4rL6AEAFMmhrJ9BgwYpJCSk0OfPnTvntE4BfoeMHgAoEbsDlYEDB16xDRk//sWeqsiwExk9AFAinEyLQlEV2UXI6AEA151Mi8BAVeQSsicIiUkkQAEABxCooIDiqiITqBSBYoIA4BJ2Z/0gcFAV2UGkHgOAyxCooICEuEgNT6pnc+2BpHrMphSF1GMAcBmWflAoqiI7gNRjAHAZZlRQpIS4SPVpFUOQciX5qceXIvUYAJyCGRXAGW56SmrSk9RjAHAyAhXgSuw9+4TUYwBwOgIVoDikHQOAR7FHBSgKaccA4HEEKkBRSDsGAI8jUAGKQtoxAHgcgUqA2pidowUb9mljdo6nu+K9SDsGAI9jM20AojKyA0g7BgCPIlAJMFRGvgwVjwHAqxGoBBgqI1+C1GMA8HrsUQkwVEb+L1KPAcAnEKgEGCoj/xepxwDgE1j6CUBURhapxwDgI5hRCVABXxmZ1GMA8AnMqCBwkXoMAF6PQAX+iYrHAOAXCFTgf0g7BgC/wR4V+BfSjgHArxCowL+QdgwAfoVABf6FtGMA8CsEKvAvpB0DgF9hMy38D2nHAOA3CFTge6h4DAABg0AFvoXUYwAIKOxR8UMbs3O0YMM+bczO8XRXnIvUYwAIOMyo+JnJS7I0I2On9fHwpHoam9LEgz1youJSj1nmAQC/xIyKH9mYnWMTpEjSjIyd/jOzQuoxAAQcAhU/suv30w5d9zmkHgNAwGHpx4/UrRbu0HWfROoxAAQUZlT8SEJcpIYn1bO59kBSPSXERXqoRyWwL1PaPLf4DbIxiVKL/gQpABAAmFHxM2NTmii5abR2/X5adauF+1aQQuoxAOAyBCp+KCEu0rcCFKno1OMmPZk5AYAAxtIPvANVjwEAhSBQgXcg9RgAUAgCFXgHUo8BAIVgjwq8B6nHAIDLEKjAPeypeCxR9RgAYINABa5H2jEAoITYo+JDfLIqMhWPAQClwIyKj/DZqshUPAYAlAIzKj7Ap6sik3YMACgFAhUf4NNVkUk7BgCUAks/PsDnqyKTdgwAKCFmVHyA11dFpuIxAMBFmFHxEV5bFZnUYwCACxGo+BCvq4pMxWMAgIux9IOSo+IxAMDFCFRQcqQeAwBcjEAFJUfqMQDAxdijgtIh9RgA4EIEKigaFY8BAB5GoILCkXYMAPAC7FFBQVQ8BgB4CQIVFETaMQDAS7D04yU2Zud4z6mzpB0DALwEgYoXmLwkSzMydlofD0+qp7EpTTzXofy0Y5s9KqQdAwDcj0DFwzZm59gEKZI0I2OnkptGu3Zm5UoZPaQdAwC8AIGKh+36/XSR110WqNib0UPaMQDAw9hM62F1q4U7dL3UyOgBAPgQAhUPS4iL1PCkejbXHkiq57rZFDJ6AAA+hKUfLzA2pYmSm0a7J+uHjB4AgA9hRsVLJMRFqk+rGNenJlNIEADgQ5hRCURk9AAAfASBij+yp5ggGT0AAB9AoOJvKCYIAPAj7FHxJ6QeAwD8DIGKi23MztGCDfu0MTvH9W9G6jEAwM+w9ONCbq/hQ+oxAMDPMKPiIkXV8HHpzAqpxwAAP8OMiot4pIaPROoxAMCvEKi4iEtq+NiTdiyRegwA8Bss/biI02v4pKdJs7pIC++/+Gd6mhN6CQCAd7MYY4ynO1FSJ0+eVEREhE6c
OKFKlSp5ujuF2pidU/oaPvsyLwYnlxu2gpkTAIDPceTzm6UfF0uIiyz9npTi0o4JVAAAfoylH19A2jEAIEARqPgC0o4BAAGKpR9fQdoxACAAEah4CyoeAwBQAIGKN6DiMQAAhWKPiqdR8RgAgCIRqJSCUyojU/EYAIAisfRTQk6rjEzqMQAARWJGpQScWhmZ1GMAAIrEjEoJOL0yMqnHAAAUikClBByqjEzFYwAASoylnxKwuzIyFY8BACgVqieXQrGVkal4DABAoaie7CbFVkam4jEAAKXG0o+rkHYMAECpEagUodSHuZF2DABAqbH0UwinHeZG2jEAAKVCoHKZog5zS24aXfiGWSoeAwDgMgQql7H7MDcqHgMA4HLsUbmMXYe5UfEYAAC3IFC5jF2HuVHxGAAAt2DppxBjU5oouWl00Ye5kXoMAIBbMKNShIS4SPVpFVP4gW6kHgMA4BbMqJQUqccAALgcgUpRSD0GAMDjCFQKQ+oxAABegT0qlyP1GAAAr+HRQGXVqlXq2bOnatWqJYvFok8//dST3bmI1GMAALyGRwOV06dPq0WLFvrHP/7hyW7YIvUYAACv4dE9KikpKUpJSfFkFwrKTz222aNC6jEAAJ7g8c2006dP1/PPPy9JeuSRR1S1alVdf/31nu0UqccAAHgFjy79zJs3T6NHj9aTTz4pSbr66quVkpKi7OzsQtufO3dOJ0+etPlymZhEqUV/ghQAADzIo4HKtGnTNHToUA0bNkySNHToUMXGxur1118vtP2kSZMUERFh/YqNjXVndwEAgJt5LFA5f/681q9fr5tvvtnm+s0336zVq1cX+prU1FTt3bvX+rVt2zZ3dBUAAHiIx/ao/P7778rNzVVUVJTN9aioKB08eLDQ10ybNk1PPcXBawAABAqPH/i2Y8cObdq0SZK0a9cu7d+/XxcuXCi07bhx43TixAnr1969e93YUwAA4G4WY4zxxBufP39eYWFhysvLK/BccbMqlzp58qQiIiJ04sQJVapUyRXdBAAATubI57fHZlSCg4OVmJioBx54QMYY61eTJk00ePBgT3ULAAB4EY+eo5Kamqp77rlHiYmJat++vd58801lZ2dr+PDhnuwWAADwEh4NVO644w4dPXpUTz/9tA4cOKBrrrlGixcvVp06dTzZLQAA4CU8tkfFGdijAgCA7/GJPSoAAABXQqACAAC8lseLEpZG/qqVS2v+AAAAp8r/3LZn94lPByqnTp2SJGr+AADgg06dOqWIiIhi2/j0Ztq8vDzt379fFStWlMVises1J0+eVGxsrPbu3csGXDdi3D2DcfcMxt0zGHfPKMm4G2N06tQp1apVS2XKFL8LxadnVMqUKaOYmJgSvbZSpUr8RfYAxt0zGHfPYNw9g3H3DEfH/UozKfnYTAsAALwWgQoAAPBaAReohISEKC0tTSEhIZ7uSkBh3D2DcfcMxt0zGHfPcPW4+/RmWgAA4N8CbkYFAAD4DgIVAADgtQhUAACA1yJQAQAAXivgApXp06erbt26Cg0NVevWrfXNN994uks+a9WqVerZs6dq1aoli8WiTz/91OZ5Y4wmTJigWrVqKSwsTDfeeKN++uknmzbnzp3TQw89pGrVqik8PFy9evXSvn373Phd+J5JkyapTZs2qlixomrUqKHevXtr+/btNm0Ye+d7/fXX1bx5c+uhVu3bt9eSJUuszzPmrjdp0iRZLBaNHj3aeo1xd40JEybIYrHYfEVHR1ufd+u4mwAyd+5cExQUZGbOnGm2bdtmHn74YRMeHm727Nnj6a75pMWLF5snn3zSfPLJJ0aSWbhwoc3zkydPNhUrVjSffPKJ2bp1q7njjjtMzZo1zcmTJ61thg8fbmrXrm3S09PNhg0bTKdOnUyLFi3MhQsX3Pzd+I7k5GQze/Zs8+OPP5pNmzaZ7t27m7i4OPOf//zH2oaxd77PPvvMLFq0yGzfvt1s377dPPHEEyYoKMj8+OOPxhjG3NV++OEHEx8fb5o3b24efvhh63XG3TXS0tJM06ZNzYEDB6xfhw8ftj7vznEPqEClbdu2Zvjw4TbXGjdubMaOHeuhHvmPywOVvLw8Ex0dbSZPnmy99scff5iIiAgzY8YMY4wxx48fN0FBQWbu3LnWNr/99pspU6aMWbp0qdv67usOHz5sJJmMjAxjDGPvTpGRkWbWrFmMuYudOnXKXHXVVSY9Pd0kJSVZAxXG3XXS0tJMixYtCn3O3eMeMEs/58+f1/r163XzzTfbXL/55pu1evVqD/XKf+3atUsHDx60Ge+QkBAlJSVZx3v9+vX6888/bdrUqlVL11xzDT8TB5w4cUKSVKVKFUmMvTvk5uZq7ty5On36tNq3b8+Yu9iDDz6o7t27q2vXrjbXGXfX2rFjh2rVqqW6deuqf//+2rlzpyT3j7tPFyV0xO+//67c3FxFRUXZXI+KitLBgwc91Cv/lT+mhY33nj17rG2Cg4MVGRlZoA0/E/sYY5SamqrrrrtO11xzjSTG3pW2bt2q9u3b648//lCFChW0cOFCXX311dZ/eBlz55s7d642bNigdevWFXiOv+uu065dO73zzjtq2LChDh06pGeffVYdOnTQTz/95PZxD5hAJZ/FYrF5bIwpcA3OU5Lx5mdiv5EjR2rLli369ttvCzzH2Dtfo0aNtGnTJh0/flyffPKJBg4cqIyMDOvzjLlz7d27Vw8//LCWLVum0NDQItsx7s6XkpJi/e9mzZqpffv2ql+/vt5++21de+21ktw37gGz9FOtWjWVLVu2QCR3+PDhAlEhSi9/d3hx4x0dHa3z588rJyenyDYo2kMPPaTPPvtMX331lWJiYqzXGXvXCQ4OVoMGDZSYmKhJkyapRYsWeuWVVxhzF1m/fr0OHz6s1q1bq1y5cipXrpwyMjL06quvqly5ctZxY9xdLzw8XM2aNdOOHTvc/vc9YAKV4OBgtW7dWunp6TbX09PT1aFDBw/1yn/VrVtX0dHRNuN9/vx5ZWRkWMe7devWCgoKsmlz4MAB/fjjj/xMimGM0ciRI7VgwQKtXLlSdevWtXmesXcfY4zOnTvHmLtIly5dtHXrVm3atMn6lZiYqLvvvlubNm1SvXr1GHc3OXfunLKyslSzZk33/313aOutj8tPT/7Xv/5ltm3bZkaPHm3Cw8PN7t27Pd01n3Tq1CmzceNGs3HjRiPJTJs2zWzcuNGa7j158mQTERFhFixYYLZu3WruvPPOQtPXYmJizPLly82GDRtM586dSRu8ggceeMBERESYr7/+2iZ18MyZM9Y2jL3zjRs3zqxatcrs2rXLbNmyxTzxxBOmTJkyZtmyZcYYxtxdLs36MYZxd5VHHnnEfP3112bnzp1m7dq1pkePHqZixYrWz0t3jntABSrGGPPPf/7T1KlTxwQHB5tWrVpZUzrhuK+++spIKvA1cOBAY8zFFLa0tDQTHR1tQkJCzA033GC2bt1qc4+zZ8+akSNHmipVqpiwsDDTo0cPk52d7YHvxncUNuaSzOzZs61tGHvnGzJkiPXfjurVq5s
uXbpYgxRjGHN3uTxQYdxdI/9clKCgIFOrVi3Tp08f89NPP1mfd+e4W4wxpsRzQQAAAC4UMHtUAACA7yFQAQAAXotABQAAeC0CFQAA4LUIVAAAgNciUAEAAF6LQAUAAHgtAhUAPiM+Pl4vv/yyp7sBwI0IVAAfMmjQIPXu3dvT3XCIO4OLCRMmqGXLlm55LwDuQaACAAC8FoEK4MNuvPFGjRo1So899piqVKmi6OhoTZgwwabNhAkTFBcXp5CQENWqVUujRo2yPhcfH69nnnlGd911lypUqKBatWrptddes3n9iRMndN9996lGjRqqVKmSOnfurM2bN9u0+eyzz5SYmKjQ0FBVq1ZNffr0sfZvz549GjNmjCwWiywWi/U1q1ev1g033KCwsDDFxsZq1KhROn36tPX5w4cPq2fPngoLC1PdunX1/vvvOzw++TNQEydOVFRUlCpXrqynnnpKFy5c0N/+9jdVqVJFMTExeuutt2xe9/jjj6thw4YqX7686tWrp/Hjx+vPP/+0afPss8+qRo0aqlixooYNG6axY8cWmM2ZPXu2mjRpotDQUDVu3FjTp0+3Pnf+/HmNHDlSNWvWVGhoqOLj4zVp0iSHv0fA3xGoAD7u7bffVnh4uL7//ntNnTpVTz/9tLW0+scff6yXXnpJb7zxhnbs2KFPP/1UzZo1s3n9888/r+bNm2vDhg0aN26cxowZY329MUbdu3fXwYMHtXjxYq1fv16tWrVSly5ddOzYMUnSokWL1KdPH3Xv3l0bN27UihUrlJiYKElasGCBYmJi9PTTT+vAgQM6cOCAJGnr1q1KTk5Wnz59tGXLFs2bN0/ffvutRo4cae3XoEGDtHv3bq1cuVIff/yxpk+frsOHDzs8PitXrtT+/fu1atUqTZs2TRMmTFCPHj0UGRmp77//XsOHD9fw4cO1d+9e62sqVqyoOXPmaNu2bXrllVc0c+ZMvfTSS9bn33//fT333HOaMmWK1q9fr7i4OL3++us27ztz5kw9+eSTeu6555SVlaWJEydq/PjxevvttyVJr776qj777DN99NFH2r59u9577z3Fx8c7/P0Bfq909RUBuNPAgQPNLbfcYn2clJRkrrvuOps2bdq0MY8//rgxxpgXX3zRNGzY0Jw/f77Q+9WpU8d069bN5todd9xhUlJSjDHGrFixwlSqVMn88ccfNm3q169v3njjDWOMMe3btzd33313kX2uU6eOeemll2yu3XPPPea+++6zufbNN9+YMmXKmLNnz5rt27cbSWbt2rXW57OysoykAve6VFpammnRooX18cCBA02dOnVMbm6u9VqjRo3M9ddfb3184cIFEx4ebj788MMi7zt16lTTunVr6+N27dqZBx980KZNx44dbd47NjbWfPDBBzZtnnnmGdO+fXtjjDEPPfSQ6dy5s8nLyyvyfQEYw4wK4OOaN29u87hmzZrWmYfbb79dZ8+eVb169XTvvfdq4cKFunDhgk379u3bF3iclZUlSVq/fr3+85//qGrVqqpQoYL1a9euXfr1118lSZs2bVKXLl0c6vP69es1Z84cm3smJycrLy9Pu3btUlZWlsqVK2edmZGkxo0bq3Llyg69jyQ1bdpUZcr875+6qKgom1mlsmXLqmrVqjazNR9//LGuu+46RUdHq0KFCho/fryys7Otz2/fvl1t27a1eZ9LHx85ckR79+7V0KFDbb7HZ5991jpugwYN0qZNm9SoUSONGjVKy5Ytc/h7AwJBOU93AEDpBAUF2Ty2WCzKy8uTJMXGxmr79u1KT0/X8uXLNWLECD3//PPKyMgo8LrL7yFJeXl5qlmzpr7++usCbfKDhrCwMIf7nJeXp/vvv99mv0y+uLg4bd++3aYfpVHY+BQ3ZmvXrlX//v311FNPKTk5WREREZo7d65efPHFAq+5lDHG+t/595o5c6batWtn065s2bKSpFatWmnXrl1asmSJli9frn79+qlr1676+OOPS/HdAv6HQAXwc2FhYerVq5d69eqlBx98UI0bN9bWrVvVqlUrSRc/mC+1du1aNW7cWNLFD9ODBw+qXLlyRe6faN68uVasWKHBgwcX+nxwcLByc3NtrrVq1Uo//fSTGjRoUOhrmjRpogsXLigzM9M6U7F9+3YdP37c3m+7xL777jvVqVNHTz75pPXanj17bNo0atRIP/zwg+655x7rtczMTOt/R0VFqXbt2tq5c6fuvvvuIt+rUqVKuuOOO3THHXeob9++6tatm44dO6YqVao48TsCfBuBCuDH5syZo9zcXLVr107ly5fXu+++q7CwMNWpU8fa5rvvvtPUqVPVu3dvpaena/78+Vq0aJEkqWvXrmrfvr169+6tKVOmqFGjRtq/f78WL16s3r17KzExUWlpaerSpYvq16+v/v3768KFC1qyZIkee+wxSRczi1atWqX+/fsrJCRE1apV0+OPP65rr71WDz74oO69916Fh4crKytL6enpeu2119SoUSN169ZN9957r958802VK1dOo0ePLtHsjaMaNGig7OxszZ07V23atNGiRYu0cOFCmzYPPfSQ7r33XiUmJqpDhw6aN2+etmzZonr16lnbTJgwQaNGjVKlSpWUkpKic+fOKTMzUzk5OUpNTdVLL72kmjVrqmXLlipTpozmz5+v6OjoEi1vAf6MPSqAH6tcubJmzpypjh07Wmc+Pv/8c1WtWtXa5pFHHtH69euVkJCgZ555Ri+++KKSk5MlXVzeWLx4sW644QYNGTJEDRs2VP/+/bV7925FRUVJupiCPH/+fH322Wdq2bKlOnfurO+//956/6efflq7d+9W/fr1Vb16dUkXZ2EyMjK0Y8cOXX/99UpISND48eNVs2ZN6+tmz56t2NhYJSUlqU+fPtYUaVe75ZZbNGbMGI0cOVItW7bU6tWrNX78eJs2d999t8aNG6dHH33UuoQzaNAghYaGWtsMGzZMs2bN0pw5c9SsWTMlJSVpzpw5qlu3riSpQoUKmjJlihITE9WmTRvt3r1bixcvttlPA0CymEsXVgEElPj4eI0ePVqjR4/2dFd83k033aTo6Gi9++67nu4K4FdY+gEAB505c0YzZsxQcnKyypYtqw8//FDLly+3nj8DwHkIVADAQflLYs8++6zOnTunRo0a6ZNPPlHXrl093TXA77D0AwAAvBa7tgAAgNciUAEAAF6LQAUAAHgtAhUAAOC1CFQAAIDXIlABAABei0AFAAB4LQIVAADgtQhUAACA1/r/FLTjd+RliFEAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAioAAAHFCAYAAADcytJ5AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA6RElEQVR4nO3dd3xUVf7/8feE9BCGABIIoSSAlKUTQYoCKlhARFlFRAUUlyIrzVV03QWxgFh2XRURl7LqUlzFXkFBpCglKBEQlJbQBUKChCQkc35/+Mt8mTRmwgw5mNfz8ZiHzJ1zz/3cM3cyb28bhzHGCAAAwEJB5V0AAABASQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABrEVQAAIC1CCqAn2zatEl33323GjZsqIiICEVERKhx48YaPny41q9ff97qmDx5shwOh8e0Bg0aaMiQIQFd7urVqzV58mQdP348oMvx1pAhQ+RwONyPsLAwNWnSRJMmTVJ2drbP/TkcDk2ePNn/hZ5n3bt3V/fu3cu7DMBrweVdAPB78Morr2j06NFq0qSJxowZoz/84Q9yOBzaunWrFixYoEsuuUQ///yzGjZsWC71vfPOO6pSpUpAl7F69Wo9+uijGjJkiKpWrRrQZXkrIiJCX375pSQpPT1dCxYs0JQpU/Tjjz9q0aJFPvW1Zs0axcfHB6JMAKUgqADnaNWqVRo1apR69+6tt956S6Ghoe7XrrjiCt1777363//+p4iIiFL7ycrKUmRkZEBqbNu2bUD6tV1QUJAuvfRS9/Nrr71Wu3fv1ptvvqnnnntOderU8bqvM/sBcP5w6Ac4R08++aQqVaqkV155xSOknOnmm29WXFyc+/mQIUNUuXJlpaSkqFevXoqOjtaVV14pSVqyZIluuOEGxcfHKzw8XI0aNdLw4cN15MiRIv1+9NFHatOmjcLCwpSQkKBnnnmm2OUXd+gnMzNT999/vxISEhQaGqo6depo7NixOnnypEc7h8Oh0aNH6/XXX1ezZs0UGRmp1q1b68MPP3S3mTx5sv7yl79IkhISEtyHW5YvX15sPf/85z/lcDj0888/F3ntwQcfVGhoqHt9N27cqD59+qhmzZoKCwtTXFycevfurb179xbb99kUBI49e/ZIklJTU3X77be7+2/WrJmeffZZuVyuIuNw5qGfrKws9/iFh4erWrVqSkpK0oIFCzzme//999WpUydFRkYqOjpaPXv21Jo1azzaFByu27x5swYOHCin06nY2FjdddddysjI8GhrjNGMGTPUpk0bRUREKCYmRn/84x+1c+fOIu2mT5+u+vXrKzw8XO3atdMnn3xSpjEDyhN7VIBzkJ+fr2XLlikpKUm1a9f2ad7c3Fz17dtXw4cP18SJE5WXlydJ2rFjhzp16qRhw4bJ6XRq9+7deu6559S1a1elpKQoJCREkvTFF1/ohhtuUKdOnbRw4ULl5+dr+vTpOnTo0FmXnZWVpW7dumnv3r16+OGH1apVK23evFl///vflZKSoqVLl3qc5/LRRx9p3bp1mjJliipXrqzp06frxhtv1LZt25SYmKhhw4bp2LFjeuGFF7R48WL3WDRv3rzY5d9+++168MEHNW/ePD3++OMe4/nGG2/o+uuvV40aNXTy5En17NlTCQkJeumllxQbG6uDBw9q2bJlOnHihE/jXaAgHF100UX65Zdf1LlzZ+Xm5uqxxx5TgwYN9OGHH+r+++/Xjh07NGPGjBL7GT9+vF5//XU9/vjjatu2rU6ePKkffvhBR48edbeZP3++Bg0apF69emnBggXKycnR9OnT1b17d33xxRfq2rWrR5/9+/fXgAEDdPfddyslJUUPPfSQJGnOnDnuNsOHD9e8efN033336amnntKxY8c0ZcoUde7cWd9//71iY2MlSY8++qgeffRR3X333frjH/+otLQ03XPPPcrPz1eTJk3KNHZAuTAAyuzgwYNGkrn11luLvJaXl2dOnz7tfrhcLvdrgwcPNpLMnDlzSu3f5XKZ06dPmz179hhJ5r333nO/1rFjRxMXF2dOnTrlnpaZmWmqVatmCn+069evbwYPHux+PnXqVBMUFGTWrVvn0e6tt94ykszHH3/snibJxMbGmszMTI/1DgoKMlOnTnVPe/rpp40ks2vXrlLXqcBNN91k4uPjTX5+vnvaxx9/bCSZDz74wBhjzPr1640k8+6773rV55kGDx5soqKi3OP/yy+/mOeff944HA5zySWXGGOMmThxopFkvv32W495R44caRwOh9m2bZvHOEyaNMn9vEWLFqZfv34lLj8/P9/ExcWZli1beqzjiRMnTM2aNU3nzp3d0yZNmmQkmenTp3v0MWrUKBMeHu7edtasWWMkmWeffdajXVpamomIiDAPPPCAMcaY9PR0Ex4ebm688UaPdqtWrTKSTLdu3UqsG7ANh36AAGnfvr1CQkLcj2effbZIm/79+xeZdvjwYY0YMUJ169ZVcHCwQkJCVL9+fUnS1q1bJUknT57UunXrdNNNNyk8PNw9b3R0tK6//vqz1vbhhx+qRYsWatOmjfLy8tyPq6++uthDNj169FB0dLT7eWxsrGrWrOk+fFIWQ4cO1d69e7V06VL3tLlz56pWrVq69tprJUmNGjVSTEyMHnzwQc2cOVNbtmzxaRknT550j/9FF12ksWPH6tprr9U777wjSfryyy/VvHlzdejQwWO+IUOGyBjjPhG3OB06dNAnn3yiiRMnavny5Tp16pTH69u2bdP+/ft1xx13KCjo//7UVq5cWf3799c333yjrKwsj3n69u3r8bxVq1bKzs7W4cOHJf32vjkcDt1+++0e71utWrXUunVr9/u2Zs0aZWdna9CgQR79de7c2b0tARcKDv0A56BGjRqKiIgo9gt7/vz5ysrK0oEDB4p8AUlSZGRkkStxXC6XevXqpf379+tvf/ubWrZsqaioKLlcLl166aXuL8P09HS5XC7VqlWrSL/FTSvs0KFD+vnnn92HkQorfD5M9erVi7QJCwsr8uXsi2uvvVa1a9fW3Llz1atXL6Wnp+v999/XmDFjVKlSJUmS0+nUV199pSeeeEIPP/yw0tPTVbt2bd1zzz165JFHSqy/QEREhFasWOGut379+h5jfvToUTVo0KDIfAXnE515GKewf/3rX4qPj9eiRYv01FNPKTw8XFdffbWefvppNW7c2D1vcYcE4+Li5HK5lJ6e7nECdeFxDgsLkyT3OB86dEjGGPfhncISExM96i7r9gHYhKACnINKlSrpiiuu0Oeff64DBw54fCkVnJ+xe/fuYuctfK8TSfrhhx/0/fffa968eRo8eLB7euGTTmNiYuRwOHTw4MEifRQ3rbCCgHXmuQ+FXw+0SpUq6Y477tC//vUvHT9+XPPnz1dOTo6GDh3q0a5ly5ZauHChjDHat
GmT5s2bpylTpigiIkITJ04sdRlBQUFKSkoq8fXq1avrwIEDRabv379fUunjEBUV5T4P5NChQ+69K9dff71+/PFHd+goqf+goCDFxMSUWn9hNWrUkMPh0Ndff+0OMWcqmFaw7JK2j+LCGWArDv0A5+ihhx5Sfn6+RowYodOnT59TXwXhpfCX0CuvvOLxPCoqSh06dNDixYs9bl524sQJffDBB2ddTp8+fbRjxw5Vr15dSUlJRR5l+SIr/H//3hg6dKiys7O1YMECzZs3T506dVLTpk2LbetwONS6dWv94x//UNWqVZWcnOxzjYVdeeWV2rJlS5G+XnvtNTkcDvXo0cOrfmJjYzVkyBANHDhQ27ZtU1ZWlpo0aaI6depo/vz5Msa42548eVJvv/22+0ogX/Tp00fGGO3bt6/Y961ly5aSfruyKTw8XP/973895l+9evU5Ha4DygN7VIBz1KVLF7300kv685//rHbt2ulPf/qT/vCHPygoKEgHDhzQ22+/LUle3XCtadOmatiwoSZOnChjjKpVq6YPPvhAS5YsKdL2scce0zXXXKOePXtqwoQJys/P11NPPaWoqCgdO3as1OWMHTtWb7/9ti6//HKNGzdOrVq1ksvlUmpqqj7//HNNmDBBHTt29GkcCr4kn3/+eQ0ePFghISFq0qSJx7ktxa1vp06dNHXqVKWlpWnWrFker3/44YeaMWOG+vXrp8TERBljtHjxYh0/flw9e/b0qb7ijBs3Tq+99pp69+6tKVOmqH79+vroo480Y8YMjRw5UhdffHGJ83bs2FF9+vRRq1atFBMTo61bt+r111/3CCDTp0/XoEGD1KdPHw0fPlw5OTl6+umndfz4cU2bNs3nert06aI//elPGjp0qNavX6/LL79cUVFROnDggFauXKmWLVtq5MiRiomJ0f3336/HH39cw4YN080336y0tDRNnjyZQz+48JTjibzA78p3331nhg4dahISEkxYWJgJDw83jRo1Mnfeeaf54osvPNoWXJFSnC1btpiePXua6OhoExMTY26++WaTmppa5KoTY4x5//33TatWrUxoaKipV6+emTZtmvsKkjMVvurHGGN+/fVX88gjj5gmTZqY0NBQ43Q6TcuWLc24cePMwYMH3e0kmXvvvbdIncX1+dBDD5m4uDgTFBRkJJlly5aVPmjGmFmzZhlJJiIiwmRkZHi89uOPP5qBAweahg0bmoiICON0Ok2HDh3MvHnzztpvaWN8pj179pjbbrvNVK9e3YSEhJgmTZqYp59+2uNKHWOKXvUzceJEk5SUZGJiYkxYWJhJTEw048aNM0eOHPGY79133zUdO3Y04eHhJioqylx55ZVm1apVHm0K3rNffvnFY/rcuXOLvZJqzpw5pmPHjiYqKspERESYhg0bmjvvvNOsX7/e3cblcpmpU6eaunXrmtDQUNOqVSvzwQcfmG7dunHVDy4oDmPO2CcJAABgEc5RAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACw1gV9wzeXy6X9+/crOjq62NuRAwAA+xhjdOLECcXFxXn8aGdxLuigsn//ftWtW7e8ywAAAGWQlpam+Pj4Uttc0EGl4NbcaWlpXt2eHAAAlL/MzEzVrVu31J/YKHBBB5WCwz1VqlQhqAAAcIHx5rQNTqYFAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGuVa1BZsWKFrr/+esXFxcnhcOjdd98tz3IAAIBlyvW3fk6ePKnWrVtr6NCh6t+/f3mWYrWNqenadeSkEmpEqW29GJ/bbUxN1/JthyVJ3ZvULNJH4fkKnp/Odyl5z/+9dmuHepKkhWtTlZ6VqyubxWrAJfVKrGPHL78q/WSuYqJC1fCiyh79L1ybqh/2ZehYVq4ckprXrqKef6il/cdP6WBGtiRp55GTOvprjqpHhUoOh47+mqPEGlHKN9L2QycUEuRQzSrhSqgRpXb1YxRSKUin81363/q92nYwU2HBQerWpKYyTp3Wlv0ZqhoZqma1q+hE9mm5XEZ/qOPU1z8d0U+HTsghKb5apFrWcapd/Rh3HbWc4erepKaWbj2kr7Yd1q85+Ur//zUnXlRZNSqHKiYyVLd2qKfth07o+7Tj7vU9ne9y93P01xwFBTnUODbaPRaSPMZ90bpU9/zBQQ6t3nFUp3LzdVF0mEIqOdxjeWWzWEnS92nH1bpuVfd7UNz7X/i9l1TqtiBJi9alanHyXhkj9W8f79F/4ff+zGUWrM/pfJf7vTjzv4XrKmmb9nZ793aes23/57Lcss5f1mWdj2UEwtlqOR/jYQNv6y34W3Dm57sicxhjTHkXIf32w0TvvPOO+vXr5/U8mZmZcjqdysjI+N3+KOG0T7Zq5lc73c9HdEvUxGubed2u8PTCfRR+vU1dp75Ly/C6vjZ1nXr33q4l1lFce1/6ryguig7VLydyyzRvm7pOXZpYvcj7L6nU96Kg3ZnbU7+XVhZ5f4rrv6w1F1dXadtjSdv7mUqb52zbvzd9eMOX+cu6rPOxjEA4Wy3nYzxs4G29hT+Dhf/G/l748v19QZ2jkpOTo8zMTI/H79nG1PQif2RnfrVTG1PTvWq3aF1qsV9UBX0UN5+vIeK7tAwtWpdaYh3FtUdRZQ0p0m9jWtz7f7b3oqBdwfa0aF1qse9Pcf1LZau5uLpK2x6L297PVNo8JW2Phfssy3K9reFc2p7vZQTC2Wo5H+NhA2/rLe4zeObf2IrqggoqU6dOldPpdD/q1q1b3iUF1K4jJ72aXlK779OOl9p3SfP5qmA5/uoP51fB+1ba9nI+avB2e/fmtbNt32e+VpblelvDubQ938sIhLPVcj7Gwwbe1lvSZ7A8P5s2uKCCykMPPaSMjAz3Iy0trbxLCqiC4/5nm15Su9Z1q5bad0nz+apgOf7qD+dXwftW2vZyPmrwdnv35rWzbd9nvlaW5Xpbw7m0Pd/LCISz1XI+xsMG3tZb0mewPD+bNriggkpYWJiqVKni8fg9a1svxn1Mv8DIbolFTsIqqd2AS+oVmX5mH8XN16au07ca6zrdJ3sV119hvvZfUdSMDi3zvG3rOot9/8/2XhS0K9ieBlxSr9j3p7j+pbLVXFxdpW2PxW3vHrWVMk9J22PhPsuyXG9rOJe253sZgXC2Ws7HeNjA23qL+wye+Te2ouJk2gsAV/1w1Q9X/ZSOq378t4xA4Kqf33DVz//x5fu7XIPKr7/+qp9//lmS1LZtWz333HPq0aOHqlWrpnr1zv7mVJSgAgDA74kv39/leh+V9evXq0ePHu7n48ePlyQNHjxY8+bNK6eqAACALco1qHTv3l2WHHkCAAAWuqBOpgUAABULQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgA
AABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArFXmoPLzzz/rs88+06lTpyRJxhi/FQUAACCVIagcPXpUV111lS6++GJdd911OnDggCRp2LBhmjBhgt8LBAAAFZfPQWXcuHEKDg5WamqqIiMj3dMHDBigTz/91K/FAQCAii3Y1xk+//xzffbZZ4qPj/eY3rhxY+3Zs8dvhQEAAPi8R+XkyZMee1IKHDlyRGFhYX4pCgAAQCpDULn88sv12muvuZ87HA65XC49/fTT6tGjh1+LAwAAFZvPh36efvppde/eXevXr1dubq4eeOABbd68WceOHdOqVasCUSMAAKigfN6j0rx5c23atEkdOnRQz549dfLkSd10003auHGjGjZsGIgaAQBABeUwF/ANUDIzM+V0OpWRkaEqVaqUdzkAAMALvnx/+3zoZ9OmTcVOdzgcCg8PV7169TipFgAA+IXPQaVNmzZyOByS/u9utAXPJSkkJEQDBgzQK6+8ovDwcD+VCQAAKiKfz1F555131LhxY82aNUvff/+9vvvuO82aNUtNmjTR/PnzNXv2bH355Zd65JFHAlEvAACoQHzeo/LEE0/o+eef19VXX+2e1qpVK8XHx+tvf/ub1q5dq6ioKE2YMEHPPPOMX4sFAAAVi897VFJSUlS/fv0i0+vXr6+UlBRJvx0eKvgNIAAAgLLyOag0bdpU06ZNU25urnva6dOnNW3aNDVt2lSStG/fPsXGxvqvSgAAUCH5fOjnpZdeUt++fRUfH69WrVrJ4XBo06ZNys/P14cffihJ2rlzp0aNGuX3YgEAQMVSpvuo/Prrr3rjjTe0fft2GWPUtGlT3XbbbYqOjg5EjSXiPioAAFx4AnofFUmqXLmyRowYUabiAAAAvFWmoCJJW7ZsUWpqqse5KpLUt2/fcy4KAABAKkNQ2blzp2688UalpKTI4XAUuelbfn6+fysEAAAVls9X/YwZM0YJCQk6dOiQIiMjtXnzZq1YsUJJSUlavnx5AEoEAAAVlc97VNasWaMvv/xSF110kYKCghQUFKSuXbtq6tSpuu+++7Rx48ZA1AkAACogn/eo5Ofnq3LlypKkGjVqaP/+/ZJ+u+Hbtm3b/FsdAACo0Hzeo9KiRQtt2rRJiYmJ6tixo6ZPn67Q0FDNmjVLiYmJgagRAABUUD4HlUceeUQnT56UJD3++OPq06ePLrvsMlWvXl2LFi3ye4EAAKDiKtMN3wo7duyYYmJi3Ff+nC/c8A0AgAtPwG/4Vli1atX80Q0AAIAHn4NKdna2XnjhBS1btkyHDx+Wy+XyeD05OdlvxQEAgIrN56By1113acmSJfrjH/+oDh06nPfDPQAAoOLwOah89NFH+vjjj9WlS5dA1AMAAODm831U6tSpc95/JRkAAFRMPgeVZ599Vg8++KD27NkTiHoAAADcfD70k5SUpOzsbCUmJioyMlIhISEerx87dsxvxQEAgIrN56AycOBA7du3T08++aRiY2M5mRYAAASMz0Fl9erVWrNmjVq3bh2IegAAANx8PkeladOmOnXqVCBqAQAA8OBzUJk2bZomTJig5cuX6+jRo8rMzPR4AAAA+IvPv/UTFPRbtil8booxRg6HQ/n5+f6r7iz4rR8AAC48Af2tn2XLlpW5MAAAAF/4HFS6desWiDoAAACK8DqobNq0yat2rVq1KnMxAAAAZ/I6qLRp00YOh0OlndJyvs9RAQAAv29eB5Vdu3YFsg4AAIAivA4q9evXD2QdAAAARfh8HxUAAIDzhaACAACsRVABAADWIqgAAABrEVQAAIC1vLrqp23btkV+26ckycnJ51QQAABAAa+CSr9+/dz/zs7O1owZM9S8eXN16tRJkvTNN99o8+bNGjVqVECKBAAAFZNXQWXSpEnufw8bNkz33XefHnvssSJt0tLS/FsdAACo0BymtHviF8PpdGr9+vVq3Lixx/SffvpJSUlJysjI8GuBpfHlZ6IBAIAdfPn+9vlk2oiICK1cubLI9JUrVyo8PNzX7gAAAErk9S30C4wdO1YjR47Uhg0bdOmll0r67RyVOXPm6O9//7vfCwQAABWXz0Fl4sSJSkxM1PPPP6/58+dLkpo1a6Z58+bplltu8XuBAACg4vL5HBWbcI4KAAAXnoCeoyJJx48f17///W89/PDDOnbsmKTf7p+yb9++snQHAABQLJ8P/WzatElXXXWVnE6ndu/erWHDhqlatWp65513tGfPHr322muBqBMAAFRAPu9RGT9+vIYMGaKffvrJ4yqfa6+9VitWrPBrcQAAoGLzOaisW7dOw4cPLzK9Tp06OnjwoF+KAgAAkMoQVMLDw5WZmVlk+rZt23TRRRf5pSgAAACpDEHlhhtu0JQpU3T69GlJksPhUGpqqiZOnKj+/fv7vUAAAFBx+RxUnnnmGf3yyy+qWbOmTp06pW7duqlRo0aKjo7WE088EYgaAQBABeXzVT9VqlTRypUr9eWXXyo5OVkul0vt2rXTVVddFYj6AABABcYN3wAAwHnly/e3z3tUJOmLL77QF198ocOHD8vlcnm8NmfOnLJ0CQAAUITPQeXRRx/VlClTlJSUpNq1a8vhcASiLgAAAN+DysyZMzVv3jzdcccdgagHAADAzeerfnJzc9W5c+dA1AIAAODB56AybNgwzZ8/PxC1AAAAePD50E92drZmzZqlpUuXqlWrVgoJCfF4/bnnnvNbcQAAoGIr068nt2nTRpL0ww8/eLzGibUAAMCffA4qy5YtC0QdAAAARfh8jsqZ9u7dq3379vmrFgAAAA8+BxWXy6UpU6bI6XSqfv36qlevnqpWrarHHnusyM3fAAAAzoXPh37++te/avbs2Zo2bZq6dOkiY4xWrVqlyZMnKzs7mx8mBAAAfuPzb/3ExcVp5syZ6tu
3r8f09957T6NGjTqvh4L4rR8AAC48vnx/+3zo59ixY2ratGmR6U2bNtWxY8d87Q4AAKBEPgeV1q1b68UXXywy/cUXX1Tr1q39UhQAAIBUhnNUpk+frt69e2vp0qXq1KmTHA6HVq9erbS0NH388ceBqBEAAFRQPu9R6datm7Zv364bb7xRx48f17Fjx3TTTTdp27ZtuuyyywJRIwAAqKB8PpnWJpxMCwDAhSegJ9N++umnWrlypfv5Sy+9pDZt2ui2225Tenq679UCAACUwOeg8pe//EWZmZmSpJSUFI0fP17XXXeddu7cqfHjx/u9QAAAUHH5fDLtrl271Lx5c0nS22+/reuvv15PPvmkkpOTdd111/m9QAAAUHH5vEclNDRUWVlZkqSlS5eqV69ekqRq1aq597QAAAD4g897VLp27arx48erS5cuWrt2rRYtWiRJ2r59u+Lj4/1eIAAAqLh83qPy4osvKjg4WG+99ZZefvll1alTR5L0ySef6JprrvF7gQAAoOLi8mQAAHBe+fL97dWhn8zMTHdHZzsPhcAAAAD8xaugEhMTowMHDqhmzZqqWrWqHA5HkTbGGDkcDuXn5/u9SAAAUDF5FVS+/PJLVatWTZK0bNmygBYEAABQgHNUAADAeeX3c1QKO378uGbPnq2tW7fK4XCoefPmuuuuu+R0OstUMAAAQHF8vjx5/fr1atiwof7xj3/o2LFjOnLkiJ577jk1bNhQycnJgagRAABUUD4f+rnsssvUqFEjvfrqqwoO/m2HTF5enoYNG6adO3dqxYoVASm0OBz6AQDgwuPL97fPQSUiIkIbN25U06ZNPaZv2bJFSUlJ7tvrnw8EFQAALjy+fH/7fOinSpUqSk1NLTI9LS1N0dHRvnYHAABQIp+DyoABA3T33Xdr0aJFSktL0969e7Vw4UINGzZMAwcODESNAACggvL5qp9nnnlGDodDd955p/Ly8iRJISEhGjlypKZNm+b3AgEAQMVV5vuoZGVlaceOHTLGqFGjRoqMjPR3bWfFOSoAAFx4AnKOSlZWlu69917VqVNHNWvW1LBhw1S7dm21atWqXEIKAAD4/fM6qEyaNEnz5s1T7969deutt2rJkiUaOXJkIGsDAAAVnNfnqCxevFizZ8/WrbfeKkm6/fbb1aVLF+Xn56tSpUoBKxAAAFRcXu9RSUtL02WXXeZ+3qFDBwUHB2v//v0BKQwAAMDroJKfn6/Q0FCPacHBwe4rfwAAAPzN60M/xhgNGTJEYWFh7mnZ2dkaMWKEoqKi3NMWL17s3woBAECF5XVQGTx4cJFpt99+u1+LAQAAOJPXQWXu3LmBrAMAAKAIn2+hDwAAcL4QVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgrXIPKjNmzFBCQoLCw8PVvn17ff311+VdEgAAsERweS580aJFGjt2rGbMmKEuXbrolVde0bXXXqstW7aoXr165VmaNqama9eRk0qoEaW29WK8bn8636X9x09Jkro3qemet7T+Nqama/m2wzqYka1azvBi5zud71JIpSCP/ybv+e21KuHB2ns8W0d/zVFUaCVdXCtajWOjFRzk0OZ9GcrMzlOV8GBlZOfp6K85qh4VqsSLKmvnkZPaeiBTOafzVSU8RIkXRenIyVw5JFWNDNWhzGz9mn1axkg5eS7l5JsyjWUlhySHZFyS0W8PSQqpJEWFButUbr7yjFHl0EoKDw1W+slc5eUbhYcEqW61SAVXcuhUbr7a1otRfEyEVu846l6PX07k6vCJbMkhVY0IUWhwJaVn5epEdp4kKSYiRKHBQTp0IkcuIzkkVQmvpJioMOXm5csYKTykkiJCKynOGa6M7DztS89S1chQNatdRWHBQdrw/8fZGKOW8VV1WeMaHuP6hzpOxVWNUPKedP2wL0NGUpwzXNUrh0mSajnDFVc1wuP9K9gOCr+/CTWiJKnYbWVjaroWrk1VelaurmwWqwGX1NOidan6YushxUSG6tYO9bzaVktS0jZasH1Kntt0cfMXtMtzGf106IRHXd7240ttvjqzH6n4cS68LoGs9Wx/F/yxzheCirSupbFxHGyoyWGMKdu3jx907NhR7dq108svv+ye1qxZM/Xr109Tp0496/yZmZlyOp3KyMhQlSpV/FbXtE+2auZXO93PR3RL1MRrm3nd/kwjuiVKUon9lTRvcfPh96VNXae+S8sotU3BtlLcdhIREqRTp13FtvdVSdt8ccstbhmlfQak4tfV21p9/Tx6209JtXi7zudaa2lt/bXOF4KKtK6lsXEcAlmTL9/f5XboJzc3Vxs2bFCvXr08pvfq1UurV68udp6cnBxlZmZ6PPxtY2p6kT9SM7/aqY2p6V63LzxvSf2VNm9x8+H35WwhRfptO1i0LrXYbaFwSCloX9K2WpKStvmSllt4GWf7DEjFr6s3tfr6efSln+L6LKmdv2stra2/1vlCUJHWtTQ2joNNNZVbUDly5Ijy8/MVGxvrMT02NlYHDx4sdp6pU6fK6XS6H3Xr1vV7XbuOnPTLdG+WU9Z5UbF8n3bcp/a+blcltS9tuWfOcy7b8dnm9dfnzpv2Z/tM+rPW0tr6+2+NzSrSupbGxnGwqaZyP5nW4XB4PDfGFJlW4KGHHlJGRob7kZaW5vd6Co5dn+t0b5ZT1nlRsbSuW9Wn9r5uVyW1L225Z85zLtvx2eb11+fOm/Zn+0z6s9bS2vr7b43NKtK6lsbGcbCppnILKjVq1F
ClSpWK7D05fPhwkb0sBcLCwlSlShWPh7+1rRfjPj+kwMhuiSWeRFRc+8LzltRfafMWNx9+X9rUdZ61zchuiRpwSb1it4WIkKIf39K21ZKUtM2XtNzCyzjbZ0Aqfl29qdXXz6Mv/RTXZ0nt/F1raW39tc4Xgoq0rqWxcRxsqqncT6Zt3769ZsyY4Z7WvHlz3XDDDeV6Mq3EVT9c9cNVP2dO56ofrvoJpIq0rqWxcRwCVZMv39/lGlQWLVqkO+64QzNnzlSnTp00a9Ysvfrqq9q8ebPq169/1vkDGVQAAEBg+PL9Xa73URkwYICOHj2qKVOm6MCBA2rRooU+/vhjr0IKAAD4/SvXPSrnij0qAABceC6I+6gAAACcDUEFAABYi6ACAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALBWud5C/1wV3FQ3MzOznCsBAADeKvje9ubm+Bd0UDlx4oQkqW7duuVcCQAA8NWJEyfkdDpLbXNB/9aPy+XS/v37FR0dLYfDUd7llFlmZqbq1q2rtLQ0frPITxjTwGBc/Y8x9T/GNDD8Oa7GGJ04cUJxcXEKCir9LJQLeo9KUFCQ4uPjy7sMv6lSpQofKj9jTAODcfU/xtT/GNPA8Ne4nm1PSgFOpgUAANYiqAAAAGsRVCwQFhamSZMmKSwsrLxL+d1gTAODcfU/xtT/GNPAKK9xvaBPpgUAAL9v7FEBAADWIqgAAABrEVQAAIC1CCoAAMBaBJUA2b17t+6++24lJCQoIiJCDRs21KRJk5Sbm+vRLjU1Vddff72ioqJUo0YN3XfffUXapKSkqFu3boqIiFCdOnU0ZcqUIr+P8NVXX6l9+/YKDw9XYmKiZs6cGfB1tNmMGTOUkJCg8PBwtW/fXl9//XV5l2SFqVOn6pJLLlF0dLRq1qypfv36adu2bR5tjDGaPHmy4uLiFBERoe7du2vz5s0ebXJycvTnP/9ZNWrUUFRUlPr27au9e/d6tElPT9cdd9whp9Mpp9OpO+64Q8ePHw/0Kpa7qVOnyuFwaOzYse5pjGnZ7Nu3T7fffruqV6+uyMhItWnTRhs2bHC/zrj6Ji8vT4888oj7eykxMVFTpkyRy+Vyt7FyTA0C4pNPPjFDhgwxn332mdmxY4d57733TM2aNc2ECRPcbfLy8kyLFi1Mjx49THJyslmyZImJi4szo0ePdrfJyMgwsbGx5tZbbzUpKSnm7bffNtHR0eaZZ55xt9m5c6eJjIw0Y8aMMVu2bDGvvvqqCQkJMW+99dZ5XWdbLFy40ISEhJhXX33VbNmyxYwZM8ZERUWZPXv2lHdp5e7qq682c+fONT/88IP57rvvTO/evU29evXMr7/+6m4zbdo0Ex0dbd5++22TkpJiBgwYYGrXrm0yMzPdbUaMGGHq1KljlixZYpKTk02PHj1M69atTV5enrvNNddcY1q0aGFWr15tVq9ebVq0aGH69OlzXtf3fFu7dq1p0KCBadWqlRkzZox7OmPqu2PHjpn69eubIUOGmG+//dbs2rXLLF261Pz888/uNoyrbx5//HFTvXp18+GHH5pdu3aZ//3vf6Zy5crmn//8p7uNjWNKUDmPpk+fbhISEtzPP/74YxMUFGT27dvnnrZgwQITFhZmMjIyjDHGzJgxwzidTpOdne1uM3XqVBMXF2dcLpcxxpgHHnjANG3a1GNZw4cPN5deemkgV8daHTp0MCNGjPCY1rRpUzNx4sRyqshehw8fNpLMV199ZYwxxuVymVq1aplp06a522RnZxun02lmzpxpjDHm+PHjJiQkxCxcuNDdZt++fSYoKMh8+umnxhhjtmzZYiSZb775xt1mzZo1RpL58ccfz8eqnXcnTpwwjRs3NkuWLDHdunVzBxXGtGwefPBB07Vr1xJfZ1x917t3b3PXXXd5TLvpppvM7bffboyxd0w59HMeZWRkqFq1au7na9asUYsWLRQXF+eedvXVVysnJ8e9e3PNmjXq1q2bxw12rr76au3fv1+7d+92t+nVq5fHsq6++mqtX79ep0+fDuAa2Sc3N1cbNmwoMh69evXS6tWry6kqe2VkZEiSe7vctWuXDh486DF+YWFh6tatm3v8NmzYoNOnT3u0iYuLU4sWLdxt1qxZI6fTqY4dO7rbXHrppXI6nb/b9+Hee+9V7969ddVVV3lMZ0zL5v3331dSUpJuvvlm1axZU23bttWrr77qfp1x9V3Xrl31xRdfaPv27ZKk77//XitXrtR1110nyd4xJaicJzt27NALL7ygESNGuKcdPHhQsbGxHu1iYmIUGhqqgwcPltim4PnZ2uTl5enIkSN+XxebHTlyRPn5+cWOR8F44TfGGI0fP15du3ZVixYtJP3fNlXa+B08eFChoaGKiYkptU3NmjWLLLNmzZq/y/dh4cKFSk5O1tSpU4u8xpiWzc6dO/Xyyy+rcePG+uyzzzRixAjdd999eu211yQxrmXx4IMPauDAgWratKlCQkLUtm1bjR07VgMHDpRk75gSVHw0efJkORyOUh/r16/3mGf//v265pprdPPNN2vYsGEerzkcjiLLMMZ4TC/cxvz/E2l9bVORFDceFXUsSjJ69Ght2rRJCxYsKPJaWcbvbNutt/1caNLS0jRmzBi98cYbCg8PL7EdY+obl8uldu3a6cknn1Tbtm01fPhw3XPPPXr55Zc92jGu3lu0aJHeeOMNzZ8/X8nJyfrPf/6jZ555Rv/5z3882tk2pgQVH40ePVpbt24t9VHwf6fSbyGlR48e6tSpk2bNmuXRV61atYqky/T0dJ0+fdqdaItrc/jwYUk6a5vg4GBVr17dPyt+gahRo4YqVapU7HgU/r+EiuzPf/6z3n//fS1btkzx8fHu6bVq1ZKkUsevVq1ays3NVXp6eqltDh06VGS5v/zyy+/ufdiwYYMOHz6s9u3bKzg4WMHBwfrqq6/0r3/9S8HBwUX2gBZgTEtXu3ZtNW/e3GNas2bNlJqaKolttSz+8pe/aOLEibr11lvVsmVL3XHHHRo3bpx7T6CtY0pQ8VGNGjXUtGnTUh8F/1e1b98+de/eXe3atdPcuXMVFOQ53J06ddIPP/ygAwcOuKd9/vnnCgsLU/v27d1tVqxY4XHJ8ueff664uDg1aNDA3WbJkiUefX/++edKSkpSSEhIIIbBWqGhoWrfvn2R8ViyZIk6d+5cTlXZwxij0aNHa/Hixfryyy+VkJDg8XpCQoJq1arlMX65ubn66quv3OPXvn17hYSEeLQ5cOCAfvjhB3ebTp06KSMjQ2vXrnW3+fbbb5WRkfG7ex+uvPJKpaSk6LvvvnM/kpKSNGjQIH333XdKTExkTMugS5cuRS6d3759u+rXry+JbbUssrKyinwPVapUyX15srVj6vPpt/DKvn37TKNGjcwVV1xh9u7daw4cOOB+FCi4PPnKK680ycnJZunSpSY+Pt7j8uTjx4+b2NhYM3DgQJOSkmIWL15sqlSpUuzlyePGjTNbtmwxs2fP5vLkkBAze/Zss2XLFjN27FgTFRVldu/eX
d6llbuRI0cap9Npli9f7rFNZmVludtMmzbNOJ1Os3jxYpOSkmIGDhxY7OWJ8fHxZunSpSY5OdlcccUVxV6e2KpVK7NmzRqzZs0a07Jly9/lJZ/FOfOqH2MY07JYu3atCQ4ONk888YT56aefzH//+18TGRlp3njjDXcbxtU3gwcPNnXq1HFfnrx48WJTo0YN88ADD7jb2DimBJUAmTt3rpFU7ONMe/bsMb179zYRERGmWrVqZvTo0R6XIhtjzKZNm8xll11mwsLCTK1atczkyZPdlyYXWL58uWnbtq0JDQ01DRo0MC+//HLA19FmL730kqlfv74JDQ017dq1c19+W9GVtE3OnTvX3cblcplJkyaZWrVqmbCwMHP55ZeblJQUj35OnTplRo8ebapVq2YiIiJMnz59TGpqqkebo0ePmkGDBpno6GgTHR1tBg0aZNLT08/DWpa/wkGFMS2bDz74wLRo0cKEhYWZpk2bmlmzZnm8zrj6JjMz04wZM8bUq1fPhIeHm8TERPPXv/7V5OTkuNvYOKYOYwrd4hQAAMASnKMCAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQWA1YYMGaJ+/fq5n3fv3l1jx44tt3oAnF8EFQA+OXjwoMaMGaNGjRopPDxcsbGx6tq1q2bOnKmsrKyAL3/x4sV67LHH/Npn4TAEwB7B5V0AgAvHzp071aVLF1WtWlVPPvmkWrZsqby8PG3fvl1z5sxRXFyc+vbtW2S+06dP++0HMqtVq+aXfgBcGNijAsBro0aNUnBwsNavX69bbrlFzZo1U8uWLdW/f3999NFHuv766yVJDodDM2fO1A033KCoqCg9/vjjys/P1913362EhARFRESoSZMmev755z36z8/P1/jx41W1alVVr15dDzzwgAr/ykfhQz+5ubl64IEHVKdOHUVFRaljx45avny5+/V58+apatWq+uyzz9SsWTNVrlxZ11xzjftXyydPnqz//Oc/eu+99+RwOORwODzmB1C+CCoAvHL06FF9/vnnuvfeexUVFVVsG4fD4f73pEmTdMMNNyglJUV33XWXXC6X4uPj9eabb2rLli36+9//rocfflhvvvmme55nn31Wc+bM0ezZs7Vy5UodO3ZM77zzTql1DR06VKtWrdLChQu1adMm3Xzzzbrmmmv0008/udtkZWXpmWee0euvv64VK1YoNTVV999/vyTp/vvv1y233OIOLwcOHCjbT9EDCAgO/QDwys8//yxjjJo0aeIxvUaNGsrOzpYk3XvvvXrqqackSbfddpvuuusuj7aPPvqo+98JCQlavXq13nzzTd1yyy2SpH/+85966KGH1L9/f0nSzJkz9dlnn5VY044dO7RgwQLt3btXcXFxkn4LHp9++qnmzp2rJ598UtJvh55mzpyphg0bSpJGjx6tKVOmSJIqV66siIgI5eTkqFatWmUbHAABQ1AB4JMz95pI0tq1a+VyuTRo0CDl5OS4pyclJRWZd+bMmfr3v/+tPXv26NSpU8rNzVWbNm0kSRkZGTpw4IA6derkbh8cHKykpKQih38KJCcnyxijiy++2GN6Tk6Oqlev7n4eGRnpDimSVLt2bR0+fNj7lQZQbggqALzSqFEjORwO/fjjjx7TExMTJUkREREe0wsfHnrzzTc1btw4Pfvss+rUqZOio6P19NNP69tvvy1zTS6XS5UqVdKGDRtUqVIlj9cqV67s/nfhE3kdDkeJ4QeAXThHBYBXqlevrp49e+rFF1/UyZMnfZ7/66+/VufOnTVq1Ci1bdtWjRo10o4dO9yvO51O1a5dW9988417Wl5enjZs2FBin23btlV+fr4OHz6sRo0aeTx8OYwTGhqq/Px8n9cJQOARVAB4bcaMGcrLy1NSUpIWLVqkrVu3atu2bXrjjTf0448/FtmrcaZGjRpp/fr1+uyzz7R9+3b97W9/07p16zzajBkzRtOmTdM777yjH3/8UaNGjdLx48dL7PPiiy/WoEGDdOedd2rx4sXatWuX1q1bp6eeekoff/yx1+vVoEEDbdq0Sdu2bdORI0d0+vRpr+cFEFgEFQBea9iwoTZu3KirrrpKDz30kFq3bq2kpCS98MILuv/++0u9EduIESN00003acCAAerYsaOOHj2qUaNGebSZMGGC7rzzTg0ZMsR9eOjGG28staa5c+fqzjvv1IQJE9SkSRP17dtX3377rerWrev1et1zzz1q0qSJkpKSdNFFF2nVqlVezwsgsByGA7UAAMBS7FEBAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFr/D417CzTOvbk0AAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Number of poisoned images: 150 out of 10000.\n", + "last index of poison 490\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAj8AAAHFCAYAAAAdTZjVAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAABexklEQVR4nO3dd1gUV9sG8HtpSwcBaYqAoogK9m4UrNhi1KhBo2KJvWCJJUZBY8Qek9hiQ02sscWoL0jssSN2saNYQKygokg53x9+O2FZUBZ3Bdz7d117yZ4558wzZ2dnH6fKhBACRERERDpCr6ADICIiIvqYmPwQERGRTmHyQ0RERDqFyQ8RERHpFCY/REREpFOY/BAREZFOYfJDREREOoXJDxEREekUJj9ERESkU4p88nPu3Dn06dMHZcqUgYmJCUxMTFC2bFn0798fUVFRHy2OkJAQyGQypTI3NzcEBgZqdb5HjhxBSEgInj17pvG+N2zYgIoVK8LExAQymQxnzpzJsd7+/fshk8mkl76+PhwcHNCpUyfExMSoPd/AwEC4ubl9WPCFwMqVKyGTyXDr1q2CDkWS13VS8VlOnz5dZZpiuT7m9ysrX19fpfXNxMQElStXxrx585CZmalWX7du3YJMJsPKlSu1E+xHpM5nO2TIEO0HVMi5ubmhTZs2BR0GgP+2oZs2bcp3HwW13Xz27Bns7Oywfv16qezu3bsICgpCo0aNYG1trfZ3LK/t09LSUKZMGcybN0/tuIt08vPbb7+hevXqOH78OIYPH44dO3Zg586dCAoKwsWLF1GzZk3cuHGjwOLbunUrJk6cqNV5HDlyBJMnT9Z48vPw4UN0794dZcqUQXh4OI4ePYpy5cq9s820adNw9OhR7Nu3D2PHjkVkZCTq16+Pe/fuqTXviRMnYuvWrR8SPmnI9OnT8eTJk4IOQ0Xp0qVx9OhRHD16FBs2bECJEiUwYsQIjB8/Xq1+nJyccPToUbRu3VpLkRJ92iZPngxnZ2d06dJFKrt+/TrWrFkDIyMjtGrVSu0+89re0NAQkyZNwpQpU/D48WO15mGgdlSFxOHDhzFo0CC0bt0amzZtgpGRkTStcePGGDx4MP7880+YmJi8s5+UlBSYmppqJcaqVatqpd+P4erVq0hLS8PXX3+NRo0a5alN2bJlUadOHQBAw4YNYW1tjT59+mDlypWYMGFCnuddpkyZfMVMmtW0aVPs378fP/74I+bMmVPQ4SgxMTGR1jUAaNmyJcqXL4/58+dj6tSpMDQ0zFM/crlcqR8iyrsnT57gt99+w08//aR05KNhw4Z4+PAhACAqKgrr1q1Tq1912gcEBGDkyJH47bff8N133+V5HkV2z8+0adOgr6+P3377TSnxyapTp05wdnaW3gcGBsLc3Bznz59H8+bNYWFhgSZNmgAAIiMj0a5dO5QsWRLGxsbw8PBA//798ejRI5V+d+7ciSpVqkAul8Pd3R2zZ8/Ocf457YZOTk7G6NGj4e7uDiMjI5QoUQJBQUF4+fKlUj3Frunff/8dXl5eMDU1ReXKlbFjxw6pTkhICL799lsAgLu7u3QYYP/+/e8cu+3bt6Nu3bowNTWFhYUFmjVrhqNHjyqNU4MGDQAAXbp0gUwmg6+v7zv7zIniR+X27dsAgMzMTMycORPly5eHXC6Hvb09evTogbt37yq1y2n37Z9//onatWvDysoKpqamKF26NHr37q1UJy4uDl9//TXs7e0hl8vh5eWFOXPmKB0KURzmmD17NubOnQt3d3eYm5ujbt26OHbsmMoyREVF4fPPP4eNjQ2MjY1RtWpVbNy4UaXesWPHUL9+fRgbG8PZ2Rnjx49HWlpansYpKioKX331Fdzc3GBiYgI3NzcEBARI46agONy0b98+DBw4EHZ2drC1tUWHDh1w//59pbppaWkYM2YMHB0dYWpqigYNGuDEiRN5ikfB09MTffr0wYIFC1Riycn71ivgv8PDFy9eREBAAKysrODg4IDevXsjKSlJrfiyMjQ0RPXq1ZGSkiJtNC9cuIB27dqhWLFiMDY2RpUqVbBq1Sqldjkd9nr48CH69esHFxcXyOVyFC9eHPXr18c///yj1HbFihWoXLkyjI2NYWNjg/bt26sc5lVsc65fv45WrVrB3NwcLi4uGDVqFFJTU5XqvnnzBlOnTpW+H8WLF0evXr2k5VHQxGebleKQy9q1azF27Fg4OTnB3Nwcbdu2xYMHD/D8+XP069cPdnZ2sLOzQ69evfDixQulPhYsWICGDRvC3t4eZmZm8Pb2xsyZM1W+A0IITJs2Da6urjA2NkaNGjUQGRkJX19flW1MXreVedk25FdefxcU6/W5c+fQqVMnWFlZwcbGBiNHjkR6ejquXLkCf39/WFhYwM3NDTNnzsxxfq9fv8bIkSPh6OgIExMTNGrUCKdPn1apt3LlSnh6ekrbudWrV+fY3+TJk1G7dm3Y2NjA0tIS1apVw/Lly6Gp55mvXLkS6enpSnt9AEBP78NSC3XaGxkZoUuXLliyZIl6yyWKoPT0dGFiYiLq1q2rVruePXsKQ0ND4ebmJkJDQ8WePXtERESEEEKIRYsWidDQULF9+3Zx4MABsWrVKlG5cmXh6ekp3rx5I/Xxzz//CH19fdGgQQOxZcsW8eeff4qaNWuKUqVKiezD6erqKnr27Cm9f/nypahSpYqws7MTc+fOFf/884/4+eefhZWVlWjcuLHIzMyU6gIQbm5uolatWmLjxo1i165dwtfXVxgYGIgbN24IIYS4c+eOGDp0qAAgtmzZIo4ePSqOHj0qkpKSch2DNWvWCACiefPmYtu2bWLDhg2ievXqwsjISBw6dEgIIcT169fFggULBAAxbdo0cfToUXHx4sVc+9y3b58AIP7880+l8r/++ksAEN99950QQoh+/foJAGLIkCEiPDxcLF68WBQvXly4uLiIhw8fKn1Orq6u0vsjR44ImUwmvvrqK7Fr1y6xd+9eERYWJrp37y7VSUxMFCVKlBDFixcXixcvFuHh4WLIkCECgBg4cKBULzY2Vhpbf39/sW3bNrFt2zbh7e0tihUrJp49eybV3bt3rzAyMhKfffaZ2LBhgwgPDxeBgYECgAgLC5PqXbx4UZiamooKFSqIdevWib/++ku0aNFCWidiY2NzHTshhPjzzz/FpEmTxNatW8WBAwfE+vXrRaNGjUTx4sWVxiUsLEwAEKVLlxZDhw4VERERYtmyZaJYsWLCz89Pqc+ePXsKmUwmvv32W7F7924xd+5cUaJECWFpaam0TuYGgBg8eL
CIj48XpqamSmOtiOPkyZNSWV7WKyGECA4OFgCEp6enmDRpkoiMjBRz584Vcrlc9OrV671xCSFEo0aNRMWKFVXKq1WrJgwMDERKSoq4fPmysLCwEGXKlBGrV68WO3fuFAEBAQKAmDFjhtRGsT5k/TxbtGghihcvLpYsWSL2798vtm3bJiZNmiTWr18v1Zk2bZoAIAICAsTOnTvF6tWrRenSpYWVlZW4evWqVK9nz57CyMhIeHl5idmzZ4t//vlHTJo0SchkMjF58mSpXkZGhvD39xdmZmZi8uTJIjIyUixbtkyUKFFCVKhQQaSkpCj1qYnPVkHx/XV1dRWBgYHSd9Pc3Fz4+fmJZs2aidGjR4vdu3eLGTNmCH19fTF06FClPkeMGCEWLVokwsPDxd69e8VPP/0k7OzsVD7T8ePHCwCiX79+Ijw8XCxdulSUKlVKODk5iUaNGkn18rqtzMu2ITeurq6idevW76yT19+FrOv1Dz/8ICIjI8WYMWOk7V358uXFL7/8IiIjI0WvXr0EALF582aVz8DFxUW0a9dO/P333+KPP/4QHh4ewtLSUtrmC/Hf9y97PRcXF6XtphBCBAYGiuXLl4vIyEgRGRkpfvjhB2FiYqK07gnxdv1LS0t77ys9PV2pXePGjUWtWrXeOYYnT55U+Y6pIy/tN2zYIACIc+fO5bnfIpn8JCQkCADiq6++UpmWnp6u9GFlTSh69uwpAIgVK1a8s//MzEyRlpYmbt++LQCIv/76S5pWu3Zt4ezsLF69eiWVJScnCxsbm/cmP6GhoUJPT0/pR0MIITZt2iQAiF27dkllAISDg4NITk5WWm49PT0RGhoqlc2aNStPP7BCvF3BnZ2dhbe3t8jIyJDKnz9/Luzt7UW9evWkstwSmpwo6m7YsEGkpaWJlJQUcfDgQeHh4SH09fXF2bNnRUxMjAAgBg0apNT2+PHjSgmSEKrJz+zZswUApcQku3HjxgkA4vjx40rlAwcOFDKZTFy5ckUI8d+Pnbe3t9IX+cSJEwKAWLdunVRWvnx5UbVqVZGWlqbUZ5s2bYSTk5M0hl26dBEmJiYiISFBqpOeni7Kly+f588mq/T0dPHixQthZmYmfv75Z6lcsdHLPoYzZ84UAER8fLwQQkhjPWLECKV6igRF3R/ICRMmCD09PXH27FmlOBTrsTrrleJHYubMmUrzGzRokDA2Nlb6vuZGkfwovuP379+XPv9OnToJIYT46quvhFwuF3FxcUptW7ZsKUxNTaV1Kafkx9zcXAQFBeU6/6dPnwoTExPRqlUrpfK4uDghl8tF165dpTLFNmfjxo1KdVu1aiU8PT2l9+vWrVP5QRTivw3/woULhRCa/2yF+O/727ZtW6V6QUFBAoAYNmyYUvkXX3whbGxscu1f8UO6evVqoa+vL548eSKEEOLJkydCLpeLLl26KNU/evSoAKCU/OR1W5mXbUNu8pL8ZPWu3wXFej1nzhylNlWqVJH+c6qQlpYmihcvLjp06CCVKT6DatWqKX0Hbt26JQwNDUXfvn2FEP9913Krlz35yUrxuUyZMkXY2trm+Nv4vlfWz0gIIUxNTcWAAQPeOW4fI/m5du2aACAWLVqU536L7GGv3FSvXh2GhobSK6dzFTp27KhSlpiYiAEDBsDFxQUGBgYwNDSEq6srAEi7sl++fImTJ0+iQ4cOMDY2ltpaWFigbdu2741tx44dqFSpEqpUqYL09HTp1aJFixwPV/n5+cHCwkJ67+DgAHt7+zwdgsjJlStXcP/+fXTv3l1pt6K5uTk6duyIY8eOISUlJV99A28PkRkaGsLU1BQNGzZERkYGNm3aBB8fH+zbtw8AVA4D1qpVC15eXtizZ0+u/dasWRMA0LlzZ2zcuDHHE6j37t2LChUqoFatWkrlgYGBEEJg7969SuWtW7eGvr6+9N7HxwfAf4forl+/jsuXL6Nbt24AoPR5tWrVCvHx8bhy5QoAYN++fWjSpAkcHByk/vT19VV2BefmxYsXGDt2LDw8PGBgYAADAwOYm5vj5cuXOV4t9/nnnyu9zx67YqwVsSt07twZBgbqn+Y3ZswY2NjYYOzYsTlOz896ldMyvH79GomJiQDeHiLNOuYZGRlK9S9evCh9x52dnTFnzhx069YNS5cuBfB2fWjSpAlcXFyU2gUGBiIlJUXlcFxWtWrVwsqVKzF16lQcO3ZM5dDN0aNH8erVK5V12cXFBY0bN1ZZl2Uymcr2wcfHR+l7vGPHDlhbW6Nt27ZKy12lShU4OjpK2wZNf7ZZZb/yycvLCwBUTgb38vLCkydPlA59nT59Gp9//jlsbW2hr68PQ0ND9OjRAxkZGbh69SqAt4eGU1NT0blzZ6X+6tSpo3KYO6/byrxsGz5EXn4XssppDGUyGVq2bCmVGRgYwMPDI8fteNeuXZXOnXF1dUW9evWkz13xXcutXnZ79+5F06ZNYWVlJX0ukyZNwuPHj6XvGvD2sN3Jkyff+/rtt9+kNs+ePUNKSgrs7e3fO47apohBnc+/SJ7wbGdnBxMTkxxXnrVr1yIlJQXx8fEqG1gAMDU1haWlpVJZZmYmmjdvjvv372PixInw9vaGmZkZMjMzUadOHbx69QoA8PTpU2RmZsLR0VGl35zKsnvw4AGuX7+e68mY2Y8j29raqtSRy+VSPOpSnA3v5OSkMs3Z2RmZmZl4+vRpvk8AnzFjBho3bgx9fX3Y2dkp/fC8b97vSugaNmyIbdu24ZdffkGPHj2QmpqKihUrYsKECQgICJD6z+kyT8U5X9mvBMg+tnK5HACksX3w4AEAYPTo0Rg9enSOcSk+r8ePH+d7nQDebvD27NmDiRMnombNmrC0tIRMJkOrVq1y/KzfF7tiWbPP38DAIMd16n0sLS3x/fffIygoSNoIZ5Wf9ep9y9C7d2+l83MaNWqk9J+DMmXKYP369ZDJZDA2Noa7u7tS/48fP841nqwx52TDhg2YOnUqli1bhokTJ8Lc3Bzt27fHzJkz4ejo+N7ljYyMVCozNTVV+s+SYnlfv34tvX/w4AGePXuW6/mLWdc1QHOfbVY2NjZK7xWx5Fb++vVrmJubIy4uDp999hk8PT3x888/w83NDcbGxjhx4gQGDx6ssl5m/U+CQvayvG4r87JtyK+8/i5kldNY5fT5GxkZITk5WaV9btuRs2fPAsj981eUZb2txokTJ9C8eXP4+vpi6dKlKFmyJIyMjLBt2zb8+OOPSvGXKlUKJUuWfMdovJU14VK0z75sBUERgzq/jUUy+dHX10fjxo2xe/duxMfHK22EKlSoAAC53lsl+714gLcnRp49exYrV65Ez549pfLr168r1StWrBhkMhkSEhJU+sipLDtF0rZixYpcp2uTYuMYHx+vMu3+/fvQ09NDsWLF8t1/6dKlUaNGjffOO/uX7P79++9d9nbt2qFdu3ZITU3FsWPHEBoaiq5du8LNzQ1169aFra1trssFqD+2ivrjx49Hhw4dcqzj6ekpLVt+14mkpCTs2LEDwcHBGDdunFSempqa70vMFWOdkJCAEiVKS
OXp6elqXw6qMHDgQPz8888YO3YsBg4cmOP8NLlehYSEKN2LJuseUADSybK5+ZD1wc7ODvPmzcO8efMQFxeH7du3Y9y4cUhMTER4ePh7lzc/32PFyevh4eE5TlcsvzY+2w+1bds2vHz5Elu2bJH2igBQuS+YInbFfyyySkhIUPrPizrbyvdtG/Irr78LmpTbdkQxdlk///e1Xb9+PQwNDbFjxw6lBGXbtm0qbbP/ZyM3Wf8TooilMNwKQxGDOt+9InvYa/z48cjIyMCAAQPyfFVNbhQJkeJ/nwpZd/EBgJmZGWrVqoUtW7Yo/a/t+fPn+Pvvv987nzZt2uDGjRuwtbVFjRo1VF75uUFV9v8xv4unpydKlCiBtWvXKp0V//LlS2zevFm6UkcbGjduDAD4448/lMpPnjyJmJgY6aq795HL5WjUqBFmzJgBANKVEE2aNMGlS5cQHR2tVH/16tWQyWTw8/NTK15PT0+ULVsWZ8+ezfGzqlGjhvSD5Ofnhz179iht1DMyMrBhw4b3zkcmk0EIobLuLVu2TOVQT14prppZs2aNUvnGjRuRnp6erz6NjIwwdepUnDx5En/++afSNG2sV25ubkpjrUg086pJkybYu3evylVwq1evhqmpaZ4vby9VqhSGDBmCZs2aSetW3bp1YWJiorIu3717Vzrcpq42bdrg8ePHyMjIyHFdUyy/Nj7bD5XT9lMIIR2CVKhduzbkcrnK9+LYsWMqe37zs63MbdugyeUCVH8XNGndunVK36Hbt2/jyJEj0ufu6ekJJyenXOtlJZPJYGBgoHR4/9WrV/j9999V5pufw15GRkYoXbp0gd5LT+HmzZsA/tv5kRdFcs8PANSvXx8LFizA0KFDUa1aNfTr1w8VK1aEnp4e4uPjsXnzZgBQOcSVk/Lly6NMmTIYN24chBCwsbHB33//rbL7GgB++OEH+Pv7o1mzZhg1ahQyMjIwY8YMmJmZvTcDDgoKwubNm9GwYUOMGDECPj4+yMzMRFxcHHbv3o1Ro0ahdu3aao2Dt7c3AODnn39Gz549YWhoCE9PT5X/KQNvLx+cOXMmunXrhjZt2qB///5ITU3FrFmz8OzZsxzv5qspnp6e6NevH3799Vfo6emhZcuWuHXrFiZOnAgXFxeMGDEi17aTJk3C3bt30aRJE5QsWRLPnj3Dzz//DENDQ+keRCNGjMDq1avRunVrTJkyBa6urti5cycWLlyIgQMHvvcGjTn57bff0LJlS7Ro0QKBgYEoUaIEnjx5gpiYGERHR0tJwPfff4/t27ejcePGmDRpEkxNTbFgwQKVS3JzYmlpiYYNG2LWrFmws7ODm5sbDhw4gOXLl8Pa2lrtmIG35xl8/fXXmDdvHgwNDdG0aVNcuHABs2fPztP3ITcBAQGYPXs2/ve//ymVF+R6lZvg4GDs2LEDfn5+mDRpEmxsbLBmzRrs3LkTM2fOhJWVVY7tkpKS4Ofnh65du6J8+fKwsLDAyZMnER4eLu0BtLa2xsSJE/Hdd9+hR48eCAgIwOPHjzF58mQYGxsjODhY7Xi/+uorrFmzBq1atcLw4cNRq1YtGBoa4u7du9i3bx/atWuH9u3ba+2z/RDNmjWDkZERAgICMGbMGLx+/RqLFi3C06dPleopLv0ODQ1FsWLF0L59e9y9exeTJ0+Gk5OT0vlied1W5mXb8C4JCQk53lXZzc0NlStXzvPvgqYkJiaiffv2+Oabb5CUlITg4GAYGxtLN+/U09PDDz/8gL59+0r1nj17hpCQEJVDYa1bt8bcuXPRtWtX9OvXD48fP8bs2bNVkjnF8ubnP9++vr4q2wMFxbgqEpOoqCiYm5sDAL788kupXkhICCZPnox9+/Yp3e4gr+2Btwm0vr4+GjZsmPfg83X6dSFy5swZ0atXL+Hu7i7kcrkwNjYWHh4eokePHmLPnj1KdXv27CnMzMxy7OfSpUuiWbNmwsLCQhQrVkx06tRJxMXFCQAiODhYqe727duFj4+PMDIyEqVKlRLTp0+XzvbPKvvVXkII8eLFC/H9998LT09PYWRkJKysrIS3t7cYMWKE0tVCyHZFxrv6HD9+vHB2dhZ6enoCgNi3b987x2zbtm2idu3awtjYWJiZmYkmTZqIw4cPK9XJz9Ve76ubkZEhZsyYIcqVKycMDQ2FnZ2d+Prrr8WdO3eU6mW/2mvHjh2iZcuWokSJEsLIyEjY29uLVq1aKV1CLYQQt2/fFl27dhW2trbC0NBQeHp6ilmzZildgaS4umfWrFkq8eX0WZ89e1Z07txZ2NvbC0NDQ+Ho6CgaN24sFi9erFTv8OHDok6dOkIulwtHR0fx7bffiiVLluTpaq+7d++Kjh07imLFigkLCwvh7+8vLly4oPJZ53SJuRD/jX/Wzz01NVWMGjVK2NvbC2NjY1GnTh1x9OjRHNefnOS2/u3evVu68iN7HHlZrxTfk6yX8GddtrxcGZfbpe7ZnT9/XrRt21ZYWVkJIyMjUblyZZUrRrJf7fX69WsxYMAA4ePjIywtLYWJiYnw9PQUwcHB4uXLl0ptly1bJm0HrKysRLt27VRuCZHbNien7UVaWpqYPXu2qFy5sjA2Nhbm5uaifPnyon///uLatWtSPU1/trl9f3Nb33L6DP/++28p7hIlSohvv/1W/O9//1NZLzMzM8XUqVNFyZIlhZGRkfDx8RE7duwQlStXFu3bt1eaT162lXndNuTE1dU116uaFOOY19+F3Nbr3D7/7Ouw4jP4/fffxbBhw0Tx4sWFXC4Xn332mYiKilJpv2zZMlG2bFlhZGQkypUrJ1asWKGy3RRCiBUrVghPT08hl8tF6dKlRWhoqFi+fHm+rkLNyZ49ewQAceLECZVpuY1t9vV+1KhRQiaTiZiYmHy1F0KIzz77TOVqxfeR/f9MiIiIPrrY2FiUL18ewcHBat2hlwoHHx8f1K9fH4sWLcpX+1q1asHV1VXlcHpe3bhxA2XLlkVERASaNWuW53ZMfoiI6KM4e/Ys1q1bh3r16sHS0hJXrlzBzJkzkZycjAsXLuR4JRgVbuHh4Wjfvj2uXbuWpyvGskpOTkbx4sVx5swZ6dYK6urVqxfu3r2r9uHIInvODxERFS1mZmaIiorC8uXL8ezZM1hZWcHX1xc//vgjE58iyt/fH7NmzUJsbKzayY+lpaXKY17UkZ6ejjJlyqj9QGOAe36IiIhIxxTZS92JiIiI8oPJDxEREekUJj9ERESkUwr0hOfQ0FBs2bIFly9fhomJCerVq4cZM2Yo3c01MDBQ5bbbtWvXxrFjx/I0j8zMTNy/fx8WFhY5PtqCiIiICh8hBJ4/fw5nZ2elm2BqQoEmPwcOHMDgwYNRs2ZNpKenY8KECWjevDkuXboEMzMzqZ6/vz/CwsKk97k9/C8n9+/fV3myMxERERUNd+7cUftKsvcp0OQn+0P8wsLCYG9vj1OnTindploul+f5CdnZKR7zcOfOnQK7/TsRERGpJzk5GS4uLjk+rulDFar7/CQlJQF4+wyYrPbv3w97e3tYW1ujUaNG+PHHH2Fv
b59jH6mpqUr3DXj+/DmAt/cTYPJDRERUtGjjlJVCc58fIQTatWuHp0+f4tChQ1L5hg0bYG5uDldXV8TGxmLixIlIT0/HqVOncnxAm+IhadklJSUx+SEiIioikpOTYWVlpZXf70KT/AwePBg7d+7Ev//++85je/Hx8XB1dcX69eulpyxnlX3Pj2K3GZMfIiKiokObyU+hOOw1dOhQbN++HQcPHnzvSU1OTk5wdXXFtWvXcpwul8tz3CNEREREBBRw8iOEwNChQ7F161bs378f7u7u723z+PFj3LlzB05OThqNJSMjA2lpaRrtkyg/DA0Noa+vX9BhEBF9sgo0+Rk8eDDWrl2Lv/76CxYWFkhISAAAWFlZwcTEBC9evEBISAg6duwIJycn3Lp1C9999x3s7OzQvn17jcQghEBCQgKePXumkf6INMHa2hqOjo68NxURkRYUaPKzaNEiAICvr69SeVhYGAIDA6Gvr4/z589j9erVePbsGZycnODn54cNGzZo7NI3ReJjb28PU1NT/thQgRJCICUlBYmJiQCg8T2cRERUCA57vYuJiQkiIiK0Nv+MjAwp8bG1tdXafIjUYWJiAgBITEyEvb09D4EREWmYTj/bS3GOj6mpaQFHQqRMsU7yPDQiIs3T6eRHgYe6qLDhOklEpD1MfoiIiEinMPn5hMlkMmzbtq2gwyAiIipUCsVNDgsjt3E7P9q8bk1vrXabhIQE/Pjjj9i5cyfu3bsHe3t7VKlSBUFBQWjSpIkWosw/X19fVKlSBfPmzSvoUIiIiJj8FEW3bt1C/fr1YW1tjZkzZ8LHxwdpaWmIiIjA4MGDcfny5YIOkYiIqNDiYa8iaNCgQZDJZDhx4gS+/PJLlCtXDhUrVsTIkSNx7NixXNvdu3cPXbp0QbFixWBra4t27drh1q1b0vSTJ0+iWbNmsLOzg5WVFRo1aoTo6GilPmQyGZYtW4b27dvD1NQUZcuWxfbt29WK383NDVOnTkWPHj2kh9b+9ddfePjwIdq1awdzc3N4e3sjKipKavP48WMEBASgZMmSMDU1hbe3N9atW6fU7/Pnz9GtWzeYmZnByckJP/30E3x9fREUFCTVefPmDcaMGYMSJUrAzMwMtWvXxv79+6Xpt2/fRtu2bVGsWDGYmZmhYsWK2LVrl1rLR0REhRuTnyLmyZMnCA8Px+DBg2FmZqYy3draOsd2KSkp8PPzg7m5OQ4ePIh///0X5ubm8Pf3x5s3bwC8TR569uyJQ4cO4dixYyhbtixatWqF58+fK/U1efJkdO7cGefOnUOrVq3QrVs3PHnyRK3l+Omnn1C/fn2cPn0arVu3Rvfu3dGjRw98/fXXiI6OhoeHB3r06CHdC+r169eoXr06duzYgQsXLqBfv37o3r07jh8/LvU5cuRIHD58GNu3b0dkZCQOHTqkkrz16tULhw8fxvr163Hu3Dl06tQJ/v7+0rPiBg8ejNTUVBw8eBDnz5/HjBkzYG5urtayERFR4cbDXkXM9evXIYRA+fLl1Wq3fv166OnpYdmyZdJl1GFhYbC2tsb+/fvRvHlzNG7cWKnNb7/9hmLFiuHAgQNo06aNVB4YGIiAgAAAwLRp0/Drr7/ixIkT8Pf3z3X+j16k4tzdZ9L7Vq1aoX///gCASZMmYdGiRahZsyY6deoEABg7dizq1q2LBw8ewNHRESVKlMDo0aOl9kOHDkV4eDj+/PNP1K5dG8+fP8eqVauwdu1a6ZynsLAwODs7S21u3LiBdevW4e7du1L56NGjER4ejrCwMEybNg1xcXHo2LEjvL29AQClS5dWa5yJiKjwY/JTxCj2hKh7H5hTp07h+vXrKo8Fef36NW7cuAHg7R2FJ02ahL179+LBgwfIyMhASkoK4uLilNr4+PhIf5uZmcHCwkJ6HENeZe3DwcEBAKSEI2tZYmIiHB0dkZGRgenTp2PDhg24d+8eUlNTkZqaKu39unnzJtLS0lCrVi2pDysrK3h6ekrvo6OjIYRAuXLllGJJTU2V7vA9bNgwDBw4ELt370bTpk3RsWNHpViJiKjoY/JTxJQtWxYymQwxMTH44osv8twuMzMT1atXx5o1a1SmFS9eHMDbPToPHz7EvHnz4OrqCrlcjrp160qHxRQMDQ2V3stkMmRmZqq1HFn7UCRyOZUp+p0zZw5++uknzJs3D97e3jAzM0NQUJAUW25JYdZHqGRmZkJfXx+nTp1SeWSE4tBW37590aJFC+zcuRO7d+9GaGgo5syZg6FDh6q1fEREVHjxnJ8ixsbGBi1atMCCBQvw8uVLlem5PZ2+WrVquHbtGuzt7eHh4aH0srKyAgAcOnQIw4YNQ6tWrVCxYkXI5XI8evRIm4uTZ4cOHUK7du3w9ddfo3LlyihdurR0ng4AlClTBoaGhjhx4oRUlpycrFSnatWqyMjIQGJiosoYODo6SvVcXFwwYMAAbNmyBaNGjcLSpUs/zkISEdFHweSnCFq4cCEyMjJQq1YtbN68GdeuXUNMTAx++eUX1K1bN8c23bp1g52dHdq1a4dDhw4hNjYWBw4cwPDhw3H37l0AgIeHB37//XfExMTg+PHj6Natm/SQzYLm4eGByMhIHDlyBDExMejfvz8SEhKk6RYWFujZsye+/fZb7Nu3DxcvXkTv3r2hp6cn7Q0qV64cunXrhh49emDLli2IjY3FyZMnMWPGDOmKrqCgIERERCA2NhbR0dHYu3cvvLy8CmSZiYhIO5j8FEHu7u6Ijo6Gn58fRo0ahUqVKqFZs2bYs2cPFi1alGMbU1NTHDx4EKVKlUKHDh3g5eWF3r1749WrV7C0tAQArFixAk+fPkXVqlXRvXt3DBs2DPb29h9z0XI1ceJEVKtWDS1atICvry8cHR1VDvvNnTsXdevWRZs2bdC0aVPUr18fXl5eMDY2luqEhYWhR48eGDVqFDw9PfH555/j+PHjcHFxAQBkZGRg8ODB8PLygr+/Pzw9PbFw4cKPuahERKRlMpH1pIhPUHJyMqysrJCUlCT9yCu8fv0asbGxcHd3V/qBJM3LeqWXT0nrjzLPly9fokSJEpgzZw769OnzUeapKVw3iUjXvev3+0PxhGf6ZJw+fRqXL19GrVq1kJSUhClTpgAA2rVrV8CRERFRYcLkhz4ps2fPxpUrV2BkZITq1avj0KFDsLOzK+iwiIioEGHyQ5+MqlWr4tSpUwUdBhERFXI84ZmIiIh0CpMfIiIi0ilMfoiIiEinMPkhIiIincLkh4iIiHQKkx8iIiLSKUx+dND+/fshk8lyfQhqYRESEoIqVaoUdBhERPSJ4X1+crMv9OPNy2+8WtUDAwOxatUqAICBgQFcXFzQoUMHTJ48GWZmZu9tX69ePcTHx0tPcy+qbt26BXd3d5w+fZpJEhER5RmTnyLK398fYWFhSEtLw6FDh9C3b1+8fPky1webZmVkZARHR8ePECUREVHhw8NeRZRcLoejoyNcXFzQtWtXdOvWDdu2bQMApKamSk9kNzY2RoMGDXDy5EmpbfbDXrdv30bbtm1RrFgxmJmZoWLFiti
1a5dU/8CBA6hVqxbkcjmcnJwwbtw4pKenS9N9fX0xbNgwjBkzBjY2NnB0dERISIhSvM+TkzBlbBB8q5SFpaUlGjdujLNnzyrVmT59OhwcHGBhYYE+ffrg9evXao2JYrkiIiJQtWpVmJiYoHHjxkhMTMT//vc/eHl5wdLSEgEBAUhJSZHahYeHo0GDBrC2toatrS3atGmDGzduKPV95MgRVKlSBcbGxqhRowa2bdsGmUyGM2fOSHUuXbqEVq1awdzcHA4ODujevTsePXokTd+0aRO8vb1hYmICW1tbNG3aFC9fvlRrGYmI6MMx+flEmJiYIC0tDQAwZswYbN68GatWrUJ0dDQ8PDzQokULPHnyJMe2gwcPRmpqKg4ePIjz589jxowZMDc3BwDcu3cPrVq1Qs2aNXH27FksWrQIy5cvx9SpU5X6WLVqFczMzHD8+HHMnDkTU6ZMQWRkJABACIEhgV3wKPEBFqzaiFOnTqFatWpo0qSJFNPGjRsRHByMH3/8EVFRUXBycsLChQvzNRYhISGYP38+jhw5gjt37qBz586YN28e1q5di507dyIyMhK//vqrVP/ly5cYOXIkTp48iT179kBPTw/t27dHZmYmAOD58+do27YtvL29ER0djR9++AFjx45Vmmd8fDwaNWqEKlWqICoqCuHh4Xjw4AE6d+4sTQ8ICEDv3r0RExOD/fv3o0OHDhBC5GsZiYgo/3jY6xNw4sQJrF27Fk2aNJEOfa1cuRItW7YEACxduhSRkZFYvnw5vv32W5X2cXFx6NixI7y9vQEApUuXlqYtXLgQLi4umD9/PmQyGcqXL4/79+9j7NixmDRpEvT03ubPPj4+CA4OBgCULVsW8+fPx549e9CsWTPs27cP1y9fwr7T12Akl6NsSWvMnj0b27Ztw6ZNm9CvXz/MmzcPvXv3Rt++fQEAU6dOxT///KP23h9F2/r16wMA+vTpg/Hjx+PGjRvScn355ZfYt2+flMB07NhRqf3y5cthb2+PS5cuoVKlSlizZg1kMhmWLl0KY2NjVKhQAffu3cM333wjtVm0aBGqVauGadOmSWUrVqyAi4sLrl69ihcvXiA9PR0dOnSAq6srAEjjTUREHxf3/BRRO3bsgLm5OYyNjVG3bl00bNgQv/76K27cuIG0tDTpxx8ADA0NUatWLcTExOTY17Bhw6SEITg4GOfOnZOmxcTEoG7dupDJZFJZ/fr18eLFC9y9e1cq8/HxUerTyckJiYmJAIBTp04h5eVLNPQpgzqeJWFubg5zc3PExsZKh5cU88kq+/u8yhqLg4MDTE1NlRI6BwcHKTYAuHHjBrp27YrSpUvD0tIS7u7uAN4mhQBw5coV+Pj4wNjYWGpTq1YtpXmeOnUK+/btk5bN3Nwc5cuXl/qvXLkymjRpAm9vb3Tq1AlLly7F06dP87V8RET0Ybjnp4jy8/PDokWLYGhoCGdnZxgaGgJ4e3gFgFKyArw99JS9TKFv375o0aIFdu7cid27dyM0NBRz5szB0KFDc2ynOFSTtVwxfwWZTCYdNsrMzISdvSOWb/wbAFDeyVKqZ21tre6iv1fWWGQy2TtjA4C2bdvCxcUFS5cuhbOzMzIzM1GpUiW8efMGQM5jl/1wVWZmJtq2bYsZM2aoxOPk5AR9fX1ERkbiyJEj2L17N3799VdMmDABx48fl5ItIiL6OLjnp4gyMzODh4cHXF1dlX7cPTw8YGRkhH///VcqS0tLQ1RUFLy8vHLtz8XFBQMGDMCWLVswatQoLF26FABQoUIFHDlyROnH/siRI7CwsECJEiXyFGu1atXw+OED6BsYoJR7aXh4eEgvOzs7AICXlxeOHTum1C77e214/PgxYmJi8P3336NJkybw8vJS2SNTvnx5nDt3DqmpqVJZVFSUUp1q1arh4sWLcHNzU1o+Dw8P6fYDMpkM9evXx+TJk3H69GkYGRlh69atWl9GIiJSxuTnE2NmZoaBAwfi22+/RXh4OC5duoRvvvkGKSkp6NOnT45tgoKCEBERgdjYWERHR2Pv3r1SojRo0CDcuXMHQ4cOxeXLl/HXX38hODgYI0eOlM73eZ+mTZvCp1pNjOjbDYf378GtW7dw5MgRfP/991ISMXz4cKxYsQIrVqzA1atXERwcjIsXL2pmUN6hWLFisLW1xZIlS3D9+nXs3bsXI0eOVKrTtWtXZGZmol+/foiJiUFERARmz54N4L+9X4MHD8aTJ08QEBCAEydO4ObNm9i9ezd69+6NjIwMHD9+HNOmTUNUVBTi4uKwZcsWPHz48J0JKRERaQcPe32Cpk+fjszMTHTv3h3Pnz9HjRo1EBERgWLFiuVYPyMjA4MHD8bdu3dhaWkJf39//PTTTwCAEiVKYNeuXfj2229RuXJl2NjYoE+fPvj+++/zHI9MJsOC1Rvx68ypCB49FMOfPIKjoyMaNmwIBwcHAECXLl1w48YNjB07Fq9fv0bHjh0xcOBAREREfPiAvIOenh7Wr1+PYcOGoVKlSvD09MQvv/wCX19fqY6lpSX+/vtvDBw4EFWqVIG3tzcmTZqErl27SucBOTs74/Dhwxg7dixatGiB1NRUuLq6wt/fH3p6erC0tMTBgwcxb948JCcnw9XVFXPmzJFOSicioo9HJj7xa22Tk5NhZWWFpKQkWFpaKk17/fo1YmNj4e7urnQyK2neubvPpL99SloXWByasmbNGvTq1QtJSUkwMTHReP9cN4lI173r9/tDcc8PUR6sXr0apUuXRokSJXD27FmMHTsWnTt31kriQ0RE2sXkhygPEhISMGnSJCQkJMDJyQmdOnXCjz/+WNBhERFRPjD5IcqDMWPGYMyYMQUdBhERaQCv9iIiIiKdwuQHqjesIypoXCeJiLRHp5Mfxc0Bsz7hm6gwUKyT2e9OTUREH06nz/nR19eHtbW19JwnU1PTXB8BQR9GpL+R/s7Pw0p1hRACKSkpSExMhLW1NfT19Qs6JCKiT45OJz8A4OjoCABKD7okzUt8+kr62+gVLw9/H2tra2ndJCIizdL55Ecmk8HJyQn29vZIS0sr6HA+WX237Jf+3jPKt8DiKAoMDQ25x4eISIt0PvlR0NfX5w+OFt17niH9zTsWExFRQdLpE56JiIhI9zD5ISIiIp3C5IeIiIh0CpMfIiIi0ilMfoiIiEinMPkhIiIincLkh4iIiHQKkx8iIiLSKUx+iIiISKcw+SEiIiKdwuSHiIiIdAqTHyIiItIpTH6IiIhIpzD5ISIiIp3C5IeIiIh0CpMfIiIi0ikFmvyEhoaiZs2asLCwgL29Pb744gtcuXJFqY4QAiEhIXB2doaJiQl8fX1x8eLFAoqYiIiIiroCTX4OHDiAwYMH49ixY4iMjER6ejqaN2+Oly9fSnVmzpyJuXPnYv78+Th58iQcHR3RrFkzPH/+vAAjJyIioqLKoCBnHh4ervQ+LCwM9vb2OHXqFBo2bAghBObNm4cJEyagQ4cOAIBVq1bBwcEBa9euRf/+/QsibCIiIirCCtU5P0lJSQAAGxsbAEBsbCwSEhLQvHlzqY
5cLkejRo1w5MiRHPtITU1FcnKy0ouIiIhIodAkP0IIjBw5Eg0aNEClSpUAAAkJCQAABwcHpboODg7StOxCQ0NhZWUlvVxcXLQbOBERERUphSb5GTJkCM6dO4d169apTJPJZErvhRAqZQrjx49HUlKS9Lpz545W4iUiIqKiqUDP+VEYOnQotm/fjoMHD6JkyZJSuaOjI4C3e4CcnJyk8sTERJW9QQpyuRxyuVy7ARMREVGRVaB7foQQGDJkCLZs2YK9e/fC3d1dabq7uzscHR0RGRkplb158wYHDhxAvXr1Pna4RERE9Ako0D0/gwcPxtq1a/HXX3/BwsJCOo/HysoKJiYmkMlkCAoKwrRp01C2bFmULVsW06ZNg6mpKbp27VqQoRMREVERVaDJz6JFiwAAvr6+SuVhYWEIDAwEAIwZMwavXr3CoEGD8PTpU9SuXRu7d++GhYXFR46WiIiIPgUFmvwIId5bRyaTISQkBCEhIdoPiIiIiD55heZqLyIiIqKPgckPERER6RQmP0RERKRTmPwQERGRTmHyQ0RERDqFyQ8RERHpFCY/REREpFOY/BAREZFOYfJDH53buJ1wG7ezoMMgIiIdxeSHiIiIdAqTHyIiItIpTH6IiIhIpzD5ISIiIp3C5IeIiIh0CpMfIiIi0ilMfoiIiEinMPkhIiIincLkh4iIiHQKkx8iIiLSKUx+iIiISKcw+SEiIiKdwuSHiIiIdAqTHyIiItIpTH6IiIhIpzD5ISIiIp3C5IeIiIh0CpMfIiIi0ilMfoiIiEinMPkhIiIincLkh4iIiHQKkx8iIiLSKUx+iIiISKcw+SEiIiKdwuSHiIiIdAqTHyIiItIpTH6IiIhIpzD5ISIiIp3C5IeIiIh0CpMfIiIi0ilqJz+rVq3Czp07pfdjxoyBtbU16tWrh9u3b2s0OCIiIiJNUzv5mTZtGkxMTAAAR48exfz58zFz5kzY2dlhxIgRGg+QiIiISJMM1G1w584deHh4AAC2bduGL7/8Ev369UP9+vXh6+ur6fiIiIiINErtPT/m5uZ4/PgxAGD37t1o2rQpAMDY2BivXr3SbHREREREGqb2np9mzZqhb9++qFq1Kq5evYrWrVsDAC5evAg3NzdNx0dERESkUWrv+VmwYAHq1q2Lhw8fYvPmzbC1tQUAnDp1CgEBARoPkIiIiEiT1N7zY21tjfnz56uUT548WSMBEREREWlTvu7zc+jQIXz99deoV68e7t27BwD4/fff8e+//2o0OCIiIiJNUzv52bx5M1q0aAETExNER0cjNTUVAPD8+XNMmzZN4wESERERaZLayc/UqVOxePFiLF26FIaGhlJ5vXr1EB0drdHgiIiIiDRN7eTnypUraNiwoUq5paUlnj17pomYiIiIiLRG7eTHyckJ169fVyn/999/Ubp0aY0ERURERKQtaic//fv3x/Dhw3H8+HHIZDLcv38fa9aswejRozFo0CBtxEhERESkMWpf6j5mzBgkJSXBz88Pr1+/RsOGDSGXyzF69GgMGTJEGzFSEeI27u1Db29Nb13AkRAREeVM7eQHAH788UdMmDABly5dQmZmJipUqABzc3NNx0ZERESkcflKfgDA1NQUNWrU0GQsRERERFqndvLTvn17yGQylXKZTAZjY2N4eHiga9eu8PT01EiARERERJqk9gnPVlZW2Lt3L6Kjo6Uk6PTp09i7dy/S09OxYcMGVK5cGYcPH9Z4sEREREQfSu09P46OjujatSvmz58PPb23uVNmZiaGDx8OCwsLrF+/HgMGDMDYsWP5uAsiIiIqdNTe87N8+XIEBQVJiQ8A6OnpYejQoViyZAlkMhmGDBmCCxcuaDRQIiIiIk1QO/lJT0/H5cuXVcovX76MjIwMAICxsXGO5wURERERFTS1D3t1794dffr0wXfffYeaNWtCJpPhxIkTmDZtGnr06AEAOHDgACpWrKjxYImIiIg+lNrJz08//QQHBwfMnDkTDx48AAA4ODhgxIgRGDt2LACgefPm8Pf312ykRERERBqg9mEvfX19TJgwAfHx8Xj27BmePXuG+Ph4fPfdd9DX1wcAlCpVCiVLlnxvXwcPHkTbtm3h7OwMmUyGbdu2KU0PDAyETCZTetWpU0fdkImIiIgkaic/WVlaWsLS0jLf7V++fInKlStj/vz5udbx9/dHfHy89Nq1a1e+50dERESUrzs8b9q0CRs3bkRcXBzevHmjNC06OjrP/bRs2RItW7Z8Zx25XA5HR8f8hElERESkQu09P7/88gt69eoFe3t7nD59GrVq1YKtrS1u3rz53kQmP/bv3w97e3uUK1cO33zzDRITEzU+DyIiItIdaic/CxcuxJIlSzB//nwYGRlhzJgxiIyMxLBhw5CUlKTR4Fq2bIk1a9Zg7969mDNnDk6ePInGjRsjNTU11zapqalITk5WehEREREpqJ38xMXFoV69egAAExMTPH/+HMDbS+DXrVun0eC6dOmC1q1bo1KlSmjbti3+97//4erVq9i5c2eubUJDQ2FlZSW9XFxcNBoTERERFW1qJz+Ojo54/PgxAMDV1RXHjh0DAMTGxkIIodnosnFycoKrqyuuXbuWa53x48cjKSlJet25c0erMREREVHRovYJz40bN8bff/+NatWqoU+fPhgxYgQ2bdqEqKgodOjQQRsxSh4/fow7d+7Ayckp1zpyuRxyuVyrcRAREVHRpXbys2TJEmRmZgIABgwYABsbG/z7779o27YtBgwYoFZfL168wPXr16X3sbGxOHPmDGxsbGBjY4OQkBB07NgRTk5OuHXrFr777jvY2dmhffv26oZNREREBCAfyY+enp7SQ007d+6Mzp0752vmUVFR8PPzk96PHDkSANCzZ08sWrQI58+fx+rVq/Hs2TM4OTnBz88PGzZsgIWFRb7mR0RERJSv+/y8fv0a586dQ2JiorQXSOHzzz/Pcz++vr7vPE8oIiIiP+ERERER5Urt5Cc8PBw9evTAo0ePVKbJZDLpye5EREREhZHaV3sNGTIEnTp1Qnx8PDIzM5VeTHyIiIiosFM7+UlMTMTIkSPh4OCgjXiIiIiItErt5OfLL7/E/v37tRAKERERkfapfc7P/Pnz0alTJxw6dAje3t4wNDRUmj5s2DCNBUdERESkaWonP2vXrkVERARMTEywf/9+yGQyaZpMJmPyQ0RERIWa2snP999/jylTpmDcuHFK9/shIiIiKgrUzl7evHmDLl26MPEhIiKiIkntDKZnz57YsGGDNmIhIiIi0jq1D3tlZGRg5syZiIiIgI+Pj8oJz3PnztVYcERERESapnbyc/78eVStWhUAcOHCBaVpWU9+JiIiIiqM1E5+9u3bp404iIiIiD4KnrVMREREOiXPe346dOiQp3pbtmzJdzBERERE2pbn5MfKykqbcRARERF9FHlOfsLCwrQZBxEREdFHwXN+iIiISKcw+SEiIiKdwuSHiIiIdAqTHyIiItIpeUp+qlWrhqdPnwIApkyZgpSUFK0GRURERKQteUp+YmJi8PLlSwDA5MmT8
eLFC60GRURERKQtebrUvUqVKujVqxcaNGgAIQRmz54Nc3PzHOtOmjRJowESERERaZJMCCHeV+nKlSsIDg7GjRs3EB0djQoVKsDAQDVvkslkiI6O1kqg+ZWcnAwrKyskJSXB0tKyoMP55LmN26n0/tb01jmW51SHiIhIQZu/33na8+Pp6Yn169cDAPT09LBnzx7Y29trNBAiIiKij0Htp7pnZmZqIw4iIiKij0Lt5AcAbty4gXnz5iEmJgYymQxeXl4YPnw4ypQpo+n4iIiIiDRK7fv8REREoEKFCjhx4gR8fHxQqVIlHD9+HBUrVkRkZKQ2YiQiIiLSGLX3/IwbNw4jRozA9OnTVcrHjh2LZs2aaSw4IiIiIk1Te89PTEwM+vTpo1Leu3dvXLp0SSNBEREREWmL2slP8eLFcebMGZXyM2fO8AowIiIiKvTUPuz1zTffoF+/frh58ybq1asHmUyGf//9FzNmzMCoUaO0ESMRERGRxqid/EycOBEWFhaYM2cOxo8fDwBwdnZGSEgIhg0bpvEAiYiIiDRJ7eRHJpNhxIgRGDFiBJ4/fw4AsLCw0HhgRERERNqQr/v8KDDpISIioqJG7ROeiYiIiIoyJj9ERESkU5j8EBERkU5RK/lJS0uDn58frl69qq14SIe5jdspvYiIiLRFreTH0NAQFy5cgEwm01Y8RERERFql9mGvHj16YPny5dqIhYiIiEjr1L7U/c2bN1i2bBkiIyNRo0YNmJmZKU2fO3euxoIjIiIi0jS1k58LFy6gWrVqAKBy7g8PhxEREVFhp3bys2/fPm3EQURERPRR5PtS9+vXryMiIgKvXr0CAAghNBYUERERkbaonfw8fvwYTZo0Qbly5dCqVSvEx8cDAPr27cunuhMREVGhp3byM2LECBgaGiIuLg6mpqZSeZcuXRAeHq7R4IiIiIg0Te1zfnbv3o2IiAiULFlSqbxs2bK4ffu2xgIjIiIi0ga19/y8fPlSaY+PwqNHjyCXyzUSFBEREZG2qJ38NGzYEKtXr5bey2QyZGZmYtasWfDz89NocERERESapvZhr1mzZsHX1xdRUVF48+YNxowZg4sXL+LJkyc4fPiwNmIkIiIi0hi19/xUqFAB586dQ61atdCsWTO8fPkSHTp0wOnTp1GmTBltxEhERESkMWrv+QEAR0dHTJ48WdOxkI4qLE9xV8Rxa3rrAo6EiIi0KV/Jz9OnT7F8+XLExMRAJpPBy8sLvXr1go2NjabjIyIiItIotQ97HThwAO7u7vjll1/w9OlTPHnyBL/88gvc3d1x4MABbcRIREREpDFq7/kZPHgwOnfujEWLFkFfXx8AkJGRgUGDBmHw4MG4cOGCxoMkIiIi0hS19/zcuHEDo0aNkhIfANDX18fIkSNx48YNjQZHREREpGlqJz/VqlVDTEyMSnlMTAyqVKmiiZiIiIiItCZPh73OnTsn/T1s2DAMHz4c169fR506dQAAx44dw4IFCzB9+nTtRElERESkIXlKfqpUqQKZTAYhhFQ2ZswYlXpdu3ZFly5dNBcdERERkYblKfmJjY3VdhxEREREH0Wekh9XV1dtx0FERET0UeTrJof37t3D4cOHkZiYiMzMTKVpw4YN00hgRERERNqgdvITFhaGAQMGwMjICLa2tpDJZNI0mUymVvJz8OBBzJo1C6dOnUJ8fDy2bt2KL774QpouhMDkyZOxZMkSPH36FLVr18aCBQtQsWJFdcMmIiIiApCPS90nTZqESZMmISkpCbdu3UJsbKz0unnzplp9vXz5EpUrV8b8+fNznD5z5kzMnTsX8+fPx8mTJ+Ho6IhmzZrh+fPn6oZNREREBCAfe35SUlLw1VdfQU9P7bxJRcuWLdGyZcscpwkhMG/ePEyYMAEdOnQAAKxatQoODg5Yu3Yt+vfv/8HzJyIiIt2jdgbTp08f/Pnnn9qIRUlsbCwSEhLQvHlzqUwul6NRo0Y4cuRIru1SU1ORnJys9CIiIiJSUHvPT2hoKNq0aYPw8HB4e3vD0NBQafrcuXM1ElhCQgIAwMHBQancwcEBt2/ffmd8kydP1kgMRERE9OlRO/mZNm0aIiIi4OnpCQAqJzxrWvY+hRDvnM/48eMxcuRI6X1ycjJcXFw0HhcREREVTWonP3PnzsWKFSsQGBiohXD+4+joCODtHiAnJyepPDExUWVvUFZyuRxyuVyrsREREVHRpfY5P3K5HPXr19dGLErc3d3h6OiIyMhIqezNmzc4cOAA6tWrp/X5ExER0adJ7eRn+PDh+PXXXzUy8xcvXuDMmTM4c+YMgLcnOZ85cwZxcXGQyWQICgrCtGnTsHXrVly4cAGBgYEwNTVF165dNTJ/IiIi0j1qH/Y6ceIE9u7dix07dqBixYoqJzxv2bIlz31FRUXBz89Peq84V6dnz55YuXIlxowZg1evXmHQoEHSTQ53794NCwsLdcMmIiIiApCP5Mfa2lq6786H8vX1VXpSfHYymQwhISEICQnRyPyIiIiI8vV4CyIiIqKi6sNv00xERERUhKi958fd3f2d99lR9/leRERERB+T2slPUFCQ0vu0tDScPn0a4eHh+PbbbzUVFxEREZFWqJ38DB8+PMfyBQsWICoq6oMDIiIiItImjZ3z07JlS2zevFlT3RERERFphcaSn02bNsHGxkZT3RERERFphdqHvapWrap0wrMQAgkJCXj48CEWLlyo0eCIiIiINE3t5OeLL75Qeq+np4fixYvD19cX5cuX11RcRFrnNm4nAODW9NYFHAkREX1Maic/wcHB2oiDiIiI6KPgTQ6JiIhIp+R5z4+ent47b24IvH0WV3p6+gcHRURERKQteU5+tm7dmuu0I0eO4Ndff33nQ0qJiIiICoM8Jz/t2rVTKbt8+TLGjx+Pv//+G926dcMPP/yg0eCIiIiINC1f5/zcv38f33zzDXx8fJCeno4zZ85g1apVKFWqlKbjIyIiItIotZKfpKQkjB07Fh4eHrh48SL27NmDv//+G5UqVdJWfEREREQalefDXjNnzsSMGTPg6OiIdevW5XgYjIiIiKiwy3PyM27cOJiYmMDDwwOrVq3CqlWrcqy3ZcsWjQVHREREpGl5Tn569Ojx3kvdiYiIiAq7PCc/K1eu1GIYRERERB8H7/BMREREOoXJDxEREekUtR9sSrrlQ598rmivqXZ8AjsREX0o7vkhIiIincLkh4iIiHQKkx8iIiLSKUx+iIiISKcw+SEiIiKdwuSHiIiIdAqTHyIiItIpTH6IiIhIpzD5ISIiIp3C5IeIiIh0CpMfIiIi0ilMfoiIiEinMPkhIiIincLkh4iIiHSKQUEHQKQOt3E7VcpuTW9dAJG8w77Q//72G19wcRARUY6454eIiIh0CpMfIiIi0ilMfoiIiEinMPkhIiIincLkh4iIiHQKkx8iIiLSKUx+iIiISKcw+SEiIiKdwuSHiIiIdAqTHyIiItIpTH6IiIhIpzD5ISIiIp3C5IeIiIh0CpMfIiIi0ikGBR0AFV1u43ZqfR5BBpsAAPPSv8xX+3fFqOjb
bVzubW5Nb/32j32hb//1G//eedxqkY9AiYjoo+GeHyIiItIpTH6IiIhIpzD5ISIiIp3C5IeIiIh0CpMfIiIi0ilMfoiIiEinMPkhIiIincLkh4iIiHQKkx8iIiLSKYU6+QkJCYFMJlN6OTo6FnRYREREVIQV+sdbVKxYEf/884/0Xl9fvwCjISIioqKu0Cc/BgYG3NtDREREGlOoD3sBwLVr1+Ds7Ax3d3d89dVXuHnz5jvrp6amIjk5WelFREREpFCo9/zUrl0bq1evRrly5fDgwQNMnToV9erVw8WLF2Fra5tjm9DQUEyePPkjR0qFjuIp7PD5oG4UT2znk9qJiD4dhXrPT8uWLdGxY0d4e3ujadOm2Lnz7Q/RqlWrcm0zfvx4JCUlSa87d+58rHCJiIioCCjUe36yMzMzg7e3N65du5ZrHblcDrlc/hGjIiIioqKkUO/5yS41NRUxMTFwcnIq6FCIiIioiCrUyc/o0aNx4MABxMbG4vjx4/jyyy+RnJyMnj17FnRoREREVEQV6sNed+/eRUBAAB49eoTixYujTp06OHbsGFxdXQs6NCIiIiqiCnXys379+oIOgYiIiD4xhfqwFxEREZGmMfkhIiIincLkh4iIiHQKkx8iIiLSKUx+iIiISKcw+SEiIiKdwuSHiIiIdAqTHyIiItIphfomh/Th3MbtVCm7Nb31e+tnr5NTP4XOvlDpz3l7rv7/Xz7vbRZksOltm/Qvc50GlAOQ+/jkVDcn72qfL1mWWeI3XjN952W+H2NeREQaxj0/REREpFOY/BAREZFOYfJDREREOoXJDxEREekUJj9ERESkU5j8EBERkU5h8kNEREQ6hckPERER6RQmP0RERKRTmPwQERGRTmHyQ0RERDqFyQ8RERHpFCY/REREpFOY/BAREZFOMSjoACjv3MbtBADcmt5ao/0o3udUJy+CDDZJf89L/1KpTPFeU7LOS5rn96pl2eurE0eO89hz9f//8slzP0Dun1nW8ZWm7Qt9+6/feLXm8SFxEBHpIu75ISIiIp3C5IeIiIh0CpMfIiIi0ilMfoiIiEinMPkhIiIincLkh4iIiHQKkx8iIiLSKUx+iIiISKcw+SEiIiKdwuSHiIiIdAqTHyIiItIpTH6IiIhIpzD5ISIiIp3Cp7oXAG08YTs/farz5Pac5PT08w/pJ+uT1zXVd27zUndadvO+75XlnfIT4/97AjwQZPD2b7dxyvNQesq84mnueZGHutK60OJcllI1nkafj6fLKz2lXjHfHNpnX09zWm8L5An0+VhmoIBiJcrn+kr/4Z4fIiIi0ilMfoiIiEinMPkhIiIincLkh4iIiHQKkx8iIiLSKUx+iIiISKcw+SEiIiKdwuSHiIiIdAqTHyIiItIpTH6IiIhIpzD5ISIiIp3C5IeIiIh0CpMfIiIi0ilMfoiIiEinGBR0AEWd27idAIBb01vnue6H9pOTIINN/99P3uvOS/9S6X1WimnZ2+Q0TV05zU8bbTTZ/l395KXv7HWUxnNPtsp7eqm0V4z5rRaKNlff9tOkXJZ+rv7/Xz65B7Iv9O2/fuOV3+eRtJ62UC5XXr5yyhOzzCPI4G2M877f9P/vFXXOZWngk3OsOfX5/9Py9P15V3//L+t3VNHXf8t87r3t3+n/5+8W4ZP3WN9RP6dY87KMKvPI7/JoWtZ1MVtM877/7zsRNDVM/T4LyzLmxTtizu37V1A+9HerIHHPDxEREekUJj9ERESkU5j8EBERkU5h8kNEREQ6hckPERER6RQmP0RERKRTmPwQERGRTmHyQ0RERDqFyQ8RERHplCKR/CxcuBDu7u4wNjZG9erVcejQoYIOiYiIiIqoQp/8bNiwAUFBQZgwYQJOnz6Nzz77DC1btkRcXFxBh0ZERERFUKFPfubOnYs+ffqgb9++8PLywrx58+Di4oJFixYVdGhERERUBBXq5OfNmzc4deoUmjdvrlTevHlzHDlypICiIiIioqKsUD/V/dGjR8jIyICDg4NSuYODAxISEnJsk5qaitTUVOl9UlISACA5OVkrMWampuS5f0Vdhaxt8tLPu+q8Tn3ztk56iso0lboZynUV75Xmla2frHXy0i77PHLrK7d55lZHVynGJvnlawD/fd6K91nLstcFsqw7ijLFOpSljooc1rPc+lHM+33zyFpPaVZ5iTWrbNPy9D3MQ39Zv6PJ2fvOob063//s88hTrO+on1Os71zG3OahpW2j2rKui9liUlq/1Im3sC1jXrwj5jx9Nz4itdb/fFD0K4TQfOeiELt3754AII4cOaJUPnXqVOHp6Zljm+DgYAGAL7744osvvvj6BF537tzReH5RqPf82NnZQV9fX2UvT2JiosreIIXx48dj5MiR0vvMzEw8efIEtra2kMlkWo1Xm5KTk+Hi4oI7d+7A0tKyoMP5JHBMtYPjqnkcU83jmGqHJsdVCIHnz5/D2dlZQ9H9p1AnP0ZGRqhevToiIyPRvn17qTwyMhLt2rXLsY1cLodcLlcqs7a21maYH5WlpSW/qBrGMdUOjqvmcUw1j2OqHZoaVysrKw1Eo6pQJz8AMHLkSHTv3h01atRA3bp1sWTJEsTFxWHAgAEFHRoREREVQYU++enSpQseP36MKVOmID4+HpUqVcKuXbvg6upa0KERERFREVTokx8AGDRoEAYNGlTQYRQouVyO4OBglUN6lH8cU+3guGoex1TzOKbaUVTGVSaENq4hIyIiIiqcCvVNDomIiIg0jckPERER6RQmP0RERKRTmPwQERGRTmHyU4Bu3bqFPn36wN3dHSYmJihTpgyCg4Px5o3y84/i4uLQtm1bmJmZwc7ODsOGDVOpc/78eTRq1AgmJiYoUaIEpkyZovI8lAMHDqB69eowNjZG6dKlsXjxYq0vY2G2cOFCuLu7w9jYGNWrV8ehQ4cKOqRCITQ0FDVr1oSFhQXs7e3xxRdf4MqVK0p1hBAICQmBs7MzTExM4Ovri4sXLyrVSU1NxdChQ2FnZwczMzN8/vnnuHv3rlKdp0+fonv37rCysoKVlRW6d++OZ8+eaXsRC1xoaChkMhmCgoKkMo5p/ty7dw9ff/01bG1tYWpqiipVquDUqVPSdI6retLT0/H9999Lv0ulS5fGlClTkJmZKdX5JMZU4w/MoDz73//+JwIDA0VERIS4ceOG+Ouvv4S9vb0YNWqUVCc9PV1UqlRJ+Pn5iejoaBEZGSmcnZ3FkCFDpDpJSUnCwcFBfPXVV+L8+fNi8+bNwsLCQsyePVuqc/PmTWFqaiqGDx8uLl26JJYuXSoMDQ3Fpk2bPuoyFxbr168XhoaGYunSpeLSpUti+PDhwszMTNy+fbugQytwLVq0EGFhYeLChQvizJkzonXr1qJUqVLixYsXUp3p06cLCwsLsXnzZnH+/HnRpUsX4eTkJJKTk6U6AwYMECVKlBCRkZEiOjpa+Pn5icqVK4v09HSpjr+/v6hUqZI4cuSIOHLkiKhUqZJo06bNR13ej+3EiRPCzc1N+Pj4iOHDh0vlHFP1PXnyRLi6uorAwEB
...(remainder of the base64-encoded PNG for the data-value figure omitted)...", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "%matplotlib inline\n", + "calibrated_gradient = lava.compute_values_and_visualize(dual_sol, trained_with_flag, training_size, portion)" + ] + },
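A minimal sketch of how the values computed in the cell above could be inspected, assuming `calibrated_gradient` holds one score per training point in training order; apart from `calibrated_gradient`, `portion`, and `training_size`, every name below is illustrative, and whether low or high values mark corrupted points depends on LAVA's convention, so both ends of the ranking are printed.

    import numpy as np

    # Assumption: one value per training point, in training order.
    values = np.asarray(calibrated_gradient)
    order = np.argsort(values)                # indices sorted by value, ascending
    n_flagged = int(portion * training_size)  # size of the corrupted fraction
    print("lowest-valued points :", order[:n_flagged])
    print("highest-valued points:", order[-n_flagged:])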
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "%matplotlib inline\n", + "calibrated_gradient = lava.compute_values_and_visualize(dual_sol, trained_with_flag, training_size, portion)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# 0 1 labels, cân bằng" + ] + }, + { + "cell_type": "code", + "execution_count": 65, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "MNIST\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 1\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, 
+ { + "cell_type": "code", + "execution_count": 65, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "MNIST\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "... [dozens of similar Currrent label / New label / TRAINNNN blocks omitted] ...\n", + "Currrent label: 1\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242,
..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 1\n", + "New 
label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, 
-0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 1\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 1\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + 
" [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 1\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 1\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 1\n", + "New label: 7 \n", + "TRAINNNN label: 
tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 1\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, 
-0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 1\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, 
..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 1\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 1\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 1\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: 
(tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 1\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 1\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " 
[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, 
-0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 1\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, 
-0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 1\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, 
..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 1\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 1\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " 
[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 1\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 1\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, 
-0.4242],\n",
+        "          ...,\n",
+        "          [-0.4242, -0.4242, -0.4242,  ..., -0.4242, -0.4242, -0.4242]]]), 4)\n",
+        "... [output truncated: the same 'Currrent label' / 'New label' / 'TRAINNNN label' / 'TRAINNNN' normalized-image tensor dump repeats for each remaining corrupted MNIST training sample] ...\n",
+        "Currrent label: 0\n",
+        "New label: 2 \n",
+        "TRAINNNN label: tensor(2)\n",
+        "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242,  ..., -0.4242, -0.4242, -0.4242],\n",
+        "          ...,\n",
+        "
[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n" + ] + } + ], + "source": [ + "loaders, shuffle_ind = lava.load_data_corrupted(corrupt_type='shuffle', dataname='MNIST', resize=resize,\n", + " training_size=training_size, test_size=valid_size, currupt_por=portion)" + ] + }, + { + "cell_type": "code", + "execution_count": 66, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([64, 1, 32, 32])\n", + "torch.Size([64])\n", + "30\n", + "19\n", + "torch.Size([64, 1, 32, 32])\n", + "torch.Size([64])\n", + "20\n", + "24\n", + "torch.Size([64, 1, 32, 32])\n", + "torch.Size([64])\n", + "25\n", + "15\n", + "torch.Size([64, 1, 32, 32])\n", + "torch.Size([64])\n", + "22\n", + "23\n", + "torch.Size([64, 1, 32, 32])\n", + "torch.Size([64])\n", + "32\n", + "19\n", + "torch.Size([64, 1, 32, 32])\n", + "torch.Size([64])\n", + "27\n", + "24\n", + "torch.Size([64, 1, 32, 32])\n", + "torch.Size([64])\n", + "23\n", + "22\n", + "torch.Size([64, 1, 32, 32])\n", + "torch.Size([64])\n", + "21\n", + "20\n", + "torch.Size([64, 1, 32, 32])\n", + "torch.Size([64])\n", + "29\n", + "21\n", + "torch.Size([64, 1, 32, 32])\n", + "torch.Size([64])\n", + "21\n", + "28\n", + "torch.Size([64, 1, 32, 32])\n", + "torch.Size([64])\n", + "29\n", + "20\n", + "torch.Size([64, 1, 32, 32])\n", + "torch.Size([64])\n", + "21\n", + "24\n", + "torch.Size([64, 1, 32, 32])\n", + "torch.Size([64])\n", + "22\n", + "29\n", + "torch.Size([64, 1, 32, 32])\n", + "torch.Size([64])\n", + "23\n", + "24\n", + "torch.Size([64, 1, 32, 32])\n", + "torch.Size([64])\n", + "24\n", + "28\n", + "torch.Size([40, 1, 32, 32])\n", + "torch.Size([40])\n", + "16\n", + "10\n" + ] + } + ], + "source": [ + "cnt1 =0\n", + "cnt0 =0\n", + "for batch in loaders['train']:\n", + " print(batch[0].size())\n", + " print(batch[1].size())\n", + " print(torch.sum(batch[1] == 1).item())\n", + " cnt1+=torch.sum(batch[1] == 1).item()\n", + " print(torch.sum(batch[1] == 0).item())\n", + " cnt0+=torch.sum(batch[1] == 0).item()" + ] + }, + { + "cell_type": "code", + "execution_count": 68, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([64, 1, 32, 32])\n", + "torch.Size([64])\n", + "28\n", + "36\n", + "torch.Size([64, 1, 32, 32])\n", + "torch.Size([64])\n", + "39\n", + "25\n", + "torch.Size([64, 1, 32, 32])\n", + "torch.Size([64])\n", + "38\n", + "26\n", + "torch.Size([8, 1, 32, 32])\n", + "torch.Size([8])\n", + "6\n", + "2\n" + ] + } + ], + "source": [ + "cnt1 =0\n", + "cnt0 =0\n", + "for batch in loaders['test']:\n", + " print(batch[0].size())\n", + " print(batch[1].size())\n", + " print(torch.sum(batch[1] == 1).item())\n", + " cnt1+=torch.sum(batch[1] == 1).item()\n", + " print(torch.sum(batch[1] == 0).item())\n", + " cnt0+=torch.sum(batch[1] == 0).item()" + ] + }, + { + "cell_type": "code", + "execution_count": 67, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "385 350\n" + ] + } + ], + "source": [ + "print(cnt1, cnt0)" + ] + }, + { + "cell_type": "code", + "execution_count": 69, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "111 89\n" + ] + } + ], + "source": [ + "print(cnt1, cnt0)" + ] + }, + { + "cell_type": "code", + "execution_count": 70, + "metadata": {}, + "outputs": [], + "source": [ + "feature_cost = FeatureCost(src_embedding = embedder,\n", + " src_dim = (1, resize,resize),\n", + " tgt_embedding = embedder,\n", + " tgt_dim = (1, 
resize,resize),\n", + " p = 2,\n", + " device='cuda')\n", + "dist = DatasetDistance(loaders['train'], loaders['test'],\n", + " inner_ot_method = 'exact',\n", + " debiased_loss = True,\n", + " feature_cost = feature_cost,\n", + " λ_x=1.0, λ_y=1.0,\n", + " sqrt_method = 'spectral',\n", + " sqrt_niters=10,\n", + " precision='single',\n", + " p = 2, entreg = 1e-1,\n", + " device='cuda')" + ] + }, + { + "cell_type": "code", + "execution_count": 71, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "c7df0bc512114c47bb181cedf4d1183a", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/16 [00:00 maxsamples:\n", + " idxs_1 = sorted(np.random.choice(\n", + " dist.X1.shape[0], maxsamples, replace=False))\n", + "else:\n", + " idxs_1 = np.s_[:] # hack to get a full slice\n", + "\n", + "if maxsamples and dist.X2.shape[0] > maxsamples:\n", + " idxs_2 = sorted(np.random.choice(\n", + " dist.X2.shape[0], maxsamples, replace=False))\n", + "else:\n", + " idxs_2 = np.s_[:] # hack to get a full slice\n", + "Z1 = torch.cat((dist.X1[idxs_1],\n", + " dist.Y1[idxs_1].type(dist.X1.dtype).unsqueeze(1)), -1)\n", + "Z2 = torch.cat((dist.X2[idxs_2],\n", + " dist.Y2[idxs_2].type(dist.X2.dtype).unsqueeze(1)), -1)\n", + "Z1 = Z1.to(device)\n", + "Z2 = Z2.to(device)" + ] + }, + { + "cell_type": "code", + "execution_count": 77, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([1000, 1025]) torch.Size([200, 1025])\n", + "Z1 shape in batch: torch.Size([1, 1000, 1025])\n", + "Z2 shape in batch: torch.Size([1, 200, 1025])\n", + "1 1000 1024\n", + "torch.Size([1, 1000, 100])\n", + "1 200 1024\n", + "torch.Size([1, 200, 100])\n", + "torch.Size([1, 1000, 200])\n", + "torch.Size([1, 1000, 200])\n", + "Gia tri M: tensor([[[22, 23, 23, ..., 23, 23, 22],\n", + " [94, 95, 95, ..., 95, 95, 94],\n", + " [22, 23, 23, ..., 23, 23, 22],\n", + " ...,\n", + " [10, 11, 11, ..., 11, 11, 10],\n", + " [70, 71, 71, ..., 71, 71, 70],\n", + " [10, 11, 11, ..., 11, 11, 10]]], device='cuda:0')\n", + "torch.Size([1, 1000, 200])\n", + "torch.Size([1, 1000, 200])\n", + "gia tri D: tensor([[[ 1474.9448, 383.2818, 1689.2954, ..., 727.8188,\n", + " 662.3774, 1732.9272],\n", + " [ 1722.8909, 1781.7236, 8839.2295, ..., 5565.6006,\n", + " 3359.7568, 1428.5764],\n", + " [ 2425.0854, 1570.7974, 449.8189, ..., 62.1548,\n", + " 517.5649, 3167.2319],\n", + " ...,\n", + " [ 3622.8171, 5301.7891, 12275.8340, ..., 8216.4824,\n", + " 5275.3301, 3864.3269],\n", + " [ 3644.3135, 3490.3376, 13131.1758, ..., 9217.3945,\n", + " 6439.2144, 2865.2217],\n", + " [ 2887.5945, 2668.4766, 11349.3301, ..., 8079.5566,\n", + " 5900.4355, 2034.7645]]], device='cuda:0')\n", + "torch.Size([1, 1000, 200])\n", + "Z1 shape in batch: torch.Size([1, 200, 1025])\n", + "Z2 shape in batch: torch.Size([1, 1000, 1025])\n", + "1 200 1024\n", + "torch.Size([1, 200, 100])\n", + "1 1000 1024\n", + "torch.Size([1, 1000, 100])\n", + "torch.Size([1, 200, 1000])\n", + "torch.Size([1, 200, 1000])\n", + "Gia tri M: tensor([[[121, 127, 121, ..., 120, 125, 120],\n", + " [133, 139, 133, ..., 132, 137, 132],\n", + " [133, 139, 133, ..., 132, 137, 132],\n", + " ...,\n", + " [133, 139, 133, ..., 132, 137, 132],\n", + " [133, 139, 133, ..., 132, 137, 132],\n", + " [121, 127, 121, ..., 120, 125, 120]]], device='cuda:0')\n", + "torch.Size([1, 200, 1000])\n", + "torch.Size([1, 200, 1000])\n", + "gia tri D: tensor([[[ 1474.9448, 1722.8909, 2425.0854, ..., 3622.8171,\n", 
+ " 3644.3145, 2887.5945],\n", + " [ 383.2798, 1781.7236, 1570.7954, ..., 5301.7891,\n", + " 3490.3386, 2668.4766],\n", + " [ 1689.2954, 8839.2295, 449.8189, ..., 12275.8340,\n", + " 13131.1768, 11349.3301],\n", + " ...,\n", + " [ 727.8188, 5565.6006, 62.1548, ..., 8216.4824,\n", + " 9217.3955, 8079.5566],\n", + " [ 662.3774, 3359.7568, 517.5649, ..., 5275.3301,\n", + " 6439.2153, 5900.4355],\n", + " [ 1732.9253, 1428.5764, 3167.2300, ..., 3864.3269,\n", + " 2865.2227, 2034.7645]]], device='cuda:0')\n", + "torch.Size([1, 200, 1000])\n" + ] + } + ], + "source": [ + "with torch.no_grad():\n", + " loss.debias = False\n", + " loss.potentials = True\n", + " print(Z1.shape, Z2.shape)\n", + " F_i, G_j = loss(Z1, Z2)" + ] + }, + { + "cell_type": "code", + "execution_count": 78, + "metadata": {}, + "outputs": [], + "source": [ + "π = [F_i, G_j]\n", + "dual_sol = π\n", + "for i in range(len(dual_sol)):\n", + " dual_sol[i] = dual_sol[i].to('cpu')" + ] + }, + { + "cell_type": "code", + "execution_count": 79, + "metadata": {}, + "outputs": [], + "source": [ + "train_indices = get_indices(loaders['train'])\n", + "trained_with_flag = train_with_corrupt_flag(loaders['train'], shuffle_ind, train_indices)" + ] + }, + { + "cell_type": "code", + "execution_count": 80, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "inspected: 10, found: 3 detection rate: 0.01 baseline: 1.8\n", + "inspected: 20, found: 11 detection rate: 0.04 baseline: 3.6\n", + "inspected: 30, found: 16 detection rate: 0.05 baseline: 5.4\n", + "inspected: 40, found: 23 detection rate: 0.08 baseline: 7.2\n", + "inspected: 50, found: 31 detection rate: 0.10 baseline: 9.0\n", + "inspected: 60, found: 36 detection rate: 0.12 baseline: 10.8\n", + "inspected: 70, found: 45 detection rate: 0.15 baseline: 12.6\n", + "inspected: 80, found: 53 detection rate: 0.18 baseline: 14.4\n", + "inspected: 90, found: 59 detection rate: 0.20 baseline: 16.2\n", + "inspected: 100, found: 66 detection rate: 0.22 baseline: 18.0\n", + "inspected: 110, found: 74 detection rate: 0.25 baseline: 19.8\n", + "inspected: 120, found: 79 detection rate: 0.26 baseline: 21.6\n", + "inspected: 130, found: 82 detection rate: 0.27 baseline: 23.400000000000002\n", + "inspected: 140, found: 86 detection rate: 0.29 baseline: 25.2\n", + "inspected: 150, found: 92 detection rate: 0.31 baseline: 27.0\n", + "inspected: 160, found: 97 detection rate: 0.32 baseline: 28.8\n", + "inspected: 170, found: 104 detection rate: 0.35 baseline: 30.6\n", + "inspected: 180, found: 110 detection rate: 0.37 baseline: 32.4\n", + "inspected: 190, found: 115 detection rate: 0.38 baseline: 34.2\n", + "inspected: 200, found: 122 detection rate: 0.41 baseline: 36.0\n", + "inspected: 210, found: 128 detection rate: 0.43 baseline: 37.800000000000004\n", + "inspected: 220, found: 134 detection rate: 0.45 baseline: 39.6\n", + "inspected: 230, found: 140 detection rate: 0.47 baseline: 41.4\n", + "inspected: 240, found: 143 detection rate: 0.48 baseline: 43.2\n", + "inspected: 250, found: 146 detection rate: 0.49 baseline: 45.0\n", + "inspected: 260, found: 150 detection rate: 0.50 baseline: 46.800000000000004\n", + "inspected: 270, found: 155 detection rate: 0.52 baseline: 48.6\n", + "inspected: 280, found: 161 detection rate: 0.54 baseline: 50.4\n", + "inspected: 290, found: 166 detection rate: 0.55 baseline: 52.2\n", + "inspected: 300, found: 171 detection rate: 0.57 baseline: 54.0\n", + "inspected: 310, found: 175 detection rate: 0.58 baseline: 
55.800000000000004\n", + "inspected: 320, found: 179 detection rate: 0.60 baseline: 57.6\n", + "inspected: 330, found: 183 detection rate: 0.61 baseline: 59.4\n", + "inspected: 340, found: 188 detection rate: 0.63 baseline: 61.2\n", + "inspected: 350, found: 194 detection rate: 0.65 baseline: 63.0\n", + "inspected: 360, found: 196 detection rate: 0.65 baseline: 64.8\n", + "inspected: 370, found: 200 detection rate: 0.67 baseline: 66.60000000000001\n", + "inspected: 380, found: 202 detection rate: 0.67 baseline: 68.4\n", + "inspected: 390, found: 206 detection rate: 0.69 baseline: 70.2\n", + "inspected: 400, found: 211 detection rate: 0.70 baseline: 72.0\n", + "inspected: 410, found: 213 detection rate: 0.71 baseline: 73.8\n", + "inspected: 420, found: 218 detection rate: 0.73 baseline: 75.60000000000001\n", + "inspected: 430, found: 221 detection rate: 0.74 baseline: 77.4\n", + "inspected: 440, found: 226 detection rate: 0.75 baseline: 79.2\n", + "inspected: 450, found: 229 detection rate: 0.76 baseline: 81.0\n", + "inspected: 460, found: 235 detection rate: 0.78 baseline: 82.8\n", + "inspected: 470, found: 240 detection rate: 0.80 baseline: 84.60000000000001\n", + "inspected: 480, found: 241 detection rate: 0.80 baseline: 86.4\n", + "inspected: 490, found: 242 detection rate: 0.81 baseline: 88.2\n", + "inspected: 500, found: 244 detection rate: 0.81 baseline: 90.0\n", + "inspected: 510, found: 245 detection rate: 0.82 baseline: 91.8\n", + "inspected: 520, found: 248 detection rate: 0.83 baseline: 93.60000000000001\n", + "inspected: 530, found: 250 detection rate: 0.83 baseline: 95.4\n", + "inspected: 540, found: 252 detection rate: 0.84 baseline: 97.2\n", + "inspected: 550, found: 257 detection rate: 0.86 baseline: 99.0\n", + "inspected: 560, found: 258 detection rate: 0.86 baseline: 100.8\n", + "inspected: 570, found: 260 detection rate: 0.87 baseline: 102.60000000000001\n", + "inspected: 580, found: 260 detection rate: 0.87 baseline: 104.4\n", + "inspected: 590, found: 261 detection rate: 0.87 baseline: 106.2\n", + "inspected: 600, found: 263 detection rate: 0.88 baseline: 108.0\n", + "inspected: 610, found: 264 detection rate: 0.88 baseline: 109.8\n", + "inspected: 620, found: 266 detection rate: 0.89 baseline: 111.60000000000001\n", + "inspected: 630, found: 267 detection rate: 0.89 baseline: 113.4\n", + "inspected: 640, found: 268 detection rate: 0.89 baseline: 115.2\n", + "inspected: 650, found: 270 detection rate: 0.90 baseline: 117.0\n", + "inspected: 660, found: 272 detection rate: 0.91 baseline: 118.8\n", + "inspected: 670, found: 274 detection rate: 0.91 baseline: 120.60000000000001\n", + "inspected: 680, found: 274 detection rate: 0.91 baseline: 122.4\n", + "inspected: 690, found: 274 detection rate: 0.91 baseline: 124.2\n", + "inspected: 700, found: 275 detection rate: 0.92 baseline: 126.0\n", + "inspected: 710, found: 275 detection rate: 0.92 baseline: 127.8\n", + "inspected: 720, found: 275 detection rate: 0.92 baseline: 129.6\n", + "inspected: 730, found: 275 detection rate: 0.92 baseline: 131.4\n", + "inspected: 740, found: 276 detection rate: 0.92 baseline: 133.20000000000002\n", + "inspected: 750, found: 277 detection rate: 0.92 baseline: 135.0\n", + "inspected: 760, found: 277 detection rate: 0.92 baseline: 136.8\n", + "inspected: 770, found: 279 detection rate: 0.93 baseline: 138.6\n", + "inspected: 780, found: 280 detection rate: 0.93 baseline: 140.4\n", + "inspected: 790, found: 280 detection rate: 0.93 baseline: 142.20000000000002\n", + "inspected: 800, found: 282 
detection rate: 0.94 baseline: 144.0\n", + "inspected: 810, found: 282 detection rate: 0.94 baseline: 145.8\n", + "inspected: 820, found: 283 detection rate: 0.94 baseline: 147.6\n", + "inspected: 830, found: 283 detection rate: 0.94 baseline: 149.4\n", + "inspected: 840, found: 286 detection rate: 0.95 baseline: 151.20000000000002\n", + "inspected: 850, found: 290 detection rate: 0.97 baseline: 153.0\n", + "inspected: 860, found: 291 detection rate: 0.97 baseline: 154.8\n", + "inspected: 870, found: 292 detection rate: 0.97 baseline: 156.6\n", + "inspected: 880, found: 294 detection rate: 0.98 baseline: 158.4\n", + "inspected: 890, found: 294 detection rate: 0.98 baseline: 160.20000000000002\n", + "inspected: 900, found: 294 detection rate: 0.98 baseline: 162.0\n", + "inspected: 910, found: 295 detection rate: 0.98 baseline: 163.8\n", + "inspected: 920, found: 296 detection rate: 0.99 baseline: 165.6\n", + "inspected: 930, found: 296 detection rate: 0.99 baseline: 167.4\n", + "inspected: 940, found: 296 detection rate: 0.99 baseline: 169.20000000000002\n", + "inspected: 950, found: 297 detection rate: 0.99 baseline: 171.0\n", + "inspected: 960, found: 298 detection rate: 0.99 baseline: 172.8\n", + "inspected: 970, found: 299 detection rate: 1.00 baseline: 174.6\n", + "inspected: 980, found: 299 detection rate: 1.00 baseline: 176.4\n", + "inspected: 990, found: 299 detection rate: 1.00 baseline: 178.20000000000002\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAioAAAHFCAYAAADcytJ5AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAABOyElEQVR4nO3deVhUZf8G8HtkdwEFBWTHJcAFxTWXIkFDcgnJXDKXTE3NTGlR6zWpzK1S2zV909JS0tSflpqKpuUOivomWibu4IaAqanA8/uDGJmZM8PMMMuZmftzXVw2Zw4zDweJ2+d5vt+jEEIIEBEREclQNWsPgIiIiEgbBhUiIiKSLQYVIiIiki0GFSIiIpItBhUiIiKSLQYVIiIiki0GFSIiIpItBhUiIiKSLQYVIiIiki0GFbKqpUuXQqFQKD/c3d3h7++PLl26YObMmbhy5YrRr338+HGkpqbizJkzphuwge8zbNgwhIWFmfX9reny5ct444030LJlS3h6esLV1RVBQUFITk7G+vXrUVJSYpFx/PLLL1AoFPjll1+Uxyxx7S9duoTU1FRkZWXpdX75OFevXm3WcVmaruuQmpoKhUJh+UGR3WBQIVlYsmQJ9u7di61bt+Kzzz5Dy5YtMXv2bERFRWHbtm1Gvebx48fx9ttvWySoaHufqVOnYu3atWZ9f2vZt28fmjdvjkWLFqF3795YuXIltm3bhlmzZsHFxQXJyclYunSp1cZniWt/6dIlvP3223oHFXul6zqMGDECe/futfygyG44W3sARADQrFkztGnTRvn4qaeewsSJE9G5c2ckJyfjzz//hJ+fnxVHaJyGDRtaewhmUVBQgKSkJNSsWRO7d+9G/fr1VZ5/9tlncfToUVy/fl3n69y5cwfu7u5m+Re3vV57WxMUFISgoCBrD4NsGGdUSLZCQkLw4Ycf4ubNm1i4cKHKcxkZGejduze8vb3h7u6OmJgYfP/998rnly5diqeffhoA0KVLF+XSUsV/4W/btg3x8fHw9PRE9erV0alTJ6Snp2uM48SJExg4cCD8/Pzg5uaGkJAQDBkyBHfv3q30faSWH/755x9MmTIF4eHhcHV1RWBgIF588UUUFBSonBcWFoaePXti8+bNaNWqFTw8PBAZGYmvvvpK53W7f/8+fH19MXjwYI3nCgoK4OHhgZSUFABAaWkppk+fjoiICHh4eKB27dqIjo7GRx99pPM9Fi1ahMuXL2POnDkaIaVcdHQ0unTponxcvsy3ZcsWDB8+HPXq1UP16tVx9+5dnDp1Cs899xwaN26M6tWrIzAwEL169cKxY8c0XvfEiRPo3r07qlevjrp162L06NG4efOmxnlS114Igc8//xwtW7aEh4cH6tSpg759++L06dMq5z322GNo1qwZDh48iEceeQTVq1dHgwYNMGvWLJSWlgIoW8Zp27YtAOC5555Tfu9TU1N1Xjt15Usjv//+OwYOHAgvLy/4+flh+PDhKCwsVDl31apVaN++Pby8vJRjGj58uPL58qWl5cuXIyUlBf7+/vDw8EBsbCwOHz6s8d6V/RyVu3jxIkaNGoXg4GC4uroiICAAffv2xeXLlyu9DlJLP6WlpZgzZw4iIyPh5uYGX19fDBkyBBcuXDD4+0AOQBBZ0ZIlSwQAcfDgQcnn//77b+Hk5CTi4+OVx7Zv3y5cXV3FI488ItLS0sTmzZvFsGHDBACxZMkSIYQQV65cETNmzBAAxGeffSb27t0r9u7dK65cuSKEEGLZsmVCoVCIpKQksWbNGrFhwwbRs2dP4eTkJLZt26Z8r6ysLFGzZk0RFhYmFixYINLT08Xy5ctFv379RFFRUaXvM3ToUBEaGqp8vdLSUpGQkCCcnZ3F1KlTxZYtW8QHH3wgatSoIWJiYsQ///yjPDc0NFQEBQWJJk2aiG+++Ub8/PPP4umnnxYAxM6dO3Ve14kTJwoPDw9RWFiocvzzzz8XAMTRo0eFEELMnDlTODk5iWnTpon09HSxefNmMX/+fJGamqrz9bt16yacnJzErVu3dJ5XUfn3OjAwUIwaNUps2rRJrF69WhQXF
4udO3eKV155RaxevVrs3LlTrF27ViQlJQkPDw9x4sQJ5Wvk5eUJX19fERgYKJYsWSI2btwoBg0aJEJCQgQAsWPHDuW56tdeCCFGjhwpXFxcxCuvvCI2b94svvvuOxEZGSn8/PxEXl6e8rzY2Fjh4+MjGjduLBYsWCC2bt0qxo4dKwCIr7/+WgghRGFhofJr+s9//qP83p8/f17rNdixY4cAIFatWqU8Nm3aNAFAREREiLfeekts3bpVzJ07V7i5uYnnnntOed6ePXuEQqEQAwYMEBs3bhTbt28XS5YsEYMHD9Z4/eDgYPHkk0+KDRs2iOXLl4tGjRoJT09P8ddffynP1efnSAghLly4IOrXry/q1q0r5s6dK7Zt2ybS0tLE8OHDRXZ2dqXXofzrq2jUqFECgBg3bpzYvHmzWLBggahXr54IDg4WV69eNej7QPaPQYWsqrKgIoQQfn5+IioqSvk4MjJSxMTEiPv376uc17NnT1G/fn1RUlIihBBi1apVGr+8hBDi1q1bwtvbW/Tq1UvleElJiWjRooVo166d8lhcXJyoXbu2MnhI0fY+Qmj+sty8ebMAIObMmaNyXlpamgAgvvzyS+Wx0NBQ4e7uLs6ePas8dufOHeHt7S1eeOEFreMRQoijR49qvJ4QQrRr1060bt1a+bhnz56iZcuWOl9LSmRkpPD399c4XlJSIu7fv6/8KP9eCPHgez1kyJBKX7+4uFjcu3dPNG7cWEycOFF5fNKkSUKhUIisrCyV87t161ZpUNm7d68AID788EOVzz1//rzw8PAQr7/+uvJYbGysACD279+vcm6TJk1EQkKC8vHBgwc1frHroiuoqP+dGDt2rHB3dxelpaVCCCE++OADAUAUFBRU+vqtWrVSfp4QQpw5c0a4uLiIESNGKI/p+3M0fPhw4eLiIo4fP671fXVdB/Wgkp2dLQCIsWPHqpy3f/9+AUC88cYbymP6fh/IvnHph2RPCKH871OnTuHEiRMYNGgQAKC4uFj58cQTTyA3NxcnT57U+Xp79uxBfn4+hg4dqvL5paWl6N69Ow4ePIhbt27h9u3b2LlzJ/r164d69eqZ5GvZvn07gLJliYqefvpp1KhRQ2PpqWXLlggJCVE+dnd3x0MPPYSzZ8/qfJ/mzZujdevWWLJkifJYdnY2Dhw4oLJU0K5dOxw5cgRjx47Fzz//jKKiImO/NABASkoKXFxclB+9e/fWOOepp57SOFZcXIwZM2agSZMmcHV1hbOzM1xdXfHnn38iOztbed6OHTvQtGlTtGjRQuXzn3nmmUrH9uOPP0KhUODZZ59V+b77+/ujRYsWKhVDAODv74927dqpHIuOjq702htL/VpFR0fjn3/+UVa+lS+v9OvXD99//z0uXryo9bWeeeYZleWW0NBQdOzYETt27ABg2M/Rpk2b0KVLF0RFRZnk6ywfg/rPQLt27RAVFaXxM2Dp7wPJD4MKydqtW7dw/fp1BAQEACgrhwWAV199VeUXoouLC8aOHQsAuHbtms7XLH+Nvn37arzG7NmzIYRAfn4+bty4gZKSEpNuBLx+/TqcnZ01go9CoYC/v7/G5lMfHx+N13Bzc8OdO3cqfa/hw4dj7969OHHiBICyyio3NzcMHDhQec6UKVPwwQcfYN++fUhMTISPjw/i4+ORkZGh87VDQkJw9epV3L59W+X4K6+8goMHD+LgwYNa965IHU9JScHUqVORlJSEDRs2YP/+/Th48CBatGih8rVev34d/v7+Gp8vdUzd5cuXIYSAn5+fxvd93759Gn9vqnLtjaH+fm5ubgCgfL9HH30U69atQ3FxMYYMGYKgoCA0a9YMK1as0Hgtbdeo/O+XIT9HV69eNfnPACD99yAgIMCkPwNkH1j1Q7L2008/oaSkBI899hgAoG7dugDKfsEmJydLfk5ERITO1yx/jU8++QQPP/yw5Dl+fn4oKSmBk5OTxga/qvDx8UFxcTGuXr2qElaEEMjLy1P+q9kUBg4ciJSUFCxduhTvvfceli1bhqSkJNSpU0d5jrOzM1JSUpCSkoKCggJs27YNb7zxBhISEnD+/HlUr15d8rW7deuGLVu2YOPGjejbt6/yeHBwMIKDgwEArq6ukp8rVeGzfPlyDBkyBDNmzFA5fu3aNdSuXVv52MfHB3l5eRqfL3VMXd26daFQKPDrr78qQ0BFUsfk5sknn8STTz6Ju3fvYt++fZg5cyaeeeYZhIWFoUOHDsrztF2j8l/6hvwc1atXz+Q/AwCQm5urEYAuXbqkHBtROc6okGydO3cOr776Kry8vPDCCy8AKPufZ+PGjXHkyBG0adNG8qNWrVoANP9FWq5Tp06oXbs2jh8/rvU1XF1dldUSq1at0jlLo+19pMTHxwMo+8Vc0Q8//IBbt24pnzeFOnXqICkpCd988w1+/PFH5OXlqSz7qKtduzb69u2LF198Efn5+Tr7z4wYMQJ+fn54/fXXkZubW+WxKhQKjaDw008/aSxvdOnSBb///juOHDmicvy7776r9D169uwJIQQuXrwo+T1v3ry5weM25HtvSm5uboiNjcXs2bMBQKOiZ8WKFSpLpmfPnsWePXuUgd+Qn6PExETs2LFD55KqIdchLi4OgObPwMGDB5GdnW3SnwGyD5xRIVn43//+p1wjv3LlCn799VcsWbIETk5OWLt2rcrsw8KFC5GYmIiEhAQMGzYMgYGByM/PR3Z2Ng4dOoRVq1YBKOvNAgBffvklatWqBXd3d4SHh8PHxweffPIJhg4divz8fPTt2xe+vr64evUqjhw5gqtXr+KLL74AAMydOxedO3dG+/btMXnyZDRq1AiXL1/G+vXrsXDhQtSqVUvn+6jr1q0bEhISMGnSJBQVFaFTp044evQopk2bhpiYGMmS4qoYPnw40tLSMG7cOAQFBaFr164qz/fq1UvZw6ZevXo4e/Ys5s+fj9DQUDRu3Fjr69auXRvr1q1Dr1690KJFC4wZMwYPP/wwatasievXr2PXrl3Iy8tDx44d9Rpnz549sXTpUkRGRiI6OhqZmZl4//33Nf7FPWHCBHz11Vfo0aMHpk+fDj8/P3z77bfK5S1dOnXqhFGjRuG5555DRkYGHn30UdSoUQO5ubn47bff0Lx5c4wZM0av8ZZr2LAhPDw88O233yIqKgo1a9ZEQECAcqnSlN566y1cuHAB8fHxCAoKQkFBAT766CO4uLggNjZW5dwrV66gT58+GDlyJAoLCzFt2jS4u7tjypQpynP0/Tl65513sGnTJjz66KN444030Lx5cxQUFGDz5s1ISUlBZGSkQdchIiICo0aNwieffIJq1aohMTERZ86cwdSpUxEcHIyJEyea/NqRjbPmTl6i8kqQ8g9XV1fh6+srYmNjxYwZM7RW2xw5ckT069dP+Pr6ChcXF+Hv7y/i4uLEggULVM6bP3++CA8PF05OThpVCTt37hQ9evQQ3t7ewsXFRQQGBooePXqoVGQIIcTx48fF008/LXx8fISrq6sICQkRw4YNUykl1vY+UiWyd+7cEZMmTRKhoaHCxcVF
1K9fX4wZM0bcuHFD5bzQ0FDRo0cPja89NjZWxMbG6r6w/yopKRHBwcECgHjzzTc1nv/www9Fx44dRd26dZVf2/PPPy/OnDmj1+vn5eWJKVOmiOjoaFGjRg3h4uIiAgICRK9evcQ333yjUlGiq8Lrxo0b4vnnnxe+vr6ievXqonPnzuLXX3+V/FqPHz8uunXrJtzd3YW3t7d4/vnnxf/93//pVZ4shBBfffWVaN++vahRo4bw8PAQDRs2FEOGDBEZGRnKc2JjY0XTpk01PlfqNVesWCEiIyOFi4uLACCmTZum9XrpqvqpWJZb8Xrl5OQIIYT48ccfRWJioggMDFT+nDzxxBPi119/1Xj9ZcuWifHjx4t69eoJNzc38cgjj6h8feX0/Tk6f/68GD58uPD391d+j/v16ycuX75c6XWQKk8uKSkRs2fPFg899JBwcXERdevWFc8++6xGabch3weyXwohKswPEhGRzfrll1/QpUsXrFq1SmXvEJEt4x4VIiIiki0GFSIiIpItLv0QERGRbHFGhYiIiGSLQYWIiIhki0GFiIiIZMumG76Vlpbi0qVLqFWrlmRbbiIiIpIfIQRu3ryJgIAAVKume87EpoPKpUuXlPcVISIiItty/vz5Sm96adNBpfxeFOfPn4enp6eVR0NERET6KCoqQnBwsPL3uC42HVTKl3s8PT0ZVIiIiGyMPts2uJmWiIiIZItBhYiIiGSLQYWIiIhki0GFiIiIZItBhYiIiGSLQYWIiIhki0GFiIiIZItBhYiIiGSLQYWIiIhki0GFiIiIZItBhYiIiGTLpu/1Q0RERIY7fO4Gcq7dQnjdGogJqWPQMUtjUCEiIrJzFQPHz7/nYcHO08rnRsc2AAC9jk1OjLLQiB9QCCGExd/VRIqKiuDl5YXCwkLePZmIiAiasyCzNmWrBA5DtFScQrgiFzmiPrJEI6wd29EkMyuG/P7mjAoREZEMGbM8oz5b0icmAGsPXzLq/Sc5r8AY5w3Kx18U90LOtRYWXwJiUCEiIpIBUyzPqDM2pLRUnFIJKQAwxnkDTpYOBxBk1Gsai0GFiIjIyipbnpF6ztjlHHVjYhtAqL3eiKgSQOLlI1yumOQ9DcGgQkREZEL6LNlUfAyYLnRIUV/+GRPbAI839dcYY596uSi6eAKegZGI8H9UMqjAp5HZxqkNgwoREZGJqM+MSC3PtAz2Qtb5QuXjuMh6ZhvPmNgGmJQYhSEdwjSCicpek63TELF7ftl/HwbQaULZR/kxAOg0EQhqY7axasOqHyIiIiOpz4z0+XyPWd5HanlG2zGp2RKdLmQAi+M1j49IL/vz+qmymRQThhRW/RAREZmBrg2vVZkZ6RJRDztOXlU+1hY4EvQ8VmlAuZDxIIBcPyV9zvVTQIsBVplFqYgzKkRERBJM2Y+kMmvHdgQAy3SB3TpNdUknuj9wNE3zvBHpZgspnFEhIiKqAvVQom8/EqmZEfXlGfU9KmNiG+g/E1JVFzJUQwpQFlLUw4qV9qNIYVAhIiKq4PC5GxozJ/r2Ixkf3xjj4xtXujxj8XvolC/15OdIP98wDmg3yiz7UaqKQYWIiKiCnGu3jPo8XTMjMSF1VI6pPzYr9aUeKeXhREYBpRyDChERUQXlFTzq9O1HIitSSz3qZLTMI4VBhYiICKqbZ0fHNtAo+9WrH4ncaKvoiZ0MeIfLbplHCoMKERE5PKlGbWvHdpQMJbIOJoBq6bG2TrKNu8k+oJRjUCEiIocmtXl2wc7TSGjqj+RWlr0BX5Wp70eRUYdZYzGoEBGRXavs3jvaNs/mXLsl/9mTiqT2o+yeX9YPJaqXLCt69MGgQkRENquyEKLePVbq3jt9YgIkX1vbplrZqaz0WCYdZo3FoEJERDZJnxsAqpN6bu3hS5IVPTYxm6Jv6bENY1AhIiJZ0jVbcr+kVHJfibEeaVxPsqJH1uyg9FgfDCpERCQ7xsyWVEV5OLGJgFLODkqP9cGgQkREVldx9gTQDCSmCihS996xmWUewO5Kj/XBoEJERFalPnsSF1nPqNfRFkKkuseq33vHJthh6bE+FEIIYe1BGMuQ20QTEZF1VLbXZNIPx4x+7dlPNYeLUzWtVT82E0IqcyEDWByveXxEetmfNlZ6bMjvb86oEBGRSRlaHiylS0Q97Dh5VflY22xJ/7YhGp9rc3tNtKm4zKNtP4qNlx7rg0GFiIgq7UcSE1JHr3PUl3HU6bvXZHx8Y4yPb2wfSzbGUF/mie4vfZ6Nlx7rg0GFiMiO6BMm1I/pM+vRMtgLWecLdZ6j3ovEWBU3t6qHEbuZLdFFquz4aFpZWDma9uCYHe5HkcI9KkREdkKfkl5zl/kaS2qvicOp2GF25yzN5/ssfLAMZEP7UaRwjwoRkYPRdmM9ddYOKIbsNXEo+naYDWpj0wHFGAwqREQ2rHwJ5+x16RvrWZpUK3q7KQ82FwfpMGssBhUiIhtV2cZVY0nNeqjvUdE2MzIpMUqyFb1D7jXRl4N0mDUWgwoRkY2orHurOm1hwtimaFKbcqVmRhhC9OCAHWaNxc20REQ2QKp76/YTVzXOezm+EUJ9ahhU9cNQYWFSHWYBzQ6z3VItNyYLM+T3N4MKEZFMGdO9de3YjgwecmZnHWaNxaofIiIbY2jjNEC6eytDikxVLD2W4gAdZo3FoEJEZGGVhRJ9G6dp695KMqNv6TFJYlAhIrIgfUKJPiFFV/dWkhGWHlcZgwoRkYVINWUzpOU8u7faIJYeVxmDChGRmRnTlE2qcZrDd2+1FSw9NikGFSIiEzJmU6xUKNHWOI1kTqr0uNMEzdJjhhS9sTyZiMhEjNkUWx5K2NfEDrD0WG8sTyYispCKvU703X8i1ZSN3VxtVMVlHm37UVh6XCUMKkREEvTp5mrsvXYei/BlKLEH6ss80f2lz2PpcZUwqBARqVEPIKNjGwCAUb1OpPafMKTYAamy46NpZWHlaNqDY9yPUmUMKkRE0L2EIzVrom+vE26KtTOVdZhtGAe0G8X9KCbEoEJEDs/YJRwpUr1OuP/ETujbYTaoDQOKCTGoEJHDqbjXBJCeMdEHe504EHaYtRoGFSJyKOqzJ3GR9Sr9nDGxDSCgGmi4rONg2GHWahhUiMhhSLWw337iquS5Uks4CU39NUIJl3XsGDvMygKDChE5jJxr0i3su0TUw46TDwKLtiUchhIHwg6zssGgQkQOo3xPirrx8Y0xPr4xl3CojNR+lN3zyzrMRvViRY+FMagQkd2ruHl2dGwDjb0mFZdxyIFVVnrMDrNWwaBCRHZNqnnb2rEdOXtCqvQtPSaLq2btARARmYvU5tnyx8mtghhSqAxLj2WNMypEZFcqLvNo2zybc+0WQwo9wNJjWWNQISK7ob7M0ycmQPI8bZtqyYGw9NhmMKgQkc2qrMPs2sOXeFNA0sTSY5vCoEJENknfDrOPNK7H7rH0AEuPbQ6DChHZHEM6zJaHEwYUB1ZxmUfbfhSWHssWgwoR2YzypZ6z1/XvMMuA4uDUl3mi+0ufx9Jj2WJQISJZqrj/JCakjsZSjxR
2mCUVUss8R9PKwsrRtAfHuB9F1hhUiEh2pKp3Km6IlcIOs6RUWYfZhnFAu1Hcj2IjGFSISFak9p9oCykvxzdCqE8Nzp7QA/p2mA1qw4BiIxhUiEgWKtt/IuWxCF8GFHqAHWbtEoMKEVmcMftP2A+FKsUOs3aJQYWIzKqyUKLv/pNJiVHsh0Ka2GHW7imEEMLagzBWUVERvLy8UFhYCE9PT2sPh4jUGBNKynH/CVVKqsMsoNlhtluq5cZEejHk9zdnVIjILAzZFCuF+09IJ3aYdRgMKkRkFtruXCyF+09Ib5WVHrPDrN1hUCEis9B2h2KpUML9J6QXfUuPya4wqBCRWcSE1MHo2AYqyz+6Qgnvx0M6sfTYYTGoEJFJVazymZwYhYSm/gwlVHUsPXZYDCpEZDLqVT6jYxtgcmIUQwkZh6XHBKCatQdARPZBqspnwc7TOHzuhpVGRDZt6zRgcTyw9oWyP7M3PCg/LselHofAGRUiMgltVT45125xRoUMw9JjqoBBhYhMQluVj7bjRCoqLvNo24/C0mOHxKBCRCahrcqHsylUKfWy4+j+0uex9NghMagQkdHU7+OjrcqHSCupZZ6jaWVh5Wjag2Pcj+KwGFSIyCi6KnwYUEhv2pZ5GsYB7UZxPwoxqBCR4bRV+CQ09WdIIf2U70kpuS/9fHk4YUBxeAwqRGQwVvhQlajvSQlsDVzMfPCYyzxUAYMKEemtfE/K/ZJSyedZ4UOVktqTcjET6P0p4OTCZR7SwKBCRHpR35PSMtgLWecLlY9Z4UNa6VN67ORSVnpMpIZBhYgqJbUnJet8IWY/1RwuTtVY4UPasfSYqogt9ImoUtr2pLg4VUNyqyCGFJKmq/S4Iu5JIR04o0JElWLXWTJI+VJPfo708yw9JgMwqBCRJPVmbuw6S3pRX+qRwtJjMgCDChFp0NbMjV1nSSeppR51XOYhAzGoEJGKypq5MaCQVtoqemInA97hXOYhozCoEJEKNnMjg1QsPdZWudO4GwMKGY1BhYhU9qNw4yzpTX0/SqcJZR8qx7jUQ1WjEEIIaw/CWEVFRfDy8kJhYSE8PT2tPRwimyS1HwWAxsbZSYlRFh8bydiFDGBxvObxEellf7Kih3Qw5Pc3Z1SIHJi2/Shrx3bkxlnSpE+H2eunyjrMMqCQiTCoEDkwXftR2MiNVLDDLFkJO9MSOTDuRyG9sMMsWRFnVIgcGBu5kV60LfOwwyxZAIMKkQOqWOXDRm6kVfmelJL70s+zwyxZAIMKkYPR1nWWAYVUqO9JCWwNXMx88JjLPGQhDCpEdq7i7AkAnV1niQBI70m5mAn0/hRwcuEyD1kUgwqRHVOfPYmLrCd5HrvOkl6lx04uZaXHRBbEoEJkp6R6pGw/cVXyXFb5ODiWHpOMsTyZyE5p65HSJUJ1VoVVPg6Opcckc5xRIbJT2mZJxsc3xvj4xqzycXTlSz35OdLPs/SYZIJBhcjOVNw8q6tHCgOKA1Nf6pHC0mOSCQYVIjsiVXq8dmxHzp7QA1JLPeq4zEMywqBCZCe03WAwoak/klsFWWlUJDvaKnpiJwPe4VzmIdlhUCGyE7puMMiZFAdXsfRYW+VO424MKCRLDCpEdoI3GCRJ6vtROk0o+1A5xqUeki8GFSIbVnHjLG8wSBqk9qPsng+MSAeierGih2wCgwqRjdJ2zx7eYNDB6dNh9vqpsg6zDChkAxhUiGyEIffsYUBxUOwwS3bIJEGloKAAtWvXNsVLEZEE3rOHKqWrw+zRtAfHuB+FbIzBLfRnz56NtLQHf+n79esHHx8fBAYG4siRIyYdHBHxnj2kJ23LPA3jyvak9FlY9me3VIsOi6iqDA4qCxcuRHBwMABg69at2Lp1KzZt2oTExES89tprJh8gkaPjPXtIpwsZwJGVQMl96efLN8tyTwrZKIOXfnJzc5VB5ccff0S/fv3w+OOPIywsDO3btzf5AIkcHe/ZQ1qp70kJbA1czHzwmMs8ZAcMDip16tTB+fPnERwcjM2bN2P69OkAACEESkpKTD5AIkfFe/aQTlJ7Ui5mAr0/BZxcWHZMdsPgoJKcnIxnnnkGjRs3xvXr15GYmAgAyMrKQqNG3ElOZAq8Zw9J0qf02MmlbJmHyE4YHFTmzZuHsLAwnD9/HnPmzEHNmjUBlC0JjR071uQDJHIE+pYe8549Doylx+SgDA4qLi4uePXVVzWOT5gwwRTjIXI4LD2mSrH0mByYUX1Uli1bhoULF+L06dPYu3cvQkNDMX/+fISHh+PJJ5809RiJ7BZLj0mn8qWe/Bzp5xvGAe1GsRU+2TWDy5O/+OILpKSkIDExEQUFBcoNtLVr18b8+fNNPT4iu8bSY9Jq6zRgcTyw9gVg5yzpc1h6TA7A4BmVTz75BIsWLUJSUhJmzXrww9OmTRvJJSEi0o6lxyRJaqlHHZd5yEEYHFRycnIQExOjcdzNzQ23bkn/65CIpFV2x2MGFAelraIndjLgHc5lHnIoBgeV8PBwZGVlITQ0VOX4pk2b0KRJE5MNjMhR8I7HBEC19Fhb5U7jbgwo5HAMDiqvvfYaXnzxRfzzzz8QQuDAgQNYsWIFZs6cicWLF5tjjER2p2I5cvndjhlQHJh66XGnCWUfKse41EOOSSGEEIZ+0qJFizB9+nScP38eABAYGIjU1FQ8//zzJh+gLkVFRfDy8kJhYSE8PT0t+t5ExpJq5jY5McqKIyKrupBRtmlW3Yj0sj9Z0UN2yJDf30YFlXLXrl1DaWkpfH19jX2JKmFQIVtz+NwN9Pl8j8bxtWM7ckbFkah3mF37guY5fRaywyzZLUN+fxvVR6Vc3bp1q/LpRA5HWzkym7k5EHaYJTKIwUElJiYGCoVC47hCoYC7uzsaNWqEYcOGoUuXLiYZIJE90VaOzGZuDoIdZokMZnDDt+7du+P06dOoUaMGunTpgsceeww1a9bEX3/9hbZt2yI3Nxddu3bF//3f/5ljvEQ2rbwcuSI2c3Mg2sqOG8aV7Unps7Dsz26pFh0WkZwZPKNy7do1vPLKK5g6darK8enTp+Ps2bPYsmULpk2bhnfffZft9ImgWeHDcmQHVL4npeS+9PPlm2U5i0KkweDNtF5eXsjMzESjRqrrp6dOnULr1q1RWFiIEydOoG3btrh586ZJB6uOm2lJ7ljhQxp7UgJbAxczHzzuNJEzKORwzLqZ1t3dHXv27NEIKnv27IG7uzsAoLS0FG5uboa+NJFdkbrh4IKdp5HQ1J+zKI5Cak/KxUyg96eAkwvLjon0YHBQeemllzB69GhkZmaibdu2UCgUOHDgABYvXow33ngDAPDzzz9LttknciSs8HFQ6qXHUpxcWHpMpCeDg8p//vMfhIeH49NPP8WyZcsAABEREVi0aBGeeeYZAMDo0aMxZswY046UyAZU3I/CCh8HxNJjIpOrUsM3a+MeFZITqf0oADRuODiJe1Tsk7YOs1Klx9yTQg7OYg3fiKiMtv0oa8d2ZIWPvStf6snPkX6+YRzQbhRb4RMZyeCgUlJSgnnz5uH777/HuXPncO/ePZ
Xn8/PzTTY4Iluhaz9KcqsgBhR7pb7UI4Wlx0RVYnDDt7fffhtz585Fv379UFhYiJSUFCQnJ6NatWpITU01wxCJ5I/7URyQVEWPOnaYJaoyg2dUvv32WyxatAg9evTA22+/jYEDB6Jhw4aIjo7Gvn37MH78eHOMk0iWKm6eHR3bQGM/CmdS7Ji2ip7YyYB3OJd5iEzE4KCSl5eH5s2bAwBq1qyJwsJCAEDPnj01utUS2TOpzbNrx3bkfhR7VrH0WFvlTuNuDChEJmTw0k9QUBByc3MBAI0aNcKWLVsAAAcPHmSTN3IY2jbPAuCeFHu1dVpZVc/aF8r+zN4AdJqgeg6XeohMzuAZlT59+iA9PR3t27fHyy+/jIEDB+K///0vzp07h4kTJ5pjjESyw2ZuDkZqP8ru+WU3EIzqxYoeIjMyOKjMmjVL+d99+/ZFUFCQsqV+7969TTo4IjlhMzcHo0+H2eunyjrMMqAQmU2V+6g8/PDDePjhh00xFiLZktqPws2zdowdZolkw6igcvHiRezevRtXrlxBaWmpynOs+iF7w2ZuDkZqmedomnSHWc6kEJmdwUFlyZIlGD16NFxdXeHj4wOFQqF8TqFQMKiQ3WEzNwejbZmHHWaJrMLgoPLWW2/hrbfewpQpU1CtmsFFQ0Q2h/tRHET5npSS+9LPs8MskVUYHFRu376NAQMGMKSQw4gJqcP9KPZOfU9KYGvgYuaDx1zmIbIag++e/Prrr8Pb2xuTJ08215j0xrsnkzlVrPKJCamj8ZjshLa7Hvf+FHBy4TIPkRmY9e7JM2fORM+ePbF582Y0b94cLi4uKs/PnTvX0Jckkh2pKp/JiVEMKPZCn9JjJ5ey0mMisiqDg8qMGTPw888/IyIiAgA0NtMS2TptVT4JTf0ZVOwBS4+JbIrBQWXu3Ln46quvMGzYMDMMh8j62HXWjrH0mMjmGBxU3Nzc0KlTJ3OMhUgWWOVjh8qXevJzpJ9n6TGRbBlcuvPyyy/jk08+McdYiGShvMqnIlb52LCKNxPcOUv6nPJwwnb4RLJj8IzKgQMHsH37dvz4449o2rSpxmbaNWvWmGxwRJaiXtEzOTGKXWftgdRSjzou8xDJmsFBpXbt2khOTjbHWIisQleFDwOKjdNW0RM7GfAO5zIPkQ0wqoU+kb1ghY8dqlh6rK1yp3E3BhQiG1HluycT2TJW+NgZ9dLjThPKPlSOcamHyJboHVRiYmL06pNy6NChKg2IyJJY4WNHpPaj7J4PjEgHonqxoofIRukdVJKSksw4DCLr4H18bJw+HWavn2I1D5EN0zuoTJs2zZzjILIYVvjYCXaYJXII3KNCDoUVPnaCHWaJHAaDCjkMVvjYEW3LPOwwS2R3GFTIYbDCxw6U70kpuS/9fHk4YUAhshsMKuQwWOFj49T3pAS2Bi5mPnjMZR4iu8SgQg6DFT42TGpPysVMoPengJMLl3mI7JheQeXjjz/W+wXHjx9v9GCIzKFilQ8rfGyIPqXHTi5lpcdEZLf0Cirz5s1TeXz16lXcvn0btWvXBgAUFBSgevXq8PX1ZVAhWdFV5UMyxtJjIvpXNX1OysnJUX689957aNmyJbKzs5Gfn4/8/HxkZ2ejVatWePfdd809XiK9aavyOXzuhpVGRHrRVXpcEfekEDkEg/eoTJ06FatXr0ZERITyWEREBObNm4e+ffti0KBBJh0gkbFY5WNjypd68nOkn2fpMZFDMjio5Obm4v59zdLAkpISXL582SSDIjIFVvnYEPWlHiksPSZySHot/VQUHx+PkSNHIiMjA0IIAEBGRgZeeOEFdO3a1eQDJDLE4XM3sObQBRw+d0NZ5VMRq3xkSGqpRx2XeYgclsEzKl999RWGDh2Kdu3awcXFBQBQXFyMhIQELF682OQDJNKXto2zrPKROW0VPbGTAe9wLvMQOTiDg0q9evWwceNG/PHHHzhx4gSEEIiKisJDDz1kjvER6aWy9vgMKDJTsfRYW+VO424MKERkfMO3sLAwCCHQsGFDODuzbxxZR3mPlLPXuXHWZqjvR+k0oexD5RiXeoiojMEJ4/bt23jppZfw9ddfAwD++OMPNGjQAOPHj0dAQAAmT55s8kESSVFf6pHCjbMyI7UfZfd8YEQ6ENWLFT1EpMHgzbRTpkzBkSNH8Msvv8Dd3V15vGvXrkhLS9PxmUSmI7XUo44bZ2XiQgZwZOWD5R4p10+VhZMWAxhSiEiFwTMq69atQ1paGh5++GEoFArl8SZNmuCvv/4y6eCIKqrYCl9bj5SX4xsh1KcGN87KBTvMElEVGRxUrl69Cl9fX43jt27dUgkuRKakvszTJyZA8rzHInwZUORCV4fZoxVmX7kfhYh0MHjpp23btvjpp5+Uj8vDyaJFi9ChQwfTjYzoX1LLPGsPX9IIK1zqkRltyzwN48r2pPRZWPZnt1SLDouIbIvBMyozZ85E9+7dcfz4cRQXF+Ojjz7C77//jr1792Lnzp3mGCM5OG3LPI80rochHcLYI0VuyveilGh2sAbADrNEZBCDg0rHjh2xe/dufPDBB2jYsCG2bNmCVq1aYe/evWjevLk5xkgOTlcrfPZIkRn1PSmBrYGLmQ8ec5mHiAykEOV98G1QUVERvLy8UFhYCE9PT2sPh8xIfY/KmNgGmJQYZcURkYYLGcDieM3jvT8FnFxYdkxESob8/jZ4RsXJyQm5ubkaG2qvX78OX19flJSUGPqSRJViK3yZqthhVtueFCeXsrJjIiIjGBxUtE3A3L17F66urlUeEBGgWopcHkq4zCMzLD0mIgvQO6h8/PHHAMqqfBYvXoyaNWsqnyspKcGuXbsQGRlp+hGSw9F2c0GSEZYeE5GF6B1U5s2bB6BsRmXBggVwcnJSPufq6oqwsDAsWLDA9CMkh1LZzQXJysqXevJzpJ9vGAe0G8VW+ERkMnoHlZycsv8xdenSBWvWrEGdOvylQaanrRSZNxeUAfWlHiksPSYiEzN4j8qOHTvMMQ4iALpLkcmKpJZ61HGZh4jMwODOtH379sWsWbM0jr///vt4+umnTTIoclwxIXUwOraByjF2nJUBbRU9sZPZYZaIzMrgPir16tXD9u3bNZq7HTt2DF27dsXly5dNOkBd2EfFfklV/ZCFVSw9BqR7pIxI5ywKERnMrH1U/v77b8kyZBcXFxQVFRn6ckQANIMJS5GtTH0/SqcJZR8qx7jUQ0TmZ3BQadasGdLS0vDWW2+pHF+5ciWaNGlisoGR42A5ssxI7UfZPb9s9iSqFyt6iMiiDA4qU6dOxVNPPYW//voLcXFxAID09HSsWLECq1atMvkAyb6xHFkm9Okwe/1UWYdZBhQisiCDg0rv3r2xbt06zJgxA6tXr4aHhweio6Oxbds2xMbGmmOMZMdYjiwD7DBLRDJmcFABgB49eqBHjx6mHgs5IJYjWxk7zBKRzBkVVAoKCrB69WqcPn0ar776Kry9vXHo0CH4+fkhMDDQ1GMkO1Zejqx+Z2TOpliItmUedpglIpkwOKgcPXoUXbt2hZeXF86cOYMRI0bA2
9sba9euxdmzZ/HNN9+YY5xkx3hnZCso35NScl/6eXaYJSKZMDiopKSkYNiwYZgzZw5q1aqlPJ6YmIhnnnnGpIMj+8Q7I1uZ+p6UwNbAxcwHj7nMQ0QyYnBQOXjwIBYuXKhxPDAwEHl5eSYZFNkvliJbmdSelIuZQO9PAScXLvMQkewYHFTc3d0lG7udPHkS9erVM8mgyD6xFNlK9Ck9dnIpKz0mIpIZg+/18+STT+Kdd97B/ftla9sKhQLnzp3D5MmT8dRTT5l8gGQ/dJUik5lsnVbW+n7tC2V//rVd+jyWHhORTBkcVD744ANcvXoVvr6+uHPnDmJjY9GoUSPUqlUL7733njnGSHaCpcgWpqv0uCLuSSEiGTN46cfT0xO//fYbtm/fjkOHDqG0tBStWrVC165dzTE+siMsRbYwlh4TkR0w+O7J33zzDfr37w83NzeV4/fu3cPKlSsxZMgQkw5QF9492TbxzshmVrH0eP04zed5x2MisjJDfn8bHFScnJyQm5sLX19flePXr1+Hr68vSkpKDB+xkRhUiNToU3rcLdXSoyIiUmHI72+Dl36EEFAoFBrHL1y4AC8vL0NfjhwAZ1AshKXHRGSH9A4qMTExUCgUUCgUiI+Ph7Pzg08tKSlBTk4OunfvbpZBku1i3xQzY+kxEdk5vYNKUlISACArKwsJCQmoWbOm8jlXV1eEhYWxPJlUsG+KmfGux0TkAPQOKtOmTQMAhIWFoX///nB3dzfboMg+6OqbwqBSRbzrMRE5CIP3qAwdOhQFBQVYvnw5/vrrL7z22mu8ezJJYt8UE9NnmYelx0RkZ6p89+SRI0fy7smkpL5xln1TTMSQZR7e9ZiI7IjBQWXixIm8ezJJ0rZxNqGpP6t+qoLLPETkwAwOKhkZGfjyyy81jvPuyY6tso2zDChVwGUeInJgvHsymQQ3zppBxQ6zUrjMQ0QOwOCgUn735O+//x4A755MZbhx1sT06TDLgEJEDsDgFvpFRUV44okn8Pvvv+PmzZsICAhAXl4eOnTogI0bN6JGDcv9YmILfXlR36MyJrYBJrG5m+EuZACL4zWPs8MsEdkJs7bQ592TSRtunK0CdpglIpJkcFApFxcXh7i4OFOOhWyM1D18uHHWCOwwS0SklUFBpbS0FEuXLsWaNWtw5swZKBQKhIeHo2/fvhg8eLDkzQrJPvEePibC0mMiIp30DipCCPTu3RsbN25EixYt0Lx5cwghkJ2djWHDhmHNmjVYt26dGYdKcsF7+JgQS4+JiHTSO6gsXboUu3btQnp6Orp06aLy3Pbt25GUlIRvvvkGQ4YMMfkgSV5YimwCLD0mItKL3kFlxYoVeOONNzRCClC2X2Xy5Mn49ttvGVQcAEuRq4ilx0REequm74lHjx5F9+7dtT6fmJiII0eOmGRQJG/l9/CpiPfw0ZPUnpSLmWWlx30WAiPSgW6p1hgZEZEs6T2jkp+fDz8/P63P+/n54caNGyYZFMkfS5ENwNJjIiKj6R1USkpK4Oys/XQnJycUFxebZFBkG1iKrAeWHhMRVYlBVT/Dhg2Dm5ub5PN379412aBInqT6ppAOLD0mIqoyvYPK0KFDKz2HG2ntF/um6EmfZR6WHhMR6U3voLJkyRJzjoNkjH1T9GTIMg9Lj4mI9KJ31Q85Ll19U+hfupZ5KuIyDxGRQYy+1w85DvZN0QOXeYiIzIJBhSpV3jel4vIP+6b8ix1miYjMikGFJKlX+LBvigR2mCUiMjsGFdKgrcKHfVMq0NVh1smFyzxERCbCoEIqWOGjAzvMEhFZHIMKqeCdkbVgh1kiIqtgeTKpYIWPBJYeExFZDWdUSAUrfCSw9JiIyGoYVEgDK3z+xdJjIiKrY1AhAJrlyA5f4cPSYyIiWWBQId5wUB1Lj4mIZINBxcGxHPlfLD0mIpIlBhUHx3JksPSYiEjGWJ7sgA6fu4E1hy7g8LkbLEdm6TERkaxxRsXBSO1HcbhyZH2WeVh6TEQkCwwqDkTbfpS1Yzs6TjmyIcs8LD0mIrI6Lv04kMr2oyS3CrLvkMJlHiIim8MZFTtXsT+Kw+9H4TIPEZHNYVCxY9yP8i92mCUislkMKnaK+1H+xQ6zREQ2jUHFTunaj2L3e1HKscMsEZHNY1CxUw67H4UdZomI7AqDip2puHnW4fajsMMsEZHdYVCxI1KbZ9eO7egY+1F0lR4fTXtwjHtSiIhsCoOKndB1c8HkVkFWGpUFsfSYiMguMajYCYe9uSBLj4mI7BqDip1wyM2zLD0mIrJ7DCp2IiakjmNtnmXpMRGRQ2BQsSOTE6Psu5kbS4+JiBwOg4qdiQmpY38BBWDpMRGRg+Ldk0n+eNdjIiKHxRkVG1axuZvdzaLos8zD0mMiIrvHoGKjpJq7TU6MsuKITMiQZR6WHhMR2TUu/dggbc3dDp+7YaURmRCXeYiIqALOqNggu27uxmUeIiKqgEHFBtllczd2mCUiIgkMKjbI7pq7scMsERFpoRBCCGsPwlhFRUXw8vJCYWEhPD09rT0ci7OLqp8LGcDieM3j7DBLRGS3DPn9zRkVG2azzd3YYZaIiPTEoGJD7GIGhR1miYjIAAwqNsIu+qboKj0+mvbgGPekEBHRvxhUbIC2vikJTf1ta2aFpcdERGQgBhUbYPN9U1h6TERERmJQsQE23TeFpcdERFQFDCo2wGb7pkjtSbmYydJjIiLSG4OKjZicGIWEpv7yr/ph6TEREZkQg4pMSZUiy75vCkuPiYjIxBhUZMgmS5FZekxERGbAoCIzNlWKrM8yD0uPiYioChhUZMZmSpENWeZh6TERERmpmrUHQKpsohRZ1zJPRVzmISKiKuKMiszYRCkyl3mIiMhCGFRkSLalyOwwS0REFsagIlOyK0Vmh1kiIrICBhWqHDvMEhGRlTCoyIBUczerY4dZIiKSAQYVK5Nlczd2mCUiIplgebIVaWvudvjcDSuNCCw9JiIiWeGMihXJsrkbS4+JiEhGGFSsSFbN3Vh6TEREMsSgYkWyae7G0mMiIpIphRBCWHsQxioqKoKXlxcKCwvh6elp7eEYzapVPxcygMXxmsdZekxERGZiyO9vzqjIgMWbu7H0mIiIbASDiqNh6TEREdkQlic7EpYeExGRjeGMihVYdE+KPss8LD0mIiKZYlCxMIt2ojVkmYelx0REJENc+rEgi3ai5TIPERHZAc6oWJBFO9FymYeIiOwAg4oFWaQTLTvMEhGRHWFQsSCzd6Jlh1kiIrIz7ExrZlIVPmap+mGHWSIishHsTCsT2ip8TNaJlh1miYjIzjGomIm2Cp+Epv6mCSnsMEtERA6A5clmoqvCp8pYekxERA6CMypmYtYKH5YeExGRg2BQMROzVPiw9JiIiBwMg4oZTU6MQkJTf9NU+LD0mIiIHBCDipmZpMJHak/KxUyWHhMRkd1jUJErlh4TERExqJiaSZq5sfSYiIgIAIOKSWlr8GYQXaXHR9MeHOOeFCIicgAMKiZSpQZv+izzsPSYiIgcEIOKiehq8KYzqBiyzMPSYyIicjDsTGsi
RjV4Y4dZIiIinTijYiJGNXjjMg8REZFODCompHeDN3aYJSIi0guDiolV2uCNHWaJiIj0xqBSBQb3TGGHWSIiIoMwqBhJ754p7DBLRERkNAYVI+jdM4UdZomIiKqE5clG0NUzRYmlx0RERFXGGRUj6NUzhaXHREREVcagYgSdPVNYekxERGQyDCp6Uq/wkeyZwtJjIiIik2JQ0YO2Ch+VniksPSYiIjI5BpVK6KzwqfYXS4+JiIjMiEGlEtoqfKrvehc49d8HB1h6TEREZHIsT66EVIVPS8UpRFQMKQBLj4mIiMyAMyqVkKrwGRFVApyWOJmlx0RERCbFoKKHyYlR6FMvF0UXT8AzMBIR/o9KBxWWHhMREZkUg4o+tk5DRHlFz2EAnSaUfVSs8uEyDxERkckxqFRGqux493xgRDoQ1YvLPERERGbEoKLFyYztKLp4AkEiD/WlTrh+qqzsmAGFiIjIbKxa9bNr1y706tULAQEBUCgUWLdunTWHo7R34ThE/NgHbQ9PQf2sj6RPYtkxERGR2Vk1qNy6dQvFxcUoLi4GALzyyiv49ddfrTkknMzYjg65y3SfxP0oREREFmHVoFJUVIT09HTMmDEDANCkSRMkJibi3Llz1hvTxROSx48/NAbos7Bsb0q3VMsOioiIyEFZNajMnTsXzz//PEaMGAEAeP755xEcHIwvvvhC8vy7d++iqKhI5cPUPAMjJY87PfQ496QQERFZmNWCyr1795CZmYnHH39c5fjjjz+OPXv2SH7Ou+++Cy8vL+VHcHCwyccV0SYOe+sPVjm2t/5gRLSJM/l7ERERkW5Wq/q5du0aSkpK4Ofnp3Lcz88PeXl5VhpVmQ4vfIqTGcnKBm8dGFKIiIiswurlyQqFQuWxEELjWLmpU6fi9ddfVz4uKioyy6wKUDazAgYUIiIiq7JaUKlbty6cnJw0Zk+uXLmiMctSzs3NDW5ubpYYHhEREcmA1faouLq6omXLllixYgWysrIAADk5OdiwYQOaNm1qrWERERGRjFi16ueJJ57AqlWrEBMTAwBISUlBTk4Orly5Ys1hERERkUxYNai88847+OyzzxAaGgpXV1e0atUKO3fuxOrVq605LCIiIpIJhRBCWHsQxioqKoKXlxcKCwvh6elp7eEQERGRHgz5/W3VGRUiIiIiXRhUiIiISLYYVIiIiEi2GFSIiIhIthhUiIiISLYYVIiIiEi2rH6vn6oor6wuKiqy8kiIiIhIX+W/t/XpkGLTQeXmzZsAYLYbExIREZH53Lx5E15eXjrPsemGb6Wlpbh06RJq1aql9Y7L+ii/C/P58+fZOM4CeL0ti9fbsni9LYvX27JMdb2FELh58yYCAgJQrZruXSg2PaNSrVo1BAUFmez1PD09+Rfdgni9LYvX27J4vS2L19uyTHG9K5tJKcfNtERERCRbDCpEREQkWwwqANzc3DBt2jS4ublZeygOgdfbsni9LYvX27J4vS3LGtfbpjfTEhERkX3jjAoRERHJFoMKERERyRaDChEREckWgwoRERHJFoMKgM8//xzh4eFwd3dH69at8euvv1p7SDZn5syZaNu2LWrVqgVfX18kJSXh5MmTKucIIZCamoqAgAB4eHjgsccew++//65yzt27d/HSSy+hbt26qFGjBnr37o0LFy5Y8kuxSTNnzoRCocCECROUx3i9TevixYt49tln4ePjg+rVq6Nly5bIzMxUPs/rbTrFxcX4z3/+g/DwcHh4eKBBgwZ45513UFpaqjyH19t4u3btQq9evRAQEACFQoF169apPG+qa3vjxg0MHjwYXl5e8PLywuDBg1FQUGD4gIWDW7lypXBxcRGLFi0Sx48fFy+//LKoUaOGOHv2rLWHZlMSEhLEkiVLxP/+9z+RlZUlevToIUJCQsTff/+tPGfWrFmiVq1a4ocffhDHjh0T/fv3F/Xr1xdFRUXKc0aPHi0CAwPF1q1bxaFDh0SXLl1EixYtRHFxsTW+LJtw4MABERYWJqKjo8XLL7+sPM7rbTr5+fkiNDRUDBs2TOzfv1/k5OSIbdu2iVOnTinP4fU2nenTpwsfHx/x448/ipycHLFq1SpRs2ZNMX/+fOU5vN7G27hxo3jzzTfFDz/8IACItWvXqjxvqmvbvXt30axZM7Fnzx6xZ88e0axZM9GzZ0+Dx+vwQaVdu3Zi9OjRKsciIyPF5MmTrTQi+3DlyhUBQOzcuVMIIURpaanw9/cXs2bNUp7zzz//CC8vL7FgwQIhhBAFBQXCxcVFrFy5UnnOxYsXRbVq1cTmzZst+wXYiJs3b4rGjRuLrVu3itjYWGVQ4fU2rUmTJonOnTtrfZ7X27R69Oghhg8frnIsOTlZPPvss0IIXm9TUg8qprq2x48fFwDEvn37lOfs3btXABAnTpwwaIwOvfRz7949ZGZm4vHHH1c5/vjjj2PPnj1WGpV9KCwsBAB4e3sDAHJycpCXl6dyrd3c3BAbG6u81pmZmbh//77KOQEBAWjWrBm/H1q8+OKL6NGjB7p27apynNfbtNavX482bdrg6aefhq+vL2JiYrBo0SLl87zeptW5c2ekp6fjjz/+AAAcOXIEv/32G5544gkAvN7mZKpru3fvXnh5eaF9+/bKcx5++GF4eXkZfP1t+qaEVXXt2jWUlJTAz89P5bifnx/y8vKsNCrbJ4RASkoKOnfujGbNmgGA8npKXeuzZ88qz3F1dUWdOnU0zuH3Q9PKlStx6NAhHDx4UOM5Xm/TOn36NL744gukpKTgjTfewIEDBzB+/Hi4ublhyJAhvN4mNmnSJBQWFiIyMhJOTk4oKSnBe++9h4EDBwLg329zMtW1zcvLg6+vr8br+/r6Gnz9HTqolFMoFCqPhRAax0h/48aNw9GjR/Hbb79pPGfMteb3Q9P58+fx8ssvY8uWLXB3d9d6Hq+3aZSWlqJNmzaYMWMGACAmJga///47vvjiCwwZMkR5Hq+3aaSlpWH58uX47rvv0LRpU2RlZWHChAkICAjA0KFDlefxepuPKa6t1PnGXH+HXvqpW7cunJycNNLdlStXNNIk6eell17C+vXrsWPHDgQFBSmP+/v7A4DOa+3v74979+7hxo0bWs+hMpmZmbhy5Qpat24NZ2dnODs7Y+fOnfj444/h7OysvF683qZRv359NGnSROVYVFQUzp07B4B/v03ttddew+TJkzFgwAA0b94cgwcPxsSJEzFz5kwAvN7mZKpr6+/vj8uXL2u8/tWrVw2+/g4dVFxdXdG6dWts3bpV5fjWrVvRsWNHK43KNgkhMG7cOKxZswbbt29HeHi4yvPh4eHw9/dXudb37t3Dzp07lde6devWcHFxUTknNzcX//vf//j9UBMfH49jx44hKytL+dGmTRsMGjQIWVlZaNCgAa+3CXXq1Emj3P6PP/5AaGgoAP79NrXbt2+jWjXVX09OTk7K8mReb/Mx1bXt0KEDCgsLceDAAeU5+/fvR2FhoeHX36Ctt3aovDz5v//9rzh+/LiYMGGCqFGjhjhz5oy1h2ZTxowZI7y8vMQvv/w
icnNzlR+3b99WnjNr1izh5eUl1qxZI44dOyYGDhwoWfIWFBQktm3bJg4dOiTi4uJYTqinilU/QvB6m9KBAweEs7OzeO+998Sff/4pvv32W1G9enWxfPly5Tm83qYzdOhQERgYqCxPXrNmjahbt654/fXXlefwehvv5s2b4vDhw+Lw4cMCgJg7d644fPiwsi2Hqa5t9+7dRXR0tNi7d6/Yu3evaN68OcuTjfXZZ5+J0NBQ4erqKlq1aqUsqSX9AZD8WLJkifKc0tJSMW3aNOHv7y/c3NzEo48+Ko4dO6byOnfu3BHjxo0T3t7ewsPDQ/Ts2VOcO3fOwl+NbVIPKrzeprVhwwbRrFkz4ebmJiIjI8WXX36p8jyvt+kUFRWJl19+WYSEhAh3d3fRoEED8eabb4q7d+8qz+H1Nt6OHTsk/389dOhQIYTpru3169fFoEGDRK1atUStWrXEoEGDxI0bNwwer0IIIQycGSIiIiKyCIfeo0JERETyxqBCREREssWgQkRERLLFoEJERESyxaBCREREssWgQkRERLLFoEJERESyxaBCRDYjLCwM8+fPt/YwiMiCGFSIbMiwYcOQlJRk7WEYxJLhIjU1FS1btrTIexGRZTCoEBERkWwxqBDZsMceewzjx4/H66+/Dm9vb/j7+yM1NVXlnNTUVISEhMDNzQ0BAQEYP3688rmwsDC8++67eOaZZ1CzZk0EBATgk08+Ufn8wsJCjBo1Cr6+vvD09ERcXByOHDmics769evRpk0buLu7o27dukhOTlaO7+zZs5g4cSIUCgUUCoXyc/bs2YNHH30UHh4eCA4Oxvjx43Hr1i3l81euXEGvXr3g4eGB8PBwfPvttwZfn/IZqBkzZsDPzw+1a9fG22+/jeLiYrz22mvw9vZGUFAQvvrqK5XPmzRpEh566CFUr14dDRo0wNSpU3H//n2Vc6ZPnw5fX1/UqlULI0aMwOTJkzVmc5YsWYKoqCi4u7sjMjISn3/+ufK5e/fuYdy4cahfvz7c3d0RFhaGmTNnGvw1Etk7BhUiG/f111+jRo0a2L9/P+bMmYN33nlHefv11atXY968eVi4cCH+/PNPrFu3Ds2bN1f5/Pfffx/R0dE4dOgQpkyZgokTJyo/XwiBHj16IC8vDxs3bkRmZiZatWqF+Ph45OfnAwB++uknJCcno0ePHjh8+DDS09PRpk0bAMCaNWsQFBSEd955B7m5ucjNzQUAHDt2DAkJCUhOTsbRo0eRlpaG3377DePGjVOOa9iwYThz5gy2b9+O1atX4/PPP8eVK1cMvj7bt2/HpUuXsGvXLsydOxepqano2bMn6tSpg/3792P06NEYPXo0zp8/r/ycWrVqYenSpTh+/Dg++ugjLFq0CPPmzVM+/+233+K9997D7NmzkZmZiZCQEHzxxRcq77to0SK8+eabeO+995CdnY0ZM2Zg6tSp+PrrrwEAH3/8MdavX4/vv/8eJ0+exPLlyxEWFmbw10dk94y69SIRWcXQoUPFk08+qXwcGxsrOnfurHJO27ZtxaRJk4QQQnz44YfioYceEvfu3ZN8vdDQUNG9e3eVY/379xeJiYlCCCHS09OFp6en+Oeff1TOadiwoVi4cKEQQogOHTqIQYMGaR1zaGiomDdvnsqxwYMHi1GjRqkc+/XXX0W1atXEnTt3xMmTJwUAsW/fPuXz2dnZAoDGa1U0bdo00aJFC+XjoUOHitDQUFFSUqI8FhERIR555BHl4+LiYlGjRg2xYsUKra87Z84c0bp1a+Xj9u3bixdffFHlnE6dOqm8d3BwsPjuu+9Uznn33XdFhw4dhBBCvPTSSyIuLk6UlpZqfV8iEoIzKkQ2Ljo6WuVx/fr1lTMPTz/9NO7cuYMGDRpg5MiRWLt2LYqLi1XO79Chg8bj7OxsAEBmZib+/vtv+Pj4oGbNmsqPnJwc/PXXXwCArKwsxMfHGzTmzMxMLF26VOU1ExISUFpaipycHGRnZ8PZ2Vk5MwMAkZGRqF27tkHvAwBNmzZFtWoP/lfn5+enMqvk5OQEHx8fldma1atXo3PnzvD390fNmjUxdepUnDt3Tvn8yZMn0a5dO5X3qfj46tWrOH/+PJ5//nmVr3H69OnK6zZs2DBkZWUhIiIC48ePx5YtWwz+2ogcgbO1B0BEVePi4qLyWKFQoLS0FAAQHByMkydPYuvWrdi2bRvGjh2L999/Hzt37tT4PPXXAIDS0lLUr18fv/zyi8Y55aHBw8PD4DGXlpbihRdeUNkvUy4kJAQnT55UGUdVSF0fXdds3759GDBgAN5++20kJCTAy8sLK1euxIcffqjxORUJIZT/Xf5aixYtQvv27VXOc3JyAgC0atUKOTk52LRpE7Zt24Z+/fqha9euWL16dRW+WiL7w6BCZOc8PDzQu3dv9O7dGy+++CIiIyNx7NgxtGrVCkDZL+aK9u3bh8jISABlv0zz8vLg7Oysdf9EdHQ00tPT8dxzz0k+7+rqipKSEpVjrVq1wu+//45GjRpJfk5UVBSKi4uRkZGhnKk4efIkCgoK9P2yjbZ7926EhobizTffVB47e/asyjkRERE4cOAABg8erDyWkZGh/G8/Pz8EBgbi9OnTGDRokNb38vT0RP/+/dG/f3/07dsX3bt3R35+Pry9vU34FRHZNgYVIju2dOlSlJSUoH379qhevTqWLVsGDw8PhIaGKs/ZvXs35syZg6SkJGzduhWrVq3CTz/9BADo2rUrOnTogKSkJMyePRsRERG4dOkSNm7ciKSkJLRp0wbTpk1DfHw8GjZsiAEDBqC4uBibNm3C66+/DqCssmjXrl0YMGAA3NzcULduXUyaNAkPP/wwXnzxRYwcORI1atRAdnY2tm7dik8++QQRERHo3r07Ro4ciS+//BLOzs6YMGGCUbM3hmrUqBHOnTuHlStXom3btvjpp5+wdu1alXNeeukljBw5Em3atEHHjh2RlpaGo0ePokGDBspzUlNTMX78eHh6eiIxMRF3795FRkYGbty4gZSUFMybNw/169dHy5YtUa1aNaxatQr+/v5GLW8R2TPuUSGyY7Vr18aiRYvQqVMn5czHhg0b4OPjozznlVdeQWZmJmJiYvDuu+/iww8/REJCAoCy5Y2NGzfi0UcfxfDhw/HQQw9hwIABOHPmDPz8/ACUlSCvWrUK69evR8uWLREXF4f9+/crX/+dd97BmTNn0LBhQ9SrVw9A2SzMzp078eeff+KRRx5BTEwMpk6divr16ys/b8mSJQgODkZsbCySk5OVJdLm9uSTT2LixIkYN24cWrZsiT179mDq1Kkq5wwaNAhTpkzBq6++qlzCGTZsGNzd3ZXnjBgxAosXL8bSpUvRvHlzxMbGYunSpQgPDwcA1KxZE7Nnz0abNm3Qtm1bnDlzBhs3blTZT0NEgEJUXFglIocSFhaGCRMmYMKECdYeis3r1q0b/P39sWzZMmsPhciucOmHiMhAt2/fxoIFC5CQkAAnJyesWLEC27ZtU/afISLTYVAhIjJQ+ZLY9OnTcffuXUREROCHH35A165drT
00IrvDpR8iIiKSLe7aIiIiItliUCEiIiLZYlAhIiIi2WJQISIiItliUCEiIiLZYlAhIiIi2WJQISIiItliUCEiIiLZYlAhIiIi2fp/YyI93p+GL7oAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAioAAAHFCAYAAADcytJ5AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA4Y0lEQVR4nO3deXhU5d3/8c8kZCOEIRBICIEYQAMIASQFAyq4gFI2BRUREVCsgFRZrIKPFsQFBKS1KkVakWofFn8VbFXcUBBZVDYlsgmyJEDClpCEhGwz9+8Pn0yZbMyEhJyY9+u65iJzzn3O+Z57zsx8ONvYjDFGAAAAFuRT3QUAAACUhaACAAAsi6ACAAAsi6ACAAAsi6ACAAAsi6ACAAAsi6ACAAAsi6ACAAAsi6ACAAAsi6ACVJKdO3fqwQcfVKtWrRQUFKSgoCBdeeWVevjhh7V169bLVseMGTNks9nchl1xxRUaNWpUlS5306ZNmjFjhs6ePVuly/HUqFGjZLPZXI+AgADFxsZq+vTpys3N9Xp+NptNM2bMqPxCL7NevXqpV69e1V0G4LE61V0A8GvwxhtvaMKECYqNjdVjjz2mq6++WjabTXv27NGyZcv0m9/8RgcOHFCrVq2qpb5Vq1apfv36VbqMTZs26dlnn9WoUaPUoEGDKl2Wp4KCgvTll19KktLT07Vs2TLNnDlTe/fu1YoVK7ya1+bNmxUVFVUVZQIoB0EFuEQbN27U+PHj1a9fP/3rX/+Sv7+/a9xNN92kRx55RP/v//0/BQUFlTufnJwc1a1bt0pq7Ny5c5XM1+p8fHx07bXXup737dtXhw8f1rvvvqv58+erWbNmHs/rwvkAuHw49ANcohdffFG+vr5644033ELKhe666y5FRka6no8aNUr16tVTYmKi+vTpo5CQEN18882SpM8//1yDBg1SVFSUAgMD1bp1az388MM6ffp0ifl+9NFH6tSpkwICAhQTE6N58+aVuvzSDv1kZmbq8ccfV0xMjPz9/dWsWTNNnDhR2dnZbu1sNpsmTJigd955R23btlXdunXVsWNHffjhh642M2bM0B/+8AdJUkxMjOtwy7p160qt589//rNsNpsOHDhQYtyTTz4pf39/1/ru2LFD/fv3V5MmTRQQEKDIyEj169dPR48eLXXeF1MUOI4cOSJJSkpK0n333eeaf9u2bfXyyy/L6XSW6IcLD/3k5OS4+i8wMFANGzZUfHy8li1b5jbdf/7zHyUkJKhu3boKCQlR7969tXnzZrc2RYfrdu3apWHDhslutys8PFwPPPCAMjIy3NoaY7RgwQJ16tRJQUFBCg0N1Z133qmDBw+WaDdnzhxFR0crMDBQ11xzjT7++OMK9RlQndijAlwCh8OhtWvXKj4+Xk2bNvVq2vz8fA0cOFAPP/ywpk6dqsLCQknSzz//rISEBI0ZM0Z2u12HDx/W/Pnzdd111ykxMVF+fn6SpC+++EKDBg1SQkKCli9fLofDoTlz5ujEiRMXXXZOTo569uypo0eP6qmnnlJcXJx27dqlP/7xj0pMTNSaNWvcznP56KOPtGXLFs2cOVP16tXTnDlzdMcdd2jfvn1q2bKlxowZo7S0NL366qtauXKlqy/atWtX6vLvu+8+Pfnkk1qyZImef/55t/785z//qQEDBigsLEzZ2dnq3bu3YmJi9Prrrys8PFypqalau3atsrKyvOrvIkXhqHHjxjp16pS6d++u/Px8Pffcc7riiiv04Ycf6vHHH9fPP/+sBQsWlDmfyZMn65133tHzzz+vzp07Kzs7Wz/++KPOnDnjarN06VINHz5cffr00bJly5SXl6c5c+aoV69e+uKLL3Tddde5zXPIkCEaOnSoHnzwQSUmJmratGmSpMWLF7vaPPzww1qyZIkeffRRvfTSS0pLS9PMmTPVvXt3/fDDDwoPD5ckPfvss3r22Wf14IMP6s4771RycrIeeughORwOxcbGVqjvgGphAFRYamqqkWTuueeeEuMKCwtNQUGB6+F0Ol3jRo4caSSZxYsXlzt/p9NpCgoKzJEjR4wk8+9//9s1rlu3biYyMtKcP3/eNSwzM9M0bNjQFH9rR0dHm5EjR7qez5o1y/j4+JgtW7a4tfvXv/5lJJnVq1e7hkky4eHhJjMz0229fXx8zKxZs1zD5s6daySZQ4cOlbtORQYPHmyioqKMw+FwDVu9erWRZD744ANjjDFbt241ksz777/v0TwvNHLkSBMcHOzq/1OnTplXXnnF2Gw285vf/MYYY8zUqVONJPPtt9+6TTtu3Dhjs9nMvn373Pph+vTpruft27c3t99+e5nLdzgcJjIy0nTo0MFtHbOyskyTJk1M9+7dXcOmT59uJJk5c+a4zWP8+PEmMDDQte1s3rzZSDIvv/yyW7vk5GQTFBRknnjiCWOMMenp6SYwMNDccccdbu02btxoJJmePXuWWTdgNRz6AapIly5d5Ofn53q8/PLLJdoMGTKkxLCTJ09q7Nixat68uerUqSM/Pz9FR0dLkvbs2SNJys7O1pYtWzR48GAFBga6pg0JCdGAAQMuWtuHH36o9u3bq1OnTiosLHQ9br311lIP2dx4440KCQlxPQ8PD1eTJk1ch08qYvTo0Tp69KjWrFnjGvbWW28pIiJCffv2lSS1bt1aoaGhevLJJ7Vw4ULt3r3bq2VkZ2e7+r9x48aaOHGi+vbtq1WrVkmSvvzyS7Vr105du3Z1m27UqFEyxrhOxC1N165d9fHHH2vq1Klat26dzp8/7zZ+3759On78uEaMGCEfn/9+1NarV09DhgzRN998o5ycHLdpBg4c6PY8Li5Oubm5OnnypKRfXjebzab77rvP7XWLiIhQx44dXa/b5s2blZubq+HDh7vNr3v37q5tCagpOPQDXIKwsDAFBQWV+oW9dOlS5eTkKCUlpcQXkCTVrVu3xJU4TqdTffr00fHjx/XMM8+oQ4cOCg4OltPp1LXXXuv6MkxPT5fT6VRERESJ+ZY2rLgTJ07owIEDrsNIxRU/H6ZRo0Yl2gQEBJT4cvZG37591bRpU7311lvq06eP0tPT9Z///EePPfaYfH19JUl2u11fffWVXnjhBT311FNKT09X06ZN9dBDD+npp58us/4iQUFBWr9+vave6Ohotz4/c+aMrrjiihLTFZ1PdOFhnOL+8pe/KCoqSitWrNBLL72kwMBA3XrrrZo7d66uvPJK17SlHRKMjIyU0+lUenq62wnUxfs5ICBAklz9fOLECRljXId3imvZsqVb3RXdPgArIagAl8DX11c33XSTPvvsM6WkpLh9KRWdn3H48OFSpy1+rxNJ+vHHH/XDDz9oyZIlGjlypGt48ZNOQ0NDZbPZlJqaWmIepQ0rrihgXXjuQ/HxVc3X11cjRozQX/7yF509e1ZLly5VXl6eRo8e7dauQ4cOWr58uYwx2rlzp5YsW
aKZM2cqKChIU6dOLXcZPj4+io+PL3N8o0aNlJKSUmL48ePHJZXfD8HBwa7zQE6cOOHauzJgwADt3bvXFTrKmr+Pj49CQ0PLrb+4sLAw2Ww2ff31164Qc6GiYUXLLmv7KC2cAVbFoR/gEk2bNk0Oh0Njx45VQUHBJc2rKLwU/xJ644033J4HBwera9euWrlypdvNy7KysvTBBx9cdDn9+/fXzz//rEaNGik+Pr7EoyJfZMX/9++J0aNHKzc3V8uWLdOSJUuUkJCgNm3alNrWZrOpY8eO+tOf/qQGDRpo+/btXtdY3M0336zdu3eXmNfbb78tm82mG2+80aP5hIeHa9SoURo2bJj27dunnJwcxcbGqlmzZlq6dKmMMa622dnZeu+991xXAnmjf//+Msbo2LFjpb5uHTp0kPTLlU2BgYH63//9X7fpN23adEmH64DqwB4V4BL16NFDr7/+un7/+9/rmmuu0e9+9ztdffXV8vHxUUpKit577z1J8uiGa23atFGrVq00depUGWPUsGFDffDBB/r8889LtH3uued02223qXfv3poyZYocDodeeuklBQcHKy0trdzlTJw4Ue+9955uuOEGTZo0SXFxcXI6nUpKStJnn32mKVOmqFu3bl71Q9GX5CuvvKKRI0fKz89PsbGxbue2lLa+CQkJmjVrlpKTk7Vo0SK38R9++KEWLFig22+/XS1btpQxRitXrtTZs2fVu3dvr+orzaRJk/T222+rX79+mjlzpqKjo/XRRx9pwYIFGjdunK666qoyp+3WrZv69++vuLg4hYaGas+ePXrnnXfcAsicOXM0fPhw9e/fXw8//LDy8vI0d+5cnT17VrNnz/a63h49euh3v/udRo8era1bt+qGG25QcHCwUlJStGHDBnXo0EHjxo1TaGioHn/8cT3//PMaM2aM7rrrLiUnJ2vGjBkc+kHNU40n8gK/Kt9//70ZPXq0iYmJMQEBASYwMNC0bt3a3H///eaLL75wa1t0RUppdu/ebXr37m1CQkJMaGioueuuu0xSUlKJq06MMeY///mPiYuLM/7+/qZFixZm9uzZritILlT8qh9jjDl37px5+umnTWxsrPH39zd2u9106NDBTJo0yaSmprraSTKPPPJIiTpLm+e0adNMZGSk8fHxMZLM2rVry+80Y8yiRYuMJBMUFGQyMjLcxu3du9cMGzbMtGrVygQFBRm73W66du1qlixZctH5ltfHFzpy5Ii59957TaNGjYyfn5+JjY01c+fOdbtSx5iSV/1MnTrVxMfHm9DQUBMQEGBatmxpJk2aZE6fPu023fvvv2+6detmAgMDTXBwsLn55pvNxo0b3doUvWanTp1yG/7WW2+VeiXV4sWLTbdu3UxwcLAJCgoyrVq1Mvfff7/ZunWrq43T6TSzZs0yzZs3N/7+/iYuLs588MEHpmfPnlz1gxrFZswF+yQBAAAshHNUAACAZRFUAACAZRFUAACAZRFUAACAZRFUAACAZRFUAACAZdXoG745nU4dP35cISEhpd6OHAAAWI8xRllZWYqMjHT70c7S1Oigcvz4cTVv3ry6ywAAABWQnJysqKioctvU6KBSdGvu5ORkj25PDgAAql9mZqaaN29e7k9sFKnRQaXocE/9+vUJKgAA1DCenLbBybQAAMCyCCoAAMCyCCoAAMCyCCoAAMCyCCoAAMCyCCoAAMCyCCoAAMCyCCoAAMCyCCoAAMCyCCoAAMCyqjWorF+/XgMGDFBkZKRsNpvef//96iwHAABYTLX+1k92drY6duyo0aNHa8iQIdVZiuXsSErXodPZigkLVucWoVU+jx1J6Vq376QkqVdsE4+WWdo0RcsscDjl5+ujmLBg/XQiSz8kn1VKRq4Sj2Wo0OFU5xahGtAx0tWuwOHU8bPnlZqR67aMCHugWz0XrtM/vzmi9T+dUh0fmxoE+yuvwKGWYcGKbVpf+09kKbSuv8JCArT1cJoOnDinPIdTYfUCNK5XK0nS25sP63y+Q307NJUkfbXvpIL8fGUP8tN3R9KVdb5QRlKAr02Bfj6q619HOfkOncsrlDFSoL+PIuoHKrpRsA6fztapc3nysUkFDqN8h1M2SfUC6qhzi1ClZuRq/8ksOZ2/TFfXz1fnC5zKLXRIRvKv4yN/X5vq+tdRoJ+vJCnzfIHyHE4F+PqofpCfcgsdOp/vUF1/X4UE+ulcXqGcTqMgf185jZSdV6jzBQ5JUqHTyDiN6vjaVD/QT7JJNkntmtZX76sjtP3If/sxLCRA+09kKTn9vDJy8tWgrr/aNq2vrNwChdb118msPH17ME25hQ6FBPjqlnYR6tE6TDFhwZLker2Pnz0vSTqafl47ktJ1Pt+h0GB/Bfv7KiffobScfLcaPt99QgdPnVOjYH+1bFxPknTmXJ58fGwqcBgdOZOtQD9ft1quiQ6Vn6+PNh44rX2pmYqoH6hHbrrS4+3Vk/fDhe0kuW3jxZ8Xn09pyyjvvXWxmoqPr+jnQll1ebNs1E5W2A5sxhhTLUsuxmazadWqVbr99ts9niYzM1N2u10ZGRm/qh8lnP3xHi386qDr+dieLTW1b9sqm0fxtp4ss7RpOjW36/vkDK/q9NTYni0lqcQyAcn77bWs9qVt154ut7RlSCW32aJpLlZT8fHF31+efi54UtfFll2RzyDUfFW5HXjz/V2jzlHJy8tTZmam2+PXZkdSeokPtoVfHdSOpPQqmUdpbS+2zLKmqaqQUlQPIQVl8XZ7La19Wdu1J8staxllvbdWbEkqt6bS5lf8/eXJ54KndV1s2d5+BqHms9J2UKOCyqxZs2S3212P5s2bV3dJle7Q6Wyvhl/qPMqbb2XUAlwu3m6vxYdXdLs+dDrb62l/SD5bbk2ezu9i7SryucH7HpK1toMaFVSmTZumjIwM1yM5Obm6S6p0RcfFPR1+qfMob76VUQtwuXi7vRYfXtHtOiYs2OtpOzZvUG5Nns7vYu0q8rnB+x6StbaDGhVUAgICVL9+fbfHr03nFqGuY8hFxvVs6dVJTN7Mo7S2F1tmWdN0am73uEZvjevZstRlApL322tp7cvarj1ZblnLKOu9NfQ3LcqtqbT5FX9/efK54GldF1u2t59BqPmstB1wMq1FcdXPL7jqh6t+uOqHq35QfapqO/Dm+7tag8q5c+d04MABSVLnzp01f/583XjjjWrYsKFatGhx0el/zUEFAIBfK2++v6v1Pipbt27VjTfe6Ho+efJkSdLIkSO1ZMmSaqoKAABYRbUGlV69eskiR54AAIAF1aiTaQEAQO1CUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZF
UAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZV4aBy4MABffrppzp//rwkyRhTaUUBAABIFQgqZ86c0S233KKrrrpKv/3tb5WSkiJJGjNmjKZMmVLpBQIAgNrL66AyadIk1alTR0lJSapbt65r+NChQ/XJJ59UanEAAKB2q+PtBJ999pk+/fRTRUVFuQ2/8sordeTIkUorDAAAwOs9KtnZ2W57UoqcPn1aAQEBlVIUAACAVIGgcsMNN+jtt992PbfZbHI6nZo7d65uvPHGSi0OAADUbl4f+pk7d6569eqlrVu3Kj8/X0888YR27dqltLQ0bdy4sSpqBAAAtZTXe1TatWunnTt3qmvXrurdu7eys7M1ePBg7dixQ61ataqKGgEAQC1lMzX4BiiZmZmy2+3KyMhQ/fr1q7scAADgAW++v70+9LNz585Sh9tsNgUGBqpFixacVAsAACqF10GlU6dOstlskv57N9qi55Lk5+enoUOH6o033lBgYGAllQkAAGojr89RWbVqla688kotWrRIP/zwg77//nstWrRIsbGxWrp0qd588019+eWXevrpp6uiXgAAUIt4vUflhRde0CuvvKJbb73VNSwuLk5RUVF65pln9N133yk4OFhTpkzRvHnzKrVYAABQu3i9RyUxMVHR0dElhkdHRysxMVHSL4eHin4DCAAAoKK8Dipt2rTR7NmzlZ+f7xpWUFCg2bNnq02bNpKkY8eOKTw8vPKqBAAAtZLXh35ef/11DRw4UFFRUYqLi5PNZtPOnTvlcDj04YcfSpIOHjyo8ePHV3qxAACgdqnQfVTOnTunf/7zn/rpp59kjFGbNm107733KiQkpCpqLBP3UQEAoOap0vuoSFK9evU0duzYChUHAADgqQoFFUnavXu3kpKS3M5VkaSBAwdeclEAAABSBYLKwYMHdccddygxMVE2m63ETd8cDkflVggAAGotr6/6eeyxxxQTE6MTJ06obt262rVrl9avX6/4+HitW7euCkoEAAC1ldd7VDZv3qwvv/xSjRs3lo+Pj3x8fHTddddp1qxZevTRR7Vjx46qqBMAANRCXu9RcTgcqlevniQpLCxMx48fl/TLDd/27dtXudUBAIBazes9Ku3bt9fOnTvVsmVLdevWTXPmzJG/v78WLVqkli1bVkWNAACglvI6qDz99NPKzs6WJD3//PPq37+/rr/+ejVq1EgrVqyo9AIBAEDtVaEbvhWXlpam0NBQ15U/lws3fAMAoOap8hu+FdewYcPKmA0AAIAbr4NKbm6uXn31Va1du1YnT56U0+l0G799+/ZKKw4AANRuXgeVBx54QJ9//rnuvPNOde3a9bIf7gEAALWH10Hlo48+0urVq9WjR4+qqAcAAMDF6/uoNGvW7LL/SjIAAKidvA4qL7/8sp588kkdOXKkKuoBAABw8frQT3x8vHJzc9WyZUvVrVtXfn5+buPT0tIqrTgAAFC7eR1Uhg0bpmPHjunFF19UeHg4J9MCAIAq43VQ2bRpkzZv3qyOHTtWRT0AAAAuXp+j0qZNG50/f74qagEAAHDjdVCZPXu2pkyZonXr1unMmTPKzMx0ewAAAFQWr3/rx8fnl2xT/NwUY4xsNpscDkflVXcR/NYPAAA1T5X+1s/atWsrXBgAAIA3vA4qPXv2rIo6AAAASvA4qOzcudOjdnFxcRUuBgAA4EIeB5VOnTrJZrOpvFNaLvc5KgAA4NfN46By6NChqqwDAACgBI+DSnR0dFXWAQAAUILX91EBAAC4XAgqAADAsggqAADAsggqAADAsggqAADAsjy66qdz584lftunLNu3b7+kggAAAIp4FFRuv/1219+5ublasGCB2rVrp4SEBEnSN998o127dmn8+PFVUiQAAKidPAoq06dPd/09ZswYPfroo3ruuedKtElOTq7c6gAAQK1mM+XdE78UdrtdW7du1ZVXXuk2fP/+/YqPj1dGRkalFlgeb34mGgAAWIM3399en0wbFBSkDRs2lBi+YcMGBQYGejs7AACAMnl8C/0iEydO1Lhx47Rt2zZde+21kn45R2Xx4sX64x//WOkFAgCA2svroDJ16lS1bNlSr7zyipYuXSpJatu2rZYsWaK777670gsEAAC1l9fnqFgJ56gAAFDzVOk5KpJ09uxZ/f3vf9dTTz2ltLQ0Sb/cP+XYsWMVmR0AAECpvD70s3PnTt1yyy2y2+06fPiwxowZo4YNG2rVqlU6cuSI3n777aqoEwAA1EJe71GZPHmyRo0apf3797td5dO3b1+tX7++UosDAAC1m9dBZcuWLXr44YdLDG/WrJlSU1MrpSgAAACpAkElMDBQmZmZJYbv27dPjRs3rpSiAAAApAoElUGDBmnmzJkqKCiQJNlsNiUlJWnq1KkaMmRIpRcIAABqL6+Dyrx583Tq1Ck1adJE58+fV8+ePdW6dWuFhITohRdeqIoaAQBALeX1VT/169fXhg0b9OWXX2r79u1yOp265pprdMstt1RFfQAAoBbjhm8AAOCy8ub72+s9KpL0xRdf6IsvvtDJkyfldDrdxi1evLgiswQAACjB66Dy7LPPaubMmYqPj1fTpk1ls9mqoi4AAADvg8rChQu1ZMkSjRgxoirqAQAAcPH6qp/8/Hx17969KmoBAABw43VQGTNmjJYuXVoVtQAAALjx+tBPbm6uFi1apDVr1iguLk5+fn5u4+fPn19pxQEAgNqtQr+e3KlTJ0nSjz/+6DaOE2sBAEBl8jqorF27tirqAAAAKMHrc1QudPToUR07dqyyagEAAHDjdVBxOp2aOXOm7Ha7oqOj1aJFCzVo0EDPPfdciZu/AQAAXAqvD/38z//8j958803Nnj1bPXr0kDFGGzdu1IwZM5Sbm8sPEwIAgErj9W/9REZGauHChRo4cKDb8H//+98aP378ZT0UxG/9AABQ83jz/e31oZ+0tDS1adOmxPA2bdooLS3N29kBAACUyeug0rFjR7322mslhr/
22mvq2LFjpRQFAAAgVeAclTlz5qhfv35as2aNEhISZLPZtGnTJiUnJ2v16tVVUSMAAKilvN6j0rNnT/3000+64447dPbsWaWlpWnw4MHat2+frr/++qqoEQAA1FJen0xrJZxMCwBAzVOlJ9N+8skn2rBhg+v566+/rk6dOunee+9Venq699UCAACUweug8oc//EGZmZmSpMTERE2ePFm//e1vdfDgQU2ePLnSCwQAALWX1yfTHjp0SO3atZMkvffeexowYIBefPFFbd++Xb/97W8rvUAAAFB7eb1Hxd/fXzk5OZKkNWvWqE+fPpKkhg0buva0AAAAVAav96hcd911mjx5snr06KHvvvtOK1askCT99NNPioqKqvQCAQBA7eX1HpXXXntNderU0b/+9S/99a9/VbNmzSRJH3/8sW677bZKLxAAANReXJ4MAAAuK2++vz069JOZmema0cXOQyEwAACAyuJRUAkNDVVKSoqaNGmiBg0ayGazlWhjjJHNZpPD4aj0IgEAQO3kUVD58ssv1bBhQ0nS2rVrq7QgAACAIpyjAgAALqtKP0eluLNnz+rNN9/Unj17ZLPZ1K5dOz3wwAOy2+0VKhgAAKA0Xl+evHXrVrVq1Up/+tOflJaWptOnT2v+/Plq1aqVtm/fXhU1AgCAWsrrQz/XX3+9Wrdurb/97W+qU+eXHTKFhYUaM2aMDh48qPXr11dJoaXh0A8AADWPN9/fXgeVoKAg7dixQ23atHEbvnv3bsXHx7tur385EFQAAKh5vPn+9vrQT/369ZWUlFRieHJyskJCQrydHQAAQJm8DipDhw7Vgw8+qBUrVig5OVlHjx7V8uXLNWbMGA0bNqwqagQAALWU11f9zJs3TzabTffff78KCwslSX5+fho3bpxmz55d6QUCAIDaq8L3UcnJydHPP/8sY4xat26tunXrVnZtF8U5KgAA1DxVco5KTk6OHnnkETVr1kxNmjTRmDFj1LRpU8XFxVVLSAEAAL9+HgeV6dOna8mSJerXr5/uueceff755xo3blxV1gYAAGo5j89RWblypd58803dc889kqT77rtPPXr0kMPhkK+vb5UVCAAAai+P96gkJyfr+uuvdz3v2rWr6tSpo+PHj1dJYQAAAB4HFYfDIX9/f7dhderUcV35AwAAUNk8PvRjjNGoUaMUEBDgGpabm6uxY8cqODjYNWzlypWVWyEAAKi1PA4qI0eOLDHsvvvuq9RiAAAALuRxUHnrrbeqsg4AAIASvL6FPgAAwOVCUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZFUAEAAJZV7UFlwYIFiomJUWBgoLp06aKvv/66uksCAAAWUac6F75ixQpNnDhRCxYsUI8ePfTGG2+ob9++2r17t1q0aFGdpWlHUroOnc5WTFiwOrcI9bpdedMXH7cjKV2vrz2gg6fOqWVYsB656UpJ0rp9JyVJkQ2CtP1IutJz8nVleIhOZ+Xpx2MZCg6oo9/ENCz1+aHT2crKK1RGTr7yCp0KqOOjZg2C1LJxPUXYA1XoNErPzldosL/2pWbpq30nVeCUfG1Sk5AAySYVFDrl5+uj/EKnzuUVysfHpisa1VW9QD8d/r/55xU4ZUrpl/oBvooMratDp7JV4HTKWVqjy8wmuWr1tUk+Npuc5pchjnLq85Hk52tTgcPIXDAPSarr56MCp1MFDvf2vr42+dokf18fZRc45HD+t72vzy/L9bXZ5JTk83+1ZOUWuurwtUkhgXXU1B6ouKgGkqQz5/KUkpErIynSHqhG9QJ08HS2jqXnyL+Or1o3DpYkZeYWKiuvUKez8mSMUeOQAIUE+ql+YB1l5BbqzLk8BdTxUUign4wxOpp+XoVOo/bN7GoSEqAfj2UoLSdfQX6+6tuhqf5waxut2JKkH5LPKq/QqazcAoXW9dc9XVu4tt9Dp7NV4Phle7lwuy7ahnvFNinxPiiaZ8fmv6zfF3tOyOk0urqZXZENgnT87Hm3aS9sf1V4SKnLLK6095qn7+uyavd0HherxSrKev08maa8tpXdT1XVf1Z9XfBfNmNMtX2FdOvWTddcc43++te/uoa1bdtWt99+u2bNmnXR6TMzM2W325WRkaH69etXWl2zP96jhV8ddD0f27OlpvZt63G78qYvPq5Tc7u+T86otNqByuZrKzvIlbX9ljb8wvfB7a9v8Gq7bxzir1NZ+eW2Kf4+vdh7zdP39YVtPf1suNg8PZ2uqpW2rlL59XmyLpXdT1XVf1Z9XWoDb76/q+3QT35+vrZt26Y+ffq4De/Tp482bdpU6jR5eXnKzMx0e1S2HUnpJd64C786qB1J6R61W7ElqczpS5uGkAKrK29vU1nbb2nDi94HK7Ykeb3dXyykXDh/qfT3Z/Flevq+Lmpb3nu7PJ5+plxuZa2rVHZ9nqxLRde3Ip+pl8KqrwtKqragcvr0aTkcDoWHh7sNDw8PV2pqaqnTzJo1S3a73fVo3rx5pdd16HS2R8PLavdD8tkypy9rGqC2OHQ6u8z3SGXN/8J/PW1f1vMLlffe9mYZnk5X1SpStyfrUtH1rchn6qWw6uuCkqr9ZFqbzeb23BhTYliRadOmKSMjw/VITk6u9HpiwoI9Gl5Wu6Lj7aVNX9Y0QG0RExZc5nuksuZ/4b+eti/r+YXKe297swxPp6tqFanbk3Wp6PpW5DP1Ulj1dUFJ1RZUwsLC5OvrW2LvycmTJ0vsZSkSEBCg+v
Xruz0qW+cWoRrbs6XbsHE9W5Y4yaqsdkN/06LM6UubplNzeyVWD1Q+39L/3yCp7O23tOFF74Ohv2nh9XbfJMT/om0ufJ968l7z9H1d1La893Z5PP1MudzKWlep7Po8WZeKrm9FPlMvhVVfF5RU7SfTdunSRQsWLHANa9eunQYNGlStJ9NKXPXDVT//xVU/XPXDVT+lT8NVP6gob76/qzWorFixQiNGjNDChQuVkJCgRYsW6W9/+5t27dql6Ojoi05flUEFAABUDW++v6v1PipDhw7VmTNnNHPmTKWkpKh9+/ZavXq1RyEFAAD8+lXrHpVLxR4VAABqnhpxHxUAAICLIagAAADLIqgAAADLIqgAAADLIqgAAADLIqgAAADLIqgAAADLIqgAAADLIqgAAADLqtZb6F+qopvqZmZmVnMlAADAU0Xf257cHL9GB5WsrCxJUvPmzau5EgAA4K2srCzZ7fZy29To3/pxOp06fvy4QkJCZLPZqrucKpeZmanmzZsrOTmZ3za6TOjz6kG/X370+eVXm/vcGKOsrCxFRkbKx6f8s1Bq9B4VHx8fRUVFVXcZl139+vVr3UZd3ejz6kG/X370+eVXW/v8YntSinAyLQAAsCyCCgAAsCyCSg0SEBCg6dOnKyAgoLpLqTXo8+pBv19+9PnlR597pkafTAsAAH7d2KMCAAAsi6ACAAAsi6ACAAAsi6ACAAAsi6BiQVdccYVsNpvbY+rUqW5tkpKSNGDAAAUHByssLEyPPvqo8vPz3dokJiaqZ8+eCgoKUrNmzTRz5kyPflcB/7VgwQLFxMQoMDBQXbp00ddff13dJdVIM2bMKLFNR0REuMYbYzRjxgxFRkYqKChIvXr10q5du9zmkZeXp9///vcKCwtTcHCwBg4cqKNHj17uVbG09evXa8CAAYqMjJTNZtP777/vNr6y+jk9PV0jRoyQ3W6X3W7XiBEjdPbs2SpeO2u6WJ+PGjWqxLZ/7bXXurWhz8tHULGomTNnKiUlxfV4+umnXeMcDof69eun7OxsbdiwQcuXL9d7772nKVOmuNpkZmaqd+/eioyM1JYtW/Tqq69q3rx5mj9/fnWsTo20YsUKTZw4Uf/zP/+jHTt26Prrr1ffvn2VlJRU3aXVSFdffbXbNp2YmOgaN2fOHM2fP1+vvfaatmzZooiICPXu3dv1e16SNHHiRK1atUrLly/Xhg0bdO7cOfXv318Oh6M6VseSsrOz1bFjR7322muljq+sfr733nv1/fff65NPPtEnn3yi77//XiNGjKjy9bOii/W5JN12221u2/7q1avdxtPnF2FgOdHR0eZPf/pTmeNXr15tfHx8zLFjx1zDli1bZgICAkxGRoYxxpgFCxYYu91ucnNzXW1mzZplIiMjjdPprLLaf026du1qxo4d6zasTZs2ZurUqdVUUc01ffp007Fjx1LHOZ1OExERYWbPnu0alpuba+x2u1m4cKExxpizZ88aPz8/s3z5clebY8eOGR8fH/PJJ59Uae01lSSzatUq1/PK6ufdu3cbSeabb75xtdm8ebORZPbu3VvFa2VtxfvcGGNGjhxpBg0aVOY09PnFsUfFol566SU1atRInTp10gsvvOB2WGfz5s1q3769IiMjXcNuvfVW5eXladu2ba42PXv2dLuR0K233qrjx4/r8OHDl209aqr8/Hxt27ZNffr0cRvep08fbdq0qZqqqtn279+vyMhIxcTE6J577tHBgwclSYcOHVJqaqpbXwcEBKhnz56uvt62bZsKCgrc2kRGRqp9+/a8Hh6qrH7evHmz7Ha7unXr5mpz7bXXym6381qUYd26dWrSpImuuuoqPfTQQzp58qRrHH1+cTX6Rwl/rR577DFdc801Cg0N1Xfffadp06bp0KFD+vvf/y5JSk1NVXh4uNs0oaGh8vf3V2pqqqvNFVdc4damaJrU1FTFxMRU/YrUYKdPn5bD4SjRz+Hh4a4+hue6deumt99+W1dddZVOnDih559/Xt27d9euXbtc/VlaXx85ckTSL9usv7+/QkNDS7Th9fBMZfVzamqqmjRpUmL+TZo04bUoRd++fXXXXXcpOjpahw4d0jPPPKObbrpJ27ZtU0BAAH3uAYLKZTJjxgw9++yz5bbZsmWL4uPjNWnSJNewuLg4hYaG6s4773TtZZEkm81WYnpjjNvw4m3M/51IW9q0KF1pfUj/ea9v376uvzt06KCEhAS1atVK//jHP1wnFlakr3k9vFcZ/ezJ5w9+MXToUNff7du3V3x8vKKjo/XRRx9p8ODBZU5Hn/8Xh34ukwkTJmjPnj3lPtq3b1/qtEUf5AcOHJAkRURElEjR6enpKigocP1vqbQ2Rbsbi/+PCiWFhYXJ19e31D6k/y5dcHCwOnTooP3797uu/imvryMiIpSfn6/09PQy26B8ldXPEREROnHiRIn5nzp1itfCA02bNlV0dLT2798viT73BEHlMgkLC1ObNm3KfQQGBpY67Y4dOyT9soFLUkJCgn788UelpKS42nz22WcKCAhQly5dXG3Wr1/vdm7LZ599psjIyBKHhFCSv7+/unTpos8//9xt+Oeff67u3btXU1W/Hnl5edqzZ4+aNm2qmJgYRUREuPV1fn6+vvrqK1dfd+nSRX5+fm5tUlJS9OOPP/J6eKiy+jkhIUEZGRn67rvvXG2+/fZbZWRk8Fp44MyZM0pOTnZ9ntPnHqius3hRuk2bNpn58+ebHTt2mIMHD5oVK1aYyMhIM3DgQFebwsJC0759e3PzzTeb7du3mzVr1pioqCgzYcIEV5uzZ8+a8PBwM2zYMJOYmGhWrlxp6tevb+bNm1cdq1UjLV++3Pj5+Zk333zT7N6920ycONEEBwebw4cPV3dpNc6UKVPMunXrzMGDB80333xj+vfvb0JCQlx9OXv2bGO3283KlStNYmKiGTZsmGnatKnJzMx0zWPs2LEmKirKrFmzxmzfvt3cdNNNpmPHjqawsLC6VstysrKyzI4dO8yOHTuMJNdnyZEjR4wxldfPt912m4mLizObN282mzdvNh06dDD9+/e/7OtrBeX1eVZWlpkyZYrZtGmTOXTokFm7dq1JSEgwzZo1o8+9QFCxmG3btplu3boZu91uAgMDTWxsrJk+fbrJzs52a3fkyBHTr18/ExQUZBo2bGgmTJjgdimyMcbs3LnTXH/99SYgIMBERESYGTNmcGmyl15//XUTHR1t/P39zTXXXGO++uqr6i6pRho6dKhp2rSp8fPzM5GRkWbw4MFm165drvFOp9NMnz7dREREmICAAHPDDTeYxMREt3mcP3/eTJgwwTRs2NAEBQWZ/v37m6SkpMu9Kpa2du1aI6nEY+TIkcaYyuvnM2fOmOHDh5uQkBATEhJihg8fbtLT0y/TWlpLeX2ek5Nj+vTpYxo3bmz8/PxMixYtzMiRI0v0J31ePpsx3KoUAABYE+eoAAAAyyKoAAAAyyKoAAAAyyKoAAAAyyKoAAAAyyKoAAAAyyKoAAAAyyKoALC0UaNG6fbbb3c979WrlyZOnFht9QC4v
AgqALySmpqqxx57TK1bt1ZgYKDCw8N13XXXaeHChcrJyany5a9cuVLPPfdcpc6zeBgCYB11qrsAADXHwYMH1aNHDzVo0EAvvviiOnTooMLCQv30009avHixIiMjNXDgwBLTFRQUyM/Pr1JqaNiwYaXMB0DNwB4VAB4bP3686tSpo61bt+ruu+9W27Zt1aFDBw0ZMkQfffSRBgwYIEmy2WxauHChBg0apODgYD3//PNyOBx68MEHFRMTo6CgIMXGxuqVV15xm7/D4dDkyZPVoEEDNWrUSE888YSK/8pH8UM/+fn5euKJJ9SsWTMFBwerW7duWrdunWv8kiVL1KBBA3366adq27at6tWrp9tuu8316+MzZszQP/7xD/373/+WzWaTzWZzmx5A9SKoAPDImTNn9Nlnn+mRRx5RcHBwqW1sNpvr7+nTp2vQoEFKTEzUAw88IKfTqaioKL377rvavXu3/vjHP+qpp57Su+++65rm5Zdf1uLFi/Xmm29qw4YNSktL06pVq8qta/To0dq4caOWL1+unTt36q677tJtt92m/fv3u9rk5ORo3rx5euedd7R+/XolJSXp8ccflyQ9/vjjuvvuu13hJSUlRd27d7+UrgJQiTj0A8AjBw4ckDFGsbGxbsPDwsKUm5srSXrkkUf00ksvSZLuvfdePfDAA25tn332WdffMTEx2rRpk959913dfffdkqQ///nPmjZtmoYMGSJJWrhwoT799NMya/r555+1bNkyHT16VJGRkZJ+CR6ffPKJ3nrrLb344ouSfjn0tHDhQrVq1UqSNGHCBM2cOVOSVK9ePQUFBSkvL08REREV6xwAVYagAsArF+41kaTvvvtOTqdTw4cPV15enmt4fHx8iWkXLlyov//97zpy5IjOnz+v/Px8derUSZKUkZGhlJQUJSQkuNrXqVNH8fHxJQ7/FNm+fbuMMbrqqqvchufl5alRo0au53Xr1nWFFElq2rSpTp486flKA6g2BBUAHmndurVsNpv27t3rNrxly5aSpKCgILfhxQ8Pvfvuu5o0aZJefvllJSQkKCQkRHPnztW3335b4ZqcTqd8fX21bds2+fr6uo2rV6+e6+/iJ/LabLYyww8Aa+EcFQAeadSokXr37q3XXntN2dnZXk//9ddfq3v37ho/frw6d+6s1q1b6+eff3aNt9vtatq0qb755hvXsMLCQm3btq3MeXbu3FkOh0MnT55U69at3R7eHMbx9/eXw+Hwep0AVD2CCgCPLViwQIWFhYqPj9eKFSu0Z88e7du3T//85z+1d+/eEns1LtS6dWtt3bpVn376qX766Sc988wz2rJli1ubxx57TLNnz9aqVau0d+9ejR8/XmfPni1znldddZWGDx+u+++/XytXrtShQ4e0ZcsWvfTSS1q9erXH63XFFVdo586d2rdvn06fPq2CggKPpwVQtQgqADzWqlUr7dixQ7fccoumTZumjh07Kj4+Xq+++qoef/zxcm/ENnbsWA0ePFhDhw5Vt27ddObMGY0fP96tzZQpU3T//fdr1KhRrsNDd9xxR7k1vfXWW7r//vs1ZcoUxcbGauDAgfr222/VvHlzj9froYceUmxsrOLj49W4cWNt3LjR42kBVC2b4UAtAACwKPaoAAAAyyKoAAAAyyKoAAAAyyKoAAAAyyKoAAAAyyKoAAAAyyKoAAAAyyKoAAAAyyKoAAAAyyKoAAAAyyKoAAAAyyKoAAAAy/r/teCG8JRYf3UAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Number of poisoned images: 300 out of 10000.\n", + "last index of poison 990\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAj8AAAHFCAYAAAAdTZjVAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAABfQUlEQVR4nO3dd1gUV9sG8HtBWDoICAuKgKLYwBoVTRQ1KFhi1KhRY0+MXSxRiVHAGLDHJLYk9rzWxJJEDUhUbFjAXrCj2BBjAQVFyvn+8NsJywKyZFfAvX/XtZfumXNmnjk7O/sw7ciEEAJEREREesKgpAMgIiIiepOY/BAREZFeYfJDREREeoXJDxEREekVJj9ERESkV5j8EBERkV5h8kNERER6hckPERER6RUmP0RERKRXynzyc+bMGQwePBhVq1aFqakpTE1NUa1aNXz++eeIi4t7Y3GEhIRAJpOplLm5uWHAgAE6XW5MTAxCQkLw5MkTrc9748aNqF27NkxNTSGTyXDq1Kl860VHR0Mmk0kvQ0NDODo6onv37oiPj9d4uQMGDICbm9t/C74UWLVqFWQyGW7cuFHSoUiKuk0qP8uZM2eqTVOu15v8fuXm6+ursr2Zmpqibt26WLBgAXJycjSa140bNyCTybBq1SrdBPsGafLZjhw5UvcBlXJubm7o2LFjSYcB4N996G+//VbseZTUfvPJkyewt7fHhg0bpLLbt28jMDAQLVu2hI2NjcbfsaK2z8zMRNWqVbFgwQKN4y7Tyc+PP/6Ihg0b4ujRoxgzZgy2b9+OHTt2IDAwEOfPn8c777yDa9eulVh8W7duxdSpU3W6jJiYGISGhmo9+Xnw4AH69u2LqlWrIiIiAocPH0b16tULbRMWFobDhw9j7969mDRpEqKiotC8eXPcuXNHo2VPnToVW7du/S/hk5bMnDkTjx49Kukw1FSpUgWHDx/G4cOHsXHjRlSsWBFjx45FUFCQRvNxcnLC4cOH0aFDBx1FSvR2Cw0NhbOzM3r27CmVXb16FWvXroWxsTHat2+v8TyL2t7IyAjTpk3D9OnT8fDhQ42WUU7jqEqJQ4cOYfjw4ejQoQN+++03GBsbS9Nat26NESNG4Ndff4WpqWmh80lPT4eZmZlOYqxfv75O5vsmXL58GZmZmfjkk0/QsmXLIrWpVq0amjZtCgBo0aIFbGxsMHjwYKxatQpTpkwp8rKrVq1arJhJu95//31ER0fjm2++wbx580o6HBWmpqbStgYAAQEBqFGjBhYuXIgZM2bAyMioSPORy+Uq8yGionv06BF+/PFHfPvttypnPlq0aIEHDx4AAOLi4rB+/XqN5qtJ+169emHcuHH48ccf8eWXXxZ5GWX2yE9YWBgMDQ3x448/qiQ+uXXv3h3Ozs7S+wEDBsDCwgJnz55F27ZtYWlpiTZt2gAAoqKi0LlzZ1SqVAkmJibw8PDA559/jn/++Udtvjt27EC9evUgl8vh7u6OuXPn5rv8/A5Dp6amYsKECXB3d4exsTEqVqyIwMBApKWlqdRTHpr+5ZdfULNmTZiZmaFu3brYvn27VCckJARffPEFAMDd3V06DRAdHV1o3/3xxx/w8fGBmZkZLC0t4efnh8OHD6v007vvvgsA6NmzJ2QyGXx9fQudZ36UPyo3b94EAOTk5GD27NmoUaMG5HI5HBwc0K9fP9y+fVulXX6Hb3/99Vc0adIE1tbWMDMzQ5UqVTBo0CCVOomJifjkk0/g4OAAuVyOmjVrYt68eSqnQpSnOebOnYv58+fD3d0dFhYW8PHxwZEjR9TWIS4uDh988AFsbW1hYmKC+vXrY9OmTWr1jhw5gubNm8PExATOzs4ICgpCZmZmkfopLi4OH3/8Mdzc3GBqago3Nzf06tVL6jcl5emmvXv3YtiwYbC3t4ednR26du2Ku3fvqtTNzMzExIkToVAoYGZmhnfffRfHjh0rUjxKnp6eGDx4MBYtWqQWS35et10B/54ePn/+PHr16gVra2s4Ojpi0KBBSElJ0Si+3IyMjNCwYUOkp6dLO81z586hc+fOKF++PExMTFCvXj2sXr1apV1+p70ePHiAIUOGwMXFBXK5HBUqVEDz5s3x999/q7RdsWIF6tatCxMTE9ja2qJLly5qp3mV+5yrV6+iffv2sLCwgIuLC8aPH4+MjAyVui9fvsSMGTOk70eFChUwcOBAaX2UtPHZ5qY85bJu3TpMmjQJTk5OsLCwQKdOnXD//n08ffoUQ4YMgb29Pezt7TFw4EA8e/ZMZR6LFi1CixYt4ODgAHNzc3h5eWH27Nlq3wEhBMLCwuDq6goTExM0atQIUVFR8PX1VdvHFHVfWZR9Q3EV9XdBuV2fOXMG3bt3h7W1NWxtbTFu3DhkZWXh0qVL8Pf3h6WlJdzc3DB79ux8l/fixQuMGzcOCoUCpqamaNmyJU6ePKlWb9WqVfD09JT2c2vWrMl3fqGhoWjSpAlsbW1hZWWFBg0aYPny5dDWeOarVq1CVlaWylEfADAw+G+phSbtjY2N0bNnT/z000+arZcog7KysoSpqanw8fHRqF3//v2FkZGRcHNzE+Hh4WL37t0iMjJSCCHEkiVLRHh4uPjjjz/Evn37xOrVq0XdunWFp6enePnypTSPv//+WxgaGop3331XbNmyRfz666/inXfeEZUrVxZ5u9PV1VX0799fep+Wlibq1asn7O3txfz588Xff/8tvvvuO2FtbS1at24tcnJypLoAhJubm2jcuLHYtGmT2Llzp/D19RXlypUT165dE0IIcevWLTFq1CgBQGzZskUcPnxYHD58WKSkpBTYB2vXrhUARNu2bcW2bdvExo0bRcOGDYWxsbE4cOCAEEKIq1evikWLFgkAIiwsTBw+fFicP3++wHnu3btXABC//vqrSvnvv/8uAIgvv/xSCCHEkCFDBAAxcuRIERERIZYuXSoqVKggXFxcxIMHD1Q+J1dXV+l9TEyMkMlk4uOPPxY7d+4Ue/bsEStXrhR9+/aV6iQnJ4uKFSuKChUqiKVLl4qIiAgxcuRIAUAMGzZMqpeQkCD1rb+/v9i2bZvYtm2b8PLyEuXLlxdPnjyR6u7Zs0cYGxuL9957T2zcuFFERESIAQMGCABi5cqVUr3z588LMzMzUatWLbF+/Xrx+++/i3bt2knbREJCQoF9J4QQv/76q5g2bZrYunWr2Ldvn9iwYYNo2bKlqFChgkq/rFy5UgAQVapUEaNGjRKRkZFi2bJlonz58qJVq1Yq8+zfv7+QyWTiiy++ELt27RLz588XFStWFFZWVi
rbZEEAiBEjRoh79+4JMzMzlb5WxhEbGyuVFWW7EkKI4OBgAUB4enqKadOmiaioKDF//nwhl8vFwIEDXxuXEEK0bNlS1K5dW628QYMGoly5ciI9PV1cvHhRWFpaiqpVq4o1a9aIHTt2iF69egkAYtasWVIb5faQ+/Ns166dqFChgvjpp59EdHS02LZtm5g2bZrYsGGDVCcsLEwAEL169RI7duwQa9asEVWqVBHW1tbi8uXLUr3+/fsLY2NjUbNmTTF37lzx999/i2nTpgmZTCZCQ0OletnZ2cLf31+Ym5uL0NBQERUVJZYtWyYqVqwoatWqJdLT01XmqY3PVkn5/XV1dRUDBgyQvpsWFhaiVatWws/PT0yYMEHs2rVLzJo1SxgaGopRo0apzHPs2LFiyZIlIiIiQuzZs0d8++23wt7eXu0zDQoKEgDEkCFDREREhPj5559F5cqVhZOTk2jZsqVUr6j7yqLsGwri6uoqOnToUGidov4u5N6uv/76axEVFSUmTpwo7e9q1Kghvv/+exEVFSUGDhwoAIjNmzerfQYuLi6ic+fO4s8//xT/+9//hIeHh7CyspL2+UL8+/3LW8/FxUVlvymEEAMGDBDLly8XUVFRIioqSnz99dfC1NRUZdsT4tX2l5mZ+dpXVlaWSrvWrVuLxo0bF9qHsbGxat8xTRSl/caNGwUAcebMmSLPt0wmP0lJSQKA+Pjjj9WmZWVlqXxYuROK/v37CwBixYoVhc4/JydHZGZmips3bwoA4vfff5emNWnSRDg7O4vnz59LZampqcLW1va1yU94eLgwMDBQ+dEQQojffvtNABA7d+6UygAIR0dHkZqaqrLeBgYGIjw8XCqbM2dOkX5ghXi1gTs7OwsvLy+RnZ0tlT99+lQ4ODiIZs2aSWUFJTT5UdbduHGjyMzMFOnp6WL//v3Cw8NDGBoaitOnT4v4+HgBQAwfPlyl7dGjR1USJCHUk5+5c+cKACqJSV6TJ08WAMTRo0dVyocNGyZkMpm4dOmSEOLfHzsvLy+VL/KxY8cEALF+/XqprEaNGqJ+/foiMzNTZZ4dO3YUTk5OUh/27NlTmJqaiqSkJKlOVlaWqFGjRpE/m9yysrLEs2fPhLm5ufjuu++kcuVOL28fzp49WwAQ9+7dE0IIqa/Hjh2rUk+ZoGj6AzllyhRhYGAgTp8+rRKHcjvWZLtS/kjMnj1bZXnDhw8XJiYmKt/XgiiTH+V3/O7du9Ln3717dyGEEB9//LGQy+UiMTFRpW1AQIAwMzOTtqX8kh8LCwsRGBhY4PIfP34sTE1NRfv27VXKExMThVwuF71795bKlPucTZs2qdRt37698PT0lN6vX79e7QdRiH93/IsXLxZCaP+zFeLf72+nTp1U6gUGBgoAYvTo0SrlH374obC1tS1w/sof0jVr1ghDQ0Px6NEjIYQQjx49EnK5XPTs2VOl/uHDhwUAleSnqPvKouwbClKU5Ce3wn4XlNv1vHnzVNrUq1dP+uNUKTMzU1SoUEF07dpVKlN+Bg0aNFD5Dty4cUMYGRmJTz/9VAjx73etoHp5k5/clJ/L9OnThZ2dXb6/ja975f6MhBDCzMxMDB06tNB+exPJz5UrVwQAsWTJkiLPt8ye9ipIw4YNYWRkJL3yu1ahW7duamXJyckYOnQoXFxcUK5cORgZGcHV1RUApEPZaWlpiI2NRdeuXWFiYiK1tbS0RKdOnV4b2/bt21GnTh3Uq1cPWVlZ0qtdu3b5nq5q1aoVLC0tpfeOjo5wcHAo0imI/Fy6dAl3795F3759VQ4rWlhYoFu3bjhy5AjS09OLNW/g1SkyIyMjmJmZoUWLFsjOzsZvv/0Gb29v7N27FwDUTgM2btwYNWvWxO7duwuc7zvvvAMA6NGjBzZt2pTvBdR79uxBrVq10LhxY5XyAQMGQAiBPXv2qJR36NABhoaG0ntvb28A/56iu3r1Ki5evIg+ffoAgMrn1b59e9y7dw+XLl0CAOzduxdt2rSBo6OjND9DQ0O1Q8EFefbsGSZNmgQPDw+UK1cO5cqVg4WFBdLS0vK9W+6DDz5QeZ83dmVfK2NX6tGjB8qV0/wyv4kTJ8LW1haTJk3Kd3pxtqv81uHFixdITk4G8OoUae4+z87OVql//vx56Tvu7OyMefPmoU+fPvj5558BvNoe2rRpAxcXF5V2AwYMQHp6utrpuNwaN26MVatWYcaMGThy5IjaqZvDhw/j+fPnatuyi4sLWrdurbYty2Qytf2Dt7e3yvd4+/btsLGxQadOnVTWu169elAoFNK+QdufbW5573yqWbMmAKhdDF6zZk08evRI5dTXyZMn8cEHH8DOzg6GhoYwMjJCv379kJ2djcuXLwN4dWo4IyMDPXr0UJlf06ZN1U5zF3VfWZR9w39RlN+F3PLrQ5lMhoCAAKmsXLly8PDwyHc/3rt3b5VrZ1xdXdGsWTPpc1d+1wqql9eePXvw/vvvw9raWvpcpk2bhocPH0rfNeDVabvY2NjXvn788UepzZMnT5Ceng4HB4fX9qOuKWPQ5PMvkxc829vbw9TUNN+NZ926dUhPT8e9e/fUdrAAYGZmBisrK5WynJwctG3bFnfv3sXUqVPh5eUFc3Nz5OTkoGnTpnj+/DkA4PHjx8jJyYFCoVCbb35led2/fx9Xr14t8GLMvOeR7ezs1OrI5XIpHk0pr4Z3cnJSm+bs7IycnBw8fvy42BeAz5o1C61bt4ahoSHs7e1Vfnhet+zCEroWLVpg27Zt+P7779GvXz9kZGSgdu3amDJlCnr16iXNP7/bPJXXfOW9EyBv38rlcgCQ+vb+/fsAgAkTJmDChAn5xqX8vB4+fFjsbQJ4tcPbvXs3pk6dinfeeQdWVlaQyWRo3759vp/162JXrmve5ZcrVy7fbep1rKys8NVXXyEwMFDaCedWnO3qdeswaNAgletzWrZsqfLHQdWqVbFhwwbIZDKYmJjA3d1dZf4PHz4sMJ7cMedn48aNmDFjBpYtW4apU6fCwsICXbp0wezZs6FQKF67vlFRUSplZmZmKn8sKdf3xYsX0vv79+/jyZMnBV6/mHtbA7T32eZma2ur8l4ZS0HlL168gIWFBRITE/Hee+/B09MT3333Hdzc3GBiYoJjx45hxIgRattl7j8SlPKWFXVfWZR9Q3EV9Xcht/z6Kr/P39jYGKmpqWrtC9qPnD59GkDBn7+yLPdjNY4dO4a2bdvC19cXP//8MypVqgRjY2Ns27YN33zzjUr8lStXRqVKlQrpjVdyJ1zK9nnXrSQoY9Dkt7FMJj+GhoZo3bo1du3ahXv37qnshGrVqgUABT5bJe+zeIBXF0aePn0aq1atQv/+/aXyq1evqtQrX748ZDIZkpKS1OaRX1leyqRtxYoVBU7XJeXO8d69e2rT7t69CwMDA5QvX77Y869SpQoaNWr02mXn/ZLdvXv3teveuXNndO7cGRkZGThy5AjCw8PRu3dvuLm5wcfHB3Z2dgWuF6B53yrrBwUFoWvXrvnW8fT0lNatuNtESkoKtm/fjuDgYEyePFkqz
8jIKPYt5sq+TkpKQsWKFaXyrKwsjW8HVRo2bBi+++47TJo0CcOGDct3edrcrkJCQlSeRZP7CCgA6WLZgvyX7cHe3h4LFizAggULkJiYiD/++AOTJ09GcnIyIiIiXru+xfkeKy9ej4iIyHe6cv118dn+V9u2bUNaWhq2bNkiHRUBoPZcMGXsyj8scktKSlL540WTfeXr9g3FVdTfBW0qaD+i7Lvcn//r2m7YsAFGRkbYvn27SoKybds2tbZ5/9goSO4/QpSxlIZHYShj0OS7V2ZPewUFBSE7OxtDhw4t8l01BVEmRMq/PpVyH+IDAHNzczRu3BhbtmxR+avt6dOn+PPPP1+7nI4dO+LatWuws7NDo0aN1F7FeUBV3r+YC+Pp6YmKFSti3bp1KlfFp6WlYfPmzdKdOrrQunVrAMD//vc/lfLY2FjEx8dLd929jlwuR8uWLTFr1iwAkO6EaNOmDS5cuIATJ06o1F+zZg1kMhlatWqlUbyenp6oVq0aTp8+ne9n1ahRI+kHqVWrVti9e7fKTj07OxsbN2587XJkMhmEEGrb3rJly9RO9RSV8q6ZtWvXqpRv2rQJWVlZxZqnsbExZsyYgdjYWPz6668q03SxXbm5uan0tTLRLKo2bdpgz549anfBrVmzBmZmZkW+vb1y5coYOXIk/Pz8pG3Lx8cHpqamatvy7du3pdNtmurYsSMePnyI7OzsfLc15frr4rP9r/LbfwohpFOQSk2aNIFcLlf7Xhw5ckTtyG9x9pUF7Ru0uV6A+u+CNq1fv17lO3Tz5k3ExMRIn7unpyecnJwKrJebTCZDuXLlVE7vP3/+HL/88ovacotz2svY2BhVqlQp0WfpKV2/fh3Avwc/iqJMHvkBgObNm2PRokUYNWoUGjRogCFDhqB27dowMDDAvXv3sHnzZgBQO8WVnxo1aqBq1aqYPHkyhBCwtbXFn3/+qXb4GgC+/vpr+Pv7w8/PD+PHj0d2djZmzZoFc3Pz12bAgYGB2Lx5M1q0aIGxY8fC29sbOTk5SExMxK5duzB+/Hg0adJEo37w8vICAHz33Xfo378/jIyM4OnpqfaXMvDq9sHZs2ejT58+6NixIz7//HNkZGRgzpw5ePLkSb5P89UWT09PDBkyBD/88AMMDAwQEBCAGzduYOrUqXBxccHYsWMLbDtt2jTcvn0bbdq0QaVKlfDkyRN89913MDIykp5BNHbsWKxZswYdOnTA9OnT4erqih07dmDx4sUYNmzYax/QmJ8ff/wRAQEBaNeuHQYMGICKFSvi0aNHiI+Px4kTJ6Qk4KuvvsIff/yB1q1bY9q0aTAzM8OiRYvUbsnNj5WVFVq0aIE5c+bA3t4ebm5u2LdvH5YvXw4bGxuNYwZeXWfwySefYMGCBTAyMsL777+Pc+fOYe7cuUX6PhSkV69emDt3Lv766y+V8pLcrgoSHByM7du3o1WrVpg2bRpsbW2xdu1a7NixA7Nnz4a1tXW+7VJSUtCqVSv07t0bNWrUgKWlJWJjYxERESEdAbSxscHUqVPx5Zdfol+/fujVqxcePnyI0NBQmJiYIDg4WON4P/74Y6xduxbt27fHmDFj0LhxYxgZGeH27dvYu3cvOnfujC5duujss/0v/Pz8YGxsjF69emHixIl48eIFlixZgsePH6vUU976HR4ejvLly6NLly64ffs2QkND4eTkpHK9WFH3lUXZNxQmKSkp36cqu7m5oW7dukX+XdCW5ORkdOnSBZ999hlSUlIQHBwMExMT6eGdBgYG+Prrr/Hpp59K9Z48eYKQkBC1U2EdOnTA/Pnz0bt3bwwZMgQPHz7E3Llz1ZI55foW549vX19ftf2BkrJflYlJXFwcLCwsAAAfffSRVC8kJAShoaHYu3evyuMOitoeeJVAGxoaokWLFkUPvliXX5cip06dEgMHDhTu7u5CLpcLExMT4eHhIfr16yd2796tUrd///7C3Nw83/lcuHBB+Pn5CUtLS1G+fHnRvXt3kZiYKACI4OBglbp//PGH8Pb2FsbGxqJy5cpi5syZ0tX+ueW920sIIZ49eya++uor4enpKYyNjYW1tbXw8vISY8eOVblbCHnuyChsnkFBQcLZ2VkYGBgIAGLv3r2F9tm2bdtEkyZNhImJiTA3Nxdt2rQRhw4dUqlTnLu9Xlc3OztbzJo1S1SvXl0YGRkJe3t78cknn4hbt26p1Mt7t9f27dtFQECAqFixojA2NhYODg6iffv2KrdQCyHEzZs3Re/evYWdnZ0wMjISnp6eYs6cOSp3ICnv7pkzZ45afPl91qdPnxY9evQQDg4OwsjISCgUCtG6dWuxdOlSlXqHDh0STZs2FXK5XCgUCvHFF1+In376qUh3e92+fVt069ZNlC9fXlhaWgp/f39x7tw5tc86v1vMhfi3/3N/7hkZGWL8+PHCwcFBmJiYiKZNm4rDhw/nu/3kp6Dtb9euXdKdH3njKMp2pfye5L6FP/e6FeXOuIJudc/r7NmzolOnTsLa2loYGxuLunXrqt0xkvdurxcvXoihQ4cKb29vYWVlJUxNTYWnp6cIDg4WaWlpKm2XLVsm7Qesra1F586d1R4JUdA+J7/9RWZmppg7d66oW7euMDExERYWFqJGjRri888/F1euXJHqafuzLej7W9D2lt9n+Oeff0pxV6xYUXzxxRfir7/+Utsuc3JyxIwZM0SlSpWEsbGx8Pb2Ftu3bxd169YVXbp0UVlOUfaVRd035MfV1bXAu5qU/VjU34WCtuuCPv+827DyM/jll1/E6NGjRYUKFYRcLhfvvfeeiIuLU2u/bNkyUa1aNWFsbCyqV68uVqxYobbfFEKIFStWCE9PTyGXy0WVKlVEeHi4WL58ebHuQs3P7t27BQBx7NgxtWkF9W3e7X78+PFCJpOJ+Pj4YrUXQoj33ntP7W7F15H9/0KIiIjeuISEBNSoUQPBwcEaPaGXSgdvb280b94cS5YsKVb7xo0bw9XVVe10elFdu3YN1apVQ2RkJPz8/IrcjskPERG9EadPn8b69evRrFkzWFlZ4dKlS5g9ezZSU1Nx7ty5fO8Eo9ItIiICXbp0wZUrV4p0x1huqampqFChAk6dOiU9WkFTAwcOxO3btzU+HVlmr/khIqKyxdzcHHFxcVi+fDmePHkCa2tr+Pr64ptvvmHiU0b5+/tjzpw5SEhI0Dj5sbKyUhvmRRNZWVmoWrWqxgMaAzzyQ0RERHqmzN7qTkRERFQcTH6IiIhIrzD5ISIiIr3y1l/wnJOTg7t378LS0jLfoS2IiIio9BFC4OnTp3B2dlZ5CKY2vPXJz927d9VGdiYiIqKy4datWxrfSfY6b33yoxzm4datWyX2+HciIiLSTGpqKlxcXPIdrum/euuTH+WpLisrKyY/REREZYwuLlnhBc9ERESkV5j8EBERkV5h8kNERER65a2/5qeosrOzkZmZWdJhEMHIyAiGhoYlHQYR0VtL75MfIQSSkpLw5MmTkg6FSGJjYwOFQsFnUxER6YDeJz/KxMfBwQFmZmb8saESJYRAeno6kpOTAQBO
... (remainder of the base64-encoded PNG for this cell's plot omitted) ...",
+     "text/plain": [
+      "(matplotlib Figure: image data omitted)"
+     ]
+    },
+    "metadata": {},
+    "output_type": "display_data"
+   }
+  ],
+  "source": [
+   "%matplotlib inline\n",
+   "calibrated_gradient = lava.compute_values_and_visualize(dual_sol, trained_with_flag, training_size, portion)"
+  ]
+ },
+ {
+  "cell_type": "markdown",
+  "metadata": {},
+  "source": [
+   "# Imbalanced 0 and 1 labels"
+  ]
+ },
+ {
+  "cell_type": "code",
+  "execution_count": 93,
+  "metadata": {},
+  "outputs": [
+   {
+    "name": "stdout",
+    "output_type": "stream",
+    "text": [
+     "MNIST\n",
+     "Currrent label: 0\n",
+     "New label: 9 \n",
+     "TRAINNNN label: tensor(9)\n",
+     "Currrent label: 1\n",
+     "New label: 8 \n",
+     "TRAINNNN label: tensor(8)\n",
+     "... (output truncated here: the same 'Currrent label / New label / TRAINNNN' pattern, each entry followed by a dump of the corresponding normalized image tensor, repeats for every corrupted sample) ...\n",
-0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, 
-0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " 
[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + 
"Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, 
-0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 1\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, 
..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New 
label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, 
-0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + 
" [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 1\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: 
tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 1\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, 
-0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, 
..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: 
(tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " 
[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n",
+      "Currrent label: 0\n",
+      "New label: 1 \n",
+      "TRAINNNN label: tensor(1)\n",
+      "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n",
+      "         [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n",
+      "         [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n",
+      "         ...,\n",
+      "         [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n",
+      "         [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n",
+      "         [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n",
+      "[... identical 'Currrent label' / 'New label' / 'TRAINNNN' debug dumps repeated for each remaining shuffled sample; output truncated ...]\n",
+      "Currrent label: 0\n",
+      "New label: 6 \n",
+      "TRAINNNN label: tensor(6)\n",
+      "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n",
+      "         [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n",
+      "         [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n",
+      "         ...,\n",
+      "         [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n",
+      "         [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n",
+      "         
[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n" + ] + } + ], + "source": [ + "loaders, shuffle_ind = lava.load_data_corrupted(corrupt_type='shuffle', dataname='MNIST', resize=resize, stratified=False,\n", + " training_size=training_size, test_size=valid_size, currupt_por=portion)" + ] + }, + { + "cell_type": "code", + "execution_count": 94, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "187 555\n", + "109 91\n" + ] + } + ], + "source": [ + "cnt1 =0\n", + "cnt0 =0\n", + "for batch in loaders['train']:\n", + " #print(batch[0].size())\n", + " ##print(batch[1].size())\n", + " #print(torch.sum(batch[1] == 1).item())\n", + " cnt1+=torch.sum(batch[1] == 1).item()\n", + " #print(torch.sum(batch[1] == 0).item())\n", + " cnt0+=torch.sum(batch[1] == 0).item()\n", + "print(cnt1, cnt0)\n", + "cnt1 =0\n", + "cnt0 =0\n", + "for batch in loaders['test']:\n", + " #print(batch[0].size())\n", + " #print(batch[1].size())\n", + " #print(torch.sum(batch[1] == 1).item())\n", + " cnt1+=torch.sum(batch[1] == 1).item()\n", + " #print(torch.sum(batch[1] == 0).item())\n", + " cnt0+=torch.sum(batch[1] == 0).item()\n", + "print(cnt1, cnt0)" + ] + }, + { + "cell_type": "code", + "execution_count": 95, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "2f9988cab68447caa90b66ac7e65f9cb", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/16 [00:00 maxsamples:\n", + " idxs_1 = sorted(np.random.choice(\n", + " dist.X1.shape[0], maxsamples, replace=False))\n", + "else:\n", + " idxs_1 = np.s_[:] # hack to get a full slice\n", + "\n", + "if maxsamples and dist.X2.shape[0] > maxsamples:\n", + " idxs_2 = sorted(np.random.choice(\n", + " dist.X2.shape[0], maxsamples, replace=False))\n", + "else:\n", + " idxs_2 = np.s_[:] # hack to get a full slice\n", + "Z1 = torch.cat((dist.X1[idxs_1],\n", + " dist.Y1[idxs_1].type(dist.X1.dtype).unsqueeze(1)), -1)\n", + "Z2 = torch.cat((dist.X2[idxs_2],\n", + " dist.Y2[idxs_2].type(dist.X2.dtype).unsqueeze(1)), -1)\n", + "Z1 = Z1.to(device)\n", + "Z2 = Z2.to(device)" + ] + }, + { + "cell_type": "code", + "execution_count": 99, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([1000, 1025]) torch.Size([200, 1025])\n", + "Z1 shape in batch: torch.Size([1, 1000, 1025])\n", + "Z2 shape in batch: torch.Size([1, 200, 1025])\n", + "1 1000 1024\n", + "torch.Size([1, 1000, 100])\n", + "1 200 1024\n", + "torch.Size([1, 200, 100])\n", + "torch.Size([1, 1000, 200])\n", + "torch.Size([1, 1000, 200])\n", + "Gia tri M: tensor([[[11, 11, 10, ..., 11, 10, 10],\n", + " [11, 11, 10, ..., 11, 10, 10],\n", + " [11, 11, 10, ..., 11, 10, 10],\n", + " ...,\n", + " [23, 23, 22, ..., 23, 22, 22],\n", + " [23, 23, 22, ..., 23, 22, 22],\n", + " [23, 23, 22, ..., 23, 22, 22]]], device='cuda:0')\n", + "torch.Size([1, 1000, 200])\n", + "torch.Size([1, 1000, 200])\n", + "gia tri D: tensor([[[6453.8877, 2967.9053, 4213.6929, ..., 1310.1222, 4209.4624,\n", + " 1781.6909],\n", + " [2848.7041, 1528.1398, 2259.4897, ..., 2037.5909, 1271.9507,\n", + " 481.0229],\n", + " [1296.4718, 2012.3370, 1033.0972, ..., 5328.5400, 188.9409,\n", + " 706.6304],\n", + " ...,\n", + " [2324.7200, 1032.2689, 3972.3184, ..., 1206.4545, 2212.0918,\n", + " 1994.9100],\n", + " [4987.5249, 1553.8002, 4647.3730, ..., 118.6576, 4436.5957,\n", + " 2727.1289],\n", + " [6999.4194, 3104.5422, 7282.3887, ..., 340.6185, 6211.5527,\n", + " 
4177.6133]]], device='cuda:0')\n", + "torch.Size([1, 1000, 200])\n", + "Z1 shape in batch: torch.Size([1, 200, 1025])\n", + "Z2 shape in batch: torch.Size([1, 1000, 1025])\n", + "1 200 1024\n", + "torch.Size([1, 200, 100])\n", + "1 1000 1024\n", + "torch.Size([1, 1000, 100])\n", + "torch.Size([1, 200, 1000])\n", + "torch.Size([1, 200, 1000])\n", + "Gia tri M: tensor([[[132, 132, 132, ..., 133, 133, 133],\n", + " [132, 132, 132, ..., 133, 133, 133],\n", + " [120, 120, 120, ..., 121, 121, 121],\n", + " ...,\n", + " [132, 132, 132, ..., 133, 133, 133],\n", + " [120, 120, 120, ..., 121, 121, 121],\n", + " [120, 120, 120, ..., 121, 121, 121]]], device='cuda:0')\n", + "torch.Size([1, 200, 1000])\n", + "torch.Size([1, 200, 1000])\n", + "gia tri D: tensor([[[6453.8877, 2848.7041, 1296.4718, ..., 2324.7200, 4987.5249,\n", + " 6999.4194],\n", + " [2967.9033, 1528.1398, 2012.3370, ..., 1032.2689, 1553.7982,\n", + " 3104.5403],\n", + " [4213.6929, 2259.4897, 1033.0972, ..., 3972.3184, 4647.3730,\n", + " 7282.3887],\n", + " ...,\n", + " [1310.1222, 2037.5909, 5328.5381, ..., 1206.4545, 118.6576,\n", + " 340.6185],\n", + " [4209.4624, 1271.9507, 188.9409, ..., 2212.0918, 4436.5957,\n", + " 6211.5527],\n", + " [1781.6890, 481.0249, 706.6304, ..., 1994.9100, 2727.1270,\n", + " 4177.6113]]], device='cuda:0')\n", + "torch.Size([1, 200, 1000])\n" + ] + } + ], + "source": [ + "with torch.no_grad():\n", + " loss.debias = False\n", + " loss.potentials = True\n", + " print(Z1.shape, Z2.shape)\n", + " F_i, G_j = loss(Z1, Z2)" + ] + }, + { + "cell_type": "code", + "execution_count": 100, + "metadata": {}, + "outputs": [], + "source": [ + "π = [F_i, G_j]\n", + "dual_sol = π\n", + "for i in range(len(dual_sol)):\n", + " dual_sol[i] = dual_sol[i].to('cpu')" + ] + }, + { + "cell_type": "code", + "execution_count": 101, + "metadata": {}, + "outputs": [], + "source": [ + "train_indices = get_indices(loaders['train'])\n", + "trained_with_flag = train_with_corrupt_flag(loaders['train'], shuffle_ind, train_indices)" + ] + }, + { + "cell_type": "code", + "execution_count": 102, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "inspected: 10, found: 4 detection rate: 0.01 baseline: 1.8\n", + "inspected: 20, found: 8 detection rate: 0.03 baseline: 3.6\n", + "inspected: 30, found: 11 detection rate: 0.04 baseline: 5.4\n", + "inspected: 40, found: 16 detection rate: 0.05 baseline: 7.2\n", + "inspected: 50, found: 19 detection rate: 0.06 baseline: 9.0\n", + "inspected: 60, found: 24 detection rate: 0.08 baseline: 10.8\n", + "inspected: 70, found: 26 detection rate: 0.09 baseline: 12.6\n", + "inspected: 80, found: 33 detection rate: 0.11 baseline: 14.4\n", + "inspected: 90, found: 41 detection rate: 0.14 baseline: 16.2\n", + "inspected: 100, found: 45 detection rate: 0.15 baseline: 18.0\n", + "inspected: 110, found: 48 detection rate: 0.16 baseline: 19.8\n", + "inspected: 120, found: 54 detection rate: 0.18 baseline: 21.6\n", + "inspected: 130, found: 60 detection rate: 0.20 baseline: 23.400000000000002\n", + "inspected: 140, found: 63 detection rate: 0.21 baseline: 25.2\n", + "inspected: 150, found: 65 detection rate: 0.22 baseline: 27.0\n", + "inspected: 160, found: 67 detection rate: 0.22 baseline: 28.8\n", + "inspected: 170, found: 70 detection rate: 0.23 baseline: 30.6\n", + "inspected: 180, found: 75 detection rate: 0.25 baseline: 32.4\n", + "inspected: 190, found: 80 detection rate: 0.27 baseline: 34.2\n", + "inspected: 200, found: 84 detection rate: 0.28 baseline: 36.0\n", 
+ "inspected: 210, found: 88 detection rate: 0.29 baseline: 37.800000000000004\n", + "inspected: 220, found: 94 detection rate: 0.31 baseline: 39.6\n", + "inspected: 230, found: 98 detection rate: 0.33 baseline: 41.4\n", + "inspected: 240, found: 103 detection rate: 0.34 baseline: 43.2\n", + "inspected: 250, found: 106 detection rate: 0.35 baseline: 45.0\n", + "inspected: 260, found: 109 detection rate: 0.36 baseline: 46.800000000000004\n", + "inspected: 270, found: 114 detection rate: 0.38 baseline: 48.6\n", + "inspected: 280, found: 118 detection rate: 0.39 baseline: 50.4\n", + "inspected: 290, found: 119 detection rate: 0.40 baseline: 52.2\n", + "inspected: 300, found: 122 detection rate: 0.41 baseline: 54.0\n", + "inspected: 310, found: 124 detection rate: 0.41 baseline: 55.800000000000004\n", + "inspected: 320, found: 126 detection rate: 0.42 baseline: 57.6\n", + "inspected: 330, found: 130 detection rate: 0.43 baseline: 59.4\n", + "inspected: 340, found: 131 detection rate: 0.44 baseline: 61.2\n", + "inspected: 350, found: 133 detection rate: 0.44 baseline: 63.0\n", + "inspected: 360, found: 133 detection rate: 0.44 baseline: 64.8\n", + "inspected: 370, found: 135 detection rate: 0.45 baseline: 66.60000000000001\n", + "inspected: 380, found: 136 detection rate: 0.45 baseline: 68.4\n", + "inspected: 390, found: 138 detection rate: 0.46 baseline: 70.2\n", + "inspected: 400, found: 143 detection rate: 0.48 baseline: 72.0\n", + "inspected: 410, found: 148 detection rate: 0.49 baseline: 73.8\n", + "inspected: 420, found: 149 detection rate: 0.50 baseline: 75.60000000000001\n", + "inspected: 430, found: 152 detection rate: 0.51 baseline: 77.4\n", + "inspected: 440, found: 153 detection rate: 0.51 baseline: 79.2\n", + "inspected: 450, found: 155 detection rate: 0.52 baseline: 81.0\n", + "inspected: 460, found: 162 detection rate: 0.54 baseline: 82.8\n", + "inspected: 470, found: 166 detection rate: 0.55 baseline: 84.60000000000001\n", + "inspected: 480, found: 168 detection rate: 0.56 baseline: 86.4\n", + "inspected: 490, found: 169 detection rate: 0.56 baseline: 88.2\n", + "inspected: 500, found: 172 detection rate: 0.57 baseline: 90.0\n", + "inspected: 510, found: 174 detection rate: 0.58 baseline: 91.8\n", + "inspected: 520, found: 176 detection rate: 0.59 baseline: 93.60000000000001\n", + "inspected: 530, found: 176 detection rate: 0.59 baseline: 95.4\n", + "inspected: 540, found: 181 detection rate: 0.60 baseline: 97.2\n", + "inspected: 550, found: 185 detection rate: 0.62 baseline: 99.0\n", + "inspected: 560, found: 191 detection rate: 0.64 baseline: 100.8\n", + "inspected: 570, found: 194 detection rate: 0.65 baseline: 102.60000000000001\n", + "inspected: 580, found: 196 detection rate: 0.65 baseline: 104.4\n", + "inspected: 590, found: 198 detection rate: 0.66 baseline: 106.2\n", + "inspected: 600, found: 200 detection rate: 0.67 baseline: 108.0\n", + "inspected: 610, found: 202 detection rate: 0.67 baseline: 109.8\n", + "inspected: 620, found: 204 detection rate: 0.68 baseline: 111.60000000000001\n", + "inspected: 630, found: 207 detection rate: 0.69 baseline: 113.4\n", + "inspected: 640, found: 211 detection rate: 0.70 baseline: 115.2\n", + "inspected: 650, found: 212 detection rate: 0.71 baseline: 117.0\n", + "inspected: 660, found: 215 detection rate: 0.72 baseline: 118.8\n", + "inspected: 670, found: 219 detection rate: 0.73 baseline: 120.60000000000001\n", + "inspected: 680, found: 223 detection rate: 0.74 baseline: 122.4\n", + "inspected: 690, found: 224 detection rate: 0.75 
baseline: 124.2\n", + "inspected: 700, found: 229 detection rate: 0.76 baseline: 126.0\n", + "inspected: 710, found: 231 detection rate: 0.77 baseline: 127.8\n", + "inspected: 720, found: 237 detection rate: 0.79 baseline: 129.6\n", + "inspected: 730, found: 238 detection rate: 0.79 baseline: 131.4\n", + "inspected: 740, found: 242 detection rate: 0.81 baseline: 133.20000000000002\n", + "inspected: 750, found: 244 detection rate: 0.81 baseline: 135.0\n", + "inspected: 760, found: 246 detection rate: 0.82 baseline: 136.8\n", + "inspected: 770, found: 252 detection rate: 0.84 baseline: 138.6\n", + "inspected: 780, found: 255 detection rate: 0.85 baseline: 140.4\n", + "inspected: 790, found: 257 detection rate: 0.86 baseline: 142.20000000000002\n", + "inspected: 800, found: 259 detection rate: 0.86 baseline: 144.0\n", + "inspected: 810, found: 260 detection rate: 0.87 baseline: 145.8\n", + "inspected: 820, found: 261 detection rate: 0.87 baseline: 147.6\n", + "inspected: 830, found: 264 detection rate: 0.88 baseline: 149.4\n", + "inspected: 840, found: 266 detection rate: 0.89 baseline: 151.20000000000002\n", + "inspected: 850, found: 266 detection rate: 0.89 baseline: 153.0\n", + "inspected: 860, found: 269 detection rate: 0.90 baseline: 154.8\n", + "inspected: 870, found: 274 detection rate: 0.91 baseline: 156.6\n", + "inspected: 880, found: 278 detection rate: 0.93 baseline: 158.4\n", + "inspected: 890, found: 281 detection rate: 0.94 baseline: 160.20000000000002\n", + "inspected: 900, found: 286 detection rate: 0.95 baseline: 162.0\n", + "inspected: 910, found: 289 detection rate: 0.96 baseline: 163.8\n", + "inspected: 920, found: 293 detection rate: 0.98 baseline: 165.6\n", + "inspected: 930, found: 298 detection rate: 0.99 baseline: 167.4\n", + "inspected: 940, found: 300 detection rate: 1.00 baseline: 169.20000000000002\n", + "inspected: 950, found: 300 detection rate: 1.00 baseline: 171.0\n", + "inspected: 960, found: 300 detection rate: 1.00 baseline: 172.8\n", + "inspected: 970, found: 300 detection rate: 1.00 baseline: 174.6\n", + "inspected: 980, found: 300 detection rate: 1.00 baseline: 176.4\n", + "inspected: 990, found: 300 detection rate: 1.00 baseline: 178.20000000000002\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAioAAAHFCAYAAADcytJ5AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAABTA0lEQVR4nO3deVyU5fo/8M+w4wIqCsgOSoAKimsuRYqG5JKSuaaSqampqS1iHdPMNK3U6uSSnrSyFDXxl6WmomG5iyAetzJRcMEVwaOmAvfvD74zzvIMzMAzG3zerxcvzzxzz8zNgx6u7vu6rlshhBAgIiIiskJ2lp4AERERkT4MVIiIiMhqMVAhIiIiq8VAhYiIiKwWAxUiIiKyWgxUiIiIyGoxUCEiIiKrxUCFiIiIrBYDFSIiIrJaDFTIolatWgWFQqH6cnFxgbe3Nzp37oy5c+fi2rVrFX7vkydPYubMmTh//rx8EzbycxITExEUFGTSz7ekq1ev4p133kGLFi3g5uYGJycn+Pn5ISEhAT/99BOKi4vNMo/ffvsNCoUCv/32m+qaOe795cuXMXPmTGRmZho0XjnPDRs2mHRe5lbWfZg5cyYUCoX5J0VVBgMVsgorV67E/v37sWPHDnz55Zdo0aIF5s2bh4iICOzcubNC73ny5Em8//77ZglU9H3O9OnTkZKSYtLPt5QDBw4gMjISy5cvR+/evbF27Vrs3LkTH330ERwdHZGQkIBVq1ZZbH7muPeXL1/G+++/b3CgUlWVdR9GjhyJ/fv3m39SVGU4WHoCRADQrFkztG7dWvX4hRdewOTJk9GpUyckJCTgr7/+gpeXlwVnWDGNGjWy9BRM4vbt2+jTpw9q1aqFvXv3omHDhhrPv/TSS8jKysLNmzfLfJ/79+/DxcXFJP/FXVXvva3x8/ODn5+fpadBNowrKmS1AgIC8Omnn+LOnTtYtmyZxnNHjhxB7969Ua9ePbi4uCA6Ohrr1q1TPb9q1Sq8+OKLAIDOnTurtpbU/wt/586diI2NhZubG2rUqIGOHTsiNTVVZx6nT5/GoEGD4OXlBWdnZwQEBGDYsGF48OBBuZ8jtf3wzz//YNq0aQgODoaTkxN8fX3x2muv4fbt2xrjgoKC0LNnT2zbtg0tW7aEq6srwsPD8fXXX5d53x49egRPT08MHTpU57nbt2/D1dUVU6ZMAQCUlJRg9uzZCAsLg6urK+rUqYOoqCh89tlnZX7G8uXLcfXqVcyfP18nSFGKiopC586dVY+V23zbt2/HiBEj0KBBA9SoUQMPHjzA2bNn8fLLLyM0NBQ1atSAr68vevXqhePHj+u87+nTp9G9e3fUqFED9evXx5gxY3Dnzh2dcVL3XgiBxYsXo0WLFnB1dUXdunXRr18/nDt3TmPcM888g2bNmuHw4cN46qmnUKNGDYSEhOCjjz5CSUkJgNJtnDZt2gAAXn75ZdXPfubMmWXeO23KrZETJ05g0KBBcHd3h5eXF0aMGIGCggKNsevXr0e7du3g7u6umtOIESNUzyu3llavXo0pU6bA29sbrq6uiImJQUZGhs5nl/fvSOnSpUsYPXo0/P394eTkBB8fH/Tr1w9Xr14t9z5Ibf2UlJRg/vz5CA8Ph7OzMzw9PTFs2DBcvHjR6J8DVQOCyIJWrlwpAIjDhw9LPv+///1P2Nvbi9jYWNW1Xbt2CScnJ/HUU0+J5ORksW3bNpGYmCgAiJUrVwohhLh27ZqYM2eOACC+/PJLsX//frF//35x7do1IYQQ3333nVAoFKJPnz5i48aNYvPmzaJnz57C3t5e7Ny5U/VZmZmZolatWiIoKEgsXbpUpKamitWrV4v+/fuLwsLCcj9n+PDhIjAwUPV+JSUlIi4uTjg4OIjp06eL7du3i08++UTUrFlTREdHi3/++Uc1NjAwUPj5+YkmTZqIb7/9Vvz666/ixRdfFABEWlpamfd18uTJwtXVVRQUFGhcX7x4sQAgsrKyhBBCzJ07V9jb24sZM2aI1NRUsW3bNrFo0SIxc+bMMt+/W7duwt7eXty9e7fMceqUP2tfX18xevRosXXrVrFhwwZRVFQk0tLSxBtvvCE2bNgg0tLSREpKiujTp49wdXUVp0+fVr1HXl6e8PT0FL6+vmLlypViy5YtYsiQISIgIEAAELt371aN1b73QggxatQo4ejoKN544w2xbds28cMPP4jw8HDh5eUl8vLyVONiYmKEh4eHCA0NFUuXLhU7duwQ48aNEwDEN998I4QQoqCgQPU9/etf/1L97HNzc/Xeg927dwsAYv369aprM2bMEABEWFiYeO+998SOHTvEggULhLOzs3j55ZdV4/bt2ycUCoUYOHCg2LJli9i1a5dYuXKlGDp0qM77+/v7i+eff15s3rxZrF69WjRu3Fi4ubmJv//+WzXWkH9HQghx8eJF0bBhQ1G/fn2xYMECsXPnTpGcnCxGjBghTp06Ve59UH5/6kaPHi0AiPHjx4tt27aJpUuXigYNGgh/f39x/fp1o34OVPUxUCGLKi9QEUIILy8vERERoXocHh4uoqOjxaNHjzTG9ezZUzRs2FAUFxcLIYRYv369zi8vIYS4e/euqFevnujVq5fG9eLiYtG8eXPRtm1b1bUuXbqIOnXqqAIPKfo+RwjdX5bbtm0TAMT8+fM1xiUnJwsA4quvvlJdCwwMFC4uLuLChQuqa/fv3xf16tUTr776qt75CCFEVlaWzvsJIUTbtm1Fq1atVI979uwpWrRoUeZ7SQkPDxfe3t4614uLi8WjR49UX8qfhRCPf9bDhg0r9/2LiorEw4cPRWhoqJg8ebLq+tSpU4VCoRCZmZka47t161ZuoLJ//34BQHz66acar83NzRWurq7i7bffVl2LiYkRAMTBgwc1xjZp0kTExcWpHh8+fFjnF3tZygpUtP9OjBs3Tri4uIiSkhIhhBCffPKJACBu375d7vu3bNlS9TohhDh//rxwdHQUI0eOVF0z9N/RiBEjhKOjozh58qTezy3rPmgHKqdOnRIAxLhx4zTGHTx4UAAQ77zzjuqaoT8Hqtq49UNWTwih+t9nz57F6dOnMWTIEABAUVGR6uu5557DlStXcObMmTLfb9++fbh16xaGDx+u8fqSkhJ0794dhw8fxt27d3Hv3j2kpaWhf//+aNCggSzfy65duwCUbkuoe/HFF1GzZk2dracWLVogICBA9djFxQVPPPEELly4UObnREZGolWrVli5cqXq2qlTp3Do0CGNrYK2bdvi2LFjGDduHH799VcUFhZW9FsDAEyZMgWOjo6qr969e+uMeeGFF3SuFRUVYc6cOWjSpAmcnJzg4OAAJycn/PXXXzh16pRq3O7du9G0aVM0b95c4/WDBw8ud24///wzFAoFXnrpJY2fu7e3N5o3b65RMQQA3t7eaNu2rca1qKiocu99RWnfq6ioKPzzzz+qyjfl9kr//v2xbt06XLp0Se97DR48WGO7JTAwEB06dMDu3bsBGPfvaOvWrejcuTMiIiJk+T6Vc9D+N9C2bVtERETo/Bsw98+BrA
8DFbJqd+/exc2bN+Hj4wOgtBwWAN58802NX4iOjo4YN24cAODGjRtlvqfyPfr166fzHvPmzYMQArdu3UJ+fj6Ki4tlTQS8efMmHBwcdAIfhUIBb29vneRTDw8PnfdwdnbG/fv3y/2sESNGYP/+/Th9+jSA0soqZ2dnDBo0SDVm2rRp+OSTT3DgwAHEx8fDw8MDsbGxOHLkSJnvHRAQgOvXr+PevXsa19944w0cPnwYhw8f1pu7InV9ypQpmD59Ovr06YPNmzfj4MGDOHz4MJo3b67xvd68eRPe3t46r5e6pu3q1asQQsDLy0vn537gwAGdvzeVufcVof15zs7OAKD6vKeffhqbNm1CUVERhg0bBj8/PzRr1gxr1qzReS9990j598uYf0fXr1+X/d8AIP33wMfHR9Z/A1Q1sOqHrNovv/yC4uJiPPPMMwCA+vXrAyj9BZuQkCD5mrCwsDLfU/keX3zxBZ588knJMV5eXiguLoa9vb1Ogl9leHh4oKioCNevX9cIVoQQyMvLU/1XsxwGDRqEKVOmYNWqVfjwww/x3XffoU+fPqhbt65qjIODA6ZMmYIpU6bg9u3b2LlzJ9555x3ExcUhNzcXNWrUkHzvbt26Yfv27diyZQv69eunuu7v7w9/f38AgJOTk+RrpSp8Vq9ejWHDhmHOnDka12/cuIE6deqoHnt4eCAvL0/n9VLXtNWvXx8KhQK///67KghQJ3XN2jz//PN4/vnn8eDBAxw4cABz587F4MGDERQUhPbt26vG6btHyl/6xvw7atCggez/BgDgypUrOgHQ5cuXVXMjUuKKClmtnJwcvPnmm3B3d8err74KoPT/PENDQ3Hs2DG0bt1a8qt27doAdP+LVKljx46oU6cOTp48qfc9nJycVNUS69evL3OVRt/nSImNjQVQ+otZ3Y8//oi7d++qnpdD3bp10adPH3z77bf4+eefkZeXp7Hto61OnTro168fXnvtNdy6davM/jMjR46El5cX3n77bVy5cqXSc1UoFDqBwi+//KKzvdG5c2ecOHECx44d07j+ww8/lPsZPXv2hBACly5dkvyZR0ZGGj1vY372cnJ2dkZMTAzmzZsHADoVPWvWrNHYMr1w4QL27dunCviN+XcUHx+P3bt3l7mlasx96NKlCwDdfwOHDx/GqVOnZP03QFUDV1TIKvz3v/9V7ZFfu3YNv//+O1auXAl7e3ukpKRorD4sW7YM8fHxiIuLQ2JiInx9fXHr1i2cOnUKR48exfr16wGU9mYBgK+++gq1a9eGi4sLgoOD4eHhgS+++ALDhw/HrVu30K9fP3h6euL69es4duwYrl+/jiVLlgAAFixYgE6dOqFdu3ZISkpC48aNcfXqVfz0009YtmwZateuXebnaOvWrRvi4uIwdepUFBYWomPHjsjKysKMGTMQHR0tWVJcGSNGjEBycjLGjx8PPz8/dO3aVeP5Xr16qXrYNGjQABcuXMCiRYsQGBiI0NBQve9bp04dbNq0Cb169ULz5s0xduxYPPnkk6hVqxZu3ryJPXv2IC8vDx06dDBonj179sSqVasQHh6OqKgopKen4+OPP9b5L+5Jkybh66+/Ro8ePTB79mx4eXnh+++/V21vlaVjx44YPXo0Xn75ZRw5cgRPP/00atasiStXruCPP/5AZGQkxo4da9B8lRo1agRXV1d8//33iIiIQK1ateDj46PaqpTTe++9h4sXLyI2NhZ+fn64ffs2PvvsMzg6OiImJkZj7LVr19C3b1+MGjUKBQUFmDFjBlxcXDBt2jTVGEP/Hc2aNQtbt27F008/jXfeeQeRkZG4ffs2tm3bhilTpiA8PNyo+xAWFobRo0fjiy++gJ2dHeLj43H+/HlMnz4d/v7+mDx5suz3jmycJTN5iZSVIMovJycn4enpKWJiYsScOXP0VtscO3ZM9O/fX3h6egpHR0fh7e0tunTpIpYuXaoxbtGiRSI4OFjY29vrVCWkpaWJHj16iHr16glHR0fh6+srevTooVGRIYQQJ0+eFC+++KLw8PAQTk5OIiAgQCQmJmqUEuv7HKkS2fv374upU6eKwMBA4ejoKBo2bCjGjh0r8vPzNcYFBgaKHj166HzvMTExIiYmpuwb+3+Ki4uFv7+/ACDeffddnec//fRT0aFDB1G/fn3V9/bKK6+I8+fPG/T+eXl5Ytq0aSIqKkrUrFlTODo6Ch8fH9GrVy/x7bffalSUlFXhlZ+fL1555RXh6ekpatSoITp16iR+//13ye/15MmTolu3bsLFxUXUq1dPvPLKK+L//b//Z1B5shBCfP3116Jdu3aiZs2awtXVVTRq1EgMGzZMHDlyRDUmJiZGNG3aVOe1Uu+5Zs0aER4eLhwdHQUAMWPGDL33q6yqH/WyXPX7lZ2dLYQQ4ueffxbx8fHC19dX9e/kueeeE7///rvO+3/33Xdi4sSJokGDBsLZ2Vk89dRTGt+fkqH/jnJzc8WIESOEt7e36mfcv39/cfXq1XLvg1R5cnFxsZg3b5544oknhKOjo6hfv7546aWXdEq7jfk5UNWlEEJtfZCIiGzWb7/9hs6dO2P9+vUauUNEtow5KkRERGS1GKgQERGR1eLWDxEREVktrqgQERGR1WKgQkRERFaLgQoRERFZLZtu+FZSUoLLly+jdu3akm25iYiIyPoIIXDnzh34+PjAzq7sNRObDlQuX76sOleEiIiIbEtubm65h17adKCiPIsiNzcXbm5uFp4NERERGaKwsBD+/v6q3+NlselARbnd4+bmxkCFiIjIxhiStsFkWiIiIrJaDFSIiIjIajFQISIiIqvFQIWIiIisFgMVIiIisloMVIiIiMhqMVAhIiIiq8VAhYiIiKwWAxUiIiKyWgxUiIiIyGoxUCEiIiKrZdNn/RAREVV3GTn5yL5xF8H1ayI6oK7O44qO0XfN3BioEBER2aiPtp7C0rRzqsct/N2RmVugejwmJgQAjB6j71pSfITM30H5FEIIYfZPlUlhYSHc3d1RUFDA05OJiKjKU1/hAIC+i/eZ9fNTxnWQZWXFmN/fXFEhIiKyAdqrJ13CG5j8M1soziJYcQXZoiEyRWNk37hr9i0gBipERERWLiMnXyNIAYBdp6+b9DOnOqzBWIfNqsdLinohuH4Hk36mFFb9EBERWbnsG3clr3cO01xVaeHvrvF4bEyIKt/EmDGzWt3TCFIAYKzDZkTb/W3UvOXAFRUiIiIrp8xJ0TYxNhQTY0PLreiJa+pt3Jj8X4ETEh948yzg19pU36YkBipERERWLjqgLsbEhGhs/4yNCVEFGep5I9EBdXXySLSvSY6x+xvR9mcBu8aAR2Ppiei7bkIMVIiIiKyQ9qpHUnyEzsqIbHbMAPYuevy446TSL41rk82+mgKwPJmIiMjqaFf4mLSHycUjwIpY3esjU0v/vHm2dCVFxiCF5clEREQ2SqrCZ2naOcQ19ZZvFeXikccByM2z0mNungWaD7TIKoo6BipERERWRF+Fj2w9TLS3eaIGSI+zQD6KFJYnExERWYGMnHxsPHoRj4pLJ
J/XV/ljlItHNIMUAMhK1g1WLJSPIoUrKkRERBZW3pk96hU+FaLc6rmVLf18oy5A29EmyUepLAYqREREFiSVk5KZW4B5L0TC0d6u8hU+2ls9UpTBiRUFKEoMVIiIiCxIX06Ko70dElr6Ve7NpbZ6tFnRNo8UBipERERmpt4jRV/uiSw5KfoqemKSgHrBVrfNI4WBChERkRlJ9Ugpq+us0dRLj/VV7oR2s/oARYmBChERkZno65GSMq6DPF1nrbjDbEUxUCEiIjKTsnqkJLT0q3xlj3Y+yt5FpR1mI3pZZUWPIRioEBERmYlJ8lHKKz22kg6zFcVAhYiIyMTUk2dlzUcxtPTYhjFQISIiMiGp5NmUcR0qn49SBUqPDcFAhYiIyETKOmCw0j1SqkDpsSEYqBAREZmI7AcMVrHSY0MwUCEiIpKRyZq5VcHSY0MohBDC0pOoqMLCQri7u6OgoABubm6Wng4REVVzUvkoAHSSZ6fGRxj3xhePACtida+PTC3908ZKj435/c0VFSIiIhnI3sxNfZtHXz6KjZceG4KBChERkQxkbeamvc0TNUB6nI2XHhvCztITICIislUZOfnYePQiMnLy5ctHkSo7zkrWDVaqYD6KFK6oEBERVYDshwuW12G2UReg7Wiby0epLAYqRERERpI9H8XQDrN+ratNgKLEQIWIiMhIsuajVJMOsxXFQIWIiMhIsvZHqSYdZiuKgQoREZGRogPqypOPUo06zFYUAxUiIiIDqXedTYqPkCcfpZp0mK0odqYlIiIygFSVT1I17zBbUexMS0REJKOyTkE2aCWlvNLjatBhtqIYqBAREUFzWyc6oK7G40qdgmxo6TFJYqBCRETVnva2Tgt/d2TmFqge9432kXxduVU+LD2uNAYqRERUrUlt66gHKQCQknEZfaN9kJJxWXXNoCoflh5XGgMVIiKqdgzZ1tH2VGgDDGsfVH6VD0uPZcVAhYiIqhXtbR592zralMFJmasoLD2WHQMVIiKqNqS2eaS2dbRzVAza5pHKR9m7qLT0OKJXtSk9lhsDFSIiqtIM2eaR2tbRrgKSpL7Noy8fhaXHlcJAhYiIqhT1AOPXE3kGbfNIbesYvc0TNUB6HEuPK4WBChER2SztVQ/t/BNtFa7e0Sa1zZOVXBqsZCU/vsZ8lEpjoEJERDZJKilWPQDRx+DqHSnldZht1AVoO5r5KDJioEJERDZHX1KsIQyq3pFiaIdZv9YMUGTEQIWIiGyGcqvnwk3Dep9oq9A2D8AOsxbEQIWIiGxCefkngO72z9iYEDzb1Lti2zzq2GHWYhioEBGRVVJPlAVQbpAyNiYEU+MjJPNPKryKwg6zFsdAhYiIrI726kmX8AaS416PbYxAj5o6QUmFV06U2GHWajBQISIiqyKVKLvr9HXJsc+EeVY+KNHGDrNWhYEKERFZnCHdYzuHNcDuM48DlgonxupTXukxO8xaBAMVIiKyKEMPCZwYG4qJsaGVT4yVYmjpMZkdAxUiIrIYQw8JVF89MctWjzbmo1gMAxUiIrIYYw4JNBmWHls1BipERGQxytJjqeuyVO/ow9Jjm2Fn6QkQEVH1FR1QF2NiQjSuyZ4kq23HDGBFLJDyaumfpzaXlh6r41aP1VAIIYSlJ1FRhYWFcHd3R0FBAdzc3Cw9HSIiqiDtU5BN5uKR0uBE28jU0j9ZemwWxvz+5tYPERGZnXZgYrZtHn35KCw9tloMVIiIyKy0y5HHxIQgKT7CNB+mXXYcNUB6HEuPrRZzVIiIyKQycvKx8ehFZOTkS5YjL007h4ycfPk/WKrsOCtZN1hhPopV44oKERGZjKFn9mTfuCvf1k95HWYbdQHajmY+io1goEJERCZhzJk9+sqUjWZoh1m/1gxQbAQDFSIikpUyUfbCTTOf2cMOs1USAxUiIpKN9laPFJOd2cMOs1USAxUiIpKF1FaPNtnP7GGH2SqPgQoREclC37k9r8c2RqBHTdOfeNxxUumXxjVu9dg6BipERFRh6o3b9CXEPhPmaZ4Tj/cuKu0wG9GLFT1VCAMVIiIyiHY3WanGbWNiQjSuyX5uT3mlx+wwW+UwUCEionJpByV9o32QknFZY8zStHNIGdcBcU29TXNuj6Glx1SlMFAhIqIySSXJagcpStk37iKhpZ95tnq0MR+lSmKgQkREZdKXJCtFtsZt2lh6XG0xUCEiojLpCz60t39Mlo/C0uNqjYEKERGVKTqgrmSS7NT4CAxrH2SefBSWHldbCiGEsPQkKqqwsBDu7u4oKCiAm5ubpadDRFSlaVf9mMzFI8CKWN3rI1NL/2Tpsc0z5vc3V1SIiEiSdmCi/DIJ9W0effkoLD2ulhioEBGRDqkeKUnxEab5MO1tnqgB0uNYelwt2Vl6AkREZHkZOfnYePQiMnLyJcuRl6adQ0ZOvvwfLFV2nJWsG6wwH6Xa4ooKEVE1p7160iW8geS47Bt35dv6Ka/DbKMuQNvRzEchBipERNWZ1OrJrtPXJcfK1iPF0A6zfq0ZoBC3foiIqjN9zdw6h2muqsjWI4UdZslIXFEhIqrG9K2STIwNxcTYUPnLkdlhlozEQIWIqBrT18xNGZjItorCDrNUQQxUiIiqIfUeKUnxEeY78ZgdZslI7ExLRFTFaTduM1uPFHaYJT3YmZaIiADolh5rHyQIlPZIiWvqbb7SY3aYJSMwUCEiqqKkSo+1gxQl2XqkGFp6TGQglicTEVVR+kqPpcjSI4Wlx2QCXFEhIqqi9AUf2ts/svVIYekxmQADFSKiKkQ7cVaq9HhqfASGtQ+Sp8qHpcdkYgxUiIiqCH3VPFKlx9EBdSu/isLSYzIDlicTEVUBGTn56Lt4n871lHEd5O2LosTSY6oElicTEVUz+hJnTXLisUdj/fkoLD0mmTFQISKyYcqclEfFJZLPm+zE46gB0uNYekwyY6BCRGSjtHNSWvi7IzO3QPXYpCceZyWXBitZyY+vMR+FTICBChGRDZJq5paZW4B5L0TC0d5OnjN7yusw26gL0HY081HIpBioEBHZCPXSY305KY72dkho6Vf5DzO0w6xfawYoZFIMVIiIbIDUmT1S2GGWqhoGKkREVko9UVbqzB52mKXqgIEKEZEV0l5BkfJUaAN2mKUqj4EKEZGVkUqUlaIMTthhlqoyBipERFbGkFOPTVp6vHdRaYfZiF6s6CGLY6BCRGQF1Ct69CXEmrX0mB1myUowUCEisjCpwwSlTj0e0CZAng80tPSYyAowUCEisiCpfJSlaeeQMq6D5KnHlcbSY7IxDFSIiCyorMMEE1r6yX/yMUuPycYwUCEiMjND8lFkO0wQYOkx2TQGKkREZmRoPopsKyksPSYbpxBCCEtPoqIKCwvh7u6OgoICuLm5WXo6RESS1DvMTv3xuM7zKeM6AIBp8lFWxOpeH5la+idLj8lCjPn9zRUVIiITMqTDrKz5KOrbPPryUVh6TDZElkDl9u3bqFOnjhxvRURk09TzTwAY3GFWFtrbPFEDpMex9JhsiJ2xL5g3bx6Sk5NVj/v37w8PDw/4+vri
2LFjsk6OiMiWfLT1FPou3ocp646h7+J9+GLXX+W+xqQdZrOSdYMV5qOQjTF6RWXZsmVYvXo1AGDHjh3YsWMHtm7dinXr1uGtt97C9u3bZZ8kEZG1KuuE412nr0u+xqwdZht1AdqOZj4K2SyjA5UrV67A398fAPDzzz+jf//+ePbZZxEUFIR27drJPkEiImtlSP5J57AG2H3mccBikQ6zfq0ZoJDNMjpQqVu3LnJzc+Hv749t27Zh9uzZAAAhBIqLi2WfIBGRNTL0hOOJsaGYGBvKDrNEFWR0oJKQkIDBgwcjNDQUN2/eRHx8PAAgMzMTjRszQYuIqi71RFljTzhmh1miijE6UFm4cCGCgoKQm5uL+fPno1atWgBKt4TGjRsn+wSJiKyB9jZP32gfyXGy5p9oY4dZqobY8I2ISIJ2mXHfxft0xvSN9kFKxmXV47ExIZgaH2GaCUl1mAV0O8x2m2mazyeSkckbvn333XdYtmwZzp07h/379yMwMBCLFi1CcHAwnn/++QpNmojIWmivnnQJbyA57qnQBhjWPkj+/BNtUvkoexeVdpiN6MWKHqrSjO6jsmTJEkyZMgXx8fG4ffu2KoG2Tp06WLRokdzzIyIyK6kkWX1lxsrgxCSnHAOlAcqxtcBfO6Sfv3m2NDhhl1mqwowOVL744gssX74c7777Luzt7VXXW7dujePHdc+wICKyBRk5+dh49CJ+O3NN8vnOYZqrKrIeHChlx4zSc3pSXgXSPpIeww6zVA0YvfWTnZ2N6OhonevOzs64e7f8LHgiImtjSD8Uk5UZS2HpMZGK0YFKcHAwMjMzERgYqHF969ataNKkiWwTIyIyB0P6oZi0zFjJkMMEWXpM1ZDRgcpbb72F1157Df/88w+EEDh06BDWrFmDuXPnYsWKFaaYIxGRyejrh/J6bGMEetQ0/eoJYPhhgiw9pmrI6EDl5ZdfRlFREd5++23cu3cPgwcPhq+vLz777DMMHDjQFHMkIjIZfScXPxPmafoABSj7MMGsxwfAcquHqqsKlSePGjUKo0aNwo0bN1BSUgJPT0+550VEZBbRAXUxJiZEY/vH5Imy6vRt8/AwQSIAFQxUlOrXry/XPIiILCYpPgJxTb3NkygLGNZhlocJEgGoQKASHR0NhUKhc12hUMDFxQWNGzdGYmIiOnfuLMsEiYjkpt51Vj1J1iyrKFIdZjtO0u0wywCFCEAFApXu3btjyZIliIyMRNu2bSGEwJEjR5CVlYXExEScPHkSXbt2xcaNG9mlloisjnYp8piYECSZqu29NnaYJTKa0YHKjRs38MYbb2D69Oka12fPno0LFy5g+/btmDFjBj744AMGKkRkVaRKkZemnUNcU2/T90W5eRa4lS39/M2z7C5LpIfRnWnXrVuHQYMG6VwfOHAg1q1bBwAYNGgQzpw5U/nZERHJSF8psr7rsmCHWaJKMTpQcXFxwb59uqeI7tu3Dy4uLgCAkpISODs7V352REQy0leKrO96pbHDLFGlGb31M2HCBIwZMwbp6elo06YNFAoFDh06hBUrVuCdd94BAPz666+SbfaJiCxBPXnWrKXI7DBLVGkKIYQw9kXff/89/v3vf6u2d8LCwjBhwgQMHjwYAHD//n1VFZApFRYWwt3dHQUFBXBzczPpZxGRbZJKnjVpKbJ66TFQuu2jbWQqAxSq1oz5/V2hQMVaMFAhorJk5OSj72LdreqUcR1Ms4oiVXoM6JYed5sp/2cT2RBjfn9XquEbEZE1Kyt51iQrKSw9JpKd0YFKcXExFi5ciHXr1iEnJwcPHz7UeP7WrVuyTY6IqDLMkjzL0mMikzK66uf999/HggUL0L9/fxQUFGDKlClISEiAnZ0dZs6caYIpEhFJy8jJx8ajF5GRky95TXmOjzpZk2dZekxkckbnqDRq1Aiff/45evTogdq1ayMzM1N17cCBA/jhhx9MNVcdzFEhqr6kkmQBSHadlWqZX2kXj0gnyqpjPgqRJJPmqOTl5SEyMhIAUKtWLRQUFAAAevbsqdOtlohITsqA41FxiWSHWW3qXWdlCVDUK3pYekxkFkYHKn5+frhy5QoCAgLQuHFjbN++HS1btsThw4fZ5I2ITEZ7BcVQsiXOalf0RA2QHhfajQEKkYyMzlHp27cvUlNTAQCvv/46pk+fjtDQUAwbNgwjRoyQfYJERFJn9BhKlsRZqYqerGTdYIVdZolkZ/SKykcfPU4Y69evH/z8/LBv3z40btwYvXv3lnVyRFR9qeeVGHIWz9iYEAjANF1n9W3zNOoCtB3N0mMiE6p0H5Unn3wSTz75pBxzISICoLvN0zfaR3LcvBci4Whvp5EkK1vXWfV8FH2VO8rghAEKkclUKFC5dOkS9u7di2vXrqGkpETjuYkTJ8oyMSKqnqS2eVIyLqNvtA9SMi6rro2NCcGANgE6r5clcVaqw2zHSbodZhmgEJmc0YHKypUrMWbMGDg5OcHDwwMKhUL1nEKhYKBCRBWi3Oq5cFN6m+ep0AYY1j7IdGf0KLHDLJFVMTpQee+99/Dee+9h2rRpsLMzOheXiEinr4khFT3KsSYNUNhhlsjqGB2o3Lt3DwMHDmSQQkQVIpV/or6lI0XWbrJStLd6pLDDLJFFGB2ovPLKK1i/fj2SkpJMMR8iqsL05Z9IeT22MQI9app2mweQ3urRxnwUIosxOlCZO3cuevbsiW3btiEyMhKOjo4azy9YsEC2yRGR7TO2zFjpmTBP0wYoSuwwS2TVjA5U5syZg19//RVhYWEAoJNMS0SkZGiZsVRFj8lXUcorPWaHWSKrYPShhHXr1sXChQuRmJhooikZjocSElmvjJx89F28T+e6VFAy1VQHB0qRKj0GdEuPeZggkcmY9FBCZ2dndOzYscKTI6LqQd82j74yY5NW9Cix9JjI5hgdqLz++uv44osv8Pnnn5tiPkRk49RPOJZi8jJjKSw9JrJZRgcqhw4dwq5du/Dzzz+jadOmOsm0GzdulG1yRGRbtHNSWvi7IzO3QPXY5LknUlh6TGTTjA5U6tSpg4SEBFPMhYhsmFTpcWZugeR5PGbD0mMim1ehFvpERNr05aQ42tshoaWf+SaiXtHD0mMim1fp05OJiIDS3BNjrpuE9jZP1ADpcSw9JrIZBgcq0dHRBvVJOXr0aKUmRES2KTqgLsbEhGhs/5g1J0VqmycruTRYyUp+fI1bPUQ2xeBApU+fPiacBhFVBUnxEYhr6m2efija9G3zNOoCtB3N0mMiG2V0wzdrwoZvRNWcej4KAKyI1R0zMpXBCZGVMWnDNyIiJbN1k5Ui1WG24yTdDrMMUohsGgMVIqoQ7Z4pY2JCkBQfYZ4PZ4dZomqDgQoRGUy966x2z5SlaecQ19TbPIcJssMsUbXBQIWIJGlv62ivoEjJvnHXdIEKO8wSVUsMVIhIh3ZQon3isT4m65nCDrNE1ZZBgYoxBxBOnDixwpMhIsuTaoVvSJBi0p4p7DBLVG0ZFKgsXLhQ4/H169dx79491KlTBwBw+/Zt1KhRA56engxUiGycvlb4Ukx
6jo966bG+LR12mCWq8gwKVLKzHyeu/fDDD1i8eDH+85//ICwsDABw5swZjBo1Cq+++qppZklEJqWej6Jv+0Z7+2dsTAgGtAkwzYRYekxE/8fohm+NGjXChg0bEB0drXE9PT0d/fr10whqTI0N34gqRj0w+fVEnk6ZMQCdVvhT4yPM0zfl4hH9jdsAlh4TVQEmbfh25coVPHr0SOd6cXExrl69auzbEZGZlVe9szTtHFLGdZBshR8dUNe0AQpLj4lIi9GBSmxsLEaNGoX//Oc/aNWqFRQKBY4cOYJXX30VXbt2NcUciUgmUomyUrJv3EVCSz/zdZtl6TER6WFn7Au+/vpr+Pr6om3btnBxcYGzszPatWuHhg0bYsWKFaaYIxHJxNBEWZOVGUth6TERlcHoFZUGDRpgy5Yt+PPPP3H69GkIIRAREYEnnnjCFPMjIhkZEoCYtMxYSb2ih6XHRFSGCjd8CwoKghACjRo1goMD+8YR2YLogLoYExOikyj7rEQ+islob/NEDZAex9JjIkIFqn7u3buHCRMm4JtvvgEA/PnnnwgJCcHEiRPh4+ODpKQkk0xUCqt+iMonValjsVOP9VX0RA0AspIfP+44Geg202zTIiLzMmnVz7Rp03Ds2DH89ttv6N69u+p6165dMWPGDLMGKkRUNn0nHJu0eqcs+rZ5GnUB2o5m6TER6TA6UNm0aROSk5Px5JNPQqFQqK43adIEf//9t6yTI6KKk6rwMcsJx9oM6TCrDE4YoBCRFqMDlevXr8PT01Pn+t27dzUCFyKyLH0VPiY94VgbO8wSUSUZHai0adMGv/zyCyZMmAAAquBk+fLlaN++vbyzI6IK01fhY7bSY6my472LSjvMRvTiNg8RGcToQGXu3Lno3r07Tp48iaKiInz22Wc4ceIE9u/fj7S0NFPMkYiMoJ4oK1XhY7bSY3aYJSIZGB2odOjQAXv37sUnn3yCRo0aYfv27WjZsiX279+PyMhIU8yRiAwklTybMq6D5UqPpbDDLBEZwejyZGvC8mSq7tRXTwCg7+J9OmNSxnUwT06KvtJjdSw7JiKYuDzZ3t4eV65c0UmovXnzJjw9PVFcXGzsWxJRBWivnnQJbyA5zmzJs+wwS0QmYHSgom8B5sGDB3Bycqr0hIhImvbqiXbp8a7T1yVfZ9LkWUNKj9lhlogqweBA5fPPPwdQWuWzYsUK1KpVS/VccXEx9uzZg/DwcPlnSEQGr550DmuA3WceBywmTZ5l6TERmYHBOSrBwcEAgAsXLsDPzw/29vaq55ycnBAUFIRZs2ahXbt2ppmpBOaoUFVRVpv7R8UlmPrjcYPeJ2VcBwAwffKsvnyUkamlf7L0mIjKYJIclezs0lLDzp07Y+PGjahb1wLtt4mqIKlKHUB3a0dbWasnJg1QWHpMRGZkdI7K7t27TTEPompFfbVEqs29ISbGhmJibChLj4moSjM6UOnXrx9at26tc/jgxx9/jEOHDmH9+vWyTY6oKtDe1tFeQakIs6yeqJPqMquN+ShEZAJGByppaWmYMWOGzvXu3bvjk08+kWVSRFWFdlDSN9oHKRmXK/Re816IhKO9nXlWTwDNih6WHhORhRgdqPzvf/+TLEN2dHREYWGhLJMiqgqkTi82JEgZGxMCAei0vh/QJkDuKeqnvc0TNUB6HEuPicjEjA5UmjVrhuTkZLz33nsa19euXYsmTZrINjEiW6fv9GIpUqslcU29zZd/ok5qmycruTRYyUp+fI1bPURkBkYHKtOnT8cLL7yAv//+G126dAEApKamYs2aNcxPIVKjr9Ga9vaPvtWS6IC65g1QlPRt8zTqArQdzdJjIjIrowOV3r17Y9OmTZgzZw42bNgAV1dXREVFYefOnYiJiTHFHIlshnbirNTpxVPjIzCsfZBlVkv0MaTDrDI4YYBCRGbEQwmJZCLVDyUpPkKymZtVkeowC+h2mOVhgkQkE2N+f1coULl9+zY2bNiAc+fO4c0330S9evVw9OhReHl5wdfXt8ITNxYDFbK08rrHmu3k4opih1kisgCTnp6clZWFrl27wt3dHefPn8fIkSNRr149pKSk4MKFC/j2228rPHEiW2JIPxSznVxsLHaYJSIbYXSgMmXKFCQmJmL+/PmoXbu26np8fDwGDx4s6+SIrEl5pxdLMenJxRXFDrNEZEOMDlQOHz6MZcuW6Vz39fVFXl6eLJMisjaGnl6szqQnF1cUO8wSkY0xOlBxcXGRbOx25swZNGhQ/v95E9kaqcZtu05flxxr9u6xxmKHWSKyMUYHKs8//zxmzZqFdevWAQAUCgVycnKQlJSEF154QfYJElmavsZtUqcXm7V7rKEMKT1mh1kislJGV/0UFhbiueeew4kTJ3Dnzh34+PggLy8P7du3x5YtW1Czpvn25Fn1Q6ZkSEUPAJYeExEZyeTlyQCwa9cuHD16FCUlJWjZsiW6du1aoclWBgMVkkt5Jxy38HdHZm6B6rGycZtVY+kxEVkpk5Ynf/vttxgwYAC6dOmiaqEPAA8fPsTatWsxbNgw42dMZEGGnHCcmVtg/fknSiw9JqIqxOgVFXt7e1y5cgWenp4a12/evAlPT08UFxfLOsGycEWFKisjJx99F+8zaOyC/s2R0NLPxDOqJENKj0emMkghIosy5ve3nbFvLoSAQqHQuX7x4kW4u7sb+3ZEFmXMCcdW2RNFHUuPiagKMnjrJzo6GgqFAgqFArGxsXBwePzS4uJiZGdno3v37iaZJJGc1PNRjDnh2Cq3e9Qrelh6TERVkMGBSp8+fQAAmZmZiIuLQ61atVTPOTk5ISgoiOXJZPWkDg60mROOtWlv80QNkB7H0mMismFG56h88803GDBgAFxcXEw1J4MxR4WMoS8fxSbKjLXpq+iJGgBkJT9+zNJjIrJCJq36GT58OG7fvo3Vq1fj77//xltvvWWx05OJjKEvHyX7xl0ktPSzjQBFSd82T6MuQNvRLD0moiqj0qcnjxo1iqcnk03Ql49i9UmySoZ0mFUGJwxQiKiKMLrqZ/LkyUhMTMRff/2lsf0THx+PPXv2yDo5IjlFB9TFmJgQjWtWmySrbceM0q2elFdL/zy1+XGXWSVW9BBRFWT0isqRI0fw1Vdf6Vzn6clkrdSrfJLiIxDX1Nv28lG0y473LirthxLRi9s8RFSl8fRkqtKkqnyS4iNsJ0Bhh1kiquaM3vpRnp786NEjADw9maxXRk6+RpACAEvTziEjJ99CMzKC+lZP2kfSY/TlqRARVSFGByqffPIJrl+/Dk9PT9y/fx8xMTFo3LgxateujQ8//NAUcyQyWEZOPjYevaja7pFiTDdai2CHWSIiFaO3ftzc3PDHH39YxenJROqkDheUYvVVPuwwS0SkYnSgoqR9ejKRJUlt86RkXLbNVvj6tnTYYZaIqiGjApWSkhKsWrUKGzduxPnz56FQKBAcHIx+/fph6NChkocVEpmDvu2cp0Ib2F4r/I6TSr80rnGrh4iqJ4MDFSEEevfujS1btqB58+aIjIyEEAKnTp1CYmIiNm7ciE
2bNplwqkS6lLkoj4pLJJ9XBidWGaAALD0mIiqHwYHKqlWrsGfPHqSmpqJz584az+3atQt9+vTBt99+i2HDhsk+SSIp2jkpLfzdkZlboHpstds8AEuPiYgMZHCgsmbNGrzzzjs6QQpQmq+SlJSE77//noEKmYVUTkpmbgHmvRAJR3s7693mAXS3eqSw9JiICIAR5clZWVno3r273ufj4+Nx7NgxWSZFVB59OSmO9nbWfcAgS4+JiIxi8IrKrVu34OXlpfd5Ly8v5OfbQCMtqhJs6oBB9Yoelh4TERnF4ECluLgYDg76h9vb26OoqEiWSRFJUT+zR3nAoPr2j1XmpGhv80QNkB7H0mMiIklGVf0kJibC2dlZ8vkHDx7INikibfrO7LHqAwaltnmykkuDlazkx9e41UNEpJfBgcrw4cPLHcNEWjIFfWf2xDX1tu7SY33bPI26AG1Hs/SYiMgABgcqK1euNOU8iPQq68weqwtSDOkwqwxOGKAQEZWrwi30iczFZhJn2WGWiEh2DFTIKtlc4iw7zBIRmQQDFbI6NpU4yw6zREQmxUCFrIpNJc6ywywRkckZ3JmWyBzKSpy1KuwwS0RkFlxRIati1Ymz7DBLRGR2DFTIqlht4iw7zBIRWQQDFbI6Vpc4yw6zREQWw0CFrIJ2ObJVJc6ywywRkcUwUCGL01eObHHKnJTiR9LPs8MsEZHJMVAhiyqvHNlitHNSfFsBl9IfP+Y2DxGRWTBQIYuyynN8pHJSLqUDvf8N2Dtym4eIyIwYqJDZqeejWE05siGlx/aOpV1miYjIbBiokFlJ5aNYvBzZ0NJjdpklIjI7BipkNvryUVLGdbBcOTJLj4mIrBoDFTKbsvJRElr6WSYnhaXHRERWjYEKmY1V5qPo285h6TERkVXgoYRkchk5+dh49CKA0pwUdRbJR1kRC6S8Wvrnqc1Ax0maY7jNQ0RkNRRCCGHpSVRUYWEh3N3dUVBQADc3N0tPhyRIJc9aNB9lRazu9ZGppX9ym4eIyCyM+f3NrR8ymbKauSW09DPfRJRbPbeypZ+/eba07JgBChGR1WGgQiZjFc3ctEuPpbDsmIjIajFHhUzG4smzUqXH2piPQkRk1biiQrLSPgXZ7M3cDOkwG5ME1AtmPgoRkQ1goEKy0XcKstmSZw3tMBvajQEKEZGN4NYPyUJf4mxGTj6iA+qavqFbWR1m1XGrh4jIpnBFhWRh8cRZdpglIqqSGKiQLCyWOKvMSSl+JP08O8wSEdk0BiokC4skzmrnpPi2Ai6lP37MbR4iIpvHzrRUYdoVPvqumYS+LrO9/w3YO3Kbh4jIirEzLZmcvgqf6IC6pgtQDCk9tncs7TJLRERVAgMVMlpZrfFNFqQYWnrMLrNERFUKy5PJaGVV+JgES4+JiKotrqiQwZT5J4+KSySfN1mFD0uPiYiqLQYqZBDtnJQW/u7IzC1QPZa9wkc9H0Xfdg5Lj4mIqjwGKiRJvXoHgE5OSmZuAea9EAlHezv5K3y081E6Tir90rjGbR4iouqAgQrp0F496RLeQHKco70dElr6yfvhUvkoexcBI1OBiF7c5iEiqmYYqBAAzfwT7dWTXaevS75G1pwU5VbPrWzp52+eLS07ZoBCRFStMFAhnRUUKZ3DGmD3mccBi6w5KdpbPVJYdkxEVC0xUKnmpHqiSJkYG4qJsaHyd52V2urRxnwUIqJqi4FKNaSeKGtI7xP11RNZAhRDOszGJAH1gpmPQkRUzTFQqWa0t3n6RvtIjjNbRY++DrOh3RigEBERO9NWJ1LbPCkZl3WClbExIRjQJgAJLf3k743CDrNERGQErqhUI/q2eZ4KbYBh7YNMf+oxO8wSEZGRGKhUI/rKiZXBiclPPS5+JP08O8wSEZEeDFSqkeiAuhgTE6Kx/SN763tt2jkpvq2AS+mPH3Obh4iIyqAQQghLT6KiCgsL4e7ujoKCAri5uVl6OjZDverHpEHKxSPAiljd673/Ddg7cpuHiKiaMub3N1dUqjipoMQs2zxllR7bO5Z2mSUiIioHA5UqTLsUeUxMCJLiI0z3gYaWHrPLLBERGYjlyVWUVCny0rRzyMjJN80HsvSYiIhMgCsqVZS+UuTsG3dNs+3D0mMiIjIBBipVVFmlyLJRz0fRt53D0mMiIqoEbv1UUcpSZHWyn3i8IhZIebX0z1ObgY6TNMdwm4eIiCqJ5clVnElKkfWVHY9MLf2T2zxERFQGlidXY9qBiaylyMqtnlvZ0s/fPFtadswAhYiIZMJApQoxaTmydumxFJYdExGRzJijUkWYtBxZqvRYG/NRiIjIBLiiYsPUt3lkL0c2pMNsTBJQL5j5KEREZDIMVGyU9jZP32gfyXEVKkc2tMNsaDcGKEREZFLc+rFBUts8KRmXdYKVCpUjs8MsERFZEa6o2CB92zxPhTbAsPZBlStHZodZIiKyIgxUbFBZXWcrXI6szEkpfiT9PDvMEhGRBTBQsSHqybNjYkI0tn8q1XVWOyfFtxVwKf3xY27zEBGRhTBQsRFSPVJSxnWofNdZqZyUS+lA738D9o7c5iEiIotioGID9PVIiWvqjYSWfsa/oSGlx/aOpV1miYiILIiBig2QtUeKoaXH7DJLRERWgOXJNqCs5FmjsPSYiIhsDFdUbEB0QF15kmdZekxERDaGgYqNSIqPQFxTb+OTZ9XzUfRt57D0mIiIrBQDFRtidI8U7XyUjpNKvzSucZuHiIisFwOVqkoqH2XvImBkKhDRi9s8RERkExioWCn15m5GraIot3puZUs/f/NsadkxAxQiIrIBDFSskFRzt6T4iPJfqL3VI4Vlx0REZENYnmxl9DV3y8jJL/uFUls92piPQkRENoYrKlbGqOZuhnSYjUkC6gUzH4WIiGwSAxUrY3BzN0M7zIZ2Y4BCREQ2i1s/VkbZ3E2dTnM3dpglIqJqgisqVqjc5m7sMEtERNUEAxUrJdncTZmTUvxI+kXsMEtERFUMAxUrYFDPFO2cFN9WwKX0x4+5zUNERFUQAxULM6hnilROyqV0oPe/AXtHbvMQEVGVxUDFApQrKI+KSyR7psQ19Ua03d/llx7bO5Z2mSUiIqqiGKiYmfYKipQaez4Azv7n8QV9pcfsMktERFUcy5PNSKrrrLYWirMIUw9SAJYeExFRtcUVFTPS13VW3ciIYkAqlmHpMRERVUMMVMxIX9fZ5V0E6tzPgZtvOMK8n5YOVFh6TERE1RC3fsxIquvs2uCt6LZvCNpkTEPYz32BU5uBjpM0X8htHiIiqqYUQghh6UlUVGFhIdzd3VFQUAA3NzdLT0eSVI8U5bWmJX+WBifaRqaW/sltHiIiqoKM+f3NrR8T0tcjJdrub0TbnwUKsqVfePNsadkxAxQiIqrmGKiYiFSFz9K0c3j53ip4HV9a9otZdkxERASAgYrslNs6F27qVvi0UJwtP0hhPgoREZEKAxUZSTVza6E4i2DFFWSLhghWXJF+YUwSUC+Y+ShERERaGKjIRGqrZ6rDGox12Kx6/F+P7sBNiReHdmOAQkREJIHly
TLRbubWQnFWI0gBgGY3t7HDLBERkRG4oiIT7WZuerd52GGWiIjIYAxUZKJs5nZgz3YEK67AAUXSA9lhloiIyGAMVCpBu5lbksNawHnR4wG+rYBL6Y8fc5uHiIjIKAxUKki7wuf9Vvcw/MQizUGX0oHe/wbsHbnNQ0REVAEMVCpAWeGjXnqcmXEFw50kBts7lnaZJSIiIqMxUDGQ+jZP9o27OqXHPxZ1kn4hu8wSERFVGAMVA2hv87wefhuTtUqPX3D4Azcb9YXH3ymPLzInhYiIqFIYqJRDqpFbzp/HAYltHo+o7kDn8Sw9JiIikgkDlXIoG7mp56Nki4bSg1l6TEREJCsGKuUIrl9TJx9lSVEvXI0co3nAILd5iIiIZMdARQ9l8mzTkj91WuGPddgMtEsF2r3IbR4iIiITYqAi4aOtp1QdZnMUVxHmKDHo5tnSsmMGKERERCZj0UMJ9+zZg169esHHxwcKhQKbNm2y5HQAlK6kuO/9EJuc38NCpyWY5LhReiDLjomIiEzOooHK3bt3UVRUhKKi0nNx3njjDfz++++WnBJu/7VfZ6tHB/NRiIiIzMKigUphYSFSU1MxZ84cAECTJk0QHx+PnJwci81J36nHl1u8DvRdBoxMBbrNNO+kiIiIqimLBioLFizAK6+8gpEjRwIAXnnlFfj7+2PJkiWS4x88eIDCwkKNL7kFPdFc8rpP697MSSEiIjIziwUqDx8+RHp6Op599lmN688++yz27dsn+ZoPPvgA7u7uqi9/f3/5J+bXGug4SfMat3qIiIgswmJVPzdu3EBxcTG8vLw0rnt5eSEvL89Cs/o/3d4HInqx9JiIiMjCLLr1AwAKhULjsRBC55rS9OnTUVBQoPrKzc013cT8WnOrh4iIyMIstqJSv3592Nvb66yeXLt2TWeVRcnZ2RnOzs7mmB4RERFZAYutqDg5OaFFixZYs2YNMjMzAQDZ2dnYvHkzmjZtaqlpERERkRWx6NbPc889h/Xr1yM6OhoAMGXKFGRnZ+PatWuWnBYRERFZCYsGKrNmzcKXX36JwMBAODk5oWXLlkhLS8OGDRssOS0iIiKyEgohhLD0JCqqsLAQ7u7uKCgogJubm6WnQ0RERAYw5ve3xat+iIiIiPRhoEJERERWi4EKERERWS0GKkRERGS1GKgQERGR1WKgQkRERFbLYi305aCsrC4sLLTwTIiIiMhQyt/bhnRIselA5c6dOwAAf39/C8+EiIiIjHXnzh24u7uXOcamG76VlJTg8uXLqF27tt4Tlw1RWFgIf39/5ObmsnGcGfB+mxfvt3nxfpsX77d5yXW/hRC4c+cOfHx8YGdXdhaKTa+o2NnZwc/PT7b3c3Nz4190M+L9Ni/eb/Pi/TYv3m/zkuN+l7eSosRkWiIiIrJaDFSIiIjIajFQAeDs7IwZM2bA2dnZ0lOpFni/zYv327x4v82L99u8LHG/bTqZloiIiKo2rqgQERGR1WKgQkRERFaLgQoRERFZLQYqREREZLUYqABYvHgxgoOD4eLiglatWuH333+39JRszty5c9GmTRvUrl0bnp6e6NOnD86cOaMxRgiBmTNnwsfHB66urnjmmWdw4sQJjTEPHjzAhAkTUL9+fdSsWRO9e/fGxYsXzfmt2KS5c+dCoVBg0qRJqmu83/K6dOkSXnrpJXh4eKBGjRpo0aIF0tPTVc/zfsunqKgI//rXvxAcHAxXV1eEhIRg1qxZKCkpUY3h/a64PXv2oFevXvDx8YFCocCmTZs0npfr3ubn52Po0KFwd3eHu7s7hg4ditu3bxs/YVHNrV27Vjg6Oorly5eLkydPitdff13UrFlTXLhwwdJTsylxcXFi5cqV4r///a/IzMwUPXr0EAEBAeJ///ufasxHH30kateuLX788Udx/PhxMWDAANGwYUNRWFioGjNmzBjh6+srduzYIY4ePSo6d+4smjdvLoqKiizxbdmEQ4cOiaCgIBEVFSVef/111XXeb/ncunVLBAYGisTERHHw4EGRnZ0tdu7cKc6ePasaw/stn9mzZwsPDw/x888/i+zsbLF+/XpRq1YtsWjRItUY3u+K27Jli3j33XfFjz/+KACIlJQUjeflurfdu3cXzZo1E/v27RP79u0TzZo1Ez179jR6vtU+UGnbtq0YM2aMxrXw8HCRlJRkoRlVDdeuXRMARFpamhBCiJKSEuHt7S0++ugj1Zh//vlHuLu7i6VLlwohhLh9+7ZwdHQUa9euVY25dOmSsLOzE9u2bTPvN2Aj7ty5I0JDQ8WOHTtETEyMKlDh/ZbX1KlTRadOnfQ+z/strx49eogRI0ZoXEtISBAvvfSSEIL3W07agYpc9/bkyZMCgDhw4IBqzP79+wUAcfr0aaPmWK23fh4+fIj09HQ8++yzGtefffZZ7Nu3z0KzqhoKCgoAAPXq1QMAZGdnIy8vT+NeOzs7IyYmRnWv09PT8ejRI40xPj4+aNasGX8eerz22mvo0aMHunbtqnGd91teP/30E1q3bo0XX3wRnp6eiI6OxvLly1XP837Lq1OnTkhNTcWff/4JADh27Bj++OMPPPfccwB4v01Jrnu7f/9+uLu7o127dqoxTz75JNzd3Y2+/zZ9KGFl3bhxA8XFxfDy8tK47uXlhby8PAvNyvYJITBlyhR06tQJzZo1AwDV/ZS61xcuXFCNcXJyQt26dXXG8Oeha+3atTh69CgOHz6s8xzvt7zOnTuHJUuWYMqUKXjnnXdw6NAhTJw4Ec7Ozhg2bBjvt8ymTp2KgoIChIeHw97eHsXFxfjwww8xaNAgAPz7bUpy3du8vDx4enrqvL+np6fR979aBypKCoVC47EQQucaGW78+PHIysrCH3/8ofNcRe41fx66cnNz8frrr2P79u1wcXHRO473Wx4lJSVo3bo15syZAwCIjo7GiRMnsGTJEgwbNkw1jvdbHsnJyVi9ejV++OEHNG3aFJmZmZg0aRJ8fHwwfPhw1Tjeb9OR495Kja/I/a/WWz/169eHvb29TnR37do1nWiSDDNhwgT89NNP2L17N/z8/FTXvb29AaDMe+3t7Y2HDx8iPz9f7xgqlZ6ejmvXrqFVq1ZwcHCAg4MD0tLS8Pnnn8PBwUF1v3i/5dGwYUM0adJE41pERARycnIA8O+33N566y0kJSVh4MCBiIyMxNChQzF58mTMnTsXAO+3Kcl1b729vXH16lWd979+/brR979aBypOTk5o1aoVduzYoXF9x44d6NChg4VmZZuEEBg/fjw2btyIXbt2ITg4WOP54OBgeHt7a9zrhw8fIi0tTXWvW7VqBUdHR40xV65cwX//+1/+PLTExsbi+PHjyMzMVH21bt0aQ4YMQWZmJkJCQni/ZdSxY0edcvs///wTgYGBAPj3W2737t2DnZ3mryd7e3tVeTLvt+nIdW/bt2+PgoICHDp0SDXm4MGDKCgoMP7+G5V6WwUpy5P/85//iJMnT4pJkyaJmjVrivPnz1t6ajZl7Nixwt3dXfz222/iypUrqq979+6pxnz00UfC3d1dbNy4URw/flwM
GjRIsuTNz89P7Ny5Uxw9elR06dKF5YQGUq/6EYL3W06HDh0SDg4O4sMPPxR//fWX+P7770WNGjXE6tWrVWN4v+UzfPhw4evrqypP3rhxo6hfv754++23VWN4vyvuzp07IiMjQ2RkZAgAYsGCBSIjI0PVlkOue9u9e3cRFRUl9u/fL/bv3y8iIyNZnlxRX375pQgMDBROTk6iZcuWqpJaMhwAya+VK1eqxpSUlIgZM2YIb29v4ezsLJ5++mlx/Phxjfe5f/++GD9+vKhXr55wdXUVPXv2FDk5OWb+bmyTdqDC+y2vzZs3i2bNmglnZ2cRHh4uvvrqK43neb/lU1hYKF5//XUREBAgXFxcREhIiHj33XfFgwcPVGN4vytu9+7dkv9/PXz4cCGEfPf25s2bYsiQIaJ27dqidu3aYsiQISI/P9/o+SqEEMLIlSEiIiIis6jWOSpERERk3RioEBERkdVioEJERERWi4EKERERWS0GKkRERGS1GKgQERGR1WKgQkRERFaLgQoR2YygoCAsWrTI0tMgIjNioEJkQxITE9GnTx9LT8Mo5gwuZs6ciRYtWpjls4jIPBioEBERkdVioEJkw5555hlMnDgRb7/9NurVqwdvb2/MnDlTY8zMmTMREBAAZ2dn+Pj4YOLEiarngoKC8MEHH2Dw4MGoVasWfHx88MUXX2i8vqCgAKNHj4anpyfc3NzQpUsXHDt2TGPMTz/9hNatW8PFxQX169dHQkKCan4XLlzA5MmToVAooFAoVK/Zt28fnn76abi6usLf3x8TJ07E3bt3Vc9fu3YNvXr1gqurK4KDg/H9998bfX+UK1Bz5syBl5cX6tSpg/fffx9FRUV46623UK9ePfj5+eHrr7/WeN3UqVPxxBNPoEaNGggJCcH06dPx6NEjjTGzZ8+Gp6cnateujZEjRyIpKUlnNWflypWIiIiAi4sLwsPDsXjxYtVzDx8+xPjx49GwYUO4uLggKCgIc+fONfp7JKrqGKgQ2bhvvvkGNWvWxMGDBzF//nzMmjVLdfz6hg0bsHDhQixbtgx//fUXNm3ahMjISI3Xf/zxx4iKisLRo0cxbdo0TJ48WfV6IQR69OiBvLw8bNmyBenp6WjZsiViY2Nx69YtAMAvv/yChIQE9OjRAxkZGUhNTUXr1q0BABs3boSfnx9mzZqFK1eu4MqVKwCA48ePIy4uDgkJCcjKykJycjL++OMPjB8/XjWvxMREnD9/Hrt27cKGDRuwePFiXLt2zej7s2vXLly+fBl79uzBggULMHPmTPTs2RN169bFwYMHMWbMGIwZMwa5ubmq19SuXRurVq3CyZMn8dlnn2H58uVYuHCh6vnvv/8eH374IebNm4f09HQEBARgyZIlGp+7fPlyvPvuu/jwww9x6tQpzJkzB9OnT8c333wDAPj888/x008/Yd26dThz5gxWr16NoKAgo78/oiqvQkcvEpFFDB8+XDz//POqxzExMaJTp04aY9q0aSOmTp0qhBDi008/FU888YR4+PCh5PsFBgaK7t27a1wbMGCAiI+PF0IIkZqaKtzc3MQ///yjMaZRo0Zi2bJlQggh2rdvL4YMGaJ3zoGBgWLhwoUa14YOHSpGjx6tce33338XdnZ24v79++LMmTMCgDhw4IDq+VOnTgkAOu+lbsaMGaJ58+aqx8OHDxeBgYGiuLhYdS0sLEw89dRTqsdFRUWiZs2aYs2aNXrfd/78+aJVq1aqx+3atROvvfaaxpiOHTtqfLa/v7/44YcfNMZ88MEHon379kIIISZMmCC6dOkiSkpK9H4uEQnBFRUiGxcVFaXxuGHDhqqVhxdffBH3799HSEgIRo0ahZSUFBQVFWmMb9++vc7jU6dOAQDS09Pxv//9Dx4eHqhVq5bqKzs7G3///TcAIDMzE7GxsUbNOT09HatWrdJ4z7i4OJSUlCA7OxunTp2Cg4ODamUGAMLDw1GnTh2jPgcAmjZtCju7x/9X5+XlpbGqZG9vDw8PD43Vmg0bNqBTp07w9vZGrVq1MH36dOTk5KieP3PmDNq2bavxOeqPr1+/jtzcXLzyyisa3+Ps2bNV9y0xMRGZmZkICwvDxIkTsX37dqO/N6LqwMHSEyCiynF0dNR4rFAoUFJSAgDw9/fHmTNnsGPHDuzcuRPjxo3Dxx9/jLS0NJ3Xab8HAJSUlKBhw4b47bffdMYogwZXV1ej51xSUoJXX31VI19GKSAgAGfOnNGYR2VI3Z+y7tmBAwcwcOBAvP/++4iLi4O7uzvWrl2LTz/9VOc16oQQqv+tfK/ly5ejXbt2GuPs7e0BAC1btkR2dja2bt2KnTt3on///ujatSs2bNhQie+WqOphoEJUxbm6uqJ3797o3bs3XnvtNYSHh+P48eNo2bIlgNJfzOoOHDiA8PBwAKW/TPPy8uDg4KA3fyIqKgqpqal4+eWXJZ93cnJCcXGxxrWWLVvixIkTaNy4seRrIiIiUFRUhCNHjqhWKs6cOYPbt28b+m1X2N69exEYGIh3331Xde3ChQsaY8LCwnDo0CEMHTpUde3IkSOq/+3l5QVfX1+cO3cOQ4YM0ftZbm5uGDBgAAYMGIB+/fqhe/fuuHXrFurVqyfjd0Rk2xioEFVhq1atQnFxMdq1a4caNWrgu+++g6urKwIDA1Vj9u7di/nz56NPnz7YsWMH1q9fj19++QUA0LVrV7Rv3x59+vTBvHnzEBYWhsuXL2PLli3o06cPWrdujRkzZiA2NhaNGjXCwIEDUVRUhK1bt+Ltt98GUFpZtGfPHgwcOBDOzs6oX78+pk6diieffBKvvfYaRo0ahZo1a+LUqVPYsWMHvvjiC4SFhaF79+4YNWoUvvrqKzg4OGDSpEkVWr0xVuPGjZGTk4O1a9eiTZs2+OWXX5CSkqIxZsKECRg1ahRat26NDh06IDk5GVlZWQgJCVGNmTlzJiZOnAg3NzfEx8fjwYMHOHLkCPLz8zFlyhQsXLgQDRs2RIsWLWBnZ4f169fD29u7QttbRFUZc1SIqrA6depg+fLl6Nixo2rlY/PmzfDw8FCNeeONN5Ceno7o6Gh88MEH+PTTTxEXFwegdHtjy5YtePrppzFixAg88cQTGDhwIM6fPw8vLy8ApSXI69evx08//YQWLVqgS5cuOHjwoOr9Z82ahfPnz6NRo0Zo0KABgNJVmLS0NPz111946qmnEB0djenTp6Nhw4aq161cuRL+/v6IiYlBQkKCqkTa1J5//nlMnjwZ48ePR4sWLbBv3z5Mnz5dY8yQIUMwbdo0vPnmm6otnMTERLi4uKjGjBw5EitWrMCqVasQGRmJmJgYrFq1CsHBwQCAWrVqYd68eWjdujXatGmD8+fPY8uWLRr5NEQEKIT6xioRVStBQUGYNGkSJk2aZOmp2Lxu3brB29sb3333naWnQlSlcOuHiMhI9+7dw9KlSxEXFwd7e3usWbMGO3fuVPWfISL5MFAhIjKSckts9uzZePDgAcLCwvDjjz+ia9eulp4aUZXDrR8iIiKyWszaIiIiIqvFQIWIiIisFgMVIiI
isloMVIiIiMhqMVAhIiIiq8VAhYiIiKwWAxUiIiKyWgxUiIiIyGoxUCEiIiKr9f8BUWWE1fKXBt4AAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAioAAAHFCAYAAADcytJ5AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA5SElEQVR4nO3deXxU1f3/8fdkDyEMgUAghCUBDFBWiWBABVRAyyJKFREVUFrWCoJVsFYQFxCU1lYRsSxf9cviT8C6oAKyKaCySthiBSQBAggJCSQkITPn94ffjAxZyIQJuTGv5+Mxj5Jzzz33c08mzrt3G5sxxggAAMCCfMq7AAAAgKIQVAAAgGURVAAAgGURVAAAgGURVAAAgGURVAAAgGURVAAAgGURVAAAgGURVAAAgGURVAAv2b17tx599FE1btxYwcHBCg4OVtOmTTV8+HBt27btmtUxZcoU2Ww2t7ZGjRppyJAhZbrdzZs3a8qUKTp79myZbqekhgwZIpvN5noFBgYqNjZWkydPVnZ2tsfj2Ww2TZkyxfuFXmNdu3ZV165dy7sMoMT8yrsA4Lfgrbfe0pgxYxQbG6uxY8fqd7/7nWw2m/bv36/Fixfrhhtu0I8//qjGjRuXS30rVqxQtWrVynQbmzdv1nPPPachQ4aoevXqZbqtkgoODtbatWslSWlpaVq8eLGmTp2qAwcOaOnSpR6NtWXLFkVFRZVFmQCKQVABrtKmTZs0atQo9erVSx988IECAgJcy2699VaNHj1a/+///T8FBwcXO05WVpaqVKlSJjW2a9euTMa1Oh8fH914442un++880799NNPev/99zVr1izVq1evxGNdOg6Aa4dTP8BVeumll+Tr66u33nrLLaRc6t5771VkZKTr5yFDhqhq1apKSEhQjx49FBoaqttuu02StHr1at11112KiopSUFCQmjRpouHDh+v06dMFxv3000/Vtm1bBQYGKjo6Wq+88kqh2y/s1E9GRoaeeOIJRUdHKyAgQPXq1dO4ceOUmZnp1s9ms2nMmDF699131bx5c1WpUkVt2rTRJ5984uozZcoU/eUvf5EkRUdHu063rF+/vtB6/vGPf8hms+nHH38ssOypp55SQECAa3937typ3r17q3bt2goMDFRkZKR69eqlo0ePFjr2leQHjiNHjkiSkpKS9OCDD7rGb968uV599VU5nc4C83DpqZ+srCzX/AUFBalGjRqKi4vT4sWL3db76KOPFB8frypVqig0NFTdu3fXli1b3Prkn67bu3evBg4cKLvdroiICD3yyCNKT09362uM0ezZs9W2bVsFBwcrLCxMf/jDH3To0KEC/WbMmKGGDRsqKChI119/vT777LNSzRlQnjiiAlwFh8OhdevWKS4uTnXr1vVo3dzcXPXt21fDhw/XxIkTlZeXJ0k6ePCg4uPjNWzYMNntdv3000+aNWuWbrrpJiUkJMjf31+S9OWXX+quu+5SfHy8lixZIofDoRkzZujkyZNX3HZWVpa6dOmio0eP6umnn1br1q21d+9ePfvss0pISNCaNWvcrnP59NNPtXXrVk2dOlVVq1bVjBkzdPfddysxMVExMTEaNmyYUlNT9a9//UvLly93zUWLFi0K3f6DDz6op556SgsXLtQLL7zgNp/vvfee+vTpo/DwcGVmZqp79+6Kjo7WG2+8oYiICJ04cULr1q3TuXPnPJrvfPnhqFatWvr555/VqVMn5ebm6vnnn1ejRo30ySef6IknntDBgwc1e/bsIscZP3683n33Xb3wwgtq166dMjMztWfPHp05c8bVZ9GiRRo0aJB69OihxYsXKycnRzNmzFDXrl315Zdf6qabbnIbs3///howYIAeffRRJSQkaNKkSZKk+fPnu/oMHz5cCxcu1GOPPaaXX35Zqampmjp1qjp16qTvv/9eERERkqTnnntOzz33nB599FH94Q9/UHJysv74xz/K4XAoNja2VHMHlAsDoNROnDhhJJn777+/wLK8vDxz8eJF18vpdLqWDR482Egy8+fPL3Z8p9NpLl68aI4cOWIkmf/85z+uZR07djSRkZHmwoULrraMjAxTo0YNc/mfdsOGDc3gwYNdP0+bNs34+PiYrVu3uvX74IMPjCSzcuVKV5skExERYTIyMtz228fHx0ybNs3VNnPmTCPJHD58uNh9ynfPPfeYqKgo43A4XG0rV640kszHH39sjDFm27ZtRpL58MMPSzTmpQYPHmxCQkJc8//zzz+b1157zdhsNnPDDTcYY4yZOHGikWS+/fZbt3VHjhxpbDabSUxMdJuHyZMnu35u2bKl6devX5HbdzgcJjIy0rRq1cptH8+dO2dq165tOnXq5GqbPHmykWRmzJjhNsaoUaNMUFCQ672zZcsWI8m8+uqrbv2Sk5NNcHCwefLJJ40xxqSlpZmgoCBz9913u/XbtGmTkWS6dOlSZN2A1XDqBygj7du3l7+/v+v16quvFujTv3//Am2nTp3SiBEjVL9+ffn5+cnf318NGzaUJO3fv1+SlJmZqa1bt+qee+5RUFCQa93Q0FD16dPnirV98sknatmypdq2bau8vDzXq2fPnoWesunWrZtCQ0NdP0dERKh27dqu0yelMXToUB09elRr1qxxtS1YsEB16tTRnXfeKUlq0qSJwsLC9NRTT2nOnDnat2+fR9vIzMx0zX+tWrU0btw43XnnnVqxYoUkae3atWrRooU6dOjgtt6QIUNkjHFdiFuYDh066LPPPtPEiRO1fv16XbhwwW15YmKijh8/roceekg+Pr/+p7Zq1arq37+/vvnmG2VlZbmt07dvX7efW7durezsbJ06dUrSL783m82mBx980O33VqdOHbVp08b1e9uyZYuys7M1aNAgt/E6derkei8BFQWnfoCrEB4eruDg4EI/sBctWqSsrCylpKQU+ACSpCpVqhS4E8fpdKpHjx46fvy4/va3v6lVq1YKCQmR0+nUjTfe6PowTEtLk9PpVJ06dQqMW1jb5U6ePKkff/zRdRrpcpdfD1OzZs0CfQIDAwt8OHvizjvvVN26dbVgwQL16NFDaWlp+uijjzR27Fj5+vpKkux2uzZs2KAXX3xRTz/9tNLS0lS3bl398Y9/1DPPPFNk/fmCg4O1ceNGV70NGzZ0m/MzZ86oUaNGBdbLv57o0tM4l/vnP/+pqKgoLV26VC+//LKCgoLUs2dPzZw5U02bNnWtW9gpwcjISDmdTqWlpbldQH35PAcGBkqSa55PnjwpY4zr9M7lYmJi3Oou7fsDsBKCCnAVfH19deutt2rVqlVKSUlx+1DKvz7jp59+KnTdy591Ikl79uzR999/r4ULF2rw4MGu9ssvOg0LC5PNZtOJEycKjFFY2+XyA9al1z5cvrys+fr66qGHHtI///lPnT17VosWLVJOTo6GDh3q1q9Vq1ZasmSJjDHavXu3Fi5cq
KlTpyo4OFgTJ04sdhs+Pj6Ki4srcnnNmjWVkpJSoP348eOSip+HkJAQ13UgJ0+edB1d6dOnjw4cOOAKHUWN7+Pjo7CwsGLrv1x4eLhsNpu++uorV4i5VH5b/raLen8UFs4Aq+LUD3CVJk2aJIfDoREjRujixYtXNVZ+eLn8Q+itt95y+zkkJEQdOnTQ8uXL3R5edu7cOX388cdX3E7v3r118OBB1axZU3FxcQVepfkgu/z//ZfE0KFDlZ2drcWLF2vhwoWKj49Xs2bNCu1rs9nUpk0b/f3vf1f16tW1Y8cOj2u83G233aZ9+/YVGOudd96RzWZTt27dSjRORESEhgwZooEDByoxMVFZWVmKjY1VvXr1tGjRIhljXH0zMzO1bNky151Anujdu7eMMTp27Fihv7dWrVpJ+uXOpqCgIP3v//6v2/qbN2++qtN1QHngiApwlTp37qw33nhDf/7zn3X99dfrT3/6k373u9/Jx8dHKSkpWrZsmSSV6IFrzZo1U+PGjTVx4kQZY1SjRg19/PHHWr16dYG+zz//vO644w51795dEyZMkMPh0Msvv6yQkBClpqYWu51x48Zp2bJluuWWW/T444+rdevWcjqdSkpK0qpVqzRhwgR17NjRo3nI/5B87bXXNHjwYPn7+ys2Ntbt2pbC9jc+Pl7Tpk1TcnKy5s6d67b8k08+0ezZs9WvXz/FxMTIGKPly5fr7Nmz6t69u0f1Febxxx/XO++8o169emnq1Klq2LChPv30U82ePVsjR47UddddV+S6HTt2VO/evdW6dWuFhYVp//79evfdd90CyIwZMzRo0CD17t1bw4cPV05OjmbOnKmzZ89q+vTpHtfbuXNn/elPf9LQoUO1bds23XLLLQoJCVFKSoq+/vprtWrVSiNHjlRYWJieeOIJvfDCCxo2bJjuvfdeJScna8qUKZz6QcVTjhfyAr8pu3btMkOHDjXR0dEmMDDQBAUFmSZNmpiHH37YfPnll2598+9IKcy+fftM9+7dTWhoqAkLCzP33nuvSUpKKnDXiTHGfPTRR6Z169YmICDANGjQwEyfPt11B8mlLr/rxxhjzp8/b5555hkTGxtrAgICjN1uN61atTKPP/64OXHihKufJDN69OgCdRY25qRJk0xkZKTx8fExksy6deuKnzRjzNy5c40kExwcbNLT092WHThwwAwcONA0btzYBAcHG7vdbjp06GAWLlx4xXGLm+NLHTlyxDzwwAOmZs2axt/f38TGxpqZM2e63aljTMG7fiZOnGji4uJMWFiYCQwMNDExMebxxx83p0+fdlvvww8/NB07djRBQUEmJCTE3HbbbWbTpk1uffJ/Zz///LNb+4IFCwq9k2r+/PmmY8eOJiQkxAQHB5vGjRubhx9+2Gzbts3Vx+l0mmnTppn69eubgIAA07p1a/Pxxx+bLl26cNcPKhSbMZcckwQAALAQrlEBAACWRVABAACWRVABAACWRVABAACWRVABAACWRVABAACWVaEf+OZ0OnX8+HGFhoYW+jhyAABgPcYYnTt3TpGRkW5f2lmYCh1Ujh8/rvr165d3GQAAoBSSk5MVFRVVbJ8KHVTyH82dnJxcoseTAwCA8peRkaH69esX+xUb+Sp0UMk/3VOtWjWCCgAAFUxJLtvgYloAAGBZBBUAAGBZBBUAAGBZBBUAAGBZBBUAAGBZBBUAAGBZBBUAAGBZBBUAAGBZBBUAAGBZBBUAAGBZ5RpUNm7cqD59+igyMlI2m00ffvhheZYDAAAsply/6yczM1Nt2rTR0KFD1b9///IsxRJ2JqVpfeIp7T2WLh8fm25rHqGk1CxtSDwlh9MoNMhf1YL89Lt6dkVWD5a/r48uOpzy9/XRwZ/P678nzymsSoCubxim42cvSJKrX/5yp9MoPTtPZ87nqGZIgGSz6UKuQ7F1QhUVFixJ6hpbW5K0PvGUTqRn68z5HPn42NQ0IlR+PjZXnx9OntOX+08qrEqAwkMDtT7xlA79nKk8h1PhVQN1T/so+fnYtPdYulLSsxUS6KcjqVlKy8yVn49NIYF+ys1z6MJFpy46jHxskr+fj4zTyMfHpgBfm/x8fXQ266Ic5pc58veRmtSuqjr2YO1KPiun06mwkEDl5jmUfiFP2XkOGafk72uTzWZTTp5TRlLNKv66vUWEJOnM+Rx9dyRNWTl5CvD1UaPwEKVl5io166JsMgoJ9FN2nlMXch0yRrJJku2X/w3w85HTaZRnjIL8fNSgRhWFBvnrXE6e0rNyFeDnq5yLDp3PyVNUWLBubR6hlbtTdD4nT7dcV0sdomvoy/0n5XT+skMZ2XmqFvTLn+Hlc7z7aLqSU7PUpn51PXhjQ7f3RmiQvwL9fBQWEqDGtaoqOjxEknT4dKaiw0PUrkFYgffW4dOZrvfLpf0P/nxeaZm5CgsJ0OlzOUrLytVtzSM04IYGRb5P88fKf591ja3tts38PtHhIfrh5Dl9n3xWbepXdxvz0j756xbW5k2Xjp+//55sy5P6ymP/ykJFrFmquHVbhZXmz2aMMeVawf+x2WxasWKF+vXrV+J1MjIyZLfblZ6eXuG/lHD6Z/s1Z8Oh8i4DvxEjusRo4p3NJZX+vdW2vl0fjr7Jra24sfK3WVyf/DEv7zOiS4wkFWjL3wdvKEntnqxf3DrlsX9lwZN9tpKKWrdVXIv58+Tzu0Jdo5KTk6OMjAy312/BzqQ0Qgq8as6GQ9qZlHZV761dyelaujXJ9fOVxpqz4ZCWbk0qts+u5HTN/OJAgT5zNhwqtG1nUlqpar9cSWovbluFrV/UOkX1Lcv9Kwue7LOVVNS6rcKK81ehgsq0adNkt9tdr/r165d3SV5x+HRmeZeA36DDpzOv+r31ffJZt/E86V+UHUdK/h88b/1tlGSc4voUtaywdk9qtvLfvif7bCUVtW6rsOL8VaigMmnSJKWnp7teycnJ5V2SV+SfLwe8KTo85KrfW23qV3cbz5P+Rbm+YcnPd3vrb6Mk4xTXp6hlhbV7UrOV//Y92Wcrqah1W4UV569CBZXAwEBVq1bN7fVb0K5BmOscNuANI7vEqF2DsKt6b7Wrb3e7+PVKY43sEqMBNzQotk+7+nb9pWezAn1GdokptM1bF/GVpPbitlXY+kWtU1Tfsty/suDJPltJRa3bKqw4f1xMayHc9cNdP9z1w10/VlMRa5Yqbt1WUdbz58nnd7kGlfPnz+vHH3+UJLVr106zZs1St27dVKNGDTVoUPh/JC/1WwsqAABUBp58fpfrc1S2bdumbt26uX4eP368JGnw4MFauHBhOVUFAACsolyDSteuXWWRM08AAMCCKtTFtAAAoHIhqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsi
qAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsiqAAAAMsqdVD58ccf9cUXX+jChQuSJGOM14oCAACQShFUzpw5o9tvv13XXXedfv/73yslJUWSNGzYME2YMMHrBQIAgMrL46Dy+OOPy8/PT0lJSapSpYqrfcCAAfr888+9WhwAAKjc/DxdYdWqVfriiy8UFRXl1t60aVMdOXLEa4UBAAB4fEQlMzPT7UhKvtOnTyswMNArRQEAAEilCCq33HKL3nnnHdfPNptNTqdTM2fOVLdu3bxaHAAAqNw8PvUzc+ZMde3aVdu2bVNubq6efPJJ7d27V6mpqdq0aVNZ1AgAACopj4+otGjRQrt371aHDh3UvXt3ZWZm6p577tHOnTvVuHHjsqgRAABUUjZTgR+AkpGRIbvdrvT0dFWrVq28ywEAACXgyee3x6d+du/eXWi7zWZTUFCQGjRowEW1AADAKzwOKm3btpXNZpP069No83+WJH9/fw0YMEBvvfWWgoKCvFQmAACojDy+RmXFihVq2rSp5s6dq++//167du3S3LlzFRsbq0WLFmnevHlau3atnnnmmbKoFwAAVCIeH1F58cUX9dprr6lnz56uttatWysqKkp/+9vf9N133ykkJEQTJkzQK6+84tViAQBA5eLxEZWEhAQ1bNiwQHvDhg2VkJAg6ZfTQ/nfAQQAAFBaHgeVZs2aafr06crNzXW1Xbx4UdOnT1ezZs0kSceOHVNERIT3qgQAAJWSx6d+3njjDfXt21dRUVFq3bq1bDabdu/eLYfDoU8++USSdOjQIY0aNcrrxQIAgMqlVM9ROX/+vN577z398MMPMsaoWbNmeuCBBxQaGloWNRaJ56gAAFDxlOlzVCSpatWqGjFiRKmKAwAAKKlSBRVJ2rdvn5KSktyuVZGkvn37XnVRAAAAUimCyqFDh3T33XcrISFBNputwEPfHA6HdysEAACVlsd3/YwdO1bR0dE6efKkqlSpor1792rjxo2Ki4vT+vXry6BEAABQWXl8RGXLli1au3atatWqJR8fH/n4+Oimm27StGnT9Nhjj2nnzp1lUScAAKiEPD6i4nA4VLVqVUlSeHi4jh8/LumXB74lJiZ6tzoAAFCpeXxEpWXLltq9e7diYmLUsWNHzZgxQwEBAZo7d65iYmLKokYAAFBJeRxUnnnmGWVmZkqSXnjhBfXu3Vs333yzatasqaVLl3q9QAAAUHmV6oFvl0tNTVVYWJjrzp9rhQe+AQBQ8ZT5A98uV6NGDW8MAwAA4MbjoJKdna1//etfWrdunU6dOiWn0+m2fMeOHV4rDgAAVG4eB5VHHnlEq1ev1h/+8Ad16NDhmp/uAQAAlYfHQeXTTz/VypUr1blz57KoBwAAwMXj56jUq1fvmn9LMgAAqJw8DiqvvvqqnnrqKR05cqQs6gEAAHDx+NRPXFycsrOzFRMToypVqsjf399teWpqqteKAwAAlZvHQWXgwIE6duyYXnrpJUVERHAxLQAAKDMeB5XNmzdry5YtatOmTVnUAwAA4OLxNSrNmjXThQsXyqIWAAAANx4HlenTp2vChAlav369zpw5o4yMDLcXAACAt3j8XT8+Pr9km8uvTTHGyGazyeFweK+6K+C7fgAAqHjK9Lt+1q1bV+rCAAAAPOFxUOnSpUtZ1AEAAFBAiYPK7t27S9SvdevWpS4GAADgUiUOKm3btpXNZlNxl7Rc62tUAADAb1uJg8rhw4fLsg4AAIACShxUGjZsWJZ1AAAAFODxc1QAAACuFYIKAACwLIIKAACwLIIKAACwLIIKAACwrBLd9dOuXbsC3+1TlB07dlxVQQAAAPlKFFT69evn+nd2drZmz56tFi1aKD4+XpL0zTffaO/evRo1alSZFAkAACqnEgWVyZMnu/49bNgwPfbYY3r++ecL9ElOTvZudQAAoFKzmeKeiV8Iu92ubdu2qWnTpm7t//3vfxUXF6f09HSvFlgcT74mGgAAWIMnn98eX0wbHBysr7/+ukD7119/raCgIE+HAwAAKFKJH6Gfb9y4cRo5cqS2b9+uG2+8UdIv16jMnz9fzz77rNcLBAAAlZfHQWXixImKiYnRa6+9pkWLFkmSmjdvroULF+q+++7zeoEAAKDy8vgaFSvhGhUAACqeMr1GRZLOnj2rf//733r66aeVmpoq6Zfnpxw7dqw0wwEAABTK41M/u3fv1u233y673a6ffvpJw4YNU40aNbRixQodOXJE77zzTlnUCQAAKiGPj6iMHz9eQ4YM0X//+1+3u3zuvPNObdy40avFAQCAys3joLJ161YNHz68QHu9evV04sQJrxQFAAAglSKoBAUFKSMjo0B7YmKiatWq5ZWiAAAApFIElbvuuktTp07VxYsXJUk2m01JSUmaOHGi+vfv7/UCAQBA5eVxUHnllVf0888/q3bt2rpw4YK6dOmiJk2aKDQ0VC+++GJZ1AgAACopj+/6qVatmr7++mutXbtWO3bskNPp1PXXX6/bb7+9LOoDAACVGA98AwAA15Qnn98eH1GRpC+//FJffvmlTp06JafT6bZs/vz5pRkSAACgAI+DynPPPaepU6cqLi5OdevWlc1mK4u6AAAAPA8qc+bM0cKFC/XQQw+VRT0AAAAuHt/1k5ubq06dOpVFLQAAAG48DirDhg3TokWLyqIWAAAANx6f+snOztbcuXO1Zs0atW7dWv7+/m7LZ82a5bXiAABA5Vaqb09u27atJGnPnj1uy7iwFgAAeJPHQWXdunVlUQcAAEABHl+jcqmjR4/q2LFj3qoFAADAjcdBxel0aurUqbLb7WrYsKEaNGig6tWr6/nnny/w8DcAAICr4fGpn7/+9a+aN2+epk+frs6dO8sYo02bNmnKlCnKzs7miwkBAIDXePxdP5GRkZozZ4769u3r1v6f//xHo0aNuqangviuHwAAKh5PPr89PvWTmpq
qZs2aFWhv1qyZUlNTPR0OAACgSB4HlTZt2uj1118v0P7666+rTZs2XikKAABAKsU1KjNmzFCvXr20Zs0axcfHy2azafPmzUpOTtbKlSvLokYAAFBJeXxEpUuXLvrhhx9099136+zZs0pNTdU999yjxMRE3XzzzWVRIwAAqKQ8vpjWSriYFgCAiqdML6b9/PPP9fXXX7t+fuONN9S2bVs98MADSktL87xaAACAIngcVP7yl78oIyNDkpSQkKDx48fr97//vQ4dOqTx48d7vUAAAFB5eXwx7eHDh9WiRQtJ0rJly9SnTx+99NJL2rFjh37/+997vUAAAFB5eXxEJSAgQFlZWZKkNWvWqEePHpKkGjVquI60AAAAeIPHR1RuuukmjR8/Xp07d9Z3332npUuXSpJ++OEHRUVFeb1AAABQeXl8ROX111+Xn5+fPvjgA7355puqV6+eJOmzzz7THXfc4fUCAQBA5cXtyQAA4Jry5PO7RKd+MjIyXANd6ToUAgMAAPCWEgWVsLAwpaSkqHbt2qpevbpsNluBPsYY2Ww2ORwOrxcJAAAqpxIFlbVr16pGjRqSpHXr1pVpQQAAAPm4RgUAAFxTXr9G5XJnz57VvHnztH//ftlsNrVo0UKPPPKI7HZ7qQoGAAAojMe3J2/btk2NGzfW3//+d6Wmpur06dOaNWuWGjdurB07dpRFjQAAoJLy+NTPzTffrCZNmujtt9+Wn98vB2Ty8vI0bNgwHTp0SBs3biyTQgvDqR8AACoeTz6/PQ4qwcHB2rlzp5o1a+bWvm/fPsXFxbker38tEFQAAKh4PPn89vjUT7Vq1ZSUlFSgPTk5WaGhoZ4OBwAAUCSPg8qAAQP06KOPaunSpUpOTtbRo0e1ZMkSDRs2TAMHDiyLGgEAQCXl8V0/r7zyimw2mx5++GHl5eVJkvz9/TVy5EhNnz7d6wUCAIDKq9TPUcnKytLBgwdljFGTJk1UpUoVb9d2RVyjAgBAxVMm16hkZWVp9OjRqlevnmrXrq1hw4apbt26at26dbmEFAAA8NtX4qAyefJkLVy4UL169dL999+v1atXa+TIkWVZGwAAqORKfI3K8uXLNW/ePN1///2SpAcffFCdO3eWw+GQr69vmRUIAAAqrxIfUUlOTtbNN9/s+rlDhw7y8/PT8ePHy6QwAACAEgcVh8OhgIAAtzY/Pz/XnT8AAADeVuJTP8YYDRkyRIGBga627OxsjRgxQiEhIa625cuXe7dCAABQaZU4qAwePLhA24MPPujVYgAAAC5V4qCyYMGCsqwDAACgAI8foQ8AAHCtEFQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBlEVQAAIBllXtQmT17tqKjoxUUFKT27dvrq6++Ku+SAACARfiV58aXLl2qcePGafbs2ercubPeeust3Xnnndq3b58aNGhQnqVp6dYkfbn/pJLTLuj0uRwZY1Q1yF8+NulEerZyHU752myqFRqokEA/JaVm6cJFpwJ8pVua1tLoW5tqzf6T2pB4SqfP5yr7okO+NpskKcfhlI9NCvD1UdUgfzWpFSJJysjOkzFGWbkOGUmt6tl1f4cG+uHkOS3fcVSnMnIUHOCrkABf2Ww2VQvyU82qgTpzPkcZ2XmKDg/R9Q3DdPzsBe09lq6U9GyFBPqpfo0q2nEkTedz8lS/RhWdSM/WueyLahoRqqa1qyotK1dHzmQpLStXQf6+Ss3M1fkchyTJJinA16Y8p5HD/PKzv69NNklOYyRJTknGKZn/62/+bw5tNinY30dBfr5Ky7ooZxn/zmySfHwkx1VsyEdyq9N2yb/NJf8O9LMp2N9X2XlOORxOXbxkJV+bFODno+rB/vpdZDXX7yglPVupWbm6kOuQMUZhIYHqGF1D93f45b1++HSmosND1K5BmCRpZ1Ka1iee0on0bElSHXuQusbWdi2XpJlfHNCGxFOqUy1IDiP9cPKcIu1B+kNcffn7+ig6PKTA2PnjSiowXv52C+u/+eAZnTmfo5jwEHX/XZ0C4x/8+bzSMnPVpn51DbihQYFxLh3/8u3n980fIywkQI1rVS0w5pUUt01P2ku6vLT1lHTd4n5P3t7e1Siv7VYG5T235b19SbIZY8yVu5WNjh076vrrr9ebb77pamvevLn69eunadOmXXH9jIwM2e12paenq1q1al6rq98bX2tXcrrXxgM8MaJLjCRpzoZDRS6feGdz3fDiav18LtejsdvWtxd4b+ePJ0nTP9vvtt3C+pdErdAAt9ryt3H5+J5so219uz4cfVORyy8fu6htXqn9SuOV1NWsX9g8XWn9q623tMpru5VBec9tWW7fk8/vcjv1k5ubq+3bt6tHjx5u7T169NDmzZsLXScnJ0cZGRluL29bujWJkIJyNWfDoSJDSv7yCe/v8jikSCr0vT1nwyHtTErTzqS0Atst7d/C5bXN2XBIS7cmFbpfJd3GruR0Ld2aVOiywmovapvFte9MSit2vPzlV3I16xe27pXWv9p6S6u8tlsZlPfclvf2L1VuQeX06dNyOByKiIhwa4+IiNCJEycKXWfatGmy2+2uV/369b1e1/fJZ70+JuBt3n6fHj6dqcOnM7065uW8UXNRYxRVe1H9rzROUeOVdI6uZv3i+ng6bln/Tstru5VBec9teW//UuV+Ma3NZnP72RhToC3fpEmTlJ6e7nolJyd7vZ429at7fUzA27z9Po0OD3Fdb1JWvFFzUWMUVX
(base64-encoded PNG data omitted)",
+       "text/plain": [
+        "<Figure (rendered plot omitted)>"
+       ]
+      },
+      "metadata": {},
+      "output_type": "display_data"
+     },
+     {
+      "name": "stdout",
+      "output_type": "stream",
+      "text": [
+       "Number of poisoned images: 300 out of 10000.\n",
+       "last index of poison 931\n"
+      ]
+     },
+     {
+      "data": {
+       "image/png": "(base64-encoded PNG data omitted)",
+       "text/plain": [
+        "<Figure (rendered plot omitted)>
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "%matplotlib inline\n", + "calibrated_gradient = lava.compute_values_and_visualize(dual_sol, trained_with_flag, training_size, portion)" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": {}, + "outputs": [], + "source": [ + "dataname='MNIST'\n", + "valid_size=0 \n", + "random_seed=2021 \n", + "resize = resize\n", + "stratified=True \n", + "shuffle=False\n", + "maxsize=training_size \n", + "maxsize_test = valid_size\n", + "shuffle_per=portion" + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": {}, + "outputs": [], + "source": [ + "def load_torchvision_data_shuffle(dataname, valid_size=0.1, splits=None, shuffle=True,\n", + " stratified=False, random_seed=None, batch_size = 64,\n", + " resize=None, to3channels=False,\n", + " maxsize = None, maxsize_test=None, num_workers = 0, transform=None,\n", + " data=None, datadir=None, download=True, filt=False, print_stats = False, shuffle_per=0, label_ratio=0.3):\n", + " \"\"\" Load torchvision datasets.\n", + "\n", + " We return train and test for plots and post-training experiments\n", + " \"\"\"\n", + " \n", + " if shuffle == True and random_seed:\n", + " np.random.seed(random_seed)\n", + " elif random_seed:\n", + " np.random.seed(random_seed)\n", + " if transform is None:\n", + " if dataname in DATASET_NORMALIZATION.keys():\n", + " transform_dataname = dataname\n", + " else:\n", + " transform_dataname = 'ImageNet'\n", + "\n", + " transform_list = []\n", + "\n", + " if dataname in ['MNIST', 'USPS'] and to3channels:\n", + " transform_list.append(torchvision.transforms.Grayscale(3))\n", + "\n", + " transform_list.append(torchvision.transforms.ToTensor())\n", + " transform_list.append(\n", + " torchvision.transforms.Normalize(*DATASET_NORMALIZATION[transform_dataname])\n", + " )\n", + "\n", + " if resize:\n", + " if not dataname in DATASET_SIZES or DATASET_SIZES[dataname][0] != resize:\n", + " ## Avoid adding an \"identity\" resizing\n", + " transform_list.insert(0, transforms.Resize((resize, resize)))\n", + "\n", + " transform = transforms.Compose(transform_list)\n", + " logger.info(transform)\n", + " train_transform, valid_transform = transform, transform\n", + " elif data is None:\n", + " if len(transform) == 1:\n", + " train_transform, valid_transform = transform, transform\n", + " elif len(transform) == 2:\n", + " train_transform, valid_transform = transform\n", + " else:\n", + " raise ValueError()\n", + "\n", + " if data is None:\n", + " DATASET = getattr(torchvision.datasets, dataname)\n", + " if datadir is None:\n", + " datadir = DATA_DIR\n", + " if dataname == 'EMNIST':\n", + " split = 'letters'\n", + " train = DATASET(datadir, split=split, train=True, download=download, transform=train_transform)\n", + " test = DATASET(datadir, split=split, train=False, download=download, transform=valid_transform)\n", + " ## EMNIST seems to have a bug - classes are wrong\n", + " _merged_classes = set(['C', 'I', 'J', 'K', 'L', 'M', 'O', 'P', 'S', 'U', 'V', 'W', 'X', 'Y', 'Z'])\n", + " _all_classes = set(list(string.digits + string.ascii_letters))\n", + " classes_split_dict = {\n", + " 'byclass': list(_all_classes),\n", + " 'bymerge': sorted(list(_all_classes - _merged_classes)),\n", + " 'balanced': sorted(list(_all_classes - _merged_classes)),\n", + " 'letters': list(string.ascii_lowercase),\n", + " 'digits': list(string.digits),\n", + " 'mnist': list(string.digits),\n", + " }\n", + " train.classes = 
classes_split_dict[split]\n",
+    "            if split == 'letters':\n",
+    "                ## The letters fold (and only that fold!!!) is 1-indexed\n",
+    "                train.targets -= 1\n",
+    "                test.targets -= 1\n",
+    "        elif dataname == 'STL10':\n",
+    "            train = DATASET(datadir, split='train', download=download, transform=train_transform)\n",
+    "            test = DATASET(datadir, split='test', download=download, transform=valid_transform)\n",
+    "            train.classes = ['airplane', 'bird', 'car', 'cat', 'deer', 'dog', 'horse', 'monkey', 'ship', 'truck']\n",
+    "            test.classes = train.classes\n",
+    "            train.targets = torch.tensor(train.labels)\n",
+    "            test.targets = torch.tensor(test.labels)\n",
+    "        elif dataname == 'SVHN':\n",
+    "            train = DATASET(datadir, split='train', download=download, transform=train_transform)\n",
+    "            test = DATASET(datadir, split='test', download=download, transform=valid_transform)\n",
+    "            ## In torchvision, SVHN 0s have label 0, not 10\n",
+    "            train.classes = test.classes = [str(i) for i in range(10)]\n",
+    "            train.targets = torch.tensor(train.labels)\n",
+    "            test.targets = torch.tensor(test.labels)\n",
+    "        elif dataname == 'LSUN':\n",
+    "            # pdb.set_trace()\n",
+    "            train = DATASET(datadir, classes='train', download=download, transform=train_transform)\n",
+    "        else:\n",
+    "            train = DATASET(datadir, train=True, download=download, transform=train_transform)\n",
+    "            test = DATASET(datadir, train=False, download=download, transform=valid_transform)\n",
+    "            #print(\"HEHE DATASET\")\n",
+    "    else:\n",
+    "        train, test = data\n",
+    "\n",
+    "#     print(\"Train Type: \", type(train), \" Train: \", train)\n",
+    "#     print(\"Test Type: \", type(test), \" Test: \", test)\n",
+    "    \n",
+    "\n",
+    "    if type(train.targets) is list:\n",
+    "        train.targets = torch.LongTensor(train.targets)\n",
+    "        test.targets = torch.LongTensor(test.targets)\n",
+    "\n",
+    "    if not hasattr(train, 'classes') or not train.classes:\n",
+    "        train.classes = sorted(torch.unique(train.targets).tolist())\n",
+    "        test.classes = sorted(torch.unique(train.targets).tolist())\n",
+    "\n",
+    "######################## ------------------------- MNIST MNIST MNIST MNIST MN -------------------------- ##########################\n",
+    "######################## ------------------------- NIST MNIST MNIST MNIST MNI -------------------------- ##########################\n",
+    "######################## ------------------------- IST MNIST MNIST MNIST MNIS -------------------------- ##########################\n",
+    "######################## ------------------------- ST MNIST MNIST MNIST MNIST -------------------------- ##########################\n",
+    "\n",
+    "    def filter_labels(dataset, labels):\n",
+    "        mask = (dataset.targets == labels[0]) | (dataset.targets == labels[1])\n",
+    "        dataset.data = dataset.data[mask]\n",
+    "        dataset.targets = dataset.targets[mask]\n",
+    "        return dataset\n",
+    "    def adjust_label_ratio(dataset, labels, ratio):\n",
+    "        # Separate the data by labels\n",
+    "        data_label_0 = dataset.data[dataset.targets == labels[0]]\n",
+    "        data_label_1 = dataset.data[dataset.targets == labels[1]]\n",
+    "        \n",
+    "        # Adjust the amount of label 1 relative to label 0\n",
+    "        num_label_0 = len(data_label_0)\n",
+    "        num_label_1 = min(len(data_label_1), int(num_label_0 * ratio))\n",
+    "        \n",
+    "        # Combine the adjusted datasets\n",
+    "        adjusted_data = torch.cat((data_label_0, data_label_1[:num_label_1]))\n",
+    "        adjusted_targets = torch.cat((torch.full((num_label_0,), labels[0], dtype=torch.long), \n",
+    "                                      torch.full((num_label_1,), labels[1], dtype=torch.long)))\n",
+    "        \n",
+    "        # Ensure the data and targets match the original dataset's data type and shape\n",
+    "        dataset.data = adjusted_data\n",
+    "        dataset.targets = adjusted_targets\n",
+    "        return dataset\n",
+    "    train = filter_labels(train, [0, 1])\n",
+    "    test = filter_labels(test, [0, 1])\n",
+    "    train = adjust_label_ratio(train, [0, 1], label_ratio)\n",
+    "###### VALIDATION IS 0 SO NOT WORRY NOW ######\n",
+    "    ### Data splitting\n",
+    "    fold_idxs = {}\n",
+    "    if splits is None and valid_size == 0:\n",
+    "        ## Only train\n",
+    "        fold_idxs['train'] = np.arange(len(train))\n",
+    "        \n",
+    "    elif splits is None and valid_size > 0:\n",
+    "        ## Train/Valid\n",
+    "        train_idx, valid_idx = random_index_split(len(train), 1-valid_size, (maxsize, None)) # No maxsize for validation\n",
+    "        fold_idxs['train'] = train_idx\n",
+    "        fold_idxs['valid'] = valid_idx\n",
+    "    elif splits is not None:\n",
+    "        ## Custom splits - must be integer.\n",
+    "        if type(splits) is dict:\n",
+    "            snames, slens = zip(*splits.items())\n",
+    "        elif type(splits) in [list, np.ndarray]:\n",
+    "            snames = ['split_{}'.format(i) for i in range(len(splits))]\n",
+    "            slens = splits\n",
+    "        slens = np.array(slens)\n",
+    "        if any(slens < 0): # Split expressed as -1, i.e., 'leftover'\n",
+    "            assert sum(slens < 0) == 1, 'Can only deal with one split being -1'\n",
+    "            idx_neg = np.where(slens == -1)[0][0]\n",
+    "            slens[idx_neg] = len(train) - np.array([x for x in slens if x > 0]).sum()\n",
+    "        elif slens.sum() > len(train):\n",
+    "            logging.warning(\"Not enough samples to satisfy splits... cropping train...\")\n",
+    "            if 'train' in snames:\n",
+    "                slens[snames.index('train')] = len(train) - slens[np.array(snames) != 'train'].sum()\n",
+    "\n",
+    "        idxs = np.arange(len(train))\n",
+    "        if not stratified:\n",
+    "            np.random.shuffle(idxs)\n",
+    "        else:\n",
+    "            ## If stratified, we'll interleave the per-class shuffled indices\n",
+    "            idxs_class = [np.random.permutation(np.where(train.targets==c)).T for c in np.unique(train.targets)]\n",
+    "            idxs = interleave(*idxs_class).squeeze().astype(int)\n",
+    "\n",
+    "        slens = np.array(slens).cumsum() # Need to make cumulative for np.split\n",
+    "        split_idxs = [np.sort(s) for s in np.split(idxs, slens)[:-1]] # The last one are leftovers\n",
+    "        assert len(split_idxs) == len(splits)\n",
+    "        fold_idxs = {snames[i]: v for i,v in enumerate(split_idxs)}\n",
+    "\n",
+    "\n",
+    "    # fold_idxs['train'] = np.arange(len(train)) start -> stop by step\n",
+    "    for k, idxs in fold_idxs.items():\n",
+    "        if maxsize and maxsize < len(idxs):\n",
+    "            fold_idxs[k] = np.sort(np.random.choice(idxs, maxsize, replace = False))\n",
+    "    \n",
+    "    sampler_class = SubsetRandomSampler if shuffle else SubsetSampler\n",
+    "    fold_samplers = {k: sampler_class(idxs) for k,idxs in fold_idxs.items()}\n",
+    "    \n",
+    "\n",
+    "# ░██████╗██╗░░██╗██╗░░░██╗███████╗███████╗██╗░░░░░███████╗\n",
+    "# ██╔════╝██║░░██║██║░░░██║██╔════╝██╔════╝██║░░░░░██╔════╝\n",
+    "# ╚█████╗░███████║██║░░░██║█████╗░░█████╗░░██║░░░░░█████╗░░\n",
+    "# ░╚═══██╗██╔══██║██║░░░██║██╔══╝░░██╔══╝░░██║░░░░░██╔══╝░░\n",
+    "# ██████╔╝██║░░██║╚██████╔╝██║░░░░░██║░░░░░███████╗███████╗\n",
+    "# ╚═════╝░╚═╝░░╚═╝░╚═════╝░╚═╝░░░░░╚═╝░░░░░╚══════╝╚══════╝\n",
+    "\n",
+    "    \n",
+    "    old_train = train\n",
+    "    #old_test = fold_idxs['test']\n",
+    "    if shuffle_per != 0:\n",
+    "        \n",
+    "        total_shuffles = int(shuffle_per * len(fold_idxs['train']))\n",
+    "        \n",
+    "        shuffle_inds = np.random.choice(sorted(fold_idxs['train']), size=total_shuffles, replace=False)\n",
+    "        \n",
+    "        \n",
+    "        if dataname == 
'CIFAR10':\n", + " print(\"CIFAR TEN\")\n", + " for index in shuffle_inds:\n", + " cur_label = train.targets[index]\n", + " new_label = np.random.randint(10)\n", + " while new_label == cur_label:\n", + " new_label = np.random.randint(10)\n", + " cur_label = new_label\n", + " # print(\"TRAINNNN: \", train[index])\n", + " train.targets[index] = cur_label\n", + " elif dataname == 'CIFAR100':\n", + " print(\"CIFAR HUNDRED\")\n", + " for index in shuffle_inds:\n", + " cur_label = train.targets[index]\n", + " new_label = np.random.randint(100)\n", + " while new_label == cur_label:\n", + " new_label = np.random.randint(100)\n", + " cur_label = new_label\n", + " # print(\"TRAINNNN: \", train[index])\n", + " train.targets[index] = cur_label\n", + " elif dataname == 'MNIST':\n", + " print(\"MNIST\")\n", + " for index in shuffle_inds:\n", + " cur_label = train.targets[index]\n", + " print(f'Currrent label: {cur_label}')\n", + " new_label = np.random.randint(10)\n", + " while new_label == cur_label:\n", + " new_label = np.random.randint(10)\n", + " cur_label = new_label\n", + " print(f'New label: {cur_label} ')\n", + " # print(\"TRAINNNN: \", train[index])\n", + " train.targets[index] = cur_label\n", + " print(\"TRAINNNN label: \", train.targets[index])\n", + " print(\"TRAINNNN: \", train[index])\n", + " elif dataname == 'FashionMNIST':\n", + " print(\"FashionistaMNIST\")\n", + " for index in shuffle_inds:\n", + " cur_label = train.targets[index]\n", + " new_label = np.random.randint(10)\n", + " while new_label == cur_label:\n", + " new_label = np.random.randint(10)\n", + " cur_label = new_label\n", + " # print(\"TRAINNNN: \", train[index])\n", + " train.targets[index] = cur_label\n", + " \n", + " ########## FOR other datasets such as STL10 and ImageNet, we cannot directly modify labels\n", + " ########## so will need to recreate the dataloader! 
time consuming!\n",
+    "        \n",
+    "        elif dataname == 'STL10' or dataname == 'ImageNet':\n",
+    "            print(\"STL11\")\n",
+    "            if dataname == 'ImageNet':\n",
+    "                print('IMAGI')\n",
+    "            DATASET = getattr(torchvision.datasets, dataname)\n",
+    "            new_train = DATASET\n",
+    "            new_train.targets = {}\n",
+    "            new_train.classes = {}\n",
+    "            new_train.targets = train.targets\n",
+    "            new_train.classes = train.classes\n",
+    "            \n",
+    "            new_ds_imgs = []\n",
+    "            new_ds_labs = []\n",
+    "            class_len = len(train.classes)\n",
+    "            for i in range(len(train)):\n",
+    "                new_ds_imgs.append(train[i][0].permute(1,2,0))\n",
+    "                if i in shuffle_inds:\n",
+    "                    cur_label = train.targets[i]\n",
+    "                    new_label = np.random.randint(class_len)\n",
+    "#                     print(f'{i}.Current label: {cur_label} ')\n",
+    "                    while new_label == cur_label:\n",
+    "                        new_label = np.random.randint(class_len)\n",
+    "                    cur_label = new_label\n",
+    "                    train.targets[i] = cur_label\n",
+    "#                     print(f'{i}.New label: {cur_label} ')\n",
+    "                    new_ds_labs.append(torch.tensor(cur_label).reshape(1))\n",
+    "                else:\n",
+    "                    new_ds_labs.append(torch.tensor(train[i][1]).reshape(1))\n",
+    "            new_ds_imgs = torch.stack(new_ds_imgs, dim=0)\n",
+    "            new_ds_labs = torch.cat(new_ds_labs)\n",
+    "            new_ds_imgs = new_ds_imgs.numpy()\n",
+    "            new_ds_labs = new_ds_labs.numpy()\n",
+    "            \n",
+    "            new_ds = (new_ds_imgs, new_ds_labs)\n",
+    "            \n",
+    "            \n",
+    "            new_train.targets = train.targets\n",
+    "            new_transform_list = []\n",
+    "            new_transform_list.append(torchvision.transforms.ToTensor())\n",
+    "            new_transform = transforms.Compose(new_transform_list)\n",
+    "            new_train = CustomTensorDataset2(new_ds, transform = new_transform)\n",
+    "            train = new_train\n",
+    "    \n",
+    "    \n",
+    "    if type(train.targets) is np.ndarray:\n",
+    "        train.targets = train.targets.tolist()\n",
+    "\n",
+    "    if type(train.targets) is list:\n",
+    "        train.targets = torch.LongTensor(train.targets)\n",
+    "    \n",
+    "    if not hasattr(train, 'classes') or not train.classes:\n",
+    "#         train.classes = ['airplane', 'bird', 'car', 'cat', 'deer', 'dog', 'horse', 'monkey', 'ship', 'truck']\n",
+    "        train.classes = sorted(torch.unique(train.targets).tolist())\n",
+    "    ### Create DataLoaders\n",
+    "    dataloader_args = dict(batch_size=batch_size,num_workers=num_workers)\n",
+    "\n",
+    "    fold_loaders = {k: dataloader.DataLoader(train, sampler=sampler,**dataloader_args)\n",
+    "                    for k,sampler in fold_samplers.items()}\n",
+    "\n",
+    "    \n",
+    "    if maxsize_test and maxsize_test < len(test):\n",
+    "        test_idxs = np.sort(np.random.choice(len(test), maxsize_test, replace = False))\n",
+    "        sampler_test = SubsetSampler(test_idxs) # For test don't want Random\n",
+    "        dataloader_args['sampler'] = sampler_test\n",
+    "    else:\n",
+    "        dataloader_args['shuffle'] = False\n",
+    "    test_loader = dataloader.DataLoader(test, **dataloader_args)\n",
+    "    fold_loaders['test'] = test_loader\n",
+    "\n",
+    "    fnames, flens = zip(*[[k,len(v)] for k,v in fold_idxs.items()])\n",
+    "    fnames = '/'.join(list(fnames) + ['test'])\n",
+    "    flens = '/'.join(map(str, list(flens) + [len(test)]))\n",
+    "\n",
+    "    if hasattr(train, 'data'):\n",
+    "        logger.info('Input Dim: {}'.format(train.data.shape[1:]))\n",
+    "    logger.info('Classes: {} (effective: {})'.format(len(train.classes), len(torch.unique(train.targets))))\n",
+    "\n",
+    "    if shuffle_per != 0:\n",
+    "        return fold_loaders, {'train': train, 'test':test}, shuffle_inds, {'train': old_train, 'test': test}\n",
+    "    return fold_loaders, {'train': train, 'test':test}"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# MNIST on an imbalanced dataset combined with techniques such as SMOTE"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "baseline"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import numpy as np\n",
+    "from sklearn.datasets import fetch_openml\n",
+    "from sklearn.decomposition import PCA\n",
+    "from sklearn.preprocessing import StandardScaler\n",
+    "from sklearn.model_selection import train_test_split\n",
+    "from imblearn.over_sampling import SMOTE\n",
+    "import matplotlib.pyplot as plt\n",
+    "import torch\n",
+    "import torchvision.transforms as transforms\n",
+    "import torchvision.models as models\n",
+    "from torch.utils.data import DataLoader, Subset\n",
+    "from torchvision.datasets import MNIST\n",
+    "\n",
+    "# Load MNIST dataset (replicate the grayscale channel so the 3-channel ResNet-18 below accepts it)\n",
+    "transform = transforms.Compose([transforms.Grayscale(num_output_channels=3), transforms.ToTensor(), transforms.Normalize((0.5,), (0.5,))])\n",
+    "mnist_train = MNIST(root='./data', train=True, download=True, transform=transform)\n",
+    "\n",
+    "# Filter only classes 0 and 1\n",
+    "indices = np.where((mnist_train.targets == 0) | (mnist_train.targets == 1))[0]\n",
+    "mnist_subset = Subset(mnist_train, indices)\n",
+    "\n",
+    "# Use a pre-trained model to extract features\n",
+    "model = models.resnet18(pretrained=True)\n",
+    "model.fc = torch.nn.Identity() # Remove the final classification layer\n",
+    "model.eval()\n",
+    "\n",
+    "# Create a DataLoader\n",
+    "data_loader = DataLoader(mnist_subset, batch_size=64, shuffle=False)\n",
+    "\n",
+    "# Extract features\n",
+    "features = []\n",
+    "labels = []\n",
+    "\n",
+    "with torch.no_grad():\n",
+    "    for inputs, targets in data_loader:\n",
+    "        outputs = model(inputs)\n",
+    "        features.append(outputs)\n",
+    "        labels.append(targets)\n",
+    "\n",
+    "features = torch.cat(features).numpy()\n",
+    "labels = torch.cat(labels).numpy()\n",
+    "\n",
+    "# Check feature shape\n",
+    "print(f'Feature shape: {features.shape}')\n",
+    "\n",
+    "# Standardize the features\n",
+    "scaler = StandardScaler()\n",
+    "features_scaled = scaler.fit_transform(features)\n",
+    "\n",
+    "# Reduce dimensionality to 512 dimensions using PCA (if necessary)\n",
+    "pca = PCA(n_components=512)\n",
+    "features_reduced = pca.fit_transform(features_scaled)\n",
+    "\n",
+    "# Apply SMOTE\n",
+    "smote = SMOTE(random_state=42)\n",
+    "X_res, y_res = smote.fit_resample(features_reduced, labels)\n",
+    "\n",
+    "# Summarize class distribution\n",
+    "from collections import Counter\n",
+    "print(f'Original dataset shape: {Counter(labels)}')\n",
+    "print(f'Resampled dataset shape: {Counter(y_res)}')\n",
+    "\n",
+    "# Optionally, visualize the features in 2D using PCA\n",
+    "pca_2d = PCA(n_components=2)\n",
+    "features_2d = pca_2d.fit_transform(X_res)\n",
+    "\n",
+    "plt.figure(figsize=(10, 5))\n",
+    "\n",
+    "plt.scatter(features_2d[y_res == 0][:, 0], features_2d[y_res == 0][:, 1], label='Class 0', alpha=0.5)\n",
+    "plt.scatter(features_2d[y_res == 1][:, 0], features_2d[y_res == 1][:, 1], label='Class 1', alpha=0.5)\n",
+    "plt.legend()\n",
+    "plt.title('SMOTE Resampled Features in 2D')\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import pickle\n",
+    "\n",
+    "# Load the saved model from the .pickle file\n",
+    "with open('save_x1y1x2y2.txt', 'rb') as file:\n",
+    "    model = pickle.load(file)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "4\n"
+     ]
+    }
+   ],
+   "source": [
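+    "# Note: save_x1y1x2y2.txt was pickled as the 4-tuple (X1, y1, X2, y2) unpacked below, so this is expected to print 4.\n",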
+ "print(len(model))" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([1000, 1024])\n", + "torch.Size([200, 1024])\n", + "torch.Size([1000])\n", + "torch.Size([200])\n" + ] + } + ], + "source": [ + "X1, y1, X2, y2 = model\n", + "print(X1.shape)\n", + "print(X2.shape)\n", + "print(y1.shape)\n", + "print(y2.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "187\n", + "555\n" + ] + } + ], + "source": [ + "num1_y1 = torch.sum(y1 == 1).item()\n", + "print(num1_y1)\n", + "num0_y1 = torch.sum(y1 == 0).item()\n", + "print(num0_y1)" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [], + "source": [ + "dataname='MNIST'\n", + "random_seed=2021 \n", + "resize = resize\n", + "stratified=False \n", + "shuffle=False\n", + "maxsize=training_size \n", + "maxsize_test = valid_size\n", + "shuffle_per=portion" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import pdb\n", + "from functools import partial\n", + "import random\n", + "import logging\n", + "import string\n", + "\n", + "import numpy as np\n", + "import torch\n" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [], + "source": [ + "logger = logging.getLogger(__name__)\n", + "DATASET_NCLASSES = {\n", + " 'MNIST': 10,\n", + " 'FashionMNIST': 10,\n", + " 'EMNIST': 26,\n", + " 'KMNIST': 10,\n", + " 'USPS': 10,\n", + " 'CIFAR10': 10,\n", + " 'SVHN': 10,\n", + " 'STL10': 10,\n", + " 'LSUN': 10,\n", + " 'tiny-ImageNet': 200\n", + "}\n", + "\n", + "DATASET_SIZES = {\n", + " 'MNIST': (28,28),\n", + " 'FashionMNIST': (28,28),\n", + " 'EMNIST': (28,28),\n", + " 'QMNIST': (28,28),\n", + " 'KMNIST': (28,28),\n", + " 'USPS': (16,16),\n", + " 'SVHN': (32, 32),\n", + " 'CIFAR10': (32, 32),\n", + " 'STL10': (96, 96),\n", + " 'tiny-ImageNet': (64,64)\n", + "}\n", + "\n", + "DATASET_NORMALIZATION = {\n", + " 'MNIST': ((0.1307,), (0.3081,)),\n", + " 'USPS' : ((0.1307,), (0.3081,)),\n", + " 'FashionMNIST' : ((0.1307,), (0.3081,)),\n", + " 'QMNIST' : ((0.1307,), (0.3081,)),\n", + " 'EMNIST' : ((0.1307,), (0.3081,)),\n", + " 'KMNIST' : ((0.1307,), (0.3081,)),\n", + " 'ImageNet': ((0.485, 0.456, 0.406),(0.229, 0.224, 0.225)),\n", + " 'tiny-ImageNet': ((0.485, 0.456, 0.406),(0.229, 0.224, 0.225)),\n", + " 'CIFAR10': ((0.485, 0.456, 0.406),(0.229, 0.224, 0.225)),\n", + " 'CIFAR100': ((0.485, 0.456, 0.406),(0.229, 0.224, 0.225)),\n", + " 'STL10': ((0.485, 0.456, 0.406),(0.229, 0.224, 0.225))\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": 36, + "metadata": {}, + "outputs": [], + "source": [ + "from otdd import DATA_DIR\n", + "from torch.utils.data import Dataset, TensorDataset, DataLoader\n", + "import torch.utils.data.dataloader as dataloader\n" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": {}, + "outputs": [], + "source": [ + "class SubsetSampler(torch.utils.data.Sampler):\n", + " r\"\"\"Samples elements in order (not randomly) from a given list of indices, without replacement.\n", + "\n", + " Arguments:\n", + " indices (sequence): a sequence of indices\n", + " (this is identical to torch's SubsetRandomSampler except not random)\n", + " \"\"\"\n", + "\n", + " def __init__(self, indices):\n", + " self.indices = indices\n", + 
"\n", + " def __iter__(self):\n", + " return (self.indices[i] for i in range(len(self.indices)))\n", + "\n", + " def __len__(self):\n", + " return len(self.indices)" + ] + }, + { + "cell_type": "code", + "execution_count": 37, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "MNIST\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 0\n", + "New 
label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, 
-0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + 
" [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: 
tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, 
-0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 1\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, 
..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 1\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: 
(tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 1\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " 
[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, 
-0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 1\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, 
-0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 1\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, 
..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 1\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " 
[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, 
-0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, 
-0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, 
-0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " 
[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + 
"Currrent label: 1\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, 
-0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, 
..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 1\n", + "New 
label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, 
-0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 1\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + 
" [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 1\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: 
tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, 
-0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, 
..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: 
(tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 1\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " 
[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, 
-0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, 
-0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, 
..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 1\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " 
[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 1\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, 
-0.4242],\n",
+       "          [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n",
+       "          [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n",
+       "          ...,\n",
+       "          [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n",
+       "          [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n",
+       "          [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n",
+       "[... output truncated: the same per-sample dump (current label, new label, TRAINNNN label, and the full normalized image tensor) repeats for every label-shuffled training example ...]\n"
+      ]
+     }
+    ],
+    "source": [
+     "loaders, full_dict, shuffle_ind, old_dict = 
load_torchvision_data_shuffle(dataname, splits=None, shuffle=shuffle,\n", + " stratified=stratified, random_seed=random_seed, batch_size = 64,\n", + " resize=resize, to3channels=False,\n", + " maxsize = maxsize, maxsize_test=maxsize_test, num_workers = 0, transform=None,\n", + " data=None, datadir=None, download=True, filt=False, print_stats = False, shuffle_per=shuffle_per, label_ratio=0.3)" + ] + }, + { + "cell_type": "code", + "execution_count": 38, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'train': Dataset MNIST\n", + " Number of datapoints: 7699\n", + " Root location: c:\\Users\\21520\\PycharmProjects\\LAVA\\LAVA\\data\n", + " Split: Train\n", + " StandardTransform\n", + " Transform: Compose(\n", + " Resize(size=(32, 32), interpolation=bilinear, max_size=None, antialias=True)\n", + " ToTensor()\n", + " Normalize(mean=(0.1307,), std=(0.3081,))\n", + " ),\n", + " 'test': Dataset MNIST\n", + " Number of datapoints: 2115\n", + " Root location: c:\\Users\\21520\\PycharmProjects\\LAVA\\LAVA\\data\n", + " Split: Test\n", + " StandardTransform\n", + " Transform: Compose(\n", + " Resize(size=(32, 32), interpolation=bilinear, max_size=None, antialias=True)\n", + " ToTensor()\n", + " Normalize(mean=(0.1307,), std=(0.3081,))\n", + " )}" + ] + }, + "execution_count": 38, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "full_dict" + ] + }, + { + "cell_type": "code", + "execution_count": 42, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(1000,)" + ] + }, + "execution_count": 42, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "old_dict['train'].shape" + ] + }, + { + "cell_type": "code", + "execution_count": 43, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([ 0, 1, 6, 18, 21, 33, 38, 49, 53, 68, 72,\n", + " 75, 78, 83, 88, 97, 115, 137, 138, 139, 144, 148,\n", + " 152, 159, 163, 166, 168, 180, 192, 214, 224, 236, 243,\n", + " 256, 273, 279, 280, 281, 285, 297, 311, 312, 331, 339,\n", + " 342, 343, 350, 355, 356, 358, 365, 386, 395, 412, 421,\n", + " 430, 432, 440, 442, 446, 450, 451, 457, 463, 465, 466,\n", + " 468, 487, 496, 501, 506, 524, 527, 534, 577, 580, 588,\n", + " 590, 594, 621, 628, 635, 641, 642, 645, 650, 651, 653,\n", + " 662, 676, 681, 687, 725, 727, 735, 737, 738, 743, 759,\n", + " 778, 789, 799, 811, 815, 816, 817, 825, 834, 837, 840,\n", + " 844, 854, 877, 889, 890, 893, 898, 899, 904, 913, 915,\n", + " 927, 934, 944, 945, 950, 953, 954, 955, 963, 973, 976,\n", + " 981, 986, 987, 988, 998, 1024, 1029, 1034, 1042, 1045, 1051,\n", + " 1055, 1062, 1073, 1077, 1102, 1107, 1112, 1113, 1117, 1125, 1148,\n", + " 1160, 1170, 1179, 1182, 1186, 1193, 1197, 1201, 1217, 1218, 1232,\n", + " 1234, 1246, 1254, 1255, 1259, 1276, 1278, 1279, 1288, 1290, 1296,\n", + " 1297, 1324, 1338, 1348, 1351, 1366, 1390, 1401, 1407, 1408, 1411,\n", + " 1417, 1420, 1424, 1432, 1446, 1462, 1477, 1496, 1502, 1507, 1509,\n", + " 1513, 1518, 1526, 1527, 1528, 1535, 1542, 1543, 1550, 1559, 1561,\n", + " 1565, 1567, 1569, 1581, 1584, 1586, 1589, 1591, 1596, 1598, 1604,\n", + " 1606, 1608, 1609, 1612, 1616, 1617, 1621, 1628, 1639, 1640, 1655,\n", + " 1660, 1672, 1679, 1694, 1702, 1722, 1734, 1738, 1747, 1759, 1760,\n", + " 1776, 1779, 1783, 1797, 1805, 1807, 1810, 1812, 1824, 1827, 1833,\n", + " 1849, 1859, 1897, 1899, 1911, 1917, 1920, 1930, 1932, 1938, 1951,\n", + " 1968, 1975, 1993, 1994, 2001, 2003, 2005, 2019, 2022, 2024, 2038,\n", + " 2042, 2045, 2046, 2068, 2072, 2076, 2077, 
2088, 2100, 2105, 2108,\n", + " 2117, 2123, 2124, 2126, 2137, 2142, 2149, 2163, 2177, 2191, 2200,\n", + " 2220, 2245, 2246, 2256, 2262, 2263, 2267, 2276, 2280, 2289, 2293,\n", + " 2308, 2337, 2344, 2348, 2355, 2356, 2357, 2366, 2400, 2401, 2409,\n", + " 2412, 2421, 2427, 2429, 2430, 2436, 2441, 2442, 2443, 2446, 2449,\n", + " 2455, 2456, 2467, 2469, 2481, 2488, 2493, 2499, 2504, 2510, 2515,\n", + " 2516, 2523, 2532, 2539, 2553, 2554, 2555, 2559, 2601, 2603, 2637,\n", + " 2651, 2655, 2675, 2709, 2714, 2723, 2726, 2736, 2747, 2770, 2771,\n", + " 2775, 2783, 2789, 2795, 2796, 2802, 2807, 2820, 2824, 2841, 2868,\n", + " 2869, 2870, 2871, 2883, 2895, 2898, 2905, 2907, 2912, 2918, 2924,\n", + " 2937, 2940, 2947, 2949, 2965, 2968, 2975, 2984, 3008, 3019, 3042,\n", + " 3045, 3050, 3053, 3054, 3065, 3078, 3082, 3087, 3092, 3097, 3098,\n", + " 3121, 3147, 3149, 3150, 3168, 3171, 3176, 3185, 3200, 3224, 3225,\n", + " 3230, 3237, 3238, 3245, 3255, 3258, 3259, 3266, 3269, 3287, 3288,\n", + " 3289, 3292, 3294, 3298, 3300, 3333, 3337, 3346, 3348, 3353, 3357,\n", + " 3364, 3368, 3372, 3374, 3375, 3385, 3386, 3397, 3401, 3408, 3409,\n", + " 3426, 3434, 3435, 3457, 3458, 3465, 3476, 3478, 3486, 3487, 3489,\n", + " 3499, 3537, 3545, 3561, 3563, 3564, 3580, 3619, 3635, 3651, 3656,\n", + " 3657, 3658, 3671, 3674, 3678, 3685, 3699, 3713, 3732, 3735, 3740,\n", + " 3742, 3746, 3749, 3762, 3764, 3782, 3795, 3804, 3805, 3807, 3814,\n", + " 3827, 3833, 3836, 3846, 3847, 3853, 3860, 3861, 3862, 3865, 3870,\n", + " 3876, 3877, 3881, 3899, 3912, 3913, 3921, 3923, 3924, 3925, 3945,\n", + " 3951, 3954, 3978, 3980, 3983, 3995, 3998, 3999, 4000, 4014, 4025,\n", + " 4027, 4035, 4036, 4047, 4051, 4053, 4071, 4077, 4091, 4103, 4134,\n", + " 4138, 4139, 4140, 4161, 4175, 4186, 4187, 4188, 4209, 4219, 4220,\n", + " 4221, 4226, 4232, 4249, 4250, 4256, 4263, 4270, 4272, 4281, 4284,\n", + " 4287, 4296, 4299, 4304, 4308, 4309, 4311, 4312, 4317, 4319, 4321,\n", + " 4328, 4332, 4333, 4335, 4340, 4352, 4353, 4367, 4379, 4389, 4394,\n", + " 4425, 4426, 4429, 4435, 4444, 4459, 4462, 4465, 4478, 4495, 4502,\n", + " 4521, 4525, 4530, 4531, 4543, 4547, 4552, 4561, 4571, 4576, 4578,\n", + " 4580, 4603, 4611, 4620, 4621, 4623, 4630, 4645, 4654, 4657, 4688,\n", + " 4699, 4707, 4710, 4712, 4713, 4715, 4720, 4724, 4730, 4731, 4734,\n", + " 4736, 4755, 4758, 4769, 4771, 4786, 4795, 4797, 4799, 4807, 4810,\n", + " 4816, 4823, 4828, 4863, 4867, 4875, 4888, 4901, 4912, 4913, 4920,\n", + " 4926, 4941, 4946, 4954, 4956, 4959, 4960, 4965, 4992, 4996, 5007,\n", + " 5008, 5015, 5016, 5029, 5036, 5041, 5044, 5055, 5059, 5072, 5074,\n", + " 5083, 5086, 5104, 5112, 5113, 5114, 5119, 5120, 5122, 5127, 5138,\n", + " 5171, 5181, 5198, 5211, 5220, 5221, 5229, 5236, 5238, 5244, 5247,\n", + " 5252, 5263, 5281, 5289, 5292, 5301, 5317, 5321, 5324, 5332, 5336,\n", + " 5348, 5356, 5368, 5371, 5390, 5408, 5412, 5427, 5429, 5441, 5443,\n", + " 5446, 5449, 5452, 5459, 5460, 5464, 5469, 5473, 5483, 5484, 5501,\n", + " 5506, 5509, 5512, 5517, 5524, 5532, 5535, 5539, 5547, 5551, 5564,\n", + " 5586, 5587, 5594, 5596, 5604, 5606, 5608, 5640, 5643, 5646, 5648,\n", + " 5656, 5658, 5661, 5662, 5669, 5671, 5674, 5680, 5685, 5704, 5712,\n", + " 5735, 5738, 5741, 5745, 5746, 5751, 5758, 5766, 5786, 5789, 5791,\n", + " 5812, 5839, 5848, 5863, 5865, 5887, 5899, 5913, 5934, 5952, 5963,\n", + " 5964, 5968, 5969, 5980, 5983, 5988, 5995, 6009, 6018, 6022, 6044,\n", + " 6049, 6065, 6078, 6083, 6088, 6104, 6110, 6132, 6138, 6141, 6146,\n", + " 6151, 6156, 6158, 6193, 6200, 6211, 6213, 
6215, 6223, 6240, 6253,\n", + " 6257, 6260, 6274, 6300, 6303, 6309, 6320, 6329, 6342, 6361, 6362,\n", + " 6366, 6375, 6378, 6380, 6381, 6382, 6387, 6412, 6444, 6449, 6452,\n", + " 6457, 6469, 6478, 6485, 6486, 6491, 6517, 6521, 6525, 6526, 6542,\n", + " 6547, 6559, 6584, 6585, 6586, 6599, 6609, 6614, 6629, 6644, 6648,\n", + " 6649, 6662, 6671, 6675, 6677, 6684, 6688, 6703, 6715, 6723, 6728,\n", + " 6730, 6742, 6744, 6751, 6759, 6762, 6773, 6777, 6784, 6787, 6795,\n", + " 6799, 6800, 6811, 6813, 6817, 6829, 6843, 6857, 6860, 6863, 6870,\n", + " 6872, 6877, 6892, 6904, 6914, 6917, 6923, 6931, 6948, 6955, 6961,\n", + " 6980, 6986, 6995, 7004, 7005, 7009, 7010, 7027, 7028, 7030, 7031,\n", + " 7033, 7034, 7040, 7042, 7057, 7072, 7077, 7097, 7101, 7115, 7120,\n", + " 7121, 7142, 7144, 7165, 7168, 7175, 7184, 7187, 7191, 7198, 7200,\n", + " 7224, 7227, 7233, 7238, 7239, 7247, 7251, 7252, 7264, 7281, 7286,\n", + " 7300, 7301, 7313, 7319, 7325, 7346, 7349, 7354, 7364, 7368, 7371,\n", + " 7377, 7400, 7402, 7406, 7411, 7424, 7429, 7441, 7457, 7464, 7472,\n", + " 7473, 7478, 7486, 7490, 7492, 7493, 7497, 7510, 7512, 7541, 7543,\n", + " 7549, 7573, 7582, 7583, 7587, 7589, 7610, 7611, 7612, 7615, 7634,\n", + " 7637, 7639, 7644, 7651, 7655, 7658, 7661, 7663, 7665, 7672])" + ] + }, + "execution_count": 43, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "old_dict['train']" + ] + }, + { + "cell_type": "code", + "execution_count": 47, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "MNIST\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, 
-0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., 
-0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 2 
\n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, 
-0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " 
[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 1\n", + "New label: 8 \n", + "TRAINNNN label: 
tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 1\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, 
-0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, 
..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 1\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: 
(tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 1\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " 
[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 1\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, 
-0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 1\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, 
-0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, 
..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " 
[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, 
-0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, 
-0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, 
-0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 1\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " 
[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + 
"Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, 
-0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, 
..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 1\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 0\n", + "New 
label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 1\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, 
-0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + 
" [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: 
tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, 
-0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 1\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, 
..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: 
(tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " 
[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, 
-0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, 
-0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 1\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 1\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, 
..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " 
[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, 
-0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, 
-0.4242]]]), 4)\n", + "Currrent label: 1\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 1\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 1\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, 
-0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " 
[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + 
"Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, 
-0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, 
..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 1\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 1\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New label: 0 \n", + "TRAINNNN label: tensor(0)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 0)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 1\n", + "New 
label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 1\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, 
-0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 1\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + 
" [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 8 \n", + "TRAINNNN label: tensor(8)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 8)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 1 \n", + "TRAINNNN label: tensor(1)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 1)\n", + "Currrent label: 1\n", + "New label: 4 \n", + "TRAINNNN label: tensor(4)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 4)\n", + "Currrent label: 0\n", + "New label: 2 \n", + "TRAINNNN label: tensor(2)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 2)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 7 \n", + "TRAINNNN label: 
tensor(7)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 7)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 3 \n", + "TRAINNNN label: tensor(3)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 3)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n", + "Currrent label: 0\n", + "New label: 9 \n", + "TRAINNNN label: tensor(9)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, 
-0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 9)\n", + "Currrent label: 0\n", + "New label: 5 \n", + "TRAINNNN label: tensor(5)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 5)\n", + "Currrent label: 0\n", + "New label: 6 \n", + "TRAINNNN label: tensor(6)\n", + "TRAINNNN: (tensor([[[-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " ...,\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242],\n", + " [-0.4242, -0.4242, -0.4242, ..., -0.4242, -0.4242, -0.4242]]]), 6)\n" + ] + } + ], + "source": [ + "loaders, shuffle_ind = lava.load_data_corrupted(corrupt_type='shuffle', dataname='MNIST', resize=resize, stratified=False,\n", + " training_size=training_size, test_size=valid_size, currupt_por=portion)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "187 555\n", + "109 91\n" + ] + } + ], + "source": [ + "cnt1 =0\n", + "cnt0 =0\n", + "for batch in loaders['train']:\n", + " #print(batch[0].size())\n", + " ##print(batch[1].size())\n", + " #print(torch.sum(batch[1] == 1).item())\n", + " cnt1+=torch.sum(batch[1] == 1).item()\n", + " #print(torch.sum(batch[1] == 0).item())\n", + " cnt0+=torch.sum(batch[1] == 0).item()\n", + "print(cnt1, cnt0)\n", + "cnt1 =0\n", + "cnt0 =0\n", + "for batch in loaders['test']:\n", + " #print(batch[0].size())\n", + " #print(batch[1].size())\n", + " #print(torch.sum(batch[1] == 1).item())\n", + " cnt1+=torch.sum(batch[1] == 1).item()\n", + " #print(torch.sum(batch[1] == 0).item())\n", + " cnt0+=torch.sum(batch[1] == 0).item()\n", + "print(cnt1, cnt0)" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "import geomloss\n", + "import numpy as np\n", + "from otdd.pytorch.distance_fast import DatasetDistance, FeatureCost, batch_augmented_cost\n", + "from otdd.pytorch.wasserstein import pwdist_exact, efficient_pwdist_gauss\n", + "from functools import partial\n", + "from lava import train_with_corrupt_flag, get_indices" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# augment " + ] + }, + { + "cell_type": "code", + "execution_count": 54, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "c7d888f89e314dc4830b404107791d6f", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/16 [00:00" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAioAAAHFCAYAAADcytJ5AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA34klEQVR4nO3dd3xUVf7/8fckpBFCSCgJIaFjAKkaYQEVUAFZiiiriA1QXGkrCK6CX10QXUFQd12VRXYFVv1S/ClYUQQFUUClKREUpCYQIJQUTEibOb8/+GaWScEMZMiJeT0fj3nInDn33s89cyfz9rZxGGOMAAAALORX0QUAAACUhqACAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAKUk+3bt+u+++5Ts2bNFBISopCQELVo0UIPPPCANm/efMnqmDZtmhwOh0db48aNNXz4cJ8ud8OGDZo2bZrS09N9upyyGj58uBwOh/sRFBSk+Ph4TZ06VTk5OV7Pz+FwaNq0aeVf6CXWo0cP9ejRo6LLAMqsWkUXAPwWvPrqqxo3bpzi4+M1fvx4XX755XI4HPrxxx+1ePFiXXXVVdqzZ4+aNWtWIfUtX75cNWvW9OkyNmzYoCeffFLDhw9XrVq1fLqssgoJCdHnn38uSUpLS9PixYs1ffp0/fTTT1q6dKlX89q4caNiY2N9USaA8yCoABdp/fr1GjNmjPr166e3335bgYGB7teuu+46jR07Vv/v//0/hYSEnHc+2dnZql69uk9q7Nixo0/mazs/Pz/97ne/cz/v27evDhw4oLfeeksvvPCCGjRoUOZ5nTsfAJcOh36Ai/TMM8/I399fr776qkdIOdett96qmJgY9/Phw4erRo0aSkxMVO/evRUWFqbrr79ekrRq1SrddNNNio2NVXBwsJo3b64HHnhAJ06cKDbfjz76SB06dFBQUJCaNGmi5557rsTll3ToJzMzUw8//LCaNGmiwMBANWjQQBMmTFBWVpZHP4fDoXHjxumNN95Qq1atVL16dbVv314ffvihu8+0adP05z//WZLUpEkT9+GWtWvXlljP3//+dzkcDu3Zs6fYa48++qgCAwPd67tt2zb1799f9erVU1BQkGJiYtSvXz8dOnSoxHn/msLAcfDgQUlSUlKS7rrrLvf8W7Vqpeeff14ul6vYOJx76Cc7O9s9fsHBwYqMjFRCQoIWL17sMd3777+vLl26qHr16goLC1OvXr20ceNGjz6Fh+t27NihoUOHKjw8XFFRUbr33nuVkZHh0dcYozlz5qhDhw4KCQlRRESE/vCHP2jfvn3F+s2aNUuNGjVScHCwrrjiCn388ccXNGZARWKPCnARnE6n1qxZo4SEBNWvX9+rafPy8jRw4EA98MADmjx5sgoKCiRJe/fuVZcuXTRy5EiFh4frwIEDeuGFF3T11VcrMTFRAQEBkqTPPvtMN910k7p06aIlS5bI6XRq1qxZOnbs2K8uOzs7W927d9ehQ4f02GOPqV27dtqxY4f+8pe/KDExUatXr/Y4z+Wjjz7Spk2bNH36dNWoUUOzZs3SzTffrF27dqlp06YaOXKkTp06pZdeeknLli1zj0Xr1q1LXP5dd92lRx99VAsXLtTTTz/tMZ5vvvmmBgwYoDp16igrK0u9evVSkyZN9MorrygqKkpHjx7VmjVrdPr0aa/Gu1BhOKpbt66OHz+url27Ki8vT0899ZQaN26sDz/8UA8//LD27t2rOXPmlDqfiRMn6o033tDTTz+tjh07KisrSz/88INOnjzp7rNo0SLdeeed6t27txYvXqzc3FzNmjVLPXr00Geffaarr77aY56DBw/WkCFDdN999ykxMVFTpkyRJM2fP9/d54EHHtDChQv14IMP6tlnn9WpU6c0ffp0de3aVd9//72ioqIkSU8++aSefPJJ3XffffrDH/6g5ORk3X///XI6nYqPj7+gsQMqhAFwwY4ePWokmdtvv73YawUFBSY/P9/9cLlc7teGDRtmJJn58+efd/4ul8vk5+ebgwcPGknmvffec7/WuXNnExMTY86cOeNuy8zMNJGRkaboR7tRo0Zm2LBh7uczZswwfn5+ZtOmTR793n77bSPJrFixwt0myURFRZnMzEyP9fbz8zMzZsxwt82ePdtIMvv37z/vOhW65ZZbTGxsrHE6ne62FStWGEnmgw8+MMYYs3nzZiPJvPvuu2Wa57mGDRtmQkND3eN//Phx8+KLLxqHw2GuuuoqY4wxkydPNpLMN9984zHt6NGjjcPhMLt27fIYh6lTp7qft2nTxgwaNKjU5TudThMTE2Patm3rsY6nT5829erVM127dnW3TZ061Ugys2bN8pjHmDFjTHBwsHvb2bhxo5Fknn/+eY9+ycnJJiQkxDzyyCPGGGPS0tJMcHCwufnmmz36rV+/3kgy3bt3L7VuwDYc+gF85Morr1RAQID78fzzzxfrM3jw4GJtqampGjVqlOLi4lStWjUFBASoUaNGkqQff/xRkpSVlaVNmzbplltuUXBwsHvasLAwDRgw4Fdr+/DDD9WmTRt16NBBBQUF7kefPn1KPGTTs2dPhYWFuZ9HRUWpXr167sMnF2LEiBE6dOiQVq9e7W5bsGCBoqOj1bdvX0lS8+bNFRERoUcffVRz587Vzp07vVpGVlaWe/zr1q2rCRMmqG/fvlq+fLkk6fPPP1fr1q3VqVMnj+mGDx8uY4z7RNySdOrUSR9//LEmT56stWvX6syZMx6v79q1SykpKbr77rvl5/ffP7U1atTQ4MGD9fXXXys7O9tjmoEDB3o8b9eunXJycpSamirp7PvmcDh01113ebxv0dHRat++vft927hxo3JycnTnnXd6zK9r167ubQmoLDj0A1yEOnXqKCQkpMQv7EWLFik7O1tHjhwp9gUkSdWrVy92JY7L5VLv3r2VkpKiJ554Qm3btlVoaKhcLpd+97vfub8M09LS5HK5FB0dXWy+JbUVdezYMe3Zs8d9GKmooufD1K5du1ifoKCgYl/O3ujbt6/q16+vBQsWqHfv3kpLS9P777+v8ePHy9/fX5IUHh6uL774Qn/961/12GOPKS0tTfXr19f999+vxx9/vNT6C4WEhGjdunXuehs1auQx5idPnlTjxo2LTVd4PtG5h3GK+sc//qHY2FgtXbpUzz77rIKDg9WnTx/Nnj1bLVq0cE9b0iHBmJgYuVwupaWleZxAXXScg4KCJMk9zseOHZMxxn14p6imTZt61H2h2wdgE4IKcBH8/f113XXX6dNPP9WRI0c8vpQKz884cOBAidMWvdeJJP3www/6/vvvtXDhQg0bNszdXvSk04iICDkcDh09erTYPEpqK6owYJ177kPR133N399fd999t/7xj38oPT1dixYtUm5urkaMGOHRr23btlqyZImMMdq+fbsWLlyo6dOnKyQkRJMnTz7vMvz8/JSQkFDq67Vr19aRI0eKtaekpEg6/ziEhoa6zwM5duyYe+/KgAED9NNPP7lDR2nz9/PzU0RExH
nrL6pOnTpyOBz68ssv3SHmXIVthcsubfsoKZwBtuLQD3CRpkyZIqfTqVGjRik/P/+i5lUYXop+Cb366qsez0NDQ9WpUyctW7bM4+Zlp0+f1gcffPCry+nfv7/27t2r2rVrKyEhodjjQr7Iiv7ff1mMGDFCOTk5Wrx4sRYuXKguXbqoZcuWJfZ1OBxq3769/va3v6lWrVraunWr1zUWdf3112vnzp3F5vX666/L4XCoZ8+eZZpPVFSUhg8frqFDh2rXrl3Kzs5WfHy8GjRooEWLFskY4+6blZWld955x30lkDf69+8vY4wOHz5c4vvWtm1bSWevbAoODtb//u//eky/YcOGizpcB1QE9qgAF6lbt2565ZVX9Kc//UlXXHGF/vjHP+ryyy+Xn5+fjhw5onfeeUeSynTDtZYtW6pZs2aaPHmyjDGKjIzUBx98oFWrVhXr+9RTT+nGG29Ur169NGnSJDmdTj377LMKDQ3VqVOnzrucCRMm6J133tG1116rhx56SO3atZPL5VJSUpI+/fRTTZo0SZ07d/ZqHAq/JF988UUNGzZMAQEBio+P9zi3paT17dKli2bMmKHk5GTNmzfP4/UPP/xQc+bM0aBBg9S0aVMZY7Rs2TKlp6erV69eXtVXkoceekivv/66+vXrp+nTp6tRo0b66KOPNGfOHI0ePVqXXXZZqdN27txZ/fv3V7t27RQREaEff/xRb7zxhkcAmTVrlu688071799fDzzwgHJzczV79mylp6dr5syZXtfbrVs3/fGPf9SIESO0efNmXXvttQoNDdWRI0f01VdfqW3btho9erQiIiL08MMP6+mnn9bIkSN16623Kjk5WdOmTePQDyqfCjyRF/hN+e6778yIESNMkyZNTFBQkAkODjbNmzc399xzj/nss888+hZekVKSnTt3ml69epmwsDATERFhbr31VpOUlFTsqhNjjHn//fdNu3btTGBgoGnYsKGZOXOm+wqScxW96scYY3755Rfz+OOPm/j4eBMYGGjCw8NN27ZtzUMPPWSOHj3q7ifJjB07tlidJc1zypQpJiYmxvj5+RlJZs2aNecfNGPMvHnzjCQTEhJiMjIyPF776aefzNChQ02zZs1MSEiICQ8PN506dTILFy781fmeb4zPdfDgQXPHHXeY2rVrm4CAABMfH29mz57tcaWOMcWv+pk8ebJJSEgwERERJigoyDRt2tQ89NBD5sSJEx7Tvfvuu6Zz584mODjYhIaGmuuvv96sX7/eo0/he3b8+HGP9gULFpR4JdX8+fNN586dTWhoqAkJCTHNmjUz99xzj9m8ebO7j8vlMjNmzDBxcXEmMDDQtGvXznzwwQeme/fuXPWDSsVhzDn7JAEAACzCOSoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANaq1Dd8c7lcSklJUVhYWIm3IwcAAPYxxuj06dOKiYnx+NHOklTqoJKSkqK4uLiKLgMAAFyA5ORkxcbGnrdPpQ4qhbfmTk5OLtPtyQEAQMXLzMxUXFzceX9io1ClDiqFh3tq1qxJUAEAoJIpy2kbnEwLAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANaq0KCybt06DRgwQDExMXI4HHr33XcrshwAAGCZCv2tn6ysLLVv314jRozQ4MGDK7IU/EZsS0rT/hNZalInVLuPndb3yelqH1dLl0WFae2uVElSj/h6klRivyFXNdTSTUkez8+dZ+F0+U6XAvz9tH7PCaWkn9EVjSL05z4tPWrYe/wX/XzstCKqB+r2Tg3VsWGEtiWlae2uVO04nCE/P4eubxWly6LC9MqaPTqakaPu8XUlSR8nHlFwgL+GdW2spFPZ2nowTTG1QhQbESJJKnAZpWXluddtybdJSsvOc8+vsN6ODSOKjUthHSU9L1yv0voVzmvtrlQdzchxj3t0eLB6xNdz9ynpfTl3vcOCA3Q6J19pWXlyOBxqUidUdcKC9POx03K5jC5vEF5sfpPe+k7fJ6crLrK6TucU6GjGGdWrGayW0WEe7/HRjBxFhwfrUNoZbUtK05k8p+SQHJJa16+pXpdHKyX9jLuWfKfR8dM5crqMcgpcys13qnX9moqvX1Nrd6XqwMlsFRS4FBzgp6saR7qnL1zO9kMZ+i45XS6XS8ZI2flO+TmksOAAOSQF+Pspr8ClM/lOVQ/0l7+/n9Ky8iQZBVfzV4HLyEgKDfRX3bAg5RS4lJaVJ2OMjJGy8p1yuaTqgX6Kj66p9Ow8uYxkjHQqK1dB1fxUu0aQUk/nKq/AqeAAfwVV85PLSE6XUZ0agWoXW0snf8nVkYwchQZV+78xzHdvm7uPndbrGw/oWEaO8gpcCgrwV1xkdbWMDlNugUunc/LlchnVrhGkk7/kau+JLDkk9W1bXw0jq+v75HTlFrh0KC1bp3MKFBMerEPpOfolJ1+dm9bW87d1KHVbKvrZKukzWfTzvff4L+7t/9zPaNHt99xp8p0u9/t27nZ77mfp3GWV9Lm51Epb/vnq8naail7HsnAYY0xFFyGd/WGi5cuXa9CgQWWeJjMzU+Hh4crIyOBHCaGZH/+ouV/su6h5hAT46Uy+y/28bligjp/OK9O0dcMCNfiK2FJr6BAXru+SMy6qPm+N6t5UkjxqKlpHaXUVbS9pXiUtb3LfVh5tF/O+FM6v1RMfe7wvqFz8HZLznG+asmxLhTrEhevdsVeXuh2V9hn1ZhlFlyUV325L2rZ9qbTln68ub6epyHX05vu7UgWV3Nxc5ebmup8X/kw0QQXbktJ085wNFV0GJC0f09Xj/2Yv9n3pGV9Xa3YdL4/SUEmN7dlMr6zZe0mW9ezgtrosKqzE7fbcbduXSvvcPDu4rR59J7HEuiR5Nc355nUp1tGboFKpTqadMWOGwsPD3Y+4uLiKLgmW2H8iq6JLwP85970oj/dlR8ql3QsF+2w9mHbJlvV9cnqp2+2l+jtT2nK+T04vtb+305xvXrapVEFlypQpysjIcD+Sk5MruiRYovAYNyreue9Febwvl8eEX/Q8ULld0ejSnTvRPq5Wqdvtpfo7U9py2sfVKrW/t9Ocb162qVRBJSgoSDVr1vR4AJLUsWGE+5j0xQgJ8PxI1AsLLPO09cICz1tDh7hL/4U7unvTYjUVraO0uoq2lzSvkpZ37m7ji31fRndvqgUjOhV7X1C5+Ds8n5dlWyrUMS5cf+7TstT+pX1GvVnGucsaclXDErfbotu2L5W2/CFXNSy1Lm+nOd+8bFOpzlEpipNpURRX/XDVD1f9cNUPV/3Yf9VPpTmZ9pdfftGePXskSR07dtQLL7ygnj17KjIyUg0bNvyVqQkqAABURt58f1fofVQ2b96snj17up9PnDhRkjRs2DAtXLiwgqoCAAC2qNCg0qNHD1ly5AkAAFiIM9QAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAs
BZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYK0LDip79uzRypUrdebMGUmSMabcigIAAJAuIKicPHlSN9xwgy677DL9/ve/15EjRyRJI0eO1KRJk8q9QAAAUHV5HVQeeughVatWTUlJSapevbq7fciQIfrkk0/KtTgAAFC1VfN2gk8//VQrV65UbGysR3uLFi108ODBcisMAADA6z0qWVlZHntSCp04cUJBQUHlUhQAAIB0AUHl2muv1euvv+5+7nA45HK5NHv2bPXs2bNciwMAAFWb14d+Zs+erR49emjz5s3Ky8vTI488oh07dujUqVNav369L2oEAABVlNd7VFq3bq3t27erU6dO6tWrl7KysnTLLbdo27ZtatasmS9qBAAAVZTDVOIboGRmZio8PFwZGRmqWbNmRZcDAADKwJvvb68P/Wzfvr3EdofDoeDgYDVs2JCTagEAQLnwOqh06NBBDodD0n/vRlv4XJICAgI0ZMgQvfrqqwoODi6nMgEAQFXk9Tkqy5cvV4sWLTRv3jx9//33+u677zRv3jzFx8dr0aJFeu211/T555/r8ccf90W9AACgCvF6j8pf//pXvfjii+rTp4+7rV27doqNjdUTTzyhb7/9VqGhoZo0aZKee+65ci0WAABULV7vUUlMTFSjRo2KtTdq1EiJiYmSzh4eKvwNIAAAgAvldVBp2bKlZs6cqby8PHdbfn6+Zs6cqZYtW0qSDh8+rKioqPKrEgAAVEleH/p55ZVXNHDgQMXGxqpdu3ZyOBzavn27nE6nPvzwQ0nSvn37NGbMmHIvFgAAVC0XdB+VX375RW+++aZ2794tY4xatmypO+64Q2FhYb6osVTcRwUAgMrHp/dRkaQaNWpo1KhRF1QcAABAWV1QUJGknTt3KikpyeNcFUkaOHDgRRcFAAAgXUBQ2bdvn26++WYlJibK4XAUu+mb0+ks3woBAECV5fVVP+PHj1eTJk107NgxVa9eXTt27NC6deuUkJCgtWvX+qBEAABQVXm9R2Xjxo36/PPPVbduXfn5+cnPz09XX321ZsyYoQcffFDbtm3zRZ0AAKAK8nqPitPpVI0aNSRJderUUUpKiqSzN3zbtWtX+VYHAACqNK/3qLRp00bbt29X06ZN1blzZ82aNUuBgYGaN2+emjZt6osaAQBAFeV1UHn88ceVlZUlSXr66afVv39/XXPNNapdu7aWLl1a7gUCAICq64Ju+FbUqVOnFBER4b7y51Lhhm8AAFQ+Pr/hW1GRkZHlMRsAAAAPXgeVnJwcvfTSS1qzZo1SU1Plcrk8Xt+6dWu5FQcAAKo2r4PKvffeq1WrVukPf/iDOnXqdMkP9wAAgKrD66Dy0UcfacWKFerWrZsv6gEAAHDz+j4qDRo0uOS/kgwAAKomr4PK888/r0cffVQHDx70RT0AAABuXh/6SUhIUE5Ojpo2barq1asrICDA4/VTp06VW3EAAKBq8zqoDB06VIcPH9YzzzyjqKgoTqYFAAA+43VQ2bBhgzZu3Kj27dv7oh4AAAA3r89Radmypc6cOeOLWgAAADx4HVRmzpypSZMmae3atTp58qQyMzM9HgAAAOXF69/68fM7m22KnptijJHD4ZDT6Sy/6n4Fv/UDAEDl49Pf+lmzZs0FFwYAAOANr4NK9+7dfVEHAABAMWUOKtu3by9Tv3bt2l1wMQAAAOcqc1Dp0KGDHA6HzndKy6U+RwUAAPy2lTmo7N+/35d1AAAAFFPmoNKoUSNf1gEAAFCM1/dRAQAAuFQIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWKtMV/107Nix2G/7lGbr1q0XVRAAAEChMgWVQYMGuf+dk5OjOXPmqHXr1urSpYsk6euvv9aOHTs0ZswYnxQJAACqpjIFlalTp7r/PXLkSD344IN66qmnivVJTk4u3+oAAECV5jDnuyd+CcLDw7V582a1aNHCo/3nn39WQkKCMjIyyrXA8/HmZ6IBAIAdvPn+9vpk2pCQEH311VfF2r/66isFBwd7OzsAAIBSlfkW+oUmTJig0aNHa8uWLfrd734n6ew5KvPnz9df/vKXci8QAABUXV4HlcmTJ6tp06Z68cUXtWjRIklSq1attHDhQt12223lXiAAAKi6vD5HxSacowIAQOXj03NUJCk9PV3//ve/9dhjj+nUqVOSzt4/5fDhwxcyOwAAgBJ5fehn+/btuuGGGxQeHq4DBw5o5MiRioyM1PLly3Xw4EG9/vrrvqgTAABUQV7vUZk4caKGDx+un3/+2eMqn759+2rdunXlWhwAAKjavA4qmzZt0gMPPFCsvUGDBjp69Gi5FAUAACBdQFAJDg5WZmZmsfZdu3apbt265VIUAACAdAFB5aabbtL06dOVn58vSXI4HEpKStLkyZM1ePDgci8QAABUXV4Hleeee07Hjx9XvXr1dObMGXXv3l3NmzdXWFiY/vrXv/qiRgAAUEV5fdVPzZo19dVXX+nzzz/X1q1b5XK5dMUVV+iGG27wRX0AAKAK44ZvAADgkvLm+9vrPSqS9Nlnn+mzzz5TamqqXC6Xx2vz58+/kFkCAAAU43VQefLJJzV9+nQlJCSofv36cjgcvqgLAADA+6Ayd+5cLVy4UHfffbcv6gEAAHDz+qqfvLw8de3a1Re1AAAAePA6qIwcOVKLFi3yRS0AAAAevD70k5OTo3nz5mn16tVq166dAgICPF5/4YUXyq04AABQtV3Qryd36NBBkvTDDz94vMaJtQAAoDx5HVTWrFnjizoAAACK8foclXMdOnRIhw8fLq9aAAAAPHgdVFwul6ZPn67w8HA1atRIDRs2VK1atfTUU08Vu/kb
AADAxfD60M///M//6LXXXtPMmTPVrVs3GWO0fv16TZs2TTk5OfwwIQAAKDde/9ZPTEyM5s6dq4EDB3q0v/feexozZswlPRTEb/0AAFD5ePP97fWhn1OnTqlly5bF2lu2bKlTp055OzsAAIBSeR1U2rdvr5dffrlY+8svv6z27duXS1EAAADSBZyjMmvWLPXr10+rV69Wly5d5HA4tGHDBiUnJ2vFihW+qBEAAFRRXu9R6d69u3bv3q2bb75Z6enpOnXqlG655Rbt2rVL11xzjS9qBAAAVZTXJ9PahJNpAQCofHx6Mu0nn3yir776yv38lVdeUYcOHXTHHXcoLS3N+2oBAABK4XVQ+fOf/6zMzExJUmJioiZOnKjf//732rdvnyZOnFjuBQIAgKrL65Np9+/fr9atW0uS3nnnHQ0YMEDPPPOMtm7dqt///vflXiAAAKi6vN6jEhgYqOzsbEnS6tWr1bt3b0lSZGSke08LAABAefB6j8rVV1+tiRMnqlu3bvr222+1dOlSSdLu3bsVGxtb7gUCAICqy+s9Ki+//LKqVaumt99+W//85z/VoEEDSdLHH3+sG2+8sdwLBAAAVReXJwMAgEvKm+/vMh36yczMdM/o185DITAAAIDyUqagEhERoSNHjqhevXqqVauWHA5HsT7GGDkcDjmdznIvEgAAVE1lCiqff/65IiMjJUlr1qzxaUEAAACFOEcFAABcUuV+jkpR6enpeu211/Tjjz/K4XCodevWuvfeexUeHn5BBQMAAJTE68uTN2/erGbNmulvf/ubTp06pRMnTuiFF15Qs2bNtHXrVl/UCAAAqiivD/1cc801at68uf71r3+pWrWzO2QKCgo0cuRI7du3T+vWrfNJoSXh0A8AAJWPN9/fXgeVkJAQbdu2TS1btvRo37lzpxISEty3178UCCoAAFQ+3nx/e33op2bNmkpKSirWnpycrLCwMG9nBwAAUCqvg8qQIUN03333aenSpUpOTtahQ4e0ZMkSjRw5UkOHDvVFjQAAoIry+qqf5557Tg6HQ/fcc48KCgokSQEBARo9erRmzpxZ7gUCAICq64Lvo5Kdna29e/fKGKPmzZurevXq5V3br+IcFQAAKh+fnKOSnZ2tsWPHqkGDBqpXr55Gjhyp+vXrq127dhUSUgAAwG9fmYPK1KlTtXDhQvXr10+33367Vq1apdGjR/uyNgAAUMWV+RyVZcuW6bXXXtPtt98uSbrrrrvUrVs3OZ1O+fv7+6xAAABQdZV5j0pycrKuueYa9/NOnTqpWrVqSklJ8UlhAAAAZQ4qTqdTgYGBHm3VqlVzX/kDAABQ3sp86McYo+HDhysoKMjdlpOTo1GjRik0NNTdtmzZsvKtEAAAVFllDirDhg0r1nbXXXeVazEAAADnKnNQWbBggS/rAAAAKMbrW+gDAABcKgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFiLoAIAAKxFUAEAANYiqAAAAGsRVAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABrEVQAAIC1CCoAAMBaBBUAAGAtggoAALAWQQUAAFirwoPKnDlz1KRJEwUHB+vKK6/Ul19+WdElAQAAS1SryIUvXbpUEyZM0Jw5c9StWze9+uqr6tu3r3bu3KmGDRtWZGnalpSmV9bs0b7jvyiomp8OnMjWmQJXsX5B/g45HA7lO11ymrPJr0aQv1rWr6m4yOr66Wim9h3PUoHTpZAAf4UGV1N+gUsB/n7KznMqJ9+poGp+CvD3k9MY+TkcchkjlzFyGqmgwKVq/g4FB/grO88pl8uofniwOjetLUmKDg9WTK0QpaSf0codx3Q4LVtB1fw0pFND/blPy1LXbe2uVElSTK0QBfj7Kd95tqYmdUK1+9hpfZ+crojQQDWrW8PjtY4NI7R0U5K+T05X+7haGnJVw2Lz3n8iy923vN6L8p6nLxSt89fqtnG9yqumbUlpWvJtkvafyFLN4Gq6vEG4e1trUidUq388pi92peqXXKdqBFVT3bAgpZ7O0bGMHNUIDlDzuqG6vEG4th/K0A+HM1TNz6GMnHxl5xX/DP4ahyRzwWviG/6Os/91Gc/a/B1n2/z9JH+HQ3lO4369fs0g5TuNsvMK5HQZ5TuNXJJCA/3UMLK6/BwOZec5JUk5BU6dyXOqeqC/ujSro4wz+fo+OV15BU4VuCQ/h5TQOFIPXt/C/Xeudmigujav4/57cjQjx6Pmk7/k6khGjoyktg3CdXuns5/9otv82l2pOpqRo+jwYPWIr+fuU/h3JN/pUkr6GUlSj/h67u2spL9L526H526bRZd7oWz8DNrGhjFyGGMq7DPcuXNnXXHFFfrnP//pbmvVqpUGDRqkGTNm/Or0mZmZCg8PV0ZGhmrWrFludc38+EfN/WJfuc2votQNC9Sm/+nl0Xax61Y3LFDHT+e5n3eIC9e7Y68ucd6jujfV5L6tLnhZvpqnLxSts0NcuL5LznA/L1q3jetVXjX9Vj4/8E7Rbb6sRnVvKkmlbjNleb08tlMbPoO28eUYefP9XWGHfvLy8rRlyxb17t3bo713797asGFDidPk5uYqMzPT41HetiWl/Wb+yB4/nafZK39yPy+PdTs3pEjSd8kZWropqcR5z/1in7YlpV3wsnwxT18oqc6if7DPrdvG9Sqvmn5Lnx9450JCinR2OzvfNlOW18tjO63oz6BtbBqjCgsqJ06ckNPpVFRUlEd7VFSUjh49WuI0M2bMUHh4uPsRFxdX7nXtP5FV7vOsSFsP/nej8tW6fZ+cXuq8L2aZvpinL5S1nsJ+Nq5XedVk23uDqqG8tlO23/+yaYwq/GRah8Ph8dw
YU6yt0JQpU5SRkeF+JCcnl3s9hcc/fyuuaPTfY4q+Wrf2cbVKnffFLNMX8/SFstZT2M/G9Sqvmmx7b1A1lNd2yvb7XzaNUYUFlTp16sjf37/Y3pPU1NRie1kKBQUFqWbNmh6P8taxYYT7mGhlVy8s0OOE2vJYt3phgR7PO8aFa8hVDUuc9+juTS/q5CtfzNMXSqqzQ1y4x/Nz67Zxvcqrpt/S5wfeKbrNl9Xo7k3Pu82U5fXy2E4r+jNoG5vGqMJPpr3yyis1Z84cd1vr1q110003VejJtBJX/XDVj/e46sdzPlz1Uzqu+uGqn8rCV2Pkzfd3hQaVpUuX6u6779bcuXPVpUsXzZs3T//617+0Y8cONWrU6Fen92VQAQAAvuHN93eF3kdlyJAhOnnypKZPn64jR46oTZs2WrFiRZlCCgAA+O2r0D0qF4s9KgAAVD6V4j4qAAAAv4agAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACwFkEFAABYq0JvoX+xCm+qm5mZWcGVAACAsir83i7LzfErdVA5ffq0JCkuLq6CKwEAAN46ffq0wsPDz9unUv/Wj8vlUkpKisLCwuRwOCq6nAqVmZmpuLg4JScn87tHlwDjfWkx3pcW431pVcXxNsbo9OnTiomJkZ/f+c9CqdR7VPz8/BQbG1vRZVilZs2aVWZDtwHjfWkx3pcW431pVbXx/rU9KYU4mRYAAFiLoAIAAKxFUPmNCAoK0tSpUxUUFFTRpVQJjPelxXhfWoz3pcV4n1+lPpkWAAD8trFHBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUKqnc3Fx16NBBDodD3333ncdrSUlJGjBggEJDQ1WnTh09+OCDysvL8+iTmJio7t27KyQkRA0aNND06dPL9JsLVcmBAwd03333qUmTJgoJCVGzZs00derUYmPJePvWnDlz1KRJEwUHB+vKK6/Ul19+WdElVUozZszQVVddpbCwMNWrV0+DBg3Srl27PPoYYzRt2jTFxMQoJCREPXr00I4dOzz65Obm6k9/+pPq1Kmj0NBQDRw4UIcOHbqUq1LpzJgxQw6HQxMmTHC3MdZeMKiUHnzwQdO3b18jyWzbts3dXlBQYNq0aWN69uxptm7dalatWmViYmLMuHHj3H0yMjJMVFSUuf32201iYqJ55513TFhYmHnuuecqYE3s9fHHH5vhw4eblStXmr1795r33nvP1KtXz0yaNMndh/H2rSVLlpiAgADzr3/9y+zcudOMHz/ehIaGmoMHD1Z0aZVOnz59zIIFC8wPP/xgvvvuO9OvXz/TsGFD88svv7j7zJw504SFhZl33nnHJCYmmiFDhpj69eubzMxMd59Ro0aZBg0amFWrVpmtW7eanj17mvbt25uCgoKKWC3rffvtt6Zx48amXbt2Zvz48e52xrrsCCqV0IoVK0zLli3Njh07igWVFStWGD8/P3P48GF32+LFi01QUJDJyMgwxhgzZ84cEx4ebnJyctx9ZsyYYWJiYozL5bpk61EZzZo1yzRp0sT9nPH2rU6dOplRo0Z5tLVs2dJMnjy5gir67UhNTTWSzBdffGGMMcblcpno6Ggzc+ZMd5+cnBwTHh5u5s6da4wxJj093QQEBJglS5a4+xw+fNj4+fmZTz755NKuQCVw+vRp06JFC7Nq1SrTvXt3d1BhrL3DoZ9K5tixY7r//vv1xhtvqHr16sVe37hxo9q0aaOYmBh3W58+fZSbm6stW7a4+3Tv3t3j5kJ9+vRRSkqKDhw44PN1qMwyMjIUGRnpfs54+05eXp62bNmi3r17e7T37t1bGzZsqKCqfjsyMjIkyb0979+/X0ePHvUY76CgIHXv3t093lu2bFF+fr5Hn5iYGLVp04b3pARjx45Vv379dMMNN3i0M9beIahUIsYYDR8+XKNGjVJCQkKJfY4ePaqoqCiPtoiICAUGBuro0aOl9il8XtgHxe3du1cvvfSSRo0a5W5jvH3nxIkTcjqdJY4d43ZxjDGaOHGirr76arVp00bSf7fF84330aNHFRgYqIiIiFL74KwlS5Zo69atmjFjRrHXGGvvEFQsMG3aNDkcjvM+Nm/erJdeekmZmZmaMmXKeefncDiKtRljPNqL9jH/d2JnSdP+1pR1vM+VkpKiG2+8UbfeeqtGjhzp8Rrj7VsljR3jdnHGjRun7du3a/HixcVeu5Dx5j3xlJycrPHjx+vNN99UcHBwqf0Y67KpVtEF4Owfjdtvv/28fRo3bqynn35aX3/9dbHfg0hISNCdd96p//znP4qOjtY333zj8XpaWpry8/Pd6T06OrpYIk9NTZVUPOH/FpV1vAulpKSoZ8+e6tKli+bNm+fRj/H2nTp16sjf37/EsWPcLtyf/vQnvf/++1q3bp1iY2Pd7dHR0ZLO/p98/fr13e3njnd0dLTy8vKUlpbm8X/6qamp6tq16yVaA/tt2bJFqampuvLKK91tTqdT69at08svv+y+2oqxLqMKOjcGF+DgwYMmMTHR/Vi5cqWRZN5++22TnJxsjPnvyZ0pKSnu6ZYsWVLs5M5atWqZ3Nxcd5+ZM2dycmcJDh06ZFq0aGFuv/32Es+0Z7x9q1OnTmb06NEeba1ateJk2gvgcrnM2LFjTUxMjNm9e3eJr0dHR5tnn33W3Zabm1viCZ5Lly5190lJSamSJ3ieT2Zmpsff6sTERJOQkGDuuusuk5iYyFh7iaBSie3fv7/Uy5Ovv/56s3XrVrN69WoTGxvrcblsenq6iYqKMkOHDjWJiYlm2bJlpmbNmlwuW8Thw4dN8+bNzXXXXWcOHTpkjhw54n4UYrx9q/Dy5Ndee83s3LnTTJgwwYSGhpoDBw5UdGmVzujRo014eLhZu3atx7acnZ3t7jNz5kwTHh5uli1bZhITE83QoUNLvGQ2NjbWrF692mzdutVcd911VfKSWW+de9WPMYy1NwgqlVhJQcWYs3te+vXrZ0JCQkxkZKQZN26cx6Wxxhizfft2c80115igoCATHR1tpk2bxv/dF7FgwQIjqcTHuRhv33rllVdMo0aNTGBgoLniiivcl9PCO6VtywsWLHD3cblcZurUqSY6OtoEBQWZa6+91iQmJnrM58yZM2bcuHEmMjLShISEmP79+5ukpKRLvDaVT9GgwliXncMYbo8JAADsxFU/AADAWgQVAABgLYIKAACwFkEFAABYi6ACAACsRVABAADWIqgAAABrEVQAWG348OEaNGiQ+3mPHj00YcKECqsHwKVFUAHglaNHj2r8+PFq3ry5goODFRUVpauvvlpz585Vdna2z5e/bNkyPfXUU+U6z6JhCIA9+PVkAGW2b98+devWTbVq1dIzzzyjtm3bqqCgQLt379b8+fMVExOjgQMHFpsuPz9fAQEB5VJDZGRkucwHQOXAHhUAZTZmzBhVq1ZNmzdv1m233aZWrVqpbdu2Gjx4sD766CMNGDBAkuRwODR37lzddNNNCg0N1dNPPy2n06n77rtPTZo0UUhIiOLj4/Xiiy96zN/pdGrixI
mqVauWateurUceeURFf+Wj6KGfvLw8PfLII2rQoIFCQ0PVuXNnrV271v36woULVatWLa1cuVKtWrVSjRo1dOONN+rIkSOSpGnTpuk///mP3nvvPTkcDjkcDo/pAVQsggqAMjl58qQ+/fRTjR07VqGhoSX2cTgc7n9PnTpVN910kxITE3XvvffK5XIpNjZWb731lnbu3Km//OUveuyxx/TWW2+5p3n++ec1f/58vfbaa/rqq6906tQpLV++/Lx1jRgxQuvXr9eSJUu0fft23Xrrrbrxxhv1888/u/tkZ2frueee0xtvvKF169YpKSlJDz/8sCTp4Ycf1m233eYOL0eOHFHXrl0vZqgAlCMO/QAokz179sgYo/j4eI/2OnXqKCcnR5I0duxYPfvss5KkO+64Q/fee69H3yeffNL97yZNmmjDhg166623dNttt0mS/v73v2vKlCkaPHiwJGnu3LlauXJlqTXt3btXixcv1qFDhxQTEyPpbPD45JNPtGDBAj3zzDOSzh56mjt3rpo1ayZJGjdunKZPny5JqlGjhkJCQpSbm6vo6OgLGxwAPkNQAeCVc/eaSNK3334rl8ulO++8U7m5ue72hISEYtPOnTtX//73v3Xw4EGdOXNGeXl56tChgyQpIyNDR44cUZcuXdz9q1WrpoSEhGKHfwpt3bpVxhhddtllHu25ubmqXbu2+3n16tXdIUWS6tevr9TU1LKvNIAKQ1ABUCbNmzeXw+HQTz/95NHetGlTSVJISIhHe9HDQ2+99ZYeeughPf/88+rSpYvCwsI0e/ZsffPNNxdck8vlkr+/v7Zs2SJ/f3+P12rUqOH+d9ETeR0OR6nhB4BdOEcFQJnUrl1bvXr10ssvv6ysrCyvp//yyy/VtWtXjRkzRh07dlTz5s21d+9e9+vh4eGqX7++vv76a3dbQUGBtmzZUuo8O3bsKKfTqdTUVDVv3tzj4c1hnMDAQDmdTq/XCYDvEVQAlNmcOXNUUFCghIQELV26VD/++KN27dqlN998Uz/99FOxvRrnat68uTZv3qyVK1dq9+7deuKJJ7Rp0yaPPuPHj9fMmTO1fPly/fTTTxozZozS09NLnedll12mO++8U/fcc4+WLVum/fv3a9OmTXr22We1YsWKMq9X48aNtX37du3atUsnTpxQfn5+macF4FsEFQBl1qxZM23btk033HCDpkyZovbt2yshIUEvvfSSHn744fPeiG3UqFG65ZZbNGTIEHXu3FknT57UmDFjPPpMmjRJ99xzj4YPH+4+PHTzzTeft6YFCxbonnvu0aRJkxQfH6+BAwfqm2++UVxcXJnX6/7771d8fLwSEhJUt25drV+/vszTAvAth+FALQAAsBR7VAAAgLUIKgAAwFoEFQAAYC2CCgAAsBZBBQAAWIugAgAArEVQAQAA1iKoAAAAaxFUAACAtQgqAADAWgQVAABgLYIKAACw1v8HxPnzFo7bwJEAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Number of poisoned images: 300 out of 10000.\n", + "last index of poison 847\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAj8AAAHFCAYAAAAdTZjVAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAABcQ0lEQVR4nO3dd1hT1/8H8HdYYYOALKWAokgV96jaKrhnrVp3VRy1Kg4cddQqaK24S+usC7U/Z1214hek7roRtziL4gBpHaCiyDi/P2xuCQFMMBEw79fz5NGce865n3tyc/PhTpkQQoCIiIhITxgUdQBERERE7xKTHyIiItIrTH6IiIhIrzD5ISIiIr3C5IeIiIj0CpMfIiIi0itMfoiIiEivMPkhIiIivcLkh4iIiPRKiU9+zp8/jwEDBqB8+fIwMzODmZkZKlSogK+++goxMTHvLI6QkBDIZDKlMg8PDwQEBOh0vkePHkVISAiePHmi9b43bdqEypUrw8zMDDKZDGfPns2z3oEDByCTyaSXoaEhnJyc0KVLF8TFxWk834CAAHh4eLxd8MXA6tWrIZPJcOvWraIORaLuOqn4LGfOnKkyTbFc7/L7lZOfn5/S+mZmZoZq1aohLCwM2dnZGvV169YtyGQyrF69WjfBvkOafLbDhg3TfUDFnIeHB9q1a1fUYQD4bxu6ZcuWQvdRVNvNJ0+ewMHBARs3bpTK7t69i6CgIDRu3Bi2trYaf8fUbZ+RkYHy5csjLCxM47hLdPLz888/o1atWjhx4gRGjhyJXbt2ISIiAkFBQbh06RLq1KmDmzdvFll827dvx+TJk3U6j6NHj2Lq1KlaT37+/vtv9O7dG+XLl0dkZCSOHTuGihUrFthmxowZOHbsGPbv34/x48cjOjoaDRs2xL179zSa9+TJk7F9+/a3CZ+0ZObMmXj06FFRh6GiXLlyOHbsGI4dO4ZNmzahTJkyGDVqFCZOnKhRPy4uLjh27Bjatm2ro0iJ3m9Tp06Fq6srunXrJpXduHED69atg4mJCdq0aaNxn+q2NzY2xpQpUzBt2jQ8fPhQo3kYaRxVMXHkyBEMHToUbdu2xZYtW2BiYiJNa9KkCQIDA/Hrr7/CzMyswH7S0tJgbm6ukxhr1Kihk37fhWvXriEjIwNffPEFGjdurFabChUq4KOPPgIANGrUCLa2thgwYABWr16NSZMmqT3v8uXLFypm0q5mzZrhwIED+P777zFv3ryiDkeJmZmZtK4BQOvWrVGpUiUsXLgQ06dPh7GxsVr9yOVypX6ISH2PHj3Czz//jB9++EHpyEejRo3w999/AwBiYmKwYcMGjfrVpH2PHj0wevRo/Pzzz/jmm2/UnkeJ3fMzY8YMGBoa4ueff1ZKfHLq0qULXF1dpfcBAQGwtLTEhQsX0KJFC1hZWaFp06YAgOjoaHTo0AFly5aFqakpvLy88NVXX+Gff/5R6TciIgLVq1eHXC6Hp6cn5s6dm+f889oNnZqairFjx8LT0xMmJiYoU6YMgoKC8Pz5c6V6il3Tv/zyC3x8fGBubo5q1aph165dUp2QkBB8/fXXAABPT0/pMMCBAwcKHLudO3eifv36MDc3h5WVFZo3b45jx44pjdPHH38MAOjWrRtkMhn8/PwK7DMvih+V27dvAwCys7Mxe/ZsVKpUCXK5HI6OjujTpw/u3r2r1C6v3be//vor6tWrBxsbG5ibm6NcuXLo37+/Up2EhAR88cUXcHR0hFwuh4+PD+bNm6d0KERxmGPu3LmYP38+PD09YWlpifr16+P48eMqyxATE4NPP/0UdnZ2MDU1RY0aNbB582aVesePH0fDhg1hamoKV1dXTJw4ERkZGWqNU0xMDLp37w4PDw+YmZnBw8MDPXr0kMZNQXG4af/+/RgyZAgcHBxgb2+PTp064f79+0p1MzIyMG7cODg7O8Pc3Bwff/wxTp48qVY8Ct7e3hgwYAAWLVqkEkte3rReAf8dHr506RJ69OgBGxsbODk5oX///khJSdEovpyMjY1Rq1YtpKWlSRvNixcvokOHDihVqhRMTU1RvXp1rFmzRqldXoe9/v77bwwaNAhubm6Qy+UoXbo0GjZsiD/++EOp7apVq1CtWjWYmprCzs4OHTt2VDnMq9jm3LhxA23atIGlpSXc3NwwZswYpKenK9V99eoVpk+fLn0/SpcujX79+knLo6CNzzYnxSGX9evXY/z48XBxcYGlpSXat2+PBw8e4OnTpxg0aBAcHBzg4OCAfv364dmzZ0p9LFq0CI0aNYKjoyMsLCzg6+uL2bNnq3wHhBCYMWMG3N3dYWpqitq1ayM6Ohp+fn4q2xh1t5XqbBsKS93fBcV6ff78eXTp0gU2Njaws7PD6NGjkZmZiatXr6JVq1awsrKCh4cHZs+enef8Xr58idGjR8PZ2RlmZmZo3Lgxzpw5o1Jv9erV8Pb2lrZza9euzbO/qVOnol69erCzs4O1tTVq1qyJlStXQlvPM1+9ejUyMzOV9voAgIHB26UWmrQ3MTFBt27dsGzZMs2WS5RAmZmZwszMTNSvX1+jdn379hXGxsbCw8NDhIaGir1794qoqCghhBBLliwRoaGhYufOneLgwYNizZo1olq1asLb21u8evVK6uOPP/4QhoaG4uOPPxbbtm0Tv/76q6hTp4744IMPRO7hdHd3F3379pXeP3/+XFSvXl04ODiI+fPniz/++EP8+OOPwsbGRjRp0kRkZ2dLdQEIDw8PUbduXbF582axe/du4efnJ4yMjMTNmzeFEELcuXNHDB8+XAAQ27ZtE8eOHRPHjh0TKSkp+Y7BunXrBADRokULsWPHDrFp0yZRq1YtYWJiIg4fPiyEEOLGjRti0aJFAoCYMWOGOHbsmLh06VK+fe7fv18AEL/++qtS+W+//SYAiG+++UYIIcSgQYMEADFs2DARGRkpli5dKkqXLi3c3NzE33//rfQ5ubu7S++PHj0qZDKZ6N69u9i9e7fYt2+fCA8PF71795bqJCcnizJlyojSpUuLpUuXisjISDFs2DABQAwZMkSqFx8fL41tq1atxI4dO8SOHTuEr6+vKFWqlHjy5IlUd9++fcLExER88sknYtOmTSIyMlIEBAQIACI8PFyqd+nSJWFubi4+/PBDsWHDBvHbb7+Jli1bSutEfHx8vmMnhBC//vqrmDJliti+fbs4ePCg2Lhxo2jcuLEoXbq00riEh4cLAKJcuXJi+PDhIioqSqxYsUKUKlVK+Pv7K/XZt29fIZPJxNdffy327Nkj5s+fL8qUKSOsra2V1sn8AB
CBgYEiMTFRmJubK421Io5Tp05JZeqsV0IIERwcLAAIb29vMWXKFBEdHS3mz58v5HK56Nev3xvjEkKIxo0bi8qVK6uU16xZUxgZGYm0tDRx5coVYWVlJcqXLy/Wrl0rIiIiRI8ePQQAMWvWLKmNYn3I+Xm2bNlSlC5dWixbtkwcOHBA7NixQ0yZMkVs3LhRqjNjxgwBQPTo0UNERESItWvXinLlygkbGxtx7do1qV7fvn2FiYmJ8PHxEXPnzhV//PGHmDJlipDJZGLq1KlSvaysLNGqVSthYWEhpk6dKqKjo8WKFStEmTJlxIcffijS0tKU+tTGZ6ug+P66u7uLgIAA6btpaWkp/P39RfPmzcXYsWPFnj17xKxZs4ShoaEYPny4Up+jRo0SS5YsEZGRkWLfvn3ihx9+EA4ODiqf6cSJEwUAMWjQIBEZGSmWL18uPvjgA+Hi4iIaN24s1VN3W6nOtiE/7u7uom3btgXWUfd3Ied6/d1334no6Ggxbtw4aXtXqVIl8dNPP4no6GjRr18/AUBs3bpV5TNwc3MTHTp0EL///rv4v//7P+Hl5SWsra2lbb4Q/33/ctdzc3NT2m4KIURAQIBYuXKliI6OFtHR0eK7774TZmZmSuueEK/Xv4yMjDe+MjMzldo1adJE1K1bt8AxPHXqlMp3TBPqtN+0aZMAIM6fP692vyUy+UlKShIARPfu3VWmZWZmKn1YOROKvn37CgBi1apVBfafnZ0tMjIyxO3btwUA8dtvv0nT6tWrJ1xdXcWLFy+kstTUVGFnZ/fG5Cc0NFQYGBgo/WgIIcSWLVsEALF7926pDIBwcnISqampSsttYGAgQkNDpbI5c+ao9QMrxOsV3NXVVfj6+oqsrCyp/OnTp8LR0VE0aNBAKssvocmLou6mTZtERkaGSEtLE4cOHRJeXl7C0NBQnDt3TsTFxQkAYujQoUptT5w4oZQgCaGa/MydO1cAUEpMcpswYYIAIE6cOKFUPmTIECGTycTVq1eFEP/92Pn6+ip9kU+ePCkAiA0bNkhllSpVEjVq1BAZGRlKfbZr1064uLhIY9itWzdhZmYmkpKSpDqZmZmiUqVKan82OWVmZopnz54JCwsL8eOPP0rlio1e7jGcPXu2ACASExOFEEIa61GjRinVUyQomv5ATpo0SRgYGIhz584pxaFYjzVZrxQ/ErNnz1aa39ChQ4WpqanS9zU/iuRH8R2/f/++9Pl36dJFCCFE9+7dhVwuFwkJCUptW7duLczNzaV1Ka/kx9LSUgQFBeU7/8ePHwszMzPRpk0bpfKEhAQhl8tFz549pTLFNmfz5s1Kddu0aSO8vb2l9xs2bFD5QRTivw3/4sWLhRDa/2yF+O/72759e6V6QUFBAoAYMWKEUvlnn30m7Ozs8u1f8UO6du1aYWhoKB49eiSEEOLRo0dCLpeLbt26KdU/duyYAKCU/Ki7rVRn25AfdZKfnAr6XVCs1/PmzVNqU716demPU4WMjAxRunRp0alTJ6lM8RnUrFlT6Ttw69YtYWxsLAYOHCiE+O+7ll+93MlPTorPZdq0acLe3j7P38Y3vXJ+RkIIYW5uLgYPHlzguL2L5Of69esCgFiyZIna/ZbYw175qVWrFoyNjaVXXucqdO7cWaUsOTkZgwcPhpubG4yMjGBsbAx3d3cAkHZlP3/+HKdOnUKnTp1gamoqtbWyskL79u3fGNuuXbtQpUoVVK9eHZmZmdKrZcuWeR6u8vf3h5WVlfTeyckJjo6Oah2CyMvVq1dx//599O7dW2m3oqWlJTp37ozjx48jLS2tUH0Drw+RGRsbw9zcHI0aNUJWVha2bNmCqlWrYv/+/QCgchiwbt268PHxwd69e/Ptt06dOgCArl27YvPmzXmeQL1v3z58+OGHqFu3rlJ5QEAAhBDYt2+fUnnbtm1haGgova9atSqA/w7R3bhxA1euXEGvXr0AQOnzatOmDRITE3H16lUAwP79+9G0aVM4OTlJ/RkaGqrsCs7Ps2fPMH78eHh5ecHIyAhGRkawtLTE8+fP87xa7tNPP1V6nzt2xVgrYlfo2rUrjIw0P81v3LhxsLOzw/jx4/OcXpj1Kq9lePnyJZKTkwG8PkSac8yzsrKU6l+6dEn6jru6umLevHno1asXli9fDuD1+tC0aVO4ubkptQsICEBaWprK4bic6tati9WrV2P69Ok4fvy4yqGbY8eO4cWLFyrrspubG5o0aaKyLstkMpXtQ9WqVZW+x7t27YKtrS3at2+vtNzVq1eHs7OztG3Q9mebU+4rn3x8fABA5WRwHx8fPHr0SOnQ15kzZ/Dpp5/C3t4ehoaGMDY2Rp8+fZCVlYVr164BeH1oOD09HV27dlXq76OPPlI5zK3utlKdbcPbUOd3Iae8xlAmk6F169ZSmZGREby8vPLcjvfs2VPp3Bl3d3c0aNBA+twV37X86uW2b98+NGvWDDY2NtLnMmXKFDx8+FD6rgGvD9udOnXqja+ff/5ZavPkyROkpaXB0dHxjeOoa4oYNPn8S+QJzw4ODjAzM8tz5Vm/fj3S0tKQmJiosoEFAHNzc1hbWyuVZWdno0WLFrh//z4mT54MX19fWFhYIDs7Gx999BFevHgBAHj8+DGys7Ph7Oys0m9eZbk9ePAAN27cyPdkzNzHke3t7VXqyOVyKR5NKc6Gd3FxUZnm6uqK7OxsPH78uNAngM+aNQtNmjSBoaEhHBwclH543jTvghK6Ro0aYceOHfjpp5/Qp08fpKeno3Llypg0aRJ69Ogh9Z/XZZ6Kc75yXwmQe2zlcjkASGP74MEDAMDYsWMxduzYPONSfF4PHz4s9DoBvN7g7d27F5MnT0adOnVgbW0NmUyGNm3a5PlZvyl2xbLmnr+RkVGe69SbWFtb49tvv0VQUJC0Ec6pMOvVm5ahf//+SufnNG7cWOmPg/Lly2Pjxo2QyWQwNTWFp6enUv8PHz7MN56cMedl06ZNmD59OlasWIHJkyfD0tISHTt2xOzZs+Hs7PzG5Y2OjlYqMzc3V/pjSbG8L1++lN4/ePAAT548yff8xZzrGqC9zzYnOzs7pfeKWPIrf/nyJSwtLZGQkIBPPvkE3t7e+PHHH+Hh4QFTU1OcPHkSgYGBKutlzj8SFHKXqbutVGfbUFjq/i7klNdY5fX5m5iYIDU1VaV9ftuRc+fOAcj/81eU5bytxsmTJ9GiRQv4+flh+fLlKFu2LExMTLBjxw58//33SvF/8MEHKFu2bAGj8VrOhEvRPveyFQVFDJr8NpbI5MfQ0BBNmjTBnj17kJiYqLQR+vDDDwEg33ur5L4XD/D6xMhz585h9erV6Nu3r1R+48YNpXqlSpWCTCZDUlKSSh95leWmSNpWrVqV73RdUmwcExMTVabdv38fBgYGKFWqVKH7L1euHGrXrv3Geef+kt2/f/+Ny96hQwd06NAB6enpOH78OEJDQ9GzZ094eHigfv36sLe3z3e5AM3HVlF/4sSJ6NSpU551vL29pWUr7DqRkpKCXbt2ITg4GBMmTJDK09PTC32JuWKsk5KSU
... (remainder of the base64-encoded image/png payload elided for readability; it is the plot produced by the lava.compute_values_and_visualize call in this cell's source) ...", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "%matplotlib inline\n", + "calibrated_gradient = lava.compute_values_and_visualize(dual_sol, trained_with_flag, training_size, portion)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# continue" + ] + }, + { + "cell_type": "code", + "execution_count": 73, + "metadata": {}, + "outputs": [], + "source": [ + "train_indices = get_indices(loaders['train'])\n", + "trained_with_flag = train_with_corrupt_flag(loaders['train'], shuffle_ind, train_indices)" + ] + }, + { + "cell_type": "code", + "execution_count": 44, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(6) True\n", + "torch.Size([1, 32, 32]) tensor(2) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(5) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(8) True\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(9) True\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(8) True\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(2) True\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(6) True\n", + "torch.Size([1, 32, 32]) tensor(8) True\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(9) True\n", + "torch.Size([1, 32, 32]) tensor(8) True\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(2) True\n", + "torch.Size([1, 32, 32]) tensor(8) True\n", + "torch.Size([1, 32, 32]) tensor(7) True\n", + "torch.Size([1, 32, 32]) tensor(3) True\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(9) True\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) tensor(0) False\n", + "torch.Size([1, 32, 32]) 
tensor(6) True\n", + " ... (hundreds of similar lines omitted for brevity: one torch.Size([1, 32, 32]) tensor(label) corrupted-flag entry per training sample; the stream continues below in the same format) ...\n", +
"torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(8) True\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(9) True\n", + "torch.Size([1, 32, 32]) tensor(0) True\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(5) True\n", + "torch.Size([1, 32, 32]) tensor(7) True\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(5) True\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(6) True\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(0) True\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(7) True\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(4) True\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(6) True\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(2) True\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(0) True\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(6) True\n", + "torch.Size([1, 32, 32]) tensor(3) True\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(0) True\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(4) True\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(6) True\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + 
"torch.Size([1, 32, 32]) tensor(6) True\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(6) True\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(6) True\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(5) True\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(3) True\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(2) True\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(0) True\n", + "torch.Size([1, 32, 32]) tensor(6) True\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n" + ] + } + ], + "source": [ + "for tr in trained_with_flag:\n", + " print(tr[0].shape, tr[1], tr[2])" + ] + }, + { + "cell_type": "code", + "execution_count": 38, + "metadata": {}, + "outputs": [], + "source": [ + "all_data = []\n", + "all_labels = []\n", + "\n", + "for data, labels in loaders['train']:\n", + " all_data.append(data)\n", + " all_labels.append(labels)\n", + "\n", + "all_data = torch.cat(all_data)\n", + "all_labels = torch.cat(all_labels)" + ] + }, + { + "cell_type": "code", + "execution_count": 39, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([1000])\n", + "torch.Size([1000, 1, 32, 32])\n" + ] + } + ], + "source": [ + "print(all_labels.shape)\n", + "print(all_data.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [], + "source": [ + "# Reshape dữ liệu để SMOTE có thể xử lý\n", + "n_samples, channels, height, width = all_data.shape\n", + "all_data_flat = all_data.view(n_samples, -1).numpy()\n", + "all_labels_flat = all_labels.numpy()" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(1000,)\n", + "(1000, 1024)\n" + ] + } + ], + "source": [ + "print(all_labels.shape)\n", + "print(all_data_flat.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [], + "source": [ + "cnt_feature = []\n", + "cnt_label = []\n", + "cnt_feature_dif = []\n", + "cnt_label_dif = []\n", + "for i in range(len(all_data_flat)):\n", + " if all_labels_flat[i] == 1:\n", + " cnt_feature.append(all_data_flat[i])\n", + " cnt_label.append(all_labels_flat[i])\n", + " elif all_labels_flat[i] == 0:\n", + " cnt_feature.append(all_data_flat[i])\n", + " cnt_label.append(all_labels_flat[i])\n", + " else:\n", + " cnt_feature_dif.append(all_data_flat[i])\n", + " cnt_label_dif.append(all_labels_flat[i])" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "742\n", + "742\n" + ] + } + ], + "source": [ + "print(len(cnt_feature))\n", + "print(len(cnt_label))" + ] + }, + { + "cell_type": "code", + "execution_count": 27, 
+ "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "258\n", + "258\n" + ] + } + ], + "source": [ + "print(len(cnt_feature_dif))\n", + "print(len(cnt_label_dif))" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": {}, + "outputs": [], + "source": [ + "cnt_feature = [torch.tensor(f) for f in cnt_feature]\n", + "cnt_label = [torch.tensor(l) for l in cnt_label]\n", + "cnt_feature_dif = [torch.tensor(f) for f in cnt_feature_dif]\n", + "cnt_label_dif = [torch.tensor(l) for l in cnt_label_dif]" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": {}, + "outputs": [], + "source": [ + "cnt_feature = torch.stack(cnt_feature)\n", + "cnt_label = torch.stack(cnt_label)\n", + "cnt_feature = cnt_feature.numpy()\n", + "cnt_label = cnt_label.numpy()\n", + "cnt_feature_dif = torch.stack(cnt_feature_dif)\n", + "cnt_label_dif = torch.stack(cnt_label_dif)" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([258, 1024])\n" + ] + } + ], + "source": [ + "print(cnt_feature_dif.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": {}, + "outputs": [], + "source": [ + "from imblearn.over_sampling import SMOTE\n", + "\n", + "# Áp dụng SMOTE\n", + "smote = SMOTE(random_state=42)\n", + "X_res, y_res = smote.fit_resample(cnt_feature, cnt_label) " + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": {}, + "outputs": [], + "source": [ + "old_sz = len(cnt_label)\n", + "#new_indices = np.arange(old_sz, len(X_res))\n", + "X_res_new = X_res[old_sz:]\n", + "y_res_new = y_res[old_sz:]\n", + "X_res_old = X_res[:old_sz]\n", + "y_res_old = y_res[:old_sz]" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "742\n" + ] + } + ], + "source": [ + "print(old_sz)" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(368, 1024)\n", + "(368,)\n", + "(742, 1024)\n", + "(742,)\n" + ] + } + ], + "source": [ + "print(X_res_new.shape)\n", + "print(y_res_new.shape)\n", + "print(X_res_old.shape)\n", + "print(y_res_old.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 130, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(1110, 1024)\n", + "(1110,)\n" + ] + } + ], + "source": [ + "print(X_res.shape)\n", + "print(y_res.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "C:\\Users\\21520\\AppData\\Local\\Temp\\ipykernel_8088\\724189199.py:6: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n", + " cnt_feature_dif_tensor = torch.tensor(cnt_feature_dif).view(-1, channels, height, width)\n", + "C:\\Users\\21520\\AppData\\Local\\Temp\\ipykernel_8088\\724189199.py:7: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n", + " cnt_label_dif_tensor = torch.tensor(cnt_label_dif)\n" + ] + } + ], + "source": [ + "# Chuyển đổi 
lại dữ liệu thành tensor\n", + "X_res_old_tensor = torch.tensor(X_res_old).view(-1, channels, height, width)\n", + "y_res_old_tensor = torch.tensor(y_res_old)\n", + "X_res_new_tensor = torch.tensor(X_res_new).view(-1, channels, height, width)\n", + "y_res_new_tensor = torch.tensor(y_res_new)\n", + "cnt_feature_dif_tensor = torch.tensor(cnt_feature_dif).view(-1, channels, height, width)\n", + "cnt_label_dif_tensor = torch.tensor(cnt_label_dif)" + ] + }, + { + "cell_type": "code", + "execution_count": 40, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([368, 1, 32, 32])\n", + "torch.Size([368])\n", + "torch.Size([1000, 1, 32, 32])\n", + "torch.Size([1000])\n" + ] + } + ], + "source": [ + "print(X_res_new_tensor.shape)\n", + "print(y_res_new_tensor.shape)\n", + "print(all_data.shape)\n", + "print(all_labels.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 41, + "metadata": {}, + "outputs": [], + "source": [ + "result_X= torch.cat((all_data, X_res_new_tensor), dim = 0)\n", + "result_y= torch.cat((all_labels, y_res_new_tensor), dim = 0)" + ] + }, + { + "cell_type": "code", + "execution_count": 42, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([1368, 1, 32, 32])\n", + "torch.Size([1368])\n" + ] + } + ], + "source": [ + "print(result_X.shape)\n", + "print(result_y.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 99, + "metadata": {}, + "outputs": [], + "source": [ + "trained_with_flag1 = trained_with_flag.copy()" + ] + }, + { + "cell_type": "code", + "execution_count": 78, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + 
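+ {
+  "cell_type": "markdown",
+  "metadata": {},
+  "source": [
+   "The next two code cells append one `[image, label, flag]` entry per SMOTE sample to `trained_with_flag1`, first with the flag set to `True` and then with it set to `False`. Note the execution counts: the `True` run (count 78) happened before the list was re-copied (count 99), so the final list of length 1368 printed further below holds the 1000 original entries plus the 368 synthetic entries appended with `False`. For reference only, a compact sketch of the same append step; `append_synthetic` is an illustrative helper, not part of the original code, and it assumes `X_res_new_tensor`, `y_res_new_tensor` and `trained_with_flag1` as defined above:\n",
+   "\n",
+   "```python\n",
+   "def append_synthetic(records, X_new, y_new, flag):\n",
+   "    # One [image, label, flag] entry per synthetic sample, mirroring the existing format\n",
+   "    for x, y in zip(X_new, y_new):\n",
+   "        records.append([x, y, flag])\n",
+   "\n",
+   "append_synthetic(trained_with_flag1, X_res_new_tensor, y_res_new_tensor, flag=False)\n",
+   "```"
+  ]
+ },
+ {
+  "cell_type": "code",
+  "execution_count": 78,
+  "metadata": {},
+  "outputs": [
+   {
+    "name": "stdout",
+    "output_type": "stream",
+    "text": [
+     "torch.Size([1, 32, 32]) tensor(1) True\n",
+     "[... identical output repeated for each of the 368 synthetic samples; most lines omitted ...]\n", +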
"torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + 
"torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + 
"torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + 
"torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + 
"torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n", + "torch.Size([1, 32, 32]) tensor(1) True\n" + ] + } + ], + "source": [ + "for i in range(len(X_res_new_tensor)):\n", + " tr= [X_res_new_tensor[i], y_res_new_tensor[i], True]\n", + " print(tr[0].shape, tr[1], tr[2])\n", + " trained_with_flag1.append(tr)" + ] + }, + { + "cell_type": "code", + "execution_count": 100, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + 
"torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) 
tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + 
"torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) 
tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + 
"torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n", + "torch.Size([1, 32, 32]) tensor(1) False\n" + ] + } + ], + "source": [ + "for i in range(len(X_res_new_tensor)):\n", + " tr= [X_res_new_tensor[i], y_res_new_tensor[i], False]\n", + " print(tr[0].shape, tr[1], tr[2])\n", + " trained_with_flag1.append(tr)" + ] + }, + { + "cell_type": "code", + "execution_count": 101, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "1368\n" + ] + } + ], + "source": [ + "print(len(trained_with_flag1))" + ] + }, + { + "cell_type": "code", + "execution_count": 102, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([64, 1, 32, 32]) torch.Size([64])\n" + ] + } + ], + "source": [ + "from torch.utils.data import TensorDataset\n", + "\n", + "# Tạo TensorDataset và DataLoader từ dữ liệu đã áp dụng SMOTE\n", + "resampled_dataset = TensorDataset(result_X, result_y)\n", + "resampled_loader = DataLoader(resampled_dataset, batch_size=64, shuffle=True)\n", + "\n", + "# Kiểm tra hình dạng của dữ liệu mới\n", + "for data, labels in resampled_loader:\n", + " print(data.shape, labels.shape)\n", + " break" + ] + }, + { + "cell_type": "code", + "execution_count": 103, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "b79e0d61dd16451a8315a2506313ef95", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/22 [00:00 maxsamples:\n", + " idxs_1 = sorted(np.random.choice(\n", + " dist.X1.shape[0], maxsamples, replace=False))\n", + "else:\n", + " idxs_1 = np.s_[:] # hack to get a full slice\n", + "\n", + "if maxsamples and dist.X2.shape[0] > maxsamples:\n", + " idxs_2 = sorted(np.random.choice(\n", + " dist.X2.shape[0], maxsamples, replace=False))\n", + "else:\n", + " idxs_2 = np.s_[:] # hack to get a full slice\n", + "Z1 = torch.cat((dist.X1[idxs_1],\n", + " dist.Y1[idxs_1].type(dist.X1.dtype).unsqueeze(1)), -1)\n", + "Z2 = torch.cat((dist.X2[idxs_2],\n", + " dist.Y2[idxs_2].type(dist.X2.dtype).unsqueeze(1)), -1)\n", + "Z1 = Z1.to(device)\n", + "Z2 = Z2.to(device)" + ] + }, + { + "cell_type": "code", + "execution_count": 107, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([1368, 1025]) torch.Size([200, 1025])\n", + "Z1 shape in batch: torch.Size([1, 1368, 1025])\n", + "Z2 shape in batch: torch.Size([1, 200, 1025])\n", + "1 1368 1024\n", + "torch.Size([1, 1368, 100])\n", + "1 200 1024\n", + "torch.Size([1, 200, 100])\n", + "torch.Size([1, 1368, 200])\n", + "torch.Size([1, 1368, 200])\n", + "Gia tri M: tensor([[[83, 83, 82, ..., 83, 82, 82],\n", + " [11, 11, 10, ..., 11, 10, 10],\n", + " [23, 23, 22, 
..., 23, 22, 22],\n", + " ...,\n", + " [47, 47, 46, ..., 47, 46, 46],\n", + " [11, 11, 10, ..., 11, 10, 10],\n", + " [23, 23, 22, ..., 23, 22, 22]]], device='cuda:0')\n", + "torch.Size([1, 1368, 200])\n", + "torch.Size([1, 1368, 200])\n", + "gia tri D: tensor([[[3326.0740, 1406.3806, 1136.8853, ..., 2389.4412, 1735.7329,\n", + " 478.1528],\n", + " [2519.7764, 1308.7198, 1082.8784, ..., 2579.1260, 914.8433,\n", + " 73.7124],\n", + " [1981.0059, 229.4922, 2430.8093, ..., 832.4902, 1931.9421,\n", + " 1417.0027],\n", + " ...,\n", + " [1010.7980, 2057.9290, 1097.6913, ..., 5886.2061, 327.8202,\n", + " 1087.2440],\n", + " [2929.4072, 1566.8429, 817.9858, ..., 3025.8799, 1196.9038,\n", + " 129.9682],\n", + " [ 490.3457, 266.4102, 1084.4929, ..., 2984.0996, 734.3679,\n", + " 1149.1433]]], device='cuda:0')\n", + "torch.Size([1, 1368, 200])\n", + "Z1 shape in batch: torch.Size([1, 200, 1025])\n", + "Z2 shape in batch: torch.Size([1, 1368, 1025])\n", + "1 200 1024\n", + "torch.Size([1, 200, 100])\n", + "1 1368 1024\n", + "torch.Size([1, 1368, 100])\n", + "torch.Size([1, 200, 1368])\n", + "torch.Size([1, 200, 1368])\n", + "Gia tri M: tensor([[[138, 132, 133, ..., 135, 132, 133],\n", + " [138, 132, 133, ..., 135, 132, 133],\n", + " [126, 120, 121, ..., 123, 120, 121],\n", + " ...,\n", + " [138, 132, 133, ..., 135, 132, 133],\n", + " [126, 120, 121, ..., 123, 120, 121],\n", + " [126, 120, 121, ..., 123, 120, 121]]], device='cuda:0')\n", + "torch.Size([1, 200, 1368])\n", + "torch.Size([1, 200, 1368])\n", + "gia tri D: tensor([[[3326.0740, 2519.7764, 1981.0059, ..., 1010.7980, 2929.4072,\n", + " 490.3457],\n", + " [1406.3806, 1308.7198, 229.4922, ..., 2057.9290, 1566.8429,\n", + " 266.4102],\n", + " [1136.8853, 1082.8784, 2430.8093, ..., 1097.6913, 817.9858,\n", + " 1084.4929],\n", + " ...,\n", + " [2389.4412, 2579.1279, 832.4902, ..., 5886.2041, 3025.8799,\n", + " 2984.0996],\n", + " [1735.7329, 914.8433, 1931.9421, ..., 327.8202, 1196.9038,\n", + " 734.3679],\n", + " [ 478.1508, 73.7124, 1417.0027, ..., 1087.2440, 129.9682,\n", + " 1149.1433]]], device='cuda:0')\n", + "torch.Size([1, 200, 1368])\n" + ] + } + ], + "source": [ + "with torch.no_grad():\n", + " loss.debias = False\n", + " loss.potentials = True\n", + " print(Z1.shape, Z2.shape)\n", + " F_i, G_j = loss(Z1, Z2)" + ] + }, + { + "cell_type": "code", + "execution_count": 108, + "metadata": {}, + "outputs": [], + "source": [ + "π = [F_i, G_j]\n", + "dual_sol = π\n", + "for i in range(len(dual_sol)):\n", + " dual_sol[i] = dual_sol[i].to('cpu')" + ] + }, + { + "cell_type": "code", + "execution_count": 109, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "300" + ] + }, + "execution_count": 109, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "len(shuffle_ind)" + ] + }, + { + "cell_type": "code", + "execution_count": 110, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([1, 1368])" + ] + }, + "execution_count": 110, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dual_sol[0].shape" + ] + }, + { + "cell_type": "code", + "execution_count": 111, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "1368\n" + ] + } + ], + "source": [ + "print(training_size)" + ] + }, + { + "cell_type": "code", + "execution_count": 65, + "metadata": {}, + "outputs": [], + "source": [ + "training_size = 1368" + ] + }, + { + "cell_type": "code", + "execution_count": 98, + "metadata": {}, + "outputs": [ + { + "name": 
"stdout", + "output_type": "stream", + "text": [ + "inspected: 10, found: 6 actual found: 3 sythetic found; 3, detection rate: 0.01 baseline: 1.8\n", + "inspected: 20, found: 11 actual found: 5 sythetic found; 6, detection rate: 0.02 baseline: 3.6\n", + "inspected: 30, found: 14 actual found: 6 sythetic found; 8, detection rate: 0.02 baseline: 5.4\n", + "inspected: 40, found: 18 actual found: 10 sythetic found; 8, detection rate: 0.03 baseline: 7.2\n", + "inspected: 50, found: 25 actual found: 14 sythetic found; 11, detection rate: 0.04 baseline: 9.0\n", + "inspected: 60, found: 30 actual found: 16 sythetic found; 14, detection rate: 0.04 baseline: 10.8\n", + "inspected: 70, found: 37 actual found: 19 sythetic found; 18, detection rate: 0.06 baseline: 12.6\n", + "inspected: 80, found: 43 actual found: 20 sythetic found; 23, detection rate: 0.06 baseline: 14.4\n", + "inspected: 90, found: 47 actual found: 21 sythetic found; 26, detection rate: 0.07 baseline: 16.2\n", + "inspected: 100, found: 51 actual found: 22 sythetic found; 29, detection rate: 0.08 baseline: 18.0\n", + "inspected: 110, found: 55 actual found: 24 sythetic found; 31, detection rate: 0.08 baseline: 19.8\n", + "inspected: 120, found: 62 actual found: 26 sythetic found; 36, detection rate: 0.09 baseline: 21.6\n", + "inspected: 130, found: 67 actual found: 29 sythetic found; 38, detection rate: 0.10 baseline: 23.400000000000002\n", + "inspected: 140, found: 72 actual found: 31 sythetic found; 41, detection rate: 0.11 baseline: 25.2\n", + "inspected: 150, found: 77 actual found: 34 sythetic found; 43, detection rate: 0.12 baseline: 27.0\n", + "inspected: 160, found: 82 actual found: 36 sythetic found; 46, detection rate: 0.12 baseline: 28.8\n", + "inspected: 170, found: 89 actual found: 39 sythetic found; 50, detection rate: 0.13 baseline: 30.6\n", + "inspected: 180, found: 94 actual found: 40 sythetic found; 54, detection rate: 0.14 baseline: 32.4\n", + "inspected: 190, found: 100 actual found: 44 sythetic found; 56, detection rate: 0.15 baseline: 34.2\n", + "inspected: 200, found: 105 actual found: 47 sythetic found; 58, detection rate: 0.16 baseline: 36.0\n", + "inspected: 210, found: 108 actual found: 49 sythetic found; 59, detection rate: 0.16 baseline: 37.800000000000004\n", + "inspected: 220, found: 114 actual found: 51 sythetic found; 63, detection rate: 0.17 baseline: 39.6\n", + "inspected: 230, found: 117 actual found: 53 sythetic found; 64, detection rate: 0.18 baseline: 41.4\n", + "inspected: 240, found: 120 actual found: 55 sythetic found; 65, detection rate: 0.18 baseline: 43.2\n", + "inspected: 250, found: 123 actual found: 56 sythetic found; 67, detection rate: 0.18 baseline: 45.0\n", + "inspected: 260, found: 129 actual found: 59 sythetic found; 70, detection rate: 0.19 baseline: 46.800000000000004\n", + "inspected: 270, found: 134 actual found: 60 sythetic found; 74, detection rate: 0.20 baseline: 48.6\n", + "inspected: 280, found: 140 actual found: 63 sythetic found; 77, detection rate: 0.21 baseline: 50.4\n", + "inspected: 290, found: 145 actual found: 64 sythetic found; 81, detection rate: 0.22 baseline: 52.2\n", + "inspected: 300, found: 153 actual found: 70 sythetic found; 83, detection rate: 0.23 baseline: 54.0\n", + "inspected: 310, found: 157 actual found: 72 sythetic found; 85, detection rate: 0.24 baseline: 55.800000000000004\n", + "inspected: 320, found: 160 actual found: 73 sythetic found; 87, detection rate: 0.24 baseline: 57.6\n", + "inspected: 330, found: 165 actual found: 74 sythetic found; 
91, detection rate: 0.25 baseline: 59.4\n", + "inspected: 340, found: 172 actual found: 78 sythetic found; 94, detection rate: 0.26 baseline: 61.2\n", + "inspected: 350, found: 179 actual found: 81 sythetic found; 98, detection rate: 0.27 baseline: 63.0\n", + "inspected: 360, found: 183 actual found: 83 sythetic found; 100, detection rate: 0.27 baseline: 64.8\n", + "inspected: 370, found: 188 actual found: 85 sythetic found; 103, detection rate: 0.28 baseline: 66.60000000000001\n", + "inspected: 380, found: 193 actual found: 86 sythetic found; 107, detection rate: 0.29 baseline: 68.4\n", + "inspected: 390, found: 197 actual found: 88 sythetic found; 109, detection rate: 0.29 baseline: 70.2\n", + "inspected: 400, found: 202 actual found: 91 sythetic found; 111, detection rate: 0.30 baseline: 72.0\n", + "inspected: 410, found: 208 actual found: 94 sythetic found; 114, detection rate: 0.31 baseline: 73.8\n", + "inspected: 420, found: 213 actual found: 96 sythetic found; 117, detection rate: 0.32 baseline: 75.60000000000001\n", + "inspected: 430, found: 218 actual found: 96 sythetic found; 122, detection rate: 0.33 baseline: 77.4\n", + "inspected: 440, found: 221 actual found: 98 sythetic found; 123, detection rate: 0.33 baseline: 79.2\n", + "inspected: 450, found: 226 actual found: 99 sythetic found; 127, detection rate: 0.34 baseline: 81.0\n", + "inspected: 460, found: 233 actual found: 101 sythetic found; 132, detection rate: 0.35 baseline: 82.8\n", + "inspected: 470, found: 236 actual found: 103 sythetic found; 133, detection rate: 0.35 baseline: 84.60000000000001\n", + "inspected: 480, found: 241 actual found: 104 sythetic found; 137, detection rate: 0.36 baseline: 86.4\n", + "inspected: 490, found: 245 actual found: 106 sythetic found; 139, detection rate: 0.37 baseline: 88.2\n", + "inspected: 500, found: 253 actual found: 110 sythetic found; 143, detection rate: 0.38 baseline: 90.0\n", + "inspected: 510, found: 259 actual found: 110 sythetic found; 149, detection rate: 0.39 baseline: 91.8\n", + "inspected: 520, found: 265 actual found: 113 sythetic found; 152, detection rate: 0.40 baseline: 93.60000000000001\n", + "inspected: 530, found: 271 actual found: 116 sythetic found; 155, detection rate: 0.41 baseline: 95.4\n", + "inspected: 540, found: 274 actual found: 117 sythetic found; 157, detection rate: 0.41 baseline: 97.2\n", + "inspected: 550, found: 277 actual found: 118 sythetic found; 159, detection rate: 0.41 baseline: 99.0\n", + "inspected: 560, found: 280 actual found: 118 sythetic found; 162, detection rate: 0.42 baseline: 100.8\n", + "inspected: 570, found: 286 actual found: 122 sythetic found; 164, detection rate: 0.43 baseline: 102.60000000000001\n", + "inspected: 580, found: 291 actual found: 123 sythetic found; 168, detection rate: 0.44 baseline: 104.4\n", + "inspected: 590, found: 296 actual found: 124 sythetic found; 172, detection rate: 0.44 baseline: 106.2\n", + "inspected: 600, found: 303 actual found: 126 sythetic found; 177, detection rate: 0.45 baseline: 108.0\n", + "inspected: 610, found: 307 actual found: 129 sythetic found; 178, detection rate: 0.46 baseline: 109.8\n", + "inspected: 620, found: 315 actual found: 133 sythetic found; 182, detection rate: 0.47 baseline: 111.60000000000001\n", + "inspected: 630, found: 320 actual found: 137 sythetic found; 183, detection rate: 0.48 baseline: 113.4\n", + "inspected: 640, found: 324 actual found: 139 sythetic found; 185, detection rate: 0.49 baseline: 115.2\n", + "inspected: 650, found: 330 actual found: 143 sythetic 
found; 187, detection rate: 0.49 baseline: 117.0\n", + "inspected: 660, found: 332 actual found: 143 sythetic found; 189, detection rate: 0.50 baseline: 118.8\n", + "inspected: 670, found: 337 actual found: 146 sythetic found; 191, detection rate: 0.50 baseline: 120.60000000000001\n", + "inspected: 680, found: 341 actual found: 148 sythetic found; 193, detection rate: 0.51 baseline: 122.4\n", + "inspected: 690, found: 345 actual found: 148 sythetic found; 197, detection rate: 0.52 baseline: 124.2\n", + "inspected: 700, found: 350 actual found: 150 sythetic found; 200, detection rate: 0.52 baseline: 126.0\n", + "inspected: 710, found: 356 actual found: 151 sythetic found; 205, detection rate: 0.53 baseline: 127.8\n", + "inspected: 720, found: 361 actual found: 155 sythetic found; 206, detection rate: 0.54 baseline: 129.6\n", + "inspected: 730, found: 369 actual found: 157 sythetic found; 212, detection rate: 0.55 baseline: 131.4\n", + "inspected: 740, found: 374 actual found: 158 sythetic found; 216, detection rate: 0.56 baseline: 133.20000000000002\n", + "inspected: 750, found: 378 actual found: 158 sythetic found; 220, detection rate: 0.57 baseline: 135.0\n", + "inspected: 760, found: 381 actual found: 159 sythetic found; 222, detection rate: 0.57 baseline: 136.8\n", + "inspected: 770, found: 388 actual found: 163 sythetic found; 225, detection rate: 0.58 baseline: 138.6\n", + "inspected: 780, found: 392 actual found: 165 sythetic found; 227, detection rate: 0.59 baseline: 140.4\n", + "inspected: 790, found: 398 actual found: 168 sythetic found; 230, detection rate: 0.60 baseline: 142.20000000000002\n", + "inspected: 800, found: 405 actual found: 171 sythetic found; 234, detection rate: 0.61 baseline: 144.0\n", + "inspected: 810, found: 409 actual found: 172 sythetic found; 237, detection rate: 0.61 baseline: 145.8\n", + "inspected: 820, found: 414 actual found: 173 sythetic found; 241, detection rate: 0.62 baseline: 147.6\n", + "inspected: 830, found: 420 actual found: 176 sythetic found; 244, detection rate: 0.63 baseline: 149.4\n", + "inspected: 840, found: 424 actual found: 177 sythetic found; 247, detection rate: 0.63 baseline: 151.20000000000002\n", + "inspected: 850, found: 428 actual found: 180 sythetic found; 248, detection rate: 0.64 baseline: 153.0\n", + "inspected: 860, found: 434 actual found: 184 sythetic found; 250, detection rate: 0.65 baseline: 154.8\n", + "inspected: 870, found: 440 actual found: 187 sythetic found; 253, detection rate: 0.66 baseline: 156.6\n", + "inspected: 880, found: 444 actual found: 189 sythetic found; 255, detection rate: 0.66 baseline: 158.4\n", + "inspected: 890, found: 450 actual found: 194 sythetic found; 256, detection rate: 0.67 baseline: 160.20000000000002\n", + "inspected: 900, found: 454 actual found: 195 sythetic found; 259, detection rate: 0.68 baseline: 162.0\n", + "inspected: 910, found: 457 actual found: 198 sythetic found; 259, detection rate: 0.68 baseline: 163.8\n", + "inspected: 920, found: 460 actual found: 199 sythetic found; 261, detection rate: 0.69 baseline: 165.6\n", + "inspected: 930, found: 464 actual found: 201 sythetic found; 263, detection rate: 0.69 baseline: 167.4\n", + "inspected: 940, found: 466 actual found: 202 sythetic found; 264, detection rate: 0.70 baseline: 169.20000000000002\n", + "inspected: 950, found: 467 actual found: 203 sythetic found; 264, detection rate: 0.70 baseline: 171.0\n", + "inspected: 960, found: 470 actual found: 205 sythetic found; 265, detection rate: 0.70 baseline: 172.8\n", + "inspected: 
970, found: 477 actual found: 208 sythetic found; 269, detection rate: 0.71 baseline: 174.6\n", + "inspected: 980, found: 481 actual found: 210 sythetic found; 271, detection rate: 0.72 baseline: 176.4\n", + "inspected: 990, found: 484 actual found: 212 sythetic found; 272, detection rate: 0.72 baseline: 178.20000000000002\n", + "inspected: 1000, found: 489 actual found: 215 sythetic found; 274, detection rate: 0.73 baseline: 180.0\n", + "inspected: 1010, found: 496 actual found: 220 sythetic found; 276, detection rate: 0.74 baseline: 181.8\n", + "inspected: 1020, found: 499 actual found: 221 sythetic found; 278, detection rate: 0.75 baseline: 183.6\n", + "inspected: 1030, found: 503 actual found: 224 sythetic found; 279, detection rate: 0.75 baseline: 185.4\n", + "inspected: 1040, found: 506 actual found: 225 sythetic found; 281, detection rate: 0.76 baseline: 187.20000000000002\n", + "inspected: 1050, found: 514 actual found: 228 sythetic found; 286, detection rate: 0.77 baseline: 189.0\n", + "inspected: 1060, found: 518 actual found: 230 sythetic found; 288, detection rate: 0.78 baseline: 190.8\n", + "inspected: 1070, found: 521 actual found: 232 sythetic found; 289, detection rate: 0.78 baseline: 192.6\n", + "inspected: 1080, found: 528 actual found: 236 sythetic found; 292, detection rate: 0.79 baseline: 194.4\n", + "inspected: 1090, found: 532 actual found: 237 sythetic found; 295, detection rate: 0.80 baseline: 196.20000000000002\n", + "inspected: 1100, found: 535 actual found: 237 sythetic found; 298, detection rate: 0.80 baseline: 198.0\n", + "inspected: 1110, found: 540 actual found: 241 sythetic found; 299, detection rate: 0.81 baseline: 199.8\n", + "inspected: 1120, found: 546 actual found: 245 sythetic found; 301, detection rate: 0.82 baseline: 201.6\n", + "inspected: 1130, found: 548 actual found: 245 sythetic found; 303, detection rate: 0.82 baseline: 203.4\n", + "inspected: 1140, found: 554 actual found: 248 sythetic found; 306, detection rate: 0.83 baseline: 205.20000000000002\n", + "inspected: 1150, found: 560 actual found: 251 sythetic found; 309, detection rate: 0.84 baseline: 207.0\n", + "inspected: 1160, found: 563 actual found: 252 sythetic found; 311, detection rate: 0.84 baseline: 208.8\n", + "inspected: 1170, found: 568 actual found: 255 sythetic found; 313, detection rate: 0.85 baseline: 210.6\n", + "inspected: 1180, found: 576 actual found: 260 sythetic found; 316, detection rate: 0.86 baseline: 212.4\n", + "inspected: 1190, found: 580 actual found: 261 sythetic found; 319, detection rate: 0.87 baseline: 214.20000000000002\n", + "inspected: 1200, found: 584 actual found: 263 sythetic found; 321, detection rate: 0.87 baseline: 216.0\n", + "inspected: 1210, found: 587 actual found: 264 sythetic found; 323, detection rate: 0.88 baseline: 217.8\n", + "inspected: 1220, found: 591 actual found: 266 sythetic found; 325, detection rate: 0.88 baseline: 219.6\n", + "inspected: 1230, found: 597 actual found: 267 sythetic found; 330, detection rate: 0.89 baseline: 221.4\n", + "inspected: 1240, found: 602 actual found: 268 sythetic found; 334, detection rate: 0.90 baseline: 223.20000000000002\n", + "inspected: 1250, found: 607 actual found: 270 sythetic found; 337, detection rate: 0.91 baseline: 225.0\n", + "inspected: 1260, found: 611 actual found: 271 sythetic found; 340, detection rate: 0.91 baseline: 226.8\n", + "inspected: 1270, found: 617 actual found: 275 sythetic found; 342, detection rate: 0.92 baseline: 228.6\n", + "inspected: 1280, found: 622 actual found: 279 
sythetic found; 343, detection rate: 0.93 baseline: 230.4\n", + "inspected: 1290, found: 626 actual found: 282 sythetic found; 344, detection rate: 0.94 baseline: 232.20000000000002\n", + "inspected: 1300, found: 632 actual found: 286 sythetic found; 346, detection rate: 0.95 baseline: 234.0\n", + "inspected: 1310, found: 637 actual found: 288 sythetic found; 349, detection rate: 0.95 baseline: 235.8\n", + "inspected: 1320, found: 640 actual found: 290 sythetic found; 350, detection rate: 0.96 baseline: 237.6\n", + "inspected: 1330, found: 645 actual found: 294 sythetic found; 351, detection rate: 0.97 baseline: 239.4\n", + "inspected: 1340, found: 653 actual found: 297 sythetic found; 356, detection rate: 0.98 baseline: 241.20000000000002\n", + "inspected: 1350, found: 656 actual found: 298 sythetic found; 358, detection rate: 0.98 baseline: 243.0\n", + "inspected: 1360, found: 663 actual found: 299 sythetic found; 364, detection rate: 0.99 baseline: 244.8\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjIAAAHFCAYAAADosxNlAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAABN30lEQVR4nO3dd3jUVf728TuENCB0khAIBII06S2oaKQoIIgsIiJIkeqiouBqRH8YsICwKqjPYgGFFRuC6KICEoqxAEMTUGmKNCmGDisIJDnPH9kZMiSZQiaZzMz7dV1cOt85MzlzIOTmnM853yBjjBEAAIAPKuHtDgAAAFwtggwAAPBZBBkAAOCzCDIAAMBnEWQAAIDPIsgAAACfRZABAAA+iyADAAB8FkEGAAD4LIIMvGrOnDkKCgqy/QoPD1dMTIzat2+vyZMnKz09/arfe9u2bZowYYL27t3ruQ67+XUGDx6s+Pj4Qv363vTHH3/oySefVLNmzVS2bFmFhoaqevXq6tWrlxYtWqTMzMwi6cfXX3+toKAgff3117ZrRTH2hw4d0oQJE7R582aX2lv7uWDBgkLtV1FzNA4TJkxQUFBQ0XcKAYMgg2Jh9uzZWrNmjVJTU/Wvf/1LzZo105QpU9SgQQMtX778qt5z27ZtmjhxYpEEmfy+zvjx4/Xpp58W6tf3lrVr16px48aaOXOmevTooY8++kjLly/XCy+8oJCQEPXq1Utz5szxWv+KYuwPHTqkiRMnuhxk/JWjcRg2bJjWrFlT9J1CwCjp7Q4AktSoUSO1atXK9vjOO+/UmDFj1K5dO/Xq1Uu//PKLoqOjvdjDq5OQkODtLhSKU6dOqWfPnipTpoy+//57Va1a1e75e++9V1u3btXx48cdvs/58+cVHh5eKP9i99ex9zXVq1dX9erVvd0N+DFmZFBs1ahRQy+99JLOnj2rN9980+65DRs2qEePHqpYsaLCw8PVvHlzffzxx7bn58yZo7vuukuS1L59e9vSVc4ZguXLl6tjx44qW7asSpUqpRtuuEErVqzI1Y8dO3bonnvuUXR0tMLCwlSjRg0NHDhQFy5ccPp18lre+OuvvzRu3DjVqlVLoaGhqlatmh544AGdOnXKrl18fLy6d++upUuXqkWLFoqIiFD9+vX1zjvvOBy3S5cuKSoqSgMGDMj13KlTpxQREaGxY8dKkrKysvTcc8+pXr16ioiIUPny5dWkSRO98sorDr/GzJkz9ccff2jq1Km5QoxVkyZN1L59e9tj6zLismXLNGTIEFWpUkWlSpXShQsX9Ouvv+q+++7TNddco1KlSqlatWq6/fbb9eOPP+Z63x07dqhLly4qVaqUKleurPvvv19nz57N1S6vsTfGaMaMGWrWrJkiIiJUoUIF9e7dW7/99ptdu5tvvlmNGjXS+vXrdeONN6pUqVKqXbu2XnjhBWVlZUnKXiZq3bq1JOm+++6z/d5PmDDB4dhdybr08vPPP+uee+5RuXLlFB0drSFDhuj06dN2befPn6/ExESVK1fO1qchQ4bYnrcuXb333nsaO3asYmJiFBERoaSkJP3www+5vraz7yOrgwcPasSIEYqLi1NoaKhiY2PVu3dv/fHHH07HIa+lpaysLE2dOlX169dXWFiYoqKiNHDgQP3+++9u/z4AMoAXzZ4920gy69evz/P5//73vyY4ONh07NjRdm3lypUmNDTU3HjjjWbevHlm6dKlZvDgwUaSmT17tjHGmPT0dDNp0iQjyfzrX/8ya9asMWvWrDHp6enGGGPmzp1rgoKCTM+ePc3ChQvN559/brp3726Cg4PN8uXLbV9r8+bNpkyZMiY+Pt688cYbZsWKFea9994zffr0MWfOnHH6dQYNGmRq1qxpe7+srCzTuXNnU7JkSTN+/HizbNky8+KLL5rSpUub5s2bm7/++svWtmbNmqZ69eqmYcOG5t133zVfffWVueuuu4wkk5aW5nBcx4wZYyIiIszp06ftrs+YMcNIMlu3bjXGGDN58mQTHBxsUlJSzIoVK8zSpUvN9OnTzYQJExy+/y233GKCg4PNn3/+6bBdTtbf62rVqpkRI0aYJUuWmAULFpiMjAyTlpZmHn30UbNgwQKTlpZmPv30U9OzZ08TERFhduzYYXuPI0eOmKioKFOtWjUze/Zss3jxYtO/f39To0YNI8msWrXK1vbKsTfGmOHDh5uQkBDz6KOPmqVLl5oPPvjA1K9f30RHR5sjR47Y2iUlJZlKlSqZa665xrzxxhsmNTXVjBo1ykgy//73v40xxpw+fdr2mf7v//7P9nt/4MCBfMdg1apVRpKZP3++7VpKSoqRZOrVq2eefvppk5qaal5++WUTFhZm7rvvPlu71atXm6CgINO3b1+zePFis3LlSjN79mwzYMCAXO8fFxdn7rjjDvP555+b9957z9SpU8eULVvW7N6929bWle8jY4z5/fffTdWqVU3lypXNyy+/bJYvX27mzZtnhgwZYrZv3+50HKyfL6cRI0YYSebBBx80S5cuNW+88YapUqW
KiYuLM0ePHnXr9wEgyMCrnAUZY4yJjo42DRo0sD2uX7++ad68ubl06ZJdu+7du5uqVauazMxMY4wx8+fPz/XDzRhj/vzzT1OxYkVz++23213PzMw0TZs2NW3atLFd69ChgylfvrwtmOQlv69jTO4fpkuXLjWSzNSpU+3azZs3z0gyb731lu1azZo1TXh4uNm3b5/t2vnz503FihXNyJEj8+2PMcZs3bo11/sZY0ybNm1My5YtbY+7d+9umjVr5vC98lK/fn0TExOT63pmZqa5dOmS7Zf198KYy7/XAwcOdPr+GRkZ5uLFi+aaa64xY8aMsV1PTk42QUFBZvPmzXbtb7nlFqdBZs2aNUaSeemll+xee+DAARMREWEef/xx27WkpCQjyVgsFru2DRs2NJ07d7Y9Xr9+fa4f/I44CjJX/pkYNWqUCQ8PN1lZWcYYY1588UUjyZw6dcrp+7do0cL2OmOM2bt3rwkJCTHDhg2zXXP1+2jIkCEmJCTEbNu2Ld+v62gcrgwy27dvN5LMqFGj7NpZLBYjyTz55JO2a67+PiCwsbSEYs8YY/v/X3/9VTt27FD//v0lSRkZGbZft912mw4fPqydO3c6fL/Vq1frxIkTGjRokN3rs7Ky1KVLF61fv15//vmnzp07p7S0NPXp00dVqlTxyGdZuXKlpOxlj5zuuusulS5dOtfSVrNmzVSjRg3b4/DwcNWtW1f79u1z+HUaN26sli1bavbs2bZr27dv17p16+yWItq0aaMtW7Zo1KhR+uqrr3TmzJmr/WiSpLFjxyokJMT2q0ePHrna3HnnnbmuZWRkaNKkSWrYsKFCQ0NVsmRJhYaG6pdfftH27dtt7VatWqVrr71WTZs2tXt9v379nPbtiy++UFBQkO6991673/eYmBg1bdrUbseTJMXExKhNmzZ215o0aeJ07K/WlWPVpEkT/fXXX7ade9blmz59+ujjjz/WwYMH832vfv362S3n1KxZU9dff71WrVolyb3voyVLlqh9+/Zq0KCBRz6ntQ9Xfg+0adNGDRo0yPU9UNS/D/A9BBkUa3/++aeOHz+u2NhYSdnbfSXpH//4h90PzJCQEI0aNUqSdOzYMYfvaX2P3r1753qPKVOmyBijEydO6OTJk8rMzPRooeLx48dVsmTJXMEoKChIMTExuYpjK1WqlOs9wsLCdP78eadfa8iQIVqzZo127NghKXtnWFhYmO655x5bm3HjxunFF1/U2rVr1bVrV1WqVEkdO3bUhg0bHL53jRo1dPToUZ07d87u+qOPPqr169dr/fr1+dbO5HV97NixGj9+vHr27KnPP/9cFotF69evV9OmTe0+6/HjxxUTE5Pr9Xldu9Iff/whY4yio6Nz/b6vXbs215+bgoz91bjy64WFhUmS7evddNNN+uyzz5SRkaGBAweqevXqatSokT788MNc75XfGFn/fLnzfXT06FGPfw9Ief85iI2N9ej3AAIDu5ZQrH355ZfKzMzUzTffLEmqXLmypOwfwL169crzNfXq1XP4ntb3eO2119S2bds820RHRyszM1PBwcG5ChALolKlSsrIyNDRo0ftwowxRkeOHLH9q9sT7rnnHo0dO1Zz5szR888/r7lz56pnz56qUKGCrU3JkiU1duxYjR07VqdOndLy5cv15JNPqnPnzjpw4IBKlSqV53vfcsstWrZsmRYvXqzevXvbrsfFxSkuLk6SFBoamudr89qh9N5772ngwIGaNGmS3fVjx46pfPnytseVKlXSkSNHcr0+r2tXqly5soKCgvTtt9/aQkJOeV0rbu644w7dcccdunDhgtauXavJkyerX79+io+P13XXXWdrl98YWUOBO99HVapU8fj3gCQdPnw4V0A6dOiQrW+Aq5iRQbG1f/9+/eMf/1C5cuU0cuRISdl/uV5zzTXasmWLWrVqleevyMhISbn/RWt1ww03qHz58tq2bVu+7xEaGmrb7TF//nyHszz5fZ28dOzYUVL2D+6cPvnkE/3555+25z2hQoUK6tmzp95991198cUXOnLkiN2y0pXKly+v3r1764EHHtCJEyccnr8zbNgwRUdH6/HHH9fhw4cL3NegoKBcQeLLL7/MtXzSvn17/fzzz9qyZYvd9Q8++MDp1+jevbuMMTp48GCev+eNGzd2u9/u/N57UlhYmJKSkjRlyhRJyrUj6cMPP7Rbkt23b59Wr15t+weBO99HXbt21apVqxwu2bozDh06dJCU+3tg/fr12r59u0e/BxAYmJFBsfDTTz/Z1ujT09P17bffavbs2QoODtann35qN3vx5ptvqmvXrurcubMGDx6satWq6cSJE9q+fbs2bdqk+fPnS8o+m0aS3nrrLUVGRio8PFy1atVSpUqV9Nprr2nQoEE6ceKEevfuraioKB09elRbtmzR0aNH9frrr0uSXn75ZbVr106JiYl64oknVKdOHf3xxx9atGiR3nzzTUVGRjr8Ole65ZZb1LlzZyUnJ+vMmTO64YYbtHXrVqWkpKh58+Z5bpkuiCFDhmjevHl68MEHVb16dXXq1Mnu+dtvv912hk+VKlW0b98+TZ8+XTVr1tQ111yT7/uWL19en332mW6//XY1bdpUf//739W2bVuVKVNGx48f1zfffKMjR47o+uuvd6mf3bt315w5c1S/fn01adJEGzdu1D//+c9c/2J/5JFH9M4776hbt2567rnnFB0drffff9+2fObIDTfcoBEjRui+++7Thg0bdNNNN6l06dI6fPiwvvvuOzVu3Fh///vfXeqvVUJCgiIiIvT++++rQYMGKlOmjGJjY21LoZ709NNP6/fff1fHjh1VvXp1nTp1Sq+88opCQkKUlJRk1zY9PV1/+9vfNHz4cJ0+fVopKSkKDw/XuHHjbG1c/T565plntGTJEt1000168skn1bhxY506dUpLly7V2LFjVb9+fbfGoV69ehoxYoRee+01lShRQl27dtXevXs1fvx4xcXFacyYMR4fO/g5b1YaA9adLNZfoaGhJioqyiQlJZlJkyblu1toy5Ytpk+fPiYqKsqEhISYmJgY06FDB/PGG2/YtZs+fbqpVauWCQ4OzrWrIi0tzXTr1s1UrFjRhISEmGrVqplu3brZ7Sgxxpht27aZu+66y1SqVMmEhoaaGjVqmMGDB9ttlc7v6+S1Bfj8+fMmOTnZ1KxZ04SEhJiqVauav//97+bkyZN27WrWrGm6deuW67MnJSWZpKQkxwP7P5mZmSYuLs5IMk899VSu51966SVz/fXXm8qVK9s+29ChQ83evXtdev8jR46YcePGmSZNmpjSpUubkJAQExsba26//Xbz7rvv2u2IcbRD7eTJk2bo0KEmKirKlCpVyrRr1858++23eX7Wbdu2mVtuucWEh4ebihUrmqFDh5r//Oc/Lm2/NsaYd955xyQmJprSpUubiIgIk5CQYAYOHGg2bNhga5OUlGSuvfbaXK/N6z0//PBDU79+fRMSEmIkmZSUlHzHy9GupZzbjnOO1549e4wxxnzxxRema9euplq1arbvk9tuu818++
23ud5/7ty5ZvTo0aZKlSomLCzM3HjjjXafz8rV76MDBw6YIUOGmJiYGNvvcZ8+fcwff/zhdBzy2n6dmZlppkyZYurWrWtCQkJM5cqVzb333ptr67o7vw8IXEHG5Jh/BAD4rK+//lrt27fX/Pnz7WqXAH9GjQwAAPBZBBkAAOCzWFoCAAA+ixkZAADgswgyAADAZxFkAACAz/LpA/GysrJ06NAhRUZG5nnsOQAAKH6MMTp79qxiY2NVokTB5lR8OsgcOnTIdl8XAADgWw4cOFDgm5L6dJCx3gvkwIEDKlu2rJd7AwAAXHHmzBnFxcXZfo4XhE8HGetyUtmyZQkyAAD4GE+UhVDsCwAAfBZBBgAA+CyCDAAA8FkEGQAA4LMIMgAAwGcRZAAAgM8iyAAAAJ9FkAEAAD6LIAMAAHwWQQYAAPgsggwAAPBZPn2vJQAAUIjS3pYOrJfiWktJQ73dmzwxIwMAAHKbnigdHCaVeDP7v9MTvd2jPDEjAwAAsh2zSGd3Sb/ukKLW2T8XtS57hqaYzcwQZAAAgPRDsrR9quM2B9ZLKl5BhqUlAAAC3TGL8xAjZdfKFDMEGQAAAtkxi/TbbOft0hOL3bKSxNISAACBy9ly0oVBUkh49kxMv+IXYiSCDAAAgeeYRTq0xHGIWSSpW0fp3gFF1q2rQZABACCQOJuFWSEpTdJuSWPqFlGnrh41MgAABApXinqtISY5WUosnmfH5MSMDAAA/s56PszZ3Y7bLZJ0b4rUtatPhBiJIAMAgH9z5XyYTyRtkdQ7WZowoQg65TkEGQAA/JErBb1S9ixM4xTpcd+ZhcmJIAMAgL9xNgvziaR0SYeVXQ/zboJPhhiJIAMAgH9xpaB3i7IDjFXd4r87KT/sWgIAwF+4ckrvItmHGB/ZnZQfZmQAAPAHriwnWWdiZs2SQkOzZ2J8OMRIBBkAAHyXdVt15kXnp/Qu/N//JydLQ4vn7QauBkEGAABf5Mq26pyn9Kb41vkwriLIAADga1wp6JXsT+n1sfNhXEWQAQDAl7hS0Cv55Cm9V4MgAwCAr3C2nPSWpExlnw/jg6f0Xg2CDAAAxZ0rp/QuknRzipSQ4Be7kVxFkAEAoDhzNguTs6B3rX8vI+WFA/EAACiuXCnqzVnQG2AhRmJGBgCA4sd6PszZ3Y7bBUhBryMEGQAAihNXzoexntIbIAW9jhBkAAAoDlwp6JWyZ2GqjJTevy9gZ2FyIsgAAOBtrtwnKV3Z26p3S1pLiLEiyAAA4E2uFPRab/YoBWxRb34IMgAAeIsrp/RaC3oD7HwYVxFkAADwBleWk6wzMQF4PoyrCDIAABQV67bqzIvOT+ld+L//ZynJIYIMAABFwZVt1TlP6U0J7PNhXEWQAQCgsLlS0CvZn9Ib4OfDuIogAwBAYXKloFfKXk7qxPkw7iLIAABQWJwtJ70lKVOcD1MABBkAADzNlVN6Fyl7KcmKot6rQpABAMCTnM3C5CzonTVLCg3lfJgCIMgAAOAprhT15izoHTq0KHrl1wgyAAAUlPV8mLO7HbejoNfjCDIAABSEK+fD2J3SS4jxJIIMAABXw5WCXolTegsZQQYAAHe5cp+kdF3eVs0pvYWGIAMAgDtcKei1LiNJnNJbyAgyAAC4Iu1t6cB6KeOQFOqg3SJJ49hWXVQIMgAAODM9UYpaJ5VQ/iHGWtDbm23VRYkgAwBAfo5ZpPUzs0OMI4skNU6RHqcOpqgRZAAAyIs7Bb29qYPxFoIMAABXcqWgt9uTUkh96mC8jCADAEBOxyzSb7Mdt0lPlB55vmj6A4cIMgAAWDlbTjp2k9R0oNSPYt7igiADAAhs1vskZV50HGIWSeo2TEoaUGRdg3MEGQBA4HLlPkkrdPmO1WPqFkGn4I4S3u4AAABe4UpBr3Q5xHCfpGKJGRkAQOA5ZpHWvOC83SJJ93KfpOKMIAMACCzOlpPekpQpzofxEQQZAEBgOGaRDi1xXtA7gPsk+RKCDADA/zmbhbEW9HKfJJ9DkAEA+DdXinor9pTef4IZGB9EkAEA+Ke0t6UD66VKGY7bLZI0hhDjqwgyAAD/Mz0x+47VJSSdzKfNJ5K2KHs5iRDjswgyAAD/ccwirZ+ZHWIcWSSpykjp/fsIMT6OIAMA8A/OCno/kZSu7G3VuyWtJcT4A4IMAMD3uVLQu0XZAUbilF4/QpABAPg2V07pTU+U3n9F2rWL82H8DEEGAOC7nC0n7W0h3TBK6ve/s2EIMH6HIAMA8C3WbdXRlaSjTk7pHTOD8OLnCDIAAN+Rc1v10Xza5DyllxDj9wgyAIDiz2KRVs2UajjZVi1xSm+AKeHtDgAA4FBystS/rfTL287bLpLUmxATSJiRAQAUXxaLtG+q9IyDNm9JylT2+TAsJwUcggwAoHg6ZpF+eUHq4aBNeqI0hW3VgYwgAwAofqzbqvMrgDjVQrqWbdUgyAAAiguLJXtmpcZF6aCTU3r7zJAqE15AkAEAFAfJydInU6WqkqIk3emgbYNkQgxsCDIAAO+xWKQlS5wX9EpSVk+pyxOEGNhh+zUAwDus26p/nOi4oFfK3lZ9DSEGuTEjAwAoeq5sq/5EUrrYVg2HCDIAgKK3a4nzWZjRs6T9oWyrhkMEGQBA0Tpmkcpvkc46aNMgWWo+tMi6BN9FkAEAFB3r+TD5aZQixXalFgYuI8gAAApX2tvSgfVSdCXpqIMQ0yBZajKhyLoF/0CQAQAUnumJUtS67D2yR/NpE9lTuo4dSbg6BBkAgOdZLNKqmVKNdc7bnmpKiMFV4xwZAIBnWc+H+eVt520XSarbtdC7BP/FjAwAwDNcPaX3LUmZ4nwYeARBBgBQcNZ7JTWV4/skpSdKU17Jvjkk58PAAwgyAICCceWU3lMtpGtHSf3+dzYMAQYeQpABAFwdiyV7ZuVgmvNTevvMoKAXhYIgAwBwn3UpqaqkKEk1HLRtkEyIQaEhyAAA3OPKUpLEKb0oEgQZAIDr0t6Wvp/hfCmJU3pRRAgyAADHrLUw+6ZItX+W4vNpV2GoVD9JiqzLLAyKDEEGAJC/5GRp6lQpQc6XkloPJ8CgyBFkAAD2rDMwFy9eDjFJTl5DQS+8hCADALjMOgNjdbcc18NUGMpMDLyKIAMAyGaxXJ6BqSopWI5DTHqi1G9W0fQNyAdBBgCQbdcu5zMwkpTVXorrf/mUXsCLCDIAgOxt1SffdR5iJKnLZJaSUGwQZAAgUF25rbqyC6+hqBfFDEEGAAKFNbjUrSstXOj6tuo2s6TgUM6HQbFEkAEAf2YNL6mp0ty59s+5uq26DrUwKL4IMgDgr67cSp0T26rhJwgyAOBvLBZpyZLcIYZt1fBDBBkA8HV51b5ciW3V8FMEGQDwZY6Wj6wSxLZq+C2CDAD4ovyWj67kSkGvxLZq+CyCDAD4GldmYSTny0lsq4YfIMgAgK9wZxamqRyHGLZVw08QZADAF7g6CzMtUYqy5P98nZFS7fuYgYHfIMgAQHFnvSu1IykpUvs46eAwx+0IMfAzBBkAKK6s26p373bcbsIAaWCCdHa3dNBBO
wp64Yc8EmROnTql8uXLe+KtAACSa0tJKSlS69+k03OlNXPzb9coRYrtSoiBXyrh7gumTJmiefPm2R736dNHlSpVUrVq1bRlyxaPdg4AAo7FIk2Y4DzETBgg9VJ2iHGkQbLUZAIhBn7L7RmZN998U++9954kKTU1VampqVqyZIk+/vhjPfbYY1q2bJnHOwkAAcHZLExKipSQIFVMzQ4wP+XTrlGKFJnAtmoEBLeDzOHDhxUXFydJ+uKLL9SnTx/deuutio+PV2Ii3zAAcFVcKejt2jV7a/WygY7bsYyEAOL20lKFChV04MABSdLSpUvVqVMnSZIxRpmZmZ7tHQAEAotFmj3bcZvk5OwQ85uTdhT0IsC4PSPTq1cv9evXT9dcc42OHz+url27SpI2b96sOnXqeLyDAODXXFlO6tpVCl0oLWubfzsKehGg3A4y06ZNU3x8vA4cOKCpU6eqTJkykrKXnEaNGuXxDgKA33K2nGTdVp35k7TOQTtrQS8QgIKMMcbbnbhaZ86cUbly5XT69GmVLVvW290BANfkPB9m4sTcz48cKd1+zvmOJE7phY/y5M9vt2tkJGnu3Llq166dYmNjtW/fPknS9OnT9Z///KdAnQEAv5ecLLVtKw0cmHeIkaR7WjsPMRIhBtBVBJnXX39dY8eOVdeuXXXq1ClbgW/58uU1ffp0T/cPAPyDO+fDRKx3/n4U9QKSrmJpqWHDhpo0aZJ69uypyMhIbdmyRbVr19ZPP/2km2++WceOHSusvubC0hIAn+Du+TD5aTNLCg7lfBj4PE/+/Ha72HfPnj1q3rx5ruthYWH6888/C9QZAPArFou0ZInzWZj2cVKF3dJPDkJMg2SpzlDP9g/wA24HmVq1amnz5s2qWbOm3fUlS5aoYcOGHusYAPg0V+6VJEnTErPvWJ3fzR4p6AUccjvIPPbYY3rggQf0119/yRijdevW6cMPP9TkyZM1a9aswugjAPgWV07pTUnJnok5OMxxO0IM4JDbQea+++5TRkaGHn/8cZ07d079+vVTtWrV9Morr6hv376F0UcA8A05t1U7Yj0f5uzu/GdiJAp6ARcU6ByZY8eOKSsrS1FRUZ7sk8so9gVQbLiylJSSIrX+zfnWak7phZ/zarFvTpUrVy7QFwcAv+DKUtKEAdLf5LigV+KUXsBNbgeZ5s2bKygoKNf1oKAghYeHq06dOho8eLDat2/vkQ4CQLHm6IaPV26r/imf92iUIkUmsK0auApuH4jXpUsX/fbbbypdurTat2+vm2++WWXKlNHu3bvVunVrHT58WJ06deKUXwD+z3pK75tv5v18165S17rOl5Jiu0q1BhBigKvg9ozMsWPH9Oijj2r8+PF215977jnt27dPy5YtU0pKip599lndcccdHusoABQbrpwPk5wsJUj6LZ/ZGisKeoECcbvYt1y5ctq4caPq1Kljd/3XX39Vy5Ytdfr0ae3YsUOtW7fW2bNnPdrZK1HsC6DIOSvqHTlSuu8+KXShtN1BOwp6EcC8etPI8PBwrV69Otf11atXKzw8XJKUlZWlsLCwAnUMAIodZ0W9CZJ6x0iVfnIcYqwFvYQYoMDcXlp66KGHdP/992vjxo1q3bq1goKCtG7dOs2aNUtPPvmkJOmrr77K8zYGAODTdu3K/7m7JfWQlD5RSs+nDaf0Ah53VefIvP/++/p//+//aefOnZKkevXq6aGHHlK/fv0kSefPn7ftYipMLC0BKBLWg+4uXpSG5XES7+ShUo23nb/PrWsJMYA8+/O7QAfieRtBBkChu7Impk0bad26y48nDJC6lpJ+zWfnklWDZKn5C4XTR8DHFJsD8QDAr+VVE7NunTRrlhQaevl8mF/zeX2bWVJwKOfDAIXI7SCTmZmpadOm6eOPP9b+/ft18eJFu+dPnDjhsc4BgNc4Ougu9IDUVI5P6W2QLNUZWihdA3CZ27uWJk6cqJdffll9+vTR6dOnNXbsWPXq1UslSpTQhAkTCqGLAFDEHB10d7ek4InSTxPzfm2dkdm1MCwjAUXC7RqZhIQEvfrqq+rWrZsiIyO1efNm27W1a9fqgw8+KKy+5kKNDACPsh50NzGfkJIg6Rkn70FBL+CUV2tkjhw5osaNG0uSypQpo9OnT0uSunfvnuu0XwDwGY4OukuQ1K+9dF1t6aSD3Umc0gsUObeDTPXq1XX48GHVqFFDderU0bJly9SiRQutX7+eQ/AA+CZHB91Zz4fRKunkqrzbcEov4DVuB5m//e1vWrFihRITE/Xwww/rnnvu0dtvv639+/drzJgxhdFHACgc1vNhdu/O/VyCsgt6ezh5D+spvQC8osDnyKxdu1arV69WnTp11KOHs+94z6JGBsBVc7SUZJuFyUejFCkygW3VwFUqVufItG3bVm3bti3o2wBA0XB25+oEOZ+FYRkJKDauKsgcPHhQ33//vdLT05WVlWX33OjRoz3SMQDwOGd3rp48VKr+m6R8amEkCnqBYsbtIDN79mzdf//9Cg0NVaVKlRQUFGR7LigoiCADoPhxNgsjZS8nObpfEgW9QLHkdpB5+umn9fTTT2vcuHEqUcLt8/QAoGg521ZdVVKwHC8nUdALFFtuB5lz586pb9++hBgAxVfOu1VfbUGvlH1Kb+37mIUBijG308jQoUM1f/78wugLABSc9fYCAwdKw4bl3caVgl6JEAP4ALe3X2dmZqp79+46f/68GjdurJCQELvnX375ZY920BG2XwOwY7FkhxhHEiSNbSGV3+S4XYNk7pcEFBKvbr+eNGmSvvrqK9WrV0+SchX7AoDX7Nrl+HnbclI+IabNLCk4lPNhAB/idpB5+eWX9c4772jw4MGF0B0AuEoWS94n9ErS209KodukEp/l//oGyVKdoYXSNQCFx+0gExYWphtuuKEw+gIAV8fRzqRpiVL4pPxfS0Ev4NPcLvZ9+OGH9dprrxVGXwDAffnd8DElRfp6lhRlcfx6Qgzg09yekVm3bp1WrlypL774Qtdee22uYt+FCxd6rHMA4JDFIs2enft6gqQ6R6QKkg46eD2n9AI+z+0gU758efXq1asw+gIArstvOclW0Pum9FM+r+WUXsBvFPju197E9msgQOW1zTpBUlNJdzp5LduqAa8rVne/BoAiYz2x98rdSc5O6W2UIkUmsK0a8EMuB5nmzZu7dE7Mpk1ODpkCgKuR31KSK6f0sowE+C2Xg0zPnj0LsRsA4EB+O5MSJCU5eS0FvYBfcznIpKSkFGY/ACBv+e1McmU5iZkYwO9RIwOg+LpyOSlBUlVJwXIcYhokS00mFGrXABQPBBkAxYu1oPfiRfsQ42wGRuKUXiAAEWQAFB8FKeiVCDFAACLIAPA+i0VasuTqC3olinqBAEWQAeBdjm746Gw5qc0sKTiU82GAAOZSkHn11VddfsPRo0dfdWcABJD8ZmHcKeitM7Tw+gfAJ7h0i4JatWrZPT569KjOnTun8uXLS5JOnTqlUqVKKSoqSr/99luhdDQv3KIA8CHWIt66daWFC53cJ8kBCnoBn1fktyjYs2eP
7f8/+OADzZgxQ2+//bbq1asnSdq5c6eGDx+ukSNHFqgzAPyUo+UjKwp6AVwFt28amZCQoAULFqh58+Z21zdu3KjevXvbhZ7CxowM4APyusHjlawFvR2dvBc3fAT8gldvGnn48GFdunQp1/XMzEz98ccfBeoMAD+T36m8OVHQC6AA3A4yHTt21PDhw/X222+rZcuWCgoK0oYNGzRy5Eh16tSpMPoIwBc5W05KkNRUFPQCKBC3g8w777yjQYMGqU2bNgoJCZEkZWRkqHPnzpo1a5bHOwjAxzg6E8ZqWqIUZcn/eQp6AbjI7SBTpUoVLV68WLt27dKOHTtkjFGDBg1Ut27dwugfAF/ibBZm5EjpntbSwWGO34cQA8BFV30gXnx8vIwxSkhIUMmSnKsHBDyLxflSUu8YqcIB6aCD9+GEXgBucDuBnDt3Tg899JD+/e9/S5J27dql2rVra/To0YqNjdUTTzzh8U4CKMas58Ps3p1/G2tBb/pEKT2fNo1SpNiuhBgAbinh7gvGjRunLVu26Ouvv1Z4eLjteqdOnTRv3jyPdg5AMZecnL21euBAaeLE3M8nSHrlJufnwzRIlppMIMQAcJvbMzKfffaZ5s2bp7Zt2yooKMh2vWHDhtrt6F9kAPyLs6Uk27bqb/J+vlGKFJnAtmoABeJ2kDl69KiioqJyXf/zzz/tgg0AP7drV97XU1KyZ2KC85ihyYllJAAe4PbSUuvWrfXll1/aHlvDy8yZM3Xdddd5rmcAii+LJf+amPZxUr0jjl9PQS8AD3F7Rmby5Mnq0qWLtm3bpoyMDL3yyiv6+eeftWbNGqWlpRVGHwEUJ462WE9LdLy1moJeAB7m9ozM9ddfr++//17nzp1TQkKCli1bpujoaK1Zs0YtW7YsjD4CKA4sFmnChNwhJkHSiz2lZU86PuSOgl4AhcDtm0YWJ9w0Eigi+c3COLtPksQpvQBy8eTPb7dnZIKDg5WenvsgiOPHjys4OLhAnQFQDOW3OylBzkOMRIgBUKjcDjL5TeBcuHBBoaGhBe4QgGImr91JCZKSXHgtRb0ACpnLxb6vvvqqpOxdSrNmzVKZMmVsz2VmZuqbb75R/fr1Pd9DAN5hPbH34kX7686Wk9rMkoJDOR8GQJFwOchMmzZNUvaMzBtvvGG3jBQaGqr4+Hi98cYbnu8hgKJ3ZU1MmzbS8XVSUzkOMQ2SpTpDC7t3AGDjcpDZs2ePJKl9+/ZauHChKlSoUGidAuAlFou0ZEnumpha66SHHbyOgl4AXuL2OTKrVq0qjH4A8Lb8dia5UtRLiAHgJW4X+/bu3VsvvPBCruv//Oc/ddddd3mkUwCKWF47kxIktVP2cpIjFPQC8CK3Z2TS0tKUkpKS63qXLl304osveqRTAIqQxSLNnm1/zZXzYTilF0Ax4HaQ+e9//5vnNuuQkBCdOXPGI50CUESuXE5KkPOCXunyKb0A4GVuB5lGjRpp3rx5evrpp+2uf/TRR2rYsKHHOgagkF25nORsFqZRihSZwLZqAMWK20Fm/PjxuvPOO7V792516NBBkrRixQp9+OGHmj9/vsc7CMDDrOfD5Lx7tSsFvSwjASiG3A4yPXr00GeffaZJkyZpwYIFioiIUJMmTbR8+XIlJbly1CeAImcNL6mp0ty59s+5ckovBb0AiiluGgn4u/y2VUuuLScxEwPAwzz589vtGRlJOnXqlBYsWKDffvtN//jHP1SxYkVt2rRJ0dHRqlatWoE6BMBD8jvcLkFSVUnBcn5KLwW9AIo5t4PM1q1b1alTJ5UrV0579+7VsGHDVLFiRX366afat2+f3n333cLoJwBnrMtHdetKCxfmPQvjyrZqTukF4EPcDjJjx47V4MGDNXXqVEVGRtqud+3aVf369fNo5wC4yNHykZUrBb0SIQaAT3H7ZN/169dr5MiRua5Xq1ZNR44c8UinALghr1N5r+RKQa9EUS8An+P2jEx4eHieB9/t3LlTVapU8UinALgor1N5r+RsOanNLCk4lPNhAPgkt4PMHXfcoWeeeUYff/yxJCkoKEj79+/XE088oTvvvNPjHQSQD2fLSa6c0tsgWaoz1MMdA4Ci4/b26zNnzui2227Tzz//rLNnzyo2NlZHjhzRddddp8WLF6t06dKF1dc8+8L2awQki0Vq2zb/56clSlGW/J+noBeAF3l1+3XZsmX13XffaeXKldq0aZOysrLUokULderUqUAdAeAiR8tJI0dK97SWDg5z/B6EGAB+wu0g8+677+ruu+9Whw4dbLcokKSLFy/qo48+0sCBAz3aQQA5OFpOSpDUO0aqcEA66OA9KOgF4EfcXloKDg7W4cOHFRUVZXf9+PHjioqKUmZmpkc76AhLSwgojpaTXDkfhlN6ARQTXl1aMsYoKCgo1/Xff/9d5cqVK1BnAOQhr5s8WiVIGtJCit/k+D04pReAn3I5yDRv3lxBQUEKCgpSx44dVbLk5ZdmZmZqz5496tKlS6F0EghYLt0nKZ8Q0yhFikxgWzUAv+ZykOnZs6ckafPmzercubPKlCljey40NFTx8fFsvwY8Kb+D7lzZVi2xjAQgILgcZFJSUiRJ8fHxuvvuuxUeHl5onQKg7OWkK7lSCyNR0AsgYLh9i4JBgwbpr7/+0qxZszRu3DidOHFCkrRp0yYdPOhoqwQAt9Sta//YlXslNUqRbl0rNX+hsHoFAMVKge9+PXz4cO5+DXiatcB3wABp9VypqqQoJ6+hoBdAAHI7yIwZM4a7XwOFKWeB792SnnHSnm3VAAKY20Fmw4YNeuutt3Jd5+7XQAFZLNKSJdkhxtWCXmZhAAQ47n4NFAdXzsI4CjBsqwYAG+5+DXiLtQ7m4sXLIcaVgl6WkQDAxu0g8+KLL+q2225TVFSUzp8/r6SkJNvdr59//vnC6CPgf/I66C5BUpKT17GtGgDscPdroCjlrIPJyZXlJGZiACAXt28aWZxw00j4lCtnYRKUva06WNIIB69rkMy5MAD8itduGpmVlaU5c+Zo4cKF2rt3r4KCglSrVi317t1bAwYMyPNmkgCU+3YDrpzQW2ekVPs+ZmEAwAGXT/Y1xqhHjx4aNmyYDh48qMaNG+vaa6/Vvn37NHjwYP3tb38rzH4CvstikWbPvvzYlYJeiRADAC5weUZmzpw5+uabb7RixQq1b9/e7rmVK1eqZ8+eevfddzVw4ECPdxLwWXktJzkr6JUo6gUAF7lcI3PrrbeqQ4cOeuKJJ/J8ftKkSUpLS9NXX33l0Q46Qo0MijWLRWrb9vJjZ8tJbWZJwaGcDwPA73mlRmbr1q2aeuVOixy6du2qV199tUCdAfyC9XyY3buzH7tySm+DZKnO0CLoHAD4F5eDzIkTJxQdHZ3v89HR0Tp58qRHOgX4HGt4SU2V5s69fN3ZLAwFvQBQIC4HmczMTJUsmX/z4OBgZWRkeKRTgE/J63A7ybWiXkIMABSIy0HGGKPBgwcrLCwsz+cvXLjgsU4BPuPKbdXS5fNhopy8loJeACgwl4PMoEGDnLZhxxICypXbqiXXzofhlF4
A8BiXg8zsK//CBgJZXtuqnRX0StmzME0mFF6/ACDAuH2vJSBg5XW3asm1+yRFJrCtGgAKAUEGcEVBCnpZRgKAQuPyLQqAgJVXQa/k2im9FPQCQKFiRgZwJK+CXsm15SRmYgCg0BFkgPzkVdBbVVKwnJ/SS0EvABQJggyQlyuXk1zZVs0pvQBQ5KiRAa505XKSKwW9EiEGALyAGRkgp7yWk5wV9EoU9QKAlxBkgKs9H6bNLCk4lPNhAMCLCDIIbHmdD+PKKb0NkqU6QwuxYwAAVxBkELjyOh/G2SwMBb0AUKxQ7IvAlNf5MK4U9RJiAKBYYUYGgSe/82GinLyOgl4AKHYIMggcFou0ZIn758NwSi8AFFsEGQSGvGZhnBX0SpzSCwDFHEEG/utqt1U3SpEiE9hWDQA+gCAD/5TXtmrJtYJelpEAwGewawn+J69t1ZJrp/RS0AsAPoUZGfifXbtyX3NlOYmZGADwOQQZ+BeLRdq9O/v/rduqg+X8lF4KegHAJxFk4D9y1sW4sq2aU3oBwOdRIwP/kLMuxpWCXokQAwB+gCAD35fzdgOuFPRKFPUCgJ9gaQm+zZ3lpDazpOBQzocBAD9CkIHvsi4nuXJKb4Nkqc7QIuoYAKCoEGTge6wn9u7e7XwWhoJeAPBrBBn4Bmt4SU2V5s7NvpYg6RknryPEAIBfI8ig+Mvrho9VJUU5eR0FvQDg9wgyKL4sFmnJEvdu+ChxSi8ABBCCDIqnvGZhnBX0SpzSCwABhiCD4ufKmz66cp+kyAS2VQNAACLIoPjJedNHV07pZRkJAAIWJ/ui+KlbN/u/rpzSS0EvAAQ0ZmRQfFi3WNetK01rI0Wty78tBb0AABFkUFwkJ0ufTM3eVh0saYSDthT0AgD+hyAD77Jusd431fnhdpzSCwC4AkEG3mPdYu3KCb0SIQYAkAvFvvCOnDd8dFbQK1HUCwDIEzMyKHoWizR7tvPzYdrMkoJDOR8GAJAvggyKlrWot6mkOx20a5As1RlaVL0CAPgoggyKjsXivKiXgl4AgBsIMih81vNhDqY5P6WXEAMAcANBBoUr5/kwUZJqOGhLQS8AwE0EGRQeV5aSJE7pBQBcNYIMCkfa29L3M5wvJXFKLwCgAAgy8BxrLcy+KVLtn6X4fNpVGCrVT2JbNQCgwAgy8Ax3TultPZwAAwDwCIIMrp51BubiRddP6aWgFwDgQQQZuMcaXlJTpblzL193dkpvhaHMxAAAPI4gA9dZl4+sEpS9rTpYjkNMeqLUb1bh9g0AEJAIMnCN9SaPVs5mYCQpq70U11/qx60GAACFgyADx6xLSbt3X76WIOchRpK6TGYpCQBQqAgyyN+VS0mSawW9EkW9AIAiQZBBbhaLtGRJ7hDjbDmpzSwpOJTzYQAARYYgA3tXW9DbIFmqQy0MAKBoEWSQLa9ZGFcKeuuM5I7VAACvIcgg/1oYVwp6CTEAAC8q4e0OwMuu3FYtUdALAPAZzMgEqry2VUsU9AIAfApBJhDlt5TUVBT0AgB8CkEm0OS1lORsFoaCXgBAMUWNTCCxWKTZs+2vuVLUS4gBABRTzMgEivzOh4ly8joKegEAxRhBJhBczQ0fG6VIsV0JMQCAYo0gEwh27cr+rysFvVL2LEyTCYXbJwAAPIAg4+8sluwt1s5mYRqlSJEJbKsGAPgUgow/s9bFJEh6xklblpEAAD6IIONvrAfdXbx4OcQ4O6WXgl4AgI8iyPiTK3cmubKcxEwMAMCHEWT8hXVnknVbdbCcn9JLQS8AwMcRZPyB9aA7V7ZVc0ovAMCPcLKvr0tOltq2lZa/6TzESIQYAIBfYUbGV1ks0pIlrhf0ShT1AgD8DkHGF+Us6nW2nNRmlhQcyvkwAAC/RJDxNTmLep2d0tsgWaoztIg6BgBA0SPI+Arr+TCunNJLQS8AIEAQZHxBzqUkV07pJcQAAAIEQaa4u/J8mCgn7SnoBQAEEIJMcebO+TCc0gsACEAEmeIqOVn6ZGp2Qe+dTtpySi8AIEARZIob6/kw+6Y6roVplCJFJrCtGgAQ0AgyxYm1qNeVgl6WkQAAIMh4nXVb9cWLrp/SS0EvAACSCDLelXNbteS8qJeCXgAA7BBkvOHK+yRVlRQs56f0UtALAIAdgkxRc+c+SRKn9AIA4EAJb3cgoFgPt5OyZ2KchRiJEAMAgAMEmaK0a1f2f10p6JUo6gUAwAmWloqKxeLaDR/bzJKCQzkfBgAAFxBkioKrp/Q2SJbqDC2qXgEA4PMIMoXNYnF+Si8FvQAAXBWCTGFJe1s6sF5KP+S8qJcQAwDAVSHIFIbpiVLUuuxS6hgnbSnoBQDgqhFkPC3t7ewQ4wyn9AIAUGAEGU9Ke1ta/5IU66Qdp/QCAOARBBlPsS4n5Rdiqjwp1anPtmoAADyIIFNQxyzS+pmOl5PSE6V+zxddnwAACBAEmYL4IVnaPjX/5w81kFo/KvXjbBgAAAoDtyi4WscsjkOMlB1ikggxAAAUFmZk3GU9HybjkBTqoF16IjMxAAAUMoKMO3KeD5NfiNnbQrphFCEGAIAiQJBxRdrb0pZ3nZ8Ps0jSmBlSIruSAAAoCgQZZ6yzMJXzef4TSemSDkvqnUyIAQCgCBFk8mOxSKtmSjWczMJ0e1IKqS/VrUuIAQCgiBFk8pKcLH0yVUqSVMNBu/RE6RHOhwEAwFu8uv36m2++UePGjVWyZEkFBQUpISFB3377rTe7lD0Ts2+q9Iykjvm0OXaTVG2W9MjaouwZAAC4gleDzOLFi7Vt2zaNGDFCktSwYUN17dpV+/fv916ndi2Rejh4Pj1RGp3G+TAAABQDXg0yq1at0ogRIzRjxgxJ0tChQxUXF6fXX3/de52qms/1Uy2YhQEAoJjxWpC5ePGiNm7cqFtvvdXu+q233qrVq1fn+ZozZ87o999/t/06ePCg5zvWrGve1/vMYBYGAIBixmvFvseOHVNmZqaio6PtrkdHR+vIkSN5vqZHjx5KS0sr3I5VTpQaPG5/+4EGydyxGgCAYsjru5aCgoLsHhtjcl2zWrRokc6cOWN7fPbsWTVs2NDznWo+RYrrJZ3dJUXWJcQAAFBMeS3IVK5cWcHBwblmX9LT03PN0liVLVtWZcuWtT3OGWo838FEAgwAAMWc12pkQkND1bJlS6WmptpdT01N1fXXX++lXgEAAF/i1aWlUaNGadiwYYqKipIkTZs2TXv27NHtt9/uzW4BAAAf4dXt1zVr1lRGRoYmTpwoKfuAvL/++ktvvfWWN7sFAAB8hFdnZG6++WYZY7zZBQAA4MO8OiMDAABQEAQZAADgswgyAADAZxFkAACAzyLIAAAAn0WQAQAAPosgAwAAfJbXbxpZENYzaAr1nksAAMCjrD+3PXGWnE8HmbNnz0qS4uLivNwTAADgrrNnz6pcuXIFeo8g48NH62ZlZenQoUOKjIxUUFBQgd7rzJkziouL04EDB+zusB2IGIvLGIvLGIvLGItsjMNljMVlroyFMUZnz55VbGysSpQoWJWLT8/IlChRQtWrV/foe5YtWzbg/x
BaMRaXMRaXMRaXMRbZGIfLGIvLnI1FQWdirCj2BQAAPosgAwAAfBZB5n/CwsKUkpKisLAwb3fF6xiLyxiLyxiLyxiLbIzDZYzFZUU9Fj5d7AsAAAIbMzIAAMBnEWQAAIDPIsgAAACfRZABAAA+iyDzPzNmzFCtWrUUHh6uli1b6ttvv/V2lzxq8uTJat26tSIjIxUVFaWePXtq586ddm2MMZowYYJiY2MVERGhm2++WT///LNdmwsXLuihhx5S5cqVVbp0afXo0UO///57UX4Uj5o8ebKCgoL0yCOP2K4F0jgcPHhQ9957rypVqqRSpUqpWbNm2rhxo+35QBmLjIwM/d///Z9q1aqliIgI1a5dW88884yysrJsbfx1LL755hvdfvvtio2NVVBQkD777DO75z31uU+ePKkBAwaoXLlyKleunAYMGKBTp04V8qdzj6OxuHTpkpKTk9W4cWOVLl1asbGxGjhwoA4dOmT3HoEwFlcaOXKkgoKCNH36dLvrRTYWBuajjz4yISEhZubMmWbbtm3m4YcfNqVLlzb79u3zdtc8pnPnzmb27Nnmp59+Mps3bzbdunUzNWrUMP/9739tbV544QUTGRlpPvnkE/Pjjz+au+++21StWtWcOXPG1ub+++831apVM6mpqWbTpk2mffv2pmnTpiYjI8MbH6tA1q1bZ+Lj402TJk3Mww8/bLseKONw4sQJU7NmTTN48GBjsVjMnj17zPLly82vv/5qaxMoY/Hcc8+ZSpUqmS+++MLs2bPHzJ8/35QpU8ZMnz7d1sZfx2Lx4sXmqaeeMp988omRZD799FO75z31ubt06WIaNWpkVq9ebVavXm0aNWpkunfvXlQf0yWOxuLUqVOmU6dOZt68eWbHjh1mzZo1JjEx0bRs2dLuPQJhLHL69NNPTdOmTU1sbKyZNm2a3XNFNRYEGWNMmzZtzP333293rX79+uaJJ57wUo8KX3p6upFk0tLSjDHGZGVlmZiYGPPCCy/Y2vz111+mXLly5o033jDGZH8jh4SEmI8++sjW5uDBg6ZEiRJm6dKlRfsBCujs2bPmmmuuMampqSYpKckWZAJpHJKTk027du3yfT6QxqJbt25myJAhdtd69epl7r33XmNM4IzFlT+wPPW5t23bZiSZtWvX2tqsWbPGSDI7duwo5E91dRz98LZat26dkWT7R2+gjcXvv/9uqlWrZn766SdTs2ZNuyBTlGMR8EtLFy9e1MaNG3XrrbfaXb/11lu1evVqL/Wq8J0+fVqSVLFiRUnSnj17dOTIEbtxCAsLU1JSkm0cNm7cqEuXLtm1iY2NVaNGjXxurB544AF169ZNnTp1srseSOOwaNEitWrVSnfddZeioqLUvHlzzZw50/Z8II1Fu3bttGLFCu3atUuStGXLFn333Xe67bbbJAXWWOTkqc+9Zs0alStXTomJibY2bdu2Vbly5Xx2bKTsv0eDgoJUvnx5SYE1FllZWRowYIAee+wxXXvttbmeL8qx8OmbRnrCsWPHlJmZqejoaLvr0dHROnLkiJd6VbiMMRo7dqzatWunRo0aSZLts+Y1Dvv27bO1CQ0NVYUKFXK18aWx+uijj7Rp0yatX78+13OBNA6//fabXn/9dY0dO1ZPPvmk1q1bp9GjRyssLEwDBw4MqLFITk7W6dOnVb9+fQUHByszM1PPP/+87rnnHkmB9eciJ0997iNHjigqKirX+0dFRfns2Pz111964okn1K9fP9uNEQNpLKZMmaKSJUtq9OjReT5flGMR8EHGKigoyO6xMSbXNX/x4IMPauvWrfruu+9yPXc14+BLY3XgwAE9/PDDWrZsmcLDw/Nt5+/jIGX/i6pVq1aaNGmSJKl58+b6+eef9frrr2vgwIG2doEwFvPmzdN7772nDz74QNdee602b96sRx55RLGxsRo0aJCtXSCMRV488bnzau+rY3Pp0iX17dtXWVlZmjFjhtP2/jYWGzdu1CuvvKJNmza53efCGIuAX1qqXLmygoODc6W/9PT0XP8K8QcPPfSQFi1apFWrVql69eq26zExMZLkcBxiYmJ08eJFnTx5Mt82xd3GjRuVnp6uli1bqmTJkipZsqTS0tL06quvqmTJkrbP4e/jIElVq1ZVw4YN7a41aNBA+/fvlxQ4fyYk6bHHHtMTTzyhvn37qnHjxhowYIDGjBmjyZMnSwqsscjJU587JiZGf/zxR673P3r0qM+NzaVLl9SnTx/t2bNHqampttkYKXDG4ttvv1V6erpq1Khh+3t03759evTRRxUfHy+paMci4INMaGioWrZsqdTUVLvrqampuv76673UK88zxujBBx/UwoULtXLlStWqVcvu+Vq1aikmJsZuHC5evKi0tDTbOLRs2VIhISF2bQ4fPqyffvrJZ8aqY8eO+vHHH7V582bbr1atWql///7avHmzateuHRDjIEk33HBDri34u3btUs2aNSUFzp8JSTp37pxKlLD/6zA4ONi2/TqQxiInT33u6667TqdPn9a6detsbSwWi06fPu1TY2MNMb/88ouWL1+uSpUq2T0fKGMxYMAAbd261e7v0djYWD322GP66quvJBXxWLhcFuzHrNuv3377bbNt2zbzyCOPmNKlS5u9e/d6u2se8/e//92UK1fOfP311+bw4cO2X+fOnbO1eeGFF0y5cuXMwoULzY8//mjuueeePLdZVq9e3Sxfvtxs2rTJdOjQodhvL3Um564lYwJnHNatW2dKlixpnn/+efPLL7+Y999/35QqVcq89957tjaBMhaDBg0y1apVs22/XrhwoalcubJ5/PHHbW38dSzOnj1rfvjhB/PDDz8YSebll182P/zwg20njqc+d5cuXUyTJk3MmjVrzJo1a0zjxo2L3ZZjR2Nx6dIl06NHD1O9enWzefNmu79HL1y4YHuPQBiLvFy5a8mYohsLgsz//Otf/zI1a9Y0oaGhpkWLFrZtyf5CUp6/Zs+ebWuTlZVlUlJSTExMjAkLCzM33XST+fHHH+3e5/z58+bBBx80FStWNBEREaZ79+5m//79RfxpPOvKIBNI4/D555+bRo0ambCwMFO/fn3z1ltv2T0fKGNx5swZ8/DDD5saNWqY8PBwU7t2bfPUU0/Z/YDy17FYtWpVnn83DBo0yBjjuc99/Phx079/fxMZGWkiIyNN//79zcmTJ4voU7rG0Vjs2bMn379HV61aZXuPQBiLvOQVZIpqLIKMMcb1+RsAAIDiI+BrZAAAgO8iyAAAAJ9FkAEAAD6LIAMAAHwWQQYAAPgsggwAAPBZBBkAAOCzCDIAfEZ8fLymT5/u7W4AKEYIMoAPGTx4sHr27OntbrilKMPHhAkT1KxZsyL5WgCKB4IMAADwWQQZwIfdfPPNGj16tB5//HFVrFhRMTExmjBhgl2bCRMmqEaNGgoLC1NsbKxGjx5tey4+Pl7PPvus+vXrpzJlyig2Nlavvfaa3etPnz6tESNGKCoqSmXLllWHDh20ZcsWuzaLFi1Sq1atFB4ersqVK6tXr162/u3bt09jxoxRUFCQgoKCb
K9ZvXq1brrpJkVERCguLk6jR4/Wn3/+aXs+PT1dt99+uyIiIlSrVi29//77bo+PdQZr0qRJio6OVvny5TVx4kRlZGToscceU8WKFVW9enW98847dq9LTk5W3bp1VapUKdWuXVvjx4/XpUuX7No899xzioqKUmRkpIYNG6Ynnngi12zQ7Nmz1aBBA4WHh6t+/fqaMWOG7bmLFy/qwQcfVNWqVRUeHq74+HhNnjzZ7c8IBDqCDODj/v3vf6t06dKyWCyaOnWqnnnmGaWmpkqSFixYoGnTpunNN9/UL7/8os8++0yNGze2e/0///lPNWnSRJs2bdK4ceM0ZswY2+uNMerWrZuOHDmixYsXa+PGjWrRooU6duyoEydOSJK+/PJL9erVS926ddMPP/ygFStWqFWrVpKkhQsXqnr16nrmmWd0+PBhHT58WJL0448/qnPnzurVq5e2bt2qefPm6bvvvtODDz5o69fgwYO1d+9erVy5UgsWLNCMGTOUnp7u9visXLlShw4d0jfffKOXX35ZEyZMUPfu3VWhQgVZLBbdf//9uv/++3XgwAHbayIjIzVnzhxt27ZNr7zyimbOnKlp06bZnn///ff1/PPPa8qUKdq4caNq1Kih119/3e7rzpw5U0899ZSef/55bd++XZMmTdL48eP173//W5L06quvatGiRfr444+1c+dOvffee4qPj3f78wEBz61bTALwqkGDBpk77rjD9jgpKcm0a9fOrk3r1q1NcnKyMcaYl156ydStW9dcvHgxz/erWbOm6dKli921u+++23Tt2tUYY8yKFStM2bJlzV9//WXXJiEhwbz55pvGGGOuu+46079//3z7nNddcQcMGGBGjBhhd+3bb781JUqUMOfPnzc7d+40kszatWttz2/fvt1IyvVeOaWkpJimTZvaHg8aNMjUrFnTZGZm2q7Vq1fP3HjjjbbHGRkZpnTp0ubDDz/M932nTp1qWrZsaXucmJhoHnjgAbs2N9xwg93XjouLMx988IFdm2effdZcd911xhhjHnroIdOhQweTlZWV79cF4BwzMoCPa9Kkid3jqlWr2mYu7rrrLp0/f161a9fW8OHD9emnnyojI8Ou/XXXXZfr8fbt2yVJGzdu1H//+19VqlRJZcqUsf3as2ePdu/eLUnavHmzOnbs6FafN27cqDlz5ti9Z+fOnZWVlaU9e/Zo+/btKlmypG1mR5Lq16+v8uXLu/V1JOnaa69ViRKX/6qLjo62m5UKDg5WpUqV7GZ7FixYoHbt2ikmJkZlypTR+PHjtX//ftvzO3fuVJs2bey+Ts7HR48e1YEDBzR06FC7z/jcc8/Zxm3w4MHavHmz6tWrp9GjR2vZsmVufzYAUklvdwBAwYSEhNg9DgoKUlZWliQpLi5OO3fuVGpqqpYvX65Ro0bpn//8p9LS0nK97sr3kKSsrCxVrVpVX3/9da421lARERHhdp+zsrI0cuRIu3odqxo1amjnzp12/SiIvMbH0ZitXbtWffv21cSJE9W5c2eVK1dOH330kV566aVcr8nJGGP7f+t7zZw5U4mJiXbtgoODJUktWrTQnj17tGTJEi1fvlx9+vRRp06dtGDBggJ8WiDwEGQAPxcREaEePXqoR48eeuCBB1S/fn39+OOPatGihaTsH9w5rV27VvXr15eU/cP2yJEjKlmyZL71G02aNNGKFSt033335fl8aGioMjMz7a61aNFCP//8s+rUqZPnaxo0aKCMjAxt2LDBNtOxc+dOnTp1ytWPfdW+//571axZU0899ZTt2r59++za1KtXT+vWrdOAAQNs1zZs2GD7/+joaFWrVk2//fab+vfvn+/XKlu2rO6++27dfffd6t27t7p06aITJ06oYsWKHvxEgH8jyAB+bM6cOcrMzFRiYqJKlSqluXPnKiIiQjVr1rS1+f777zV16lT17NlTqampmj9/vr788ktJUqdOnXTdddepZ8+emjJliurVq6dDhw5p8eLF6tmzp1q1aqWUlBR17NhRCQkJ6tu3rzIyMrRkyRI9/vjjkrJ3Rn3zzTfq27evwsLCVLlyZSUnJ6tt27Z64IEHNHz4cJUuXVrbt29XamqqXnvtNdWrV09dunTR8OHD9dZbb6lkyZJ65JFHrmr2x1116tTR/v379dFHH6l169b68ssv9emnn9q1eeihhzR8+HC1atVK119/vebNm6etW7eqdu3atjYTJkzQ6NGjVbZsWXXt2lUXLlzQhg0bdPLkSY0dO1bTpk1T1apV1axZM5UoUULz589XTEzMVS2fAYGMGhnAj5UvX14zZ87UDTfcYJs5+fzzz1WpUiVbm0cffVQbN25U8+bN9eyzz+qll15S586dJWUvnyxevFg33XSThgwZorp166pv377au3evoqOjJWVvsZ4/f74WLVqkZs2aqUOHDrJYLLb3f+aZZ7R3714lJCSoSpUqkrJncdLS0vTLL7/oxhtvVPPmzTV+/HhVrVrV9rrZs2crLi5OSUlJ6tWrl20LeGG74447NGbMGD344INq1qyZVq9erfHjx9u16d+/v8aNG6d//OMftiWiwYMHKzw83NZm2LBhmjVrlubMmaPGjRsrKSlJc+bMUa1atSRJZcqU0ZQpU9SqVSu1bt1ae/fu1eLFi+3qeQA4F2RyLuwCCCjx8fF65JFH9Mgjj3i7Kz7vlltuUUxMjObOnevtrgABhaUlAHDTuXPn9MYbb6hz584KDg7Whx9+qOXLl9vO3wFQdAgyAOAm65Lbc889pwsXLqhevXr65JNP1KlTJ293DQg4LC0BAACfRVUZAADwWQQZAADgswgyAADAZxFkAACAzyLIAAAAn0WQAQAAPosgAwAAfBZBBgAA+CyCDAAA8Fn/H45nR7nn70kLAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjIAAAHFCAYAAADosxNlAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA1YUlEQVR4nO3deXxU1f3/8fdkDyEMYQuEBExAAwgBJIIBFayCUhZRqqiogNLKYguCVbC2IC4gqK2tUqQVqPaL4K+AVcEFZZNFZZXIEmWRBAl7FkxIgMz5/cE382WykUkmJEdez8djHs2ce+65nzmMnffj3nNnHMYYIwAAAAv5VXcBAAAAFUWQAQAA1iLIAAAAaxFkAACAtQgyAADAWgQZAABgLYIMAACwFkEGAABYiyADAACsRZABfGT79u16+OGH1aJFC4WGhio0NFRXXnmlHnnkEW3atOmS1TF58mQ5HA6PtiuuuEJDhw6t0uOuX79ekydPVmZmZpUep7yGDh0qh8PhfgQHBys+Pl6TJk1SXl6e1+M5HA5NnjzZ94VeYj169FCPHj2quwzAZwKquwDg5+CNN97Qo48+qvj4eI0ZM0ZXX321HA6Hdu3apXfeeUfXXnut9uzZoxYtWlRLfUuWLFGdOnWq9Bjr16/XM888o6FDh6pu3bpVeqzyCg0N1YoVKyRJGRkZeueddzRlyhTt3r1bCxcu9GqsDRs2KDo6uirKBFAJBBmgktatW6dRo0apT58++s9//qOgoCD3tl/84hcaPXq0/t//+38KDQ0tc5zc3FzVqlWrSmrs2LFjlYxb0/n5+em6665zP+/du7d++OEHvfvuu3rllVfUtGnTco914TgAag4uLQGV9MILL8jf319vvPGGR4i50F133aWoqCj386FDh6p27dpKTk5Wr169FB4erptvvlmStHz5ct1+++2Kjo5WSEiIWrZsqUceeUTHjx8vNu7SpUvVoUMHBQcHKzY2Vi+99FKJxy/p0lJ2drYef/xxxcbGKigoSE2bNtXYsWOVk5Pj0c/hcOjRRx/V22+/rdatW6tWrVpq3769PvzwQ3efyZMn6/e//70kKTY21n05Z9WqVSXW85e//EUOh0N79uwptu3JJ59UUFCQ+/Vu3bpVffv2VaNGjRQcHKyoqCj16dNHBw8eLHHsiykMJAcOHJAkpaam6v7773eP37p1a7388styuVzF5uHCS0u5ubnu+QsJCVG9evWUmJiod955x2O/999/X0lJSapVq5bCw8PVs2dPbdiwwaNP4eXAHTt26N5775XT6VRkZKQeeughZWVlefQ1xmjmzJnq0KGDQkNDFRERoV/96lfat29fsX7Tp09X8+bNFRISomuuuUYfffRRheYMqMk4IwNUQkFBgVauXKnExEQ1adLEq33PnDmj/v3765FHHtGECRN07tw5SdLevXuVlJSk4cOHy+l06ocfftArr7yi66+/XsnJyQoMDJQkff7557r99tuVlJSkBQsWqKCgQNOnT9eRI0cueuzc3Fx1795dBw8e1FNPPaWEhATt2LFDf/rTn5ScnKzPPvvMY53N0qVLtXHjRk2ZMkW1a9fW9OnTdccddyglJUVxcXEaPny4Tp48qb/97W9avHixey7atGlT4vHvv/9+Pfnkk5o3b56ee+45j/n897//rX79+qlBgwbKyclRz549FRsbq9dff12RkZE6fPiwVq5cqVOnTnk134UKw1PDhg117Ngxde3aVWfOnNGzzz6rK664Qh9++KEef/xx7d27VzNnzix1nHHjxuntt9/Wc889p44dOyonJ0fffvutTpw44e4zf/58DR48WL169dI777yj/Px8TZ8+XT169NDnn3+u66+/3mPMgQMHatCgQXr44YeVnJysiRMnSpLmzJnj7vPII49o3rx5+t3vfqcXX3xRJ0+e1JQpU9S1a1d98803ioyMlCQ988wzeuaZZ/Twww/rV7/6ldLS0vTrX/9aBQUFio+Pr9DcATWSAVBhhw8fNpLMPffcU2zbuXPnzNmzZ90Pl8vl3jZkyBAjycyZM6fM8V0ulzl79qw5cOCAkWT++9//urd16dLFREVFmdOnT7vbsrOzTb169UzR/7SbN29uhgwZ4n4+depU4+fnZzZu3OjR7z//+Y+RZJYtW+Zuk2QiIyNNdna2x+v28/MzU6dOdbfNmDHDSDL79+8v8zUVuvPOO010dLQpKChwty1btsxIMh988IExxphNmzYZSea9994r15gXGjJkiAkLC3PP/7Fjx8yrr75qHA6Hufbaa40xxkyYMMFIMl999ZXHviNHjjQOh8OkpKR4zMOkSZPcz9u2bWsGDBhQ6vELCgpMVFSUadeuncdrPHXqlGnUqJHp2rWru23SpElGkpk+fbrHGKNGjTIhISHu986GDRuMJPPyyy979EtLSzOhoaHmiSeeMMYYk5GRYUJCQswdd9zh0W/dunVGkunevXupdQO24dISUEU6deqkwMBA9+Pll18u1mfgwIHF2o4ePaoRI0YoJiZGAQEBCgwMVPPmzSVJu3btkiTl5ORo48aNuvPOOxUSEuLeNzw8XP369btobR9++KHatm2rDh066Ny5c+7HrbfeWuIloZtuuknh4eHu55GRkWrUqJH78kxFDBs2TAcPHtRnn33mbps7d64aN26s3r17S5JatmypiIgIPfnkk5o1a5Z27tzp1TFycnLc89+wYUONHTtWvXv31pIlSyRJK1asUJs2bdS5c2eP/YYOHSpjjHuhcEk6d+6sjz76SBMmTNCqVat0+vRpj+0pKSk6dOiQHnjgAfn5/d//1dauXVsDBw7Ul19+qdzcXI99+vfv7/E8ISFBeXl5Onr0qKTz/24Oh0P333+/x79b48aN1b59e/e/24YNG5SXl6fBgwd7jNe1a1f3ewn4ueDSElAJDRo0UGhoaIkf6PPnz1dubq7S09OLfUBJUq1atYrdSeRyudSrVy8dOnRIf/zjH9WuXTuFhYXJ5XLpuuuuc39YZmRkyOVyqXHjxsXGLamtqCNHjmjPnj3uy1RFFV2PU79+/WJ9goODi314e6N3795q0qSJ5s6dq169eikjI0Pvv/++xowZI39/f0mS0+nU6tWr9fzzz+upp55SRkaGmjRpol//+td6+umnS62/UGhoqNasWeOut3nz5h5zfuLECV1xxRXF9itcz3ThZaKi/vrXvyo6OloLFy7Uiy++qJCQEN16662aMWOGrrzySve+JV1yjIqKksvlUkZGhscC76LzHBwcLEnueT5y5IiMMe7LR0XFxcV51F3R9wdgE4IMUAn+/v76xS9+oU8//VTp6ekeH1qF60N++OGHEvct+l0vkvTtt9/qm2++0bx58zRkyBB3e9FFsREREXI4HDp8+HCxMUpqK6owgF249qLo9qrm7++vBx54QH/961+VmZmp+fPnKz8/X8OGDfPo165dOy1Ys
EDGGG3fvl3z5s3TlClTFBoaqgkTJpR5DD8/PyUmJpa6vX79+kpPTy/WfujQIUllz0NYWJh7HcqRI0fcZ2f69eun3bt3u0NJaeP7+fkpIiKizPqLatCggRwOh7744gt3yLlQYVvhsUt7f5QU3gBbcWkJqKSJEyeqoKBAI0aM0NmzZys1VmG4Kfoh9cYbb3g8DwsLU+fOnbV48WKPL3c7deqUPvjgg4sep2/fvtq7d6/q16+vxMTEYo+KfNAVPXtQHsOGDVNeXp7eeecdzZs3T0lJSWrVqlWJfR0Oh9q3b68///nPqlu3rrZs2eJ1jUXdfPPN2rlzZ7Gx3nrrLTkcDt10003lGicyMlJDhw7Vvffeq5SUFOXm5io+Pl5NmzbV/PnzZYxx983JydGiRYvcdzJ5o2/fvjLG6Mcffyzx361du3aSzt+ZFRISov/5n//x2H/9+vWVuhwI1ESckQEqqVu3bnr99df129/+Vtdcc41+85vf6Oqrr5afn5/S09O1aNEiSSrXF9K1atVKLVq00IQJE2SMUb169fTBBx9o+fLlxfo+++yzuu2229SzZ0+NHz9eBQUFevHFFxUWFqaTJ0+WeZyxY8dq0aJFuvHGG/XYY48pISFBLpdLqamp+vTTTzV+/Hh16dLFq3ko/BB99dVXNWTIEAUGBio+Pt5jbU1JrzcpKUlTp05VWlqaZs+e7bH9ww8/1MyZMzVgwADFxcXJGKPFixcrMzNTPXv29Kq+kjz22GN666231KdPH02ZMkXNmzfX0qVLNXPmTI0cOVJXXXVVqft26dJFffv2VUJCgiIiIrRr1y69/fbbHgFl+vTpGjx4sPr27atHHnlE+fn5mjFjhjIzMzVt2jSv6+3WrZt+85vfaNiwYdq0aZNuvPFGhYWFKT09XWvXrlW7du00cuRIRURE6PHHH9dzzz2n4cOH66677lJaWpomT57MpSX8/FTjQmPgZ2Xbtm1m2LBhJjY21gQHB5uQkBDTsmVL8+CDD5rPP//co2/hHTUl2blzp+nZs6cJDw83ERER5q677jKpqanF7poxxpj333/fJCQkmKCgINOsWTMzbdo09x0wFyp615Ixxvz000/m6aefNvHx8SYoKMg4nU7Trl0789hjj5nDhw+7+0kyo0ePLlZnSWNOnDjRREVFGT8/PyPJrFy5suxJM8bMnj3bSDKhoaEmKyvLY9vu3bvNvffea1q0aGFCQ0ON0+k0nTt3NvPmzbvouGXN8YUOHDhg7rvvPlO/fn0TGBho4uPjzYwZMzzuNDKm+F1LEyZMMImJiSYiIsIEBwebuLg489hjj5njx4977Pfee++ZLl26mJCQEBMWFmZuvvlms27dOo8+hf9mx44d82ifO3duiXeCzZkzx3Tp0sWEhYWZ0NBQ06JFC/Pggw+aTZs2ufu4XC4zdepUExMTY4KCgkxCQoL54IMPTPfu3blrCT8rDmMuOOcJAABgEdbIAAAAaxFkAACAtQgyAADAWgQZAABgLYIMAACwFkEGAABYy+ovxHO5XDp06JDCw8NL/Lp3AABQ8xhjdOrUKUVFRXn8qGpFWB1kDh06pJiYmOouAwAAVEBaWpqio6MrNYbVQabwq8/T0tLK9fXvAACg+mVnZysmJqbMnzApL6uDTOHlpDp16hBkAACwjC+WhbDYFwAAWIsgAwAArEWQAQAA1iLIAAAAaxFkAACAtQgyAADAWgQZAABgLYIMAACwFkEGAABYiyADAACsVa1BZs2aNerXr5+ioqLkcDj03nvvVWc5AADAMtX6W0s5OTlq3769hg0bpoEDB1ZnKcUMm/u1vvj+mApckilhu0P/1170bz+H5O8nBfr7K+9sgVzm/HNjzm+TpLOu8/8b6H9+58LnkhTo59BZV0lH/T91gv11TtLp/IIS67vUgv3PvzCXMXIZqeB/i/rfZhkjORxS3dBA1Q4J1NFTeco/65K/n9SgdrAk6VT+OYUHB+jGqxqqQXiwMnLOqH1MXV0VGa79x3N0tsClQ5mnJUlRdUMV6O+nvcd+0vdHTsn1v/OVnpWn3DMFCg3yV1iQvyLCghQeEqhTeWcVUStI1zSPUKC/n2IbhKljswhJ0tbUDK1KOSpJOucy+v7IKUXUClKD8GD33/d0bqbvjpzSN2mZiggLUouGtRXbIEyStP94jsd4RV04fo/4RurYLEJbUzM89it8fuFrLOxb2pgXO25F+l5snKKvozzHPVvgKjbnVXncivDVHFXVeL5SU+tCzWXDe8ZhjKkJn4NyOBxasmSJBgwYUO59srOz5XQ6lZWV5dMfjWwxcan7gxg/XyO6x0mSZq3e57PxJvRu7dE27aNdxcbvEOPUtrSsUp97O2ZJfSrStywlvQ5vjlvR43t73Irw1RxV1Xi+UlPrQs1Vle8ZX35+W7VGJj8/X9nZ2R4PXxs292tCzGVi1up9PgsxheNtTc1wP9+amlHi+EVDS2khprxjFu1Tkb5lKe11eHPcihzf2+NWhK/mqKrG85WaWhdqLpveM1YFmalTp8rpdLofMTExPj/GjkOlf6gAF7P/eE6Jf1f1mCW1e9O3vMevzHG9Pb63x60IX81RVY3nKzW1LtRcNr1nrAoyEydOVFZWlvuRlpbm82NcHeX0+Zi4fBSumyn6d1WPWVK7N33Le/zKHNfb43t73Irw1RxV1Xi+UlPrQs1l03vGqiATHBysOnXqeDx8be6wzu4Fqvh5G9k9zr1OxlfjXbgYrmOziBLH7xDjLPO5t2MW7VORvmUp7XV4c9yKHN/b41aEr+aoqsbzlZpaF2oum94zLPYtBXcteYe7lrhrqbTjctdSzfk//ppaF2quqnrP+PLzu1qDzE8//aQ9e/ZIkjp27KhXXnlFN910k+rVq6dmzZpddP+qDDIAAKBq+PLzu1q/R2bTpk266aab3M/HjRsnSRoyZIjmzZtXTVUBAABbVGuQ6dGjh2rIlS0AAGAhqxb7AgAAXIggAwAArEWQAQAA1iLIAAAAaxFkAACAtQgyAADAWgQZAABgLYIMAACwFkEGAABYiyADAACsRZABAADWIsgAAABrEWQAAIC1CDIAAMBaBBkAAGAtggwAALAWQQYAAFiLIAMAAKxFkAEAANYiyAAAAGsRZAAAgLUIMgAAwFoEGQAAYC2CDAAAsBZBBgAAWIsgAwAArEWQAQAA1iLIAAAAaxFkAACAtQgyAADAWgQZAABgLYIMAACwFkEGAABYiyADAACsRZABAADWIsgAAABrEWQAAIC1CDIAAMBaBBkAAGAtggwAALAWQQYAAFiLIAMAAKxFkAEAANYiyAAAAGsRZAAAgLUIMgAAwFoEGQAAYC2CDAAAsBZBBgAAWIsgAwAArEWQAQAA1iLIAAAAaxFkAACAtQgyAADAWgQZAABgLYIMAACwFkEGAABYiyADAACsRZABAADWIsgAAABrEWQAAIC1CDIAAMBaBBkAAGAtggwAALAWQQYAAFiLIAMAAKxFkAEAANYiyAAAAGsRZAAAgLUIMgAAwFoEGQAAYC2CDAAAsBZBBgAAWIsgAwAArEWQ
AQAA1iLIAAAAaxFkAACAtQgyAADAWgQZAABgLYIMAACwFkEGAABYiyADAACsRZABAADWIsgAAABrEWQAAIC1CDIAAMBaBBkAAGAtggwAALAWQQYAAFiLIAMAAKxFkAEAANYiyAAAAGsRZAAAgLUIMgAAwFoEGQAAYC2CDAAAsBZBBgAAWIsgAwAArEWQAQAA1iLIAAAAaxFkAACAtQgyAADAWgQZAABgLYIMAACwFkEGAABYiyADAACsRZABAADWqnCQ2bNnjz755BOdPn1akmSM8VlRAAAA5eF1kDlx4oRuueUWXXXVVfrlL3+p9PR0SdLw4cM1fvx4nxcIAABQGq+DzGOPPaaAgAClpqaqVq1a7vZBgwbp448/9mlxAAAAZQnwdodPP/1Un3zyiaKjoz3ar7zySh04cMBnhQEAAFyM12dkcnJyPM7EFDp+/LiCg4N9UhQAAEB5eB1kbrzxRr311lvu5w6HQy6XSzNmzNBNN93k0+IAAADK4vWlpRkzZqhHjx7atGmTzpw5oyeeeEI7duzQyZMntW7duqqoEQAAoERen5Fp06aNtm/frs6dO6tnz57KycnRnXfeqa1bt6pFixZVUSMAAECJHMbiL4DJzs6W0+lUVlaW6tSpU93lAACAcvDl57fXl5a2b99eYrvD4VBISIiaNWvGol8AAHBJeB1kOnToIIfDIen/vs238LkkBQYGatCgQXrjjTcUEhLiozIBAACK83qNzJIlS3TllVdq9uzZ+uabb7Rt2zbNnj1b8fHxmj9/vt58802tWLFCTz/9dFXUCwAA4Ob1GZnnn39er776qm699VZ3W0JCgqKjo/XHP/5RX3/9tcLCwjR+/Hi99NJLPi0WAADgQl6fkUlOTlbz5s2LtTdv3lzJycmSzl9+KvwNJgAAgKridZBp1aqVpk2bpjNnzrjbzp49q2nTpqlVq1aSpB9//FGRkZG+qxIAAKAEXl9aev3119W/f39FR0crISFBDodD27dvV0FBgT788ENJ0r59+zRq1CifFwsAAHChCn2PzE8//aR///vf+u6772SMUatWrXTfffcpPDy8KmosFd8jAwCAfar1e2QkqXbt2hoxYkSlDgwAAFBZFQoykrRz506lpqZ6rJWRpP79+1e6KAAAgPLwOsjs27dPd9xxh5KTk+VwOIp9KV5BQYFvKwQAACiF13ctjRkzRrGxsTpy5Ihq1aqlHTt2aM2aNUpMTNSqVauqoEQAAICSeX1GZsOGDVqxYoUaNmwoPz8/+fn56frrr9fUqVP1u9/9Tlu3bq2KOgEAAIrx+oxMQUGBateuLUlq0KCBDh06JOn8F+KlpKT4tjoAAIAyeH1Gpm3bttq+fbvi4uLUpUsXTZ8+XUFBQZo9e7bi4uKqokYAAIASeR1knn76aeXk5EiSnnvuOfXt21c33HCD6tevr4ULF/q8QAAAgNJU6Avxijp58qQiIiLcdy5dKnwhHgAA9qn2L8Qrql69er4YBgAAwCteB5m8vDz97W9/08qVK3X06FG5XC6P7Vu2bPFZcQAAAGXxOsg89NBDWr58uX71q1+pc+fOl/xyEgAAQCGvg8zSpUu1bNkydevWrSrqAQAAKDevv0emadOml/xXrgEAAEridZB5+eWX9eSTT+rAgQNVUQ8AAEC5eX1pKTExUXl5eYqLi1OtWrUUGBjosf3kyZM+Kw4AAKAsXgeZe++9Vz/++KNeeOEFRUZGstgXAABUG6+DzPr167Vhwwa1b9++KuoBAAAoN6/XyLRq1UqnT5+uiloAAAC84nWQmTZtmsaPH69Vq1bpxIkTys7O9ngAAABcKl7/1pKf3/nsU3RtjDFGDodDBQUFvqvuIvitJQAA7FOtv7W0cuXKSh0QAADAV7wOMt27d6+KOgAAALxW7iCzffv2cvVLSEiocDEAAADeKHeQ6dChgxwOh8paUnOp18gAAIDLW7mDzP79+6uyDgAAAK+VO8g0b968KusAAADwmtffIwMAAFBTEGQAAIC1CDIAAMBaBBkAAGAtggwAALBWue5a6tixY7HfVirNli1bKlUQAABAeZUryAwYMMD9d15enmbOnKk2bdooKSlJkvTll19qx44dGjVqVJUUCQAAUJJyBZlJkya5/x4+fLh+97vf6dlnny3WJy0tzbfVAQAAlMFhyvrNgRI4nU5t2rRJV155pUf7999/r8TERGVlZfm0wLL48mfAAQDApeHLz2+vF/uGhoZq7dq1xdrXrl2rkJCQShUDAADgjXL/REGhsWPHauTIkdq8ebOuu+46SefXyMyZM0d/+tOffF4gAABAabwOMhMmTFBcXJxeffVVzZ8/X5LUunVrzZs3T3fffbfPCwQAACiN12tkahLWyAAAYJ9qXSMjSZmZmfrnP/+pp556SidPnpR0/vtjfvzxx0oVAwAA4A2vLy1t375dt9xyi5xOp3744QcNHz5c9erV05IlS3TgwAG99dZbVVEnAABAMV6fkRk3bpyGDh2q77//3uMupd69e2vNmjU+LQ4AAKAsXgeZjRs36pFHHinW3rRpUx0+fNgnRQEAAJSH10EmJCRE2dnZxdpTUlLUsGFDnxQFAABQHl4Hmdtvv11TpkzR2bNnJUkOh0OpqamaMGGCBg4c6PMCAQAASuN1kHnppZd07NgxNWrUSKdPn1b37t3VsmVLhYeH6/nnn6+KGgEAAErk9V1LderU0dq1a7VixQpt2bJFLpdL11xzjW655ZaqqA8AAKBUfCEeAAC4pHz5+e31GRlJ+vzzz/X555/r6NGjcrlcHtvmzJlTqYIAAADKy+sg88wzz2jKlClKTExUkyZN5HA4qqIuAACAi/I6yMyaNUvz5s3TAw88UBX1AAAAlJvXdy2dOXNGXbt2rYpaAAAAvOJ1kBk+fLjmz59fFbUAAAB4xetLS3l5eZo9e7Y+++wzJSQkKDAw0GP7K6+84rPiAAAAylKhX7/u0KGDJOnbb7/12MbCXwAAcCl5HWRWrlxZFXUAAAB4zes1Mhc6ePCgfvzxR1/VAgAA4BWvg4zL5dKUKVPkdDrVvHlzNWvWTHXr1tWzzz5b7MvxAAAAqpLXl5b+8Ic/6M0339S0adPUrVs3GWO0bt06TZ48WXl5efxwJAAAuGS8/q2lqKgozZo1S/379/do/+9//6tRo0Zd0ktN/NYSAAD28eXnt9eXlk6ePKlWrVoVa2/VqpVOnjxZqWIAAAC84XWQad++vV577bVi7a+99prat2/vk6IAAADKw+s1MtOnT1efPn302WefKSkpSQ6HQ+vXr1daWpqWLVtWFTUCAACUyOszMt27d9d3332nO+64Q5mZmTp58qTuvPNOpaSk6IYbbqiKGgEAAErk9WLfmoTFvgAA2KdaF/t+/PHHWrt2rfv566+/rg4dOui+++5TRkZGpYoBAADwhtdB5ve//72ys7MlScnJyRo3bpx++ctfat++fRo3bpzPCwQAACiN14t99+/frzZt2kiSFi1apH79+umFF17Qli1b9Mtf/tLnBQIAAJTG6zMyQUFBys3NlSR99tln6tWrlyS
pXr167jM1AAAAl4LXZ2Suv/56jRs3Tt26ddPXX3+thQsXSpK+++47RUdH+7xAAACA0nh9Rua1115TQECA/vOf/+jvf/+7mjZtKkn66KOPdNttt/m8QAAAgNJw+zUAALikfPn5Xa5LS9nZ2e4DXWwdDIECAABcKuUKMhEREUpPT1ejRo1Ut25dORyOYn2MMXI4HCooKPB5kQAAACUpV5BZsWKF6tWrJ0lauXJllRYEAABQXqyRAQAAl9QlXyNTVGZmpt58803t2rVLDodDbdq00UMPPSSn01mpYgAAALzh9e3XmzZtUosWLfTnP/9ZJ0+e1PHjx/XKK6+oRYsW2rJlS1XUCAAAUCKvLy3dcMMNatmypf7xj38oIOD8CZ1z585p+PDh2rdvn9asWVMlhZaES0sAANjHl5/fXgeZ0NBQbd26Va1atfJo37lzpxITE90/X3ApEGQAALCPLz+/vb60VKdOHaWmphZrT0tLU3h4eKWKAQAA8IbXQWbQoEF6+OGHtXDhQqWlpengwYNasGCBhg8frnvvvbcqagQAACiR13ctvfTSS3I4HHrwwQd17tw5SVJgYKBGjhypadOm+bxAAACA0lT4e2Ryc3O1d+9eGWPUsmVL1apVy9e1XRRrZAAAsE+1rJHJzc3V6NGj1bRpUzVq1EjDhw9XkyZNlJCQUC0hBgAAoNxBZtKkSZo3b5769Omje+65R8uXL9fIkSOrsjYAAIAylXuNzOLFi/Xmm2/qnnvukSTdf//96tatmwoKCuTv719lBQIAAJSm3Gdk0tLSdMMNN7ifd+7cWQEBATp06FCVFAYAAHAx5Q4yBQUFCgoK8mgLCAhw37kEAABwqZX70pIxRkOHDlVwcLC7LS8vTyNGjFBYWJi7bfHixb6tEAAAoBTlDjJDhgwp1nb//ff7tBgAAABvlDvIzJ07tyrrAAAA8JrXP1EAAABQUxBkAACAtQgyAADAWgQZAABgLYIMAACwFkEGAABYiyADAACsRZABAADWIsgAAABrEWQAAIC1CDIAAMBaBBkAAGAtggwAALAWQQYAAFiLIAMAAKxFkAEAANYiyAAAAGsRZAAAgLUIMgAAwFoEGQAAYC2CDAAAsBZBBgAAWIsgAwAArEWQAQAA1iLIAAAAaxFkAACAtQgyAADAWgQZAABgLYIMAACwFkEGAABYiyADAACsRZABAADWIsgAAABrEWQAAIC1CDIAAMBaBBkAAGAtggwAALAWQQYAAFiLIAMAAKxFkAEAANYiyAAAAGsRZAAAgLUIMgAAwFoEGQAAYC2CDAAAsBZBBgAAWIsgAwAArEWQAQAA1iLIAAAAaxFkAACAtQgyAADAWgQZAABgLYIMAACwFkEGAABYiyADAACsRZABAADWIsgAAABrEWQAAIC1CDIAAMBaBBkAAGAtggwAALAWQQYAAFiLIAMAAKxFkAEAANYiyAAAAGsRZAAAgLUIMgAAwFoEGQAAYC2CDAAAsBZBBgAAWIsgAwAArEWQAQAA1iLIAAAAaxFkAACAtQgyAADAWgQZAABgLYIMAACwFkEGAABYiyADAACsRZABAADWIsgAAABrEWQAAIC1CDIAAMBaBBkAAGAtggwAALAWQQYAAFiLIAMAAKxFkAEAANYiyAAAAGsRZAAAgLUIMgAAwFoEGQAAYC2CDAAAsBZBBgAAWIsgAwAArEWQAQAA1iLIAAAAaxFkAACAtQgyAADAWgQZAABgLYIMAACwFkEGAABYiyADAACsRZABAADWIsgAAABrEWQAAIC1CDIAAMBaBBkAAGCtag8yM2fOVGxsrEJCQtSpUyd98cUX1V0SAACwREB1HnzhwoUaO3asZs6cqW7duumNN95Q7969tXPnTjVr1qw6S9PCjal6a8MP2ncsR3lnXTIX6e/nkIyRjM6nQ1eRbQ3Dg5V/1qXsvLNyFRks0E86e8EODumix6tOgX7n6zvnKrufv0MKDvBToL9DEWHBysk/p9NnCyRJ51xGAX4OBfn7qVaQv8KCA3S2wKXTZwoUU6+WYurV0qm8s3K5jK5u6lRU3VAF+vtp3Z7jOpR5WlF1Q9WtZQPtPfaTVqUc1ekzBerYLEIBfg7tP56j2AZhuqdzM3VsFuGuZ2tqhlalHNXhrDw1doaoR3wjSdKqlKOSpKi6oTqUedpj+4X7lzRGYV2xDcKK9S3sX1hPx2YR7v0Lj1e4b2Edh7PyJKnY8RduTNU3aZlqH1NXg65tVurYZT331tbUDC34OlUZuWd0c+tI93Ero7I1VYXqrKkmzoftasKc1oQaLicOY0y1fWZ26dJF11xzjf7+97+721q3bq0BAwZo6tSpF90/OztbTqdTWVlZqlOnjs/qGvD6Wm1Ly/LZeKg+I7rHaULv1pr20S7NWr2vwvtLuugYF/YtqX+HGKfX76sR3eP05b4THvt1iHHqurj6ZY5d9HnR2i6mpNfaIcap90Zf71X9ZY3pbU1VoTprqonzYbuaMKc1oQYb+PLzu9ouLZ05c0abN29Wr169PNp79eql9evXl7hPfn6+srOzPR6+tnBjKiHmZ2TW6n1auDG1QiGmcP+tqRnamppx0TEK+0oqsX9F3lezVu8rtt+2tKyLjl30+YW1XUxpr3VbWpYWbkwt1xjlGdObmqpCddZUE+fDdjVhTmtCDZejagsyx48fV0FBgSIjIz3aIyMjdfjw4RL3mTp1qpxOp/sRExPj87q+Scv0+ZioXpX9N91/PEf7j+eUu++F/1uTePsaSlLRuSxtzOqcp+qsqSbOh+1qwpzWhBouR9W+2NfhcHg8N8YUays0ceJEZWVluR9paWk+r6d9TF2fj4nqVdl/09gGYe51LOXpe+H/1iTevoaSVHQuSxuzOuepOmuqifNhu5owpzWhhstRtQWZBg0ayN/fv9jZl6NHjxY7S1MoODhYderU8Xj42qBrm6lDjNPn46J6jOwep0HXNtOI7nEV3r9jswh1bBZx0TEK+0oqsX9F3lcju8cV269jjPOiYxd9fmFtF1Paa+0Y46zwgt+SxvSmpqpQnTXVxPmwXU2Y05pQw+Wo2hf7durUSTNnznS3tWnTRrfffnu1LvaVuGupLNy1xF1LFVUT7+bgrqWfl5owpzWhhprOl5/f1RpkFi5cqAceeECzZs1SUlKSZs+erX/84x/asWOHmjdvftH9qzLIAACAquHLz+9q/R6ZQYMG6cSJE5oyZYrS09PVtm1bLVu2rFwhBgAAoFrPyFQWZ2QAALDPz+J7ZAAAACqLIAMAAKxFkAEAANYiyAAAAGsRZAAAgLUIMgAAwFoEGQAAYC2CDAAAsBZBBgAAWKtaf6Kgsgq/lDg7O7uaKwEAAOVV+Lntix8XsDrInDp1SpIUExNTzZUAAABvnTp1Sk6ns1JjWP1bSy6XS4cOHVJ4eLgcDkd1l2ON7OxsxcTEKC0tjd+oqgTmsfKYQ99gHn2DefSN8syjMUanTp1SVFSU/Pwqt8rF6jMyfn5+io6Oru4yrFWnTh3+Y/UB5rHymEPfYB59g3n0jYvNY2XPxBRisS8AALAWQQYAAF
iLIHMZCg4O1qRJkxQcHFzdpViNeaw85tA3mEffYB5941LPo9WLfQEAwOWNMzIAAMBaBBkAAGAtggwAALAWQQYAAFiLIHOZmTlzpmJjYxUSEqJOnTrpiy++qO6SqtWaNWvUr18/RUVFyeFw6L333vPYbozR5MmTFRUVpdDQUPXo0UM7duzw6JOfn6/f/va3atCggcLCwtS/f38dPHjQo09GRoYeeOABOZ1OOZ1OPfDAA8rMzKziV3dpTJ06Vddee63Cw8PVqFEjDRgwQCkpKR59mMey/f3vf1dCQoL7C8SSkpL00UcfubczfxUzdepUORwOjR071t3GXF7c5MmT5XA4PB6NGzd2b69xc2hw2ViwYIEJDAw0//jHP8zOnTvNmDFjTFhYmDlw4EB1l1Ztli1bZv7whz+YRYsWGUlmyZIlHtunTZtmwsPDzaJFi0xycrIZNGiQadKkicnOznb3GTFihGnatKlZvny52bJli7nppptM+/btzblz59x9brvtNtO2bVuzfv16s379etO2bVvTt2/fS/Uyq9Stt95q5s6da7799luzbds206dPH9OsWTPz008/ufswj2V7//33zdKlS01KSopJSUkxTz31lAkMDDTffvutMYb5q4ivv/7aXHHFFSYhIcGMGTPG3c5cXtykSZPM1VdfbdLT092Po0ePurfXtDkkyFxGOnfubEaMGOHR1qpVKzNhwoRqqqhmKRpkXC6Xady4sZk2bZq7LS8vzzidTjNr1ixjjDGZmZkmMDDQLFiwwN3nxx9/NH5+fubjjz82xhizc+dOI8l8+eWX7j4bNmwwkszu3bur+FVdekePHjWSzOrVq40xzGNFRUREmH/+85/MXwWcOnXKXHnllWb58uWme/fu7iDDXJbPpEmTTPv27UvcVhPnkEtLl4kzZ85o8+bN6tWrl0d7r169tH79+mqqqmbbv3+/Dh8+7DFnwcHB6t69u3vONm/erLNnz3r0iYqKUtu2bd19NmzYIKfTqS5durj7XHfddXI6nT/Luc/KypIk1atXTxLz6K2CggItWLBAOTk5SkpKYv4qYPTo0erTp49uueUWj3bmsvy+//57RUVFKTY2Vvfcc4/27dsnqWbOodU/GonyO378uAoKChQZGenRHhkZqcOHD1dTVTVb4byUNGcHDhxw9wkKClJERESxPoX7Hz58WI0aNSo2fqNGjX52c2+M0bhx43T99derbdu2kpjH8kpOTlZSUpLy8vJUu3ZtLVmyRG3atHH/nzrzVz4LFizQli1btHHjxmLbeC+WT5cuXfTWW2/pqquu0pEjR/Tcc8+pa9eu2rFjR42cQ4LMZcbhcHg8N8YUa4OnisxZ0T4l9f85zv2jjz6q7du3a+3atcW2MY9li4+P17Zt25SZmalFixZpyJAhWr16tXs783dxaWlpGjNmjD799FOFhISU2o+5LFvv3r3df7dr105JSUlq0aKF/vWvf+m6666TVLPmkEtLl4kGDRrI39+/WNI9evRosWSN8wpX6Zc1Z40bN9aZM2eUkZFRZp8jR44UG//YsWM/q7n/7W9/q/fff18rV65UdHS0u515LJ+goCC1bNlSiYmJmjp1qtq3b69XX32V+fPC5s2bdfToUXXq1EkBAQEKCAjQ6tWr9de//lUBAQHu18lceicsLEzt2rXT999/XyPfjwSZy0RQUJA6deqk5cuXe7QvX75cXbt2raaqarbY2Fg1btzYY87OnDmj1atXu+esU6dOCgwM9OiTnp6ub7/91t0nKSlJWVlZ+vrrr919vvrqK2VlZf0s5t4Yo0cffVSLFy/WihUrFBsb67GdeawYY4zy8/OZPy/cfPPNSk5O1rZt29yPxMREDR48WNu2bVNcXBxzWQH5+fnatWuXmjRpUjPfj14tDYbVCm+/fvPNN83OnTvN2LFjTVhYmPnhhx+qu7Rqc+rUKbN161azdetWI8m88sorZuvWre5b0qdNm2acTqdZvHixSU5ONvfee2+JtxlGR0ebzz77zGzZssX84he/KPE2w4SEBLNhwwazYcMG065du5/NrZojR440TqfTrFq1yuN2zdzcXHcf5rFsEydONGvWrDH79+8327dvN0899ZTx8/Mzn376qTGG+auMC+9aMoa5LI/x48ebVatWmX379pkvv/zS9O3b14SHh7s/K2raHBJkLjOvv/66ad68uQkKCjLXXHON+xbZy9XKlSuNpGKPIUOGGGPO32o4adIk07hxYxMcHGxuvPFGk5yc7DHG6dOnzaOPPmrq1atnQkNDTd++fU1qaqpHnxMnTpjBgweb8PBwEx4ebgYPHmwyMjIu0ausWiXNnyQzd+5cdx/msWwPPfSQ+7/Lhg0bmptvvtkdYoxh/iqjaJBhLi+u8HthAgMDTVRUlLnzzjvNjh073Ntr2hw6jDHGy7NMAAAANQJrZAAAgLUIMgAAwFoEGQAAYC2CDAAAsBZBBgAAWIsgAwAArEWQAQAA1iLIAKjRhg4dqgEDBrif9+jRQ2PHjq22egDULAQZAF45fPiwxowZo5YtWyokJESRkZG6/vrrNWvWLOXm5lb58RcvXqxnn33Wp2MWDUsA7BFQ3QUAsMe+ffvUrVs31a1bVy+88ILatWunc+fO6bvvvtOcOXMUFRWl/v37F9vv7NmzCgwM9EkN9erV88k4AH4eOCMDoNxGjRqlgIAAbdq0SXfffbdat26tdu3aaeDAgVq6dKn69esnSXI4HJo1a5Zuv/12hYWF6bnnnlNBQYEefvhhxcbGKjQ0VPHx8Xr11Vc9xi8oKNC4ceNUt25d1a9fX0888YSK/opK0UtLZ86c0RNPPKGmTZsqLCxMXbp00apVq9zb582bp7p16+qTTz5R69atVbt2bd12221KT0+XJE2ePFn/+te/9N///lcOh0MOh8NjfwA1G0EGQLmcOHFCn376qUaPHq2wsLAS+zgcDvffkyZN0u23367k5GQ99NBDcrlcio6O1rvvvqudO3fqT3/6k5566im9++677n1efvllzZkzR2+++abWrl2rkydPasmSJWXWNWzYMK1bt04LFizQ9u3bddddd+m2227T999/7+6Tm5url156SW+//bbWrFmj1NRUPf7445Kkxx9/XHfffbc73KSnp6tr166VmSoAlxCXlgCUy549e2SMUXx8vEd7gwYNlJeXJ0kaPXq0XnzxRUnSfffdp4ceesij7zPPPOP+OzY2VuvXr9e7776ru+++W5L0l7/8RRMnTtTAgQMlSbNmzdInn3xSak179+7VO++8o4MHDyoqKkrS+WDy8ccfa+7cuXrhhRcknb+0NWvWLLVo0UKS9Oijj2rKlCmSpNq1ays0NFT5+flq3LhxxSYHQLUhyADwyoVnXSTp66+/lsvl0uDBg5Wfn+9uT0xMLLbvrFmz9M9//lMHDhzQ6dOndebMGXXo0EGSlJWVpfT0dCUlJbn7BwQEKDExsdjlpUJbtmyRMUZXXXWVR3t+fr7q16/vfl6rVi13iJGkJk2a6OjRo+V/0QBqLIIMgHJp2bKlHA6Hdu/e7dEeFxcnS
QoNDfVoL3r56d1339Vjjz2ml19+WUlJSQoPD9eMGTP01VdfVbgml8slf39/bd68Wf7+/h7bateu7f676EJjh8NRajgCYBfWyAAol/r166tnz5567bXXlJOT4/X+X3zxhbp27apRo0apY8eOatmypfbu3eve7nQ61aRJE3355ZfutnPnzmnz5s2ljtmxY0cVFBTo6NGjatmypcfDm8tEQUFBKigo8Po1Aah+BBkA5TZz5kydO3dOiYmJWrhwoXbt2qWUlBT9+9//1u7du4udFblQy5YttWnTJn3yySf67rvv9Mc//lEbN2706DNmzBhNmzZNS5Ys0e7duzVq1ChlZmaWOuZVV12lwYMH68EHH9TixYu1f/9+bdy4US+++KKWLVtW7td1xRVXaPv27UpJSdHx48d19uzZcu8LoHoRZACUW4sWLbR161bdcsstmjhxotq3b6/ExET97W9/0+OPP17mF9WNGDFCd955pwYNGqQuXbroxIkTGjVqlEef8ePH68EHH9TQoUPdl5/uuOOOMmuaO3euHnzwQY0fP17x8fHq37+/vvrqK8XExJT7df36179WfHy8EhMT1bBhQ61bt67c+wKoXg7DhWIAAGApzsgAAABrEWQAAIC1CDIAAMBaBBkAAGAtggwAALAWQQYAAFiLIAMAAKxFkAEAANYiyAAAAGsRZAAAgLUIMgAAwFoEGQAAYK3/D5JQSC+u5bUxAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Number of poisoned images: 668 out of 10000.\n", + "last index of poison 1367\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAj8AAAHFCAYAAAAdTZjVAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAABh9klEQVR4nO3dd1gUV9sG8HtpSwcBaUoARREVrFFRo2CvMWrUqLEnNixYgqJRIVGwhySWJHbzxhpLjBqQWNCIKFhiw44dJLGAgiLlfH/47YR1QVlcXHDv33XtJXvmzNlnzs7uPp6ZOSMTQggQERER6Qg9bQdARERE9DYx+SEiIiKdwuSHiIiIdAqTHyIiItIpTH6IiIhIpzD5ISIiIp3C5IeIiIh0CpMfIiIi0ilMfoiIiEinlPnk5/Tp0xgyZAgqV64MExMTmJiYoEqVKhg2bBgSEhLeWhwhISGQyWRKZW5ubhg4cGCJvm5sbCxCQkLw6NEjjbe9ceNG1KhRAyYmJpDJZDh16lSB9Q4cOACZTCY99PX14eDggB49eiAxMVHt1x04cCDc3NzeLPhSYPXq1ZDJZLh+/bq2Q5EUdZ9UvJezZ89WWabYrrf5+crPz89PaX8zMTFBrVq1EBERgby8PLXaun79OmQyGVavXl0ywb5F6ry3o0aNKvmASjk3Nzd06tRJ22EA+O879Ndffy12G9r63nz06BHs7OywYcMGqez27dsIDAxE8+bNYW1trfZnrKjrZ2dno3LlyoiIiFA77jKd/Pz444+oV68ejh49irFjx2Lnzp3YtWsXAgMDce7cObz//vu4evWq1uLbtm0bpk2bVqKvERsbi9DQUI0nP//88w/69euHypUrIzIyEkeOHEHVqlVfuU5YWBiOHDmC/fv3Y9KkSYiOjkaTJk1w584dtV572rRp2LZt25uETxoye/ZsPHjwQNthqKhUqRKOHDmCI0eOYOPGjahQoQLGjRuH4OBgtdpxcnLCkSNH0LFjxxKKlOjdFhoaCmdnZ/Tq1Usqu3LlCn755RcYGRmhQ4cOardZ1PUNDQ0xffp0fPXVV7h//75ar2GgdlSlxOHDhzFy5Eh07NgRv/76K4yMjKRlLVq0QEBAADZv3gwTE5NXtpOZmQlTU9MSibFOnTol0u7bcOnSJWRnZ+PTTz9F8+bNi7ROlSpV0KhRIwBAs2bNYG1tjSFDhmD16tWYOnVqkV+7cuXKxYqZNKtVq1Y4cOAAZs2ahQULFmg7HCUmJibSvgYA7du3R7Vq1bBo0SLMnDkThoaGRWpHLpcrtUNERffgwQP8+OOP+Oabb5SOfDRr1gz//PMPACAhIQHr169Xq1111u/duzfGjx+PH3/8EVOmTCnya5TZkZ+wsDDo6+vjxx9/VEp88uvRowecnZ2l5wMHDoS5uTnOnDmDNm3awMLCAi1btgQAREdHo0uXLqhYsSKMjY3h4eGBYcOG4d9//1Vpd9euXahduzbkcjnc3d0xf/78Al+/oGHo9PR0TJw4Ee7u7jAyMkKFChUQGBiIjIwMpXqKoemff/4ZXl5eMDU1Ra1atbBz506pTkhICL744gsAgLu7u3QY4MCBA6/sux07dsDX1xempqawsLBA69atceTIEaV+atq0KQCgV69ekMlk8PPze2WbBVH8qNy4cQMAkJeXh7lz56JatWqQy+Wwt7dH//79cfv2baX1Chq+3bx5Mxo2bAgrKyuYmpqiUqVKGDx4sFKdmzdv4tNPP4W9vT3kcjm8vLywYMECpUMhisMc8+fPx8KFC+Hu7g5zc3P4+voiLi5OZRsSEhLw4YcfwsbGBsbGxqhTpw42bdqkUi8uLg5NmjSBsbExnJ2dERwcjOzs7CL1U0JCAj755BO4ubnBxMQEbm5u6N27t9RvCorDTfv378eIESNgZ2cHW1tbdOvWDXfv3lWqm52djaCgIDg6OsLU1BRNmzbFsWPHihSPgqenJ4YMGYLFixerxFKQ1+1XwH+Hh8+dO4fevXvDysoKDg4OGDx4MNLS0tSKLz9DQ0PUq1cPmZmZ0pfm2bNn0aVLF5QrVw7GxsaoXbs21qxZo7ReQYe9/vnnHwwdOhQuLi6Qy+UoX748mjRpgj///FNp3ZUrV6JWrVowNjaGjY0NunbtqnKYV/Gdc+XKFXTo0AHm5uZwcXHBhAkTkJWVpVT3+fPnmDlzpvT5KF++PAYNGiRtj4Im3tv8FIdc1q1bh0mTJsHJyQnm5ubo3Lkz7t27h8ePH2Po0KGws7ODnZ0dBg0ahCdPnii1sXjxYjRr1gz29vYwMzODt7c35s6dq/IZEEIgLCwMrq6uMDY2Rv369REdHQ0/Pz+V75iiflcW5buhuIr6u6DYr0+fPo0ePXrAysoKNjY2GD9+PHJycnDx4kW0a9cOFhYWcHNzw9y5cwt8vWfPnmH8+PFwdHSEiYkJmjdvjpMnT6rUW716NTw9PaXvubVr1xbYXmhoKBo2bAgbGxtYWlqibt26WLFiBTR1P/PVq1cjJydHadQHAPT03iy1UGd9IyMj9OrVCz/99JN62yXKoJycHGFiYiJ8fX3VWm/AgAHC0NBQuLm5ifDwcLF3714RFRUlhBBi6dKlIjw8XOzYsUPExMSINWvWiFq1aglPT0/x/PlzqY0///xT6Ovri6ZNm4qtW7eKzZs3i/fff1+899574uXudHV1FQMGDJCeZ2RkiNq1aws7OzuxcOFC8eeff4pvv/1WWFlZiRYtWoi8vDypLgDh5uYmGjRoIDZt2iR2794t/Pz8hIGBgbh69aoQQohbt26J0aNHCwBi69at4siRI+LIkSMiLS2t0D745ZdfBADRpk0bsX37drFx40ZRr149YWRkJA4dOiSEEOLKlSti8eLFAoAICwsTR44cEefOnSu0zf379wsAYvPmzUrlv/32mwAgpkyZIoQQYujQoQKAGDVqlIiMjBQ//PCDKF++vHBxcRH//POP0vvk6uoqPY+NjRUymUx88sknYvfu3WLfvn1i1apVol+/flKd1NRUUaFCBVG+fHnxww8/iMjISDFq1CgBQIwYMUKql5SUJPVtu3btxPbt28X27duFt7e3KFeunHj06JFUd9++fcLIyEh88MEHYuPGjSIyMlIMHDhQABCrVq2S6p07d06YmpqK6tWri/Xr14vffvtNtG3bVtonkpKSCu07IYTYvHmzmD59uti2bZuIiYkRGzZsEM2bNxfly5dX6pdVq1YJAKJSpUpi9OjRIioqSixfvlyUK1dO+Pv7K7U5YMAAIZPJxBdffCH27NkjFi5cKCpUqCAsLS2V9
snCABABAQEiOTlZmJqaKvW1Io74+HiprCj7lRBCzJgxQwAQnp6eYvr06SI6OlosXLhQyOVyMWjQoNfGJYQQzZs3FzVq1FApr1u3rjAwMBCZmZniwoULwsLCQlSuXFmsXbtW7Nq1S/Tu3VsAEHPmzJHWUewP+d/Ptm3bivLly4uffvpJHDhwQGzfvl1Mnz5dbNiwQaoTFhYmAIjevXuLXbt2ibVr14pKlSoJKysrcenSJanegAEDhJGRkfDy8hLz588Xf/75p5g+fbqQyWQiNDRUqpebmyvatWsnzMzMRGhoqIiOjhbLly8XFSpUENWrVxeZmZlKbWrivVVQfH5dXV3FwIEDpc+mubm58Pf3F61btxYTJ04Ue/bsEXPmzBH6+vpi9OjRSm2OGzdOLF26VERGRop9+/aJb775RtjZ2am8p8HBwQKAGDp0qIiMjBTLli0T7733nnBychLNmzeX6hX1u7Io3w2FcXV1FR07dnxlnaL+LuTfr7/++msRHR0tgoKCpO+7atWqie+++05ER0eLQYMGCQBiy5YtKu+Bi4uL6NKli/j999/F//73P+Hh4SEsLS2l73wh/vv8vVzPxcVF6XtTCCEGDhwoVqxYIaKjo0V0dLT4+uuvhYmJidK+J8SL/S87O/u1j5ycHKX1WrRoIRo0aPDKPoyPj1f5jKmjKOtv3LhRABCnT58ucrtlMvlJSUkRAMQnn3yisiwnJ0fpzcqfUAwYMEAAECtXrnxl+3l5eSI7O1vcuHFDABC//fabtKxhw4bC2dlZPH36VCpLT08XNjY2r01+wsPDhZ6entKPhhBC/PrrrwKA2L17t1QGQDg4OIj09HSl7dbT0xPh4eFS2bx584r0AyvEix3c2dlZeHt7i9zcXKn88ePHwt7eXjRu3FgqKyyhKYii7saNG0V2drbIzMwUBw8eFB4eHkJfX1/8/fffIjExUQAQI0eOVFr36NGjSgmSEKrJz/z58wUApcTkZZMnTxYAxNGjR5XKR4wYIWQymbh48aIQ4r8fO29vb6UP8rFjxwQAsX79eqmsWrVqok6dOiI7O1upzU6dOgknJyepD3v16iVMTExESkqKVCcnJ0dUq1atyO9Nfjk5OeLJkyfCzMxMfPvtt1K54kvv5T6cO3euACCSk5OFEELq63HjxinVUyQo6v5ATp06Vejp6Ym///5bKQ7FfqzOfqX4kZg7d67S640cOVIYGxsrfV4Lo0h+FJ/xu3fvSu9/jx49hBBCfPLJJ0Iul4ubN28qrdu+fXthamoq7UsFJT/m5uYiMDCw0Nd/+PChMDExER06dFAqv3nzppDL5aJPnz5SmeI7Z9OmTUp1O3ToIDw9PaXn69evV/lBFOK/L/4lS5YIITT/3grx3+e3c+fOSvUCAwMFADFmzBil8o8++kjY2NgU2r7ih3Tt2rVCX19fPHjwQAghxIMHD4RcLhe9evVSqn/kyBEBQCn5Kep3ZVG+GwpTlOQnv1f9Lij26wULFiitU7t2bek/pwrZ2dmifPnyolu3blKZ4j2oW7eu0mfg+vXrwtDQUHz22WdCiP8+a4XVezn5yU/xvnz11VfC1ta2wN/G1z3yv0dCCGFqaiqGDx/+yn57G8nP5cuXBQCxdOnSIrdbZg97FaZevXowNDSUHgWdq9C9e3eVstTUVAwfPhwuLi4wMDCAoaEhXF1dAUAays7IyEB8fDy6desGY2NjaV0LCwt07tz5tbHt3LkTNWvWRO3atZGTkyM92rZtW+DhKn9/f1hYWEjPHRwcYG9vX6RDEAW5ePEi7t69i379+ikNK5qbm6N79+6Ii4tDZmZmsdoGXhwiMzQ0hKmpKZo1a4bc3Fz8+uuv8PHxwf79+wFA5TBggwYN4OXlhb179xba7vvvvw8A6NmzJzZt2lTgCdT79u1D9erV0aBBA6XygQMHQgiBffv2KZV37NgR+vr60nMfHx8A/x2iu3LlCi5cuIC+ffsCgNL71aFDByQnJ+PixYsAgP3796Nly5ZwcHCQ2tPX11cZCi7MkydPMGnSJHh4eMDAwAAGBgYwNzdHRkZGgVfLffjhh0rPX45d0deK2BV69uwJAwP1T/MLCgqCjY0NJk2aVODy4uxXBW3Ds2fPkJqaCuDFIdL8fZ6bm6tU/9y5c9Jn3NnZGQsWLEDfvn2xbNkyAC/2h5YtW8LFxUVpvYEDByIzM1PlcFx+DRo0wOrVqzFz5kzExcWpHLo5cuQInj59qrIvu7i4oEWLFir7skwmU/l+8PHxUfoc79y5E9bW1ujcubPSdteuXRuOjo7Sd4Om39v8Xr7yycvLCwBUTgb38vLCgwcPlA59nTx5Eh9++CFsbW2hr68PQ0ND9O/fH7m5ubh06RKAF4eGs7Ky0LNnT6X2GjVqpHKYu6jflUX5bngTRfldyK+gPpTJZGjfvr1UZmBgAA8PjwK/x/v06aN07oyrqysaN24sve+Kz1ph9V62b98+tGrVClZWVtL7Mn36dNy/f1/6rAEvDtvFx8e/9vHjjz9K6zx69AiZmZmwt7d/bT+WNEUM6rz/ZfKEZzs7O5iYmBS486xbtw6ZmZlITk5W+YIFAFNTU1haWiqV5eXloU2bNrh79y6mTZsGb29vmJmZIS8vD40aNcLTp08BAA8fPkReXh4cHR1V2i2o7GX37t3DlStXCj0Z8+XjyLa2tip15HK5FI+6FGfDOzk5qSxzdnZGXl4eHj58WOwTwOfMmYMWLVpAX18fdnZ2Sj88r3vtVyV0zZo1w/bt2/Hdd9+hf//+yMrKQo0aNTB16lT07t1bar+gyzwV53y9fCXAy30rl8sBQOrbe/fuAQAmTpyIiRMnFhiX4v26f/9+sfcJ4MUX3t69ezFt2jS8//77sLS0hEwmQ4cOHQp8r18Xu2JbX359AwODAvep17G0tMSXX36JwMBA6Us4v+LsV6/bhsGDByudn9O8eXOl/xxUrlwZGzZsgEwmg7GxMdzd3ZXav3//fqHx5I+5IBs3bsTMmTOxfPlyTJs2Debm5ujatSvmzp0LR0fH125vdHS0UpmpqanSf5YU2/vs2TPp+b179/Do0aNCz1/Mv68Bmntv87OxsVF6roilsPJnz57B3NwcN2/exAcffABPT098++23cHNzg7GxMY4dO4aAgACV/TL/fxIUXi4r6ndlUb4biquovwv5FdRXBb3/RkZGSE9PV1m/sO+Rv//+G0Dh77+iLP+0GseOHUObNm3g5+eHZcuWoWLFijAyMsL27dsxa9Yspfjfe+89VKxY8RW98UL+hEux/svbpg2KGNT5bSyTyY++vj5atGiBPXv2IDk5WelLqHr16gBQ6NwqL8/FA7w4MfLvv//G6tWrMWDAAKn8ypUrSvXKlSsHmUyGlJQUlTYKKnuZImlbuXJloctLkuLLMTk5WWXZ3bt3oaenh3LlyhW7/UqVKqF+/fqvfe2XP2R379597bZ36dIFXbp0QVZWFuLi4hAeHo4+ffrAzc0Nvr6+sLW1LXS7APX7VlE/ODgY3bp1K7COp6entG3F3SfS0tKwc+dOzJgxA5MnT5bK
s7Kyin2JuaKvU1JSUKFCBak8JydH7ctBFUaMGIFvv/0WkyZNwogRIwp8PU3uVyEhIUpz0eQfAQUgnSxbmDfZH+zs7BAREYGIiAjcvHkTO3bswOTJk5GamorIyMjXbm9xPseKk9cjIyMLXK7Y/pJ4b9/U9u3bkZGRga1bt0qjIgBU5gVTxK74j0V+KSkpSv95Uee78nXfDcVV1N8FTSrse0TRd/nf/9etu2HDBhgaGmLnzp1KCcr27dtV1n35PxuFyf+fEEUspWEqDEUM6nz2yuxhr+DgYOTm5mL48OFFvqqmMIqESPG/T4X8Q3wAYGZmhgYNGmDr1q1K/2t7/Pgxfv/999e+TqdOnXD16lXY2tqifv36Ko/iTFD18v+YX8XT0xMVKlTAunXrlM6Kz8jIwJYtW6QrdUpCixYtAAD/+9//lMrj4+ORmJgoXXX3OnK5HM2bN8ecOXMAQLoSomXLljh//jxOnDihVH/t2rWQyWTw9/dXK15PT09UqVIFf//9d4HvVf369aUfJH9/f+zdu1fpSz03NxcbN2587evIZDIIIVT2veXLl6sc6ikqxVUzv/zyi1L5pk2bkJOTU6w2jYyMMHPmTMTHx2Pz5s1Ky0piv3Jzc1Pqa0WiWVQtW7bEvn37VK6CW7t2LUxNTYt8eft7772HUaNGoXXr1tK+5evrCxMTE5V9+fbt29LhNnV16tQJ9+/fR25uboH7mmL7S+K9fVMFfX8KIaRDkAoNGzaEXC5X+VzExcWpjPwW57uysO8GTW4XoPq7oEnr169X+gzduHEDsbGx0vvu6ekJJyenQuvlJ5PJYGBgoHR4/+nTp/j5559VXrc4h72MjIxQqVIlrc6lp3Dt2jUA/w1+FEWZHPkBgCZNmmDx4sUYPXo06tati6FDh6JGjRrQ09NDcnIytmzZAgAqh7gKUq1aNVSuXBmTJ0+GEAI2Njb4/fffVYavAeDrr79Gu3bt0Lp1a0yYMAG5ubmYM2cOzMzMXpsBBwYGYsuWLWjWrBnGjRsHHx8f5OXl4ebNm9izZw8mTJiAhg0bqtUP3t7eAIBvv/0WAwYMgKGhITw9PVX+pwy8uHxw7ty56Nu3Lzp16oRhw4YhKysL8+bNw6NHjwqczVdTPD09MXToUHz//ffQ09ND+/btcf36dUybNg0uLi4YN25coetOnz4dt2/fRsuWLVGxYkU8evQI3377LQwNDaU5iMaNG4e1a9eiY8eO+Oqrr+Dq6opdu3ZhyZIlGDFixGsnaCzIjz/+iPbt26Nt27YYOHAgKlSogAcPHiAxMREnTpyQkoAvv/wSO3bsQIsWLTB9+nSYmppi8eLFKpfkFsTS0hLNmjXDvHnzYGdnBzc3N8TExGDFihWwtrZWO2bgxXkGn376KSIiImBoaIhWrVrh7NmzmD9/fpE+D4Xp3bs35s+fjz/++EOpXJv7VWFmzJiBnTt3wt/fH9OnT4eNjQ1++eUX7Nq1C3PnzoWVlVWB66WlpcHf3x99+vRBtWrVYGFhgfj4eERGRkojgNbW1pg2bRqmTJmC/v37o3fv3rh//z5CQ0NhbGyMGTNmqB3vJ598gl9++QUdOnTA2LFj0aBBAxgaGuL27dvYv38/unTpgq5du5bYe/smWrduDSMjI/Tu3RtBQUF49uwZli5diocPHyrVU1z6HR4ejnLlyqFr1664ffs2QkND4eTkpHS+WFG/K4vy3fAqKSkpBc6q7Obmhlq1ahX5d0FTUlNT0bVrV3z++edIS0vDjBkzYGxsLE3eqaenh6+//hqfffaZVO/Ro0cICQlRORTWsWNHLFy4EH369MHQoUNx//59zJ8/XyWZU2xvcf7z7efnp/J9oKDoV0VikpCQAHNzcwDAxx9/LNULCQlBaGgo9u/frzTdQVHXB14k0Pr6+mjWrFnRgy/W6delyKlTp8SgQYOEu7u7kMvlwtjYWHh4eIj+/fuLvXv3KtUdMGCAMDMzK7Cd8+fPi9atWwsLCwtRrlw50aNHD3Hz5k0BQMyYMUOp7o4dO4SPj48wMjIS7733npg9e7Z0tn9+L1/tJYQQT548EV9++aXw9PQURkZGwsrKSnh7e4tx48YpXS2El67IeFWbwcHBwtnZWejp6QkAYv/+/a/ss+3bt4uGDRsKY2NjYWZmJlq2bCkOHz6sVKc4V3u9rm5ubq6YM2eOqFq1qjA0NBR2dnbi008/Fbdu3VKq9/LVXjt37hTt27cXFSpUEEZGRsLe3l506NBB6RJqIYS4ceOG6NOnj7C1tRWGhobC09NTzJs3T+kKJMXVPfPmzVOJr6D3+u+//xY9e/YU9vb2wtDQUDg6OooWLVqIH374Qane4cOHRaNGjYRcLheOjo7iiy++ED/99FORrva6ffu26N69uyhXrpywsLAQ7dq1E2fPnlV5rwu6xFyI//o///uelZUlJkyYIOzt7YWxsbFo1KiROHLkSIH7T0EK2//27NkjXfnxchxF2a8Un5P8l/Dn37aiXBlX2KXuLztz5ozo3LmzsLKyEkZGRqJWrVoqV4y8fLXXs2fPxPDhw4WPj4+wtLQUJiYmwtPTU8yYMUNkZGQorbt8+XLpe8DKykp06dJFZUqIwr5zCvq+yM7OFvPnzxe1atUSxsbGwtzcXFSrVk0MGzZMXL58Waqn6fe2sM9vYftbQe/h77//LsVdoUIF8cUXX4g//vhDZb/My8sTM2fOFBUrVhRGRkbCx8dH7Ny5U9SqVUt07dpV6XWK8l1Z1O+Ggri6uhZ6VZOiH4v6u1DYfl3Y+//yPqx4D37++WcxZswYUb58eSGXy8UHH3wgEhISVNZfvny5qFKlijAyMhJVq1YVK1euVPneFEKIlStXCk9PTyGXy0WlSpVEeHi4WLFiRbGuQi3I3r17BQBx7NgxlWWF9e3L+/2ECROETCYTiYmJxVpfCCE++OADlasVX0f2/y9CRET01iUlJaFatWqYMWOGWjP0Uung4+ODJk2aYOnSpcVav0GDBnB1dVU5nF5UV69eRZUqVRAVFYXWrVsXeT0mP0RE9Fb8/fffWL9+PRo3bgxLS0tcvHgRc+fORXp6Os6ePVvglWBUukVGRqJr1664fPlyka4Yyy89PR3ly5fHqVOnpKkV1DVo0CDcvn1b7cORZfacHyIiKlvMzMyQkJCAFStW4NGjR7CysoKfnx9mzZrFxKeMateuHebNm4ekpCS1kx9LS0uV27yoIycnB5UrV1b7hsYAR36IiIhIx5TZS92JiIiIioPJDxEREekUJj9ERESkU975E57z8vJw9+5dWFhYFHhrCyIiIip9hBB4/PgxnJ2dlSbB1IR3Pvm5e/euyp2diYiIqGy4deuW2leSvc47n/wobvNw69YtrU3/TkREROpJT0+Hi4tLgbdrelPvfPKjONRlaWnJ5IeIiKiMKYlTVnjCMxEREemUUpP8hIeHQyaTITAwUCoTQiAkJATOzs4wMTGBn58fzp07p70giYiIqMwrFclPfHw8fvrpJ/j4+CiVz507FwsXLsSiRYsQHx8PR0dHtG7dGo8fP9ZSpERERFTWaf2
cnydPnqBv375YtmwZZs6cKZULIRAREYGpU6eiW7duAIA1a9bAwcEB69atw7BhwzQaR25uLrKzszXaJlFxGBoaQl9fX9thEBG9s7Se/AQEBKBjx45o1aqVUvKTlJSElJQUtGnTRiqTy+Vo3rw5YmNjNZb8CCGQkpKCR48eaaQ9Ik2wtraGo6Mj56YiIioBWk1+NmzYgBMnTiA+Pl5lWUpKCgCo3OnXwcEBN27cKLTNrKwspbvEpqenvzIGReJjb28PU1NT/tiQVgkhkJmZidTUVACAk5OTliMiInr3aC35uXXrFsaOHYs9e/bA2Ni40HovJyNCiFcmKOHh4QgNDS1SDLm5uVLiY2trW7TAiUqYiYkJACA1NRX29vY8BEZEpGFaO+H5+PHjSE1NRb169WBgYAADAwPExMTgu+++g4GBgTTioxgBUkhNTVUZDcovODgYaWlp0uPWrVuF1lWc42NqaqqBLSLSHMU+yfPQiIg0T2sjPy1btsSZM2eUygYNGoRq1aph0qRJqFSpEhwdHREdHY06deoAAJ4/f46YmBjMmTOn0HblcjnkcrlasfBQF5U23CeJiEqO1pIfCwsL1KxZU6nMzMwMtra2UnlgYCDCwsJQpUoVVKlSBWFhYTA1NUWfPn20ETIRERG9A7R+tderBAUF4enTpxg5ciQePnyIhg0bYs+ePSVyn493kUwmw7Zt2/DRRx9pOxQiIqJSo1QlPwcOHFB6LpPJEBISgpCQkLcei9vkXW/tta7P7qj2OikpKZg1axZ27dqFO3fuwN7eHrVr10ZgYCBatmxZAlEWn5+fH2rXro2IiAhth0JERFS6kh8qmuvXr6NJkyawtrbG3Llz4ePjg+zsbERFRSEgIAAXLlzQdohERESlVqm4vQWpZ+TIkZDJZDh27Bg+/vhjVK1aFTVq1MD48eMRFxdX6Hp37txBr169UK5cOdja2qJLly64fv26tDw+Ph6tW7eGnZ0drKys0Lx5c5w4cUKpDZlMhuXLl6Nr164wNTVFlSpVsGPHDrXid3Nzw8yZM9G/f3+Ym5vD1dUVv/32G/755x906dIF5ubm8Pb2RkJCgrTO/fv30bt3b1SsWBGmpqbw9vbG+vXrldp9/Pgx+vbtCzMzMzg5OeGbb76Bn5+f0v3inj9/jqCgIFSoUAFmZmZo2LCh0ojjjRs30LlzZ5QrVw5mZmaoUaMGdu/erdb2ERFR6cbkp4x58OABIiMjERAQADMzM5Xl1tbWBa6XmZkJf39/mJub4+DBg/jrr79gbm6Odu3a4fnz5wBeJA8DBgzAoUOHEBcXhypVqqBDhw4q91ILDQ1Fz549cfr0aXTo0AF9+/bFgwcP1NqOb775Bk2aNMHJkyfRsWNH9OvXD/3798enn36KEydOwMPDA/3794cQAgDw7Nkz1KtXDzt37sTZs2cxdOhQ9OvXD0ePHpXaHD9+PA4fPowdO3YgOjoahw4dUkneBg0ahMOHD2PDhg04ffo0evTogXbt2uHy5csAXsw4npWVhYMHD+LMmTOYM2cOzM3N1do2IiIq3Zj8lDFXrlyBEALVqlVTa70NGzZAT08Py5cvh7e3N7y8vLBq1SrcvHlTGvlo0aIFfPw6IdvCCV4VrPHjvBBkZmYiJiZGqa2BAweid+/e8PDwQFhYGDIyMnDs2DG14unQoQOGDRuGKlWqYPr06Xj8+DHef/999OjRA1WrVsWkSZOQmJiIe/fuAQAqVKiAiRMnonbt2qhUqRJGjx6Ntm3bYvPmzQBeJG5r1qzB/Pnz0bJlS9SsWROrVq1Cbm6u9JpXr17F+vXrsXnzZnzwwQeoXLkyJk6ciKZNm2LVqlUAgJs3b6JJkybw9vZGpUqV0KlTJzRr1kytbSMiotKN5/yUMYqREHXngTl+/DiuXLmicqXcs2fPcPXqVQAvJpD8OngSjh0+hEf3U5Gbm4vMzKe4efOm0jo+Pj7S32ZmZrCwsJBux1BU+dtQTFrp7e2tUpaamgpHR0fk5uZi9uzZ2LhxI+7cuSPdxkQx+nXt2jVkZ2ejQYMGUhtWVlbw9PSUnp84cQJCCFStWlUplqysLGmG7zFjxmDEiBHYs2cPWrVqhe7duyvFSkREZR+TnzKmSpUqkMlkSExMVOsS9ry8PNSrVw+//PKLyrLy5csDeDGic/NOCoJCwuDnaQ+5kRF823SRDospGBoaKj2XyWTIy8tTazvyt6FI5AoqU7S7YMECfPPNN4iIiIC3tzfMzMwQGBgoxVZYUqgoV7Slr6+P48ePq9wyQnFo67PPPkPbtm2xa9cu7NmzB+Hh4ViwYAFGjx6t1vYREVHpxcNeZYyNjQ3atm2LxYsXIyMjQ2V5YXenr1u3Li5fvgx7e3t4eHgoPaysrAAAhw4dQu/BQ/FBizao4eUJudwI//77b0luTpEdOnQIXbp0waeffopatWqhUqVK0nk6AFC5cmUYGhoqHX5LT09XqlOnTh3k5uYiNTVVpQ8cHR2lei4uLhg+fDi2bt2KCRMmYNmyZW9nI4mI6K1g8lMGLVmyBLm5uWjQoAG2bNmCy5cvIzExEd999x18fX0LXKdv376ws7NDly5dcOjQISQlJSEmJgZjx47F7du3AQAeHh7YuWUTrl2+iKMJJ9D381HSTTa1zcPDA9HR0YiNjUViYiKGDRumdN83CwsLDBgwAF988QX279+Pc+fOYfDgwdDT05NGg6pWrYq+ffuif//+2Lp1K5KSkhAfH485c+ZIV3QFBgYiKioKSUlJOHHiBPbt2wcvLy+tbDMREZUMJj9lkLu7O06cOAF/f39MmDABNWvWROvWrbF3714sXbq0wHVMTU1x8OBBvPfee+jWrRu8vLwwePBgPH36FJaWlgCAlStX4nHaI/Rq3xz9ho7BmGFDYG9v/zY3rVDTpk1D3bp10bZtW/j5+cHR0VHlsN/ChQvh6+uLTp06oVWrVmjSpAm8vLxgbGws1Vm1ahX69++PCRMmwNPTEx9++CGOHj0KFxcXAEBubi4CAgLg5eWFdu3awdPTE0uWLHmbm0pERCVMJvKfFPEOSk9Ph5WVFdLS0qQfeYVnz54hKSkJ7u7uSj+Quuz07UcAAB/Lpy8KLJ20F8wbysjIQIUKFbBgwQIMGTJE2+GohfsmEem6V/1+vyme8EzvjJMnT+LChQto0KAB0tLS8NVXXwEAunTpouXIiIioNGHyQ++U+fPn4+LFizAyMkK9evVw6NAh2NnZaTssIiIqRZj80DujTp06OH78uLbDICKiUo4nPBMREZFOYfJDREREOoXJDxEREekUJj9ERESkU5j8EBERkU5h8kNEREQ6hcmPDjpw4ABkMlmhN0EtLUJCQlC7dm1th0FERO8YzvNTmP3hb++1/IPVqj5w4ECsWbMGAGBgYAAXFxd069YNoaGhMDMze+36jRs3RnJysnQ397Lq+vXrcHd3x8mTJ5kkERFRkTH5KaPatWuHVatWITs7G4cOHcJnn32GjIyMQm9smp+RkREcHR3fQpRERESlDw97lVFyuRyOjo5wcXFBnz590LdvX2zfvh0AkJ
WVhTFjxsDe3h7GxsZo2rQp4uPjpXVfPux148YNdO7cGeXKlUPDqhXQtaUvdu/ZK9WPiYlBgwYNIJfL4eTkhMmTJyMnJ0da7ufnhzFjxiAoKAg2NjZwdHRESEiIUrxpaWkYOnQo7O3tYWlpiRYtWuDvv/9WqjN79mw4ODjAwsICQ4YMwbNnz9TqE8V2RUVFoU6dOjAxMUGLFi2QmpqKP/74A15eXrC0tETv3r2RmZkprRcZGYmmTZvC2toatra26NSpE65evarUdmxsLGrXrg1jY2PUr18f27dvh0wmw6lTp6Q658+fR4cOHWBubg4HBwf069cP//77r7T8119/hbe3N0xMTGBra4tWrVohIyNDrW0kIqI3x+TnHWFiYoLs7GwAQFBQELZs2YI1a9bgxIkT8PDwQNu2bfHgwYMC1w0ICEBWVhYOHjyILdGHERg8A+b/f/jszp076NChA95//338/fffWLp0KVasWIGZM2cqtbFmzRqYmZnh6NGjmDt3Lr766itER0cDAIQQ6NixI1JSUrB7924cP34cdevWRcuWLaWYNm3ahBkzZmDWrFlISEiAk5MTlixZUqy+CAkJwaJFixAbG4tbt26hZ8+eiIiIwLp167Br1y5ER0fj+++/l+pnZGRg/PjxiI+Px969e6Gnp4euXbsiLy8PAPD48WN07twZ3t7eOHHiBL7++mtMmjRJ6TWTk5PRvHlz1K5dGwkJCYiMjMS9e/fQs2dPaXnv3r0xePBgJCYm4sCBA+jWrRuEEMXaRiIiKj4e9noHHDt2DOvWrUPLli2lQ1+rV69G+/btAQDLli1DdHQ0VqxYgS+++EJl/Zs3b6J79+7w9vaGuP0IFV3d4GP5FACwZMkSuLi4YNGiRZDJZKhWrRru3r2LSZMmYfr06dDTe5E/+/j4YMaMGQCAKlWqYNGiRdi7dy9at26N/fv348yZM0hNTYVcLgfw4gak27dvx6+//oqhQ4ciIiICgwcPxmeffQYAmDlzJv7880+1R38U6zZp0gQAMGTIEAQHB+Pq1auoVKkSAODjjz/G/v37pQSme/fuSuuvWLEC9vb2OH/+PGrWrIlffvkFMpkMy5Ytg7GxMapXr447d+7g888/l9ZZunQp6tati7CwMKls5cqVcHFxwaVLl/DkyRPk5OSgW7ducHV1BQB4e3urvW1ERPTmOPJTRu3cuRPm5uYwNjaGr68vmjVrhu+//x5Xr15Fdna29OMPAIaGhmjQoAESExMLbGvMmDFSwrBkQTguJZ6VliUmJsLX1xcymUwqa9KkCZ48eYLbt29LZT4+PkptOjk5ITU1FQBw/PhxPHnyBLa2tjA3N5ceSUlJ0uElxevk9/Lzosofi4ODA0xNTaXER1GmiA0Arl69ij59+qBSpUqwtLSEu7s7gBdJIQBcvHgRPj4+MDY2ltZp0KCB0mseP34c+/fvV9q+atWqSe3XqlULLVu2hLe3N3r06IFly5bh4cOHxdo+IiJ6Mxz5KaP8/f2xdOlSGBoawtnZGYaGhgBeHF4BoJSsAC8OPb1cpvDZZ5+hbdu22LVrFzb/tgsrFn+D5FnTMXrYkALXUxyqyV+ueH0FmUwmHTbKy8uDk5MTDhw4oPLa1tbWRd/oIsofi0wme2VsANC5c2e4uLhg2bJlcHZ2Rl5eHmrWrInnz58DKLjvXj5clZeXh86dO2POnDkq8Tg5OUFfXx/R0dGIjY3Fnj178P3332Pq1Kk4evSolGwREdHbwZGfMsrMzAweHh5wdXVV+nH38PCAkZER/vrrL6ksOzsbCQkJ8PLyKrQ9FxcXDB8+HN8s+xn9hwZg2Zp1AIDq1asjNjZW6cc+NjYWFhYWqFChQpFirVu3LlJSUmBgYAAPDw+lh52dHQDAy8sLcXFxSuu9/Lwk3L9/H4mJifjyyy/RsmVLeHl5qYzIVKtWDadPn0ZWVpZUlpCQoFSnbt26OHfuHNzc3FS2UTH9gEwmQ5MmTRAaGoqTJ0/CyMgI27ZtK/FtJCIiZUx+3jFmZmYYMWIEvvjiC0RGRuL8+fP4/PPPkZmZiSFDhhS4TmBgIKKiopCUlITEM3/j2OFD8KrqAQAYOXIkbt26hdGjR+PChQv47bffMGPGDIwfP1463+d1WrVqBV9fX3z00UeIiorC9evXERsbiy+//FJKIsaOHYuVK1di5cqVuHTpEmbMmIFz585pplNeoVy5crC1tcVPP/2EK1euYN++fRg/frxSnT59+iAvLw9Dhw5FYmIioqKiMH/+fAD/jX4FBATgwYMH6N27N44dO4Zr165hz549GDx4MHJzc3H06FGEhYUhISEBN2/exNatW/HPP/+8MiElIqKSwcNe76DZs2cjLy8P/fr1w+PHj1G/fn1ERUWhXLlyBdbPzc1FQEAAbt++DVNzCzTxa4kl86cBACpUqIDdu3fjiy++QK1atWBjY4MhQ4bgyy+/LHI8MpkMu3fvxtSpUzF48GD8888/cHR0RLNmzeDg4AAA6NWrF65evYpJkybh2bNn6N69O0aMGIGoqKg375BX0NPTw4YNGzBmzBjUrFkTnp6e+O677+Dn5yfVsbS0xO+//44RI0agdu3a8Pb2xvTp09GnTx/pPCBnZ2ccPnwYkyZNQtu2bZGVlQVXV1e0a9cOenp6sLS0xMGDBxEREYH09HS4urpiwYIF0knpRET09sjEO36tbXp6OqysrJCWlgZLS0ulZc+ePUNSUhLc3d2VTmbVZadvPwIA6WovWDq9+Dc9+b9KijId9ssvv2DQoEFIS0uDiYmJxtvnvklEuu5Vv99viiM/REWwdu1aVKpUCRUqVMDff/+NSZMmoWfPniWS+BARUcli8kNUBCkpKZg+fTpSUlLg5OSEHj16YNasWdoOi4iIioHJD1ERBAUFISgoSNthEBGRBvBqLyIiItIpWk1+li5dCh8fH1haWsLS0hK+vr74448/pOUDBw6ETCZTejRq1Ejjcbzj53xTGcR9koio5Gj1sFfFihUxe/ZseHi8mFNmzZo16NKlC06ePIkaNWoAANq1a4dVq1ZJ6xgZGWns9RWTA2ZmZvLEVSpVFHedf3l2aiIienNaTX46d+6s9HzWrFlYunQp4uLipORHLpfD0dGxRF5fX18f1tbW0n2eTE1NC70FhK4QOS9u6fDs+Ys7xENxY1HF8/xlpHFCCGRmZiI1NRXW1tbQ19fXdkhERO+cUnPCc25uLjZv3oyMjAylG1oeOHAA9vb2sLa2RvPmzTFr1izY29sX2k5WVpbSbQjS09Nf+bqKxCr/jS51WerDF/P7GJm8SIJgnPHi32dp/1VSlFGJsba2LrGkn4hI12k9+Tlz5gx8fX3x7NkzmJubY9u2bahevToAoH379ujRowdcXV2RlJSEadOmoUWLFjh+/DjkcnmB7YWHhyM0NLTIry+TyeDk5AR7e3tkZ2e/foV33MHfggEAH/i6vSjwGvbi36M//ldJUUYlwtDQkCM+REQlSOvJj6enJ06dOoVHjx5hy5YtGDBgAGJiYlC9enX06tVLqlezZk3Ur18frq6u2LVrF
7p161Zge8HBwUr3ZkpPT4eLi8tr49DX1+cPDoCnTx4BAIzzXpxzAsXsworn+cuIiIjKIK0nP0ZGRtIJz/Xr10d8fDy+/fZb/Pjjjyp1nZyc4OrqisuXLxfanlwuL3RUiIiIiKjUzfMjhFA6Zye/+/fv49atW3By4r2liIiIqHi0OvIzZcoUtG/fHi4uLnj8+DE2bNiAAwcOIDIyEk+ePEFISAi6d+8OJycnXL9+HVOmTIGdnR26du2qzbCJiIioDNNq8nPv3j3069cPycnJsLKygo+PDyIjI9G6dWs8ffoUZ86cwdq1a/Ho0SM4OTnB398fGzduhIWFhTbDJiIiojJMq8nPihUrCl1mYmKCqKiotxgNERER6YJSd84PERERUUli8kNEREQ6hckPERER6RQmP0RERKRTmPwQERGRTmHyQ0RERDqFyQ8RERHpFCY/REREpFOY/BAREZFOYfJDREREOoXJDxEREekUrd7bi0qJ/eHajoCIiOit4cgPERER6RQmP0RERKRTmPwQERGRTmHyQ0RERDqFyQ8RERHpFCY/REREpFOY/BAREZFOYfJDREREOoWTHBIi9l4qtCzQ/21HQ0REVLI48kNEREQ6hckPERER6RQmP0RERKRTmPwQERGRTmHyQ0RERDqFyQ8RERHpFCY/REREpFOY/BAREZFOYfJDREREOoXJDxEREekUJj9ERESkU5j8EBERkU5h8kNEREQ6RavJz9KlS+Hj4wNLS0tYWlrC19cXf/zxh7RcCIGQkBA4OzvDxMQEfn5+OHfunBYjJiIiorJOq8lPxYoVMXv2bCQkJCAhIQEtWrRAly5dpARn7ty5WLhwIRYtWoT4+Hg4OjqidevWePz4sTbDJiIiojJMq8lP586d0aFDB1StWhVVq1bFrFmzYG5ujri4OAghEBERgalTp6Jbt26oWbMm1qxZg8zMTKxbt06bYRMREVEZVmrO+cnNzcWGDRuQkZEBX19fJCUlISUlBW3atJHqyOVyNG/eHLGxsYW2k5WVhfT0dKUHERERkYLWk58zZ87A3Nwccrkcw4cPx7Zt21C9enWkpKQAABwcHJTqOzg4SMsKEh4eDisrK+nh4uJSovG/69wm74Lb5F3aDoOIiEhjtJ78eHp64tSpU4iLi8OIESMwYMAAnD9/Xlouk8mU6gshVMryCw4ORlpamvS4detWicVOREREZY+BtgMwMjKCh4cHAKB+/fqIj4/Ht99+i0mTJgEAUlJS4OTkJNVPTU1VGQ3KTy6XQy6Xl2zQREREVGZpfeTnZUIIZGVlwd3dHY6OjoiOjpaWPX/+HDExMWjcuLEWIyQiIqKyTKsjP1OmTEH79u3h4uKCx48fY8OGDThw4AAiIyMhk8kQGBiIsLAwVKlSBVWqVEFYWBhMTU3Rp08fbYZNREREZZhWk5979+6hX79+SE5OhpWVFXx8fBAZGYnWrVsDAIKCgvD06VOMHDkSDx8+RMOGDbFnzx5YWFhoM2wiIiIqw7Sa/KxYseKVy2UyGUJCQhASEvJ2AiIiIqJ3Xqk754eIiIioJDH5ISIiIp2i9Uvdqex5edLD67M7aikSIiIi9XHkh4iIiHQKkx8iIiLSKUx+iIiISKcw+SEiIiKdwuSHiIiIdAqTHyIiItIpTH6IiIhIpzD5ISIiIp3C5IeIiIh0CpMfIiIi0ilMfoiIiEinMPkhIiIincLkh4iIiHSK2snPmjVrsGvXf3f1DgoKgrW1NRo3bowbN25oNDgiIiIiTVM7+QkLC4OJiQkA4MiRI1i0aBHmzp0LOzs7jBs3TuMBEhEREWmSgbor3Lp1Cx4eHgCA7du34+OPP8bQoUPRpEkT+Pn5aTo+IiIiIo1Se+TH3Nwc9+/fBwDs2bMHrVq1AgAYGxvj6dOnmo2OiIiISMPUHvlp3bo1PvvsM9SpUweXLl1Cx44dAQDnzp2Dm5ubpuMjIiIi0ii1R34WL14MX19f/PPPP9iyZQtsbW0BAMePH0fv3r01HiARERGRJqk98mNtbY1FixaplIeGhmokICIiIqKSVKx5fg4dOoRPP/0UjRs3xp07dwAAP//8M/766y+NBkdERESkaWonP1u2bEHbtm1hYmKCEydOICsrCwDw+PFjhIWFaTxAIiIiIk1SO/mZOXMmfvjhByxbtgyGhoZSeePGjXHixAmNBkdERESkaWonPxcvXkSzZs1Uyi0tLfHo0SNNxERERERUYtROfpycnHDlyhWV8r/++guVKlXSSFBEREREJUXt5GfYsGEYO3Ysjh49CplMhrt37+KXX37BxIkTMXLkyJKIkYiIiEhj1L7UPSgoCGlpafD398ezZ8/QrFkzyOVyTJw4EaNGjSqJGKmERHw5qFjrBRr8+lJJxzcPhoiI6C1RO/kBgFmzZmHq1Kk4f/488vLyUL16dZibm2s6NiIiIiKNK1byAwCmpqaoX7++JmMhIiIiKnFqJz9du3aFTCZTKZfJZDA2NoaHhwf69OkDT09PjQRIREREpElqn/BsZWWFffv24cSJE1ISdPLkSezbtw85OTnYuHEjatWqhcOHD2s8WCIiIqI3pXby4+joiD59+uDatWvYsmULtm7diqtXr+LTTz9F5cqVkZiYiAEDBmDSpEmvbSs8PBzvv/8+LCwsYG9vj48++ggXL15UqjNw4EDIZDKlR6NGjdQNm4iIiAhAMZKfFStWIDAwEHp6/62qp6eH0aNH46effoJMJsOoUaNw9uzZ17YVExODgIAAxMXFITo6Gjk5OWjTpg0yMjKU6rVr1w7JycnSY/fu3eqGTURERASgGOf85OTk4MKFC6hatapS+YULF5CbmwsAMDY2LvC8oJdFRkYqPV+1ahXs7e1x/PhxpVmk5XI5HB0d1Q2ViIiISIXayU+/fv0wZMgQTJkyBe+//z5kMhmOHTuGsLAw9O/fH8CLEZ0aNWqoHUxaWhoAwMbGRqn8wIEDsLe3h7W1NZo3b45Zs2bB3t6+wDaysrKkm60CQHp6utpxEBER0btL7eTnm2++gYODA+bOnYt79+4BABwcHDBu3DjpPJ82bdqgXbt2arUrhMD48ePRtGlT1KxZUypv3749evToAVdXVyQlJWHatGlo0aIFjh8/DrlcrtJOeHg4QkND1d0sehP7w//72z9Ye3EQEREVgUwIIYq7smJUxdLS8o0DCQgIwK5du/DXX3+hYsWKhdZLTk6Gq6srNmzYgG7duqksL2jkx8XFBWlpaRqJ811SlBmeI3I+BgBcb3v6v7K9l5TqBLbMdwiUyQ8REWlAeno6rKysSuT3u9iTHAKaSXoAYPTo0dixYwcOHjz4ysQHeHFjVVdXV1y+fLnA5XK5vMARISIiIiKgmMnPr7/+ik2bNuHmzZt4/vy50rITJ04UuR0hBEaPHo1t27bhwIEDcHd3f+069+/fx61bt+Dk5KR23ERERERqX+r+3XffYdCgQbC3t8fJkyfRoEED2Nra4tq1a2jfvr1abQUEBOB///sf1q1bBwsLC6SkpCAlJQVPnz4FADx58gQTJ07EkSNHcP36dRw4cACd
O3eGnZ0dunbtqm7oREREROonP0uWLMFPP/2ERYsWwcjICEFBQYiOjsaYMWOkq7WKaunSpUhLS4Ofnx+cnJykx8aNGwEA+vr6OHPmDLp06YKqVatiwIABqFq1Ko4cOQILCwt1QyciIiJS/7DXzZs30bhxYwCAiYkJHj9+DODFJfCNGjXCokWLitzW6861NjExQVRUlLohEhERERWqWLe3uH//PgDA1dUVcXFxAICkpKTXJjNERERE2qZ28tOiRQv8/vvvAIAhQ4Zg3LhxaN26NXr16sXzcIiIiKjUU/uw108//YS8vDwAwPDhw2FjY4O//voLnTt3xvDhwzUeIJUOL8/tQ0REVFapnfzo6ekp3dS0Z8+e6Nmzp0aDIiIiIiopxZrn59mzZzh9+jRSU1OlUSCFDz/8UCOBEREREZUEtZOfyMhI9O/fH//++6/KMplMJt3ZnYiIiKg0UvuE51GjRqFHjx5ITk5GXl6e0oOJDxEREZV2aic/qampGD9+PBwcHEoiHiIiIqISpXby8/HHH+PAgQMlEAoRERFRyVP7nJ9FixahR48eOHToELy9vWFoaKi0fMyYMRoLjoiIiEjT1E5+1q1bh6ioKJiYmODAgQOQyWTSMplMxuSHiIiISjW1k58vv/wSX331FSZPnqw03w8RERFRWaB29vL8+XP06tWLiQ8RERGVSWpnMAMGDMDGjRtLIhYiIiKiEqf2Ya/c3FzMnTsXUVFR8PHxUTnheeHChRoLjoiIiEjT1E5+zpw5gzp16gAAzp49q7Qs/8nPRERERKWR2snP/v37SyIOIiIioreCZy0TERGRTinyyE+3bt2KVG/r1q3FDoaIiIiopBU5+bGysirJOIiIiIjeiiInP6tWrSrJOIiIiIjeCp7zQ0RERDqFyQ8RERHpFCY/REREpFOY/BAREZFOKVLyU7duXTx8+BAA8NVXXyEzM7NEgyIiIiIqKUVKfhITE5GRkQEACA0NxZMnT0o0KCIiIqKSUqRL3WvXro1BgwahadOmEEJg/vz5MDc3L7Du9OnTNRogERERkSYVKflZvXo1ZsyYgZ07d0Imk+GPP/6AgYHqqjKZjMkPERERlWpFSn48PT2xYcMGAICenh727t0Le3v7Eg2M1OM2eRcA4PrsjlqOhIiIqHRT+67ueXl5JREHERER0VuhdvIDAFevXkVERAQSExMhk8ng5eWFsWPHonLlypqOj4iIiEij1J7nJyoqCtWrV8exY8fg4+ODmjVr4ujRo6hRowaio6NLIkYiIiIijVF75Gfy5MkYN24cZs+erVI+adIktG7dWmPBEREREWma2iM/iYmJGDJkiEr54MGDcf78ebXaCg8Px/vvvw8LCwvY29vjo48+wsWLF5XqCCEQEhICZ2dnmJiYwM/PD+fOnVM3bCIiIiIAxUh+ypcvj1OnTqmUnzp1Su0rwGJiYhAQEIC4uDhER0cjJycHbdq0kSZUBIC5c+di4cKFWLRoEeLj4+Ho6IjWrVvj8ePH6oZOREREpP5hr88//xxDhw7FtWvX0LhxY8hkMvz111+YM2cOJkyYoFZbkZGRSs9XrVoFe3t7HD9+HM2aNYMQAhEREZg6dSq6desGAFizZg0cHBywbt06DBs2TN3wiYiISMepnfxMmzYNFhYWWLBgAYKDgwEAzs7OCAkJwZgxY94omLS0NACAjY0NACApKQkpKSlo06aNVEcul6N58+aIjY0tMPnJyspCVlaW9Dw9Pf2NYiIiIqJ3i9rJj0wmw7hx4zBu3Djp0JOFhcUbByKEwPjx49G0aVPUrFkTAJCSkgIAcHBwUKrr4OCAGzduFNhOeHg4QkND3zieskox2WFBOAEiERFRMc75yc/CwkIjiQ8AjBo1CqdPn8b69etVlslkMqXnQgiVMoXg4GCkpaVJj1u3bmkkPiIiIno3FGuSQ00bPXo0duzYgYMHD6JixYpSuaOjI4AXI0BOTk5SeWpqqspokIJcLodcLi/ZgImIiKjMeqORnzclhMCoUaOwdetW7Nu3D+7u7krL3d3d4ejoqDR54vPnzxETE4PGjRu/7XCJiIjoHaDVkZ+AgACsW7cOv/32GywsLKRzfKysrGBiYgKZTIbAwECEhYWhSpUqqFKlCsLCwmBqaoo+ffpoM3QiIiIqo9Qa+cnOzoa/vz8uXbqkkRdfunQp0tLS4OfnBycnJ+mxceNGqU5QUBACAwMxcuRI1K9fH3fu3MGePXs0dq4RERER6Ra1Rn4MDQ1x9uzZQk82VpcQ4rV1ZDIZQkJCEBISopHXJCIiIt2m9jk//fv3x4oVK0oiFiIiIqISp/Y5P8+fP8fy5csRHR2N+vXrw8zMTGn5woULNRYcERERkaapnfycPXsWdevWBQCVc380dTiMiIiIqKSonfzs37+/JOIgIiIieiuKPc/PlStXEBUVhadPnwIo2snLRERERNqmdvJz//59tGzZElWrVkWHDh2QnJwMAPjss8/Uvqs7ERER0dumdvIzbtw4GBoa4ubNmzA1NZXKe/XqhcjISI0GR0RERKRpap/zs2fPHkRFRSndgwsAqlSpUuid1omIiIhKC7VHfjIyMpRGfBT+/fdf3lCUiIiISj21k59mzZph7dq10nOZTIa8vDzMmzcP/v7+Gg2OiIiISNPUPuw1b948+Pn5ISEhAc+fP0dQUBDOnTuHBw8e4PDhwyURIxEREZHGqJ38VK9eHadPn8bSpUuhr6+PjIwMdOvWDQEBAXByciqJGHWS2+RdAIDrszu+skwh0OBXAEBEzscajUPRLhER0btC7eQHABwdHREaGqrpWIiIiIhKXLGSn4cPH2LFihVITEyETCaDl5cXBg0aBBsbG03HR0RERKRRap/wHBMTA3d3d3z33Xd4+PAhHjx4gO+++w7u7u6IiYkpiRiJiIiINEbtkZ+AgAD07NlTOucHAHJzczFy5EgEBATg7NmzGg+SiIiISFPUHvm5evUqJkyYICU+AKCvr4/x48fj6tWrGg2OiIiISNPUTn7q1q2LxMRElfLExETUrl1bEzERERERlZgiHfY6ffq09PeYMWMwduxYXLlyBY0aNQIAxMXFYfHixZg9e3bJRElERESkIUVKfmrXrg2ZTAYhhFQWFBSkUq9Pnz7o1auX5qIjIiIi0rAiJT9JSUklHQcRERHRW1Gk5MfV1bWk4yAiIiJ6K4o1yeGdO3dw+PBhpKamIi8vT2nZmDFjNBIYERERUUlQO/lZtWoVhg8fDiMjI9ja2kImk0nLZDIZkx8iIiIq1dROfqZPn47p06cjODgYenpqXylPREREpFVqZy+ZmZn45JNPmPgQERFRmaR2BjNkyBBs3ry5JGIhIiIiKnFqH/YKDw9Hp06dEBkZCW9vbxgaGiotX7hwocaCIyIiItI0tZOfsLAwREVFwdPTEwBUTngmIiIiKs3UTn4WLlyIlStXYuDAgSUQTtnnNnkXAOD67I4abzvQ4Nf//0vzbb+JiL2X/vs7Snn7S7I/iIiIikPtc37kcjmaNGlSErEQERERlTi1k5+xY8fi+++/L4lYiIi
IiEqc2oe9jh07hn379mHnzp2oUaOGygnPW7du1VhwRERERJqmdvJjbW2Nbt26lUQsRERERCWuWLe30JSDBw9i3rx5OH78OJKTk7Ft2zZ89NFH0vKBAwdizZo1Sus0bNgQcXFxGouBiIiIdItWp2nOyMhArVq1sGjRokLrtGvXDsnJydJj9+7dbzFCIiIieteoPfLj7u7+yvl8rl27VuS22rdvj/bt27+yjlwuh6OjY5HbJCIiInoVtZOfwMBApefZ2dk4efIkIiMj8cUXX2gqLsmBAwdgb28Pa2trNG/eHLNmzYK9vX2h9bOyspCVlSU9T09P13hMREREVHapnfyMHTu2wPLFixcjISHhjQPKr3379ujRowdcXV2RlJSEadOmoUWLFjh+/DjkcnmB64SHhyM0NFSjcRTJ/vD//8OnyKsoJgAENDcJoGIixIicjwt9vbc54aAiHrfJ/5VxwkMiItImjZ3z0759e2zZskVTzQEAevXqhY4dO6JmzZro3Lkz/vjjD1y6dAm7du0qdJ3g4GCkpaVJj1u3bmk0JiIiIirb1B75Kcyvv/4KGxsbTTVXICcnJ7i6uuLy5cuF1pHL5YWOChERERGpnfzUqVNH6YRnIQRSUlLwzz//YMmSJRoN7mX379/HrVu34OTkVKKvQ0RERO8utZOf/PPwAICenh7Kly8PPz8/VKtWTa22njx5gitXrkjPk5KScOrUKdjY2MDGxgYhISHo3r07nJyccP36dUyZMgV2dnbo2rWrumETERERAShG8jNjxgyNvXhCQgL8/f2l5+PHjwcADBgwAEuXLsWZM2ewdu1aPHr0CE5OTvD398fGjRthYWGhsRiIiIhIt2jsnJ/i8PPzgxCi0OVRUVFvMRoiIiLSBUVOfvT09F45uSEAyGQy5OTkvHFQRERERCWlyMnPtm3bCl0WGxuL77///pWjOERERESlQZGTny5duqiUXbhwAcHBwfj999/Rt29ffP311xoNTpcpJgcECp8QMP8kiYFaPYBJRERUdhRrksO7d+/i888/h4+PD3JycnDq1CmsWbMG7733nqbjIyIiItIotZKftLQ0TJo0CR4eHjh37hz27t2L33//HTVr1iyp+IiIiIg0qsgHS+bOnYs5c+bA0dER69evL/AwGBEREVFpV+TkZ/LkyTAxMYGHhwfWrFmDNWvWFFhv69atGguOiIiISNOKnPz079//tZe6ExEREZV2RU5+Vq9eXYJhEBEREb0dxbrai4iIiKisYvJDREREOoVT472p/eFKT181OaFiUsLrswufuFAd/72WmnX2n9bI67/SS/1S4DL/4JKPg4iI6CUc+SEiIiKdwuSHiIiIdAqTHyIiItIpTH6IiIhIpzD5ISIiIp3C5IeIiIh0CpMfIiIi0ilMfoiIiEincJJDDYnYe+m1dV41AaI6FJMlBpbCd++/bayq1TiIiIgKw5EfIiIi0ilMfoiIiEinMPkhIiIincLkh4iIiHQKkx8iIiLSKUx+iIiISKcw+SEiIiKdwuSHiIiIdEopnCbv3aCYiBAArrc9rcVIVBVlQsa3QdFH12e/2aSPRERE6uDIDxEREekUJj9ERESkU5j8EBERkU5h8kNEREQ6RavJz8GDB9G5c2c4OztDJpNh+/btSsuFEAgJCYGzszNMTEzg5+eHc+fOaSdYIiIieidoNfnJyMhArVq1sGjRogKXz507FwsXLsSiRYsQHx8PR0dHtG7dGo8fP37LkRIREdG7QquXurdv3x7t27cvcJkQAhEREZg6dSq6desGAFizZg0cHBywbt06DBs27G2GSkRERO+IUnvOT1JSElJSUtCmTRupTC6Xo3nz5oiNjS10vaysLKSnpys9iIiIiBRK7SSHKSkpAAAHBwelcgcHB9y4caPQ9cLDwxEaGlqisalLZVLB/eH//4fPW4+lNPivP3Rz+4mISLtK7ciPgkwmU3ouhFApyy84OBhpaWnS49atWyUdIhEREZUhpXbkx9HREcCLESAnJyepPDU1VWU0KD+5XA65XF7i8REREVHZVGpHftzd3eHo6Ijo6Gip7Pnz54iJiUHjxo21GBkRERGVZVod+Xny5AmuXLkiPU9KSsKpU6dgY2OD9957D4GBgQgLC0OVKlVQpUoVhIWFwdTUFH369NFi1ERERFSWaTX5SUhIgL+/v/R8/PjxAIABAwZg9erVCAoKwtOnTzFy5Eg8fPgQDRs2xJ49e2BhYaGtkImIiKiM02ry4+fnByFEoctlMhlCQkIQEhLy9oIiIiKid1qpPeeHiIiIqCQw+SEiIiKdUmovdS/rAg1+VbNOR422rW0qEzsWl2JCSP9gzbRHREQ6jyM/REREpFOY/BAREZFOYfJDREREOoXJDxEREekUJj9ERESkU5j8EBERkU5h8kNEREQ6hckPERER6RQmP0RERKRTOMPzGyrOTMYFrvP/Mxm7RfkAAAINlMvfRdJM1ftPv/iXszgTEdFbwJEfIiIi0ilMfoiIiEinMPkhIiIincLkh4iIiHQKkx8iIiLSKUx+iIiISKcw+SEiIiKdwuSHiIiIdAonOSxlpIn//l9xJlEsjV7ergLlm9Dxv8keX2x/oH+JhEVERDqIIz9ERESkU5j8EBERkU5h8kNEREQ6hckPERER6RQmP0RERKRTmPwQERGRTmHyQ0RERDqFyQ8RERHpFE5ySKVG/gkdFZMbShQTIPoHv8WIiIjoXcSRHyIiItIpTH6IiIhIpzD5ISIiIp1SqpOfkJAQyGQypYejo6O2wyIiIqIyrNSf8FyjRg38+eef0nN9fX0tRkNERERlXalPfgwMDDjaQ0RERBpTqg97AcDly5fh7OwMd3d3fPLJJ7h27Zq2QyIiIqIyrFSP/DRs2BBr165F1apVce/ePcycORONGzfGuXPnYGtrW+A6WVlZyMrKkp6np6e/rXCJiIioDJAJIYS2gyiqjIwMVK5cGUFBQRg/fnyBdUJCQhAaGqpSnpaWBktLS43HFPHlII23SaoCW1Z98QcnOSQi0gnp6emwsrIqkd/vUn/YKz8zMzN4e3vj8uXLhdYJDg5GWlqa9Lh169ZbjJCIiIhKu1J92OtlWVlZSExMxAcffFBoHblcDrlc/hajIiIiorKkVI/8TJw4ETExMUhKSsLRo0fx8ccfIz09HQMGDNB2aERERFRGleqRn9u3b6N37974999/Ub58eTRq1AhxcXFwdXXVdmhERERURpXq5GfDhg3aDoGIiIjeMaX6sBcRERGRpjH5ISIiIp3C5IeIiIh0Sqk+54dIIWLvpRf/Ru2Syq7P7ljk9d0mv1jvetvT/xUqJkzcH678nIiI3mkc+SEiIiKdwuSHiIiIdAqTHyIiItIpTH6IiIhIpzD5ISIiIp3C5IeIiIh0CpMfIiIi0ilMfoiIiEincJJDKlMCDX7N96zwSQ6lSQ3/fyJExXoRe/O1hXCNx6drXu5nIqKygCM/REREpFOY/BAREZFOYfJDREREOoXJDxEREekUJj9ERESkU5j8EBERkU5h8kNEREQ6hckPERER6RROck
hlVsSXg178m/MxgIIn2lPUUYdi4j6lNve/mBAxYu8lAEBgy6qFrx/lo7p+MV6/oHWLu+yd9//vD/yDtRsHEZUJHPkhIiIincLkh4iIiHQKkx8iIiLSKUx+iIiISKcw+SEiIiKdwuSHiIiIdAqTHyIiItIpTH6IiIhIp8iEEELbQZSk9PR0WFlZIS0tDZaWlhpvvziT6FHpopiwUDGBYX6KCRQDDX597frSOq9o51UTMSraUUyS+KrXlF575irp75cnOcy/byrqvVxHaULHtqdf/KHGRIFSe4p18ytKO4rJCfO3+f/br9ZkjW9hkkPpfcrX528k/7a/Iu63OnnlOz5ZZIETmOpwHG+shPeXkvz95sgPERER6RQmP0RERKRTmPwQERGRTikTyc+SJUvg7u4OY2Nj1KtXD4cOHdJ2SERERFRGlfrkZ+PGjQgMDMTUqVNx8uRJfPDBB2jfvj1u3ryp7dCIiIioDCr1yc/ChQsxZMgQfPbZZ/Dy8kJERARcXFywdOlSbYdGREREZVCpTn6eP3+O48ePo02bNkrlbdq0QWxsrJaiIiIiorLMQNsBvMq///6L3NxcODg4KJU7ODggJSWlwHWysrKQlZUlPU9LSwPwYr6AkvAs63mJtEtvT3rGMwAFv5d5OZkvluUW/j4r1ld4VTsF7YeK+op28rJe/5rSa+drT7Geoix/HIqyl+sonitthxqfFam9l/qgyO0UsN7LMRZJMWJXl/Q+aeo18m/7K9osVn8U11voR21S2t+1uI2lJY43VsL7i6JvSmQ6QlGK3blzRwAQsbGxSuUzZ84Unp6eBa4zY8YMAYAPPvjggw8++HgHHrdu3dJ4flGqR37s7Oygr6+vMsqTmpqqMhqkEBwcjPHjx0vP8/Ly8ODBA9ja2kImk5VovO+S9PR0uLi44NatWyUyM7auYD++OfahZrAfNYP9+OaK2odCCDx+/BjOzs4aj6FUJz9GRkaoV68eoqOj0bVrV6k8OjoaXbp0KXAduVwOuVyuVGZtbV2SYb7TLC0t+QHXAPbjm2Mfagb7UTPYj2+uKH1oZWVVIq9dqpMfABg/fjz69euH+vXrw9fXFz/99BNu3ryJ4cOHazs0IiIiKoNKffLTq1cv3L9/H1999RWSk5NRs2ZN7N69G66urtoOjYiIiMqgUp/8AMDIkSMxcuRIbYehU+RyOWbMmKFyCJHUw358c+xDzWA/agb78c2Vhj6UCVES15ARERERlU6lepJDIiIiIk1j8kNEREQ6hckPERER6RQmP0RERKRTmPyQiiVLlsDd3R3GxsaoV68eDh06pO2QtObgwYPo3LkznJ2dIZPJsH37dqXlQgiEhITA2dkZJiYm8PPzw7lz55TqZGVlYfTo0bCzs4OZmRk+/PBD3L59W6nOw4cP0a9fP1hZWcHKygr9+vXDo0ePSnjr3p7w8HC8//77sLCwgL29PT766CNcvHhRqQ778tWWLl0KHx8faWI4X19f/PHHH9Jy9l/xhIeHQyaTITAwUCpjX75eSEgIZDKZ0sPR0VFaXur7UOM3zKAybcOGDcLQ0FAsW7ZMnD9/XowdO1aYmZmJGzduaDs0rdi9e7eYOnWq2LJliwAgtm3bprR89uzZwsLCQmzZskWcOXNG9OrVSzg5OYn09HSpzvDhw0WFChVEdHS0OHHihPD39xe1atUSOTk5Up127dqJmjVritjYWBEbGytq1qwpOnXq9LY2s8S1bdtWrFq1Spw9e1acOnVKdOzYUbz33nviyZMnUh325avt2LFD7Nq1S1y8eFFcvHhRTJkyRRgaGoqzZ88KIdh/xXHs2DHh5uYmfHx8xNixY6Vy9uXrzZgxQ9SoUUMkJydLj9TUVGl5ae9DJj+kpEGDBmL48OFKZdWqVROTJ0/WUkSlx8vJT15ennB0dBSzZ8+Wyp49eyasrKzEDz/8IIQQ4tGjR8LQ0FBs2LBBqnPnzh2hp6cnIiMjhRBCnD9/XgAQcXFxUp0jR44IAOLChQslvFXakZqaKgCImJgYIQT7srjKlSsnli9fzv4rhsePH4sqVaqI6Oho0bx5cyn5YV8WzYwZM0StWrUKXFYW+pCHvUjy/PlzHD9+HG3atFEqb9OmDWJjY7UUVemVlJSElJQUpf6Sy+Vo3ry51F/Hjx9Hdna2Uh1nZ2fUrFlTqnPkyBFYWVmhYcOGUp1GjRrBysrqne33tLQ0AICNjQ0A9qW6cnNzsWHDBmRkZMDX15f9VwwBAQHo2LEjWrVqpVTOviy6y5cvw9nZGe7u7vjkk09w7do1AGWjD8vEDM/0dvz777/Izc2Fg4ODUrmDgwNSUlK0FFXppeiTgvrrxo0bUh0jIyOUK1dOpY5i/ZSUFNjb26u0b29v/072uxAC48ePR9OmTVGzZk0A7MuiOnPmDHx9ffHs2TOYm5tj27ZtqF69uvRDwP4rmg0bNuDEiROIj49XWcZ9sWgaNmyItWvXomrVqrh37x5mzpyJxo0b49y5c2WiD5n8kAqZTKb0XAihUkb/KU5/vVynoPrvar+PGjUKp0+fxl9//aWyjH35ap6enjh16hQePXqELVu2YMCAAYiJiZGWs/9e79atWxg7diz27NkDY2PjQuuxL1+tffv20t/e3t7w9fVF5cqVsWbNGjRq1AhA6e5DHvYiiZ2dHfT19VUy6tTUVJUMniBd2fCq/nJ0dMTz58/x8OHDV9a5d++eSvv//PPPO9fvo0ePxo4dO7B//35UrFhRKmdfFo2RkRE8PDxQv359hIeHo1atWvj222/Zf2o4fvw4UlNTUa9ePRgYGMDAwAAxMTH47rvvYGBgIG0n+1I9ZmZm8Pb2xuXLl8vE/sjkhyRGRkaoV68eoqOjlcqjo6PRuHFjLUVVerm7u8PR0VGpv54/f46YmBipv+rVqwdDQ0OlOsnJyTh79qxUx9fXF2lpaTh27JhU5+jRo0hLS3tn+l0IgVGjRmHr1q3Yt28f3N3dlZazL4tHCIGsrCz2nxpatmyJM2fO4NSpU9Kjfv366Nu3L06dOoVKlSqxL4shKysLiYmJcHJyKhv74xudLk3vHMWl7itWrBDnz58XgYGBwszMTFy/fl3boWnF48ePxcmTJ8XJkycFALFw4UJx8uRJ6dL/2bNnCysrK7F161Zx5swZ0bt37wIv56xYsaL4888/xYkTJ0SLFi0KvJzTx8dHHDlyRBw5ckR4e3u/M5fECiHEiBEjhJWVlThw4IDSpbGZmZlSHfblqwUHB4uDBw+KpKQkcfr0aTFlyhShp6cn9uzZI4Rg/72J/Fd7CcG+LIoJEyaIAwcOiGvXrom4uDjRqVMnYWFhIf1WlPY+ZPJDKhYvXixcXV2FkZGRqFu3rnQ5si7av3+/AKDyGDBggBDixSWdM2bMEI6OjkIul4tmzZqJM2fOKLXx9OlTMWrUKGFjYyNMTExEp06dxM2bN5Xq3L9/X/Tt21dYWFgICwsL0bdvX/Hw4cO3tJUlr6A+BCBWrVol1WFfvtrgwYOlz2X58uVFy5YtpcRHCPbfm3g5+WFfvp5i3h5DQ0Ph7Owsu
nXrJs6dOyctL+19KBNCiDcbOyIiIiIqO3jODxEREekUJj9ERESkU5j8EBERkU5h8kNEREQ6hckPERER6RQmP0RERKRTmPwQERGRTmHyQ0Rl2sCBA/HRRx9Jz/38/BAYGKi1eIio9GPyQ0QalZKSgrFjx8LDwwPGxsZwcHBA06ZN8cMPPyAzM7PEX3/r1q34+uuvNdrmywkWEZVtBtoOgIjeHdeuXUOTJk1gbW2NsLAweHt7IycnB5cuXcLKlSvh7OyMDz/8UGW97OxsGBoaaiQGGxsbjbRDRO8ujvwQkcaMHDkSBgYGSEhIQM+ePeHl5QVvb290794du3btQufOnQEAMpkMP/zwA7p06QIzMzPMnDkTubm5GDJkCNzd3WFiYgJPT098++23Su3n5uZi/PjxsLa2hq2tLYKCgvDyHXpePuz1/PlzBAUFoUKFCjAzM0PDhg1x4MABafnq1athbW2NqKgoeHl5wdzcHO3atUNycjIAICQkBGvWrMFvv/0GmUwGmUymtD4RlT1MfohII+7fv489e/YgICAAZmZmBdaRyWTS3zNmzECXLl1w5swZDB48GHl5eahYsSI2bdqE8+fPY/r06ZgyZQo2bdokrbNgwQKsXLkSK1aswF9//YUHDx5g27Ztr4xr0KBBOHz4MDZs2IDTp0+jR48eaNeuHS5fvizVyczMxPz58/Hzzz/j4MGDuHnzJiZOnAgAmDhxInr27CklRMnJyWjcuPGbdBURaRkPexGRRly5cgVCCHh6eiqV29nZ4dmzZwCAgIAAzJkzBwDQp08fDB48WKluaGio9Le7uztiY2OxadMm9OzZEwAQERGB4OBgdO/eHQDwww8/ICoqqtCYrl69ivXr1+P27dtwdnYG8CKZiYyMxKpVqxAWFgbgxWG3H374AZUrVwYAjBo1Cl999RUAwNzcHCYmJsjKyoKjo2PxOoeIShUmP0SkUflHdwDg2LFjyMvLQ9++fZGVlSWV169fX2XdH374AcuXL8eNGzfw9OlTPH/+HLVr1wYApKWlITk5Gb6+vlJ9AwMD1K9fX+XQl8KJEycghEDVqlWVyrOysmBrays9NzU1lRIfAHByckJqamrRN5qIyhQmP0SkER4eHpDJZLhw4YJSeaVKlQAAJiYmSuUvHxrbtGkTxo0bhwULFsDX1xcWFhaYN28ejh49WuyY8vLyoK+vj+PHj0NfX19pmbm5ufT3yydby2SyQhMqIir7eM4PEWmEra0tWrdujUWLFiEjI0Pt9Q8dOoTGjRtj5MiRqFOnDjw8PHD16lVpuZWVFZycnBAXFyeV5eTk4Pjx44W2WadOHeTm5iI1NRUeHh5KD3UOYRkZGSE3N1ftbSKi0onJDxFpzJIlS5CTk4P69etj48aNSExMxMWLF/G///0PFy5cUBl9yc/DwwMJCQmIiorCpUuXMG3aNMTHxyvVGTt2LGbPno1t27bhwoULGDlyJB49elRom1WrVkXfvn3Rv39/bN26FUlJSYiPj8ecOXOwe/fuIm+Xm5sbTp8+jYsXL+Lff/9FdnZ2kdclotKHyQ8RaUzlypVx8uRJtGrVCsHBwahVqxbq16+P77//HhMnTnzl5IPDhw9Ht27d0KtXLzRs2BD379/HyJEjlepMmDAB/fv3x8CBA6VDY127dn1lTKtWrUL//v0xYcIEeHp64sMPP8TRo0fh4uJS5O36/PPP4enpifr166N8+fI4fPhwkdclotJHJnhgm4iIiHQIR36IiIhIpzD5ISIiIp3C5IeIiIh0CpMfIiIi0ilMfoiIiEinMPkhIiIincLkh4iIiHQKkx8iIiLSKUx+iIiISKcw+SEiIiKdwuSHiIiIdAqTHyIiItIp/wdpb2vObpLHkwAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "%matplotlib inline\n", + "calibrated_gradient = lava.compute_values_and_visualize(dual_sol, trained_with_flag1, training_size, portion, poisoned=668)" + ] + }, + { + "cell_type": "code", + "execution_count": 112, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "inspected: 10, found: 1 actual found: 1 sythetic found; 0, detection rate: 0.00 baseline: 1.8\n", + "inspected: 20, found: 5 actual found: 5 sythetic found; 0, detection rate: 0.02 baseline: 3.6\n", + "inspected: 30, found: 7 actual found: 7 sythetic found; 0, detection rate: 0.02 baseline: 5.4\n", + "inspected: 40, found: 8 actual found: 8 sythetic found; 0, detection rate: 0.03 baseline: 7.2\n", + "inspected: 50, found: 10 actual found: 10 sythetic found; 0, detection rate: 0.03 baseline: 9.0\n", + "inspected: 60, found: 13 actual found: 13 sythetic found; 0, detection rate: 0.04 baseline: 10.8\n", + "inspected: 70, found: 14 actual found: 14 sythetic found; 0, detection rate: 0.05 baseline: 12.6\n", + "inspected: 80, found: 15 actual found: 15 sythetic found; 0, detection rate: 0.05 baseline: 14.4\n", + "inspected: 90, found: 16 actual found: 16 sythetic found; 0, detection rate: 0.05 baseline: 16.2\n", + "inspected: 100, found: 16 actual found: 16 sythetic found; 0, detection rate: 0.05 baseline: 18.0\n", + "inspected: 110, found: 20 actual found: 20 sythetic found; 0, detection rate: 0.07 baseline: 19.8\n", + "inspected: 120, found: 22 actual found: 22 sythetic found; 0, detection rate: 0.07 baseline: 21.6\n", + "inspected: 130, found: 24 actual found: 24 sythetic found; 0, detection rate: 0.08 baseline: 23.400000000000002\n", + "inspected: 140, found: 27 actual found: 27 sythetic found; 0, detection rate: 0.09 baseline: 25.2\n", + "inspected: 150, found: 30 actual found: 30 sythetic found; 0, detection rate: 0.10 baseline: 27.0\n", + "inspected: 160, found: 33 actual found: 33 sythetic found; 0, detection rate: 0.11 baseline: 28.8\n", + "inspected: 170, found: 36 actual found: 36 sythetic found; 0, detection rate: 0.12 baseline: 30.6\n", + "inspected: 180, found: 38 actual found: 38 sythetic found; 0, detection rate: 0.13 baseline: 32.4\n", + "inspected: 190, found: 42 actual found: 42 sythetic found; 0, detection rate: 0.14 baseline: 34.2\n", + "inspected: 200, found: 44 actual found: 44 sythetic found; 0, detection rate: 0.15 baseline: 36.0\n", + "inspected: 210, found: 45 actual found: 45 sythetic found; 0, detection rate: 0.15 baseline: 37.800000000000004\n", + "inspected: 220, found: 47 actual found: 47 sythetic found; 0, detection rate: 0.16 baseline: 39.6\n", + "inspected: 230, found: 50 actual found: 50 sythetic found; 0, detection rate: 0.17 baseline: 41.4\n", + "inspected: 240, found: 52 actual found: 52 sythetic found; 0, detection rate: 0.17 baseline: 43.2\n", + "inspected: 250, found: 53 actual found: 53 sythetic found; 0, detection rate: 0.18 baseline: 45.0\n", + "inspected: 260, found: 54 actual found: 54 sythetic found; 0, detection rate: 0.18 baseline: 46.800000000000004\n", + "inspected: 270, found: 56 actual found: 56 sythetic found; 0, detection rate: 0.19 baseline: 48.6\n", + "inspected: 280, found: 58 actual found: 58 sythetic found; 0, detection rate: 0.19 baseline: 50.4\n", + "inspected: 290, found: 60 actual found: 60 sythetic found; 0, detection rate: 0.20 baseline: 52.2\n", + "inspected: 300, found: 64 actual found: 64 sythetic found; 0, detection rate: 0.21 
baseline: 54.0\n", + "inspected: 310, found: 68 actual found: 68 sythetic found; 0, detection rate: 0.23 baseline: 55.800000000000004\n", + "inspected: 320, found: 71 actual found: 71 sythetic found; 0, detection rate: 0.24 baseline: 57.6\n", + "inspected: 330, found: 73 actual found: 73 sythetic found; 0, detection rate: 0.24 baseline: 59.4\n", + "inspected: 340, found: 76 actual found: 76 sythetic found; 0, detection rate: 0.25 baseline: 61.2\n", + "inspected: 350, found: 77 actual found: 77 sythetic found; 0, detection rate: 0.26 baseline: 63.0\n", + "inspected: 360, found: 81 actual found: 81 sythetic found; 0, detection rate: 0.27 baseline: 64.8\n", + "inspected: 370, found: 84 actual found: 84 sythetic found; 0, detection rate: 0.28 baseline: 66.60000000000001\n", + "inspected: 380, found: 84 actual found: 84 sythetic found; 0, detection rate: 0.28 baseline: 68.4\n", + "inspected: 390, found: 87 actual found: 87 sythetic found; 0, detection rate: 0.29 baseline: 70.2\n", + "inspected: 400, found: 89 actual found: 89 sythetic found; 0, detection rate: 0.30 baseline: 72.0\n", + "inspected: 410, found: 92 actual found: 92 sythetic found; 0, detection rate: 0.31 baseline: 73.8\n", + "inspected: 420, found: 94 actual found: 94 sythetic found; 0, detection rate: 0.31 baseline: 75.60000000000001\n", + "inspected: 430, found: 96 actual found: 96 sythetic found; 0, detection rate: 0.32 baseline: 77.4\n", + "inspected: 440, found: 96 actual found: 96 sythetic found; 0, detection rate: 0.32 baseline: 79.2\n", + "inspected: 450, found: 98 actual found: 98 sythetic found; 0, detection rate: 0.33 baseline: 81.0\n", + "inspected: 460, found: 99 actual found: 99 sythetic found; 0, detection rate: 0.33 baseline: 82.8\n", + "inspected: 470, found: 101 actual found: 101 sythetic found; 0, detection rate: 0.34 baseline: 84.60000000000001\n", + "inspected: 480, found: 101 actual found: 101 sythetic found; 0, detection rate: 0.34 baseline: 86.4\n", + "inspected: 490, found: 102 actual found: 102 sythetic found; 0, detection rate: 0.34 baseline: 88.2\n", + "inspected: 500, found: 104 actual found: 104 sythetic found; 0, detection rate: 0.35 baseline: 90.0\n", + "inspected: 510, found: 105 actual found: 105 sythetic found; 0, detection rate: 0.35 baseline: 91.8\n", + "inspected: 520, found: 107 actual found: 107 sythetic found; 0, detection rate: 0.36 baseline: 93.60000000000001\n", + "inspected: 530, found: 111 actual found: 111 sythetic found; 0, detection rate: 0.37 baseline: 95.4\n", + "inspected: 540, found: 112 actual found: 112 sythetic found; 0, detection rate: 0.37 baseline: 97.2\n", + "inspected: 550, found: 113 actual found: 113 sythetic found; 0, detection rate: 0.38 baseline: 99.0\n", + "inspected: 560, found: 116 actual found: 116 sythetic found; 0, detection rate: 0.39 baseline: 100.8\n", + "inspected: 570, found: 117 actual found: 117 sythetic found; 0, detection rate: 0.39 baseline: 102.60000000000001\n", + "inspected: 580, found: 119 actual found: 119 sythetic found; 0, detection rate: 0.40 baseline: 104.4\n", + "inspected: 590, found: 122 actual found: 122 sythetic found; 0, detection rate: 0.41 baseline: 106.2\n", + "inspected: 600, found: 126 actual found: 126 sythetic found; 0, detection rate: 0.42 baseline: 108.0\n", + "inspected: 610, found: 129 actual found: 129 sythetic found; 0, detection rate: 0.43 baseline: 109.8\n", + "inspected: 620, found: 131 actual found: 131 sythetic found; 0, detection rate: 0.44 baseline: 111.60000000000001\n", + "inspected: 630, found: 133 actual found: 
133 sythetic found; 0, detection rate: 0.44 baseline: 113.4\n", + "inspected: 640, found: 136 actual found: 136 sythetic found; 0, detection rate: 0.45 baseline: 115.2\n", + "inspected: 650, found: 139 actual found: 139 sythetic found; 0, detection rate: 0.46 baseline: 117.0\n", + "inspected: 660, found: 139 actual found: 139 sythetic found; 0, detection rate: 0.46 baseline: 118.8\n", + "inspected: 670, found: 140 actual found: 140 sythetic found; 0, detection rate: 0.47 baseline: 120.60000000000001\n", + "inspected: 680, found: 143 actual found: 143 sythetic found; 0, detection rate: 0.48 baseline: 122.4\n", + "inspected: 690, found: 148 actual found: 148 sythetic found; 0, detection rate: 0.49 baseline: 124.2\n", + "inspected: 700, found: 151 actual found: 151 sythetic found; 0, detection rate: 0.50 baseline: 126.0\n", + "inspected: 710, found: 153 actual found: 153 sythetic found; 0, detection rate: 0.51 baseline: 127.8\n", + "inspected: 720, found: 155 actual found: 155 sythetic found; 0, detection rate: 0.52 baseline: 129.6\n", + "inspected: 730, found: 157 actual found: 157 sythetic found; 0, detection rate: 0.52 baseline: 131.4\n", + "inspected: 740, found: 158 actual found: 158 sythetic found; 0, detection rate: 0.53 baseline: 133.20000000000002\n", + "inspected: 750, found: 159 actual found: 159 sythetic found; 0, detection rate: 0.53 baseline: 135.0\n", + "inspected: 760, found: 162 actual found: 162 sythetic found; 0, detection rate: 0.54 baseline: 136.8\n", + "inspected: 770, found: 164 actual found: 164 sythetic found; 0, detection rate: 0.55 baseline: 138.6\n", + "inspected: 780, found: 167 actual found: 167 sythetic found; 0, detection rate: 0.56 baseline: 140.4\n", + "inspected: 790, found: 168 actual found: 168 sythetic found; 0, detection rate: 0.56 baseline: 142.20000000000002\n", + "inspected: 800, found: 171 actual found: 171 sythetic found; 0, detection rate: 0.57 baseline: 144.0\n", + "inspected: 810, found: 173 actual found: 173 sythetic found; 0, detection rate: 0.58 baseline: 145.8\n", + "inspected: 820, found: 174 actual found: 174 sythetic found; 0, detection rate: 0.58 baseline: 147.6\n", + "inspected: 830, found: 176 actual found: 176 sythetic found; 0, detection rate: 0.59 baseline: 149.4\n", + "inspected: 840, found: 178 actual found: 178 sythetic found; 0, detection rate: 0.59 baseline: 151.20000000000002\n", + "inspected: 850, found: 179 actual found: 179 sythetic found; 0, detection rate: 0.60 baseline: 153.0\n", + "inspected: 860, found: 183 actual found: 183 sythetic found; 0, detection rate: 0.61 baseline: 154.8\n", + "inspected: 870, found: 185 actual found: 185 sythetic found; 0, detection rate: 0.62 baseline: 156.6\n", + "inspected: 880, found: 187 actual found: 187 sythetic found; 0, detection rate: 0.62 baseline: 158.4\n", + "inspected: 890, found: 191 actual found: 191 sythetic found; 0, detection rate: 0.64 baseline: 160.20000000000002\n", + "inspected: 900, found: 193 actual found: 193 sythetic found; 0, detection rate: 0.64 baseline: 162.0\n", + "inspected: 910, found: 195 actual found: 195 sythetic found; 0, detection rate: 0.65 baseline: 163.8\n", + "inspected: 920, found: 196 actual found: 196 sythetic found; 0, detection rate: 0.65 baseline: 165.6\n", + "inspected: 930, found: 199 actual found: 199 sythetic found; 0, detection rate: 0.66 baseline: 167.4\n", + "inspected: 940, found: 201 actual found: 201 sythetic found; 0, detection rate: 0.67 baseline: 169.20000000000002\n", + "inspected: 950, found: 204 actual found: 204 sythetic found; 
0, detection rate: 0.68 baseline: 171.0\n", + "inspected: 960, found: 205 actual found: 205 sythetic found; 0, detection rate: 0.68 baseline: 172.8\n", + "inspected: 970, found: 209 actual found: 209 sythetic found; 0, detection rate: 0.70 baseline: 174.6\n", + "inspected: 980, found: 210 actual found: 210 sythetic found; 0, detection rate: 0.70 baseline: 176.4\n", + "inspected: 990, found: 212 actual found: 212 sythetic found; 0, detection rate: 0.71 baseline: 178.20000000000002\n", + "inspected: 1000, found: 212 actual found: 212 sythetic found; 0, detection rate: 0.71 baseline: 180.0\n", + "inspected: 1010, found: 213 actual found: 213 sythetic found; 0, detection rate: 0.71 baseline: 181.8\n", + "inspected: 1020, found: 215 actual found: 215 sythetic found; 0, detection rate: 0.72 baseline: 183.6\n", + "inspected: 1030, found: 219 actual found: 219 sythetic found; 0, detection rate: 0.73 baseline: 185.4\n", + "inspected: 1040, found: 222 actual found: 222 sythetic found; 0, detection rate: 0.74 baseline: 187.20000000000002\n", + "inspected: 1050, found: 223 actual found: 223 sythetic found; 0, detection rate: 0.74 baseline: 189.0\n", + "inspected: 1060, found: 228 actual found: 228 sythetic found; 0, detection rate: 0.76 baseline: 190.8\n", + "inspected: 1070, found: 230 actual found: 230 sythetic found; 0, detection rate: 0.77 baseline: 192.6\n", + "inspected: 1080, found: 232 actual found: 232 sythetic found; 0, detection rate: 0.77 baseline: 194.4\n", + "inspected: 1090, found: 234 actual found: 234 sythetic found; 0, detection rate: 0.78 baseline: 196.20000000000002\n", + "inspected: 1100, found: 236 actual found: 236 sythetic found; 0, detection rate: 0.79 baseline: 198.0\n", + "inspected: 1110, found: 238 actual found: 238 sythetic found; 0, detection rate: 0.79 baseline: 199.8\n", + "inspected: 1120, found: 242 actual found: 242 sythetic found; 0, detection rate: 0.81 baseline: 201.6\n", + "inspected: 1130, found: 244 actual found: 244 sythetic found; 0, detection rate: 0.81 baseline: 203.4\n", + "inspected: 1140, found: 247 actual found: 247 sythetic found; 0, detection rate: 0.82 baseline: 205.20000000000002\n", + "inspected: 1150, found: 249 actual found: 249 sythetic found; 0, detection rate: 0.83 baseline: 207.0\n", + "inspected: 1160, found: 253 actual found: 253 sythetic found; 0, detection rate: 0.84 baseline: 208.8\n", + "inspected: 1170, found: 256 actual found: 256 sythetic found; 0, detection rate: 0.85 baseline: 210.6\n", + "inspected: 1180, found: 259 actual found: 259 sythetic found; 0, detection rate: 0.86 baseline: 212.4\n", + "inspected: 1190, found: 262 actual found: 262 sythetic found; 0, detection rate: 0.87 baseline: 214.20000000000002\n", + "inspected: 1200, found: 264 actual found: 264 sythetic found; 0, detection rate: 0.88 baseline: 216.0\n", + "inspected: 1210, found: 268 actual found: 268 sythetic found; 0, detection rate: 0.89 baseline: 217.8\n", + "inspected: 1220, found: 269 actual found: 269 sythetic found; 0, detection rate: 0.90 baseline: 219.6\n", + "inspected: 1230, found: 271 actual found: 271 sythetic found; 0, detection rate: 0.90 baseline: 221.4\n", + "inspected: 1240, found: 272 actual found: 272 sythetic found; 0, detection rate: 0.91 baseline: 223.20000000000002\n", + "inspected: 1250, found: 274 actual found: 274 sythetic found; 0, detection rate: 0.91 baseline: 225.0\n", + "inspected: 1260, found: 275 actual found: 275 sythetic found; 0, detection rate: 0.92 baseline: 226.8\n", + "inspected: 1270, found: 276 actual found: 276 sythetic 
found; 0, detection rate: 0.92 baseline: 228.6\n",
+      "inspected: 1280, found: 280 actual found: 280 sythetic found; 0, detection rate: 0.93 baseline: 230.4\n",
+      "inspected: 1290, found: 281 actual found: 281 sythetic found; 0, detection rate: 0.94 baseline: 232.20000000000002\n",
+      "inspected: 1300, found: 284 actual found: 284 sythetic found; 0, detection rate: 0.95 baseline: 234.0\n",
+      "inspected: 1310, found: 288 actual found: 288 sythetic found; 0, detection rate: 0.96 baseline: 235.8\n",
+      "inspected: 1320, found: 290 actual found: 290 sythetic found; 0, detection rate: 0.97 baseline: 237.6\n",
+      "inspected: 1330, found: 292 actual found: 292 sythetic found; 0, detection rate: 0.97 baseline: 239.4\n",
+      "inspected: 1340, found: 293 actual found: 293 sythetic found; 0, detection rate: 0.98 baseline: 241.20000000000002\n",
+      "inspected: 1350, found: 295 actual found: 295 sythetic found; 0, detection rate: 0.98 baseline: 243.0\n",
+      "inspected: 1360, found: 297 actual found: 297 sythetic found; 0, detection rate: 0.99 baseline: 244.8\n"
+     ]
+    },
+    {
+     "data": {
+      "image/png": "
Cubjc9oTfDyDMYYogcmuQgk5CQYMVqEBGZyVQvzG6oAgwn9BI5NclBJiUlxZr1ICKSTsqkXkMhhrv0EjkVzpEhIseRpwCKzgFFxrpZoDuhl6uRiJwWgwwROQYp+8Po26WXq5GInBqDDBHZNykTeoGyXXrVeE4SkUtgkCEi+yXlnKRc6C6rlsuB1FSrVo2I7AODDBHZF/U8GGWx6V6YisNI7IUhcjmSgsxnn30m+YETJkx44MoQkYuTMg9GreKEXvbCELkkmRBCmCpUv359rdfXr1/HnTt34O/vDwDIz89HtWrVEBQUhAsXLlilovoUFhbCz88PBQUF8PX1rbSvS0RWkKcAdnY0Xa78hF6uRiJySJb8+S2pR+bixYua///666/x5ZdfYtmyZWjUqBEA4OzZsxg7dizGjRv3UJUhIhdlzjlJ6gm9XI1ERJDYI1NeVFQUNm7ciNatW2tdP3LkCAYNGqQVeqyNPTJETsDcc5I4D4bI4VV6j0x52dnZuH//vs51pVKJa9euPVRliMiFPOg5SVFRDDFEpFHF3Dd0794dY8eOxeHDh6HuzDl8+DDGjRuHHj16WLyCROSEfpWr5sP89q7++w3HAWFLgfV67kVHW7VqRORYzA4yX331FcLCwtChQwd4eXnB09MTsbGxqFOnDpYuXWqNOhKRM5FyTtLZEMCrGTB5svZ1HvJIRBWYPbRUu3ZtbNu2DefOncOZM2cghEDjxo0RzX8lEZEx5pyTtP5dAO+qgkxmJnDuHFcmEZFeD7whXmRkJIQQiIqKQtWq3FePiIyQsj9MzSTgtWXae8PMnQsMHKg6sZqISA+zh5bu3LmDpKQkVKtWDU2bNsWVK1cAqDbCmz17tsUrSEQOLE8BnEg1HWIay4G8OO0Qo3bunBUqRkTOwuwgM3XqVBw/fhw//vgjvLy8NNd79OiB9ev1zcwjIpdkakJvsxSg00qgVyZQ/DRw3sCQE4eticgIs8eEtm7divXr16Njx46QyWSa602aNMF5Q38REZFrkTKh91a4aofe9C+AVav0l+HkXiIywewgc/36dQQFBelcv337tlawISIXJWWX3kMhwLAxhu9z0zsiksjsoaX27dvj+++/17xWh5clS5agU6dOlqsZETke9XDSn4v0398E4B0A83OMP4eb3hGRRGb3yMyaNQt9+vTBqVOnUFJSgk8//RS///47Dhw4gIyMiltwEpHTUy+rVhab3qV3s+HbWjgvhogkMjvIPProo/jll1/w0UcfISoqCjt37kSbNm1w4MABNG/e3Bp1JCJ7JWVZ9W6ojhmQOoWO82KIyAxmHxppT3hoJJEN5SlUw0imvAPdEBMbCygUZa8TE4GePbnpHZGLsOmhkW5ubsjOztaZ8Hvjxg0EBQVBqVQ+VIWIyAFImdALqIaTyoeYceOAUaPKggx37CWih2R2kDHUgXPv3j14eHg8dIWIyM6ZGk6qPQ04cQpYsFW3J0YdYgDVfxlgiOghSQ4yn332GQDVKqWlS5eiRo0amntKpRL79u1DTEyM5WtIRPYhTwFc3W48xFyLBYbN1H+Pc1+IyAokB5lPPvkEgKpHZuHChXBzc9Pc8/DwQGRkJBYuXGj5GhKR7ZnqhSntChR3ApL1hBjuCUNEViQ5yFy8eBEA0LVrV2zevBk1a9a0WqWIyI5I2aU3dS9wfq/+e9wThoisyOw5Mnv3GvjLioici3p/mCIT66YrTuitiHvCEJEVmR1kBg0ahHbt2mHKlCla1z/88EMcPHgQGzZssFjliMhGpOwPswmqs5KMhRjOiyEiKzM7yGRkZCAlJUXnep8+ffDRRx9ZpFJEZCNSJvQCxnfpXboU8PDgsmoiqhRmB5n//e9/epdZu7u7o7Cw0CKVIiIbMNUL0ywFOJEDpC4y3AsjlwNJSVapHhGRPmYfGtmsWTOsX79e5/q6devQpEkTi1SKiCqZlAm9t8KBP0P0h5iUFCAzE5g92yrVIyIyxOwemenTp+OZZ57B+fPn0a1bNwDA7t27sXbtWs6PIXJEUnbpvRYLDBuj/55cDqSmWrxaRERSmB1kBgwYgK1bt2LmzJnYuHEjvL290aJFC+zatQtxcXHWqCMRWYuU4aRb4fpDDPeHISI7YHaQAYC+ffuib9++lq4LEVUG9bJqZbHxENNYDrRIBVat0n+f+8MQkR14oCCTn5+PjRs34sKFC3jzzTdRq1YtHD16FMHBwQgLC7N0HYnIUqQsq244DmgwSjUXZtUqoLhYfznuD0NEdsDsIHPixAn06NEDfn5+uHTpEsaMGYNatWphy5YtuHz5MlauXGmNehLRw5IyoRdQhZgPNwNzy5Xt0AE4eLDsNfeHISI7YfaqpeTkZIwcORJ//PEHvLy8NNfj4+Oxb98+i1aOiCxEyoReQDWcdB7aIQZQhZilS4GVK7k6iYjsitk9MocOHcKiRYt0roeFhSEnJ8cilSIiCzI1nNRhKeDmAfhEA4GxhufEeHgAiYnWqSMR0QMyO8h4eXnp3fju7NmzqF27tkUqRUQWIGWX3sZyoGG5DewUCuC8gd3uOCeGiOyQ2UHmqaeewowZM/DNN98AAGQyGa5cuYIpU6bgmWeesXgFiegBmOqFUU/oDSw3z0Uu1x1SKn+Pc2KIyA7JhBDCnDcUFhbiySefxO+//46ioiKEhoYiJycHnTp1wrZt21C9enVr1VVvXfz8/FBQUABfX99K+7pEdi1PAezsaLxMr8yyEKNQANu3A+++q1uOe8UQkRVY8ue32T0yvr6++Pnnn7Fnzx4cPXoUpaWlaNOmDXr06PFQFSGih6TeH6bI2HHUAPwTge3ngGgAmzcb7oUBuFcMEdk9s4PMypUr8dxzz6Fbt26aIwoAoLi4GOvWrcPw4cMtWkEikkDK/jDNUoAt54HUVQAMTOitiPNiiMjOmT205ObmhuzsbAQFBWldv3HjBoKCgqBUKi1aQWM4tEQuTz2h9zc9w0LlNZYDxU8DHU0MOZUnl3OZNRFZhU2HloQQkMlkOtf/+usv+Pn5PVRliMgMUs5J8olSLas+D2C5hH1kAM6LISKHIjnItG7dGjKZDDKZDN27d0fVqmVvVSqVuHjxIvr06WOVShJRBVJ26b0VDhwHkP6F4b1hKuJJ1kTkYCQHmYSEBADAsWPH0Lt3b9SoUUNzz8PDA5GRkVx+TVQZpOzSey1W/4nVFcnlwNNPA+fOqebDsBeGiByM5CCTkpICAIiMjMRzzz2ndTwBEVUSU8NJpQlAcRMgeabx54wbB4waVRZcGGCIyEGZPdkXKDv9+vz583jrrbdsdvo1J/uSS1Avq1YWAweN9LKkAVgv8ZmZmQwvRGQzNp3sW/H067Fjx/L0ayJrkbKs2icBmLhVNaFXCu7SS0ROxOzTrydOnMjTr4kqg5QJvQCQ39J0iElM5MnVROSUzO6ROXz4MBYvXqxznadfE1mQlAm9gGqX3oMG7i1dqjqxmpN4iciJ8fRrIntjajipw1LAzQNYufPfXXr1kMuBpCT994iInIjZQ0vq06/v378PgKdfE1lMngI4kWo8xDSWA
zeaAf85D6Su1r2fksLhIyJyKWb3yHz00Ud48sknERQUhLt37yIuLk5z+vUHH3xgjToSOT+Ty6q7AuHDgHXngLlGjhngIY9E5GJ4+jWRrUmZ1Ju6Fzi/1/SzeMgjEbkYs4OMWsXTr4nITOr9YYpMLDlKg7Sl1VxWTUQuyKwgU1paihUrVmDz5s24dOkSZDIZ6tevj0GDBiExMVHvYZJEpIeU/WEutQG+Omo6xPCQRyJyYZKDjBACAwYMwLZt29CyZUs0b94cQgicPn0aI0eOxObNm7F161YrVpXICeQpgKvbTYeYNADrj5p+Hg95JCIXJznIrFixAvv27cPu3bvRtWtXrXt79uxBQkICVq5cieHDh1u8kkROwVQvzCYAuQCyYbgXhoc8EhFpkXzWUq9evdCtWzdMmTJF7/2ZM2ciIyMDP/zwg0UraAzPWiKHkacAdhpZbQQA78BwgKl4yCMRkQOz5M9vyfvInDhxAn369DF4Pz4+HsePH3+oyhA5pYxlwI6pxsuYmtDLEENEpJfkoaWbN28iODjY4P3g4GDcunXLIpUichrzY4Ggg4b/ybAJwHEYDzFcjUREZJDkIKNUKlG1quHibm5uKCkpsUiliByaeln1n2dUIcaQNACb//3/2FhAoSi7l5gI9OzJeTBERCaYtWpp5MiR8PT01Hv/3r17FqsUkcNRKFQTcGulAwUGzj9S2w0gA6pemPJLp9XPYHghIpJMcpAZMWKEyTJcsUQuSS4HNs0FWgKQctyYOsRUXDodG8sAQ0RkJsmrluwRVy2RTSkUwPbtwJl3gQES33OhKRAhZ68LEbk0S/78fuAjCohcmlwOzJ0LRAGYYaJsTn8gKBQIbw8MTaqM2hERuQwGGSKp1HNYiovLQkycifekAZj4NntfiIishEGGSAp1D4zaczA+nKReVj2IS6eJiKyJQYbIFIWirAemDgA3GA8x/olAQk9gMufBEBFZG4MMkTEKBbB8uekeGABoOA5oMAoIZHghIqosko8oIHI5cjnQsSOwa5G0VUkMMURElY5Bhkif8sNJpib0AkBjOUMMEZENcGiJqCKpw0kdlgJuHoBPNEMMEZGNMMgQlSd1l97GcqAh94QhIrI1BhkiNYUCuDzX+AZ3nNBLRGRXGGTINZU/oBFQ/f/fGaYn9TLEEBHZFQYZcj0VN7dT7w8TBKCekfdxQi8Rkd1hkCHXol6NpCZlf5hmKUBoPEMMEZEdYpAh16FejQSoemFawnSIaSwHWqRat15ERPTAGGTINZQfTjLVC1MzCYiJ47JqIiIHwCBDzq/8cFIUTPfCtB/LAENE5CC4sy85v3PnVP+VsksvJ/QSETkU9siQ84uONj2cxAm9REQOiUGGnFfGMiDrEBAcYDzEcEIvEZHDYpAh56Le6O7yHKDB76rB0+sGynKXXiIih8cgQ85DvTIpCsaPGVBjiCEicnic7EvOQb0yScqEXoCTeomInAR7ZMg5nDtnekJv7WlAwxjuD0NE5EQYZMjxZSwDbq00HmJyY4GhH1RalYiIqHIwyJBjmx8LBB0EAg3cL+0KhA8DhiZVarWIiKhyMMiQ41GvTLp/RhVijOkzi8NIREROjEGGHItcDmyaC9QBEATgGSNlOaGXiMjpMciQ41AogMtzTS+trpnE85KIiFwEgww5hoxlwC9fmj7wMTcWGLq0UqpERES2xyBD9k89oTfSwP37/QG3UCC8PSf1EhG5GAYZsm8Zy0xP6O37NoeRiIhcFHf2JfuVpwCy1hgvwwm9REQujT0yZJ9+lQOn5xqO2pzQS0REYJAhe5KnAIrOAcpiVYgxhBN6iYjoXwwyZB/UPTDGcJdeIiKqgHNkyPbyFKZDDKAKMXEMMUREVIZBhmwrTwFcWG66XBqAKx5Wrw4RETkWDi2R7ZgaTloMQAkgG8B5ABOjK6deRETkMBhkqPLlKYCr242HmEMhQEZO2Wu5HIjlCiUiItLGIEOVy1QvzG4AGQDO5wBLlwIeHkB0NEMMERHpxSBDlUfKpN4MqIaRAFWISUy0dq2IiMiBMciQ9an3hyk6b7xcGspCDKDqiSEiIjKCQYasS8r+MJsAHId2iOGcGCIikoBBhqxDyoReQNULs7nc65QUID6eIYaIiCRhkCHLM9ULswlALsqWVavJ5UBqqlWrRkREzoVBhixLyoTeisNI48YBo0axF4aIiMzGIEOWk7EMyFpjfL/oihN6AYYYIiJ6YAwyZBnzY4Ggg4ZDjL4JvQAn9RIR0UNhkKEHp15W/ecZVYgxpOKEXm50R0REFsIgQw9GyrJqzS695a7J5UAST7AmIiLLYJAh80mZ0AuUhZjERKBnT/bAEBGRxTHIkHnyFMCB2abLXWgKpMgZXoiIyKoYZEg6U8NJOf2BoFAgvD0wlMNHRERkfQwyZJx6Qq+y2HiISQMw8W32vhARUaVikCHDzJnQO4jLqImIqPIxyJCujGVA1ndAla2my9ZKANZMYYghIiKbYJAhbaY2tisvDcBEhhgiIrIdBhkqk7HM+MZ2ALAYgBKqAx85nERERDbGIEMqGcuAQx8DoUbKpAFI5K68RERkPxhkqGw4yVCIUZ+TNIi78hIRkX1hkHFVUs9JutAUSJADk9kDQ0RE9odBxhVJWVZ9tTHQfhI3tiMiIrsmZW0KOROp5yS1nwTEMcQQEZF9Y5BxJXkK4MJy0+VyYxliiIjIIXBoyVWYGk66NwJw9+I5SURE5FAYZJxdngK4ut30OUl9uwMvJFZatYiIiCyBQcaZmeqFUZ+TdB7AxOhKqhQREZHlcI6Ms5IyqVcdYuTcoZeIiBwTe2ScjXp/mKLzxsulAXghBYiPZ4ghIiKHxSDjTKTsD1N+l97U1EqoFBERkfUwyDgDKRN6AVUvTPMUYDJ7YYiIyDkwyDg6U70wmwDkQnVa9XkAK6MYYoiIyGkwyDgyKRN6j0MVYNSiuTqJiIicB1ctOSopu/SmQTvEcHUSERE5GfbIOCIpw0nqnpiUFCAqStUTwxBDREROhkHGUaiXVSuLTe/Su7ncay6vJiIiJ8Yg4wikLKsuv0uvGoeSiIjIyTHI2DspE3oB7RCTwo3uiIjINXCyrz2TMqEX0J3UG8Ul1kRE5BrYI2OvTA0nLQagRNn+MOVxiTUREbkIBhl7I2WX3jSohpL04bwYIiJyIQwy9sRUL4y+Cb1q48YBo0YxxBARkUvhHBl7IWVSr6EQAzDEEBGRS2KPjK2p94cpMpRQ/lVxQm95HE4iIiIXxSBjS1L2hym/S295S5cCHh7csZeIiFwag4wtSJnQC+ju0qsmlwNJSdaoGRERkUNhkKlspnphmqUAuQBeele3F4Yb3REREWlhkKlMUib03goHsj2ARxOB86vKrsvlQGqqVatHRETkaBhkKouUXXoPhQDDxpS9fuEFoFcvzoMhIiIygEGmMpgaTtJM6M3Rvr56NTB+PEMMERGRAQwy1pKxDMg6BAQHANdN7NKrb0Kv2rlzDDJEREQGMMhYw/xYIOigarvB6wbKGNultzyem0RERGQQd/a1tIxlqhBjshx0Q0zF
nhdudEdERGQUe2QsKU8BZK0xHQ/L79KbmAj07Fk2oVehUA0ncYIvERGRSQwylqKe0GsoxCwGoASQDdXS6pU99YeV2FgGGCIiIokYZB6WlF16c2OBOZ+yp4WIiMjCGGQehqll1fltgKavAEP/PU6AAYaIiMiiGGTMpZ7DUq8Y+NvELr0+/YE4nolERERkLQwy5pDLgU1zgToAggA8Y6RsGoCJ8ZVTLyIiIhfFICOFQgFs3w5cngvMMFFWvUvvIC6dJiIisjYGGVPUvTAtYbwHBlD1wjRPASbzhGoiIqLKwCBjjEJhuhdmE4BcqJZVD+IJ1URERJWJQcaYc9uBASbKTFgKXPHgsmoiIiIbsOkRBfv27UP//v0RGhoKmUyGrVu32rI62vIUgP9x42Uay1WrkhITGWKIiIhswKY9Mrdv30ZJSQlKSkoAAJMmTUJAQAAee+wxW1bL9P4wzVKA0HggkOGFiIjIlmzaI1NYWIjdu3dj5syZAIAmTZogPj4eV65csV2l8hTGQ0xjOdAilSGGiIjIDtg0yMybNw9JSUkYM2YMACApKQnh4eFYsGCB3vL37t1DYWGh1i+LO7Zd/3WfBKBXJtB6tuW/JhERET0QmwWZ4uJiHDlyBL169dK63qtXL+zfv1/ve9577z34+flpfoWHh1u+YtkGrue3ZC8MERGRnbFZkMnLy4NSqURwcLDW9eDgYOTk5NioVgCi41X7wZSX9u91IiIisis2HVoCAJlMpvVaCKFzTW369OkoKCjQ/MrKyrJ8hWJjgYjJwDsAFkD130ju0ktERGSPbLZqKTAwEG5ubjq9L7m5uTq9NGqenp7w9PS0fuXmzAEGDlQdDsn9YYiIiOyWzYKMh4cHWrVqhbVr16J+/foAgIsXL+Lbb79Fnz59bFWtMrGxDDBERER2zqZDS08++SQ2bNiA1q1bAwCSk5Nx8eJF5Obm2rJaRERE5CBsGmRmzJiBL774AhEREfDw8ECbNm2QkZGBjRs32rJaRERE5CBkQghh60o8qMLCQvj5+aGgoAC+vr62rg4RERFJYMmf3zZftURERET0oBhkiIiIyGExyBAREZHDYpAhIiIih8UgQ0RERA6LQYaIiIgcFoMMEREROSwGGSIiInJYNjtryRLUe/kVFhbauCZEREQklfrntiX25HXoIFNUVAQACA8Pt3FNiIiIyFxFRUXw8/N7qGc49BEFpaWluHr1Knx8fCCTyR7qWYWFhQgPD0dWVpbLH3fAtijDtijDtijDtlBhO5RhW5SR0hZCCBQVFSE0NBRVqjzcLBeH7pGpUqUK6tata9Fn+vr6uvwfQjW2RRm2RRm2RRm2hQrboQzbooyptnjYnhg1TvYlIiIih8UgQ0RERA6LQeZfnp6eSElJgaenp62rYnNsizJsizJsizJsCxW2Qxm2RZnKbguHnuxLREREro09MkREROSwGGSIiIjIYTHIEBERkcNikCEiIiKHxSDzry+//BL169eHl5cX2rZti59++snWVbKoWbNmoX379vDx8UFQUBASEhJw9uxZrTJCCKSmpiI0NBTe3t544okn8Pvvv2uVuXfvHl577TUEBgaievXqGDBgAP7666/K/CgWNWvWLMhkMrzxxhuaa67UDn///TdeeOEFBAQEoFq1amjVqhWOHDmiue8qbVFSUoL/+7//Q/369eHt7Y0GDRpgxowZKC0t1ZRx1rbYt28f+vfvj9DQUMhkMmzdulXrvqU+961bt5CYmAg/Pz/4+fkhMTER+fn5Vv505jHWFvfv34dcLkfz5s1RvXp1hIaGYvjw4bh69arWM1yhLSoaN24cZDIZ5s+fr3W90tpCkFi3bp1wd3cXS5YsEadOnRKvv/66qF69urh8+bKtq2YxvXv3FsuXLxe//fabOHbsmOjbt6+oV6+e+N///qcpM3v2bOHj4yM2bdokTp48KZ577jlRp04dUVhYqCnz0ksvibCwMJGeni6OHj0qunbtKlq2bClKSkps8bEeysGDB0VkZKRo0aKFeP311zXXXaUdbt68KSIiIsTIkSOFQqEQFy9eFLt27RJ//vmnpoyrtMX7778vAgICxHfffScuXrwoNmzYIGrUqCHmz5+vKeOsbbFt2zbx9ttvi02bNgkAYsuWLVr3LfW5+/TpI5o1ayb2798v9u/fL5o1ayb69etXWR9TEmNtkZ+fL3r06CHWr18vzpw5Iw4cOCBiY2NF27ZttZ7hCm1R3pYtW0TLli1FaGio+OSTT7TuVVZbMMgIITp06CBeeuklrWsxMTFiypQpNqqR9eXm5goAIiMjQwghRGlpqQgJCRGzZ8/WlPnnn3+En5+fWLhwoRBC9Y3s7u4u1q1bpynz999/iypVqogdO3ZU7gd4SEVFReKRRx4R6enpIi4uThNkXKkd5HK56NKli8H7rtQWffv2FaNHj9a6NnDgQPHCCy8IIVynLSr+wLLU5z516pQAIDIzMzVlDhw4IACIM2fOWPlTPRhjP7zVDh48KABo/tHram3x119/ibCwMPHbb7+JiIgIrSBTmW3h8kNLxcXFOHLkCHr16qV1vVevXti/f7+NamV9BQUFAIBatWoBAC5evIicnBytdvD09ERcXJymHY4cOYL79+9rlQkNDUWzZs0crq1effVV9O3bFz169NC67krtkJaWhnbt2uHZZ59FUFAQWrdujSVLlmjuu1JbdOnSBbt378a5c+cAAMePH8fPP/+MJ598EoBrtUV5lvrcBw4cgJ+fH2JjYzVlOnbsCD8/P4dtG0D196hMJoO/vz8A12qL0tJSJCYm4q233kLTpk117ldmWzj0oZGWkJeXB6VSieDgYK3rwcHByMnJsVGtrEsIgeTkZHTp0gXNmjUDAM1n1dcOly9f1pTx8PBAzZo1dco4UlutW7cOR48exaFDh3TuuVI7XLhwAQsWLEBycjKmTZuGgwcPYsKECfD09MTw4cNdqi3kcjkKCgoQExMDNzc3KJVKfPDBB3j++ecBuNafi/Is9blzcnIQFBSk8/ygoCCHbZt//vkHU6ZMwdChQzUHI7pSW8yZMwdVq1bFhAkT9N6vzLZw+SCjJpPJtF4LIXSuOYvx48fjxIkT+Pnnn3XuPUg7OFJbZWVl4fXXX8fOnTvh5eVlsJyztwOg+hdVu3btMHPmTABA69at8fvvv2PBggUYPny4ppwrtMX69euxevVqfP3112jatCmOHTuGN954A6GhoRgxYoSmnCu0hT6W+Nz6yjtq29y/fx9DhgxBaWkpvvzyS5Plna0tjhw5gk8//RRHjx41u87WaAuXH1oKDAyEm5ubTvrLzc3V+VeIM3jttdeQlpaGvXv3om7duprrISEhAGC0HUJCQlBcXIxbt24ZLGPvjhw5gtzcXLRt2xZVq1ZF1apVkZGRgc8++wxVq1bVfA5nbwcAqFOnDpo0aaJ1rXHjxrhy5QoA1/kzAQBvvfUWpkyZgiFDhqB58+ZITEzExIkTMWvWLACu1RblWepzh4SE4Nq1azrPv379usO1zf379zF48GBcvHgR6enpmt4YwHXa4qeffkJ
ubi7q1aun+Xv08uXLmDRpEiIjIwFUblu4fJDx8PBA27ZtkZ6ernU9PT0djz76qI1qZXlCCIwfPx6bN2/Gnj17UL9+fa379evXR0hIiFY7FBcXIyMjQ9MObdu2hbu7u1aZ7Oxs/Pbbbw7TVt27d8fJkydx7Ngxza927dph2LBhOHbsGBo0aOAS7QAAnTt31lmCf+7cOURERABwnT8TAHDnzh1UqaL916Gbm5tm+bUrtUV5lvrcnTp1QkFBAQ4ePKgpo1AoUFBQ4FBtow4xf/zxB3bt2oWAgACt+67SFomJiThx4oTW36OhoaF466238MMPPwCo5LaQPC3YiamXXy9btkycOnVKvPHGG6J69eri0qVLtq6axbz88svCz89P/PjjjyI7O1vz686dO5oys2fPFn5+fmLz5s3i5MmT4vnnn9e7zLJu3bpi165d4ujRo6Jbt252v7zUlPKrloRwnXY4ePCgqFq1qvjggw/EH3/8IdasWSOqVasmVq9erSnjKm0xYsQIERYWpll+vXnzZhEYGCgmT56sKeOsbVFUVCR+/fVX8euvvwoAYt68eeLXX3/VrMSx1Ofu06ePaNGihThw4IA4cOCAaN68ud0tOTbWFvfv3xcDBgwQdevWFceOHdP6e/TevXuaZ7hCW+hTcdWSEJXXFgwy//riiy9ERESE8PDwEG3atNEsS3YWAPT+Wr58uaZMaWmpSElJESEhIcLT01M8/vjj4uTJk1rPuXv3rhg/fryoVauW8Pb2Fv369RNXrlyp5E9jWRWDjCu1w7fffiuaNWsmPD09RUxMjFi8eLHWfVdpi8LCQvH666+LevXqCS8vL9GgQQPx9ttva/2Acta22Lt3r96/G0aMGCGEsNznvnHjhhg2bJjw8fERPj4+YtiwYeLWrVuV9CmlMdYWFy9eNPj36N69ezXPcIW20EdfkKmstpAJIYT0/hsiIiIi++Hyc2SIiIjIcTHIEBERkcNikCEiIiKHxSBDREREDotBhoiIiBwWgwwRERE5LAYZIiIiclgMMkTkMCIjIzF//nxbV4OI7AiDDJEDGTlyJBISEmxdDbNUZvhITU1Fq1atKuVrEZF9YJAhIiIih8UgQ+TAnnjiCUyYMAGTJ09GrVq1EBISgtTUVK0yqampqFevHjw9PREaGooJEyZo7kVGRuK9997D0KFDUaNGDYSGhuLzzz/Xen9BQQFefPFFBAUFwdfXF926dcPx48e1yqSlpaFdu3bw8vJCYGAgBg4cqKnf5cuXMXHiRMhkMshkMs179u/fj8cffxze3t4IDw/HhAkTcPv2bc393Nxc9O/fH97e3qhfvz7WrFljdvuoe7BmzpyJ4OBg+Pv7491330VJSQneeust1KpVC3Xr1sVXX32l9T65XI7o6GhUq1YNDRo0wPTp03H//n2tMu+//z6CgoLg4+ODMWPGYMqUKTq9QcuXL0fjxo3h5eWFmJgYfPnll5p7xcXFGD9+POrUqQMvLy9ERkZi1qxZZn9GIlfHIEPk4P7zn/+gevXqUCgUmDt3LmbMmIH09HQAwMaNG/HJJ59g0aJF+OOPP7B161Y0b95c6/0ffvghWrRogaNHj2Lq1KmYOHGi5v1CCPTt2xc5OTnYtm0bjhw5gjZt2qB79+64efMmAOD777/HwIED0bdvX/z666/YvXs32rVrBwDYvHkz6tatixkzZiA7OxvZ2dkAgJMnT6J3794YOHAgTpw4gfXr1+Pnn3/G+PHjNfUaOXIkLl26hD179mDjxo348ssvkZuba3b77NmzB1evXsW+ffswb948pKamol+/fqhZsyYUCgVeeuklvPTSS8jKytK8x8fHBytWrMCpU6fw6aefYsmSJfjkk08099esWYMPPvgAc+bMwZEjR1CvXj0sWLBA6+suWbIEb7/9Nj744AOcPn0aM2fOxPTp0/Gf//wHAPDZZ58hLS0N33zzDc6ePYvVq1cjMjLS7M9H5PLMOmKSiGxqxIgR4qmnntK8jouLE126dNEq0759eyGXy4UQQnz88cciOjpaFBcX631eRESE6NOnj9a15557TsTHxwshhNi9e7fw9fUV//zzj1aZqKgosWjRIiGEEJ06dRLDhg0zWGd9p+ImJiaKF198UevaTz/9JKpUqSLu3r0rzp49KwCIzMxMzf3Tp08LADrPKi8lJUW0bNlS83rEiBEiIiJCKJVKzbVGjRqJxx57TPO6pKREVK9eXaxdu9bgc+fOnSvatm2reR0bGyteffVVrTKdO3fW+trh4eHi66+/1irz3nvviU6dOgkhhHjttddEt27dRGlpqcGvS0SmsUeGyMG1aNFC63WdOnU0PRfPPvss7t69iwYNGmDs2LHYsmULSkpKtMp36tRJ5/Xp06cBAEeOHMH//vc/BAQEoEaNGppfFy9exPnz5wEAx44dQ/fu3c2q85EjR7BixQqtZ/bu3RulpaW4ePEiTp8+japVq2p6dgAgJiYG/v7+Zn0dAGjatCmqVCn7qy44OFirV8rNzQ0BAQFavT0bN25Ely5dEBISgho1amD69Om4cuWK5v7Zs2fRoUMHra9T/vX169eRlZWFpKQkrc/4/vvva9pt5MiROHbsGBo1aoQJEyZg586dZn82IgKq2roCRPRw3N3dtV7LZDKUlpYCAMLDw3H27Fmkp6dj165deOWVV/Dhhx8iIyND530VnwEApaWlqFOnDn788UedMupQ4e3tbXadS0tLMW7cOK35Omr16tXD2bNnterxMPS1j7E2y8zMxJAhQ/Duu++id+/e8PPzw7p16/Dxxx/rvKc8IYTm/9XPWrJkCWJjY7XKubm5AQDatGmDixcvYvv27di1axcGDx6MHj16YOPGjQ/xaYlcD4MMkZPz9vbGgAEDMGDAALz66quIiYnByZMn0aZNGwCqH9zlZWZmIiYmBoDqh21OTg6qVq1qcP5GixYtsHv3bowaNUrvfQ8PDyiVSq1rbdq0we+//46GDRvqfU/jxo1RUlKCw4cPa3o6zp49i/z8fKkf+4H98ssviIiIwNtvv625dvnyZa0yjRo1wsGDB5GYmKi5dvjwYc3/BwcHIywsDBcuXMCwYcMMfi1fX18899xzeO655zBo0CD06dMHN2/eRK1atSz4iYicG4MMkRNbsWIFlEolYmNjUa1aNaxatQre3t6IiIjQlPnll18wd+5cJCQkID09HRs2bMD3338PAOjRowc6deqEhIQEzJkzB40aNcLVq1exbds2JCQkoF27dkhJSUH37t0RFRWFIUOGoKSkBNu3b8fkyZMBqFZG7du3D0OGDIGnpycCAwMhl8vRsWNHvPrqqxg7diyqV6+O06dPIz09HZ9//jkaNWqEPn36YOzYsVi8eDGqVq2KN95444F6f8zVsGFDXLlyBevWrUP79u3x/fffY8uWLVplXnvtNYwdOxbt2rXDo48+ivXr1+PEiRNo0KCBpkxqaiomTJgAX19fxMfH4969ezh8+DBu3bqF5ORkfPLJJ6hTpw5atWqFKlWqYMOGDQgJCXmg4TMiV8Y5MkROzN/fH0uWLEHnzp
01PSfffvstAgICNGUmTZqEI0eOoHXr1njvvffw8ccfo3fv3gBUwyfbtm3D448/jtGjRyM6OhpDhgzBpUuXEBwcDEC1xHrDhg1IS0tDq1at0K1bNygUCs3zZ8yYgUuXLiEqKgq1a9cGoOrFycjIwB9//IHHHnsMrVu3xvTp01GnTh3N+5YvX47w8HDExcVh4MCBmiXg1vbUU09h4sSJGD9+PFq1aoX9+/dj+vTpWmWGDRuGqVOn4s0339QMEY0cORJeXl6aMmPGjMHSpUuxYsUKNG/eHHFxcVixYgXq168PAKhRowbmzJmDdu3aoX379rh06RK2bdumNZ+HiEyTifIDu0TkUiIjI/HGG2/gjTfesHVVHF7Pnj0REhKCVatW2boqRC6FQ0tERGa6c+cOFi5ciN69e8PNzQ1r167Frl27NPvvEFHlYZAhIjKTesjt/fffx71799CoUSNs2rQJPXr0sHXViFwOh5aIiIjIYXFWGRERETksBhkiIiJyWAwyRERE5LAYZIiIiMhhMcgQERGRw2KQISIiIofFIENEREQOi0GGiIiIHBaDDBERETms/w+6zUPIEozg7AAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjIAAAHFCAYAAADosxNlAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA1vUlEQVR4nO3deXxU1f3/8fdkDyEMYQuEACagAQoBJIIBFKyCUhZRqqiogNLKYguCVfCrBXEBQWxtlSKtQLVfBL8FXHFB2WRRWSWyxAUkAQIIZMFAFjLn9wePzI/JxgyZkBx9PR+PeTwy95577ueeBOftvefecRhjjAAAACwUUN0FAAAAXCyCDAAAsBZBBgAAWIsgAwAArEWQAQAA1iLIAAAAaxFkAACAtQgyAADAWgQZAABgLYIM4Cc7d+7U/fffr5YtWyo8PFzh4eG6/PLL9cADD2jLli2XrI6pU6fK4XB4LLvssss0fPjwKt3vxo0bNXXqVGVlZVXpfrw1fPhwORwO9ys0NFQJCQmaMmWK8vLyfO7P4XBo6tSp/i/0EuvVq5d69epV3WUAfhNU3QUAPwevvPKKHnzwQSUkJGjcuHH61a9+JYfDoT179uiNN97QVVddpe+++04tW7aslvqWL1+uOnXqVOk+Nm7cqCeffFLDhw9X3bp1q3Rf3goPD9eqVaskSZmZmXrjjTc0bdo07d27V0uWLPGpr02bNik2NrYqygRQCQQZoJI2bNigMWPGqF+/fvrvf/+rkJAQ97pf//rXGjt2rP7v//5P4eHhFfZz+vRp1apVq0pq7NSpU5X0W9MFBATo6quvdr/v27evfvjhB7355pt64YUX1LRpU6/7Or8fADUHl5aASnr22WcVGBioV155xSPEnO+2225TTEyM+/3w4cNVu3ZtpaSkqE+fPoqMjNT1118vSVq5cqVuvvlmxcbGKiwsTK1atdIDDzyg48ePl+r3/fffV8eOHRUaGqq4uDg9//zzZe6/rEtLOTk5evjhhxUXF6eQkBA1bdpU48ePV25urkc7h8OhBx98UK+//rratGmjWrVqqUOHDnrvvffcbaZOnao//elPkqS4uDj35Zw1a9aUWc9f//pXORwOfffdd6XWPfroowoJCXEf7/bt29W/f381atRIoaGhiomJUb9+/XTw4MEy+76Q4kBy4MABSVJaWpruvvtud/9t2rTR7Nmz5XK5So3D+ZeWTp8+7R6/sLAw1atXT0lJSXrjjTc8tnvnnXeUnJysWrVqKTIyUr1799amTZs82hRfDty1a5fuvPNOOZ1ORUdH67777lN2drZHW2OM5syZo44dOyo8PFxRUVH67W9/q3379pVqN3PmTLVo0UJhYWG68sor9cEHH1zUmAE1GWdkgEooKirS6tWrlZSUpCZNmvi0bUFBgQYOHKgHHnhAkyZN0tmzZyVJ33//vZKTkzVy5Eg5nU798MMPeuGFF9SjRw+lpKQoODhYkvTpp5/q5ptvVnJyshYvXqyioiLNnDlTR48eveC+T58+rZ49e+rgwYN67LHHlJiYqF27dunPf/6zUlJS9Mknn3jMs3n//fe1efNmTZs2TbVr19bMmTN1yy23KDU1VfHx8Ro5cqROnjypv//971q2bJl7LNq2bVvm/u+++249+uijWrhwoZ5++mmP8fzPf/6jAQMGqEGDBsrNzVXv3r0VFxenl19+WdHR0Tpy5IhWr16tU6dO+TTexYrDU8OGDfXjjz+qW7duKigo0FNPPaXLLrtM7733nh5++GF9//33mjNnTrn9TJgwQa+//rqefvppderUSbm5ufr666914sQJd5tFixZp6NCh6tOnj9544w3l5+dr5syZ6tWrlz799FP16NHDo8/BgwdryJAhuv/++5WSkqLJkydLkubPn+9u88ADD2jhwoX64x//qOeee04nT57UtGnT1K1bN3311VeKjo6WJD355JN68skndf/99+u3v/2t0tPT9bvf/U5FRUVKSEi4qLEDaiQD4KIdOXLESDJ33HFHqXVnz541hYWF7pfL5XKvGzZsmJFk5s+fX2H/LpfLFBYWmgMHDhhJ5u2333av69q1q4mJiTFnzpxxL8vJyTH16tUzJf9pt2jRwgwbNsz9fvr06SYgIMBs3rzZo91///tfI8msWLHCvUySiY6ONjk5OR7HHRAQYKZPn+5eNmvWLCPJ7N+/v8JjKnbrrbea2NhYU1RU5F62YsUKI8m8++67xhhjtmzZYiSZt956y6s+zzds2DATERHhHv8ff/zRvPjii8bhcJirrrrKGGPMpEmTjCTzxRdfeGw7evRo43A4TGpqqsc4TJkyxf2+Xbt2ZtCgQeXuv6ioyMTExJj27dt7HOOpU6dMo0aNTLdu3dzLpkyZYiSZmTNnevQxZswYExYW5v7b2bRpk5FkZs+e7dEuPT3dhIeHm0ceecQYY0xmZqYJCwszt9xyi0e7DRs2GEmmZ8+e5dYN2IZLS0AV6dy5s4KDg92v2bNnl2ozePDgUsuOHTumUaNGqVmzZgoKClJwcLBatGghSdqzZ48kKTc3V5s3b9att96qsLAw97aRkZEaMGDABWt777331K5dO3Xs2FFnz551v2688cYyLwldd911ioyMdL+Pjo5Wo0aN3JdnLsaIESN08OBBffLJJ+5lCxYsUOPGjdW3b19JUqtWrRQVFaVHH31Uc+fO1e7du33aR25urnv8GzZsqPHjx6tv375avny5JGnVqlVq27atunTp4rHd8OHDZYxxTxQuS5cuXfTBBx9o0qRJWrNmjc6cOeOxPjU1VYcPH9Y999yjgID//5/a2rVra/Dgwfr88891+vRpj20GDhzo8T4xMVF5eXk6duyYpHO/N4fDobvvvtvj99a4cWN16NDB/XvbtGmT8vLyNHToUI/+unXr5v5bAn4uuLQEVEKDBg0UHh5e5gf6okWLdPr0aWVkZJT6gJKkWrVqlbqTyOVyqU+fPjp8+LCeeOIJtW/fXhEREXK5XLr66qvdH5aZmZlyuVxq3LhxqX7LWlbS0aNH9d1337kvU5VUcj5O/fr1S7UJDQ0t9eHti759+6pJkyZasGCB+vTpo8zMTL3zzjsaN26cAgMDJUlOp1Nr167VM888o8cee0yZmZlq0qSJfve73+nxxx8vt/5i4eHhWrdunbveFi1aeIz5iRMndNlll5Xarng+0/mXiUr629/+ptjYWC1ZskTPPfecwsLCdOONN2rWrFm6/PLL3duWdckxJiZGLpdLmZmZHhO8S45zaGioJLnH+ejRozLGuC8flRQfH+9R98X+fQA2IcgAlRAYGKhf//rX+vjjj5WRkeHxoVU8P+SHH34oc9uSz3qRpK+//lpfffWVFi5cqGHDhrmXl5wUGxUVJYfDoSNHjpTqo6xlJRUHsPPnXpRcX9UCAwN1zz336G9/+5uysrK0aNEi5efna8SIER7t2rdvr
8WLF8sYo507d2rhwoWaNm2awsPDNWnSpAr3ERAQoKSkpHLX169fXxkZGaWWHz58WFLF4xAREeGeh3L06FH32ZkBAwZo79697lBSXv8BAQGKioqqsP6SGjRoIIfDoc8++8wdcs5XvKx43+X9fZQV3gBbcWkJqKTJkyerqKhIo0aNUmFhYaX6Kg43JT+kXnnlFY/3ERER6tKli5YtW+bxcLdTp07p3XffveB++vfvr++//17169dXUlJSqdfFfNCVPHvgjREjRigvL09vvPGGFi5cqOTkZLVu3brMtg6HQx06dNBf/vIX1a1bV9u2bfO5xpKuv/567d69u1Rfr732mhwOh6677jqv+omOjtbw4cN15513KjU1VadPn1ZCQoKaNm2qRYsWyRjjbpubm6ulS5e672TyRf/+/WWM0aFDh8r8vbVv317SuTuzwsLC9L//+78e22/cuLFSlwOBmogzMkAlde/eXS+//LL+8Ic/6Morr9Tvf/97/epXv1JAQIAyMjK0dOlSSfLqgXStW7dWy5YtNWnSJBljVK9ePb377rtauXJlqbZPPfWUbrrpJvXu3VsTJ05UUVGRnnvuOUVEROjkyZMV7mf8+PFaunSprr32Wj300ENKTEyUy+VSWlqaPv74Y02cOFFdu3b1aRyKP0RffPFFDRs2TMHBwUpISPCYW1PW8SYnJ2v69OlKT0/XvHnzPNa/9957mjNnjgYNGqT4+HgZY7Rs2TJlZWWpd+/ePtVXloceekivvfaa+vXrp2nTpqlFixZ6//33NWfOHI0ePVpXXHFFudt27dpV/fv3V2JioqKiorRnzx69/vrrHgFl5syZGjp0qPr3768HHnhA+fn5mjVrlrKysjRjxgyf6+3evbt+//vfa8SIEdqyZYuuvfZaRUREKCMjQ+vXr1f79u01evRoRUVF6eGHH9bTTz+tkSNH6rbbblN6erqmTp3KpSX8/FTjRGPgZ2XHjh1mxIgRJi4uzoSGhpqwsDDTqlUrc++995pPP/3Uo23xHTVl2b17t+ndu7eJjIw0UVFR5rbbbjNpaWml7poxxph33nnHJCYmmpCQENO8eXMzY8YM9x0w5yt515Ixxvz000/m8ccfNwkJCSYkJMQ4nU7Tvn1789BDD5kjR46420kyY8eOLVVnWX1OnjzZxMTEmICAACPJrF69uuJBM8bMmzfPSDLh4eEmOzvbY93evXvNnXfeaVq2bGnCw8ON0+k0Xbp0MQsXLrxgvxWN8fkOHDhg7rrrLlO/fn0THBxsEhISzKxZszzuNDKm9F1LkyZNMklJSSYqKsqEhoaa+Ph489BDD5njx497bPfWW2+Zrl27mrCwMBMREWGuv/56s2HDBo82xb+zH3/80WP5ggULyrwTbP78+aZr164mIiLChIeHm5YtW5p7773XbNmyxd3G5XKZ6dOnm2bNmpmQkBCTmJho3n33XdOzZ0/uWsLPisOY8855AgAAWIQ5MgAAwFoEGQAAYC2CDAAAsBZBBgAAWIsgAwAArEWQAQAA1rL6gXgul0uHDx9WZGRkmY97BwAANY8xRqdOnVJMTIzHl6peDKuDzOHDh9WsWbPqLgMAAFyE9PR0xcbGVqoPq4NM8aPP09PTvXr8OwAAqH45OTlq1qxZhV9h4i2rg0zx5aQ6deoQZAAAsIw/poUw2RcAAFiLIAMAAKxFkAEAANYiyAAAAGsRZAAAgLUIMgAAwFoEGQAAYC2CDAAAsBZBBgAAWIsgAwAArFWtQWbdunUaMGCAYmJi5HA49NZbb1VnOQAAwDLV+l1Lubm56tChg0aMGKHBgwdXZymlzPpor9amHlORyyjvrEv5hUVyGSkzt1CSUd2IELlcRhGhQercIkqn8gqVmVsgh8MhY4wOZp7RmcIihQcHyuGQjJHCggMlSTlnCvVT/llJUu3QIIUEB+jkTwVyyaiJM1yRYUE6mp2nM4Uu5Z0tUoCkOmHBkqT8IpcCHFKAw6GzLiNJOusyMi6joECHwoID5ZDObVtYpCJz4WMNCXSooMjIIalWSIAkh84UFMl1XpvQwHP7M0aqHRqo+pGhOnWmULXDgtWwdoiO5xbo1JlCNaoTpug6Yfrm6Cn9lFeoqIhQNawdIofDoVP5Z3X8VL6CAx1q7AxXwdlz4xPfsLYaO8N0MPOMDmed0ZUtotS8Xi19lZ6lDs3qashVzcutfXtapvYfz1VcgwhJcv/cqXmUV9sUt9uelqk1qcckSTF1wxUcGFBmP+e365XQqML9+MLX4yjrGLzpuzL1+tpPcfvCIle541kT+Gt8AFQPhzHGi4+6qudwOLR8+XINGjTI621ycnLkdDqVnZ3t1y+NvOqZlfrxVIHf+kPldGzm1Ftje5RaPuODPZq7dl+Z24zqGa9JfdtccJtRPeMlyat+ytpfefvxha/HUdYxlFeDL219qfFC/ZR3TP4YL3/y1/gA8I0/P7+tmiOTn5+vnJwcj5e/zfpoLyGmhtmRnq0lm9M8lm1Pyyz3w186F0y2p2VecJu5a/d51U95+ytrP77w9TjKO4ayavClra81VtRPRcdU2fHyJ3+ND4DqZVWQmT59upxOp/vVrFkzv+9j2wH+I1YTfZWe5fF+//HcC25Tso0325TXT0XbXmy/3m57fpvy2pe13Je23u7fH/1XZrz8yV/jA6B6WRVkJk+erOzsbPcrPT3d7/u4sgXXyGuiDs3qerwvnktSkZJtvNmmvH4q2vZi+/V22/PblNe+rOW+tPV2//7ovzLj5U/+Gh8A1cuqIBMaGqo6dep4vPztTze2VsPIEL/3i4vXqZmz1ITfTs2j3PNbyjK6Z3ypiZtlbTO6Z7xX/ZS3v7L24wtfj6O8YyirBl/a+lpjRf1UdEyVHS9/8tf4AKheTPYtB3ctcdcSdy1Vrh/uWgJQHn9+fldrkPnpp5/03XffSZI6deqkF154Qdddd53q1aun5s3L//AqVpVBBgAAVA1/fn5X63NktmzZouuuu879fsKECZKkYcOGaeHChdVUFQAAsEW1BplevXqphlzZAgAAFrJqsi8AAMD5CDIAAMBaBBkAAGAtggwAALAWQQYAAFiLIAMAAKxFkAEAANYiyAAAAGsRZAAAgLUIMgAAwFoEGQAAYC2CDAAAsBZBBgAAWIsgAwAArEWQAQAA1iLIAAAAaxFkAACAtQgyAADAWgQZAABgLYIMAACwFkEGAABYiyADAACsRZABAADWIsgAAABrEWQAAIC1CDIAAMBaBBkAAGAtggwAALAWQQYAAFiLIAMAAKxFkAEAANYiyAAAAGsRZAAAgLUIMgAAwFoEGQAAYC2CDAAAsBZBBgAAWIsgAwAArEWQAQAA1iLIAAAAaxFkAACAtQgyAADAWgQZAABgLYIMAACwFkEGAABYiyADAACsRZABAADWIsgAAABrEWQAAIC1CDIAAMBaBBkAAGAtggwAALAWQQYAAFiLIAMAAKxFkAEAANYiyAAAAGsRZAAAgLUIMgAAwFoEGQAAYC2CDAAAsBZBBgAAWIsgAwAArEWQAQAA1iLIAAAAaxFkAACAtQgy
AADAWgQZAABgLYIMAACwFkEGAABYiyADAACsRZABAADWIsgAAABrEWQAAIC1CDIAAMBaBBkAAGAtggwAALAWQQYAAFiLIAMAAKxFkAEAANYiyAAAAGsRZAAAgLUIMgAAwFoEGQAAYC2CDAAAsBZBBgAAWIsgAwAArEWQAQAA1iLIAAAAaxFkAACAtQgyAADAWgQZAABgLYIMAACwFkEGAABYiyADAACsRZABAADWIsgAAABrEWQAAIC1CDIAAMBaBBkAAGAtggwAALAWQQYAAFiLIAMAAKxFkAEAANYiyAAAAGsRZAAAgLUIMgAAwFoEGQAAYK2LDjLfffedPvroI505c0aSZIzxW1EAAADe8DnInDhxQjfccIOuuOIK/eY3v1FGRoYkaeTIkZo4caLfCwQAACiPz0HmoYceUlBQkNLS0lSrVi338iFDhujDDz/0a3EAAAAVCfJ1g48//lgfffSRYmNjPZZffvnlOnDggN8KAwAAuBCfz8jk5uZ6nIkpdvz4cYWGhvqlKAAAAG/4HGSuvfZavfbaa+73DodDLpdLs2bN0nXXXefX4gAAACri86WlWbNmqVevXtqyZYsKCgr0yCOPaNeuXTp58qQ2bNhQFTUCAACUyeczMm3bttXOnTvVpUsX9e7dW7m5ubr11lu1fft2tWzZsipqBAAAKJPDWPwAmJycHDmdTmVnZ6tOnTrVXQ4AAPCCPz+/fb60tHPnzjKXOxwOhYWFqXnz5kz6BQAAl4TPQaZjx45yOByS/v/TfIvfS1JwcLCGDBmiV155RWFhYX4qEwAAoDSf58gsX75cl19+uebNm6evvvpKO3bs0Lx585SQkKBFixbp1Vdf1apVq/T4449XRb0AAABuPp+ReeaZZ/Tiiy/qxhtvdC9LTExUbGysnnjiCX355ZeKiIjQxIkT9fzzz/u1WAAAgPP5fEYmJSVFLVq0KLW8RYsWSklJkXTu8lPxdzABAABUFZ+DTOvWrTVjxgwVFBS4lxUWFmrGjBlq3bq1JOnQoUOKjo72X5UAAABl8PnS0ssvv6yBAwcqNjZWiYmJcjgc2rlzp4qKivTee+9Jkvbt26cxY8b4vVgAAIDzXdRzZH766Sf95z//0TfffCNjjFq3bq277rpLkZGRVVFjuXiODAAA9qnW58hIUu3atTVq1KhK7RgAAKCyLirISNLu3buVlpbmMVdGkgYOHFjpogAAALzhc5DZt2+fbrnlFqWkpMjhcJR6KF5RUZF/KwQAACiHz3ctjRs3TnFxcTp69Khq1aqlXbt2ad26dUpKStKaNWuqoEQAAICy+XxGZtOmTVq1apUaNmyogIAABQQEqEePHpo+fbr++Mc/avv27VVRJwAAQCk+n5EpKipS7dq1JUkNGjTQ4cOHJZ17IF5qaqp/qwMAAKiAz2dk2rVrp507dyo+Pl5du3bVzJkzFRISonnz5ik+Pr4qagQAACiTz0Hm8ccfV25uriTp6aefVv/+/XXNNdeofv36WrJkid8LBAAAKM9FPRCvpJMnTyoqKsp959KlwgPxAACwT7U/EK+kevXq+aMbAAAAn/gcZPLy8vT3v/9dq1ev1rFjx+RyuTzWb9u2zW/FAQAAVMTnIHPfffdp5cqV+u1vf6suXbpc8stJAAAAxXwOMu+//75WrFih7t27V0U9AAAAXvP5OTJNmza95N9yDQAAUBafg8zs2bP16KOP6sCBA1VRDwAAgNd8vrSUlJSkvLw8xcfHq1atWgoODvZYf/LkSb8VBwAAUBGfg8ydd96pQ4cO6dlnn1V0dDSTfQEAQLXxOchs3LhRmzZtUocOHaqiHgAAAK/5PEemdevWOnPmTFXUAgAA4BOfg8yMGTM0ceJErVmzRidOnFBOTo7HCwAA4FLx+buWAgLOZZ+Sc2OMMXI4HCoqKvJfdRfAdy0BAGCfav2updWrV1dqhwAAAP7ic5Dp2bNnVdQBAADgM6+DzM6dO71ql5iYeNHFAAAA+MLrINOxY0c5HA5VNKXmUs+RAQAAv2xeB5n9+/dXZR0AAAA+8zrItGjRoirrAAAA8JnPz5EBAACoKQgyAADAWgQZAABgLYIMAACwFkEGAABYy6u7ljp16lTqu5XKs23btkoVBAAA4C2vgsygQYPcP+fl5WnOnDlq27atkpOTJUmff/65du3apTFjxlRJkQAAAGXxKshMmTLF/fPIkSP1xz/+UU899VSpNunp6f6tDgAAoAIOU9F3DpTB6XRqy5Ytuvzyyz2Wf/vtt0pKSlJ2drZfC6yIP78GHAAAXBr+/Pz2ebJveHi41q9fX2r5+vXrFRYWVqliAAAAfOH1VxQUGz9+vEaPHq2tW7fq6quvlnRujsz8+fP15z//2e8FAgAAlMfnIDNp0iTFx8frxRdf1KJFiyRJbdq00cKFC3X77bf7vUAAAIDy+DxHpiZhjgwAAPap1jkykpSVlaV//etfeuyxx3Ty5ElJ554fc+jQoUoVAwAA4AufLy3t3LlTN9xwg5xOp3744QeNHDlS9erV0/Lly3XgwAG99tprVVEnAABAKT6fkZkwYYKGDx+ub7/91uMupb59+2rdunV+LQ4AAKAiPgeZzZs364EHHii1vGnTpjpy5IhfigIAAPCGz0EmLCxMOTk5pZanpqaqYcOGfikKAADAGz4HmZtvvlnTpk1TYWGhJMnhcCgtLU2TJk3S4MGD/V4gAABAeXwOMs8//7x+/PFHNWrUSGfOnFHPnj3VqlUrRUZG6plnnqmKGgEAAMrk811LderU0fr167Vq1Spt27ZNLpdLV155pW644YaqqA8AAKBcPBAPAABcUv78/Pb5jIwkffrpp/r000917NgxuVwuj3Xz58+vVEEAAADe8jnIPPnkk5o2bZqSkpLUpEkTORyOqqgLAADggnwOMnPnztXChQt1zz33VEU9AAAAXvP5rqWCggJ169atKmoBAADwic9BZuTIkVq0aFFV1AIAAOATny8t5eXlad68efrkk0+UmJio4OBgj/UvvPCC34oDAACoyEV9+3XHjh0lSV9//bXHOib+AgCAS8nnILN69eqqqAMAAMBnPs+ROd/Bgwd16NAhf9UCAADgE5+DjMvl0rRp0+R0OtWiRQs1b95cdevW1VNPPVXq4XgAAABVyedLS//zP/+jV199VTNmzFD37t1ljNGGDRs0depU5eXl8cWRAADgkvH5u5ZiYmI0d+5cDRw40GP522+/rTFjxlzSS0181xIAAPbx5+e3z5eWTp48qdatW5da3rp1a508ebJSxQAAAPjC5yDToUMHvfTSS6WWv/TSS+rQoYNfigIAAPCGz3NkZs6cqX79+umTTz5RcnKyHA6HNm7cqPT0dK1YsaIqagQAACiTz2dkevbsqW+++Ua33HKLsrKydPLkSd16661KTU3VNddcUxU1AgAAlMnnyb41CZN9AQCwT7VO9v3www+1fv169/uXX35ZHTt21F133aXMzMxKFQMAAOALn4PMn/70J+Xk5EiSUlJSNGHCBP3mN7/Rvn37NGHCBL8XCAAAUB6fJ/vu379fbdu2lSQ
tXbpUAwYM0LPPPqtt27bpN7/5jd8LBAAAKI/PZ2RCQkJ0+vRpSdInn3yiPn36SJLq1avnPlMDAABwKfh8RqZHjx6aMGGCunfvri+//FJLliyRJH3zzTeKjY31e4EAAADl8fmMzEsvvaSgoCD997//1T/+8Q81bdpUkvTBBx/opptu8nuBAAAA5eH2awAAcEn58/Pbq0tLOTk57h1daB4MgQIAAFwqXgWZqKgoZWRkqFGjRqpbt64cDkepNsYYORwOFRUV+b1IAACAsngVZFatWqV69epJklavXl2lBQEAAHiLOTIAAOCSuuRzZErKysrSq6++qj179sjhcKht27a677775HQ6K1UMAACAL3y+/XrLli1q2bKl/vKXv+jkyZM6fvy4XnjhBbVs2VLbtm2rihoBAADK5POlpWuuuUatWrXSP//5TwUFnTuhc/bsWY0cOVL79u3TunXrqqTQsnBpCQAA+/jz89vnIBMeHq7t27erdevWHst3796tpKQk99cXXAoEGQAA7OPPz2+fLy3VqVNHaWlppZanp6crMjKyUsUAAAD4wucgM2TIEN1///1asmSJ0tPTdfDgQS1evFgjR47UnXfeWRU1AgAAlMnnu5aef/55ORwO3XvvvTp79qwkKTg4WKNHj9aMGTP8XiAAAEB5Lvo5MqdPn9b3338vY4xatWqlWrVq+bu2C2KODAAA9qmWOTKnT5/W2LFj1bRpUzVq1EgjR45UkyZNlJiYWC0hBgAAwOsgM2XKFC1cuFD9+vXTHXfcoZUrV2r06NFVWRsAAECFvJ4js2zZMr366qu64447JEl33323unfvrqKiIgUGBlZZgQAAAOXx+oxMenq6rrnmGvf7Ll26KCgoSIcPH66SwgAAAC7E6yBTVFSkkJAQj2VBQUHuO5cAAAAuNa8vLRljNHz4cIWGhrqX5eXladSoUYqIiHAvW7ZsmX8rBAAAKIfXQWbYsGGllt19991+LQYAAMAXXgeZBQsWVGUdAAAAPvP5KwoAAABqCoIMAACwFkEGAABYiyADAACsRZABAADWIsgAAABrEWQAAIC1CDIAAMBaBBkAAGAtggwAALAWQQYAAFiLIAMAAKxFkAEAANYiyAAAAGsRZAAAgLUIMgAAwFoEGQAAYC2CDAAAsBZBBgAAWIsgAwAArEWQAQAA1iLIAAAAaxFkAACAtQgyAADAWgQZAABgLYIMAACwFkEGAABYiyADAACsRZABAADWIsgAAABrEWQAAIC1CDIAAMBaBBkAAGAtggwAALAWQQYAAFiLIAMAAKxFkAEAANYiyAAAAGsRZAAAgLUIMgAAwFoEGQAAYC2CDAAAsBZBBgAAWIsgAwAArEWQAQAA1iLIAAAAaxFkAACAtQgyAADAWgQZAABgLYIMAACwFkEGAABYiyADAACsRZABAADWIsgAAABrEWQAAIC1CDIAAMBaBBkAAGAtggwAALAWQQYAAFiLIAMAAKxFkAEAANYiyAAAAGsRZAAAgLUIMgAAwFoEGQAAYC2CDAAAsBZBBgAAWIsgAwAArEWQAQAA1iLIAAAAaxFkAACAtQgyAADAWgQZAABgLYIMAACwFkEGAABYiyADAACsRZABAADWIsgAAABrEWQAAIC1CDIAAMBaBBkAAGAtggwAALAWQQYAAFiLIAMAAKxFkAEAANYiyAAAAGsRZAAAgLUIMgAAwFoEGQAAYC2CDAAAsBZBBgAAWIsgAwAArEWQAQAA1iLIAAAAaxFkAACAtQgyAADAWgQZAABgLYIMAACwFkEGAABYiyADAACsRZABAADWIsgAAABrEWQAAIC1CDIAAMBaBBkAAGAtggwAALAWQQYAAFiLIAMAAKxV7UFmzpw5iouLU1hYmDp37qzPPvusuksCAACWCKrOnS9ZskTjx4/XnDlz1L17d73yyivq27evdu/erebNm1dnaZr45g69vf2Qzhr/9BfokFxG8lN3VnJICg8OUFCAQzn5Re7loYEOSVKRMQoJDFB8w9pqGBmq4ECHXC6j+rVD3W2/Opitn/IK1ahOmFo3jlRGdp5SDmUrr7BIDSJClH/WpewzhQoOdCimbria1aulyLBgncorlMt1bvQDAhy6vk20hlzVXNvTMrX4yzRlni7Q9W2ilXbytNamHlPjOmFKaFJH3x49pahaIbqjS3N9c/SUvkrPUlREiIICztV81mWUmVugDs3qashVpf9mt6dlak3qMR3JzlNjZ5hi6obrcNYZHcnOkyQ1doapV0IjSdKa1GPadShbOXlnFdcgQnd0aa5OzaMkSUs2p+mr9CyP/WxPy9T+47mKaxChTs2jLvjeVyXHpni8Ktunr9sXj6Ek9UpodFH7rYq6/NGfv/frr7psVhOOsSbU4C82HIvDGFNtn61du3bVlVdeqX/84x/uZW3atNGgQYM0ffr0C26fk5Mjp9Op7Oxs1alTx291tXniA50pdPmtP9RMDSND9OOpAr/117GZU2+N7eF+P+ODPZq7dl+l+hzVM16f7zuhHenZHvu5Or6+R98dmzlLtTn//aie8ZrUt43X+y2r9pLjVdk+vdm+rDp83W9V1OWP/vy9X3/VZbOacIw1oQZ/qcpj8efnd7VdWiooKNDWrVvVp08fj+V9+vTRxo0by9wmPz9fOTk5Hi9/m/jmDkLML4Q/Q4wk7UjP1pLNaZLO/V9MZUOMJM1du88jkBTvp2TfZbUp2c/2tEyv9lle7SXHq7J9Xmj78urwZb9VUZc/+vP3fv1Vl81qwjHWhBr8xaZjqbYgc/z4cRUVFSk6OtpjeXR0tI4cOVLmNtOnT5fT6XS/mjVr5ve6vkrP8nuf+OUo/vvZfzy3egspg7c1+VJ7ZfusaPuLXeeLi6nLH/35e78Xcqn3Vx1qwjHWhBr8xaZjqfbJvg6Hw+O9MabUsmKTJ09Wdna2+5Wenu73ejo0q+v3PvHLUfz3E9cgonoLKYO3NflSe2X7rGj7i13ni4upyx/9+Xu/F3Kp91cdasIx1oQa/MWmY6m2INOgQQMFBgaWOvty7NixUmdpioWGhqpOnToeL3+bfXtHhQdXe77DJdAoMsSv/XVq5nRPxO3UPEqjesZXus/RPePVsZmz1H5K9l2yTcn3o3vGez1Rr7zaS45XZfu80Pbl1eHLfquiLn/05+/9+qsum9WEY6wJNfiLTcdS7ZN9O3furDlz5riXtW3bVjfffHO1TvaVuGupKnDXEnctcddS1e7XX3XZrCYcY02owV+q6lj8+fldrUFmyZIluueeezR37lwlJydr3rx5+uc//6ldu3apRYsWF9y+KoMMAACoGv78/K7W58gMGTJEJ06c0LRp05SRkaF27dppxYoVXoUYAACAaj0jU1mckQEAwD4/i+fIAAAAVBZBBgAAWIsgAwAArEWQAQAA1iLIAAAAaxFkAACAtQgyAADAWgQZAABgLYIMAACwVrV+RUFlFT+UOCcnp5orAQAA3ir+3PbHlwtYHWROnTolSWrWrFk1VwIAAHx16tQpOZ3OSvVh9XctuVwuHT58WJGRkXI4HNVdjjVycnLUrF
kzpaen8x1VlcA4Vh5j6B+Mo38wjv7hzTgaY3Tq1CnFxMQoIKBys1ysPiMTEBCg2NjY6i7DWnXq1OEfqx8wjpXHGPoH4+gfjKN/XGgcK3smphiTfQEAgLUIMgAAwFoEmV+g0NBQTZkyRaGhodVditUYx8pjDP2DcfQPxtE/LvU4Wj3ZFwAA/LJxRgYAAFiLIAMAAKxFkAEAANYiyAAAAGsRZH5h5syZo7i4OIWFhalz58767LPPqrukarVu3ToNGDBAMTExcjgceuuttzzWG2M0depUxcTEKDw8XL169dKuXbs82uTn5+sPf/iDGjRooIiICA0cOFAHDx70aJOZmal77rlHTqdTTqdT99xzj7Kysqr46C6N6dOn66qrrlJkZKQaNWqkQYMGKTU11aMN41ixf/zjH0pMTHQ/QCw5OVkffPCBez3jd3GmT58uh8Oh8ePHu5cxlhc2depUORwOj1fjxo3d62vcGBr8YixevNgEBwebf/7zn2b37t1m3LhxJiIiwhw4cKC6S6s2K1asMP/zP/9jli5daiSZ5cuXe6yfMWOGiYyMNEuXLjUpKSlmyJAhpkmTJiYnJ8fdZtSoUaZp06Zm5cqVZtu2bea6664zHTp0MGfPnnW3uemmm0y7du3Mxo0bzcaNG027du1M//79L9VhVqkbb7zRLFiwwHz99ddmx44dpl+/fqZ58+bmp59+crdhHCv2zjvvmPfff9+kpqaa1NRU89hjj5ng4GDz9ddfG2MYv4vx5Zdfmssuu8wkJiaacePGuZczlhc2ZcoU86tf/cpkZGS4X8eOHXOvr2ljSJD5BenSpYsZNWqUx7LWrVubSZMmVVNFNUvJIONyuUzjxo3NjBkz3Mvy8vKM0+k0c+fONcYYk5WVZYKDg83ixYvdbQ4dOmQCAgLMhx9+aIwxZvfu3UaS+fzzz91tNm3aZCSZvXv3VvFRXXrHjh0zkszatWuNMYzjxYqKijL/+te/GL+LcOrUKXP55ZeblStXmp49e7qDDGPpnSlTppgOHTqUua4mjiGXln4hCgoKtHXrVvXp08djeZ8+fbRx48Zqqqpm279/v44cOeIxZqGhoerZs6d7zLZu3arCwkKPNjExMWrXrp27zaZNm+R0OtW1a1d3m6uvvlpOp/NnOfbZ2dmSpHr16kliHH1VVFSkxYsXKzc3V8nJyYzfRRg7dqz69eunG264wWM5Y+m9b7/9VjExMYqLi9Mdd9yhffv2SaqZY2j1l0bCe8ePH1dRUZGio6M9lkdHR+vIkSPVVFXNVjwuZY3ZgQMH3G1CQkIUFRVVqk3x9keOHFGjRo1K9d+oUaOf3dgbYzRhwgT16NFD7dq1k8Q4eislJUXJycnKy8tT7dq1tXz5crVt29b9H3XGzzuLFy/Wtm3btHnz5lLr+Fv0TteuXfXaa6/piiuu0NGjR/X000+rW7du2rVrV40cQ4LML4zD4fB4b4wptQyeLmbMSrYpq/3PcewffPBB7dy5U+vXry+1jnGsWEJCgnbs2KGsrCwtXbpUw4YN09q1a93rGb8LS09P17hx4/Txxx8rLCys3HaMZcX69u3r/rl9+/ZKTk5Wy5Yt9e9//1tXX321pJo1hlxa+oVo0KCBAgMDSyXdY8eOlUrWOKd4ln5FY9a4cWMVFBQoMzOzwjZHjx4t1f+PP/74sxr7P/zhD3rnnXe0evVqxcbGupczjt4JCQlRq1atlJSUpOnTp6tDhw568cUXGT8fbN26VceOHVPnzp0VFBSkoKAgrV27Vn/7298UFBTkPk7G0jcRERFq3769vv322xr590iQ+YUICQlR586dtXLlSo/lK1euVLdu3aqpqpotLi5OjRs39hizgoICrV271j1mnTt3VnBwsEebjIwMff311+42ycnJys7O1pdffulu88UXXyg7O/tnMfbGGD344INatmyZVq1apbi4OI/1jOPFMcYoPz+f8fPB9ddfr5SUFO3YscP9SkpK0tChQ7Vjxw7Fx8czlhchPz9fe/bsUZMmTWrm36NPU4NhteLbr1999VWze/duM378eBMREWF++OGH6i6t2pw6dcps377dbN++3UgyL7zwgtm+fbv7lvQZM2YYp9Npli1bZlJSUsydd95Z5m2GsbGx5pNPPjHbtm0zv/71r8u8zTAxMdFs2rTJbNq0ybRv3/5nc6vm6NGjjdPpNGvWrPG4XfP06dPuNoxjxSZPnmzWrVtn9u/fb3bu3Gkee+wxExAQYD7++GNjDONXGefftWQMY+mNiRMnmjVr1ph9+/aZzz//3PTv399ERka6Pytq2hgSZH5hXn75ZdOiRQsTEhJirrzySvctsr9Uq1evNpJKvYYNG2aMOXer4ZQpU0zjxo1NaGioufbaa01KSopHH2fOnDEPPvigqVevngkPDzf9+/c3aWlpHm1OnDhhhg4daiIjI01kZKQZOnSoyczMvERHWbXKGj9JZsGCBe42jGPF7rvvPve/y4YNG5rrr7/eHWKMYfwqo2SQYSwvrPi5MMHBwSYmJsbceuutZteuXe71NW0MHcYY4+NZJgAAgBqBOTIAAMBaBBkAAGAtggwAALAWQQYAAFiLIAMAAKxFkAEAANYiyAAAAGsRZADUaMOHD9egQYPc73v16qXx48dXWz0AahaCDACfHDlyROPGjVOrVq0UFham6Oho9ejRQ3PnztXp06erfP/Lli3TU0895dc+S4YlAPYIqu4CANhj37596t69u+rWratnn31W7du319mzZ/XNN99o/vz5iomJ0cCBA0ttV1hYqODgYL/UUK9ePb/0A+DngTMyALw2ZswYBQUFacuWLbr99tvVpk0btW/fXoMHD9b777+vAQMGSJIcDofmzp2rm2++WREREXr66adVVFSk+++/X3FxcQoPD1dCQoJefPFFj/6Lioo0YcIE1a1bV/Xr19cjjzyikt+iUvLSUkFBgR555BE1bdpUERER6tq1q9asWeNev3DhQtWtW1cfffSR2rRpo9q1a+umm25SRkaGJGnq1Kn697//rbffflsOh0MOh8NjewA1G0EGgFdOnDihjz/+WGPHjlVERESZbRwOh/vnKVOm6Oabb1ZKSoruu+8+uVwuxcbG6s0339Tu3bv15z//WY899pjefPNN9zazZ8/W/Pnz9eqrr2r9+vU6efKkli9fXmFdI0aM0IYNG7R48WLt3LlTt912m2666SZ9++237janT5/W888/r9dff13r1q1TWlqaHn74YUnSww8/rNtvv90dbjIyMtStW7fKDBWAS4hLSwC88t1338kYo4SEBI/lDRo0UF5eniRp7Nixeu655yRJd911l+677z6Ptk8++aT757i4OG3cuFFvvvmmbr/9dknSX//6V02ePFmDBw+WJM2dO1cfffRRuTV9//33euONN3Tw4EHFxMRIOhdMPvzwQy1YsEDPPvuspHOXtubOnauWLVtKkh588EFNmzZNklS7dm2Fh4crPz9fjRs3vrjBAVBtCDIAfHL+WRdJ+vLLL+VyuTR06FDl5+e7lyclJZXadu7cufrXv/6lAwcO6MyZM
yooKFDHjh0lSdnZ2crIyFBycrK7fVBQkJKSkkpdXiq2bds2GWN0xRVXeCzPz89X/fr13e9r1arlDjGS1KRJEx07dsz7gwZQYxFkAHilVatWcjgc2rt3r8fy+Ph4SVJ4eLjH8pKXn95880099NBDmj17tpKTkxUZGalZs2bpiy++uOiaXC6XAgMDtXXrVgUGBnqsq127tvvnkhONHQ5HueEIgF2YIwPAK/Xr11fv3r310ksvKTc31+ftP/vsM3Xr1k1jxoxRp06d1KpVK33//ffu9U6nU02aNNHnn3/uXnb27Flt3bq13D47deqkoqIiHTt2TK1atfJ4+XKZKCQkREVFRT4fE4DqR5AB4LU5c+bo7NmzSkpK0pIlS7Rnzx6lpqbqP//5j/bu3VvqrMj5WrVqpS1btuijjz7SN998oyeeeEKbN2/2aDNu3DjNmDFDy5cv1969ezVmzBhlZWWV2+cVV1yhoUOH6t5779WyZcu0f/9+bd68Wc8995xWrFjh9XFddtll2rlzp1JTU3X8+HEVFhZ6vS2A6kWQAeC1li1bavv27brhhhs0efJkdejQQUlJSfr73/+uhx9+uMIH1Y0aNUq33nqrhgwZoq5du+rEiRMaM2aMR5uJEyfq3nvv1fDhw92Xn2655ZYKa1qwYIHuvfdeTZw4UQkJCRo4cKC++OILNWvWzOvj+t3vfqeEhAQlJSWpYcOG2rBhg9fbAqheDsOFYgAAYCnOyAAAAGsRZAAAgLUIMgAAwFoEGQAAYC2CDAAAsBZBBgAAWIsgAwAArEWQAQAA1iLIAAAAaxFkAACAtQgyAADAWgQZAABgrf8Hk+yYMsD6jWMAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Number of poisoned images: 300 out of 10000.\n", + "last index of poison 1367\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAj8AAAHFCAYAAAAdTZjVAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAABc9ElEQVR4nO3dd1QU19sH8O/Qlo4C0hQBBREVrNGgRrFgN0aNGjX2xNjFEpSYCCYGlKgxiS12zS+2xBKjBiUWNGIBe0FjQbEhiQUUlHrfP3x3wrqgu7gIuN/POXsOe+fOnWfuzs4+zMydkYQQAkRERER6wqCkAyAiIiJ6nZj8EBERkV5h8kNERER6hckPERER6RUmP0RERKRXmPwQERGRXmHyQ0RERHqFyQ8RERHpFSY/REREpFfKfPJz+vRpDBkyBFWrVoWZmRnMzMzg5eWFTz75BPHx8a8tjrCwMEiSpFLm7u6OgQMHFutyY2NjERYWhocPH+q87fXr16NmzZowMzODJEk4efJkgfX27dsHSZLkl6GhIRwdHdGjRw8kJCRovdyBAwfC3d391YIvBVauXAlJknDt2rWSDkWm6Tap/CxnzJihNk25Xq/z+5VfQECAyvZmZmaG2rVrY+7cucjLy9OqrWvXrkGSJKxcubJ4gn2NtPlsR40aVfwBlXLu7u7o1KlTSYcB4L996K+//lrkNkpqv/nw4UPY29tj3bp1ctnNmzcRFBSE5s2bo1y5clp/xzSdPzs7G1WrVsXcuXO1jrtMJz8//vgj6tevjyNHjmDs2LHYtm0btm/fjqCgIJw7dw5vvfUWrly5UmLxbd68GV988UWxLiM2NhbTpk3TefLzzz//oF+/fqhatSqioqJw6NAhVKtW7YXzhIeH49ChQ9i7dy8mTZqE6OhoNGnSBLdu3dJq2V988QU2b978KuGTjsyYMQP3798v6TDUVKlSBYcOHcKhQ4ewfv16VKxYEePGjUNISIhW7Tg7O+PQoUPo2LFjMUVK9GabNm0aXFxc0KtXL7ns8uXL+Pnnn2FiYoIOHTpo3aam8xsbG2Pq1Kn48ssvce/ePa2WYaR1VKXEwYMHMWLECHTs2BG//vorTExM5GktW7bEyJEj8csvv8DMzOyF7WRkZMDc3LxYYqxbt26xtPs6/P3338jOzsaHH36I5s2bazSPl5cX3n77bQBAs2bNUK5cOQwZMgQrV67ElClTNF521apVixQz6Vbr1q2xb98+fP3115g9e3ZJh6PCzMxM3tYAoH379qhevTrmzZuH6dOnw9jYWKN2FAqFSjtEpLn79+/jxx9/xLfffqty5qNZs2b4559/AADx8fFYu3atVu1qM3/v3r0xfvx4/Pjjj/jss880XkaZPfITHh4OQ0ND/PjjjyqJT349evSAi4uL/H7gwIGwtLTEmTNn0KZNG1hZWaFVq1YAgOjoaHTp0gWVKlWCqakpPD098cknn+Dff/9Va3f79u2oU6cOFAoFPDw8MGvWrAKXX9Bh6LS0NEycOBEeHh4wMTFBxYoVERQUhPT0dJV6ykPTP/30E3x8fGBubo7atWtj27Ztcp2wsDB8+umnAAAPDw/5NMC+ffte2Hdbt26Fv78/zM3NYWVlhcDAQBw6dEiln5o2bQoA6NWrFyRJQkBAwAvbLIjyR+X69esAgLy8PERGRqJ69epQKBRwcHBA//79cfPmTZX5Cjp8+8svv6BRo0awsbGBubk5qlSpgsGDB6vUSUpKwocffggHBwcoFAr4+Phg9uzZKqdClKc5Zs2ahTlz5sDDwwOWlpbw9/fH4cOH1dYhPj4e7777LmxtbWFqaoq6detiw4YNavUOHz6MJk2awNTUFC4uLggJCUF2drZG/RQfH48PPvgA7u7uMDMzg7u7O3r37i33m5LydNPevXsxfPhw2Nvbw87ODt26dcPt27dV6mZnZyM4OBhOTk4wNzdH06ZNcfToUY3iUfL29saQIUMwf/58tVgK8rLtCvjv9PC5c+fQu3dv2NjYwNHREYMHD0ZqaqpW8eVnbGyM+vXrIyMjQ95pnj17Fl26dEH58uVhamqKOnXqYNWqVSrzFXTa659//sHQoUPh6uoKhUKBChUqoEmTJvjzzz9V5l2+fDlq164NU1NT2NraomvXrmqneZX7nMuXL6NDhw6wtLSEq6srJkyYgMzMTJW6WVlZmD59uvz9qFChAgYNGiSvj5IuPtv8lKdc1qxZg0mTJsHZ2RmWlpbo3Lkz7t69i0ePHmHo0KGwt7eHvb09Bg0ahMePH6u0MX/+fDRr1gwODg6wsLCAr68vIiMj1b4DQgiEh4fDzc0NpqamaNCgAaKjoxEQEKC2j9F0X6nJvqGoNP1dUG7Xp0+fRo8ePWBjYwNbW1uMHz8eOTk5uHjxItq1awcrKyu4u7sjMjKywOU9ffoU48ePh5OTE8zMzNC8eXOcOHFCrd7KlSvh7e0t7+dWr15dYHvTpk1Do0aNYGtrC2tra9SrVw/Lli2Drp5nvnLlSuTk5Kgc9QEAA4NXSy20md/ExAS9evXC4sWLtVsvUQbl5OQIMzMz4e/vr9V8AwYMEMbGxsLd3V1ERESI3bt3i507dwohhFi4cKGIiIgQW7duFTExMWLVqlWidu3awtvbW2RlZclt/Pnnn8LQ0FA0bdpUbNq0Sfzyyy/irbfeEpUrVxbPd6ebm5sYMGCA/D49PV3UqVNH2Nvbizlz5og///xTfPfdd8LGxka0bNlS5OXlyXUBCHd3d9GwYUOxYcMGsWPHDhEQECCMjIzElStXhBBC3LhxQ4wePVoAEJs2bRKHDh0Shw4dEqmpqYX2wc8//ywAiDZt2ogtW7aI9evXi/r16wsTExNx4MABIYQQly9fFvPnzxcARHh4uDh06JA4d+5coW3u3btXABC//PKLSvlvv/0mAIjPPvtMCCHE0KFDBQAxatQoERUVJRYtWiQqVKggXF1dxT///KPyObm5ucnvY2NjhSRJ4oMPPhA7duwQe/bsEStWrBD9+vWT66SkpIiKFSuKChUqiEWLFomoqCgxatQoAUAMHz5crpeYmCj3bbt27cSWLVvEli1bhK+vryhfvrx4+PChXHfPnj3CxMREvPPOO2L9+vUiKipKDBw4UAAQK1askOudO3dOmJubixo1aoi1a9eK3377TbRt21beJhITEwvtOyGE+OWXX8TUqVPF5s2bRUxMjFi3bp1o3ry5qFChgkq/rFixQgAQVapUEaNHjxY7d+4US5cuFeXLlxctWrRQaXPAgAFCkiTx6aefil27dok5c+aIihUrCmtra
5VtsjAAxMiRI8WdO3eEubm5Sl8r44iLi5PLNNmuhBAiNDRUABDe3t5i6tSpIjo6WsyZM0coFAoxaNCgl8YlhBDNmzcXNWvWVCuvV6+eMDIyEhkZGeLChQvCyspKVK1aVaxevVps375d9O7dWwAQM2fOlOdRbg/5P8+2bduKChUqiMWLF4t9+/aJLVu2iKlTp4p169bJdcLDwwUA0bt3b7F9+3axevVqUaVKFWFjYyP+/vtvud6AAQOEiYmJ8PHxEbNmzRJ//vmnmDp1qpAkSUybNk2ul5ubK9q1aycsLCzEtGnTRHR0tFi6dKmoWLGiqFGjhsjIyFBpUxefrZLy++vm5iYGDhwofzctLS1FixYtRGBgoJg4caLYtWuXmDlzpjA0NBSjR49WaXPcuHFi4cKFIioqSuzZs0d8++23wt7eXu0zDQkJEQDE0KFDRVRUlFiyZImoXLmycHZ2Fs2bN5frabqv1GTfUBg3NzfRsWPHF9bR9Hch/3b91VdfiejoaBEcHCzv76pXry6+//57ER0dLQYNGiQAiI0bN6p9Bq6urqJLly7i999/F//73/+Ep6ensLa2lvf5Qvz3/Xu+nqurq8p+UwghBg4cKJYtWyaio6NFdHS0+Oqrr4SZmZnKtifEs+0vOzv7pa+cnByV+Vq2bCkaNmz4wj6Mi4tT+45pQ5P5169fLwCI06dPa9xumUx+kpOTBQDxwQcfqE3LyclR+bDyJxQDBgwQAMTy5ctf2H5eXp7Izs4W169fFwDEb7/9Jk9r1KiRcHFxEU+ePJHL0tLShK2t7UuTn4iICGFgYKDyoyGEEL/++qsAIHbs2CGXARCOjo4iLS1NZb0NDAxERESEXPbNN99o9AMrxLMN3MXFRfj6+orc3Fy5/NGjR8LBwUE0btxYLissoSmIsu769etFdna2yMjIEPv37xeenp7C0NBQnDp1SiQkJAgAYsSIESrzHjlyRCVBEkI9+Zk1a5YAoJKYPG/y5MkCgDhy5IhK+fDhw4UkSeLixYtCiP9+7Hx9fVW+yEePHhUAxNq1a+Wy6tWri7p164rs7GyVNjt16iScnZ3lPuzVq5cwMzMTycnJcp2cnBxRvXp1jT+b/HJycsTjx4+FhYWF+O677+Ry5U7v+T6MjIwUAMSdO3eEEELu63HjxqnUUyYo2v5ATpkyRRgYGIhTp06pxKHcjrXZrpQ/EpGRkSrLGzFihDA1NVX5vhZGmfwov+O3b9+WP/8ePXoIIYT44IMPhEKhEElJSSrztm/fXpibm8vbUkHJj6WlpQgKCip0+Q8ePBBmZmaiQ4cOKuVJSUlCoVCIPn36yGXKfc6GDRtU6nbo0EF4e3vL79euXav2gyjEfzv+BQsWCCF0/9kK8d/3t3Pnzir1goKCBAAxZswYlfL33ntP2NraFtq+8od09erVwtDQUNy/f18IIcT9+/eFQqEQvXr1Uql/6NAhAUAl+dF0X6nJvqEwmiQ/+b3od0G5Xc+ePVtlnjp16sj/nCplZ2eLChUqiG7dusllys+gXr16Kt+Ba9euCWNjY/HRRx8JIf77rhVW7/nkJz/l5/Lll18KOzu7An8bX/bK/xkJIYS5ubkYNmzYC/vtdSQ/ly5dEgDEwoULNW63zJ72Kkz9+vVhbGwsvwq6VqF79+5qZSkpKRg2bBhcXV1hZGQEY2NjuLm5AYB8KDs9PR1xcXHo1q0bTE1N5XmtrKzQuXPnl8a2bds21KpVC3Xq1EFOTo78atu2bYGnq1q0aAErKyv5vaOjIxwcHDQ6BVGQixcv4vbt2+jXr5/KYUVLS0t0794dhw8fRkZGRpHaBp6dIjM2Noa5uTmaNWuG3Nxc/Prrr/Dz88PevXsBQO00YMOGDeHj44Pdu3cX2u5bb70FAOjZsyc2bNhQ4AXUe/bsQY0aNdCwYUOV8oEDB0IIgT179qiUd+zYEYaGhvJ7Pz8/AP+dort8+TIuXLiAvn37AoDK59WhQwfcuXMHFy9eBADs3bsXrVq1gqOjo9yeoaGh2qHgwjx+/BiTJk2Cp6cnjIyMYGRkBEtLS6Snpxc4Wu7dd99Vef987Mq+Vsau1LNnTxgZaX+ZX3BwMGxtbTFp0qQCpxdluypoHZ4+fYqUlBQAz06R5u/z3Nxclfrnzp2Tv+MuLi6YPXs2+vbtiyVLlgB4tj20atUKrq6uKvMNHDgQGRkZaqfj8mvYsCFWrlyJ6dOn4/Dhw2qnbg4dOoQnT56obcuurq5o2bKl2rYsSZLa/sHPz0/le7xt2zaUK1cOnTt3VlnvOnXqwMnJSd436Pqzze/5kU8+Pj4AoHYxuI+PD+7fv69y6uvEiRN49913YWdnB0NDQxgbG6N///7Izc3F33//DeDZqeHMzEz07NlTpb23335b7TS3pvtKTfYNr0KT34X8CupDSZLQvn17uczIyAienp4F7sf79Omjcu2Mm5sbGjduLH/uyu9aYfWet2fPHrRu3Ro2Njby5zJ16lTcu3dP/q4Bz07bxcXFvfT1448/yvM8fPgQGRkZcHBweGk/FjdlDNp8/mXygmd7e3uYmZkVuPGsWbMGGRkZuHPnjtoOFgDMzc1hbW2tUpaXl4c2bdrg9u3b+OKLL+Dr6wsLCwvk5eXh7bffxpMnTwAADx48QF5eHpycnNTaLajseXfv3sXly5cLvRjz+fPIdnZ2anUUCoUcj7aUV8M7OzurTXNxcUFeXh4ePHhQ5AvAZ86ciZYtW8LQ0BD29vYqPzwvW/aLErpmzZphy5Yt+P7779G/f39kZmaiZs2amDJlCnr37i23X9AwT+U1X8+PBHi+bxUKBQDIfXv37l0AwMSJEzFx4sQC41J+Xvfu3SvyNgE82+Ht3r0bX3zxBd566y1YW1tDkiR06NChwM/6ZbEr1/X55RsZGRW4Tb2MtbU1Pv/8cwQFBck74fyKsl29bB0GDx6scn1O8+bNVf45qFq1KtatWwdJkmBqagoPDw+V9u/du1doPPljLsj69esxffp0LF26FF988QUsLS3RtWtXREZGwsnJ6aXrGx0drVJmbm6u8s+Scn2fPn0qv7979y4ePnxY6PWL+bc1QHefbX62trYq75WxFFb+9OlTWFpaIikpCe+88w68vb3x3Xffwd3dHaampjh69ChGjhyptl3m/ydB6fkyTfeVmuwbikrT34X8Cuqrgj5/ExMTpKWlqc1f2H7k1KlTAAr//JVl+W+rcfToUbRp0wYBAQFYsmQJKlWqBBMTE2zZsgVff/21SvyVK1dGpUqVXtAbz+RPuJTzP79uJUEZgza/jWUy+TE0NETLli2xa9cu3LlzR2UnVKNGDQAo9N4qz9+LB3h2YeSpU6ewcuVKDBgwQC6/fPmySr3y5ctDkiQkJyertVFQ2fOUSdvy5csLnV6clDvHO3fuqE27ffs2DAwMUL58+SK3X6VKFTRo0OCly37+S3b79u2XrnuXLl3QpUsXZGZm4vDhw4iIiECfPn3g7u4Of39/2NnZFbpegPZ9q6wfEhKCbt26FVjH29tbXreibhOpqanYtm0bQkNDMXnyZLk8MzOz
yEPMlX2dnJyMihUryuU5OTlaDwdVGj58OL777jtMmjQJw4cPL3B5utyuwsLCVO5Fk/8IKAD5YtnCvMr2YG9vj7lz52Lu3LlISkrC1q1bMXnyZKSkpCAqKuql61uU77Hy4vWoqKgCpyvXvzg+21e1ZcsWpKenY9OmTfJREQBq9wVTxq78xyK/5ORklX9etNlXvmzfUFSa/i7oUmH7EWXf5f/8XzbvunXrYGxsjG3btqkkKFu2bFGb9/l/NgqT/58QZSyl4VYYyhi0+e6V2dNeISEhyM3NxbBhwzQeVVMYZUKk/O9TKf8hPgCwsLBAw4YNsWnTJpX/2h49eoTff//9pcvp1KkTrly5Ajs7OzRo0EDtVZQbVD3/H/OLeHt7o2LFilizZo3KVfHp6enYuHGjPFKnOLRs2RIA8L///U+lPC4uDgkJCfKou5dRKBRo3rw5Zs6cCQDySIhWrVrh/PnzOH78uEr91atXQ5IktGjRQqt4vb294eXlhVOnThX4WTVo0ED+QWrRogV2796tslPPzc3F+vXrX7ocSZIghFDb9pYuXap2qkdTylEzP//8s0r5hg0bkJOTU6Q2TUxMMH36dMTFxeGXX35RmVYc25W7u7tKXysTTU21atUKe/bsURsFt3r1apibm2s8vL1y5coYNWoUAgMD5W3L398fZmZmatvyzZs35dNt2urUqRPu3buH3NzcArc15foXx2f7qgrafwoh5FOQSo0aNYJCoVD7Xhw+fFjtyG9R9pWF7Rt0uV6A+u+CLq1du1blO3T9+nXExsbKn7u3tzecnZ0LrZefJEkwMjJSOb3/5MkT/PTTT2rLLcppLxMTE1SpUqVE76WndPXqVQD/HfzQRJk88gMATZo0wfz58zF69GjUq1cPQ4cORc2aNWFgYIA7d+5g48aNAKB2iqsg1atXR9WqVTF58mQIIWBra4vff/9d7fA1AHz11Vdo164dAgMDMWHCBOTm5mLmzJmwsLB4aQYcFBSEjRs3olmzZhg3bhz8/PyQl5eHpKQk7Nq1CxMmTECjRo206gdfX18AwHfffYcBAwbA2NgY3t7eav8pA8+GD0ZGRqJv377o1KkTPvnkE2RmZuKbb77Bw4cPC7ybr654e3tj6NCh+OGHH2BgYID27dvj2rVr+OKLL+Dq6opx48YVOu/UqVNx8+ZNtGrVCpUqVcLDhw/x3XffwdjYWL4H0bhx47B69Wp07NgRX375Jdzc3LB9+3YsWLAAw4cPf+kNGgvy448/on379mjbti0GDhyIihUr4v79+0hISMDx48flJODzzz/H1q1b0bJlS0ydOhXm5uaYP3++2pDcglhbW6NZs2b45ptvYG9vD3d3d8TExGDZsmUoV66c1jEDz64z+PDDDzF37lwYGxujdevWOHv2LGbNmqXR96EwvXv3xqxZs/DHH3+olJfkdlWY0NBQbNu2DS1atMDUqVNha2uLn3/+Gdu3b0dkZCRsbGwKnC81NRUtWrRAnz59UL16dVhZWSEuLg5RUVHyEcBy5crhiy++wGeffYb+/fujd+/euHfvHqZNmwZTU1OEhoZqHe8HH3yAn3/+GR06dMDYsWPRsGFDGBsb4+bNm9i7dy+6dOmCrl27Fttn+yoCAwNhYmKC3r17Izg4GE+fPsXChQvx4MEDlXrKod8REREoX748unbtips3b2LatGlwdnZWuV5M032lJvuGF0lOTi7wrsru7u6oXbu2xr8LupKSkoKuXbvi448/RmpqKkJDQ2FqairfvNPAwABfffUVPvroI7new4cPERYWpnYqrGPHjpgzZw769OmDoUOH4t69e5g1a5ZaMqdc36L88x0QEKC2P1BS9qsyMYmPj4elpSUA4P3335frhYWFYdq0adi7d6/K7Q40nR94lkAbGhqiWbNmmgdfpMuvS5GTJ0+KQYMGCQ8PD6FQKISpqanw9PQU/fv3F7t371apO2DAAGFhYVFgO+fPnxeBgYHCyspKlC9fXvTo0UMkJSUJACI0NFSl7tatW4Wfn58wMTERlStXFjNmzJCv9s/v+dFeQgjx+PFj8fnnnwtvb29hYmIibGxshK+vrxg3bpzKaCE8NyLjRW2GhIQIFxcXYWBgIACIvXv3vrDPtmzZIho1aiRMTU2FhYWFaNWqlTh48KBKnaKM9npZ3dzcXDFz5kxRrVo1YWxsLOzt7cWHH34obty4oVLv+dFe27ZtE+3btxcVK1YUJiYmwsHBQXTo0EFlCLUQQly/fl306dNH2NnZCWNjY+Ht7S2++eYblRFIytE933zzjVp8BX3Wp06dEj179hQODg7C2NhYODk5iZYtW4pFixap1Dt48KB4++23hUKhEE5OTuLTTz8Vixcv1mi0182bN0X37t1F+fLlhZWVlWjXrp04e/as2mdd0BBzIf7r//yfe2ZmppgwYYJwcHAQpqam4u233xaHDh0qcPspSGHb365du+SRH8/Hocl2pfye5B/Cn3/dNBkZV9hQ9+edOXNGdO7cWdjY2AgTExNRu3ZttREjz4/2evr0qRg2bJjw8/MT1tbWwszMTHh7e4vQ0FCRnp6uMu/SpUvl/YCNjY3o0qWL2i0hCtvnFLS/yM7OFrNmzRK1a9cWpqamwtLSUlSvXl188skn4tKlS3I9XX+2hX1/C9veCvoMf//9dznuihUrik8//VT88ccfattlXl6emD59uqhUqZIwMTERfn5+Ytu2baJ27dqia9euKsvRZF+p6b6hIG5uboWOalL2o6a/C4Vt14V9/s9vw8rP4KeffhJjxowRFSpUEAqFQrzzzjsiPj5ebf6lS5cKLy8vYWJiIqpVqyaWL1+utt8UQojly5cLb29voVAoRJUqVURERIRYtmxZkUahFmT37t0CgDh69KjatML69vntfsKECUKSJJGQkFCk+YUQ4p133lEbrfgy0v8vhIiI6LVLTExE9erVERoaqtUdeql08PPzQ5MmTbBw4cIizd+wYUO4ubmpnU7X1JUrV+Dl5YWdO3ciMDBQ4/mY/BAR0Wtx6tQprF27Fo0bN4a1tTUuXryIyMhIpKWl4ezZswWOBKPSLSoqCl27dsWlS5c0GjGWX1paGipUqICTJ0/Kt1bQ1qBBg3Dz5k2tT0eW2Wt+iIiobLGwsEB8fDyWLVuGhw8fwsbGBgEBAfj666+Z+JRR7dq1wzfffIPExEStkx9ra2u1x7xoIycnB1WrVtX6gcYAj/wQERGRnimzQ92JiIiIioLJDxEREekVJj9ERESkV974C57z8vJw+/ZtWFlZFfhoCyIiIip9hBB49OgRXFxcVG6CqQtvfPJz+/ZttSc7ExERUdlw48YNrUeSvcwbn/woH/Nw48aNErv9OxEREWknLS0Nrq6uBT6u6VW98cmP8lSXtbU1kx8iIqIypjguWeEFz0RERKRXmPwQERGRXmHyQ0RERHrljb/mR1O5ubnIzs4u6TCIYGxsDENDw5IOg4jojaX3yY8QAsnJyXj48GFJh0IkK1euHJycnHhvKiKiYqD3yY8y8XFwcIC5uTl/bKhECSGQkZGBlJQUAICzs3M
JR0RE9ObR6+QnNzdXTnzs7OxKOhwiAICZmRkAICUlBQ4ODjwFRkSkY3p9wbPyGh9zc/MSjoRIlXKb5HVoRES6p9fJjxJPdVFpw22SiKj4MPkhIiIivcLk5w0mSRK2bNlS0mEQERGVKnp9wfOLuE/e/tqWdW1GR63nSU5Oxtdff43t27fj1q1bcHBwQJ06dRAUFIRWrVoVQ5RFFxAQgDp16mDu3LklHQoRERGTn7Lo2rVraNKkCcqVK4fIyEj4+fkhOzsbO3fuxMiRI3HhwoWSDpGIiKjU4mmvMmjEiBGQJAlHjx7F+++/j2rVqqFmzZoYP348Dh8+XOh8t27dQq9evVC+fHnY2dmhS5cuuHbtmjw9Li4OgYGBsLe3h42NDZo3b47jx4+rtCFJEpYuXYquXbvC3NwcXl5e2Lp1q1bxu7u7Y/r06ejfvz8sLS3h5uaG3377Df/88w+6dOkCS0tL+Pr6Ij4+Xp7n3r176N27NypVqgRzc3P4+vpi7dq1Ku0+evQIffv2hYWFBZydnfHtt98iICAAQUFBcp2srCwEBwejYsWKsLCwQKNGjbBv3z55+vXr19G5c2eUL18eFhYWqFmzJnbs2KHV+hERUenG5KeMuX//PqKiojBy5EhYWFioTS9XrlyB82VkZKBFixawtLTE/v378ddff8HS0hLt2rVDVlYWgGfJw4ABA3DgwAEcPnwYXl5e6NChAx49eqTS1rRp09CzZ0+cPn0aHTp0QN++fXH//n2t1uPbb79FkyZNcOLECXTs2BH9+vVD//798eGHH+L48ePw9PRE//79IYQAADx9+hT169fHtm3bcPbsWQwdOhT9+vXDkSNH5DbHjx+PgwcPYuvWrYiOjsaBAwfUkrdBgwbh4MGDWLduHU6fPo0ePXqgXbt2uHTpEgBg5MiRyMzMxP79+3HmzBnMnDkTlpaWWq0bERGVbkx+ypjLly9DCIHq1atrNd+6detgYGCApUuXwtfXFz4+PlixYgWSkpLkIx8tW7aEX0AnZFs5w8fHBz/++CMyMjIQExOj0tbAgQPRu3dveHp6Ijw8HOnp6Th69KhW8XTo0AGffPIJvLy8MHXqVDx69AhvvfUWevTogWrVqmHSpElISEjA3bt3AQAVK1bExIkTUadOHVSpUgWjR49G27Zt8csvvwB4lritWrUKs2bNQqtWrVCrVi2sWLECubm58jKvXLmCtWvX4pdffsE777yDqlWrYuLEiWjatClWrFgBAEhKSkKTJk3g6+uLKlWqoFOnTmjWrJlW60ZERKUbr/kpY5RHQrS9D8yxY8dw+fJlWFlZqZQ/ffoUV65cAfDsjsJfhUzC0YMH8PDeP8jNzUVGRgaSkpJU5vHz85P/trCwgJWVlfw4Bk3lb8PR0REA4Ovrq1aWkpICJycn5ObmYsaMGVi/fj1u3bqFzMxMZGZmyke/rl69iuzsbDRs2FBuw8bGBt7e3vL748ePQwiBatWqqcSSmZkp3+F7zJgxGD58OHbt2oXWrVuje/fuKrESEVHZx+SnjPHy8oIkSUhISMB7772n8Xx5eXmoX78+fv75Z7VpFSpUAPDsiE7SrWQEh4UjoH5NKBQK+Pv7y6fFlIyNjVXeS5KEvLw8rdYjfxvKRK6gMmW7s2fPxrfffou5c+fC19cXFhYWCAoKkmMrLClUlivbMjQ0xLFjx9QeGaE8tfXRRx+hbdu22L59O3bt2oWIiAjMnj0bo0eP1mr9iIio9OJprzLG1tYWbdu2xfz585Genq42vbCn09erVw+XLl2Cg4MDPD09VV42NjYAgAMHDqD34KF4p2Ub1Kz5LPn5999/i3N1NHbgwAF06dIFH374IWrXro0qVarI1+kAQNWqVWFsbKxy+i0tLU2lTt26dZGbm4uUlBS1PnBycpLrubq6YtiwYdi0aRMmTJiAJUuWvJ6VJCKi14LJTxm0YMEC5ObmomHDhti4cSMuXbqEhIQEfP/99/D39y9wnr59+8Le3h5dunTBgQMHkJiYiJiYGIwdOxY3b94EAHh6emLbxg24eukijhw5gr59+8oP2Sxpnp6eiI6ORmxsLBISEvDJJ58gOTlZnm5lZYUBAwbg008/xd69e3Hu3DkMHjwYBgYG8tGgatWqoW/fvujfvz82bdqExMRExMXFYebMmfKIrqCgIOzcuROJiYk4fvw49uzZAx8fnxJZZyIiKh5MfsogDw8PHD9+HC1atMCECRNQq1YtBAYGYvfu3Vi4cGGB85ibm2P//v2oXLkyunXrBh8fHwwePBhPnjyBtbU1AGD58uV4lPoQvdo3R79+/TBmzBg4ODi8zlUr1BdffIF69eqhbdu2CAgIgJOTk9ppvzlz5sDf3x+dOnVC69at0aRJE/j4+MDU1FSus2LFCvTv3x8TJkyAt7c33n33XRw5cgSurq4AgNzcXIwcORI+Pj5o164dvL29sWDBgte5qkREVMwkkf+iiDdQWloabGxskJqaKv/IKz19+hSJiYnw8PBQ+YHUZ6dvPgQA+FUqV6Jx6EJ6ejoqVqyI2bNnY8iQISUdjla4bRKRvnvR7/er4gXP9MY4ceIELly4gIYNGyI1NRVffvklAKBLly4lHBkREZUmTH7ojTJr1ixcvHgRJiYmqF+/Pg4cOAB7e/uSDouIiEoRJj/0xqhbty6OHTtW0mEQEVEpxwueiYiISK8w+SEiIiK9wuSHiIiI9AqTHyIiItIrTH6IiIhIrzD5ISIiIr3C5EcP7du3D5IkFfoQ1NIiLCwMderUKekwiIjoDcP7/BRmb8TrW1aLEK2qDxw4EKtWrQIAGBkZwdXVFd26dcO0adNgYWHx0vkbN26MO3fuyE9zL6uuXbsGDw8PnDhxgkkSERFpjMlPGdWuXTusWLEC2dnZOHDgAD766COkp6cX+mDT/ExMTODk5PQaoiQiIip9eNqrjFIoFHBycoKrqyv69OmDvn37YsuWLQCAzMxM+YnspqamaNq0KeLi4uR5nz/tdf36dXTu3Bnly5dHo2oV0bWVP3bs2CHXj4mJQcOGDaFQKODs7IzJkycjJydHnh4QEIAxY8YgODgYtra2cHJyQlhYmEq8qampGDp0KBwcHGBtbY2WLVvi1KlTKnVmzJgBR0dHWFlZYciQIXj69KlWfaJcr507d6Ju3bowMzNDy5YtkZKSgj/++AM+Pj6wtrZG7969kZGRIc8XFRWFpk2boly5crCzs0OnTp1w5coVlbZjY2NRp04dmJqaokGDBtiyZQskScLJkyflOufPn0eHDh1gaWkJR0dH9OvXD//++688/ddff4Wvry/MzMxgZ2eH1q1bIz09Xat1JCKiV8fk5w1hZmaG7OxsAEBwcDA2btyIVatW4fjx4/D09ETbtm1x//79AucdOXIkMjMzsX//fmyMPoigkFBYWloCAG7duoUOHTrgrbfewqlTp7Bw4UIsW7YM06dPV2lj1apVsLCwwJEjRxAZGYkvv/wS0dHRAAAhBDp27Ijk5GTs2LEDx44dQ7169dCqVSs5pg0bNiA0NBRff/014uPj4ezsjAULFhSpL8
LCwjBv3jzExsbixo0b6NmzJ+bOnYs1a9Zg+/btiI6Oxg8//CDXT09Px/jx4xEXF4fdu3fDwMAAXbt2RV5eHgDg0aNH6Ny5M3x9fXH8+HF89dVXmDRpksoy79y5g+bNm6NOnTqIj49HVFQU7t69i549e8rTe/fujcGDByMhIQH79u1Dt27dIIQo0joSEVHR8bTXG+Do0aNYs2YNWrVqJZ/6WrlyJdq3bw8AWLJkCaKjo7Fs2TJ8+umnavMnJSWhe/fu8PX1hbj5EJXc3OFXqRwAYMGCBXB1dcW8efMgSRKqV6+O27dvY9KkSZg6dSoMDJ7lz35+fggNDQUAeHl5Yd68edi9ezcCAwOxd+9enDlzBikpKVAoFACePYB0y5Yt+PXXXzF06FDMnTsXgwcPxkcffQQAmD59Ov7880+tj/4o523SpAkAYMiQIQgJCcGVK1dQpUoVAMD777+PvXv3yglM9+7dVeZftmwZHBwccP78edSqVQs///wzJEnCkiVLYGpqiho1auDWrVv4+OOP5XkWLlyIevXqITw8XC5bvnw5XF1d8ffff+Px48fIyclBt27d4ObmBgDw9fXVet2IiOjV8chPGbVt2zZYWlrC1NQU/v7+aNasGX744QdcuXIF2dnZ8o8/ABgbG6Nhw4ZISEgosK0xY8bICcOC2RH4O+GsPC0hIQH+/v6QJEkua9KkCR4/foybN2/KZX5+fiptOjs7IyUlBQBw7NgxPH78GHZ2drC0tJRfiYmJ8ukl5XLye/69pvLH4ujoCHNzcznxUZYpYwOAK1euoE+fPqhSpQqsra3h4eEB4FlSCAAXL16En58fTE1N5XkaNmyossxjx45h7969KutXvXp1uf3atWujVatW8PX1RY8ePbBkyRI8ePCgSOtHRESvhkd+yqgWLVpg4cKFMDY2houLC4yNjQE8O70CQCVZAZ6denq+TOmjjz5C27ZtsX37dvzy23Ysm/8t7syejdGjRxc4n/JUTf5y5fKVJEmSTxvl5eXB2dkZ+/btU1t2uXLlNF9pDeWPRZKkF8YGAJ07d4arqyuWLFkCFxcX5OXloVatWsjKygJQcN89f7oqLy8PnTt3xsyZM9XicXZ2hqGhIaKjoxEbG4tdu3bhhx9+wJQpU3DkyBE52SIioteDR37KKAsLC3h6esLNzU3lx93T0xMmJib466+/5LLs7GzEx8fDx8enwLZO33yIB5IVGnf6AN8u+Qn9h47EkiVLAAA1atRAbGysyo99bGwsrKysULFiRY1irVevHpKTk2FkZARPT0+Vl729PQDAx8cHhw8fVpnv+ffF4d69e0hISMDnn3+OVq1awcfHR+2ITPXq1XH69GlkZmbKZfHx8Sp16tWrh3PnzsHd3V1tHZW3H5AkCU2aNMG0adNw4sQJmJiYYPPmzcW+jkREpIrJzxvGwsICw4cPx6effoqoqCicP38eH3/8MTIyMjBkyJAC54kMC8HBfbtxM+k6Es6cwtGDB+REacSIEbhx4wZGjx6NCxcu4LfffkNoaCjGjx8vX+/zMq1bt4a/vz/ee+897Ny5E9euXUNsbCw+//xzOYkYO3Ysli9fjuXLl+Pvv/9GaGgozp07p5tOeYHy5cvDzs4OixcvxuXLl7Fnzx6MHz9epU6fPn2Ql5eHoUOHIiEhATt37sSsWbMA/Hf0a+TIkbh//z569+6No0eP4urVq9i1axcGDx6M3NxcHDlyBOHh4YiPj0dSUhI2bdqEf/75p9CElIiIig9Pe72BZsyYgby8PPTr1w+PHj1CgwYNsHPnTpQvX77A+rm5uYj4/FPcTb4NC0srNAlohQUL5gMAKlasiB07duDTTz9F7dq1YWtriyFDhuDzzz/XOB5JkrBjxw5MmTIFgwcPxj///AMnJyc0a9YMjo6OAIBevXrhypUrmDRpEp4+fYru3btj+PDh2Llz56t3yAsYGBhg3bp1GDNmDGrVqgVvb298//33CAgIkOtYW1vj999/x/Dhw1GnTh34+vpi6tSp6NOnj3wdkIuLCw4ePIhJkyahbdu2yMzMhJubG9q1awcDAwNYW1tj//79mDt3LtLS0uDm5obZs2fLF6UTEdHrI4k3fKxtWloabGxskJqaCmtra5VpT58+RWJiIjw8PFQuZtU3p28+VCtTjvaigv38888YNGgQUlNTYWZmpvP2uW0Skb570e/3q+KRHyINrF69GlWqVEHFihVx6tQpTJo0CT179iyWxIeIiIoXkx8iDSQnJ2Pq1KlITk6Gs7MzevToga+//rqkwyIioiJg8kOkgeDgYAQHB5d0GEREpAMc7UVERER6hckP1G9YR1TSuE0SERUfvU5+lDcHzP+Eb6LSQLlNPn93aiIienV6fc2PoaEhypUrJz/nydzcvNBHQLzJRE6WWllRHihKr04IgYyMDKSkpKBcuXIwNDQs6ZCIiN44ep38AICTkxMAqDzoUt+kPHiiVmbyhEO4S1K5cuXkbZOIiHRL75MfSZLg7OwMBwcHZGdnl3Q4JeKjTfvUynZPCHjtcdAzxsbGPOJDRFSM9D75UTI0NNTbH5xbj3LVyqqH7QYAXJvR8XWHQ0REVKz0+oJnIiIi0j8lmvyEhYVBkiSVV/7rHIQQCAsLg4uLC8zMzBAQEPBanvRNREREb64SP/JTs2ZN3LlzR36dOXNGnhYZGYk5c+Zg3rx5iIuLg5OTEwIDA/Ho0aMSjJiIiIjKshJPfoyMjODk5CS/KlSoAODZUZ+5c+diypQp6NatG2rVqoVVq1YhIyMDa9asKeGoiYiIqKwq8eTn0qVLcHFxgYeHBz744ANcvXoVAJCYmIjk5GS0adNGrqtQKNC8eXPExsYW2l5mZibS0tJUXkRERERKJTraq1GjRli9ejWqVauGu3fvYvr06WjcuDHOnTuH5ORkAICjo6PKPI6Ojrh+/XqhbUZERGDatGnFGrc+cp+8Xf6bI8CIiKgsK9EjP+3bt0f37t3h6+uL1q1bY/v2Zz+wq1atkus8f8dlIcQL78IcEhKC1NRU+XXjxo3iCZ6IiIjKpBI/7ZWfhYUFfH19cenSJXnUl/IIkFJKSora0aD8FAoFrK2tVV5ERERESqUq+cnMzERCQgKcnZ3h4eEBJycnREdHy9OzsrIQExODxo0bl2CUREREVJaV6DU/EydOROfOnVG5cmWkpKRg+vTpSEtLw4ABAyBJEoKCghAeHg4vLy94eXkhPDwc5ubm6NOnT0mGTURERGVYiSY/N2/eRO/evfHvv/+iQoUKePvtt3H48GG4ubkBAIKDg/HkyROMGDECDx48QKNGjbBr1y5YWVmVZNhERERUhpVo8rNu3boXTpckCWFhYQgLC3s9AREREdEbr1Rd80NERERU3Jj8EBERkV5h8kNERER6hckPERER6RUmP0RERKRXmPwQERGRXmHyQ0RERHqFyQ8RERHpFSY/REREpFeY/BAREZFeYfJDREREeoXJD2nNffJ2uE/eXtJhEBERFQmTHyIiItIrTH6IiIhIrzD5ISIiIr3C5IeIiIj0CpMfIiIi0itMfoiIi
[... base64-encoded PNG payload elided: figure rendered by the lava.compute_values_and_visualize cell below ...]",
+      "text/plain": [
+       "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "%matplotlib inline\n", + "calibrated_gradient = lava.compute_values_and_visualize(dual_sol, trained_with_flag1, training_size, portion, poisoned=300)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## mean, cov" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "DA = (X1, Y1)\n", + "DB = (X2, Y2)\n", + "XA = augmented_dataset(DA, self.Means[0], self.Covs[0], maxn=maxsamples)\n", + "XB = augmented_dataset(DB, self.Means[1], self.Covs[1], maxn=maxsamples)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "base", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.7" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/otdd/pytorch/datasets.py b/otdd/pytorch/datasets.py index 60dc053..e5a1921 100644 --- a/otdd/pytorch/datasets.py +++ b/otdd/pytorch/datasets.py @@ -25,7 +25,7 @@ from torchtext.data.utils import get_tokenizer from copy import deepcopy as dpcp -import h5py +#import h5py from .. import DATA_DIR @@ -610,7 +610,7 @@ def load_torchvision_data_shuffle(dataname, valid_size=0.1, splits=None, shuffle stratified=False, random_seed=None, batch_size = 64, resize=None, to3channels=False, maxsize = None, maxsize_test=None, num_workers = 0, transform=None, - data=None, datadir=None, download=True, filt=False, print_stats = False, shuffle_per=0): + data=None, datadir=None, download=True, filt=False, print_stats = False, shuffle_per=0, label_ratio=0.3): """ Load torchvision datasets. 
We return train and test for plots and post-training experiments @@ -717,7 +717,33 @@ def load_torchvision_data_shuffle(dataname, valid_size=0.1, splits=None, shuffle ######################## ------------------------- IST MNIST MNIST MNIST MNIS -------------------------- ########################## ######################## ------------------------- ST MNIST MNIST MNIST MNIST -------------------------- ########################## - + def filter_labels(dataset, labels): + mask = (dataset.targets == labels[0]) | (dataset.targets == labels[1]) + dataset.data = dataset.data[mask] + dataset.targets = dataset.targets[mask] + return dataset + def adjust_label_ratio(dataset, labels, ratio): + # Separate the data by labels + data_label_0 = dataset.data[dataset.targets == labels[0]] + data_label_1 = dataset.data[dataset.targets == labels[1]] + + # Adjust the amount of label 1 relative to label 0 + num_label_0 = len(data_label_0) + num_label_1 = min(len(data_label_1), int(num_label_0 * ratio)) + + # Combine the adjusted datasets + adjusted_data = torch.cat((data_label_0, data_label_1[:num_label_1])) + adjusted_targets = torch.cat((torch.full((num_label_0,), labels[0], dtype=torch.long), + torch.full((num_label_1,), labels[1], dtype=torch.long))) + + # Ensure the data and targets match the original dataset's data type and shape + dataset.data = adjusted_data + dataset.targets = adjusted_targets + return dataset + return dataset + train = filter_labels(train, [0, 1]) + test = filter_labels(test, [0, 1]) + train = adjust_label_ratio(train, [0, 1], label_ratio) ###### VALIDATION IS 0 SO NOT WORRY NOW ###### ### Data splitting fold_idxs = {} diff --git a/otdd/pytorch/distance_fast.py b/otdd/pytorch/distance_fast.py index 076e84d..47cc410 100644 --- a/otdd/pytorch/distance_fast.py +++ b/otdd/pytorch/distance_fast.py @@ -141,7 +141,7 @@ class DatasetDistance(): """ - def __init__(self, D1=None, D2=None, + def __init__(self, D1=None, D2=None, X1=None, X2=None, Y1=None, Y2=None, ## General Arguments method='precomputed_labeldist', symmetric_tasks=False, @@ -220,8 +220,8 @@ def __init__(self, D1=None, D2=None, self.Means = [None, None] self.Covs = [None, None] self.label_distances = None - self.X1, self.X2 = None, None - self.Y1, self.Y2 = None, None + self.X1, self.X2 = X1, X2 + self.Y1, self.Y2 = Y1, Y2 self._pwlabel_stats_1 = None self._pwlabel_stats_2 = None @@ -240,9 +240,9 @@ def __init__(self, D1=None, D2=None, if self.src_embedding is not None or self.tgt_embedding is not None: self.feature_cost = partial(FeatureCost, src_emb = self.src_embedding, - src_dim = (3,28,28), + src_dim = (1,32,32), tgt_emb = self.tgt_embedding, - tgt_dim = (3,28,28), + tgt_dim = (1,32,32), p = self.p, device=self.device) self.src_embedding = None @@ -1445,32 +1445,48 @@ def _batchify_computation(self, X, side='x', slices=20): return out.to(X.device) def __call__(self, X1, X2): - _orig_device = X1.device - device = process_device_arg(self.device) - #print("Device call: ", device) - #print("Self Device call: ", self.device) - if self.src_emb is not None: - B1, N1, D1 = self._get_batch_shape(X1) - try: - self.src_emb.to(device) - X1 = self.src_emb(X1.view(-1,*self.src_dim).to(self.device)).reshape(B1, N1, -1) - except: # Memory error? 
- print('Batchifying feature distance computation') - X1 = self._batchify_computation(X1.view(-1,*self.src_dim).to(self.device), 'x').reshape(B1, N1, -1) - if self.tgt_emb is not None: - B2, N2, D2 = self._get_batch_shape(X2) - try: - X2 = self.tgt_emb(X2.view(-1,*self.tgt_dim).to(self.device)).reshape(B2, N2, -1) - except: - print('Batchifying feature distance computation') - X2 = self._batchify_computation(X2.view(-1,*self.tgt_dim).to(self.device), 'y').reshape(B2, N2, -1) - if self.p == 1: - c = geomloss.utils.distances(X1, X2) - elif self.p == 2: - c = geomloss.utils.squared_distances(X1, X2) / 2 - else: - raise ValueError() - return c.to(_orig_device) + _orig_device = X1.device + device = process_device_arg(self.device) + #print("Device call: ", device) + #print("Self Device call: ", self.device) + if self.src_emb is not None: + B1, N1, D1 = self._get_batch_shape(X1) + print(B1, N1, D1) + try: + self.src_emb.to(device) + #X1 = self.src_emb(X1.view(-1,*self.src_dim).to(self.device)).reshape(B1, N1, -1) + X_test_1 = X1.view(-1, *self.src_dim) + X_test_rgb_1 = X_test_1.repeat(1, 3, 1, 1) + X_test_rgb_1 = X_test_rgb_1.to(device) + X_test_emb_1 = self.src_emb(X_test_rgb_1) + X1 = X_test_emb_1.reshape(B1, N1, -1) + print(X1.shape) + except: # Memory error? + print('Batchifying feature distance computation') + X1 = self._batchify_computation(X1.view(-1,*self.src_dim).to(self.device), 'x').reshape(B1, N1, -1) + if self.tgt_emb is not None: + B2, N2, D2 = self._get_batch_shape(X2) + print(B2, N2, D2) + try: + self.tgt_emb.to(device) + X_test_2 = X2.view(-1, *self.tgt_dim) + X_test_rgb_2 = X_test_2.repeat(1, 3, 1, 1) + X_test_rgb_2 = X_test_rgb_2.to(device) + X_test_emb_2 = self.tgt_emb(X_test_rgb_2) + X2 = X_test_emb_2.reshape(B2, N2, -1) + #X2 = self.tgt_emb(X2.view(-1,*self.tgt_dim).to(self.device)).reshape(B2, N2, -1) + print(X2.shape) + except: + print('Batchifying feature distance computation') + X2 = self._batchify_computation(X2.view(-1,*self.tgt_dim).to(self.device), 'y').reshape(B2, N2, -1) + if self.p == 1: + c = geomloss.utils.distances(X1, X2) + elif self.p == 2: + c = geomloss.utils.squared_distances(X1, X2) / 2 + else: + raise ValueError() + print(c.shape) + return c.to(_orig_device) def batch_jdot_cost(Z1, Z2, p=2, alpha=1.0, feature_cost=None): @@ -1524,9 +1540,11 @@ class mean vectors. ValueError: If neither W nor (Means, Covs) are provided. """ -# print("Z1 shape in batch: ", Z1.shape) + print("Z1 shape in batch: ", Z1.shape) + print("Z2 shape in batch: ", Z2.shape) B, N, D1 = Z1.shape B, M, D2 = Z2.shape + assert (D1 == D2) or (feature_cost is not None) Y1 = Z1[:, :, -1].long() @@ -1541,7 +1559,7 @@ class mean vectors. C1 = cost_routines[p](Z1[:, :, :-1], Z2[:, :, :-1]) # Get from GeomLoss else: C1 = feature_cost(Z1[:, :, :-1], Z2[:, :, :-1]) # Feature Embedding - + print(C1.shape) # Label Distances if λ_y is None or λ_y == 0: @@ -1553,7 +1571,10 @@ class mean vectors. ## Label-to-label distances have been precomputed and passed ## Stores flattened index corresponoding to label pairs M = W.shape[1] * Y1[:, :, None] + Y2[:, None, :] + print('Gia tri M:', M) + print(M.shape) C2 = W.flatten()[M.flatten(start_dim=1)].reshape(-1,Y1.shape[1], Y2.shape[1]) + print(C2.shape) elif Means is not None and Covs is not None: ## We need to compate label distances too dmeans = cost_routines[p](Means[0][Y1.squeeze()], Means[1][Y2.squeeze()]) @@ -1567,7 +1588,8 @@ class mean vectors. ## NOTE: geomloss's cost_routines as defined above already divide by p. We do ## so here too for consistency. 
But as a consequence, need to divide C2 by p too. D = λ_x * C1 + λ_y * (C2/p) - + print('gia tri D:', D) + print(D.shape) global first_dist global last_dist diff --git a/otdd/pytorch/utils.py b/otdd/pytorch/utils.py index c4c153b..a6e7df9 100644 --- a/otdd/pytorch/utils.py +++ b/otdd/pytorch/utils.py @@ -288,7 +288,7 @@ def load_full_dataset(data, targets=False, return_both_targets=False, keeps = np.isin(Y.cpu(), labels_keep) X = X[keeps,:] Y = Y[keeps] - + print('load full dataset:', Y.shape) if orig_idxs is not None: loader.sampler.indices = orig_idxs if targets is False: diff --git a/otdd/pytorch/wasserstein.py b/otdd/pytorch/wasserstein.py index e8effd7..c3ec453 100644 --- a/otdd/pytorch/wasserstein.py +++ b/otdd/pytorch/wasserstein.py @@ -293,7 +293,7 @@ def pwdist_exact(X1, Y1, X2=None, Y2=None, symmetric=False, loss='sinkhorn', c1 = torch.unique(Y1) c2 = torch.unique(Y2) n1, n2 = len(c1), len(c2) - + print(n1, n2) ## We account for the possibility that labels are shifted (c1[0]!=0), see below if symmetric: @@ -302,7 +302,7 @@ def pwdist_exact(X1, Y1, X2=None, Y2=None, symmetric=False, loss='sinkhorn', else: ## If tasks are assymetric, need n1 x n2 comparisons pairs = list(itertools.product(range(n1), range(n2))) - + print(pairs) if cost_function == 'euclidean': if p == 1: @@ -311,8 +311,9 @@ def pwdist_exact(X1, Y1, X2=None, Y2=None, symmetric=False, loss='sinkhorn', cost_function = lambda x, y: geomloss.utils.squared_distances(x, y) else: raise ValueError() - + print('cost function:') if loss == 'sinkhorn': + #print('cost function:',cost_function.shape) distance = geomloss.SamplesLoss( loss=loss, p=p, cost=cost_function, @@ -333,7 +334,10 @@ def distance(Xa, Xb): D = torch.zeros((n1, n2), device = device, dtype=X1.dtype) for i, j in pbar: try: + print(i,j) + print("huhu:", X1[Y1==c1[i]].shape, X2[Y2==c2[j]].shape) D[i, j] = distance(X1[Y1==c1[i]].to(device), X2[Y2==c2[j]].to(device)).item() + print('was:', D[i, j]) except: print("This is awkward. Distance computation failed. Geomloss is hard to debug" \ "But here's a few things that might be happening: "\ diff --git a/preact_resnet.py b/preact_resnet.py index abb1bc3..8c8b48b 100644 --- a/preact_resnet.py +++ b/preact_resnet.py @@ -63,7 +63,7 @@ def forward(self, x): class PreActResNet(nn.Module): - def __init__(self, block, num_blocks, num_classes=10): + def __init__(self, block, num_blocks, num_classes=100): super(PreActResNet, self).__init__() self.in_planes = 64
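For reference, a minimal standalone sketch of two of the changes introduced by the patch above: (1) the label filtering and class-ratio subsampling added to load_torchvision_data_shuffle in otdd/pytorch/datasets.py, and (2) the grayscale-to-RGB channel repeat used in FeatureCost.__call__ in otdd/pytorch/distance_fast.py so an RGB-pretrained embedder can consume single-channel 32x32 inputs. The helper names, toy tensors, and the 0.3 ratio below are illustrative stand-ins, not part of the patched library.

import torch

def filter_two_classes(data, targets, labels=(0, 1)):
    # Keep only the samples whose label is one of `labels`.
    mask = (targets == labels[0]) | (targets == labels[1])
    return data[mask], targets[mask]

def subsample_to_ratio(data, targets, labels=(0, 1), ratio=0.3):
    # Keep every sample of labels[0] and at most ratio * count(labels[0]) samples
    # of labels[1], mirroring adjust_label_ratio in the datasets.py hunk above.
    data_0 = data[targets == labels[0]]
    data_1 = data[targets == labels[1]]
    n0 = len(data_0)
    n1 = min(len(data_1), int(n0 * ratio))
    new_data = torch.cat((data_0, data_1[:n1]))
    new_targets = torch.cat((torch.full((n0,), labels[0], dtype=torch.long),
                             torch.full((n1,), labels[1], dtype=torch.long)))
    return new_data, new_targets

# Toy stand-in data (100 samples, 10 classes), just to exercise the helpers.
data = torch.randn(100, 1, 32, 32)
targets = torch.randint(0, 10, (100,))
data, targets = filter_two_classes(data, targets)
data, targets = subsample_to_ratio(data, targets, ratio=0.3)
print(torch.bincount(targets))  # counts for labels 0 and 1 after subsampling

# Grayscale-to-RGB adaptation as in the FeatureCost.__call__ rewrite: tile the
# single channel three times so an RGB-pretrained embedder accepts the batch.
x_rgb = data.repeat(1, 3, 1, 1)
print(x_rgb.shape)  # (N, 3, 32, 32)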