{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "3.6.7 | packaged by conda-forge | (default, Jul 2 2019, 02:18:42) \n", "[GCC 7.3.0]\n" ] } ], "source": [ "\n", "# Import Python libraries \n", "import json\n", "from collections import OrderedDict\n", "import numpy as np\n", "import matplotlib.pyplot as plt\n", "import os\n", "import sys\n", "from PIL import Image\n", "\n", "# Import PyTorch libraries\n", "import torch\n", "from torch import nn\n", "from torch import optim\n", "import torch.nn.functional as F\n", "from torchvision import datasets, transforms, models\n", "\n", "# Pretty display for Jupyter notebooks\n", "%matplotlib inline\n", "%config InlineBackend.figure_format = 'retina'\n", "\n", "# Print Python version for future reference\n", "print(sys.version)" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [], "source": [ "np.random.seed(42)" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [], "source": [ "# Stats needed to normalize ImageNet images\n", "means = [0.485, 0.456, 0.406]\n", "std_devs = [0.229, 0.224, 0.225]\n", "input_size = 128\n", "\n", "# Other transforms parameters \n", "down_size = 256\n", "rotation = 30\n", "\n", "# Determine batch size for DataLoaders\n", "_batch_size = 4" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [ { "ename": "FileNotFoundError", "evalue": "[Errno 2] No such file or directory: 'data/train/'", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mFileNotFoundError\u001b[0m Traceback (most recent call last)", "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 19\u001b[0m image_datasets = {\n\u001b[1;32m 20\u001b[0m 'train': datasets.ImageFolder('data/train/', \n\u001b[0;32m---> 21\u001b[0;31m 
transform=data_transforms['train']),\n\u001b[0m\u001b[1;32m 22\u001b[0m 'valid': datasets.ImageFolder('data/val/', \n\u001b[1;32m 23\u001b[0m transform=data_transforms['valid']),\n", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/torchvision/datasets/folder.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, root, transform, target_transform, loader, is_valid_file)\u001b[0m\n\u001b[1;32m 227\u001b[0m \u001b[0mtransform\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mtransform\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 228\u001b[0m \u001b[0mtarget_transform\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mtarget_transform\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 229\u001b[0;31m is_valid_file=is_valid_file)\n\u001b[0m\u001b[1;32m 230\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mimgs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msamples\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/torchvision/datasets/folder.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, root, loader, extensions, transform, target_transform, is_valid_file)\u001b[0m\n\u001b[1;32m 106\u001b[0m super(DatasetFolder, self).__init__(root, transform=transform,\n\u001b[1;32m 107\u001b[0m target_transform=target_transform)\n\u001b[0;32m--> 108\u001b[0;31m \u001b[0mclasses\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mclass_to_idx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_find_classes\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mroot\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 109\u001b[0m \u001b[0msamples\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mmake_dataset\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mroot\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mclass_to_idx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mextensions\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mis_valid_file\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 110\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msamples\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/torchvision/datasets/folder.py\u001b[0m in \u001b[0;36m_find_classes\u001b[0;34m(self, dir)\u001b[0m\n\u001b[1;32m 135\u001b[0m \u001b[0mNo\u001b[0m \u001b[0;32mclass\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0ma\u001b[0m \u001b[0msubdirectory\u001b[0m \u001b[0mof\u001b[0m \u001b[0manother\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 136\u001b[0m \"\"\"\n\u001b[0;32m--> 137\u001b[0;31m \u001b[0mclasses\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0md\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mname\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0md\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mos\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mscandir\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdir\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0md\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mis_dir\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 138\u001b[0m \u001b[0mclasses\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msort\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 139\u001b[0m \u001b[0mclass_to_idx\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0;34m{\u001b[0m\u001b[0mcls_name\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mi\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcls_name\u001b[0m \u001b[0;32min\u001b[0m \u001b[0menumerate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mclasses\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;31mFileNotFoundError\u001b[0m: [Errno 2] No such file or directory: 'data/train/'" ] } ], "source": [ "# Define transforms for the training, validation, and testing sets\n", "data_transforms = {\n", " 'train': transforms.Compose([transforms.RandomRotation(rotation),\n", " transforms.RandomResizedCrop(input_size),\n", " transforms.RandomHorizontalFlip(),\n", " transforms.ToTensor(),\n", " transforms.Normalize(means,std_devs)]),\n", " 'valid': transforms.Compose([transforms.Resize(down_size),\n", " transforms.CenterCrop(input_size),\n", " transforms.ToTensor(),\n", " transforms.Normalize(means,std_devs)]),\n", " 'test': transforms.Compose([transforms.Resize(down_size),\n", " transforms.CenterCrop(input_size),\n", " transforms.ToTensor(),\n", " transforms.Normalize(means,std_devs)])\n", " }\n", "\n", "# Build datasets and apply transforms (with ImageFolder)\n", "image_datasets = {\n", " 'train': datasets.ImageFolder('data/train/', \n", " transform=data_transforms['train']),\n", " 'valid': datasets.ImageFolder('data/val/', \n", " transform=data_transforms['valid']),\n", " 'test': datasets.ImageFolder('data/test/', \n", " transform=data_transforms['test'])\n", " }\n", "\n", "# Define dataloaders\n", "dataloaders = {\n", " 'train': torch.utils.data.DataLoader(image_datasets['train'], \n", " batch_size=_batch_size, shuffle=True),\n", " 'valid': torch.utils.data.DataLoader(image_datasets['valid'], \n", " batch_size=_batch_size),\n", " 'test': torch.utils.data.DataLoader(image_datasets['test'], \n", " batch_size=_batch_size)\n", " }" ] }, { "cell_type": "code", "execution_count": 7, 
"metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "env: JOBLIB_TEMP_FOLDER=/datalab\n" ] } ], "source": [ "%env JOBLIB_TEMP_FOLDER=/tmp\n", "os.environ[\"JOBLIB_TEMP_FOLDER\"]=\"/imp/\"" ] }, { "cell_type": "code", "execution_count": 8, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "Downloading: \"https://download.pytorch.org/models/resnet50-19c8e357.pth\" to /home/jmanoj01/.cache/torch/hub/checkpoints/resnet50-19c8e357.pth\n" ] }, { "data": { "application/vnd.jupyter.widget-view+json": { "model_id": "5559a36315254368a34551e8c4944fbd", "version_major": 2, "version_minor": 0 }, "text/plain": [ " 0%| | 0.00/97.8M [00:00\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
Unnamed: 0ref_captionlabel
00test_img_0.jpgtroll
11test_img_1.jpgtroll
22test_img_2.jpgtroll
33test_img_3.jpgtroll
44test_img_4.jpgtroll
\n", "" ], "text/plain": [ " Unnamed: 0 ref_caption label\n", "0 0 test_img_0.jpg troll\n", "1 1 test_img_1.jpg troll\n", "2 2 test_img_2.jpg troll\n", "3 3 test_img_3.jpg troll\n", "4 4 test_img_4.jpg troll" ] }, "execution_count": 29, "metadata": {}, "output_type": "execute_result" } ], "source": [ "df.head()" ] }, { "cell_type": "code", "execution_count": 30, "metadata": {}, "outputs": [], "source": [ "df = df.drop(['Unnamed: 0'], axis=1)" ] }, { "cell_type": "code", "execution_count": 31, "metadata": {}, "outputs": [], "source": [ "df['new_label'] = (df['label']=='troll').astype(int)" ] }, { "cell_type": "code", "execution_count": 32, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
ref_captionlabelnew_label
0test_img_0.jpgtroll1
1test_img_1.jpgtroll1
2test_img_2.jpgtroll1
3test_img_3.jpgtroll1
4test_img_4.jpgtroll1
\n", "
" ], "text/plain": [ " ref_caption label new_label\n", "0 test_img_0.jpg troll 1\n", "1 test_img_1.jpg troll 1\n", "2 test_img_2.jpg troll 1\n", "3 test_img_3.jpg troll 1\n", "4 test_img_4.jpg troll 1" ] }, "execution_count": 32, "metadata": {}, "output_type": "execute_result" } ], "source": [ "df.head()" ] }, { "cell_type": "code", "execution_count": 33, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "0.5922038980509745" ] }, "execution_count": 33, "metadata": {}, "output_type": "execute_result" } ], "source": [ "df['new_label'].sum()/df.shape[0]" ] }, { "cell_type": "code", "execution_count": 44, "metadata": {}, "outputs": [ { "ename": "SyntaxError", "evalue": "invalid syntax (, line 5)", "output_type": "error", "traceback": [ "\u001b[0;36m File \u001b[0;32m\"\"\u001b[0;36m, line \u001b[0;32m5\u001b[0m\n\u001b[0;31m dataloader = ImageDataLoaders.from_df(path, ,item_tfms=Resize(128, 128), valid='val', test='test', bs=_batch_size, batch_tfms= tfms)\u001b[0m\n\u001b[0m ^\u001b[0m\n\u001b[0;31mSyntaxError\u001b[0m\u001b[0;31m:\u001b[0m invalid syntax\n" ] } ], "source": [ "# data = DataLoaders(dataloaders['train'], dataloaders['valid']).cuda()\n", "path = \"meme-classification/data/test_img\"\n", "xtra_tfms = Normalize.from_stats(*imagenet_stats)\n", "tfms = aug_transforms(pad_mode='zeros', mult=2, min_scale=0.5, xtra_tfms = xtra_tfms)\n", "# dataloader = ImageDataLoaders.from_df(path, ,item_tfms=Resize(128, 128), valid='val', test='test', bs=_batch_size, batch_tfms= tfms)\n", "testdata = ImageDataLoaders.from_df(df=df, folder=path, item_tfms=Resize(128, 128), label_col=2, bs=_batch_size, batch_tfms= tfms, valid_pct=0.0)" ] }, { "cell_type": "code", "execution_count": 41, "metadata": {}, "outputs": [], "source": [ "learn = Learner(testdata, model_conv, loss_func=loss_function, opt_func=Adam, metrics=accuracy)" ] }, { "cell_type": "code", "execution_count": 58, "metadata": {}, "outputs": [ { "data": { "text/html": [ "\n", " \n", " \n", " \n", " \n", " \n", " 
\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
epochtrain_lossvalid_lossaccuracytime
00.9557201.3133850.67318400:41
10.95272500:03
" ], "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" }, { "name": "stderr", "output_type": "stream", "text": [ "/opt/conda/lib/python3.6/site-packages/fastai/callback/schedule.py:68: UserWarning: This overload of nonzero is deprecated:\n", "\tnonzero()\n", "Consider using one of the following signatures instead:\n", "\tnonzero(*, bool as_tuple) (Triggered internally at /pytorch/torch/csrc/utils/python_arg_parser.cpp:882.)\n", " idx = (pos >= pcts).nonzero().max()\n" ] }, { "ename": "KeyboardInterrupt", "evalue": "", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mlearn\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfit_one_cycle\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m20\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlearning_rate\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/fastai/callback/schedule.py\u001b[0m in \u001b[0;36mfit_one_cycle\u001b[0;34m(self, n_epoch, lr_max, div, div_final, pct_start, wd, moms, cbs, reset_opt)\u001b[0m\n\u001b[1;32m 110\u001b[0m scheds = {'lr': combined_cos(pct_start, lr_max/div, lr_max, lr_max/div_final),\n\u001b[1;32m 111\u001b[0m 'mom': combined_cos(pct_start, *(self.moms if moms is None else moms))}\n\u001b[0;32m--> 112\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfit\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mn_epoch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcbs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mParamScheduler\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mscheds\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m+\u001b[0m\u001b[0mL\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcbs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0mreset_opt\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mreset_opt\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mwd\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mwd\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 113\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 114\u001b[0m \u001b[0;31m# Cell\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/fastai/learner.py\u001b[0m in \u001b[0;36mfit\u001b[0;34m(self, n_epoch, lr, wd, cbs, reset_opt)\u001b[0m\n\u001b[1;32m 203\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mopt\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_hypers\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlr\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlr\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mlr\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m \u001b[0;32melse\u001b[0m \u001b[0mlr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 204\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mn_epoch\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mn_epoch\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 205\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_with_events\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_do_fit\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'fit'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mCancelFitException\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_end_cleanup\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 206\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 207\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_end_cleanup\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m 
\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdl\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mxb\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0myb\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpred\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mloss\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/fastai/learner.py\u001b[0m in \u001b[0;36m_with_events\u001b[0;34m(self, f, event_type, ex, final)\u001b[0m\n\u001b[1;32m 152\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 153\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_with_events\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mevent_type\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mex\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfinal\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mnoop\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 154\u001b[0;31m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34mf'before_{event_type}'\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m;\u001b[0m\u001b[0mf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 155\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mex\u001b[0m\u001b[0;34m:\u001b[0m 
\u001b[0mself\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34mf'after_cancel_{event_type}'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 156\u001b[0m \u001b[0;32mfinally\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34mf'after_{event_type}'\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m;\u001b[0m\u001b[0mfinal\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/fastai/learner.py\u001b[0m in \u001b[0;36m_do_fit\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 194\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mepoch\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mn_epoch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 195\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mepoch\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mepoch\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 196\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_with_events\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_do_epoch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'epoch'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mCancelEpochException\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 197\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 198\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mfit\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mn_epoch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlr\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mwd\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0mcbs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mreset_opt\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mFalse\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/fastai/learner.py\u001b[0m in \u001b[0;36m_with_events\u001b[0;34m(self, f, event_type, ex, final)\u001b[0m\n\u001b[1;32m 152\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 153\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_with_events\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mevent_type\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mex\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfinal\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mnoop\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 154\u001b[0;31m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34mf'before_{event_type}'\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m;\u001b[0m\u001b[0mf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 155\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mex\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34mf'after_cancel_{event_type}'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 156\u001b[0m \u001b[0;32mfinally\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34mf'after_{event_type}'\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m;\u001b[0m\u001b[0mfinal\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/fastai/learner.py\u001b[0m in 
\u001b[0;36m_do_epoch\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 188\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 189\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_do_epoch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 190\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_do_epoch_train\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 191\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_do_epoch_validate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 192\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/fastai/learner.py\u001b[0m in \u001b[0;36m_do_epoch_train\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 180\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_do_epoch_train\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 181\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdl\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdls\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 182\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_with_events\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mall_batches\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'train'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mCancelTrainException\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 183\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 184\u001b[0m \u001b[0;32mdef\u001b[0m 
\u001b[0m_do_epoch_validate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mds_idx\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdl\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/fastai/learner.py\u001b[0m in \u001b[0;36m_with_events\u001b[0;34m(self, f, event_type, ex, final)\u001b[0m\n\u001b[1;32m 152\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 153\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_with_events\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mevent_type\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mex\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfinal\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mnoop\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 154\u001b[0;31m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34mf'before_{event_type}'\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m;\u001b[0m\u001b[0mf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 155\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mex\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34mf'after_cancel_{event_type}'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 156\u001b[0m \u001b[0;32mfinally\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34mf'after_{event_type}'\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m;\u001b[0m\u001b[0mfinal\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", 
"\u001b[0;32m/opt/conda/lib/python3.6/site-packages/fastai/learner.py\u001b[0m in \u001b[0;36mall_batches\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 158\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mall_batches\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 159\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mn_iter\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdl\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 160\u001b[0;31m \u001b[0;32mfor\u001b[0m \u001b[0mo\u001b[0m \u001b[0;32min\u001b[0m \u001b[0menumerate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdl\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mone_batch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mo\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 161\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 162\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_do_one_batch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/fastai/learner.py\u001b[0m in \u001b[0;36mone_batch\u001b[0;34m(self, i, b)\u001b[0m\n\u001b[1;32m 176\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0miter\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mi\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 177\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_split\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mb\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 178\u001b[0;31m 
\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_with_events\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_do_one_batch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'batch'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mCancelBatchException\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 179\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 180\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_do_epoch_train\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/fastai/learner.py\u001b[0m in \u001b[0;36m_with_events\u001b[0;34m(self, f, event_type, ex, final)\u001b[0m\n\u001b[1;32m 152\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 153\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_with_events\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mevent_type\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mex\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfinal\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mnoop\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 154\u001b[0;31m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34mf'before_{event_type}'\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m;\u001b[0m\u001b[0mf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 155\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mex\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34mf'after_cancel_{event_type}'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 156\u001b[0m \u001b[0;32mfinally\u001b[0m\u001b[0;34m:\u001b[0m 
\u001b[0mself\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34mf'after_{event_type}'\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m;\u001b[0m\u001b[0mfinal\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/fastai/learner.py\u001b[0m in \u001b[0;36m_do_one_batch\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 161\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 162\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_do_one_batch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 163\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpred\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mxb\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 164\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'after_pred'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 165\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0myb\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mloss\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mloss_func\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpred\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0myb\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m_call_impl\u001b[0;34m(self, 
*input, **kwargs)\u001b[0m\n\u001b[1;32m 725\u001b[0m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_slow_forward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 726\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 727\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 728\u001b[0m for hook in itertools.chain(\n\u001b[1;32m 729\u001b[0m \u001b[0m_global_forward_hooks\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalues\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/torchvision/models/resnet.py\u001b[0m in \u001b[0;36mforward\u001b[0;34m(self, x)\u001b[0m\n\u001b[1;32m 218\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 219\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 220\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_forward_impl\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 221\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 222\u001b[0m 
\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/torchvision/models/resnet.py\u001b[0m in \u001b[0;36m_forward_impl\u001b[0;34m(self, x)\u001b[0m\n\u001b[1;32m 207\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 208\u001b[0m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlayer1\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 209\u001b[0;31m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlayer2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 210\u001b[0m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlayer3\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 211\u001b[0m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlayer4\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m_call_impl\u001b[0;34m(self, *input, **kwargs)\u001b[0m\n\u001b[1;32m 725\u001b[0m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_slow_forward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 726\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 727\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 728\u001b[0m for hook in itertools.chain(\n\u001b[1;32m 729\u001b[0m \u001b[0m_global_forward_hooks\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalues\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/torch/nn/modules/container.py\u001b[0m in \u001b[0;36mforward\u001b[0;34m(self, input)\u001b[0m\n\u001b[1;32m 115\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 116\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mmodule\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 117\u001b[0;31m \u001b[0minput\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmodule\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 118\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 119\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m_call_impl\u001b[0;34m(self, *input, **kwargs)\u001b[0m\n\u001b[1;32m 725\u001b[0m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_slow_forward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 726\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 727\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 728\u001b[0m for hook in itertools.chain(\n\u001b[1;32m 729\u001b[0m \u001b[0m_global_forward_hooks\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalues\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/torchvision/models/resnet.py\u001b[0m in \u001b[0;36mforward\u001b[0;34m(self, x)\u001b[0m\n\u001b[1;32m 106\u001b[0m \u001b[0mout\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrelu\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mout\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 107\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 108\u001b[0;31m \u001b[0mout\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconv2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mout\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 109\u001b[0m \u001b[0mout\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbn2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mout\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 110\u001b[0m \u001b[0mout\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrelu\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mout\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m_call_impl\u001b[0;34m(self, *input, **kwargs)\u001b[0m\n\u001b[1;32m 725\u001b[0m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_slow_forward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 726\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 727\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 728\u001b[0m for hook in itertools.chain(\n\u001b[1;32m 729\u001b[0m \u001b[0m_global_forward_hooks\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalues\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/torch/nn/modules/conv.py\u001b[0m in \u001b[0;36mforward\u001b[0;34m(self, input)\u001b[0m\n\u001b[1;32m 421\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 422\u001b[0m \u001b[0;32mdef\u001b[0m 
\u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mTensor\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m->\u001b[0m \u001b[0mTensor\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 423\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_conv_forward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mweight\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 424\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 425\u001b[0m \u001b[0;32mclass\u001b[0m \u001b[0mConv3d\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0m_ConvNd\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/torch/nn/modules/conv.py\u001b[0m in \u001b[0;36m_conv_forward\u001b[0;34m(self, input, weight)\u001b[0m\n\u001b[1;32m 418\u001b[0m _pair(0), self.dilation, self.groups)\n\u001b[1;32m 419\u001b[0m return F.conv2d(input, weight, self.bias, self.stride,\n\u001b[0;32m--> 420\u001b[0;31m self.padding, self.dilation, self.groups)\n\u001b[0m\u001b[1;32m 421\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 422\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mTensor\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m->\u001b[0m \u001b[0mTensor\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/fastai/torch_core.py\u001b[0m in \u001b[0;36m__torch_function__\u001b[0;34m(self, func, types, args, kwargs)\u001b[0m\n\u001b[1;32m 315\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 
316\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m__torch_function__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfunc\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtypes\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0margs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 317\u001b[0;31m \u001b[0;32mwith\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_C\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mDisableTorchFunction\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mret\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_convert\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfunc\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mkwargs\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0;34m{\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__class__\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 318\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mret\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mTensorBase\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mret\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_meta\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mas_copy\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 319\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mret\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;31mKeyboardInterrupt\u001b[0m: " 
] } ], "source": [ "learn.fit_one_cycle(20, learning_rate)" ] }, { "cell_type": "code", "execution_count": 18, "metadata": { "scrolled": true }, "outputs": [], "source": [ "# learn.show_results()" ] }, { "cell_type": "code", "execution_count": 19, "metadata": {}, "outputs": [], "source": [ "# learn.show_training_loop()" ] }, { "cell_type": "code", "execution_count": 20, "metadata": {}, "outputs": [], "source": [ "# learn.recorder" ] }, { "cell_type": "code", "execution_count": 43, "metadata": {}, "outputs": [ { "ename": "RuntimeError", "evalue": "Error(s) in loading state_dict for ResNet:\n\tMissing key(s) in state_dict: \"conv1.weight\", \"bn1.weight\", \"bn1.bias\", \"bn1.running_mean\", \"bn1.running_var\", \"layer1.0.conv1.weight\", \"layer1.0.bn1.weight\", \"layer1.0.bn1.bias\", \"layer1.0.bn1.running_mean\", \"layer1.0.bn1.running_var\", \"layer1.0.conv2.weight\", \"layer1.0.bn2.weight\", \"layer1.0.bn2.bias\", \"layer1.0.bn2.running_mean\", \"layer1.0.bn2.running_var\", \"layer1.0.conv3.weight\", \"layer1.0.bn3.weight\", \"layer1.0.bn3.bias\", \"layer1.0.bn3.running_mean\", \"layer1.0.bn3.running_var\", \"layer1.0.downsample.0.weight\", \"layer1.0.downsample.1.weight\", \"layer1.0.downsample.1.bias\", \"layer1.0.downsample.1.running_mean\", \"layer1.0.downsample.1.running_var\", \"layer1.1.conv1.weight\", \"layer1.1.bn1.weight\", \"layer1.1.bn1.bias\", \"layer1.1.bn1.running_mean\", \"layer1.1.bn1.running_var\", \"layer1.1.conv2.weight\", \"layer1.1.bn2.weight\", \"layer1.1.bn2.bias\", \"layer1.1.bn2.running_mean\", \"layer1.1.bn2.running_var\", \"layer1.1.conv3.weight\", \"layer1.1.bn3.weight\", \"layer1.1.bn3.bias\", \"layer1.1.bn3.running_mean\", \"layer1.1.bn3.running_var\", \"layer1.2.conv1.weight\", \"layer1.2.bn1.weight\", \"layer1.2.bn1.bias\", \"layer1.2.bn1.running_mean\", \"layer1.2.bn1.running_var\", \"layer1.2.conv2.weight\", \"layer1.2.bn2.weight\", \"layer1.2.bn2.bias\", \"layer1.2.bn2.running_mean\", \"layer1.2.bn2.running_var\", 
\"layer1.2.conv3.weight\", \"layer1.2.bn3.weight\", \"layer1.2.bn3.bias\", \"layer1.2.bn3.running_mean\", \"layer1.2.bn3.running_var\", \"layer2.0.conv1.weight\", \"layer2.0.bn1.weight\", \"layer2.0.bn1.bias\", \"layer2.0.bn1.running_mean\", \"layer2.0.bn1.running_var\", \"layer2.0.conv2.weight\", \"layer2.0.bn2.weight\", \"layer2.0.bn2.bias\", \"layer2.0.bn2.running_mean\", \"layer2.0.bn2.running_var\", \"layer2.0.conv3.weight\", \"layer2.0.bn3.weight\", \"layer2.0.bn3.bias\", \"layer2.0.bn3.running_mean\", \"layer2.0.bn3.running_var\", \"layer2.0.downsample.0.weight\", \"layer2.0.downsample.1.weight\", \"layer2.0.downsample.1.bias\", \"layer2.0.downsample.1.running_mean\", \"layer2.0.downsample.1.running_var\", \"layer2.1.conv1.weight\", \"layer2.1.bn1.weight\", \"layer2.1.bn1.bias\", \"layer2.1.bn1.running_mean\", \"layer2.1.bn1.running_var\", \"layer2.1.conv2.weight\", \"layer2.1.bn2.weight\", \"layer2.1.bn2.bias\", \"layer2.1.bn2.running_mean\", \"layer2.1.bn2.running_var\", \"layer2.1.conv3.weight\", \"layer2.1.bn3.weight\", \"layer2.1.bn3.bias\", \"layer2.1.bn3.running_mean\", \"layer2.1.bn3.running_var\", \"layer2.2.conv1.weight\", \"layer2.2.bn1.weight\", \"layer2.2.bn1.bias\", \"layer2.2.bn1.running_mean\", \"layer2.2.bn1.running_var\", \"layer2.2.conv2.weight\", \"layer2.2.bn2.weight\", \"layer2.2.bn2.bias\", \"layer2.2.bn2.running_mean\", \"layer2.2.bn2.running_var\", \"layer2.2.conv3.weight\", \"layer2.2.bn3.weight\", \"layer2.2.bn3.bias\", \"layer2.2.bn3.running_mean\", \"layer2.2.bn3.running_var\", \"layer2.3.conv1.weight\", \"layer2.3.bn1.weight\", \"layer2.3.bn1.bias\", \"layer2.3.bn1.running_mean\", \"layer2.3.bn1.running_var\", \"layer2.3.conv2.weight\", \"layer2.3.bn2.weight\", \"layer2.3.bn2.bias\", \"layer2.3.bn2.running_mean\", \"layer2.3.bn2.running_var\", \"layer2.3.conv3.weight\", \"layer2.3.bn3.weight\", \"layer2.3.bn3.bias\", \"layer2.3.bn3.running_mean\", \"layer2.3.bn3.running_var\", \"layer3.0.conv1.weight\", \"layer3.0.bn1.weight\", 
\"layer3.0.bn1.bias\", \"layer3.0.bn1.running_mean\", \"layer3.0.bn1.running_var\", \"layer3.0.conv2.weight\", \"layer3.0.bn2.weight\", \"layer3.0.bn2.bias\", \"layer3.0.bn2.running_mean\", \"layer3.0.bn2.running_var\", \"layer3.0.conv3.weight\", \"layer3.0.bn3.weight\", \"layer3.0.bn3.bias\", \"layer3.0.bn3.running_mean\", \"layer3.0.bn3.running_var\", \"layer3.0.downsample.0.weight\", \"layer3.0.downsample.1.weight\", \"layer3.0.downsample.1.bias\", \"layer3.0.downsample.1.running_mean\", \"layer3.0.downsample.1.running_var\", \"layer3.1.conv1.weight\", \"layer3.1.bn1.weight\", \"layer3.1.bn1.bias\", \"layer3.1.bn1.running_mean\", \"layer3.1.bn1.running_var\", \"layer3.1.conv2.weight\", \"layer3.1.bn2.weight\", \"layer3.1.bn2.bias\", \"layer3.1.bn2.running_mean\", \"layer3.1.bn2.running_var\", \"layer3.1.conv3.weight\", \"layer3.1.bn3.weight\", \"layer3.1.bn3.bias\", \"layer3.1.bn3.running_mean\", \"layer3.1.bn3.running_var\", \"layer3.2.conv1.weight\", \"layer3.2.bn1.weight\", \"layer3.2.bn1.bias\", \"layer3.2.bn1.running_mean\", \"layer3.2.bn1.running_var\", \"layer3.2.conv2.weight\", \"layer3.2.bn2.weight\", \"layer3.2.bn2.bias\", \"layer3.2.bn2.running_mean\", \"layer3.2.bn2.running_var\", \"layer3.2.conv3.weight\", \"layer3.2.bn3.weight\", \"layer3.2.bn3.bias\", \"layer3.2.bn3.running_mean\", \"layer3.2.bn3.running_var\", \"layer3.3.conv1.weight\", \"layer3.3.bn1.weight\", \"layer3.3.bn1.bias\", \"layer3.3.bn1.running_mean\", \"layer3.3.bn1.running_var\", \"layer3.3.conv2.weight\", \"layer3.3.bn2.weight\", \"layer3.3.bn2.bias\", \"layer3.3.bn2.running_mean\", \"layer3.3.bn2.running_var\", \"layer3.3.conv3.weight\", \"layer3.3.bn3.weight\", \"layer3.3.bn3.bias\", \"layer3.3.bn3.running_mean\", \"layer3.3.bn3.running_var\", \"layer3.4.conv1.weight\", \"layer3.4.bn1.weight\", \"layer3.4.bn1.bias\", \"layer3.4.bn1.running_mean\", \"layer3.4.bn1.running_var\", \"layer3.4.conv2.weight\", \"layer3.4.bn2.weight\", \"layer3.4.bn2.bias\", 
\"layer3.4.bn2.running_mean\", \"layer3.4.bn2.running_var\", \"layer3.4.conv3.weight\", \"layer3.4.bn3.weight\", \"layer3.4.bn3.bias\", \"layer3.4.bn3.running_mean\", \"layer3.4.bn3.running_var\", \"layer3.5.conv1.weight\", \"layer3.5.bn1.weight\", \"layer3.5.bn1.bias\", \"layer3.5.bn1.running_mean\", \"layer3.5.bn1.running_var\", \"layer3.5.conv2.weight\", \"layer3.5.bn2.weight\", \"layer3.5.bn2.bias\", \"layer3.5.bn2.running_mean\", \"layer3.5.bn2.running_var\", \"layer3.5.conv3.weight\", \"layer3.5.bn3.weight\", \"layer3.5.bn3.bias\", \"layer3.5.bn3.running_mean\", \"layer3.5.bn3.running_var\", \"layer4.0.conv1.weight\", \"layer4.0.bn1.weight\", \"layer4.0.bn1.bias\", \"layer4.0.bn1.running_mean\", \"layer4.0.bn1.running_var\", \"layer4.0.conv2.weight\", \"layer4.0.bn2.weight\", \"layer4.0.bn2.bias\", \"layer4.0.bn2.running_mean\", \"layer4.0.bn2.running_var\", \"layer4.0.conv3.weight\", \"layer4.0.bn3.weight\", \"layer4.0.bn3.bias\", \"layer4.0.bn3.running_mean\", \"layer4.0.bn3.running_var\", \"layer4.0.downsample.0.weight\", \"layer4.0.downsample.1.weight\", \"layer4.0.downsample.1.bias\", \"layer4.0.downsample.1.running_mean\", \"layer4.0.downsample.1.running_var\", \"layer4.1.conv1.weight\", \"layer4.1.bn1.weight\", \"layer4.1.bn1.bias\", \"layer4.1.bn1.running_mean\", \"layer4.1.bn1.running_var\", \"layer4.1.conv2.weight\", \"layer4.1.bn2.weight\", \"layer4.1.bn2.bias\", \"layer4.1.bn2.running_mean\", \"layer4.1.bn2.running_var\", \"layer4.1.conv3.weight\", \"layer4.1.bn3.weight\", \"layer4.1.bn3.bias\", \"layer4.1.bn3.running_mean\", \"layer4.1.bn3.running_var\", \"layer4.2.conv1.weight\", \"layer4.2.bn1.weight\", \"layer4.2.bn1.bias\", \"layer4.2.bn1.running_mean\", \"layer4.2.bn1.running_var\", \"layer4.2.conv2.weight\", \"layer4.2.bn2.weight\", \"layer4.2.bn2.bias\", \"layer4.2.bn2.running_mean\", \"layer4.2.bn2.running_var\", \"layer4.2.conv3.weight\", \"layer4.2.bn3.weight\", \"layer4.2.bn3.bias\", \"layer4.2.bn3.running_mean\", 
\"layer4.2.bn3.running_var\", \"fc.fc1.weight\", \"fc.fc1.bias\", \"fc.fc2.weight\", \"fc.fc2.bias\". \n\tUnexpected key(s) in state_dict: \"0.0.weight\", \"0.1.weight\", \"0.1.bias\", \"0.1.running_mean\", \"0.1.running_var\", \"0.1.num_batches_tracked\", \"0.4.0.conv1.weight\", \"0.4.0.bn1.weight\", \"0.4.0.bn1.bias\", \"0.4.0.bn1.running_mean\", \"0.4.0.bn1.running_var\", \"0.4.0.bn1.num_batches_tracked\", \"0.4.0.conv2.weight\", \"0.4.0.bn2.weight\", \"0.4.0.bn2.bias\", \"0.4.0.bn2.running_mean\", \"0.4.0.bn2.running_var\", \"0.4.0.bn2.num_batches_tracked\", \"0.4.0.conv3.weight\", \"0.4.0.bn3.weight\", \"0.4.0.bn3.bias\", \"0.4.0.bn3.running_mean\", \"0.4.0.bn3.running_var\", \"0.4.0.bn3.num_batches_tracked\", \"0.4.0.downsample.0.weight\", \"0.4.0.downsample.1.weight\", \"0.4.0.downsample.1.bias\", \"0.4.0.downsample.1.running_mean\", \"0.4.0.downsample.1.running_var\", \"0.4.0.downsample.1.num_batches_tracked\", \"0.4.1.conv1.weight\", \"0.4.1.bn1.weight\", \"0.4.1.bn1.bias\", \"0.4.1.bn1.running_mean\", \"0.4.1.bn1.running_var\", \"0.4.1.bn1.num_batches_tracked\", \"0.4.1.conv2.weight\", \"0.4.1.bn2.weight\", \"0.4.1.bn2.bias\", \"0.4.1.bn2.running_mean\", \"0.4.1.bn2.running_var\", \"0.4.1.bn2.num_batches_tracked\", \"0.4.1.conv3.weight\", \"0.4.1.bn3.weight\", \"0.4.1.bn3.bias\", \"0.4.1.bn3.running_mean\", \"0.4.1.bn3.running_var\", \"0.4.1.bn3.num_batches_tracked\", \"0.4.2.conv1.weight\", \"0.4.2.bn1.weight\", \"0.4.2.bn1.bias\", \"0.4.2.bn1.running_mean\", \"0.4.2.bn1.running_var\", \"0.4.2.bn1.num_batches_tracked\", \"0.4.2.conv2.weight\", \"0.4.2.bn2.weight\", \"0.4.2.bn2.bias\", \"0.4.2.bn2.running_mean\", \"0.4.2.bn2.running_var\", \"0.4.2.bn2.num_batches_tracked\", \"0.4.2.conv3.weight\", \"0.4.2.bn3.weight\", \"0.4.2.bn3.bias\", \"0.4.2.bn3.running_mean\", \"0.4.2.bn3.running_var\", \"0.4.2.bn3.num_batches_tracked\", \"0.5.0.conv1.weight\", \"0.5.0.bn1.weight\", \"0.5.0.bn1.bias\", \"0.5.0.bn1.running_mean\", \"0.5.0.bn1.running_var\", 
\"0.5.0.bn1.num_batches_tracked\", \"0.5.0.conv2.weight\", \"0.5.0.bn2.weight\", \"0.5.0.bn2.bias\", \"0.5.0.bn2.running_mean\", \"0.5.0.bn2.running_var\", \"0.5.0.bn2.num_batches_tracked\", \"0.5.0.conv3.weight\", \"0.5.0.bn3.weight\", \"0.5.0.bn3.bias\", \"0.5.0.bn3.running_mean\", \"0.5.0.bn3.running_var\", \"0.5.0.bn3.num_batches_tracked\", \"0.5.0.downsample.0.weight\", \"0.5.0.downsample.1.weight\", \"0.5.0.downsample.1.bias\", \"0.5.0.downsample.1.running_mean\", \"0.5.0.downsample.1.running_var\", \"0.5.0.downsample.1.num_batches_tracked\", \"0.5.1.conv1.weight\", \"0.5.1.bn1.weight\", \"0.5.1.bn1.bias\", \"0.5.1.bn1.running_mean\", \"0.5.1.bn1.running_var\", \"0.5.1.bn1.num_batches_tracked\", \"0.5.1.conv2.weight\", \"0.5.1.bn2.weight\", \"0.5.1.bn2.bias\", \"0.5.1.bn2.running_mean\", \"0.5.1.bn2.running_var\", \"0.5.1.bn2.num_batches_tracked\", \"0.5.1.conv3.weight\", \"0.5.1.bn3.weight\", \"0.5.1.bn3.bias\", \"0.5.1.bn3.running_mean\", \"0.5.1.bn3.running_var\", \"0.5.1.bn3.num_batches_tracked\", \"0.5.2.conv1.weight\", \"0.5.2.bn1.weight\", \"0.5.2.bn1.bias\", \"0.5.2.bn1.running_mean\", \"0.5.2.bn1.running_var\", \"0.5.2.bn1.num_batches_tracked\", \"0.5.2.conv2.weight\", \"0.5.2.bn2.weight\", \"0.5.2.bn2.bias\", \"0.5.2.bn2.running_mean\", \"0.5.2.bn2.running_var\", \"0.5.2.bn2.num_batches_tracked\", \"0.5.2.conv3.weight\", \"0.5.2.bn3.weight\", \"0.5.2.bn3.bias\", \"0.5.2.bn3.running_mean\", \"0.5.2.bn3.running_var\", \"0.5.2.bn3.num_batches_tracked\", \"0.5.3.conv1.weight\", \"0.5.3.bn1.weight\", \"0.5.3.bn1.bias\", \"0.5.3.bn1.running_mean\", \"0.5.3.bn1.running_var\", \"0.5.3.bn1.num_batches_tracked\", \"0.5.3.conv2.weight\", \"0.5.3.bn2.weight\", \"0.5.3.bn2.bias\", \"0.5.3.bn2.running_mean\", \"0.5.3.bn2.running_var\", \"0.5.3.bn2.num_batches_tracked\", \"0.5.3.conv3.weight\", \"0.5.3.bn3.weight\", \"0.5.3.bn3.bias\", \"0.5.3.bn3.running_mean\", \"0.5.3.bn3.running_var\", \"0.5.3.bn3.num_batches_tracked\", \"0.6.0.conv1.weight\", 
\"0.6.0.bn1.weight\", \"0.6.0.bn1.bias\", \"0.6.0.bn1.running_mean\", \"0.6.0.bn1.running_var\", \"0.6.0.bn1.num_batches_tracked\", \"0.6.0.conv2.weight\", \"0.6.0.bn2.weight\", \"0.6.0.bn2.bias\", \"0.6.0.bn2.running_mean\", \"0.6.0.bn2.running_var\", \"0.6.0.bn2.num_batches_tracked\", \"0.6.0.conv3.weight\", \"0.6.0.bn3.weight\", \"0.6.0.bn3.bias\", \"0.6.0.bn3.running_mean\", \"0.6.0.bn3.running_var\", \"0.6.0.bn3.num_batches_tracked\", \"0.6.0.downsample.0.weight\", \"0.6.0.downsample.1.weight\", \"0.6.0.downsample.1.bias\", \"0.6.0.downsample.1.running_mean\", \"0.6.0.downsample.1.running_var\", \"0.6.0.downsample.1.num_batches_tracked\", \"0.6.1.conv1.weight\", \"0.6.1.bn1.weight\", \"0.6.1.bn1.bias\", \"0.6.1.bn1.running_mean\", \"0.6.1.bn1.running_var\", \"0.6.1.bn1.num_batches_tracked\", \"0.6.1.conv2.weight\", \"0.6.1.bn2.weight\", \"0.6.1.bn2.bias\", \"0.6.1.bn2.running_mean\", \"0.6.1.bn2.running_var\", \"0.6.1.bn2.num_batches_tracked\", \"0.6.1.conv3.weight\", \"0.6.1.bn3.weight\", \"0.6.1.bn3.bias\", \"0.6.1.bn3.running_mean\", \"0.6.1.bn3.running_var\", \"0.6.1.bn3.num_batches_tracked\", \"0.6.2.conv1.weight\", \"0.6.2.bn1.weight\", \"0.6.2.bn1.bias\", \"0.6.2.bn1.running_mean\", \"0.6.2.bn1.running_var\", \"0.6.2.bn1.num_batches_tracked\", \"0.6.2.conv2.weight\", \"0.6.2.bn2.weight\", \"0.6.2.bn2.bias\", \"0.6.2.bn2.running_mean\", \"0.6.2.bn2.running_var\", \"0.6.2.bn2.num_batches_tracked\", \"0.6.2.conv3.weight\", \"0.6.2.bn3.weight\", \"0.6.2.bn3.bias\", \"0.6.2.bn3.running_mean\", \"0.6.2.bn3.running_var\", \"0.6.2.bn3.num_batches_tracked\", \"0.6.3.conv1.weight\", \"0.6.3.bn1.weight\", \"0.6.3.bn1.bias\", \"0.6.3.bn1.running_mean\", \"0.6.3.bn1.running_var\", \"0.6.3.bn1.num_batches_tracked\", \"0.6.3.conv2.weight\", \"0.6.3.bn2.weight\", \"0.6.3.bn2.bias\", \"0.6.3.bn2.running_mean\", \"0.6.3.bn2.running_var\", \"0.6.3.bn2.num_batches_tracked\", \"0.6.3.conv3.weight\", \"0.6.3.bn3.weight\", \"0.6.3.bn3.bias\", \"0.6.3.bn3.running_mean\", 
\"0.6.3.bn3.running_var\", \"0.6.3.bn3.num_batches_tracked\", \"0.6.4.conv1.weight\", \"0.6.4.bn1.weight\", \"0.6.4.bn1.bias\", \"0.6.4.bn1.running_mean\", \"0.6.4.bn1.running_var\", \"0.6.4.bn1.num_batches_tracked\", \"0.6.4.conv2.weight\", \"0.6.4.bn2.weight\", \"0.6.4.bn2.bias\", \"0.6.4.bn2.running_mean\", \"0.6.4.bn2.running_var\", \"0.6.4.bn2.num_batches_tracked\", \"0.6.4.conv3.weight\", \"0.6.4.bn3.weight\", \"0.6.4.bn3.bias\", \"0.6.4.bn3.running_mean\", \"0.6.4.bn3.running_var\", \"0.6.4.bn3.num_batches_tracked\", \"0.6.5.conv1.weight\", \"0.6.5.bn1.weight\", \"0.6.5.bn1.bias\", \"0.6.5.bn1.running_mean\", \"0.6.5.bn1.running_var\", \"0.6.5.bn1.num_batches_tracked\", \"0.6.5.conv2.weight\", \"0.6.5.bn2.weight\", \"0.6.5.bn2.bias\", \"0.6.5.bn2.running_mean\", \"0.6.5.bn2.running_var\", \"0.6.5.bn2.num_batches_tracked\", \"0.6.5.conv3.weight\", \"0.6.5.bn3.weight\", \"0.6.5.bn3.bias\", \"0.6.5.bn3.running_mean\", \"0.6.5.bn3.running_var\", \"0.6.5.bn3.num_batches_tracked\", \"0.7.0.conv1.weight\", \"0.7.0.bn1.weight\", \"0.7.0.bn1.bias\", \"0.7.0.bn1.running_mean\", \"0.7.0.bn1.running_var\", \"0.7.0.bn1.num_batches_tracked\", \"0.7.0.conv2.weight\", \"0.7.0.bn2.weight\", \"0.7.0.bn2.bias\", \"0.7.0.bn2.running_mean\", \"0.7.0.bn2.running_var\", \"0.7.0.bn2.num_batches_tracked\", \"0.7.0.conv3.weight\", \"0.7.0.bn3.weight\", \"0.7.0.bn3.bias\", \"0.7.0.bn3.running_mean\", \"0.7.0.bn3.running_var\", \"0.7.0.bn3.num_batches_tracked\", \"0.7.0.downsample.0.weight\", \"0.7.0.downsample.1.weight\", \"0.7.0.downsample.1.bias\", \"0.7.0.downsample.1.running_mean\", \"0.7.0.downsample.1.running_var\", \"0.7.0.downsample.1.num_batches_tracked\", \"0.7.1.conv1.weight\", \"0.7.1.bn1.weight\", \"0.7.1.bn1.bias\", \"0.7.1.bn1.running_mean\", \"0.7.1.bn1.running_var\", \"0.7.1.bn1.num_batches_tracked\", \"0.7.1.conv2.weight\", \"0.7.1.bn2.weight\", \"0.7.1.bn2.bias\", \"0.7.1.bn2.running_mean\", \"0.7.1.bn2.running_var\", \"0.7.1.bn2.num_batches_tracked\", 
\"0.7.1.conv3.weight\", \"0.7.1.bn3.weight\", \"0.7.1.bn3.bias\", \"0.7.1.bn3.running_mean\", \"0.7.1.bn3.running_var\", \"0.7.1.bn3.num_batches_tracked\", \"0.7.2.conv1.weight\", \"0.7.2.bn1.weight\", \"0.7.2.bn1.bias\", \"0.7.2.bn1.running_mean\", \"0.7.2.bn1.running_var\", \"0.7.2.bn1.num_batches_tracked\", \"0.7.2.conv2.weight\", \"0.7.2.bn2.weight\", \"0.7.2.bn2.bias\", \"0.7.2.bn2.running_mean\", \"0.7.2.bn2.running_var\", \"0.7.2.bn2.num_batches_tracked\", \"0.7.2.conv3.weight\", \"0.7.2.bn3.weight\", \"0.7.2.bn3.bias\", \"0.7.2.bn3.running_mean\", \"0.7.2.bn3.running_var\", \"0.7.2.bn3.num_batches_tracked\", \"1.2.weight\", \"1.2.bias\", \"1.2.running_mean\", \"1.2.running_var\", \"1.2.num_batches_tracked\", \"1.4.weight\", \"1.6.weight\", \"1.6.bias\", \"1.6.running_mean\", \"1.6.running_var\", \"1.6.num_batches_tracked\", \"1.8.weight\". ", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)", "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;31m# learn.save('dragonfly-netV2-1')\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mlearn\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mload\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'memeclassifier-1'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/fastai/learner.py\u001b[0m in \u001b[0;36mload\u001b[0;34m(self, file, device, **kwargs)\u001b[0m\n\u001b[1;32m 347\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mopt\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m 
\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcreate_opt\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 348\u001b[0m \u001b[0mfile\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mjoin_path_file\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfile\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpath\u001b[0m\u001b[0;34m/\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmodel_dir\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mext\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m'.pth'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 349\u001b[0;31m \u001b[0mload_model\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfile\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mopt\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdevice\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mdevice\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 350\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 351\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/fastai/learner.py\u001b[0m in \u001b[0;36mload_model\u001b[0;34m(file, model, opt, with_opt, device, strict)\u001b[0m\n\u001b[1;32m 49\u001b[0m \u001b[0mhasopt\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mset\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstate\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m==\u001b[0m\u001b[0;34m{\u001b[0m\u001b[0;34m'model'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'opt'\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 50\u001b[0m \u001b[0mmodel_state\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mstate\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'model'\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mhasopt\u001b[0m \u001b[0;32melse\u001b[0m \u001b[0mstate\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 51\u001b[0;31m \u001b[0mget_model\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mload_state_dict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel_state\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mstrict\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mstrict\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 52\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mhasopt\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0mwith_opt\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 53\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mopt\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mload_state_dict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstate\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'opt'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36mload_state_dict\u001b[0;34m(self, state_dict, strict)\u001b[0m\n\u001b[1;32m 1050\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0merror_msgs\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m>\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1051\u001b[0m raise RuntimeError('Error(s) in loading state_dict for {}:\\n\\t{}'.format(\n\u001b[0;32m-> 1052\u001b[0;31m self.__class__.__name__, \"\\n\\t\".join(error_msgs)))\n\u001b[0m\u001b[1;32m 1053\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0m_IncompatibleKeys\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmissing_keys\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0munexpected_keys\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1054\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;31mRuntimeError\u001b[0m: Error(s) in loading state_dict for ResNet:\n\tMissing key(s) in state_dict: \"conv1.weight\", \"bn1.weight\", \"bn1.bias\", \"bn1.running_mean\", \"bn1.running_var\", \"layer1.0.conv1.weight\", \"layer1.0.bn1.weight\", \"layer1.0.bn1.bias\", \"layer1.0.bn1.running_mean\", \"layer1.0.bn1.running_var\", \"layer1.0.conv2.weight\", \"layer1.0.bn2.weight\", \"layer1.0.bn2.bias\", \"layer1.0.bn2.running_mean\", \"layer1.0.bn2.running_var\", \"layer1.0.conv3.weight\", \"layer1.0.bn3.weight\", \"layer1.0.bn3.bias\", \"layer1.0.bn3.running_mean\", \"layer1.0.bn3.running_var\", \"layer1.0.downsample.0.weight\", \"layer1.0.downsample.1.weight\", \"layer1.0.downsample.1.bias\", \"layer1.0.downsample.1.running_mean\", \"layer1.0.downsample.1.running_var\", \"layer1.1.conv1.weight\", \"layer1.1.bn1.weight\", \"layer1.1.bn1.bias\", \"layer1.1.bn1.running_mean\", \"layer1.1.bn1.running_var\", \"layer1.1.conv2.weight\", \"layer1.1.bn2.weight\", \"layer1.1.bn2.bias\", \"layer1.1.bn2.running_mean\", \"layer1.1.bn2.running_var\", \"layer1.1.conv3.weight\", \"layer1.1.bn3.weight\", \"layer1.1.bn3.bias\", \"layer1.1.bn3.running_mean\", \"layer1.1.bn3.running_var\", \"layer1.2.conv1.weight\", \"layer1.2.bn1.weight\", \"layer1.2.bn1.bias\", \"layer1.2.bn1.running_mean\", \"layer1.2.bn1.running_var\", \"layer1.2.conv2.weight\", \"layer1.2.bn2.weight\", \"layer1.2.bn2.bias\", \"layer1.2.bn2.running_mean\", \"layer1.2.bn2.running_var\", \"layer1.2.conv3.weight\", \"layer1.2.bn3.weight\", \"layer1.2.bn3.bias\", \"layer1.2.bn3.running_mean\", \"layer1.2.bn3.running_var\", \"layer2.0.conv1.weight\", \"layer2.0.bn1.weight\", \"layer2.0.bn1.bias\", \"layer2.0.bn1.running_mean\", \"layer2.0.bn1.running_var\", \"layer2.0.conv2.weight\", \"layer2.0.bn2.weight\", \"layer2.0.bn2.bias\", 
\"layer2.0.bn2.running_mean\", \"layer2.0.bn2.running_var\", \"layer2.0.conv3.weight\", \"layer2.0.bn3.weight\", \"layer2.0.bn3.bias\", \"layer2.0.bn3.running_mean\", \"layer2.0.bn3.running_var\", \"layer2.0.downsample.0.weight\", \"layer2.0.downsample.1.weight\", \"layer2.0.downsample.1.bias\", \"layer2.0.downsample.1.running_mean\", \"layer2.0.downsample.1.running_var\", \"layer2.1.conv1.weight\", \"layer2.1.bn1.weight\", \"layer2.1.bn1.bias\", \"layer2.1.bn1.running_mean\", \"layer2.1.bn1.running_var\", \"layer2.1.conv2.weight\", \"layer2.1.bn2.weight\", \"layer2.1.bn2.bias\", \"layer2.1.bn2.running_mean\", \"layer2.1.bn2.running_var\", \"layer2.1.conv3.weight\", \"layer2.1.bn3.weight\", \"layer2.1.bn3.bias\", \"layer2.1.bn3.running_mean\", \"layer2.1.bn3.running_var\", \"layer2.2.conv1.weight\", \"layer2.2.bn1.weight\", \"layer2.2.bn1.bias\", \"layer2.2.bn1.running_mean\", \"layer2.2.bn1.running_var\", \"layer2.2.conv2.weight\", \"layer2.2.bn2.weight\", \"layer2.2.bn2.bias\", \"layer2.2.bn2.running_mean\", \"layer2.2.bn2.running_var\", \"layer2.2.conv3.weight\", \"layer2.2.bn3.weight\", \"layer2.2.bn3.bias\", \"layer2.2.bn3.running_mean\", \"layer2.2.bn3.running_var\", \"layer2.3.conv1.weight\", \"layer2.3.bn1.weight\", \"layer2.3.bn1.bias\", \"layer2.3.bn1.running_mean\", \"layer2.3.bn1.running_var\", \"layer2.3.conv2.weight\", \"layer2.3.bn2.weight\", \"layer2.3.bn2.bias\", \"layer2.3.bn2.running_mean\", \"layer2.3.bn2.running_var\", \"layer2.3.conv3.weight\", \"layer2.3.bn3.weight\", \"layer2.3.bn3.bias\", \"layer2.3.bn3.running_mean\", \"layer2.3.bn3.running_var\", \"layer3.0.conv1.weight\", \"layer3.0.bn1.weight\", \"layer3.0.bn1.bias\", \"layer3.0.bn1.running_mean\", \"layer3.0.bn1.running_var\", \"layer3.0.conv2.weight\", \"layer3.0.bn2.weight\", \"layer3.0.bn2.bias\", \"layer3.0.bn2.running_mean\", \"layer3.0.bn2.running_var\", \"layer3.0.conv3.weight\", \"layer3.0.bn3.weight\", \"layer3.0.bn3.bias\", \"layer3.0.bn3.running_mean\", 
\"layer3.0.bn3.running_var\", \"layer3.0.downsample.0.weight\", \"layer3.0.downsample.1.weight\", \"layer3.0.downsample.1.bias\", \"layer3.0.downsample.1.running_mean\", \"layer3.0.downsample.1.running_var\", \"layer3.1.conv1.weight\", \"layer3.1.bn1.weight\", \"layer3.1.bn1.bias\", \"layer3.1.bn1.running_mean\", \"layer3.1.bn1.running_var\", \"layer3.1.conv2.weight\", \"layer3.1.bn2.weight\", \"layer3.1.bn2.bias\", \"layer3.1.bn2.running_mean\", \"layer3.1.bn2.running_var\", \"layer3.1.conv3.weight\", \"layer3.1.bn3.weight\", \"layer3.1.bn3.bias\", \"layer3.1.bn3.running_mean\", \"layer3.1.bn3.running_var\", \"layer3.2.conv1.weight\", \"layer3.2.bn1.weight\", \"layer3.2.bn1.bias\", \"layer3.2.bn1.running_mean\", \"layer3.2.bn1.running_var\", \"layer3.2.conv2.weight\", \"layer3.2.bn2.weight\", \"layer3.2.bn2.bias\", \"layer3.2.bn2.running_mean\", \"layer3.2.bn2.running_var\", \"layer3.2.conv3.weight\", \"layer3.2.bn3.weight\", \"layer3.2.bn3.bias\", \"layer3.2.bn3.running_mean\", \"layer3.2.bn3.running_var\", \"layer3.3.conv1.weight\", \"layer3.3.bn1.weight\", \"layer3.3.bn1.bias\", \"layer3.3.bn1.running_mean\", \"layer3.3.bn1.running_var\", \"layer3.3.conv2.weight\", \"layer3.3.bn2.weight\", \"layer3.3.bn2.bias\", \"layer3.3.bn2.running_mean\", \"layer3.3.bn2.running_var\", \"layer3.3.conv3.weight\", \"layer3.3.bn3.weight\", \"layer3.3.bn3.bias\", \"layer3.3.bn3.running_mean\", \"layer3.3.bn3.running_var\", \"layer3.4.conv1.weight\", \"layer3.4.bn1.weight\", \"layer3.4.bn1.bias\", \"layer3.4.bn1.running_mean\", \"layer3.4.bn1.running_var\", \"layer3.4.conv2.weight\", \"layer3.4.bn2.weight\", \"layer3.4.bn2.bias\", \"layer3.4.bn2.running_mean\", \"layer3.4.bn2.running_var\", \"layer3.4.conv3.weight\", \"layer3.4.bn3.weight\", \"layer3.4.bn3.bias\", \"layer3.4.bn3.running_mean\", \"layer3.4.bn3.running_var\", \"layer3.5.conv1.weight\", \"layer3.5.bn1.weight\", \"layer3.5.bn1.bias\", \"layer3.5.bn1.running_mean\", \"layer3.5.bn1.running_var\", 
\"layer3.5.conv2.weight\", \"layer3.5.bn2.weight\", \"layer3.5.bn2.bias\", \"layer3.5.bn2.running_mean\", \"layer3.5.bn2.running_var\", \"layer3.5.conv3.weight\", \"layer3.5.bn3.weight\", \"layer3.5.bn3.bias\", \"layer3.5.bn3.running_mean\", \"layer3.5.bn3.running_var\", \"layer4.0.conv1.weight\", \"layer4.0.bn1.weight\", \"layer4.0.bn1.bias\", \"layer4.0.bn1.running_mean\", \"layer4.0.bn1.running_var\", \"layer4.0.conv2.weight\", \"layer4.0.bn2.weight\", \"layer4.0.bn2.bias\", \"layer4.0.bn2.running_mean\", \"layer4.0.bn2.running_var\", \"layer4.0.conv3.weight\", \"layer4.0.bn3.weight\", \"layer4.0.bn3.bias\", \"layer4.0.bn3.running_mean\", \"layer4.0.bn3.running_var\", \"layer4.0.downsample.0.weight\", \"layer4.0.downsample.1.weight\", \"layer4.0.downsample.1.bias\", \"layer4.0.downsample.1.running_mean\", \"layer4.0.downsample.1.running_var\", \"layer4.1.conv1.weight\", \"layer4.1.bn1.weight\", \"layer4.1.bn1.bias\", \"layer4.1.bn1.running_mean\", \"layer4.1.bn1.running_var\", \"layer4.1.conv2.weight\", \"layer4.1.bn2.weight\", \"layer4.1.bn2.bias\", \"layer4.1.bn2.running_mean\", \"layer4.1.bn2.running_var\", \"layer4.1.conv3.weight\", \"layer4.1.bn3.weight\", \"layer4.1.bn3.bias\", \"layer4.1.bn3.running_mean\", \"layer4.1.bn3.running_var\", \"layer4.2.conv1.weight\", \"layer4.2.bn1.weight\", \"layer4.2.bn1.bias\", \"layer4.2.bn1.running_mean\", \"layer4.2.bn1.running_var\", \"layer4.2.conv2.weight\", \"layer4.2.bn2.weight\", \"layer4.2.bn2.bias\", \"layer4.2.bn2.running_mean\", \"layer4.2.bn2.running_var\", \"layer4.2.conv3.weight\", \"layer4.2.bn3.weight\", \"layer4.2.bn3.bias\", \"layer4.2.bn3.running_mean\", \"layer4.2.bn3.running_var\", \"fc.fc1.weight\", \"fc.fc1.bias\", \"fc.fc2.weight\", \"fc.fc2.bias\". 
\n\tUnexpected key(s) in state_dict: \"0.0.weight\", \"0.1.weight\", \"0.1.bias\", \"0.1.running_mean\", \"0.1.running_var\", \"0.1.num_batches_tracked\", \"0.4.0.conv1.weight\", \"0.4.0.bn1.weight\", \"0.4.0.bn1.bias\", \"0.4.0.bn1.running_mean\", \"0.4.0.bn1.running_var\", \"0.4.0.bn1.num_batches_tracked\", \"0.4.0.conv2.weight\", \"0.4.0.bn2.weight\", \"0.4.0.bn2.bias\", \"0.4.0.bn2.running_mean\", \"0.4.0.bn2.running_var\", \"0.4.0.bn2.num_batches_tracked\", \"0.4.0.conv3.weight\", \"0.4.0.bn3.weight\", \"0.4.0.bn3.bias\", \"0.4.0.bn3.running_mean\", \"0.4.0.bn3.running_var\", \"0.4.0.bn3.num_batches_tracked\", \"0.4.0.downsample.0.weight\", \"0.4.0.downsample.1.weight\", \"0.4.0.downsample.1.bias\", \"0.4.0.downsample.1.running_mean\", \"0.4.0.downsample.1.running_var\", \"0.4.0.downsample.1.num_batches_tracked\", \"0.4.1.conv1.weight\", \"0.4.1.bn1.weight\", \"0.4.1.bn1.bias\", \"0.4.1.bn1.running_mean\", \"0.4.1.bn1.running_var\", \"0.4.1.bn1.num_batches_tracked\", \"0.4.1.conv2.weight\", \"0.4.1.bn2.weight\", \"0.4.1.bn2.bias\", \"0.4.1.bn2.running_mean\", \"0.4.1.bn2.running_var\", \"0.4.1.bn2.num_batches_tracked\", \"0.4.1.conv3.weight\", \"0.4.1.bn3.weight\", \"0.4.1.bn3.bias\", \"0.4.1.bn3.running_mean\", \"0.4.1.bn3.running_var\", \"0.4.1.bn3.num_batches_tracked\", \"0.4.2.conv1.weight\", \"0.4.2.bn1.weight\", \"0.4.2.bn1.bias\", \"0.4.2.bn1.running_mean\", \"0.4.2.bn1.running_var\", \"0.4.2.bn1.num_batches_tracked\", \"0.4.2.conv2.weight\", \"0.4.2.bn2.weight\", \"0.4.2.bn2.bias\", \"0.4.2.bn2.running_mean\", \"0.4.2.bn2.running_var\", \"0.4.2.bn2.num_batches_tracked\", \"0.4.2.conv3.weight\", \"0.4.2.bn3.weight\", \"0.4.2.bn3.bias\", \"0.4.2.bn3.running_mean\", \"0.4.2.bn3.running_var\", \"0.4.2.bn3.num_batches_tracked\", \"0.5.0.conv1.weight\", \"0.5.0.bn1.weight\", \"0.5.0.bn1.bias\", \"0.5.0.bn1.running_mean\", \"0.5.0.bn1.running_var\", \"0.5.0.bn1.num_batches_tracked\", \"0.5.0.conv2.weight\", \"0.5.0.bn2.weight\", \"0.5.0.bn2.bias\", 
\"0.5.0.bn2.running_mean\", \"0.5.0.bn2.running_var\", \"0.5.0.bn2.num_batches_tracked\", \"0.5.0.conv3.weight\", \"0.5.0.bn3.weight\", \"0.5.0.bn3.bias\", \"0.5.0.bn3.running_mean\", \"0.5.0.bn3.running_var\", \"0.5.0.bn3.num_batches_tracked\", \"0.5.0.downsample.0.weight\", \"0.5.0.downsample.1.weight\", \"0.5.0.downsample.1.bias\", \"0.5.0.downsample.1.running_mean\", \"0.5.0.downsample.1.running_var\", \"0.5.0.downsample.1.num_batches_tracked\", \"0.5.1.conv1.weight\", \"0.5.1.bn1.weight\", \"0.5.1.bn1.bias\", \"0.5.1.bn1.running_mean\", \"0.5.1.bn1.running_var\", \"0.5.1.bn1.num_batches_tracked\", \"0.5.1.conv2.weight\", \"0.5.1.bn2.weight\", \"0.5.1.bn2.bias\", \"0.5.1.bn2.running_mean\", \"0.5.1.bn2.running_var\", \"0.5.1.bn2.num_batches_tracked\", \"0.5.1.conv3.weight\", \"0.5.1.bn3.weight\", \"0.5.1.bn3.bias\", \"0.5.1.bn3.running_mean\", \"0.5.1.bn3.running_var\", \"0.5.1.bn3.num_batches_tracked\", \"0.5.2.conv1.weight\", \"0.5.2.bn1.weight\", \"0.5.2.bn1.bias\", \"0.5.2.bn1.running_mean\", \"0.5.2.bn1.running_var\", \"0.5.2.bn1.num_batches_tracked\", \"0.5.2.conv2.weight\", \"0.5.2.bn2.weight\", \"0.5.2.bn2.bias\", \"0.5.2.bn2.running_mean\", \"0.5.2.bn2.running_var\", \"0.5.2.bn2.num_batches_tracked\", \"0.5.2.conv3.weight\", \"0.5.2.bn3.weight\", \"0.5.2.bn3.bias\", \"0.5.2.bn3.running_mean\", \"0.5.2.bn3.running_var\", \"0.5.2.bn3.num_batches_tracked\", \"0.5.3.conv1.weight\", \"0.5.3.bn1.weight\", \"0.5.3.bn1.bias\", \"0.5.3.bn1.running_mean\", \"0.5.3.bn1.running_var\", \"0.5.3.bn1.num_batches_tracked\", \"0.5.3.conv2.weight\", \"0.5.3.bn2.weight\", \"0.5.3.bn2.bias\", \"0.5.3.bn2.running_mean\", \"0.5.3.bn2.running_var\", \"0.5.3.bn2.num_batches_tracked\", \"0.5.3.conv3.weight\", \"0.5.3.bn3.weight\", \"0.5.3.bn3.bias\", \"0.5.3.bn3.running_mean\", \"0.5.3.bn3.running_var\", \"0.5.3.bn3.num_batches_tracked\", \"0.6.0.conv1.weight\", \"0.6.0.bn1.weight\", \"0.6.0.bn1.bias\", \"0.6.0.bn1.running_mean\", \"0.6.0.bn1.running_var\", 
\"0.6.0.bn1.num_batches_tracked\", \"0.6.0.conv2.weight\", \"0.6.0.bn2.weight\", \"0.6.0.bn2.bias\", \"0.6.0.bn2.running_mean\", \"0.6.0.bn2.running_var\", \"0.6.0.bn2.num_batches_tracked\", \"0.6.0.conv3.weight\", \"0.6.0.bn3.weight\", \"0.6.0.bn3.bias\", \"0.6.0.bn3.running_mean\", \"0.6.0.bn3.running_var\", \"0.6.0.bn3.num_batches_tracked\", \"0.6.0.downsample.0.weight\", \"0.6.0.downsample.1.weight\", \"0.6.0.downsample.1.bias\", \"0.6.0.downsample.1.running_mean\", \"0.6.0.downsample.1.running_var\", \"0.6.0.downsample.1.num_batches_tracked\", \"0.6.1.conv1.weight\", \"0.6.1.bn1.weight\", \"0.6.1.bn1.bias\", \"0.6.1.bn1.running_mean\", \"0.6.1.bn1.running_var\", \"0.6.1.bn1.num_batches_tracked\", \"0.6.1.conv2.weight\", \"0.6.1.bn2.weight\", \"0.6.1.bn2.bias\", \"0.6.1.bn2.running_mean\", \"0.6.1.bn2.running_var\", \"0.6.1.bn2.num_batches_tracked\", \"0.6.1.conv3.weight\", \"0.6.1.bn3.weight\", \"0.6.1.bn3.bias\", \"0.6.1.bn3.running_mean\", \"0.6.1.bn3.running_var\", \"0.6.1.bn3.num_batches_tracked\", \"0.6.2.conv1.weight\", \"0.6.2.bn1.weight\", \"0.6.2.bn1.bias\", \"0.6.2.bn1.running_mean\", \"0.6.2.bn1.running_var\", \"0.6.2.bn1.num_batches_tracked\", \"0.6.2.conv2.weight\", \"0.6.2.bn2.weight\", \"0.6.2.bn2.bias\", \"0.6.2.bn2.running_mean\", \"0.6.2.bn2.running_var\", \"0.6.2.bn2.num_batches_tracked\", \"0.6.2.conv3.weight\", \"0.6.2.bn3.weight\", \"0.6.2.bn3.bias\", \"0.6.2.bn3.running_mean\", \"0.6.2.bn3.running_var\", \"0.6.2.bn3.num_batches_tracked\", \"0.6.3.conv1.weight\", \"0.6.3.bn1.weight\", \"0.6.3.bn1.bias\", \"0.6.3.bn1.running_mean\", \"0.6.3.bn1.running_var\", \"0.6.3.bn1.num_batches_tracked\", \"0.6.3.conv2.weight\", \"0.6.3.bn2.weight\", \"0.6.3.bn2.bias\", \"0.6.3.bn2.running_mean\", \"0.6.3.bn2.running_var\", \"0.6.3.bn2.num_batches_tracked\", \"0.6.3.conv3.weight\", \"0.6.3.bn3.weight\", \"0.6.3.bn3.bias\", \"0.6.3.bn3.running_mean\", \"0.6.3.bn3.running_var\", \"0.6.3.bn3.num_batches_tracked\", \"0.6.4.conv1.weight\", 
\"0.6.4.bn1.weight\", \"0.6.4.bn1.bias\", \"0.6.4.bn1.running_mean\", \"0.6.4.bn1.running_var\", \"0.6.4.bn1.num_batches_tracked\", \"0.6.4.conv2.weight\", \"0.6.4.bn2.weight\", \"0.6.4.bn2.bias\", \"0.6.4.bn2.running_mean\", \"0.6.4.bn2.running_var\", \"0.6.4.bn2.num_batches_tracked\", \"0.6.4.conv3.weight\", \"0.6.4.bn3.weight\", \"0.6.4.bn3.bias\", \"0.6.4.bn3.running_mean\", \"0.6.4.bn3.running_var\", \"0.6.4.bn3.num_batches_tracked\", \"0.6.5.conv1.weight\", \"0.6.5.bn1.weight\", \"0.6.5.bn1.bias\", \"0.6.5.bn1.running_mean\", \"0.6.5.bn1.running_var\", \"0.6.5.bn1.num_batches_tracked\", \"0.6.5.conv2.weight\", \"0.6.5.bn2.weight\", \"0.6.5.bn2.bias\", \"0.6.5.bn2.running_mean\", \"0.6.5.bn2.running_var\", \"0.6.5.bn2.num_batches_tracked\", \"0.6.5.conv3.weight\", \"0.6.5.bn3.weight\", \"0.6.5.bn3.bias\", \"0.6.5.bn3.running_mean\", \"0.6.5.bn3.running_var\", \"0.6.5.bn3.num_batches_tracked\", \"0.7.0.conv1.weight\", \"0.7.0.bn1.weight\", \"0.7.0.bn1.bias\", \"0.7.0.bn1.running_mean\", \"0.7.0.bn1.running_var\", \"0.7.0.bn1.num_batches_tracked\", \"0.7.0.conv2.weight\", \"0.7.0.bn2.weight\", \"0.7.0.bn2.bias\", \"0.7.0.bn2.running_mean\", \"0.7.0.bn2.running_var\", \"0.7.0.bn2.num_batches_tracked\", \"0.7.0.conv3.weight\", \"0.7.0.bn3.weight\", \"0.7.0.bn3.bias\", \"0.7.0.bn3.running_mean\", \"0.7.0.bn3.running_var\", \"0.7.0.bn3.num_batches_tracked\", \"0.7.0.downsample.0.weight\", \"0.7.0.downsample.1.weight\", \"0.7.0.downsample.1.bias\", \"0.7.0.downsample.1.running_mean\", \"0.7.0.downsample.1.running_var\", \"0.7.0.downsample.1.num_batches_tracked\", \"0.7.1.conv1.weight\", \"0.7.1.bn1.weight\", \"0.7.1.bn1.bias\", \"0.7.1.bn1.running_mean\", \"0.7.1.bn1.running_var\", \"0.7.1.bn1.num_batches_tracked\", \"0.7.1.conv2.weight\", \"0.7.1.bn2.weight\", \"0.7.1.bn2.bias\", \"0.7.1.bn2.running_mean\", \"0.7.1.bn2.running_var\", \"0.7.1.bn2.num_batches_tracked\", \"0.7.1.conv3.weight\", \"0.7.1.bn3.weight\", \"0.7.1.bn3.bias\", \"0.7.1.bn3.running_mean\", 
\"0.7.1.bn3.running_var\", \"0.7.1.bn3.num_batches_tracked\", \"0.7.2.conv1.weight\", \"0.7.2.bn1.weight\", \"0.7.2.bn1.bias\", \"0.7.2.bn1.running_mean\", \"0.7.2.bn1.running_var\", \"0.7.2.bn1.num_batches_tracked\", \"0.7.2.conv2.weight\", \"0.7.2.bn2.weight\", \"0.7.2.bn2.bias\", \"0.7.2.bn2.running_mean\", \"0.7.2.bn2.running_var\", \"0.7.2.bn2.num_batches_tracked\", \"0.7.2.conv3.weight\", \"0.7.2.bn3.weight\", \"0.7.2.bn3.bias\", \"0.7.2.bn3.running_mean\", \"0.7.2.bn3.running_var\", \"0.7.2.bn3.num_batches_tracked\", \"1.2.weight\", \"1.2.bias\", \"1.2.running_mean\", \"1.2.running_var\", \"1.2.num_batches_tracked\", \"1.4.weight\", \"1.6.weight\", \"1.6.bias\", \"1.6.running_mean\", \"1.6.running_var\", \"1.6.num_batches_tracked\", \"1.8.weight\". " ] } ], "source": [ "# learn.save('dragonfly-netV2-1')\n", "learn.load('memeclassifier-1')" ] }, { "cell_type": "code", "execution_count": 90, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "Sequential(\n", " (0): Sequential(\n", " (0): Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)\n", " (1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (2): ReLU(inplace=True)\n", " (3): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)\n", " (4): Sequential(\n", " (0): Bottleneck(\n", " (conv1): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (relu): ReLU(inplace=True)\n", " (downsample): Sequential(\n", " (0): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), 
bias=False)\n", " (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " )\n", " )\n", " (1): Bottleneck(\n", " (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (relu): ReLU(inplace=True)\n", " )\n", " (2): Bottleneck(\n", " (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (relu): ReLU(inplace=True)\n", " )\n", " )\n", " (5): Sequential(\n", " (0): Bottleneck(\n", " (conv1): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (relu): ReLU(inplace=True)\n", " (downsample): Sequential(\n", " (0): Conv2d(256, 512, 
kernel_size=(1, 1), stride=(2, 2), bias=False)\n", " (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " )\n", " )\n", " (1): Bottleneck(\n", " (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (relu): ReLU(inplace=True)\n", " )\n", " (2): Bottleneck(\n", " (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (relu): ReLU(inplace=True)\n", " )\n", " (3): Bottleneck(\n", " (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (relu): ReLU(inplace=True)\n", " )\n", " )\n", " (6): Sequential(\n", " (0): 
Bottleneck(\n", " (conv1): Conv2d(512, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (relu): ReLU(inplace=True)\n", " (downsample): Sequential(\n", " (0): Conv2d(512, 1024, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", " (1): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " )\n", " )\n", " (1): Bottleneck(\n", " (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (relu): ReLU(inplace=True)\n", " )\n", " (2): Bottleneck(\n", " (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (relu): ReLU(inplace=True)\n", 
" )\n", " (3): Bottleneck(\n", " (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (relu): ReLU(inplace=True)\n", " )\n", " (4): Bottleneck(\n", " (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (relu): ReLU(inplace=True)\n", " )\n", " (5): Bottleneck(\n", " (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (relu): ReLU(inplace=True)\n", " )\n", " )\n", " (7): Sequential(\n", " (0): Bottleneck(\n", " (conv1): Conv2d(1024, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, 
affine=True, track_running_stats=True)\n", " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (relu): ReLU(inplace=True)\n", " (downsample): Sequential(\n", " (0): Conv2d(1024, 2048, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", " (1): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " )\n", " )\n", " (1): Bottleneck(\n", " (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (relu): ReLU(inplace=True)\n", " )\n", " (2): Bottleneck(\n", " (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", " (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (relu): ReLU(inplace=True)\n", " )\n", " )\n", " )\n", " (1): Sequential(\n", " (0): AdaptiveConcatPool2d(\n", " (ap): AdaptiveAvgPool2d(output_size=1)\n", " (mp): 
AdaptiveMaxPool2d(output_size=1)\n", " )\n", " (1): Flatten(full=False)\n", " (2): BatchNorm1d(4096, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (3): Dropout(p=0.25, inplace=False)\n", " (4): Linear(in_features=4096, out_features=512, bias=False)\n", " (5): ReLU(inplace=True)\n", " (6): BatchNorm1d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (7): Dropout(p=0.5, inplace=False)\n", " (8): Linear(in_features=512, out_features=2, bias=False)\n", " )\n", ")" ] }, "execution_count": 90, "metadata": {}, "output_type": "execute_result" } ], "source": [ "model" ] }, { "cell_type": "code", "execution_count": 61, "metadata": { "scrolled": true }, "outputs": [], "source": [ "# labels_set = []\n", "# preds_set = []\n", "# for idx, data in enumerate(dataloaders['test']):\n", "# # get the inputs\n", "# model.eval()\n", "# print(\"batch no. : \", idx)\n", "# inputs, labels = data\n", "# labels_set += labels.tolist()\n", "# inputs, labels = inputs.cuda(), labels.cuda()\n", "# # print(labels)\n", "# outputs = model(inputs)\n", "# _, preds = torch.max(outputs.data, 1)\n", "# preds_set += preds.cpu().tolist()\n", "# # print(labels_set, preds_set)\n" ] }, { "cell_type": "code", "execution_count": 62, "metadata": {}, "outputs": [], "source": [ "# from sklearn.metrics import accuracy_score\n", "# accuracy_score(y_pred=preds_set, y_true=labels_set)" ] }, { "cell_type": "code", "execution_count": 63, "metadata": { "scrolled": true }, "outputs": [], "source": [ "import matplotlib as mpl\n", "font = {'weight' : 'bold',\n", " 'size' : 3}\n", "\n", "mpl.rc('font', **font)\n", "mpl.rcParams['figure.dpi']= 50\n", "import seaborn as sns\n", "sns.set_context(\"notebook\", rc={\"lines.linewidth\": 1.0})\n" ] }, { "cell_type": "code", "execution_count": 64, "metadata": { "scrolled": true }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "batch no. : 0\n", "batch no. : 1\n", "batch no. : 2\n", "batch no. : 3\n", "batch no. 
: 4\n", "batch no. : 5\n", "batch no. : 6\n", "batch no. : 7\n", "batch no. : 8\n", "batch no. : 9\n", "batch no. : 10\n", "batch no. : 11\n", "batch no. : 12\n", "batch no. : 13\n", "batch no. : 14\n", "batch no. : 15\n", "batch no. : 16\n", "batch no. : 17\n", "batch no. : 18\n", "batch no. : 19\n", "batch no. : 20\n", "batch no. : 21\n", "batch no. : 22\n", "batch no. : 23\n", "batch no. : 24\n", "batch no. : 25\n", "batch no. : 26\n", "batch no. : 27\n", "batch no. : 28\n", "batch no. : 29\n", "batch no. : 30\n", "batch no. : 31\n", "batch no. : 32\n", "batch no. : 33\n", "batch no. : 34\n", "batch no. : 35\n", "batch no. : 36\n", "batch no. : 37\n", "batch no. : 38\n", "batch no. : 39\n", "batch no. : 40\n", "batch no. : 41\n", "batch no. : 42\n", "batch no. : 43\n", "batch no. : 44\n", "batch no. : 45\n", "batch no. : 46\n", "batch no. : 47\n", "batch no. : 48\n", "batch no. : 49\n", "batch no. : 50\n", "batch no. : 51\n", "batch no. : 52\n", "batch no. : 53\n", "batch no. : 54\n", "batch no. : 55\n", "batch no. : 56\n", "batch no. : 57\n", "batch no. : 58\n", "batch no. : 59\n", "batch no. : 60\n", "batch no. : 61\n", "batch no. : 62\n", "batch no. : 63\n", "batch no. : 64\n", "batch no. : 65\n", "batch no. : 66\n", "batch no. : 67\n", "batch no. : 68\n", "batch no. : 69\n", "batch no. : 70\n", "batch no. : 71\n", "batch no. : 72\n", "batch no. : 73\n", "batch no. : 74\n", "batch no. : 75\n", "batch no. : 76\n", "batch no. : 77\n", "batch no. : 78\n", "batch no. : 79\n", "batch no. : 80\n", "batch no. : 81\n", "batch no. : 82\n", "batch no. : 83\n", "batch no. : 84\n", "batch no. : 85\n", "batch no. : 86\n", "batch no. : 87\n", "batch no. : 88\n", "batch no. : 89\n", "batch no. : 90\n", "batch no. : 91\n", "batch no. : 92\n", "batch no. : 93\n", "batch no. : 94\n", "batch no. : 95\n", "batch no. : 96\n", "batch no. : 97\n", "batch no. : 98\n", "batch no. : 99\n", "batch no. : 100\n", "batch no. : 101\n", "batch no. : 102\n", "batch no. : 103\n", "batch no. 
: 104\n", "batch no. : 105\n", "batch no. : 106\n", "batch no. : 107\n", "batch no. : 108\n", "batch no. : 109\n", "batch no. : 110\n", "batch no. : 111\n", "batch no. : 112\n", "batch no. : 113\n", "batch no. : 114\n", "batch no. : 115\n", "batch no. : 116\n", "batch no. : 117\n", "batch no. : 118\n", "batch no. : 119\n", "batch no. : 120\n", "batch no. : 121\n", "batch no. : 122\n", "batch no. : 123\n", "batch no. : 124\n", "batch no. : 125\n", "batch no. : 126\n", "batch no. : 127\n", "batch no. : 128\n", "batch no. : 129\n", "batch no. : 130\n", "batch no. : 131\n", "batch no. : 132\n", "batch no. : 133\n", "batch no. : 134\n", "batch no. : 135\n", "batch no. : 136\n", "batch no. : 137\n", "batch no. : 138\n", "batch no. : 139\n", "batch no. : 140\n", "batch no. : 141\n", "batch no. : 142\n", "batch no. : 143\n", "batch no. : 144\n", "batch no. : 145\n", "batch no. : 146\n", "batch no. : 147\n", "batch no. : 148\n", "batch no. : 149\n", "batch no. : 150\n", "batch no. : 151\n", "batch no. : 152\n", "batch no. : 153\n", "batch no. : 154\n", "batch no. : 155\n", "batch no. : 156\n", "batch no. : 157\n", "batch no. : 158\n", "batch no. : 159\n", "batch no. : 160\n", "batch no. : 161\n", "batch no. : 162\n", "batch no. : 163\n", "batch no. : 164\n", "batch no. : 165\n", "batch no. : 166\n", "batch no. : 167\n", "batch no. : 168\n", "batch no. : 169\n", "batch no. : 170\n", "batch no. : 171\n", "batch no. : 172\n", "batch no. : 173\n", "batch no. : 174\n", "batch no. : 175\n", "batch no. : 176\n", "batch no. : 177\n", "batch no. : 178\n", "batch no. : 179\n", "batch no. : 180\n", "batch no. : 181\n", "batch no. : 182\n", "batch no. : 183\n", "batch no. : 184\n", "batch no. : 185\n", "batch no. : 186\n", "batch no. : 187\n", "batch no. : 188\n", "batch no. : 189\n", "batch no. : 190\n", "batch no. : 191\n", "batch no. : 192\n", "batch no. : 193\n", "batch no. : 194\n", "batch no. : 195\n", "batch no. : 196\n", "batch no. : 197\n", "batch no. : 198\n", "batch no. 
: 199\n", "batch no. : 200\n", "batch no. : 201\n", "batch no. : 202\n", "batch no. : 203\n", "batch no. : 204\n", "batch no. : 205\n", "batch no. : 206\n", "batch no. : 207\n", "batch no. : 208\n", "batch no. : 209\n", "batch no. : 210\n", "batch no. : 211\n", "batch no. : 212\n", "batch no. : 213\n", "batch no. : 214\n", "batch no. : 215\n", "batch no. : 216\n", "batch no. : 217\n", "batch no. : 218\n", "batch no. : 219\n", "batch no. : 220\n", "batch no. : 221\n", "batch no. : 222\n", "batch no. : 223\n", "batch no. : 224\n", "batch no. : 225\n", "batch no. : 226\n", "batch no. : 227\n", "batch no. : 228\n", "batch no. : 229\n", "batch no. : 230\n", "batch no. : 231\n", "batch no. : 232\n", "batch no. : 233\n", "batch no. : 234\n", "batch no. : 235\n", "batch no. : 236\n", "batch no. : 237\n", "batch no. : 238\n", "batch no. : 239\n", "batch no. : 240\n", "batch no. : 241\n", "batch no. : 242\n", "batch no. : 243\n", "batch no. : 244\n", "batch no. : 245\n", "batch no. : 246\n", "batch no. : 247\n", "batch no. : 248\n", "batch no. : 249\n", "batch no. : 250\n", "batch no. : 251\n", "batch no. : 252\n", "batch no. : 253\n", "batch no. : 254\n", "batch no. : 255\n", "batch no. : 256\n", "batch no. : 257\n", "batch no. : 258\n", "batch no. : 259\n", "batch no. : 260\n", "batch no. : 261\n", "batch no. : 262\n", "batch no. : 263\n", "batch no. : 264\n", "batch no. : 265\n", "batch no. : 266\n", "batch no. : 267\n", "batch no. : 268\n", "batch no. : 269\n", "batch no. : 270\n", "batch no. : 271\n", "batch no. : 272\n", "batch no. : 273\n", "batch no. : 274\n", "batch no. : 275\n", "batch no. : 276\n", "batch no. : 277\n", "batch no. : 278\n", "batch no. : 279\n", "batch no. : 280\n", "batch no. : 281\n", "batch no. : 282\n", "batch no. : 283\n", "batch no. : 284\n", "batch no. : 285\n", "batch no. : 286\n", "batch no. : 287\n", "batch no. : 288\n", "batch no. : 289\n", "batch no. : 290\n", "batch no. : 291\n", "batch no. : 292\n", "batch no. : 293\n", "batch no. 
: 294\n", "batch no. : 295\n", "batch no. : 296\n", "batch no. : 297\n", "batch no. : 298\n", "batch no. : 299\n", "batch no. : 300\n", "batch no. : 301\n", "batch no. : 302\n", "batch no. : 303\n", "batch no. : 304\n", "batch no. : 305\n", "batch no. : 306\n", "batch no. : 307\n", "batch no. : 308\n", "batch no. : 309\n", "batch no. : 310\n", "batch no. : 311\n", "batch no. : 312\n", "batch no. : 313\n", "batch no. : 314\n", "batch no. : 315\n", "batch no. : 316\n", "batch no. : 317\n", "batch no. : 318\n", "batch no. : 319\n", "batch no. : 320\n", "batch no. : 321\n", "batch no. : 322\n", "batch no. : 323\n", "batch no. : 324\n", "batch no. : 325\n", "batch no. : 326\n", "batch no. : 327\n", "batch no. : 328\n", "batch no. : 329\n", "batch no. : 330\n", "batch no. : 331\n", "batch no. : 332\n", "batch no. : 333\n", "batch no. : 334\n", "batch no. : 335\n", "batch no. : 336\n", "batch no. : 337\n", "batch no. : 338\n", "batch no. : 339\n", "batch no. : 340\n", "batch no. : 341\n", "batch no. : 342\n", "batch no. : 343\n", "batch no. : 344\n", "batch no. : 345\n", "batch no. : 346\n", "batch no. : 347\n", "batch no. : 348\n", "batch no. : 349\n", "batch no. : 350\n", "batch no. : 351\n", "batch no. : 352\n", "batch no. : 353\n", "batch no. : 354\n", "batch no. : 355\n", "batch no. : 356\n", "batch no. : 357\n", "batch no. : 358\n", "batch no. : 359\n", "batch no. : 360\n", "batch no. : 361\n", "batch no. : 362\n", "batch no. : 363\n", "batch no. : 364\n", "batch no. : 365\n", "batch no. : 366\n", "batch no. : 367\n", "batch no. : 368\n", "batch no. : 369\n", "batch no. : 370\n", "batch no. : 371\n", "batch no. : 372\n", "batch no. : 373\n", "batch no. : 374\n", "batch no. : 375\n", "batch no. : 376\n", "batch no. : 377\n", "batch no. : 378\n", "batch no. : 379\n", "batch no. : 380\n", "batch no. : 381\n", "batch no. : 382\n", "batch no. : 383\n", "batch no. : 384\n", "batch no. : 385\n", "batch no. : 386\n", "batch no. : 387\n", "batch no. : 388\n", "batch no. 
: 389\n", "batch no. : 390\n", "batch no. : 391\n", "batch no. : 392\n", "batch no. : 393\n", "batch no. : 394\n", "batch no. : 395\n", "batch no. : 396\n", "batch no. : 397\n", "batch no. : 398\n", "batch no. : 399\n", "batch no. : 400\n", "batch no. : 401\n", "batch no. : 402\n", "batch no. : 403\n", "batch no. : 404\n", "batch no. : 405\n", "batch no. : 406\n", "batch no. : 407\n", "batch no. : 408\n", "batch no. : 409\n", "batch no. : 410\n", "batch no. : 411\n", "batch no. : 412\n", "batch no. : 413\n", "batch no. : 414\n", "batch no. : 415\n", "batch no. : 416\n", "batch no. : 417\n", "batch no. : 418\n", "batch no. : 419\n", "batch no. : 420\n", "batch no. : 421\n", "batch no. : 422\n", "batch no. : 423\n", "batch no. : 424\n", "batch no. : 425\n", "batch no. : 426\n", "batch no. : 427\n", "batch no. : 428\n", "batch no. : 429\n", "batch no. : 430\n", "batch no. : 431\n", "batch no. : 432\n", "batch no. : 433\n", "batch no. : 434\n", "batch no. : 435\n", "batch no. : 436\n", "batch no. : 437\n", "batch no. : 438\n", "batch no. : 439\n", "batch no. : 440\n", "batch no. : 441\n", "batch no. : 442\n", "batch no. : 443\n", "batch no. : 444\n", "batch no. : 445\n", "batch no. : 446\n", "batch no. : 447\n", "batch no. : 448\n", "batch no. : 449\n", "batch no. : 450\n", "batch no. : 451\n", "batch no. : 452\n", "batch no. : 453\n", "batch no. : 454\n", "batch no. : 455\n", "batch no. : 456\n", "batch no. : 457\n", "batch no. : 458\n", "batch no. : 459\n", "batch no. : 460\n", "batch no. : 461\n", "batch no. : 462\n", "batch no. : 463\n", "batch no. : 464\n", "batch no. : 465\n", "batch no. : 466\n", "batch no. : 467\n", "batch no. : 468\n", "batch no. : 469\n", "batch no. : 470\n", "batch no. : 471\n", "batch no. : 472\n", "batch no. : 473\n", "batch no. : 474\n", "batch no. : 475\n", "batch no. : 476\n", "batch no. : 477\n", "batch no. : 478\n", "batch no. : 479\n", "batch no. : 480\n", "batch no. : 481\n", "batch no. : 482\n", "batch no. : 483\n", "batch no. 
: 484\n", "batch no. : 485\n", "batch no. : 486\n", "batch no. : 487\n", "batch no. : 488\n", "batch no. : 489\n", "batch no. : 490\n", "batch no. : 491\n", "batch no. : 492\n", "batch no. : 493\n", "batch no. : 494\n", "batch no. : 495\n", "batch no. : 496\n", "batch no. : 497\n", "batch no. : 498\n", "batch no. : 499\n", "batch no. : 500\n", "batch no. : 501\n", "batch no. : 502\n", "batch no. : 503\n", "batch no. : 504\n", "batch no. : 505\n", "batch no. : 506\n", "batch no. : 507\n", "batch no. : 508\n", "batch no. : 509\n", "batch no. : 510\n", "batch no. : 511\n", "batch no. : 512\n", "batch no. : 513\n", "batch no. : 514\n", "batch no. : 515\n", "batch no. : 516\n", "batch no. : 517\n", "batch no. : 518\n", "batch no. : 519\n", "batch no. : 520\n", "batch no. : 521\n", "batch no. : 522\n", "batch no. : 523\n", "batch no. : 524\n", "batch no. : 525\n", "batch no. : 526\n", "batch no. : 527\n", "batch no. : 528\n", "batch no. : 529\n", "batch no. : 530\n", "batch no. : 531\n", "batch no. : 532\n", "batch no. : 533\n", "batch no. : 534\n", "batch no. : 535\n", "batch no. : 536\n", "batch no. : 537\n", "batch no. : 538\n", "batch no. : 539\n", "batch no. : 540\n", "batch no. : 541\n", "batch no. : 542\n", "batch no. : 543\n", "batch no. : 544\n", "batch no. : 545\n", "batch no. : 546\n", "batch no. : 547\n", "batch no. : 548\n", "batch no. : 549\n", "batch no. : 550\n", "batch no. : 551\n", "batch no. : 552\n", "batch no. : 553\n", "batch no. : 554\n", "batch no. : 555\n", "batch no. : 556\n", "batch no. : 557\n", "batch no. : 558\n", "batch no. : 559\n", "batch no. : 560\n", "batch no. : 561\n", "batch no. : 562\n", "batch no. : 563\n", "batch no. : 564\n", "batch no. : 565\n", "batch no. : 566\n", "batch no. : 567\n", "batch no. : 568\n", "batch no. : 569\n", "batch no. : 570\n", "batch no. : 571\n", "batch no. : 572\n", "batch no. : 573\n", "batch no. : 574\n", "batch no. : 575\n", "batch no. : 576\n", "batch no. : 577\n", "batch no. : 578\n", "batch no. 
: 579\n", "batch no. : 580\n", "batch no. : 581\n", "batch no. : 582\n", "batch no. : 583\n", "batch no. : 584\n", "batch no. : 585\n", "batch no. : 586\n", "batch no. : 587\n", "batch no. : 588\n", "batch no. : 589\n", "batch no. : 590\n", "batch no. : 591\n", "batch no. : 592\n", "batch no. : 593\n", "batch no. : 594\n", "batch no. : 595\n", "batch no. : 596\n", "batch no. : 597\n", "batch no. : 598\n", "batch no. : 599\n", "batch no. : 600\n", "batch no. : 601\n", "batch no. : 602\n", "batch no. : 603\n", "batch no. : 604\n", "batch no. : 605\n", "batch no. : 606\n", "batch no. : 607\n", "batch no. : 608\n", "batch no. : 609\n", "batch no. : 610\n", "batch no. : 611\n", "batch no. : 612\n", "batch no. : 613\n", "batch no. : 614\n", "batch no. : 615\n", "batch no. : 616\n", "batch no. : 617\n", "batch no. : 618\n", "batch no. : 619\n", "batch no. : 620\n", "batch no. : 621\n", "batch no. : 622\n", "batch no. : 623\n", "batch no. : 624\n", "batch no. : 625\n", "batch no. : 626\n", "batch no. : 627\n", "batch no. : 628\n", "batch no. : 629\n", "batch no. : 630\n", "batch no. : 631\n", "batch no. : 632\n", "batch no. : 633\n", "batch no. : 634\n", "batch no. : 635\n", "batch no. : 636\n", "batch no. : 637\n", "batch no. : 638\n", "batch no. : 639\n", "batch no. : 640\n", "batch no. : 641\n", "batch no. : 642\n", "batch no. : 643\n", "batch no. : 644\n", "batch no. : 645\n", "batch no. : 646\n", "batch no. : 647\n", "batch no. : 648\n", "batch no. : 649\n", "batch no. : 650\n", "batch no. : 651\n", "batch no. : 652\n", "batch no. : 653\n", "batch no. : 654\n", "batch no. : 655\n", "batch no. : 656\n", "batch no. : 657\n", "batch no. : 658\n", "batch no. : 659\n", "batch no. : 660\n", "batch no. : 661\n", "batch no. : 662\n", "batch no. : 663\n", "batch no. : 664\n", "batch no. : 665\n", "batch no. : 666\n", "batch no. : 667\n", "batch no. : 668\n", "batch no. : 669\n", "batch no. : 670\n", "batch no. : 671\n", "batch no. : 672\n", "batch no. : 673\n", "batch no. 
: 674\n", "batch no. : 675\n", "batch no. : 676\n", "batch no. : 677\n", "batch no. : 678\n", "batch no. : 679\n", "batch no. : 680\n", "batch no. : 681\n", "batch no. : 682\n", "batch no. : 683\n", "batch no. : 684\n", "batch no. : 685\n", "batch no. : 686\n", "batch no. : 687\n", "batch no. : 688\n", "batch no. : 689\n", "batch no. : 690\n", "batch no. : 691\n", "batch no. : 692\n", "batch no. : 693\n", "batch no. : 694\n", "batch no. : 695\n", "batch no. : 696\n", "batch no. : 697\n", "batch no. : 698\n", "batch no. : 699\n", "batch no. : 700\n", "batch no. : 701\n", "batch no. : 702\n", "batch no. : 703\n", "batch no. : 704\n", "batch no. : 705\n", "batch no. : 706\n", "batch no. : 707\n", "batch no. : 708\n", "batch no. : 709\n", "batch no. : 710\n", "batch no. : 711\n", "batch no. : 712\n", "batch no. : 713\n", "batch no. : 714\n", "batch no. : 715\n", "batch no. : 716\n", "batch no. : 717\n", "batch no. : 718\n", "batch no. : 719\n", "batch no. : 720\n", "batch no. : 721\n", "batch no. : 722\n", "batch no. : 723\n", "batch no. : 724\n", "batch no. : 725\n", "batch no. : 726\n", "batch no. : 727\n", "batch no. : 728\n", "batch no. : 729\n", "batch no. : 730\n", "batch no. : 731\n", "batch no. : 732\n", "batch no. : 733\n", "batch no. : 734\n", "batch no. : 735\n", "batch no. : 736\n", "batch no. : 737\n", "batch no. : 738\n", "batch no. : 739\n", "batch no. : 740\n", "batch no. : 741\n", "batch no. : 742\n", "batch no. : 743\n", "batch no. : 744\n", "batch no. : 745\n", "batch no. : 746\n", "batch no. : 747\n", "batch no. : 748\n", "batch no. : 749\n", "batch no. : 750\n", "batch no. : 751\n", "batch no. : 752\n", "batch no. : 753\n", "batch no. : 754\n", "batch no. : 755\n", "batch no. : 756\n", "batch no. : 757\n", "batch no. : 758\n", "batch no. : 759\n", "batch no. : 760\n", "batch no. : 761\n", "batch no. : 762\n", "batch no. : 763\n", "batch no. : 764\n", "batch no. : 765\n", "batch no. : 766\n", "batch no. : 767\n", "batch no. : 768\n", "batch no. 
: 769\n", "batch no. : 770\n", "batch no. : 771\n", "batch no. : 772\n", "batch no. : 773\n", "batch no. : 774\n", "batch no. : 775\n", "batch no. : 776\n", "batch no. : 777\n", "batch no. : 778\n", "batch no. : 779\n", "batch no. : 780\n", "batch no. : 781\n", "batch no. : 782\n", "batch no. : 783\n", "batch no. : 784\n", "batch no. : 785\n", "batch no. : 786\n", "batch no. : 787\n", "batch no. : 788\n", "batch no. : 789\n", "batch no. : 790\n", "batch no. : 791\n", "batch no. : 792\n", "batch no. : 793\n", "batch no. : 794\n", "batch no. : 795\n", "batch no. : 796\n", "batch no. : 797\n", "batch no. : 798\n", "batch no. : 799\n", "batch no. : 800\n", "batch no. : 801\n", "batch no. : 802\n", "batch no. : 803\n", "batch no. : 804\n", "batch no. : 805\n", "batch no. : 806\n", "batch no. : 807\n", "batch no. : 808\n", "batch no. : 809\n", "batch no. : 810\n", "batch no. : 811\n", "batch no. : 812\n", "batch no. : 813\n", "batch no. : 814\n", "batch no. : 815\n", "batch no. : 816\n", "batch no. : 817\n", "batch no. : 818\n", "batch no. : 819\n", "batch no. : 820\n", "batch no. : 821\n", "batch no. : 822\n", "batch no. : 823\n", "batch no. : 824\n", "batch no. : 825\n", "batch no. : 826\n", "batch no. : 827\n", "batch no. : 828\n", "batch no. : 829\n", "batch no. : 830\n", "batch no. : 831\n", "batch no. : 832\n", "batch no. : 833\n", "batch no. : 834\n", "batch no. : 835\n", "batch no. : 836\n", "batch no. : 837\n", "batch no. : 838\n", "batch no. : 839\n", "batch no. : 840\n", "batch no. : 841\n", "batch no. : 842\n", "batch no. : 843\n", "batch no. : 844\n", "batch no. : 845\n", "batch no. : 846\n", "batch no. : 847\n", "batch no. : 848\n", "batch no. : 849\n", "batch no. : 850\n", "batch no. : 851\n", "batch no. : 852\n", "batch no. : 853\n", "batch no. : 854\n", "batch no. : 855\n", "batch no. : 856\n", "batch no. : 857\n", "batch no. : 858\n", "batch no. : 859\n", "batch no. : 860\n", "batch no. : 861\n", "batch no. : 862\n", "batch no. : 863\n", "batch no. 
: 864\n", "batch no. : 865\n", "batch no. : 866\n", "batch no. : 867\n", "batch no. : 868\n", "batch no. : 869\n", "batch no. : 870\n", "batch no. : 871\n", "batch no. : 872\n", "batch no. : 873\n", "batch no. : 874\n", "batch no. : 875\n", "batch no. : 876\n", "batch no. : 877\n", "batch no. : 878\n", "batch no. : 879\n", "batch no. : 880\n", "batch no. : 881\n", "batch no. : 882\n", "batch no. : 883\n", "batch no. : 884\n", "batch no. : 885\n", "batch no. : 886\n", "batch no. : 887\n", "batch no. : 888\n", "batch no. : 889\n", "batch no. : 890\n", "batch no. : 891\n", "batch no. : 892\n", "batch no. : 893\n", "batch no. : 894\n", "batch no. : 895\n", "batch no. : 896\n", "batch no. : 897\n", "batch no. : 898\n", "batch no. : 899\n", "batch no. : 900\n", "batch no. : 901\n", "batch no. : 902\n", "batch no. : 903\n", "batch no. : 904\n", "batch no. : 905\n", "batch no. : 906\n", "batch no. : 907\n", "batch no. : 908\n", "batch no. : 909\n", "batch no. : 910\n", "batch no. : 911\n", "batch no. : 912\n", "batch no. : 913\n", "batch no. : 914\n", "batch no. : 915\n", "batch no. : 916\n", "batch no. : 917\n", "batch no. : 918\n", "batch no. : 919\n", "batch no. : 920\n", "batch no. : 921\n", "batch no. : 922\n", "batch no. : 923\n", "batch no. : 924\n", "batch no. : 925\n", "batch no. : 926\n", "batch no. : 927\n", "batch no. : 928\n", "batch no. : 929\n", "batch no. : 930\n", "batch no. : 931\n", "batch no. : 932\n", "batch no. : 933\n", "batch no. : 934\n", "batch no. : 935\n", "batch no. : 936\n", "batch no. : 937\n", "batch no. : 938\n", "batch no. : 939\n", "batch no. : 940\n", "batch no. : 941\n", "batch no. : 942\n", "batch no. : 943\n", "batch no. : 944\n", "batch no. : 945\n", "batch no. : 946\n", "batch no. : 947\n", "batch no. : 948\n", "batch no. : 949\n", "batch no. : 950\n", "batch no. : 951\n", "batch no. : 952\n", "batch no. : 953\n", "batch no. : 954\n", "batch no. : 955\n", "batch no. : 956\n", "batch no. : 957\n", "batch no. : 958\n", "batch no. 
: 959\n", "batch no. : 960\n", "batch no. : 961\n", "batch no. : 962\n", "batch no. : 963\n", "batch no. : 964\n", "batch no. : 965\n", "batch no. : 966\n", "batch no. : 967\n", "batch no. : 968\n", "batch no. : 969\n", "batch no. : 970\n", "batch no. : 971\n", "batch no. : 972\n", "batch no. : 973\n", "batch no. : 974\n", "batch no. : 975\n", "batch no. : 976\n", "batch no. : 977\n", "batch no. : 978\n", "batch no. : 979\n", "batch no. : 980\n", "batch no. : 981\n", "batch no. : 982\n", "batch no. : 983\n", "batch no. : 984\n", "batch no. : 985\n", "batch no. : 986\n", "batch no. : 987\n", "batch no. : 988\n", "batch no. : 989\n", "batch no. : 990\n", "batch no. : 991\n", "batch no. : 992\n", "batch no. : 993\n", "batch no. : 994\n", "batch no. : 995\n", "batch no. : 996\n", "batch no. : 997\n", "batch no. : 998\n", "batch no. : 999\n", "batch no. : 1000\n", "batch no. : 1001\n", "batch no. : 1002\n", "batch no. : 1003\n", "batch no. : 1004\n", "batch no. : 1005\n", "batch no. : 1006\n", "batch no. : 1007\n", "batch no. : 1008\n", "batch no. : 1009\n", "batch no. : 1010\n", "batch no. : 1011\n", "batch no. : 1012\n", "batch no. : 1013\n", "batch no. : 1014\n", "batch no. : 1015\n", "batch no. : 1016\n", "batch no. : 1017\n", "batch no. : 1018\n", "batch no. : 1019\n", "batch no. : 1020\n", "batch no. : 1021\n", "batch no. : 1022\n", "batch no. : 1023\n", "batch no. : 1024\n", "batch no. : 1025\n", "batch no. : 1026\n", "batch no. : 1027\n", "batch no. : 1028\n", "batch no. : 1029\n", "batch no. : 1030\n", "batch no. : 1031\n", "batch no. : 1032\n", "batch no. : 1033\n", "batch no. : 1034\n", "batch no. : 1035\n", "batch no. : 1036\n", "batch no. : 1037\n", "batch no. : 1038\n", "batch no. : 1039\n", "batch no. : 1040\n", "batch no. : 1041\n", "batch no. : 1042\n", "batch no. : 1043\n", "batch no. : 1044\n", "batch no. : 1045\n", "batch no. : 1046\n", "batch no. : 1047\n", "batch no. : 1048\n", "batch no. : 1049\n", "batch no. : 1050\n", "batch no. 
: 1051\n", "batch no. : 1052\n", "batch no. : 1053\n", "batch no. : 1054\n", "batch no. : 1055\n", "batch no. : 1056\n", "batch no. : 1057\n", "batch no. : 1058\n", "batch no. : 1059\n", "batch no. : 1060\n", "batch no. : 1061\n", "batch no. : 1062\n", "batch no. : 1063\n", "batch no. : 1064\n", "batch no. : 1065\n", "batch no. : 1066\n", "batch no. : 1067\n", "batch no. : 1068\n", "batch no. : 1069\n", "batch no. : 1070\n", "batch no. : 1071\n", "batch no. : 1072\n", "batch no. : 1073\n", "batch no. : 1074\n", "batch no. : 1075\n", "batch no. : 1076\n", "batch no. : 1077\n", "batch no. : 1078\n", "batch no. : 1079\n", "batch no. : 1080\n", "batch no. : 1081\n", "batch no. : 1082\n", "batch no. : 1083\n", "batch no. : 1084\n", "batch no. : 1085\n", "batch no. : 1086\n", "batch no. : 1087\n", "batch no. : 1088\n", "batch no. : 1089\n", "batch no. : 1090\n", "batch no. : 1091\n", "batch no. : 1092\n", "batch no. : 1093\n", "batch no. : 1094\n", "batch no. : 1095\n", "batch no. : 1096\n", "batch no. : 1097\n", "batch no. : 1098\n", "batch no. : 1099\n", "batch no. : 1100\n", "batch no. : 1101\n", "batch no. : 1102\n", "batch no. : 1103\n", "batch no. : 1104\n", "batch no. : 1105\n", "batch no. : 1106\n", "batch no. : 1107\n", "batch no. : 1108\n", "batch no. : 1109\n", "batch no. : 1110\n", "batch no. : 1111\n", "batch no. : 1112\n", "batch no. : 1113\n", "batch no. : 1114\n", "batch no. : 1115\n", "batch no. : 1116\n", "batch no. : 1117\n", "batch no. : 1118\n", "batch no. : 1119\n", "batch no. : 1120\n", "batch no. : 1121\n", "batch no. : 1122\n", "batch no. : 1123\n", "batch no. : 1124\n", "batch no. : 1125\n", "batch no. : 1126\n", "batch no. : 1127\n", "batch no. : 1128\n", "batch no. : 1129\n", "batch no. : 1130\n", "batch no. : 1131\n", "batch no. : 1132\n", "batch no. : 1133\n", "batch no. : 1134\n", "batch no. : 1135\n", "batch no. : 1136\n", "batch no. : 1137\n", "batch no. : 1138\n", "batch no. : 1139\n", "batch no. : 1140\n", "batch no. 
: 1141\n", "batch no. : 1142\n", "batch no. : 1143\n", "batch no. : 1144\n", "batch no. : 1145\n", "batch no. : 1146\n", "batch no. : 1147\n", "batch no. : 1148\n", "batch no. : 1149\n", "batch no. : 1150\n", "batch no. : 1151\n", "batch no. : 1152\n", "batch no. : 1153\n", "batch no. : 1154\n", "batch no. : 1155\n", "batch no. : 1156\n", "batch no. : 1157\n", "batch no. : 1158\n", "batch no. : 1159\n", "batch no. : 1160\n", "batch no. : 1161\n", "batch no. : 1162\n", "batch no. : 1163\n", "batch no. : 1164\n", "batch no. : 1165\n", "batch no. : 1166\n", "batch no. : 1167\n", "batch no. : 1168\n", "batch no. : 1169\n", "batch no. : 1170\n", "batch no. : 1171\n", "batch no. : 1172\n", "batch no. : 1173\n", "batch no. : 1174\n", "batch no. : 1175\n", "batch no. : 1176\n", "batch no. : 1177\n", "batch no. : 1178\n", "batch no. : 1179\n", "batch no. : 1180\n", "batch no. : 1181\n", "batch no. : 1182\n", "batch no. : 1183\n", "batch no. : 1184\n", "batch no. : 1185\n", "batch no. : 1186\n", "batch no. : 1187\n", "batch no. : 1188\n", "batch no. : 1189\n", "batch no. : 1190\n", "batch no. : 1191\n", "batch no. : 1192\n", "batch no. : 1193\n", "batch no. : 1194\n", "batch no. : 1195\n", "batch no. : 1196\n", "batch no. : 1197\n", "batch no. : 1198\n", "batch no. : 1199\n", "batch no. : 1200\n", "batch no. : 1201\n", "batch no. : 1202\n", "batch no. : 1203\n", "batch no. : 1204\n", "batch no. : 1205\n", "batch no. : 1206\n", "batch no. : 1207\n", "batch no. : 1208\n", "batch no. : 1209\n", "batch no. : 1210\n", "batch no. : 1211\n", "batch no. : 1212\n", "batch no. : 1213\n", "batch no. : 1214\n", "batch no. : 1215\n", "batch no. : 1216\n", "batch no. : 1217\n", "batch no. : 1218\n", "batch no. : 1219\n", "batch no. : 1220\n", "batch no. : 1221\n", "batch no. : 1222\n", "batch no. : 1223\n", "batch no. : 1224\n", "batch no. : 1225\n", "batch no. : 1226\n", "batch no. : 1227\n", "batch no. : 1228\n", "batch no. : 1229\n", "batch no. : 1230\n", "batch no. 
: 1231\n", "batch no. : 1232\n", "batch no. : 1233\n", "batch no. : 1234\n", "batch no. : 1235\n", "batch no. : 1236\n", "batch no. : 1237\n", "batch no. : 1238\n", "batch no. : 1239\n", "batch no. : 1240\n", "batch no. : 1241\n", "batch no. : 1242\n", "batch no. : 1243\n", "batch no. : 1244\n", "batch no. : 1245\n", "batch no. : 1246\n", "batch no. : 1247\n", "batch no. : 1248\n", "batch no. : 1249\n", "batch no. : 1250\n", "batch no. : 1251\n", "batch no. : 1252\n", "batch no. : 1253\n", "batch no. : 1254\n", "batch no. : 1255\n", "batch no. : 1256\n", "batch no. : 1257\n", "batch no. : 1258\n", "batch no. : 1259\n", "batch no. : 1260\n", "batch no. : 1261\n", "batch no. : 1262\n", "batch no. : 1263\n", "batch no. : 1264\n", "batch no. : 1265\n", "batch no. : 1266\n", "batch no. : 1267\n", "batch no. : 1268\n", "batch no. : 1269\n", "batch no. : 1270\n", "batch no. : 1271\n", "batch no. : 1272\n", "batch no. : 1273\n", "batch no. : 1274\n", "batch no. : 1275\n", "batch no. : 1276\n", "batch no. : 1277\n", "batch no. : 1278\n", "batch no. : 1279\n", "batch no. : 1280\n", "batch no. : 1281\n", "batch no. : 1282\n", "batch no. : 1283\n", "batch no. : 1284\n", "batch no. : 1285\n", "batch no. : 1286\n", "batch no. : 1287\n", "batch no. : 1288\n", "batch no. : 1289\n", "batch no. : 1290\n", "batch no. : 1291\n", "batch no. : 1292\n", "batch no. : 1293\n", "batch no. : 1294\n", "batch no. : 1295\n", "batch no. : 1296\n", "batch no. : 1297\n", "batch no. : 1298\n", "batch no. : 1299\n", "batch no. : 1300\n", "batch no. : 1301\n", "batch no. : 1302\n", "batch no. : 1303\n", "batch no. : 1304\n", "batch no. : 1305\n", "batch no. : 1306\n", "batch no. : 1307\n", "batch no. : 1308\n", "batch no. : 1309\n", "batch no. : 1310\n", "batch no. : 1311\n", "batch no. : 1312\n", "batch no. : 1313\n", "batch no. : 1314\n", "batch no. : 1315\n", "batch no. : 1316\n", "batch no. : 1317\n", "batch no. : 1318\n", "batch no. : 1319\n", "batch no. : 1320\n", "batch no. 
: 1321\n", "batch no. : 1322\n", "batch no. : 1323\n", "batch no. : 1324\n", "batch no. : 1325\n", "batch no. : 1326\n", "batch no. : 1327\n", "batch no. : 1328\n", "batch no. : 1329\n", "batch no. : 1330\n", "batch no. : 1331\n", "batch no. : 1332\n", "batch no. : 1333\n", "batch no. : 1334\n", "batch no. : 1335\n", "batch no. : 1336\n", "batch no. : 1337\n", "batch no. : 1338\n", "batch no. : 1339\n", "batch no. : 1340\n", "batch no. : 1341\n", "batch no. : 1342\n", "batch no. : 1343\n", "batch no. : 1344\n", "batch no. : 1345\n", "batch no. : 1346\n", "batch no. : 1347\n", "batch no. : 1348\n", "batch no. : 1349\n", "batch no. : 1350\n", "batch no. : 1351\n", "batch no. : 1352\n", "batch no. : 1353\n", "batch no. : 1354\n", "batch no. : 1355\n", "batch no. : 1356\n", "batch no. : 1357\n", "batch no. : 1358\n", "batch no. : 1359\n", "batch no. : 1360\n", "batch no. : 1361\n", "batch no. : 1362\n", "batch no. : 1363\n", "batch no. : 1364\n", "batch no. : 1365\n", "batch no. : 1366\n", "batch no. : 1367\n", "batch no. : 1368\n", "batch no. : 1369\n", "batch no. : 1370\n", "batch no. : 1371\n", "batch no. : 1372\n", "batch no. : 1373\n", "batch no. : 1374\n", "batch no. : 1375\n", "batch no. : 1376\n", "batch no. : 1377\n", "batch no. : 1378\n", "batch no. : 1379\n", "batch no. : 1380\n", "batch no. : 1381\n", "batch no. : 1382\n", "batch no. : 1383\n", "batch no. : 1384\n", "batch no. : 1385\n", "batch no. : 1386\n", "batch no. : 1387\n", "batch no. : 1388\n", "batch no. : 1389\n", "batch no. : 1390\n", "batch no. : 1391\n", "batch no. : 1392\n", "batch no. : 1393\n", "batch no. : 1394\n", "batch no. : 1395\n", "batch no. : 1396\n", "batch no. : 1397\n", "batch no. : 1398\n", "batch no. : 1399\n", "batch no. : 1400\n", "batch no. : 1401\n", "batch no. : 1402\n", "batch no. : 1403\n", "batch no. : 1404\n", "batch no. : 1405\n", "batch no. : 1406\n", "batch no. : 1407\n", "batch no. : 1408\n", "batch no. : 1409\n", "batch no. : 1410\n", "batch no. 
: 1411\n", "batch no. : 1412\n", "batch no. : 1413\n", "batch no. : 1414\n", "batch no. : 1415\n", "batch no. : 1416\n", "batch no. : 1417\n", "batch no. : 1418\n", "batch no. : 1419\n", "batch no. : 1420\n", "batch no. : 1421\n", "batch no. : 1422\n", "batch no. : 1423\n", "batch no. : 1424\n", "batch no. : 1425\n", "batch no. : 1426\n", "batch no. : 1427\n", "batch no. : 1428\n", "batch no. : 1429\n", "batch no. : 1430\n", "batch no. : 1431\n", "batch no. : 1432\n", "batch no. : 1433\n", "batch no. : 1434\n", "batch no. : 1435\n", "batch no. : 1436\n", "batch no. : 1437\n", "batch no. : 1438\n", "batch no. : 1439\n", "batch no. : 1440\n", "batch no. : 1441\n", "batch no. : 1442\n", "batch no. : 1443\n", "batch no. : 1444\n", "batch no. : 1445\n", "batch no. : 1446\n", "batch no. : 1447\n", "batch no. : 1448\n", "batch no. : 1449\n", "batch no. : 1450\n", "batch no. : 1451\n", "batch no. : 1452\n", "batch no. : 1453\n", "batch no. : 1454\n", "batch no. : 1455\n", "batch no. : 1456\n", "batch no. : 1457\n", "batch no. : 1458\n", "batch no. : 1459\n", "batch no. : 1460\n", "batch no. : 1461\n", "batch no. : 1462\n", "batch no. : 1463\n", "batch no. : 1464\n", "batch no. : 1465\n", "batch no. : 1466\n", "batch no. : 1467\n", "batch no. : 1468\n", "batch no. : 1469\n", "batch no. : 1470\n", "batch no. : 1471\n", "batch no. : 1472\n", "batch no. : 1473\n", "batch no. : 1474\n", "batch no. : 1475\n", "batch no. : 1476\n", "batch no. : 1477\n", "batch no. : 1478\n", "batch no. : 1479\n", "batch no. : 1480\n", "batch no. : 1481\n", "batch no. : 1482\n", "batch no. : 1483\n", "batch no. : 1484\n", "batch no. : 1485\n", "batch no. : 1486\n", "batch no. : 1487\n", "batch no. : 1488\n", "batch no. : 1489\n", "batch no. : 1490\n", "batch no. : 1491\n", "batch no. : 1492\n", "batch no. : 1493\n", "batch no. : 1494\n", "batch no. : 1495\n", "batch no. : 1496\n", "batch no. : 1497\n", "batch no. : 1498\n", "batch no. : 1499\n", "batch no. : 1500\n", "batch no. 
# Run the trained model over the full training set and collect ground-truth
# labels and predictions for the offline metrics computed in the next cells
# (accuracy_score / classification_report).
train_labels_set = []
train_preds_set = []

# Switch to inference mode ONCE, before the loop — the original called
# model.eval() on every batch, which is redundant. eval() disables dropout
# and makes BatchNorm use its running statistics.
model.eval()

# Inference only: disable autograd so activations are not retained for a
# backward pass (saves GPU memory and time on every batch).
with torch.no_grad():
    for idx, data in enumerate(dataloader.train):
        print("batch no. : ", idx)
        inputs, labels = data
        # Record ground truth from the CPU tensor before any device move.
        train_labels_set += labels.tolist()
        # Only the inputs need to be on the GPU; labels are never used in
        # the forward pass here. NOTE(review): assumes model lives on CUDA —
        # consistent with the .cuda() calls in the original cell.
        inputs = inputs.cuda()
        outputs = model(inputs)
        # Predicted class = argmax over the class dimension.
        _, preds = torch.max(outputs, 1)
        train_preds_set += preds.cpu().tolist()
: 18\n", "batch no. : 19\n", "batch no. : 20\n", "batch no. : 21\n", "batch no. : 22\n", "batch no. : 23\n", "batch no. : 24\n", "batch no. : 25\n", "batch no. : 26\n", "batch no. : 27\n", "batch no. : 28\n", "batch no. : 29\n", "batch no. : 30\n", "batch no. : 31\n", "batch no. : 32\n", "batch no. : 33\n", "batch no. : 34\n", "batch no. : 35\n", "batch no. : 36\n", "batch no. : 37\n", "batch no. : 38\n", "batch no. : 39\n", "batch no. : 40\n", "batch no. : 41\n", "batch no. : 42\n", "batch no. : 43\n", "batch no. : 44\n", "batch no. : 45\n", "batch no. : 46\n", "batch no. : 47\n", "batch no. : 48\n", "batch no. : 49\n", "batch no. : 50\n", "batch no. : 51\n", "batch no. : 52\n", "batch no. : 53\n", "batch no. : 54\n", "batch no. : 55\n", "batch no. : 56\n", "batch no. : 57\n", "batch no. : 58\n", "batch no. : 59\n", "batch no. : 60\n", "batch no. : 61\n", "batch no. : 62\n", "batch no. : 63\n", "batch no. : 64\n", "batch no. : 65\n", "batch no. : 66\n", "batch no. : 67\n", "batch no. : 68\n", "batch no. : 69\n", "batch no. : 70\n", "batch no. : 71\n", "batch no. : 72\n", "batch no. : 73\n", "batch no. : 74\n", "batch no. : 75\n", "batch no. : 76\n", "batch no. : 77\n", "batch no. : 78\n", "batch no. : 79\n", "batch no. : 80\n", "batch no. : 81\n", "batch no. : 82\n", "batch no. : 83\n", "batch no. : 84\n", "batch no. : 85\n", "batch no. : 86\n", "batch no. : 87\n", "batch no. : 88\n", "batch no. : 89\n", "batch no. : 90\n", "batch no. : 91\n", "batch no. : 92\n", "batch no. : 93\n", "batch no. : 94\n", "batch no. : 95\n", "batch no. : 96\n", "batch no. : 97\n", "batch no. : 98\n", "batch no. : 99\n", "batch no. : 100\n", "batch no. : 101\n", "batch no. : 102\n", "batch no. : 103\n", "batch no. : 104\n", "batch no. : 105\n", "batch no. : 106\n", "batch no. : 107\n", "batch no. : 108\n", "batch no. : 109\n", "batch no. : 110\n", "batch no. : 111\n", "batch no. : 112\n", "batch no. : 113\n", "batch no. : 114\n", "batch no. : 115\n", "batch no. : 116\n", "batch no. 
# Run the trained model over the validation set and collect ground-truth
# labels and predictions for the offline metrics computed in the next cells.
valid_labels_set = []
valid_preds_set = []

# BUG FIX: the original validation loop never called model.eval(), unlike
# the training-set loop. Without it, dropout stays active and BatchNorm uses
# batch statistics, corrupting validation predictions.
model.eval()

# Inference only: no gradients needed.
with torch.no_grad():
    for idx, data in enumerate(dataloader.valid):
        print("batch no. : ", idx)
        inputs, labels = data
        # Record ground truth from the CPU tensor before any device move.
        valid_labels_set += labels.tolist()
        # Only inputs are needed on the GPU for the forward pass.
        inputs = inputs.cuda()
        outputs = model(inputs)
        # Predicted class = argmax over the class dimension.
        _, preds = torch.max(outputs, 1)
        valid_preds_set += preds.cpu().tolist()
15\n", " 45 0.55 0.65 0.59 17\n", " 46 0.88 0.88 0.88 8\n", " 47 0.44 0.78 0.56 9\n", " 48 0.83 0.83 0.83 6\n", " 49 1.00 0.25 0.40 4\n", " 50 1.00 0.25 0.40 4\n", " 51 1.00 0.80 0.89 5\n", " 52 0.00 0.00 0.00 3\n", " 53 0.67 0.67 0.67 3\n", " 54 0.90 0.90 0.90 10\n", " 55 1.00 0.92 0.96 12\n", " 56 1.00 0.29 0.44 7\n", " 57 1.00 0.93 0.96 14\n", " 58 0.56 1.00 0.71 5\n", " 59 0.75 0.60 0.67 5\n", " 60 0.60 0.30 0.40 10\n", " 61 0.80 0.73 0.76 11\n", " 62 0.67 0.67 0.67 15\n", " 63 0.65 0.65 0.65 17\n", " 64 0.38 1.00 0.55 3\n", " 65 0.67 0.50 0.57 4\n", " 66 0.70 1.00 0.82 7\n", " 67 0.65 0.85 0.73 13\n", " 68 0.50 0.50 0.50 4\n", " 69 0.43 0.50 0.46 6\n", " 70 0.00 0.00 0.00 3\n", " 71 0.69 0.75 0.72 12\n", " 72 0.50 0.50 0.50 4\n", " 73 0.40 0.67 0.50 3\n", " 74 0.75 0.38 0.50 8\n", " 75 0.38 0.43 0.40 7\n", " 76 0.25 0.25 0.25 4\n", " 77 0.50 0.25 0.33 12\n", " 78 0.67 0.17 0.27 12\n", " 79 1.00 0.33 0.50 3\n", " 80 0.50 0.33 0.40 3\n", " 81 1.00 0.75 0.86 4\n", " 82 1.00 1.00 1.00 8\n", " 83 0.00 0.00 0.00 3\n", " 84 1.00 0.50 0.67 6\n", " 85 1.00 0.67 0.80 3\n", " 86 1.00 0.75 0.86 4\n", " 87 0.76 0.76 0.76 21\n", " 88 0.65 0.69 0.67 16\n", " 89 0.60 0.75 0.67 8\n", " 90 0.67 0.89 0.76 9\n", " 91 0.83 1.00 0.91 5\n", " 92 0.86 0.67 0.75 9\n", " 93 1.00 0.33 0.50 3\n", "\n", " accuracy 0.67 716\n", " macro avg 0.64 0.60 0.59 716\n", "weighted avg 0.68 0.67 0.65 716\n", "\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "/opt/conda/lib/python3.6/site-packages/sklearn/metrics/classification.py:1437: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.\n", " 'precision', 'predicted', average, warn_for)\n", "/opt/conda/lib/python3.6/site-packages/sklearn/metrics/classification.py:1437: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.\n", " 'precision', 'predicted', average, warn_for)\n", 
"/opt/conda/lib/python3.6/site-packages/sklearn/metrics/classification.py:1437: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.\n", " 'precision', 'predicted', average, warn_for)\n" ] } ], "source": [ "from sklearn.metrics import classification_report\n", "print(classification_report(y_pred=valid_preds_set, y_true=valid_labels_set))" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "tensor([[-1.1673, -0.2164]], device='cuda:0', grad_fn=)\n", "tensor([[-0.1175, -1.4215]], device='cuda:0', grad_fn=)\n", "tensor([[-0.0916, -3.4097]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.3203, -0.9870]], device='cuda:0', grad_fn=)\n", "tensor([[-0.9048, -1.5405]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.0522, -2.8063]], device='cuda:0', grad_fn=)\n", "tensor([[-0.1136, -2.3834]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7111, -3.9478]], device='cuda:0', grad_fn=)\n", "tensor([[-1.4123, -3.3514]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.2776, -1.2252]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.2520, -3.1235]], device='cuda:0', grad_fn=)\n", "tensor([[-0.1294, 1.0293]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.8395, -2.0392]], device='cuda:0', grad_fn=)\n", "tensor([[-1.6889, -3.1920]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7741, -0.1292]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.1864, -3.5444]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.5541, -1.2779]], device='cuda:0', grad_fn=)\n", "tensor([[2.0713, 1.3056]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7842, -0.1954]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.0858, -1.1561]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.2493, -0.9807]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.5672, -0.4622]], device='cuda:0', grad_fn=)\n", "tensor([[0.8731, 0.1573]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7977, -0.0190]], device='cuda:0', 
grad_fn=)\n", "tensor([[1.0106, 0.5682]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.5795, -0.4562]], device='cuda:0', grad_fn=)\n", "tensor([[-1.2733, -5.2885]], device='cuda:0', grad_fn=)\n", "tensor([[1.1738, 0.4287]], device='cuda:0', grad_fn=)\n", "tensor([[-1.2398, -2.1526]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.2043, -1.9697]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.0256, -0.4382]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.1617, -2.1974]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.8668, -0.2576]], device='cuda:0', grad_fn=)\n", "tensor([[1.1572, 0.2950]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.9823, -0.5004]], device='cuda:0', grad_fn=)\n", "tensor([[ 6.2838e-05, -1.8517e+00]], device='cuda:0', grad_fn=)\n", "tensor([[-0.0028, -1.9786]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.0577, -2.6379]], device='cuda:0', grad_fn=)\n", "tensor([[0.5963, 0.6486]], device='cuda:0', grad_fn=)\n", "tensor([[-1.8406, -3.2111]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.1254, -0.0922]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.8227, -0.2319]], device='cuda:0', grad_fn=)\n", "tensor([[-0.3301, -1.8567]], device='cuda:0', grad_fn=)\n", "tensor([[-1.0117, -3.5311]], device='cuda:0', grad_fn=)\n", "tensor([[-0.1803, -2.6689]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.9588, -1.1507]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.4955, -0.6411]], device='cuda:0', grad_fn=)\n", "tensor([[-0.1880, -2.0008]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.4970, -1.1149]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.3872, -1.1890]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.5505, -0.6584]], device='cuda:0', grad_fn=)\n", "tensor([[-0.0718, -4.1589]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.5572, -0.2007]], device='cuda:0', grad_fn=)\n", "tensor([[-0.0484, -0.7548]], device='cuda:0', grad_fn=)\n", "tensor([[-0.0835, -2.5675]], device='cuda:0', grad_fn=)\n", "tensor([[-0.4253, -2.9504]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.9472, -0.4062]], 
device='cuda:0', grad_fn=)\n", "tensor([[ 1.5228, -0.3454]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.3905, -1.3289]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7872, -0.1771]], device='cuda:0', grad_fn=)\n", "tensor([[-0.3593, -1.5574]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.5007, -0.9292]], device='cuda:0', grad_fn=)\n", "tensor([[-2.0704, -2.3775]], device='cuda:0', grad_fn=)\n", "tensor([[-0.1697, -3.6508]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.6088, -1.4529]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.3294, -0.2743]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.0294, -2.9252]], device='cuda:0', grad_fn=)\n", "tensor([[1.3963, 0.4332]], device='cuda:0', grad_fn=)\n", "tensor([[-1.6095, -4.4847]], device='cuda:0', grad_fn=)\n", "tensor([[0.6031, 0.8085]], device='cuda:0', grad_fn=)\n", "tensor([[-0.6312, -3.9942]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.3070, -0.1071]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.4553, -1.7609]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.1045, -0.7977]], device='cuda:0', grad_fn=)\n", "tensor([[1.1791, 0.3531]], device='cuda:0', grad_fn=)\n", "tensor([[1.8385, 0.2887]], device='cuda:0', grad_fn=)\n", "tensor([[-0.5758, -1.2775]], device='cuda:0', grad_fn=)\n", "tensor([[-1.5202, -6.2216]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7252, -0.5163]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7752, -0.4127]], device='cuda:0', grad_fn=)\n", "tensor([[-0.2360, -0.8524]], device='cuda:0', grad_fn=)\n", "tensor([[1.6862, 0.9611]], device='cuda:0', grad_fn=)\n", "tensor([[-0.0757, 0.2711]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.9696, -0.1933]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.1379, -0.7391]], device='cuda:0', grad_fn=)\n", "tensor([[0.5132, 0.6981]], device='cuda:0', grad_fn=)\n", "tensor([[1.5150, 0.6093]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.3434, -0.2869]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.4603e-03, -2.1190e+00]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.0844, 
-0.3300]], device='cuda:0', grad_fn=)\n", "tensor([[0.1839, 0.0281]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.6405, -2.3632]], device='cuda:0', grad_fn=)\n", "tensor([[-1.6082, -1.9826]], device='cuda:0', grad_fn=)\n", "tensor([[ 8.1506e-01, -3.2523e-04]], device='cuda:0', grad_fn=)\n", "tensor([[0.9523, 0.1736]], device='cuda:0', grad_fn=)\n", "tensor([[-1.3933, -2.1009]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.6763, -0.9494]], device='cuda:0', grad_fn=)\n", "tensor([[-0.5832, -1.3671]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.0223, -0.6032]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.9949, -2.5346]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.6116, -1.2521]], device='cuda:0', grad_fn=)\n", "tensor([[-0.4650, -1.5464]], device='cuda:0', grad_fn=)\n", "tensor([[1.4831, 0.3547]], device='cuda:0', grad_fn=)\n", "tensor([[0.8521, 0.5180]], device='cuda:0', grad_fn=)\n", "tensor([[1.1992, 0.7503]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.6142, -0.6342]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.0644, -1.5430]], device='cuda:0', grad_fn=)\n", "tensor([[-0.3526, -2.9997]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.3643, -1.7772]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.5609, -1.9488]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.2732, -0.5377]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.2860, -0.4733]], device='cuda:0', grad_fn=)\n", "tensor([[-0.9458, -1.7540]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.5728, -0.3686]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.5571, -2.2649]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.3781, -0.4348]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.4783, -2.0994]], device='cuda:0', grad_fn=)\n", "tensor([[1.0707, 1.0123]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.1473, -1.5165]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.3721, -3.0098]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.3083, -0.8813]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.1496, -0.6377]], device='cuda:0', grad_fn=)\n", 
"tensor([[1.2859, 0.6574]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.2446, -2.2846]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.3455, -0.6020]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.1083, -2.0273]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.5853, -1.1772]], device='cuda:0', grad_fn=)\n", "tensor([[-0.4697, -5.3933]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.9505, -0.9375]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.0068, -0.9285]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.5208, -0.1262]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.3971, -0.1968]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.5638, -2.4642]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.4304, -0.1323]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.3752, -2.7415]], device='cuda:0', grad_fn=)\n", "tensor([[-0.6398, 0.0583]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.8843, -0.5594]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7022, -2.3892]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.2965, -2.6241]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7575, -0.4081]], device='cuda:0', grad_fn=)\n", "tensor([[0.2239, 0.4443]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.1097, -2.4076]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.1930, -0.5522]], device='cuda:0', grad_fn=)\n", "tensor([[-1.7396, -6.5396]], device='cuda:0', grad_fn=)\n", "tensor([[-0.8506, -3.2803]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.4944, -2.2484]], device='cuda:0', grad_fn=)\n", "tensor([[-0.4480, -2.7008]], device='cuda:0', grad_fn=)\n", "tensor([[-0.7205, -1.4036]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.0045, -0.5179]], device='cuda:0', grad_fn=)\n", "tensor([[-0.0506, -0.4957]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.4850, -1.7284]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.0181, -2.9691]], device='cuda:0', grad_fn=)\n", "tensor([[0.5552, 1.1028]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.4703, -1.4501]], device='cuda:0', grad_fn=)\n", "tensor([[-0.2159, -1.2924]], device='cuda:0', 
grad_fn=)\n", "tensor([[ 0.1336, -1.6634]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7165, -1.7608]], device='cuda:0', grad_fn=)\n", "tensor([[0.9598, 0.2126]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7846, -0.5448]], device='cuda:0', grad_fn=)\n", "tensor([[0.9444, 1.2854]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7631, -4.6301]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.0956, -0.1041]], device='cuda:0', grad_fn=)\n", "tensor([[-0.2350, -2.2595]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.3373, -3.0154]], device='cuda:0', grad_fn=)\n", "tensor([[1.9104, 0.9173]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.2637, -0.3231]], device='cuda:0', grad_fn=)\n", "tensor([[1.0882, 0.2457]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.0739, -3.1219]], device='cuda:0', grad_fn=)\n", "tensor([[-0.3427, -1.9083]], device='cuda:0', grad_fn=)\n", "tensor([[-0.1072, -1.9655]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.5885, -0.6465]], device='cuda:0', grad_fn=)\n", "tensor([[0.7794, 0.1567]], device='cuda:0', grad_fn=)\n", "tensor([[-1.8627, -4.8995]], device='cuda:0', grad_fn=)\n", "tensor([[-0.7574, -2.6044]], device='cuda:0', grad_fn=)\n", "tensor([[-1.3254, -1.2128]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.3043, -1.5315]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.5858, -0.9811]], device='cuda:0', grad_fn=)\n", "tensor([[1.3765, 1.0498]], device='cuda:0', grad_fn=)\n", "tensor([[0.7225, 1.1752]], device='cuda:0', grad_fn=)\n", "tensor([[1.6570, 1.7965]], device='cuda:0', grad_fn=)\n", "tensor([[-0.0400, -0.3908]], device='cuda:0', grad_fn=)\n", "tensor([[0.3378, 0.3434]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.0904, -2.5823]], device='cuda:0', grad_fn=)\n", "tensor([[2.2546, 0.2071]], device='cuda:0', grad_fn=)\n", "tensor([[2.0058, 1.0910]], device='cuda:0', grad_fn=)\n", "tensor([[0.7888, 0.9465]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.4558, -3.2402]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.0118, -0.4489]], device='cuda:0', 
grad_fn=)\n", "tensor([[-0.5555, -4.5323]], device='cuda:0', grad_fn=)\n", "tensor([[-1.2639, -4.7488]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.9185, -1.8260]], device='cuda:0', grad_fn=)\n", "tensor([[1.2509, 0.4886]], device='cuda:0', grad_fn=)\n", "tensor([[-1.9205, -3.6179]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.2579, -1.6891]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.1406, -1.2175]], device='cuda:0', grad_fn=)\n", "tensor([[1.1314, 0.5433]], device='cuda:0', grad_fn=)\n", "tensor([[0.8620, 0.0233]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.6841, -2.0965]], device='cuda:0', grad_fn=)\n", "tensor([[-0.0352, -0.3605]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.5525, -1.9340]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.6084, -0.2990]], device='cuda:0', grad_fn=)\n", "tensor([[-0.6542, -2.6477]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.6619, -0.6637]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.2462, -0.1222]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.4825, -0.2781]], device='cuda:0', grad_fn=)\n", "tensor([[0.3278, 0.4645]], device='cuda:0', grad_fn=)\n", "tensor([[-0.2411, -2.8536]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.9453, -0.6593]], device='cuda:0', grad_fn=)\n", "tensor([[1.2337, 0.0777]], device='cuda:0', grad_fn=)\n", "tensor([[-2.4725, -5.0293]], device='cuda:0', grad_fn=)\n", "tensor([[-0.6614, -1.9119]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.6682, -0.9926]], device='cuda:0', grad_fn=)\n", "tensor([[-0.8520, -2.8934]], device='cuda:0', grad_fn=)\n", "tensor([[1.0387, 0.5220]], device='cuda:0', grad_fn=)\n", "tensor([[1.0393, 0.5437]], device='cuda:0', grad_fn=)\n", "tensor([[1.6441, 1.2576]], device='cuda:0', grad_fn=)\n", "tensor([[0.2174, 1.2973]], device='cuda:0', grad_fn=)\n", "tensor([[-0.0514, -1.9004]], device='cuda:0', grad_fn=)\n", "tensor([[-0.4104, -0.6419]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7015, -2.5724]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.0182, -1.4353]], device='cuda:0', 
grad_fn=)\n", "tensor([[-0.6895, -3.0893]], device='cuda:0', grad_fn=)\n", "tensor([[-1.2995, -3.8825]], device='cuda:0', grad_fn=)\n", "tensor([[-0.4827, -1.5381]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7317, -1.6312]], device='cuda:0', grad_fn=)\n", "tensor([[-0.0562, -1.8516]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.8000, -0.5525]], device='cuda:0', grad_fn=)\n", "tensor([[1.2290, 0.2286]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.0204, -1.2423]], device='cuda:0', grad_fn=)\n", "tensor([[-1.4019, -3.6435]], device='cuda:0', grad_fn=)\n", "tensor([[-0.5194, -1.9675]], device='cuda:0', grad_fn=)\n", "tensor([[9.0636e-01, 4.4882e-05]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.8025, -0.2325]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7378, -0.2105]], device='cuda:0', grad_fn=)\n", "tensor([[1.6556, 1.6677]], device='cuda:0', grad_fn=)\n", "tensor([[1.6411, 1.2308]], device='cuda:0', grad_fn=)\n", "tensor([[-1.7473, -1.9133]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.2929, -0.2938]], device='cuda:0', grad_fn=)\n", "tensor([[1.8521, 1.8275]], device='cuda:0', grad_fn=)\n", "tensor([[1.6978, 1.3247]], device='cuda:0', grad_fn=)\n", "tensor([[0.8621, 0.5662]], device='cuda:0', grad_fn=)\n", "tensor([[1.5402, 0.7588]], device='cuda:0', grad_fn=)\n", "tensor([[-1.7169, -3.7939]], device='cuda:0', grad_fn=)\n", "tensor([[-1.1543, -0.8531]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.1115, -1.7547]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.8370, -2.2908]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.2057, -0.9380]], device='cuda:0', grad_fn=)\n", "tensor([[-0.3789, -2.9074]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.1655, -1.5906]], device='cuda:0', grad_fn=)\n", "tensor([[-0.7310, -1.8390]], device='cuda:0', grad_fn=)\n", "tensor([[-0.5545, -0.3962]], device='cuda:0', grad_fn=)\n", "tensor([[-0.2473, -3.3986]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7416, -0.7887]], device='cuda:0', grad_fn=)\n", "tensor([[1.1961, 0.0017]], 
device='cuda:0', grad_fn=)\n", "tensor([[ 0.3845, -0.5072]], device='cuda:0', grad_fn=)\n", "tensor([[-0.4074, -3.8871]], device='cuda:0', grad_fn=)\n", "tensor([[-0.1156, -2.8772]], device='cuda:0', grad_fn=)\n", "tensor([[-1.0054, 1.3011]], device='cuda:0', grad_fn=)\n", "tensor([[-0.0205, -1.6140]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.0139, -1.5141]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.3582, -0.6461]], device='cuda:0', grad_fn=)\n", "tensor([[-0.8307, -0.1144]], device='cuda:0', grad_fn=)\n", "tensor([[-1.1348, -4.5524]], device='cuda:0', grad_fn=)\n", "tensor([[1.1872, 0.2135]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.1386, -0.2798]], device='cuda:0', grad_fn=)\n", "tensor([[-0.2198, -1.4166]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7895, -1.6380]], device='cuda:0', grad_fn=)\n", "tensor([[-0.1592, -3.6316]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7922, -0.3303]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.3540, -2.3570]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7527, -1.3427]], device='cuda:0', grad_fn=)\n", "tensor([[1.1878, 0.5744]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.3948, -1.6400]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.9485, -0.5157]], device='cuda:0', grad_fn=)\n", "tensor([[-0.3174, -4.4381]], device='cuda:0', grad_fn=)\n", "tensor([[-0.6139, -1.3849]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.6874, -0.6350]], device='cuda:0', grad_fn=)\n", "tensor([[-0.7099, -4.2372]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.1018, -1.9332]], device='cuda:0', grad_fn=)\n", "tensor([[-1.4147, -3.1168]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.1742, -1.0902]], device='cuda:0', grad_fn=)\n", "tensor([[-0.1726, -1.8467]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.0546, -1.8240]], device='cuda:0', grad_fn=)\n", "tensor([[-1.3779, -2.7615]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.0501, -1.3183]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.0595, -0.1505]], device='cuda:0', grad_fn=)\n", "tensor([[2.5332, 
0.2794]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.3696, -0.4920]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.9521, -0.9895]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.3920, -0.7467]], device='cuda:0', grad_fn=)\n", "tensor([[-0.7352, -1.7377]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.5914, -0.6191]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.4333, -0.7817]], device='cuda:0', grad_fn=)\n", "tensor([[0.6785, 0.2195]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.3796, -2.1166]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.6259, -0.2460]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7263, -1.2481]], device='cuda:0', grad_fn=)\n", "tensor([[-0.7041, -1.6683]], device='cuda:0', grad_fn=)\n", "tensor([[1.2236, 1.1780]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.4513, -0.4899]], device='cuda:0', grad_fn=)\n", "tensor([[-0.5070, -2.4922]], device='cuda:0', grad_fn=)\n", "tensor([[-0.7016, -4.9132]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.8004, -2.2229]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.0459, -3.2011]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.6612, -0.2608]], device='cuda:0', grad_fn=)\n", "tensor([[1.6414, 0.2059]], device='cuda:0', grad_fn=)\n", "tensor([[-0.8252, -1.2329]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.3474, -1.0832]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.6635, -1.0329]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.8518, -0.2408]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.3005, -1.6974]], device='cuda:0', grad_fn=)\n", "tensor([[1.0480, 0.1126]], device='cuda:0', grad_fn=)\n", "tensor([[1.4777, 3.0459]], device='cuda:0', grad_fn=)\n", "tensor([[0.9109, 0.4535]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.9986, -0.6319]], device='cuda:0', grad_fn=)\n", "tensor([[-1.3128, -2.3280]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.2064, -1.1941]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.8847, -0.2355]], device='cuda:0', grad_fn=)\n", "tensor([[1.8921, 0.8651]], device='cuda:0', grad_fn=)\n", "tensor([[-1.5820, 
-2.1295]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.1179, -3.8908]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.2402, -0.0033]], device='cuda:0', grad_fn=)\n", "tensor([[1.1618, 0.9584]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.4643, -0.4604]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.0423, -2.3469]], device='cuda:0', grad_fn=)\n", "tensor([[-0.0870, -1.4366]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.5605, -0.3544]], device='cuda:0', grad_fn=)\n", "tensor([[1.5663, 0.3850]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.4860, -0.7778]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.6106, -1.6629]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.2599, -0.1356]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.4797, -0.0198]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.2689, -3.2263]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.3964, -0.1151]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.1559, -1.1033]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7879, -1.2706]], device='cuda:0', grad_fn=)\n", "tensor([[-0.3226, -1.8094]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.4956, -1.6443]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7938, -0.9817]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.1946, -2.1573]], device='cuda:0', grad_fn=)\n", "tensor([[-1.1358, -4.3674]], device='cuda:0', grad_fn=)\n", "tensor([[0.6185, 0.0896]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.5019, -2.3339]], device='cuda:0', grad_fn=)\n", "tensor([[1.5799, 0.2492]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7347, -0.8160]], device='cuda:0', grad_fn=)\n", "tensor([[-0.4997, -3.5607]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.6900, -0.9345]], device='cuda:0', grad_fn=)\n", "tensor([[-0.9665, -2.5185]], device='cuda:0', grad_fn=)\n", "tensor([[-0.1151, -2.9529]], device='cuda:0', grad_fn=)\n", "tensor([[1.1087, 0.9007]], device='cuda:0', grad_fn=)\n", "tensor([[-3.2613, -5.0380]], device='cuda:0', grad_fn=)\n", "tensor([[-1.1739, -1.8722]], device='cuda:0', grad_fn=)\n", 
"tensor([[2.2891, 0.1808]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.6949, -1.1488]], device='cuda:0', grad_fn=)\n", "tensor([[-0.5888, -4.7416]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7892, -1.6882]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.3994, -1.3697]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.4412, -1.2463]], device='cuda:0', grad_fn=)\n", "tensor([[-1.0655, -3.1536]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.6650, -0.2869]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.5268, -0.8062]], device='cuda:0', grad_fn=)\n", "tensor([[-0.0279, -1.2239]], device='cuda:0', grad_fn=)\n", "tensor([[2.0553, 1.3936]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.8710, -1.0665]], device='cuda:0', grad_fn=)\n", "tensor([[-1.6060, -3.8303]], device='cuda:0', grad_fn=)\n", "tensor([[-1.0562, -2.6977]], device='cuda:0', grad_fn=)\n", "tensor([[1.3683, 1.9927]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.5529, -0.0624]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7874, -0.7867]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.2537, -2.2790]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.9207, -0.6283]], device='cuda:0', grad_fn=)\n", "tensor([[0.8710, 0.0589]], device='cuda:0', grad_fn=)\n", "tensor([[-0.3328, -1.0560]], device='cuda:0', grad_fn=)\n", "tensor([[-0.2837, -1.5463]], device='cuda:0', grad_fn=)\n", "tensor([[-2.1871, -3.7209]], device='cuda:0', grad_fn=)\n", "tensor([[-0.8523, -3.5819]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.2985, -1.6785]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.1989, -0.0232]], device='cuda:0', grad_fn=)\n", "tensor([[1.9809, 0.3134]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.0354, -0.7833]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7820, -2.2521]], device='cuda:0', grad_fn=)\n", "tensor([[1.8317, 1.7936]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.3934, -0.4933]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.2742, -1.4553]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.6596, -0.7426]], device='cuda:0', 
grad_fn=)\n", "tensor([[0.6276, 0.6574]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.3801, -0.5003]], device='cuda:0', grad_fn=)\n", "tensor([[-1.5445, -1.2098]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.2977, -0.1450]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.4105, -1.7491]], device='cuda:0', grad_fn=)\n", "tensor([[1.6496, 1.8702]], device='cuda:0', grad_fn=)\n", "tensor([[0.5137, 0.2222]], device='cuda:0', grad_fn=)\n", "tensor([[0.8240, 1.1197]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.1208, -1.0618]], device='cuda:0', grad_fn=)\n", "tensor([[-0.5309, -1.8064]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.0484, -2.1431]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.1631, -2.1731]], device='cuda:0', grad_fn=)\n", "tensor([[-2.3183, -3.6011]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.9093, -0.8795]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.4783, -0.0030]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.5397, -2.0383]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.0267, -1.1121]], device='cuda:0', grad_fn=)\n", "tensor([[-0.4961, 0.2316]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.1432, -0.2521]], device='cuda:0', grad_fn=)\n", "tensor([[-0.2705, -0.1663]], device='cuda:0', grad_fn=)\n", "tensor([[-0.0094, -2.6670]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.9673, -0.7201]], device='cuda:0', grad_fn=)\n", "tensor([[-0.3130, -4.3022]], device='cuda:0', grad_fn=)\n", "tensor([[-0.4593, -0.7490]], device='cuda:0', grad_fn=)\n", "tensor([[1.8424, 0.7405]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.7082, -3.8023]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.8968, -0.2014]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.9405, -1.3596]], device='cuda:0', grad_fn=)\n", "tensor([[-3.0999, -6.0852]], device='cuda:0', grad_fn=)\n", "tensor([[ 1.1015, -0.8606]], device='cuda:0', grad_fn=)\n", "tensor([[ 0.5319, -1.5757]], device='cuda:0', grad_fn=)\n", "tensor([[-1.6407, 0.1997]], device='cuda:0', grad_fn=)\n", "tensor([[-0.4926, -2.7722]], 
device='cuda:0')\n" ] } ], "source": [ "# Collect ground-truth labels and model predictions over the whole test set.\n", "# Fixes: model.eval() is set once before the loop (it was re-invoked every\n", "# batch), inference runs under torch.no_grad() so no autograd bookkeeping is\n", "# accumulated, and the per-batch print(outputs) that flooded the notebook\n", "# output with hundreds of raw tensors has been removed.\n", "# NOTE(review): `testdata.train` presumably yields (inputs, labels) batches\n", "# like a DataLoader — confirm against where `testdata` is built.\n", "test_labels_set = []\n", "test_preds_set = []\n", "model.eval()\n", "with torch.no_grad():\n", "    for inputs, labels in testdata.train:\n", "        test_labels_set += labels.tolist()\n", "        inputs, labels = inputs.cuda(), labels.cuda()\n", "        outputs = model(inputs)\n", "        _, preds = torch.max(outputs, 1)\n", "        test_preds_set += preds.cpu().tolist()\n" ] }, { "cell_type": "code", "execution_count": 69, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "0.6083333333333333" ] }, "execution_count": 69, "metadata": {}, "output_type": "execute_result" } ], "source": [ "from sklearn.metrics import accuracy_score\n", "accuracy_score(y_pred=test_preds_set, y_true=test_labels_set)" ] }, { "cell_type": "code", "execution_count": 253, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "              precision    recall  f1-score   support\n", "\n", "           0       0.00      0.00      0.00         7\n", "           1       0.38      0.43      0.40         7\n", "           2       0.00      0.00      0.00         4\n", "           3       0.71      0.91      0.80        11\n", "           4       0.83      0.83      0.83         6\n", "           5       1.00      0.50      0.67         4\n", "           6       0.00      0.00      0.00         6\n", "           7       0.44      0.57      0.50         7\n", "           8       0.56      0.53      0.54        19\n", "           9       0.57      0.73      0.64        11\n", "          10       0.43      0.50      0.46         6\n", "          11       1.00      0.40      0.57         5\n", "          12       1.00      0.25      0.40         4\n", "          13       0.75      0.60      0.67         5\n", "          14       1.00      0.50      0.67         6\n", "          15       1.00      0.57      0.73         7\n", "          16       0.00      0.00      0.00         5\n", "          17       0.62      0.83      0.71         6\n", "          18       0.67      0.84      0.74        19\n", "          19       0.50      0.90      0.64        10\n", "          20       0.50      0.33      0.40         6\n", "          21       0.80      0.80      0.80         5\n", "          22       0.58      1.00      0.73        11\n", "          23       0.33      0.25      0.29         8\n", "          24       0.43      0.76      0.55        21\n", "          25       0.67      0.80      0.73         5\n", "          26       0.75      0.50      0.60         6\n", "          27       0.20      0.17      0.18        12\n", "          28       0.24      0.42      0.30        12\n", "          29       0.67      0.86      0.75         7\n", "          30       0.76      0.86      0.81        22\n", "          31       0.22      0.50      0.31         4\n", "          32       0.67      0.80      0.73        25\n", "          33       0.53      0.83      0.65        12\n", "          34       0.50      0.40      0.44         5\n", "          35       0.80      0.67      0.73         6\n", "          36       0.33      0.50      0.40         4\n", "          37       0.89      0.89      0.89         9\n", "          
38 0.14 0.20 0.17 5\n", " 39 0.73 0.94 0.82 17\n", " 40 0.67 0.50 0.57 4\n", " 41 0.65 0.73 0.69 15\n", " 42 1.00 0.33 0.50 6\n", " 43 1.00 0.50 0.67 4\n", " 44 0.67 0.50 0.57 16\n", " 45 0.50 0.50 0.50 18\n", " 46 1.00 0.67 0.80 9\n", " 47 0.29 0.40 0.33 10\n", " 48 0.62 0.62 0.62 8\n", " 49 0.50 0.33 0.40 6\n", " 50 0.50 0.17 0.25 6\n", " 51 0.40 0.33 0.36 6\n", " 52 0.00 0.00 0.00 4\n", " 53 0.00 0.00 0.00 4\n", " 54 0.83 0.83 0.83 12\n", " 55 0.81 1.00 0.90 13\n", " 56 0.80 0.50 0.62 8\n", " 57 1.00 0.81 0.90 16\n", " 58 0.50 1.00 0.67 6\n", " 59 0.75 0.50 0.60 6\n", " 60 0.60 0.25 0.35 12\n", " 61 0.29 0.18 0.22 11\n", " 62 1.00 0.88 0.94 17\n", " 63 0.72 0.72 0.72 18\n", " 64 1.00 1.00 1.00 5\n", " 65 0.50 0.40 0.44 5\n", " 66 0.73 0.89 0.80 9\n", " 67 0.55 0.86 0.67 14\n", " 68 0.50 0.40 0.44 5\n", " 69 0.38 0.71 0.50 7\n", " 70 0.00 0.00 0.00 4\n", " 71 0.79 0.85 0.81 13\n", " 72 0.00 0.00 0.00 6\n", " 73 0.00 0.00 0.00 3\n", " 74 0.60 0.33 0.43 9\n", " 75 0.00 0.00 0.00 9\n", " 76 1.00 0.40 0.57 5\n", " 77 0.33 0.31 0.32 13\n", " 78 0.44 0.31 0.36 13\n", " 79 0.00 0.00 0.00 3\n", " 80 0.60 0.60 0.60 5\n", " 81 0.67 0.67 0.67 6\n", " 82 0.91 1.00 0.95 10\n", " 83 1.00 0.40 0.57 5\n", " 84 0.86 0.75 0.80 8\n", " 85 0.67 0.40 0.50 5\n", " 86 1.00 0.40 0.57 5\n", " 87 0.86 0.83 0.84 23\n", " 88 0.64 0.53 0.58 17\n", " 89 0.67 0.80 0.73 10\n", " 90 0.82 0.90 0.86 10\n", " 91 0.80 0.57 0.67 7\n", " 92 0.67 1.00 0.80 10\n", " 93 0.67 0.50 0.57 4\n", "\n", " accuracy 0.61 840\n", " macro avg 0.58 0.54 0.54 840\n", "weighted avg 0.61 0.61 0.59 840\n", "\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "/opt/conda/lib/python3.6/site-packages/sklearn/metrics/classification.py:1437: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.\n", " 'precision', 'predicted', average, warn_for)\n", "/opt/conda/lib/python3.6/site-packages/sklearn/metrics/classification.py:1437: 
UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.\n", " 'precision', 'predicted', average, warn_for)\n", "/opt/conda/lib/python3.6/site-packages/sklearn/metrics/classification.py:1437: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.\n", " 'precision', 'predicted', average, warn_for)\n" ] } ], "source": [ "from sklearn.metrics import classification_report\n", "print(classification_report(y_pred=test_preds_set, y_true=test_labels_set))" ] }, { "cell_type": "code", "execution_count": 91, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " 
\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
imagenamecaptions
0test_img_0.jpgsugarkaga nadandhava vida figuregaga nadandhavan dhan adhigam
1test_img_1.jpgi have come for my stones stones thaane.. nalla cheap aana price la namma kalyan jewellers la kedaikkum
2test_img_2.jpg\"special porotta\" nu pottuierukke spacial kum sadhavukkum yenna vidhiyasam..? rendukkum oru naal dhan vidhiyasam..
3test_img_3.jpg*we : amma .. cooker 3 whistle vanthuchu off panniten...
4test_img_4.jpgcreating whatsapp group - 1st day vaanga ji.. vaanga ji.. ellarayum add pannunga ji TEAM WORK PANROM...ENJOY PANROMM.. AFTER ONE WEEK ... ENNAYA..? IVLO AMAITHIYA IRUKEENGA? ETHACHU PESUNGAYA
5test_img_5.jpg*FOODIE : YAARUM PAARKKAMAL ENNAI PAARKIREN ENNAI ARIYAMAL UNNAI PARKIREN
6test_img_6.jpgMARGALI MAADHA EFFECT YENNA KULLURU
7test_img_7.jpgYENNAMMA FEEL PANNI YELUTHI ERUKRANDA AVAN
8test_img_8.jpgTAMIL SERIALS BE LIKE... INI AVARUKKU BADHIL..... IVAR
9test_img_9.jpgMama ponnu Expectation reality
10test_img_10.jpgpondatti yennai thittum bodhu... eppavum unga amma pechai dhan kepingala.,,? en maganai thittum bodhu... amma pechai ketkave mattiiya...?
11test_img_11.jpgmattai adakuvatharku en mutha ponna kattikoduppen... yoo maams kannukuttiya adakitten ilaya ponnu ennakkudhan...
12test_img_12.jpgada paavi payale
13test_img_13.jpg*(me)ENGINEER : ennoda maasa sambalam 30aayiram... unga ponna nalla pathukka idhu podhadha? *mamanar : en ponnukku naan kodukkura pocket maneyye 25aayiram oru masathukku sorry mama...adhaiyum sethudha 30aayiram endru kurinen...
14test_img_14.jpg#10years challenge..... *2008 : NET RECHARGE PANNA 30RS KUDU AATHA... *2018 : NET RECHARGE PANNA 300RS KUDU AATHA
15test_img_15.jpgIDHUYENA PRAMADHAM ELECTION VARTTUM IDHAVIDA SPECIAL AACHIYANGALLAN KATHUERUKKU..
16test_img_16.jpg*PEOPLE: MATRAM MUNNETRAM ENRIRE ADHARKU YENNA ARTHAM *JINGLE BELL : PETTIGALAI KAI MATRI NAAN MUNNERUVEN YENDRU ARTHAM
17test_img_17.jpg*TN PEOPLE : ADA ARAIBODHA NAAYE
18test_img_18.jpg*SIMBU : NAMMA MASSU ENNA NU KATANUM! ENAKU ANDAL PAAL UTHUNGA! YENGA VEETULA ANDA ILLA!
19test_img_19.jpg*MOM : VEETULA IRUNDHA ANDAA VAYUM PAAL PACKET UM KANOM *BANNER JEE FANS
20test_img_20.jpgONLY FOR 90S KIDS SCHOOL DAYS *PANAKARA VEETU PULLAIYA ERUPPARO
21test_img_21.jpgBEAUTY CARE #OTHER GIRLS FOFR MY BESTIE #ME : NEE DHINAM KULICHAA POTHUMAE, VERA YATHUVUM VENDAAMAE
22test_img_22.jpgVATHIYAR : CURRENT POGUM BODHU MUTHALA YEDHAI YEDUPINGA? TORCH LIGHT, EMERGENCY LIGHT? RENDUMILLAI SIR, APPA PACKETLA ERUKKIRA PANATHAI YEDUPPEN SIR...
23test_img_23.jpgDEI NAMMA ANDREA DA..
24test_img_24.jpg#2K KIDS : INNAKKI VENNUMNA NEENGA ORE VEDIYILA 7 SHOT VEDIKKALAM...... #90 KIDS : AANA NANGA ANNAIKE 7 BIJILI VEDIYA ONNA THIRICHU ORE VEDIYILA 7SHOT VEDICHAVUKAPU!!!
25test_img_25.jpg100CR... 125CR... NEENGA EDHACHU FEEL PANRINGALA?? *SIMBU : CHA CHA NAAN RAJA AH VANTHA ASSAULT AH 150 CR EDUPANGA..
26test_img_26.jpgDECENTA KENJUFYING MY SISTER .. PROGRESS REPORT LA APPA KITTA SIGN VAANGI KUDUKARIYAA PLEASE
27test_img_27.jpgTHONDARGAL *CAR OOTUNAR *VEETUKARAR
28test_img_28.jpgWHEN 90S KIDS WATCHING THIS SERIAL... ~90'S KIDS : NAMAKU VEELAIKKAARI AMAIYULA... VEELAIKKARI VAIKURA ALAVUKKU PANACASITHYUM AMAIYULS...
29test_img_29.jpgCOLLEGE AMAIVATHU ELLAM +2 KODUTHA VARAM... OFFICE AMAIVATHU ELLAM AVAN AVAN SEITHA VINAI... NAMAKU COLLEGE'UM OZHUNGA AMAIYALA...OFFICE'UM OZHUNGA AMAIYALA INTHA PAGE'AAVATHU MAINTAIN PANNUVOM..
.........
637test_img_637.jpgIF YOU ARE BAD I'M YOUR DAD
638test_img_638.jpgMEERA: NAMMA HUMAN BEING NAMALUKU IRUKKURA MAARI POWER ETHUMAE KEDAIYAATHU ATHA SIXTH SENSE.. ORUTHAR NAMBA PESUMBOTHAE ATHA ENNA 'NU FEEL PANNA MUDIUM.
639test_img_639.jpgGYM TRAINER TO NEW YEAR MOTIVATIONAL GUYS UNNA KADAISIYA PONA JANUARY 2 DHETHY PATHATHU
640test_img_640.jpgNAA POREN JAANNU DEII RAM.. VENAM DA SONNA KELU DA
641test_img_641.jpgFOUR TYPES OF BIRTHDAY TREATS ON EVERY GANG
642test_img_642.jpg90S KIDS DURING INTERVALS *ME: HEIGHT SOLLADHA .. AVAN KITTA UNDERTAKER IRUKKU!
643test_img_643.jpgTHANI ORUVAN UNGA EDAM UNGA AALUNGA.. ENAKU BAYAM ILLA, ATHUKU ITHAAN SAMPLE
644test_img_644.jpgLETS TAKE A SELFIE PULA *SELFIE KUMAR: SHROOOVVV
645test_img_645.jpg~IN KAATRU VELIYIDAI MOVIE IDE MAARI COLOUR FULL ANA SONG AH ENGAYO NAMMA PATHURUKAMEY....!!?
646test_img_646.jpg5) TAMIL MATHANGAL KURIPU ELUDHUGA: 1) CHITTRAI 2}VAIKASI 3) AANI 4) AAVADI 5) PURATTASI
647test_img_647.jpgNO COMMENTS
648test_img_648.jpgKAILA VERA PATHUPAISA ILLA IVAN VERA NAMBALA LOVE PANDRENNU SOLDRAN IVAN KUDA SENDHU APPADIYE VANDIYA OOTRIVENDI DHAN
649test_img_649.jpgENERGY LOW AAGUM VARAI VELAIYADIYADHU ANDHA KALAM BATTERY LOW AAGUM VARAI VELIYADHUVADHU INDHA KALAM.
650test_img_650.jpgayyey vaaya paaru
651test_img_651.jpgno comments
652test_img_652.jpgMEETS YOU BEFORE EXAM ASKS YOU QUESTIONS YOU'VE NEVER HEARD
653test_img_653.jpgCURRENT THOTTAA SAADHARANA MANUSHANUKU THAAN SHOCK ADIKUM NAAN NARASIMMA.. ENNA THOTTAA ANDHA CURRENTUKE SHOCK ADIKKUM CAPTAIN Y U NO PRODUCE CURRENT FOR TAMILNADU POWERCUT?
654test_img_654.jpgDAI NANACHUM PADAM MATTUM THA RELEASE PANREN... YENNA NAMBI NEENGA PADAM PAKALAMDA... AANA IVANAILAM ORU AALA MATHICHU IVANUGALA NAMBI PADATHUKU POVATHINGADA...
655test_img_655.jpgONGI ADICHA ONDRA TON WEIGHT AH?? AIYO, ATHU SATHIYAMA NAAN ILLAINGA NA.
656test_img_656.jpgYENDA UNGA APPA PEYARAI FRIDGEKULLA YELUDHI VACHURUKA.? AVARDHANDA SONNARU EN PEYAR KETTUPOGAMMA PAARTHUKONNU....!
657test_img_657.jpgWATCHING OTHER LANGUAGE MOVIE IN THEATRE SOOOPER APPU!
658test_img_658.jpgENAKU PIDITHA YENNALME THOLAI THURATHULA ANDRU NILLA.. INDRU NEE..
659test_img_659.jpgEVERUONE IS A GANGSTER UNTIL THE REAL GANSTER ARRIVES
660test_img_660.jpg*TN GIRLS
661test_img_661.jpgCLIMAX SCENE LA MAGALA KOOTITU VANDHU ALUDHA ELLARUM SUNA PANA AAGIDA MUDIYUMA? DA
662test_img_662.jpgDURING SCHOOL DAYS ME & FRNDS: ANNA MAIL CHECK PANNANUM LAST SYSTEM KODUNGA BROWSING CENTER ANNA:
663test_img_663.jpgREMEMBER THIS KID THIS KID IS RIGHT NOW
664test_img_664.jpg*SINGLES: IVAN VERA ENGALA ROMBA TORCHER PANDRANDA...!
665test_img_665.jpgENAIKI PUBG LA CHICKEN DINNER VANGURA PAYALUGALAM YARUUU ORU KALATHULA DUCK KA SHOOT PANNA MUDIYAMA ENDHA NAI KITAA HE HE HE VANGUNA PAYALUGA THANN
666test_img_666.jpgLATER THAT DAY
\n", "

667 rows × 2 columns

\n", "
" ], "text/plain": [ " imagename \\\n", "0 test_img_0.jpg \n", "1 test_img_1.jpg \n", "2 test_img_2.jpg \n", "3 test_img_3.jpg \n", "4 test_img_4.jpg \n", "5 test_img_5.jpg \n", "6 test_img_6.jpg \n", "7 test_img_7.jpg \n", "8 test_img_8.jpg \n", "9 test_img_9.jpg \n", "10 test_img_10.jpg \n", "11 test_img_11.jpg \n", "12 test_img_12.jpg \n", "13 test_img_13.jpg \n", "14 test_img_14.jpg \n", "15 test_img_15.jpg \n", "16 test_img_16.jpg \n", "17 test_img_17.jpg \n", "18 test_img_18.jpg \n", "19 test_img_19.jpg \n", "20 test_img_20.jpg \n", "21 test_img_21.jpg \n", "22 test_img_22.jpg \n", "23 test_img_23.jpg \n", "24 test_img_24.jpg \n", "25 test_img_25.jpg \n", "26 test_img_26.jpg \n", "27 test_img_27.jpg \n", "28 test_img_28.jpg \n", "29 test_img_29.jpg \n", ".. ... \n", "637 test_img_637.jpg \n", "638 test_img_638.jpg \n", "639 test_img_639.jpg \n", "640 test_img_640.jpg \n", "641 test_img_641.jpg \n", "642 test_img_642.jpg \n", "643 test_img_643.jpg \n", "644 test_img_644.jpg \n", "645 test_img_645.jpg \n", "646 test_img_646.jpg \n", "647 test_img_647.jpg \n", "648 test_img_648.jpg \n", "649 test_img_649.jpg \n", "650 test_img_650.jpg \n", "651 test_img_651.jpg \n", "652 test_img_652.jpg \n", "653 test_img_653.jpg \n", "654 test_img_654.jpg \n", "655 test_img_655.jpg \n", "656 test_img_656.jpg \n", "657 test_img_657.jpg \n", "658 test_img_658.jpg \n", "659 test_img_659.jpg \n", "660 test_img_660.jpg \n", "661 test_img_661.jpg \n", "662 test_img_662.jpg \n", "663 test_img_663.jpg \n", "664 test_img_664.jpg \n", "665 test_img_665.jpg \n", "666 test_img_666.jpg \n", "\n", " captions \n", "0 sugarkaga nadandhava vida figuregaga nadandhavan dhan adhigam \n", "1 i have come for my stones stones thaane.. nalla cheap aana price la namma kalyan jewellers la kedaikkum \n", "2 \"special porotta\" nu pottuierukke spacial kum sadhavukkum yenna vidhiyasam..? rendukkum oru naal dhan vidhiyasam.. \n", "3 *we : amma .. cooker 3 whistle vanthuchu off panniten... 
\n", "4 creating whatsapp group - 1st day vaanga ji.. vaanga ji.. ellarayum add pannunga ji TEAM WORK PANROM...ENJOY PANROMM.. AFTER ONE WEEK ... ENNAYA..? IVLO AMAITHIYA IRUKEENGA? ETHACHU PESUNGAYA \n", "5 *FOODIE : YAARUM PAARKKAMAL ENNAI PAARKIREN ENNAI ARIYAMAL UNNAI PARKIREN \n", "6 MARGALI MAADHA EFFECT YENNA KULLURU \n", "7 YENNAMMA FEEL PANNI YELUTHI ERUKRANDA AVAN \n", "8 TAMIL SERIALS BE LIKE... INI AVARUKKU BADHIL..... IVAR \n", "9 Mama ponnu Expectation reality \n", "10 pondatti yennai thittum bodhu... eppavum unga amma pechai dhan kepingala.,,? en maganai thittum bodhu... amma pechai ketkave mattiiya...? \n", "11 mattai adakuvatharku en mutha ponna kattikoduppen... yoo maams kannukuttiya adakitten ilaya ponnu ennakkudhan... \n", "12 ada paavi payale \n", "13 *(me)ENGINEER : ennoda maasa sambalam 30aayiram... unga ponna nalla pathukka idhu podhadha? *mamanar : en ponnukku naan kodukkura pocket maneyye 25aayiram oru masathukku sorry mama...adhaiyum sethudha 30aayiram endru kurinen... \n", "14 #10years challenge..... *2008 : NET RECHARGE PANNA 30RS KUDU AATHA... *2018 : NET RECHARGE PANNA 300RS KUDU AATHA \n", "15 IDHUYENA PRAMADHAM ELECTION VARTTUM IDHAVIDA SPECIAL AACHIYANGALLAN KATHUERUKKU.. \n", "16 *PEOPLE: MATRAM MUNNETRAM ENRIRE ADHARKU YENNA ARTHAM *JINGLE BELL : PETTIGALAI KAI MATRI NAAN MUNNERUVEN YENDRU ARTHAM \n", "17 *TN PEOPLE : ADA ARAIBODHA NAAYE \n", "18 *SIMBU : NAMMA MASSU ENNA NU KATANUM! ENAKU ANDAL PAAL UTHUNGA! YENGA VEETULA ANDA ILLA! \n", "19 *MOM : VEETULA IRUNDHA ANDAA VAYUM PAAL PACKET UM KANOM *BANNER JEE FANS \n", "20 ONLY FOR 90S KIDS SCHOOL DAYS *PANAKARA VEETU PULLAIYA ERUPPARO \n", "21 BEAUTY CARE #OTHER GIRLS FOFR MY BESTIE #ME : NEE DHINAM KULICHAA POTHUMAE, VERA YATHUVUM VENDAAMAE \n", "22 VATHIYAR : CURRENT POGUM BODHU MUTHALA YEDHAI YEDUPINGA? TORCH LIGHT, EMERGENCY LIGHT? RENDUMILLAI SIR, APPA PACKETLA ERUKKIRA PANATHAI YEDUPPEN SIR... \n", "23 DEI NAMMA ANDREA DA.. 
\n", "24 #2K KIDS : INNAKKI VENNUMNA NEENGA ORE VEDIYILA 7 SHOT VEDIKKALAM...... #90 KIDS : AANA NANGA ANNAIKE 7 BIJILI VEDIYA ONNA THIRICHU ORE VEDIYILA 7SHOT VEDICHAVUKAPU!!! \n", "25 100CR... 125CR... NEENGA EDHACHU FEEL PANRINGALA?? *SIMBU : CHA CHA NAAN RAJA AH VANTHA ASSAULT AH 150 CR EDUPANGA.. \n", "26 DECENTA KENJUFYING MY SISTER .. PROGRESS REPORT LA APPA KITTA SIGN VAANGI KUDUKARIYAA PLEASE \n", "27 THONDARGAL *CAR OOTUNAR *VEETUKARAR \n", "28 WHEN 90S KIDS WATCHING THIS SERIAL... ~90'S KIDS : NAMAKU VEELAIKKAARI AMAIYULA... VEELAIKKARI VAIKURA ALAVUKKU PANACASITHYUM AMAIYULS... \n", "29 COLLEGE AMAIVATHU ELLAM +2 KODUTHA VARAM... OFFICE AMAIVATHU ELLAM AVAN AVAN SEITHA VINAI... NAMAKU COLLEGE'UM OZHUNGA AMAIYALA...OFFICE'UM OZHUNGA AMAIYALA INTHA PAGE'AAVATHU MAINTAIN PANNUVOM.. \n", ".. ... \n", "637 IF YOU ARE BAD I'M YOUR DAD \n", "638 MEERA: NAMMA HUMAN BEING NAMALUKU IRUKKURA MAARI POWER ETHUMAE KEDAIYAATHU ATHA SIXTH SENSE.. ORUTHAR NAMBA PESUMBOTHAE ATHA ENNA 'NU FEEL PANNA MUDIUM. \n", "639 GYM TRAINER TO NEW YEAR MOTIVATIONAL GUYS UNNA KADAISIYA PONA JANUARY 2 DHETHY PATHATHU \n", "640 NAA POREN JAANNU DEII RAM.. VENAM DA SONNA KELU DA \n", "641 FOUR TYPES OF BIRTHDAY TREATS ON EVERY GANG \n", "642 90S KIDS DURING INTERVALS *ME: HEIGHT SOLLADHA .. AVAN KITTA UNDERTAKER IRUKKU! \n", "643 THANI ORUVAN UNGA EDAM UNGA AALUNGA.. ENAKU BAYAM ILLA, ATHUKU ITHAAN SAMPLE \n", "644 LETS TAKE A SELFIE PULA *SELFIE KUMAR: SHROOOVVV \n", "645 ~IN KAATRU VELIYIDAI MOVIE IDE MAARI COLOUR FULL ANA SONG AH ENGAYO NAMMA PATHURUKAMEY....!!? \n", "646 5) TAMIL MATHANGAL KURIPU ELUDHUGA: 1) CHITTRAI 2}VAIKASI 3) AANI 4) AAVADI 5) PURATTASI \n", "647 NO COMMENTS \n", "648 KAILA VERA PATHUPAISA ILLA IVAN VERA NAMBALA LOVE PANDRENNU SOLDRAN IVAN KUDA SENDHU APPADIYE VANDIYA OOTRIVENDI DHAN \n", "649 ENERGY LOW AAGUM VARAI VELAIYADIYADHU ANDHA KALAM BATTERY LOW AAGUM VARAI VELIYADHUVADHU INDHA KALAM. 
\n", "650 ayyey vaaya paaru \n", "651 no comments \n", "652 MEETS YOU BEFORE EXAM ASKS YOU QUESTIONS YOU'VE NEVER HEARD \n", "653 CURRENT THOTTAA SAADHARANA MANUSHANUKU THAAN SHOCK ADIKUM NAAN NARASIMMA.. ENNA THOTTAA ANDHA CURRENTUKE SHOCK ADIKKUM CAPTAIN Y U NO PRODUCE CURRENT FOR TAMILNADU POWERCUT? \n", "654 DAI NANACHUM PADAM MATTUM THA RELEASE PANREN... YENNA NAMBI NEENGA PADAM PAKALAMDA... AANA IVANAILAM ORU AALA MATHICHU IVANUGALA NAMBI PADATHUKU POVATHINGADA... \n", "655 ONGI ADICHA ONDRA TON WEIGHT AH?? AIYO, ATHU SATHIYAMA NAAN ILLAINGA NA. \n", "656 YENDA UNGA APPA PEYARAI FRIDGEKULLA YELUDHI VACHURUKA.? AVARDHANDA SONNARU EN PEYAR KETTUPOGAMMA PAARTHUKONNU....! \n", "657 WATCHING OTHER LANGUAGE MOVIE IN THEATRE SOOOPER APPU! \n", "658 ENAKU PIDITHA YENNALME THOLAI THURATHULA ANDRU NILLA.. INDRU NEE.. \n", "659 EVERUONE IS A GANGSTER UNTIL THE REAL GANSTER ARRIVES \n", "660 *TN GIRLS \n", "661 CLIMAX SCENE LA MAGALA KOOTITU VANDHU ALUDHA ELLARUM SUNA PANA AAGIDA MUDIYUMA? DA \n", "662 DURING SCHOOL DAYS ME & FRNDS: ANNA MAIL CHECK PANNANUM LAST SYSTEM KODUNGA BROWSING CENTER ANNA: \n", "663 REMEMBER THIS KID THIS KID IS RIGHT NOW \n", "664 *SINGLES: IVAN VERA ENGALA ROMBA TORCHER PANDRANDA...! \n", "665 ENAIKI PUBG LA CHICKEN DINNER VANGURA PAYALUGALAM YARUUU ORU KALATHULA DUCK KA SHOOT PANNA MUDIYAMA ENDHA NAI KITAA HE HE HE VANGUNA PAYALUGA THANN \n", "666 LATER THAT DAY \n", "\n", "[667 rows x 2 columns]" ] }, "execution_count": 91, "metadata": {}, "output_type": "execute_result" } ], "source": [ "df_test" ] }, { "cell_type": "code", "execution_count": 94, "metadata": {}, "outputs": [], "source": [ "learn = text_classifier_learner(text_dls,AWD_LSTM, drop_mult=0.5, metrics=accuracy)" ] }, { "cell_type": "code", "execution_count": 95, "metadata": {}, "outputs": [ { "data": { "text/html": [ "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
epochtrain_lossvalid_lossaccuracytime
00.6803650.2901430.90652200:13
" ], "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "text/html": [ "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
epochtrain_lossvalid_lossaccuracytime
00.6018110.4034470.85000000:25
10.6092610.3889940.90652200:24
20.5935650.3863540.89347800:24
30.5719900.2922130.92826100:23
" ], "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "learn.fine_tune(4, 1e-2)" ] }, { "cell_type": "code", "execution_count": 96, "metadata": {}, "outputs": [ { "data": { "text/html": [ "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
epochtrain_lossvalid_lossaccuracytime
00.5848010.2304260.92391300:13
" ], "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "text/html": [ "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
epochtrain_lossvalid_lossaccuracytime
00.5858320.6630960.60000000:23
10.6135700.3074600.91521700:23
20.6075700.3347830.90217400:25
30.5061330.2895270.92391300:24
" ], "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "learn.fine_tune(4, 1e-2)" ] }, { "cell_type": "code", "execution_count": 98, "metadata": {}, "outputs": [ { "data": { "text/html": [], "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "text/html": [ "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
textcategorycategory_
0xxbos xxup adra xxup adra … xxup xxunk : xxup xxunk xxup xxunk xxup xxunk xxup xxunk xxup alagana xxup xxunk xxup xxunk xxup xxunk . ? xxup namakku xxup yen xxup appadi xxup xxunk xxup xxunk . xxup xxunk \" xxunk \" xxup xxunk xxup xxunk . xxup xxunk : xxup adhuva xxup chellam … xxup naam xxup xxunk xxup piragu xxup xxunk xxup xxunk .. xxup xxunk , xxup chicken xxup xxunk , xxup chicken xxup xxunk , xxup chicken xxup kuruma , xxup chicken xxup kari , xxup xxunk xxup chicken , xxup pepper xxup chicken , xxup xxunk xxup chicken , xxup xxunk xxup chicken , xxup xxunk . xxup ippadi xxup pala xxu...11
1xxbos xxup after xxup xxunk xxup with xxup girlfriend … * boy : xxup xxunk ? * girl : xxup hum xxup xxunk xxup xxunk * boy : xxup enna xxup pandra ? * girl : xxup hum xxup xxunk xxup irukan * boy : xxup how xxup was xxup the xxup day ? * girl : xxup hum xxup hum xxup nice xxup nice * boy : xxup ipo xxup olunga xxup xxunk xxup xxunk ? * girls : xxup xxunk xxup sanda xxup xxunk xxup la xxup ipo xxup mattum xxup naan xxup nalla xxup xxunk xxup ah ?11
2xxbos xxunk 's xxup la xxup oru xxup xxunk xxup xxunk xxunk xxup xxunk xxup dhan xxup irukum … xxunk / xxunk … xxup apo xxup oru xxup xxunk xxup ku xxup xxunk xxup pota xxup unaku xxunk xxup xxunk xxup dhana ? ? .. xxup apdi xxup oru xxup coin xxup hey xxup illaye .. xxup enda xxup xxunk xxup xxunk xxup xxunk ? xxup dai xxup xxunk xxrep 3 !11
3xxbos xxup physics xxup oru xxup tution … xxunk xxup ku xxup oru xxup tution … … xxup xxunk 's xxup ku xxup oru xxup tution … .. ~ xxup engineering xxup xxunk ) : xxup xxunk xxup tution xxup xxunk xxup padicha xxup xxunk xxup vela xxup xxunk xxup xxunk xxup xxunk xxup xxunk xxup college xxup pona xxup yennakku xxup xxunk xxup vela xxup xxunk xxup xxunk xxup poda11
" ], "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "learn.show_results()" ] }, { "cell_type": "code", "execution_count": 105, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "SequentialRNN(\n", " (0): SentenceEncoder(\n", " (module): AWD_LSTM(\n", " (encoder): Embedding(1432, 400, padding_idx=1)\n", " (encoder_dp): EmbeddingDropout(\n", " (emb): Embedding(1432, 400, padding_idx=1)\n", " )\n", " (rnns): ModuleList(\n", " (0): WeightDropout(\n", " (module): LSTM(400, 1152, batch_first=True)\n", " )\n", " (1): WeightDropout(\n", " (module): LSTM(1152, 1152, batch_first=True)\n", " )\n", " (2): WeightDropout(\n", " (module): LSTM(1152, 400, batch_first=True)\n", " )\n", " )\n", " (input_dp): RNNDropout()\n", " (hidden_dps): ModuleList(\n", " (0): RNNDropout()\n", " (1): RNNDropout()\n", " (2): RNNDropout()\n", " )\n", " )\n", " )\n", " (1): PoolingLinearClassifier(\n", " (layers): Sequential(\n", " (0): LinBnDrop(\n", " (0): BatchNorm1d(1200, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (1): Dropout(p=0.2, inplace=False)\n", " (2): Linear(in_features=1200, out_features=50, bias=False)\n", " (3): ReLU(inplace=True)\n", " )\n", " (1): LinBnDrop(\n", " (0): BatchNorm1d(50, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (1): Dropout(p=0.1, inplace=False)\n", " (2): Linear(in_features=50, out_features=2, bias=False)\n", " )\n", " )\n", " )\n", ")" ] }, "execution_count": 105, "metadata": {}, "output_type": "execute_result" } ], "source": [ "text_model = learn.model\n", "text_model" ] }, { "cell_type": "code", "execution_count": 109, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "batch no. : 0\n", "batch no. : 1\n", "batch no. : 2\n", "batch no. : 3\n", "batch no. : 4\n", "batch no. : 5\n", "batch no. : 6\n", "batch no. : 7\n", "batch no. : 8\n", "batch no. : 9\n", "batch no. : 10\n", "batch no. : 11\n", "batch no. : 12\n", "batch no. 
: 13\n", "batch no. : 14\n", "batch no. : 15\n", "batch no. : 16\n", "batch no. : 17\n", "batch no. : 18\n", "batch no. : 19\n", "batch no. : 20\n", "batch no. : 21\n", "batch no. : 22\n", "batch no. : 23\n", "batch no. : 24\n", "batch no. : 25\n", "batch no. : 26\n", "batch no. : 27\n", "batch no. : 28\n", "batch no. : 29\n", "batch no. : 30\n", "batch no. : 31\n", "batch no. : 32\n", "batch no. : 33\n", "batch no. : 34\n", "batch no. : 35\n", "batch no. : 36\n", "batch no. : 37\n", "batch no. : 38\n", "batch no. : 39\n", "batch no. : 40\n", "batch no. : 41\n", "batch no. : 42\n", "batch no. : 43\n", "batch no. : 44\n", "batch no. : 45\n", "batch no. : 46\n", "batch no. : 47\n", "batch no. : 48\n", "batch no. : 49\n", "batch no. : 50\n", "batch no. : 51\n", "batch no. : 52\n", "batch no. : 53\n", "batch no. : 54\n", "batch no. : 55\n", "batch no. : 56\n", "batch no. : 57\n", "batch no. : 58\n", "batch no. : 59\n", "batch no. : 60\n", "batch no. : 61\n", "batch no. : 62\n", "batch no. : 63\n", "batch no. : 64\n", "batch no. : 65\n", "batch no. : 66\n", "batch no. : 67\n", "batch no. : 68\n", "batch no. : 69\n", "batch no. : 70\n", "batch no. : 71\n", "batch no. : 72\n", "batch no. : 73\n", "batch no. : 74\n", "batch no. : 75\n", "batch no. : 76\n", "batch no. : 77\n", "batch no. : 78\n", "batch no. : 79\n", "batch no. : 80\n", "batch no. : 81\n", "batch no. : 82\n", "batch no. : 83\n", "batch no. : 84\n", "batch no. : 85\n", "batch no. : 86\n", "batch no. : 87\n", "batch no. : 88\n", "batch no. : 89\n", "batch no. : 90\n", "batch no. : 91\n", "batch no. : 92\n", "batch no. : 93\n", "batch no. : 94\n", "batch no. : 95\n", "batch no. : 96\n", "batch no. : 97\n", "batch no. : 98\n", "batch no. : 99\n", "batch no. : 100\n", "batch no. : 101\n", "batch no. : 102\n", "batch no. : 103\n", "batch no. : 104\n", "batch no. : 105\n", "batch no. : 106\n", "batch no. : 107\n", "batch no. : 108\n", "batch no. : 109\n", "batch no. : 110\n", "batch no. : 111\n", "batch no. 
: 112\n", "batch no. : 113\n", "batch no. : 114\n", "batch no. : 115\n", "batch no. : 116\n", "batch no. : 117\n", "batch no. : 118\n", "batch no. : 119\n", "batch no. : 120\n", "batch no. : 121\n", "batch no. : 122\n", "batch no. : 123\n", "batch no. : 124\n", "batch no. : 125\n", "batch no. : 126\n", "batch no. : 127\n", "batch no. : 128\n", "batch no. : 129\n", "batch no. : 130\n", "batch no. : 131\n", "batch no. : 132\n", "batch no. : 133\n", "batch no. : 134\n", "batch no. : 135\n", "batch no. : 136\n", "batch no. : 137\n", "batch no. : 138\n", "batch no. : 139\n", "batch no. : 140\n", "batch no. : 141\n", "batch no. : 142\n", "batch no. : 143\n", "batch no. : 144\n", "batch no. : 145\n", "batch no. : 146\n", "batch no. : 147\n", "batch no. : 148\n", "batch no. : 149\n", "batch no. : 150\n", "batch no. : 151\n", "batch no. : 152\n", "batch no. : 153\n", "batch no. : 154\n", "batch no. : 155\n", "batch no. : 156\n", "batch no. : 157\n", "batch no. : 158\n", "batch no. : 159\n", "batch no. : 160\n", "batch no. : 161\n", "batch no. : 162\n", "batch no. : 163\n", "batch no. : 164\n", "batch no. : 165\n", "batch no. : 166\n", "batch no. : 167\n", "batch no. : 168\n", "batch no. : 169\n", "batch no. : 170\n", "batch no. : 171\n", "batch no. : 172\n", "batch no. : 173\n", "batch no. : 174\n", "batch no. : 175\n", "batch no. : 176\n", "batch no. : 177\n", "batch no. : 178\n", "batch no. : 179\n", "batch no. : 180\n", "batch no. : 181\n", "batch no. : 182\n", "batch no. : 183\n", "batch no. : 184\n", "batch no. : 185\n", "batch no. : 186\n", "batch no. : 187\n", "batch no. : 188\n", "batch no. : 189\n", "batch no. : 190\n", "batch no. : 191\n", "batch no. : 192\n", "batch no. : 193\n", "batch no. : 194\n", "batch no. : 195\n", "batch no. : 196\n", "batch no. : 197\n", "batch no. : 198\n", "batch no. : 199\n", "batch no. : 200\n", "batch no. : 201\n", "batch no. : 202\n", "batch no. : 203\n", "batch no. : 204\n", "batch no. : 205\n", "batch no. : 206\n", "batch no. 
: 207\n", "batch no. : 208\n", "batch no. : 209\n", "batch no. : 210\n", "batch no. : 211\n", "batch no. : 212\n", "batch no. : 213\n", "batch no. : 214\n", "batch no. : 215\n", "batch no. : 216\n", "batch no. : 217\n", "batch no. : 218\n", "batch no. : 219\n", "batch no. : 220\n", "batch no. : 221\n", "batch no. : 222\n", "batch no. : 223\n", "batch no. : 224\n", "batch no. : 225\n", "batch no. : 226\n", "batch no. : 227\n", "batch no. : 228\n", "batch no. : 229\n", "batch no. : 230\n", "batch no. : 231\n", "batch no. : 232\n", "batch no. : 233\n", "batch no. : 234\n", "batch no. : 235\n", "batch no. : 236\n", "batch no. : 237\n", "batch no. : 238\n", "batch no. : 239\n", "batch no. : 240\n", "batch no. : 241\n", "batch no. : 242\n", "batch no. : 243\n", "batch no. : 244\n", "batch no. : 245\n", "batch no. : 246\n", "batch no. : 247\n", "batch no. : 248\n", "batch no. : 249\n", "batch no. : 250\n", "batch no. : 251\n", "batch no. : 252\n", "batch no. : 253\n", "batch no. : 254\n", "batch no. : 255\n", "batch no. : 256\n", "batch no. : 257\n", "batch no. : 258\n", "batch no. : 259\n", "batch no. : 260\n", "batch no. : 261\n", "batch no. : 262\n", "batch no. : 263\n", "batch no. : 264\n", "batch no. : 265\n", "batch no. : 266\n", "batch no. : 267\n", "batch no. : 268\n", "batch no. : 269\n", "batch no. : 270\n", "batch no. : 271\n", "batch no. : 272\n", "batch no. : 273\n", "batch no. : 274\n", "batch no. : 275\n", "batch no. : 276\n", "batch no. : 277\n", "batch no. : 278\n", "batch no. : 279\n", "batch no. : 280\n", "batch no. : 281\n", "batch no. : 282\n", "batch no. : 283\n", "batch no. : 284\n", "batch no. : 285\n", "batch no. : 286\n", "batch no. : 287\n", "batch no. : 288\n", "batch no. : 289\n", "batch no. : 290\n", "batch no. : 291\n", "batch no. : 292\n", "batch no. : 293\n", "batch no. : 294\n", "batch no. : 295\n", "batch no. : 296\n", "batch no. : 297\n", "batch no. : 298\n", "batch no. : 299\n", "batch no. : 300\n", "batch no. : 301\n", "batch no. 
: 302\n", "batch no. : 303\n", "batch no. : 304\n", "batch no. : 305\n", "batch no. : 306\n", "batch no. : 307\n", "batch no. : 308\n", "batch no. : 309\n", "batch no. : 310\n", "batch no. : 311\n", "batch no. : 312\n", "batch no. : 313\n", "batch no. : 314\n", "batch no. : 315\n", "batch no. : 316\n", "batch no. : 317\n", "batch no. : 318\n", "batch no. : 319\n", "batch no. : 320\n", "batch no. : 321\n", "batch no. : 322\n", "batch no. : 323\n", "batch no. : 324\n", "batch no. : 325\n", "batch no. : 326\n", "batch no. : 327\n", "batch no. : 328\n", "batch no. : 329\n", "batch no. : 330\n", "batch no. : 331\n", "batch no. : 332\n", "batch no. : 333\n", "batch no. : 334\n", "batch no. : 335\n", "batch no. : 336\n", "batch no. : 337\n", "batch no. : 338\n", "batch no. : 339\n", "batch no. : 340\n", "batch no. : 341\n", "batch no. : 342\n", "batch no. : 343\n", "batch no. : 344\n", "batch no. : 345\n", "batch no. : 346\n", "batch no. : 347\n", "batch no. : 348\n", "batch no. : 349\n", "batch no. : 350\n", "batch no. : 351\n", "batch no. : 352\n", "batch no. : 353\n", "batch no. : 354\n", "batch no. : 355\n", "batch no. : 356\n", "batch no. : 357\n", "batch no. : 358\n", "batch no. : 359\n", "batch no. : 360\n", "batch no. : 361\n", "batch no. : 362\n", "batch no. : 363\n", "batch no. : 364\n", "batch no. : 365\n", "batch no. : 366\n", "batch no. : 367\n", "batch no. : 368\n", "batch no. : 369\n", "batch no. : 370\n", "batch no. : 371\n", "batch no. : 372\n", "batch no. : 373\n", "batch no. : 374\n", "batch no. : 375\n", "batch no. : 376\n", "batch no. : 377\n", "batch no. : 378\n", "batch no. : 379\n", "batch no. : 380\n", "batch no. : 381\n", "batch no. : 382\n", "batch no. : 383\n", "batch no. : 384\n", "batch no. : 385\n", "batch no. : 386\n", "batch no. : 387\n", "batch no. : 388\n", "batch no. : 389\n", "batch no. : 390\n", "batch no. : 391\n", "batch no. : 392\n", "batch no. : 393\n", "batch no. : 394\n", "batch no. : 395\n", "batch no. : 396\n", "batch no. 
: 397\n", "batch no. : 398\n", "batch no. : 399\n", "batch no. : 400\n", "batch no. : 401\n", "batch no. : 402\n", "batch no. : 403\n", "batch no. : 404\n", "batch no. : 405\n", "batch no. : 406\n", "batch no. : 407\n", "batch no. : 408\n", "batch no. : 409\n", "batch no. : 410\n", "batch no. : 411\n", "batch no. : 412\n", "batch no. : 413\n", "batch no. : 414\n", "batch no. : 415\n", "batch no. : 416\n", "batch no. : 417\n", "batch no. : 418\n", "batch no. : 419\n", "batch no. : 420\n", "batch no. : 421\n", "batch no. : 422\n", "batch no. : 423\n", "batch no. : 424\n", "batch no. : 425\n", "batch no. : 426\n", "batch no. : 427\n", "batch no. : 428\n", "batch no. : 429\n", "batch no. : 430\n", "batch no. : 431\n", "batch no. : 432\n", "batch no. : 433\n", "batch no. : 434\n", "batch no. : 435\n", "batch no. : 436\n", "batch no. : 437\n", "batch no. : 438\n", "batch no. : 439\n", "batch no. : 440\n", "batch no. : 441\n", "batch no. : 442\n", "batch no. : 443\n", "batch no. : 444\n", "batch no. : 445\n", "batch no. : 446\n", "batch no. : 447\n", "batch no. : 448\n", "batch no. : 449\n", "batch no. : 450\n", "batch no. : 451\n", "batch no. : 452\n", "batch no. : 453\n", "batch no. : 454\n", "batch no. : 455\n", "batch no. : 456\n", "batch no. : 457\n", "batch no. : 458\n", "batch no. : 459\n" ] } ], "source": [ "train_labels_set_text = []\n", "train_preds_set_text = []\n", "for idx, data in enumerate(text_dls.train):\n", " # get the inputs\n", " text_model.eval()\n", " print(\"batch no. 
: \", idx)\n", " inputs, labels = data\n", " train_labels_set_text += labels.tolist()\n", " inputs, labels = inputs.cuda(), labels.cuda()\n", "# print(labels)\n", " outputs = text_model(inputs)[0]\n", " _, preds = torch.max(outputs, 1)\n", " train_preds_set_text += preds.cpu().tolist()" ] }, { "cell_type": "code", "execution_count": 110, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "0.9211956521739131" ] }, "execution_count": 110, "metadata": {}, "output_type": "execute_result" } ], "source": [ "accuracy_score(y_pred=train_preds_set_text, y_true=train_labels_set_text)" ] }, { "cell_type": "code", "execution_count": 111, "metadata": {}, "outputs": [], "source": [ "class MyEnsemble(nn.Module):\n", " def __init__(self, modelA, modelB):\n", " super(MyEnsemble, self).__init__()\n", " self.modelA = modelA\n", " self.modelB = modelB\n", " self.classifier = nn.Linear(4, 2)\n", " \n", " def forward(self, x1, x2):\n", " x1 = self.modelA(x1)\n", " x2 = self.modelB(x2)[0]\n", " x = torch.cat((x1, x2), dim=1)\n", " x = self.classifier(F.relu(x))\n", " return x" ] }, { "cell_type": "code", "execution_count": 112, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "[1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 
0,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 
0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 
1,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 
0,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 
0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " 0,\n", " 0,\n", " 0,\n", " 1,\n", " 1,\n", " 1,\n", " 1,\n", " ...]" ] }, "execution_count": 112, "metadata": {}, "output_type": "execute_result" } ], "source": [ "train_preds_set_text" ] }, { "cell_type": "code", "execution_count": 116, "metadata": {}, "outputs": [ { "data": { "text/html": [], "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" }, { "name": "stderr", "output_type": "stream", "text": [ "/opt/conda/lib/python3.6/site-packages/numpy/core/_asarray.py:83: VisibleDeprecationWarning: Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated. If you meant to do this, you must specify 'dtype=object' when creating the ndarray\n", " return array(a, dtype, copy=False, order=order)\n" ] } ], "source": [ "data_text = TextDataLoaders.from_df(df, valid_pct=0, text_col='captions', label_col='label', shuffle_train=False)" ] }, { "cell_type": "code", "execution_count": 123, "metadata": { "scrolled": true }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "batch no. 
: 0\n", "TensorText([[ 2, 369, 232, 12, 34, 0, 817, 10, 915, 57, 567, 0,\n", " 915, 57, 168, 13, 567, 0, 16, 43, 0, 14, 369, 90,\n", " 12, 43, 258, 436, 466, 67, 0, 15, 7, 0, 7, 915,\n", " 7, 57, 7, 122, 7, 1222, 244, 7, 63, 7, 581, 7,\n", " 31, 7, 548, 15, 7, 63, 7, 258, 363, 7, 466, 7,\n", " 567, 7, 0, 13, 7, 0, 16, 7, 0, 7, 581, 7,\n", " 63, 7, 548, 7, 137, 7, 102, 7, 71, 7, 537, 7,\n", " 0, 7, 0, 7, 567, 7, 0, 7, 0, 7, 63, 7,\n", " 0, 11, 11, 7, 222, 47, 7, 0, 10, 18, 0, 7,\n", " 0, 11],\n", " [ 2, 7, 0, 7, 38, 7, 0, 7, 0, 7, 0, 12,\n", " 7, 118, 90, 7, 486, 7, 82, 7, 1004, 9, 7, 229,\n", " 7, 996, 7, 97, 7, 1397, 9, 454, 7, 82, 7, 1004,\n", " 9, 454, 7, 996, 7, 97, 7, 1397, 9, 454, 7, 82,\n", " 7, 1004, 9, 454, 7, 996, 7, 97, 7, 1397, 9, 454,\n", " 7, 82, 7, 1004, 9, 266, 368, 5, 17, 29, 7, 0,\n", " 7, 174, 7, 0, 7, 646, 7, 53, 7, 64, 7, 0,\n", " 7, 999, 7, 0, 7, 281, 9, 1, 1, 1, 1, 1,\n", " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", " 1, 1],\n", " [ 2, 7, 0, 7, 0, 7, 0, 7, 0, 7, 81, 29,\n", " 7, 1322, 7, 0, 363, 7, 0, 7, 0, 7, 994, 29,\n", " 7, 0, 244, 26, 7, 242, 7, 889, 363, 7, 0, 7,\n", " 400, 7, 0, 7, 46, 7, 370, 7, 0, 7, 0, 15,\n", " 7, 71, 7, 0, 7, 240, 7, 294, 7, 0, 29, 26,\n", " 7, 0, 7, 146, 7, 242, 7, 98, 7, 46, 7, 1336,\n", " 15, 26, 7, 0, 11, 7, 546, 7, 98, 7, 46, 7,\n", " 0, 7, 1180, 5, 197, 163, 1, 1, 1, 1, 1, 1,\n", " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", " 1, 1],\n", " [ 2, 7, 0, 7, 1231, 7, 326, 7, 1232, 7, 0, 7,\n", " 127, 7, 0, 7, 32, 7, 0, 13, 7, 178, 7, 1233,\n", " 7, 1231, 7, 58, 7, 326, 7, 1232, 7, 1234, 7, 0,\n", " 7, 32, 7, 0, 13, 7, 27, 7, 32, 7, 746, 7,\n", " 643, 13, 7, 0, 9, 7, 666, 7, 0, 7, 326, 7,\n", " 0, 7, 0, 7, 0, 13, 13, 7, 838, 7, 892, 7,\n", " 32, 7, 194, 10, 7, 892, 7, 0, 10, 7, 0, 7,\n", " 493, 7, 504, 7, 68, 1, 1, 1, 1, 1, 1, 1,\n", " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", " 1, 1]], device='cuda:0')\n", "TensorCategory([1, 1, 1, 1], device='cuda:0')\n" ] } ], "source": [ "for idx, data in 
enumerate(text_dls.train.new(shuffle=False)):\n", " # get the inputs\n", " text_model.eval()\n", " print(\"batch no. : \", idx)\n", " inputs, labels = data\n", " print(inputs)\n", " print(labels)\n", " break" ] }, { "cell_type": "code", "execution_count": 114, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " 
\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
imagenamecaptions
0test_img_0.jpgsugarkaga nadandhava vida figuregaga nadandhavan dhan adhigam
1test_img_1.jpgi have come for my stones stones thaane.. nalla cheap aana price la namma kalyan jewellers la kedaikkum
2test_img_2.jpg\"special porotta\" nu pottuierukke spacial kum sadhavukkum yenna vidhiyasam..? rendukkum oru naal dhan vidhiyasam..
3test_img_3.jpg*we : amma .. cooker 3 whistle vanthuchu off panniten...
4test_img_4.jpgcreating whatsapp group - 1st day vaanga ji.. vaanga ji.. ellarayum add pannunga ji TEAM WORK PANROM...ENJOY PANROMM.. AFTER ONE WEEK ... ENNAYA..? IVLO AMAITHIYA IRUKEENGA? ETHACHU PESUNGAYA
5test_img_5.jpg*FOODIE : YAARUM PAARKKAMAL ENNAI PAARKIREN ENNAI ARIYAMAL UNNAI PARKIREN
6test_img_6.jpgMARGALI MAADHA EFFECT YENNA KULLURU
7test_img_7.jpgYENNAMMA FEEL PANNI YELUTHI ERUKRANDA AVAN
8test_img_8.jpgTAMIL SERIALS BE LIKE... INI AVARUKKU BADHIL..... IVAR
9test_img_9.jpgMama ponnu Expectation reality
10test_img_10.jpgpondatti yennai thittum bodhu... eppavum unga amma pechai dhan kepingala.,,? en maganai thittum bodhu... amma pechai ketkave mattiiya...?
11test_img_11.jpgmattai adakuvatharku en mutha ponna kattikoduppen... yoo maams kannukuttiya adakitten ilaya ponnu ennakkudhan...
12test_img_12.jpgada paavi payale
13test_img_13.jpg*(me)ENGINEER : ennoda maasa sambalam 30aayiram... unga ponna nalla pathukka idhu podhadha? *mamanar : en ponnukku naan kodukkura pocket maneyye 25aayiram oru masathukku sorry mama...adhaiyum sethudha 30aayiram endru kurinen...
14test_img_14.jpg#10years challenge..... *2008 : NET RECHARGE PANNA 30RS KUDU AATHA... *2018 : NET RECHARGE PANNA 300RS KUDU AATHA
15test_img_15.jpgIDHUYENA PRAMADHAM ELECTION VARTTUM IDHAVIDA SPECIAL AACHIYANGALLAN KATHUERUKKU..
16test_img_16.jpg*PEOPLE: MATRAM MUNNETRAM ENRIRE ADHARKU YENNA ARTHAM *JINGLE BELL : PETTIGALAI KAI MATRI NAAN MUNNERUVEN YENDRU ARTHAM
17test_img_17.jpg*TN PEOPLE : ADA ARAIBODHA NAAYE
18test_img_18.jpg*SIMBU : NAMMA MASSU ENNA NU KATANUM! ENAKU ANDAL PAAL UTHUNGA! YENGA VEETULA ANDA ILLA!
19test_img_19.jpg*MOM : VEETULA IRUNDHA ANDAA VAYUM PAAL PACKET UM KANOM *BANNER JEE FANS
20test_img_20.jpgONLY FOR 90S KIDS SCHOOL DAYS *PANAKARA VEETU PULLAIYA ERUPPARO
21test_img_21.jpgBEAUTY CARE #OTHER GIRLS FOFR MY BESTIE #ME : NEE DHINAM KULICHAA POTHUMAE, VERA YATHUVUM VENDAAMAE
22test_img_22.jpgVATHIYAR : CURRENT POGUM BODHU MUTHALA YEDHAI YEDUPINGA? TORCH LIGHT, EMERGENCY LIGHT? RENDUMILLAI SIR, APPA PACKETLA ERUKKIRA PANATHAI YEDUPPEN SIR...
23test_img_23.jpgDEI NAMMA ANDREA DA..
24test_img_24.jpg#2K KIDS : INNAKKI VENNUMNA NEENGA ORE VEDIYILA 7 SHOT VEDIKKALAM...... #90 KIDS : AANA NANGA ANNAIKE 7 BIJILI VEDIYA ONNA THIRICHU ORE VEDIYILA 7SHOT VEDICHAVUKAPU!!!
25test_img_25.jpg100CR... 125CR... NEENGA EDHACHU FEEL PANRINGALA?? *SIMBU : CHA CHA NAAN RAJA AH VANTHA ASSAULT AH 150 CR EDUPANGA..
26test_img_26.jpgDECENTA KENJUFYING MY SISTER .. PROGRESS REPORT LA APPA KITTA SIGN VAANGI KUDUKARIYAA PLEASE
27test_img_27.jpgTHONDARGAL *CAR OOTUNAR *VEETUKARAR
28test_img_28.jpgWHEN 90S KIDS WATCHING THIS SERIAL... ~90'S KIDS : NAMAKU VEELAIKKAARI AMAIYULA... VEELAIKKARI VAIKURA ALAVUKKU PANACASITHYUM AMAIYULS...
29test_img_29.jpgCOLLEGE AMAIVATHU ELLAM +2 KODUTHA VARAM... OFFICE AMAIVATHU ELLAM AVAN AVAN SEITHA VINAI... NAMAKU COLLEGE'UM OZHUNGA AMAIYALA...OFFICE'UM OZHUNGA AMAIYALA INTHA PAGE'AAVATHU MAINTAIN PANNUVOM..
.........
637test_img_637.jpgIF YOU ARE BAD I'M YOUR DAD
638test_img_638.jpgMEERA: NAMMA HUMAN BEING NAMALUKU IRUKKURA MAARI POWER ETHUMAE KEDAIYAATHU ATHA SIXTH SENSE.. ORUTHAR NAMBA PESUMBOTHAE ATHA ENNA 'NU FEEL PANNA MUDIUM.
639test_img_639.jpgGYM TRAINER TO NEW YEAR MOTIVATIONAL GUYS UNNA KADAISIYA PONA JANUARY 2 DHETHY PATHATHU
640test_img_640.jpgNAA POREN JAANNU DEII RAM.. VENAM DA SONNA KELU DA
641test_img_641.jpgFOUR TYPES OF BIRTHDAY TREATS ON EVERY GANG
642test_img_642.jpg90S KIDS DURING INTERVALS *ME: HEIGHT SOLLADHA .. AVAN KITTA UNDERTAKER IRUKKU!
643test_img_643.jpgTHANI ORUVAN UNGA EDAM UNGA AALUNGA.. ENAKU BAYAM ILLA, ATHUKU ITHAAN SAMPLE
644test_img_644.jpgLETS TAKE A SELFIE PULA *SELFIE KUMAR: SHROOOVVV
645test_img_645.jpg~IN KAATRU VELIYIDAI MOVIE IDE MAARI COLOUR FULL ANA SONG AH ENGAYO NAMMA PATHURUKAMEY....!!?
646test_img_646.jpg5) TAMIL MATHANGAL KURIPU ELUDHUGA: 1) CHITTRAI 2}VAIKASI 3) AANI 4) AAVADI 5) PURATTASI
647test_img_647.jpgNO COMMENTS
648test_img_648.jpgKAILA VERA PATHUPAISA ILLA IVAN VERA NAMBALA LOVE PANDRENNU SOLDRAN IVAN KUDA SENDHU APPADIYE VANDIYA OOTRIVENDI DHAN
649test_img_649.jpgENERGY LOW AAGUM VARAI VELAIYADIYADHU ANDHA KALAM BATTERY LOW AAGUM VARAI VELIYADHUVADHU INDHA KALAM.
650test_img_650.jpgayyey vaaya paaru
651test_img_651.jpgno comments
652test_img_652.jpgMEETS YOU BEFORE EXAM ASKS YOU QUESTIONS YOU'VE NEVER HEARD
653test_img_653.jpgCURRENT THOTTAA SAADHARANA MANUSHANUKU THAAN SHOCK ADIKUM NAAN NARASIMMA.. ENNA THOTTAA ANDHA CURRENTUKE SHOCK ADIKKUM CAPTAIN Y U NO PRODUCE CURRENT FOR TAMILNADU POWERCUT?
654test_img_654.jpgDAI NANACHUM PADAM MATTUM THA RELEASE PANREN... YENNA NAMBI NEENGA PADAM PAKALAMDA... AANA IVANAILAM ORU AALA MATHICHU IVANUGALA NAMBI PADATHUKU POVATHINGADA...
655test_img_655.jpgONGI ADICHA ONDRA TON WEIGHT AH?? AIYO, ATHU SATHIYAMA NAAN ILLAINGA NA.
656test_img_656.jpgYENDA UNGA APPA PEYARAI FRIDGEKULLA YELUDHI VACHURUKA.? AVARDHANDA SONNARU EN PEYAR KETTUPOGAMMA PAARTHUKONNU....!
657test_img_657.jpgWATCHING OTHER LANGUAGE MOVIE IN THEATRE SOOOPER APPU!
658test_img_658.jpgENAKU PIDITHA YENNALME THOLAI THURATHULA ANDRU NILLA.. INDRU NEE..
659test_img_659.jpgEVERUONE IS A GANGSTER UNTIL THE REAL GANSTER ARRIVES
660test_img_660.jpg*TN GIRLS
661test_img_661.jpgCLIMAX SCENE LA MAGALA KOOTITU VANDHU ALUDHA ELLARUM SUNA PANA AAGIDA MUDIYUMA? DA
662test_img_662.jpgDURING SCHOOL DAYS ME & FRNDS: ANNA MAIL CHECK PANNANUM LAST SYSTEM KODUNGA BROWSING CENTER ANNA:
663test_img_663.jpgREMEMBER THIS KID THIS KID IS RIGHT NOW
664test_img_664.jpg*SINGLES: IVAN VERA ENGALA ROMBA TORCHER PANDRANDA...!
665test_img_665.jpgENAIKI PUBG LA CHICKEN DINNER VANGURA PAYALUGALAM YARUUU ORU KALATHULA DUCK KA SHOOT PANNA MUDIYAMA ENDHA NAI KITAA HE HE HE VANGUNA PAYALUGA THANN
666test_img_666.jpgLATER THAT DAY
\n", "

667 rows × 2 columns

\n", "
" ], "text/plain": [ " imagename \\\n", "0 test_img_0.jpg \n", "1 test_img_1.jpg \n", "2 test_img_2.jpg \n", "3 test_img_3.jpg \n", "4 test_img_4.jpg \n", "5 test_img_5.jpg \n", "6 test_img_6.jpg \n", "7 test_img_7.jpg \n", "8 test_img_8.jpg \n", "9 test_img_9.jpg \n", "10 test_img_10.jpg \n", "11 test_img_11.jpg \n", "12 test_img_12.jpg \n", "13 test_img_13.jpg \n", "14 test_img_14.jpg \n", "15 test_img_15.jpg \n", "16 test_img_16.jpg \n", "17 test_img_17.jpg \n", "18 test_img_18.jpg \n", "19 test_img_19.jpg \n", "20 test_img_20.jpg \n", "21 test_img_21.jpg \n", "22 test_img_22.jpg \n", "23 test_img_23.jpg \n", "24 test_img_24.jpg \n", "25 test_img_25.jpg \n", "26 test_img_26.jpg \n", "27 test_img_27.jpg \n", "28 test_img_28.jpg \n", "29 test_img_29.jpg \n", ".. ... \n", "637 test_img_637.jpg \n", "638 test_img_638.jpg \n", "639 test_img_639.jpg \n", "640 test_img_640.jpg \n", "641 test_img_641.jpg \n", "642 test_img_642.jpg \n", "643 test_img_643.jpg \n", "644 test_img_644.jpg \n", "645 test_img_645.jpg \n", "646 test_img_646.jpg \n", "647 test_img_647.jpg \n", "648 test_img_648.jpg \n", "649 test_img_649.jpg \n", "650 test_img_650.jpg \n", "651 test_img_651.jpg \n", "652 test_img_652.jpg \n", "653 test_img_653.jpg \n", "654 test_img_654.jpg \n", "655 test_img_655.jpg \n", "656 test_img_656.jpg \n", "657 test_img_657.jpg \n", "658 test_img_658.jpg \n", "659 test_img_659.jpg \n", "660 test_img_660.jpg \n", "661 test_img_661.jpg \n", "662 test_img_662.jpg \n", "663 test_img_663.jpg \n", "664 test_img_664.jpg \n", "665 test_img_665.jpg \n", "666 test_img_666.jpg \n", "\n", " captions \n", "0 sugarkaga nadandhava vida figuregaga nadandhavan dhan adhigam \n", "1 i have come for my stones stones thaane.. nalla cheap aana price la namma kalyan jewellers la kedaikkum \n", "2 \"special porotta\" nu pottuierukke spacial kum sadhavukkum yenna vidhiyasam..? rendukkum oru naal dhan vidhiyasam.. \n", "3 *we : amma .. cooker 3 whistle vanthuchu off panniten... 
\n", "4 creating whatsapp group - 1st day vaanga ji.. vaanga ji.. ellarayum add pannunga ji TEAM WORK PANROM...ENJOY PANROMM.. AFTER ONE WEEK ... ENNAYA..? IVLO AMAITHIYA IRUKEENGA? ETHACHU PESUNGAYA \n", "5 *FOODIE : YAARUM PAARKKAMAL ENNAI PAARKIREN ENNAI ARIYAMAL UNNAI PARKIREN \n", "6 MARGALI MAADHA EFFECT YENNA KULLURU \n", "7 YENNAMMA FEEL PANNI YELUTHI ERUKRANDA AVAN \n", "8 TAMIL SERIALS BE LIKE... INI AVARUKKU BADHIL..... IVAR \n", "9 Mama ponnu Expectation reality \n", "10 pondatti yennai thittum bodhu... eppavum unga amma pechai dhan kepingala.,,? en maganai thittum bodhu... amma pechai ketkave mattiiya...? \n", "11 mattai adakuvatharku en mutha ponna kattikoduppen... yoo maams kannukuttiya adakitten ilaya ponnu ennakkudhan... \n", "12 ada paavi payale \n", "13 *(me)ENGINEER : ennoda maasa sambalam 30aayiram... unga ponna nalla pathukka idhu podhadha? *mamanar : en ponnukku naan kodukkura pocket maneyye 25aayiram oru masathukku sorry mama...adhaiyum sethudha 30aayiram endru kurinen... \n", "14 #10years challenge..... *2008 : NET RECHARGE PANNA 30RS KUDU AATHA... *2018 : NET RECHARGE PANNA 300RS KUDU AATHA \n", "15 IDHUYENA PRAMADHAM ELECTION VARTTUM IDHAVIDA SPECIAL AACHIYANGALLAN KATHUERUKKU.. \n", "16 *PEOPLE: MATRAM MUNNETRAM ENRIRE ADHARKU YENNA ARTHAM *JINGLE BELL : PETTIGALAI KAI MATRI NAAN MUNNERUVEN YENDRU ARTHAM \n", "17 *TN PEOPLE : ADA ARAIBODHA NAAYE \n", "18 *SIMBU : NAMMA MASSU ENNA NU KATANUM! ENAKU ANDAL PAAL UTHUNGA! YENGA VEETULA ANDA ILLA! \n", "19 *MOM : VEETULA IRUNDHA ANDAA VAYUM PAAL PACKET UM KANOM *BANNER JEE FANS \n", "20 ONLY FOR 90S KIDS SCHOOL DAYS *PANAKARA VEETU PULLAIYA ERUPPARO \n", "21 BEAUTY CARE #OTHER GIRLS FOFR MY BESTIE #ME : NEE DHINAM KULICHAA POTHUMAE, VERA YATHUVUM VENDAAMAE \n", "22 VATHIYAR : CURRENT POGUM BODHU MUTHALA YEDHAI YEDUPINGA? TORCH LIGHT, EMERGENCY LIGHT? RENDUMILLAI SIR, APPA PACKETLA ERUKKIRA PANATHAI YEDUPPEN SIR... \n", "23 DEI NAMMA ANDREA DA.. 
\n", "24 #2K KIDS : INNAKKI VENNUMNA NEENGA ORE VEDIYILA 7 SHOT VEDIKKALAM...... #90 KIDS : AANA NANGA ANNAIKE 7 BIJILI VEDIYA ONNA THIRICHU ORE VEDIYILA 7SHOT VEDICHAVUKAPU!!! \n", "25 100CR... 125CR... NEENGA EDHACHU FEEL PANRINGALA?? *SIMBU : CHA CHA NAAN RAJA AH VANTHA ASSAULT AH 150 CR EDUPANGA.. \n", "26 DECENTA KENJUFYING MY SISTER .. PROGRESS REPORT LA APPA KITTA SIGN VAANGI KUDUKARIYAA PLEASE \n", "27 THONDARGAL *CAR OOTUNAR *VEETUKARAR \n", "28 WHEN 90S KIDS WATCHING THIS SERIAL... ~90'S KIDS : NAMAKU VEELAIKKAARI AMAIYULA... VEELAIKKARI VAIKURA ALAVUKKU PANACASITHYUM AMAIYULS... \n", "29 COLLEGE AMAIVATHU ELLAM +2 KODUTHA VARAM... OFFICE AMAIVATHU ELLAM AVAN AVAN SEITHA VINAI... NAMAKU COLLEGE'UM OZHUNGA AMAIYALA...OFFICE'UM OZHUNGA AMAIYALA INTHA PAGE'AAVATHU MAINTAIN PANNUVOM.. \n", ".. ... \n", "637 IF YOU ARE BAD I'M YOUR DAD \n", "638 MEERA: NAMMA HUMAN BEING NAMALUKU IRUKKURA MAARI POWER ETHUMAE KEDAIYAATHU ATHA SIXTH SENSE.. ORUTHAR NAMBA PESUMBOTHAE ATHA ENNA 'NU FEEL PANNA MUDIUM. \n", "639 GYM TRAINER TO NEW YEAR MOTIVATIONAL GUYS UNNA KADAISIYA PONA JANUARY 2 DHETHY PATHATHU \n", "640 NAA POREN JAANNU DEII RAM.. VENAM DA SONNA KELU DA \n", "641 FOUR TYPES OF BIRTHDAY TREATS ON EVERY GANG \n", "642 90S KIDS DURING INTERVALS *ME: HEIGHT SOLLADHA .. AVAN KITTA UNDERTAKER IRUKKU! \n", "643 THANI ORUVAN UNGA EDAM UNGA AALUNGA.. ENAKU BAYAM ILLA, ATHUKU ITHAAN SAMPLE \n", "644 LETS TAKE A SELFIE PULA *SELFIE KUMAR: SHROOOVVV \n", "645 ~IN KAATRU VELIYIDAI MOVIE IDE MAARI COLOUR FULL ANA SONG AH ENGAYO NAMMA PATHURUKAMEY....!!? \n", "646 5) TAMIL MATHANGAL KURIPU ELUDHUGA: 1) CHITTRAI 2}VAIKASI 3) AANI 4) AAVADI 5) PURATTASI \n", "647 NO COMMENTS \n", "648 KAILA VERA PATHUPAISA ILLA IVAN VERA NAMBALA LOVE PANDRENNU SOLDRAN IVAN KUDA SENDHU APPADIYE VANDIYA OOTRIVENDI DHAN \n", "649 ENERGY LOW AAGUM VARAI VELAIYADIYADHU ANDHA KALAM BATTERY LOW AAGUM VARAI VELIYADHUVADHU INDHA KALAM. 
\n", "650 ayyey vaaya paaru \n", "651 no comments \n", "652 MEETS YOU BEFORE EXAM ASKS YOU QUESTIONS YOU'VE NEVER HEARD \n", "653 CURRENT THOTTAA SAADHARANA MANUSHANUKU THAAN SHOCK ADIKUM NAAN NARASIMMA.. ENNA THOTTAA ANDHA CURRENTUKE SHOCK ADIKKUM CAPTAIN Y U NO PRODUCE CURRENT FOR TAMILNADU POWERCUT? \n", "654 DAI NANACHUM PADAM MATTUM THA RELEASE PANREN... YENNA NAMBI NEENGA PADAM PAKALAMDA... AANA IVANAILAM ORU AALA MATHICHU IVANUGALA NAMBI PADATHUKU POVATHINGADA... \n", "655 ONGI ADICHA ONDRA TON WEIGHT AH?? AIYO, ATHU SATHIYAMA NAAN ILLAINGA NA. \n", "656 YENDA UNGA APPA PEYARAI FRIDGEKULLA YELUDHI VACHURUKA.? AVARDHANDA SONNARU EN PEYAR KETTUPOGAMMA PAARTHUKONNU....! \n", "657 WATCHING OTHER LANGUAGE MOVIE IN THEATRE SOOOPER APPU! \n", "658 ENAKU PIDITHA YENNALME THOLAI THURATHULA ANDRU NILLA.. INDRU NEE.. \n", "659 EVERUONE IS A GANGSTER UNTIL THE REAL GANSTER ARRIVES \n", "660 *TN GIRLS \n", "661 CLIMAX SCENE LA MAGALA KOOTITU VANDHU ALUDHA ELLARUM SUNA PANA AAGIDA MUDIYUMA? DA \n", "662 DURING SCHOOL DAYS ME & FRNDS: ANNA MAIL CHECK PANNANUM LAST SYSTEM KODUNGA BROWSING CENTER ANNA: \n", "663 REMEMBER THIS KID THIS KID IS RIGHT NOW \n", "664 *SINGLES: IVAN VERA ENGALA ROMBA TORCHER PANDRANDA...! \n", "665 ENAIKI PUBG LA CHICKEN DINNER VANGURA PAYALUGALAM YARUUU ORU KALATHULA DUCK KA SHOOT PANNA MUDIYAMA ENDHA NAI KITAA HE HE HE VANGUNA PAYALUGA THANN \n", "666 LATER THAT DAY \n", "\n", "[667 rows x 2 columns]" ] }, "execution_count": 114, "metadata": {}, "output_type": "execute_result" } ], "source": [ "df_test" ] }, { "cell_type": "code", "execution_count": 115, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " 
\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
imagenamecaptionsnot_trolllabel
0Not_troll_0.jpgAda pikkalipayalugala10
1Not_troll_1.jpgEtho sambavam nadandhirukkum pola10
2Not_troll_10.jpgVunnayellam frienda vechirukken paaru10
3Not_troll_100.jpgIdho! Ivan dhaan naan nasama ponadhukku kaaranam10
4Not_troll_1000.jpgEnnada lusu thanama pesikittu irukka lusu payale10
5Not_troll_1001.jpgVuhvuhvuhvuhvuh........10
6Not_troll_1002.jpgYenna ore pattasu sathama kekkudhu..\\nOhhoo.. inaikku New year ah.. sari naama Namma velaiya paapom10
7Not_troll_1003.jpgInimel ipidi post pottal vailaye vettuven10
8Not_troll_1004.jpgIvaingakittarundhu yepidi thapikka porenanu theriyalaye10
9Not_troll_1005.jpg(no caption)10
10Not_troll_1006.jpg(no caption)10
11Not_troll_1007.jpgInaikku mattum match win panniten naan jaichiten..10
12Not_troll_1008.jpgKarupu vulaipaligalin vannam.. theriuma vunakku?10
13Not_troll_1009.jpgAndha group yellam poiyacha???10
14Not_troll_101.jpgChildhood Memories ☺️☺️☺️10
15Not_troll_1010.jpgDeii.. naanpatukku sivanennu dhanada irundhen..\\nYaaru vambu thumbukkavadhu ponena..?10
16Not_troll_1011.jpgIdhellam yenga vuruppudapogudhu..10
17Not_troll_1012.jpgAvasarapattu vettidadhinganey.. naan verum like mattum dhaan potten.. post pottadhu vera yaaro..10
18Not_troll_1013.jpgIvan oruthan.... Neram,kaalam theriyama comedy pannuvaan...10
19Not_troll_1014.jpgAnaivarukkum iniya kaalai vanakkam..\\nAama ivaru kaalai vanakkam sollalana yaarukku poludhu vidiyadhu paaru..10
20Not_troll_1015.jpgPattasai konjam thalli vechi vedikkurela pakkathula yen vandi nikkidhu..\\nPadadhu padadhu..10
21Not_troll_1016.jpgEnnaiya vambilukkuradhe vunakku polappa pochi..10
22Not_troll_1017.jpgIru vadivelugalum govaluvai thediyapoludhu..10
23Not_troll_102.jpgPodhum itthoda niruthikka..10
24Not_troll_103.jpg(no caption)10
25Not_troll_104.jpgPodhuma oottunadhu..\\nReel andhupochi...10
26Not_troll_105.jpgArumaiyaana karutthu...10
27Not_troll_106.jpgVittula oru velaium seiuradhilla! Facebookla vandhu adutthavangalukku advice panna vendiyadhu...10
28Not_troll_107.jpgIdhoda niruthikka! Idhellam kavidhainu solladhinga naan aludhuruven😭😭😭10
29Not_troll_108.jpgYenga thaikku yevalo dhillu paathiyaa..??10
...............
2270troll_416.pngFLIPKART Ivanungalae Big Billion day nu onna aaramipaangalam Adhula Out of stock products ah just oru rooba ku Sold out nu potu pilem kaatuvangalaam01
2271troll_417.jpgWhenever vanitha opens her mouth chai sariyana lambadi pombalaiya irupa pola ivala pethaingla ila....01
2272troll_418.jpgNo Captions01
2273troll_419.jpgidhil yethanai vulladhu 3 alla 4 ?? yennada idhu... yenakku therinja konja nenja kannukuda marandhudum pola... yenanga padhil01
2274troll_42.jpgEvalo Prachana Irundhalum Nalla Mooku mutta Saptom na Prachana Theerudho illayo... Naama Prachanaya Marandhruvom !!!01
2275troll_420.jpgNo Captions01
2276troll_421.jpgHOW VIJAY TV CELEBRATE BODI PALAYA PATIENT KUMKI, RAJA RANI, SAATAI, BAAHUBALI 2 AH DISCHARGE PANUNGA PODHU PATIENT PARIYERUM PERUMAL, VADA CHENNAI, SAAMY 2 , CCV AH ADMIT PANUNGA01
2277troll_422.pngANDHA 1Re PENDRIVE IRUKKA ENNA PENDRIVE IRUKKA ANDHA 900 RS LUMIA IRUKKA ENNA LUMIA IRUKKA SERI VIDUPA, ANDHA 1300 RS SAMSUNG TAB AACHUM IRUKKA ENNA TAB AACHUM IRUKKA01
2278troll_423.jpgWHAT YOU ORDERED... WHAT THEY DELEVERED...01
2279troll_424.jpgNo Captions01
2280troll_425.pngunayum vitu veikalay antha vjd01
2281troll_426.jpgin boys movie thuniya thokkikodi ninanjida pogudhu appo ni thokkalana thuni ninanjidukkum apidithana....01
2282troll_427.jpgThat NALLAVAN Moment While watching a Movie with Family and when a KISSING Scene Comes01
2283troll_428.jpgNo Captions01
2284troll_429.jpgidhukku kalaila padichi irukalam idea illatha pasanga!!01
2285troll_43.jpglargest family in the world ithu yellam nambura mari ah ga irruku01
2286troll_430.jpgponnukku 100 kilo periya vengayamum.... 50 kilo sinna vengayamum kudutthudunga kalyanam pesi pudichidalam...01
2287troll_431.jpgporatasi effect enga thala zebra ku evlo dhillu paathiya..?!!01
2288troll_432.pngTHAT AWESOME MOMENT WHEN U FIND MONEY FROM UR OLD DRESS AT MONTH END01
2289troll_433.jpgIN GIRLS VERSION IN BOYS VERSION01
2290troll_434.jpgperumbettil paraparappu valibarkaidhu..! kaidhu seium naal: 23.02.2015 kaidhu seium idam : P.D.S thirumana mandabam kaidhu anavar A. Rajesh kaidhu seidhavar C. Sarmila kuttram: pennin manadhai thirudivittar thirpu: mundru mudichu podudhal mukkiya satchigal G. selvam N. purushothamman mattrum perumbedu kiramam nanbargal01
2291troll_435.jpgMy bra 2000 rupees branded ethu antha roattu kadaila edutha athuva..01
2292troll_436.pngINDIAN CRICKET TEAM in sub continents in OVERSEAS01
2293troll_437.jpgNo Captions01
2294troll_438.pngGET A 1300 GRE SCORE RETAKES IT01
2295troll_439.jpgamma yaaro orutharu... pattu paaduraaruma... amma nadikkuraruma... kattuthanama dancelam aaduraru ivar dhaaan STRah ma?01
2296troll_44.jpgPhone n Twitter hacked please don't respond any random messages.. My back end team is working on getting things in control. HACKED PAE!01
2297troll_440.jpgRespected deat @narendramodi ji hearty corigratulations... You made it !!! God bless..01
2298troll_441.jpgEngu irutha nan mnnil piranthidum bothu vera enga irunthan andrea va love panitu irunthan01
2299troll_442.jpgWhen STR fan hears about milk.. Paal ice... cup ice... paal ice... Yen da... Unakku vera paatu edhuvum kadaikalaya? Enapa... Patta balama iruku... Balamurugam kovil ka? Murugar kovil nu sollalam la.. Yen da balamurugan solura? Ball kadakudhu paathu va pa.. Baal ball.. Adhuku pandhu nu sollalam la... Yen ball nu solra... Sodikkadhinga da ennaya..01
\n", "

2300 rows × 4 columns

\n", "
" ], "text/plain": [ " imagename \\\n", "0 Not_troll_0.jpg \n", "1 Not_troll_1.jpg \n", "2 Not_troll_10.jpg \n", "3 Not_troll_100.jpg \n", "4 Not_troll_1000.jpg \n", "5 Not_troll_1001.jpg \n", "6 Not_troll_1002.jpg \n", "7 Not_troll_1003.jpg \n", "8 Not_troll_1004.jpg \n", "9 Not_troll_1005.jpg \n", "10 Not_troll_1006.jpg \n", "11 Not_troll_1007.jpg \n", "12 Not_troll_1008.jpg \n", "13 Not_troll_1009.jpg \n", "14 Not_troll_101.jpg \n", "15 Not_troll_1010.jpg \n", "16 Not_troll_1011.jpg \n", "17 Not_troll_1012.jpg \n", "18 Not_troll_1013.jpg \n", "19 Not_troll_1014.jpg \n", "20 Not_troll_1015.jpg \n", "21 Not_troll_1016.jpg \n", "22 Not_troll_1017.jpg \n", "23 Not_troll_102.jpg \n", "24 Not_troll_103.jpg \n", "25 Not_troll_104.jpg \n", "26 Not_troll_105.jpg \n", "27 Not_troll_106.jpg \n", "28 Not_troll_107.jpg \n", "29 Not_troll_108.jpg \n", "... ... \n", "2270 troll_416.png \n", "2271 troll_417.jpg \n", "2272 troll_418.jpg \n", "2273 troll_419.jpg \n", "2274 troll_42.jpg \n", "2275 troll_420.jpg \n", "2276 troll_421.jpg \n", "2277 troll_422.png \n", "2278 troll_423.jpg \n", "2279 troll_424.jpg \n", "2280 troll_425.png \n", "2281 troll_426.jpg \n", "2282 troll_427.jpg \n", "2283 troll_428.jpg \n", "2284 troll_429.jpg \n", "2285 troll_43.jpg \n", "2286 troll_430.jpg \n", "2287 troll_431.jpg \n", "2288 troll_432.png \n", "2289 troll_433.jpg \n", "2290 troll_434.jpg \n", "2291 troll_435.jpg \n", "2292 troll_436.png \n", "2293 troll_437.jpg \n", "2294 troll_438.png \n", "2295 troll_439.jpg \n", "2296 troll_44.jpg \n", "2297 troll_440.jpg \n", "2298 troll_441.jpg \n", "2299 troll_442.jpg \n", "\n", " captions \\\n", "0 Ada pikkalipayalugala \n", "1 Etho sambavam nadandhirukkum pola \n", "2 Vunnayellam frienda vechirukken paaru \n", "3 Idho! Ivan dhaan naan nasama ponadhukku kaaranam \n", "4 Ennada lusu thanama pesikittu irukka lusu payale \n", "5 Vuhvuhvuhvuhvuh........ \n", "6 Yenna ore pattasu sathama kekkudhu..\\nOhhoo.. inaikku New year ah.. 
sari naama Namma velaiya paapom \n", "7 Inimel ipidi post pottal vailaye vettuven \n", "8 Ivaingakittarundhu yepidi thapikka porenanu theriyalaye \n", "9 (no caption) \n", "10 (no caption) \n", "11 Inaikku mattum match win panniten naan jaichiten.. \n", "12 Karupu vulaipaligalin vannam.. theriuma vunakku? \n", "13 Andha group yellam poiyacha??? \n", "14 Childhood Memories ☺️☺️☺️ \n", "15 Deii.. naanpatukku sivanennu dhanada irundhen..\\nYaaru vambu thumbukkavadhu ponena..? \n", "16 Idhellam yenga vuruppudapogudhu.. \n", "17 Avasarapattu vettidadhinganey.. naan verum like mattum dhaan potten.. post pottadhu vera yaaro.. \n", "18 Ivan oruthan.... Neram,kaalam theriyama comedy pannuvaan... \n", "19 Anaivarukkum iniya kaalai vanakkam..\\nAama ivaru kaalai vanakkam sollalana yaarukku poludhu vidiyadhu paaru.. \n", "20 Pattasai konjam thalli vechi vedikkurela pakkathula yen vandi nikkidhu..\\nPadadhu padadhu.. \n", "21 Ennaiya vambilukkuradhe vunakku polappa pochi.. \n", "22 Iru vadivelugalum govaluvai thediyapoludhu.. \n", "23 Podhum itthoda niruthikka.. \n", "24 (no caption) \n", "25 Podhuma oottunadhu..\\nReel andhupochi... \n", "26 Arumaiyaana karutthu... \n", "27 Vittula oru velaium seiuradhilla! Facebookla vandhu adutthavangalukku advice panna vendiyadhu... \n", "28 Idhoda niruthikka! Idhellam kavidhainu solladhinga naan aludhuruven😭😭😭 \n", "29 Yenga thaikku yevalo dhillu paathiyaa..?? \n", "... ... \n", "2270 FLIPKART Ivanungalae Big Billion day nu onna aaramipaangalam Adhula Out of stock products ah just oru rooba ku Sold out nu potu pilem kaatuvangalaam \n", "2271 Whenever vanitha opens her mouth chai sariyana lambadi pombalaiya irupa pola ivala pethaingla ila.... \n", "2272 No Captions \n", "2273 idhil yethanai vulladhu 3 alla 4 ?? yennada idhu... yenakku therinja konja nenja kannukuda marandhudum pola... yenanga padhil \n", "2274 Evalo Prachana Irundhalum Nalla Mooku mutta Saptom na Prachana Theerudho illayo... Naama Prachanaya Marandhruvom !!! 
\n", "2275 No Captions \n", "2276 HOW VIJAY TV CELEBRATE BODI PALAYA PATIENT KUMKI, RAJA RANI, SAATAI, BAAHUBALI 2 AH DISCHARGE PANUNGA PODHU PATIENT PARIYERUM PERUMAL, VADA CHENNAI, SAAMY 2 , CCV AH ADMIT PANUNGA \n", "2277 ANDHA 1Re PENDRIVE IRUKKA ENNA PENDRIVE IRUKKA ANDHA 900 RS LUMIA IRUKKA ENNA LUMIA IRUKKA SERI VIDUPA, ANDHA 1300 RS SAMSUNG TAB AACHUM IRUKKA ENNA TAB AACHUM IRUKKA \n", "2278 WHAT YOU ORDERED... WHAT THEY DELEVERED... \n", "2279 No Captions \n", "2280 unayum vitu veikalay antha vjd \n", "2281 in boys movie thuniya thokkikodi ninanjida pogudhu appo ni thokkalana thuni ninanjidukkum apidithana.... \n", "2282 That NALLAVAN Moment While watching a Movie with Family and when a KISSING Scene Comes \n", "2283 No Captions \n", "2284 idhukku kalaila padichi irukalam idea illatha pasanga!! \n", "2285 largest family in the world ithu yellam nambura mari ah ga irruku \n", "2286 ponnukku 100 kilo periya vengayamum.... 50 kilo sinna vengayamum kudutthudunga kalyanam pesi pudichidalam... \n", "2287 poratasi effect enga thala zebra ku evlo dhillu paathiya..?!! \n", "2288 THAT AWESOME MOMENT WHEN U FIND MONEY FROM UR OLD DRESS AT MONTH END \n", "2289 IN GIRLS VERSION IN BOYS VERSION \n", "2290 perumbettil paraparappu valibarkaidhu..! kaidhu seium naal: 23.02.2015 kaidhu seium idam : P.D.S thirumana mandabam kaidhu anavar A. Rajesh kaidhu seidhavar C. Sarmila kuttram: pennin manadhai thirudivittar thirpu: mundru mudichu podudhal mukkiya satchigal G. selvam N. purushothamman mattrum perumbedu kiramam nanbargal \n", "2291 My bra 2000 rupees branded ethu antha roattu kadaila edutha athuva.. \n", "2292 INDIAN CRICKET TEAM in sub continents in OVERSEAS \n", "2293 No Captions \n", "2294 GET A 1300 GRE SCORE RETAKES IT \n", "2295 amma yaaro orutharu... pattu paaduraaruma... amma nadikkuraruma... kattuthanama dancelam aaduraru ivar dhaaan STRah ma? \n", "2296 Phone n Twitter hacked please don't respond any random messages.. 
My back end team is working on getting things in control. HACKED PAE! \n", "2297 Respected deat @narendramodi ji hearty corigratulations... You made it !!! God bless.. \n", "2298 Engu irutha nan mnnil piranthidum bothu vera enga irunthan andrea va love panitu irunthan \n", "2299 When STR fan hears about milk.. Paal ice... cup ice... paal ice... Yen da... Unakku vera paatu edhuvum kadaikalaya? Enapa... Patta balama iruku... Balamurugam kovil ka? Murugar kovil nu sollalam la.. Yen da balamurugan solura? Ball kadakudhu paathu va pa.. Baal ball.. Adhuku pandhu nu sollalam la... Yen ball nu solra... Sodikkadhinga da ennaya.. \n", "\n", " not_troll label \n", "0 1 0 \n", "1 1 0 \n", "2 1 0 \n", "3 1 0 \n", "4 1 0 \n", "5 1 0 \n", "6 1 0 \n", "7 1 0 \n", "8 1 0 \n", "9 1 0 \n", "10 1 0 \n", "11 1 0 \n", "12 1 0 \n", "13 1 0 \n", "14 1 0 \n", "15 1 0 \n", "16 1 0 \n", "17 1 0 \n", "18 1 0 \n", "19 1 0 \n", "20 1 0 \n", "21 1 0 \n", "22 1 0 \n", "23 1 0 \n", "24 1 0 \n", "25 1 0 \n", "26 1 0 \n", "27 1 0 \n", "28 1 0 \n", "29 1 0 \n", "... ... ... 
\n", "2270 0 1 \n", "2271 0 1 \n", "2272 0 1 \n", "2273 0 1 \n", "2274 0 1 \n", "2275 0 1 \n", "2276 0 1 \n", "2277 0 1 \n", "2278 0 1 \n", "2279 0 1 \n", "2280 0 1 \n", "2281 0 1 \n", "2282 0 1 \n", "2283 0 1 \n", "2284 0 1 \n", "2285 0 1 \n", "2286 0 1 \n", "2287 0 1 \n", "2288 0 1 \n", "2289 0 1 \n", "2290 0 1 \n", "2291 0 1 \n", "2292 0 1 \n", "2293 0 1 \n", "2294 0 1 \n", "2295 0 1 \n", "2296 0 1 \n", "2297 0 1 \n", "2298 0 1 \n", "2299 0 1 \n", "\n", "[2300 rows x 4 columns]" ] }, "execution_count": 115, "metadata": {}, "output_type": "execute_result" } ], "source": [ "df" ] }, { "cell_type": "code", "execution_count": 120, "metadata": {}, "outputs": [], "source": [ "train_path = \"meme-classification/data/uploaded_tamil_memes\"\n", "dataloader = ImageDataLoaders.from_df(df, train_path, item_tfms=Resize(128, 128), bs=_batch_size, batch_tfms= tfms, valid_pct=0, shuffle_train=False, label_col=3)\n" ] }, { "cell_type": "code", "execution_count": 124, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "batch no. 
: 0\n", "TensorImage([[[[-2.1179, -2.1179, -2.1179, ..., -2.1179, -2.1179, -2.1179],\n", " [-2.1179, -2.1179, -2.1179, ..., -2.1179, -2.1179, -2.1179],\n", " [-2.1179, -2.1179, -2.1179, ..., -2.1179, -2.1179, -2.1179],\n", " ...,\n", " [-2.1179, -2.1179, -1.4713, ..., -0.2718, -0.3900, -0.5826],\n", " [-2.1179, -2.1179, -2.1179, ..., -0.2002, -0.3137, -0.4954],\n", " [-2.1179, -2.1179, -2.1179, ..., -0.1916, -0.2944, -0.4632]],\n", "\n", " [[-2.0357, -2.0357, -2.0357, ..., -2.0357, -2.0357, -2.0357],\n", " [-2.0357, -2.0357, -2.0357, ..., -2.0357, -2.0357, -2.0357],\n", " [-2.0357, -2.0357, -2.0357, ..., -2.0357, -2.0357, -2.0357],\n", " ...,\n", " [-2.0357, -2.0357, -1.5155, ..., -0.7734, -0.8655, -1.0067],\n", " [-2.0357, -2.0357, -2.0357, ..., -0.7492, -0.8471, -0.9563],\n", " [-2.0357, -2.0357, -2.0357, ..., -0.6971, -0.8084, -0.9129]],\n", "\n", " [[-1.8044, -1.8044, -1.8044, ..., -1.8044, -1.8044, -1.8044],\n", " [-1.8044, -1.8044, -1.8044, ..., -1.8044, -1.8044, -1.8044],\n", " [-1.8044, -1.8044, -1.8044, ..., -1.8044, -1.8044, -1.8044],\n", " ...,\n", " [-1.8044, -1.8044, -1.3299, ..., -0.7420, -0.8014, -0.8916],\n", " [-1.8044, -1.8044, -1.8044, ..., -0.7024, -0.7792, -0.8509],\n", " [-1.8044, -1.8044, -1.8044, ..., -0.6559, -0.7538, -0.8338]]],\n", "\n", "\n", " [[[-2.1179, -2.1179, -2.1179, ..., 2.1623, 2.1502, 2.1392],\n", " [-2.1179, -2.1179, -2.1179, ..., 2.1627, 2.1304, 2.0733],\n", " [-2.1179, -2.1179, -2.1179, ..., 2.1697, 2.1112, 2.0145],\n", " ...,\n", " [-2.1179, -2.1179, -2.1179, ..., -2.1179, -2.1179, -2.1179],\n", " [-2.1179, -2.1179, -2.1179, ..., -2.1179, -2.1179, -2.1179],\n", " [-2.1179, -2.1179, -2.1179, ..., -2.1179, -2.1179, -2.1179]],\n", "\n", " [[-2.0357, -2.0357, -2.0357, ..., 2.0833, 1.9432, 1.7517],\n", " [-2.0357, -2.0357, -2.0357, ..., 2.1327, 1.9468, 1.6620],\n", " [-2.0357, -2.0357, -2.0357, ..., 2.2334, 2.0195, 1.6870],\n", " ...,\n", " [-2.0357, -2.0357, -2.0357, ..., -2.0357, -2.0357, -2.0357],\n", " [-2.0357, -2.0357, 
-2.0357, ..., -2.0357, -2.0357, -2.0357],\n", " [-2.0357, -2.0357, -2.0357, ..., -2.0357, -2.0357, -2.0357]],\n", "\n", " [[-1.8044, -1.8044, -1.8044, ..., 1.3699, 1.1031, 0.8827],\n", " [-1.8044, -1.8044, -1.8044, ..., 1.5882, 1.3092, 0.9734],\n", " [-1.8044, -1.8044, -1.8044, ..., 1.8552, 1.6008, 1.2346],\n", " ...,\n", " [-1.8044, -1.8044, -1.8044, ..., -1.8044, -1.8044, -1.8044],\n", " [-1.8044, -1.8044, -1.8044, ..., -1.8044, -1.8044, -1.8044],\n", " [-1.8044, -1.8044, -1.8044, ..., -1.8044, -1.8044, -1.8044]]],\n", "\n", "\n", " [[[-2.1179, -2.1179, -2.1179, ..., -2.1179, -2.1179, -2.1179],\n", " [-2.1179, -2.1179, -2.1179, ..., -2.1179, -2.1179, -2.1179],\n", " [-2.1179, -2.1179, -2.1179, ..., -2.1179, -2.1179, -2.1179],\n", " ...,\n", " [ 0.9429, 0.8234, 0.8648, ..., -0.2800, -0.2599, -0.2424],\n", " [ 0.9576, 0.7924, 0.8475, ..., -0.3058, -0.2806, -0.2543],\n", " [ 0.9598, 0.7771, 0.8419, ..., -0.3434, -0.3249, -0.3128]],\n", "\n", " [[-2.0357, -2.0357, -2.0357, ..., -2.0357, -2.0357, -2.0357],\n", " [-2.0357, -2.0357, -2.0357, ..., -2.0357, -2.0357, -2.0357],\n", " [-2.0357, -2.0357, -2.0357, ..., -2.0357, -2.0357, -2.0357],\n", " ...,\n", " [ 0.9595, 0.8254, 0.8874, ..., -0.9777, -0.9435, -0.9115],\n", " [ 0.9370, 0.7631, 0.8346, ..., -1.0228, -0.9849, -0.9452],\n", " [ 0.9003, 0.7242, 0.7937, ..., -1.0579, -1.0242, -0.9898]],\n", "\n", " [[-1.8044, -1.8044, -1.8044, ..., -1.8044, -1.8044, -1.8044],\n", " [-1.8044, -1.8044, -1.8044, ..., -1.8044, -1.8044, -1.8044],\n", " [-1.8044, -1.8044, -1.8044, ..., -1.8044, -1.8044, -1.8044],\n", " ...,\n", " [ 0.1140, 0.0063, 0.0620, ..., -1.3183, -1.3055, -1.2888],\n", " [ 0.1049, -0.0428, 0.0188, ..., -1.3529, -1.3417, -1.3243],\n", " [ 0.0832, -0.0699, -0.0069, ..., -1.3593, -1.3534, -1.3466]]],\n", "\n", "\n", " [[[ 0.2006, 0.4443, 0.5806, ..., -2.1179, -2.1179, -2.1179],\n", " [-1.6271, -1.5950, -1.5828, ..., -2.1179, -2.1179, -2.1179],\n", " [-2.0878, -2.0885, -2.0869, ..., -2.1179, -2.1179, -2.1179],\n", " 
...,\n", " [-2.1179, -2.1179, -2.1179, ..., -2.1179, -2.1179, -2.1179],\n", " [-2.1179, -2.1179, -2.1179, ..., -2.1179, -2.1179, -2.1179],\n", " [-2.1179, -2.1179, -2.1179, ..., -2.1179, -2.1179, -2.1179]],\n", "\n", " [[ 0.3771, 0.6181, 0.7485, ..., -2.0357, -2.0357, -2.0357],\n", " [-1.5385, -1.5076, -1.4970, ..., -2.0357, -2.0357, -2.0357],\n", " [-2.0111, -2.0117, -2.0123, ..., -2.0357, -2.0357, -2.0357],\n", " ...,\n", " [-2.0357, -2.0357, -2.0357, ..., -2.0357, -2.0357, -2.0357],\n", " [-2.0357, -2.0357, -2.0357, ..., -2.0357, -2.0357, -2.0357],\n", " [-2.0357, -2.0357, -2.0357, ..., -2.0357, -2.0357, -2.0357]],\n", "\n", " [[-1.7577, -1.7576, -1.7576, ..., -1.8044, -1.8044, -1.8044],\n", " [-1.7576, -1.7576, -1.7576, ..., -1.8044, -1.8044, -1.8044],\n", " [-1.7766, -1.7769, -1.7772, ..., -1.8044, -1.8044, -1.8044],\n", " ...,\n", " [-1.8044, -1.8044, -1.8044, ..., -1.8044, -1.8044, -1.8044],\n", " [-1.8044, -1.8044, -1.8044, ..., -1.8044, -1.8044, -1.8044],\n", " [-1.8044, -1.8044, -1.8044, ..., -1.8044, -1.8044, -1.8044]]]],\n", " device='cuda:0')\n", "TensorCategory([0, 1, 0, 0], device='cuda:0')\n" ] } ], "source": [ "for idx, data in enumerate(dataloader.train.new(shuffle=False)):\n", " # get the inputs\n", " text_model.eval()\n", " print(\"batch no. : \", idx)\n", " inputs, labels = data\n", " print(inputs)\n", " print(labels)\n", " break" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.8.3" } }, "nbformat": 4, "nbformat_minor": 4 }