diff --git a/README.md b/README.md
index 3f45d35..ee2d55f 100644
--- a/README.md
+++ b/README.md
@@ -1,8 +1,6 @@
 # PyTorch Sentiment Analysis
 
-## Note: This repo only works with torchtext 0.9 or above which requires PyTorch 1.8 or above. If you are using torchtext 0.8 then please use [this](https://github.com/bentrevett/pytorch-sentiment-analysis/tree/torchtext08) branch
-
-This repo contains tutorials covering how to do sentiment analysis using [PyTorch](https://github.com/pytorch/pytorch) 1.8 and [torchtext](https://github.com/pytorch/text) 0.9 using Python 3.7.
+This repo contains tutorials covering how to do sentiment analysis using [PyTorch](https://github.com/pytorch/pytorch) 1.7 and [TorchText](https://github.com/pytorch/text) 0.8 with Python 3.7.
 
 The first 2 tutorials will cover getting started with the de facto approach to sentiment analysis: recurrent neural networks (RNNs). The third notebook covers the [FastText](https://arxiv.org/abs/1607.01759) model and the final covers a [convolutional neural network](https://arxiv.org/abs/1408.5882) (CNN) model.
 
diff --git a/experimental/1_nbow.ipynb b/experimental/1_nbow.ipynb
deleted file mode 100644
index 791d259..0000000
--- a/experimental/1_nbow.ipynb
+++ /dev/null
@@ -1,1523 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 228
-    },
-    "colab_type": "code",
-    "id": "-V90fMxJdFl7",
-    "outputId": "2bbc3f28-84e3-47bd-97a2-ea0c2f0cf395"
-   },
-   "outputs": [],
-   "source": [
-    "import torch\n",
-    "import torch.nn as nn\n",
-    "import torch.optim as optim\n",
-    "\n",
-    "import torchtext\n",
-    "import torchtext.experimental\n",
-    "import torchtext.experimental.vectors\n",
-    "from torchtext.experimental.datasets.raw.text_classification import RawTextIterableDataset\n",
-    "from torchtext.experimental.datasets.text_classification import TextClassificationDataset\n",
-    "from torchtext.experimental.functional import sequential_transforms, vocab_func, totensor\n",
-    "\n",
-    "import collections\n",
-    "import random\n",
-    "import time"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "tOO7b-Z1dFmA"
-   },
-   "outputs": [],
-   "source": [
-    "seed = 1234\n",
-    "\n",
-    "torch.manual_seed(seed)\n",
-    "random.seed(seed)\n",
-    "torch.backends.cudnn.deterministic = True\n",
-    "torch.backends.cudnn.benchmark = False"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "FhBXG95YdFmD"
-   },
-   "outputs": [],
-   "source": [
-    "raw_train_data, raw_test_data = torchtext.experimental.datasets.raw.IMDB()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "raw_train_data = list(raw_train_data)\n",
-    "raw_test_data = list(raw_test_data)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "('neg',\n",
-       " 'I rented I AM CURIOUS-YELLOW from my video store because of all the controversy that surrounded it when it was first released in 1967. I also heard that at first it was seized by U.S. customs if it ever tried to enter this country, therefore being a fan of films considered \"controversial\" I really had to see this for myself.<br /><br />The plot is centered around a young Swedish drama student named Lena who wants to learn everything she can about life. In particular she wants to focus her attentions to making some sort of documentary on what the average Swede thought about certain political issues such as the Vietnam War and race issues in the United States. In between asking politicians and ordinary denizens of Stockholm about their opinions on politics, she has sex with her drama teacher, classmates, and married men.<br /><br />What kills me about I AM CURIOUS-YELLOW is that 40 years ago, this was considered pornographic. Really, the sex and nudity scenes are few and far between, even then it\\'s not shot like some cheaply made porno. While my countrymen mind find it shocking, in reality sex and nudity are a major staple in Swedish cinema. Even Ingmar Bergman, arguably their answer to good old boy John Ford, had sex scenes in his films.<br /><br />I do commend the filmmakers for the fact that any sex shown in the film is shown for artistic purposes rather than just to shock people and make money to be shown in pornographic theaters in America. I AM CURIOUS-YELLOW is a good film for anyone wanting to study the meat and potatoes (no pun intended) of Swedish cinema. But really, this film doesn\\'t have much of a plot.')"
-      ]
-     },
-     "execution_count": 5,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "raw_train_data[0]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "('neg',\n",
-       " 'I love sci-fi and am willing to put up with a lot. Sci-fi movies/TV are usually underfunded, under-appreciated and misunderstood. I tried to like this, I really did, but it is to good TV sci-fi as Babylon 5 is to Star Trek (the original). Silly prosthetics, cheap cardboard sets, stilted dialogues, CG that doesn\\'t match the background, and painfully one-dimensional characters cannot be overcome with a \\'sci-fi\\' setting. (I\\'m sure there are those of you out there who think Babylon 5 is good sci-fi TV. It\\'s not. It\\'s clichéd and uninspiring.) While US viewers might like emotion and character development, sci-fi is a genre that does not take itself seriously (cf. Star Trek). It may treat important issues, yet not as a serious philosophy. It\\'s really difficult to care about the characters here as they are not simply foolish, just missing a spark of life. Their actions and reactions are wooden and predictable, often painful to watch. The makers of Earth KNOW it\\'s rubbish as they have to always say \"Gene Roddenberry\\'s Earth...\" otherwise people would not continue watching. Roddenberry\\'s ashes must be turning in their orbit as this dull, cheap, poorly edited (watching it without advert breaks really brings this home) trudging Trabant of a show lumbers into space. Spoiler. So, kill off a main character. And then bring him back as another actor. Jeeez! Dallas all over again.')"
-      ]
-     },
-     "execution_count": 6,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "raw_test_data[0]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Number of training examples: 25,000\n",
-      "Number of testing examples: 25,000\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(f'Number of training examples: {len(raw_train_data):,}')\n",
-    "print(f'Number of testing examples: {len(raw_test_data):,}')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 8,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "rOTczrIEdFmY"
-   },
-   "outputs": [],
-   "source": [
-    "def get_train_valid_split(raw_train_data, split_ratio = 0.7):\n",
-    "        \n",
-    "    random.shuffle(raw_train_data)\n",
-    "        \n",
-    "    n_train_examples = int(len(raw_train_data) * split_ratio)\n",
-    "        \n",
-    "    train_data = raw_train_data[:n_train_examples]\n",
-    "    valid_data = raw_train_data[n_train_examples:]\n",
-    "    \n",
-    "    return train_data, valid_data"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "C6Tp4CyQdFma"
-   },
-   "outputs": [],
-   "source": [
-    "raw_train_data, raw_valid_data = get_train_valid_split(raw_train_data)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 10,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Number of training examples: 17,500\n",
-      "Number of validation examples: 7,500\n",
-      "Number of testing examples: 25,000\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(f'Number of training examples: {len(raw_train_data):,}')\n",
-    "print(f'Number of validation examples: {len(raw_valid_data):,}')\n",
-    "print(f'Number of testing examples: {len(raw_test_data):,}')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 11,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "LTJjCocRdFmh"
-   },
-   "outputs": [],
-   "source": [
-    "class Tokenizer:\n",
-    "    def __init__(self, tokenize_fn = 'basic_english', lower = True, max_length = None):\n",
-    "        \n",
-    "        self.tokenize_fn = torchtext.data.utils.get_tokenizer(tokenize_fn)\n",
-    "        self.lower = lower\n",
-    "        self.max_length = max_length\n",
-    "        \n",
-    "    def tokenize(self, s):\n",
-    "        \n",
-    "        tokens = self.tokenize_fn(s)\n",
-    "        \n",
-    "        if self.lower:\n",
-    "            tokens = [token.lower() for token in tokens]\n",
-    "            \n",
-    "        if self.max_length is not None:\n",
-    "            tokens = tokens[:self.max_length]\n",
-    "            \n",
-    "        return tokens"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 12,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "5P2KumuDdFmj"
-   },
-   "outputs": [],
-   "source": [
-    "max_length = 250\n",
-    "\n",
-    "tokenizer = Tokenizer(max_length = max_length)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 13,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "V1albCvadFmm",
-    "outputId": "5c7c30f2-c6b7-4098-990d-7bfcdc2446f1"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "['this', 'film', 'is', 'terrible', '.', 'i', 'hate', 'it', 'and', 'it', \"'\", 's', 'bad', '!']\n"
-     ]
-    }
-   ],
-   "source": [
-    "s = \"this film is terrible. i hate it and it's bad!\"\n",
-    "\n",
-    "print(tokenizer.tokenize(s))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 14,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "anC7_ViodFmp"
-   },
-   "outputs": [],
-   "source": [
-    "def build_vocab_from_data(raw_data, tokenizer, **vocab_kwargs):\n",
-    "        \n",
-    "    token_freqs = collections.Counter()\n",
-    "    \n",
-    "    for label, text in raw_data:\n",
-    "        tokens = tokenizer.tokenize(text)\n",
-    "        token_freqs.update(tokens)\n",
-    "                \n",
-    "    vocab = torchtext.vocab.Vocab(token_freqs, **vocab_kwargs)\n",
-    "    \n",
-    "    return vocab"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 15,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "rgHPS1xzdFmt"
-   },
-   "outputs": [],
-   "source": [
-    "max_size = 25_000\n",
-    "\n",
-    "vocab = build_vocab_from_data(raw_train_data, tokenizer, max_size = max_size)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 16,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Unique tokens in vocab: 25,002\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(f'Unique tokens in vocab: {len(vocab):,}')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 17,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 364
-    },
-    "colab_type": "code",
-    "id": "PsRQLrlddFmw",
-    "outputId": "5357c17c-b0ba-429d-b675-aa3fd9c39b72"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "[('the', 165322),\n",
-       " ('.', 164239),\n",
-       " (',', 133647),\n",
-       " ('a', 81952),\n",
-       " ('and', 80334),\n",
-       " ('of', 71820),\n",
-       " ('to', 65662),\n",
-       " (\"'\", 64249),\n",
-       " ('is', 53598),\n",
-       " ('it', 49589),\n",
-       " ('i', 48810),\n",
-       " ('in', 45611),\n",
-       " ('this', 40868),\n",
-       " ('that', 35609),\n",
-       " ('s', 29273),\n",
-       " ('was', 26159),\n",
-       " ('movie', 24543),\n",
-       " ('as', 22276),\n",
-       " ('with', 21494),\n",
-       " ('for', 21332)]"
-      ]
-     },
-     "execution_count": 17,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "vocab.freqs.most_common(20)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 18,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "FGS5tZKmdFmy",
-    "outputId": "5304c151-6696-4d2e-bd4e-ac9cfb2e3f23"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "['<unk>', '<pad>', 'the', '.', ',', 'a', 'and', 'of', 'to', \"'\"]"
-      ]
-     },
-     "execution_count": 18,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "vocab.itos[:10]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 19,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "izsoXluedFm3",
-    "outputId": "1ab77cea-612b-4d86-cca3-5273f0964fbe"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "2"
-      ]
-     },
-     "execution_count": 19,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "vocab.stoi['the']"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 20,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "xiW0GItTdFm6"
-   },
-   "outputs": [],
-   "source": [
-    "def raw_data_to_dataset(raw_data, tokenizer, vocab):\n",
-    "        \n",
-    "    text_transform = sequential_transforms(tokenizer.tokenize,\n",
-    "                                           vocab_func(vocab),\n",
-    "                                           totensor(dtype=torch.long))\n",
-    "    \n",
-    "    label_transform = sequential_transforms(lambda x: 1 if x == 'pos' else 0, \n",
-    "                                            totensor(dtype=torch.long))\n",
-    "\n",
-    "    transforms = (label_transform, text_transform)\n",
-    "\n",
-    "    dataset = TextClassificationDataset(raw_data,\n",
-    "                                        vocab,\n",
-    "                                        transforms)\n",
-    "    \n",
-    "    return dataset"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 21,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "LCslagnudFm8"
-   },
-   "outputs": [],
-   "source": [
-    "train_data = raw_data_to_dataset(raw_train_data, tokenizer, vocab)\n",
-    "valid_data = raw_data_to_dataset(raw_valid_data, tokenizer, vocab)\n",
-    "test_data = raw_data_to_dataset(raw_test_data, tokenizer, vocab)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 22,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Number of training examples: 17,500\n",
-      "Number of validation examples: 7,500\n",
-      "Number of testing examples: 25,000\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(f'Number of training examples: {len(train_data):,}')\n",
-    "print(f'Number of validation examples: {len(valid_data):,}')\n",
-    "print(f'Number of testing examples: {len(test_data):,}')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 23,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 416
-    },
-    "colab_type": "code",
-    "id": "FDsGUUeydFm_",
-    "outputId": "848655ba-b5b2-4307-ca5b-a827200fdef2"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "tensor([   12,   121,  1013,     6,   219,  1855,     8,   276,    70,    20,\n",
-      "            5,   177,     3,  1013,     0,    30,   541,     0,     4, 15259,\n",
-      "            6,  7022,     3,    12,   751,     8,    45,    14,     4,    12,\n",
-      "           69,   123,     4,    22,    11,    10,     8,    56,   241,  1013,\n",
-      "           19, 12534,   563,    10,     8,   338,  1803,    25,     2,   196,\n",
-      "           24,     3,   717,     0,     4,   745,  3428,   686,     4,  4315,\n",
-      "         3437,     4,  4258,    15,   170,     9,    28,  1209,     2,   951,\n",
-      "            4,     6,  2005,  5083,   113,   544,    35,  2957,    20,     5,\n",
-      "            9,  1013,     9,   925,     3,    25,    12,     9,   145,   255,\n",
-      "           46,    30,   160,     7,    26,    54,    46,    42,   107, 12534,\n",
-      "          563,    10,    56,  1013,   241,     3,    11,     9,    16,    29,\n",
-      "            3,    11,     9,    16,  2966,     6,  8018,     3,    24,   143,\n",
-      "          199,   773,   249,    45,  1364,     6,   120,   893,     4,  1013,\n",
-      "           10,     5,   516,    15,   135,    29,   205,   437,   599,    25,\n",
-      "        24229,     3,   338,  1803,    24,     3,    11,   222,  1655,   734,\n",
-      "         1296,     4,   265,    29,    19,     5,   618,  4793,     3,    11,\n",
-      "            9,    16,    69,   866,     8,   474,    47,     2,   113,   138,\n",
-      "           19,    39,    30,    29,   343,  6136,     4,    48,   984,     5,\n",
-      "         5212,     7,   122,     3,    77,  1894,     6,  3550,    30,  1650,\n",
-      "            6,   634,     4,   403,  1266,     8,   110,     3,     2,  1332,\n",
-      "            7,   649,   130,    11,     9,    16,  1834,    19,    39,    31,\n",
-      "            8,   215,   134,  1965, 13961,     9,    16,   649,     3,     3,\n",
-      "            3,   910,    81,    68,    29,  1677,   142,     3, 13961,     9,\n",
-      "           16, 13264,   208,    35,  1685,    13,    77, 13826,    19,    14,\n",
-      "          696,     4,   745,     4,   793,  2192,    25,   142,    11,   211])\n"
-     ]
-    }
-   ],
-   "source": [
-    "label, indexes = test_data[0]\n",
-    "\n",
-    "print(indexes)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 24,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 55
-    },
-    "colab_type": "code",
-    "id": "nXOay2JUdFnB",
-    "outputId": "148242f9-c657-46be-e71d-c7503f662fc9"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "['i', 'love', 'sci-fi', 'and', 'am', 'willing', 'to', 'put', 'up', 'with', 'a', 'lot', '.', 'sci-fi', '<unk>', 'are', 'usually', '<unk>', ',', 'under-appreciated', 'and', 'misunderstood', '.', 'i', 'tried', 'to', 'like', 'this', ',', 'i', 'really', 'did', ',', 'but', 'it', 'is', 'to', 'good', 'tv', 'sci-fi', 'as', 'babylon', '5', 'is', 'to', 'star', 'trek', '(', 'the', 'original', ')', '.', 'silly', '<unk>', ',', 'cheap', 'cardboard', 'sets', ',', 'stilted', 'dialogues', ',', 'cg', 'that', 'doesn', \"'\", 't', 'match', 'the', 'background', ',', 'and', 'painfully', 'one-dimensional', 'characters', 'cannot', 'be', 'overcome', 'with', 'a', \"'\", 'sci-fi', \"'\", 'setting', '.', '(', 'i', \"'\", 'm', 'sure', 'there', 'are', 'those', 'of', 'you', 'out', 'there', 'who', 'think', 'babylon', '5', 'is', 'good', 'sci-fi', 'tv', '.', 'it', \"'\", 's', 'not', '.', 'it', \"'\", 's', 'clichéd', 'and', 'uninspiring', '.', ')', 'while', 'us', 'viewers', 'might', 'like', 'emotion', 'and', 'character', 'development', ',', 'sci-fi', 'is', 'a', 'genre', 'that', 'does', 'not', 'take', 'itself', 'seriously', '(', 'cf', '.', 'star', 'trek', ')', '.', 'it', 'may', 'treat', 'important', 'issues', ',', 'yet', 'not', 'as', 'a', 'serious', 'philosophy', '.', 'it', \"'\", 's', 'really', 'difficult', 'to', 'care', 'about', 'the', 'characters', 'here', 'as', 'they', 'are', 'not', 'simply', 'foolish', ',', 'just', 'missing', 'a', 'spark', 'of', 'life', '.', 'their', 'actions', 'and', 'reactions', 'are', 'wooden', 'and', 'predictable', ',', 'often', 'painful', 'to', 'watch', '.', 'the', 'makers', 'of', 'earth', 'know', 'it', \"'\", 's', 'rubbish', 'as', 'they', 'have', 'to', 'always', 'say', 'gene', 'roddenberry', \"'\", 's', 'earth', '.', '.', '.', 'otherwise', 'people', 'would', 'not', 'continue', 'watching', '.', 'roddenberry', \"'\", 's', 'ashes', 'must', 'be', 'turning', 'in', 'their', 'orbit', 'as', 'this', 'dull', ',', 'cheap', ',', 'poorly', 'edited', '(', 'watching', 'it', 'without']\n"
-     ]
-    }
-   ],
-   "source": [
-    "print([vocab.itos[i] for i in indexes])"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 25,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "egzlLweTdFnH"
-   },
-   "outputs": [],
-   "source": [
-    "class Collator:\n",
-    "    def __init__(self, pad_idx):\n",
-    "        \n",
-    "        self.pad_idx = pad_idx\n",
-    "        \n",
-    "    def collate(self, batch):\n",
-    "        \n",
-    "        labels, text = zip(*batch)\n",
-    "        \n",
-    "        labels = torch.LongTensor(labels)\n",
-    "        \n",
-    "        text = nn.utils.rnn.pad_sequence(text, padding_value = self.pad_idx)\n",
-    "        \n",
-    "        return labels, text"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 26,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "TYLvjhoSdFnM"
-   },
-   "outputs": [],
-   "source": [
-    "pad_token = '<pad>'\n",
-    "pad_idx = vocab[pad_token]\n",
-    "\n",
-    "collator = Collator(pad_idx)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 27,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "7Ly4l1I8dFnR"
-   },
-   "outputs": [],
-   "source": [
-    "batch_size = 256\n",
-    "\n",
-    "train_iterator = torch.utils.data.DataLoader(train_data, \n",
-    "                                             batch_size, \n",
-    "                                             shuffle = True, \n",
-    "                                             collate_fn = collator.collate)\n",
-    "\n",
-    "valid_iterator = torch.utils.data.DataLoader(valid_data, \n",
-    "                                             batch_size, \n",
-    "                                             shuffle = False, \n",
-    "                                             collate_fn = collator.collate)\n",
-    "\n",
-    "test_iterator = torch.utils.data.DataLoader(test_data, \n",
-    "                                            batch_size, \n",
-    "                                            shuffle = False, \n",
-    "                                            collate_fn = collator.collate)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 28,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "dbh38jHEdFnV"
-   },
-   "outputs": [],
-   "source": [
-    "class NBOW(nn.Module):\n",
-    "    def __init__(self, input_dim, emb_dim, output_dim, pad_idx):\n",
-    "        super().__init__()\n",
-    "        \n",
-    "        self.embedding = nn.Embedding(input_dim, emb_dim, padding_idx = pad_idx)\n",
-    "        self.fc = nn.Linear(emb_dim, output_dim)\n",
-    "        \n",
-    "    def forward(self, text):\n",
-    "        \n",
-    "        # text = [seq len, batch size]\n",
-    "        \n",
-    "        embedded = self.embedding(text)\n",
-    "        \n",
-    "        # embedded = [seq len, batch size, emb dim]\n",
-    "        \n",
-    "        pooled = embedded.mean(0)\n",
-    "        \n",
-    "        # pooled = [batch size, emb dim]\n",
-    "        \n",
-    "        prediction = self.fc(pooled)\n",
-    "        \n",
-    "        # prediction = [batch size, output dim]\n",
-    "        \n",
-    "        return prediction"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 29,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "Ga1nXhindFnZ"
-   },
-   "outputs": [],
-   "source": [
-    "input_dim = len(vocab)\n",
-    "emb_dim = 100\n",
-    "output_dim = 2\n",
-    "\n",
-    "model = NBOW(input_dim, emb_dim, output_dim, pad_idx)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 30,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "UyIJC0tYdFnc"
-   },
-   "outputs": [],
-   "source": [
-    "def count_parameters(model):\n",
-    "    return sum(p.numel() for p in model.parameters() if p.requires_grad)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 31,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "1sJRLyewdFng",
-    "outputId": "e7e357e1-1cc7-4aa4-ff40-4d749209759d"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "The model has 2,500,402 trainable parameters\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(f'The model has {count_parameters(model):,} trainable parameters')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 32,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "BPsihrZudFnl"
-   },
-   "outputs": [],
-   "source": [
-    "glove = torchtext.experimental.vectors.GloVe(name = '6B',\n",
-    "                                             dim = emb_dim)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 33,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 243
-    },
-    "colab_type": "code",
-    "id": "hUIoXGkpdFno",
-    "outputId": "b58af33d-b40f-4783-b997-8e85a0edc583"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "tensor([-0.0382, -0.2449,  0.7281, -0.3996,  0.0832,  0.0440, -0.3914,  0.3344,\n",
-       "        -0.5755,  0.0875,  0.2879, -0.0673,  0.3091, -0.2638, -0.1323, -0.2076,\n",
-       "         0.3340, -0.3385, -0.3174, -0.4834,  0.1464, -0.3730,  0.3458,  0.0520,\n",
-       "         0.4495, -0.4697,  0.0263, -0.5415, -0.1552, -0.1411, -0.0397,  0.2828,\n",
-       "         0.1439,  0.2346, -0.3102,  0.0862,  0.2040,  0.5262,  0.1716, -0.0824,\n",
-       "        -0.7179, -0.4153,  0.2033, -0.1276,  0.4137,  0.5519,  0.5791, -0.3348,\n",
-       "        -0.3656, -0.5486, -0.0629,  0.2658,  0.3020,  0.9977, -0.8048, -3.0243,\n",
-       "         0.0125, -0.3694,  2.2167,  0.7220, -0.2498,  0.9214,  0.0345,  0.4674,\n",
-       "         1.1079, -0.1936, -0.0746,  0.2335, -0.0521, -0.2204,  0.0572, -0.1581,\n",
-       "        -0.3080, -0.4162,  0.3797,  0.1501, -0.5321, -0.2055, -1.2526,  0.0716,\n",
-       "         0.7056,  0.4974, -0.4206,  0.2615, -1.5380, -0.3022, -0.0734, -0.2831,\n",
-       "         0.3710, -0.2522,  0.0162, -0.0171, -0.3898,  0.8742, -0.7257, -0.5106,\n",
-       "        -0.5203, -0.1459,  0.8278,  0.2706])"
-      ]
-     },
-     "execution_count": 33,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "glove['the']"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 34,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 104
-    },
-    "colab_type": "code",
-    "id": "vz_X14INdFnq",
-    "outputId": "b41c1997-b970-4042-fab9-2d72f07540b0"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
-       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
-       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
-       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
-       "        0., 0., 0., 0.])"
-      ]
-     },
-     "execution_count": 34,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "glove['shoggoth']"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 35,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 104
-    },
-    "colab_type": "code",
-    "id": "iBKvWWCwdFnu",
-    "outputId": "821572aa-2743-4b1e-a03d-afeb5387bd9f"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
-       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
-       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
-       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
-       "        0., 0., 0., 0.])"
-      ]
-     },
-     "execution_count": 35,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "glove['The']"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 36,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "glove_vocab = glove.vectors.get_stoi()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 37,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "True"
-      ]
-     },
-     "execution_count": 37,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "'the' in glove_vocab"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 38,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "False"
-      ]
-     },
-     "execution_count": 38,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "'The' in glove_vocab"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 39,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "4BFftRDMdFnx"
-   },
-   "outputs": [],
-   "source": [
-    "def get_pretrained_embedding(initial_embedding, pretrained_vectors, vocab, unk_token):\n",
-    "    \n",
-    "    pretrained_embedding = torch.FloatTensor(initial_embedding.weight.clone()).detach()    \n",
-    "    pretrained_vocab = pretrained_vectors.vectors.get_stoi()\n",
-    "    \n",
-    "    unk_tokens = []\n",
-    "    \n",
-    "    for idx, token in enumerate(vocab.itos):\n",
-    "        if token in pretrained_vocab:\n",
-    "            pretrained_vector = pretrained_vectors[token]\n",
-    "            pretrained_embedding[idx] = pretrained_vector\n",
-    "        else:\n",
-    "            unk_tokens.append(token)\n",
-    "        \n",
-    "    return pretrained_embedding, unk_tokens"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 40,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "QRToW07JdFnz"
-   },
-   "outputs": [],
-   "source": [
-    "unk_token = '<unk>'\n",
-    "\n",
-    "pretrained_embedding, unk_tokens = get_pretrained_embedding(model.embedding, glove, vocab, unk_token)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 41,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "tensor([[-0.1117, -0.4966,  0.1631,  ...,  1.5903, -0.1947, -0.2415],\n",
-       "        [ 0.0000,  0.0000,  0.0000,  ...,  0.0000,  0.0000,  0.0000],\n",
-       "        [ 0.7289, -0.7336,  1.5624,  ..., -0.5592, -0.4480, -0.6476],\n",
-       "        ...,\n",
-       "        [ 0.0914,  1.5196,  0.4670,  ...,  0.6393, -0.0332,  0.0185],\n",
-       "        [-0.6290,  0.4650, -0.7165,  ..., -1.3171,  2.0381, -2.0497],\n",
-       "        [-1.1222, -0.0240, -1.0878,  ..., -0.4948, -0.3874,  0.0339]])"
-      ]
-     },
-     "execution_count": 41,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "model.embedding.weight.data"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 42,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "tensor([[-0.1117, -0.4966,  0.1631,  ...,  1.5903, -0.1947, -0.2415],\n",
-       "        [ 0.0000,  0.0000,  0.0000,  ...,  0.0000,  0.0000,  0.0000],\n",
-       "        [-0.0382, -0.2449,  0.7281,  ..., -0.1459,  0.8278,  0.2706],\n",
-       "        ...,\n",
-       "        [ 0.4029,  0.1353,  0.6673,  ..., -0.3300,  0.7533, -0.1666],\n",
-       "        [ 0.1226,  0.0419,  0.0746,  ..., -0.0024, -0.2733, -1.0033],\n",
-       "        [-0.1009, -0.1484,  0.3141,  ..., -0.3414, -0.3768,  0.5605]])"
-      ]
-     },
-     "execution_count": 42,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "pretrained_embedding"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 43,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "j36jzQpPdFn3",
-    "outputId": "7ebe041d-b092-498e-ea16-0fce8c20ed33"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "734"
-      ]
-     },
-     "execution_count": 43,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "len(unk_tokens)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 44,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "yzvhgf8tdFn5",
-    "outputId": "8c30dc4a-9a2b-4c11-8c7b-1d2cb3ba0aee"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "['<unk>', '<pad>', '\\x96', '****', 'hadn', 'camera-work', '*1/2', '100%', '*****', '$1']\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(unk_tokens[:10])"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 45,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 139
-    },
-    "colab_type": "code",
-    "id": "AnE6D4MAdFn_",
-    "outputId": "8b3fea1a-9bcb-4fd9-ba78-72baee94f96a"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "tensor([[-0.1117, -0.4966,  0.1631,  ...,  1.5903, -0.1947, -0.2415],\n",
-       "        [ 0.0000,  0.0000,  0.0000,  ...,  0.0000,  0.0000,  0.0000],\n",
-       "        [-0.0382, -0.2449,  0.7281,  ..., -0.1459,  0.8278,  0.2706],\n",
-       "        ...,\n",
-       "        [ 0.4029,  0.1353,  0.6673,  ..., -0.3300,  0.7533, -0.1666],\n",
-       "        [ 0.1226,  0.0419,  0.0746,  ..., -0.0024, -0.2733, -1.0033],\n",
-       "        [-0.1009, -0.1484,  0.3141,  ..., -0.3414, -0.3768,  0.5605]])"
-      ]
-     },
-     "execution_count": 45,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "model.embedding.weight.data.copy_(pretrained_embedding)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 46,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "DJloauERdFoF"
-   },
-   "outputs": [],
-   "source": [
-    "optimizer = optim.Adam(model.parameters())"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 47,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "fPPZ0cs_dFoH"
-   },
-   "outputs": [],
-   "source": [
-    "criterion = nn.CrossEntropyLoss()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 48,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "HGUcFIupdFoK",
-    "outputId": "e5d9b842-689b-49ca-a4f4-08574f0524ee"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Using: cuda\n"
-     ]
-    }
-   ],
-   "source": [
-    "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n",
-    "\n",
-    "print(f'Using: {device}')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 49,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "Ynf7j6kQdFoM"
-   },
-   "outputs": [],
-   "source": [
-    "model = model.to(device)\n",
-    "criterion = criterion.to(device)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 50,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "977iykeOdFoP"
-   },
-   "outputs": [],
-   "source": [
-    "def calculate_accuracy(predictions, labels):\n",
-    "    top_predictions = predictions.argmax(1, keepdim = True)\n",
-    "    correct = top_predictions.eq(labels.view_as(top_predictions)).sum()\n",
-    "    accuracy = correct.float() / labels.shape[0]\n",
-    "    return accuracy"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 51,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "HPNI8DJudFoS"
-   },
-   "outputs": [],
-   "source": [
-    "def train(model, iterator, optimizer, criterion, device):\n",
-    "    \n",
-    "    epoch_loss = 0\n",
-    "    epoch_acc = 0\n",
-    "    \n",
-    "    model.train()\n",
-    "    \n",
-    "    for labels, text in iterator:\n",
-    "        \n",
-    "        labels = labels.to(device)\n",
-    "        text = text.to(device)\n",
-    "        \n",
-    "        optimizer.zero_grad()\n",
-    "        \n",
-    "        predictions = model(text)\n",
-    "        \n",
-    "        loss = criterion(predictions, labels)\n",
-    "        \n",
-    "        acc = calculate_accuracy(predictions, labels)\n",
-    "        \n",
-    "        loss.backward()\n",
-    "        \n",
-    "        optimizer.step()\n",
-    "        \n",
-    "        epoch_loss += loss.item()\n",
-    "        epoch_acc += acc.item()\n",
-    "        \n",
-    "    return epoch_loss / len(iterator), epoch_acc / len(iterator)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 52,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "kp6pV5xKdFoV"
-   },
-   "outputs": [],
-   "source": [
-    "def evaluate(model, iterator, criterion, device):\n",
-    "    \n",
-    "    epoch_loss = 0\n",
-    "    epoch_acc = 0\n",
-    "    \n",
-    "    model.eval()\n",
-    "    \n",
-    "    with torch.no_grad():\n",
-    "    \n",
-    "        for labels, text in iterator:\n",
-    "\n",
-    "            labels = labels.to(device)\n",
-    "            text = text.to(device)\n",
-    "            \n",
-    "            predictions = model(text)\n",
-    "            \n",
-    "            loss = criterion(predictions, labels)\n",
-    "            \n",
-    "            acc = calculate_accuracy(predictions, labels)\n",
-    "\n",
-    "            epoch_loss += loss.item()\n",
-    "            epoch_acc += acc.item()\n",
-    "        \n",
-    "    return epoch_loss / len(iterator), epoch_acc / len(iterator)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 53,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "8YzL45gYdFoX"
-   },
-   "outputs": [],
-   "source": [
-    "def epoch_time(start_time, end_time):\n",
-    "    elapsed_time = end_time - start_time\n",
-    "    elapsed_mins = int(elapsed_time / 60)\n",
-    "    elapsed_secs = int(elapsed_time - (elapsed_mins * 60))\n",
-    "    return elapsed_mins, elapsed_secs"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 54,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 537
-    },
-    "colab_type": "code",
-    "id": "0A8wv7-xdFoa",
-    "outputId": "238f01bf-5438-482a-80ac-75c70cb20ed1"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Epoch: 01 | Epoch Time: 0m 4s\n",
-      "\tTrain Loss: 0.683 | Train Acc: 60.00%\n",
-      "\t Val. Loss: 0.669 |  Val. Acc: 67.02%\n",
-      "Epoch: 02 | Epoch Time: 0m 4s\n",
-      "\tTrain Loss: 0.651 | Train Acc: 68.09%\n",
-      "\t Val. Loss: 0.632 |  Val. Acc: 71.31%\n",
-      "Epoch: 03 | Epoch Time: 0m 4s\n",
-      "\tTrain Loss: 0.603 | Train Acc: 74.06%\n",
-      "\t Val. Loss: 0.582 |  Val. Acc: 74.86%\n",
-      "Epoch: 04 | Epoch Time: 0m 4s\n",
-      "\tTrain Loss: 0.545 | Train Acc: 78.13%\n",
-      "\t Val. Loss: 0.528 |  Val. Acc: 78.88%\n",
-      "Epoch: 05 | Epoch Time: 0m 4s\n",
-      "\tTrain Loss: 0.485 | Train Acc: 82.10%\n",
-      "\t Val. Loss: 0.477 |  Val. Acc: 81.64%\n",
-      "Epoch: 06 | Epoch Time: 0m 4s\n",
-      "\tTrain Loss: 0.430 | Train Acc: 85.15%\n",
-      "\t Val. Loss: 0.437 |  Val. Acc: 83.25%\n",
-      "Epoch: 07 | Epoch Time: 0m 4s\n",
-      "\tTrain Loss: 0.386 | Train Acc: 86.92%\n",
-      "\t Val. Loss: 0.404 |  Val. Acc: 84.59%\n",
-      "Epoch: 08 | Epoch Time: 0m 4s\n",
-      "\tTrain Loss: 0.350 | Train Acc: 88.21%\n",
-      "\t Val. Loss: 0.383 |  Val. Acc: 85.19%\n",
-      "Epoch: 09 | Epoch Time: 0m 4s\n",
-      "\tTrain Loss: 0.319 | Train Acc: 89.36%\n",
-      "\t Val. Loss: 0.363 |  Val. Acc: 85.86%\n",
-      "Epoch: 10 | Epoch Time: 0m 4s\n",
-      "\tTrain Loss: 0.295 | Train Acc: 90.17%\n",
-      "\t Val. Loss: 0.349 |  Val. Acc: 86.27%\n"
-     ]
-    }
-   ],
-   "source": [
-    "n_epochs = 10\n",
-    "\n",
-    "best_valid_loss = float('inf')\n",
-    "\n",
-    "for epoch in range(n_epochs):\n",
-    "\n",
-    "    start_time = time.monotonic()\n",
-    "    \n",
-    "    train_loss, train_acc = train(model, train_iterator, optimizer, criterion, device)\n",
-    "    valid_loss, valid_acc = evaluate(model, valid_iterator, criterion, device)\n",
-    "    \n",
-    "    end_time = time.monotonic()\n",
-    "\n",
-    "    epoch_mins, epoch_secs = epoch_time(start_time, end_time)\n",
-    "    \n",
-    "    if valid_loss < best_valid_loss:\n",
-    "        best_valid_loss = valid_loss\n",
-    "        torch.save(model.state_dict(), 'nbow-model.pt')\n",
-    "    \n",
-    "    print(f'Epoch: {epoch+1:02} | Epoch Time: {epoch_mins}m {epoch_secs}s')\n",
-    "    print(f'\\tTrain Loss: {train_loss:.3f} | Train Acc: {train_acc*100:.2f}%')\n",
-    "    print(f'\\t Val. Loss: {valid_loss:.3f} |  Val. Acc: {valid_acc*100:.2f}%')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 55,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "oMHAuMFNdFoc",
-    "outputId": "58b32f9a-8c39-4818-b526-1a80e435f3ae"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Test Loss: 0.374 | Test Acc: 84.75%\n"
-     ]
-    }
-   ],
-   "source": [
-    "model.load_state_dict(torch.load('nbow-model.pt'))\n",
-    "\n",
-    "test_loss, test_acc = evaluate(model, test_iterator, criterion, device)\n",
-    "\n",
-    "print(f'Test Loss: {test_loss:.3f} | Test Acc: {test_acc*100:.2f}%')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 56,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "sEDiSM3fdFog"
-   },
-   "outputs": [],
-   "source": [
-    "def predict_sentiment(tokenizer, vocab, model, device, sentence):\n",
-    "    model.eval()\n",
-    "    tokens = tokenizer.tokenize(sentence)\n",
-    "    indexes = [vocab.stoi[token] for token in tokens]\n",
-    "    tensor = torch.LongTensor(indexes).unsqueeze(-1).to(device)\n",
-    "    prediction = model(tensor)\n",
-    "    probabilities = nn.functional.softmax(prediction, dim = -1)\n",
-    "    pos_probability = probabilities.squeeze()[-1].item()\n",
-    "    return pos_probability"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 57,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "ycEAWhYIdFoi",
-    "outputId": "8a675641-fd79-46a6-b4e6-0b2006f866cc"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "2.818893153744284e-05"
-      ]
-     },
-     "execution_count": 57,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "sentence = 'the absolute worst movie of all time.'\n",
-    "\n",
-    "predict_sentiment(tokenizer, vocab, model, device, sentence)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 58,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "cuMFqIoJdFok",
-    "outputId": "12c964fc-6788-459c-ad5e-ca0af366b1d4"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "0.9997795224189758"
-      ]
-     },
-     "execution_count": 58,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "sentence = 'one of the greatest films i have ever seen in my life.'\n",
-    "\n",
-    "predict_sentiment(tokenizer, vocab, model, device, sentence)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 59,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "zausUPENdFoo",
-    "outputId": "2bdd06df-dab7-47ea-8952-8bd82d39bac2"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "0.6041761040687561"
-      ]
-     },
-     "execution_count": 59,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "sentence = \"i thought it was going to be one of the greatest films i have ever seen in my life, \\\n",
-    "but it was actually the absolute worst movie of all time.\"\n",
-    "\n",
-    "predict_sentiment(tokenizer, vocab, model, device, sentence)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 60,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "e15vpNJYdFor",
-    "outputId": "eed3ae38-d01a-4476-a235-8fd3582240f3"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "0.6041760444641113"
-      ]
-     },
-     "execution_count": 60,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "sentence = \"i thought it was going to be the absolute worst movie of all time, \\\n",
-    "but it was actually one of the greatest films i have ever seen in my life.\"\n",
-    "\n",
-    "predict_sentiment(tokenizer, vocab, model, device, sentence)"
-   ]
-  }
- ],
- "metadata": {
-  "accelerator": "GPU",
-  "colab": {
-   "machine_shape": "hm",
-   "name": "1_nbow.ipynb",
-   "provenance": []
-  },
-  "kernelspec": {
-   "display_name": "Python 3",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.8.3"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 1
-}
diff --git a/experimental/2_rnn_gru.ipynb b/experimental/2_rnn_gru.ipynb
deleted file mode 100644
index 8084fdb..0000000
--- a/experimental/2_rnn_gru.ipynb
+++ /dev/null
@@ -1,1120 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 228
-    },
-    "colab_type": "code",
-    "id": "lIYdn1woOS1n",
-    "outputId": "05f43a3e-f111-4f96-ee3e-d95027c041c8"
-   },
-   "outputs": [],
-   "source": [
-    "import torch\n",
-    "import torch.nn as nn\n",
-    "import torch.optim as optim\n",
-    "\n",
-    "import torchtext\n",
-    "import torchtext.experimental\n",
-    "import torchtext.experimental.vectors\n",
-    "from torchtext.experimental.datasets.raw.text_classification import RawTextIterableDataset\n",
-    "from torchtext.experimental.datasets.text_classification import TextClassificationDataset\n",
-    "from torchtext.experimental.functional import sequential_transforms, vocab_func, totensor\n",
-    "\n",
-    "import collections\n",
-    "import random\n",
-    "import time"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "kjHAEB8BKbEY"
-   },
-   "outputs": [],
-   "source": [
-    "seed = 1234\n",
-    "\n",
-    "torch.manual_seed(seed)\n",
-    "random.seed(seed)\n",
-    "torch.backends.cudnn.deterministic = True\n",
-    "torch.backends.cudnn.benchmark = False"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "HRkCva2fJ_kr"
-   },
-   "outputs": [],
-   "source": [
-    "raw_train_data, raw_test_data = torchtext.experimental.datasets.raw.IMDB()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "RkgVHXXSKAyU"
-   },
-   "outputs": [],
-   "source": [
-    "def get_train_valid_split(raw_train_data, split_ratio = 0.7):\n",
-    "\n",
-    "    raw_train_data = list(raw_train_data)\n",
-    "        \n",
-    "    random.shuffle(raw_train_data)\n",
-    "        \n",
-    "    n_train_examples = int(len(raw_train_data) * split_ratio)\n",
-    "        \n",
-    "    train_data = raw_train_data[:n_train_examples]\n",
-    "    valid_data = raw_train_data[n_train_examples:]\n",
-    "    \n",
-    "    train_data = RawTextIterableDataset(train_data)\n",
-    "    valid_data = RawTextIterableDataset(valid_data)\n",
-    "    \n",
-    "    return train_data, valid_data"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "T5fGSB1OKC77"
-   },
-   "outputs": [],
-   "source": [
-    "raw_train_data, raw_valid_data = get_train_valid_split(raw_train_data)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "zvcEouXQLmHz"
-   },
-   "outputs": [],
-   "source": [
-    "class Tokenizer:\n",
-    "    def __init__(self, tokenize_fn = 'basic_english', lower = True, max_length = None):\n",
-    "        \n",
-    "        self.tokenize_fn = torchtext.data.utils.get_tokenizer(tokenize_fn)\n",
-    "        self.lower = lower\n",
-    "        self.max_length = max_length\n",
-    "        \n",
-    "    def tokenize(self, s):\n",
-    "        \n",
-    "        tokens = self.tokenize_fn(s)\n",
-    "        \n",
-    "        if self.lower:\n",
-    "            tokens = [token.lower() for token in tokens]\n",
-    "            \n",
-    "        if self.max_length is not None:\n",
-    "            tokens = tokens[:self.max_length]\n",
-    "            \n",
-    "        return tokens"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "dnpijQRFLnXV"
-   },
-   "outputs": [],
-   "source": [
-    "max_length = 500\n",
-    "\n",
-    "tokenizer = Tokenizer(max_length = max_length)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 8,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "VOl6UxZoLdg_"
-   },
-   "outputs": [],
-   "source": [
-    "def build_vocab_from_data(raw_data, tokenizer, **vocab_kwargs):\n",
-    "    \n",
-    "    token_freqs = collections.Counter()\n",
-    "    \n",
-    "    for label, text in raw_data:\n",
-    "        tokens = tokenizer.tokenize(text)\n",
-    "        token_freqs.update(tokens)\n",
-    "                \n",
-    "    vocab = torchtext.vocab.Vocab(token_freqs, **vocab_kwargs)\n",
-    "    \n",
-    "    return vocab"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "eNLrpvt2Lgsr"
-   },
-   "outputs": [],
-   "source": [
-    "max_size = 25_000\n",
-    "\n",
-    "vocab = build_vocab_from_data(raw_train_data, tokenizer, max_size = max_size)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 10,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "AN1YQiYfLr0_"
-   },
-   "outputs": [],
-   "source": [
-    "def process_raw_data(raw_data, tokenizer, vocab):\n",
-    "    \n",
-    "    raw_data = [(label, text) for (label, text) in raw_data]\n",
-    "\n",
-    "    text_transform = sequential_transforms(tokenizer.tokenize,\n",
-    "                                           vocab_func(vocab),\n",
-    "                                           totensor(dtype=torch.long))\n",
-    "    \n",
-    "    label_transform = sequential_transforms(totensor(dtype=torch.long))\n",
-    "\n",
-    "    transforms = (label_transform, text_transform)\n",
-    "\n",
-    "    dataset = TextClassificationDataset(raw_data,\n",
-    "                                        vocab,\n",
-    "                                        transforms)\n",
-    "    \n",
-    "    return dataset"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 11,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "dlejEwWLMScW"
-   },
-   "outputs": [],
-   "source": [
-    "train_data = process_raw_data(raw_train_data, tokenizer, vocab)\n",
-    "valid_data = process_raw_data(raw_valid_data, tokenizer, vocab)\n",
-    "test_data = process_raw_data(raw_test_data, tokenizer, vocab)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 12,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "hggYldmOQahU"
-   },
-   "outputs": [],
-   "source": [
-    "class Collator:\n",
-    "    def __init__(self, pad_idx):\n",
-    "        \n",
-    "        self.pad_idx = pad_idx\n",
-    "        \n",
-    "    def collate(self, batch):\n",
-    "        \n",
-    "        labels, text = zip(*batch)\n",
-    "        \n",
-    "        labels = torch.LongTensor(labels)\n",
-    "        \n",
-    "        lengths = torch.LongTensor([len(x) for x in text])\n",
-    "\n",
-    "        text = nn.utils.rnn.pad_sequence(text, padding_value = self.pad_idx)\n",
-    "        \n",
-    "        return labels, text, lengths"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 13,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "gw4LBXWAQiEC"
-   },
-   "outputs": [],
-   "source": [
-    "pad_token = '<pad>'\n",
-    "pad_idx = vocab[pad_token]\n",
-    "\n",
-    "collator = Collator(pad_idx)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 14,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "d0dP9wnZQjaU"
-   },
-   "outputs": [],
-   "source": [
-    "batch_size = 256\n",
-    "\n",
-    "train_iterator = torch.utils.data.DataLoader(train_data, \n",
-    "                                             batch_size, \n",
-    "                                             shuffle = True, \n",
-    "                                             collate_fn = collator.collate)\n",
-    "\n",
-    "valid_iterator = torch.utils.data.DataLoader(valid_data, \n",
-    "                                             batch_size, \n",
-    "                                             shuffle = False, \n",
-    "                                             collate_fn = collator.collate)\n",
-    "\n",
-    "test_iterator = torch.utils.data.DataLoader(test_data, \n",
-    "                                            batch_size, \n",
-    "                                            shuffle = False, \n",
-    "                                            collate_fn = collator.collate)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 15,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "class GRU(nn.Module):\n",
-    "    def __init__(self, input_dim, emb_dim, hid_dim, output_dim, pad_idx):\n",
-    "        super().__init__()\n",
-    "\n",
-    "        self.embedding = nn.Embedding(input_dim, emb_dim, padding_idx = pad_idx)\n",
-    "        self.gru = nn.GRUCell(emb_dim, hid_dim)\n",
-    "        self.fc = nn.Linear(hid_dim, output_dim)\n",
-    "\n",
-    "    def forward(self, text, lengths):\n",
-    "\n",
-    "        # text = [seq len, batch size]\n",
-    "        # lengths = [batch size]\n",
-    "\n",
-    "        embedded = self.embedding(text)\n",
-    "\n",
-    "        # embedded = [seq len, batch size, emb dim]\n",
-    "\n",
-    "        seq_len, batch_size, _ = embedded.shape\n",
-    "        hid_dim = self.gru.hidden_size\n",
-    "                \n",
-    "        hidden = torch.zeros(batch_size, hid_dim).to(embedded.device)\n",
-    "        \n",
-    "        for i in range(seq_len):\n",
-    "            x = embedded[i]\n",
-    "            hidden = self.gru(x, hidden)\n",
-    "        \n",
-    "        prediction = self.fc(hidden)\n",
-    "\n",
-    "        # prediction = [batch size, output dim]\n",
-    "\n",
-    "        return prediction"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 16,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "class GRU(nn.Module):\n",
-    "    def __init__(self, input_dim, emb_dim, hid_dim, output_dim, pad_idx):\n",
-    "        super().__init__()\n",
-    "\n",
-    "        self.embedding = nn.Embedding(input_dim, emb_dim, padding_idx = pad_idx)\n",
-    "        self.gru = nn.GRU(emb_dim, hid_dim)\n",
-    "        self.fc = nn.Linear(hid_dim, output_dim)\n",
-    "\n",
-    "    def forward(self, text, lengths):\n",
-    "\n",
-    "        # text = [seq len, batch size]\n",
-    "        # lengths = [batch size]\n",
-    "\n",
-    "        embedded = self.embedding(text)\n",
-    "\n",
-    "        # embedded = [seq len, batch size, emb dim]\n",
-    "\n",
-    "        output, hidden = self.gru(embedded)\n",
-    "\n",
-    "        # output = [seq_len, batch size, n directions * hid dim]\n",
-    "        # hidden = [n layers * n directions, batch size, hid dim]\n",
-    "\n",
-    "        prediction = self.fc(hidden.squeeze(0))\n",
-    "\n",
-    "        # prediction = [batch size, output dim]\n",
-    "\n",
-    "        return prediction "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 17,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "LGQ5JkfBQll0"
-   },
-   "outputs": [],
-   "source": [
-    "class GRU(nn.Module):\n",
-    "    def __init__(self, input_dim, emb_dim, hid_dim, output_dim, pad_idx):\n",
-    "        super().__init__()\n",
-    "\n",
-    "        self.embedding = nn.Embedding(input_dim, emb_dim, padding_idx = pad_idx)\n",
-    "        self.gru = nn.GRU(emb_dim, hid_dim)\n",
-    "        self.fc = nn.Linear(hid_dim, output_dim)\n",
-    "\n",
-    "    def forward(self, text, lengths):\n",
-    "\n",
-    "        # text = [seq len, batch size]\n",
-    "        # lengths = [batch size]\n",
-    "\n",
-    "        embedded = self.embedding(text)\n",
-    "\n",
-    "        # embedded = [seq len, batch size, emb dim]\n",
-    "\n",
-    "        packed_embedded = nn.utils.rnn.pack_padded_sequence(embedded, lengths, enforce_sorted = False)\n",
-    "\n",
-    "        packed_output, hidden = self.gru(packed_embedded)\n",
-    "\n",
-    "        output, _ = nn.utils.rnn.pad_packed_sequence(packed_output)\n",
-    "\n",
-    "        # output = [seq_len, batch size, n directions * hid dim]\n",
-    "        # hidden = [n layers * n directions, batch size, hid dim]\n",
-    "\n",
-    "        prediction = self.fc(hidden.squeeze(0))\n",
-    "\n",
-    "        # prediction = [batch size, output dim]\n",
-    "\n",
-    "        return prediction "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 18,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "mEb-ff-bQtKL"
-   },
-   "outputs": [],
-   "source": [
-    "input_dim = len(vocab)\n",
-    "emb_dim = 100\n",
-    "hid_dim = 256\n",
-    "output_dim = 2\n",
-    "\n",
-    "model = GRU(input_dim, emb_dim, hid_dim, output_dim, pad_idx)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 19,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "WEwnyJT_Tm8q"
-   },
-   "outputs": [],
-   "source": [
-    "def count_parameters(model):\n",
-    "    return sum(p.numel() for p in model.parameters() if p.requires_grad)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 20,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "SJdVErKTTogS",
-    "outputId": "aaf74c2e-2b9f-47df-a672-b809ffffd6e5"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "The model has 2,775,658 trainable parameters\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(f'The model has {count_parameters(model):,} trainable parameters')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 21,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "name: embedding.weight, shape: torch.Size([25002, 100])\n",
-      "name: gru.weight_ih_l0, shape: torch.Size([768, 100])\n",
-      "name: gru.weight_hh_l0, shape: torch.Size([768, 256])\n",
-      "name: gru.bias_ih_l0, shape: torch.Size([768])\n",
-      "name: gru.bias_hh_l0, shape: torch.Size([768])\n",
-      "name: fc.weight, shape: torch.Size([2, 256])\n",
-      "name: fc.bias, shape: torch.Size([2])\n"
-     ]
-    }
-   ],
-   "source": [
-    "for n, p in model.named_parameters():\n",
-    "    print(f'name: {n}, shape: {p.shape}')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 22,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "def initialize_parameters(m):\n",
-    "    if isinstance(m, nn.Embedding):\n",
-    "        nn.init.uniform_(m.weight, -0.05, 0.05)\n",
-    "    elif isinstance(m, nn.GRU):\n",
-    "        for n, p in m.named_parameters():\n",
-    "            if 'weight_ih' in n:\n",
-    "                r, z, n = p.chunk(3)\n",
-    "                nn.init.xavier_uniform_(r)\n",
-    "                nn.init.xavier_uniform_(z)\n",
-    "                nn.init.xavier_uniform_(n)\n",
-    "            elif 'weight_hh' in n:\n",
-    "                r, z, n = p.chunk(3)\n",
-    "                nn.init.orthogonal_(r)\n",
-    "                nn.init.orthogonal_(z)\n",
-    "                nn.init.orthogonal_(n)\n",
-    "            elif 'bias' in n:\n",
-    "                r, z, n = p.chunk(3)\n",
-    "                nn.init.zeros_(r)\n",
-    "                nn.init.zeros_(z)\n",
-    "                nn.init.zeros_(n)\n",
-    "    elif isinstance(m, nn.Linear):\n",
-    "        nn.init.xavier_uniform_(m.weight)\n",
-    "        nn.init.zeros_(m.bias)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 23,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "GRU(\n",
-       "  (embedding): Embedding(25002, 100, padding_idx=1)\n",
-       "  (gru): GRU(100, 256)\n",
-       "  (fc): Linear(in_features=256, out_features=2, bias=True)\n",
-       ")"
-      ]
-     },
-     "execution_count": 23,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "model.apply(initialize_parameters)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 24,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "HE9cEN3XTpf7"
-   },
-   "outputs": [],
-   "source": [
-    "glove = torchtext.experimental.vectors.GloVe(name = '6B',\n",
-    "                                             dim = emb_dim)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 25,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "AyI08bfvTrCV"
-   },
-   "outputs": [],
-   "source": [
-    "def get_pretrained_embedding(initial_embedding, pretrained_vectors, vocab, unk_token):\n",
-    "    \n",
-    "    pretrained_embedding = torch.FloatTensor(initial_embedding.weight.clone()).detach()    \n",
-    "    pretrained_vocab = pretrained_vectors.vectors.get_stoi()\n",
-    "    \n",
-    "    unk_tokens = []\n",
-    "    \n",
-    "    for idx, token in enumerate(vocab.itos):\n",
-    "        if token in pretrained_vocab:\n",
-    "            pretrained_vector = pretrained_vectors[token]\n",
-    "            pretrained_embedding[idx] = pretrained_vector\n",
-    "        else:\n",
-    "            unk_tokens.append(token)\n",
-    "        \n",
-    "    return pretrained_embedding, unk_tokens"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 26,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "GPMcsd6HTtoC"
-   },
-   "outputs": [],
-   "source": [
-    "unk_token = '<unk>'\n",
-    "\n",
-    "pretrained_embedding, unk_tokens = get_pretrained_embedding(model.embedding, glove, vocab, unk_token)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 27,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 139
-    },
-    "colab_type": "code",
-    "id": "LhlnYb2ZTvPr",
-    "outputId": "8d56d0e2-6af1-40fe-ea1e-9ec7a42d8b15"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "tensor([[ 0.0098,  0.0150, -0.0099,  ...,  0.0211, -0.0092,  0.0027],\n",
-       "        [ 0.0347,  0.0276,  0.0468,  ..., -0.0315, -0.0472, -0.0326],\n",
-       "        [-0.0382, -0.2449,  0.7281,  ..., -0.1459,  0.8278,  0.2706],\n",
-       "        ...,\n",
-       "        [-0.2925,  0.1087,  0.7920,  ..., -0.3641,  0.1822, -0.4104],\n",
-       "        [-0.7250,  0.7545,  0.1637,  ..., -0.0144, -0.1761,  0.3418],\n",
-       "        [ 1.1753,  0.0460, -0.3542,  ...,  0.4510,  0.0485, -0.4015]])"
-      ]
-     },
-     "execution_count": 27,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "model.embedding.weight.data.copy_(pretrained_embedding)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 28,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "model.embedding.weight.data[pad_idx] = torch.zeros(emb_dim)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 29,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "tensor([[ 0.0098,  0.0150, -0.0099,  ...,  0.0211, -0.0092,  0.0027],\n",
-       "        [ 0.0000,  0.0000,  0.0000,  ...,  0.0000,  0.0000,  0.0000],\n",
-       "        [-0.0382, -0.2449,  0.7281,  ..., -0.1459,  0.8278,  0.2706],\n",
-       "        ...,\n",
-       "        [-0.2925,  0.1087,  0.7920,  ..., -0.3641,  0.1822, -0.4104],\n",
-       "        [-0.7250,  0.7545,  0.1637,  ..., -0.0144, -0.1761,  0.3418],\n",
-       "        [ 1.1753,  0.0460, -0.3542,  ...,  0.4510,  0.0485, -0.4015]])"
-      ]
-     },
-     "execution_count": 29,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "model.embedding.weight.data"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 30,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "Sji9nWvaTxcp"
-   },
-   "outputs": [],
-   "source": [
-    "optimizer = optim.Adam(model.parameters())"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 31,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "a4Q-afN8Tyqr"
-   },
-   "outputs": [],
-   "source": [
-    "criterion = nn.CrossEntropyLoss()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 32,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "PjZOAABMT0-T"
-   },
-   "outputs": [],
-   "source": [
-    "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 33,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "6cYt2pfoT3TD"
-   },
-   "outputs": [],
-   "source": [
-    "model = model.to(device)\n",
-    "criterion = criterion.to(device)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 34,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "SSdhLxTJT4mn"
-   },
-   "outputs": [],
-   "source": [
-    "def calculate_accuracy(predictions, labels):\n",
-    "    top_predictions = predictions.argmax(1, keepdim = True)\n",
-    "    correct = top_predictions.eq(labels.view_as(top_predictions)).sum()\n",
-    "    accuracy = correct.float() / labels.shape[0]\n",
-    "    return accuracy"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 35,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "EoJT5j-1T54w"
-   },
-   "outputs": [],
-   "source": [
-    "def train(model, iterator, optimizer, criterion, device):\n",
-    "    \n",
-    "    epoch_loss = 0\n",
-    "    epoch_acc = 0\n",
-    "    \n",
-    "    model.train()\n",
-    "    \n",
-    "    for labels, text, lengths in iterator:\n",
-    "        \n",
-    "        labels = labels.to(device)\n",
-    "        text = text.to(device)\n",
-    "\n",
-    "        optimizer.zero_grad()\n",
-    "        \n",
-    "        predictions = model(text, lengths)\n",
-    "        \n",
-    "        loss = criterion(predictions, labels)\n",
-    "        \n",
-    "        acc = calculate_accuracy(predictions, labels)\n",
-    "        \n",
-    "        loss.backward()\n",
-    "        \n",
-    "        optimizer.step()\n",
-    "        \n",
-    "        epoch_loss += loss.item()\n",
-    "        epoch_acc += acc.item()\n",
-    "\n",
-    "    return epoch_loss / len(iterator), epoch_acc / len(iterator)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 36,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "UBh7g1cnUBMG"
-   },
-   "outputs": [],
-   "source": [
-    "def evaluate(model, iterator, criterion, device):\n",
-    "    \n",
-    "    epoch_loss = 0\n",
-    "    epoch_acc = 0\n",
-    "    \n",
-    "    model.eval()\n",
-    "    \n",
-    "    with torch.no_grad():\n",
-    "    \n",
-    "        for labels, text, lengths in iterator:\n",
-    "\n",
-    "            labels = labels.to(device)\n",
-    "            text = text.to(device)\n",
-    "            \n",
-    "            predictions = model(text, lengths)\n",
-    "            \n",
-    "            loss = criterion(predictions, labels)\n",
-    "            \n",
-    "            acc = calculate_accuracy(predictions, labels)\n",
-    "\n",
-    "            epoch_loss += loss.item()\n",
-    "            epoch_acc += acc.item()\n",
-    "        \n",
-    "    return epoch_loss / len(iterator), epoch_acc / len(iterator)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 37,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "jSMtdoeSUDAH"
-   },
-   "outputs": [],
-   "source": [
-    "def epoch_time(start_time, end_time):\n",
-    "    elapsed_time = end_time - start_time\n",
-    "    elapsed_mins = int(elapsed_time / 60)\n",
-    "    elapsed_secs = int(elapsed_time - (elapsed_mins * 60))\n",
-    "    return elapsed_mins, elapsed_secs"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 38,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 537
-    },
-    "colab_type": "code",
-    "id": "lG-dJsjFUF8x",
-    "outputId": "c434d13f-4efa-4a7c-c346-5e886db0405d"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Epoch: 01 | Epoch Time: 0m 7s\n",
-      "\tTrain Loss: 0.654 | Train Acc: 60.73%\n",
-      "\t Val. Loss: 0.584 |  Val. Acc: 68.87%\n",
-      "Epoch: 02 | Epoch Time: 0m 7s\n",
-      "\tTrain Loss: 0.423 | Train Acc: 80.73%\n",
-      "\t Val. Loss: 0.332 |  Val. Acc: 86.04%\n",
-      "Epoch: 03 | Epoch Time: 0m 7s\n",
-      "\tTrain Loss: 0.252 | Train Acc: 90.15%\n",
-      "\t Val. Loss: 0.285 |  Val. Acc: 88.63%\n",
-      "Epoch: 04 | Epoch Time: 0m 8s\n",
-      "\tTrain Loss: 0.186 | Train Acc: 93.05%\n",
-      "\t Val. Loss: 0.286 |  Val. Acc: 89.40%\n",
-      "Epoch: 05 | Epoch Time: 0m 7s\n",
-      "\tTrain Loss: 0.116 | Train Acc: 95.85%\n",
-      "\t Val. Loss: 0.307 |  Val. Acc: 89.56%\n",
-      "Epoch: 06 | Epoch Time: 0m 7s\n",
-      "\tTrain Loss: 0.065 | Train Acc: 97.90%\n",
-      "\t Val. Loss: 0.354 |  Val. Acc: 89.64%\n",
-      "Epoch: 07 | Epoch Time: 0m 8s\n",
-      "\tTrain Loss: 0.042 | Train Acc: 98.74%\n",
-      "\t Val. Loss: 0.403 |  Val. Acc: 89.35%\n",
-      "Epoch: 08 | Epoch Time: 0m 8s\n",
-      "\tTrain Loss: 0.020 | Train Acc: 99.47%\n",
-      "\t Val. Loss: 0.408 |  Val. Acc: 89.35%\n",
-      "Epoch: 09 | Epoch Time: 0m 7s\n",
-      "\tTrain Loss: 0.010 | Train Acc: 99.81%\n",
-      "\t Val. Loss: 0.505 |  Val. Acc: 88.53%\n",
-      "Epoch: 10 | Epoch Time: 0m 7s\n",
-      "\tTrain Loss: 0.007 | Train Acc: 99.85%\n",
-      "\t Val. Loss: 0.657 |  Val. Acc: 88.27%\n"
-     ]
-    }
-   ],
-   "source": [
-    "n_epochs = 10\n",
-    "\n",
-    "best_valid_loss = float('inf')\n",
-    "\n",
-    "for epoch in range(n_epochs):\n",
-    "\n",
-    "    start_time = time.monotonic()\n",
-    "    \n",
-    "    train_loss, train_acc = train(model, train_iterator, optimizer, criterion, device)\n",
-    "    valid_loss, valid_acc = evaluate(model, valid_iterator, criterion, device)\n",
-    "    \n",
-    "    end_time = time.monotonic()\n",
-    "\n",
-    "    epoch_mins, epoch_secs = epoch_time(start_time, end_time)\n",
-    "    \n",
-    "    if valid_loss < best_valid_loss:\n",
-    "        best_valid_loss = valid_loss\n",
-    "        torch.save(model.state_dict(), 'gru-model.pt')\n",
-    "    \n",
-    "    print(f'Epoch: {epoch+1:02} | Epoch Time: {epoch_mins}m {epoch_secs}s')\n",
-    "    print(f'\\tTrain Loss: {train_loss:.3f} | Train Acc: {train_acc*100:.2f}%')\n",
-    "    print(f'\\t Val. Loss: {valid_loss:.3f} |  Val. Acc: {valid_acc*100:.2f}%')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 39,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "PH7-0f6nUKRb",
-    "outputId": "faf1e6dd-c99e-4fda-c6f8-435a08ca0073"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Test Loss: 0.290 | Test Acc: 87.93%\n"
-     ]
-    }
-   ],
-   "source": [
-    "model.load_state_dict(torch.load('gru-model.pt'))\n",
-    "\n",
-    "test_loss, test_acc = evaluate(model, test_iterator, criterion, device)\n",
-    "\n",
-    "print(f'Test Loss: {test_loss:.3f} | Test Acc: {test_acc*100:.2f}%')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 40,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "rnWNSo8kdcl_"
-   },
-   "outputs": [],
-   "source": [
-    "def predict_sentiment(tokenizer, vocab, model, device, sentence):\n",
-    "    model.eval()\n",
-    "    tokens = tokenizer.tokenize(sentence)\n",
-    "    length = torch.LongTensor([len(tokens)]).to(device)\n",
-    "    indexes = [vocab.stoi[token] for token in tokens]\n",
-    "    tensor = torch.LongTensor(indexes).unsqueeze(-1).to(device)\n",
-    "    prediction = model(tensor, length)\n",
-    "    probabilities = nn.functional.softmax(prediction, dim = -1)\n",
-    "    pos_probability = probabilities.squeeze()[-1].item()\n",
-    "    return pos_probability"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 41,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "hb7bC-aEeC1q",
-    "outputId": "059cccd1-efb4-404c-81f9-606983c23b33"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "0.06520231813192368"
-      ]
-     },
-     "execution_count": 41,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "sentence = 'the absolute worst movie of all time.'\n",
-    "\n",
-    "predict_sentiment(tokenizer, vocab, model, device, sentence)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 42,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "APEVZ3D4eEVw",
-    "outputId": "0d188e29-6e4e-4183-c7aa-467ea8f1afe6"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "0.8539475798606873"
-      ]
-     },
-     "execution_count": 42,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "sentence = 'one of the greatest films i have ever seen in my life.'\n",
-    "\n",
-    "predict_sentiment(tokenizer, vocab, model, device, sentence)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 43,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "X7GMey_jebjg",
-    "outputId": "04ca4196-51f0-4661-ffe4-8f4dd199baf4"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "0.15590433776378632"
-      ]
-     },
-     "execution_count": 43,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "sentence = \"i thought it was going to be one of the greatest films i have ever seen in my life, \\\n",
-    "but it was actually the absolute worst movie of all time.\"\n",
-    "\n",
-    "predict_sentiment(tokenizer, vocab, model, device, sentence)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 44,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "kOoESlQSxYx2",
-    "outputId": "e5826bef-5f9c-41f6-9eb0-795318280045"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "0.3470574617385864"
-      ]
-     },
-     "execution_count": 44,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "sentence = \"i thought it was going to be the absolute worst movie of all time, \\\n",
-    "but it was actually one of the greatest films i have ever seen in my life.\"\n",
-    "\n",
-    "predict_sentiment(tokenizer, vocab, model, device, sentence)"
-   ]
-  }
- ],
- "metadata": {
-  "accelerator": "GPU",
-  "colab": {
-   "machine_shape": "hm",
-   "name": "2_rnn_gru.ipynb",
-   "provenance": []
-  },
-  "kernelspec": {
-   "display_name": "Python 3",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.8.3"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 1
-}
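For reference, the final model in the deleted 2_rnn_gru.ipynb above wraps the embedded batch with nn.utils.rnn.pack_padded_sequence before running the GRU, then classifies from the last hidden state. A minimal standalone sketch of that pattern in plain PyTorch follows (no torchtext; the vocabulary size, dimensions, and toy batch are illustrative assumptions, not values from the notebook):

```python
import torch
import torch.nn as nn

class GRUClassifier(nn.Module):
    """Packed-sequence GRU classifier, mirroring the notebook's final GRU variant."""
    def __init__(self, input_dim, emb_dim, hid_dim, output_dim, pad_idx):
        super().__init__()
        self.embedding = nn.Embedding(input_dim, emb_dim, padding_idx=pad_idx)
        self.gru = nn.GRU(emb_dim, hid_dim)
        self.fc = nn.Linear(hid_dim, output_dim)

    def forward(self, text, lengths):
        # text = [seq len, batch size], lengths = [batch size] (kept on the CPU)
        embedded = self.embedding(text)
        packed = nn.utils.rnn.pack_padded_sequence(embedded, lengths, enforce_sorted=False)
        _, hidden = self.gru(packed)       # hidden = [1, batch size, hid dim]
        return self.fc(hidden.squeeze(0))  # prediction = [batch size, output dim]

# illustrative toy batch: two sequences of lengths 5 and 3, vocab size 100, pad index 1
model = GRUClassifier(input_dim=100, emb_dim=8, hid_dim=16, output_dim=2, pad_idx=1)
text = torch.randint(2, 100, (5, 2))   # [seq len, batch size]
text[3:, 1] = 1                        # pad positions of the shorter sequence
lengths = torch.tensor([5, 3])
print(model(text, lengths).shape)      # torch.Size([2, 2])
```

Packing skips the padded positions, so the hidden state the GRU returns corresponds to each sequence's true final token rather than to trailing pad tokens.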
diff --git a/experimental/3_rnn_bilstm.ipynb b/experimental/3_rnn_bilstm.ipynb
deleted file mode 100644
index 54d3d95..0000000
--- a/experimental/3_rnn_bilstm.ipynb
+++ /dev/null
@@ -1,1049 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 228
-    },
-    "colab_type": "code",
-    "id": "lIYdn1woOS1n",
-    "outputId": "a30c21d5-b7cc-4ea6-a0d3-f9f1392ee04a"
-   },
-   "outputs": [],
-   "source": [
-    "import torch\n",
-    "import torch.nn as nn\n",
-    "import torch.optim as optim\n",
-    "\n",
-    "import torchtext\n",
-    "import torchtext.experimental\n",
-    "import torchtext.experimental.vectors\n",
-    "from torchtext.experimental.datasets.raw.text_classification import RawTextIterableDataset\n",
-    "from torchtext.experimental.datasets.text_classification import TextClassificationDataset\n",
-    "from torchtext.experimental.functional import sequential_transforms, vocab_func, totensor\n",
-    "\n",
-    "import collections\n",
-    "import random\n",
-    "import time"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "II-XIfhSkZS-"
-   },
-   "outputs": [],
-   "source": [
-    "seed = 1234\n",
-    "\n",
-    "torch.manual_seed(seed)\n",
-    "random.seed(seed)\n",
-    "torch.backends.cudnn.deterministic = True\n",
-    "torch.backends.cudnn.benchmark = False"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "kIkeEy2mkcT6"
-   },
-   "outputs": [],
-   "source": [
-    "raw_train_data, raw_test_data = torchtext.experimental.datasets.raw.IMDB()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "_a5ucP1ZkeDv"
-   },
-   "outputs": [],
-   "source": [
-    "def get_train_valid_split(raw_train_data, split_ratio = 0.7):\n",
-    "\n",
-    "    raw_train_data = list(raw_train_data)\n",
-    "        \n",
-    "    random.shuffle(raw_train_data)\n",
-    "        \n",
-    "    n_train_examples = int(len(raw_train_data) * split_ratio)\n",
-    "        \n",
-    "    train_data = raw_train_data[:n_train_examples]\n",
-    "    valid_data = raw_train_data[n_train_examples:]\n",
-    "    \n",
-    "    train_data = RawTextIterableDataset(train_data)\n",
-    "    valid_data = RawTextIterableDataset(valid_data)\n",
-    "    \n",
-    "    return train_data, valid_data"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "1WP4nz-_kf_0"
-   },
-   "outputs": [],
-   "source": [
-    "raw_train_data, raw_valid_data = get_train_valid_split(raw_train_data)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "pPvrMZlWkicJ"
-   },
-   "outputs": [],
-   "source": [
-    "class Tokenizer:\n",
-    "    def __init__(self, tokenize_fn = 'basic_english', lower = True, max_length = None):\n",
-    "        \n",
-    "        self.tokenize_fn = torchtext.data.utils.get_tokenizer(tokenize_fn)\n",
-    "        self.lower = lower\n",
-    "        self.max_length = max_length\n",
-    "        \n",
-    "    def tokenize(self, s):\n",
-    "        \n",
-    "        tokens = self.tokenize_fn(s)\n",
-    "        \n",
-    "        if self.lower:\n",
-    "            tokens = [token.lower() for token in tokens]\n",
-    "            \n",
-    "        if self.max_length is not None:\n",
-    "            tokens = tokens[:self.max_length]\n",
-    "            \n",
-    "        return tokens"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "SMsMQSuSkkt3"
-   },
-   "outputs": [],
-   "source": [
-    "max_length = 500\n",
-    "\n",
-    "tokenizer = Tokenizer(max_length = max_length)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 8,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "Yie7TKWKkmeK"
-   },
-   "outputs": [],
-   "source": [
-    "def build_vocab_from_data(raw_data, tokenizer, **vocab_kwargs):\n",
-    "    \n",
-    "    token_freqs = collections.Counter()\n",
-    "    \n",
-    "    for label, text in raw_data:\n",
-    "        tokens = tokenizer.tokenize(text)\n",
-    "        token_freqs.update(tokens)\n",
-    "                \n",
-    "    vocab = torchtext.vocab.Vocab(token_freqs, **vocab_kwargs)\n",
-    "    \n",
-    "    return vocab"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "9jW7Ci7WkoSn"
-   },
-   "outputs": [],
-   "source": [
-    "max_size = 25_000\n",
-    "\n",
-    "vocab = build_vocab_from_data(raw_train_data, tokenizer, max_size = max_size)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 10,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "cvSZt_iFkqkt"
-   },
-   "outputs": [],
-   "source": [
-    "def process_raw_data(raw_data, tokenizer, vocab):\n",
-    "    \n",
-    "    raw_data = [(label, text) for (label, text) in raw_data]\n",
-    "\n",
-    "    text_transform = sequential_transforms(tokenizer.tokenize,\n",
-    "                                           vocab_func(vocab),\n",
-    "                                           totensor(dtype=torch.long))\n",
-    "    \n",
-    "    label_transform = sequential_transforms(totensor(dtype=torch.long))\n",
-    "\n",
-    "    transforms = (label_transform, text_transform)\n",
-    "\n",
-    "    dataset = TextClassificationDataset(raw_data,\n",
-    "                                        vocab,\n",
-    "                                        transforms)\n",
-    "    \n",
-    "    return dataset"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 11,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "bwsSiBdkktRk"
-   },
-   "outputs": [],
-   "source": [
-    "train_data = process_raw_data(raw_train_data, tokenizer, vocab)\n",
-    "valid_data = process_raw_data(raw_valid_data, tokenizer, vocab)\n",
-    "test_data = process_raw_data(raw_test_data, tokenizer, vocab)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 12,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "5m3xRusSk8v3"
-   },
-   "outputs": [],
-   "source": [
-    "class Collator:\n",
-    "    def __init__(self, pad_idx):\n",
-    "        \n",
-    "        self.pad_idx = pad_idx\n",
-    "        \n",
-    "    def collate(self, batch):\n",
-    "        \n",
-    "        labels, text = zip(*batch)\n",
-    "        \n",
-    "        labels = torch.LongTensor(labels)\n",
-    "        \n",
-    "        lengths = torch.LongTensor([len(x) for x in text])\n",
-    "\n",
-    "        text = nn.utils.rnn.pad_sequence(text, padding_value = self.pad_idx)\n",
-    "        \n",
-    "        return labels, text, lengths"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 13,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "1ZMuZqZxk8-p"
-   },
-   "outputs": [],
-   "source": [
-    "pad_token = '<pad>'\n",
-    "pad_idx = vocab[pad_token]\n",
-    "\n",
-    "collator = Collator(pad_idx)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 14,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "mxG97Si9lAI2"
-   },
-   "outputs": [],
-   "source": [
-    "batch_size = 256\n",
-    "\n",
-    "train_iterator = torch.utils.data.DataLoader(train_data, \n",
-    "                                             batch_size, \n",
-    "                                             shuffle = True, \n",
-    "                                             collate_fn = collator.collate)\n",
-    "\n",
-    "valid_iterator = torch.utils.data.DataLoader(valid_data, \n",
-    "                                             batch_size, \n",
-    "                                             shuffle = False, \n",
-    "                                             collate_fn = collator.collate)\n",
-    "\n",
-    "test_iterator = torch.utils.data.DataLoader(test_data, \n",
-    "                                            batch_size, \n",
-    "                                            shuffle = False, \n",
-    "                                            collate_fn = collator.collate)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 15,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "ty3NbheMlPYs"
-   },
-   "outputs": [],
-   "source": [
-    "class BiLSTM(nn.Module):\n",
-    "    def __init__(self, input_dim, emb_dim, hid_dim, output_dim, n_layers, dropout, pad_idx):\n",
-    "\n",
-    "        super().__init__()\n",
-    "\n",
-    "        self.embedding = nn.Embedding(input_dim, emb_dim, padding_idx = pad_idx)\n",
-    "        self.lstm = nn.LSTM(emb_dim, hid_dim, num_layers = n_layers, bidirectional = True, dropout = dropout)\n",
-    "        self.fc = nn.Linear(2 * hid_dim, output_dim)\n",
-    "        self.dropout = nn.Dropout(dropout)\n",
-    "\n",
-    "    def forward(self, text, lengths):\n",
-    "\n",
-    "        # text = [seq len, batch size]\n",
-    "        # lengths = [batch size]\n",
-    "\n",
-    "        embedded = self.dropout(self.embedding(text))\n",
-    "\n",
-    "        # embedded = [seq len, batch size, emb dim]\n",
-    "\n",
-    "        packed_embedded = nn.utils.rnn.pack_padded_sequence(embedded, lengths, enforce_sorted = False)\n",
-    "\n",
-    "        packed_output, (hidden, cell) = self.lstm(packed_embedded)\n",
-    "\n",
-    "        output, _ = nn.utils.rnn.pad_packed_sequence(packed_output)\n",
-    "\n",
-    "        # outputs = [seq_len, batch size, n directions * hid dim]\n",
-    "        # hidden = [n layers * n directions, batch size, hid dim]\n",
-    "\n",
-    "        hidden_fwd = hidden[-2]\n",
-    "        hidden_bck = hidden[-1]\n",
-    "\n",
-    "        # hidden_fwd/bck = [batch size, hid dim]\n",
-    "\n",
-    "        hidden = torch.cat((hidden_fwd, hidden_bck), dim = 1)\n",
-    "\n",
-    "        # hidden = [batch size, hid dim * 2]\n",
-    "\n",
-    "        prediction = self.fc(self.dropout(hidden))\n",
-    "\n",
-    "        # prediction = [batch size, output dim]\n",
-    "\n",
-    "        return prediction"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 16,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "trg6yTjBqOLZ"
-   },
-   "outputs": [],
-   "source": [
-    "input_dim = len(vocab)\n",
-    "emb_dim = 100\n",
-    "hid_dim = 256\n",
-    "output_dim = 2\n",
-    "n_layers = 2\n",
-    "dropout = 0.5\n",
-    "\n",
-    "model = BiLSTM(input_dim, emb_dim, hid_dim, output_dim, n_layers, dropout, pad_idx)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 17,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "9dgdCRsqqQoD"
-   },
-   "outputs": [],
-   "source": [
-    "def count_parameters(model):\n",
-    "    return sum(p.numel() for p in model.parameters() if p.requires_grad)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 18,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "bfiGzjvnqV-s",
-    "outputId": "168a3662-b95a-48de-d722-c76264e8c8ab"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "The model has 4,811,370 trainable parameters\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(f'The model has {count_parameters(model):,} trainable parameters')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 19,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "name: embedding.weight, shape: torch.Size([25002, 100])\n",
-      "name: lstm.weight_ih_l0, shape: torch.Size([1024, 100])\n",
-      "name: lstm.weight_hh_l0, shape: torch.Size([1024, 256])\n",
-      "name: lstm.bias_ih_l0, shape: torch.Size([1024])\n",
-      "name: lstm.bias_hh_l0, shape: torch.Size([1024])\n",
-      "name: lstm.weight_ih_l0_reverse, shape: torch.Size([1024, 100])\n",
-      "name: lstm.weight_hh_l0_reverse, shape: torch.Size([1024, 256])\n",
-      "name: lstm.bias_ih_l0_reverse, shape: torch.Size([1024])\n",
-      "name: lstm.bias_hh_l0_reverse, shape: torch.Size([1024])\n",
-      "name: lstm.weight_ih_l1, shape: torch.Size([1024, 512])\n",
-      "name: lstm.weight_hh_l1, shape: torch.Size([1024, 256])\n",
-      "name: lstm.bias_ih_l1, shape: torch.Size([1024])\n",
-      "name: lstm.bias_hh_l1, shape: torch.Size([1024])\n",
-      "name: lstm.weight_ih_l1_reverse, shape: torch.Size([1024, 512])\n",
-      "name: lstm.weight_hh_l1_reverse, shape: torch.Size([1024, 256])\n",
-      "name: lstm.bias_ih_l1_reverse, shape: torch.Size([1024])\n",
-      "name: lstm.bias_hh_l1_reverse, shape: torch.Size([1024])\n",
-      "name: fc.weight, shape: torch.Size([2, 512])\n",
-      "name: fc.bias, shape: torch.Size([2])\n"
-     ]
-    }
-   ],
-   "source": [
-    "for n, p in model.named_parameters():\n",
-    "    print(f'name: {n}, shape: {p.shape}')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 20,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "def initialize_parameters(m):\n",
-    "    if isinstance(m, nn.Embedding):\n",
-    "        nn.init.uniform_(m.weight, -0.05, 0.05)\n",
-    "    elif isinstance(m, nn.LSTM):\n",
-    "        for n, p in m.named_parameters():\n",
-    "            if 'weight_ih' in n:\n",
-    "                i, f, g, o = p.chunk(4)\n",
-    "                nn.init.xavier_uniform_(i)\n",
-    "                nn.init.xavier_uniform_(f)\n",
-    "                nn.init.xavier_uniform_(g)\n",
-    "                nn.init.xavier_uniform_(o)\n",
-    "            elif 'weight_hh' in n:\n",
-    "                i, f, g, o = p.chunk(4)\n",
-    "                nn.init.orthogonal_(i)\n",
-    "                nn.init.orthogonal_(f)\n",
-    "                nn.init.orthogonal_(g)\n",
-    "                nn.init.orthogonal_(o)\n",
-    "            elif 'bias' in n:\n",
-    "                i, f, g, o = p.chunk(4)\n",
-    "                nn.init.zeros_(i)\n",
-    "                nn.init.ones_(f)\n",
-    "                nn.init.zeros_(g)\n",
-    "                nn.init.zeros_(o)\n",
-    "    elif isinstance(m, nn.Linear):\n",
-    "        nn.init.xavier_uniform_(m.weight)\n",
-    "        nn.init.zeros_(m.bias)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 21,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "BiLSTM(\n",
-       "  (embedding): Embedding(25002, 100, padding_idx=1)\n",
-       "  (lstm): LSTM(100, 256, num_layers=2, dropout=0.5, bidirectional=True)\n",
-       "  (fc): Linear(in_features=512, out_features=2, bias=True)\n",
-       "  (dropout): Dropout(p=0.5, inplace=False)\n",
-       ")"
-      ]
-     },
-     "execution_count": 21,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "model.apply(initialize_parameters)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 22,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "Sah17A41qW5d"
-   },
-   "outputs": [],
-   "source": [
-    "glove = torchtext.experimental.vectors.GloVe(name = '6B',\n",
-    "                                             dim = emb_dim)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 23,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "S1Dfcn2Nqabo"
-   },
-   "outputs": [],
-   "source": [
-    "def get_pretrained_embedding(initial_embedding, pretrained_vectors, vocab, unk_token):\n",
-    "    \n",
-    "    pretrained_embedding = torch.FloatTensor(initial_embedding.weight.clone()).detach()    \n",
-    "    pretrained_vocab = pretrained_vectors.vectors.get_stoi()\n",
-    "    \n",
-    "    unk_tokens = []\n",
-    "    \n",
-    "    for idx, token in enumerate(vocab.itos):\n",
-    "        if token in pretrained_vocab:\n",
-    "            pretrained_vector = pretrained_vectors[token]\n",
-    "            pretrained_embedding[idx] = pretrained_vector\n",
-    "        else:\n",
-    "            unk_tokens.append(token)\n",
-    "        \n",
-    "    return pretrained_embedding, unk_tokens"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 24,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "sGyV94f7qvdr"
-   },
-   "outputs": [],
-   "source": [
-    "unk_token = '<unk>'\n",
-    "\n",
-    "pretrained_embedding, unk_tokens = get_pretrained_embedding(model.embedding, glove, vocab, unk_token)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 25,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 139
-    },
-    "colab_type": "code",
-    "id": "KYnGxbVisUsk",
-    "outputId": "e1a88c1c-0f3e-48c6-afcf-9d791fd54bb9"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "tensor([[-0.0398,  0.0357, -0.0046,  ..., -0.0485, -0.0088,  0.0329],\n",
-       "        [-0.0330,  0.0428,  0.0304,  ...,  0.0236,  0.0487,  0.0101],\n",
-       "        [-0.0382, -0.2449,  0.7281,  ..., -0.1459,  0.8278,  0.2706],\n",
-       "        ...,\n",
-       "        [-0.2925,  0.1087,  0.7920,  ..., -0.3641,  0.1822, -0.4104],\n",
-       "        [-0.7250,  0.7545,  0.1637,  ..., -0.0144, -0.1761,  0.3418],\n",
-       "        [ 1.1753,  0.0460, -0.3542,  ...,  0.4510,  0.0485, -0.4015]])"
-      ]
-     },
-     "execution_count": 25,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "model.embedding.weight.data.copy_(pretrained_embedding)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 26,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "model.embedding.weight.data[pad_idx] = torch.zeros(emb_dim)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 27,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "DTwNU41WseMS"
-   },
-   "outputs": [],
-   "source": [
-    "optimizer = optim.Adam(model.parameters())"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 28,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "Rxlx7a72s1ze"
-   },
-   "outputs": [],
-   "source": [
-    "criterion = nn.CrossEntropyLoss()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 29,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "1CLimBxus2yX"
-   },
-   "outputs": [],
-   "source": [
-    "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 30,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "108fm55ftBgO"
-   },
-   "outputs": [],
-   "source": [
-    "model = model.to(device)\n",
-    "criterion = criterion.to(device)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 31,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "IYCxbvXUvE5v"
-   },
-   "outputs": [],
-   "source": [
-    "def calculate_accuracy(predictions, labels):\n",
-    "    top_predictions = predictions.argmax(1, keepdim = True)\n",
-    "    correct = top_predictions.eq(labels.view_as(top_predictions)).sum()\n",
-    "    accuracy = correct.float() / labels.shape[0]\n",
-    "    return accuracy"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 32,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "Ik2JQo6TvGml"
-   },
-   "outputs": [],
-   "source": [
-    "def train(model, iterator, optimizer, criterion, device):\n",
-    "    \n",
-    "    epoch_loss = 0\n",
-    "    epoch_acc = 0\n",
-    "    \n",
-    "    model.train()\n",
-    "    \n",
-    "    for labels, text, lengths in iterator:\n",
-    "        \n",
-    "        labels = labels.to(device)\n",
-    "        text = text.to(device)\n",
-    "\n",
-    "        optimizer.zero_grad()\n",
-    "        \n",
-    "        predictions = model(text, lengths)\n",
-    "        \n",
-    "        loss = criterion(predictions, labels)\n",
-    "        \n",
-    "        acc = calculate_accuracy(predictions, labels)\n",
-    "        \n",
-    "        loss.backward()\n",
-    "        \n",
-    "        optimizer.step()\n",
-    "        \n",
-    "        epoch_loss += loss.item()\n",
-    "        epoch_acc += acc.item()\n",
-    "\n",
-    "    return epoch_loss / len(iterator), epoch_acc / len(iterator)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 33,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "aGy1Zk6jvIf8"
-   },
-   "outputs": [],
-   "source": [
-    "def evaluate(model, iterator, criterion, device):\n",
-    "    \n",
-    "    epoch_loss = 0\n",
-    "    epoch_acc = 0\n",
-    "    \n",
-    "    model.eval()\n",
-    "    \n",
-    "    with torch.no_grad():\n",
-    "    \n",
-    "        for labels, text, lengths in iterator:\n",
-    "\n",
-    "            labels = labels.to(device)\n",
-    "            text = text.to(device)\n",
-    "            \n",
-    "            predictions = model(text, lengths)\n",
-    "            \n",
-    "            loss = criterion(predictions, labels)\n",
-    "            \n",
-    "            acc = calculate_accuracy(predictions, labels)\n",
-    "\n",
-    "            epoch_loss += loss.item()\n",
-    "            epoch_acc += acc.item()\n",
-    "        \n",
-    "    return epoch_loss / len(iterator), epoch_acc / len(iterator)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 34,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "9MyMRRzbvKPx"
-   },
-   "outputs": [],
-   "source": [
-    "def epoch_time(start_time, end_time):\n",
-    "    elapsed_time = end_time - start_time\n",
-    "    elapsed_mins = int(elapsed_time / 60)\n",
-    "    elapsed_secs = int(elapsed_time - (elapsed_mins * 60))\n",
-    "    return elapsed_mins, elapsed_secs"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 35,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 537
-    },
-    "colab_type": "code",
-    "id": "dRKwD51WvMa3",
-    "outputId": "79389e66-c1bf-45c9-a919-63ee787ad660"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Epoch: 01 | Epoch Time: 0m 23s\n",
-      "\tTrain Loss: 0.777 | Train Acc: 52.23%\n",
-      "\t Val. Loss: 0.683 |  Val. Acc: 53.70%\n",
-      "Epoch: 02 | Epoch Time: 0m 24s\n",
-      "\tTrain Loss: 0.683 | Train Acc: 57.90%\n",
-      "\t Val. Loss: 0.676 |  Val. Acc: 53.47%\n",
-      "Epoch: 03 | Epoch Time: 0m 24s\n",
-      "\tTrain Loss: 0.625 | Train Acc: 65.60%\n",
-      "\t Val. Loss: 0.482 |  Val. Acc: 78.27%\n",
-      "Epoch: 04 | Epoch Time: 0m 25s\n",
-      "\tTrain Loss: 0.483 | Train Acc: 77.15%\n",
-      "\t Val. Loss: 0.410 |  Val. Acc: 82.67%\n",
-      "Epoch: 05 | Epoch Time: 0m 24s\n",
-      "\tTrain Loss: 0.350 | Train Acc: 85.31%\n",
-      "\t Val. Loss: 0.315 |  Val. Acc: 86.75%\n",
-      "Epoch: 06 | Epoch Time: 0m 24s\n",
-      "\tTrain Loss: 0.294 | Train Acc: 88.14%\n",
-      "\t Val. Loss: 0.288 |  Val. Acc: 88.41%\n",
-      "Epoch: 07 | Epoch Time: 0m 25s\n",
-      "\tTrain Loss: 0.258 | Train Acc: 89.92%\n",
-      "\t Val. Loss: 0.277 |  Val. Acc: 89.14%\n",
-      "Epoch: 08 | Epoch Time: 0m 24s\n",
-      "\tTrain Loss: 0.231 | Train Acc: 91.03%\n",
-      "\t Val. Loss: 0.280 |  Val. Acc: 88.89%\n",
-      "Epoch: 09 | Epoch Time: 0m 24s\n",
-      "\tTrain Loss: 0.196 | Train Acc: 92.50%\n",
-      "\t Val. Loss: 0.285 |  Val. Acc: 89.27%\n",
-      "Epoch: 10 | Epoch Time: 0m 24s\n",
-      "\tTrain Loss: 0.175 | Train Acc: 93.53%\n",
-      "\t Val. Loss: 0.316 |  Val. Acc: 89.55%\n"
-     ]
-    }
-   ],
-   "source": [
-    "n_epochs = 10\n",
-    "\n",
-    "best_valid_loss = float('inf')\n",
-    "\n",
-    "for epoch in range(n_epochs):\n",
-    "\n",
-    "    start_time = time.monotonic()\n",
-    "    \n",
-    "    train_loss, train_acc = train(model, train_iterator, optimizer, criterion, device)\n",
-    "    valid_loss, valid_acc = evaluate(model, valid_iterator, criterion, device)\n",
-    "    \n",
-    "    end_time = time.monotonic()\n",
-    "\n",
-    "    epoch_mins, epoch_secs = epoch_time(start_time, end_time)\n",
-    "    \n",
-    "    if valid_loss < best_valid_loss:\n",
-    "        best_valid_loss = valid_loss\n",
-    "        torch.save(model.state_dict(), 'bilstm-model.pt')\n",
-    "    \n",
-    "    print(f'Epoch: {epoch+1:02} | Epoch Time: {epoch_mins}m {epoch_secs}s')\n",
-    "    print(f'\\tTrain Loss: {train_loss:.3f} | Train Acc: {train_acc*100:.2f}%')\n",
-    "    print(f'\\t Val. Loss: {valid_loss:.3f} |  Val. Acc: {valid_acc*100:.2f}%')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 36,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "hKOg4oARvPHJ",
-    "outputId": "7cfe4b85-de2f-47f3-8437-45589c32ceca"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Test Loss: 0.291 | Test Acc: 88.06%\n"
-     ]
-    }
-   ],
-   "source": [
-    "model.load_state_dict(torch.load('bilstm-model.pt'))\n",
-    "\n",
-    "test_loss, test_acc = evaluate(model, test_iterator, criterion, device)\n",
-    "\n",
-    "print(f'Test Loss: {test_loss:.3f} | Test Acc: {test_acc*100:.2f}%')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 37,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "tQ4Jsf_vvWgB"
-   },
-   "outputs": [],
-   "source": [
-    "def predict_sentiment(tokenizer, vocab, model, device, sentence):\n",
-    "    model.eval()\n",
-    "    tokens = tokenizer.tokenize(sentence)\n",
-    "    length = torch.LongTensor([len(tokens)]).to(device)\n",
-    "    indexes = [vocab.stoi[token] for token in tokens]\n",
-    "    tensor = torch.LongTensor(indexes).unsqueeze(-1).to(device)\n",
-    "    prediction = model(tensor, length)\n",
-    "    probabilities = nn.functional.softmax(prediction, dim = -1)\n",
-    "    pos_probability = probabilities.squeeze()[-1].item()\n",
-    "    return pos_probability"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 38,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "Yy7_6rhovZTE",
-    "outputId": "78860852-39ea-4a7b-eb33-9a1a077fb9e0"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "0.06933268904685974"
-      ]
-     },
-     "execution_count": 38,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "sentence = 'the absolute worst movie of all time.'\n",
-    "\n",
-    "predict_sentiment(tokenizer, vocab, model, device, sentence)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 39,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "L3LmQxrgvau9",
-    "outputId": "0204aa17-0bc1-45f2-9be1-c014798af120"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "0.9730159640312195"
-      ]
-     },
-     "execution_count": 39,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "sentence = 'one of the greatest films i have ever seen in my life.'\n",
-    "\n",
-    "predict_sentiment(tokenizer, vocab, model, device, sentence)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 40,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "t7Qoy21Bvb7v",
-    "outputId": "6094a141-4f37-4110-edc7-aa14b9a3c667"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "0.1614144891500473"
-      ]
-     },
-     "execution_count": 40,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "sentence = \"i thought it was going to be one of the greatest films i have ever seen in my life, \\\n",
-    "but it was actually the absolute worst movie of all time.\"\n",
-    "\n",
-    "predict_sentiment(tokenizer, vocab, model, device, sentence)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 41,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "EPGXBr18vdQT",
-    "outputId": "e5b3d210-0254-4d5f-bdbe-609c0b7d6a8a"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "0.5040232539176941"
-      ]
-     },
-     "execution_count": 41,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "sentence = \"i thought it was going to be the absolute worst movie of all time, \\\n",
-    "but it was actually one of the greatest films i have ever seen in my life.\"\n",
-    "\n",
-    "predict_sentiment(tokenizer, vocab, model, device, sentence)"
-   ]
-  }
- ],
- "metadata": {
-  "accelerator": "GPU",
-  "colab": {
-   "machine_shape": "hm",
-   "name": "scratchpad",
-   "provenance": []
-  },
-  "kernelspec": {
-   "display_name": "Python 3",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.8.3"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 1
-}
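For reference, the deleted 3_rnn_bilstm.ipynb above reads the final layer's forward and backward hidden states out of hidden[-2] and hidden[-1] and concatenates them before the linear layer. A minimal sketch of that shape bookkeeping in plain PyTorch follows (layer sizes and the random input are illustrative assumptions, not values from the notebook):

```python
import torch
import torch.nn as nn

# 2-layer bidirectional LSTM with illustrative sizes
lstm = nn.LSTM(input_size=8, hidden_size=16, num_layers=2, bidirectional=True)

x = torch.randn(7, 4, 8)   # [seq len, batch size, emb dim]
_, (hidden, _) = lstm(x)   # hidden = [n layers * n directions, batch size, hid dim] = [4, 4, 16]

hidden_fwd = hidden[-2]    # final layer, forward direction  = [batch size, hid dim]
hidden_bck = hidden[-1]    # final layer, backward direction = [batch size, hid dim]
combined = torch.cat((hidden_fwd, hidden_bck), dim=1)

print(combined.shape)      # torch.Size([4, 32]) -> feeds nn.Linear(2 * hid_dim, output_dim)
```

The flattened hidden tensor is ordered layer by layer with the forward direction before the backward one, which is why the last two slices are the top layer's two directions.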
diff --git a/experimental/4_cnn.ipynb b/experimental/4_cnn.ipynb
deleted file mode 100644
index ae4d680..0000000
--- a/experimental/4_cnn.ipynb
+++ /dev/null
@@ -1,1028 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 228
-    },
-    "colab_type": "code",
-    "id": "lIYdn1woOS1n",
-    "outputId": "f9419fe4-7c0e-4706-a9b9-30fbc836d9a9"
-   },
-   "outputs": [],
-   "source": [
-    "import torch\n",
-    "import torch.nn as nn\n",
-    "import torch.nn.functional as F\n",
-    "import torch.optim as optim\n",
-    "\n",
-    "import torchtext\n",
-    "import torchtext.experimental\n",
-    "import torchtext.experimental.vectors\n",
-    "from torchtext.experimental.datasets.raw.text_classification import RawTextIterableDataset\n",
-    "from torchtext.experimental.datasets.text_classification import TextClassificationDataset\n",
-    "from torchtext.experimental.functional import sequential_transforms, vocab_func, totensor\n",
-    "\n",
-    "import collections\n",
-    "import random\n",
-    "import time"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "II-XIfhSkZS-"
-   },
-   "outputs": [],
-   "source": [
-    "seed = 1234\n",
-    "\n",
-    "torch.manual_seed(seed)\n",
-    "random.seed(seed)\n",
-    "torch.backends.cudnn.deterministic = True\n",
-    "torch.backends.cudnn.benchmark = False"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "kIkeEy2mkcT6"
-   },
-   "outputs": [],
-   "source": [
-    "raw_train_data, raw_test_data = torchtext.experimental.datasets.raw.IMDB()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "_a5ucP1ZkeDv"
-   },
-   "outputs": [],
-   "source": [
-    "def get_train_valid_split(raw_train_data, split_ratio = 0.7):\n",
-    "\n",
-    "    raw_train_data = list(raw_train_data)\n",
-    "        \n",
-    "    random.shuffle(raw_train_data)\n",
-    "        \n",
-    "    n_train_examples = int(len(raw_train_data) * split_ratio)\n",
-    "        \n",
-    "    train_data = raw_train_data[:n_train_examples]\n",
-    "    valid_data = raw_train_data[n_train_examples:]\n",
-    "    \n",
-    "    train_data = RawTextIterableDataset(train_data)\n",
-    "    valid_data = RawTextIterableDataset(valid_data)\n",
-    "    \n",
-    "    return train_data, valid_data"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "1WP4nz-_kf_0"
-   },
-   "outputs": [],
-   "source": [
-    "raw_train_data, raw_valid_data = get_train_valid_split(raw_train_data)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "pPvrMZlWkicJ"
-   },
-   "outputs": [],
-   "source": [
-    "class Tokenizer:\n",
-    "    def __init__(self, tokenize_fn = 'basic_english', lower = True, max_length = None):\n",
-    "        \n",
-    "        self.tokenize_fn = torchtext.data.utils.get_tokenizer(tokenize_fn)\n",
-    "        self.lower = lower\n",
-    "        self.max_length = max_length\n",
-    "        \n",
-    "    def tokenize(self, s):\n",
-    "        \n",
-    "        tokens = self.tokenize_fn(s)\n",
-    "        \n",
-    "        if self.lower:\n",
-    "            tokens = [token.lower() for token in tokens]\n",
-    "            \n",
-    "        if self.max_length is not None:\n",
-    "            tokens = tokens[:self.max_length]\n",
-    "            \n",
-    "        return tokens"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "SMsMQSuSkkt3"
-   },
-   "outputs": [],
-   "source": [
-    "max_length = 500\n",
-    "\n",
-    "tokenizer = Tokenizer(max_length = max_length)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 8,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "Yie7TKWKkmeK"
-   },
-   "outputs": [],
-   "source": [
-    "def build_vocab_from_data(raw_data, tokenizer, **vocab_kwargs):\n",
-    "    \n",
-    "    token_freqs = collections.Counter()\n",
-    "    \n",
-    "    for label, text in raw_data:\n",
-    "        tokens = tokenizer.tokenize(text)\n",
-    "        token_freqs.update(tokens)\n",
-    "                \n",
-    "    vocab = torchtext.vocab.Vocab(token_freqs, **vocab_kwargs)\n",
-    "    \n",
-    "    return vocab"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "9jW7Ci7WkoSn"
-   },
-   "outputs": [],
-   "source": [
-    "max_size = 25_000\n",
-    "\n",
-    "vocab = build_vocab_from_data(raw_train_data, tokenizer, max_size = max_size)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 10,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "cvSZt_iFkqkt"
-   },
-   "outputs": [],
-   "source": [
-    "def process_raw_data(raw_data, tokenizer, vocab):\n",
-    "    \n",
-    "    raw_data = [(label, text) for (label, text) in raw_data]\n",
-    "\n",
-    "    text_transform = sequential_transforms(tokenizer.tokenize,\n",
-    "                                           vocab_func(vocab),\n",
-    "                                           totensor(dtype=torch.long))\n",
-    "    \n",
-    "    label_transform = sequential_transforms(totensor(dtype=torch.long))\n",
-    "\n",
-    "    transforms = (label_transform, text_transform)\n",
-    "\n",
-    "    dataset = TextClassificationDataset(raw_data,\n",
-    "                                        vocab,\n",
-    "                                        transforms)\n",
-    "    \n",
-    "    return dataset"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 11,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "bwsSiBdkktRk"
-   },
-   "outputs": [],
-   "source": [
-    "train_data = process_raw_data(raw_train_data, tokenizer, vocab)\n",
-    "valid_data = process_raw_data(raw_valid_data, tokenizer, vocab)\n",
-    "test_data = process_raw_data(raw_test_data, tokenizer, vocab)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 12,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "5m3xRusSk8v3"
-   },
-   "outputs": [],
-   "source": [
-    "class Collator:\n",
-    "    def __init__(self, pad_idx, batch_first):\n",
-    "        \n",
-    "        self.pad_idx = pad_idx\n",
-    "        self.batch_first = batch_first\n",
-    "        \n",
-    "    def collate(self, batch):\n",
-    "        \n",
-    "        labels, text = zip(*batch)\n",
-    "        \n",
-    "        labels = torch.LongTensor(labels)\n",
-    "\n",
-    "        text = nn.utils.rnn.pad_sequence(text, \n",
-    "                                         padding_value = self.pad_idx,\n",
-    "                                         batch_first = self.batch_first)\n",
-    "        \n",
-    "        return labels, text"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 13,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "1ZMuZqZxk8-p"
-   },
-   "outputs": [],
-   "source": [
-    "pad_token = '<pad>'\n",
-    "pad_idx = vocab[pad_token]\n",
-    "batch_first = True\n",
-    "\n",
-    "collator = Collator(pad_idx, batch_first)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 14,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "mxG97Si9lAI2"
-   },
-   "outputs": [],
-   "source": [
-    "batch_size = 256\n",
-    "\n",
-    "train_iterator = torch.utils.data.DataLoader(train_data, \n",
-    "                                             batch_size, \n",
-    "                                             shuffle = True, \n",
-    "                                             collate_fn = collator.collate)\n",
-    "\n",
-    "valid_iterator = torch.utils.data.DataLoader(valid_data, \n",
-    "                                             batch_size, \n",
-    "                                             shuffle = False, \n",
-    "                                             collate_fn = collator.collate)\n",
-    "\n",
-    "test_iterator = torch.utils.data.DataLoader(test_data, \n",
-    "                                            batch_size, \n",
-    "                                            shuffle = False, \n",
-    "                                            collate_fn = collator.collate)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 15,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "ty3NbheMlPYs"
-   },
-   "outputs": [],
-   "source": [
-    "class CNN(nn.Module):\n",
-    "    def __init__(self, input_dim, emb_dim, n_filters, filter_sizes, output_dim, dropout, pad_idx):\n",
-    "        super().__init__()\n",
-    "\n",
-    "        self.embedding = nn.Embedding(input_dim, emb_dim, padding_idx = pad_idx)\n",
-    "        self.convs = nn.ModuleList([nn.Conv1d(in_channels = emb_dim,\n",
-    "                                              out_channels = n_filters,\n",
-    "                                              kernel_size = filter_size)\n",
-    "                                    for filter_size in filter_sizes])\n",
-    "        self.fc = nn.Linear(len(filter_sizes) * n_filters, output_dim)\n",
-    "        self.dropout = nn.Dropout(dropout)\n",
-    "\n",
-    "    def forward(self, text):\n",
-    "\n",
-    "        # text = [batch size, seq len]\n",
-    "                \n",
-    "        embedded = self.dropout(self.embedding(text))\n",
-    "\n",
-    "        # embedded = [batch size, seq len, emb dim]\n",
-    "\n",
-    "        embedded = embedded.permute(0, 2, 1)\n",
-    "\n",
-    "        # embedded = [batch size, emb dim, seq len]\n",
-    "\n",
-    "        conved = [F.relu(conv(embedded)) for conv in self.convs]\n",
-    "\n",
-    "        # conved[n] = [batch size, n filters, seq len - filter_sizes[n] + 1]\n",
-    "\n",
-    "        pooled = [F.max_pool1d(conv, conv.shape[-1]).squeeze(-1) for conv in conved]\n",
-    "\n",
-    "        # pooled[n] = [batch size, n filters]\n",
-    "\n",
-    "        cat = torch.cat(pooled, dim = -1)\n",
-    "\n",
-    "        # cat = [batch size, n filters * len(filter_sizes)]\n",
-    "\n",
-    "        prediction = self.fc(self.dropout(cat))\n",
-    "\n",
-    "        # prediction = [batch size, output dim]\n",
-    "\n",
-    "        return prediction"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 16,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "trg6yTjBqOLZ"
-   },
-   "outputs": [],
-   "source": [
-    "input_dim = len(vocab)\n",
-    "emb_dim = 100\n",
-    "n_filters = 100\n",
-    "filter_sizes = [3, 4, 5]\n",
-    "output_dim = 2\n",
-    "dropout = 0.5\n",
-    "pad_idx = pad_idx\n",
-    "\n",
-    "model = CNN(input_dim, emb_dim, n_filters, filter_sizes, output_dim, dropout, pad_idx)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 17,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "9dgdCRsqqQoD"
-   },
-   "outputs": [],
-   "source": [
-    "def count_parameters(model):\n",
-    "    return sum(p.numel() for p in model.parameters() if p.requires_grad)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 18,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "bfiGzjvnqV-s",
-    "outputId": "fffbb2a6-0a0a-432f-f182-7697a6903c75"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "The model has 2,621,102 trainable parameters\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(f'The model has {count_parameters(model):,} trainable parameters')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 19,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "name: embedding.weight, shape: torch.Size([25002, 100])\n",
-      "name: convs.0.weight, shape: torch.Size([100, 100, 3])\n",
-      "name: convs.0.bias, shape: torch.Size([100])\n",
-      "name: convs.1.weight, shape: torch.Size([100, 100, 4])\n",
-      "name: convs.1.bias, shape: torch.Size([100])\n",
-      "name: convs.2.weight, shape: torch.Size([100, 100, 5])\n",
-      "name: convs.2.bias, shape: torch.Size([100])\n",
-      "name: fc.weight, shape: torch.Size([2, 300])\n",
-      "name: fc.bias, shape: torch.Size([2])\n"
-     ]
-    }
-   ],
-   "source": [
-    "for n, p in model.named_parameters():\n",
-    "    print(f'name: {n}, shape: {p.shape}')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 20,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "def initialize_parameters(m):\n",
-    "    if isinstance(m, nn.Embedding):\n",
-    "        nn.init.uniform_(m.weight, -0.05, 0.05)\n",
-    "    elif isinstance(m, nn.Conv1d):\n",
-    "        nn.init.xavier_uniform_(m.weight)\n",
-    "        nn.init.zeros_(m.bias)\n",
-    "    elif isinstance(m, nn.Linear):\n",
-    "        nn.init.xavier_uniform_(m.weight)\n",
-    "        nn.init.zeros_(m.bias)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 21,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "CNN(\n",
-       "  (embedding): Embedding(25002, 100, padding_idx=1)\n",
-       "  (convs): ModuleList(\n",
-       "    (0): Conv1d(100, 100, kernel_size=(3,), stride=(1,))\n",
-       "    (1): Conv1d(100, 100, kernel_size=(4,), stride=(1,))\n",
-       "    (2): Conv1d(100, 100, kernel_size=(5,), stride=(1,))\n",
-       "  )\n",
-       "  (fc): Linear(in_features=300, out_features=2, bias=True)\n",
-       "  (dropout): Dropout(p=0.5, inplace=False)\n",
-       ")"
-      ]
-     },
-     "execution_count": 21,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "model.apply(initialize_parameters)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 22,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "Sah17A41qW5d"
-   },
-   "outputs": [],
-   "source": [
-    "glove = torchtext.experimental.vectors.GloVe(name = '6B',\n",
-    "                                             dim = emb_dim)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 23,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "S1Dfcn2Nqabo"
-   },
-   "outputs": [],
-   "source": [
-    "def get_pretrained_embedding(initial_embedding, pretrained_vectors, vocab, unk_token):\n",
-    "    \n",
-    "    pretrained_embedding = torch.FloatTensor(initial_embedding.weight.clone()).detach()    \n",
-    "    pretrained_vocab = pretrained_vectors.vectors.get_stoi()\n",
-    "    \n",
-    "    unk_tokens = []\n",
-    "    \n",
-    "    for idx, token in enumerate(vocab.itos):\n",
-    "        if token in pretrained_vocab:\n",
-    "            pretrained_vector = pretrained_vectors[token]\n",
-    "            pretrained_embedding[idx] = pretrained_vector\n",
-    "        else:\n",
-    "            unk_tokens.append(token)\n",
-    "        \n",
-    "    return pretrained_embedding, unk_tokens"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 24,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "sGyV94f7qvdr"
-   },
-   "outputs": [],
-   "source": [
-    "unk_token = '<unk>'\n",
-    "\n",
-    "pretrained_embedding, unk_tokens = get_pretrained_embedding(model.embedding, glove, vocab, unk_token)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 25,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 139
-    },
-    "colab_type": "code",
-    "id": "KYnGxbVisUsk",
-    "outputId": "39d1354c-9a3a-4a6e-bf4a-8595d7f4eac9"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "tensor([[-0.0220, -0.0288, -0.0422,  ...,  0.0103,  0.0218, -0.0141],\n",
-       "        [ 0.0326,  0.0222,  0.0044,  ...,  0.0249,  0.0163,  0.0052],\n",
-       "        [-0.0382, -0.2449,  0.7281,  ..., -0.1459,  0.8278,  0.2706],\n",
-       "        ...,\n",
-       "        [-0.2925,  0.1087,  0.7920,  ..., -0.3641,  0.1822, -0.4104],\n",
-       "        [-0.7250,  0.7545,  0.1637,  ..., -0.0144, -0.1761,  0.3418],\n",
-       "        [ 1.1753,  0.0460, -0.3542,  ...,  0.4510,  0.0485, -0.4015]])"
-      ]
-     },
-     "execution_count": 25,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "model.embedding.weight.data.copy_(pretrained_embedding)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 26,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "model.embedding.weight.data[pad_idx] = torch.zeros(emb_dim)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 27,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "DTwNU41WseMS"
-   },
-   "outputs": [],
-   "source": [
-    "optimizer = optim.Adam(model.parameters())"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 28,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "Rxlx7a72s1ze"
-   },
-   "outputs": [],
-   "source": [
-    "criterion = nn.CrossEntropyLoss()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 29,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "1CLimBxus2yX"
-   },
-   "outputs": [],
-   "source": [
-    "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 30,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "108fm55ftBgO"
-   },
-   "outputs": [],
-   "source": [
-    "model = model.to(device)\n",
-    "criterion = criterion.to(device)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 31,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "IYCxbvXUvE5v"
-   },
-   "outputs": [],
-   "source": [
-    "def calculate_accuracy(predictions, labels):\n",
-    "    top_predictions = predictions.argmax(1, keepdim = True)\n",
-    "    correct = top_predictions.eq(labels.view_as(top_predictions)).sum()\n",
-    "    accuracy = correct.float() / labels.shape[0]\n",
-    "    return accuracy"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 32,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "Ik2JQo6TvGml"
-   },
-   "outputs": [],
-   "source": [
-    "def train(model, iterator, optimizer, criterion, device):\n",
-    "    \n",
-    "    epoch_loss = 0\n",
-    "    epoch_acc = 0\n",
-    "    \n",
-    "    model.train()\n",
-    "    \n",
-    "    for labels, text in iterator:\n",
-    "        \n",
-    "        labels = labels.to(device)\n",
-    "        text = text.to(device)\n",
-    "\n",
-    "        optimizer.zero_grad()\n",
-    "        \n",
-    "        predictions = model(text)\n",
-    "        \n",
-    "        loss = criterion(predictions, labels)\n",
-    "        \n",
-    "        acc = calculate_accuracy(predictions, labels)\n",
-    "        \n",
-    "        loss.backward()\n",
-    "        \n",
-    "        optimizer.step()\n",
-    "        \n",
-    "        epoch_loss += loss.item()\n",
-    "        epoch_acc += acc.item()\n",
-    "\n",
-    "    return epoch_loss / len(iterator), epoch_acc / len(iterator)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 33,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "aGy1Zk6jvIf8"
-   },
-   "outputs": [],
-   "source": [
-    "def evaluate(model, iterator, criterion, device):\n",
-    "    \n",
-    "    epoch_loss = 0\n",
-    "    epoch_acc = 0\n",
-    "    \n",
-    "    model.eval()\n",
-    "    \n",
-    "    with torch.no_grad():\n",
-    "    \n",
-    "        for labels, text in iterator:\n",
-    "\n",
-    "            labels = labels.to(device)\n",
-    "            text = text.to(device)\n",
-    "            \n",
-    "            predictions = model(text)\n",
-    "            \n",
-    "            loss = criterion(predictions, labels)\n",
-    "            \n",
-    "            acc = calculate_accuracy(predictions, labels)\n",
-    "\n",
-    "            epoch_loss += loss.item()\n",
-    "            epoch_acc += acc.item()\n",
-    "        \n",
-    "    return epoch_loss / len(iterator), epoch_acc / len(iterator)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 34,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "9MyMRRzbvKPx"
-   },
-   "outputs": [],
-   "source": [
-    "def epoch_time(start_time, end_time):\n",
-    "    elapsed_time = end_time - start_time\n",
-    "    elapsed_mins = int(elapsed_time / 60)\n",
-    "    elapsed_secs = int(elapsed_time - (elapsed_mins * 60))\n",
-    "    return elapsed_mins, elapsed_secs"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 35,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 537
-    },
-    "colab_type": "code",
-    "id": "dRKwD51WvMa3",
-    "outputId": "935b7d4b-c396-42d8-8041-802ec9575cd6"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Epoch: 01 | Epoch Time: 0m 9s\n",
-      "\tTrain Loss: 1.370 | Train Acc: 53.26%\n",
-      "\t Val. Loss: 0.588 |  Val. Acc: 69.31%\n",
-      "Epoch: 02 | Epoch Time: 0m 9s\n",
-      "\tTrain Loss: 0.796 | Train Acc: 60.77%\n",
-      "\t Val. Loss: 0.562 |  Val. Acc: 73.82%\n",
-      "Epoch: 03 | Epoch Time: 0m 9s\n",
-      "\tTrain Loss: 0.620 | Train Acc: 67.86%\n",
-      "\t Val. Loss: 0.523 |  Val. Acc: 78.67%\n",
-      "Epoch: 04 | Epoch Time: 0m 9s\n",
-      "\tTrain Loss: 0.523 | Train Acc: 74.40%\n",
-      "\t Val. Loss: 0.459 |  Val. Acc: 81.48%\n",
-      "Epoch: 05 | Epoch Time: 0m 9s\n",
-      "\tTrain Loss: 0.459 | Train Acc: 78.51%\n",
-      "\t Val. Loss: 0.416 |  Val. Acc: 83.35%\n",
-      "Epoch: 06 | Epoch Time: 0m 9s\n",
-      "\tTrain Loss: 0.412 | Train Acc: 81.52%\n",
-      "\t Val. Loss: 0.381 |  Val. Acc: 84.52%\n",
-      "Epoch: 07 | Epoch Time: 0m 9s\n",
-      "\tTrain Loss: 0.374 | Train Acc: 83.71%\n",
-      "\t Val. Loss: 0.369 |  Val. Acc: 84.95%\n",
-      "Epoch: 08 | Epoch Time: 0m 9s\n",
-      "\tTrain Loss: 0.356 | Train Acc: 84.29%\n",
-      "\t Val. Loss: 0.356 |  Val. Acc: 85.49%\n",
-      "Epoch: 09 | Epoch Time: 0m 9s\n",
-      "\tTrain Loss: 0.339 | Train Acc: 85.20%\n",
-      "\t Val. Loss: 0.344 |  Val. Acc: 85.92%\n",
-      "Epoch: 10 | Epoch Time: 0m 9s\n",
-      "\tTrain Loss: 0.318 | Train Acc: 86.43%\n",
-      "\t Val. Loss: 0.334 |  Val. Acc: 86.28%\n"
-     ]
-    }
-   ],
-   "source": [
-    "n_epochs = 10\n",
-    "\n",
-    "best_valid_loss = float('inf')\n",
-    "\n",
-    "for epoch in range(n_epochs):\n",
-    "\n",
-    "    start_time = time.monotonic()\n",
-    "    \n",
-    "    train_loss, train_acc = train(model, train_iterator, optimizer, criterion, device)\n",
-    "    valid_loss, valid_acc = evaluate(model, valid_iterator, criterion, device)\n",
-    "    \n",
-    "    end_time = time.monotonic()\n",
-    "\n",
-    "    epoch_mins, epoch_secs = epoch_time(start_time, end_time)\n",
-    "    \n",
-    "    if valid_loss < best_valid_loss:\n",
-    "        best_valid_loss = valid_loss\n",
-    "        torch.save(model.state_dict(), 'cnn-model.pt')\n",
-    "    \n",
-    "    print(f'Epoch: {epoch+1:02} | Epoch Time: {epoch_mins}m {epoch_secs}s')\n",
-    "    print(f'\\tTrain Loss: {train_loss:.3f} | Train Acc: {train_acc*100:.2f}%')\n",
-    "    print(f'\\t Val. Loss: {valid_loss:.3f} |  Val. Acc: {valid_acc*100:.2f}%')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 36,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "hKOg4oARvPHJ",
-    "outputId": "b5552b10-fcca-4c29-8d4b-4f5688ef53dd"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Test Loss: 0.338 | Test Acc: 85.99%\n"
-     ]
-    }
-   ],
-   "source": [
-    "model.load_state_dict(torch.load('cnn-model.pt'))\n",
-    "\n",
-    "test_loss, test_acc = evaluate(model, test_iterator, criterion, device)\n",
-    "\n",
-    "print(f'Test Loss: {test_loss:.3f} | Test Acc: {test_acc*100:.2f}%')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 37,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "tQ4Jsf_vvWgB"
-   },
-   "outputs": [],
-   "source": [
-    "def predict_sentiment(tokenizer, vocab, model, device, sentence):\n",
-    "    model.eval()\n",
-    "    tokens = tokenizer.tokenize(sentence)\n",
-    "    indexes = [vocab.stoi[token] for token in tokens]\n",
-    "    tensor = torch.LongTensor(indexes).unsqueeze(0).to(device)\n",
-    "    prediction = model(tensor)\n",
-    "    probabilities = nn.functional.softmax(prediction, dim = -1)\n",
-    "    pos_probability = probabilities.squeeze()[-1].item()\n",
-    "    return pos_probability"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 38,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "Yy7_6rhovZTE",
-    "outputId": "4297c903-8ef3-4c94-8a9e-21fbb98a6be9"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "0.08827298134565353"
-      ]
-     },
-     "execution_count": 38,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "sentence = 'the absolute worst movie of all time.'\n",
-    "\n",
-    "predict_sentiment(tokenizer, vocab, model, device, sentence)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 39,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "L3LmQxrgvau9",
-    "outputId": "afee78c4-6c74-4900-dd3b-53ad1c1b7b26"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "0.6329940557479858"
-      ]
-     },
-     "execution_count": 39,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "sentence = 'one of the greatest films i have ever seen in my life.'\n",
-    "\n",
-    "predict_sentiment(tokenizer, vocab, model, device, sentence)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 40,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "t7Qoy21Bvb7v",
-    "outputId": "d85a8a1b-b4dc-4aea-e58e-2597087b46c2"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "0.060872383415699005"
-      ]
-     },
-     "execution_count": 40,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "sentence = \"i thought it was going to be one of the greatest films i have ever seen in my life, \\\n",
-    "but it was actually the absolute worst movie of all time.\"\n",
-    "\n",
-    "predict_sentiment(tokenizer, vocab, model, device, sentence)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 41,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "EPGXBr18vdQT",
-    "outputId": "1b28c7d1-9e12-462f-d9ac-2b4876b3b6b4"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "0.07820437103509903"
-      ]
-     },
-     "execution_count": 41,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "sentence = \"i thought it was going to be the absolute worst movie of all time, \\\n",
-    "but it was actually one of the greatest films i have ever seen in my life.\"\n",
-    "\n",
-    "predict_sentiment(tokenizer, vocab, model, device, sentence)"
-   ]
-  }
- ],
- "metadata": {
-  "accelerator": "GPU",
-  "colab": {
-   "machine_shape": "hm",
-   "name": "scratchpad",
-   "provenance": []
-  },
-  "kernelspec": {
-   "display_name": "Python 3",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.8.3"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 1
-}
diff --git a/experimental/5_transformer.ipynb b/experimental/5_transformer.ipynb
deleted file mode 100644
index 85b635a..0000000
--- a/experimental/5_transformer.ipynb
+++ /dev/null
@@ -1,1601 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 228
-    },
-    "colab_type": "code",
-    "id": "lIYdn1woOS1n",
-    "outputId": "0e6cb755-1903-417a-8d37-a33ccb613ed6"
-   },
-   "outputs": [],
-   "source": [
-    "import torch\n",
-    "import torch.nn as nn\n",
-    "import torch.nn.functional as F\n",
-    "import torch.optim as optim\n",
-    "\n",
-    "import torchtext\n",
-    "import torchtext.experimental\n",
-    "import torchtext.experimental.vectors\n",
-    "from torchtext.experimental.datasets.raw.text_classification import RawTextIterableDataset\n",
-    "from torchtext.experimental.datasets.text_classification import TextClassificationDataset\n",
-    "from torchtext.experimental.functional import sequential_transforms, vocab_func, totensor\n",
-    "\n",
-    "import matplotlib.pyplot as plt\n",
-    "\n",
-    "import collections\n",
-    "import copy\n",
-    "import random\n",
-    "import time"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "7w0Fqtf5bosU"
-   },
-   "outputs": [],
-   "source": [
-    "seed = 1234\n",
-    "\n",
-    "torch.manual_seed(seed)\n",
-    "random.seed(seed)\n",
-    "torch.backends.cudnn.deterministic = True\n",
-    "torch.backends.cudnn.benchmark = False"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "COPd0_YIbp6G"
-   },
-   "outputs": [],
-   "source": [
-    "raw_train_data, raw_test_data = torchtext.experimental.datasets.raw.IMDB()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "raw_train_data = list(raw_train_data)\n",
-    "raw_test_data = list(raw_test_data)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "sqImRLskbrAd"
-   },
-   "outputs": [],
-   "source": [
-    "def get_train_valid_split(raw_train_data, split_ratio = 0.7):\n",
-    "        \n",
-    "    random.shuffle(raw_train_data)\n",
-    "        \n",
-    "    n_train_examples = int(len(raw_train_data) * split_ratio)\n",
-    "        \n",
-    "    train_data = raw_train_data[:n_train_examples]\n",
-    "    valid_data = raw_train_data[n_train_examples:]\n",
-    "    \n",
-    "    return train_data, valid_data"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "YgKzkSjibsCh"
-   },
-   "outputs": [],
-   "source": [
-    "raw_train_data, raw_valid_data = get_train_valid_split(raw_train_data)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "Yh1olQ-EbtLT"
-   },
-   "outputs": [],
-   "source": [
-    "class Tokenizer:\n",
-    "    def __init__(self, tokenize_fn = 'basic_english', lower = True, max_length = None, sos_token = None):\n",
-    "        \n",
-    "        self.tokenize_fn = torchtext.data.utils.get_tokenizer(tokenize_fn)\n",
-    "        self.lower = lower\n",
-    "        self.max_length = max_length\n",
-    "        self.sos_token = sos_token\n",
-    "        \n",
-    "    def tokenize(self, s):\n",
-    "        \n",
-    "        tokens = self.tokenize_fn(s)\n",
-    "        \n",
-    "        if self.lower:\n",
-    "            tokens = [token.lower() for token in tokens]\n",
-    "            \n",
-    "        if self.sos_token is not None:\n",
-    "            tokens = [self.sos_token] + tokens\n",
-    "\n",
-    "        if self.max_length is not None:\n",
-    "            tokens = tokens[:self.max_length]\n",
-    "            \n",
-    "        return tokens"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 8,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "hEKZjV-PbuMk"
-   },
-   "outputs": [],
-   "source": [
-    "max_length = 250\n",
-    "sos_token = '<sos>'\n",
-    "\n",
-    "tokenizer = Tokenizer(max_length = max_length, sos_token = sos_token)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "YMpskDMRoBOT",
-    "outputId": "68995c90-d409-432a-9ed3-4614244b23bc"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "['<sos>', 'hello', 'world', ',', 'how', 'are', 'you', '?']"
-      ]
-     },
-     "execution_count": 9,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "s = \"hello world, how are you?\"\n",
-    "\n",
-    "tokenizer.tokenize(s)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 10,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "DQx6B6JQbvSU"
-   },
-   "outputs": [],
-   "source": [
-    "def build_vocab_from_data(raw_data, tokenizer, **vocab_kwargs):\n",
-    "        \n",
-    "    token_freqs = collections.Counter()\n",
-    "    \n",
-    "    for label, text in raw_data:\n",
-    "        tokens = tokenizer.tokenize(text)\n",
-    "        token_freqs.update(tokens)\n",
-    "                \n",
-    "    vocab = torchtext.vocab.Vocab(token_freqs, **vocab_kwargs)\n",
-    "    \n",
-    "    return vocab"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 11,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "unWu0DuZbwVB"
-   },
-   "outputs": [],
-   "source": [
-    "max_size = 25_000\n",
-    "\n",
-    "vocab = build_vocab_from_data(raw_train_data, \n",
-    "                              tokenizer, \n",
-    "                              max_size = max_size)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 12,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Unique tokens in vocab: 25,002\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(f'Unique tokens in vocab: {len(vocab):,}')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 13,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "zOi2KdeebxZP"
-   },
-   "outputs": [],
-   "source": [
-    "def raw_data_to_dataset(raw_data, tokenizer, vocab):\n",
-    "        \n",
-    "    text_transform = sequential_transforms(tokenizer.tokenize,\n",
-    "                                           vocab_func(vocab),\n",
-    "                                           totensor(dtype=torch.long))\n",
-    "    \n",
-    "    label_transform = sequential_transforms(lambda x: 1 if x == 'pos' else 0, \n",
-    "                                            totensor(dtype=torch.long))\n",
-    "\n",
-    "    transforms = (label_transform, text_transform)\n",
-    "\n",
-    "    dataset = TextClassificationDataset(raw_data,\n",
-    "                                        vocab,\n",
-    "                                        transforms)\n",
-    "    \n",
-    "    return dataset"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 14,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "zSodj_hBbzVk"
-   },
-   "outputs": [],
-   "source": [
-    "train_data = raw_data_to_dataset(raw_train_data, tokenizer, vocab)\n",
-    "valid_data = raw_data_to_dataset(raw_valid_data, tokenizer, vocab)\n",
-    "test_data = raw_data_to_dataset(raw_test_data, tokenizer, vocab)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 15,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Number of training examples: 17,500\n",
-      "Number of validation examples: 7,500\n",
-      "Number of testing examples: 25,000\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(f'Number of training examples: {len(train_data):,}')\n",
-    "print(f'Number of validation examples: {len(valid_data):,}')\n",
-    "print(f'Number of testing examples: {len(test_data):,}')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 16,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "c_KIBjr4b2ks"
-   },
-   "outputs": [],
-   "source": [
-    "class Collator:\n",
-    "    def __init__(self, pad_idx, batch_first):\n",
-    "        \n",
-    "        self.pad_idx = pad_idx\n",
-    "        self.batch_first = batch_first\n",
-    "        \n",
-    "    def collate(self, batch):\n",
-    "        \n",
-    "        labels, text = zip(*batch)\n",
-    "        \n",
-    "        labels = torch.LongTensor(labels)\n",
-    "\n",
-    "        text = nn.utils.rnn.pad_sequence(text, \n",
-    "                                         padding_value = self.pad_idx,\n",
-    "                                         batch_first = self.batch_first)\n",
-    "        \n",
-    "        return labels, text"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 17,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "i6j7fcFYb354"
-   },
-   "outputs": [],
-   "source": [
-    "pad_token = '<pad>'\n",
-    "pad_idx = vocab[pad_token]\n",
-    "batch_first = False\n",
-    "\n",
-    "collator = Collator(pad_idx, batch_first)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 18,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "FCyc9S85b5jd"
-   },
-   "outputs": [],
-   "source": [
-    "batch_size = 256\n",
-    "\n",
-    "train_iterator = torch.utils.data.DataLoader(train_data, \n",
-    "                                             batch_size, \n",
-    "                                             shuffle = True, \n",
-    "                                             collate_fn = collator.collate)\n",
-    "\n",
-    "valid_iterator = torch.utils.data.DataLoader(valid_data, \n",
-    "                                             batch_size, \n",
-    "                                             shuffle = False, \n",
-    "                                             collate_fn = collator.collate)\n",
-    "\n",
-    "test_iterator = torch.utils.data.DataLoader(test_data, \n",
-    "                                            batch_size, \n",
-    "                                            shuffle = False, \n",
-    "                                            collate_fn = collator.collate)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 19,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "ajddJOkZb7xe"
-   },
-   "outputs": [],
-   "source": [
-    "class DefaultTransformer(nn.Module):\n",
-    "    def __init__(self, input_dim, emb_dim, n_heads, hid_dim, n_layers, output_dim, dropout, max_length, pad_idx):\n",
-    "        super().__init__()\n",
-    "\n",
-    "        self.tok_embedding = nn.Embedding(input_dim, emb_dim, padding_idx = pad_idx)\n",
-    "        self.pos_embedding = nn.Embedding(max_length, emb_dim)\n",
-    "        self.layer_norm = nn.LayerNorm(emb_dim)\n",
-    "        transformer_layer = nn.TransformerEncoderLayer(emb_dim, n_heads, hid_dim, activation = 'gelu')\n",
-    "        norm = nn.LayerNorm(emb_dim)\n",
-    "        self.transformer = nn.TransformerEncoder(transformer_layer, n_layers, norm)\n",
-    "        self.fc = nn.Linear(emb_dim, output_dim)\n",
-    "        self.dropout = nn.Dropout(dropout)\n",
-    "\n",
-    "    def forward(self, text):\n",
-    "\n",
-    "        # text = [seq len, batch size]\n",
-    "\n",
-    "        seq_len, batch_size = text.shape\n",
-    "        \n",
-    "        pos = torch.arange(0, seq_len).unsqueeze(-1).repeat(1, batch_size).to(text.device)\n",
-    "        \n",
-    "        # pos = [seq len, batch size]\n",
-    "        \n",
-    "        embedded_pos = self.pos_embedding(pos)\n",
-    "        embedded_tok = self.tok_embedding(text)\n",
-    "        \n",
-    "        embedded = self.dropout(embedded_pos + embedded_tok)\n",
-    "\n",
-    "        # embedded = [seq len, batch size, emb dim]\n",
-    "\n",
-    "        emedded = self.layer_norm(embedded)\n",
-    "        \n",
-    "        # embedded = [seq len, batch size, emb dim]\n",
-    "        \n",
-    "        transformed = self.transformer(embedded)\n",
-    "\n",
-    "        # transformed = [seq len, batch size, emb dim]\n",
-    "\n",
-    "        sos_transformed = transformed[0]\n",
-    "\n",
-    "        # sos_transformed = [batch size, emb dim]\n",
-    "\n",
-    "        prediction = self.fc(self.dropout(sos_transformed))\n",
-    "\n",
-    "        # prediction = [batch size, output dim]\n",
-    "\n",
-    "        return prediction"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 20,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "class TransformerEncoder(nn.Module):\n",
-    "    def __init__(self, encoder_layer, n_layers, norm = None):\n",
-    "        super().__init__()\n",
-    "        \n",
-    "        self.encoder_layers = nn.ModuleList([copy.deepcopy(encoder_layer) \n",
-    "                                             for _ in range(n_layers)])\n",
-    "        self.norm = norm\n",
-    "        \n",
-    "    def forward(self, embedded):\n",
-    "        \n",
-    "        # embedded = [seq len, batch size, emb dim]\n",
-    "        \n",
-    "        for layer in self.encoder_layers:\n",
-    "            embedded, attention = layer(embedded)\n",
-    "            \n",
-    "        # embedded = [seq len, batch size, emb dim]\n",
-    "            \n",
-    "        if self.norm is not None:\n",
-    "            embedded = self.norm(embedded)\n",
-    "        \n",
-    "        # embedded = [seq len, batch size, emb dim]\n",
-    "        \n",
-    "        return embedded, attention"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 21,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "class TransformerEncoderLayer(nn.Module):\n",
-    "    def __init__(self, emb_dim, ff_layer, attention_layer, dropout):\n",
-    "        super().__init__()\n",
-    "        \n",
-    "        self.attention_layer = attention_layer\n",
-    "        self.ff_layer = ff_layer\n",
-    "        self.layer_norm_1 = nn.LayerNorm(emb_dim)\n",
-    "        self.layer_norm_2 = nn.LayerNorm(emb_dim)\n",
-    "        self.dropout = nn.Dropout(dropout)\n",
-    "        \n",
-    "    def forward(self, embedded):\n",
-    "        \n",
-    "        _embedded, attention = self.attention_layer(embedded, embedded, embedded)\n",
-    "        embedded = embedded + self.dropout(_embedded)\n",
-    "        embedded = self.layer_norm_1(embedded)\n",
-    "        _embedded = self.ff_layer(embedded)\n",
-    "        embedded = embedded + self.dropout(_embedded)\n",
-    "        embedded = self.layer_norm_2(embedded)\n",
-    "        \n",
-    "        return embedded, attention"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 22,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "class MultiHeadAttentionLayer(nn.Module):\n",
-    "    def __init__(self, emb_dim, n_heads, dropout):\n",
-    "        super().__init__()\n",
-    "        \n",
-    "        assert emb_dim % n_heads == 0\n",
-    "        \n",
-    "        self.n_heads = n_heads\n",
-    "        self.head_dim = emb_dim // n_heads\n",
-    "        \n",
-    "        self.fc_q = nn.Linear(emb_dim, emb_dim)\n",
-    "        self.fc_k = nn.Linear(emb_dim, emb_dim)\n",
-    "        self.fc_v = nn.Linear(emb_dim, emb_dim)\n",
-    "        self.fc_o = nn.Linear(emb_dim, emb_dim)\n",
-    "        self.dropout = nn.Dropout(dropout)\n",
-    "        \n",
-    "        self.scale = self.head_dim ** -0.5\n",
-    "        \n",
-    "    def forward(self, query, key, value):\n",
-    "        \n",
-    "        # query/key/value = [seq len, batch size, emb dim]\n",
-    "        \n",
-    "        seq_len, batch_size, emb_dim = query.shape\n",
-    "        \n",
-    "        Q = self.fc_q(query)\n",
-    "        K = self.fc_k(key)\n",
-    "        V = self.fc_v(value)\n",
-    "        \n",
-    "        # Q/K/V = [seq len, batch size, emb dim]\n",
-    "        \n",
-    "        Q = Q.view(seq_len, batch_size, self.n_heads, self.head_dim)\n",
-    "        K = K.view(seq_len, batch_size, self.n_heads, self.head_dim)\n",
-    "        V = V.view(seq_len, batch_size, self.n_heads, self.head_dim)\n",
-    "        \n",
-    "        # Q/K/V = [seq len, batch size, n heads, head dim]\n",
-    "        \n",
-    "        Q = Q.permute(1, 2, 0, 3)\n",
-    "        K = K.permute(1, 2, 0, 3)\n",
-    "        V = V.permute(1, 2, 0, 3)\n",
-    "        \n",
-    "        # Q/K/V = [batch size, n heads, seq len, head dim]\n",
-    "        \n",
-    "        energy = torch.matmul(Q, K.permute(0, 1, 3, 2)) * self.scale\n",
-    "        \n",
-    "        # energy = [batch size, n heads, seq len, seq len]\n",
-    "        \n",
-    "        attention = torch.softmax(energy, dim = -1)\n",
-    "        \n",
-    "        # attention = [batch size, n heads, seq len, seq len]\n",
-    "        \n",
-    "        x = torch.matmul(self.dropout(attention), V)\n",
-    "        \n",
-    "        # x = [batch size, n heads, seq len, head dim]\n",
-    "        \n",
-    "        x = x.permute(2, 0, 1, 3).contiguous()\n",
-    "        \n",
-    "        # x = [seq len, batch size, n heads, head dim]\n",
-    "        \n",
-    "        x = x.view(seq_len, batch_size, emb_dim)\n",
-    "        \n",
-    "        # x = [seq len, batch size, emb dim]\n",
-    "        \n",
-    "        x = self.fc_o(x)\n",
-    "        \n",
-    "        # x = [seq len, batch size, emb dim]\n",
-    "        \n",
-    "        return x, attention"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 23,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "class FeedForwardLayer(nn.Module):\n",
-    "    def __init__(self, emb_dim, hid_dim, activation, dropout):\n",
-    "        super().__init__()\n",
-    "        \n",
-    "        self.fc_1 = nn.Linear(emb_dim, hid_dim)\n",
-    "        self.fc_2 = nn.Linear(hid_dim, emb_dim)\n",
-    "        self.activation = activation\n",
-    "        self.dropout = nn.Dropout(dropout)\n",
-    "        \n",
-    "    def forward(self, embedded):\n",
-    "        \n",
-    "        # embedded = [seq len, batch size, emb dim]\n",
-    "        \n",
-    "        embedded = self.dropout(self.activation(self.fc_1(embedded)))\n",
-    "        \n",
-    "        # embedded = [seq len, batch size, hid dim]\n",
-    "        \n",
-    "        embedded = self.fc_2(embedded)\n",
-    "        \n",
-    "        # embedded = [seq len, batch size, emb dim]\n",
-    "        \n",
-    "        return embedded"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 24,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "class Transformer(nn.Module):\n",
-    "    def __init__(self, input_dim, emb_dim, encoder, output_dim, dropout, max_length, pad_idx):\n",
-    "        super().__init__()\n",
-    "\n",
-    "        self.tok_embedding = nn.Embedding(input_dim, emb_dim, padding_idx = pad_idx)\n",
-    "        self.pos_embedding = nn.Embedding(max_length, emb_dim)\n",
-    "        self.layer_norm = nn.LayerNorm(emb_dim)\n",
-    "        self.encoder = encoder\n",
-    "        self.fc = nn.Linear(emb_dim, output_dim)\n",
-    "        self.dropout = nn.Dropout(dropout)\n",
-    "\n",
-    "    def forward(self, text):\n",
-    "\n",
-    "        # text = [seq len, batch size]\n",
-    "\n",
-    "        seq_len, batch_size = text.shape\n",
-    "        \n",
-    "        pos = torch.arange(0, seq_len).unsqueeze(-1).repeat(1, batch_size).to(text.device)\n",
-    "        \n",
-    "        # pos = [seq len, batch size]\n",
-    "        \n",
-    "        embedded_pos = self.pos_embedding(pos)\n",
-    "        embedded_tok = self.tok_embedding(text)\n",
-    "        \n",
-    "        embedded = self.dropout(embedded_pos + embedded_tok)\n",
-    "\n",
-    "        # embedded = [seq len, batch size, emb dim]\n",
-    "\n",
-    "        emedded = self.layer_norm(embedded)\n",
-    "        \n",
-    "        # embedded = [seq len, batch size, emb dim]\n",
-    "        \n",
-    "        encoded, attention = self.encoder(embedded)\n",
-    "\n",
-    "        # encoded = [seq len, batch size, emb dim]\n",
-    "\n",
-    "        sos_encoded = encoded[0]\n",
-    "\n",
-    "        # sos_encoded = [batch size, emb dim]\n",
-    "\n",
-    "        prediction = self.fc(self.dropout(sos_encoded))\n",
-    "\n",
-    "        # prediction = [batch size, output dim]\n",
-    "\n",
-    "        return prediction, attention"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 25,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "input_dim = len(vocab)\n",
-    "emb_dim = 100\n",
-    "n_heads = 10\n",
-    "hid_dim = 1024\n",
-    "n_layers = 3\n",
-    "output_dim = 2\n",
-    "dropout = 0.1\n",
-    "pad_idx = pad_idx\n",
-    "ff_layer_activation = F.gelu\n",
-    "encoder_norm = nn.LayerNorm(emb_dim)\n",
-    "\n",
-    "ff_layer = FeedForwardLayer(emb_dim, hid_dim, ff_layer_activation, dropout)\n",
-    "attention_layer = MultiHeadAttentionLayer(emb_dim, n_heads, dropout)\n",
-    "transformer_layer = TransformerEncoderLayer(emb_dim, ff_layer, attention_layer, dropout)\n",
-    "transformer_encoder = TransformerEncoder(transformer_layer, n_layers, encoder_norm)\n",
-    "model = Transformer(input_dim, emb_dim, transformer_encoder, output_dim, dropout, max_length, pad_idx)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 26,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "Kubq6Vkjfec_"
-   },
-   "outputs": [],
-   "source": [
-    "def count_parameters(model):\n",
-    "    return sum(p.numel() for p in model.parameters() if p.requires_grad)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 27,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "TPaxqpJMf0-6",
-    "outputId": "4db6b90c-b273-4c94-c12d-4f68be8e1477"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "The model has 3,265,974 trainable parameters\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(f'The model has {count_parameters(model):,} trainable parameters')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 28,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "name: tok_embedding.weight, shape: torch.Size([25002, 100])\n",
-      "name: pos_embedding.weight, shape: torch.Size([250, 100])\n",
-      "name: layer_norm.weight, shape: torch.Size([100])\n",
-      "name: layer_norm.bias, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.0.attention_layer.fc_q.weight, shape: torch.Size([100, 100])\n",
-      "name: encoder.encoder_layers.0.attention_layer.fc_q.bias, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.0.attention_layer.fc_k.weight, shape: torch.Size([100, 100])\n",
-      "name: encoder.encoder_layers.0.attention_layer.fc_k.bias, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.0.attention_layer.fc_v.weight, shape: torch.Size([100, 100])\n",
-      "name: encoder.encoder_layers.0.attention_layer.fc_v.bias, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.0.attention_layer.fc_o.weight, shape: torch.Size([100, 100])\n",
-      "name: encoder.encoder_layers.0.attention_layer.fc_o.bias, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.0.ff_layer.fc_1.weight, shape: torch.Size([1024, 100])\n",
-      "name: encoder.encoder_layers.0.ff_layer.fc_1.bias, shape: torch.Size([1024])\n",
-      "name: encoder.encoder_layers.0.ff_layer.fc_2.weight, shape: torch.Size([100, 1024])\n",
-      "name: encoder.encoder_layers.0.ff_layer.fc_2.bias, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.0.layer_norm_1.weight, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.0.layer_norm_1.bias, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.0.layer_norm_2.weight, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.0.layer_norm_2.bias, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.1.attention_layer.fc_q.weight, shape: torch.Size([100, 100])\n",
-      "name: encoder.encoder_layers.1.attention_layer.fc_q.bias, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.1.attention_layer.fc_k.weight, shape: torch.Size([100, 100])\n",
-      "name: encoder.encoder_layers.1.attention_layer.fc_k.bias, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.1.attention_layer.fc_v.weight, shape: torch.Size([100, 100])\n",
-      "name: encoder.encoder_layers.1.attention_layer.fc_v.bias, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.1.attention_layer.fc_o.weight, shape: torch.Size([100, 100])\n",
-      "name: encoder.encoder_layers.1.attention_layer.fc_o.bias, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.1.ff_layer.fc_1.weight, shape: torch.Size([1024, 100])\n",
-      "name: encoder.encoder_layers.1.ff_layer.fc_1.bias, shape: torch.Size([1024])\n",
-      "name: encoder.encoder_layers.1.ff_layer.fc_2.weight, shape: torch.Size([100, 1024])\n",
-      "name: encoder.encoder_layers.1.ff_layer.fc_2.bias, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.1.layer_norm_1.weight, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.1.layer_norm_1.bias, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.1.layer_norm_2.weight, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.1.layer_norm_2.bias, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.2.attention_layer.fc_q.weight, shape: torch.Size([100, 100])\n",
-      "name: encoder.encoder_layers.2.attention_layer.fc_q.bias, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.2.attention_layer.fc_k.weight, shape: torch.Size([100, 100])\n",
-      "name: encoder.encoder_layers.2.attention_layer.fc_k.bias, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.2.attention_layer.fc_v.weight, shape: torch.Size([100, 100])\n",
-      "name: encoder.encoder_layers.2.attention_layer.fc_v.bias, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.2.attention_layer.fc_o.weight, shape: torch.Size([100, 100])\n",
-      "name: encoder.encoder_layers.2.attention_layer.fc_o.bias, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.2.ff_layer.fc_1.weight, shape: torch.Size([1024, 100])\n",
-      "name: encoder.encoder_layers.2.ff_layer.fc_1.bias, shape: torch.Size([1024])\n",
-      "name: encoder.encoder_layers.2.ff_layer.fc_2.weight, shape: torch.Size([100, 1024])\n",
-      "name: encoder.encoder_layers.2.ff_layer.fc_2.bias, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.2.layer_norm_1.weight, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.2.layer_norm_1.bias, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.2.layer_norm_2.weight, shape: torch.Size([100])\n",
-      "name: encoder.encoder_layers.2.layer_norm_2.bias, shape: torch.Size([100])\n",
-      "name: encoder.norm.weight, shape: torch.Size([100])\n",
-      "name: encoder.norm.bias, shape: torch.Size([100])\n",
-      "name: fc.weight, shape: torch.Size([2, 100])\n",
-      "name: fc.bias, shape: torch.Size([2])\n"
-     ]
-    }
-   ],
-   "source": [
-    "for n, p in model.named_parameters():\n",
-    "    print(f'name: {n}, shape: {p.shape}')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 29,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "def initialize_parameters(m):\n",
-    "    if isinstance(m, nn.Embedding):\n",
-    "        nn.init.normal_(m.weight, std = 0.02)\n",
-    "    elif isinstance(m, nn.Linear):\n",
-    "        nn.init.normal_(m.weight, std = 0.02)\n",
-    "        nn.init.zeros_(m.bias)\n",
-    "    elif isinstance(m, nn.LayerNorm):\n",
-    "        nn.init.ones_(m.weight)\n",
-    "        nn.init.zeros_(m.bias)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 30,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "Transformer(\n",
-       "  (tok_embedding): Embedding(25002, 100, padding_idx=1)\n",
-       "  (pos_embedding): Embedding(250, 100)\n",
-       "  (layer_norm): LayerNorm((100,), eps=1e-05, elementwise_affine=True)\n",
-       "  (encoder): TransformerEncoder(\n",
-       "    (encoder_layers): ModuleList(\n",
-       "      (0): TransformerEncoderLayer(\n",
-       "        (attention_layer): MultiHeadAttentionLayer(\n",
-       "          (fc_q): Linear(in_features=100, out_features=100, bias=True)\n",
-       "          (fc_k): Linear(in_features=100, out_features=100, bias=True)\n",
-       "          (fc_v): Linear(in_features=100, out_features=100, bias=True)\n",
-       "          (fc_o): Linear(in_features=100, out_features=100, bias=True)\n",
-       "          (dropout): Dropout(p=0.1, inplace=False)\n",
-       "        )\n",
-       "        (ff_layer): FeedForwardLayer(\n",
-       "          (fc_1): Linear(in_features=100, out_features=1024, bias=True)\n",
-       "          (fc_2): Linear(in_features=1024, out_features=100, bias=True)\n",
-       "          (dropout): Dropout(p=0.1, inplace=False)\n",
-       "        )\n",
-       "        (layer_norm_1): LayerNorm((100,), eps=1e-05, elementwise_affine=True)\n",
-       "        (layer_norm_2): LayerNorm((100,), eps=1e-05, elementwise_affine=True)\n",
-       "        (dropout): Dropout(p=0.1, inplace=False)\n",
-       "      )\n",
-       "      (1): TransformerEncoderLayer(\n",
-       "        (attention_layer): MultiHeadAttentionLayer(\n",
-       "          (fc_q): Linear(in_features=100, out_features=100, bias=True)\n",
-       "          (fc_k): Linear(in_features=100, out_features=100, bias=True)\n",
-       "          (fc_v): Linear(in_features=100, out_features=100, bias=True)\n",
-       "          (fc_o): Linear(in_features=100, out_features=100, bias=True)\n",
-       "          (dropout): Dropout(p=0.1, inplace=False)\n",
-       "        )\n",
-       "        (ff_layer): FeedForwardLayer(\n",
-       "          (fc_1): Linear(in_features=100, out_features=1024, bias=True)\n",
-       "          (fc_2): Linear(in_features=1024, out_features=100, bias=True)\n",
-       "          (dropout): Dropout(p=0.1, inplace=False)\n",
-       "        )\n",
-       "        (layer_norm_1): LayerNorm((100,), eps=1e-05, elementwise_affine=True)\n",
-       "        (layer_norm_2): LayerNorm((100,), eps=1e-05, elementwise_affine=True)\n",
-       "        (dropout): Dropout(p=0.1, inplace=False)\n",
-       "      )\n",
-       "      (2): TransformerEncoderLayer(\n",
-       "        (attention_layer): MultiHeadAttentionLayer(\n",
-       "          (fc_q): Linear(in_features=100, out_features=100, bias=True)\n",
-       "          (fc_k): Linear(in_features=100, out_features=100, bias=True)\n",
-       "          (fc_v): Linear(in_features=100, out_features=100, bias=True)\n",
-       "          (fc_o): Linear(in_features=100, out_features=100, bias=True)\n",
-       "          (dropout): Dropout(p=0.1, inplace=False)\n",
-       "        )\n",
-       "        (ff_layer): FeedForwardLayer(\n",
-       "          (fc_1): Linear(in_features=100, out_features=1024, bias=True)\n",
-       "          (fc_2): Linear(in_features=1024, out_features=100, bias=True)\n",
-       "          (dropout): Dropout(p=0.1, inplace=False)\n",
-       "        )\n",
-       "        (layer_norm_1): LayerNorm((100,), eps=1e-05, elementwise_affine=True)\n",
-       "        (layer_norm_2): LayerNorm((100,), eps=1e-05, elementwise_affine=True)\n",
-       "        (dropout): Dropout(p=0.1, inplace=False)\n",
-       "      )\n",
-       "    )\n",
-       "    (norm): LayerNorm((100,), eps=1e-05, elementwise_affine=True)\n",
-       "  )\n",
-       "  (fc): Linear(in_features=100, out_features=2, bias=True)\n",
-       "  (dropout): Dropout(p=0.1, inplace=False)\n",
-       ")"
-      ]
-     },
-     "execution_count": 30,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "model.apply(initialize_parameters)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 31,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "o3ZggI59f3KY"
-   },
-   "outputs": [],
-   "source": [
-    "glove = torchtext.experimental.vectors.GloVe(name = '6B',\n",
-    "                                             dim = emb_dim)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 32,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "5BQXMqvKf41y"
-   },
-   "outputs": [],
-   "source": [
-    "def get_pretrained_embedding(initial_embedding, pretrained_vectors, vocab, unk_token):\n",
-    "    \n",
-    "    pretrained_embedding = torch.FloatTensor(initial_embedding.weight.clone()).detach()    \n",
-    "    pretrained_vocab = pretrained_vectors.vectors.get_stoi()\n",
-    "    \n",
-    "    unk_tokens = []\n",
-    "    \n",
-    "    for idx, token in enumerate(vocab.itos):\n",
-    "        if token in pretrained_vocab:\n",
-    "            pretrained_vector = pretrained_vectors[token]\n",
-    "            pretrained_embedding[idx] = pretrained_vector\n",
-    "        else:\n",
-    "            unk_tokens.append(token)\n",
-    "        \n",
-    "    return pretrained_embedding, unk_tokens"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 33,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "xsG6yriEf8dA"
-   },
-   "outputs": [],
-   "source": [
-    "unk_token = '<unk>'\n",
-    "\n",
-    "pretrained_embedding, unk_tokens = get_pretrained_embedding(model.tok_embedding, glove, vocab, unk_token)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 34,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 139
-    },
-    "colab_type": "code",
-    "id": "aZWfRQnPf99e",
-    "outputId": "b9fbce51-9abb-42a2-8e57-88f92f4d100b"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "tensor([[-0.0017, -0.0139, -0.0213,  ...,  0.0069,  0.0128, -0.0211],\n",
-       "        [ 0.0157, -0.0071, -0.0066,  ..., -0.0251,  0.0035,  0.0269],\n",
-       "        [-0.0382, -0.2449,  0.7281,  ..., -0.1459,  0.8278,  0.2706],\n",
-       "        ...,\n",
-       "        [-0.0288, -0.0316,  0.4083,  ...,  0.6288, -0.5348, -0.8080],\n",
-       "        [-0.0152,  0.0155, -0.0168,  ..., -0.0409,  0.0031,  0.0127],\n",
-       "        [-0.2612,  0.6821, -0.2295,  ..., -0.5306,  0.0863,  0.4852]])"
-      ]
-     },
-     "execution_count": 34,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "model.tok_embedding.weight.data.copy_(pretrained_embedding)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 35,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "model.tok_embedding.weight.data[pad_idx] = torch.zeros(emb_dim)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 36,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "xBmTbzpRf-5x"
-   },
-   "outputs": [],
-   "source": [
-    "optimizer = optim.Adam(model.parameters())"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 37,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "H-2Pqg5VgABR"
-   },
-   "outputs": [],
-   "source": [
-    "criterion = nn.CrossEntropyLoss()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 38,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "tj6Q8hfMgA5q"
-   },
-   "outputs": [],
-   "source": [
-    "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 39,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "HSK--Y91gB7W"
-   },
-   "outputs": [],
-   "source": [
-    "model = model.to(device)\n",
-    "criterion = criterion.to(device)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 40,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "X8m3rLRZgDGZ"
-   },
-   "outputs": [],
-   "source": [
-    "def calculate_accuracy(predictions, labels):\n",
-    "    top_predictions = predictions.argmax(1, keepdim = True)\n",
-    "    correct = top_predictions.eq(labels.view_as(top_predictions)).sum()\n",
-    "    accuracy = correct.float() / labels.shape[0]\n",
-    "    return accuracy"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 41,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "qONO5ahxgEbN"
-   },
-   "outputs": [],
-   "source": [
-    "def train(model, iterator, optimizer, criterion, device):\n",
-    "    \n",
-    "    epoch_loss = 0\n",
-    "    epoch_acc = 0\n",
-    "    \n",
-    "    model.train()\n",
-    "    \n",
-    "    for labels, text in iterator:\n",
-    "\n",
-    "        labels = labels.to(device)\n",
-    "        text = text.to(device)\n",
-    "\n",
-    "        optimizer.zero_grad()\n",
-    "        \n",
-    "        predictions, attention = model(text)\n",
-    "        \n",
-    "        loss = criterion(predictions, labels)\n",
-    "        \n",
-    "        acc = calculate_accuracy(predictions, labels)\n",
-    "        \n",
-    "        loss.backward()\n",
-    "        \n",
-    "        optimizer.step()\n",
-    "        \n",
-    "        epoch_loss += loss.item()\n",
-    "        epoch_acc += acc.item()\n",
-    "\n",
-    "    return epoch_loss / len(iterator), epoch_acc / len(iterator)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 42,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "6g9SqCLBgFh3"
-   },
-   "outputs": [],
-   "source": [
-    "def evaluate(model, iterator, criterion, device):\n",
-    "    \n",
-    "    epoch_loss = 0\n",
-    "    epoch_acc = 0\n",
-    "    \n",
-    "    model.eval()\n",
-    "    \n",
-    "    with torch.no_grad():\n",
-    "    \n",
-    "        for labels, text in iterator:\n",
-    "\n",
-    "            labels = labels.to(device)\n",
-    "            text = text.to(device)\n",
-    "            \n",
-    "            predictions, attention = model(text)\n",
-    "            \n",
-    "            loss = criterion(predictions, labels)\n",
-    "            \n",
-    "            acc = calculate_accuracy(predictions, labels)\n",
-    "\n",
-    "            epoch_loss += loss.item()\n",
-    "            epoch_acc += acc.item()\n",
-    "        \n",
-    "    return epoch_loss / len(iterator), epoch_acc / len(iterator)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 43,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "6UO5TMjqgGpT"
-   },
-   "outputs": [],
-   "source": [
-    "def epoch_time(start_time, end_time):\n",
-    "    elapsed_time = end_time - start_time\n",
-    "    elapsed_mins = int(elapsed_time / 60)\n",
-    "    elapsed_secs = int(elapsed_time - (elapsed_mins * 60))\n",
-    "    return elapsed_mins, elapsed_secs"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 44,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 537
-    },
-    "colab_type": "code",
-    "id": "XN6rcPDmgIR5",
-    "outputId": "586ca3f4-340c-4040-92b0-29f5e6d3fb54"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Epoch: 01 | Epoch Time: 0m 25s\n",
-      "\tTrain Loss: 0.604 | Train Acc: 64.21%\n",
-      "\t Val. Loss: 0.457 |  Val. Acc: 78.76%\n",
-      "Epoch: 02 | Epoch Time: 0m 25s\n",
-      "\tTrain Loss: 0.364 | Train Acc: 84.16%\n",
-      "\t Val. Loss: 0.355 |  Val. Acc: 84.73%\n",
-      "Epoch: 03 | Epoch Time: 0m 25s\n",
-      "\tTrain Loss: 0.270 | Train Acc: 89.23%\n",
-      "\t Val. Loss: 0.384 |  Val. Acc: 84.55%\n",
-      "Epoch: 04 | Epoch Time: 0m 25s\n",
-      "\tTrain Loss: 0.206 | Train Acc: 92.15%\n",
-      "\t Val. Loss: 0.355 |  Val. Acc: 86.63%\n",
-      "Epoch: 05 | Epoch Time: 0m 25s\n",
-      "\tTrain Loss: 0.150 | Train Acc: 94.58%\n",
-      "\t Val. Loss: 0.435 |  Val. Acc: 86.43%\n",
-      "Epoch: 06 | Epoch Time: 0m 25s\n",
-      "\tTrain Loss: 0.101 | Train Acc: 96.54%\n",
-      "\t Val. Loss: 0.455 |  Val. Acc: 86.67%\n",
-      "Epoch: 07 | Epoch Time: 0m 25s\n",
-      "\tTrain Loss: 0.084 | Train Acc: 97.17%\n",
-      "\t Val. Loss: 0.505 |  Val. Acc: 84.09%\n",
-      "Epoch: 08 | Epoch Time: 0m 25s\n",
-      "\tTrain Loss: 0.066 | Train Acc: 97.82%\n",
-      "\t Val. Loss: 0.508 |  Val. Acc: 86.05%\n",
-      "Epoch: 09 | Epoch Time: 0m 25s\n",
-      "\tTrain Loss: 0.041 | Train Acc: 98.78%\n",
-      "\t Val. Loss: 0.605 |  Val. Acc: 86.25%\n",
-      "Epoch: 10 | Epoch Time: 0m 25s\n",
-      "\tTrain Loss: 0.035 | Train Acc: 99.01%\n",
-      "\t Val. Loss: 0.681 |  Val. Acc: 85.79%\n"
-     ]
-    }
-   ],
-   "source": [
-    "n_epochs = 10\n",
-    "\n",
-    "best_valid_loss = float('inf')\n",
-    "\n",
-    "for epoch in range(n_epochs):\n",
-    "\n",
-    "    start_time = time.monotonic()\n",
-    "    \n",
-    "    train_loss, train_acc = train(model, train_iterator, optimizer, criterion, device)\n",
-    "    valid_loss, valid_acc = evaluate(model, valid_iterator, criterion, device)\n",
-    "    \n",
-    "    end_time = time.monotonic()\n",
-    "\n",
-    "    epoch_mins, epoch_secs = epoch_time(start_time, end_time)\n",
-    "    \n",
-    "    if valid_loss < best_valid_loss:\n",
-    "        best_valid_loss = valid_loss\n",
-    "        torch.save(model.state_dict(), 'transformer-model.pt')\n",
-    "    \n",
-    "    print(f'Epoch: {epoch+1:02} | Epoch Time: {epoch_mins}m {epoch_secs}s')\n",
-    "    print(f'\\tTrain Loss: {train_loss:.3f} | Train Acc: {train_acc*100:.2f}%')\n",
-    "    print(f'\\t Val. Loss: {valid_loss:.3f} |  Val. Acc: {valid_acc*100:.2f}%')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 45,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "lZdkI89ggJZj",
-    "outputId": "4a36eb35-d243-4da7-f16b-c72af5787809"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Test Loss: 0.371 | Test Acc: 85.62%\n"
-     ]
-    }
-   ],
-   "source": [
-    "model.load_state_dict(torch.load('transformer-model.pt'))\n",
-    "\n",
-    "test_loss, test_acc = evaluate(model, test_iterator, criterion, device)\n",
-    "\n",
-    "print(f'Test Loss: {test_loss:.3f} | Test Acc: {test_acc*100:.2f}%')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 46,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "G6xX77_FigbW"
-   },
-   "outputs": [],
-   "source": [
-    "def predict_sentiment(tokenizer, vocab, model, device, sentence):\n",
-    "    model.eval()\n",
-    "    tokens = tokenizer.tokenize(sentence)\n",
-    "    indexes = [vocab.stoi[token] for token in tokens]\n",
-    "    tensor = torch.LongTensor(indexes).unsqueeze(-1).to(device)\n",
-    "    prediction, attention = model(tensor)\n",
-    "    probabilities = nn.functional.softmax(prediction, dim = -1)\n",
-    "    pos_probability = probabilities.squeeze()[-1].item()\n",
-    "    return pos_probability, attention"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 47,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "def display_attention(tokens, attention):\n",
-    "    \n",
-    "    fig = plt.figure(figsize=(10,20))\n",
-    "    \n",
-    "    attention = attention.cpu().detach().squeeze(0).mean(0)\n",
-    "    \n",
-    "    ax = fig.add_subplot(1,1,1)\n",
-    "    \n",
-    "    ax.matshow(attention, cmap='bone')\n",
-    "    \n",
-    "    ax.set_xticks(range(len(tokens)))\n",
-    "    ax.set_xticklabels(tokens, rotation=45)\n",
-    "    ax.set_yticks(range(len(tokens)))\n",
-    "    ax.set_yticklabels(tokens)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 48,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "ALLuVhn7m_OF",
-    "outputId": "62cee726-84fd-4ee0-9d36-8ae54a71a356"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "0.0068605015985667706\n"
-     ]
-    }
-   ],
-   "source": [
-    "sentence = 'the absolute worst movie of all time.'\n",
-    "\n",
-    "sentiment, attention = predict_sentiment(tokenizer, vocab, model, device, sentence)\n",
-    "print(sentiment)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 49,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAm4AAAJsCAYAAABEYC8YAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/d3fzzAAAACXBIWXMAAAsTAAALEwEAmpwYAAArHUlEQVR4nO3dd7yld1nv/e81M2mkKkFUBAIBqUJ8EkJHmhQLhiYQejFgBBSERw54EH3kEQ4CgogYkc45gEhRRBQpGpCWEBIIICjlQCwkxJBITJmZ6/xx3yPbOZMQyOx979/e7/frNa+sWWuXa2X2vtdn3bW6OwAArH9blh4AAIArRrgBAAxCuAEADEK4AQAMQrgBAAxCuAEADEK4AQAMQrgBAAxCuMEaq6paegYAxrRt6QFgM6mq6vlyJVV1VKY3T6e1S5gAcAWU1wtYe1X1lCQ/keSCJBcl+W/d/YVlpwJgvbOpFNZYVd02yZ26+85JPpnkoCRfXHYqAEYg3GCVVdVBK24fmOSsJB+oquclOSbJvbu7q+ouS80IwBjs4warqKr2T/JTVbUzyb5JjkjyziS3SnKVJPft7kuq6ueSPKaq7tnd5y42MADrmn3cYBVV1dYkN07y5iQHJLlJd19QVScm+bFMm0h3JLlXkgd295mLDQvAuifcYJVV1VWT/FmSf0/ypu5++Xz/3ZJ8//znrd39+eWmBGAENpUubNfpIVaeJoKNY94EenF336aqbpHkGVV1SHe/IMk5Sc7o7n9ZdkoARuHghIVU1a7/97v+e+xSs7D37OHkup9M8gdVdUJ3fyzJS5Lctar+NMmLk2xd6xlZPZd1cmUnXQb2FmvcFtLdO6vq2klOrKojk9ylqq7X3V9feja+eytOrnutJOd194er6vZJ/rqqtnb371fV/07yiCSv6e6zFhyXvWi3kysfn+TiJFfp7tdamw7sLda4LaCqHlhVT0zyv5J8JskXkrwp08lYGVBV3aiqfqWqtlbV9ZM8L8kdqurg7j4lyd2S/E5VPaG7P9fdT+/uzy479eqoqm1V9QPz7RvPB2hseCui7fFJTkzyH0leVlXHLTkXsLEItzU0v6DdOskzknwtyVOSvCHJnZK8ubsvWXI+vjvzZrDrJbl+ksfPBxmcnOS4JLed92n7aJL/meRpVXXYik3lG9Gdkzyoql6Q5FVJ9ll2nNVVVdesqgPm21dLcockd0lywyTvS/JnVbXfgiMCG4ijStdQVR3Q3f8xr4W5YL7vlknu1d3PWHg8roSq2jfJjyf56SSf7O7fq6qfT3J0kg8lOTDJjZL8f939T8tNuvrmc9e9Lcktkzyiu9++7ESrp6qukeT4JC/LtIYtSV6e5Lwk10rykO6+sKoel+Rj3X3qIoMCG8ZGfte/rsybRl9RVfvsirbZMzOdJoLBrNzhfF5b+q4kf5Lk5lX1+O7+/SR/neQGSR6Y5KUbPdpmV0/y20nemORHq+o2u/5fbbQ1jfM+ii9Lcp0kj+vu7Um+muQJ+Va0HZ/k55L863KTwpXjAJv1w8EJa2De5+VBSR7e3ZdW1f7dfVFV3TzTJtPnLTsh36nddkR/aJJDkmzt7hfPC7j7VtUvZIq1N1TVgd39zSVnXgvzc75lksdlWtP4m0nun+Tcqjo2yUWZ9ufcSHYk+ZFMkfqQJL+R5LAkJ1fVR5LcItPv/leXGxG+e7st77ZkWtZdWlVbunvnwuNtOjaVrrKq2ifJc5P8QaZLHt02yaMzLdz/ItM79X9IvrVzM+Ooqsdk2hH9hUmemOSMTGtX7pjkUZk2j71oM5ynr6oelOSXM236/6f5oIRK8qwk18i039c9N8rVIeZAv0OSX8v03O6d5K5JPtLdr66q2yXZmeSfuvtLiw0Ke8m85eimSQ5OclJ3v2/hkTYl4bYGquoZSe6X6eLi78i0duZ2mS5xdOGSs/GdWXHC5C2Zdrp/XaYF2Lvnx9+X5MzufnxV3T3J6ZvhBLtVdfVM0fa/M20yvm+mn/lTu/txVXXNJJeO/v9iTyfMrqo3JHlnd7+mqh6W5JhMR4u/trvtBsGGMJ9M/AFJHp7kNUm+2t0PX3aqzcmm0lVSVY/MdEHxSzPt7/O6JBd097lVdadMp4fYL4lwG8Rua80O6e7zquqsJN+74sMemuR/JEl3/+Vaz7iEeYF+RJK3ZzpK+u6ZLvF1fJLfr6ojNsoapxX//ldPsitC/zjJrefHXzMfnHHdWL6ysRyYafn2wEznKHz0fFDWYd39tUUn22Q21I7C60VVnZDksUk+n+SaSc7MdNmjc6vqKUmen+TJ3f1vC465ai7n7PFD/7ytWMPyuHxrv8Qzkzy5qm4xL8R+LMn3V9UBm2Fn3nkN002SvLq7P5hpbdN9u/ukJEdmitrzFxxxr1h5cEVV3SjJaVX1lKq6Z5I/TfLj8/5tmZ/7b3X3eYsNDFfCZSy7rpHkb5Ic290/MR+I8+gkj9ws52pcL2wq3YtWbEZ5aZI/7+4/n+///zO9oN09ySOTfLC7/37BUVfNbpuQHpxpH599u/vVy062d8w73z88yUN3/RvOz/MhmU4BcZ0kj+nuTy025BrYtVNyVf11psu1Xae7v15V2zL9mz8myS9m2h3gk0vOemXt9jO968Ci6yW5fabf579Ksj3JUUlO7O5zFxuWVbcZ9lfdZX4z0klOS/LlJO9O8vF5V5BHJ3lykvts1Nez9WroNSDr0A+tuH3TFbefn+QrPXnFRv4hX/EC90uZdtLfnuTp8ykRhlZVB2Z6cX5Qkour6uer6kOZ1qw+JMnTMy3ENmS07fYu/JAk6e67ZnoX/tb579szLVc+luS40aMt+S8/009M8qaqen+SW3T3K5P8ZJLvS3KrTAcoOMJug6qqo5Lp52Gjrk1f+bxquuLH0zMdgPPLSX4q03kqf7iqXpfkwUnuv5Ffz9Yr+2DsJTWfbHXe3+eVSd5VVV9P8ook90xyo6o6LMk3Nvq7tao6NNML2x2r6ulJ/j7JG2s+AfHC410hKxdgu/69uvubVfXFTFdF+HCSv810otnnJrl3d39xgVHXzIqAOSHTpsFzk7yju3+6qt5WVe/u7h+f4+20RYfdy6rqPpleqB6aaf+136uq7+nul1bVkzLt/3PQRt88WlW3SfL1zfRivWIN28ur6uvdfffdD1DZCHZbs3yNTFsP7tPdn62qn810gvFLuvtu88f854nkWVvCbS+YD0R4VKZ9ezrJx+YF3KszHT1600zncTpvuSlXzx7O5bM1yYFV9YdJrprkAd29o6oeUFWf6unanevddbr7C0lSVY/NtBA7K8nvZFq79LV50+CPZdqvbftSg66lqvrpTJtHHpxpDdNdquoHuvu4qvpoVb2tu49bdMi9YA8/0/skeW93fy7J56rqgUneUlUf7u6PZ9qPb/h9+S5PVd0q0yXM7lFV+3X3xQuPtFaOTPIP3X1MVf1dVb21u++9keJt5c97Vf1yppUNN8508NxnM+0OkCQPqOkSfq+OE8cvxqbSK2HeUXlbphNsPidJV9WTaj
rp5j26+1aZzu21Yc5dtScrfuFvPC/Qz03y/kzX6nxaT5f5elimF/x1fTqImhyQ5KNV9bSqOibTDrhnZVqAvz/TaS2+XlX/LVPIPW0znPahqm6bKWBf1NOlm16S6coQd6yqrd19bKb92oa34mf6uKq6W5KbZ3ozcsD8Yv2xTEfOXrTknGulpgOLrpfkLZmOIH7svOzbsOZlwf6ZrjX73CTp7tskuUZV7do1YENsNl3x8/4Tmc5Bed9M5198UlXdbl7p8O5MJ8/+y/lzhg/WUTk44Uqoqpt296fmtRCvyrQJ7X1JPp7k2Zl2zN6wlziqqh9NcvuerhZwYpJfyBQ4f5Dk9CQ/k+nktH+eac3jQ9d7wK7Y6f5GSd6Z5Nwkv9TdJ8+P/0qSmyV5WKag+0B3f3qxgddIVR2S6c3JoZlewH+uu8+YH/urJE8fZE3q5dptc9EDM51Y+Q8zHVh0g0w/23+baX/WX0xy9+7+ykLjrold/0/mfTy/kGlLzQ26+5yFR1tVK5YF10vy5ky7Bfzq/NhHk5zV3fee/z7kmrequnWSo7v7JVV1RKZTGV21u+8yP/7zmVY+/EJ3v3fU57nRWOP2XaqqRyU5qaqu0t1/luQ2SY7v7hcl2T/Twm3DroWZ32UenuQnq+q3Mm0uvG2mc1rdJVOo/W6mHflflWkfsPUebbXrnWd3fybT8/nBTEeR7vK2JNu7e0d3n7RJou3G3X1+klOTfDHJ/8z0Tvxu8w7Mh2a6PufQdou2a2U6mu523f3MTJfu+nKmtRFHZjoo4X6bINq2rHihPizTJrOvJfnZxYZaA7stC/4h07/3T1XVb8z3HZvk6lX1nvnvo8bMWUn+pL51rsWXJ7lw3lyanq63fFKS581bIlgHNvSq7tWwYl+A70/y3J6vfNDfOjXEL2S6TuOD5xe7Daeqvi/J93T3u6vqzplC7avz6vQ/rOnanbfLFLBv6gFOj7Dbi/YTMp2b7MxMlzH666r6UqaDEG6R5Kiq+t4RnteVNb8jf0NNp7R5b6aI/WCmK4A8NdN+XSf0Brkiwnz7iZn24Ts4yQuq6qzu/rOazlX1u5mOmH3pfBDGhrZiE9pjM+2r+7Uk/z3J82s62Oj5S863GvawLLhxpithPCbJa6pqe3f/RnffpqreU1U/1INdh3Zehn/fvMXouEwHXry0u585/5wfV1VP7u4XdPcLq+qVPciBZZuBNW7foXnV+XUzHWHzn7+sVXWdeZ+PT2c6RPqMpWZcA4cmeUlVvTLTBcVflORa8wteuvu1mV7cbpjpAtzr3ooF9YmZLtX0m0l+Pcm9Mm0W/aVMR0reLNOa1c0Qbfsm+Uqmd+WPzfQC9pFMax9Oz3T1j+O7+/TFhtxLVvz7H5fpnIsPzRSnP5LkVlW1rbvflilW/3UzRNsuVXXfTJvL/ijT9Zavlelo+UfMa9s3lD0sC56d6Xq090/yK0nuu2Kft7uMFm2zQzO9KXltpuXbcUluX1XP7O6/yLQv49FV9fj547+xzJjsiXD7Dsw7q+6TaeH9iiSfqKqbVNWfZtrx/hrd/b7u/uyig66y7v58phfu+yb54+5+faZ34Xeuql+cP+blSZ7Z3cP8ws/7cf0/mS7pcp9M8XmdTKFyQqYLpv/eet/kuzfMa9qekWkB/5BMb1K+J9M5626a6XJWWzbSkYU1nQLhxZkOPvlckmdmWqN43yR3muPtDT0fbbyJ3CDJK7v7E5nO53Vhpp+FB2V6sT98wdlWxR6WBadkugrOfTLttH+nqjp81AMT5mX4GZn2Q35fd38gyeOT3L2q/ntPl+t7VaZ9+0beFLwh2VT6HZh/eC+tqoMz7fv0nkwv7qdn2oy2ma47+rJMz/vJVXVud7+xqr6W5KU1nevodaNtKu7u8+dN3TfMtE/eneYF83mZDji5ZW+Co0dnX5n/vDrJSzMdYHJ+d7+lqnYkeX93D7E29Yrq7rNqOnH0S6rqQd39v6rq1zPtsH33TJuIN82athU+nemyRu+c9+l8WVX9TZJ/TnKH/q+nTdkQvs2y4JQkd94Ay4Jdy/Anzcvw189rnD9WVefM+7exDgm371BV3SDTKvPKtEB/d3dfuuxUa2/eYfcfquq8JM+e/7t/kksyvcANqbsvrqoLk2yrqh9Jcu0k78p0RNnoC+orbN788/Kq+liS38q0v9fVkrylu/9o0eFW0RymFyf5rarKHG//b6Z9OjfTG7OV3p9p8/GDa7pqxAGZTji8+7nuNpSNvizYwzL8G0kuSPJ3mU7zwzrldCDfhXk1+vaVC/KVO7RuNlV1j0wXXf9mkkePvimxqvbLtE/bXTOtWb3/Zjh69LLMOzLfJd+69uiXlp1o9dV08fiTkjy5u/946XmWVlU/mGkz4b0yHS3/6xth38ZvZ7MsC+Zl+P/I9Mb7ofNR9WTa17e7L1l6jpWEG3vF/OLe3X320rPsDfO+jN+fZGd3n7X0POtBVe2zmdYuV9WPJ/nHTbhP22Wqqqtket345tKzrJXNsizYaMvwvaGmEzC/Iskj19P+vMINAGAPqurA9fZGRbgBAAzC6UAAAAYh3AAABiHcAAAGIdxWWVWdsPQMS/C8NxfPe3PxvDcXz3t9EW6rb13+w68Bz3tz8bw3F897c/G81xHhBgAwiE1xOpCqLb1lyzKN2r0zVct87507N9SlJK+w/fc/cLHvvWPH9mzdusyV5G5ykxsu8n2T5Oyzz87Vrna1Rb73xz/+8UW+b5J0d5a6zvhmWHbDJnZOd+9xoboprlW6ZcuWHHDAQUuPseYuumhdnTNwzVz3ukctPcIiTjnlA0uPsIj99jtg6REWccklFy09wiK2bNm69AiLWOoNwtJ27Ni+9AhL+fJlPWBTKQDAIIQbAMAghBsAwCCEGwDAIIQbAMAghBsAwCCEGwDAIIQbAMAghBsAwCCEGwDAIIQbAMAghBsAwCCEGwDAIIQbAMAghBsAwCCEGwDAIIQbAMAghBsAwCCEGwDAIIQbAMAghBsAwCCEGwDAIIQbAMAghBsAwCCEGwDAIIQbAMAghBsAwCBWNdyqat+qOnAvfa0Dq2qfvfG1AABGtCrhVlU3qqrnJ/n7JD883/ecqvp0VZ1RVb8933dEVb13vu89VXWt+f77V9Wnqur0qvrb+cv+cJLPVdVvV9WNVmNuAID1bNve+kLzmrWfTfLo+a5XJnlWd19QVVdNcu8kN+zurqrD5o/53SSv7u5XV9Wjkrw4yXFJnpnk7t191q6P7e7TqupmSR6Q5OVV1Un+KMmbuvube5jnhCQnzLf31tMEAFhMdffe+UJV5yc5I8ljuvuzuz22Lcmp8593JHlHd19SVeck+YHuvnTeDPrP3X14Vb0syZFJ3pTkLd399T18vxtlCrebdvchlzfb1q3b+oADDtoLz3IsF130f/XspnCDG9xy6REWceaZH1h6hEXst98BS4+wiEsuuWjpERaxZcvWpUdYxGZdAbFjx/alR1jKqd19zJ4e2JubSu+X5Kwkb6mqZ1bVtXc90N3bkxyb5M1JfirJuy7vC3X345L8apJrJjl1X
mOX5D83r/5akrcm+cr8fQEANry9tqm0u/8qyV/NkfWQJG+f16g9Jsk5Sa7S3e+sqg8m+cL8aX+X5IFJXpvkwUlOTpKqOrK7P5LkI1V1zyTXrKqDk7w8yeGZNsPedk9r4gAANqq9Fm67zDH1oiQvqqpjk+xIcnCmkNs/SSV58vzhT0jyyqp6apKzkzxyvv95VXX9+WPfk+T0JD+U5Ond/dG9PTMAwAj2erittFtkHbuHx7+c5M57uP8+e/hyX5n/AABsSk7ACwAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwiG1LD7AWtm7dlkMOOXzpMdbc/vsfuPQIi9iyZevSIyziF5/+wqVHWMRm/N1Okh07ti89wiKqaukRWEOb9ef8G984+zIfs8YNAGAQwg0AYBDCDQBgEMINAGAQwg0AYBDCDQBgEMINAGAQwg0AYBDCDQBgEMINAGAQwg0AYBDCDQBgEMINAGAQwg0AYBDCDQBgEMINAGAQwg0AYBDCDQBgEMINAGAQwg0AYBDCDQBgEMINAGAQwg0AYBDCDQBgEMINAGAQwg0AYBDCDQBgEOsm3KrqsKo6cb59x6p6x9IzAQCsJ+sm3JIcluTEpYcAAFivti09wArPSXJkVX0iyaVJvllVb05y0ySnJnlId3dVHZ3kBUkOSnJOkkd09z8vNDMAwJpZT2vcnpbkH7v7qCRPTfKjSX4pyY2TXDfJbatqnyS/m+R+3X10klckefaevlhVnVBVp1TVKTt37liD8QEAVtd6WuO2u49291eTZF4Ld0SS8zKtgXt3VSXJ1iR7XNvW3SclOSlJ9t13/171aQEAVtl6DreLV9zekWnWSnJmd996mZEAAJaznjaVXpDk4G/zMX+f5GpVdeskqap9quomqz4ZAMA6sG7WuHX316vqg1X1qST/keRf9/Axl1TV/ZK8uKoOzTT/7yQ5c02HBQBYwLoJtyTp7uMv4/7Hr7j9iSR3WKuZAADWi/W0qRQAgMsh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAaxbekB1srWLVuXHmHN7bPP/kuPsIhLL71o6REW8flPfHbpERaxbdu+S4+wiP32O2DpERaxY8eOpUeAVfeNb5x9mY9Z4wYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADCI7zrcqurfr+w3r6pHVNVLvs3HHFFVx1/Z7wUAMLoR1rgdkUS4AQCb3hUKt6p6W1WdWlVnVtUJK+5/4Xzfe6rqavN9T6yqT1fVGVX1hvm+752/xhlV9eGqutkevserqup+K/6+a43ec5Lcvqo+UVVPqqqtVfW8qvrY/PUee2X+BwAAjOKKrnF7VHcfneSYJE+sqqsmOTDJKd19kyR/k+TX5o99WpIf7e6bJXncfN+vJzltvu/pSV7zHcz4tCQnd/dR3f3CJI9O8o3uvkWSWyT5uaq6zu6fVFUnVNUpVXXKzp07voNvBwCwPl3RcHtiVZ2e5MNJrpnk+kl2Jnnj/Pjrktxuvn1GktdX1UOSbJ/vu12S1yZJd783yVWr6pDvcua7JXlYVX0iyUeSXHWe57/o7pO6+5juPmbLlq3f5bcCAFg/tn27D6iqOya5a5Jbd/eFVfX+JPvv4UN7/u9PJrlDkp9O8oyq+pErOMv2zCFZVVuS7HtZIyV5Qnf/5RX8ugAAG8IVWeN2aJJ/m6PthkluteJzd+2TdnySD8zBdc3ufl+SX5k/96AkJyd5cPKfIXhOd5+/2/f5UpKj59v3SrLPfPuCJAev+Li/TPLzVbXP/PV+uKoOvALPAwBgaN92jVuSdyV5XFV9JsnfZ9pcmiTfTHJsVf1qkq8leUCSrUleV1WHZloz9uLuPq+qnpXkFVV1RpILkzx8D9/nD5O8fd4k+6756yfTptcd8/2vSvKiTEeafryqKsnZSY77Dp4zAMCQqru//UcNbt999++rf9+1lx5jzW3fsf3bf9AGdOihhy89wiKue92jlh5hEaed9tdLj7CIrVs35767O3Y42IyN71/+5Qundvcxe3pshPO4AQAQ4QYAMAzhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADCIbUsPsDYqqc3XqNu27bv0CIvYfuklS4+wiH/55y8sPcIitm7duvQIi9hvv6ssPcIiLt2kv9/dvfQIi6iqpUdYdzZfzQAADEq4AQAMQrgBAAxCuAEADEK4AQAMQrgBAAxCuAEADEK4AQAMQrgBAAxCuAEADEK4AQAMQrgBAAxCuAEADEK4AQAMQrgBAAxCuAEADEK4AQAMQrgBAAxCuAEADEK4AQAMQrgBAAxCuAEADEK4AQAMQrgBAAxCuAEADEK4AQAMQrgBAAxi2HCrquOq6sZLzwEAsFbWfbhV1dbLeOi4JMINANg0VjXcquqpVfXE+fYLq+q98+07V9Xrq+pBVfXJqvpUVT13xef9e1U9v6pOT3LrqnpOVX26qs6oqt+uqtskuVeS51XVJ6rqyNV8HgAA68Fqr3E7Ocnt59vHJDmoqvaZ7/tckucmuXOSo5LcoqqOmz/2wCQf6e6bJ/lMknsnuUl33yzJb3b33yX50yRP7e6juvsfd//GVXVCVZ1SVafs3Ll91Z4gAMBaWe1wOzXJ0VV1SJKLk3woU8DdPsl5Sd7f3Wd39/Ykr09yh/nzdiT5k/n2N5JclOSPquo+SS68It+4u0/q7mO6+5gtW7btrecDALCYVQ237r40yReTPCLJ32VaA3enJNdL8qXL+dSLunvH/DW2Jzk2yZuT/FSSd63exAAA69daHJxwcpKnJPnb+fbjkpyW5KNJfqyqDp8PQHhQkr/Z/ZOr6qAkh3b3O5M8KcnN54cuSHLw6o8PALA+rFW4/UCSD3X3v2ba7Hlyd/9zkqcleV+S05Oc2t1v38PnH5zkHVV1RpIPJHnyfP8bkjy1qk5zcAIAsBlUdy89w6rbd98D+upXP2LpMVgj++27/9IjLOKQQw5feoRFfO3sLy89wiL22+8qS4+wiEsvvWTpERaxGV6r96Sqlh5hEV/5ymdO7e5j9vTYuj+P
GwAAE+EGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwiG1LD7AWundm+/ZLlh5jzV188YVLj7CI7/meGyw9wiJ+/L73XnqERfzR7zx76REW8c1vnr/0CKyhqlp6hEXs2LF96RHWHWvcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABjFEuFXV46rqYUvPAQCwpG1LD3BFdPfLlp4BAGBpe32NW1UdUVWfrapXVdXnqur1VXXXqvpgVX2+qo6tqu+tqrdV1RlV9eGqullVbamqL1XVYSu+1uer6upV9ayqesp835FV9a6qOrWqTq6qG+7t5wAAsB6t1qbS6yV5fpIbzn+OT3K7JE9J8vQkv57ktO6+2fz313T3ziRvT3LvJKmqWyb5cnf/625f+6QkT+juo+ev99I9DVBVJ1TVKVV1ys6dO/f28wMAWHOrtan0i939ySSpqjOTvKe7u6o+meSIJNdOct8k6e73VtVVq+qQJG9M8swkr0zywPnv/6mqDkpymyR/XFW77t5vTwN090mZIi/77LNf79VnBwCwgNUKt4tX3N654u875+956WV83oeSXK+qrpbkuCS/udvjW5Kc191H7bVJAQAGsdRRpScneXCSVNUdk5zT3ed3dyd5a5IXJPlMd3995Sd19/lJvlhV958/t6rq5ms5OADAUpYKt2clObqqzkjynCQPX/HYG5M8JLttJl3hwUkeXVWnJzkzyc+s4pwAAOvGXt9U2t1fSnLTFX9/xGU8dtxlfP4pSWq3+5614vYXk9xj70wLADCOIU7ACwCAcAMAGIZwAwAYhHADABiEcAMAGIRwAwAYhHADABiEcAMAGIRwAwAYhHADABiEcAMAGIRwAwAYhHADABiEcAMAGIRwAwAYhHADABiEcAMAGIRwAwAYhHADABiEcAMAGIRwAwAYhHADABiEcAMAGIRwAwAYhHADABjEtqUHWAtVla1bty49xprbunVT/PMyO//r5y89wiK21Ob73U6SnbVz6REWsWWL9Q2bSffm/P2+PH4DAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABjF0uFXVE6vqM1X1+qVnAQBYbduWHuBKOjHJXbv7q0sPAgCw2oZZ41ZVT66qT81/fqmqXpbkukn+oqqetPR8AACrbYg1blV1dJJHJrllkkrykSQPSXKPJHfq7nP28DknJDkhSbZuHeJpAgBcrlGK5nZJ3trd30ySqnpLkttf3id090lJTkqSfffdv1d9QgCAVTbMplIAgM1ulHA7OclxVXWVqjowyb3n+wAANo0hNpV298er6lVJPjrf9fLuPq2qFpwKAGBtDRFuSdLdL0jygt3uO2KZaQAA1t4om0oBADY94QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADAI4QYAMAjhBgAwCOEGADCIbUsPsFZ27ty59AhrrnvzPeck6e6lR1jEtn33WXqERXQ257/3li2b8333Zv39rqqlR2Cd2Jy/+QAAAxJuAACDEG4AAIMQbgAAgxBuAACDEG4AAIMQbgAAgxBuAACDEG4AAIMQbgAAgxBuAACDEG4AAIMQbgAAgxBuAACDEG4AAIMQbgAAgxBuAACDEG4AAIMQbgAAgxBuAACDEG4AAIMQbgAAgxBuAACDEG4AAIMQbgAAgxBuAACDEG4AAIMYMtyq6ktVdfh8+9+XngcAYC0MGW4AAJvRug+3qnpbVZ1aVWdW1QlLzwMAsJRtSw9wBTyqu8+tqgOSfKyq/uSKfNIceSckydatIzxNAIDLN0LRPLGq7j3fvmaS61+RT+ruk5KclCT77rt/r9JsAABrZl2HW1XdMcldk9y6uy+sqvcn2X/JmQAAlrLe93E7NMm/zdF2wyS3WnogAIClrPdwe1eSbVX1mSTPSfLhhecBAFjMut5U2t0XJ7nnHh46YsXHHLRmAwEALGi9r3EDAGAm3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAaxbekB1kJ3p7uXHmPNbcbnnCRVtfQIizjgoAOWHmER3TuXHmERO3bsWHqERWzW3+/NujzfuXNz/pxfHmvcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABiHcAAAGIdwAAAYh3AAABrHm4VZVh1XVifPtH6yqN6/1DAAAI1pijdthSU5Mku7+p+6+3wIzAAAMZ9sC3/M5SY6sqk8k+XySG3X3TavqEUmOS3Jgkusn+e0k+yZ5aJKLk/xEd59bVUcm+b0kV0tyYZKf6+7PrvWTAABYa0uscXtakn/s7qOSPHW3x26a5D5JbpHk2Uku7O4fTfKhJA+bP+akJE/o7qOTPCXJS/f0TarqhKo6papO2blz595/FgAAa2yJNW6X533dfUGSC6rqG0n+bL7/k0luVlUHJblNkj+uql2fs9+evlB3n5Qp8rLPPvv1qk4NALAG1lu4Xbzi9s4Vf9+ZadYtSc6b19YBAGwqS2wqvSDJwd/NJ3b3+Um+WFX3T5Ka3HxvDgcAsF6tebh199eTfLCqPpXked/Fl3hwkkdX1elJzkzyM3tzPgCA9WqRTaXdffwe7ntVklet+PsRe3qsu7+Y5B6rOyEAwPrjygkAAIMQbgAAgxBuAACDEG4AAIMQbgAAgxBuAACDEG4AAIMQbgAAgxBuAACDEG4AAIMQbgAAgxBuAACDEG4AAIMQbgAAgxBuAACDEG4AAIMQbgAAgxBuAACDEG4AAIMQbgAAgxBuAACDEG4AAIMQbgAAgxBuAACDEG4AAIPYtvQAa6GqsmXL5mvUqlp6BNbQxf9x8dIjLKJq8/1
uJ0nVzqVHWITl2ubi3/v/tjmXeAAAAxJuAACDEG4AAIMQbgAAgxBuAACDEG4AAIMQbgAAgxBuAACDEG4AAIMQbgAAgxBuAACDEG4AAIMQbgAAgxBuAACDEG4AAIMQbgAAgxBuAACDEG4AAIMQbgAAgxBuAACDEG4AAIMQbgAAgxBuAACDEG4AAIMQbgAAgxBuAACDEG4AAIMQbgAAgxBuAACDEG4AAIPYtvQAq6WqTkhyQpJs3bphnyYAsIls2DVu3X1Sdx/T3cds2bJ16XEAAK60DRtuAAAbjXADABjE8OFWVe+sqh9ceg4AgNU2/F773f0TS88AALAWhl/jBgCwWQg3AIBBCDcAgEEINwCAQQg3AIBBCDcAgEEINwCAQQg3AIBBCDcAgEEINwCAQQg3AIBBCDcAgEEINwCAQQg3AIBBCDcAgEEINwCAQQg3AIBBCDcAgEEINwCAQQg3AIBBCDcAgEEINwCAQQg3AIBBCDcAgEEINwCAQVR3Lz3Dqquqs5N8eaFvf3iScxb63kvyvDcXz3tz8bw3F8977V27u6+2pwc2RbgtqapO6e5jlp5jrXnem4vnvbl43puL572+2FQKADAI4QYAMAjhtvpOWnqAhXjem4vnvbl43puL572O2McNAGAQ1rgBAAxCuAEADEK4AQAMQrgBAAxCuAEADOL/AAPRSP69IC9/AAAAAElFTkSuQmCC\n",
-      "text/plain": [
-       "<Figure size 720x1440 with 1 Axes>"
-      ]
-     },
-     "metadata": {
-      "needs_background": "light"
-     },
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "display_attention(tokenizer.tokenize(sentence), attention)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 50,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "aLqml9PenBMp",
-    "outputId": "1614cf67-7583-4cb6-ab17-09ea8d1774a6"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "0.991644561290741\n"
-     ]
-    }
-   ],
-   "source": [
-    "sentence = 'one of the greatest films i have ever seen in my life.'\n",
-    "\n",
-    "sentiment, attention = predict_sentiment(tokenizer, vocab, model, device, sentence)\n",
-    "print(sentiment)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 51,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAm0AAAJrCAYAAACyUqSjAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/d3fzzAAAACXBIWXMAAAsTAAALEwEAmpwYAAA2Y0lEQVR4nO3deZxedX328c+VTBZIWBRR9KmCYCsiFRQQAXexoqLgVlBcWDTiWtc+rXsrtLWKa2vbuGFrrVbUurRuVVHEpygoKLgrWhfckD0kIZnv88c50TFmbiCZuc/9Sz7v12tec8+9XjNzL9f5nd85J1WFJEmSJtuCoQNIkiTp+lnaJEmSGmBpkyRJaoClTZIkqQGWNkmSpAZY2iRJkhpgaZMkSWqApU2SJKkBljZpK5IkM04vGzKLJGluxSMiSFuHvrDdnW5hrID9gTdX1aohc0mS5sbU0AEkzZlFQIC/BPYCjq6qVUkWVNX0sNEkSVvK1aPSViBJqmot8G3g94ALgFsBWNgkaevgSJvUuL6wVZIjgKuBuwEHA0cluWlVnZ7klsBUVf1w0LCSpM3mSJvUuL6wPRT4W+AmVfUL4Czgs8BhSf4eOANwwwRJapgbIkiNS7ITXSl7Ot3q0QPo5rR9Brg9cCzw4ar68GAhJUlbzNWjc2jGaqqUbVjjFeA44HbAauB+wOur6jTgTPjN83OwhJKkLeLq0TmQZMPfccP3uw6VRVu/DftiS3LXJAcBNwOeCSwE/qmqTgROBO6cZPsNz08LmyS1zdI2B6pqOsnuwF8lOQP4aJJdhs6lrVM/mvsg4C3AXYDzgOVV9cKq+kySBwCvA95ZVavcelSStg6uHt1CSY4Fbk43b2gl3WqqS4GrhsylrVM/arYL8FzgIXSl7TvA//YjcDsBTwP+b1X912BBJUlzzpG2zZRkKskhwAuBnwPPA94F3Ac4o99nlrTF0lne/7iMboHgC3Tz1p4LPKaqfgo8nG5B7DFV9Z8zD2klSWqfI22bb1FV/b8kh1bVVQBJDgY+XlWfGDibti73Apb2+1o7Angs8AfA84GbVtWVSe4KvAD4dlV9BYabwzZzgwePxnDjbWqDEf+OksDStlmSPBM4JMnjNxS23kuAzw0US1uvK4G/pjvSwZ9W1XVJHg18FPiXJOcAfwy8dENhG8pGhe2RwOVJPmXhuGE2+vvdDbgC+FVV/WzSiluS7arq2qFzzCbJwqpa3592y2ltFVw9eiMleTrwaLoPyOuSLO3P349uNekrh8ynrceG1ZtV9SXgbOCb/fm3raq1VXVf4FPA94FnVtUHhl4lOqNwPB14EfC9mUVjxpbW2oQZf7+nAa8GHgWcnWSXCStsTwEel2Th0Fk2pf/7vSrJKfDrjXecLqDm+QZ6IyRZBOwJHA8sSXIycFaShwAXAX8FrPfNQVtqxj7/9kyyHfBiunmTDwEemGRpktsAH6mqd1bVZ2EyduuRZF/gCcD9qup7SY5I8uQku01S8ZhU/Qjb0cAfAeuB7wGXzdjVy6DvL0mOAu5E99xbP2SWTemPDnIS8AngyCT/BhY3bR08IsKNlOSFwCOBHwMfBnYE7g4cW1WrhsymrUuSI4GX0h3ZYC3dKO6ewHOAX9IVo4dU1VmDheR3Vz31CzevBn6frnDcCrgM+E5VnTpMynYk2RM4im76yv2Bh1bV6n5180eq6pqBci2g2zr5x8C5VXXPfqStJqWMJ7k/8HjgA1V1Rn/el4CvV9Vxg4aT5oAjbTdAkhOS/EWSFwGn0S0FP76q/hH4IrA9sGTAiL8jya5Jdh46hzZPv2HBKXRbhBbwIOBVdKtCX0h3lIPBCxv81iq9eyV5cFVdB7yVbn7na6vqaLrXyfLZ70VJjk3yeuBnwFOAk6vqj/rC9ji60aOlA0acqqrLgP2AOyZ5blWt7/dTOSmfJUuB3YC7Jbl1f94BwKFJ3jJcLGluONJ2PZKsoNu7/N8B9wAOBw6rqp8meR7wGOD4oSeAz9RvKPEAutGNH1bVnw8caeLNNlF53JO/Nzxekj+imyN5c7qNEP4UOBm4DnhZVX1rxm0GmWS90aT5pwNPoludd26f8Sf9ZScCTwWeUFUXjTvnpNrECOWtgb+n+z/fAvgA8Da6I10cAZxQVV8dKOuJdEd6+RrwIWAR8HnglKp67RCZZkpyH+BXwCV0pe3/Av9NtzX/j/vr3LaqLh4upTaXG5L8xqQsHU2cGXMf9gdeXlXvqKonA+8G/rm//FfAoyessB1Lt2rlscA1wB8Om2jybVQ+jkvy6CRPgO5oF+PK0J/cqX/cj1fV+cCRwFOq6pPAT4A1bDTaMgGFbTGwA3C3qroL3cjzi5LsnuSOwCF0hcPCNsOMv9+G//2lwLeAw6vqy3Sjq9fRvc8cN2BhW0E3j/eddAuwj+sXGu4BvLov7IPpNzo4lW5B9Xy60eh/p9tVzlHpdpWDhe36pTuyz0TpF2R//V6zYeO/bZWlbXa/N+P0vjNOn0Y3elVV9daq+uaYc12fa+mWMh8P7AE8DCDJnQfMNNFmvCE8i260aB3wgiSPGWeGJA8E3pvk1CR/2190S+AZSQ4F7gm8buiFhI3eRJ8H/Cfd8+3B/VWeRFfiXkq34PDMoQrHpEtyP+ArSe5JV8bfCfxlkjtU1YVVdUpVvXrg95nbAMcAtwV+QXe4vsVV9XVgb+DjQwVLt9X+Q+nWgCwALqqqK6vqA8AZdAvdzjW+HuksAz6R5BFD55lpw4JzvwbpLcDbkzxq2FTDsbRtQrrN2V/aLwG/DfjTJE/s5208ELhDkp0ndEukHehWC9y/qh7Q75bkicBJ/VaIg0hyaJLbD/X41yfJTsBBVXVvugn03wTePa6/Wb/F4CuAZ9NNQD+4v+h4urlgz6dbFXX+OPKMMuNN9DDg3sDLgX8BHp/kj/p9dz0ZmAauneR9eY3bxu8Z/QjqP9HNXfxnYNf++73Gn+639XMUb0I3Angm3ZE27l9V64AnJXlMVX1r5qr6AVwJnAU8k+65+BD49RqHjwDPqqorBkvXiH4Q4hrgZcBd0m2dPmg/SHL7JLfoT59EV86fD2xHtzHgNsmd624kyQl0qwAe0Y8mfLEf5Xg73Vai+9LNzbl8uJS/rc+8O/DJqnpHX46OTHIHut0GnES3GneQD8++kJwOHJFkSVWtGSLHTJuYq7YQWJbkTXTH9jymqtYnOSbJhVV17jxk2LBbj13oXov/F7gJ3aHQ/ri/2k2q6mHpd2Q65NyOJHsDd6yq9yY5CHg98J9V9dkkX6UbhXlaPwrzYeCJQ+ScVButUn483dyrr9OVtqL7v7+QbpT/F0ne3BekIbLelG50Dbp9Ad4P+Fh/2WPo5t0N9sGZZFfgcrotWY8Edq+qW/aXPZbuPe+TVfWLoTK2Isn+dM9DgP+hm6e9vKp+OeB82cPpXgsz13Y8ke4Y34uAhyeZAm5VVf877nxDsrT1+qWKBcBBwN8AleTZdE+Sd1bV3ZLsCCyZpDeCdPtMegpwDnBCvxr0VLq5T8+lO1blMf2qjC
HyLQBuB7yPbnXtkUneONSH0QYzRov2Ab5bVb9KcibdG8VhfUF6PN3uNR40Txkq3QYHD6MbLXgd3UTqQ6vq6iT3BR6Z5M83jBYMPBn3COD2SVZXd2zT/wYOSrJndftj+3e6VXyPS/JpYJWTh39jo9XwR9OtCn0u3Wrv11TVJ/rye3vgJ0O+RvrXw+V0G1mdkOStdB+UR9P9jx891Crbfg7bg4Hv0s0BfATwySRvoFsdfz/gxKHfp4dcwLqh+vmof033d1wE/GV/+i+SPGNcc3pn5FlAtwB9NPBZ4ObpdoFzO7ojDv1PVT2wv+6TgVsmObW6Lda3CW492kuyb1VdmG5HuafTfYh+GvgSXQk6tvqt4SZFX9j+HHhYVV2S5OF0HwDfAVZW1dokUwMurW8YSVpGt7+uKeD2VfXLIfL0me4M3KOqXp/kqcDT6JbW/wm4gG4jjqfSzdO6O92k63mZQN8XxucCb6uqzyX5C7pVPCfSraJ9JfCCqvrQfDz+DdWPlF5OtyHEE4A7AO/vS8Zr6VbpvaSqvtsv2CyYpJHooc0c1U3yB3Sv2ScBz6Jb5fMluq1uX1VVlwyVEyDJgXTHs/14//NHgbdU1Xv6Vbu3oCvjVw6U71i6Ub7H0i1cT1XVsf1qtGPpFlY/NfAq241HVR8EBPh/VfWrIXPN1H9ePIJun4/foHuNPxH4AXBH4D5VtWqc5TPJzlV1eZLfo9uoZH1VbVhFeibde/UTgccBf0K3Ruwb48g2KZzTxq83Z1+ZZPv+A/JQuvkbr6NbqpwCrh4y48aSbA/8lG4i8PEAVfU+urkn+wMr+qWWQfZYnhmT1YGd6SYr/5zfrPYbIlOAmwEPTvLXdPOGDgPeQ7d0fnfgDXSHKTudrgzPV2Hbk2714j50B4MP3UYuHwXeRPeh/mdV9aGN50GNU7odun6Ibue+U3S7pPgacHSS+1fVs+ieh69Jt0uFKy1sv21GYbst8CPgL+h2n/FQ4L7Al+mmMTw7Ax4WKsld+jyvTfL8fnrFu+leMxvmPf10wMK2nO55+Fy60e+b0314AyyrqtdV1T8OWdjSmbmhzgnA39JNfXhpug1OBpduH56Pp5v3dwHdHNSL6Irvf9D9bZ8H4xnd7/9uuwP/2C/kL6AbNf1lkg07RX4Q3fSRf6Arm4/a1gobbOOrR2csAe8GvKL6IxpsGPbvh+FPptvcfpA3qk1Jd/isu9EtET2HbkOJn1bV26rqP5KsA7447qHtmWZ8UD2Zbh7gz+kOxXRauvlZp40zT5Kb080P+0S/2vF+wI/6gvGmdDsvvTtdSf/3+VginjHyuB/dm+On6HaHcTfg/H4E8q+TvAaY7kdKh5zDdjPgBLoP8svpSua76N40nwo8LMl0VT03yV/RjXJMjCFHmfvHPxS4TVW9K8kz6EYGPk23f7MAn6uqdX0n/2/gtBrosFD9e9296Ark++les7vQzRfbOcmXquqcIbL1+Z5KtwPz1XQLVF+oqsP7y54E7JXkZVW1eqiMvYUbnnNJHky3gckf0n3WvhR4SP+SHmyn2EkOBu4CnFdV7+jPW0/3d31BVX0wyYV0u+0Z12toYVX9oH8e3hW4pKp2798r/znJDtXtzP5B/YLNktpGj0C0TZe26nZiuifdoWI+tuH8fon4h3QjChPV5tNtjv10utUDJwJX0M2NOTnJTavqtOomgQ+uz/pM4Di6icG3odtT/vFJblbj3envTsDfJfkR3UYbr6Mb2XhmVb2+qv4lyRK6D6t5+eDsC9tD6JZgl9KtMv4CXSlal+Sfq+onMz94Bp4Ts4ZufskL6SbKn0U312Qd8Ea6pfMnJFlfVS8YKuSmpJuo/pYkRw34N7wJXQnfm27jggfQ/a9vR7cF3LP6Yvxgun2z/WyIkOnmqR1LN/L3Ybpi+TS6/e2topsyMNhqvX7B73i6ke8fJ9kD2CfdsXePpHsePmbowjbzOUf3ujmQrgjftarOSfI6uuL+6P418/kBMh5Kt0eE79DNF/sc3cLD29NN7H9Nks/SLUjeBVhM93qfz0w3A85NcpequrQvlccnOb6q/ifd3PJX9Av7r+kXbLbJwgZAVW2TX3RLuovoRg0eR/ciuyPwQbpVZLsPnXGW3C8AntefXkw3EvgauhfZp+lWRWbonCOyvpauGH0OuNmY87yKbhcBT+l/fiDdqoA/mXGdHefx8W/R/9579z8/nW4T+z+jG315MbB46P/bRpmfTzc14AX9z0+kK94P6F8zTwJ2GzrnLNm3p1vteNMBM9wf+Crwpv7nJXQLXM+mG7l8JN1o3JB/pyfQjQgdTzeNYWl//h799x0GzLZd/xp9AN1q2pPpdjHzQ7pdo/w73RbNgz/fZjzn7g/s3P98Kt3I5Z36n29BN6H+5gNkOxj4BPCH/c8v7z/r7gUs6s/7vf77A4DfH2O2h9DNq7tJ//PT6FbbHjwjz2cn6fNtsOfY0AGG/gLeQTff4Ey6id8vp9sv1oKhs82S92i6w9vsM+O8T9MtvS8dOt8NyPoZutUuY//79n+jx9GtAjqmP+8Aui1vHzuGx78JXTk7rP95EfBmup2APmvDG9QkfdGNSh7ev6Ge3H8onUg3D/DwofPdgPxH0U1e3nngDD+n25gJurJ7El1hH6xQzsh3L7otMc+acd4z6eZYTk1AvhX9a/ZDdPPDTqbbp+F2k5BvE3mPppu7uBPd4MALgPcCd+kvH+SzhW4BZh3wnP7nRXTzK99Kt9EBQxYiuoXo784obs+gOyTehvfL7Yf+307C1za9ejTd/sweRffC+lvgEzX5mw6fSTfsfly/Nc12dCXz8hp+PsfGzuR3sy6je9Ma+3y7qvoO8J10uzI4tf++lG5y89ljePzLkrwXuG+SK6rbWvk9dIVtH7pJ/hOlqn4A/CDdvrneTXdYpXf13yf+sFRV9YEka4DzkhxY3QHPh8iwjm5VKdXNcXsb3b6wJmGu7Hl0C1fTSe5NN43hCXT7oxx01zy9f6YrbRt2zfMYunlPmZB8v6W6ecWr6QrHgXS71Hg58Nx0G72tHSjXx/spK3/Vz4F+Z5KX0+3m4+f9dQabjlFVH0l3SLRz+9fqG9Lt3Pxvktyf7mg/27xtfpcf6XZRsK5mTGoccvL3DZHkVnSrMx5Kt+rqL6rqgmFTbdqkZk1yBN3I6jXASTWm42Km25T9yXRv5l+iWz32OOBFwIsn4W8zm35S8KfoltTfPnSeGyPdIcJeQ7cPvEHmZ/UZVgLPrqozhsgwm3TH53xo/3Up8MqasEOPpdsa/gS6hZxHV9WFwyYaLd2uPk4DDqluNxa7VNWlE5Lr5cAbqur0geP8jo1fq0luMsTC1o2Vbqfi817It/nS1rJ0u/1IdYcfmWiTmLXforRqzDvh7BcUDgX2A/6LbpXjm+gOPTbIZPQbKsm+wOp+1LIp/QTxlwIHDjHS22e4P92I0feGePzrk2QRwCSucejfQ46h28HqIDsLv7H6jTxeTPecm5gP2yQPpdvP3eHAz2qgrZZn079WX0Y3faUm6W+3KekOYv9W4ISa5yP+WNqkASW5D
93qkydP8ijb1iLJ8qqaqH0u6oab9LUgmzKpz7kku457gfXGmNS/22ySLBvHoISlTRpQv1pqcT93TJKkWVnaJEmSGuBhrCRJkhpgaZMkSWqApe1GSrJi6AyjTHK+Sc4G5ttS5tsy5tt8k5wNzLelzPcblrYbb6KfPEx2vknOBubbUubbMubbfJOcDcy3pczXs7RJkiQ1YKvfenTx4qW1dOmyObu/tWvXsHjxkjm7v4UL5/ZIYmvWXMuSJdvN2f2tWzd3+9i87ro1LFo0d387gEWLFs/Zfa1Zs4olS7afs/u71a1vMWf3BXDZpZdyk112mbP7+9EPLpmz+wJYu/ZaFi+eu+fe4qVz+1y59tpr2G67uXsvWLp86ZzdF8DVV1zB8p12mrP7W3vt3O7jc9U1V7P9suVzdn+rr5m7o+7N9XMPYNHiRXN2X3P93Ft97dwe0Wmu35uvu25un3vr16+b08/KnW86d++jAKtWXc3228/da+Nnl/zwl1W166Yu2+qPPbp06TIOOuhBQ8eY1Y473mzoCCNd9qu5/WCfa7vuduuhI8zqlNc+Z+gIIz1/xalDRxhp931uO3SEkfY+eO+hI4z0/Qu/P3SEkb7xxck+dO0tbnOroSPM6jsXTvQRvPjpzy4eOsJIRz76cUNHGOm0lz1j1v12unpUkiSpAZY2SZKkBljaJEmSGmBpkyRJaoClTZIkqQGWNkmSpAZY2iRJkhpgaZMkSWqApU2SJKkBljZJkqQGWNokSZIaYGmTJElqgKVNkiSpAZY2SZKkBljaJEmSGmBpkyRJasC8lbYki5Msm6P7WpZk0VzclyRJUovmvLQluUOS04BvAn/Qn/c3Sb6W5CtJXtWft0eST/XnfTLJbfrzH5XkwiQXJPlsf7d/AHwryauS3GGuM0uSJE26OSlt/UjYCUk+B7wJ+Bpwp6r6cpJdgIcBd6yqOwGn9Dd7A/D2/rx/BV7fn/8S4AFVtR/wUICq+jJwJ+AbwJuTfK5/vE2O5CVZkeTcJOeuXbtmLn5FSZKkQc3VSNslwEnAE6vq7lX1lqq6qr/sCmA18JYkDwdW9ecfAryzP/0vwN3702cDpyd5ErBwwwNU1VVV9eaqOgx4Uv91yabCVNXKqjqwqg5cvHjJHP2KkiRJw5mr0vZI4MfA+5K8JMnuGy6oqnXAXYEzgCOBj466o6o6GXgRcGvgvH6kDvj1KtWXAu8Hftg/riRJ0lZvai7upKo+Dny8L1iPBT6Q5JfAE4FfAttX1X8lORv4Xn+zzwPH0o2yHQecBZBkr6o6BzgnyQOBWyfZAXgzcDPgbcBhVXXpXGSXJElqwZyUtg36IvU64HVJ7gqsB3agK3FLgQDP6a/+DOBtSZ4P/AI4oT//lUl+v7/uJ4ELgN8DXlBVX5jLvJIkSa2Y09I200YF666buPwHwH03cf7DN3F3P+y/JEmStknuXFeSJKkBljZJkqQGWNokSZIaYGmTJElqgKVNkiSpAZY2SZKkBljaJEmSGmBpkyRJaoClTZIkqQGWNkmSpAZY2iRJkhpgaZMkSWqApU2SJKkBljZJkqQGTA0dYL6tXr2Kb33r3KFjzOrwBx47dISRdtvjFkNHGGn1NWuGjjCrpxz3/KEjjHSbvW43dISR1q5eO3SEkQ7ab++hI4x05r99eugII1177dVDRxhpzzvddugIs/rBt749dISRdtzxZkNHGOl2d57s975RHGmTJElqgKVNkiSpAZY2SZKkBljaJEmSGmBpkyRJaoClTZIkqQGWNkmSpAZY2iRJkhpgaZMkSWqApU2SJKkBljZJkqQGWNokSZIaYGmTJElqgKVNkiSpAZY2SZKkBljaJEmSGmBpkyRJasDgpS3Jc5Jc2H89K8keSb6e5E1JLkry8STb9dfdK8lHk5yX5Kwkew+dX5IkaRwGLW1JDgBOAA4G7gY8CbgJ8PvA31fVHYHLgUf0N1kJPKOqDgCeB7xx3JklSZKGMDXw498deH9VXQOQ5H3APYCLq+r8/jrnAXskWQ4cCrwnyYbbL9nUnSZZAawAWLhw6F9RkiRpy01qo1kz4/R6YDu6UcHLq2r/67txVa2kG5Vj8eLtaj4CSpIkjdPQc9rOAo5Osn2SZcDD+vN+R1VdCVyc5FEA6ew3vqiSJEnDGbS0VdWXgNOBLwDnAG8GLhtxk+OAk5JcAFwEHDXfGSVJkibB4KtHq+rVwKs3OnvfGZe/asbpi4EjxhRNkiRpYgy9elSSJEk3gKVNkiSpAZY2SZKkBljaJEmSGmBpkyRJaoClTZIkqQGWNkmSpAZY2iRJkhpgaZMkSWqApU2SJKkBljZJkqQGWNokSZIaYGmTJElqgKVNkiSpAZY2SZKkBkwNHWC+TU+v46qrLh06xqwWLV00dISRdrjpjkNHGGnddZcNHWFW3/7WF4eOMNLt9t1n6AgjVdXQEUa66fLlQ0cY6dprrxk6wkjr1l03dISRli7fbugIs1q4cLI/NxYtWjJ0hJGWLls6dITN5kibJElSAyxtkiRJDbC0SZIkNcDSJkmS1ABLmyRJUgMsbZIkSQ2wtEmSJDXA0iZJktQAS5skSVIDLG2SJEkNsLRJkiQ1wNImSZLUAEubJElSAyxtkiRJDbC0SZIkNaDZ0pbkmUm+nuRfh84iSZI036aGDrAFngocXlU/GjqIJEnSfGtipC3Jc5Jc2H89K8k/AnsCH0ny7KHzSZIkzbeJH2lLcgBwAnAwEOAc4LHAEcB9quqXA8aTJEkai4kvbcDdgfdX1TUASd4H3GPUDZKsAFZ0p5sYTJQkSRppq2w0VbWyqg6sqgMXLNgqf0VJkrSNaaHRnAUcnWT7JMuAh/XnSZIkbTMmfvVoVX0pyenAF/qz3lxVX04yYCpJkqTxmvjSBlBVrwZevdF5ewyTRpIkafxaWD0qSZK0zbO0SZIkNcDSJkmS1ABLmyRJUgMsbZIkSQ2wtEmSJDXA0iZJktQAS5skSVIDLG2SJEkNsLRJkiQ1wNImSZLUAEubJElSAyxtkiRJDbC0SZIkNWBq6ACabAsWZugII1XV0BFmNV3TQ0doWk1P7v+2BdPTPv+2xIIFjmlsrmSyPzcmPN5IPislSZIaYGmTJElqgKVNkiSpAZY2SZKkBljaJEmSGmBpkyRJaoClTZIkqQGWNkmSpAZY2iRJkhpgaZMkSWqApU2SJKkBljZJkqQGWNokSZIaYGmTJElqgKVNkiSpAZY2SZKkBkxEaUuyc5Kn9qfvneTDQ2eSJEmaJBNR2oCdgacOHUKSJGlSTQ0doPc3wF5JzgeuA65JcgawL3Ae8NiqqiQHAK8GlgO/BI6vqksGyixJkjQ2kzLS9mfAd6tqf+D5wJ2BZwH7AHsChyVZBLwBeGRVHQC8FTh1kLSSJEljNikjbRv7QlX9CKAffdsDuJxu5O0TSQAWApscZUuyAljRnZ6UXipJkrT5JrW0rZlxej1dzgAXVdUh13fjqloJrASYmlpU85JQkiRpjCZlGOoqYIfruc43gV2THAKQZFGSO857MkmSpAkwESNtVXVpkrOTXAhcC/xs
E9dZm+SRwOuT7ESX/bXARWMNK0mSNICJKG0AVfWYWc5/+ozT5wP3HFcmSZKkSTEpq0clSZI0gqVNkiSpAZY2SZKkBljaJEmSGmBpkyRJaoClTZIkqQGWNkmSpAZY2iRJkhpgaZMkSWqApU2SJKkBljZJkqQGWNokSZIaYGmTJElqgKVNkiSpAVNDB5hvVdNcd92aoWPManr99NARRlp33fqhI4y0bu26oSPMaqeddh06wkg1XUNHGGl6erJfG7ssXz50hJGSDB1hpHXr1g4doVmT/JkGk/+/zcJ2x6vaTS5JkrQNsbRJkiQ1wNImSZLUAEubJElSAyxtkiRJDbC0SZIkNcDSJkmS1ABLmyRJUgMsbZIkSQ2wtEmSJDXA0iZJktQAS5skSVIDLG2SJEkNsLRJkiQ1wNImSZLUAEubJElSAwYrbUmOTrLPZt52/yQPmutMkiRJk2pOSluSqc242dHAZpU2YH/A0iZJkrYZN6i0JXlxkm8m+VySf0vyvCRnJnltknOBP0lyQJLPJDkvyceS3LK/7ZOSfDHJBUnem2T7JIcCDwVemeT8JHv1Xx/tb39Wkr372z8qyYX97T+bZDHwl8Ax/W2Pmae/jSRJ0sS43hGyJAcBjwD2AxYBXwLO6y9eXFUHJlkEfAY4qqp+0RepU4ETgfdV1Zv6+zoFOKmq3pDkg8CHq+qM/rJPAidX1beTHAy8Ebgv8BLgAVX14yQ7V9XaJC8BDqyqp8/ZX0KSJGmC3ZDVmocBH6iq1cDqJB+acdm7+++3B/YFPpEEYCFwSX/Zvn1Z2xlYDnxs4wdIshw4FHhPf3uAJf33s4HTk/w78L4b8kslWQGs6E/fkJtIkiRNtM2ZizbTNf33ABdV1SGbuM7pwNFVdUGS44F7b+I6C4DLq2r/jS+oqpP7kbcHA+clOeD6QlXVSmAlwMKFC+v6fw1JkqTJdkPmtJ0NPCTJ0n5E7MhNXOebwK5JDgFIsijJHfvLdgAu6VehHjfjNlf1l1FVVwIXJ3lUf/sk2a8/vVdVnVNVLwF+Adx65m0lSZK2Bddb2qrqi8AHga8AHwG+Clyx0XXWAo8EXpHkAuB8utWdAC8GzqErf9+YcbN3Ac9P8uUke9EVupP6218EHNVf75VJvprkQuDzwAXAp4F93BBBkiRtK27o6tFXVdXLkmwPfBY4b8PGBRtU1fnAPTe+YVX9A/APmzj/bH53lx9HbOJ6D99Enl8BB93A7JIkSc27oaVtZb8j3KXA26vqS/OYSZIkSRu5QaWtqh4z30EkSZI0O489KkmS1ABLmyRJUgMsbZIkSQ2wtEmSJDXA0iZJktQAS5skSVIDLG2SJEkNsLRJkiQ1wNImSZLUAEubJElSAyxtkiRJDbC0SZIkNeAGHTC+ZckClizZfugYzZpePz10hJEWLlo4dIRZXXXVZUNHGGnh1OT+7QAW1GQvU/7siiuGjjDS1NTioSOMNOnvy+uvWzd0hFktXrzd0BFGWrt29dARRlp77dqhI2y2yX5XlCRJEmBpkyRJaoKlTZIkqQGWNkmSpAZY2iRJkhpgaZMkSWqApU2SJKkBljZJkqQGWNokSZIaYGmTJElqgKVNkiSpAZY2SZKkBljaJEmSGmBpkyRJaoClTZIkqQGWNkmSpAaMrbQleWaSrye5LMmf9ee9LMnzxpVBkiSpVVNjfKynAodX1Y/G+JiSJElbhbGMtCX5R2BP4CNJnp3k7zZxnTOTvCbJuf2I3EFJ3pfk20lO6a+zLMl/JrkgyYVJjhlHfkmSpKGNZaStqk5OcgRwH+DIEVddW1UHJvkT4APAAcCvgO8meQ1wb+AnVfVggCQ7zW9ySZKkyTBpGyJ8sP/+VeCiqrqkqtYA3wNu3Z9//ySvSHKPqrpiU3eSZEU/Yndu1fR4kkuSJM2jSStta/rv0zNOb/h5qqq+BdyFrrydkuQlm7qTqlpZVQd2o3aT9itKkiTdeOPcEGGLJbkV8KuqekeSy4EnDhxJkiRpLJoqbcAfAq9MMg1cBzxl4DySJEljMbbSVlV79CdP77+oqpfNuPzeM06fCZy5qcuAj81PQkmSpMnlhC9JkqQGWNokSZIaYGmTJElqgKVNkiSpAZY2SZKkBljaJEmSGmBpkyRJaoClTZIkqQGWNkmSpAZY2iRJkhpgaZMkSWqApU2SJKkBljZJkqQGWNokSZIaMDV0gPk2PT3NqlVXDh1jVlOLJvtfsMNNdxg6wkhXXjq5/9vLLrtk6AhNm14/PXSEkW6+445DRxhp1aorho4w0po1q4aOMNKSZUuHjjCr6en1Q0do2vY7bD90hM3mSJskSVIDLG2SJEkNsLRJkiQ1wNImSZLUAEubJElSAyxtkiRJDbC0SZIkNcDSJkmS1ABLmyRJUgMsbZIkSQ2wtEmSJDXA0iZJktQAS5skSVIDLG2SJEkNsLRJkiQ1wNImSZLUgKZLW5LPD51BkiRpHJoubVV16NAZJEmSxqHp0pbk6qEzSJIkjcPU0AHmQ5IVwIr+p0GzSJIkzYWtsrRV1UpgJcCCBQtr4DiSJElbrOnVo5IkSdsKS5skSVIDLG2SJEkNaLq0VdXyoTNIkiSNQ9OlTZIkaVthaZMkSWqApU2SJKkBljZJkqQGWNokSZIaYGmTJElqgKVNkiSpAZY2SZKkBljaJEmSGmBpkyRJaoClTZIkqQGWNkmSpAZY2iRJkhpgaZMkSWrA1NAB5tvChVPsuOPNho4xq+np6aEjjLTqylVDRxgpydARZrXrrrcZOsJI69etHzrCSAunFg4dYaRvXHLJ0BFGWrZs56EjNG2S3/umphYNHWGkhQsm+7V75aVXDh1hsznSJkmS1ABLmyRJUgMsbZIkSQ2wtEmSJDXA0iZJktQAS5skSVIDLG2SJEkNsLRJkiQ1wNImSZLUAEubJElSAyxtkiRJDbC0SZIkNcDSJkmS1ABLmyRJUgMsbZIkSQ2wtEmSJDVgLKUtyR5JLhzHY0mSJG2NHGmTJElqwDhL28Ikb0pyUZKPJ9kuyZOSfDHJBUnem2T7JDsl+UGSBQBJliX5YZJFSfZK8tEk5yU5K8neY8wvSZI0mHGWtt8H/r6q7ghcDjwCeF9VHVRV+wFfB06qqiuA84F79bc7EvhYVV0HrASeUVUHAM8D3ripB0qyIsm5Sc6dnl4/n7+TJEnSWEyN8bEurqrz+9PnAXsA+yY5BdgZWA58rL/83cAxwKeBY4E3JlkOHAq8J8mG+1yyqQeqqpV0BY9Fi5bUHP8ekiRJYzfO0rZmxun1wHbA6cDRVXVBkuOBe/eXfxD4qyQ3BQ4APgUsAy6vqv3HlFeSJGliDL0hwg7AJUkWAcdtOLOqrga+CLwO+HBVra+qK4GLkzwKIJ39hggtSZI0bkOXthcD5wBnA9/Y6LJ3A4/tv29wHHBSkguAi4CjxhFSkiRpaGNZPVpV3wf2nfHzq2Zc/A+z3OYMIBuddzFwxDxElCRJmmhDj7RJkiTpBrC0SZIkNcDSJkmS1ABLmyRJUgMsbZIkSQ2wtEmSJDXA0iZJktQAS5skSVIDLG2
SJEkNsLRJkiQ1wNImSZLUAEubJElSAyxtkiRJDbC0SZIkNWBq6ADzrWqadevWDh1jVuvXrR86QtPWX7du6Aiz2nnnXYeOMNL0+sl+7lXV0BFGuuXOOw0dYaRJ//+uXbt66AgjLVy0cOgIs1qzZtXQEUa6dvXVQ0cYafF2i4eOsNkcaZMkSWqApU2SJKkBljZJkqQGWNokSZIaYGmTJElqgKVNkiSpAZY2SZKkBljaJEmSGmBpkyRJaoClTZIkqQGWNkmSpAZY2iRJkhpgaZMkSWqApU2SJKkBljZJkqQGWNokSZIa0HxpS7Jw6AySJEnzbeylLcljk3whyflJ/inJ05K8csblxyf5u1muu7A//+okpyW5ADhk3L+DJEnSuI21tCW5A3AMcFhV7Q+sB64GHjbjascA75rlusf111kGnFNV+1XV58YUX5IkaTBTY368+wEHAF9MArAd8HPge0nuBnwb2Bs4G3jaLNeFrsC9d7YHSbICWAGwYEHza4AlSZLGXtoCvL2q/vy3zkxOBP4Y+Abw/qqqdE3td67bW11V62d7kKpaCawEmJpaVHOWXpIkaSDjHob6JPDIJDcHSHLTJLsD7weOAh4NvOt6ritJkrTNGWtpq6qvAS8CPp7kK8AngFtW1WXA14Hdq+oLo647zrySJEmTYtyrR6mqdwPv3sT5R96I6y6fn3SSJEmTyVn6kiRJDbC0SZIkNcDSJkmS1ABLmyRJUgMsbZIkSQ2wtEmSJDXA0iZJktQAS5skSVIDLG2SJEkNsLRJkiQ1wNImSZLUAEubJElSAyxtkiRJDbC0SZIkNcDSJkmS1ICpoQPMt6mpxey6662HjjGrNdesHjrCSD+9+KdDR2jWPR7w4KEjjHTpT345dISRdtxlx6EjjPT1H/9k6Agj3eQWuwwdYaTVa64ZOsJIl/30sqEjzKqqho4w0nVr1wwdYaTLLvnV0BE2myNtkiRJDbC0SZIkNcDSJkmS1ABLmyRJUgMsbZIkSQ2wtEmSJDXA0iZJktQAS5skSVIDLG2SJEkNsLRJkiQ1wNImSZLUAEubJElSAyxtkiRJDbC0SZIkNcDSJkmS1ABLmyRJUgMsbZIkSQ0YW2lLsizJfya5IMmFSY5JckCSzyQ5L8nHktyyv+5eST7an39Wkr37809P8vokn0/yvSSPHFd+SZKkIU2N8bGOAH5SVQ8GSLIT8BHgqKr6RZJjgFOBE4GVwMlV9e0kBwNvBO7b388tgbsDewMfBM4Y4+8gSZI0iHGWtq8CpyV5BfBh4DJgX+ATSQAWApckWQ4cCrynPx9gyYz7+Y+qmga+luQWm3qgJCuAFQBTU4vn4VeRJEkar7GVtqr6VpK7AA8CTgE+BVxUVYfMvF6SHYHLq2r/We5qzcyrz/JYK+lG61i6dFltYXRJkqTBjXNO262AVVX1DuCVwMHArkkO6S9flOSOVXUlcHGSR/XnJ8l+48opSZI0ica5evQPgVcmmQauA54CrANe389vmwJeC1wEHAf8Q5IXAYuAdwEXjDGrJEnSRBnn6tGPAR/bxEX33MR1L6bbcGHj84/f6Oflc5VPkiRpkrmfNkmSpAZY2iRJkhpgaZMkSWqApU2SJKkBljZJkqQGWNokSZIaYGmTJElqgKVNkiSpAZY2SZKkBljaJEmSGmBpkyRJaoClTZIkqQGWNkmSpAZY2iRJkhowNXSA+VY1zZo1q4aOMavFSxcPHWGk7XbcfugII1175eT+b7/4mc8OHWGkfe58wNARRkoydISRFk1N9tvnqqsm97UBECb7/7tkuyVDR5jVkiWT/b68/bIdh44w0uLtJvtzdxRH2iRJkhpgaZMkSWqApU2SJKkBljZJkqQGWNokSZIaYGmTJElqgKVNkiSpAZY2SZKkBljaJEmSGmBpkyRJaoClTZIkqQGWNkmSpAZY2iRJkhpgaZMkSWqApU2SJKkBljZJkqQGNFHaknx+6AySJElDaqK0VdWhQ2eQJEkaUhOlLcnV/fd7JzkzyRlJvpHkX5Nk6HySJEnzrYnStpE7A88C9gH2BA4bNI0kSdIYtFjavlBVP6qqaeB8YI+Nr5BkRZJzk5y7fv36ceeTJEmacy2WtjUzTq8Hpja+QlWtrKoDq+rAhQsXji+ZJEnSPGmxtEmSJG1zLG2SJEkN+J1Vi5Ooqpb3388Ezpxx/tMHiiRJkjRWjrRJkiQ1wNImSZLUAEubJElSAyxtkiRJDbC0SZIkNcDSJkmS1ABLmyRJUgMsbZIkSQ2wtEmSJDXA0iZJktQAS5skSVIDLG2SJEkNsLRJkiQ1wNImSZLUgKmhA8y3tWtX84MfXDR0jFktXfa4oSOMtNseuw0dYaT//fr/Dh1hVv/zPx8cOsJIdz7kkKEjjLRm9dqhI4x0/333HTrCSKf+7EdDRxhpzZprh44w0m63vcXQEWa1bt1kvzaqaugII916z/8zdITN5kibJElSAyxtkiRJDbC0SZIkNcDSJkmS1ABLmyRJUgMsbZIkSQ2wtEmSJDXA0iZJktQAS5skSVIDLG2SJEkNsLRJkiQ1wNImSZLUAEubJElSAyxtkiRJDbC0SZIkNcDSJkmS1ABLmyRJUgMsbZIkSQ2YmNKWZI8k30hyepJvJfnXJIcnOTvJt5Pctf++a3/9BUm+s+FnSZKkrdnElLbe7YDTgL37r8cAdweeB7wAeAdwXH/dw4ELquoXA+SUJEkaq0krbRdX1Verahq4CPhkVRXwVWAP4K3A4/vrngi8bVN3kmRFknOTnDuGzJIkSfNu0krbmhmnp2f8PA1MVdUPgZ8luS9wV+Ajm7qTqlpZVQdW1YHzmlaSJGlMJq203RBvpltN+p6qWj90GEmSpHFosbR9EFjOLKtGJUmStkZTQwfYoKq+D+w74+fjZ7lsP7oNEL4xxniSJEmDmpjSdkMk+TPgKfxmC1JJkqRtQlOrR6vqb6pq96r63NBZJEmSxqmp0iZJkrStsrRJkiQ1wNImSZLUAEubJElSAyxtkiRJDbC0SZIkNcDSJkmS1ABLmyRJUgMsbZIkSQ2wtEmSJDXA0iZJktQAS5skSVIDpoYOMN+SBSxevHToGLNat2790BFGuubKa4aOMNL09PTQEWa12263HTrCSJP+3EsydISRrrz22qEjjLR06bKhI4y0fv26oSOMtGbVmqEjzGrS/3br1183dISRVl+3dugIm82RNkmSpAZY2iRJkhpgaZMkSWqApU2SJKkBljZJkqQGWNokSZIaYGmTJElqgKVNkiSpAZY2SZKkBljaJEmSGmBpkyRJaoClTZIkqQGWNkmSpAZY2iRJkhpgaZMkSWqApU2SJKkBE1Paklzdf79VkjNmnP9vSb6S5NnDpZMkSRrW1NABNlZVPwEeCZBkN+CgqrrdsKkkSZKGNTEjbRsk2SPJhf2PHwf+T5Lzk9wjyV5JPprkvCRnJdl7yKySJEnjMnEjbRt5KPDhqtofIMkngZOr6ttJDgbeCNx3wHySJEljMeml7deSLAcOBd6TZMPZS2a57gpgRf/TGNJJkiTNr2ZKG92q3Ms3jL
qNUlUrgZUACxYsrHnOJUmSNO8mbk7bbKrqSuDiJI8CSGe/gWNJkiSNRTOlrXcccFKSC4CLgKMGziNJkjQWE7N6tKqW99+/D+y78en+54uBIwaIJ0mSNKjWRtokSZK2SZY2SZKkBljaJEmSGmBpkyRJaoClTZIkqQGWNkmSpAZY2iRJkhpgaZMkSWqApU2SJKkBljZJkqQGWNokSZIaYGmTJElqgKVNkiSpAZY2SZKkBkwNHWC+LViwkOXLdx46RrOm100PHWGkBQsmd7ljkrMBJBk6wkgLFk723++Hl146dISRFi1aPHSEkRYvXjp0hJHWrF47dIRZLVq0ZOgIIy1aNNn/21VXXTt0hM022e+KkiRJAixtkiRJTbC0SZIkNcDSJkmS1ABLmyRJUgMsbZIkSQ2wtEmSJDXA0iZJktQAS5skSVIDLG2SJEkNsLRJkiQ1wNImSZLUAEubJElSAyxtkiRJDbC0SZIkNcDSJkmS1ABLmyRJUgMsbZIkSQ2wtEmSJDVgaugA8yHJCmAFwIIFCwdOI0mStOW2ypG2qlpZVQdW1YHJVvkrSpKkbYyNRpIkqQGWNkmSpAY0XdqS/FeSWw2dQ5Ikab41vSFCVT1o6AySJEnj0PRImyRJ0rbC0iZJktQAS5skSVIDLG2SJEkNsLRJkiQ1wNImSZLUAEubJElSAyxtkiRJDbC0SZIkNcDSJkmS1ABLmyRJUgMsbZIkSQ2wtEmSJDXA0iZJktSAVNXQGeZVkl8AP5jDu7wZ8Ms5vL+5Nsn5JjkbmG9LmW/LmG/zTXI2MN+W2tby7V5Vu27qgq2+tM21JOdW1YFD55jNJOeb5Gxgvi1lvi1jvs03ydnAfFvKfL/h6lFJkqQGWNokSZIaYGm78VYOHeB6THK+Sc4G5ttS5tsy5tt8k5wNzLelzNdzTpskSVIDHGmTJElqgKVNkiSpAZY2SZKkBljaJEmSGmBpkyRJasD/B4t+2dYRgFDsAAAAAElFTkSuQmCC\n",
-      "text/plain": [
-       "<Figure size 720x1440 with 1 Axes>"
-      ]
-     },
-     "metadata": {
-      "needs_background": "light"
-     },
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "display_attention(tokenizer.tokenize(sentence), attention)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 52,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "MyjsYDeJnCui",
-    "outputId": "d87ccbee-9e91-4e64-fb2b-aaaf474f12e6"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "0.051942430436611176\n"
-     ]
-    }
-   ],
-   "source": [
-    "sentence = \"i thought it was going to be one of the greatest films i have ever seen in my life, \\\n",
-    "but it was actually the absolute worst movie of all time.\"\n",
-    "\n",
-    "sentiment, attention = predict_sentiment(tokenizer, vocab, model, device, sentence)\n",
-    "print(sentiment)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 53,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAm4AAAJsCAYAAABEYC8YAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/d3fzzAAAACXBIWXMAAAsTAAALEwEAmpwYAABjSElEQVR4nO3deZhdRZ3/8fenOyEhJGwSFRWJbLIJaAIKAiJuoCirooAIIhFRURRHxp0ZdHBwdwb5RVR0RGFAEdxABlCQRUggAQIoKjgujILsW0jS398fVU0ube+n7r3ndH9ez9NP3+WcunW2Ot9TVaeOIgIzMzMzq7+ebmfAzMzMzEbHgZuZmZlZQzhwMzMzM2sIB25mZmZmDeHAzczMzKwhHLiZmZmZNYQDNzMzM7OGcOBmZmZm1hAO3MzMzCYxSep2Hmz0HLiZ2RNaC3BJa3QzL2bWfpIU+RFKkraT9AIHcvUmP/KqeST1RERft/NhE0surHcmXdAFsB1wWkQ80s18mVn7SToOeDXwIPAY8M8R8fvu5soG4xq3hpA0TdKM/HbdrmbGJqqpgIB/Ab4DXBkRj0hyOWHWJpI2lDS9y3l4MfDSiNgduBGYCdzezTzZ0FwgN0CuCXkl8CZJBwMXS1rHJ1QrJTeXPA7cBjwLWAI8A8C1u2blKZkJnAG8rsO/PbPl9RrAn4FfSjoZmAfsGxEh6WWdzJeNzpRuZ8BGlg+gS4BfAHOAt0XEvd3NlU0U/X1cJO0BPAS8CHghsLekdSPidEnrA1Mi4o9dzaxZw0maEhErcr+yhyR9AThA0sUR8fcO/P50YC9JfcBqpHPKT0jH/Qxg/4h4XNKRwNsk7RkR97Q7XzZ6DtxqrL+DaCQPS/oWsC+whaSrgbsiYmVXM2mNl4O21wEnAh+OiLskXQ6sA+wqaXtSf7cjuphNs0aTtH5E3BkRKyRtC9wL3AP8CNgLWD1P1+4+zMuBpcA5+Te3iogHJV0IvAT4kKSVpFrANzpoqx/fnFBTrQevpBcAy4A/kTqNngncCnwY2BHojYjLupVXazZJa5EK8XeRmkrnAhuTanifC7wR+FFE/KhrmTRrsHwRfjqwXkS8RtJ/AtNIF0fHAP9KuobqyMWRpKcAPyTVsP93RJyWP38l8PT8d25E3NaJ/NjYOHAbhZampCdum27z780GvhoR++Sg7fvAb0lXSacDdwDfAP4KHAAcGhE/bne+bGLKgdv3gCuBTUgXBy8DvhQRn22ZriP7v9lEIumpEfE3SWsCXwf+FhFH5z7K/wxsSupvvj3wuoi4rZ3HWm4CXRYR38q16R8GLouIz+XzzV8i4v/a8dtWhgO3YfTXeknqjYiVkl4YEb/qwO9OBU4FNiLd2fNp4D7gUGAD4GvAb4AdgAcj4jqfVG20Wi5EdiAN+3EP6er/YOBnEfGL3Cn5cGA+8JhvUDAbu1zTthBYEhFvlTQL+DbwUEQcnKfZFHgq8FngpxFxQuk8tJ4bJL0IuBR4T0QskPRy4H3ACtKIBQdGxJ9L5mE0+Rrpc1vFgdsIJG0IHE1qOnoZsEk7O5C2nFSnAicARwHzIuL3kjYm9XHbDPhuRFzarnzYxCbp1aQLgv/I/18ZEdfk715FOon8U0T8pHu5NGs+SesCVwEXRsQxOXj7JvB4RLyxZbqnk1pU3hARD7QhH88G7ouIByTNA/6HNFbbVyRtBhwGfCsibi3924PkpXXQ34NIXYFmRMR/tfu3JwIPJzEESW+UdAzwXeAW4PfAf5MGJ2ybHLRtBfRExIdIHVdPl7R2RPwOOC/n5a/tzEdpHom7HiT15Kb49wOvBe4iNcP/bx6eYG3gncAHHbSZVZPvIL2H1Bf5tZK+FBEPAm8BeiWd2zL5NqSheIrUbkvaQtIHJfXmWr2TSTcbzYqIhaQhpr4g6d0R8ZuI+NBogjZJU/Jd5kjaUlLvWPPWErS9i1Qx8ihwqqR9xprWZOTAbYC8U+5Iavf/G3Ac6WaAlwLn5LGu2vG7yv+3JQVrF+Um2kOBxcDZktbJnUW/GBE3tyMf7ZID0l3y1ZV1UA7I+sdtWoN08XENqQb5/cBBuU/LfqQ7zQ+KiB872DYbn5ZjZ4akp+TgbS5pGI4v5+DtCKBH0vPztPcBe0fEQ4V+fxNS/7l35fPG5cA+wIslrZlr2L8DHC9pbY1+XNDdSWOKfo5UQzh1DPnaQFL/3bOzgV1J5dDmpObbH0qaNtr0Jis3lQ4gafWIeDRflTyYP3shqdPoh9v823sCHyT1gXg3qVbt1fn28a+S7vDbHehrSp+jlqbfHUnL9jrg6Ig4tctZmzQk7QZMB9YH9gAOIV2M7A2sm5tOdgC+AhweETeMIs3Wpo6uPoJtsD4x3c6TTV4tZd7epNqkHlLXlq9LWge4mnQzwJH9/afblI/VgFeQatZvjIj/lPQOUgB5FekibgvgXyPiL2NIdzrwA9JYj4dFxHmjnO+ZwEGk/tuP5o9PIwWszwYOifSklqOAayNi0WjzNNm4xq1Fbhr9uqSp/UFb9jHSbdPt+t3eXN18CHBmRJwWEdsCK4H/ySehI0lXTiuadELqr2kDvkXqS/VW4ARJb+9uziaVB4CPkx5l9YOIWA68CbgM+C9JHwIWAP8yjqDtAGD3MVytFzUgLy/KzUNPyzcVjTlP/bUBFfLT2/LaNZaTUC7zXkk6bxxOegrJlyW9J9LA6TsCr5K0JenmoGJa97ncOnQB6Y7xbSW9KyK+Qurb1j/MzyljCdqypwGfAc4Cni9pp5YWoyGPuXzDw6nAc4CjImIFaYird7MqaDsIOJIGdAXq5vHtAXiz3Nb+JuAtEbFc0vSIeCw3Xf6N1D+gXWZFxH2SbuPJB/KbSUOAfCPn64bBahcaYAPS3VJXAVdJ+jVwqaQVEfG1LudtwurfVyLddXwFaRBdJD0nIm4nBVzvIfVzOyYiLhvN/jWgf8rbgP1aLyY6WdvVkpd3ku6KvQA4VOkO8DHdRJRrI1ZK+tp4akFyHjaR9HBEfCSfwJt4vNo4DNjWTwHeQXp81Paki/KvSpoREf8maaMcuLTl9yW9GViTNMbnl3KQsX/eR0+JiDMlrRERD4/xN95Jqmk7ilRrdyLweuCeXGv/GKkv+FBWAs8jBXyHkC4m1wYul/Qr0rp6S0T8aSz56rSBLQ6k9by8Y2VfREz6P1Ib/edIVyHPI+2U15KqmKeQ+gmI3LRc+Lc3AH4ObAvsTOrPtmP+vRcA/490O/mRXV5Ho172gdOSOsGeTRqluyd/dirpqmqfbm//ifjXvw1IQ8qsnv+2I/VpOZrUdPpsYLNxpr91Pkaekt/vAbwdeHoXlvVFwEWkB2N/GPgZqTWhfx2MuO+Smo2/Amwwzjy8DrgOeHU+hr87cFv4r9l/o9yPXtTyem3gXGDX/P404HfAM9u5T5Aupq4jXfhfSxo+qofUzebbpGFAxrxfkio2FgLPyO978/nxRFLlwv+SnsIw6LojPZXhkvx6P+AUUpBGPvftBMzp9nYe4zo5htRa8V3gpZ36XfdxyyR9mD
[remainder of base64-encoded PNG payload (display_attention output) omitted]\n",
-      "text/plain": [
-       "<Figure size 720x1440 with 1 Axes>"
-      ]
-     },
-     "metadata": {
-      "needs_background": "light"
-     },
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "display_attention(tokenizer.tokenize(sentence), attention)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 54,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "u1ezlkkknE2M",
-    "outputId": "1ba5ed25-cc74-4abc-ea51-f15798c45023"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "0.06497201323509216\n"
-     ]
-    }
-   ],
-   "source": [
-    "sentence = \"i thought it was going to be the absolute worst movie of all time, \\\n",
-    "but it was actually one of the greatest films i have ever seen in my life.\"\n",
-    "\n",
-    "sentiment, attention = predict_sentiment(tokenizer, vocab, model, device, sentence)\n",
-    "print(sentiment)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 55,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAm4AAAJsCAYAAABEYC8YAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/d3fzzAAAACXBIWXMAAAsTAAALEwEAmpwYAABisUlEQVR4nO3debwcVZn/8c/33oSEJBBWERWJbLIJwQQUBERUBEVZFQVEEImIiKI4Mu444OCAG47IL6LiwggDiigqyCACsieQBCIIo+CgorLvhCT3+f1xzjXN5e5V3VV17/f9et3Xre6uPv101amqp06dOq2IwMzMzMzqr6vqAMzMzMxseJy4mZmZmTWEEzczMzOzhnDiZmZmZtYQTtzMzMzMGsKJm5mZmVlDOHEzMzMzawgnbmZmZmYN4cTNrAYkqeoYzMys/py4mVVMkiL/hImkmZJe7kTOrN5at1FJU6uMxcYX+SevmkdSV0T0VB2HlUvSccAbgceAp4F/jYg/VhuVmfWVk7YdSY0fAcwEzoyIJ6uMy8YHt7g1hKRJkqbkh2tUGoyVTtKrgNdExK7ALcA04K5qozJ7LknrS5pcdRwVmwgI+BzwX8A1EfGkJB9Tre1cyRogn93tBrxD0kHAZZJW906iuSRNa5meCvwF+K2kU4DZwD4REZJeW1WMZq2UTAPOBt5SdTxVyV0bngHuBF4ELAReAOArIdYJE6oOwIaWD+C/Bq4AZgDviYiHqo3KRiu3VuwpqQdYibROfwG8EpgC7BcRz0g6AniPpD0i4sHKArZxTdKEiFiW+2E+LukrwP6SLouIByoOr6N6+6NK2h14nLTNvgLYS9IaEXGWpHWBCRFxT6XB2pjlxK3Geju/RvKEpO8B+wCbSboOuC8illcapI3GUmAxcD6wMrBFRDwm6RLg1cDHJS0ntWq83UmbVUHSuhFxb0Qsk7Q18BDwIHARsCep7o6rPrc5aXsLcCLwiYi4T9JVwOrAzpK2JfV3O7zCMG2M880JNdW6M5T0cmAJ8GdSp/VzgNuBTwDbA90RcWVVsdrISVoT+BnprP2/I+LM/PxuwPPz3wURcWd1Udp4lU8azwLWiog3Sfo6MImUoBwD/BspjxlXCYqk6aQTrqNJl0pnARuSroa8FHg7cFFEXFRZkDbmOXEbhpbm8X8O29Dmz1sb+GZE7J2Tth8D/0tqpTkLuBv4DvB3YH/gkIj4ebvjsnLkS6BLIuJ7+Qz9E8CVEfGlvL7/GhF/qzZKG68kPS8i/iFpVeDbwD8i4qjcp/ZfgY1J/aO3Bd4SEXd2at9YtZy4/Qi4BtiIdCL9WuC0iPhiy3zjYnlYNZy4DaK31UtSd0Qsl/SKiLi+A587ETgD2IB0Z+EXgIeBQ4D1gG8BdwDbAY9FxE3eUdRX33Uj6ZXA5cAHI2KupNcBHwaWke4YPiAi/lJFbEM9b2NbbmmbByyMiHdLWgX4AfB4RByU59kYeB7wReCXEXFCZQG3WctJ+3akYT8eJLU8HgT8KiKuyDcQHQbMAZ4eL5eNrTpO3IYgaX3gKFJz+GuBjdrZIbdlRzEROAE4EpgdEX+UtCGpj9smwA8j4vJ2xWHlk/Ri4OGIeFTSbOB/SGO1fUPSJsChwPci4vYOxdM68O+BpMvxUyLi+534fKsnSWsA1wKXRMQxOXn7LvBMRLy9Zb7nk64AvC0iHq0k2A6Q9EbSyfN/5v+7RcQN+bU3kBLYf4mIX1QXpY0nHk5iAJLeLukY4IfAbcAfgf8mDY7aNjlp2wLoioiPkzoCnyVptYj4A3BhjuXv7YyjbOPxlwAkbSbpY5K6cyvFKaQOzKtExDzSEC9fkfSBiLgjIj4+nKRN0oR85xqSNpfUPZr4WpK2o0knJ08BZ0jaezTlWfPlO0gfJPWdfbOk0yLiMeBdQLekC1pm34o0HMaYbGGS1JW7rXwEeDNwH6nLyv/loVFWA94PfMxJm3WSE7c+8kFxe1K/o38Ax5FuBngNcH4ev6cdn6v8f2tSsnZpvkR7CLAAOE/S6rmz+lcj4nftiKNdckK6U27ZGfPy+tyI1B/o6LzergL2Bl4ladV81v5fwPGSVtPwx+XblTSm35dILR4TRxjbepJ67whcG9iZ1Jq8KekS7s8kTRpJmdZsLSdWUyStmZO3WaRha76Wk7fDgS5J2+R5Hwb2iojHOx9xe+SErHeMxamkE/UbSNvHR4ADc//TfUmjMhwYET8fjyemVh1fKu1D0soR8VRuFXksP/cKUifcT7T5s/cAPkbqU/IBUqvaG/Pt+N8k3bW0K9DTlH4ULZd+tyd9t7cAR0XEGRWH1naSVgJeTzpbvyUivi7pfaQD4rWkA8NmwL9FxF9HUO5k4Cek8aMOjYgLR/DeFwIHkvpQPpWfPpN0EH4xcHCkEeCPBG6MiPnDLduaqWUb3YvU8tpF6orxbUmrA9eRbp45ore/b6UBt5GkXYDJwLrA7sDBpBP3vYA1cjeH7YBvAIdFxKJhlNnaJaHSoVP667tadUw2cm5xa5EvjX5b0sTepC37NGnYhnZ9bne+3HUwcE5EnBkRWwPLgf/JG9YRpJabZU3ayHpb2oDvkfqHvBs4QdJ7q42sPVrPvHPr7MWku9C2lnR0RHyD1Letd+iA00eStGXrAKcC5wLbSNqhpcV20G063/RwBvAS4MiIWEYaZuYDrEjaDgSOoAGX493SUVzeRncj7ecOI/0SwNckfTDSQN/bA2+QtDmpg/5Y9ijwGdJPWf0kIpYC7wCuBL4v6ePAXOBzo0ja9gd2HUHLeqn6xPLK3JVjnXwD3ohj6m21LxBPd8u0t+MR8AC8We7n8w7gXRGxVNLkiHg6X7r8B6l/UrusEhEPS7qTZ+8Y30kaAuQ7Oa5F/Z0xNcB6pLvPrgWulfR74HJJyyLiWxXHVpo+O8Z3AquSxtg7Le+Y9pP0flKydo6kqRHxxAg/4/2klrYjSa12JwJvBR7MLQFPk/piDmY58DJS0ncw6SC1GnCVpOtJwzy8KyL+PJLYOq1vSwZpWS91C8Lw9NmXrAm8j/Rza9uSTiK/KWlKRPy7pA1ykj8m9S6LSHfoX00aRBdJL4mIu0gJ1wdJ/dyOiYgrh7Mv7tOP9D3Avq11s5N1tSWW95Puir0YOERptIQR3XCXrxwsl/St0bTA5hg2kvRERHwynzw08dhWjYgY93+kPkJfIrWCvIx0ULyRdIlrAqmfksiXlkv+7PWA3wBbAzuS+rNtnz/v5cD/I92ef0TFy2jY373vvKRO+OeRRlrvys+dQWrR2bvq9d+GZfUe4CZS4n0jafiWLtJl7h+QhgEZ0TLN878j14UX5MfduX6eSEru/4/0KwwDrhfSLzP8Ok/vC5xOStLI9W8HYEbVy3CEy+UYUivID4HXVB1PXf6GU7+AV7ZMrwZcAOycH58J/AF4YTv2fXX56/1upOGXVs5/M0n9T48iXTp9MbDJKMvfMu8H1syPdwfeCzy/gu/6SuBSYBqpH/ev8r6pdxkMp87sRbpUvN4oY3hL3j++MR/vfth3Xfhv8D
/3ccskfYI0mO1fSDcHrEo6kL09Ip5s4+euQrostTvpgD8TOBa4h9RpfE/SjREPRwXDNOT+fYsi4qkhZ372+14PbA78X0RcIOm7pGThq6QxoN4J3ExKlo+IBlfElj5CXaSTgB8AcyPi0vz65cDiiDhaafiAhTHCAXYlrUPqHP1/pDPl/Uj1dX5EHClpPWBp33JbYmttnToH+EWkAYAPIbWy3AZ8PxrW0VxpMOMDSHc9fg/4c0S8q9qoqjWSbVbShaQhjrbIj79CGqus98Tj1Ii4sY3h1oKkPUmXSK8AniFdYdmANL7i/aT69eaIuGoYZfUdt7G3YWBj0ogALyD9fNj/RsRJJX+VoWLbgJR4TSD1v31LpCtL+5Ouigx4BSDv36aTjpHzImLnfLkzYpithvm4cAhwYUScn5+7Cbgt8jiBNrRx3cdN0mGSTpD0SdJYPHuTfoXgDNIZ0hTSYIvt+OzNASL1pfsm8FPSHYI3kw7KXyAlbs8n3c01rx1xDBHjsaR+L+u0PDdgX4SWflZbk1pyXggcIOmkfDD9M+lyzImkPlp/IJ3NNrZ/Q5+d9KoRsYS0Y1ujZbZ39j6OiEtGkbQdQWpVupB0g8eXSTv+A0mXG2ZExD39ldsS2zotT59HGsqBiPgecCuwPs3sOjGVtHzfRhqH7nBJK0l6Xic+PB+Ue6cr358Od5uVNAUgIvYCfiepNzm7EFiLtP85e7RJm6S1lYbLqL3cxeBEUgt0kFqCTiX9Qs0nSFdEhpW0wbMuSb5a0psi9ZP7NvBb4CsRsTfp+DJt4FLKpTS81WmkqxzvI/Vv3S0nbe8kHWMmD1HMhEh9HrcGtpD0kYhYHiPrIzeZdEx7ZT7ZhHSz1g6Sxky3mbarusmvqj/SKNfXkfpy/D9SEvH8/NpxpDPOrdrwud2khPk6UgtH7/PTSQncdcC2+bkXAr8Atq5g+bwxxzI1P55BSkzguZdCV2157dV5ee6eH28MnA38e5/59yB1gn5Z1XVhkGXQb7M9+XJvn+eOJP1MGaQW1N6+YiuR+pP8mnQJZqSXRw8BvkK+TEM6IK+Up3fP9XSNgWLPdW0z4N5cr/cgtQouJN2M8M/6V/XyHs36ILWM3MGzL7e8j5Tgdrc5nunATqSD0ZuAl1e8fIa1zZIu3R0LbNry3E+BK1oeP7/v+0YQxzHAz0ktz/8+0veXXUfy8/1ts73dNnYjXenYDZhPGvrjPNKl0k2GU34/y/jovI3dRLqM/4KW195NOhEfsFtD2cuB1CXnp6TWvm1ILfcnkBLWeUPth3PMZ+R1+xLSIPD3Ax8aZjyvISV8zyOdNJ5NuhHmhS3zvKSTdaXsOtbJv8rPEDut5exzJmkYhh9ExHtJd+h9L7/+IPCOGMZdQ6P43JUiNSvvAqwt6UyAiHiEtNO4k9wSGukOwP0jYmFZcYwgznVJzfqzJZ0EfB9YrDSWXOtlgFVJCUHvHUZTSZeuesd6+gPpEsTGSkOaQOocvwHwzoi4pZ3fZ7T6XFo8SNI7JL0LIPpcFsgdbd9NOksnIr4JnEbq9P9d0h2bx0TEU63LbojP7902D8ll93YefgBYJmkOqZX4XZHG3Oo3dlJ9u42UYDxA+q3Jj5H6g+2pNEp+b/3rmMFabgfSsj4Ozutkc+CzpAPIA/m1w0kHzZ9E+4etWJ10o8iPSevizjZ/Xr+Gsc0+n3Sg7R0OZhNSX8bdlQaGhnRysZWkK/Pjf8CzWmyHG8vbSZfiDgaeIPUZ7ojhbrMty2t6fu1XEbGA1C3lfRFxGfBXUgvus1qhBloefT57JWAVUv/Bl5Ou3HxS0vpKg6tvTxpKZHFJX/05WmLp/a4PkE5wXhcRN5OS/KWkY91Bg+2H877mUFIi+27SfvsO0j7lS0o3Xgwo7x9PAt5A6tN2N+kGqlcDeykPJh7pJpCOUfpVpJG+p6t1PSsNzdR5VWeOnf4jd6gkXcr7WMvzawLfavNnv45U+f+d1EGzm9Rf6Tukzqq3ADMrXj7T8/9JpLHCfkG6hNBFOnPcoZ/3PI90RvfO/HgP0gGkt9VNpJa3rate/6NYHh8iXSp5K/B70oCbra9PJbWUbkhq4Xgf6W7P7XKdegktZ9tDfFbrGftqLdM/I42j1ft4Aikx3niI8o4hnWX/hnQiAumAclpet/e1fk6HlufMlulhnbn2WS57A78jtep+i3SSsCapk/UPSC2bm3fw+xwGPEm6tLhKRXV0ev4/0DZ7COnk4Yuku9QnkVqDvwt8MNfbrUkH110KxrIXqc/kB4FLgIn5+W06uDwG3WbzPHvkunIS8B/5ufNIye4OpC4rM4f5eV0t08eROv/fRjrphnRS+33S5dIZwModWg6vJR1TdiZ11Xg5KWnabITlnEg6KXhX/m4TWNHqvwmD3LSR69UlpOT1eODSltf2zPVzeqfqRv5ckfbbdwD7jbKMY/I6PRd4ayfjj4jxlbiRDqpn5hW3Leks5D15B3cwcA3pzqp23D26AymZOSDv1H6Q/08APkXqt7RXxcvn/Xln/hlg/T6v7Zd3Ri9qea51h3UQKUk4KD/eO28Yb+lTTkeamcv4HNIZ+dl5+uP5+3X33fHm1/5Kann5EKlF63JGmRSRLuOfR0pO3pyf+0nrTm8YZexLuly7CemS6h9IAx+Tv8OqDDOh7FOHX1pkfZAuy1wy3PXEs5O2F9JyiY/Ur+2bwD4t83QseSK1WmxJStJPIvWH2jC/tg5tvlSbP2c42+yLSZ3jnyFdZeh9fXvS3YHnkC6lv34462SIeA4mDeZ8Uctz7yH9zuegCUuR+tVSxpDbLOnOykWkpOIL5EvEpIP5BflvxAd04FWkG9t2bvns3fJrU0jHnnXaWBf660pwNKmrxUWkFq/Pkfq3Dae8V5NalY8lJcCt2+376Sch7qeMlwCfJCVtFwOT8/Nvz+tlSruWxzBiOzBvt5Pp51J6n3lf2rvuSP0B/4fUV++nwLkdj72qhVbBSjqM1CH0xX1WxnWkmwLa3edgP+D4PL0yqcXkHHIiBMO/HbtN8R1KSlxfQkowzyadsU0mnRn9HtiyZf7eeF9IPkCRWhTPYUXL2/6ku2PXLhBXoeVB6m/2WdKB7cVDzNvV5/EapITpm6SkbLOWZfV54GRSS8YUUj+y3tv9X01q9Zg2injfDNxO6rD7L3mnOye/dgPpEuBwYj+AZ/cr3Davi1H1wyId7O4gXeKeNIr3b9QyfQ1wwVDrmGefGHwk7yz/Crw3P7caKXk7hxVDmnTqxGByXv+X5HryMtId0/9COjh+izYflIa7zZJ+T3QTUsvJv5GSq94D6AakG1NG3Z+XtG/9LLBTfvxvpBarzUgnp4sYYt862vo1jG125ZZlNZvUOrsjqcVtl7xNzcjz9O6Le98z1EnFpuQEL29f80kD80JKeI4g3eyxZwfqY+sJziG5Hr6Z1J+19w7S35B+a/Va0o0Gg5W3Bumq1KtJCe5FwAfyaweSWvIGTLKBtfNnr0Q6xt7b8trBpBPbUR8XCiynmaQW50m5rl0ErDXY+iYd1y4H1s2PDye1nH4I+GVev
hMY4vhS6vfo9IKrYEV15YV6Oil5Wo90BnE9K8bTWrXsStR3wyC1gPyRljGySCPqb1+DZbQnqcVvOqkJ+DJSf60LSQeCNWnpRNryvt3zTuB7pEvAzyN1Qv0B8O48z/NGGdOIk4N+yjiGdLDfmZS0f2aY79u89/Pzxnkf6cB8P6lp/E7SgekDpMTqBnJiQupDdjOjOBCSztiPIfWzgZQQ7pmXbW9yvP4QZexN6mT9edIl0X/eEJG3gRFfRmRFi/TJpLHojulbvwd5r0iJxG3AF1qev4FhJG/5tTeSLhdPJ7VG3g7smF9bPW9bVYyJtRbphOAi0oFqS1LLwuWjWf8j/OxhbbN5G72VdCl/AqkF5qukS5q75Po96pbBXM4NwNdIlwKPyev7k6QWph8yxKW5IvWrpYz+ttneG3oOISWPB5JaGA/Mry8in1zlzz2dEVy2y5/zDeBN+fEXSAfyDfLj6aR9xLmk1ry2n1Sw4jLxnPz/FFaM+/h8UiI2aBeLlrI+D3wnT+9L2q9fBlxNy0l8P+97P+nE9Wv5+78wb7Nfy+v4RkZwU1pZy42URP4y1//T8/L4EvB1Brh5hZR8/ifpRo6tSfvofwf+REtLG6mr02fJXQPavp478SFV/vVWMNLZxwOks7EPkjpWXskILxcN4/NWa5nehbQj34LULPxh0gFoS1IiMI8S+n2R+xvk6ZHu8FYlJR+HkvqhXZKfF6k/xAn003JAOlvp7aC6CekM7+q8s9qf1AoyqmWb18+380a2PcO85MiKu8R6E5VT8ob3kbwjmUBKZKb0ed82pJsHIA24uZjUZ2o/0g/FH0e61Ph9UoflxeTWhfyej5FaO7pJO8zRJEer5p3J2aSTiq1aXvsVMHuA97Weab+ddMnrc7mMh0kHkzflHcvvGOGgmS3LcippKIEHyGeoI1wnG5E6Jp/Y8lq/yVte50fn6RmkjsyXtcz3PlIiuGvfZdCOP57d8te3JXMN0uWW81hxRj5kHyY6sM2S9jO396mrK5MOrKcDfyNfih/lctmL1JrS+733zXEdzYo+UEO17IyqfjH0NvsR0jZ7GunOzjeTWkF7E/4TSGO2bUhKbm8Z7rIgtQ5umtfDB/Ky7L3M/BXSNtx7yXxV2tiPtE/d3ITUX3oCaZ91ZY7ni73raBjlzSZf3s2PLyb34cr16/nkO5UHeP/bSQnji0iJ3jn5+XVI+/UjGcFAxjx7//ZG0r7sOXfRD6OcffN6+Spp//EOUhL6bdK+cko/n7da/v8i0kn731te+00ub2XSPn8xLXdpt/uvIx9S1R/pDphrWlbKS1umX59fG7ASjuLzppJanz5I6gi6KFfen5HGmnopKXm7ltQStG8Jn7lqroRr5Ep9AMM8kJETjLycPkY6CNxO2inuQWpJ6K+lbV1S4nl6ftx7cP5PVvRxe877hhlT72jaM0iXXM4g9/0bwfd6CenAdQEpYbuAFZ2kj+DZfaKU68IlpDOpc0mX4I4g7ZAPJZ2pbZfX6bak5OjMljJeCny3wDrsXQ+Hky5nfZC0A96N1IJ2Pf20KPXZybw4r/veA8abScMRXM+KmxRGlFDy7IPCC0mJ623kvnLDeH/fIQlemNft51qeu4aWpKzlu6zLiktYu+Vt6CMt8xxLujQ14iFWCqyntUktO/eQLxvl519NSg5+SDpoDnWJrSPbLOmO12/1rktWJFMijSG28Ui2qz5xTMnlPwz8a8vze5Na2o6mZUT+MusXI99mdyTtb68lXfZSXgf/SrpB4UesaDUbat3tT2qt2yCvv678Xb/OiuTti6TtrWPDW7BinzeD1FfwylwX30VKSv+DIVpW87L6F9IJ3kdJl7oPI18BGEYM00gJ0ixSMvMrVux3Nxjh91Gf+nEYqeX4SlLytfMIylqN1GBzcN5uFpCGJFmbdJPgXcCn+3z2+qTGh6mk/dGfSMlZ7/FtCunYclauhx27ISpijCZurEgkPk4/Hf5JZ5y3UPByBi1nk73TeaO5IFeU3vHYDiE1qR+SH69E7kQ91I5iOJ9PSgpvzxVrWJcYSa0afyK1xGxC2qm9i3QZ4Zpcufvr07YVKZn6F9JAs4e1zPM54KMj+V55WfQejHYmnSG2dqCek2MbsL9QXuZvz9MfIF2q/DJpLKm/saIfyqGkA0NvcvM8cj8N0gHgBuDHLeW+k3SDwJGkg+QZufxXAo+TLgdNJO0QFjK6M8HW9bAxacf076SWg0vJP1Dfz/tak7ZjSAna70gdwXv7L+1NSjK2Z4StOn0+672kyxxfJB24/kRLEjXAe1rj+0Cu/8eQzuh/x7N3lJeRzmqfx4oW8r1JZ7m9fYb2yOviwy3vW63I9juM7923Xt1CasH8EWm4jPfn195BugQ0ZAszHdhmSa1Im5EuIy8EXttSxh7kLiIFlsuRpAPWCaQD4e08ez+wJyPohD+S+sXIttk1SJe3/p10LPgZaZtdq2X+ybQktEPEuRZpn/KyXPYP8nftZkXL22vzvJ8fTn0YqH6Mom7+L6lf32F5nXw+v/Yu0r5w0PVBOib+NymZPYl0w8vJpGTpz8Arhnj/UaSTqfcBjwL/0/LaEbmsyaNZDqSTm5+REqqJpJPbU2hpSR6knFfkmD7V8ty7SCdavaMebEBqeZvQ+tmk7gavYcXNUFvn7enIlrIqucGiox/W0S+WVsblwKyW515COgv558ooUP7apLMq5TIPJh2UticdRO8mjZLdO/9BpKbVd1PCdfD8+Rfm6deRLjNcwoqBMwe8S4aULL2ItLO/iXTW8UVSC+E2pB3UczZ0UivOFeTBg0lJ1j2kneK+pAPHLiP8HhuRzszOJh0QDyFd0mgdHPQnDNJPJm/Yd5Fa6P6LdODaLe9ILie1kH2b1Ldi85b3bUxKjr5DOvM+iHT5+piWed6TX/ttXmYPknbK+5ISi1tJZ7MjvrGln/WwF2mHtCgvFzHEQZ2U4HyPdCD/D9LZ6C6s2Pm8nRGe7fYpfz9ScjGTdHD9MKlf1y0MY3BV0g79ivw9HyAlPr2tgV/oM+/GuS58n9SxfMe8/j6dX39Drie9l1HbfYl0oHr1PlKCfQvpAPd/DOOMmw5ssy3L9sJcLz5FSmTem+vFzcAbC9aHW3N9OC3Xh0+T9nmDJvNl1C+Gv81+hdSSeUWO7Yd5Wfya1H1lNEnVKqRLh+fk8o4itS7uTjqAH5WX+S6jXLb/PKaMoG5+NsewISlBOonUb6uHNNTGXxi6j+HewFWkRP9qUmI0jZQkf5rUsj1gv7hct25gRUvvF0iJ1ovzMlnICPaPPPfY+hlSMviKltdPJCXKzxmeqqWcHUg36PyctO9/DStaAA8nncRPIZ34LMjTa5GO3Wvk+Y4nnZi8Mj/eNZd17Gi3oTL+Kvvgtn2hFVn5N0hnX92kPmY/zTuG9Uv8rCmkHfkapB3PvXmlb0Fqpv8pLWe3pKSktF8KyJ+/S/4/nXTmdSErWi02o8+Bn5RYnpBj3CDHeCipZauHATrwkw4Kv2XF2cf7SS1sJ5IOaicxyv4ypE7Vj7LibsFvks6M30Yai+l3DHGT
A+nSya2sGApgUv7+/0E6K16Dfm5Aafns3hsC9iD3g8yPVyUdnNcltRhdQtppf4uUvC0eTZ0axnr4NENf2nghKWnovRw2mZRkfC0vj1G3srV8xseB4/L0SqSWjK+QWnZ+yyD9kfKyO7PPsvsv0kF3H9LOfi2e3TrXuz5672DbgnQw+VTLeu7YjQiD1KtT8mvrM4IkgDZus6SW4BtJB9yDSCcW3yBtQ78mbVdvyfOOKukdoD58OX/25YxwOKXR1C+G2Gbzcxvy7P3V0aQk53hS8vspWvoZjiDej5Ja2z+eH7+HdFL4BtKx5ogi9ZOWY8ow6+YtrPjFlkmkBoRjSUn1/gzjTkdSC9S+uU79ihUt9jPy/wGH2CF1VfhJ/v5r5fX3b6QT+u+RWvFGc1I7JX+/1fLjk0hXsrbKj9ch7SP7PS6QWtouJR9vWbFffDUrkrfeu4jfQEtiyoo7+1fPj99PSj5f0TL/lSOt62X+dfwDO/bFUjP2x1hxZ82/kc4iBh2vZRSfsxfprOYFpAPMnazoNL17rrgjPhMdwefvQzp4T887jo+TzjBOIHWYXqPP/C/KO5t5pNa/95L72pHOQjYc4HNWJ+3wXpUfTyQdlC8gDzSZnx9Nf5mNWPGj82/MO4BD8/c4i2Fe0s7r4iHggJbnLmyNb4jPPiA/N4t0ln5wfjyJ1Ex+ee93BB7JG/SIh/wosh76KWdf0hAZvQPsTiCdcZ9KCU34pLPxC3l2S+UVpMsIQ25LI112LevjJlb0J1mbdEI0rL42Zf8NUq9GO3hnW7bZPN+2pAP/jaTk5VJSv7fZrDhgFeme0V99uDyvt2FfCitSv4a5zQ60vzqfdOfloJf+Bol3fVJr6e2kJGVKXifnkX6VoKz69heG0RUgz/sPVlw27c514rN969EgZbyadCPHVS3PHUNq0R3y5I90AnEzqZXtP/Jy+QIpqSvSRWNv0mXa6aT9xsdJV2Venl8frIV6N2AZuWtFXv8nkJLs1wy1HZBOBv7AiuTtA3n7661PlY0/FzFGEzdSZ/ElpMsqb6TNt+iyYoDTdUm3Cy9ixZ04HyTtREfVWX8En38nK0ZQn0M6EAx4pkM6mP6C1ER++zA/5yOkM9Xe1oE35M/5LiWMBk66/HMLqUl797yhrT7CMvYkDbvy2bzh38IwkiDSWdai/J32ysvlJS2vb5yfe1n+jHMpofV2NOuhnzLelGNvTd5KGd6GdFZ5IumM9/V5Hc0bSfmjWXYt62NP0oHlvxjmMAYj/H7D2jeMtl4NUl7bttm8rj6Ypw8htWyWsuwGqA83MoI7jcuqX8PYZvvbX/2SdAmx0DGB1JH/TlKSNJWUSA7rzs0R1I9/Jg5DzNu7/fcmb12M4KY7UoNG78neLrnOzGeQIT/6vH8y6YSh9/LigaRkvowTx39uJ6Tk7UTScX0SQ/dJ3It0ReTA/HgCqZvLsFoAeW7y9i+5jk0e6rPb/VfZB7f9i6XLNH2HfWjbws4r+XbSJbk98wo/gXQmUsqZ2BCf/0bSNfvejWfIDs+kSyrvIPVZmzGM+V9Earn8Zd7R/p4Vl4S3LnE5LsrLbVT9EEkH1uWkM/lh9+9ixbAA1/XduPOO4mOkFozFlHgX0UjXwyDL7R7a8PMrpNbko0mXUX480nU92mWX18ci0oF8RD/TM8zyu0j9lYbVX2W09WqQ8tqyzZL6NV5OGhLin61OdakPZZY3xDbb1v0VKZF+gDz4cxvq5z+PKcOc9x4GubowxPvXJbXk/px0iXPEXXry9nR4Xh/DSvqGWW7vdrJafrzmCN87Hzi0jHXACBsShlH+iC/XR4zhxK2KP1KGf1OuwNuT+kG9vsOfvyB//rAvCTOCs09SQrx7PhC/jNSXYBEl/pQL6dJYoRYjUivNjFG873kDfTapuX092tR6OpL1MMD7X19GQjFI+VOAqaP9bqNZdoOtj5K+08tILUbD+kWJ0darQcorfZvN2+i7aBnmom71oczyhthm27q/IvXF26iMsgapHzcNp26Usf3n7XRU+6G8/g6jPSdZe+cEbDRdcd5C6if9AkYx2HReBzczxPA2oyh3MulKwogHm+8d4sFKImlaRDyepydExLKqPr8Dn/Ua0q32742IhZ34TLOySdoVWBgRD1T0+W3ZZnv3P5IU3tE3dn/VyX16Ue2sa0WWg6S1I+K+Kj57iHKnRsQTI36ft2cbLUnrkpp6/1R1LGajNVYTm7H6vUbL+ysbK5y4mZmZmTVEV9UBmJmZmdnwOHEzMzMzawgnbpmkOVW+32W4jCaUUYcYXIbLaHcZdYjBZbiMgThxW6Hoiii8Il2Gy2hAGXWIwWW4jHaXUYcYXIbL6JcTNzMzM7OGGBd3lUoKSYPOExEMNs94WE42Ol1dQ5//DFW/ttlmmyHLuO+++1h77bUHfH3+/PlDlmFmMGnSlEFfX758Gd3dEwadZ8mSJ8sMqWKDHx8hhpjHx8c2uD8i+t3hj4vEraurKyZMWKlQGcuWLS0cRxnLeqgEtFNxDCdZGUxPz/LCMQy9sxmO4stiypRVC5fxxBOPFC6jjLpRH/VYt11d3cUiKGFbq8P2CmVts/WwySbbFi7jjjtuLFyGVHy9RPQULqPo8bGMGJYv7+hY9U0wPyJm9/eCL5WamZmZNYQTNzMzM7OGcOJmZmZm1hBO3MzMzMwaoq2Jm6SVJE0tqaypkiaWUZaZmZlZE7UlcZO0maQvAr8HNsnPnSzpd5IWSTo1PzdD0q/zc5dJenF+/q2SbpW0UNKVudhNgDsknSpps3bEbWZmZlZnpSVuuUXsMEm/Bb4J/A7YKiJulrQmsA+wRURsBZyY3/Y14Lv5ubOB0/LznwbeEBFbA28BiIibga2A24EzJf02f14pLXpmZmZmdVdmi9u9wOHAeyJix4j4VkQ8ll97BHga+JakfYHekQu3B/4rT38f2DFPXw2cJekI4J8DKUXEYxFxZkS8Cjgi/93bXzCS5kiaJ2neeBirzszMzMa+MhO3/YG/AD+W9GlJ6/e+EBHLgO2A84E9gYsHKygijgQ+CawHzM8tdsA/L69+BrgAuCd/bn9lzI2I2RExe2wNTGpmZmbj1eC/6TECEfEr4Fc5yToYuFDS/cB7gPuBKRHxC0lXA3/Mb7sGeDupte0g4CoASRtGxPXA9ZL2ANaTtApwJrAW8B3gVRHxQFnxm5mZmdVdaYlbr5xMfRX4qqTtgOXAKqREbjLpt2w+nGf/APAdSR8F7gMOy8+fImnjPO9lwELgRcDHI+KGsmM2MzMza4LSE7dWfZKs7fp5/U/Arv08v28/xd2T/8zMzMzGJQ/Aa2ZmZtYQTtzMzMzMGsKJm5mZmVlDtLWPW11IXUyeXGyc3meWPFVSNAWVMbRJDca1C6qPoSwTJqxUuIzZs3cvXMZKK00uXEYZYx7WZfidMr5LV1f30DO1Ww22Vyhnm61L/XrqqccLl1HG9iaKf5ee6ClcRhn7sKKWL19adQilKaOeL126ZMDX3OJmZmZm1hBO3MzMzMwawombmZmZWUM4cTMzMzNriMYnbpKuqToGMzM
zs05ofOIWETtUHYOZmZlZJzQ+cZNU/L5uMzMzswZofOJmZmZmNl6M2QF4Jc0B5qRp56dmZmbWfGM2o4mIuRExOyJm12UkdzMzM7MixmziZmZmZjbWOHEzMzMza4jGJ24RMa3qGMzMzMw6ofGJm5mZmdl44cTNzMzMrCGcuJmZmZk1xJgdx83qLSIKl1GXYV7KiKO7e+xsimNp3Y4ZZSzPEtZrGepSv+oSRxmKxlHGshhLy7Pd3OJmZmZm1hBO3MzMzMwawombmZmZWUM4cTMzMzNriBElbpJWk3RUnt5F0kXtCeufnzdD0q0DvHaopBe08/PNzMzM6mSkLW6rAUe1IY7ROBRw4mZmZmbjxkgTt5OBDSUtAE4Bpkk6X9Ltks5WvhdX0msl3SzpFknfljQpP3+3pLXy9GxJv8nTa0u6VNJiSWdK+lPvfEC3pG/m134laWVJ+wOzgbMlLZC0cuElYWZmZlZzI03cjgf+EBEzgY8C2wAfAjYHNgBeJWkycBZwQES8jDRW3PuGKPczwK8jYgvgfODFLa9tDHw9v/YwsF9EnA/MAw6KiJkR8dQIv4eZmZlZ4xS9OeGGiPhzRPQAC4AZwEuBuyLijjzPd4GdhyhnR+AcgIi4GHio5bW7ImJBnp6fP2NIkuZImidpXhkD+5mZmZlVrWjitqRlejlD/xLDspbPnNymzwAgIuZGxOyImD1eRlM2MzOzsW2kidtjwCpDzPN7YIakjfLjdwJX5Om7gVl5er+W91wNvA1A0m7A6iXFYmZmZjZmjChxi4gHgKvzEB2nDDDP08BhwHmSbgF6gDPyyycAX5U0j9R6Rsvzu+Vy3wr8jZSYDeYs4AzfnGBmZmbjxYh/2ToiDhzg+aNbpi8j3bjQd56rgE36efsjwBsiYpmk7YFtI2IJqYVuy5b3n9oy/SPgRyON38zMzKypRpy4tcmLgf+W1AU8AxxRcTxmZmZmtVOLxC0i7qSfFjozMzMzW8G/VWpmZmbWELVoceuEomO5BcXHgitjPLmuEnLtMr5L4Rg8tt6zrLXWiwqXUcYyHUvrpZTvUoP9RilqUjfKKKOM4Z0mTZpSuIxSlofqsUx7enoKl1GU9z3D5xY3MzMzs4Zw4mZmZmbWEE7czMzMzBrCiZuZmZlZQzQucZN0Tf4/Q1K/gwGbmZmZjUWNS9wiYoc8OQNw4mZmZmbjRuMSN0mP58mTgZ3yb5UeW2VMZmZmZp3Q5HHcjgeOi4g9+3tR0hxgTppuXH5qZmZm9hxjNqOJiLkRMTsiZpcxYKOZmZlZ1cZs4mZmZmY21jQ5cXsMWKXqIMzMzMw6pcmJ2yJguaSFvjnBzMzMxoPG3ZwQEdPy/6XArhWHY2ZmZtYxTW5xMzMzMxtXnLiZmZmZNYQTNzMzM7OGaFwft9GK6Cn0/q6ueiyqMsak6+lZXkIcxXJ+UTwGajI+XxnLc9HCywuX0dXVXbiMotsJgChhvZSxbiMKF9HVXWy7jxJiKGOdlFE3yqjndalfDzzwl8JldJewTFVGGSXsS7u7i8VRRj0vY3nWZb9Rxi5w2SCvucXNzMzMrCGcuJmZmZk1hBM3MzMzs4Zw4mZmZmbWEE7czMzMzBrCiZuZmZlZQ1SauEn6qKRj8vSXJf06T+8q6WxJ35A0T9JiSSe0vO9kSb+TtEjSqVXFb2ZmZtZJVQ9OdhXwEeA0YDYwSdJEYCfgSuC8iHhQUjdwmaStgL8A+wCbRkRIWq2/giXNAebk6bZ/ETMzM7N2q/pS6XxglqRVgSXAtaQEbidSUvc2STcBNwNbAJsDjwBPA9+StC/wZH8FR8TciJgdEbOLDhZrZmZmVgeVZjQRsRS4CzgUuIaUrL0G2Ah4CjgOeG1EbAX8HJgcEcuA7YDzgT2BizsfuZmZmVnn1aEp6ipSgnZlnj6S1MK2KvAE8IikdYA9ACRNA6ZHxC+AY4GtqwjazMzMrNOq7uMGKVn7BHBtRDwh6WngqohYKOlm4HbgHuDqPP8qwIWSJpN+EezDVQRtZmZm1mmVJ24RcRkwseXxJi3Thw7wtu3aHJaZmZlZ7dThUqmZmZmZDYMTNzMzM7OGcOJmZmZm1hCV93HrlK6u7kLvX7b0mcIxBFG4jFLGpIvicUQsL/T+nugpHEMJi7OUwZmL1i2AF623WeEy/nHf/xUuI8qoG2XUc4qvlzK+Cz3F6nkZ21oZepYvK1xGGeu1LvVr1VXXKlzGY489WLiMrhJ2g2XsS3t6SgikaAwlfI8y9htlKGXfMwi3uJmZmZk1hBM3MzMzs4Zw4mZmZmbWEE7czMzMzBqiVombpM9Jel3VcZiZmZnVUa3uKo2IT1cdg5mZmVldtb3FTdKnJP1e0m8l/VDScZJmSrpO0iJJF0haPc97lqT98/Tdkk6QdJOkWyRtmp9fW9KlkhZLOlPSnyQVv7fbzMzMrObamrhJ2hbYD9ga2AOYnV/6HvCxiNgKuAX4zABF3B8RLwe+ARyXn/sM8OuI2AI4H3jxAJ89R9I8SfPaPaaKmZmZWSe0u8XtVcCFEfF0RDwG/AyYCqwWEVfkeb4L7DzA+3+c/88HZuTpHYFzACLiYuCh/t4YEXMjYnZEzC5jkFUzMzOzqtXq5oR+LMn/l1Oz/nhmZmZmndbuxO1q4M2SJkuaBuwJPAE8JGmnPM87gSsGKmCAMt8GIGk3YPUS4zUzMzOrrba2YkXEjZJ+CiwC/k7qz/YI8C7gDElTgD8Ch42g2BOAH0p6J3At8DfgsVIDNzMzM6uhTlx+PDUiPpuTtCuB+RGxAHhl3xkj4tCW6Rkt0/OAXfLDR4A3RMQySdsD20bEEszMzMzGuE4kbnMlbQ5MBr4bETcVLO/FwH9L6gKeAY4oGqCZmZlZE7Q9cYuIA0su705gmzLLNDMzM2sC36k5XCUMKaIShpNLDY3FRCwvI5Bib2fsDNFSxjiBTzzxcPFAaqKM4XfKqB9B8fVSdHsLegrHQBnjUJYxJFJNxsMso34tXfp0LeIoZ72UEUaxOMrYB3rYruGr+3AgZmZmZpY5cTMzMzNrCCduZmZmZg3hxM3MzMysIWqfuElaTdJRVcdhZmZmVrXaJ27AaoATNzMzMxv3mjAcyMnAhpIWAJfm5/Yg3QR9YkScW1VgZmZmZp3UhBa344E/RMRM4DpgJrA18DrgFEnrVheamZmZWec0IXFrtSPww4hYHhF/B64Atu1vRklzJM2TNK+MwQHNzMzMqta0xG3YImJuRMyOiNkekdnMzMzGgiYkbo8Bq+Tpq4ADJHVLWhvYGbihssjMzMzMOqj2NydExAOSrpZ0K/BLYBGwkHRzwr9ExN8qDdDMzMysQ2qfuAFExIF9nvpoJYGYmZmZVagJl0rNzMzMDCduZmZmZo3hxM3MzMysIRrRx60WajIWXERP1SEkBZdHGWPr1WWYlzLimDpl1RIiqYdSxk2sx6otvr3VZL9RmzhKUEb9mjhxci3iEPVYL3UY63QsHRPazS1uZmZmZg3hxM3MzMysIZy4mZmZmTWEEzczMzOzhqh14iZpRv7FBDMzM7Nxr9aJm5mZmZ
mt0ITEbYKksyXdJul8SVMkzZJ0haT5ki6RtG7VQZqZmZm1WxMSt5cCp0fEZsCjwPuBrwH7R8Qs4NvASRXGZ2ZmZtYRTRiA956IuDpP/wD4OLAlcGkebK8buLfvmyTNAeak6Sbkp2ZmZmaDa0Li1nc45ceAxRGx/aBvipgLzAXo7p5Q/bDQZmZmZgU1oSnqxZJ6k7QDgeuAtXufkzRR0haVRWdmZmbWIU1I3H4PvF/SbcDq5P5twBckLQQWADtUF56ZmZlZZ9T6UmlE3A1s2s9LC4CdOxqMmZmZWcWa0OJmZmZmZjhxMzMzM2sMJ25mZmZmDVHrPm5liig4IkgaM65yZYxJF/RUHod6io/QIuqxTqKn+PJ86OG/Fy6jjOURzxl9ZxRxlLCtlLJuyyii6PZWQgwR1W+vAPQsL1xEXerXU089Vos4yjiulLGtdHUVqx+Fj6/UZ79Rlzo6GLe4mZmZmTWEEzczMzOzhnDiZmZmZtYQTtzMzMzMGqI2iZuk1SQdlad3kXRR1TGZmZmZ1UltEjdgNeCoqoMwMzMzq6s6DQdyMrChpAXAUuAJSecDWwLzgYMjIiTNAr4ETAPuBw6NiHsritnMzMysY+rU4nY88IeImAl8FNgG+BCwObAB8CpJE8k/Mh8Rs4BvAydVEq2ZmZlZh9Wpxa2vGyLizwC5FW4G8DCpBe7SPMBdN9Bva5ukOcCcNF2n/NTMzMxsdOqcuC1pmV5OilXA4ojYfqg3R8RcYC5Ad/eE4kMhm5mZmVWsTk1RjwGrDDHP74G1JW0PIGmipC3aHpmZmZlZDdSmxS0iHpB0taRbgaeA5/x4Y0Q8I2l/4DRJ00nxfwVY3NFgzczMzCpQm8QNICIOHOD5o1umFwA7dyomMzMzs7qo06VSMzMzMxuEEzczMzOzhnDiZmZmZtYQterj1j5BT8/yQiWMpbHg6vBdurrGTtVbtnxp4TL+9re7Cpehru7CZXSFCpdRhlLqaPQUL6MG6rC9Qn3qVxnL48knHy1cxljah/X0VL+t1GV5lrEHjDbve+qxRzAzMzOzITlxMzMzM2sIJ25mZmZmDeHEzczMzKwhRp24SXq86IdLOlTSfw4xzwxJ/Q7Ma2ZmZjaeNKHFbQbgxM3MzMzGvWElbpJ+Imm+pMWS5rQ8/+X83GWS1s7PHSPpd5IWSTonP7dGLmORpOskbdXPZ5yVf4e093Fvi97JwE6SFkg6VlK3pFMk3ZjLe2+RBWBmZmbWFMNtcXt3RMwCZgPHSFoTmArMi4gtgCuAz+R5jwe2iYitgCPzcycAN+fnPg58bwQxHg9cFREzI+LLwOHAIxGxLbAtcISkl4ygPDMzM7NGGm7idoykhcB1wHrAxkAPcG5+/QfAjnl6EXC2pIOBZfm5HYHvA0TEr4E1Ja06yph3Aw6RtAC4Hlgzx/MskuZImidpXkSM8qPMzMzM6mPIoYol7QK8Dtg+Ip6U9Btgcj+z9mZHbwJ2Bt4MfELSy4YZyzJyIqk0NPZKA4UEfCAiLhmssIiYC8wF6O7uduZmZmZmjTecFrfpwEM5adsUeGXLe3v7pB0I/DYnXOtFxOXAx/J7pwFXAQfBPxPB+yOi72+O3A3MytNvASbm6ceAVVrmuwR4n6SJubxNJE0dxvcwMzMza7Th/DjYxcCRkm4Dfk+6XArwBLCdpE8C/wAOALqBH0iaTmoZOy0iHpb0WeDbkhYBTwLv6udzvglcmC/JXpzLh3TpdXl+/izgq6Q7TW+SJOA+YO8RfGczMzOzRtJ46P/V3d0dkydPK1RGHX6E1+qpq6v4qDoTJ04qXMaSJU8VLqPdP448XGX8kHgZ36UuP/A+VtRlnUycOFBPnOFbuvSZwmWUoYxl2tXVXUIkxYylXKSMdbJkyZPzI2J2f695r2RmZmbWEE7czMzMzBrCiZuZmZlZQwzn5oTGixg7fdTq0kekaBxl9Kkoo09Eur+lmDLq1tNPFf7pX7q6i2/OXV3FyyhjvZRRRncJy2P58mVDzzSIuvSRq8t+o4x1UkbdKKN/Whl9W8vYd5SxL+3pWV64jKLKqBv10d7tvh57FTMzMzMbkhM3MzMzs4Zw4mZmZmbWEE7czMzMzBqisYmbpL0lbV51HGZmZmadUvvETdJAt8zsDThxMzMzs3GjrYmbpI9KOiZPf1nSr/P0rpLOlvQOSbdIulXSF1re97ikL+bfJ91e0smSfidpkaRTJe1A+iH6UyQtkLRhO7+HmZmZWR20u8XtKmCnPD0bmCZpYn7uDuALwK7ATGBbSXvneacC10fE1sBtwD7AFhGxFXBiRFwD/BT4aETMjIg/tPl7mJmZmVWu3YnbfGCWpFWBJcC1pARuJ+Bh4DcRcV9ELAPOBnbO71sO/ChPPwI8DXxL0r7Ak8P5YElzJM2TNG8s/XitmZmZjV9tTdwiYilwF3AocA2pBe41wEbA3YO89emIWJ7LWAZsB5wP7AlcPMzPnhsRsyNidhmj45uZmZlVrRM3J1wFHAdcmaePBG4GbgBeLWmtfAPCO4Ar+r5Z0jRgekT8AjgW2Dq/9BiwSvvDNzMzM6uHTiVu6wLXRsTfSZc9r4qIe4HjgcuBhcD8iLiwn/evAlwkaRHwW+DD+flzgI9Kutk3J5iZmdl4oPHQ/6urqzsmTZpSdRilqMuPRftH5suNI0r4kecyfmS+NsujhDLK+BFw/8j8CmV8lzLWyViqX2X8yHwZ26x/ZL5+nnzy0fkRMbu/1+qxVzEzMzOzITlxMzMzM2sIJ25mZmZmDTEuLipLYuLElQqV8cySp0qKphiV0TesjP4MBftVLFv2TAkh1GOYl8mTpxUuY7PNXlm4jJtv/p/CZdSlz00ZZSxbVvy7TJhQbL9RRt8yyuiHXEo/qGL9/aCcdVJG3Xj+8zcoXMbf/vbHwmV0ldBvcHkJdWzixEmF3l9Gn8GlS5cULqMux4Qy9qODcYubmZmZWUM4cTMzMzNrCCduZmZmZg3hxM3MzMysIRqRuEk6UtIhVcdhZmZmVqVG3FUaEWdUHYOZmZlZ1UpvcZM0Q9Ltks6SdIeksyW9TtLVku6UtJ2kNST9RNIiSddJ2kpSl6S7Ja3WUtadktaR9FlJx+XnNpR0saT5kq6StGnZ38HMzMysjtp1qXQj4IvApvnvQGBH4Djg48AJwM0RsVV+/L1IAx5dCOwDIOkVwJ/yD9O3mgt8ICJm5fJOb9N3MDMzM6uVdl0qvSsibgGQtBi4LCJC0i3ADGB9YD+AiPi1pDUlrQqcC3wa+A7w9vz4nyRNA3YAzmsZaK/fkQMlzQHmpOlGdOUzMzMzG1S7ErfWIZB7Wh735M9cOsD7rgU2krQ2sDdwYp/Xu4CHI2LmUAFExFxS6xzd3RNKGHrczMzMrFpVNUVdBRwEIGkX4P6IeDTS72ZcAHwJuC0iHmh9U0Q8Ctwl6a35vZK0dScDNzMzM6tKVYnbZ4FZkhYBJwPvanntXOBg+lwmbXEQcLikhcBiYK82xmlmZmZWG6VfKo2Iu4EtWx4fOsBrew/w/nmA+jz32Zbpu4Ddy4nWzMzMrDnca
9/MzMysIZy4mZmZmTWEEzczMzOzhmjET16VoadneaH3q6u7pEiKaRm/btSihDKKjo3XpRJGaCnhe5QhjR1dzF//+ofCZYjiy6Orq/i5XBlxlLFuS/gqJWxvxYMIitevrhL2Xz3FwyhnnZRQv5544uHCZXSVMD5oGceVrhLWSxnHlaLKWJ51UcZ+dNDy21q6mZmZmZXGiZuZmZlZQzhxMzMzM2sIJ25mZmZmDdHoxE3SMZJuk3R21bGYmZmZtVvT7yo9CnhdRPy56kDMzMzM2q0xLW6SPizp1vz3IUlnABsAv5R0bNXxmZmZmbVbI1rcJM0CDgNeQfod0+tJP0S/O/CaiLi/wvDMzMzMOqIRiRuwI3BBRDwBIOnHwE6DvUHSHGBOnm57gGZmZmbt1pTEbcQiYi4wF6C7e0IJw/SbmZmZVaspfdyuAvaWNEXSVGCf/JyZmZnZuNGIFreIuEnSWcAN+akzI+JmXwI1MzOz8aQRiRtARHwJ+FKf52ZUE42ZmZlZ5zXlUqmZmZnZuOfEzczMzKwhnLiZmZmZNURj+rgV1dXVXej9y5YtLR5ElDAqScHvUVYcQU/B95ewLEr4HqL4DS5S8fOf6dPXLlzG3/9+V+Eyoow6WsI9Q6rJAD5Fl0dEse0kF1K4iJ6e5SWEUfy71KV+rbzytMJlPP74Q8UDKWOZlrAvLV7Pyzim1GSjL0Ep9XwQbnEzMzMzawgnbmZmZmYN4cTNzMzMrCGcuJmZmZk1RCMTN0l3S1orTz9edTxmZmZmndDIxM3MzMxsPKp94ibpJ5LmS1osaU7V8ZiZmZlVpQnjuL07Ih6UtDJwo6QfVR2QmZmZWRWakLgdI2mfPL0esPFw3pRb5+ak6do3LJqZmZkNqdaJm6RdgNcB20fEk5J+A0weznsjYi4wF6C7e8LYGZLZzMzMxq26N0VNBx7KSdumwCurDsjMzMysKnVP3C4GJki6DTgZuK7ieMzMzMwqU+tLpRGxBNijn5dmtMxT/NeCzczMzBqg7i1uZmZmZpY5cTMzMzNrCCduZmZmZg1R6z5utRL1GFEkoqfqEEoRJSxPScXjoB7rdZVV1qg6BKCc9VKK4qu2lO8yVra3saSM9drdPbEWcaiE/U9tttmC6nJMKEO714lb3MzMzMwawombmZmZWUM4cTMzMzNrCCduZmZmZg3R8cRN0mqSjsrTL5B0fqdjMDMzM2uiKlrcVgOOAoiIv0bE/hXEYGZmZtY4VQwHcjKwoaQFwJ3AZhGxpaRDgb2BqcDGwKnASsA7gSXAGyPiQUkbAl8H1gaeBI6IiNs7/SXMzMzMOq2KFrfjgT9ExEzgo31e2xLYF9gWOAl4MiK2Aa4FDsnzzAU+EBGzgOOA0/v7EElzJM2TNG+sjHNjZmZm41vdBuC9PCIeAx6T9Ajws/z8LcBWkqYBOwDntQy0N6m/giJiLinJo7t7gjM3MzMza7y6JW5LWqZ7Wh73kGLtAh7OrXVmZmZm40oVl0ofA1YZzRsj4lHgLklvBVCydZnBmZmZmdVVxxO3iHgAuFrSrcApoyjiIOBwSQuBxcBeZcZnZmZmVleVXCqNiAP7ee4s4KyWxzP6ey0i7gJ2b2+EZmZmZvXjX04wMzMzawgnbmZmZmYN4cTNzMzMrCHqNhxI2xQehHfFuHGVkorn2kFP5XFIJcRAPdYJJQzw/MjD/yhcRhnLQyXU81LKKGPdllFECdtbUXXYXlMgxet5ULyMMurXkiVP1iKOMo4rddnu6xBDGcuiLnV0MNXvlczMzMxsWJy4mZmZmTWEEzczMzOzhnDiZmZmZtYQTtzMzMzMGsKJm5mZmVlDOHEzMzMza4gxO46bpDnAnDTt/NTMzMyab8xmNBExNyJmR8TsOgwuaGZmZlbUmE3czMzMzMaaMZG4SbpM0gurjsPMzMysnRqfuCl1YNsIeLDqWMzMzMzaqfGJG7A58KOIeKrqQMzMzMzaqfF3lUbErcCHq47DzMzMrN3GQoubmZmZ2bjgxM3MzMysIRp/qbRTInqqDqE8ESUUsbzg+0uIgeJllDLGXwllrLHmCwqXcef/zi9cRhnKWLeUsFpKqWM9xep5XRTdXqGc7a0MZazXqVOnFy7jwQfvLVwGFD+ulLE8enqKxVHKtlaT/UYTuMXNzMzMrCGcuJmZmZk1hBM3MzMzs4Zw4mZmZmbWELVJ3CTNkHTrCObfRdIO7YzJzMzMrE5qk7iNwi6AEzczMzMbN+qWuE2QdLak2ySdL2mKpLslrQUgabak30iaARwJHCtpgaSdKo3azMzMrAPqNo7bS4HDI+JqSd8Gjupvpoi4W9IZwOMRcWp/80iaA8xJ03XLT83MzMxGrm4ZzT0RcXWe/gGw42gLioi5ETE7ImaXMsiqmZmZWcXqlrj1HTo5gGWsiHNyZ8MxMzMzq4+6JW4vlrR9nj4Q+C1wNzArP7dfy7yPAat0LjQzMzOzatUtcfs98H5JtwGrA98ATgC+Kmke0PqDez8D9vHNCWZmZjZe1ObmhIi4G9i0n5euAjbpZ/47gK3aHJaZmZlZbdStxc3MzMzMBuDEzczMzKwhanOpdDyI6HvT7MjpOTfejiKOEsooHEMZy6KEYV7qEsf06WsVLqMMZSyPuijluxRctXXY1spSxvKsS/3q7p5YuIy67DvqskyLGivfA9r/XdziZmZmZtYQTtzMzMzMGsKJm5mZmVlDOHEzMzMza4jGJW6Srsn/Z0g6sOp4zMzMzDqlcYlbROyQJ2eQfhbLzMzMbFxoXOIm6fE8eTKwU/7Jq2OrjMnMzMysE5o8jtvxwHERsWfVgZiZmZl1QpMTt0FJmgPMSdONa1g0MzMze44xm9FExNyImB0Rs8sYndrMzMysak1O3B4DVqk6CDMzM7NOaXLitghYLmmhb04wMzOz8aBxfdwiYlr+vxTYteJwzMzMzDqmyS1uZmZmZuOKEzczMzOzhnDiZmZmZtYQjevjVpWuruKLKqKnhDi6C5fR07O8cBlFx8YTxWOoi+gpvl5vueXKwmV0lTBeYSgKlyFKGH6nhCF8Stneuott9xHFl2cZ36OMsSyjhP1GXerXQw/9rXAZ3SXsi1VCGbCscAldXcXqRxn1vIzlWRdl1PPBuMXNzMzMrCGcuJmZmZk1hBM3MzMzs4Zw4mZmZmbWEE7czMzMzBrCiZuZmZlZQ1SauEn6qKRj8vSXJf06T+8q6WxJ35A0T9JiSSe0vO9kSb+TtEjSqVXFb2ZmZtZJVY/jdhXwEeA0YDYwSdJEYCfgSuC8iHhQUjdwmaStgL8A+wCbRkRIWq2a0M3MzMw6q+pLpfOBWZJWBZYA15ISuJ1ISd3bJN0E3AxsAWwOPAI8DXxL0r7Ak/0VLGlObq2bV8bggGZmZmZVqzRxi4ilwF3AocA1pGTtNcBGwFPAccBrI2Ir4OfA5IhYBmwHnA/sCVw8QNlzI2J2RMxWCaOwm5mZmVWt6hY3SMnacaRLo1cBR5Ja2FYFngAekbQOsAeApGnA9Ij4BXAssHUVQZuZmZl1WtV93CAla58Aro2IJyQ9DVwV
EQsl3QzcDtwDXJ3nXwW4UNJkQMCHqwjazMzMrNMqT9wi4jJgYsvjTVqmDx3gbdu1OSwzMzOz2qnDpVIzMzMzGwYnbmZmZmYN4cTNzMzMrCEq7+PWKUWHBIme5SVFUkxPGXGUMK5dRLE4eqKncAxlKGOoGHUVP/95wbobFi7jb3/7Y+EyyhjzMChehii+Xsr4LoW3t5qMIRkU397KWK91qV9Tp04vXMYjj9xXuIyuEnaDpSzTgmWUsq2VcEyoy9Bf7R471i1uZmZmZg3hxM3MzMysIZy4mZmZmTWEEzczMzOzhmhb4iZpF0k7FCzj8fx/hqRby4nMzMzMrJna2eK2C1AocTMzMzOzFUacuEn6iaT5khZLmpOf213STZIWSrpM0gzSj8UfK2mBpJ0knSVp/5ZyelvTpuX33CTpFkl7DfH5V0qa2fL4t5L8Q/NmZmY25o1mHLd3R8SDklYGbpR0IfBNYOeIuEvSGvn1M4DHI+JUAEmHD1De08A+EfGopLWA6yT9NAYeCOVbwKHAhyRtAkyOiIWj+B5mZmZmjTKaS6XHSFoIXAesB8wBroyIuwAi4sERlifg85IWAf8DvBBYZ5D5zwP2lDQReDdwVr+FSnMkzZM0r92D4ZmZmZl1woha3CTtArwO2D4inpT0G2ABsOkw3r6MnChK6gJWys8fBKwNzIqIpZLuBiYPVEj+3EuBvYC3AbMGmG8uMBegu3uCMzczMzNrvJG2uE0HHsrJ06bAK0lJ1s6SXgIgaY0872PAKi3vvZsVSdZbgIktZf4jJ22vAdYfRhxnAqcBN0bEQyP8DmZmZmaNNNLE7WJggqTbgJNJl0vvI10u/XG+hHpunvdnwD69NyeQ+sG9Os+zPfBEnu9sYLakW4BDgNuHCiIi5gOPAt8ZYfxmZmZmjTWiS6URsQTYY4CXf9ln3juArfrM88qW6Y/l+e4nJXL9fd60/P9uYMve5yW9gJR0/mr40ZuZmZk1W+N+OUHSIcD1wCcioqfqeMzMzMw6ZTTDgVQqIr4HfK/qOMzMzMw6rXEtbmZmZmbjVeNa3JosKD4qidCYiaMO6jLG36TJUwuXUcZ3KaMMqR51oy7rtqhSttcSFsVYql/d3fU49JWxbuuyXuqgLt+j3XG4xc3MzMysIZy4mZmZmTWEEzczMzOzhnDiZmZmZtYQTtzMzMzMGsKJm5mZmVlD1CJxk/RhSbfmvw9JmiHpNknflLRY0q8krZzn3VDSxZLmS7oq/9i9mZmZ2ZhXeeImaRZwGPAK0m+ZHgGsDmwMfD0itgAeBvbLb5kLfCAiZgHHAad3OmYzMzOzKtRhFMIdgQsi4gkAST8GdgLuiogFeZ75wAxJ04AdgPNaBmGc1F+hkuYAc9J05fmpmZmZWWF1SNwGsqRlejmwMqmF8OGImDnUmyNiLql1ju7uCfUYTtnMzMysgDo0RV0F7C1piqSpwD75ueeIiEeBuyS9FUDJ1p0L1czMzKw6lSduEXETcBZwA3A9cCbw0CBvOQg4XNJCYDGwV7tjNDMzM6uDWlwqjYgvAV/q8/SWLa+f2jJ9F7B7h0IzMzMzq43KW9zMzMzMbHicuJmZmZk1hBM3MzMzs4aoRR+3TujpWV7o/erqLhyDhp5l6DJUvJSenjLiKJbzd5UQAyUsC6L4SDFF6xbA3XffUriMrhLGKwwVXx4qo6aXsG67Sjgt7Sq43UcJ9YsovrEU/R4ALF9WuIi61K9HHrmveByl1NHih2BRfP/TVXBjKaOel7E8S9n3lKCMej4Yt7iZmZmZNYQTNzMzM7OGcOJmZmZm1hBO3MzMzMwaotGJm6RjJN0m6eyqYzEzMzNrt6bfVXoU8LqI+HPVgZiZmZm1W2Na3CR9WNKt+e9Dks4ANgB+KenYquMzMzMza7dGtLhJmgUcBryCNBza9cDBpN8sfU1E3F9heGZmZmYd0YjEDdgRuCAingCQ9GNgp8HeIGkOMCdPtz1AMzMzs3ZrSuI2YhExF5gL0N09ob3DGJuZmZl1QFP6uF0F7C1piqSpwD75OTMzM7NxoxEtbhFxk6SzgBvyU2dGxM2+BGpmZmbjSSMSN4CI+BLwpT7PzagmGjMzM7POa8qlUjMzM7Nxz4mbmZmZWUM4cTMzMzNriMb0cStKKpajRiwtKZJipO6qQ6iPqMcoL6L4TTKTJk0pIZLiooxlWsI9Q6rHqh0zSlmvNREU/y7d3RNLiKS4iJ7iZZSwPGyFJixPt7iZmZmZNYQTNzMzM7OGcOJmZmZm1hBO3MzMzMwaojaJm6TVJB2Vp3eRdFHVMZmZmZnVSW0SN2A14KiqgzAzMzOrqzoNB3IysKGkBcBS4AlJ5wNbAvOBgyMiJM0i/fTVNOB+4NCIuLeimM3MzMw6pk4tbscDf4iImcBHgW2ADwGbAxsAr5I0EfgasH9EzAK+DZxUSbRmZmZmHVanFre+boiIPwPkVrgZwMOkFrhLJQF0A/22tkmaA8xJ03XKT83MzMxGp86J25KW6eWkWAUsjojth3pzRMwF5gJ0d0+o/1DIZmZmZkOoU1PUY8AqQ8zze2BtSdsDSJooaYu2R2ZmZmZWA7VpcYuIByRdLelW4Cng7/3M84yk/YHTJE0nxf8VYHFHgzUzMzOrQG0SN4CIOHCA549umV4A7NypmMzMzMzqok6XSs3MzMxsEE7czMzMzBrCiZuZmZlZQ9Sqj1s7RfRUHUIpIsbGyCZB8e8hVEIk9TBhwkpVhzDmlLGtFC1jrOx3ylLGOsljeBbS3T1uDn3DMlaOK3XR7uXpFjczMzOzhnDiZmZmZtYQTtzMzMzMGsKJm5mZmVlDVJq4Sdpb0uajfO9MSW8sOyYzMzOzuiotcZM0mtt09gZGlbgBMwEnbmZmZjZuDDtxk/QpSb+X9FtJP5R0nKTfSPqKpHnAByXNknSFpPmSLpG0bn7vEZJulLRQ0o8kTZG0A/AW4BRJCyRtmP8uzu+/StKm+f1vlXRrfv+VklYCPgcckN97QBuWjZmZmVmtDKuVTNK2wH7A1sBE4CZgfn55pYiYLWkicAWwV0Tcl5Opk4B3Az+OiG/msk4EDo+Ir0n6KXBRRJyfX7sMODIi7pT0CuB0YFfg08AbIuIvklbLPzb/aWB26++YmpmZmY1lw728+Srgwoh4Gnha0s9aXjs3/38psCVwaR4gsRu4N7+2ZU7YVgOmAZf0/QBJ04AdgPNaBliclP9fDZwl6b+BHw8nYElzgDl5ejhvMTMzM6u1MoaPfiL/F7A4IrbvZ56zgL0jYqGkQ4Fd+pmnC3g4Imb2fSEijswtcG8C5kuaNVRQETEXmAvQ3T3Bw0KbmZlZ4w23j9vVwJslTc4tY3v2M8/vgbUlbQ8gaaKkLfJrqwD35supB7W857H8GhHxKHCXpLfm90vS1nl6w4i4PiI+DdwHrNf6XjMzM7PxYFiJW0TcCPwUWAT8ErgFeKTPPM8A+wNfkLQQWEC69AnwKeB6UgJ4e8vbzgE+KulmSRuSkrrD8/sXA3vl+U6RdIukW4FrgIXA5cDmvjnBzMzMxgsN98dQJU2LiMc
lTQGuBOZExE1tja4k3d0TYuWVpxUqY9mypSVFU4xUfASXMn74umgcPT3LisdQkx+ZnzRpSuEyXrTepoXL+MP/Ft8ce0qpG8XXSxnrtozv0t09sdD76/Ij86XsN3qWFy6jLvVrzTVfWLiMBx74S+Ey6rI/nzBhpcJlFLVs2TOFyyhjvxEU71lVxo/ML126ZH5EzO7vtZH0cZubB8udDHy3KUmbmZmZ2Vgx7MQtIg5sZyBmZmZmNjj/VqmZmZlZQ5QxHMi4UEZfhDKU0b+jhMvvheOoy/IsQxn9dh555B/FA6lJ37JS+h6W8F26SjgvLb691aOel7LfGEP16+mnnxh6pg7EUcp+sIQdetH6UUafrrr0WS5nvbb3u9Rjr2JmZmZmQ3LiZmZmZtYQTtzMzMzMGsKJm5mZmVlDdDRxk3SMpNskPSTp+PzcZyUd18k4zMzMzJqo03eVHgW8LiL+3OHPNTMzM2u8jrW4SToD2AD4paRjJf1nP/P8RtKXJc3LLXPbSvqxpDslnZjnmSrp55IWSrrVv1NqZmZm40XHWtwi4khJuwOvAfYcZNZnImK2pA8CFwKzgAeBP0j6MrAL8NeIeBOApOntjdzMzMysHup4c8JP8/9bgMURcW9ELAH+CKyXn3+9pC9I2ikiHumvEElzcsvdvLr80LOZmZlZEXVM3Jbk/z0t072PJ0TEHcDLSQnciZI+3V8hETE3Iman1rs6fk0zMzOzkWncT15JegHwYET8QNLDwHsqDsnMzMysIxqXuAEvA06R1AMsBd5XcTxmZmZmHaEyfhy27rq7J8TKK08rVMby5ctLiqaYMn68tqen+Hfp6uquPIa6mDhxUuEypk9fq3AZ99//l8JllNEftC4/Ml/Gj293dRc7t63L/rUu+4261K8pU4vf0/bkE/12rx4RFdyPAkQJ62XCxJWKxVBCPV++bGnhMsaSJc88NT8iZvf3mjt/mZmZmTWEEzczMzOzhnDiZmZmZtYQTbw5YVSK9slatvSZkiIpJmrS96dn+bJiIVCPvj9lKFq3ANZZ5yWFy7jvvnsKl1FKn6wSqmgZdbSU71KHvphlLIsS9htl9E+rS/2aNm31wmU88cTDxQMpoX71lLBeenqqH+t0LB0T2t231S1uZmZmZg3hxM3MzMysIZy4mZmZmTWEEzczMzOzhmh84ibpmqpjMDMzM+uExiduEbFD1TGYmZmZdULjEzdJj1cdg5mZmVknjNlx3CTNAeak6cbnp2ZmZmbNb3EbSETMjYjZETG7jB9YNjMzM6vamE3czMzMzMYaJ25mZmZmDeHEzczMzKwhGp+4RcS0qmMwMzMz64TGJ25mZmZm44UTNzMzM7OGcOJmZmZm1hBjdgDeviKiWAE1GQuujMGEg54SIikaRMH1AYh6rJMyvssTTzxcuIxSlkdNFmldvksdBu8uY3st5XuUUM/rUr+WLl1SdQhJCceVMraVsTLWaW2OCW0Oo/q9kpmZmZkNixM3MzMzs4Zw4mZmZmbWEE7czMzMzBqiY4mbpBmSbu3U55mZmZmNNW5xMzMzM2uITidu3ZK+KWmxpF9JWlnSEZJulLRQ0o8kTZE0XdKflO9hlzRV0j2SJkraUNLFkuZLukrSph3+DmZmZmaV6HTitjHw9YjYAngY2A/4cURsGxFbA7cBh0fEI8AC4NX5fXsCl0TEUmAu8IGImAUcB5ze3wdJmiNpnqR5hcdwMzMzM6uBTg/Ae1dELMjT84EZwJaSTgRWA6YBl+TXzwUOAC4H3g6cLmkasANwXsuAgZP6+6CImEtK8ujunuDMzczMzBqv04lb63DVy4GVgbOAvSNioaRDgV3y6z8FPi9pDWAW8GtgKvBwRMzsULxmZmZmtVGHmxNWAe6VNBE4qPfJiHgcuBH4KnBRRCyPiEeBuyS9FUDJ1lUEbWZmZtZpdUjcPgVcD1wN3N7ntXOBg/P/XgcBh0taCCwG9upEkGZmZmZV69il0oi4G9iy5fGpLS9/Y4D3nE+fn2uNiLuA3dsQopmZmVmt1aHFzczMzMyGwYmbmZmZWUM4cTMzMzNriE4PB1KZlnHfRqeEQXyDegwnF9FTuAxRbHmWMShyGcuzcL0A1FX8/GettV5UuIw//nFh4TJqo/hqKaeOFd1WalLPy4ijp4T9Rl1MmbJK4TIeeKCM/XnxZVpOPa/+2FRKDCXsN8rQ7uXpFjczMzOzhnDiZmZmZtYQTtzMzMzMGsKJm5mZmVlDOHEzMzMza4gxkbhJ6q46BjMzM7N2qyRxk3SwpBskLZD0/yS9X9IpLa8fKuk/B5i3Oz//uKQv5t8s3b6K72FmZmbWSR1P3CRtBhwAvCoiZgLLgceBfVpmOwA4Z4B5D8rzTAWuj4itI+K3/XzOHEnzJM2rwxg1ZmZmZkVVMQDva4FZwI158NOVgX8Af5T0SuBOYFPgauD9A8wLKYn70UAfEhFzgbkA3d0TnLmZmZlZ41WRuAn4bkT867OelN4NvA24HbggIkIpW3vOvNnTEbG8/eGamZmZ1UMVfdwuA/aX9DwASWtIWh+4ANgLeAdwzhDzmpmZmY07HU/cIuJ3wCeBX0laBFwKrBsRDwG3AetHxA2DzdvpmM3MzMzqoJIfmY+Ic4Fz+3l+zxHMO6090ZmZmZnV05gYx83MzMxsPHDiZmZmZtYQlVwqrUJPT7EbUHuip6RIiioeRynj2qnY24uuDwCp+HlHXcb422zmywuXcf31FxUuo5z1UrByUM66rcN3qUv9Krq9QjnfJWqyH11ppZULl1FG/erqKv6jP2XEUXTdlrFe63JMKEMZ32Uw9fiWZmZmZjYkJ25mZmZmDeHEzczMzKwhnLiZmZmZNYQTNzMzM7OGcOJmZmZm1hAdTdwkTZX0c0kLJd0q6QBJsyRdIWm+pEskrZvn3VDSxfn5qyRtmp8/S9Jpkq6R9EdJ+3fyO5iZmZlVpdPjuO0O/DUi3gQgaTrwS2CviLhP0gHAScC7gbnAkRFxp6RXAKcDu+Zy1gV2BDYFfgqc39mvYWZmZtZ5nU7cbgG+KOkLwEXAQ8CWwKV5oMtu4F5J04AdgPNaBsCc1FLOTyKN+Pc7Sev090GS5gBz8nQbvoqZmZlZZ3U0cYuIOyS9HHgjcCLwa2BxRGzfOp+kVYGHI2LmAEUtaZ19gM+aS2q1o7t7Qk2GLzczMzMbvU73cXsB8GRE/AA4BXgFsLak7fPrEyVtERGPAndJemt+XpK27mSsZmZmZnXT6UulLwNOkdQDLAXeBywDTsv93SYAXwEWAwcB35D0SWAicA6wsMPxmpmZmdVGpy+VXgJc0s9LO/cz712kmxn6Pn9on8fTyorPzMzMrM48jpuZmZlZQzhxMzMzM2sIJ25mZmZmDdHpmxMqIxXLUbu6ukuKpJgy4ujpWV64jKLLswzqfySYjouensJlXHP5LwuX0V1C3ShjzMNSyihh3ZYRR3f3xELvjyg+ElEasrKYMvYbYlnhMnqiHuv1oYf+VriM7u7ih88y9qPl1PNi9SOi+Pcoo57XRRnrZLDjdPVHXzMzMzMbFiduZm
ZmZg3hxM3MzMysIZy4mZmZmTVEYxI3SddUHYOZmZlZlRqTuEXEDlXHYGZmZlalxiRukh7P/3eR9BtJ50u6XdLZKuPeWzMzM7Oaa0zi1sc2wIeAzYENgFdVGo2ZmZlZBzQ1cbshIv4cacS+BcCMvjNImiNpnqR5ZQyCaWZmZla1piZuS1qml9PPL0BExNyImB0Rs30l1czMzMaCpiZuZmZmZuOOEzczMzOzhmjMj8xHxLT8/zfAb1qeP7qikMzMzMw6yi1uZmZmZg3hxM3MzMysIZy4mZmZmTVEY/q4FZWGfBs9yTlumUQJQ7TUZJiXnoJ1C+Cxxx4sHkgJy6OM9VKXddvl89JyjaH6tXTpkqFn6kAcpRxXxsg4paXsN2qi3UOQec9mZmZm1hBO3MzMzMwawombmZmZWUM4cTMzMzNrCCduZmZmZg3hxM3MzMysIWqVuEmaIel2SWdJukPS2ZJeJ+lqSXdK2i7/XzvP3yXpf3sfm5mZmY1ltUrcso2ALwKb5r8DgR2B44CPAz8ADsrzvg5YGBH3VRCnmZmZWUfVMXG7KyJuiTRi7mLgsogI4BZgBvBt4JA877uB7/RXiKQ5kuZJmld08F0zMzOzOqhj4tY6pHVPy+MeYEJE3AP8XdKuwHbAL/srJCLmRsTsiJjtXz0wMzOzsaCpGc2ZpEum50XE8qqDMTMzM+uEpiZuPwWmMcBlUjMzM7OxqFY/Mh8RdwNbtjw+dIDXtibdlHB7B8MzMzMzq1StErfhkHQ88D5W3FlqZmZmNi407lJpRJwcEetHxG+rjsXMzMyskxqXuJmZmZmNV427VFqV+owFV0KuHVG8DBUMgeIxqISvUQap4MIAVl99ncJl/P3vdxUuI2pQN6CcddtTwjY7Vs5sS1mvJZRRl/q10korFy7jcR4qHkgJdbSMfWkdjJXvASXV80GMlf2SmZmZ2ZjnxM3MzMysIZy4mZmZmTWEEzczMzOzhqhd4ibp8fz/BZLOb3n+h5IWSTq2uujMzMzMqlPbu0oj4q/A/gCSng9sGxEbVRuVmZmZWXVq1+LWS9IMSbfmh78CXihpgaSdJG0o6WJJ8yVdJWnTKmM1MzMz64Tatrj18RbgooiYCSDpMuDIiLhT0iuA04FdK4zPzMzMrO2akrj9k6RpwA7AeS0Dn07qZ745wJw83bH4zMzMzNqlcYkb6fLuw72tbwOJiLnAXIDu7gljZ0hmMzMzG7dq28dtIBHxKHCXpLcCKNm64rDMzMzM2q5xiVt2EHC4pIXAYmCviuMxMzMza7vaXSqNiGn5/93Aln2n8+O7gN0rCM/MzMysMk1tcTMzMzMbd5y4mZmZmTWEEzczMzOzhqhdH7d2kcZGjlrGmHRRg3HtRPUxlKWM7/LUU4/XIo5g7Iyc4/Eb66cu66SnZ3nVIVgb1Oa40uYwxkY2Y2ZmZjYOOHEzMzMzawgnbmZmZmYN4cTNzMzMrCGcuJmZmZk1hBM3MzMzs4Zw4mZmZmbWEE7czMzMzBpizA7AK2kOMCdNOz81MzOz5huzGU1EzI2I2RExuy6jdZuZmZkVMWYTNzMzM7OxpvGJm6RfSHpB1XGYmZmZtVvj+7hFxBurjsHMzMysExrf4mZmZmY2XjhxMzMzM2sIJ25mZmZmDaGIqDqGtpN0H/CnIWZbC7i/wMcUfb/LcBlNKKMOMbgMl9HuMuoQg8sY32WsHxFr9/tKRPgvJa/zqny/y3AZTSijDjG4DJfR7jLqEIPLcBkD/flSqZmZmVlDOHEzMzMzawgnbivMrfj9LsNlNKGMOsTgMlxGu8uoQwwuw2X0a1zcnGBmZmY2FrjFzczMzKwhnLiZmZmZNYQTNzMzM7OGcOJmZmZm1hBO3MzMzMwa4v8DLi74F56FI00AAAAASUVORK5CYII=\n",
-      "text/plain": [
-       "<Figure size 720x1440 with 1 Axes>"
-      ]
-     },
-     "metadata": {
-      "needs_background": "light"
-     },
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "display_attention(tokenizer.tokenize(sentence), attention)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 56,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "def count_pos_neg_token(token, tokenizer, data):\n",
-    "    n_examples = {'pos': 0, 'neg': 0}\n",
-    "    n_count = {'pos': 0, 'neg': 0}\n",
-    "    for label, text in data.data:\n",
-    "        tokens = tokenizer.tokenize(text)\n",
-    "        count = tokens.count(token)\n",
-    "        if count > 0:\n",
-    "            n_examples[label] += 1\n",
-    "            n_count[label] += count\n",
-    "    return n_examples['pos'], n_count['pos'], n_examples['neg'], n_count['neg']"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 57,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\"worst\" appears in 106 positive examples, 111 times in total.\n",
-      "\"worst\" appears in 1246 negative examples, 1457 times in total.\n"
-     ]
-    }
-   ],
-   "source": [
-    "token = 'worst'\n",
-    "\n",
-    "n_pos, count_pos, n_neg, count_neg = count_pos_neg_token(token, tokenizer, train_data)\n",
-    "\n",
-    "print(f'\"{token}\" appears in {n_pos} positive examples, {count_pos} times in total.')\n",
-    "print(f'\"{token}\" appears in {n_neg} negative examples, {count_neg} times in total.')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 58,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\"greatest\" appears in 242 positive examples, 264 times in total.\n",
-      "\"greatest\" appears in 86 negative examples, 91 times in total.\n"
-     ]
-    }
-   ],
-   "source": [
-    "token = 'greatest'\n",
-    "\n",
-    "n_pos, count_pos, n_neg, count_neg = count_pos_neg_token(token, tokenizer, train_data)\n",
-    "\n",
-    "print(f'\"{token}\" appears in {n_pos} positive examples, {count_pos} times in total.')\n",
-    "print(f'\"{token}\" appears in {n_neg} negative examples, {count_neg} times in total.')"
-   ]
-  }
- ],
- "metadata": {
-  "accelerator": "GPU",
-  "colab": {
-   "machine_shape": "hm",
-   "name": "scratchpad",
-   "provenance": []
-  },
-  "kernelspec": {
-   "display_name": "Python 3",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.8.3"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 1
-}
diff --git a/experimental/a_nbow-bag.ipynb b/experimental/a_nbow-bag.ipynb
deleted file mode 100644
index 097aec4..0000000
--- a/experimental/a_nbow-bag.ipynb
+++ /dev/null
@@ -1,950 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 228
-    },
-    "colab_type": "code",
-    "id": "-V90fMxJdFl7",
-    "outputId": "2bbc3f28-84e3-47bd-97a2-ea0c2f0cf395"
-   },
-   "outputs": [],
-   "source": [
-    "import torch\n",
-    "import torch.nn as nn\n",
-    "import torch.optim as optim\n",
-    "\n",
-    "import torchtext\n",
-    "import torchtext.experimental\n",
-    "import torchtext.experimental.vectors\n",
-    "from torchtext.experimental.datasets.raw.text_classification import RawTextIterableDataset\n",
-    "from torchtext.experimental.datasets.text_classification import TextClassificationDataset\n",
-    "from torchtext.experimental.functional import sequential_transforms, vocab_func, totensor\n",
-    "\n",
-    "import collections\n",
-    "import random\n",
-    "import time"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "tOO7b-Z1dFmA"
-   },
-   "outputs": [],
-   "source": [
-    "seed = 1234\n",
-    "\n",
-    "torch.manual_seed(seed)\n",
-    "random.seed(seed)\n",
-    "torch.backends.cudnn.deterministic = True\n",
-    "torch.backends.cudnn.benchmark = False"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "FhBXG95YdFmD"
-   },
-   "outputs": [],
-   "source": [
-    "raw_train_data, raw_test_data = torchtext.experimental.datasets.raw.IMDB()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "rOTczrIEdFmY"
-   },
-   "outputs": [],
-   "source": [
-    "def get_train_valid_split(raw_train_data, split_ratio = 0.7):\n",
-    "\n",
-    "    raw_train_data = list(raw_train_data)\n",
-    "        \n",
-    "    random.shuffle(raw_train_data)\n",
-    "        \n",
-    "    n_train_examples = int(len(raw_train_data) * split_ratio)\n",
-    "        \n",
-    "    train_data = raw_train_data[:n_train_examples]\n",
-    "    valid_data = raw_train_data[n_train_examples:]\n",
-    "    \n",
-    "    train_data = RawTextIterableDataset(train_data)\n",
-    "    valid_data = RawTextIterableDataset(valid_data)\n",
-    "    \n",
-    "    return train_data, valid_data"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "C6Tp4CyQdFma"
-   },
-   "outputs": [],
-   "source": [
-    "raw_train_data, raw_valid_data = get_train_valid_split(raw_train_data)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "LTJjCocRdFmh"
-   },
-   "outputs": [],
-   "source": [
-    "class Tokenizer:\n",
-    "    def __init__(self, tokenize_fn = 'basic_english', lower = True, max_length = None):\n",
-    "        \n",
-    "        self.tokenize_fn = torchtext.data.utils.get_tokenizer(tokenize_fn)\n",
-    "        self.lower = lower\n",
-    "        self.max_length = max_length\n",
-    "        \n",
-    "    def tokenize(self, s):\n",
-    "        \n",
-    "        tokens = self.tokenize_fn(s)\n",
-    "        \n",
-    "        if self.lower:\n",
-    "            tokens = [token.lower() for token in tokens]\n",
-    "            \n",
-    "        if self.max_length is not None:\n",
-    "            tokens = tokens[:self.max_length]\n",
-    "            \n",
-    "        return tokens"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "5P2KumuDdFmj"
-   },
-   "outputs": [],
-   "source": [
-    "max_length = 500\n",
-    "\n",
-    "tokenizer = Tokenizer(max_length = max_length)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 8,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "V1albCvadFmm",
-    "outputId": "5c7c30f2-c6b7-4098-990d-7bfcdc2446f1"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "['this', 'film', 'is', 'terrible', '.', 'i', 'hate', 'it', 'and', 'it', \"'\", 's', 'bad', '!']\n"
-     ]
-    }
-   ],
-   "source": [
-    "s = \"this film is terrible. i hate it and it's bad!\"\n",
-    "\n",
-    "print(tokenizer.tokenize(s))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "anC7_ViodFmp"
-   },
-   "outputs": [],
-   "source": [
-    "def build_vocab_from_data(raw_data, tokenizer, **vocab_kwargs):\n",
-    "    \n",
-    "    token_freqs = collections.Counter()\n",
-    "    \n",
-    "    for label, text in raw_data:\n",
-    "        tokens = tokenizer.tokenize(text)\n",
-    "        token_freqs.update(tokens)\n",
-    "                \n",
-    "    vocab = torchtext.vocab.Vocab(token_freqs, **vocab_kwargs)\n",
-    "    \n",
-    "    return vocab"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 10,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "rgHPS1xzdFmt"
-   },
-   "outputs": [],
-   "source": [
-    "max_size = 25_000\n",
-    "\n",
-    "vocab = build_vocab_from_data(raw_train_data, tokenizer, max_size = max_size)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 11,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "xiW0GItTdFm6"
-   },
-   "outputs": [],
-   "source": [
-    "def process_raw_data(raw_data, tokenizer, vocab):\n",
-    "    \n",
-    "    raw_data = [(label, text) for (label, text) in raw_data]\n",
-    "\n",
-    "    text_transform = sequential_transforms(tokenizer.tokenize,\n",
-    "                                           vocab_func(vocab),\n",
-    "                                           totensor(dtype=torch.long))\n",
-    "    \n",
-    "    label_transform = sequential_transforms(totensor(dtype=torch.long))\n",
-    "\n",
-    "    transforms = (label_transform, text_transform)\n",
-    "\n",
-    "    dataset = TextClassificationDataset(raw_data,\n",
-    "                                        vocab,\n",
-    "                                        transforms)\n",
-    "    \n",
-    "    return dataset"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 12,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "4Rec_Wk6dFnD"
-   },
-   "outputs": [],
-   "source": [
-    "train_data = process_raw_data(raw_train_data, tokenizer, vocab)\n",
-    "valid_data = process_raw_data(raw_valid_data, tokenizer, vocab)\n",
-    "test_data = process_raw_data(raw_test_data, tokenizer, vocab)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 13,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "egzlLweTdFnH"
-   },
-   "outputs": [],
-   "source": [
-    "class Collator:      \n",
-    "    def collate(self, batch):\n",
-    "        \n",
-    "        labels, text = zip(*batch)\n",
-    "        \n",
-    "        labels = torch.LongTensor(labels)\n",
-    "        \n",
-    "        lengths = [len(x) for x in text]\n",
-    "        lengths = torch.LongTensor([0] + lengths[:-1])\n",
-    "        \n",
-    "        offsets = torch.cumsum(lengths, dim = 0)\n",
-    "                \n",
-    "        text = torch.cat(text)\n",
-    "     \n",
-    "        return labels, text, offsets"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 14,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "TYLvjhoSdFnM"
-   },
-   "outputs": [],
-   "source": [
-    "collator = Collator()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 15,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "7Ly4l1I8dFnR"
-   },
-   "outputs": [],
-   "source": [
-    "batch_size = 256\n",
-    "\n",
-    "train_iterator = torch.utils.data.DataLoader(train_data, \n",
-    "                                             batch_size, \n",
-    "                                             shuffle = True, \n",
-    "                                             collate_fn = collator.collate)\n",
-    "\n",
-    "valid_iterator = torch.utils.data.DataLoader(valid_data, \n",
-    "                                             batch_size, \n",
-    "                                             shuffle = False, \n",
-    "                                             collate_fn = collator.collate)\n",
-    "\n",
-    "test_iterator = torch.utils.data.DataLoader(test_data, \n",
-    "                                            batch_size, \n",
-    "                                            shuffle = False, \n",
-    "                                            collate_fn = collator.collate)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 16,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "dbh38jHEdFnV"
-   },
-   "outputs": [],
-   "source": [
-    "class NBOW(nn.Module):\n",
-    "    def __init__(self, input_dim, emb_dim, output_dim):\n",
-    "        super().__init__()\n",
-    "        \n",
-    "        self.embedding = nn.EmbeddingBag(input_dim, emb_dim)\n",
-    "        self.fc = nn.Linear(emb_dim, output_dim)\n",
-    "        \n",
-    "    def forward(self, text, offsets):\n",
-    "        \n",
-    "        # text = [seq len * batch size]\n",
-    "        # offsets = [batch size]\n",
-    "        \n",
-    "        embedded = self.embedding(text, offsets)\n",
-    "        \n",
-    "        # embedded = [batch size, emb dim]\n",
-    "        \n",
-    "        prediction = self.fc(embedded)\n",
-    "        \n",
-    "        # prediction = [batch size, output dim]\n",
-    "        \n",
-    "        return prediction"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 17,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "Ga1nXhindFnZ"
-   },
-   "outputs": [],
-   "source": [
-    "input_dim = len(vocab)\n",
-    "emb_dim = 100\n",
-    "output_dim = 2\n",
-    "\n",
-    "model = NBOW(input_dim, emb_dim, output_dim)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 18,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "UyIJC0tYdFnc"
-   },
-   "outputs": [],
-   "source": [
-    "def count_parameters(model):\n",
-    "    return sum(p.numel() for p in model.parameters() if p.requires_grad)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 19,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "1sJRLyewdFng",
-    "outputId": "e7e357e1-1cc7-4aa4-ff40-4d749209759d"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "The model has 2,500,402 trainable parameters\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(f'The model has {count_parameters(model):,} trainable parameters')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 20,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "BPsihrZudFnl"
-   },
-   "outputs": [],
-   "source": [
-    "glove = torchtext.experimental.vectors.GloVe(name = '6B',\n",
-    "                                             dim = emb_dim)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 21,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "4BFftRDMdFnx"
-   },
-   "outputs": [],
-   "source": [
-    "def get_pretrained_embedding(initial_embedding, pretrained_vectors, vocab, unk_token):\n",
-    "    \n",
-    "    pretrained_embedding = torch.FloatTensor(initial_embedding.weight.clone()).detach()    \n",
-    "    pretrained_vocab = pretrained_vectors.vectors.get_stoi()\n",
-    "    \n",
-    "    unk_tokens = []\n",
-    "    \n",
-    "    for idx, token in enumerate(vocab.itos):\n",
-    "        if token in pretrained_vocab:\n",
-    "            pretrained_vector = pretrained_vectors[token]\n",
-    "            pretrained_embedding[idx] = pretrained_vector\n",
-    "        else:\n",
-    "            unk_tokens.append(token)\n",
-    "        \n",
-    "    return pretrained_embedding, unk_tokens"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 22,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "QRToW07JdFnz"
-   },
-   "outputs": [],
-   "source": [
-    "unk_token = '<unk>'\n",
-    "\n",
-    "pretrained_embedding, unk_tokens = get_pretrained_embedding(model.embedding, glove, vocab, unk_token)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 23,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 139
-    },
-    "colab_type": "code",
-    "id": "AnE6D4MAdFn_",
-    "outputId": "8b3fea1a-9bcb-4fd9-ba78-72baee94f96a"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "tensor([[-0.1117, -0.4966,  0.1631,  ...,  1.5903, -0.1947, -0.2415],\n",
-       "        [ 1.3204,  1.5997, -1.0792,  ...,  0.6060,  0.2209, -0.8245],\n",
-       "        [-0.0382, -0.2449,  0.7281,  ..., -0.1459,  0.8278,  0.2706],\n",
-       "        ...,\n",
-       "        [-0.2925,  0.1087,  0.7920,  ..., -0.3641,  0.1822, -0.4104],\n",
-       "        [-0.7250,  0.7545,  0.1637,  ..., -0.0144, -0.1761,  0.3418],\n",
-       "        [ 1.1753,  0.0460, -0.3542,  ...,  0.4510,  0.0485, -0.4015]])"
-      ]
-     },
-     "execution_count": 23,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "model.embedding.weight.data.copy_(pretrained_embedding)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 24,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "DJloauERdFoF"
-   },
-   "outputs": [],
-   "source": [
-    "optimizer = optim.Adam(model.parameters())"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 25,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "fPPZ0cs_dFoH"
-   },
-   "outputs": [],
-   "source": [
-    "criterion = nn.CrossEntropyLoss()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 26,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "HGUcFIupdFoK",
-    "outputId": "e5d9b842-689b-49ca-a4f4-08574f0524ee"
-   },
-   "outputs": [],
-   "source": [
-    "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 27,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "Ynf7j6kQdFoM"
-   },
-   "outputs": [],
-   "source": [
-    "model = model.to(device)\n",
-    "criterion = criterion.to(device)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 28,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "977iykeOdFoP"
-   },
-   "outputs": [],
-   "source": [
-    "def calculate_accuracy(predictions, labels):\n",
-    "    top_predictions = predictions.argmax(1, keepdim = True)\n",
-    "    correct = top_predictions.eq(labels.view_as(top_predictions)).sum()\n",
-    "    accuracy = correct.float() / labels.shape[0]\n",
-    "    return accuracy"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 29,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "HPNI8DJudFoS"
-   },
-   "outputs": [],
-   "source": [
-    "def train(model, iterator, optimizer, criterion, device):\n",
-    "    \n",
-    "    epoch_loss = 0\n",
-    "    epoch_acc = 0\n",
-    "    \n",
-    "    model.train()\n",
-    "    \n",
-    "    for labels, text, offsets in iterator:\n",
-    "        \n",
-    "        labels = labels.to(device)\n",
-    "        text = text.to(device)\n",
-    "        offsets = offsets.to(device)\n",
-    "        \n",
-    "        optimizer.zero_grad()\n",
-    "        \n",
-    "        predictions = model(text, offsets)\n",
-    "        \n",
-    "        loss = criterion(predictions, labels)\n",
-    "        \n",
-    "        acc = calculate_accuracy(predictions, labels)\n",
-    "        \n",
-    "        loss.backward()\n",
-    "        \n",
-    "        optimizer.step()\n",
-    "        \n",
-    "        epoch_loss += loss.item()\n",
-    "        epoch_acc += acc.item()\n",
-    "        \n",
-    "    return epoch_loss / len(iterator), epoch_acc / len(iterator)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 30,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "kp6pV5xKdFoV"
-   },
-   "outputs": [],
-   "source": [
-    "def evaluate(model, iterator, criterion, device):\n",
-    "    \n",
-    "    epoch_loss = 0\n",
-    "    epoch_acc = 0\n",
-    "    \n",
-    "    model.eval()\n",
-    "    \n",
-    "    with torch.no_grad():\n",
-    "    \n",
-    "        for labels, text, offsets in iterator:\n",
-    "\n",
-    "            labels = labels.to(device)\n",
-    "            text = text.to(device)\n",
-    "            offsets = offsets.to(device)\n",
-    "            \n",
-    "            predictions = model(text, offsets)\n",
-    "            \n",
-    "            loss = criterion(predictions, labels)\n",
-    "            \n",
-    "            acc = calculate_accuracy(predictions, labels)\n",
-    "\n",
-    "            epoch_loss += loss.item()\n",
-    "            epoch_acc += acc.item()\n",
-    "        \n",
-    "    return epoch_loss / len(iterator), epoch_acc / len(iterator)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 31,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "8YzL45gYdFoX"
-   },
-   "outputs": [],
-   "source": [
-    "def epoch_time(start_time, end_time):\n",
-    "    elapsed_time = end_time - start_time\n",
-    "    elapsed_mins = int(elapsed_time / 60)\n",
-    "    elapsed_secs = int(elapsed_time - (elapsed_mins * 60))\n",
-    "    return elapsed_mins, elapsed_secs"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 32,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 537
-    },
-    "colab_type": "code",
-    "id": "0A8wv7-xdFoa",
-    "outputId": "238f01bf-5438-482a-80ac-75c70cb20ed1"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Epoch: 01 | Epoch Time: 0m 4s\n",
-      "\tTrain Loss: 0.678 | Train Acc: 63.94%\n",
-      "\t Val. Loss: 0.659 |  Val. Acc: 70.86%\n",
-      "Epoch: 02 | Epoch Time: 0m 4s\n",
-      "\tTrain Loss: 0.634 | Train Acc: 72.62%\n",
-      "\t Val. Loss: 0.608 |  Val. Acc: 74.09%\n",
-      "Epoch: 03 | Epoch Time: 0m 4s\n",
-      "\tTrain Loss: 0.570 | Train Acc: 77.51%\n",
-      "\t Val. Loss: 0.542 |  Val. Acc: 78.16%\n",
-      "Epoch: 04 | Epoch Time: 0m 4s\n",
-      "\tTrain Loss: 0.497 | Train Acc: 81.81%\n",
-      "\t Val. Loss: 0.477 |  Val. Acc: 81.68%\n",
-      "Epoch: 05 | Epoch Time: 0m 4s\n",
-      "\tTrain Loss: 0.430 | Train Acc: 84.98%\n",
-      "\t Val. Loss: 0.424 |  Val. Acc: 84.21%\n",
-      "Epoch: 06 | Epoch Time: 0m 4s\n",
-      "\tTrain Loss: 0.375 | Train Acc: 87.18%\n",
-      "\t Val. Loss: 0.387 |  Val. Acc: 85.68%\n",
-      "Epoch: 07 | Epoch Time: 0m 4s\n",
-      "\tTrain Loss: 0.334 | Train Acc: 88.58%\n",
-      "\t Val. Loss: 0.357 |  Val. Acc: 86.61%\n",
-      "Epoch: 08 | Epoch Time: 0m 4s\n",
-      "\tTrain Loss: 0.302 | Train Acc: 89.62%\n",
-      "\t Val. Loss: 0.337 |  Val. Acc: 87.14%\n",
-      "Epoch: 09 | Epoch Time: 0m 4s\n",
-      "\tTrain Loss: 0.274 | Train Acc: 90.65%\n",
-      "\t Val. Loss: 0.321 |  Val. Acc: 87.67%\n",
-      "Epoch: 10 | Epoch Time: 0m 4s\n",
-      "\tTrain Loss: 0.253 | Train Acc: 91.37%\n",
-      "\t Val. Loss: 0.308 |  Val. Acc: 88.14%\n"
-     ]
-    }
-   ],
-   "source": [
-    "n_epochs = 10\n",
-    "\n",
-    "best_valid_loss = float('inf')\n",
-    "\n",
-    "for epoch in range(n_epochs):\n",
-    "\n",
-    "    start_time = time.monotonic()\n",
-    "    \n",
-    "    train_loss, train_acc = train(model, train_iterator, optimizer, criterion, device)\n",
-    "    valid_loss, valid_acc = evaluate(model, valid_iterator, criterion, device)\n",
-    "    \n",
-    "    end_time = time.monotonic()\n",
-    "\n",
-    "    epoch_mins, epoch_secs = epoch_time(start_time, end_time)\n",
-    "    \n",
-    "    if valid_loss < best_valid_loss:\n",
-    "        best_valid_loss = valid_loss\n",
-    "        torch.save(model.state_dict(), 'nbow-model.pt')\n",
-    "    \n",
-    "    print(f'Epoch: {epoch+1:02} | Epoch Time: {epoch_mins}m {epoch_secs}s')\n",
-    "    print(f'\\tTrain Loss: {train_loss:.3f} | Train Acc: {train_acc*100:.2f}%')\n",
-    "    print(f'\\t Val. Loss: {valid_loss:.3f} |  Val. Acc: {valid_acc*100:.2f}%')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 33,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "oMHAuMFNdFoc",
-    "outputId": "58b32f9a-8c39-4818-b526-1a80e435f3ae"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Test Loss: 0.327 | Test Acc: 86.80%\n"
-     ]
-    }
-   ],
-   "source": [
-    "model.load_state_dict(torch.load('nbow-model.pt'))\n",
-    "\n",
-    "test_loss, test_acc = evaluate(model, test_iterator, criterion, device)\n",
-    "\n",
-    "print(f'Test Loss: {test_loss:.3f} | Test Acc: {test_acc*100:.2f}%')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 34,
-   "metadata": {
-    "colab": {},
-    "colab_type": "code",
-    "id": "sEDiSM3fdFog"
-   },
-   "outputs": [],
-   "source": [
-    "def predict_sentiment(tokenizer, vocab, model, device, sentence):\n",
-    "    model.eval()\n",
-    "    tokens = tokenizer.tokenize(sentence)\n",
-    "    indexes = [vocab.stoi[token] for token in tokens]\n",
-    "    tensor = torch.LongTensor(indexes).to(device)\n",
-    "    offset = torch.LongTensor([0]).to(device)\n",
-    "    prediction = model(tensor, offset)\n",
-    "    probabilities = nn.functional.softmax(prediction, dim = -1)\n",
-    "    pos_probability = probabilities.squeeze()[-1].item()\n",
-    "    return pos_probability"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 35,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "ycEAWhYIdFoi",
-    "outputId": "8a675641-fd79-46a6-b4e6-0b2006f866cc"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "0.00038787935045547783"
-      ]
-     },
-     "execution_count": 35,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "sentence = 'the absolute worst movie of all time.'\n",
-    "\n",
-    "predict_sentiment(tokenizer, vocab, model, device, sentence)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 36,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "cuMFqIoJdFok",
-    "outputId": "12c964fc-6788-459c-ad5e-ca0af366b1d4"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "0.9986314177513123"
-      ]
-     },
-     "execution_count": 36,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "sentence = 'one of the greatest films i have ever seen in my life.'\n",
-    "\n",
-    "predict_sentiment(tokenizer, vocab, model, device, sentence)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 37,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "zausUPENdFoo",
-    "outputId": "2bdd06df-dab7-47ea-8952-8bd82d39bac2"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "0.6374390721321106"
-      ]
-     },
-     "execution_count": 37,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "sentence = \"i thought it was going to be one of the greatest films i have ever seen in my life, \\\n",
-    "but it was actually the absolute worst movie of all time.\"\n",
-    "\n",
-    "predict_sentiment(tokenizer, vocab, model, device, sentence)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 38,
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 35
-    },
-    "colab_type": "code",
-    "id": "e15vpNJYdFor",
-    "outputId": "eed3ae38-d01a-4476-a235-8fd3582240f3"
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "0.6374390721321106"
-      ]
-     },
-     "execution_count": 38,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "sentence = \"i thought it was going to be the absolute worst movie of all time, \\\n",
-    "but it was actually one of the greatest films i have ever seen in my life.\"\n",
-    "\n",
-    "predict_sentiment(tokenizer, vocab, model, device, sentence)"
-   ]
-  }
- ],
- "metadata": {
-  "accelerator": "GPU",
-  "colab": {
-   "machine_shape": "hm",
-   "name": "1_nbow.ipynb",
-   "provenance": []
-  },
-  "kernelspec": {
-   "display_name": "Python 3",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.8.3"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 1
-}