nsmc-study/Training.ipynb
2022-02-24 01:24:20 +09:00

{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "4c31f5ad",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'C:\\\\Users\\\\Monoid\\\\anaconda3\\\\envs\\\\nn\\\\python.exe'"
]
},
"execution_count": 1,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"import sys\n",
"sys.executable"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "2b9e11e7",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"load bert tokenizer...\n"
]
}
],
"source": [
"from transformers import BertTokenizer\n",
"print(\"load bert tokenizer...\")\n",
"PRETAINED_MODEL_NAME = 'bert-base-multilingual-cased'\n",
"tokenizer = BertTokenizer.from_pretrained(PRETAINED_MODEL_NAME)"
]
},
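{
"cell_type": "markdown",
"id": "a1c0ffe1",
"metadata": {},
"source": [
"A quick illustrative check of the tokenizer: it maps raw text to subword tokens, their ids, and an attention mask, which is the format BERT expects."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b2d1f0a2",
"metadata": {},
"outputs": [],
"source": [
"# Look at what the tokenizer produces for a single review:\n",
"# subword tokens, their ids, and the attention mask used for padding.\n",
"print(tokenizer.tokenize(\"사랑해요.\"))\n",
"sample = tokenizer([\"사랑해요.\"], return_tensors='pt', padding='longest')\n",
"print(sample['input_ids'])\n",
"print(sample['attention_mask'])"
]
},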
{
"cell_type": "code",
"execution_count": 3,
"id": "82bf44a2",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"cuda available : True\n",
"available device count : 1\n",
"device name: NVIDIA GeForce RTX 3070\n"
]
}
],
"source": [
"import torch\n",
"print(\"cuda available :\",torch.cuda.is_available())\n",
"print(\"available device count :\",torch.cuda.device_count())\n",
"if torch.cuda.is_available():\n",
" device_index = torch.cuda.current_device()\n",
" print(\"device name:\",torch.cuda.get_device_name(device_index))"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "38dcf62d",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"read train set\n",
"100%|██████████████████████████████████████████████████████████████████████| 150000/150000 [00:00<00:00, 208333.74it/s]\n",
"read test set\n",
"100%|████████████████████████████████████████████████████████████████████████| 50000/50000 [00:00<00:00, 260420.34it/s]\n"
]
}
],
"source": [
"from ndataset import readNsmcDataAll, make_collate_fn\n",
"dataTrain, dataTest = readNsmcDataAll()\n",
"collate_fn = make_collate_fn(tokenizer)"
]
},
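{
"cell_type": "markdown",
"id": "c3e2a1b3",
"metadata": {},
"source": [
"`readNsmcDataAll` and `make_collate_fn` come from the local `ndataset` module, whose source is not shown here. Judging from how batches are unpacked in the training loop, the collate function plausibly tokenizes a list of (text, label) samples into padded tensors and stacks the labels; a rough sketch (the real implementation may differ):"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d4f3b2c4",
"metadata": {},
"outputs": [],
"source": [
"# Sketch only: the real make_collate_fn lives in ndataset.py and may differ.\n",
"import torch\n",
"\n",
"def make_collate_fn_sketch(tokenizer):\n",
"    def collate_fn(samples):  # each sample assumed to be a (text, label) pair\n",
"        texts = [text for text, _ in samples]\n",
"        labels = torch.tensor([label for _, label in samples])\n",
"        inputs = tokenizer(texts, return_tensors='pt', padding='longest', truncation=True)\n",
"        return inputs, labels\n",
"    return collate_fn"
]
},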
{
"cell_type": "code",
"execution_count": 5,
"id": "650c8a19",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Some weights of the model checkpoint at bert-base-multilingual-cased were not used when initializing BertModel: ['cls.predictions.transform.LayerNorm.weight', 'cls.seq_relationship.bias', 'cls.predictions.bias', 'cls.seq_relationship.weight', 'cls.predictions.decoder.weight', 'cls.predictions.transform.LayerNorm.bias', 'cls.predictions.transform.dense.bias', 'cls.predictions.transform.dense.weight']\n",
"- This IS expected if you are initializing BertModel from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n",
"- This IS NOT expected if you are initializing BertModel from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n"
]
}
],
"source": [
"from transformers import BertModel\n",
"PRETAINED_MODEL_NAME = 'bert-base-multilingual-cased'\n",
"bert = BertModel.from_pretrained(PRETAINED_MODEL_NAME)"
]
},
{
"cell_type": "markdown",
"id": "30d69b45",
"metadata": {},
"source": [
"BERT 로딩"
]
},
{
"cell_type": "code",
"execution_count": 26,
"id": "7583b0d1",
"metadata": {},
"outputs": [],
"source": [
"import torch.nn as nn\n",
"\n",
"class MyModel(nn.Module):\n",
" def __init__(self,bert):\n",
" super().__init__()\n",
" self.bert = bert\n",
" self.dropout = nn.Dropout(p=0.1)\n",
" self.lin1 = nn.Linear(768,256) #[batch_size,768] -> [batch_size,256]\n",
" self.lin2 = nn.Linear(256,1) #[batch_size,256] -> [batch_size,1]\n",
"\n",
" def forward(self,**kargs):\n",
" emb = self.bert(**kargs)\n",
" e1 = self.dropout(emb['pooler_output'])\n",
" e2 = self.lin1(e1)\n",
" w = self.lin2(e2)\n",
" return w.squeeze() #[batch_size]"
]
},
{
"cell_type": "markdown",
"id": "befe62b0",
"metadata": {},
"source": [
"모델 선언. 비슷하게 감."
]
},
{
"cell_type": "code",
"execution_count": 27,
"id": "36585e76",
"metadata": {},
"outputs": [],
"source": [
"model = MyModel(bert)"
]
},
{
"cell_type": "markdown",
"id": "7969fead",
"metadata": {},
"source": [
"학습 과정에서 벌어지는 일"
]
},
{
"cell_type": "code",
"execution_count": 8,
"id": "8c2a4bc9",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"MyModel(\n",
" (bert): BertModel(\n",
" (embeddings): BertEmbeddings(\n",
" (word_embeddings): Embedding(119547, 768, padding_idx=0)\n",
" (position_embeddings): Embedding(512, 768)\n",
" (token_type_embeddings): Embedding(2, 768)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (encoder): BertEncoder(\n",
" (layer): ModuleList(\n",
" (0): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (1): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (2): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (3): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (4): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (5): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (6): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (7): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (8): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (9): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (10): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (11): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" )\n",
" )\n",
" (pooler): BertPooler(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (activation): Tanh()\n",
" )\n",
" )\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" (lin1): Linear(in_features=768, out_features=256, bias=True)\n",
" (lin2): Linear(in_features=256, out_features=1, bias=True)\n",
")"
]
},
"execution_count": 8,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"model.cpu()"
]
},
{
"cell_type": "code",
"execution_count": 10,
"id": "e027b926",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"torch.Size([4, 768])"
]
},
"execution_count": 10,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"hidden = bert(**tokenizer([\"사랑해요.\",\"무서워요.\",\"슬퍼요.\",\"재미있어요.\"], return_tensors = 'pt', padding='longest'))['pooler_output']\n",
"hidden.size()"
]
},
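{
"cell_type": "markdown",
"id": "e5a4c3d5",
"metadata": {},
"source": [
"`pooler_output` is the final-layer hidden state of the `[CLS]` token passed through the `BertPooler` (a `Linear` followed by `Tanh`), while `last_hidden_state` keeps a 768-dimensional vector for every token. A quick comparison of the two shapes:"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "f6b5d4e6",
"metadata": {},
"outputs": [],
"source": [
"# Compare the per-token output with the pooled [CLS] representation.\n",
"out = bert(**tokenizer([\"사랑해요.\"], return_tensors='pt', padding='longest'))\n",
"print(out['last_hidden_state'].size())  # [1, num_tokens, 768]\n",
"print(out['pooler_output'].size())      # [1, 768]"
]
},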
{
"cell_type": "code",
"execution_count": 14,
"id": "ae9f8fba",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor([0.1623, 0.1365, 0.1949, 0.1491], grad_fn=<SqueezeBackward0>)"
]
},
"execution_count": 14,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"w = model.lin2(model.lin1(hidden)).squeeze()\n",
"w"
]
},
{
"cell_type": "code",
"execution_count": 15,
"id": "5470c3f8",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor([0.5405, 0.5341, 0.5486, 0.5372], grad_fn=<SigmoidBackward0>)"
]
},
"execution_count": 15,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"torch.sigmoid(w)"
]
},
{
"cell_type": "code",
"execution_count": 16,
"id": "b7eb8e67",
"metadata": {},
"outputs": [],
"source": [
"labels = torch.tensor([1,0,0,1])"
]
},
{
"cell_type": "code",
"execution_count": 17,
"id": "7a324ed7",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor(0.6989, dtype=torch.float64,\n",
" grad_fn=<BinaryCrossEntropyWithLogitsBackward0>)"
]
},
"execution_count": 17,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"nn.BCEWithLogitsLoss()(w,labels.double())"
]
},
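{
"cell_type": "markdown",
"id": "a7c6e5f7",
"metadata": {},
"source": [
"`BCEWithLogitsLoss` folds the sigmoid into the loss, which is numerically more stable than applying `sigmoid` and `BCELoss` separately. The two routes give essentially the same value, as this illustrative check shows:"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b8d7f6a8",
"metadata": {},
"outputs": [],
"source": [
"# Fused logits loss versus explicit sigmoid + BCE: the values should match.\n",
"manual = nn.BCELoss()(torch.sigmoid(w), labels.float())\n",
"fused = nn.BCEWithLogitsLoss()(w, labels.float())\n",
"print(manual.item(), fused.item())"
]
},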
{
"cell_type": "code",
"execution_count": 18,
"id": "cb54294d",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor([ True, False, False, True])"
]
},
"execution_count": 18,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"(w > 0).long() == labels"
]
},
{
"cell_type": "markdown",
"id": "596b89bd",
"metadata": {},
"source": [
"이런 일이 벌어짐. sigmoid 는 나중에"
]
},
{
"cell_type": "code",
"execution_count": 28,
"id": "769c4290",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"MyModel(\n",
" (bert): BertModel(\n",
" (embeddings): BertEmbeddings(\n",
" (word_embeddings): Embedding(119547, 768, padding_idx=0)\n",
" (position_embeddings): Embedding(512, 768)\n",
" (token_type_embeddings): Embedding(2, 768)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (encoder): BertEncoder(\n",
" (layer): ModuleList(\n",
" (0): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (1): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (2): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (3): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (4): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (5): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (6): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (7): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (8): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (9): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (10): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (11): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" )\n",
" )\n",
" (pooler): BertPooler(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (activation): Tanh()\n",
" )\n",
" )\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" (lin1): Linear(in_features=768, out_features=256, bias=True)\n",
" (lin2): Linear(in_features=256, out_features=1, bias=True)\n",
")\n"
]
}
],
"source": [
"device = torch.device('cuda')\n",
"model.to(device)\n",
"print(model)"
]
},
{
"cell_type": "code",
"execution_count": 20,
"id": "b9380dcd",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"device(type='cuda', index=0)"
]
},
"execution_count": 20,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"bert.device"
]
},
{
"cell_type": "markdown",
"id": "5e82df0e",
"metadata": {},
"source": [
"모델을 모두 gpu로 보냄"
]
},
{
"cell_type": "code",
"execution_count": 22,
"id": "74c4becc",
"metadata": {},
"outputs": [],
"source": [
"from torch.utils.data import Dataset, DataLoader\n",
"BATCH_SIZE = 16\n",
"train_loader = DataLoader(\n",
" dataTrain,\n",
" batch_size=BATCH_SIZE,\n",
" shuffle=True,\n",
" collate_fn=collate_fn\n",
")\n",
"test_loader = DataLoader(\n",
" dataTest,\n",
" batch_size=BATCH_SIZE,\n",
" shuffle=True,\n",
" collate_fn=collate_fn\n",
")"
]
},
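{
"cell_type": "markdown",
"id": "c9e8a7b9",
"metadata": {},
"source": [
"Peeking at one mini-batch (an illustrative check; the exact structure depends on `collate_fn`) shows what the training loop below expects: a dict of padded input tensors plus a label tensor."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d0f9b8c0",
"metadata": {},
"outputs": [],
"source": [
"# Inspect a single mini-batch from the loader.\n",
"batch_inputs, batch_labels = next(iter(train_loader))\n",
"print({k: v.size() for k, v in batch_inputs.items()})\n",
"print(batch_labels.size())"
]
},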
{
"cell_type": "markdown",
"id": "4153b2e7",
"metadata": {},
"source": [
"데이터 모델 준비"
]
},
{
"cell_type": "code",
"execution_count": 23,
"id": "3cd5bf7b",
"metadata": {},
"outputs": [],
"source": [
"from torch.optim import AdamW\n",
"from groupby_index import groupby_index\n",
"from tqdm import tqdm"
]
},
{
"cell_type": "code",
"execution_count": 24,
"id": "65b5ccde",
"metadata": {},
"outputs": [],
"source": [
"optimizer = AdamW(model.parameters(), lr=1.0e-5)\n",
"BCELoss = nn.BCEWithLogitsLoss()"
]
},
{
"cell_type": "markdown",
"id": "79607e81",
"metadata": {},
"source": [
"학습 준비"
]
},
{
"cell_type": "code",
"execution_count": 30,
"id": "4835a0d3",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"epoch 0 start:\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"Epoch 0: 100%|███████████████████████████████████| 9375/9375 [12:35<00:00, 12.41minibatch/s, accuracy=0.875, loss=2.58]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"epoch 1 start:\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"Epoch 1: 100%|███████████████████████████████████| 9375/9375 [12:35<00:00, 12.41minibatch/s, accuracy=0.898, loss=2.18]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"epoch 2 start:\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"Epoch 2: 1%|▎ | 82/9375 [00:06<12:30, 12.39minibatch/s, accuracy=0.867, loss=2.08]\n"
]
},
{
"ename": "KeyboardInterrupt",
"evalue": "",
"output_type": "error",
"traceback": [
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)",
"\u001b[1;32m~\\AppData\\Local\\Temp/ipykernel_10708/1191029387.py\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[0;32m 21\u001b[0m \u001b[0moptimizer\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mzero_grad\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 22\u001b[0m \u001b[1;32mfor\u001b[0m \u001b[0mmini_i\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mmini_l\u001b[0m \u001b[1;32min\u001b[0m \u001b[0mbatch\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 23\u001b[1;33m \u001b[0mbatch_inputs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;33m{\u001b[0m\u001b[0mk\u001b[0m\u001b[1;33m:\u001b[0m \u001b[0mv\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mto\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mdevice\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;32mfor\u001b[0m \u001b[0mk\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mv\u001b[0m \u001b[1;32min\u001b[0m \u001b[0mlist\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mmini_i\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mitems\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m}\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 24\u001b[0m \u001b[0mbatch_labels\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mmini_l\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mto\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mdevice\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 25\u001b[0m \u001b[0mattention_mask\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mbatch_inputs\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;34m\"attention_mask\"\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
"\u001b[1;32m~\\AppData\\Local\\Temp/ipykernel_10708/1191029387.py\u001b[0m in \u001b[0;36m<dictcomp>\u001b[1;34m(.0)\u001b[0m\n\u001b[0;32m 21\u001b[0m \u001b[0moptimizer\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mzero_grad\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 22\u001b[0m \u001b[1;32mfor\u001b[0m \u001b[0mmini_i\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mmini_l\u001b[0m \u001b[1;32min\u001b[0m \u001b[0mbatch\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 23\u001b[1;33m \u001b[0mbatch_inputs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;33m{\u001b[0m\u001b[0mk\u001b[0m\u001b[1;33m:\u001b[0m \u001b[0mv\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mto\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mdevice\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;32mfor\u001b[0m \u001b[0mk\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mv\u001b[0m \u001b[1;32min\u001b[0m \u001b[0mlist\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mmini_i\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mitems\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m}\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 24\u001b[0m \u001b[0mbatch_labels\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mmini_l\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mto\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mdevice\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 25\u001b[0m \u001b[0mattention_mask\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mbatch_inputs\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;34m\"attention_mask\"\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
"\u001b[1;31mKeyboardInterrupt\u001b[0m: "
]
}
],
"source": [
"TRAIN_EPOCH = 5\n",
"\n",
"result = []\n",
"iteration = 0\n",
"\n",
"t = []\n",
"\n",
"model.zero_grad()\n",
"\n",
"for epoch in range(TRAIN_EPOCH):\n",
" model.train()\n",
" print(f\"epoch {epoch} start:\")\n",
" with tqdm(train_loader, unit=\"minibatch\") as tepoch:\n",
" tepoch.set_description(f\"Epoch {epoch}\")\n",
" \n",
" for batch in groupby_index(tepoch,8):\n",
" corrects = 0\n",
" totals = 0\n",
" losses = 0\n",
" \n",
" optimizer.zero_grad()\n",
" for mini_i,mini_l in batch:\n",
" batch_inputs = {k: v.to(device) for k, v in list(mini_i.items())}\n",
" batch_labels = mini_l.to(device)\n",
" \n",
" output = model(**batch_inputs)\n",
" loss = BCELoss(output, batch_labels.double())\n",
" \n",
" prediction = (output > 0).to(device,dtype=torch.int64)\n",
" corrects += (prediction == batch_labels).sum().item()\n",
" totals += prediction.size()[0]\n",
" losses += loss.item()\n",
" loss.backward()\n",
"\n",
" optimizer.step()\n",
" accuracy = corrects / totals\n",
" result.append({\"iter\":iteration,\"loss\":losses,\"accuracy\":accuracy})\n",
" tepoch.set_postfix(loss=losses, accuracy= accuracy)\n",
" iteration += 1"
]
},
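{
"cell_type": "markdown",
"id": "e1a0c9d1",
"metadata": {},
"source": [
"Each `optimizer.step()` above follows eight `loss.backward()` calls, i.e. gradient accumulation with an effective batch size of 16 × 8 = 128. `groupby_index` comes from the local `groupby_index.py`; a sketch of a helper with the behaviour the loop relies on (the real implementation may differ):"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "f2b1d0e2",
"metadata": {},
"outputs": [],
"source": [
"# Sketch only: yield consecutive chunks of n items from an iterable, so one\n",
"# optimizer step sees gradients accumulated over n mini-batches.\n",
"from itertools import islice\n",
"\n",
"def groupby_index_sketch(iterable, n):\n",
"    it = iter(iterable)\n",
"    while True:\n",
"        chunk = list(islice(it, n))\n",
"        if not chunk:\n",
"            return\n",
"        yield chunk"
]
},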
{
"cell_type": "code",
"execution_count": 31,
"id": "81b69931",
"metadata": {},
"outputs": [],
"source": [
"%matplotlib inline\n",
"import numpy as np\n",
"import matplotlib.pyplot as plt"
]
},
{
"cell_type": "code",
"execution_count": 32,
"id": "c3a73c68",
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAYEAAAD4CAYAAAAKA1qZAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8/fFQqAAAACXBIWXMAAAsTAAALEwEAmpwYAABU1UlEQVR4nO2dd7zURNfHf+c2uPTekd6ld1REBQQsKCrFgqKiCIj4CthARBQRHwQLPGAFFeURFRUFRRGxoRQFadJVepcOt837x2x2k2zKJJvdze6d7/3s56ZMZibZzTkzZ86cIcYYJBKJRJI/SYl3BSQSiUQSP6QSkEgkknyMVAISiUSSj5FKQCKRSPIxUglIJBJJPiYtGpmmpKSwzMzMaGQtkUgkScmZM2cYYyzmDfOoKIHMzEycPn06GllLJBJJUkJEZ+NRrjQHSSQSST5GKgGJRCLJx0glIJFIJPkYqQQkEokkHyOVgEQikeRjpBKQSCSSfIxUAhKJRJKP8ZUSWLt/LX7e9XO8qyGRSPzMp58C+/fHuxZJA0VjPYHChQszN5PFaBwBANhYucaBRCIxIDsbyMgA6tUD/vwz3rXxFCI6wxgrHOtyfdMTyGN58a6CRCLxO0qjdfv2+NYjifCNEiBQcHvK8ilxrIkk6ZgxA/jnH+D774Evv4x3bYw5dAiYMiUk5BKJzz8Hfo6xGTcvxo1G5TdkBGPA888D//4b0yp5hS/NQYA0CUk84uhRoHRpbj7YvJkf86Og7dEDWLQIWLkSaNUq3rVxBgXe21g813PngMxMXmasFMGxY0CpUkCdOsCWLeHnlywBOncG+vYF3n/fdTH53hwkkUSFnBz+/8iR+NbDDqUVef58XKvhexRFE0tFriibw4eNzyvfWYL2BHylBCZ3nRzcPpV1Ko41kSQdRPZpRDl7FnjgAeD4ce/yTFQOHbJPs2oVMHmyfToRvBL+jAFPPQVs2mSe5plngPXr7Xs6c+fy/7E2UXmEr5RA/6b9g9szVs2IY00kEgtefRV46SVg4kTv8oylScVLhg61T9O6NTBihDflefV8jh8Hxo4FLrvM+Pz588Do0UCHDvYNiHfe8aZOccJXSqBMoTLB7ePnZCtL4lNOnOD/0zxcjsPLnkosycqKbXleKQGl1W5nflOftys70RR4AF8pATWr962OdxXyB88+G9+WzL//Aj17AgcPus/jnXeACRMir8uZM0CvXsZeILt28VZhly7Avn382LRpkZWXnQ306QNs3Bg69sEHoe3584Fbbw0Jl5Mn+bNSytfz5pvAf/4TWZ30/PQTMHCgNwIuN5f/Z4zn+dNPxun+/JM/l6ws3oNYuDB0TqQex4/z57R/P7B1K9CtG9C9e+i5/fYbMGAA3zZTvko5WVnAt99qjzEG3HUX8Msv2mu+/tq+bn6EMeb5p1ChQswtE76fwPAkGJ6E6zwkDuA/6fiV/9xzvPwRI9znYXUP+/fzc2XL2t/re+/x8336hJ+79dbQ9amp3jy35ct5Hm3aMHbJJeF5pqTw/XPn+P706Xx/0CDj/KLxXSr3mpVlfP766+3LVc6fPcv3z57l+wUKGKdv356f//HH8LyPH7cvb+pUfn7YMMYuvzyUXnlulSuHjpUsaZzHmTOhNMqnWDF+7t9/+X7Rotr7AxjLzTWvlw0ATrMoyGO7j+96Ag91eCjeVZDEg2ibQ0TyTwm8DrHq1ivlEBnXTzFZ+MHM4MX3o5iOlLyUnoGTMr0YfBVZ/9zomau/L7M02dnu6xUnfKcEMlIz4l0F5/z4I/dDP3Mm3jWJHQsWAC1a2L/ICosXA02bhr8k0RZwRvmfOsW/L313Xnm57QSNOs9LL+X/c3P58/j88/D02dn83s0mqumF3fjxwO23m9+Dsp+dDTRpAnz1lfb86dP8/ozMLX/9BVSrxs1beiZMAG67TXtM+X6XLTOuu5o6dbgnkBmKElDqb/Tbyc0Fli/n22PGaM99/TVQsmRon8iZCWbGDG52LFTIOt0ddwDjxoUfP3kyVK4ZiktyAuE7JZCQPPQQn0Tyxx/xrkns6N8f+P330CCpHQMH8uezd6/x+Vj2BFav5t/XqFHaNKI9AfX577/n///9lz+P/v3D0+/bx+994EDjfPQ9gSeeAN5+2zidmj17gHXrgHvu0R5fu5bf38iR4XV57TU+5jF7dvi5xx8H3n03/DgAPPig8XE127Zxjxs9+pa/la+/+vek2OIV9PcJAIMHW9dJ/8w++UQ7oG/0u5s9G5g0yTpfQPYEJCoS1b0vEry613g8s/R0/l//wrrpCSgogsWqJagXOEo5dgrQTGimpmrzUVCUmdF9RPu3qjxbozKjYd5y03iItHyrZyh7At5wUdWLAABZuTF2P3NLorr3idCxY7iJAAi9ADVq8BakmsaN+WQqJ0yaxH3OmzZ1V087DhwIbSueSNnZQNWqvOV9+jRw4438uPrlHjIEaNZMvJycHODvv/lvQm9uMuPnn8XMLQozZ3LTo/K7270bKKyKNqCYU379NdTLIOL3ZSbAjL5jI5S8cnO5V9P8+drz+la2OrzDzJlcQakFpahAbtqUm7KM6mPGSy/xkA5q7r2X99j0VK7MTU127zKRtSdblHsCRNSNiDYT0TYiesTgfEkimk9EfxDRCiK60C5PXyqBn3ZxW+a1718b55o4JBl7Aj/8YG4iALg7nt49c/16/gKKoH5m06Z5b1Iz+k4UIZmdzQXo+PFat1B1C3r6dG5eES0nOztkp37tNe05vYAR/b0YXb9nT6jFD2jHo5Ry9eTlmSsBq+9YXw+Ax/CZMyc8jVFPQGHcOJ7HKVU0ANGBXrPfRaQNMOX6vXvFwz4o4x4xNgcRUSqAaQC6A2gIoB8RNdQlewzAGsZYEwD9Abxol68vlcCgloMAAF9t/8ompU+Q5iD/YlRPM3OQ1TVWwkZxEAR4K9fMiyRSJaCvj1mdUkxea6uegNN6EBk7QigmKivUz13UscCMePbCY28OagNgG2NsB2MsC8BcAD11aRoCWMKrx/4EUJ2Iyltl6ksl0LJSy9gXumcP/0EpcUCc4EclUK0a0KaN9hgRULCg+QsMADt28POffuptfZSWtvoZVa8OPPZYeNoXXuB1EIlEq45JozZ9KHzzTfg1zzzD/6uF0S23hLYXLOC/A9HZooxpW7TKAKaRgJowQWueEaFCBf7/vvvCyzViwwbj4y+8wOPlAHxCllIPfQuYKHwAf926cAF/9mx4GUa9Az2dO4e2P/iA/w5at+blliplf72+rgCfvEXEezTDhzu/3gl9+vD/586FX1+rlvP8QqQR0SrVRz8SXhmA2q1rd+CYmrUAegEAEbUBUA1AFatCfakEzufEIZKi8uK8+abza/2oBP75h4clVlDqZhfxULnG7mX24l7//tv4+JQp/P/Ro/Z5vPCC9Xn1DFwr9HbiWbNCLoF25OWJP4/nngtti15jpLBTUpz7zD/9dGhb8WoCuElMj1VgNat6ibBtW2j7gw/478DKtdQK5d1T3lszU1hikMMYa6X6vKo7b6Sx9
D+iiQBKEtEaAPcD+B2AZffEl0pAPSB8zwIDt7BoEIkgV1+bne0uHPCZM9qu8dmzIUF97pz3tkazFpByXC8As7L4fZ09qzV5OEXkunPnwo8p5errZPVccnKMW6sidUlNFf8e8/KM0545o31WjGldII3u0wx92pMnnZse1EJbHXrCTNlZRUndssXcPfjIEfHfa6STv/S/Y6fv3rFjkZUfW3YDqKrarwJA02VjjJ1gjA1gjDUDHxMoC2CnVaa+VwKv/RZjzR6pEmjUiJtcnFK4sNYkUagQ91Y4fpzPcIx0oRH9fdkpgS+/1LYWy5UDihbl9eqpN0M6QOSlV+K2q+tcqBBw1VXadGXLWocy7tYt3NdclJQUoIqqF21lNsjL06ZVeO89oGvX0H3oYxJ17y5eH/0s12bNgEGDxK8HzO3vHTqEHyMCatc2z6t5c3OTYZkyfB1gEbxw11RPivv1V2fX5+YaT5zzJysB1CGiGkSUAaAvgM/UCYioROAcANwN4HvGmOVkHl8qgS61usS+UK96Alu3uq/D//4XfkwxiUTqNaO/L7PBQ7Ww+/HH0Pbx46HW3cKF7l/eSAYCFy/W7ttNVNO7B+qxugf987FKa6XYli61rkMk6GcKe43ZIipeEunAMGAeiE4Uo9XCfAhjLAfAUABfAdgE4APG2AYiGkRESougAYANRPQnuBeRra+2h7FwvaNFxRYoVqAYTpwXnI0ab8wUyJkzXFApA3uR5K0mO5v7vRu1PvfuNR5c03f59+4FSpQQK88IvT340CEetbFxY+vr8vK4Ddio7iKcOgUUKWJsx3bKjh3m5/RKwMo33MrkBDhvnfqBWJlJzGaQi3LyZOQtefXYmc9hjC0EsFB3bIZqezmAOk7y9GVPAEDsFUAkwcPMBGenTkDFiq6rZJr34MF8kpNRS7hy5dCkJzV162r3mzcPhdP1gnLleByb1TYhwHfs4N4g+pANorRowRVQ1ar2aSNBrwT0vRA1ZcqYnwOAm2+OvD6xxug3FA3WrYvs+r//Bl55JbI8Hn00susTHN8qgZjjhYeP/tpotTCUIGV6F0ql/C++CL/GyHb+2Wfhx4zyc4KdOUyxiy9a5DxvJf9YrMMr4u8ukSQBCaEEhi4UWMLOKxTBt2KFuL3SSIGoQwb89ps3dVPYv5//P3GCu8ZZRWW0suuqZ24q9VX3PETDHqhRXzN/fnjPQBlXEH22ubn8u1DjdrDXCfPmRb8MicQHJIQSmLYywhWcRFAL8l9+Adq2DU0qcnKtQvv2oe2WLY3jnihYtbitbPT16/NJMu+/z/eNXAZFJ6+0b8+Ftro8o7DIdryomqXeq1e4V5Mi/EXcGxnjk6vattUej5WpQiLJBwgpASL6i4jWEdEaInI5q8PnqAW5MtAk6pEjYkqycmW08i4RGahVTDBGglU01DMAbN8e/Wn4TnoCeXnAmjVRrY5Ekt9x0hO4jDHWjDEWocN6FPjtN21LOyuLT/1Xs327tUBRC3JFKOvT6/PdvZv3GpRrzWbAAlxAr13L66EE31JcGPVK4PXXQ9si4Rs2buTeQmZrBYuuv3vkiDbaplf8+Wdo24kScBJZU49IQDSJRAKhNSgB/AWgjOialZGsMaygrDMstN6wEsJLYdQovr9kiXkaPd9/z89ffDFj779vnH7ECH7su+/4fno63+/SJZTe7LNsWWh72LDQ9tGjjJ0/ry3PLA/9vag/1arZ18EPn3Hj+P8KFcTS9+oV/zrLj/w4+bgEPl9jmAFYTESrDYIaAQCI6B4l8FFOvBdWUPy/3Ux2Ub5KI3bu5P8V047SqhUxoain0atnjp47p+0JmJVth1UvxE8ovw3RgeEEXKlJIkkkRJXARYyxFuAz0IYQUUd9AsbYqywQ+CgtLfI5aL/cpfVM+XzL56BxhO1Ht9tfLCKUFy7kgaz27+czddXzBIwmBi1cCHz0Ed/Oy9NOvhIpT60YP/kktP3++9oJMz16mOcxeXLiC8Xx4/l/0YZCpJOJJBKJJUJKgDG2N/D/IID54HGto0rbKlqPkNlrZwMAVu+zmYykxqpVfdVVfGHsK68E+vYNCXXGjEPRquPW5OZqw/o6VQJqHnpIG1rXbDFyABgxIvIp8n5BtCdgN/lMIpFEhK0SIKLCRFRU2QbQFcD6aFdMz4cbPwQApKdYrFyk4MTDRTGjOFn/NDc35KvvBVbuo3oSvSegEG+ToUQiASAWO6g8gPnEBWsagPcYYxbN1eiSlhKhqYkx4D//Ce0r4XL37AmdV/Pkk3ztWzUDB2pD+1q13hWuvtq6TqK4jbvuN9zGopdIJJ5i2xNgfCmzpoFPI8aY4Ayq6OBICRgJ13XrjOPWKKtBMQZcf33o+LhxPGSxGidx4L3GaCUuiUQSf9q2DU3cTCB8GUVUgUBguoVzIu4J2E2eYiw82JpEIpHY8cMPofWrEwhfh424qu5VYcfSUtL4qkj33x9uV9aHvzUaG7AL+7tyZXwXr5ZIJIlJgsoNXyuBPBYeTiEtJY2vqPTKK+HhfceO1e4bmYOyssKP6UnQL1MiiQu33x6+6lt+JEHlRsIpgfTU9FAPQB9uQTnu5stI0C9QIok7s2bxYIPPPefsOitniVgxfbp3eSWoDEk4JZBCqiqPH8+9d4IXBNIrPuhG8fzNPHHUx5991llFJRJJ+DrIdngwqdRXSCXgPbl54ROK2r6umkS2YgX33lFQlMD2wKziF16IYu0kEh9TqZLza665BrjwQu/roka9CtiMGebpYoXbMC1GSCXgPUY9AQDmD1tRAlYTkbz80iUSrxFd/8GOr792fs3NN4cv4OMEkXdr8ODQdvnyfNZ+tOnTx/ycXZ1jUb84k5hKwGj5RAB44w3g99+1IQlKl9bOyL32Ws/qJ5F4jt16xdFGv7ayE0SUgL4BV6qU+/JEsaqXnQnLqYkrAfG1EshlBvFlbH5nh596VKsEjh4F3nvP24pJJF6zYAEPELhwITBNt5Ke1eDl2rXu12vWQ+S9EnjzTetrPv4YGDYM6N8f+OYb92U//DBfo+Ott7RxvQDjRZsGDQJGj+blqvn9d+1+v37AE0+E9q+/nu/rPRETmWjEp/ZiPQHGGOvwRoewdQXoCetY3h+2KMhYrVra4+PHxz/GuPwk5qdiRbF0NWtGVo4e9bmPPrK/buJE7fGNG/n/+vXF6zB3LmM5Oe7r/sIL2uMFCzK2f394WrN71t+3k8/KlaE8srO15264wfp5m9UPYGzSJPM6232HDoHP1xOIC3pzUFoukPeUwIXFi0enQhKJGTVqeJtftWqhbZEBR30axYxxwQXiZRJ5O7jZoAEXj7EmGgO0bgbaE4SEUgKFBeZ55QHhC5HH44co8R9GLsNqNm7U7m/YYP7bWbQI+Oqr0P4HH/CPwsyZkQmOn38OmWbcKIHq1fk6GXPnhqe1UgxOBejSpaFt9bN65RUeWNHIFLN8udja0f36OauLgt6k5cX7/+uv4UvWJgkJpQSY6O9T/0O2Wshdkn9o0cI6LlSDBtr9hg3N03brBnTtGtovVQq46abQftWqwB13uKom
AK5AnEymMhLevXsDJUtqjxUpol3USJ+HEyVQpQrQqVNoXy1s+/QBypUzXjeiXTugaVP7/N97z3qRJTXqsvX3wFjkPbUqVfwxuS0KJJYSELjmpt/OAY8+qj2onlAmyb+kpIgJHzUXXeS+vOrV3V8LAPXq8f/lytmnVQs+Kw+jNhbrQTntBTRpot1XC2IlkFrBgs7y1NOokVg6q9a+055A+fKh7cqV7dN75dYbJxJKCUiSkCFDxNMuWRJ5eW+9xU05op4o06drW8716omZMgDg7ruBl14Cvv3WaS05Tz/NTU4dOvAV8ETo2BFYb7Lm0/jxwKefml/rRAksWxZualKEbfHioXE5dRh2keemrPXRvj3/P3q01sym9OSef168rnol8L//Wadfu5abfxYuFDNJ/fIL/yQovlYC+hnDiTkfT2LJhAniaS+/PPLyChfmppwrrhBLX64c0LNnaP+CC8R6E4pp5f77gcsuc1fXjIyQyal5c/vyAKBlS21LVs3o0dwcZIeI737HjkDRotpjirAdOND4GpHnpvSeKlbk/4sV05rZMjL4f7Upzg69Eqha1Tp9+fK8x9S9u5hiLFOGryUQA4ioGxFtJqJtRPSIwfniRLSAiNYS0QYiGmCXp7+VgG6eQIoc300+IvFL9won3mQtWkSvHpGgmI5EzSfRQBG2kXjnKIPWZs/50kv5fyffmX5MMEHDOxBRKoBpALoDaAigHxHpB66GANjIGGsKoBOAyUSUYZWvD95Ac/TmIJJKwP/cf792386m7rUScLP85o4dwM6d1mkqVOD/hw3THt+7l3+ijSK41GEX1DPnr7oK+O034M47Iy9D4bffnF1vpgR27QIOHBDLo3VrPmFLP66nMGUKXx2wWjXtoLRdvRJU8OtoA2Ab46s9ZgGYC6CnLg0DUJT4esBFABwFYLmgd2IpgTjVQ+IARVgqZFg2QrxXAi1bOr+mVCn7QVxFiOhNCxUrhkwX0UQpX+12qn/WzZt7I+yUe7Qzm5hdp69DlSpig9sKzZqZ/y7S00NB7kRXAFSmcyn4VyGkEdEq1ece3fnKAHap9ncHjql5BUADAHsBrAPwAGPWg6sJpQSkOSgB0He9U1Ot09udjzaiwklZd1pvB3eKkQBy4nro1udd7/5qhL5uToWlooDbtXN2nVs6dgxtGz0XxbQkOv4Tf3IYY61Un1d1542+EP2NXwlgDYBKAJoBeIWIilkV6mslcGUtrUeENAdFgRMn+HKdXqF/GdVrrm7dGp5e3+Lbs8d5JEsRV8yjR8OPHTvGTUFqDh40vv7FF4H9+/lAZSQcPx5+7MMP7a+LpPV65AiwenX48QYN+IQ4xRdfKUNd1pEjoe19+/jvxegeAD5Yu3s3cN117uvqhFtuMT936BCwZQv/PT34oPaezJ6lfnla/7EbgLp7VgW8xa9mAICPA5EotgHYCaC+Vaa+VgJTrpyCWT1nof0/PGREHYP3OF8RjVZz0aLhZoVIMFIChQvzbSOvFb0SqFTJ2aBf0aKh/K3QT5oCgBIlwq81KzstzdzrxggzQaPvSaSlAQUKiOerfr6ivYJSpYyjYRYowCfEWS3uovYUqlCB199KEYr41ceCMmX4/VWqxL8LkWdVokTUqxUhKwHUIaIagcHevgA+06X5B8AVAEBE5QHUA6Br6WjxtRJIT01Hwz1Z+PlN4NlvgJ9sAhImPV7bMp0INRGuuircHKTuCRhhdE9OxgmMegGRePBE016stJDVtmz1QK8VZmMSXqCEWYn2gjJ+QR2XKRKcuKl6AGMsB8BQAF8B2ATgA8bYBiIaRESDAsnGA+hAROsALAHwMGPssFW+vlYCAJB+mDf/mwg6F8QMfbjaKHGwvyoOklpARToTc9Ik3nW3om9f/v/VV7lvvR3Dh4crATdLCDpRAka9mBUrgKws4Px552VHqgTM5gRkZQEffcS3N24EsrN5/aZOdV8vrxTCbbcB584BtWt7k18ssZs/oaA8vw0bvGv8LFrEv9cYwhhbyBiryxirxRh7JnBsBmNsRmB7L2OsK2OsMWPsQsbYu3Z5+lsJHD+OUsu4fdh34/mRCmFR1AJRLQgiXYyjYEF7Aa204gsUEBfmRj0Bp4LVSXojQZiaysu180yywq3XklnPJz09lGdqKn+eGRnOn40bc5AIapNUIgVctOtp6nFierMjJcV5+T7E30qgXz9c8NbHAHw4KByrSU5qIaH2UY+0xSpSf7UwuPtusXx76tyWb78dGDOGbxcsKNZy09ctI8N88NdrgaWUrV672gn33sv/N27sTX0UjMxBbmPWKM9SP+chEVHmpUTSixE1ySUp/lYCW7YEN6PVE2Bu/MqB6Lk26s0JamGvXgXJTgmMHBl+jDG+ohLgTIkRhQt3M1q31gqqzp2BUaP4sfR0PgHJTnDr67Z4sflkLq8nAqWk8DxHj3Z3fa9e/Hqv488bKQG3vcGSJXk+A0wiCvjXjz6cW2/l91K6tPs8pk1LrN6Px/hbCah+jClRUgN7T7l0j4yRfzuZubbZCXGz+inmGpEXPV4vhh9CSfiNRBLMkoTC32+b6odf5UR0ijiRe8bdhWpBJdpKFkEneE9c1iG0ow4RbCco1RNpjPLXX2/l2x0NATRpElCzJt/u3Fl7zurentItLWcWXkChZk1eVjIxfDhw8cWxK++pp6zXVkgEnnuOmyOTeIUwtySMEkiNUlTp8wVctujVguqTT6LTamYMOZVVIQnUYXmtBCVj5r7/Zj0Bo/C60ewJjBwJbN/Ot5VwvcoiLFYrQ40Zo13ZVa9A9GzfbmwaSzTU5qApU4Affohd2WPGcK+aRObGG4GzZ43nS+RzEkcJRGkUPivd5SOIkTlIs5qauky7noDZebOegFVrP9qmCC+iTyY70ZwnIMnXJIwSKFPE44lNBmU4Ipp26wEDeJgCAES6cvr35wNZduXr70tZvEXpCYgogTFj+OpRokv8ueW667g3zSOB8Oh6QWcn+GbM4P8fftjzqvmOWCiB998HLrkkEWbQSjzAxUyeGKIeGPZbC8itEnjiCW5jffJJ82Uv37SYGj17Nv//8svW5eiF+iuv8P9m5iAjJVC/Pl9lKdqULg388Udo3+ma0BddlPwt5Fj2krp2jflsWEn8SJieQOZOm9mtscatOcisJa6gE2ZhPYHQCetyzM67MQeJ4pUgdqoE8hPJruwkMSdhegJ+g6WYO62+1QwYkNLCeFEOJy6aVuk+/piHHJg5k++PHcvjvyvKKTtbm1Zh0iSep3rJPn058+eHR9c044or+OzXcuXEF/mwo3JlYOhQoEsXPugeS08YvzJsGPDnn8CIEfGuSXyYPZuHtpB4Tr5UAr9cXB3tfvwLAJCTZ7nojilrD61DM5Nzd14HDPitSrgSaNvWvCVuw+7S6aiiPlC/PreFK0pAb1o6e5b/b9cuFAsf4F5DiklJjfpZOwkFXLcuX4zdS4hC5q5rr/U270SlWDHgXdswMMmLeqKkxFMSxhzkJXmqbM9ku5sncI5lWycwii1UqJA2Ho8RpUvj9k9uB40LVDKF/3ccNiM
noNyMQih7iZexWCQSSczJd0pgSQ2Apdjn+486ZPqoUcCmTVimikDLjOr266+4V1kkato0TOoAPK+a64V33+V5jRgR8tYBgLfe4qERBg8GJkzA22vfDp0LlMPMqjxrFrBsWfjxiy/mE6neesv8Jp0yZw6v56OP8gikI0e6j7EjkUj8AWNM6AMgFcDvAD63S1uoUCHmCU2bqqcFRfz5pxgYngT7vF2p4LFvGmYapp3ZIrT979l/2ccbP2Z4Emx6K37sp1H9QulV4ElehmbbIB1PYHxcnceW5V8wBrC/y6R780ytMKtnvPOSSPIBAE4zQXns5cdJT+AB8IUMYsM993junqhYVLbAfomyYMu7XDncNv829PqgF4CQWYbFKr5NrL1BIl1DVyKRJBRCkoyIqgC4CsDr0a2OitdeiziLe64Gzqs8ORXB/vjlwLfV+TYBuOLRyuh9o/5qYPSAasAvv2D7se1h50zNM14TUALn8mzGILxgzhzg99+9yWvhQmDlSm/ykmg4l3MOP/wdw7ARkqRGtDk7FcAoAKYO3ER0DxGtIqJVOTnuPG685rVW2n2lTX02A5jSXjlK2F4uDfN0K+sRA56p9je2Fc9FHgvddjRl/7/n/g07xgJlx0Tp3HyzaYz64+eO45/j/4jn1b070KqVfToLTmedxgcbPogoj2RkyBdD0HFWR2w9sjXeVZEkAbZKgIiuBnCQMbbaKh1j7FXGWCvGWKs0N0sKqonSLNVtqvDraiMLg7nJpdH0RsqYCACVOSiwrxlAdsmWI1vw5bYvUfK5knhv3Xuac0p5eTFQAjNWzcDGQxsNzzX+b2NUm+rR2qyCDF44GH0+7INVe1fFtFy/88dBPrvaqNEgkThFRFpfBOBaIuoBoCCAYkT0LmPs1qjVasGCiC7vcCdwuFD48d43hR8DoBHyerJyszQ9AYU8MFx5K7CuHLBXsF7jl43HmEvHhB2v90q94PbXO77W1i03l5cXgRLYfHgzTpw/gdaVW1umu++L+5CWkobsMeGmp10ndrmvgEuUnsfJ8ydjXnYiYNV4kUhEse0JMMYeZYxVYYxVB9AXwLdRVQBAxMHZll8AbC0TfvyYgWIgMn6ZzGSucpwBWFwb2OegJ/DEd08Et/ef2m+YJkwhKeYg8WLCqD+tPtq83kYordvJc35l2opp+G2fwcztBIb8t+K2JIHx5zwBDz1vfq1sn8aoJ7BVYzoKP68/xhjDsr8M/PUDnNX1uU5nnTaui76sQN1iNhCdZAxdNBQtX3W5hKjPserBSiSiODLeM8a+A/BdVGqixqESqPEAcMFxYNms8HPX3Awcn2h9vSJ4aw4DWu7jre6PG4TOawaGmfYahY83fYwb5xm4GAFofB9wSNcLEe7KB170WIwJSBIDimNMram/TEVOXg5GdMinMYySEH/GDnKoBP4qCZwORGPQW+9PGERvUEOMgi2qnaX4R8+OY6Fgasrr9+Oun4DafPvLbV/inT/eMS1jvW4pBKMxBlNyedpoKwHZqpSI8OBXDwKAVAJJhD/NQRG0dETNJup0TgbYVgdWe1ySFhoo7T6nOz7d/KnQ9R3e6IDUp1Lxy+5fzP1t1fXMyw3UMbrIQcbEQ35nEi/wpxKw6QmM7wgsrqk9pgh10ddCHZDNSSv4v62BRoOB76sLX6Jh+e7lAIClO5ei7Cig4kPa8+EDw7ExB/m5J7B632os3bnU8Nwbv72BL7d9GeMaxRc5MCzxEn+ag2zihh8qBBwooj2miDCnwtLMO8j8AmBjOftk53POW57PQx6OGngrqevCGAuuP8CIm5FSzBaZiRA/typHfs0Ximdjw+t494K7Tc8lO35W3JLEwZ89gcces02ypIbxcSMloI7+qbC1NP//Y4PCUXmZHvnmEcvzIuMCDAzZFbjG+bQekBswDRnx7A/PYsCnA4L7O4/txLwN8wRrqxUo175/LYo+m5wxhKb+MhU0jhJ6olU8B4YlyYc/lYANjIDZzcOPAcZKoOttQHGdTN5SBig7EphzaSlnA7WC7PjXemUuISXAGLIqlkWZkcCES6yveezbxzBrzazgfotXW6D3h72F66vOe8GWBTiVdUr42kRi5mq+CM++k/viXBNJLNl+dDseX/J4wveeiKgbEW0mom1EFNbSJKKRRLQm8FlPRLlEZODuEiIhlYAVRgPDWWnGXkKHCwOUkoIjZ494Xo/PNn9meV7kx6gI5iOFAZYC5DLznoAepaVr1XvQ1MdH5qBzOedcL/ZjRyILAf3cEj99Z37n6vevxoQfJ2g8/RINIkoFMA1AdwANAfQjoobqNIyx5xljzRhjzQA8CmAZY8wybHJCK4GfVestMt1/UeIlFMwEuro+eSwvbN8Iq/GH+764T6g+fhKO1aZWQ+EJhR1dM2LxCDy+5PEo1Sj+fPfXdyjybBEs2bEkpgPDs9fMxk///BSz8qJFVm5WvKvgBW0AbGOM7WCMZQGYC6CnRfp+AN63yzQhlYAiri66C0gJRGKwMgdZselw7JZIUCNiDspjeZrWnlmr/sp3rzTN47XfxEJyuzWJbT2yFQM/GxgWbmL70e2uvXYOnj7o+JrJyydjwo8TXJWXCHyz4xsAwM+7fo5puXd8egcufuvimJbphNy83Li37mMY26oyAHUQr92BY2EQUSEA3QB8ZJdpQiqBIMTNJGoSJbyCmdBVC319GnXv4fi545i3YR4Wb1+MZX+bh6swYtTXo7Bo6yLTcp1wy8e34PXfX8fqvdogs7Vfro3uc7qHpa8+tTpqvljTNFqpCLl5uaaxl0QxG1xljKHg0wUxfeX0iPL3GsU8Vig95FLmp95bvHj828dR66Va+Ovfv+JS/h8H/kCxicXCov+6JE0Jxx/43KM7b/SjNfsRXAPgJztTEJDoSsCARAmvINwTMDEHdZrdCb0/7G3ZCzDj+Z+fR4/3emiOmQmUw2cO49aPQ/EC9THs01K4l7HoeMXfx//Gzn93otH0RlixZwUWbV3kWJg98s0jqDi5Ig6dPuToOhFy8nJwPvc8HvjyAc/zNuPLbV+CxhHW7jcPoX42+ywAIDM9U3oHqfh257cAgAOnDgil93ocRfnOFm5d6EV2OUo4/sDnVd353QCqqvarwDyIcV8ImIKABFUCRq39FF2cf69oXK6xxzlyRJWAGrU5aM3+NZ7Wx+zlGPfdOMxZNye432BaA815RQm4iT762JLH0OO9Hnj3j3cdXffZFj7oHo0BfYVYtrI//ZPPNv9pl7ntXXm+yvMG5MCwH1AUcoy+i5UA6hBRDSLKABf0YR4oRFQcwKUAhMIYJKQSMCIY2M3jRlK0vlwz+37YwLCFechLRPPOZbmgcYQ5f3DFkJrC1+90owSULvzuE7sdXxst4tHKdlqmnDHsH5TvIhaNBsZYDoChAL4CX+/9A8bYBiIaRESDVEmvB7CYMWYcqlhHQiqBz+qFH1NeC6/NQesPrvc2wwCuegIOXESd4vRHPHfDXAAqc5CNK+rGQxvDylCEn5eC104Z+bn1LG387tH/hnLzcrUrAkZJcca60cAYW8gYq8sYq8
UYeyZwbAZjbIYqzSzGWF/RPBNOCeR8txS7i4cfj5Y5KFocO3fM8LgmbASYkIuonh//+dHwOI0jzN8037ZcJ4iYg77e/jUaTW+Et9a85aoMPVYv9KPfPBrcPnLGubkoHoJYREAZCZto1fV8znlsO7otKnm7ZdvRbfjz8J/C6dPGp+GOT+8I7kdb+fu5cWFHwikByjMWhLFci9cLzFz9rMxBIhO/dp/YjUveusT0/CsrX7EtVwQlvaIEsvPCl6RUUF5e/QpfSh6ns07j+LnjYdd999d3QnVQowToA4Br515reb1l3nF4qa3K1LRqo9z6LPhMQdR5uY7pwkfxoM7LdcLGo+x4e+3bUapNCCWWVyL34hJPCZg87EOFgUW1gT4m6wgnIuX/Uz64zi4gZg6ye3F/+PuHsGNz18/Fq6v1jggcM4Gz+8RuHDh1wPYleHvt27ZC6+kfnkaJ50pYphGpE6AVpJsOOZ8DEg/h71dvn7M5Zx2ln71mNv7+9+8o1cYY0e8rauYgxHRgOCr4M4qoBZRr3BPISwHefe5mfO+Nv65v6PNhn+C2iDnoi61fWJ43arH3+6ifaXoz4b72wFpUmFwhlM7kJfh257doVamVZZ28JN4tspV7VqJc4XKoVsIgaqENjntj0TZxOKhPVm4W7vj0DlQtVhX/PPiP/QWCnDx/EgdO27t/xnuwPN6/u0hIvJ6AiTkIQNTCLMcSqxdbxBz00OKHbNM4wcrMI0JYaOxI8tJdv+fkHsvyrPKIltBo83obVH+xuqNrnNSFAn+xwImSUZ5rpJP49HR5pwvqvFzH9fVv/e7NOJQZfu3FOSFxpOYlATt3rrkgjHdrwAus7iE7Lxu7ju/CsbPGg8pek8fy8ObvbwqlNRPweSzPtMt8Ptd6zQU9o78drdnv8k4Xw3q8vfZt23kUTWY0MQxS50ZRnco6hd7zemsmLDHG8PiSxx15lzlt2Sdy61OUX/f8GtH1d352p2ZSodfPLBnMQYmhBMqVA4oV49sqJfBR74/wYrcXg/splIJRHUbFunaeYvVjOpN9BhdMvQClJllGhhXGLnTD8z89L9wTMJu4xRgLtpb+OPCH5px+fsBHGz/Cgs0LTMt44/c3AFgrSgaG2z+5Hc1nNjdNA3DzhX6gWl9vUd5Z+w7mbZyHsd+NDR47fv44Jvw4AZfOutT2esfzBCzSbzq0CedyrBdlEsULgckYw9ilY6PmbSRSx0FfDLJN45bgZLEEVsiJoQQYA1L5pCTk5qJacW5vJZDGRJJCKehRp4dRDkmB194ajaY3wubDm03Przu4TjivgQsGGh5XKzWrGbEAcOO8Gy09ehTbsOXAsIOX0SitmxadUX2U8RsnE/wiFSTHzh5Dw+kNcddnd0WUT7A+HrRud53Yhae+fwo95kT3vbT6Tdit8ucFsicQbYg0SqBphaaBw6TxmCFQwo8LWLVyoxFjf98pbxdXOZ9zXhO2N9YtJEd2bAdpj549igunX6hRmi//+jKW71pumF499jDuu3GoOqWqYToljRv09T+dzRsJy/5yFlAwmijPwanpT8ELEy8RRc12nwwm6MTwDtL1BNQvmLqllUIpSTFQY0Y0Zwwb4eZZFnxGu3oPA4vpi2KndNQrphn2BHTHDp4+iLfXvo3SmaWx4dAGPPvjs5h13SwAwLAvhwEA/nvVf8OuVQQ0EeHJZU+K1d1qnoDqnNnzVI57FV7ETdhqM/Oh28ZARmqGawWiQKCoNUakOSiWlC/P/2dmal4wvTko0TXz/zb8z/ScR+FqY4rfegJqRSrSE+g/vz9Gfj0Sq/auMk1j9JtT7lukZ+pVw0U0mFlWbhY6zeqEFXtWAAA++fMT0DjSzEkBgBs+uMFVPT7aaBvCXpj01HTN/tfbv9Z4/Ih8h+rnO27ZOE9/k3JgOFYQAZMmAdOmAVdfrXnB1K2eaHb7/MC8jeILx4ti9UJ4oVCjEfTOql7qwV6jF1O0Psq1wWU6A8pDOW4Xv14pR1/XT/78BCv3rAzu//TPT3hlhfEsbjVKPjl5OcH1I/Tfnejs1Q0HN2DZ38twyVuXYNaaWcFwHr/t+01YQOoXDFI/ayOF6fa9zEjN0Ox3fbcr7vzsTsf5KOXPWTfH0iHALbInEG0YAwoVAgYPBohQplAZAHyBjYJpIfNDoo8HxAOrFowXCjWX5fpKMWtMNg4GhvXCvN4roSiGRi1wxV9ef+/X/+96tHm9TXD/4rcuDikYC0FyLpd7/Dz747OmaURbpUqdsnKzMODTAZpImKItWqMFg6xwKyTTU9LtE9mg/+68NKv66bftlsSQmg01aynj5e4vY3qP6bis+mUY2mZo8HgymINijZceR0aT2XLzcoXXORaBxhE2HNoglNZO8LjppSh5qge/jcIJt329reac2/IUlDUX1G61eoEtap/WN5a8iIkfaWyjQZ8Pwj0L9AtphfcEzFA/Z7NotQpersUhzUGxYr428mXRAkVxX+v7QEQokFYgeHxgi4GoWtzcC0MSzm3zb/Msr5NZ4WutRnMNBDvUL2azGc3Cjhmx54R2FrJ6/MkJymCmOuSBF2GurephJZB2HtuJKcunaNIpqGf5mimQ3Lxc4VhMRg2xXSd2Wa4EN3P1TMP1sO2UgEgPQ1+fez+/1/YaK7JzszH4i8HYd3Jf8PuI5+88UhJDCZQsKZSsaYWmqFKsCq6pe02UK5Q8HD8fHr1TwWkX3ujl98vLsfZA+NKNRsKy9WutDa8XafHZCXG1Eqj9Um1c9OZFlunt0D9vq55A13e74v8W/x8Onzkcdu6X3b+YXqcwZukYNJze0PS8iAIbumiobZqs3CzNZDc3Jl6zHpJXfLH1C/x31X8xZOGQpLA8JIYScMg717+D8ZeNR9daXQEAYy8da3OFxAinXVyrSVPx4MT5E2HH7NZnMFOKli1wQSGjVgLbj20Pc8GMNICckYvo4TOH0Xte76DnTx7LMxWsDOZjAvP/nG943AlqE5rCyfMn8c2Ob4L7Dac1ROYzmRGVE2YO8lhQK8/Xy7hY8SQplUDxgsUxuuNoNCvfDAA0g8cScZz+sPVhIQDg6x1fe1WdiDmXc850MR9RInnZnSzBeS7nHLq80yXsmYoINLVwmvjjRMzbOE87hmGitKzuzW5BF7fP5ZaPb9HEgNp+bDsAoPe83o7yUd9TtHsCwXxBnoynxJukVAISb3D6wzZazMbN2sPR4o5P7tDsqwXXZ5s/A40z9/f3ojXpZExg5Z6V+GbHNxiycIgmjdqz5b4v7tO0ooP5WAhkxqwn70WzRWuU96bDxuMMIu7QjLGg0rArR4Q1+9dg78m9wunP5ZwLuhAnck8gMWYMuyQZ3LfiSSJOTrNCPxFPLXRnr51teI2SJuiDb6AYRRcbz86NLCy3ni1HtqDLO13AxjJN3ZT/vef1DhOmeSzPvCcQiXeQekazRatcTyTKdfba2cYmPxMzmR3NZzbnM5RHi81QXrRtERZtWySU1s/InoAk3+JkIXKrQVfl3JtrrMNu2/YEPGpNnjh/AvM3zTdsTVeZUsU0mJuTeQJWPPPDM
8FtuzGhSOb2KDOe7XAyydJo3EKPV4EH/YJUApJ8i92LO2/DvODsVyslIbqQj50SOJV1Cq+tfs2xMjh85jBe/OVFzXW9PuhlmDaP5WHnvzsNzzEw14ro193Gcf+dzlnQY9Zr0SsXAmHPiT1YsmNJXEwz0hzkcxL5C5JED7vfRe8PQ4OTU3+dCoAHVdPH2FHswnbYKYGnvn8KAFC7VG2kpaQJ1RHgYwMfbvzQ1ZKWXtH5nc6GxzUeNEamNJcm2+zcbO0EMTA0m9kMh88cxtnHna2N7AWyJ+BTksGHVxI93Ly4fx//G9WmuhO2IgvMAKGQ0KKczeZC7+jZo47rpMYrc5AaO3OQrRnO5HxOXk6YAlHmQETa6Dt0+pCns4r9jq0SIKKCRLSCiNYS0QYiGheLinnByItGok+jPhjcenC8qyLxIWph8dEm7yJfmiGyYLobimQUAcB97iNhx7Edjq9ZtXdVcLKZEepnfODUAdR+qbZmlTG3YwI5eTkaBaHvFVjVw47mM5ubrkxnpigT2dog8g2cB3A5Y6wpgGYAuhFRu6jWSmHAAKFknap3wiMXPRJ2vFRmKcy9cS6KFyzudc0kScCxc8fwzPfPhJl34s0171+D139/XTh9agpfayPSwGijl462nQ+gp/VrrdH+jfZhx5XFd9QC89c9v2L7se146deXgsfcrhJ32/zbHJmS9K6k+gFg9brde05qQ4eoKft8Wdw076aw44lsDrIdE2D8m1BW4kgPfKJ7xwUKAA88ADz3HPCm/ULnS29fGtXqSJKT2z+5HQDw/vr341yTcN5e+zYAMZu54srrRWvUbm1mUepPq4/dD+42NAep6+m2J7BgywIMbmXcw3fzHCpOriiUzmot7URF6BsgolQiWgPgIICvGWNhrgBEdA8RrSKiVTk5LicIMQbMmgWcP8/XEJBIYoCRr7lfcLRmss9ao93mdLN1p7ScuAZm3VMwuV+R56Cu19PfP227etn+U/tx1XtX2eabiAgpAcZYLmOsGYAqANoQ0YUGaV5ljLVijLVKS3PpdPT55yETUJSUwBc3fxGVfCWJy64Tu+JdBUu2HtkqlM5vrdF9J/fZ2s/tegJGAe8U/rvqv8FttbK4f9H9tnVT91DGLB1jm77DGx2wcOtC0/N+U8BOcNQXY4z9C+A7AN2iURkcVwXvipISqFy0sm2au5vfHZWyJRI3NJ3RVCjdqG9GRbkmztCv/KegFphWSoAxZqkEzJi1ZpZtGqeBDc3mVijESgETUTci2kxE24gofCCUp+lERGsCjjzL7PIU8Q4qS0QlAtuZADoDcDZ6JIpa8EdJCZQtXNY2Teeaxj7PEkk8OJsTe793EU5lnbI8T0SGQQVFF6Cp83Id95Wzofuc7qBxhI2HNnqSXyx6AkSUCmAagO4AGgLoR0QNdWlKAJgO4FrGWCMA4aPYOkR6AhUBLCWiPwCsBB8T+NxZ9QVJUVUnSkqgUtFKwmlvamj8/ObeMBfdakenMySRqPGzmaHR9EaW5w+ePmi4aNGM1TOE8rdrfUfCD//8AAD4due3nuQXo55AGwDbGGM7GGNZAOYC6KlLczOAjxlj/wTqddAuU1slwBj7gzHWnDHWhDF2IWPsKReVFyMGSsCOzjU7a1onHat1DG4PaT0EvRv1xjX1rvGd/VWSnPhlUR4jInGtXbTV28Br8Z4Y6pGyTlOcawIf/XqblQGoB7B2B46pqQugJBF9R0Sriai/baGR1dljomgO2jFsh+HyhwqplMoXRddNOll2xzKMXDwS/1n+H1QrXg2v9HgFgL9fTknyYDURK5HZcmQL2lVpF/fGlFfleyQPchhjrSzOGwlF/Q2kAWgJ4AoAmQCWE9EvjLEtZpnmGyVQo2QNw+MFUgtgTq85yMnLQd+P+vKidc/a0MPBx910icQPEMj0PRn+1XAM/2q4Z2W5fR/NQog7JTcvsol6guwGoF5EvQoA/QIIuwEcZoydBnCaiL4H0BSAqRLwV+ygOJmDbmh4A0pmhq9jrLQSgguLqOOkq1oQrSpZKW+JJH+SCA2l1ftWm55jjGHPCfPZw5q0YNh+dDsOnrY1wUfCSgB1iKgGEWUA6AvgM12aTwFcQkRpRFQIQFsAxiv3BPCXEoiBd5CeIa2HhB3TeywoP2Z1D6FT9U6uy7yu/nWur5VIJOFEw6z0/M/Po8qUKthyZIvtfIY8lofaL9dGpcnijidOYYzlABgK4Ctwwf4BY2wDEQ0iokGBNJsAfAngDwArALzOGFtvlW++MQcZkT0mG6mUGnbc7AelVg6PXfKY0CQTI+JtB5VIJPYoS3f+9e9ftjZ/xRwUafwmOxhjCwEs1B2bodt/HsDzonnmSyWQkZqBVpVaBWO2A/brsgLankAkKyLJQWWJxFuiYXpyItAT+Z32lxKI0ZiA3RqipuYgj+qUCLZSiSSRmPPHHM/zVOYQKD0CKxJZCfhrTECtBFLiX7Xgwt2C5puvb/taKF0i/2AkEj/ywi8vRC1vkbUmEvmdjr+kVaNuacdYCahb+XVK8enqHS/gE8WGthmKSkUroXej3sbXBsxExQoUEyorkX8wEkl+Q2SxnUR+p/2rBGI8Y1jd2m9aoSn+euAvDGs7DABQp3Qd7Pm/PY5CTqi5osYVmv1o/WB+v/f3qOQrkUisifaAcDTxlxLwgTlI6RFUK1EtojGA448cNz03pmPIq6hN5Tauy1DTqGwjQ08nJ5TOLO1JXaxoUbFF1MuQSGKN7Al4RRzNQemp6QCAwumFPclPbRrSDwRffMHFQdPS/7X7PxwdFdkC4QDww4AfNN5ObnjqsuiFhVK4rUl4QDGJJNGRSsArXnwxtB1jJdCpeieMv2w8Xr3mVcfXinr7lCxYEqM6hMd8j8TdNJh3ZsngWrNuicX8Baf32qxCs+hURCLxkERWAv5yEV2kiiwYYyWQQikY3XF0VPJWhOu8m+bhipra8QEGZikY9z+0HxUmVxAqJ1JzUCxcV50qgRIFS0SnIhKJh4gMHvsVf/UE1PjARVQUszC2io3daJ6B+hqrsYfyRcoL1yNSc1AscKoE3PaSrq13ravrJJL8hn8lbQIpATPWD16PFXevCO6bKQsvzEEAHJmDjAak/WgO6lStk6tyLqt+mavrJJL8hn8lbQIogW33bwubILbn//Zg6/18YfAKRSqgdeXWeKnbS7i8xuVoV6VdWB6MWZuDrOhRp4dm30lPYH6f+WHzGmKyRJ7DxT+GtxvuqpwH2j7g6jqJJL/hX0mbAEqgVqlaYesRVypaCbVL1dYca1y+MZb0X4LM9MzgsQ5VOwTzEBWMH/f+GOvv4wEBr6l7DVpWbKk572RMgEAY1maY5hhjDBWKiI0/2HFgxAHD404VntvBbq9CfEgkyY5/JW2Sv8T3t7kfW4ZuQZvKbUwF4xc3f6HZv77B9WhUrhEOjzyMD3t/GJbe6ZjA+MvHa/YZGF7u/rKjPMwwq0usxgQkEokY/n3DEqAnoMeJTZ2IUKc0D0+hFnTHHj4W3NabexRKFyqNjNSMsB6Ek1azUUvZyzEBr8Y/IvV4kkgk
1vhX0iaQEojU9KAWjE5cIhUb/sMXPQzAuPU98+qZjvIzUwRVi1XF/D7zhfMyeyayJyCR+Av/+hQmkBKIFDslckWNKyzDLWSm8bEGo1azmRB1OkC77r51KF6wuHB6s/yduLwCzpXAXc3vQs96PR1dI5HkZ/wrafORErDjm/7fYFKXSWHHqxbja05XLlYZgHFPwImJp27puqbnnCgAwFyx1SlVB6sGroo4HzNql6qNa+pd4+iaaDG87fB4V0EiscW/kjaBlIDiZaP3Coo2d7e4G5/2/RR3Nb8LgHGruXud7gAQNuCrF65rB63F1XWv9sxN1Kqn0bJSS9NzfuKj3vZx5GNF11pd410FSZLiX0mbQErglia3gI1lKF0o+lE41RARrq13bVCgG7WaqxSrAjaWYUjrIZZ5NSnfxPO6WR1/9/p3PS0vGkQ6UO7lvAs5+U0SLfwraRNICSQCeqHsdExAoVzhcmLl2eR/S5NbXJVvh1pwRxo6QjRGvJk7rJdBxdx+XxKJHf6VtEk+T8AIr4OllS/sbBBWhO3DtguFvjbtCURZmKlb3yULlowor9y8yBYK8dTlNh++D5LY4F8lEIM4Nn5iQb8FWHPvGs/yO/f4Ofzz4D+aYwNbDAxuuzVVFMkogpKZ9sLVC2F/aOShiK6P1BwTaUs+WmE43IwPlClUJgo1MefNa9+MaXkS9/hXCeQlbnxuN1xd92pUK1HNs/wKpBVARmqG5tiL3V40SR3Cq9ar3ZiAHY3KNgoKru/v+D54XG8bL5VZSrOvrn+sbPpmE9qMyncb6VXdK9HfswiRKlSnyPkdiYN/v6l8pgRiQWZ6JsoWKgsg+hFDzXoCoj2Ei6peFNy+pNolQfu+WolkpmVaCrdIW+IXFL9AKJ3ZTO08loeCaQVNr7M6p0c9PuF2FnW14t41MuyIlRJIhPDpfkcqgXyGV7Zlu/ELdTnPdX7ONn3jco3xfJfnDa8HeKwlQKtEUlNSw4SNWvBHoug6VuuIOqXqCKXVC6LpPaYH67Jq4Cr0b9o/eE5d/1k9ZwnXR90TcBtUz4nSiZQutbrEpJwm5Zvguc7PBfdf6vZSTMpNJvyrBPLZmECiUjCtYDBshRq1sFNPQjNSQg9f9DCW37UcIzqMCB4LE+7MemEeI9QKoVHZRpZp9Yh6QQHhLXOljowxNCrXCE90fCLsHMAjzoqi7gkkgqlF6XFGG72ilz0D5/j31yR7AnGhfdX2AOwHH+f0moN2Vdrh1KOnMLHzxLD0RITNQzdjcKvBuK7+dYYC++XuL+PNa9/ExM4TUTijsPZ6XXplkFZ0RTb1NQBQKL2QZVo9jDHhXpNe8Ch1tFtRzgk5eTkR5+H1XBArRBVVl5qR9xj0vUOJM6QSyKeY2csvKH4B2FiGK2tdaXl9jzo9sPyu5cGX7qtbv8KJR04EzxMIdUvXxbSrpmmuU7+wQ9sMxYDmAzTnlYV39AK4VqlawXI/7/d5WF7B+zIZGHY6PqD3DLqlsfm8Bn1IDXVPQI9bc5zGHESpphFmrXir51uuyjbiq1u/wu1Nbzc9L3qfXntgJXtPgIi6EdFmItpGRI8YnO9ERMeJaE3g84RRPmqkEkgyapeqjTub3Wl6XrQV6caeXrRA0VA5+slpgkKh34X9AIS3JGuXqo2DIw7igbYPBHsrdnlGMj7AwISf1YyrZmj2w3oCiLwnoDYHERE+7/c5Hmr/kKM89L2tSFAP3PuJZFYCRJQKYBqA7gAaAuhHRA0Nkv7AGGsW+Dxll69UAknG1vu34o2eb9imsxOKkXrWmHoHCZpwjK4vW7gsiCg4wGnUGjYT/JH2BKzQhwtRei2NyzW2vM5Jr0BfHyJytYTmpdUuFU5bo0QN03NeORhE+jvTm+2SfP2JNgC2McZ2MMayAMwFEHHIXKkEfMgdze5A70a9o5K36MsbqQup2zAVSmRUq2B8hdILYecDO20nJKlNOI57AirhkpGaYendpM/78hqXY9XAVRjWlgcW9EJgGs1ediNAb2hwg3DaHQ/sMD1nNWbiZMDba1flBB8TSCOiVarPPbrzlQHsUu3vDhzT056I1hLRIiKy9YiQSsCHvNXzLfzvxv9FtQw7AdK6cuuolGunDHo16IXFty7GkDbWAe+ql6iOAmkFwo6rhUrP+j2x+p7VALQt6cpF+XtTv0x90/zV6YsVKIaJnSda1kdPy0otQ4H9TAazzZ6FkZujV3GIsnKzhNK90PUFV/kPbzscm4duFk4fcU9Ad32Cx1jKYYy1Un1e1Z03ujn9A/wNQDXGWFMALwP4xK5QWyVARFWJaCkRbSKiDUTkvA/qhnysBKKJ6EvSqXonHBxx0LtyBVvDRIQutbp45gZpdL/KgKZVGXksT3NtkYwiuLv53cZlEKFPoz5C9Xns4scMj6tNR0PbDA07bxTMzk0rWlQJtKncxnHeADeNFckoojlm5S4a8cAw047dJPOYAHjLv6pqvwqAveoEjLETjLFTge2FANKJyDJmiMiblgPgIcZYAwDtAAwxGYzwFqkEooqIAClbuCz2P7QfR0Yd8azcWAdCUwS9usWoCB4rJWDUQjU69vBFD6NJ+SaW96U+9+glj+LBdg9qzneo2kHbQzDISy0s9QPPgLblrpjUjBBVAl7GPbIakGaMoUTBEsIT8+xoX7W9Y3dgL6lVslY0s18JoA4R1SCiDAB9AXymTkBEFSjwAyKiNuAy3vIFtlUCjLF9jLHfAtsnAWyCsR3KW6QSiAoXX3AxAHG/+fJFyjuKVfP7vb9rZnAqxKqbHmYeMHDXVASqehBRb8KoWaJm2LVG9zCx80TbXov6WRMIL1z5AthYZ55LhmMCgeuqFa+GB9uHFIvaS0tPzZI1bcsSrZMoVq1zBoajo446MiHpr7+1ya0A+NhN2UJlMeHyCa7y8oJoTuRjjOUAGArgK3A5/AFjbAMRDSKiQYFkNwJYT0RrAbwEoC+z+TId1ZiIqgNoDuBXg3P3KAMaOTk5Ydc6pmRkYYAlxsy+bjbW3LtGKBKoG5pVaIZRF40yPR9rm61Rq/nS6txDRv3C6t+TyVdODrs2Mz3Tthwj1BE8zVr8ds/FyGxStjA3swxvN1xz3OqdVwSmwl3N78LiWxdblm1ECqUgPSU97LjRfVh57CgDzJH0EMsXKQ82luH86PNITw2vUyxxa0YThTG2kDFWlzFWizH2TODYDMbYjMD2K4yxRoyxpoyxdoyxn+3yFFYCRFQEwEcAhjPGTujPM8ZeVQY00tIitMsNHw7072+bTOKczPRMNK3QNLj/1wN/4cCIA1EvN5pmoJ/v/BktK/IlK/UCUClXLUSVkBBmrbZLLrgEBdMKBlvwtzbmgjNa9mYR00suyw2LAlskowjYWBauBCzy038PBVILaOL8KD1FuzplpmeGlWvGU5eZu6pblTOtxzT8OOBHy7zd9FgiXbdD+V0YKbyp3aZGlHc8EFICRJQOrgDmMMY+jm6VAEyZIlcWixHVSlRzFCcnUqKhDNpXbY9JXSYB4BFHNeXB3BykKIHr61+vEUbli/DFeDLTM3HikRN44co
XwvIAgMGtBge33djQrVr/YzqO0eyXKVQmGBrc7hlWL1HdcV3s6lQ0I9zE1LBs+NCgUd2s3J2thHjjco3RoWoH0/NuYGOZsEnMiCOjjmDHMO46a3SvBVLDPdb8joh3EAF4A8Amxpg7vzFJvifaZqDLa1yOE4+cQOeanbXlUrg5SLGvp1AKssdk48PeH5oKo6IFiob5nhsJkUi9XKwE+9Qrp+Lpy58WzqtikYr44MYPhFaWExlDASDs/WTGkv5LDI9bNUAiNRNFg1KZpYLeT0a/ab/VVwSR5vZFAG4DcLkqHoXzwCUSSZQxGhA1elGDA8MpqUhLSRMezDMTmIAHk+sslORdLe5yFAaageGmRjfhpoY3AdCuKAdwE6DiJhtmPtONgyh2f30Pywyz+7i8xuWGrqKRxjOK1uptIiRCNFcRRLyDfmSMEWOsiSoexcJYVE6SfER7MRs9Vt5BaoFVuVjI4U2016JOF4kwMpp9e1Wdq1znF8w3UCd9+IpqJaqhbeW2zvLy4HtbdscyAHxCIMAHjK0cFJTnu+iWRdg0ZBMOjzyMbfdvi7geehbdssjVdUat/kScrJYcqkzie+LVTTbyDjKaJ1CsQDHMvWEuv8akrl4rMKtn0rZKSEgbjWuIYLQGQ1iawHPpUrOLJlSHvqwaJc3jCKmxKqtB2QZgYxnm9JoDQDzEQ7fa3VC/TH2ULlQ6LGKr0TMxGqtQozfdKeYdIx//zLTMMMcJ5R5TKAXdanfDfa3uCzuXSEglIIkpse6+G/UEqhSrAsD5gu03NORxdy6vfrkmbwAoncmDyP1fu/9zXEe7Z+I0DpNyr1aT4vR5Lr5tMbbev9VUiLWs2BLr71tvWa4oBVIL4J4W92Dp7Us1xx9q/xCeveLZ4L5aESqItLS71OqCP4f8GdzXjztk52aHXbN56GasHLgy7LilpxUIi25ZhOlXTXdUP78hlYAkJsTr5VDKVbf+apWqhX+G/4PHLjEO4WBGx2odwcYyXFjuQtM0dUqLz3x1GmJaGfS+pYn52gZqhGZG24wJqGlQtoFtmSL3QUSYec3MMM+f/3T9D+5teS8AoHiB4oYuuVbLiaqpV6ZeUNn/dOdPmnPqBXoU6paua2ia0oelUI4p95EMSCUgiSlxGxPQCYuqxasKCxTTvGOk2JRy6pSuAzaWBX35zVDuw2g9A32eot5BfhF4Tuqx4u4VWHr70rCItIoSMPJM+qyvJgqD4W/CKty5X56TE6QSkMSEuI8JOFA+dsL9zuZ3onej3hjdcXTwmBvl1rJSS9zd/G68e/27wrGHRFC8cKxarGZ5NivfjOdRONyTR7SVH03MWuVGVCxaEZ2qdwo7np3HzUFKT0Od5zX1rtGk1XtWASHFYGhmS0BzUFKH3JP4j3iNCXhJ0QJFw0J91y1dF4B58Lb1963H8t3LNcfSUtLw2rWveV4/ZU6BegBTj5lynNh5Im5qdFPYesR+EW5euGUqPQHF/dXsN3nmsTMokFYAR85o468pE8L6Nw2PapCIPQGpBCQxId5jAiLKJxJT1YPtH0SLii1wWY3LDM83KtcIjcrZru9hiNNnp8wpmHDFBBAoLF4QYG4mS09ND67zbHaNFcpaDQDw/g3vW65O5gYvhGyv+r3wyspXuKfRcfN0ZrGiCqQVwIlHThgGYfSLsnSCNAdJYkq8xgREZvR2r9MdTcs3xdhLxzouJ4VSTBWACFbCw21QtDKFymDmNTMdTTQzQy181b2EwumhMNEL+i3AzY1vDu73vbCvoYdPJLgdx1lz7xq8dg3vdU3pNgWHRh6ynPmrxkjxGM0kT1RkT0ASExJhTKBEwRJYM2hNlGtkjPJ8lLkKAPdb335su7AJJDMtE2dzzoqVJ/hc9EL2hwE/oEGZkJfQtmHbUHFyRQDA1XWvFio7Ety2tJtWaBoMnJiWkqaJ7CqK4gZshTQHSSQ2xG2eQBzDC4igCLdqJaoFj60YuAJHzx4VzuPgyIOG6w4YlufwuSj103smVShSQbh+0SCSnuXUK6di8MLBaF6xuWf1SURzkFQCkpgQ9zGBGJuhvKBUZilHC/rol3VMZMxa1F62tFtXbm04QWzz0M2uVyeTPQGJxAa/zBPwK7F6PopnjNHCMEb4RbhFOrdDBMXTyw2J2BOQA8OSmDDhCr7knxKrP1aoewJbhm7BuvvWxbR8UVpXag0ArmzVbuh7YV+MaD8CEztPjEl5XpGRmoH3er0X3J/WY1rMyk6UhoRTZE9AEhNubXKroatitFH3BJyEdIg1k6+cjDua3RGzOqanpuP5rs/HpCyv6de4H85kn8HFF1yMemXqxbs6AHjU1y+2fuGbHpMTpBKQJDWJMiaQkZqBlpVaxrsaCcNdLe6KdxU0fNj7Q+w/tT/e1XCFNAdJkppEGxPwK4lo644lBdMKRrSsZzyRSkCS1CRKT0AiiRfSHCRJapRZnYUzCtuklLjluvrXGS7I4gal5ybqtRQLkr0XJJWAJKkpU6gMJnWeFFzSUOKMB9o+gMnLJ1uGSJjfZ75n5RUvUBxPdHwCfS6MbGF7iTgUjW5y4cKF2enTp13UJqBxZdddIvEFjDEwsKRZVN0NR84cQZnny6BUZikcGXXE/gKXENEZxljMu6yyJyCRSEwhoqQ3h9ihmKjcziL2O/5SAl9+CRy3iO0qkUgkMaZUZik8e8WzuKHBDfGuSlTwlzlIIpFI8inxMgflX0OfRCKRJBhE1I2INhPRNiJ6xCJdayLKJaIb7fKUSkAikUgSACJKBTANQHcADQH0I6KGJumeA/CVSL5SCUgkEkli0AbANsbYDsZYFoC5AHoapLsfwEcADopkKpWARCKR+IM0Ilql+tyjO18ZwC7V/u7AsSBEVBnA9QBmCBfqtrYSiUQi8ZQcxlgri/NGvrp6z56pAB5mjOWKRjSVSkAikUgSg90Aqqr2qwDYq0vTCsDcgAIoA6AHEeUwxj4xy1QqAYlEIkkMVgKoQ0Q1AOwB0BfAzeoEjLEayjYRzQLwuZUCAKQSkEgkkoSAMZZDREPBvX5SAbzJGNtARIMC54XHAdREZbIYEeUBOOvy8jQAOR5WJ9HI7/cPyGcAyGeQH+8/kzEWc2edqCiBSCCiVTaDI0lNfr9/QD4DQD6D/H7/sUS6iEokEkk+RioBiUQiycf4UQm8Gu8KxJn8fv+AfAaAfAb5/f5jhu/GBCQSiUQSO/zYE5BIJBJJjJBKQCKRSPIxvlEConGykwEi+ouI1hHRGiJaFThWioi+JqKtgf8lVekfDTyXzUR0Zfxq7h4iepOIDhLRetUxx/dMRC0Dz24bEb1EogFS4ozJ/T9JRHsCv4M1RNRDdS7Z7r8qES0lok1EtIGIHggczze/Ad/CGIv7B3z223YANQFkAFgLoGG86xXF+/0LQBndsUkAHglsPwLgucB2w8DzKACgRuA5pcb7Hlzcc0cALQCsj+SeAawA0B48mNYiAN3jfW8R3P+TAEYYpE3G+68IoEVguyiALYH7zDe/Ab9+/NITEI2Tncz0BDA7sD0bwHWq43MZY+cZYzsBbAN/XgkFY+
x7AEd1hx3dMxFVBFCMMbaccWnwtuoaX2Ny/2Yk4/3vY4z9Ftg+CWATeBjkfPMb8Ct+UQK2cbKTDAZgMRGtVsUML88Y2wfwFwZAucDxZH42Tu+5cmBbfzyRGUpEfwTMRYopJKnvn4iqA2gO4FfI30Dc8YsSEImTnUxcxBhrAb5M3BAi6miRNr89G8D8npPtWfwXQC0AzQDsAzA5cDxp75+IioCvejWcMXbCKqnBsaR4Bn7DL0pAJE520sAY2xv4fxDAfHDzzoFAVxeB/8rScMn8bJze8+7Atv54QsIYO8AYy2WM5QF4DSEzX1LePxGlgyuAOYyxjwOH8/VvwA/4RQkE42QTUQZ4nOzP4lynqEBEhYmoqLINoCuA9eD3e3sg2e0APg1sfwagLxEVCMQRrwM+MJYMOLrngLngJBG1C3iE9Fddk3Aowi/A9eC/AyAJ7z9Q3zcAbGKMvaA6la9/A74g3iPTygdAD3CPge0AHo93faJ4nzXBvR7WAtig3CuA0gCWANga+F9Kdc3jgeeyGQnqCQHgfXCTRzZ4a+4uN/cMvnLS+sC5VxCY9e73j8n9vwNgHYA/wIVexSS+/4vBzTZ/AFgT+PTIT78Bv35k2AiJRCLJx/jFHCSRSCSSOCCVgEQikeRjpBKQSCSSfIxUAhKJRJKPkUpAIpFI8jFSCUgkEkk+RioBiUQiycf8PxeZlA97jOsJAAAAAElFTkSuQmCC\n",
"text/plain": [
"<Figure size 432x288 with 2 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"iters = [item[\"iter\"] for item in result]\n",
"fig, ax1 = plt.subplots()\n",
"ax1.plot(iters,[item[\"loss\"] for item in result],'g')\n",
"ax2 = ax1.twinx()\n",
"ax2.plot(iters,[item[\"accuracy\"] for item in result],'r')\n",
"plt.xlabel(\"iter\")\n",
"plt.show()"
]
},
{
"cell_type": "code",
"execution_count": 91,
"id": "cab7889f",
"metadata": {
"scrolled": true
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"gpu allocated : 1776 MB\n",
"gpu reserved : 1910 MB\n"
]
}
],
"source": [
"torch.cuda.empty_cache()\n",
"print(f\"gpu allocated : {torch.cuda.memory_allocated() // 1024**2} MB\")\n",
"print(f\"gpu reserved : {torch.cuda.memory_reserved() // 1024 ** 2} MB\")"
]
},
{
"cell_type": "code",
"execution_count": 38,
"id": "29ffab84",
"metadata": {},
"outputs": [],
"source": [
"torch.save(model.state_dict(), \"model.zip\")"
]
},
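  {
   "cell_type": "markdown",
   "id": "a7c21e05",
   "metadata": {},
   "source": [
    "If the kernel is restarted later, the fine-tuned weights can be reloaded from `model.zip` instead of retraining. The cell below is only a minimal sketch: it assumes `MyModel` and the pretrained `bert` backbone are constructed exactly as earlier in this notebook, and it binds the result to a new name `restored` so the `model` above is left untouched."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b3d47f12",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch: reload the fine-tuned weights saved by the cell above.\n",
    "# Assumes MyModel and `bert` are built the same way as earlier in this notebook.\n",
    "restored = MyModel(bert)\n",
    "restored.load_state_dict(torch.load(\"model.zip\"))\n",
    "restored = restored.cuda(device)\n",
    "restored.eval()"
   ]
  },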
{
"cell_type": "code",
"execution_count": 39,
"id": "4b9b9579",
"metadata": {},
"outputs": [],
"source": [
"del batch_inputs\n",
"del batch_labels\n",
"del loss\n",
"del optimizer"
]
},
{
"cell_type": "code",
"execution_count": 42,
"id": "fff7a7d0",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"100%|███████████████████████████████████████████████████████████████████████████| 3125/3125 [01:26<00:00, 36.25batch/s]\n"
]
}
],
"source": [
"model.eval()\n",
"collect_list = []\n",
"with torch.no_grad():\n",
" with tqdm(test_loader, unit=\"batch\") as tepoch:\n",
" for batch_i,batch_l in tepoch:\n",
" batch_inputs = {k: v.cuda(device) for k, v in list(batch_i.items())}\n",
" batch_labels = batch_l.cuda(device)\n",
" output = model(**batch_inputs)\n",
" loss = BCELoss(output, batch_labels.double())\n",
" \n",
" prediction = (output > 0).to(device,dtype=torch.int64)\n",
" correct = (prediction == batch_labels).sum().item()\n",
" accuracy = correct / prediction.size()[0]\n",
" \n",
" collect_list.append({\"loss\":loss.item(),\"accuracy\":accuracy, \"batch_size\":batch_labels.size(0),\n",
" \"predict\":prediction.cpu(),\n",
" \"actual\":batch_labels.cpu()})"
]
},
{
"cell_type": "code",
"execution_count": 43,
"id": "4e9a90b5",
"metadata": {},
"outputs": [],
"source": [
"def getConfusionMatrix(predict,actual,attention_mask):\n",
" ret = torch.zeros((2,2),dtype=torch.long)\n",
" for p_s,a_s in zip(predict,actual):\n",
" ret[p_s,a_s] += 1\n",
" return ret"
]
},
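  {
   "cell_type": "markdown",
   "id": "c5e90a21",
   "metadata": {},
   "source": [
    "As an aside, the same 2x2 matrix can be built without the Python loop. The sketch below is an illustrative alternative (the cells that follow keep using `getConfusionMatrix`); it assumes `predict` and `actual` are integer tensors of 0/1 labels, as stored in `collect_list`."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "d6f81b32",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch: loop-free confusion matrix via torch.bincount (illustrative alternative).\n",
    "def getConfusionMatrixFast(predict, actual, num_classes=2):\n",
    "    # encode each (predicted, actual) pair as a single index in 0..num_classes**2-1\n",
    "    idx = predict.view(-1) * num_classes + actual.view(-1)\n",
    "    counts = torch.bincount(idx, minlength=num_classes * num_classes)\n",
    "    return counts.reshape(num_classes, num_classes)  # rows: predicted, cols: actual"
   ]
  },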
{
"cell_type": "code",
"execution_count": 44,
"id": "b7a513c9",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"average_loss : 0.3252932393981423, average_accuracy : 0.86136, size :50000\n"
]
}
],
"source": [
"total_loss = 0\n",
"total_accuracy = 0\n",
"total_size = 0\n",
"confusion = torch.zeros((2,2),dtype=torch.long)\n",
"\n",
"for item in collect_list:\n",
" batch_size = item[\"batch_size\"]\n",
" total_loss += batch_size * item[\"loss\"]\n",
" total_accuracy += batch_size * item[\"accuracy\"]\n",
" total_size += batch_size\n",
" confusion += getConfusionMatrix(item[\"predict\"],item[\"actual\"],item[\"attention_mask\"])\n",
"print(f\"\"\"average_loss : {total_loss/total_size}, average_accuracy : {total_accuracy/total_size}, size :{total_size}\"\"\")"
]
},
{
"cell_type": "code",
"execution_count": 45,
"id": "1ac327de",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor([[21382, 3487],\n",
" [ 3445, 21686]])"
]
},
"execution_count": 45,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"confusion"
]
},
{
"cell_type": "code",
"execution_count": 46,
"id": "3e71d4d2",
"metadata": {},
"outputs": [],
"source": [
"def getF1Score(confusion,c):\n",
" TP = confusion[c,c]\n",
" FP = confusion[c].sum() - TP\n",
" FN = confusion[:,c].sum() - TP\n",
" precision = TP / (TP + FP)\n",
" recall = TP / (TP + FN)\n",
"\n",
" f1Score = (2*precision*recall)/(precision + recall)\n",
" return f1Score"
]
},
{
"cell_type": "code",
"execution_count": 49,
"id": "6756408c",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"f1 score : 0.862197756767273\n"
]
}
],
"source": [
"print(f\"f1 score : {getF1Score(confusion,1)}\")"
]
},
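  {
   "cell_type": "markdown",
   "id": "e7a92c43",
   "metadata": {},
   "source": [
    "F1 for the positive class alone can hide an imbalance between the two classes, so it is worth printing both per-class scores and their macro average, reusing `getF1Score` and `confusion` from the cells above."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "f8b03d54",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Per-class F1 and the macro average, reusing getF1Score and confusion from above.\n",
    "f1_neg = getF1Score(confusion, 0)\n",
    "f1_pos = getF1Score(confusion, 1)\n",
    "print(f\"f1 (label 0, negative) : {f1_neg}\")\n",
    "print(f\"f1 (label 1, positive) : {f1_pos}\")\n",
    "print(f\"macro f1               : {(f1_neg + f1_pos) / 2}\")"
   ]
  },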
{
"cell_type": "code",
"execution_count": 50,
"id": "f28f64e9",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"MyModel(\n",
" (bert): BertModel(\n",
" (embeddings): BertEmbeddings(\n",
" (word_embeddings): Embedding(119547, 768, padding_idx=0)\n",
" (position_embeddings): Embedding(512, 768)\n",
" (token_type_embeddings): Embedding(2, 768)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (encoder): BertEncoder(\n",
" (layer): ModuleList(\n",
" (0): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (1): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (2): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (3): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (4): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (5): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (6): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (7): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (8): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (9): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (10): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (11): BertLayer(\n",
" (attention): BertAttention(\n",
" (self): BertSelfAttention(\n",
" (query): Linear(in_features=768, out_features=768, bias=True)\n",
" (key): Linear(in_features=768, out_features=768, bias=True)\n",
" (value): Linear(in_features=768, out_features=768, bias=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" (output): BertSelfOutput(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" (intermediate): BertIntermediate(\n",
" (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
" )\n",
" (output): BertOutput(\n",
" (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
" (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" )\n",
" )\n",
" )\n",
" )\n",
" (pooler): BertPooler(\n",
" (dense): Linear(in_features=768, out_features=768, bias=True)\n",
" (activation): Tanh()\n",
" )\n",
" )\n",
" (dropout): Dropout(p=0.1, inplace=False)\n",
" (lin1): Linear(in_features=768, out_features=256, bias=True)\n",
" (lin2): Linear(in_features=256, out_features=1, bias=True)\n",
")"
]
},
"execution_count": 50,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"model.eval()"
]
},
{
"cell_type": "markdown",
"id": "2da5789b",
"metadata": {},
"source": [
"한번 테스트해보기"
]
},
{
"cell_type": "code",
"execution_count": 90,
"id": "cc727fd9",
"metadata": {},
"outputs": [
{
"ename": "KeyboardInterrupt",
"evalue": "Interrupted by user",
"output_type": "error",
"traceback": [
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)",
"\u001b[1;32m~\\AppData\\Local\\Temp/ipykernel_10708/4160447663.py\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[1;32m----> 1\u001b[1;33m \u001b[0msen\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0minput\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 2\u001b[0m \u001b[0minputs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mtokenizer\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0msen\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mreturn_tensors\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;34m'pt'\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mpadding\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;34m'longest'\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 3\u001b[0m \u001b[0moutput\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mmodel\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m**\u001b[0m\u001b[1;33m{\u001b[0m\u001b[0mk\u001b[0m\u001b[1;33m:\u001b[0m \u001b[0mv\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mto\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mdevice\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;32mfor\u001b[0m \u001b[0mk\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mv\u001b[0m \u001b[1;32min\u001b[0m \u001b[0minputs\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mitems\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;33m}\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 4\u001b[0m \u001b[0mprob\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mtorch\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0msigmoid\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0moutput\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mitem\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 5\u001b[0m \u001b[0mprint\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m\"긍정적 output :\"\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mprob\u001b[0m \u001b[1;33m*\u001b[0m \u001b[1;36m100\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;34m\"%\"\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
"\u001b[1;32m~\\anaconda3\\envs\\nn\\lib\\site-packages\\ipykernel\\kernelbase.py\u001b[0m in \u001b[0;36mraw_input\u001b[1;34m(self, prompt)\u001b[0m\n\u001b[0;32m 1008\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_parent_ident\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;34m\"shell\"\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 1009\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mget_parent\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m\"shell\"\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 1010\u001b[1;33m \u001b[0mpassword\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;32mFalse\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 1011\u001b[0m )\n\u001b[0;32m 1012\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n",
"\u001b[1;32m~\\anaconda3\\envs\\nn\\lib\\site-packages\\ipykernel\\kernelbase.py\u001b[0m in \u001b[0;36m_input_request\u001b[1;34m(self, prompt, ident, parent, password)\u001b[0m\n\u001b[0;32m 1049\u001b[0m \u001b[1;32mexcept\u001b[0m \u001b[0mKeyboardInterrupt\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 1050\u001b[0m \u001b[1;31m# re-raise KeyboardInterrupt, to truncate traceback\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 1051\u001b[1;33m \u001b[1;32mraise\u001b[0m \u001b[0mKeyboardInterrupt\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m\"Interrupted by user\"\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;32mfrom\u001b[0m \u001b[1;32mNone\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 1052\u001b[0m \u001b[1;32mexcept\u001b[0m \u001b[0mException\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 1053\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mlog\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mwarning\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m\"Invalid Message:\"\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mexc_info\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;32mTrue\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
"\u001b[1;31mKeyboardInterrupt\u001b[0m: Interrupted by user"
]
}
],
"source": [
"sen = input()\n",
"inputs = tokenizer(sen, return_tensors = 'pt', padding='longest')\n",
"output = model(**{k: v.to(device) for k,v in inputs.items() })\n",
"prob = torch.sigmoid(output).item()\n",
"print(\"긍정적 output :\",prob * 100,\"%\")\n",
"print(\"부정적 output :\", (1-prob) * 100,\"%\")"
]
},
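  {
   "cell_type": "markdown",
   "id": "a9c14e65",
   "metadata": {},
   "source": [
    "The cell above blocks on `input()`, which is why it ends in a KeyboardInterrupt. The sketch below runs the same inference on a hard-coded sentence so it can execute unattended; the sentence itself is just an arbitrary sample."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b0d25f76",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Same inference as the interactive cell, but on a fixed sample sentence.\n",
    "sen = \"재미있게 잘 봤습니다\"  # roughly: \"I enjoyed watching it\"\n",
    "inputs = tokenizer(sen, return_tensors='pt', padding='longest')\n",
    "with torch.no_grad():\n",
    "    output = model(**{k: v.to(device) for k, v in inputs.items()})\n",
    "prob = torch.sigmoid(output).item()\n",
    "print(f\"positive: {prob * 100:.1f} %  /  negative: {(1 - prob) * 100:.1f} %\")"
   ]
  },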
{
"cell_type": "markdown",
"id": "2faa8141",
"metadata": {},
"source": [
"```\n",
"5471412\t맘에 들어요~ 0\n",
"```\n",
"라벨이 잘못 붙어있는 것들이 있다. 별점가지고만 긍정, 부정을 매긴 것 같다."
]
},
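  {
   "cell_type": "markdown",
   "id": "c1e36a87",
   "metadata": {},
   "source": [
    "One way to surface more of these suspicious labels is to list the test examples where the model disagrees with the dataset. The sketch below is only illustrative: it assumes the test `DataLoader` iterates `dataTest` in order (no shuffling), so the concatenated predictions line up with the dataset, and it pretends each `dataTest` entry unpacks as a `(text, label)` pair; the real layout returned by `readNsmcDataAll` may differ."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "d2f47b98",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch: list test examples the model disagrees with (candidate mislabels).\n",
    "# Assumes the test DataLoader did not shuffle, so predictions align with dataTest,\n",
    "# and that each dataTest item unpacks as (text, label) -- adjust to the real layout.\n",
    "all_pred = torch.cat([item[\"predict\"].view(-1) for item in collect_list])\n",
    "all_actual = torch.cat([item[\"actual\"].view(-1) for item in collect_list])\n",
    "disagree = (all_pred != all_actual).nonzero(as_tuple=True)[0]\n",
    "print(len(disagree), \"disagreements\")\n",
    "for i in disagree[:20].tolist():\n",
    "    text, label = dataTest[i]  # hypothetical item layout\n",
    "    print(f\"label={label} predicted={all_pred[i].item()} | {text}\")"
   ]
  },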
{
"cell_type": "code",
"execution_count": null,
"id": "b40f071c",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.11"
}
},
"nbformat": 4,
"nbformat_minor": 5
}