├── .gitignore ├── img ├── output.png └── output1.png ├── requirements.txt ├── docs └── Machine_translation.pdf ├── device.py ├── hyper_parameters.py ├── translate.py ├── README.md ├── data_processing.py ├── transformer.py ├── train.py ├── data_preparation.ipynb └── translation.ipynb /.gitignore: -------------------------------------------------------------------------------- 1 | .ipynb_checkpoints 2 | .vscode 3 | .__pycache__ 4 | ./Arab-Acquis -------------------------------------------------------------------------------- /img/output.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Strifee/arabic2english/HEAD/img/output.png -------------------------------------------------------------------------------- /img/output1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Strifee/arabic2english/HEAD/img/output1.png -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | torch>=1.6.0 2 | torchtext==0.10.0 3 | spacy 4 | transformers 5 | nltk 6 | pandas 7 | -------------------------------------------------------------------------------- /docs/Machine_translation.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Strifee/arabic2english/HEAD/docs/Machine_translation.pdf -------------------------------------------------------------------------------- /device.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import os 3 | 4 | os.environ['CUDA_LAUNCH_BLOCKING'] = '1' 5 | 6 | device = torch.device("cuda" if torch.cuda.is_available() else "cpu") 7 | 8 | print(torch.cuda.get_device_name(device)) 9 | -------------------------------------------------------------------------------- 
/hyper_parameters.py: -------------------------------------------------------------------------------- 1 | from train import BATCH_SIZE 2 | 3 | 4 | class Hyperparam: 5 | """Hyper parameters""" 6 | # Training 7 | BATCH_SIZE = 16 8 | learning_rate = 0.0001 9 | num_epochs = 30 10 | 11 | # Model 12 | num_heads = 8 13 | num_encoder_layers = 3 14 | num_decoder_layers = 3 15 | 16 | max_len= 230 17 | dropout = 0.4 18 | embedding_size= 256 19 | -------------------------------------------------------------------------------- /translate.py: -------------------------------------------------------------------------------- 1 | from data_processing import SRC, TRG, engTokenizer 2 | import torch 3 | import device 4 | from train import model 5 | 6 | def translate_sentence(model,sentence,srcField,targetField,srcTokenizer): 7 | model.eval() 8 | processed_sentence = srcField.process([srcTokenizer(sentence)]).to(device) 9 | trg = ["بداية"] 10 | 11 | for _ in range(60): 12 | trg_indecies = [targetField.vocab.stoi[word] for word in trg] 13 | trg_tensor = torch.LongTensor(trg_indecies).unsqueeze(1).to(device) 14 | outputs = model(processed_sentence,trg_tensor) 15 | 16 | if targetField.vocab.itos[outputs.argmax(2)[-1:].item()] == "": 17 | continue 18 | trg.append(targetField.vocab.itos[outputs.argmax(2)[-1:].item()]) 19 | if targetField.vocab.itos[outputs.argmax(2)[-1:].item()] == "نهاية": 20 | break 21 | return " ".join([word for word in trg if word != ""][1:-1]) 22 | 23 | 24 | if __name__ == '__main__': 25 | print("I'm home -> {}",translate_sentence(model,"I'm at home" ,SRC,TRG,engTokenizer)) 26 | print("I'm alone -> {}",translate_sentence(model,"I'm alone" ,SRC,TRG,engTokenizer)) -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | # Arabic2English - Arabic to English Translator 3 | 4 | **This is a PyTorch implementation of an Arabic to English Neural Machine 
Translation built using Transformers architecture ([Attention Is All You Need](https://arxiv.org/pdf/1706.03762.pdf))** 5 | 6 | 7 | # Setup and Requirements 8 | **1. CUDA:** 9 |
10 | install [CUDA](https://developer.nvidia.com/cuda-downloads) before installing the required packages or check if it is already installed 11 |
12 |
 13 | **2. Clone the Translate repo:** 14 | ``` 15 | $ git clone https://github.com/Strifee/arabic2english.git 16 | ``` 17 | **3. Install requirements:** 18 | ``` 19 | pip install -r requirements.txt 20 | ``` 21 | `if you have a problem with the CUDA package, try this:` 22 | ``` 23 | conda install -q pytorch torchvision cudatoolkit=11 -c pytorch-nightly 24 | ``` 25 | 26 | # Data 27 | 28 | **Arabic to English Translation Sentences :** 29 | 30 | [Arabic to English Translation Sentences](https://www.kaggle.com/samirmoustafa/arabic-to-english-translation-sentences) is a dataset for machine translation between English and Arabic. 31 | 32 | # Training 33 | 34 | **1. Clone the Translate repo:** 35 | ``` 36 | $ git clone https://github.com/Strifee/arabic2english.git 37 | ``` 38 | **2. Training** 39 | ``` 40 | $ python train.py 41 | ``` 42 | **3. Regularization** 43 | ### Hyperparameters : 44 | ```python 45 | BATCH_SIZE = 16 46 | learning_rate = 0.0001 47 | num_epochs = 30 48 | 49 | num_heads = 8 50 | num_encoder_layers = 3 51 | num_decoder_layers = 3 52 | 53 | max_len= 230 54 | dropout = 0.4 55 | embedding_size= 256 56 | ``` 57 | ### Before regularization : 58 | ![image](img/output1.png) 59 |
60 | 61 | ### After regularization : 62 | ![image](img/output.png) 63 |
64 | 65 | # Results 66 | ``` 67 | "I'm ready" -> 'أنا مستعد' 68 | "i'm lucky" -> 'انا محظوظ' 69 | "I'm sad" -> 'أنا حزين' 70 | 71 | ``` 72 | 73 | -------------------------------------------------------------------------------- /data_processing.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import torch.nn as nn 3 | import random 4 | import re 5 | import spacy 6 | from torchtext.legacy import data 7 | from spacy.tokenizer import Tokenizer 8 | from spacy.lang.ar import Arabic 9 | 10 | 11 | 12 | random.seed(0) 13 | df = pd.read_csv("data/arabic_english.txt",delimiter="\t",names=["eng","ar"]) 14 | 15 | ''' 16 | First : 17 | python -m spacy download en_core_web_sm 18 | ''' 19 | spacy_eng = spacy.load("en_core_web_sm") 20 | 21 | ar = Arabic() 22 | ar_Tokenizer = Tokenizer(ar.vocab) 23 | 24 | def engTokenizer(text): 25 | return [word.text for word in spacy_eng.tokenizer(text)] 26 | 27 | def arTokenizer(sentence): 28 | return [word.text for word in 29 | ar_Tokenizer(re.sub(r"\s+"," ",re.sub(r"[\.\'\"\n+]"," ",sentence)).strip())] 30 | 31 | SRC = data.Field(tokenize=engTokenizer,batch_first=False,init_token="",eos_token="") 32 | TRG = data.Field(tokenize=arTokenizer,batch_first=False,tokenizer_language="ar",init_token="ببدأ",eos_token="نهها") 33 | 34 | class TextDataset(data.Dataset): 35 | 36 | def __init__(self, df, src_field, target_field, is_test=False, **kwargs): 37 | fields = [('eng', src_field), ('ar',target_field)] 38 | samples = [] 39 | for i, row in df.iterrows(): 40 | eng = row.eng 41 | ar = row.ar 42 | samples.append(data.Example.fromlist([eng, ar], fields)) 43 | 44 | super().__init__(samples, fields, **kwargs) 45 | 46 | def __len__(self): 47 | return len(self.samples) 48 | 49 | def __getitem__(self, idx): 50 | return self.samples[idx] 51 | 52 | torchdataset = TextDataset(df,SRC,TRG) 53 | 54 | train_data, valid_data = torchdataset.split(split_ratio=0.8, random_state = random.seed(0)) 55 | 56 | 
SRC.build_vocab(train_data,min_freq=2) 57 | TRG.build_vocab(train_data,min_freq=2) 58 | 59 | if __name__=='__main__': 60 | print(TRG.vocab.freqs.most_common(50)) 61 | 62 | -------------------------------------------------------------------------------- /transformer.py: -------------------------------------------------------------------------------- 1 | from torch import nn 2 | import torch 3 | from device import device 4 | 5 | class Transformer(nn.Module): 6 | def __init__( 7 | self, 8 | embedding_size, 9 | src_vocab_size, 10 | trg_vocab_size, 11 | src_pad_idx, 12 | num_heads, 13 | num_encoder_layers, 14 | num_decoder_layers, 15 | forward_expansion, 16 | dropout, 17 | max_len, 18 | device, 19 | ): 20 | super(Transformer, self).__init__() 21 | self.src_embeddings = nn.Embedding(src_vocab_size,embedding_size) 22 | self.src_positional_embeddings= nn.Embedding(max_len,embedding_size) 23 | self.trg_embeddings= nn.Embedding(trg_vocab_size,embedding_size) 24 | self.trg_positional_embeddings= nn.Embedding(max_len,embedding_size) 25 | self.device = device 26 | self.transformer = nn.Transformer( 27 | embedding_size, 28 | num_heads, 29 | num_encoder_layers, 30 | num_decoder_layers, 31 | forward_expansion, 32 | dropout, 33 | ) 34 | 35 | self.fc_out = nn.Linear(embedding_size, trg_vocab_size) 36 | self.dropout = nn.Dropout(dropout) 37 | self.src_pad_idx = src_pad_idx 38 | 39 | def make_src_mask(self, src): 40 | src_mask = src.transpose(0,1) == self.src_pad_idx 41 | 42 | return src_mask 43 | 44 | def forward(self,src,trg): 45 | src_seq_length, S = src.shape 46 | trg_seq_length, S = trg.shape 47 | #adding zeros is an easy way 48 | src_positions = ( 49 | torch.arange(0, src_seq_length).unsqueeze(1).expand(src_seq_length, S).to(self.device) 50 | ) 51 | 52 | 53 | trg_positions = ( 54 | torch.arange(0, trg_seq_length).unsqueeze(1).expand(trg_seq_length, S).to(self.device) 55 | ) 56 | 57 | embed_src = self.dropout( 58 | ( self.src_embeddings(src) + 
self.src_positional_embeddings(src_positions) ) 59 | ) 60 | 61 | embed_trg = self.dropout( 62 | ( self.trg_embeddings(trg) + self.trg_positional_embeddings(trg_positions) ) 63 | ) 64 | 65 | src_padding_mask = self.make_src_mask(src) 66 | trg_mask = self.transformer.generate_square_subsequent_mask(trg_seq_length).to(device) 67 | 68 | 69 | out = self.transformer(embed_src,embed_trg, src_key_padding_mask=src_padding_mask,tgt_mask=trg_mask ) 70 | out= self.fc_out(out) 71 | 72 | return out 73 | -------------------------------------------------------------------------------- /train.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import torch 3 | from torch import optim 4 | from torch import nn 5 | from torchtext.legacy import data 6 | from data_processing import SRC,TRG 7 | from transformer import Transformer 8 | from device import device 9 | from data_processing import train_data, valid_data 10 | from hyper_parameters import BATCH_SIZE, embedding_size, src_pad_idx, num_heads, num_encoder_layers, num_decoder_layers, forward_expansion, dropout, max_len, learning_rate, num_epochs 11 | 12 | train_iter, valid_iter = data.BucketIterator.splits( 13 | (train_data,valid_data), 14 | batch_size = BATCH_SIZE, 15 | sort=None, 16 | sort_within_batch=False, 17 | sort_key=lambda x: len(x.eng), 18 | device = device, 19 | shuffle=True 20 | ) 21 | 22 | src_vocab_size = len(SRC.vocab) 23 | print("Size of english vocabulary:",src_vocab_size) 24 | 25 | #No. 
of unique tokens in label 26 | trg_vocab_size =len(TRG.vocab) 27 | print("Size of arabic vocabulary:",trg_vocab_size) 28 | 29 | 30 | model = Transformer( 31 | embedding_size, 32 | src_vocab_size, 33 | trg_vocab_size, 34 | src_pad_idx, 35 | num_heads, 36 | num_encoder_layers, 37 | num_decoder_layers, 38 | forward_expansion, 39 | dropout, 40 | max_len, 41 | device, 42 | ).to(device) 43 | 44 | loss_track = [] 45 | loss_validation_track= [] 46 | 47 | 48 | optimizer = optim.Adam(model.parameters(), lr=learning_rate) 49 | 50 | pad_idx = SRC.vocab.stoi[""] 51 | criterion = nn.CrossEntropyLoss(ignore_index = pad_idx) 52 | for epoch in range(num_epochs): 53 | stepLoss=[] 54 | model.train() 55 | for batch in train_iter: 56 | input_data = batch.eng.to(device) 57 | target = batch.ar.to(device) 58 | 59 | output = model(input_data,target[:-1]) 60 | optimizer.zero_grad() 61 | 62 | output = output.reshape(-1,trg_vocab_size) 63 | target = target[1:].reshape(-1) 64 | 65 | loss = criterion(output,target) 66 | loss.backward() 67 | 68 | optimizer.step() 69 | stepLoss.append(loss.item()) 70 | 71 | loss_track.append(np.mean(stepLoss)) 72 | print(" Epoch {} | Train Cross Entropy Loss: ".format(epoch),np.mean(stepLoss)) 73 | with torch.no_grad(): 74 | stepValidLoss=[] 75 | model.eval() # the evaluation mode for the model (doesn't apply dropout and batchNorm) 76 | for i,batch in enumerate(valid_iter): 77 | input_sentence = batch.eng.to(device) 78 | target = batch.ar.to(device) 79 | optimizer.zero_grad() 80 | output = model(input_sentence,target[:-1]) 81 | output = output.reshape(-1,trg_vocab_size) 82 | target = target[1:].reshape(-1) 83 | loss = criterion(output,target) 84 | 85 | stepValidLoss.append(loss.item()) 86 | 87 | loss_validation_track.append(np.mean(stepValidLoss)) 88 | print(" Epoch {} | Validation Cross Entropy Loss: ".format(epoch),np.mean(stepValidLoss)) -------------------------------------------------------------------------------- /data_preparation.ipynb: 
-------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 24, 6 | "source": [ 7 | "import string\r\n", 8 | "import pandas as pd\r\n", 9 | "import regex as re\r\n", 10 | "import nltk\r\n", 11 | "from unicodedata import normalize\r\n", 12 | "from pickle import load\r\n", 13 | "from pickle import dump\r\n", 14 | "from collections import Counter" 15 | ], 16 | "outputs": [], 17 | "metadata": {} 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": 25, 22 | "source": [ 23 | "# load doc into memory\r\n", 24 | "def load_doc(filename):\r\n", 25 | "\t# open the file as read only\r\n", 26 | "\tfile = open(filename, mode='rt', encoding='utf-8')\r\n", 27 | "\t# read all text\r\n", 28 | "\ttext = file.read()\r\n", 29 | "\t# close the file\r\n", 30 | "\tfile.close()\r\n", 31 | "\treturn text" 32 | ], 33 | "outputs": [], 34 | "metadata": {} 35 | }, 36 | { 37 | "cell_type": "code", 38 | "execution_count": 26, 39 | "source": [ 40 | "# split a loaded document into sentences\r\n", 41 | "def to_sentences(doc):\r\n", 42 | "\treturn doc.strip().split('\\n')" 43 | ], 44 | "outputs": [], 45 | "metadata": {} 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": 27, 50 | "source": [ 51 | "# shortest and longest sentence lengths\r\n", 52 | "def sentence_lengths(sentences):\r\n", 53 | "\tlengths = [len(s.split()) for s in sentences]\r\n", 54 | "\treturn min(lengths), max(lengths)" 55 | ], 56 | "outputs": [], 57 | "metadata": {} 58 | }, 59 | { 60 | "cell_type": "code", 61 | "execution_count": 28, 62 | "source": [ 63 | "# load doc into memory\r\n", 64 | "def load_doc(filename):\r\n", 65 | "\t# open the file as read only\r\n", 66 | "\tfile = open(filename, mode='rt', encoding='utf-8')\r\n", 67 | "\t# read all text\r\n", 68 | "\ttext = file.read()\r\n", 69 | "\t# close the file\r\n", 70 | "\tfile.close()\r\n", 71 | "\treturn text\r\n", 72 | "\r\n", 73 | "# split a loaded document 
into sentences\r\n", 74 | "def to_sentences(doc):\r\n", 75 | "\treturn doc.strip().split('\\n')\r\n", 76 | "\r\n", 77 | "# shortest and longest sentence lengths\r\n", 78 | "def sentence_lengths(sentences):\r\n", 79 | "\tlengths = [len(s.split()) for s in sentences]\r\n", 80 | "\treturn min(lengths), max(lengths)" 81 | ], 82 | "outputs": [], 83 | "metadata": {} 84 | }, 85 | { 86 | "cell_type": "code", 87 | "execution_count": 29, 88 | "source": [ 89 | "# load 1st Arabic data\r\n", 90 | "arabic_filename = 'data/Arab-Acquis/Arabic-Translations/test.en_ref.ar'\r\n", 91 | "arabic_doc = load_doc(arabic_filename)\r\n", 92 | "arabic_sentences = to_sentences(arabic_doc)\r\n", 93 | "minlen1, maxlen1 = sentence_lengths(arabic_sentences)\r\n", 94 | "print('Arabic data: sentences=%d, min=%d, max=%d' % (len(arabic_sentences), minlen1, maxlen1))\r\n", 95 | "\r\n", 96 | "# load 1st English data\r\n", 97 | "english_filename = 'data/Arab-Acquis/JRC-ACQUIS/ac-test.en'\r\n", 98 | "english_doc = load_doc(english_filename)\r\n", 99 | "english_sentences = to_sentences(english_doc)\r\n", 100 | "minlen1, maxlen1 = sentence_lengths(english_sentences)\r\n", 101 | "print('English data: sentences=%d, min=%d, max=%d' % (len(english_sentences), minlen1, maxlen1))" 102 | ], 103 | "outputs": [ 104 | { 105 | "output_type": "stream", 106 | "name": "stdout", 107 | "text": [ 108 | "Arabic data: sentences=4107, min=1, max=246\n", 109 | "English data: sentences=4107, min=1, max=269\n" 110 | ] 111 | } 112 | ], 113 | "metadata": {} 114 | }, 115 | { 116 | "cell_type": "code", 117 | "execution_count": 30, 118 | "source": [ 119 | "arabic_sentences[0]" 120 | ], 121 | "outputs": [ 122 | { 123 | "output_type": "execute_result", 124 | "data": { 125 | "text/plain": [ 126 | "'مجلس الجماعة الاقتصادية الأوروبية'" 127 | ] 128 | }, 129 | "metadata": {}, 130 | "execution_count": 30 131 | } 132 | ], 133 | "metadata": {} 134 | }, 135 | { 136 | "cell_type": "code", 137 | "execution_count": 31, 138 | "source": [ 139 | 
"english_sentences[0]" 140 | ], 141 | "outputs": [ 142 | { 143 | "output_type": "execute_result", 144 | "data": { 145 | "text/plain": [ 146 | "'THE COUNCIL OF THE EUROPEAN ECONOMIC COMMUNITY,'" 147 | ] 148 | }, 149 | "metadata": {}, 150 | "execution_count": 31 151 | } 152 | ], 153 | "metadata": {} 154 | }, 155 | { 156 | "cell_type": "code", 157 | "execution_count": 32, 158 | "source": [ 159 | "import pandas as pd\r\n", 160 | "df1 = pd.read_csv(\"data/arabic_english.txt\",delimiter=\"\\t\",names=[\"eng\",\"ar\"])\r\n", 161 | "df1" 162 | ], 163 | "outputs": [ 164 | { 165 | "output_type": "execute_result", 166 | "data": { 167 | "text/plain": [ 168 | " eng \\\n", 169 | "0 Hi. \n", 170 | "1 Run! \n", 171 | "2 Help! \n", 172 | "3 Jump! \n", 173 | "4 Stop! \n", 174 | "... ... \n", 175 | "24633 rising voices promoting a more linguistically ... \n", 176 | "24634 following last year s successful campaign we i... \n", 177 | "24635 during last year s challenge we also met langu... \n", 178 | "24636 to take part just follow the simple steps outl... \n", 179 | "24637 you will also find links to some free web base... \n", 180 | "\n", 181 | " ar \n", 182 | "0 مرحبًا. \n", 183 | "1 اركض! \n", 184 | "2 النجدة! \n", 185 | "3 اقفز! \n", 186 | "4 قف! \n", 187 | "... ... \n", 188 | "24633 شاركنا تحدي ابداع ميم بلغتك الام تعزيزا للتنوع... \n", 189 | "24634 استكمالا لنجاح حملة العام السابق ندعوكم للمشار... \n", 190 | "24635 تعرفنا خلال تحدي العام الماضي على ابطال لغويين... \n", 191 | "24636 للمشاركة في التحدي اتبع الخطوات الموضحة على ال... \n", 192 | "24637 ستجد ايضا روابط لمجموعة من منصات ابداع الميم ا... \n", 193 | "\n", 194 | "[24638 rows x 2 columns]" 195 | ], 196 | "text/html": [ 197 | "
\n", 198 | "\n", 211 | "\n", 212 | " \n", 213 | " \n", 214 | " \n", 215 | " \n", 216 | " \n", 217 | " \n", 218 | " \n", 219 | " \n", 220 | " \n", 221 | " \n", 222 | " \n", 223 | " \n", 224 | " \n", 225 | " \n", 226 | " \n", 227 | " \n", 228 | " \n", 229 | " \n", 230 | " \n", 231 | " \n", 232 | " \n", 233 | " \n", 234 | " \n", 235 | " \n", 236 | " \n", 237 | " \n", 238 | " \n", 239 | " \n", 240 | " \n", 241 | " \n", 242 | " \n", 243 | " \n", 244 | " \n", 245 | " \n", 246 | " \n", 247 | " \n", 248 | " \n", 249 | " \n", 250 | " \n", 251 | " \n", 252 | " \n", 253 | " \n", 254 | " \n", 255 | " \n", 256 | " \n", 257 | " \n", 258 | " \n", 259 | " \n", 260 | " \n", 261 | " \n", 262 | " \n", 263 | " \n", 264 | " \n", 265 | " \n", 266 | " \n", 267 | " \n", 268 | " \n", 269 | " \n", 270 | " \n", 271 | " \n", 272 | " \n", 273 | " \n", 274 | " \n", 275 | " \n", 276 | "
engar
0Hi.مرحبًا.
1Run!اركض!
2Help!النجدة!
3Jump!اقفز!
4Stop!قف!
.........
24633rising voices promoting a more linguistically ...شاركنا تحدي ابداع ميم بلغتك الام تعزيزا للتنوع...
24634following last year s successful campaign we i...استكمالا لنجاح حملة العام السابق ندعوكم للمشار...
24635during last year s challenge we also met langu...تعرفنا خلال تحدي العام الماضي على ابطال لغويين...
24636to take part just follow the simple steps outl...للمشاركة في التحدي اتبع الخطوات الموضحة على ال...
24637you will also find links to some free web base...ستجد ايضا روابط لمجموعة من منصات ابداع الميم ا...
\n", 277 | "

24638 rows × 2 columns

\n", 278 | "
" 279 | ] 280 | }, 281 | "metadata": {}, 282 | "execution_count": 32 283 | } 284 | ], 285 | "metadata": {} 286 | }, 287 | { 288 | "cell_type": "code", 289 | "execution_count": 33, 290 | "source": [ 291 | "import pandas as pd\r\n", 292 | "df2 = pd.DataFrame(list(zip(english_sentences,arabic_sentences)), columns=['eng','ar'])\r\n", 293 | "df2" 294 | ], 295 | "outputs": [ 296 | { 297 | "output_type": "execute_result", 298 | "data": { 299 | "text/plain": [ 300 | " eng \\\n", 301 | "0 THE COUNCIL OF THE EUROPEAN ECONOMIC COMMUNITY, \n", 302 | "1 Whereas the adoption of a common transport pol... \n", 303 | "2 Article 1 \n", 304 | "3 3. The types of carriage listed in Annex II sh... \n", 305 | "4 Member States shall inform the Commission of t... \n", 306 | "... ... \n", 307 | "4102 Having regard to the request made by Luxembour... \n", 308 | "4103 (2) Such derogations should be granted, at the... \n", 309 | "4104 Article 1 \n", 310 | "4105 (b) France is granted derogations for the prod... \n", 311 | "4106 After expiry of the transitional period, Austr... \n", 312 | "\n", 313 | " ar \n", 314 | "0 مجلس الجماعة الاقتصادية الأوروبية \n", 315 | "1 حيث أن اعتماد سياسة نقل مشتركة تنطوي من بين أم... \n", 316 | "2 المادة 1 \n", 317 | "3 3. لا تخضع أنواع النقل المدرجة في الملحق الثان... \n", 318 | "4 تبلغ الدول الأعضاء المفوضية الأوروبية بالتدابي... \n", 319 | "... ... \n", 320 | "4102 باعتبار الطلب الذي تقدمت به لوكسمبورغ في 25 تم... \n", 321 | "4103 (2) يجب أن يتم منح هذه الاستثناءات إلى النمسا ... \n", 322 | "4104 المادة 1 \n", 323 | "4105 (ب) تمنح فرنسا الاستثناءات للحصول على النتائج ... \n", 324 | "4106 بعد انتهاء الفترة الانتقالية، تقوم النمسا وفرن... \n", 325 | "\n", 326 | "[4107 rows x 2 columns]" 327 | ], 328 | "text/html": [ 329 | "
\n", 330 | "\n", 343 | "\n", 344 | " \n", 345 | " \n", 346 | " \n", 347 | " \n", 348 | " \n", 349 | " \n", 350 | " \n", 351 | " \n", 352 | " \n", 353 | " \n", 354 | " \n", 355 | " \n", 356 | " \n", 357 | " \n", 358 | " \n", 359 | " \n", 360 | " \n", 361 | " \n", 362 | " \n", 363 | " \n", 364 | " \n", 365 | " \n", 366 | " \n", 367 | " \n", 368 | " \n", 369 | " \n", 370 | " \n", 371 | " \n", 372 | " \n", 373 | " \n", 374 | " \n", 375 | " \n", 376 | " \n", 377 | " \n", 378 | " \n", 379 | " \n", 380 | " \n", 381 | " \n", 382 | " \n", 383 | " \n", 384 | " \n", 385 | " \n", 386 | " \n", 387 | " \n", 388 | " \n", 389 | " \n", 390 | " \n", 391 | " \n", 392 | " \n", 393 | " \n", 394 | " \n", 395 | " \n", 396 | " \n", 397 | " \n", 398 | " \n", 399 | " \n", 400 | " \n", 401 | " \n", 402 | " \n", 403 | " \n", 404 | " \n", 405 | " \n", 406 | " \n", 407 | " \n", 408 | "
engar
0THE COUNCIL OF THE EUROPEAN ECONOMIC COMMUNITY,مجلس الجماعة الاقتصادية الأوروبية
1Whereas the adoption of a common transport pol...حيث أن اعتماد سياسة نقل مشتركة تنطوي من بين أم...
2Article 1المادة 1
33. The types of carriage listed in Annex II sh...3. لا تخضع أنواع النقل المدرجة في الملحق الثان...
4Member States shall inform the Commission of t...تبلغ الدول الأعضاء المفوضية الأوروبية بالتدابي...
.........
4102Having regard to the request made by Luxembour...باعتبار الطلب الذي تقدمت به لوكسمبورغ في 25 تم...
4103(2) Such derogations should be granted, at the...(2) يجب أن يتم منح هذه الاستثناءات إلى النمسا ...
4104Article 1المادة 1
4105(b) France is granted derogations for the prod...(ب) تمنح فرنسا الاستثناءات للحصول على النتائج ...
4106After expiry of the transitional period, Austr...بعد انتهاء الفترة الانتقالية، تقوم النمسا وفرن...
\n", 409 | "

4107 rows × 2 columns

\n", 410 | "
" 411 | ] 412 | }, 413 | "metadata": {}, 414 | "execution_count": 33 415 | } 416 | ], 417 | "metadata": {} 418 | }, 419 | { 420 | "cell_type": "code", 421 | "execution_count": 34, 422 | "source": [ 423 | "data = pd.concat([df1, df2], ignore_index=True)\r\n", 424 | "data" 425 | ], 426 | "outputs": [ 427 | { 428 | "output_type": "execute_result", 429 | "data": { 430 | "text/plain": [ 431 | " eng \\\n", 432 | "0 Hi. \n", 433 | "1 Run! \n", 434 | "2 Help! \n", 435 | "3 Jump! \n", 436 | "4 Stop! \n", 437 | "... ... \n", 438 | "28740 Having regard to the request made by Luxembour... \n", 439 | "28741 (2) Such derogations should be granted, at the... \n", 440 | "28742 Article 1 \n", 441 | "28743 (b) France is granted derogations for the prod... \n", 442 | "28744 After expiry of the transitional period, Austr... \n", 443 | "\n", 444 | " ar \n", 445 | "0 مرحبًا. \n", 446 | "1 اركض! \n", 447 | "2 النجدة! \n", 448 | "3 اقفز! \n", 449 | "4 قف! \n", 450 | "... ... \n", 451 | "28740 باعتبار الطلب الذي تقدمت به لوكسمبورغ في 25 تم... \n", 452 | "28741 (2) يجب أن يتم منح هذه الاستثناءات إلى النمسا ... \n", 453 | "28742 المادة 1 \n", 454 | "28743 (ب) تمنح فرنسا الاستثناءات للحصول على النتائج ... \n", 455 | "28744 بعد انتهاء الفترة الانتقالية، تقوم النمسا وفرن... \n", 456 | "\n", 457 | "[28745 rows x 2 columns]" 458 | ], 459 | "text/html": [ 460 | "
\n", 461 | "\n", 474 | "\n", 475 | " \n", 476 | " \n", 477 | " \n", 478 | " \n", 479 | " \n", 480 | " \n", 481 | " \n", 482 | " \n", 483 | " \n", 484 | " \n", 485 | " \n", 486 | " \n", 487 | " \n", 488 | " \n", 489 | " \n", 490 | " \n", 491 | " \n", 492 | " \n", 493 | " \n", 494 | " \n", 495 | " \n", 496 | " \n", 497 | " \n", 498 | " \n", 499 | " \n", 500 | " \n", 501 | " \n", 502 | " \n", 503 | " \n", 504 | " \n", 505 | " \n", 506 | " \n", 507 | " \n", 508 | " \n", 509 | " \n", 510 | " \n", 511 | " \n", 512 | " \n", 513 | " \n", 514 | " \n", 515 | " \n", 516 | " \n", 517 | " \n", 518 | " \n", 519 | " \n", 520 | " \n", 521 | " \n", 522 | " \n", 523 | " \n", 524 | " \n", 525 | " \n", 526 | " \n", 527 | " \n", 528 | " \n", 529 | " \n", 530 | " \n", 531 | " \n", 532 | " \n", 533 | " \n", 534 | " \n", 535 | " \n", 536 | " \n", 537 | " \n", 538 | " \n", 539 | "
engar
0Hi.مرحبًا.
1Run!اركض!
2Help!النجدة!
3Jump!اقفز!
4Stop!قف!
.........
28740Having regard to the request made by Luxembour...باعتبار الطلب الذي تقدمت به لوكسمبورغ في 25 تم...
28741(2) Such derogations should be granted, at the...(2) يجب أن يتم منح هذه الاستثناءات إلى النمسا ...
28742Article 1المادة 1
28743(b) France is granted derogations for the prod...(ب) تمنح فرنسا الاستثناءات للحصول على النتائج ...
28744After expiry of the transitional period, Austr...بعد انتهاء الفترة الانتقالية، تقوم النمسا وفرن...
\n", 540 | "

28745 rows × 2 columns

\n", 541 | "
" 542 | ] 543 | }, 544 | "metadata": {}, 545 | "execution_count": 34 546 | } 547 | ], 548 | "metadata": {} 549 | }, 550 | { 551 | "cell_type": "code", 552 | "execution_count": 36, 553 | "source": [ 554 | "arabic_stopwords = set(nltk.corpus.stopwords.words(\"arabic\"))\r\n", 555 | "arabic_punctuations = '''`÷×؛<>_()*&^%][ـ،/:\"؟.,'{}~¦+|!”…“–ـ'''\r\n", 556 | "\r\n", 557 | "punctuations = arabic_punctuations + string.punctuation\r\n", 558 | "def remove_stopwords(text):\r\n", 559 | " filtered_sentence = [w for w in text.split() if not w in punctuations]\r\n", 560 | " return ' '.join(filtered_sentence)\r\n", 561 | "\r\n", 562 | "def clean_Data(line):\r\n", 563 | " if (isinstance(line, float)):\r\n", 564 | " return None\r\n", 565 | " line.replace('\\n', ' ')\r\n", 566 | " line = ' '.join(line)\r\n", 567 | " translator = str.maketrans('', '', punctuations)\r\n", 568 | " line = line.translate(translator)\r\n", 569 | " line = ' '.join(line)\r\n", 570 | " return line" 571 | ], 572 | "outputs": [], 573 | "metadata": {} 574 | }, 575 | { 576 | "cell_type": "code", 577 | "execution_count": 22, 578 | "source": [ 579 | "data.eng = data.eng.apply(clean_Data)" 580 | ], 581 | "outputs": [], 582 | "metadata": {} 583 | }, 584 | { 585 | "cell_type": "code", 586 | "execution_count": 18, 587 | "source": [ 588 | "data" 589 | ], 590 | "outputs": [ 591 | { 592 | "output_type": "execute_result", 593 | "data": { 594 | "text/plain": [ 595 | " eng \\\n", 596 | "0 H i \n", 597 | "1 R u n \n", 598 | "2 H e l p \n", 599 | "3 J u m p \n", 600 | "4 S t o p \n", 601 | "... ... \n", 602 | "28740 H a v i n g r e g a r ... \n", 603 | "28741 2 S u c h d e r o ... \n", 604 | "28742 A r t i c l e 1 \n", 605 | "28743 b F r a n c e i s ... \n", 606 | "28744 A f t e r e x p i r y ... \n", 607 | "\n", 608 | " ar \n", 609 | "0 مرحبًا. \n", 610 | "1 اركض! \n", 611 | "2 النجدة! \n", 612 | "3 اقفز! \n", 613 | "4 قف! \n", 614 | "... ... \n", 615 | "28740 باعتبار الطلب الذي تقدمت به لوكسمبورغ في 25 تم... 
\n", 616 | "28741 (2) يجب أن يتم منح هذه الاستثناءات إلى النمسا ... \n", 617 | "28742 المادة 1 \n", 618 | "28743 (ب) تمنح فرنسا الاستثناءات للحصول على النتائج ... \n", 619 | "28744 بعد انتهاء الفترة الانتقالية، تقوم النمسا وفرن... \n", 620 | "\n", 621 | "[28745 rows x 2 columns]" 622 | ], 623 | "text/html": [ 624 | "
\n", 625 | "\n", 638 | "\n", 639 | " \n", 640 | " \n", 641 | " \n", 642 | " \n", 643 | " \n", 644 | " \n", 645 | " \n", 646 | " \n", 647 | " \n", 648 | " \n", 649 | " \n", 650 | " \n", 651 | " \n", 652 | " \n", 653 | " \n", 654 | " \n", 655 | " \n", 656 | " \n", 657 | " \n", 658 | " \n", 659 | " \n", 660 | " \n", 661 | " \n", 662 | " \n", 663 | " \n", 664 | " \n", 665 | " \n", 666 | " \n", 667 | " \n", 668 | " \n", 669 | " \n", 670 | " \n", 671 | " \n", 672 | " \n", 673 | " \n", 674 | " \n", 675 | " \n", 676 | " \n", 677 | " \n", 678 | " \n", 679 | " \n", 680 | " \n", 681 | " \n", 682 | " \n", 683 | " \n", 684 | " \n", 685 | " \n", 686 | " \n", 687 | " \n", 688 | " \n", 689 | " \n", 690 | " \n", 691 | " \n", 692 | " \n", 693 | " \n", 694 | " \n", 695 | " \n", 696 | " \n", 697 | " \n", 698 | " \n", 699 | " \n", 700 | " \n", 701 | " \n", 702 | " \n", 703 | "
engar
0H iمرحبًا.
1R u nاركض!
2H e l pالنجدة!
3J u m pاقفز!
4S t o pقف!
.........
28740H a v i n g r e g a r ...باعتبار الطلب الذي تقدمت به لوكسمبورغ في 25 تم...
287412 S u c h d e r o ...(2) يجب أن يتم منح هذه الاستثناءات إلى النمسا ...
28742A r t i c l e 1المادة 1
28743b F r a n c e i s ...(ب) تمنح فرنسا الاستثناءات للحصول على النتائج ...
28744A f t e r e x p i r y ...بعد انتهاء الفترة الانتقالية، تقوم النمسا وفرن...
\n", 704 | "

28745 rows × 2 columns

\n", 705 | "
" 706 | ] 707 | }, 708 | "metadata": {}, 709 | "execution_count": 18 710 | } 711 | ], 712 | "metadata": {} 713 | }, 714 | { 715 | "cell_type": "code", 716 | "execution_count": 21, 717 | "source": [ 718 | "data.to_csv('data/data.txt')" 719 | ], 720 | "outputs": [], 721 | "metadata": {} 722 | }, 723 | { 724 | "cell_type": "code", 725 | "execution_count": 22, 726 | "source": [ 727 | "df = pd.read_csv(\"data/data.txt\")\r\n", 728 | "df = df.drop(df.columns[0], axis=1)\r\n", 729 | "df" 730 | ], 731 | "outputs": [ 732 | { 733 | "output_type": "execute_result", 734 | "data": { 735 | "text/plain": [ 736 | " eng \\\n", 737 | "0 Hi. \n", 738 | "1 Run! \n", 739 | "2 Help! \n", 740 | "3 Jump! \n", 741 | "4 Stop! \n", 742 | "... ... \n", 743 | "28740 Having regard to the request made by Luxembour... \n", 744 | "28741 (2) Such derogations should be granted, at the... \n", 745 | "28742 Article 1 \n", 746 | "28743 (b) France is granted derogations for the prod... \n", 747 | "28744 After expiry of the transitional period, Austr... \n", 748 | "\n", 749 | " ar \n", 750 | "0 مرحبًا. \n", 751 | "1 اركض! \n", 752 | "2 النجدة! \n", 753 | "3 اقفز! \n", 754 | "4 قف! \n", 755 | "... ... \n", 756 | "28740 باعتبار الطلب الذي تقدمت به لوكسمبورغ في 25 تم... \n", 757 | "28741 (2) يجب أن يتم منح هذه الاستثناءات إلى النمسا ... \n", 758 | "28742 المادة 1 \n", 759 | "28743 (ب) تمنح فرنسا الاستثناءات للحصول على النتائج ... \n", 760 | "28744 بعد انتهاء الفترة الانتقالية، تقوم النمسا وفرن... \n", 761 | "\n", 762 | "[28745 rows x 2 columns]" 763 | ], 764 | "text/html": [ 765 | "
\n", 766 | "\n", 779 | "\n", 780 | " \n", 781 | " \n", 782 | " \n", 783 | " \n", 784 | " \n", 785 | " \n", 786 | " \n", 787 | " \n", 788 | " \n", 789 | " \n", 790 | " \n", 791 | " \n", 792 | " \n", 793 | " \n", 794 | " \n", 795 | " \n", 796 | " \n", 797 | " \n", 798 | " \n", 799 | " \n", 800 | " \n", 801 | " \n", 802 | " \n", 803 | " \n", 804 | " \n", 805 | " \n", 806 | " \n", 807 | " \n", 808 | " \n", 809 | " \n", 810 | " \n", 811 | " \n", 812 | " \n", 813 | " \n", 814 | " \n", 815 | " \n", 816 | " \n", 817 | " \n", 818 | " \n", 819 | " \n", 820 | " \n", 821 | " \n", 822 | " \n", 823 | " \n", 824 | " \n", 825 | " \n", 826 | " \n", 827 | " \n", 828 | " \n", 829 | " \n", 830 | " \n", 831 | " \n", 832 | " \n", 833 | " \n", 834 | " \n", 835 | " \n", 836 | " \n", 837 | " \n", 838 | " \n", 839 | " \n", 840 | " \n", 841 | " \n", 842 | " \n", 843 | " \n", 844 | "
engar
0Hi.مرحبًا.
1Run!اركض!
2Help!النجدة!
3Jump!اقفز!
4Stop!قف!
.........
28740Having regard to the request made by Luxembour...باعتبار الطلب الذي تقدمت به لوكسمبورغ في 25 تم...
28741(2) Such derogations should be granted, at the...(2) يجب أن يتم منح هذه الاستثناءات إلى النمسا ...
28742Article 1المادة 1
28743(b) France is granted derogations for the prod...(ب) تمنح فرنسا الاستثناءات للحصول على النتائج ...
28744After expiry of the transitional period, Austr...بعد انتهاء الفترة الانتقالية، تقوم النمسا وفرن...
\n", 845 | "

28745 rows × 2 columns

\n", 846 | "
" 847 | ] 848 | }, 849 | "metadata": {}, 850 | "execution_count": 22 851 | } 852 | ], 853 | "metadata": {} 854 | } 855 | ], 856 | "metadata": { 857 | "orig_nbformat": 4, 858 | "language_info": { 859 | "name": "python", 860 | "version": "3.7.11", 861 | "mimetype": "text/x-python", 862 | "codemirror_mode": { 863 | "name": "ipython", 864 | "version": 3 865 | }, 866 | "pygments_lexer": "ipython3", 867 | "nbconvert_exporter": "python", 868 | "file_extension": ".py" 869 | }, 870 | "kernelspec": { 871 | "name": "python3", 872 | "display_name": "Python 3.7.11 64-bit ('torch': conda)" 873 | }, 874 | "interpreter": { 875 | "hash": "4bb0fe8ced3cf0716ac3718fe834e829af40e8ba0fef1c4cadecb390da29a017" 876 | } 877 | }, 878 | "nbformat": 4, 879 | "nbformat_minor": 2 880 | } -------------------------------------------------------------------------------- /translation.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "source": [ 6 | "# English to Arabic Translation " 7 | ], 8 | "metadata": {} 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "source": [ 13 | "## Imports" 14 | ], 15 | "metadata": {} 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": 2, 20 | "source": [ 21 | "import pandas as pd\r\n", 22 | "import numpy as np\r\n", 23 | "import matplotlib.pyplot as plt \r\n", 24 | "import torch\r\n", 25 | "import random\r\n", 26 | "import re\r\n", 27 | "import os\r\n", 28 | "\r\n", 29 | "import spacy\r\n", 30 | "from spacy.tokenizer import Tokenizer\r\n", 31 | "from spacy.lang.ar import Arabic\r\n", 32 | "\r\n", 33 | "\r\n", 34 | "import torch\r\n", 35 | "import torch.nn as nn\r\n", 36 | "from torch import optim\r\n", 37 | "from torch.utils.tensorboard import SummaryWriter\r\n", 38 | "\r\n", 39 | "from torchtext import data\r\n", 40 | "from torchtext.legacy import data\r\n", 41 | "\r\n", 42 | "\r\n", 43 | "os.environ['CUDA_LAUNCH_BLOCKING'] = '1'" 44 | ], 45 | "outputs": [], 
46 | "metadata": {} 47 | }, 48 | { 49 | "cell_type": "markdown", 50 | "source": [ 51 | "## Data Processing" 52 | ], 53 | "metadata": {} 54 | }, 55 | { 56 | "cell_type": "code", 57 | "execution_count": 3, 58 | "source": [ 59 | "df = pd.read_csv(\"data/arabic_english.txt\",delimiter=\"\\t\",names=[\"eng\",\"ar\"])\r\n", 60 | "df" 61 | ], 62 | "outputs": [ 63 | { 64 | "output_type": "execute_result", 65 | "data": { 66 | "text/html": [ 67 | "
\n", 68 | "\n", 81 | "\n", 82 | " \n", 83 | " \n", 84 | " \n", 85 | " \n", 86 | " \n", 87 | " \n", 88 | " \n", 89 | " \n", 90 | " \n", 91 | " \n", 92 | " \n", 93 | " \n", 94 | " \n", 95 | " \n", 96 | " \n", 97 | " \n", 98 | " \n", 99 | " \n", 100 | " \n", 101 | " \n", 102 | " \n", 103 | " \n", 104 | " \n", 105 | " \n", 106 | " \n", 107 | " \n", 108 | " \n", 109 | " \n", 110 | " \n", 111 | " \n", 112 | " \n", 113 | " \n", 114 | " \n", 115 | " \n", 116 | " \n", 117 | " \n", 118 | " \n", 119 | " \n", 120 | " \n", 121 | " \n", 122 | " \n", 123 | " \n", 124 | " \n", 125 | " \n", 126 | " \n", 127 | " \n", 128 | " \n", 129 | " \n", 130 | " \n", 131 | " \n", 132 | " \n", 133 | " \n", 134 | " \n", 135 | " \n", 136 | " \n", 137 | " \n", 138 | " \n", 139 | " \n", 140 | " \n", 141 | " \n", 142 | " \n", 143 | " \n", 144 | " \n", 145 | " \n", 146 | "
engar
0Hi.مرحبًا.
1Run!اركض!
2Help!النجدة!
3Jump!اقفز!
4Stop!قف!
.........
24633rising voices promoting a more linguistically ...شاركنا تحدي ابداع ميم بلغتك الام تعزيزا للتنوع...
24634following last year s successful campaign we i...استكمالا لنجاح حملة العام السابق ندعوكم للمشار...
24635during last year s challenge we also met langu...تعرفنا خلال تحدي العام الماضي على ابطال لغويين...
24636to take part just follow the simple steps outl...للمشاركة في التحدي اتبع الخطوات الموضحة على ال...
24637you will also find links to some free web base...ستجد ايضا روابط لمجموعة من منصات ابداع الميم ا...
\n", 147 | "

24638 rows × 2 columns

\n", 148 | "
" 149 | ], 150 | "text/plain": [ 151 | " eng \\\n", 152 | "0 Hi. \n", 153 | "1 Run! \n", 154 | "2 Help! \n", 155 | "3 Jump! \n", 156 | "4 Stop! \n", 157 | "... ... \n", 158 | "24633 rising voices promoting a more linguistically ... \n", 159 | "24634 following last year s successful campaign we i... \n", 160 | "24635 during last year s challenge we also met langu... \n", 161 | "24636 to take part just follow the simple steps outl... \n", 162 | "24637 you will also find links to some free web base... \n", 163 | "\n", 164 | " ar \n", 165 | "0 مرحبًا. \n", 166 | "1 اركض! \n", 167 | "2 النجدة! \n", 168 | "3 اقفز! \n", 169 | "4 قف! \n", 170 | "... ... \n", 171 | "24633 شاركنا تحدي ابداع ميم بلغتك الام تعزيزا للتنوع... \n", 172 | "24634 استكمالا لنجاح حملة العام السابق ندعوكم للمشار... \n", 173 | "24635 تعرفنا خلال تحدي العام الماضي على ابطال لغويين... \n", 174 | "24636 للمشاركة في التحدي اتبع الخطوات الموضحة على ال... \n", 175 | "24637 ستجد ايضا روابط لمجموعة من منصات ابداع الميم ا... \n", 176 | "\n", 177 | "[24638 rows x 2 columns]" 178 | ] 179 | }, 180 | "metadata": {}, 181 | "execution_count": 3 182 | } 183 | ], 184 | "metadata": {} 185 | }, 186 | { 187 | "cell_type": "code", 188 | "execution_count": 33, 189 | "source": [ 190 | "!python -m spacy download en_core_web_sm" 191 | ], 192 | "outputs": [ 193 | { 194 | "output_type": "stream", 195 | "name": "stdout", 196 | "text": [ 197 | "Collecting en-core-web-sm==3.1.0\n", 198 | " Downloading https://github.com/explosion/spacy-models/releases/download/en_core_web_sm-3.1.0/en_core_web_sm-3.1.0-py3-none-any.whl (13.6 MB)\n", 199 | "Requirement already satisfied: spacy<3.2.0,>=3.1.0 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from en-core-web-sm==3.1.0) (3.1.2)\n", 200 | "Requirement already satisfied: srsly<3.0.0,>=2.4.1 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (2.4.1)\n", 201 | "Requirement already satisfied: 
thinc<8.1.0,>=8.0.8 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (8.0.8)\n", 202 | "Requirement already satisfied: preshed<3.1.0,>=3.0.2 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (3.0.5)\n", 203 | "Requirement already satisfied: murmurhash<1.1.0,>=0.28.0 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (1.0.5)\n", 204 | "Requirement already satisfied: catalogue<2.1.0,>=2.0.4 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (2.0.5)\n", 205 | "Requirement already satisfied: spacy-legacy<3.1.0,>=3.0.7 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (3.0.8)\n", 206 | "Requirement already satisfied: wasabi<1.1.0,>=0.8.1 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (0.8.2)\n", 207 | "Requirement already satisfied: packaging>=20.0 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (21.0)\n", 208 | "Requirement already satisfied: pathy>=0.3.5 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (0.6.0)\n", 209 | "Requirement already satisfied: requests<3.0.0,>=2.13.0 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (2.26.0)\n", 210 | "Requirement already satisfied: jinja2 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (3.0.1)\n", 211 | "Requirement already satisfied: typing-extensions<4.0.0.0,>=3.7.4 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from 
spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (3.10.0.0)\n", 212 | "Requirement already satisfied: tqdm<5.0.0,>=4.38.0 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (4.62.1)\n", 213 | "Requirement already satisfied: cymem<2.1.0,>=2.0.2 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (2.0.5)\n", 214 | "Requirement already satisfied: numpy>=1.15.0 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (1.21.2)\n", 215 | "Requirement already satisfied: typer<0.4.0,>=0.3.0 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (0.3.2)\n", 216 | "Requirement already satisfied: blis<0.8.0,>=0.4.0 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (0.7.4)\n", 217 | "Requirement already satisfied: pydantic!=1.8,!=1.8.1,<1.9.0,>=1.7.4 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (1.8.2)\n", 218 | "Requirement already satisfied: setuptools in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (52.0.0.post20210125)\n", 219 | "Requirement already satisfied: zipp>=0.5 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from catalogue<2.1.0,>=2.0.4->spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (3.5.0)\n", 220 | "Requirement already satisfied: pyparsing>=2.0.2 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from packaging>=20.0->spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (2.4.7)\n", 221 | "Requirement already satisfied: smart-open<6.0.0,>=5.0.0 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from pathy>=0.3.5->spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (5.2.0)\n", 222 | 
"Requirement already satisfied: certifi>=2017.4.17 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from requests<3.0.0,>=2.13.0->spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (2021.5.30)\n", 223 | "Requirement already satisfied: idna<4,>=2.5 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from requests<3.0.0,>=2.13.0->spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (2.10)\n", 224 | "Requirement already satisfied: charset-normalizer~=2.0.0 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from requests<3.0.0,>=2.13.0->spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (2.0.4)\n", 225 | "Requirement already satisfied: urllib3<1.27,>=1.21.1 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from requests<3.0.0,>=2.13.0->spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (1.26.6)\n", 226 | "Requirement already satisfied: colorama in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from tqdm<5.0.0,>=4.38.0->spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (0.4.4)\n", 227 | "Requirement already satisfied: click<7.2.0,>=7.1.1 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from typer<0.4.0,>=0.3.0->spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (7.1.2)\n", 228 | "Requirement already satisfied: MarkupSafe>=2.0 in c:\\users\\ultrapc\\anaconda3\\envs\\torch\\lib\\site-packages (from jinja2->spacy<3.2.0,>=3.1.0->en-core-web-sm==3.1.0) (2.0.1)\n", 229 | "✔ Download and installation successful\n", 230 | "You can now load the package via spacy.load('en_core_web_sm')\n" 231 | ] 232 | } 233 | ], 234 | "metadata": {} 235 | }, 236 | { 237 | "cell_type": "markdown", 238 | "source": [ 239 | "### tokenizers" 240 | ], 241 | "metadata": {} 242 | }, 243 | { 244 | "cell_type": "code", 245 | "execution_count": 5, 246 | "source": [ 247 | "seed=32\r\n", 248 | "\r\n", 249 | "spacy_eng = spacy.load(\"en_core_web_sm\")\r\n", 250 | "\r\n", 251 | "arab = Arabic()\r\n", 252 | "ar_Tokenizer = Tokenizer(arab.vocab)\r\n", 253 
| "\r\n", 254 | "def engTokenizer(text):\r\n", 255 | " return [word.text for word in spacy_eng.tokenizer(text)] \r\n", 256 | "\r\n", 257 | "def arTokenizer(sentence):\r\n", 258 | " return [word.text for word in \r\n", 259 | " ar_Tokenizer(re.sub(r\"\\s+\",\" \",re.sub(r\"[\\.\\'\\\"\\n+]\",\" \",sentence)).strip())]\r\n", 260 | "\r\n", 261 | "SRC = data.Field(tokenize=engTokenizer,batch_first=False,init_token=\"\",eos_token=\"\")\r\n", 262 | "TRG = data.Field(tokenize=arTokenizer,batch_first=False,tokenizer_language=\"ar\",init_token=\"بداية\",eos_token=\"نهاية\")\r\n", 263 | "\r\n", 264 | "class TextDataset(data.Dataset):\r\n", 265 | "\r\n", 266 | " def __init__(self, df, src_field, target_field, is_test=False, **kwargs):\r\n", 267 | " fields = [('eng', src_field), ('ar',target_field)]\r\n", 268 | " samples = []\r\n", 269 | " for i, row in df.iterrows():\r\n", 270 | " eng = row.eng \r\n", 271 | " ar = row.ar\r\n", 272 | " samples.append(data.Example.fromlist([eng, ar], fields))\r\n", 273 | "\r\n", 274 | " super().__init__(samples, fields, **kwargs)\r\n", 275 | " def __len__(self):\r\n", 276 | " return len(self.samples)\r\n", 277 | " \r\n", 278 | " def __getitem__(self, idx):\r\n", 279 | " return self.samples[idx]\r\n", 280 | "\r\n", 281 | "torchdataset = TextDataset(df,SRC,TRG)\r\n", 282 | "\r\n", 283 | "train_data, valid_data = torchdataset.split(split_ratio=0.8, random_state = random.seed(32))\r\n", 284 | "\r\n", 285 | "SRC.build_vocab(train_data,min_freq=2)\r\n", 286 | "TRG.build_vocab(train_data,min_freq=2)\r\n", 287 | "\r\n", 288 | "print(train_data[1].__dict__)\r\n" 289 | ], 290 | "outputs": [ 291 | { 292 | "output_type": "stream", 293 | "name": "stdout", 294 | "text": [ 295 | "{'eng': ['I', 'was', 'delayed', 'by', 'a', 'traffic', 'jam', '.'], 'ar': ['أخّرني', 'زحام', 'السير']}\n" 296 | ] 297 | } 298 | ], 299 | "metadata": {} 300 | }, 301 | { 302 | "cell_type": "markdown", 303 | "source": [ 304 | "### setting up the device" 305 | ], 306 | "metadata": {} 307 | 
}, 308 | { 309 | "cell_type": "code", 310 | "execution_count": 6, 311 | "source": [ 312 | "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\r\n", 313 | "#device = torch.device(\"cpu\")\r\n", 314 | "print(device)\r\n", 315 | "print(torch.cuda.get_device_name(0))\r\n", 316 | "# full infos\r\n", 317 | "# !nvidia-smi" 318 | ], 319 | "outputs": [ 320 | { 321 | "output_type": "stream", 322 | "name": "stdout", 323 | "text": [ 324 | "cuda\n", 325 | "NVIDIA GeForce RTX 3060 Ti\n" 326 | ] 327 | } 328 | ], 329 | "metadata": {} 330 | }, 331 | { 332 | "cell_type": "markdown", 333 | "source": [ 334 | "## Transformer Class" 335 | ], 336 | "metadata": {} 337 | }, 338 | { 339 | "cell_type": "code", 340 | "execution_count": 7, 341 | "source": [ 342 | "class Transformer(nn.Module):\r\n", 343 | " def __init__(\r\n", 344 | " self,\r\n", 345 | " embedding_size,\r\n", 346 | " src_vocab_size,\r\n", 347 | " trg_vocab_size,\r\n", 348 | " src_pad_idx,\r\n", 349 | " num_heads,\r\n", 350 | " num_encoder_layers,\r\n", 351 | " num_decoder_layers,\r\n", 352 | " max_len,\r\n", 353 | " ):\r\n", 354 | " super(Transformer, self).__init__()\r\n", 355 | " self.src_embeddings = nn.Embedding(src_vocab_size,embedding_size)\r\n", 356 | " self.src_positional_embeddings= nn.Embedding(max_len,embedding_size)\r\n", 357 | " self.trg_embeddings= nn.Embedding(trg_vocab_size,embedding_size)\r\n", 358 | " self.trg_positional_embeddings= nn.Embedding(max_len,embedding_size)\r\n", 359 | " self.device = device\r\n", 360 | " self.transformer = nn.Transformer(\r\n", 361 | " embedding_size,\r\n", 362 | " num_heads,\r\n", 363 | " num_encoder_layers,\r\n", 364 | " num_decoder_layers,\r\n", 365 | " )\r\n", 366 | "\r\n", 367 | " self.fc_out = nn.Linear(embedding_size, trg_vocab_size)\r\n", 368 | " self.dropout = nn.Dropout(dropout)\r\n", 369 | " self.src_pad_idx = src_pad_idx\r\n", 370 | " \r\n", 371 | " def make_src_mask(self, src):\r\n", 372 | " src_mask = src.transpose(0,1) == 
self.src_pad_idx\r\n", 373 | "\r\n", 374 | " return src_mask.to(device)\r\n", 375 | "\r\n", 376 | " def forward(self,src,trg) :\r\n", 377 | " src_seq_length, S = src.shape\r\n", 378 | " trg_seq_length, S = trg.shape\r\n", 379 | " #adding zeros is an easy way\r\n", 380 | " src_positions = (\r\n", 381 | " torch.arange(0, src_seq_length).unsqueeze(1).expand(src_seq_length, S).to(self.device)\r\n", 382 | " )\r\n", 383 | " \r\n", 384 | " \r\n", 385 | " trg_positions = (\r\n", 386 | " torch.arange(0, trg_seq_length).unsqueeze(1).expand(trg_seq_length, S).to(self.device)\r\n", 387 | " )\r\n", 388 | "\r\n", 389 | " embed_src = self.dropout(\r\n", 390 | " ( self.src_embeddings(src) + self.src_positional_embeddings(src_positions) )\r\n", 391 | " )\r\n", 392 | "\r\n", 393 | " embed_trg = self.dropout(\r\n", 394 | " ( self.trg_embeddings(trg) + self.trg_positional_embeddings(trg_positions) )\r\n", 395 | " )\r\n", 396 | " \r\n", 397 | " src_padding_mask = self.make_src_mask(src)\r\n", 398 | " trg_mask = self.transformer.generate_square_subsequent_mask(trg_seq_length).to(device)\r\n", 399 | " \r\n", 400 | " \r\n", 401 | " out = self.transformer(embed_src,embed_trg, src_key_padding_mask=src_padding_mask,tgt_mask=trg_mask )\r\n", 402 | " out= self.fc_out(out)\r\n", 403 | "\r\n", 404 | " return out" 405 | ], 406 | "outputs": [], 407 | "metadata": {} 408 | }, 409 | { 410 | "cell_type": "markdown", 411 | "source": [ 412 | "## Model and Parameters " 413 | ], 414 | "metadata": {} 415 | }, 416 | { 417 | "cell_type": "code", 418 | "execution_count": 8, 419 | "source": [ 420 | "BATCH_SIZE = 16\r\n", 421 | "\r\n", 422 | "train_iter, valid_iter = data.BucketIterator.splits(\r\n", 423 | " (train_data,valid_data), \r\n", 424 | " batch_size = BATCH_SIZE,\r\n", 425 | " sort=None,\r\n", 426 | " sort_within_batch=False,\r\n", 427 | " sort_key=lambda x: len(x.eng),\r\n", 428 | " device=device,\r\n", 429 | " shuffle=True\r\n", 430 | ")" 431 | ], 432 | "outputs": [], 433 | "metadata": {} 434 | }, 
435 | { 436 | "cell_type": "code", 437 | "execution_count": 14, 438 | "source": [ 439 | "load_model = False\r\n", 440 | "save_model = True\r\n", 441 | "\r\n", 442 | "num_epochs = 30\r\n", 443 | "learning_rate = 0.0001\r\n", 444 | "\r\n", 445 | "num_heads = 8\r\n", 446 | "num_encoder_layers = 3\r\n", 447 | "num_decoder_layers = 3\r\n", 448 | "\r\n", 449 | "max_len= 230\r\n", 450 | "dropout = 0.4\r\n", 451 | "embedding_size= 256\r\n", 452 | "src_pad_idx = SRC.vocab.stoi[\"\"]\r\n", 453 | "\r\n", 454 | "\r\n", 455 | "src_vocab_size = len(SRC.vocab)\r\n", 456 | "print(\"Size of english vocabulary:\",src_vocab_size)\r\n", 457 | "\r\n", 458 | "trg_vocab_size =len(TRG.vocab)\r\n", 459 | "print(\"Size of arabic vocabulary:\",trg_vocab_size)\r\n", 460 | "\r\n", 461 | "\r\n", 462 | "model = Transformer( \r\n", 463 | " embedding_size,\r\n", 464 | " src_vocab_size,\r\n", 465 | " trg_vocab_size,\r\n", 466 | " src_pad_idx,\r\n", 467 | " num_heads,\r\n", 468 | " num_encoder_layers,\r\n", 469 | " num_decoder_layers,\r\n", 470 | " max_len,\r\n", 471 | ").to(device)\r\n", 472 | "\r\n" 473 | ], 474 | "outputs": [ 475 | { 476 | "output_type": "stream", 477 | "name": "stdout", 478 | "text": [ 479 | "Size of english vocabulary: 12812\n", 480 | "Size of arabic vocabulary: 22067\n" 481 | ] 482 | } 483 | ], 484 | "metadata": {} 485 | }, 486 | { 487 | "cell_type": "code", 488 | "execution_count": 15, 489 | "source": [ 490 | "print(model)" 491 | ], 492 | "outputs": [ 493 | { 494 | "output_type": "stream", 495 | "name": "stdout", 496 | "text": [ 497 | "Transformer(\n", 498 | " (src_embeddings): Embedding(12812, 256)\n", 499 | " (src_positional_embeddings): Embedding(230, 256)\n", 500 | " (trg_embeddings): Embedding(22067, 256)\n", 501 | " (trg_positional_embeddings): Embedding(230, 256)\n", 502 | " (transformer): Transformer(\n", 503 | " (encoder): TransformerEncoder(\n", 504 | " (layers): ModuleList(\n", 505 | " (0): TransformerEncoderLayer(\n", 506 | " (self_attn): MultiheadAttention(\n", 
507 | " (out_proj): NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True)\n", 508 | " )\n", 509 | " (linear1): Linear(in_features=256, out_features=2048, bias=True)\n", 510 | " (dropout): Dropout(p=0.1, inplace=False)\n", 511 | " (linear2): Linear(in_features=2048, out_features=256, bias=True)\n", 512 | " (norm1): LayerNorm((256,), eps=1e-05, elementwise_affine=True)\n", 513 | " (norm2): LayerNorm((256,), eps=1e-05, elementwise_affine=True)\n", 514 | " (dropout1): Dropout(p=0.1, inplace=False)\n", 515 | " (dropout2): Dropout(p=0.1, inplace=False)\n", 516 | " )\n", 517 | " (1): TransformerEncoderLayer(\n", 518 | " (self_attn): MultiheadAttention(\n", 519 | " (out_proj): NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True)\n", 520 | " )\n", 521 | " (linear1): Linear(in_features=256, out_features=2048, bias=True)\n", 522 | " (dropout): Dropout(p=0.1, inplace=False)\n", 523 | " (linear2): Linear(in_features=2048, out_features=256, bias=True)\n", 524 | " (norm1): LayerNorm((256,), eps=1e-05, elementwise_affine=True)\n", 525 | " (norm2): LayerNorm((256,), eps=1e-05, elementwise_affine=True)\n", 526 | " (dropout1): Dropout(p=0.1, inplace=False)\n", 527 | " (dropout2): Dropout(p=0.1, inplace=False)\n", 528 | " )\n", 529 | " (2): TransformerEncoderLayer(\n", 530 | " (self_attn): MultiheadAttention(\n", 531 | " (out_proj): NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True)\n", 532 | " )\n", 533 | " (linear1): Linear(in_features=256, out_features=2048, bias=True)\n", 534 | " (dropout): Dropout(p=0.1, inplace=False)\n", 535 | " (linear2): Linear(in_features=2048, out_features=256, bias=True)\n", 536 | " (norm1): LayerNorm((256,), eps=1e-05, elementwise_affine=True)\n", 537 | " (norm2): LayerNorm((256,), eps=1e-05, elementwise_affine=True)\n", 538 | " (dropout1): Dropout(p=0.1, inplace=False)\n", 539 | " (dropout2): Dropout(p=0.1, inplace=False)\n", 540 | " )\n", 541 | " )\n", 542 | " (norm): 
LayerNorm((256,), eps=1e-05, elementwise_affine=True)\n", 543 | " )\n", 544 | " (decoder): TransformerDecoder(\n", 545 | " (layers): ModuleList(\n", 546 | " (0): TransformerDecoderLayer(\n", 547 | " (self_attn): MultiheadAttention(\n", 548 | " (out_proj): NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True)\n", 549 | " )\n", 550 | " (multihead_attn): MultiheadAttention(\n", 551 | " (out_proj): NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True)\n", 552 | " )\n", 553 | " (linear1): Linear(in_features=256, out_features=2048, bias=True)\n", 554 | " (dropout): Dropout(p=0.1, inplace=False)\n", 555 | " (linear2): Linear(in_features=2048, out_features=256, bias=True)\n", 556 | " (norm1): LayerNorm((256,), eps=1e-05, elementwise_affine=True)\n", 557 | " (norm2): LayerNorm((256,), eps=1e-05, elementwise_affine=True)\n", 558 | " (norm3): LayerNorm((256,), eps=1e-05, elementwise_affine=True)\n", 559 | " (dropout1): Dropout(p=0.1, inplace=False)\n", 560 | " (dropout2): Dropout(p=0.1, inplace=False)\n", 561 | " (dropout3): Dropout(p=0.1, inplace=False)\n", 562 | " )\n", 563 | " (1): TransformerDecoderLayer(\n", 564 | " (self_attn): MultiheadAttention(\n", 565 | " (out_proj): NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True)\n", 566 | " )\n", 567 | " (multihead_attn): MultiheadAttention(\n", 568 | " (out_proj): NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True)\n", 569 | " )\n", 570 | " (linear1): Linear(in_features=256, out_features=2048, bias=True)\n", 571 | " (dropout): Dropout(p=0.1, inplace=False)\n", 572 | " (linear2): Linear(in_features=2048, out_features=256, bias=True)\n", 573 | " (norm1): LayerNorm((256,), eps=1e-05, elementwise_affine=True)\n", 574 | " (norm2): LayerNorm((256,), eps=1e-05, elementwise_affine=True)\n", 575 | " (norm3): LayerNorm((256,), eps=1e-05, elementwise_affine=True)\n", 576 | " (dropout1): Dropout(p=0.1, inplace=False)\n", 577 | " 
(dropout2): Dropout(p=0.1, inplace=False)\n", 578 | " (dropout3): Dropout(p=0.1, inplace=False)\n", 579 | " )\n", 580 | " (2): TransformerDecoderLayer(\n", 581 | " (self_attn): MultiheadAttention(\n", 582 | " (out_proj): NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True)\n", 583 | " )\n", 584 | " (multihead_attn): MultiheadAttention(\n", 585 | " (out_proj): NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True)\n", 586 | " )\n", 587 | " (linear1): Linear(in_features=256, out_features=2048, bias=True)\n", 588 | " (dropout): Dropout(p=0.1, inplace=False)\n", 589 | " (linear2): Linear(in_features=2048, out_features=256, bias=True)\n", 590 | " (norm1): LayerNorm((256,), eps=1e-05, elementwise_affine=True)\n", 591 | " (norm2): LayerNorm((256,), eps=1e-05, elementwise_affine=True)\n", 592 | " (norm3): LayerNorm((256,), eps=1e-05, elementwise_affine=True)\n", 593 | " (dropout1): Dropout(p=0.1, inplace=False)\n", 594 | " (dropout2): Dropout(p=0.1, inplace=False)\n", 595 | " (dropout3): Dropout(p=0.1, inplace=False)\n", 596 | " )\n", 597 | " )\n", 598 | " (norm): LayerNorm((256,), eps=1e-05, elementwise_affine=True)\n", 599 | " )\n", 600 | " )\n", 601 | " (fc_out): Linear(in_features=256, out_features=22067, bias=True)\n", 602 | " (dropout): Dropout(p=0.3, inplace=False)\n", 603 | ")\n" 604 | ] 605 | } 606 | ], 607 | "metadata": {} 608 | }, 609 | { 610 | "cell_type": "markdown", 611 | "source": [ 612 | "## Training" 613 | ], 614 | "metadata": {} 615 | }, 616 | { 617 | "cell_type": "code", 618 | "execution_count": 16, 619 | "source": [ 620 | "torch.cuda.empty_cache()" 621 | ], 622 | "outputs": [], 623 | "metadata": {} 624 | }, 625 | { 626 | "cell_type": "code", 627 | "execution_count": 17, 628 | "source": [ 629 | "loss_track = []\r\n", 630 | "loss_validation_track= []\r\n", 631 | "\r\n", 632 | "\r\n", 633 | "optimizer = optim.Adam(model.parameters(), lr=learning_rate)\r\n", 634 | "\r\n", 635 | "pad_idx = 
SRC.vocab.stoi[\"\"]\r\n", 636 | "criterion = nn.CrossEntropyLoss(ignore_index = pad_idx)\r\n", 637 | "for epoch in range(num_epochs):\r\n", 638 | " stepLoss=[]\r\n", 639 | " model.train()\r\n", 640 | " for batch in train_iter:\r\n", 641 | " input_data = batch.eng.to(device)\r\n", 642 | " target = batch.ar.to(device)\r\n", 643 | "\r\n", 644 | " output = model(input_data,target[:-1])\r\n", 645 | " optimizer.zero_grad()\r\n", 646 | " \r\n", 647 | " output = output.reshape(-1,trg_vocab_size)\r\n", 648 | " target = target[1:].reshape(-1)\r\n", 649 | "\r\n", 650 | " loss = criterion(output,target)\r\n", 651 | " loss.backward()\r\n", 652 | "\r\n", 653 | " optimizer.step()\r\n", 654 | " stepLoss.append(loss.item())\r\n", 655 | "\r\n", 656 | " loss_track.append(np.mean(stepLoss))\r\n", 657 | " print(\" Epoch {} | Train Cross Entropy Loss: \".format(epoch),np.mean(stepLoss))\r\n", 658 | " with torch.no_grad(): \r\n", 659 | " stepValidLoss=[]\r\n", 660 | " model.eval() # the evaluation mode for the model (doesn't apply dropout and batchNorm)\r\n", 661 | " for i,batch in enumerate(valid_iter):\r\n", 662 | " input_sentence = batch.eng.to(device)\r\n", 663 | " target = batch.ar.to(device)\r\n", 664 | " optimizer.zero_grad()\r\n", 665 | " output = model(input_sentence,target[:-1])\r\n", 666 | " output = output.reshape(-1,trg_vocab_size)\r\n", 667 | " target = target[1:].reshape(-1)\r\n", 668 | " loss = criterion(output,target)\r\n", 669 | " \r\n", 670 | " stepValidLoss.append(loss.item())\r\n", 671 | " \r\n", 672 | " loss_validation_track.append(np.mean(stepValidLoss))\r\n", 673 | " print(\" Epoch {} | Validation Cross Entropy Loss: \".format(epoch),np.mean(stepValidLoss)) " 674 | ], 675 | "outputs": [ 676 | { 677 | "output_type": "stream", 678 | "name": "stdout", 679 | "text": [ 680 | " Epoch 0 | Train Cross Entropy Loss: 7.435049949141292\n", 681 | " Epoch 0 | Validation Cross Entropy Loss: 5.988370576462188\n", 682 | " Epoch 1 | Train Cross Entropy Loss: 6.977232069164128\n", 
683 | " Epoch 1 | Validation Cross Entropy Loss: 5.755757348103956\n", 684 | " Epoch 2 | Train Cross Entropy Loss: 6.727246981162529\n", 685 | " Epoch 2 | Validation Cross Entropy Loss: 5.524308341664153\n", 686 | " Epoch 3 | Train Cross Entropy Loss: 6.47047579946456\n", 687 | " Epoch 3 | Validation Cross Entropy Loss: 5.395510543476451\n", 688 | " Epoch 4 | Train Cross Entropy Loss: 6.229757437845329\n", 689 | " Epoch 4 | Validation Cross Entropy Loss: 5.26321325131825\n", 690 | " Epoch 5 | Train Cross Entropy Loss: 5.9832683666185895\n", 691 | " Epoch 5 | Validation Cross Entropy Loss: 5.190949131142009\n", 692 | " Epoch 6 | Train Cross Entropy Loss: 5.732032602870619\n", 693 | " Epoch 6 | Validation Cross Entropy Loss: 5.094015572752271\n", 694 | " Epoch 7 | Train Cross Entropy Loss: 5.481263580647382\n", 695 | " Epoch 7 | Validation Cross Entropy Loss: 5.061300001361153\n", 696 | " Epoch 8 | Train Cross Entropy Loss: 5.221863861207838\n", 697 | " Epoch 8 | Validation Cross Entropy Loss: 5.003460148712257\n", 698 | " Epoch 9 | Train Cross Entropy Loss: 4.972142156068381\n", 699 | " Epoch 9 | Validation Cross Entropy Loss: 4.966425876338761\n", 700 | " Epoch 10 | Train Cross Entropy Loss: 4.713377501283373\n", 701 | " Epoch 10 | Validation Cross Entropy Loss: 4.975224611047026\n", 702 | " Epoch 11 | Train Cross Entropy Loss: 4.459344664177337\n", 703 | " Epoch 11 | Validation Cross Entropy Loss: 4.963081742648955\n", 704 | " Epoch 12 | Train Cross Entropy Loss: 4.216912533942756\n", 705 | " Epoch 12 | Validation Cross Entropy Loss: 4.948643896099809\n", 706 | " Epoch 13 | Train Cross Entropy Loss: 3.9796205166872447\n", 707 | " Epoch 13 | Validation Cross Entropy Loss: 5.041126395587797\n", 708 | " Epoch 14 | Train Cross Entropy Loss: 3.754485915427084\n", 709 | " Epoch 14 | Validation Cross Entropy Loss: 5.053339765830473\n", 710 | " Epoch 15 | Train Cross Entropy Loss: 3.545525817127971\n", 711 | " Epoch 15 | Validation Cross Entropy Loss: 
5.0739604056655585\n", 712 | " Epoch 16 | Train Cross Entropy Loss: 3.3388276191113833\n", 713 | " Epoch 16 | Validation Cross Entropy Loss: 5.128433674960942\n", 714 | " Epoch 17 | Train Cross Entropy Loss: 3.1556254357293057\n", 715 | " Epoch 17 | Validation Cross Entropy Loss: 5.156803880032006\n", 716 | " Epoch 18 | Train Cross Entropy Loss: 2.9846578783222606\n", 717 | " Epoch 18 | Validation Cross Entropy Loss: 5.189165875896231\n", 718 | " Epoch 19 | Train Cross Entropy Loss: 2.8305458025885866\n", 719 | " Epoch 19 | Validation Cross Entropy Loss: 5.24085449514451\n", 720 | " Epoch 20 | Train Cross Entropy Loss: 2.6819157435909493\n", 721 | " Epoch 20 | Validation Cross Entropy Loss: 5.320800462713489\n", 722 | " Epoch 21 | Train Cross Entropy Loss: 2.5431851465980726\n", 723 | " Epoch 21 | Validation Cross Entropy Loss: 5.328162451843163\n", 724 | " Epoch 22 | Train Cross Entropy Loss: 2.4220719888999866\n", 725 | " Epoch 22 | Validation Cross Entropy Loss: 5.437457458539442\n", 726 | " Epoch 23 | Train Cross Entropy Loss: 2.3040674579414455\n", 727 | " Epoch 23 | Validation Cross Entropy Loss: 5.465106003470235\n", 728 | " Epoch 24 | Train Cross Entropy Loss: 2.1977922209091\n", 729 | " Epoch 24 | Validation Cross Entropy Loss: 5.51173290803835\n", 730 | " Epoch 25 | Train Cross Entropy Loss: 2.098390280619844\n", 731 | " Epoch 25 | Validation Cross Entropy Loss: 5.609959487017099\n", 732 | " Epoch 26 | Train Cross Entropy Loss: 2.0048216544575506\n", 733 | " Epoch 26 | Validation Cross Entropy Loss: 5.693122586646638\n", 734 | " Epoch 27 | Train Cross Entropy Loss: 1.9181972871256339\n", 735 | " Epoch 27 | Validation Cross Entropy Loss: 5.720419735103459\n", 736 | " Epoch 28 | Train Cross Entropy Loss: 1.8346951236198474\n", 737 | " Epoch 28 | Validation Cross Entropy Loss: 5.777838484033362\n", 738 | " Epoch 29 | Train Cross Entropy Loss: 1.7577206774481706\n", 739 | " Epoch 29 | Validation Cross Entropy Loss: 5.902188576661147\n" 740 | ] 741 | } 742 | 
], 743 | "metadata": {} 744 | }, 745 | { 746 | "cell_type": "code", 747 | "execution_count": 39, 748 | "source": [ 749 | "#the train loss after 50 epoch\r\n", 750 | "plt.figure(figsize=(10,5))\r\n", 751 | "plt.plot(range(30),loss_track,label=\"train loss\")\r\n", 752 | "plt.plot(range(30),loss_validation_track,label=\"valiadtion loss\")\r\n", 753 | "plt.legend()\r\n", 754 | "plt.show()" 755 | ], 756 | "outputs": [ 757 | { 758 | "output_type": "display_data", 759 | "data": { 760 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAy0AAAGsCAYAAADQY0hSAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/MnkTPAAAACXBIWXMAAA9hAAAPYQGoP6dpAABibUlEQVR4nO3dd3hUdf728Xtm0nuFdEIg9F6lq3QVsbvKrmLbR8XKWnct4K7iWhBdXd1VF/W3Kq4FsSECShWkdwgQCAkQCARSIW3mPH+cEAg1gSRnkrxf13WumTnnZM4HhiFzz7fZDMMwBAAAAABuym51AQAAAABwNoQWAAAAAG6N0AIAAADArRFaAAAAALg1QgsAAAAAt0ZoAQAAAODWCC0AAAAA3JpHXV/Q5XJp7969CgwMlM1mq+vLAwAAAHAThmEoPz9fMTExstvP3J5S56Fl7969io+Pr+vLAgAAAHBTGRkZiouLO+PxOg8tgYGBkszCgoKC6vryAAAAANxEXl6e4uPjKzLCmdR5aDnWJSwoKIjQAgAAAOCcw0YYiA8AAADArRFaAAAAALg1QgsAAAAAt1bnY1oAAADg/pxOp0pLS60uA/Wcp6enHA7HBT8PoQUAAAAVDMPQvn37lJOTY3UpaCBCQkIUFRV1QWs0EloAAABQ4VhgadKkifz8/FgMHOfNMAwdOXJEWVlZkqTo6Ojzfi5CCwAAACSZXcKOBZbw8HCry0ED4OvrK0nKyspSkyZNzrurGAPxAQAAIEkVY1j8/PwsrgQNybF/TxcyRorQAgAAgEroEoaaVBP/nggtAAAAANwaoQUAAACAWyO0AAAAACdITEzUlClTLH8OHMfsYQAAAKjXLr74YnXp0qXGQsLy5cvl7+9fI8+FmtGoW1pmrs/Utv35VpcBAACAWmYYhsrKyqp0bmRkJDOouZlGG1pmrNmjez5epds+WK4D+cVWlwMAAOB2DMPQkZIySzbDMKpU49ixYzV//ny9/vrrstlsstlsSktL07x582Sz2TRz5kx1795d3t7eWrRokVJTUzV69Gg1bdpUAQEB6tmzp+bMmVPpOU/u2mWz2fTee+/p6quvlp+fn5KTk/XNN99U6+8yPT1do0ePVkBAgIKCgnTDDTdo//79FcfXrl2rSy65RIGBgQoKClL37t21YsUKSdKuXbs0atQohYaGyt/fX+3bt9cPP/xQrevXd422e9iA5EglhvspLfuI7vxohabddZF8vc5vsRsAAICG6GipU+2emWXJtTc9N1x+Xuf+qPr6669r69at6tChg5577jlJZktJWlqaJOmJJ57QK6+8oqSkJIWGhiojI0OXXXaZnn/+eXl7e+ujjz7SqFGjlJKSooSEhDNeZ+LEiXrppZf08ssv6x//+IfGjBmjXbt2KSws7Jw1ulyuisAyf/58lZWVady4cbrxxhs1b948SdKYMWPUtWtXvf3223I4HFqzZo08PT0lSePGjVNJSYkWLFggf39/bdq0S
QEBAee8bkPSaENLmL+Xpt7WS1f/c7HWZuTo4c/W6J9jusluZ15yAACA+iI4OFheXl7y8/NTVFTUKcefe+45DR06tOJxWFiYOnfuXPH4r3/9q6ZPn65vvvlG99133xmvM3bsWN10002SpBdeeEFvvPGGli1bphEjRpyzxrlz52r9+vXauXOn4uPjJUkfffSR2rdvr+XLl6tnz55KT0/Xo48+qjZt2kiSkpOTK34+PT1d1157rTp27ChJSkpKOuc1G5pGG1okqXmEv/79hx76/Xu/6ceN+/T3H7foycvaWl0WAACAW/D1dGjTc8Mtu3ZN6NGjR6XHBQUFmjBhgr7//ntlZmaqrKxMR48eVXp6+lmfp1OnThX3/f39FRQUpKysrCrVsHnzZsXHx1cEFklq166dQkJCtHnzZvXs2VPjx4/XnXfeqf/7v//TkCFDdP3116tFixaSpAceeED33HOPfvrpJw0ZMkTXXnttpXoag0Y7puWYXs3D9PL15ov+rwU79PFvuyyuCAAAwD3YbDb5eXlYstXEKuqSTpkF7JFHHtH06dP1wgsvaOHChVqzZo06duyokpKSsz7Psa5aJ/7duFyuGqlRkiZMmKCNGzfq8ssv188//6x27dpp+vTpkqQ777xTO3bs0B/+8AetX79ePXr00D/+8Y8au3Z90OhDiySN7hKr8UNbSZKembFR81KqlpoBAABgPS8vLzmdziqdu3jxYo0dO1ZXX321OnbsqKioqIrxL7Wlbdu2ysjIUEZGRsW+TZs2KScnR+3atavY16pVKz388MP66aefdM0112jq1KkVx+Lj43X33Xfrq6++0p/+9Ce9++67tVqzuyG0lLv/0pa6tlucnC5D932yWpsz86wuCQAAAFWQmJio3377TWlpaTp48OBZW0CSk5P11Vdfac2aNVq7dq1uvvnmGm0xOZ0hQ4aoY8eOGjNmjFatWqVly5bplltu0aBBg9SjRw8dPXpU9913n+bNm6ddu3Zp8eLFWr58udq2NYctPPTQQ5o1a5Z27typVatW6Zdffqk41lgQWsrZbDZNuqajLkoKU0FxmW7/YLn25xVZXRYAAADO4ZFHHpHD4VC7du0UGRl51vEpkydPVmhoqPr27atRo0Zp+PDh6tatW63WZ7PZNGPGDIWGhmrgwIEaMmSIkpKS9Nlnn0mSHA6HsrOzdcstt6hVq1a64YYbNHLkSE2cOFGS5HQ6NW7cOLVt21YjRoxQq1at9M9//rNWa3Y3NqOqk2DXkLy8PAUHBys3N1dBQUF1eekqyT1SqmveXqzUA4VqHxOk//2/PvL3btTzFQAAgEaiqKhIO3fuVPPmzeXj42N1OWggzvbvqqrZgJaWkwT7eWrq2F4K9/fSxr15enDaajlddZrrAAAAAJyA0HIaCeF+evfWHvL2sGvO5iz99btNVpcEAAAANFqEljPolhCqyTd0kSR98Guapi7eaW1BAAAAQCNFaDmLyztF64mR5qqkz323SbM37be4IgAAAKDxIbScw/8bmKSbesXLMKQHPl2t9btzrS4JAAAAaFQILedgs9n03OgOGpAcoaOlTt3x4XLtzTlqdVkAAABAo0FoqQJPh11vjemm1k0DlZVfrNs/WK78olKrywIAAAAaBUJLFQX5eOo/t/VUZKC3tuzL17hPVqvMWburpwIAAAAgtFRLbIiv3r+1h3w9HVqw9YCe+Waj6nhtTgAAANSCxMRETZkypeKxzWbT119/fUHPOWHCBHXp0uWs56Slpclms2nNmjUXdK1zGTt2rK666qpavUZtIrRUU6e4EL3+uy6y2aRPfkvXuwt3WF0SAAAAalhmZqZGjhxZo895uuAQHx+vzMxMdejQoUav1dAQWs7DsPZReurydpKkF37YopnrMy2uCAAAADUpKipK3t7etX4dh8OhqKgoeXh41Pq16jNCy3m6vV+ibunTTJL00GdrtDr9sMUVAQAAND7//ve/FRMTI5er8ljj0aNH6/bbb
5ckpaamavTo0WratKkCAgLUs2dPzZkz56zPe3L3sMcff1ytWrWSn5+fkpKS9PTTT6u0tPLETC+++KKaNm2qwMBA3XHHHSoqKqo4NmHCBH344YeaMWOGbDabbDab5s2bd9ruYfPnz1evXr3k7e2t6OhoPfHEEyorK6s4fvHFF+uBBx7QY489prCwMEVFRWnChAnV+nsrLi7WAw88oCZNmsjHx0f9+/fX8uXLK44fPnxYY8aMUWRkpHx9fZWcnKypU6dKkkpKSnTfffcpOjpaPj4+atasmSZNmlSt61cXoeU82Ww2PXNFO13apomKy1y666MVyjh0xOqyAAAAao5hSCWF1mxVHDd8/fXXKzs7W7/88kvFvkOHDunHH3/UmDFjJEkFBQW67LLLNHfuXK1evVojRozQqFGjlJ6eXuW/isDAQH3wwQfatGmTXn/9db377rt67bXXKo7/73//04QJE/TCCy9oxYoVio6O1j//+c+K44888ohuuOEGjRgxQpmZmcrMzFTfvn1Puc6ePXt02WWXqWfPnlq7dq3efvttvf/++/rb3/5W6bwPP/xQ/v7++u233/TSSy/pueee0+zZs6v853nsscf05Zdf6sMPP9SqVavUsmVLDR8+XIcOHZIkPf3009q0aZNmzpypzZs36+2331ZERIQk6Y033tA333yj//3vf0pJSdHHH3+sxMTEKl/7fNAOdQE8HHb946auuv6dJdqUmafbPliuL+/uq2A/T6tLAwAAuHClR6QXYqy59p/3Sl7+5zwtNDRUI0eO1CeffKLBgwdLkr744gtFRETokksukSR17txZnTt3rviZv/71r5o+fbq++eYb3XfffVUq56mnnqq4n5iYqEceeUTTpk3TY489JkmaMmWK7rjjDt1xxx2SpL/97W+aM2dORWtLQECAfH19VVxcrKioqDNe55///Kfi4+P15ptvymazqU2bNtq7d68ef/xxPfPMM7LbzTaHTp066dlnn5UkJScn680339TcuXM1dOjQc/5ZCgsL9fbbb+uDDz6oGLfz7rvvavbs2Xr//ff16KOPKj09XV27dlWPHj0q/szHpKenKzk5Wf3795fNZlOzZs2q9Hd4IWhpuUD+3h76z9ieigry0fasAt3z8UqVlDEVMgAAQF0ZM2aMvvzySxUXF0uSPv74Y/3ud7+r+IBfUFCgRx55RG3btlVISIgCAgK0efPmarW0fPbZZ+rXr5+ioqIUEBCgp556qtLPb968Wb179670M3369Kn2n2Xz5s3q06ePbDZbxb5+/fqpoKBAu3fvrtjXqVOnSj8XHR2trKysKl0jNTVVpaWl6tevX8U+T09P9erVS5s3b5Yk3XPPPZo2bZq6dOmixx57TL/++mvFuWPHjtWaNWvUunVrPfDAA/rpp5+q/eesrmq1tCQmJmrXrl2n7L/33nv11ltv1VhR9U1UsI/+M7anrn/nV/2amq0/T1+vl6/rVOkfGwAAQL3j6We2eFh17SoaNWqUDMPQ999/r549e2rhwoWVum498sgjmj17tl555RW1bNlSvr6+uu6661RSUlKl51+yZInGjBmjiRMnavjw4QoODta0adP06quvVvuPVVM8PSv37LHZbKeM67kQI0eO1K5du/TDDz9o9uzZGjx4sMaNG6dXXnlF3bp1086dOzVz5kzNmTNHN9xwg4YMGaIvvviixq5/smq1tCxfvryiD15mZmZFv7nrr7++VoqrT9rFBOnNm7vJbpO+WLlbb/2y3eqSAAAALozNZnbRsmKrxpe/Pj4+uuaaa/Txxx/r008/VevWrdWtW7eK44sXL9bYsWN19dVXq2PHjoqKilJaWlqVn//XX39Vs2bN9Je//EU9evRQcnLyKV/kt23bVr/99lulfUuXLq302MvLS06n86zXatu2rZYsWVJpLcDFixcrMDBQcXFxVa75bFq0aCEvLy8tXry4Yl9paamWL1+udu3aVeyLjIzUrbfeqv/+97+aMmWK/
v3vf1ccCwoK0o033qh3331Xn332mb788suK8TC1oVotLZGRkZUev/jii2rRooUGDRpUo0XVV5e0aaKJozvo6a836JWftio+zE+ju8RaXRYAAECDN2bMGF1xxRXauHGjfv/731c6lpycrK+++kqjRo2SzWbT008/Xa1WieTkZKWnp2vatGnq2bOnvv/+e02fPr3SOQ8++KDGjh2rHj16qF+/fvr444+1ceNGJSUlVZyTmJioWbNmKSUlReHh4QoODj7lWvfee6+mTJmi+++/X/fdd59SUlL07LPPavz48RXd3S6Uv7+/7rnnHj366KMKCwtTQkKCXnrpJR05cqRiTM4zzzyj7t27q3379iouLtZ3332ntm3bSpImT56s6Ohode3aVXa7XZ9//rmioqIUEhJSI/Wdznn/yUtKSvTf//5Xt99++1m7QRUXFysvL6/S1pD94aJmurN/c0nSo5+v0/K02kucAAAAMF166aUKCwtTSkqKbr755krHJk+erNDQUPXt21ejRo3S8OHDK7XEnMuVV16phx9+WPfdd5+6dOmiX3/9VU8//XSlc2688UY9/fTTeuyxx9S9e3ft2rVL99xzT6Vz7rrrLrVu3Vo9evRQZGRkpZaOY2JjY/XDDz9o2bJl6ty5s+6++27dcccdlSYCqAkvvviirr32Wv3hD39Qt27dtH37ds2aNUuhoaGSzFahJ598Up06ddLAgQPlcDg0bdo0SeZMai+99JJ69Oihnj17Ki0tTT/88EONharTsRlGFeeTO8n//vc/3XzzzUpPT1dMzJlnlZgwYYImTpx4yv7c3FwFBQWdz6XdntNl6N6PV2rWxv0K8vHQxNHtdVWXWMa4AAAAt1ZUVKSdO3eqefPm8vHxsbocNBBn+3eVl5en4ODgc2aD845D77//vkaOHHnWwCJJTz75pHJzcyu2jIyM871kveGw2zTlxq7qlhCivKIyPfzZWt3yn2VKz2YdFwAAAKC6ziu07Nq1S3PmzNGdd955znO9vb0VFBRUaWsMfL0cmvbHPnp0eGt5edi1cNtBDZsyX/+an6oyJ1MiAwAAAFV1XqFl6tSpatKkiS6//PKarqdB8fKwa9wlLfXjgwN0UVKYikpdmjRzi658c7HW7c6xujwAAACgXqh2aHG5XJo6dapuvfVWeXhUa/KxRispMkCf3nWRXrquk4J9PbUpM09XvbVYf/1ukwqLy6wuDwAAAHBr1Q4tc+bMUXp6um6//fbaqKfBstlsuqFHvOaMH6QrO8fIZUjvL9qpYa8t0C8pVVu9FAAAAGiMqh1ahg0bJsMw1KpVq9qop8GLDPTWGzd11dTbeio2xFd7co7qtqnLdf+nq3Ugv9jq8gAAAGp0ZXWgJv49nfeUx+erqtOaNQaFxWV6bfZW/WfxTrkMKdjXU3+5rK2u7xHH9MgAAKDOuVwubdu2TQ6HQ5GRkfLy8uIzCc6bYRgqKSnRgQMH5HQ6lZycfMpaLlXNBoQWN7Bud46e+HK9NmWaC29elBSmSdd0UvMIf4srAwAAjU1JSYkyMzN15AhLNaBm+Pn5KTo6Wl5eXqccI7TUM2VOl/6zeKcmz96qolKXvDzsenBwsu4akCQvj9pbXRQAAOBkhmGorKxMTqfT6lJQzzkcDnl4eJyxxY7QUk+lZx/RX75er4XbDkqSWjcN1KRrO6pbQqjFlQEAAAA1q6rZgK/w3UxCuJ8+ur2XptzYRWH+XkrZn69r3/5Vz87YoPyiUqvLAwAAAOococUN2Ww2XdU1VnPGD9I13WJlGNKHS3Zp6OQFmr1pv9XlAQAAAHWK0OLGwvy9NPmGLvrvHb2VEOanfXlFuuujFbrnvyuVlVdkdXkAAABAnSC01AP9kyM066GBuufiFnLYbZq5YZ8GT56vj3/bJZerTockAQAAAHWO0FJP+Ho59PiINvr2vv7qHBes/KIy/WX6Bt347yXanlVgdXkAAABArSG01DPtYoL01b399MwV7eTn5dDyt
MO67I2FeuuX7Sp1snotAAAAGh5CSz3ksNt0e//mmj1+kC5uHamSMpdenpWiq95arA17cq0uDwAAAKhRhJZ6LDbEV1PH9tTkGzorxM9TG/fmafRbi/XyrC0qKmUxKAAAADQMhJZ6zmaz6ZpucZr98CBd1jFKTpeht35J1eVvLNTKXYetLg8AAAC4YISWBiIy0Fv/HNNd7/y+myICvJV6oFDXvfOrJn67UUdKyqwuDwAAADhvhJYGZkSHaM0ZP1DXdY+TYUhTF6dp2GsLtGjbQatLAwAAAM4LoaUBCvHz0ivXd9aHt/dSbIivdh8+qt+//5se/2Kdco+WWl0eAAAAUC2ElgZsUKtIzXp4oG7p00yS9NmKDA17bb5mb9pvcWUAAABA1RFaGrgAbw89N7qD/vf/+qh5hL/25xXrro9W6L5PVim7oNjq8gAAAIBzIrQ0Er2ah2nmgwN096AWctht+m5dpoZMnq8Za/bIMAyrywMAAADOiNDSiPh4OvTEyDb6+t5+ahMVqMNHSvXgtDW688MV2pdbZHV5AAAAwGkRWhqhjnHB+ua+/ho/tJU8HTbN3ZKloZPn69Nl6bS6AAAAwO0QWhopLw+7HhicrO8fGKAu8SHKLy7Tk1+t183v/qZd2YVWlwcAAABUILQ0cq2aBurLe/rqqcvbysfTriU7sjV8ygK9t3CHnC5aXQAAAGA9QgvksNt054AkzXpooPokhauo1KW/fb9Z173zq7btz7e6PAAAADRyhBZUaBbur0/u6q1J13RUoLeHVqfn6PI3FmnKnK0qKnVaXR4AAAAaKUILKrHZbLqpV4J+Gj9Ql7ZpohKnS1PmbNOIKQu0YOsBq8sDAABAI0RowWlFB/vq/Vt76B83dVWTQG+lZR/RLf9ZpnEfr2J6ZAAAANQpQgvOyGazaVTnGM390yDd3q+57Dbp+/WZGvzqPL23cIfKnC6rSwQAAEAjYDPqeGGOvLw8BQcHKzc3V0FBQXV5aVygjXtz9fTXG7QqPUeS1CYqUM9f3UHdm4VZWxgAAADqpapmA1paUGXtY4L1xd199eI1HRXi56kt+/J17dtL9PgX63S4sMTq8gAAANBAEVpQLXa7Tb/rlaCf/3SxbugRJ0n6bEWGLn11nj5bni4Xa7sAAACghtE9DBdkRdohPfX1Bm3ZZ67n0i0hRH+7qqPaxfDaAgAA4OzoHoY60SMxTN/e319PXd5W/l4OrUrP0ag3F+mv321SQXGZ1eUBAACgASC04IJ5Ouy6c0CS5vxpkC7rGCWny9D7i3Zq8Kvz9N26varjxjwAAAA0MIQW1JjoYF/9c0x3fXBbTzUL99P+vGLd98lq3fKfZdp5sNDq8gAAAFBPEVpQ4y5u3USzHhqoh4Yky8vDroXbDmr4lAV6bfZWFZU6rS4PAAAA9QyhBbXCx9Ohh4a00qyHBmpAcoRKylx6fe42DZ+yQPNSsqwuDwAAAPUIoQW1qnmEvz66vZfeurmbmgZ5a1f2EY2dulz3frxSmblHrS4PAAAA9QChBbXOZrPp8k7Rmvuni3Vn/+Zy2G36Yf0+DXl1vt5buEOlTpfVJQIAAMCNsU4L6tymvXl66uv1WpWeI0lqExWo56/uqO7NQq0tDAAAAHWKdVrgttrFBOmLu/vqpWs7KdTPU1v25eu6d37VU1+vV+7RUqvLAwAAgJshtMASdrtNN/SM19w/XazrusfJMKT/Lk3XkMnzWdsFAAAAlRBaYKkwfy+9cn1nfXrXRUqK8NeBfHNtl9s+WK6MQ0esLg8AAABugNACt9CnRbhmPjTAXNvFYde8lAMa+tp8vTM/lYH6AAAAjRyhBW7D28Nc2+WHBweod/MwFZW69OLMLRr1j0ValX7Y6vIAAABgEUIL3E7LJgGa9seL9PJ1xwfqX/v2r3r66w3KK2KgPgAAQGNDaIFbstlsur6HOVD/2m7mQP3/W7pLQ16dr+/XZTJQHwAAoBEhtMCthfl76dUbOuuTu3qreYS/s
vKLNe6TVbqdgfoAAACNBqEF9ULfFhGa+eAAPTA4WZ4Om35JOaBhry3QvxcwUB8AAKChI7Sg3vDxdGj80Faa+eBA9WoepqOlTr3wwxZd+eZirWagPgAAQINV7dCyZ88e/f73v1d4eLh8fX3VsWNHrVixojZqA06rZZMAffbHi/TSdZ0U4uepzZl5uubtX/XMDAbqAwAANETVCi2HDx9Wv3795OnpqZkzZ2rTpk169dVXFRoaWlv1Aadls9l0Q494zR0/SNd0i5VhSB8tMQfq/7CegfoAAAANic2oxqe7J554QosXL9bChQvP+4J5eXkKDg5Wbm6ugoKCzvt5gBP9uv2g/vL1Bu08WChJurRNEz03ur3iQv0srgwAAABnUtVsUK2Wlm+++UY9evTQ9ddfryZNmqhr16569913z/ozxcXFysvLq7S5jfTfJCfdiRqCvi3LB+pf2lKeDpt+3pKloZMX6N0FO1TGQH0AAIB6rVqhZceOHXr77beVnJysWbNm6Z577tEDDzygDz/88Iw/M2nSJAUHB1ds8fHxF1x0jchOlT68QnpngLRjvtXVoAb4eDo0flhrzXxwgHolmgP1n/9hs658c7HWZuRYXR4AAADOU7W6h3l5ealHjx769ddfK/Y98MADWr58uZYsWXLanykuLlZxcXHF47y8PMXHx1vfPWz7HOmrP0pHss3H7UZLw56XQtwkVOGCuFyGvli5W8//sFm5R0tlt0m39Wuu8UNbyd/bw+ryAAAAoFrqHhYdHa127dpV2te2bVulp6ef8We8vb0VFBRUaXMLLYdI96+Uev1RstmlTTOkN3tK81+SSousrg4XyG636Yae8Zr7p0Ea3SVGLkN6f9FODXttgealZFldHgAAAKqhWqGlX79+SklJqbRv69atatasWY0WVWd8Q6XLXpb+30KpWT+p7Kj0y/PSW72kLd9LzEBV70UEeOv133XV1Nt6KjbEV3tyjmrs1OV6cNpqZRcUn/sJAAAAYLlqhZaHH35YS5cu1QsvvKDt27frk08+0b///W+NGzeutuqrG1EdpLHfS9e+LwXGSDm7pGk3S/+9Vjq4zerqUAMuad1EPz08ULf3ay67TZqxZq+GTJ6vL1fuZnpkAADQuBhGvftyvlpjWiTpu+++05NPPqlt27apefPmGj9+vO66664q/7zbT3lcXCAtfFVa8qbkLJHsnlKfe6WBj0regVZXhxqwNiNHj3+5Tlv25UuSBiRH6PmrOiohnOmRAQBAA+VySbuXSZu/NXsUXfueFNfD6qqqnA2qHVoulNuHlmOyU6Ufn5C2/WQ+DoiShv1V6ni9ZLNZWxsuWKnTpXcX7tDrc7apuMwlH0+7Hh7SSnf0by4PR7UaIAEAANxTWbG0c4G05Ttpyw9S4Qnjevs/LA2ZYFlpxxBaakrKj2Z4ObzTfJzQRxr5khTdydq6UCN2HizUn79aryU7zFnk2scE6e/XdlKH2GCLKwMAADgPxfnSttlmUNk2Wyo+YY1E72Cp1TCpzRXmpFTeAdbVWY7QUpNKi8zuYgtflUqPmLONdb9NuvQpyS/M6upwgQzD0Ocrd+v5783pkR12m+7o31wPD2klXy+H1eUBAACcXcEBKeUHM6jsmGcOcTgmIEpqc5kZVBIHSB5elpV5OoSW2pC7W/rpaWnjV+Zj31Dp0qel7mMlOx9u67sD+cWa+O1GfbcuU5IUH+arF67uqAHJkRZXBgAAcJLDaebYlM3fSRlLJcN1/FhYC6ntFVKbUVJsd8nuvl3fCS21aecCaebjUtYm83FUJ3Pq5ISLrK0LNWLu5v166usNysw11+u5plusnrq8ncL83eubCQAA0IgYhrR/o9masvk7af/6yseju5itKW2vkCLb1Jsx2ISW2uYsk5a/J/3yglSca+7rdKM09DkpMMra2nDBCorL9MqsFH24JE2GIYX5e+mZK9ppdJcY2erJfwIAAKCeczmljGXlA+m/M1tXjrHZzXUG21wht
blcCom3rMwLQWipKwUHpLkTpdX/lWRIXgHSoMek3ve4XZ9BVN+q9MN68sv1StlvTo88sFWknr+qg+LDmB4ZAADUgtKjUtoic2rilB+kwgPHj3n4SC0uNYNKqxGSf7h1ddYQQktd27NS+uFR81aSwpOlkX+XWg62ti5csJIyl/41P1X/+Hm7Spwu+Xo69KdhrTS2byLTIwMAgOorKzEXM8/ebi6zcSi1/P4OKW935XO9g6VWw81uXy0Gu8WMXzWJ0GIFl0ta+4k0Z8LxVNzmCmn481JoopWVoQakHijQk1+t17KdhyRJneKC9eI1ndQupoH9OwYAABfOWSblpptB5FCqGU6yt5v3c9IrD5w/WWC01PoyM6g069+ge+8QWqx0NEea/3fpt39JhlNyeEv9HjQX8fGiW1F95nIZ+mxFhl74YbPyi8rksNt014AkPTQkWT6ezCAHAECj4nJJeXtOCCWpx+8fTpNcpWf+WU9/KbyFuYW1kMJbHr/vF1ZvBtJfKEKLO8jaLM18zJxtTJKC4qThf5PaXdVo/iE2VFl5RZrw7Ub9sH6fJKlZuJ9euLqj+rWMsLgyAABwQVwuc0HGohzp6OHy7cT7h81AcmiHuZUVnfm5HN5SWNLpw0lAUz4PitDiPgxD2vyNNOsvUm6GuS9xgDnepWl7a2vDBZu9ab+e/nqD9uWZ/2Fd1z1Of7msrUKZHhkAAGuVlZwUPE4KH2c6VpRz9q5bJ7N7mMMAKgJJ0vH7QbFuvUaKOyC0uJuSI9Li16XFU8xEbnNIPe+ULnnSXKQS9VZ+UalenpWi/1u6S4Yhhft76ZlR7XRlZ6ZHBgCg1h09LO1ZVb6tNNcyOZItlRZe2PN6+Jqf0XxDJd+Q47c+IVJw/PHWk+AEyeFRA3+QxonQ4q4O75J+espsfZEk3zBp8DNSt1skO2Mi6rOVuw7piS/Xa1tWgSTp4taR+ttVHRQXyjgmAABqRGmRtG+9GU6ObYdSz/IDNskn+KTgUb75nPT4xOM+IZKnT538kRo7Qou7S/1Fmvm4dDDFfBzdWRr5spTQ29q6cEFKylx6Z36q3iyfHtnPy6E/DWutsX0T5bDT6gIAQJW5XFL2NjOY7F5xvBXldIPbw5Kk2O7mFt3ZXOjbN1TyDuJLYTdHaKkPnKXSsneleZPMAV+S1Ol30tCJ5psN9db2rAL9+av1WpbG9MgAAFRJ3t7KLSh7Vksl+aee5xdxPKDEdpdiu5mzbaFeIrTUJwUHpLkTpdX/lWRIXgHSoMek3vc06Hm5GzqXy9C05RmaNJPpkQEAqKQoV9q7ujyclI9Fyc889TxPPym6ixlMjoWUkARm3WpACC310Z6V0g+PSXtWmI/DW0ojXpSSh1pbFy5IVl6Rnv1mo2ZuYHpkAEAjlbtbSltkbhm/SQe3nnqOzS41aV85oES2YZB7A0doqa9cLmndNGn2s1Jhlrmv1Qhp+AvmDBWot37auE/PzNjI9MgAgIYvJ8MMKLvKg8rhtFPPCWlWuZtXdCfJy7/OS4W1CC31XVGuNP8l6bd3JFeZ5PCS+twnDfiT5B1gdXU4T0yPDABokHLSy1tSFktpC6WcXZWP2xxSTBepWT9zi+sh+dPjAISWhuPAVunHx6XUn83HgTHSsL9KHa6lP2c9xvTIAIB67fAuadfi8qCy0AwtJzoWUhL7m4tqx/eWfPjch1MRWhoSw5BSfpB+fPL4NxcJfaXLXpKiOlpbG84b0yMDAOqNw7uOj0lJWyTlni6kdD0eUhJ6S96B1tSKeoXQ0hCVFkm//kNa+KpUdtQcsNZ9rNTrj+ZANVpe6iWmRwYAuBXDML8krQgpi08fUmK7lYeU/mZLCiEF54HQ0pDl7pZ+ekraOP34vvBkqd1oqd2VUlQnAkw9w/TIAIA6ZRhSUY75mSInw7zNzTC7ee1Zad4/kd1DijkWUvpJ8RcxxhY1gtDSGOxcKC150xzv4iw5vj800QwwbUeb34IQYOoNpkcGANQIZ
5mUv7c8jOw2w8ix+8cCSknBmX++Ukg51pJCSEHNI7Q0JkV50tZZ0uYZ0rY5ZtexY4LjpbZXmi0wcb0ku926OlFlJ0+PfH33OD11eTsF+3laXBkAwC0U5R0PH7kZJ7WY7DYDi+E69/P4RUjBceVbvBQSb3Y5T7iI6YdRJwgtjVVJobRttrRphhlkSguPHwuMltqOMkNMs76SnW5H7uzk6ZGbBHrr+as7ami7plaXBgCoKYZh/u4+evjcW1GudCRbyt0jFeee+7ntnpUDSXCcGUqOPQ6KlbyYtRLWIrRAKj1qdh3bNENKmSkV5x0/5h8ptbnCbIFJHCA5+AbfXa1IO6THvlinHQfNADq6S4wmjGrPopQA4G5cTrMb1imBI+fsYcRVen7X8w0tDyAJx8NJSPzxgOLfhB4WcHuEFlRWViztmG8GmC3fmYPvjvENldpcbo6BSbpY8uDDsLspKnXqtTlb9e6CHXIZUkSAl54b3UGXdYy2ujQAaNzy9krb50qpc6Ud88wQcj4cXubv49NuIZUfB8WaG2NM0AAQWnBmzlJzIahNM6TN30lHDh4/5h0stR5pDuRvcank6WNdnTjFmowcPfbFWm3dbw6evKxjlCZe2UGRgd4WVwYAjURpkbmoYurPZlg5sLnycQ8fc5zI6cLGGfeFSp5+TJyDRonQgqpxOaVdv5YHmG+lgn3Hj3kFSK2GmwEmeZjk6WtdnahQXObUmz9v1z/npcrpMhTq56kJV7bXlZ1jZOMXHgDULMOQDqSYLSnb55qBpazohBNs5kydLQZLLQdLsT0kh4dl5QL1DaEF1edySbuXmQFm0zdS3u7jxzz9pdYjpPZXSy2HEGDcwIY9uXrsi3XalGmOVRrStomev7qjmgbROgYAF+ToYbOr1/a5UuovlX8fSubENi0GSy0vlZIukfzCLCkTaAgILbgwhiHtWSVtmi5tnFF5JVyvALMLWfurzf+06UJmmVKnS+/MS9UbP29TqdNQoI+Hnr6ina7vHkerCwBUlctpLqh4bGzKnpWVpwt2eJuzbrYcbP7ea9KWrlxADSG0oOYcCzAbv5I2fl35GyevQKnNZeUB5lLJg7EVVkjZl6/HvlirtbvNKTAHtorUpGs6KjaEFjEAOK3c3ZUH0BedNIVwROvjIaVZX6YGBmoJoQW1wzCk3SukjdOlTV9LeXuOH/MOMmcha3+12VzOLGR1qszp0nuLdmry7K0qKXMpwNtDT17WRjf1TJDdzjeCAOohwzBbQVyl5iQyrjJzc5aW7yt/fOLxivOOHT92rPx59m2Qts+RDqZUvpZPsDmD5rGxKcFxlvyRgcaG0ILa53JJu5eb4WXj1+bqu8d4B58QYC4mwNSh1AMFeuyLdVq5y5x2s09SuP5+bSclhPMtIQA3U5Al7V19fNu3XirOrxw8aovNLsV2Px5SYroxgB6wAKEFdevYIP6N080Ac+IsZD7BUptR5QFmEAtZ1gGny9AHv6bp5VlbVFTqkq+nQ4+PaK1b+iTS6gLAGoXZZjDJXC3tXWPeP7G1vjrsHuZq7w5P8/6x24r7nmYAOd15QTFmd+bmgxhAD7gBQgus43JJGUvLu5DNkAr2Hz/mGyq1ucIMMM0HEmBq2a7sQj32xTr9tvOQJKlnYqj+fm0nJUWyIBmAWnT08PFgsrc8pJw4oUsFmxTZWoruIsV0lWK6mGucOE4KGyeGEbuDQfBAA0JogXtwOaX0JccDTOGB48d8w6S2o8y1YKI6SsHx/CKqBS6XoY+XpevFHzarsMQpbw+7/jSsle7onyQHrS4ALlRRnpS5tnI3r8M7T39ueMvycFK+RXWUvAPrtl4AboXQAvfjcpqLcm2cbq4Dc+Rg5eM+wVLTDuYWVX7bpC1rwtSQ3YeP6Mmv1mvhNvPvvXN8iF65rpOSm/KBAUAVFRdI+9Ydbz3Zu1rK3nb6c0Obmy0nxwJKdGfz/3kAOAGhBe7NWSbtWmSOf9m9XDqwxRx0eTKbXQpPP
h5iojqat4FRtMqcB8Mw9PmK3frr95uUX1QmL4ddDw5J1h8HJsnTYbe6PABWMwyzS+/hNOnwLvM2p/z2cJqUt1fSaT42BCecGlAYLwKgCggtqF/KSszpJ/dtkPZvMGeQ2b9BOpJ9+vP9wiuHmKgO5pz6zFJWJftyi/Tn6ev185YsSVL7mCD9/dpO6hDLt6BAg1ecbwaSE8NIRUBJl8qOnv3ng2LLg0mX4+NQ/CNqvWwADROhBfWfYUj5+yqHmH0bzK4IJ65UfIzd0xzQ2bSD1LR9eetMRykgsu5rrwcMw9DXa/ZowjeblHu0VHabdEf/5np4aCv5eTHtJ1BvOcvMRYDP1Fpypi+DjrHZzTVKQppJoYlSaDOzq1doorkRUADUIEILGq7So1LW5uMh5thtce7pzw+MlhL6SIn9pGb9zWBD17IKB/KL9dx3m/TtWnOdndgQXz1/dQdd3LqJxZUBqFBWYo4DLDxYfpt90uODZhjJ22uu9G44z/58vmEnBJLEEwJKohlYmNkRQB0htKBxMQwpN+PU7mWHduqU/td+EVKzvlJif6lZP6lJO8nOeI5ftmTpqa83aE+O2TVkdJcYPX1FO0UEeFtcGdAAlRypHDYqhY+TQ0m2VJxXved3eJ8mkJzw2IffvwDcA6EFkMyZbvauNmctS1tkDvovK6p8jm+olNC3vCWmrxTVyVwHoBEqLC7T5NlbNXXxTrkMKdjXU3+5vK2u7x4nG61TwKlcTqko11yX5Mgh8/bosdvT7DsWRkqPVP9aNoc5ns8/4oTbiMqPA6LMYBLQlC9jANQLhBbgdMqKzRCTtsgMMum/SaWFlc/xDpISLjJbYRL7m7PgNLKuEut25+iJL9drU6b57W6fpHC9cE1HNY/wt7gyoBYV5ZqtGkcOVw4aR04IISfvK8rVaWfTqgqHV3noCD8hfJzucXko8QkhiABocAgtQFU4S81F0SpCzNJTu2F4+ksJvY+HmJhujWKWsjKnS/9ZvFOTZ29VUalLXh52PTg4WXcNSJKXBx+cUA+5XFLBPrPb6KEd5gKIJ94vOsO4uKrwCpT8Qs2WW98w89YvrPJj39DyFpHyUOIdyPg6AI0eoQU4Hy6nuXBa2mIzxOz6VSrKqXyOh68U39Mc1J/YT4rtIXn6WFJuXUjPPqK/fH18UcrWTQM16dqO6pYQanFlwGk4S83xbYd2mIHkcFrl++eaztfTvzxcnCOAVHoc0uhaYwGgphBagJrgcklZm46Pidm1+NTpQu2e5gxlFV06Ik+6f0KXD/9Iyav+dbEyDEMz1uzVc99t0qHCEtls0h8uaqZHh7dWoA8f1lDHSo6UT9+789RWk5z0s8+cZXNIIfHmFL5hzaWwpPL7SeZA9Xr4/gSA+ozQAtQGw5AOpEi7Fh1vjSnYX73n8PA1g8wpg2hPDDgnBB8vf7fpQnKosETPf79ZX67aLUmKCvLRc6Pba1j7KIsrQ71iGOZA9OJ8c7KMkvzj94vzKz8uKd9XnG/OpHV4p5Sfefbn9/Apny2rPIyElQeU0OZSSAKtIgDgRggtQF04NtVy/v7y6UkPVJ7CtPDA8elLCw9IzuLqX8PDx1w3IayFFN5SCm9RvrWUAmMsGZi7ePtB/Xn6eu3KNmdAGtE+ShNHt1fToIbbTa7RMAzJVWbOsldWfNLtyfuKKz8uOTFknC6MFBzff7oFYqvDO1gKSzyhpeSEVpPAaAasA0A9USuhZcKECZo4cWKlfa1bt9aWLVtqvDCgwTEM80PbaUPNaRaJKzxw6vTMJ/PwNT+ohSeVB5qWx8ONf0StttAUlTr1+txt+veCHXK6DAV6e+ixkW00pleC7Hb3aBlq9Jxl5hit9KXSnpXmQHNn8WnCyEm3FxooqsxmDkb3CjBvvQNOenzSfd9QswUlLMm87yYtkACA81fVbOBR3Sdu37695syZc/wJPKr9FEDjZLMd//AV1vzc5xuGVFJohpecdCl7u9l3P3u7lJ1qdpMpOyplb
TS3k3kHmS0yFS00Lc1wE9bCHDh8gXw8HXp8RBtd2TlGT3y1XmszcvT01xv09eo9mnRNR7VqGnjB10A1FReYaxGlL5XSl0i7V5w6pXd1ObzM1j4P77PfOrzMroynBJCzhBE36voIAHBv1U4cHh4eioqi/zpQ62y28g97AWbISRpU+bizTMrZVTnIZG+XDqVKORnm1M17V5vbyfwiKnc1C000F6ULaCoFNKnWVKxto4P01T199X9L0vTyrBSt3HVYl7+xUPcMaqF7L2kpH8/GuVBnncjfVx5QykPKvvWnDkL3CTHXHYrvZb7GFWHjTAHE+4Qg4k03KwCAW6h2aNm2bZtiYmLk4+OjPn36aNKkSUpISDjj+cXFxSouPt6PPy8v74znAqgGh8fx0JE8tPKx0iJzdqVjIaYi1KSa61QcKe+KlrH09M/t4WuGl2MhJvCEQHPirX8TycNLDrtNY/s117D2UXpmxgbN2ZylN37eru/WZ+qFqzvqoqTwWv/raPAMQzq4zQwnx0LK4Z2nnheSICX0MYNKQh8pojXBAwBQ71VrTMvMmTNVUFCg1q1bKzMzUxMnTtSePXu0YcMGBQaevivI6cbBSGJMC2CV4vwTWmfKb3PSpcIsqSDr1MU1z8U3tFKQMfybaEuBnz7dXKQdRwN0wAjWwM6tdf8lLRXkZZjraLhKJWeJeb/i9tj9EnMg+LH7lY6d5WftjnO0Hpyji5Pj5O5OFnd9LSsxx6Ps+tUMKRlLT51uWzYpqsPxkBJ/kRQca0m5AACcjzqZPSwnJ0fNmjXT5MmTdccdd5z2nNO1tMTHxxNaAHdVUmiGl4Isczrngv0n3D/p1lVqdbW1x3aaEOTpa24e5beePifcP/GYj+TpZ/5cxf7yfWf6mdIj0u5lx7t77V5x6kKIHj7mYqbHWlHie0o+wdb8/QAAUANqbSD+iUJCQtSqVStt3779jOd4e3vL29v7Qi4DoC55+R9f1+JsDEM6evg0waZyuCnN3SfP4kMqM+wqlYcMh6d8vH1k9/Ay18uwe5qDuB0n3h677yXZPY7fd5x4/8Sf9TAXAj3ttLxFZovM2WbLKis/fmIIM5zmIPYLHch+IXzDKnf1iu4seXhZVw8AABa5oNBSUFCg1NRU/eEPf6ipegDUFzab5Bdmbk3anvE0T0lFJWWaMne7/r0gVS5DCrd5aeLI9rq8Y7Rs7jR7lMt5+mDjLJZKj5pbWdEJ98tvS4tOuH/snCPm/tIj5Y9PvH/k+M+cKLT5SeNRkpldCwAAVbN72COPPKJRo0apWbNm2rt3r5599lmtWbNGmzZtUmRkZJWeg3VagMZrbUaOHvtinVL250uShrdvqr9e1UFNAhvpopSGcTwE2Wzm+CAAABqRqmaDak0ps3v3bt10001q3bq1brjhBoWHh2vp0qVVDiwAGrfO8SH69v7+enBwsjzsNs3auF9DJy/Qlyt36wKG19VfNps5psUvjMACAMBZXNBA/PNBSwsASdq0N0+PfblWG/aYs5Vd3DpSL1zdUTEhvhZXBgAA6kqttLQAQE1pFxOkr+/tp8dGtJaXw655KQc07LUF+uS39MbZ6gIAAM6I0ALAMh4Ou+69uKV+eLC/uiaEqKC4TH+evl5j3vtN6dlHrC4PAAC4CUILAMu1bBKoL+7uq6evaCcfT7t+Tc3W8CkL9MHinXK5aHUBAKCxI7QAcAsOu0139G+uHx8cqN7Nw3S01KkJ327SDf9aoh0HCqwuDwAAWIjQAsCtJEb469O7LtJfr+ogfy+HVuw6rJGvL9S/5qeqzOmyujwAAGABQgsAt2O32/SHi5pp1sMDNSA5QsVlLk2auUXXvv2rUvblW10eAACoY4QWAG4rLtRPH93eSy9d10mBPh5auztXV/xjod6Yu02ltLoAANBoEFoAuDWbzaYbesRrzvhBGtK2iUqdhibP3qor31ysDXtyrS4PAADUAUILgHqhaZCP3r2lh17/XReF+nlqc2aeRr+1W
K/MSlFxmdPq8gAAQC0itACoN2w2m0Z3idXs8YN0ecdoOV2G3vxluy5/Y5FWpR+2ujwAAFBLCC0A6p2IAG+9Naab3h7TTREB3tqeVaBr3/5VE7/dqMLiMqvLAwAANYzQAqDeGtkxWnPGD9Q13WJlGNLUxWka9toCLdh6wOrSAABADSK0AKjXQvy8NPmGLvrw9l6KDfHVnpyjuuU/yzT+f2t0uLDE6vIAAEANILQAaBAGtYrUTw8P1G39EmWzSV+t2qOhr83Xt2v3yjAMq8sDAAAXgNACoMHw9/bQs6Pa64u7+yq5SYAOFpTo/k9X666PVigz96jV5QEAgPNEaAHQ4HRvFqrvHuivBwcny9Nh05zNWRo6eYH+u3SXXC5aXQAAqG8ILQAaJG8Phx4e2krfPzBAXeJDVFBcpqe+3qDfvbtUOw4UWF0eAACoBkILgAatVdNAfXlPXz1zRTv5ejq0bOchjXh9od76ZbtKnS6rywMAAFVAaAHQ4DnsNt3ev7l+enigBiRHqKTMpZdnpWj0m4u1fneu1eUBAIBzILQAaDTiw/z00e299Or1nRXi56lNmXm66p+LNemHzTpa4rS6PAAAcAaEFgCNis1m07Xd4zRn/CBd0SlaTpehfy3YoRGvL9CvqQetLg8AAJwGoQVAoxQR4K03b+6m927poaggH+3KPqKb3/1NT3y5TrlHS60uDwAAnIDQAqBRG9KuqX4aP1BjeidIkqYtz9DQyfP144Z9FlcGAACOIbQAaPSCfDz1/NUd9dkfL1JShL+y8ot1939X6p7/rlRWfpHV5QEA0OgRWgCgXO+kcP3w4ADde3ELOew2zdywT0Nena//Lc+QYbAoJQAAViG0AMAJfDwdemxEG31zXz91iA1SXlGZHvtynX7//m8sSgkAgEUILQBwGu1jgvX1vf305Mg28vawa/H2bI2YslCTZ29VUSnTIwMAUJcILQBwBh4Ou/7foBb66eGBGtgqUiVOl96Yu00jpizQgq0HrC4PAIBGg9ACAOfQLNxfH97WU2/d3E1NAr2Vln1Et/xnme77ZJX25zFQHwCA2kZoAYAqsNlsurxTtOb+aZBu65cou036bl2mBr86Xx8s3imni4H6AADUFptRx1Pi5OXlKTg4WLm5uQoKCqrLSwNAjdmwJ1d/+XqD1mbkSJI6xgbr+as7qFNciKV1AQBQn1Q1G9DSAgDnoUNssL66p6/+dlUHBfp4aP2eXI1+a7GembFBuUdLrS4PAIAGhdACAOfJYbfp9xc1089/ulhXd42VYUgfLdmlwa/O14w1e1jbBQCAGkJoAYALFBnorddu7KJP7uytpAh/HSwo1oPT1rC2CwAANYTQAgA1pG/LCM18aID+NLSVvFjbBQCAGkNoAYAa5O3h0P2DkzX74YEadMLaLsOnLNB81nYBAOC8EFoAoBY0C/fXB+VruzQN8tau7CO69T/LNI61XQAAqDZCCwDUkmNru8wZP0i392suu036vnxtl6ms7QIAQJWxTgsA1JGT13bpEBuk56/qqM7xIZbWBQCAVVinBQDczIlruwT5eGjDnjxd9c/Fevpr1nYBAOBsCC0AUIeOre0y94S1Xf5vqbm2y/TVu1nbBQCA0yC0AIAFKq3tEmmu7fLwZ2t147+Wasu+PKvLAwDArRBaAMBCfVtGaOaDA/To8Nby8bRrWdohXf7GIv31u03KL6LLGAAAEqEFACzn7eHQuEtaau6fLtaI9lFyugy9v2inBr86XzPW7KHLGACg0SO0AICbiA3x1Tt/6K4PbuupxHA/ZeUX68Fpa3TTu0u1dX++1eUBAGAZpjwGADdUVOrUewt36M1ftquo1CUPu02392+uBwYnK8Dbw+ryAACoEUx5DAD1mI+nQ/ddmqzZDw/SsHZNVeYy9O8FOzT41Xn6du1euowBABoVQgsAuLH4MD/9+5Ye+s/YHkoI89P+vGLd/+lq/f7937Q9q8Dq8gAAqBOEFgCoBy5t01Q/PTxQD
w9pJW8PuxZvz9bI1xfoxZlbVFhcZnV5AADUKkILANQTPp4OPTjE7DI2uE0TlToNvTM/VUMmz9cP6zPpMgYAaLAILQBQzySE++n9sT313i09FBfqq8zcIt378Srd8p9l2nGALmMAgIbngkLLiy++KJvNpoceeqiGygEAVNWQdk01Z/wgPTA4WV4edi3cdlDDpyzQy7O26EgJXcYAAA3HeYeW5cuX61//+pc6depUk/UAAKrBx9Oh8UNb6aeHBuri1pEqdRp665dUDZ28QD9u2EeXMQBAg3BeoaWgoEBjxozRu+++q9DQ0JquCQBQTYkR/po6tqf+/Yfuig3x1Z6co7r7vys1dupy7TxYaHV5AABckPMKLePGjdPll1+uIUOGnPPc4uJi5eXlVdoAADXPZrNpWPsozRk/SPdd0lJeDrvmbz2g4a8t0Ks/pehoidPqEgEAOC/VDi3Tpk3TqlWrNGnSpCqdP2nSJAUHB1ds8fHx1S4SAFB1vl4OPTK8tX58aIAGJEeoxOnSP37ersGvztOMNXvoMgYAqHeqFVoyMjL04IMP6uOPP5aPj0+VfubJJ59Ubm5uxZaRkXFehQIAqicpMkAf3d5L7/y+m2JDfLU3t0gPTluja9/+VWsycqwuDwCAKrMZ1fjK7euvv9bVV18th8NRsc/pdMpms8lut6u4uLjSsdPJy8tTcHCwcnNzFRQUdP6VAwCqrKjUqXcX7NA/56XqaKnZTeyarrF6bEQbRQVX7UsoAABqWlWzQbVCS35+vnbt2lVp32233aY2bdro8ccfV4cOHWqsMABAzdufV6SXfkzRl6t2S5J8PR26e1AL/XFgkny9zv6lEwAANa1WQsvpXHzxxerSpYumTJlSo4UBAGrP2owc/fW7TVqx67AkKSbYR4+PbKMrO8fIZrNZXB0AoLGoaja4oMUlAQD1U+f4EH1+dx/946aujHcBALi9C25pqS5aWgDAvRSVOvXeQnO8y5ESxrsAAOpOnXUPqy5CCwC4J8a7AADqGqEFAHBe1u3O0XPfHh/vEh3soycY7wIAqAWEFgDAeTMMQ9+ty9SLM7doT85RSVLXhBA9c0U7dU0Itbg6AEBDQWgBAFyw0413ubprrB4b0VrRwb4WVwcAqO8ILQCAGsN4FwBAbSC0AABqHONdAAA1idACAKgVZxrv8vQV7dSN8S4AgGogtAAAatXpxrsMb99UjwxrreSmgRZXBwCoDwgtAIA6sT+vSK/MMse7uAzJbpOu6Ranh4YkKy7Uz+ryAABujNACAKhT2/bn65WfUjRr435JkpfDrt9f1EzjLmmh8ABvi6sDALgjQgsAwBKr0g/rpR+3aOmOQ5Ikfy+H7hyQpDsHNFegj6fF1QEA3AmhBQBgGcMwtHDbQb00a4s27MmTJIX5e2ncJS01pneCfDyZJhkAQGgBALgBl8vQzA379OpPKdpxsFCSFBviqweHJOuarrHycNgtrhAAYCVCCwDAbZQ5Xfpi5W5NmbNN+/KKJEktmwTokWGtNbx9U9Z4AYBGitACAHA7RaVOfbQkTW/9kqrco6WSpM7xIXp8RGv1bRFhcXUAgLpGaAEAuK3co6V6d8EOvb9op46Wmmu8DEiO0GPD26hjXLDF1QEA6gqhBQDg9rLyi/TWz9v1ybJ0lTrNX0eXd4zW+GGt1CIywOLqAAC1jdACAKg30rOP6LU5W/X1mj0yDMlht+n67nF6cEiyooN9rS4PAFBLCC0AgHpny748vTIrRXM2Z0mSvDzsGts3UfcMaqFQfy+LqwMA1DRCCwCg3lqRdkgv/ZiiZWnmApWB3h7648Ak3d6/ufy9PSyuDgBQUwgtAIB6zTAMzdt6QC/9mKLNmeYCleHlC1TezAKVANAgEFoAAA2Cy2Xo23V7NXn2Vu3KPiJJign20UNDWumabixQCQD1GaEFANCglDpd+nzFbr0x9/gClUkR/ho/rJUu6xAtu50FKgGgviG0AAAapKJSp/67dJfe+mW7D
h8xF6hsHxOkR4a31sWtImWzEV4AoL4gtAAAGrT8olK9v2in3lu4UwXFZZKknomhenR4G/VqHmZxdQCAqiC0AAAahUOFJXpnfqo+/DVNxWUuSdLFrSP1yLDW6hAbbHF1AICzIbQAABqVfblFeuPnbfrf8gyVucxfbZd3jNb4Ya3UIjLA4uoAAKdDaAEANEppBws1Zc5WzVi7V4Yh2W3Sdd3j9OCQVooN8bW6PADACQgtAIBGbXNmnl79aavmbN4vSfJy2DXmogSNu6SlIgK8La4OACARWgAAkCStSj+sl39M0ZId2ZIkPy+Hbu/XXHcNTFKwr6fF1QFA40ZoAQDgBIu3H9RLs1K0NiNHkhTs66m7B7XQ2L6J8vVyWFscADRShBYAAE5iGIZ+2rRfr/6Uoq37CyRJkYHeuv/SlvpdzwR5edgtrhAAGhdCCwAAZ+B0GZqxZo9em7NVGYeOSpLiQn1178UtdU23WPl40vICAHWB0AIAwDmUlLn02YoM/WPuNmXlF0uSIgK8dXv/RI3p3YwxLwBQywgtAABU0dESpz5dlq73Fu7Q3twiSVKAt4fG9E7Q7f2bq2mQj8UVAkDDRGgBAKCaSp0ufbt2r96Zn1ox5sXLYdfVXWP1x0FJLFIJADWM0AIAwHlyuQz9kpKld+anannaYUmSzSYNa9dUdw9qoa4JoRZXCAANA6EFAIAasHLXIb09b0fFIpWS1Lt5mO6+uIUubhUpm81mYXUAUL8RWgAAqEHb9ufrXwt2aMaaPSp1mr8620QF6u5BLXRFp2h5OJguGQCqi9ACAEAtyMw9qvcX7tSny9JVWOKUJMWG+OquAc11Q894+Xl5WFwhANQfhBYAAGpR7pFS/d/SNE1dnKbswhJJUqifp27tm6hb+yQq1N/L4goBwP0RWgAAqANFpU59vnK33l2wQ+mHjkiSfD0durFnvO4c0FxxoX4WVwgA7ovQAgBAHSpzujRzwz69Mz9VG/fmSZIcdpuu7Byj/zcoSW2i+J0HACcjtAAAYAHDMLRo+0G9Mz9Vi7dnV+y/pHWk/jiwhS5KCmPGMQAoR2gBAMBi63bn6F/zd+iHDZk69tu2TVSgbu/XXFd2iZGPp8PaAgHAYoQWAADcRNrBQr27cIe+WrVHR0vNGcfC/L10c68E/aFPMzUN8rG4QgCwBqEFAAA3k3OkRNOWZ+ijX9O0N7dIkuRht+myjtG6rV+iuiaEWlwhANQtQgsAAG6qzOnST5v2a+rinVqedrhif5f4EN3WL1GXdYyWJ4tVAmgECC0AANQDG/bk6j+Ld+q7tZkqcbokSU2DvHVLn0Td1CtBYaz3AqABI7QAAFCPHMgv1se/7dJ/l6brYEGxJMnbw66rusTqtv6JTJkMoEGqajaoVtvz22+/rU6dOikoKEhBQUHq06ePZs6cecHFAgDQ2EUGeuuhIa20+IlLNPmGzuoYG6ziMpc+W5GhEVMW6qZ/L9VPG/fJ6arT7xoBwC1Uq6Xl22+/lcPhUHJysgzD0IcffqiXX35Zq1evVvv27av0HLS0AABwboZhaOWuw5q6OE0/nhBW4sN8dWufRN3QM15BPp4WVwkAF6bOuoeFhYXp5Zdf1h133FGjhQEAANOenKP6vyW79OmydOUeLZUk+Xs5dF33ON3aN1FJkQEWVwgA56fWQ4vT6dTnn3+uW2+9VatXr1a7du1Oe15xcbGKi4srFRYfH09oAQCgmo6WODV99R5NXbxT27IKKvZf0jpSt/VrrgHJEbLZbBZWCADVU2uhZf369erTp4+KiooUEBCgTz75RJdddtkZz58wYYImTpx4yn5CCwAA58cwDC3aflBTF6fp5y1ZFftbRPrrpl4JurprrMIDvC2sEACqptZCS0lJidLT05Wbm6svvvhC7733nubPn09LCwAAFth5sFAf/pqmz1dkqLDEKUnydNg0rF2UbuwZr/4tI2S30/oCwD3V2ZiWIUOGqEWLFvrXv/5Vo4UBAICqyy8q1
Tdr9+qz5Rlatzu3Yn9siK9u7Bmv63vEKTrY18IKAeBUVc0GHhd6IZfLVaklBQAA1L1AH0+N6d1MY3o308a9ufrf8gxNX71He3KOavLsrZoyZ6sGtYrUjT0TNLhtE3k6qrXqAQBYqlotLU8++aRGjhyphIQE5efn65NPPtHf//53zZo1S0OHDq3Sc9DSAgBA3SgqderHDfs0bXm6lu44VLE/IsBL13aP04094pl5DIClaqV72B133KG5c+cqMzNTwcHB6tSpkx5//PEqB5bqFAYAAGrOzoOF+mx5hr5YuVsHC473kOjdPEy/6xWvkR2i5ePpsLBCAI1RnY1pqS5CCwAA1il1uvTzlix9tjxD81KyVL5mpQJ9PHR111jd2DNe7WOCrS0SQKNBaAEAAGeVmXtUX6zYrc9WZGj34aMV+zvGBuvGnvG6skuMgnw8LawQQENHaAEAAFXichlanHpQ05Zn6KeN+1TqND8a+HjadXnHGN3UK17dm4WycCWAGkdoAQAA1XaosERfrdqtz5ZnaFtWQcX+FpH++l3PBF3VNVaRgSxcCaBmEFoAAMB5MwxDq9Jz9NnydH27NlNHS82FKz3sNl3cuomu6x6nS9s0kZcHUycDOH+EFgAAUCPyi0r13bpMTVueobUZORX7w/y9NLpLjK7vHq92MfxOB1B9hBYAAFDjtu3P1xerduurVXt0IP/41MntY4J0Xfc4je4SqzB/LwsrBFCfEFoAAECtKXO6tHDbQX2+MkNzNmWpxOmSJHk6bBrcpqmu7xGnQa0i5eGg+xiAMyO0AACAOnG4sETfrN2rz1dmaMOevIr9EQHeuqZbrK7rHqdWTQMtrBCAuyK0AACAOrc5M09frNytr1fvUXZhScX+znHBuq57nK7sHKtgP9Z+AWAitAAAAMuUOl36ZUuWvli5Wz9vyVKZy/y44eVh17B2TXVd9zgNSI6Uw87aL0BjRmgBAABu4WBBsWas2avPV2Roy778iv1Ng7x1Tbc4Xdc9Ti0iAyysEIBVCC0AAMCtGIahjXvLu4+t2aOcI6UVx7olhOj6HvG6vFO0gnzoPgY0FoQWAADgtorLnPp5c5Y+X7lb81KyVN57TF4edl3auolGd4nRJW2ayMfTYW2hAGoVoQUAANQLWXlFmr56j75YuVvbsgoq9gd6e2hEhyiN7hKrPi3CGf8CNECEFgAAUK8YhqEt+/L19Zo9+nbNXu3NLao4FhHgrVGdozW6S6w6xwXLZiPAAA0BoQUAANRbLpehFbsOa8aaPfp+fWal8S/Nwv00unOMruwSq5ZNGMAP1GeEFgAA0CCUlLm0aPsBzVizVz9t3K+jpc6KY+1jgnRVl1hd0Tla0cG+FlYJ4HwQWgAAQINzpKRMszft14w1e7Vg64GK9V9sNql38zCN7hKrkR2iFOLnZXGlAKqC0AIAABq0Q4Ul+mF9pr5Zs1fL0g5V7Pd02DSoVRNd1TVGg9s0la8XM5AB7orQAgAAGo09OUf17dq9mrFmrzZn5lXs9/dyaHj7KF3ZJUb9WkbI02G3sEoAJyO0AACARmnr/nx9s2avZqzdo4xDRyv2h/t7aUSHKA1t11R9WoTL24MWGMBqhBYAANCoGYahVek5+mbNHn23LlPZhSUVxwK8PTSoVaSGtmuqS1o3UbCfp4WVAo0XoQUAAKBcmdOlxanZmrVxn+Zs2q+s/OKKYw67Tb0SwzS0XVMNbddU8WF+FlYKNC6EFgAAgNNwuQyt35Or2Zv2a/am/UrZn1/peJuoQA1t11RD2jZVx9hg2e0sZAnUFkILAABAFaRnH9Hszfs1e9M+LU87LKfr+EejpkHeGtK2qYa0a6q+jIMBahyhBQAAoJpyjpTol5Qszd60X/NTDqiw5PhClv5eDg1qHakhbZvq0jZNWAsGqAGEFgAAgAtQXObUktRszd60X3M279f+vMrjYHo0C9XQdk01rF2UEsIZBwOcD0ILAABADTGMyuNgtuyrPA6mVdOA8
oH8UerEOBigyggtAAAAtSTj0JGKFpjfdh6qNA4mIsBbl7aJ1OC2TdW/ZYT8vT0srBRwb4QWAACAOpB7pNQcB7PZHAdTUFxWcczLw64+SeEa0raJLm3bVLEhvhZWCrgfQgsAAEAdKylzadnOQ5q7Zb/mbs5S+qEjlY63iQo0B/K3baIucSF0I0OjR2gBAACwkGEY2p5VoLlbsjR3836t3HVYJ/QiU0SAly5p3USD2zbRgORIupGhUSK0AAAAuJFDhSWavzVLczZnaUHKAeWf2I3MYddFLcI1uI0ZYuJCmY0MjQOhBQAAwE2VlLm0Iu2Q5mzO0twt+7Ur+9RuZIPbNtGlbZqqS3yIHHQjQwNFaAEAAKgHDMNQ6oFCzd1sjoNZsetQpW5k4f5eurh1Ew1p20QDWkUqgG5kaEAILQAAAPVQzpESzUs5oDmb92v+1gPKLzrejczTYVP3ZqEakBypAckRah8TTCsM6jVCCwAAQD1X6nRpedohzd1sDuZPO6kbWYifp/q1iFD/5Aj1bxmh+DDGwqB+IbQAAAA0MDsOFGjR9oNauO2glqZmVxrML0mJ4X7lASZSfVqEK9jX06JKgaohtAAAADRgZU6X1u7O0cJtB7Vo20GtzsiR84TBMHab1Dk+RANaRqh/cqS6JoTI02G3sGLgVIQWAACARiS/qFRLdxzSom0HtHD7Qe04UFjpuL+XQxclhat/coQGJEeoRWSAbDbGw8BahBYAAIBGbE/OUS3edlALtx/U4u0HdaiwpNLx6GAf9WtpBph+LSMUEeBtUaVozAgtAAAAkCS5XIY2ZeaZXcm2H9DytMMqKXNVOqdddJAGJEeob8sI9WgWKn+mVkYdILQAAADgtIpKnVq281DFoP7NmXmVjnvYbeoUF6w+LcLVJylC3ZuFytfLYVG1aMgILQAAAKiSA/nF+jXVDDBLUrO1J+dopeOeDpu6xIfooqRw9UkKV7dmofLxJMTgwhFaAAAAcF4yDh3Rkh3ZWpqarSU7spWZW1TpuJfDri4JIeqTFK6LksLVNSGEEIPzQmgBAADABTMMQ+mHjmjpjmwtKQ8x+/OKK53j5WFX94RQsyWmRbg6xwfL24MQg3MjtAAAAKDGGYahtOwjWpKabQaZHdk6kF85xPh42tW9Waguam6GmE5xIfLyYI0YnIrQAgAAgFpnGIZ2HCysaIX5bUe2DhZUnl7Z19OhHolmS8xFSeHqGBtMiIEkQgsAAAAsYBiGtmcVVLTCLN1x6JQ1Ynw87eoSH6KeiWHqmRimbs1CFcAUy40SoQUAAACWc7kMbTsWYlKztSzt1BBjt0ntY4LVIzFUvRLD1CMxTJGBLHbZGBBaAAAA4HYMw1DqgUItTzuk5TsPaVnaIe0+fPSU85Ii/NUjMVQ9E8PUq3mYEsL8ZLPZLKgYtalWQsukSZP01VdfacuWLfL19VXfvn3197//Xa1bt67xwgAAANA4ZOYe1fK0w1q+85CWpx1Syv58nfwJtUmgd3l3slD1bB6mNlFBctgJMfVdrYSWESNG6He/+5169uypsrIy/fnPf9aGDRu0adMm+fv712hhAAAAaJxyj5RqZfohLdt5WMvTDmnd7hyVOit/ZA309lC3ZqHq1dwcF9MpLpi1YuqhOukeduDAATVp0kTz58/XwIEDa7QwAAAAQJKKSp1am5Gj5WmHtCztsFbtOqyC4rJK53g57OoUF6yezcPULSFUXRNCFBHAuBh3V9VscEHTNOTm5kqSwsLCznhOcXGxiouPz92dl5d3IZcEAABAI+Pj6VDvpHD1TgqXJJU5XdqyL98cF5NmtsgcLCjWil2HtWLX4YqfSwjzU9eEkIoQ0zY6SJ4Oplquj867pcXlcunKK69UTk6OFi1adMbzJkyYoIkTJ56yn5YWAAAA1ATDMLQr+4iWpR3SirRDWp2eo21ZBaec5+1hV8fYYHVrFqqu8SHqmhCqqGAfCyrGMbXePeyee+7RzJkztWjRIsXFxZ3xv
NO1tMTHxxNaAAAAUGtyj5Zq3e4crdqVo9UZh7U6PUe5R0tPOS8m2Eddy1tiuiaEqn1MEGNj6lCthpb77rtPM2bM0IIFC9S8efNaKQwAAACoKYZhaMfBQq1Oz9HqdDPEbNmXJ9dJn4Q9HTa1iwlWt/IQ0zU+RHGhvky3XEtqJbQYhqH7779f06dP17x585ScnFxrhQEAAAC1qbC4TOt251a0xKxOP6yDBSWnnBcZ6F3RnaxrQog6xQXLz+uChoajXK2ElnvvvVeffPKJZsyYUWltluDgYPn6+tZoYQAAAEBdMgxDuw8f1ar04yFm4948lZ3UHGO3Sa2aBqpLfIg6x4eoS3yIkpsEyINB/tVWK6HlTM1iU6dO1dixY2u0MAAAAMBqRaVObdybWzE2ZtWuHO3LKzrlPF9PhzrGBqtzfHBFkIkNoVvZudTJOi3ng9ACAACA+mx/XpHWZORobUaO1u7O0bqMXOWftG6MJEUEeKlz3PHWmM5xIQr287SgYvdFaAEAAADqgMtlaMfBAq3JyNXajBytycjR5sxTu5VJUvMIf3WOC67oWtY2unHPVkZoAQAAACxSVOrUpsw8rUk3W2PWZuQoLfvIKed5OmxqGx1U0RLTOT5ESRH+stsbR7cyQgsAAADgRg4XlpQHmFyt3W22yBwqPHW2sgBvD7WLCVLH2GB1iA1Sh5hgJUUGyNEAgwyhBQAAAHBjx2YrOzY+Zk1GjtbvyVVxmeuUc3087WoXHaQOscHqEBOs9rFBSm4SKC+P+j1jGaEFAAAAqGdKnS6lHijQhj152rAnVxv35mrj3jwdKXGecq6Xw67WUYHqEBuk9jHB6hAbrDZRgfVqjAyhBQAAAGgAnC5DadmF5SHGDDMb9uQqr+jUGcscdpuSmwSUhxizZaZtdJACvN1zMUxCCwAAANBAHetatmFPrjbsza1omck+zRgZm82ctaxDzPExMu1jgxXsa/30y4QWAAAAoBExDEP784orBZmNe3OVmXvqYph/GtpK9w9OtqDKyqqaDdyznQgAAABAtdhsNkUF+ygq2EdD2jWt2H+woLiiW9nG8jDTITbYwkqrj9ACAAAANGARAd4a1CpSg1pFWl3Keavfc6QBAAAAaPAILQAAAADcGqEFAAAAgFsjtAAAAABwa4QWAAAAAG6N0AIAAADArRFaAAAAALg1QgsAAAAAt0ZoAQAAAODWCC0AAAAA3BqhBQAAAIBbI7QAAAAAcGuEFgAAAABujdACAAAAwK151PUFDcOQJOXl5dX1pQEAAAC4kWOZ4FhGOJM6Dy35+fmSpPj4+Lq+NAAAAAA3lJ+fr+Dg4DMetxnnijU1zOVyae/evQoMDJTNZqvLS58iLy9P8fHxysjIUFBQkKW1oHbwGjcOvM4NH69x48Dr3PDxGjcO1XmdDcNQfn6+YmJiZLefeeRKnbe02O12xcXF1fVlzyooKIg3TgPHa9w48Do3fLzGjQOvc8PHa9w4VPV1PlsLyzEMxAcAAADg1ggtAAAAANxaow4t3t7eevbZZ+Xt7W11KaglvMaNA69zw8dr3DjwOjd8vMaNQ228znU+EB8AAAAAqqNRt7QAAAAAcH+EFgAAAABujdACAAAAwK0RWgAAAAC4NUILAAAAALfWaEPLW2+9pcTERPn4+Kh3795atmyZ1SWhBk2YMEE2m63S1qZNG6vLwgVasGCBRo0apZiYGNlsNn399deVjhuGoWeeeUbR0dHy9fXVkCFDtG3bNmuKxXk512s8duzYU97bI0aMsKZYnJdJkyapZ8+eCgwMVJMmTXTVVVcpJSWl0jlFRUUaN26cwsPDFRAQoGuvvVb79++3qGJUV1Ve44svvviU9/Ldd99tUcU4H2+//bY6depUsep9nz59NHPmzIrjNf0+bpSh5bPPPtP48eP17LPPatWqVercubOGDx+urKwsq0tDDWrfvr0yMzMrtkWLFlldEi5QYWGhOnfurLfeeuu0x1966SW98cYbeuedd
/Tbb7/J399fw4cPV1FRUR1XivN1rtdYkkaMGFHpvf3pp5/WYYW4UPPnz9e4ceO0dOlSzZ49W6WlpRo2bJgKCwsrznn44Yf17bff6vPPP9f8+fO1d+9eXXPNNRZWjeqoymssSXfddVel9/JLL71kUcU4H3FxcXrxxRe1cuVKrVixQpdeeqlGjx6tjRs3SqqF97HRCPXq1csYN25cxWOn02nExMQYkyZNsrAq1KRnn33W6Ny5s9VloBZJMqZPn17x2OVyGVFRUcbLL79csS8nJ8fw9vY2Pv30UwsqxIU6+TU2DMO49dZbjdGjR1tSD2pHVlaWIcmYP3++YRjm+9bT09P4/PPPK87ZvHmzIclYsmSJVWXiApz8GhuGYQwaNMh48MEHrSsKtSI0NNR47733auV93OhaWkpKSrRy5UoNGTKkYp/dbteQIUO0ZMkSCytDTdu2bZtiYmKUlJSkMWPGKD093eqSUIt27typffv2VXpvBwcHq3fv3ry3G5h58+apSZMmat26te655x5lZ2dbXRIuQG5uriQpLCxMkrRy5UqVlpZWei+3adNGCQkJvJfrqZNf42M+/vhjRUREqEOHDnryySd15MgRK8pDDXA6nZo2bZoKCwvVp0+fWnkfe9RUsfXFwYMH5XQ61bRp00r7mzZtqi1btlhUFWpa79699cEHH6h169bKzMzUxIkTNWDAAG3YsEGBgYFWl4dasG/fPkk67Xv72DHUfyNGjNA111yj5s2bKzU1VX/+8581cuRILVmyRA6Hw+ryUE0ul0sPPfSQ+vXrpw4dOkgy38teXl4KCQmpdC7v5frpdK+xJN18881q1qyZYmJitG7dOj3++ONKSUnRV199ZWG1qK7169erT58+KioqUkBAgKZPn6527dppzZo1Nf4+bnShBY3DyJEjK+536tRJvXv3VrNmzfS///1Pd9xxh4WVAbgQv/vd7yrud+zYUZ06dVKLFi00b948DR482MLKcD7GjRunDRs2MOawATvTa/zHP/6x4n7Hjh0VHR2twYMHKzU1VS1atKjrMnGeWrdurTVr1ig3N1dffPGFbr31Vs2fP79WrtXouodFRETI4XCcMnvB/v37FRUVZVFVqG0hISFq1aqVtm/fbnUpqCXH3r+8txuXpKQkRURE8N6uh+677z599913+uWXXxQXF1exPyoqSiUlJcrJyal0Pu/l+udMr/Hp9O7dW5J4L9czXl5eatmypbp3765Jkyapc+fOev3112vlfdzoQouXl5e6d++uuXPnVuxzuVyaO3eu+vTpY2FlqE0FBQVKTU1VdHS01aWgljRv3lxRUVGV3tt5eXn67bffeG83YLt371Z2djbv7XrEMAzdd999mj59un7++Wc1b9680vHu3bvL09Oz0ns5JSVF6enpvJfriXO9xqezZs0aSeK9XM+5XC4VFxfXyvu4UXYPGz9+vG699Vb16NFDvXr10pQpU1RYWKjbbrvN6tJQQx555BGNGjVKzZo10969e/Xss8/K4XDopptusro0XICCgoJK38Lt3LlTa9asUVhYmBISEvTQQw/pb3/7m5KTk9W8eXM9/fTTiomJ0VVXXWVd0aiWs73GYWFhmjhxoq699lpFRUUpNTVVjz32mFq2bKnhw4dbWDWqY9y4cfrkk080Y8YMBQYGVvRvDw4Olq+vr4KDg3XHHXdo/PjxCgsLU1BQkO6//3716dNHF110kcXVoyrO9Rqnpqbqk08+0WWXXabw8HCtW7dODz/8sAYOHKhOnTpZXD2q6sknn9TIkSOVkJCg/Px8ffLJJ5o3b55mzZpVO+/jmpngrP75xz/+YSQkJBheXl5Gr169jKVLl1pdEmrQjTfeaERHRxteXl5GbGysceONNxrbt2+3uixcoF9++cWQdMp26623GoZhTnv89NNPG02bNjW8vb2NwYMHGykpKdYWjWo522t85MgRY9iwYUZkZKTh6elpNGvWzLjrrruMffv2WV02quF0r68kY
+rUqRXnHD161Lj33nuN0NBQw8/Pz7j66quNzMxM64pGtZzrNU5PTzcGDhxohIWFGd7e3kbLli2NRx991MjNzbW2cFTL7bffbjRr1szw8vIyIiMjjcGDBxs//fRTxfGafh/bDMMwzjdhAQAAAEBta3RjWgAAAADUL4QWAAAAAG6N0AIAAADArRFaAAAAALg1QgsAAAAAt0ZoAQAAAODWCC0AAAAA3BqhBQAAAIBbI7QAAAAAcGuEFgAAAABujdACAAAAwK39f+Xa7Yn/YBMzAAAAAElFTkSuQmCC", 761 | "text/plain": [ 762 | "
" 763 | ] 764 | }, 765 | "metadata": {} 766 | } 767 | ], 768 | "metadata": {} 769 | }, 770 | { 771 | "cell_type": "markdown", 772 | "source": [ 773 | "## Translation" 774 | ], 775 | "metadata": {} 776 | }, 777 | { 778 | "cell_type": "code", 779 | "execution_count": 19, 780 | "source": [ 781 | "def translate_sentence(model,sentence,srcField,targetField,srcTokenizer):\r\n", 782 | " model.eval()\r\n", 783 | " processed_sentence = srcField.process([srcTokenizer(sentence)]).to(device)\r\n", 784 | " trg = [\"بداية\"]\r\n", 785 | "\r\n", 786 | " for _ in range(60):\r\n", 787 | " trg_indecies = [targetField.vocab.stoi[word] for word in trg]\r\n", 788 | " trg_tensor = torch.LongTensor(trg_indecies).unsqueeze(1).to(device)\r\n", 789 | " outputs = model(processed_sentence,trg_tensor)\r\n", 790 | " \r\n", 791 | " if targetField.vocab.itos[outputs.argmax(2)[-1:].item()] == \"\":\r\n", 792 | " continue \r\n", 793 | " trg.append(targetField.vocab.itos[outputs.argmax(2)[-1:].item()])\r\n", 794 | " if targetField.vocab.itos[outputs.argmax(2)[-1:].item()] == \"نهاية\":\r\n", 795 | " break\r\n", 796 | " return \" \".join([word for word in trg if word != \"\"][1:-1])\r\n" 797 | ], 798 | "outputs": [], 799 | "metadata": {} 800 | }, 801 | { 802 | "cell_type": "code", 803 | "execution_count": 20, 804 | "source": [ 805 | "translate_sentence(model,\"I'm ready\" ,SRC,TRG,engTokenizer)" 806 | ], 807 | "outputs": [ 808 | { 809 | "output_type": "execute_result", 810 | "data": { 811 | "text/plain": [ 812 | "'أنا مستعد'" 813 | ] 814 | }, 815 | "metadata": {}, 816 | "execution_count": 20 817 | } 818 | ], 819 | "metadata": {} 820 | }, 821 | { 822 | "cell_type": "code", 823 | "execution_count": 23, 824 | "source": [ 825 | "translate_sentence(model,\"i'm lucky\" ,SRC,TRG,engTokenizer)" 826 | ], 827 | "outputs": [ 828 | { 829 | "output_type": "execute_result", 830 | "data": { 831 | "text/plain": [ 832 | "'انا محظوظ'" 833 | ] 834 | }, 835 | "metadata": {}, 836 | "execution_count": 23 837 | } 838 | ], 
839 | "metadata": {} 840 | }, 841 | { 842 | "cell_type": "code", 843 | "execution_count": 24, 844 | "source": [ 845 | "translate_sentence(model,\"I'm sad\" ,SRC,TRG,engTokenizer)" 846 | ], 847 | "outputs": [ 848 | { 849 | "output_type": "execute_result", 850 | "data": { 851 | "text/plain": [ 852 | "'أنا حزين'" 853 | ] 854 | }, 855 | "metadata": {}, 856 | "execution_count": 24 857 | } 858 | ], 859 | "metadata": {} 860 | } 861 | ], 862 | "metadata": { 863 | "interpreter": { 864 | "hash": "4bb0fe8ced3cf0716ac3718fe834e829af40e8ba0fef1c4cadecb390da29a017" 865 | }, 866 | "kernelspec": { 867 | "name": "python3", 868 | "display_name": "Python 3.7.11 64-bit ('torch': conda)" 869 | }, 870 | "language_info": { 871 | "codemirror_mode": { 872 | "name": "ipython", 873 | "version": 3 874 | }, 875 | "file_extension": ".py", 876 | "mimetype": "text/x-python", 877 | "name": "python", 878 | "nbconvert_exporter": "python", 879 | "pygments_lexer": "ipython3", 880 | "version": "3.7.11" 881 | } 882 | }, 883 | "nbformat": 4, 884 | "nbformat_minor": 2 885 | } --------------------------------------------------------------------------------