├── .gitignore ├── biochem_transformer ├── pfam │ ├── data │ │ └── pfam_data.csv │ ├── pfam_example.py │ └── pfam_model.py └── pfam_rnn │ ├── data │ └── pfam_data.csv │ ├── pfam_example.py │ └── pfam_model.py ├── diffeq ├── img │ └── simple_derivative.png └── simple_derivative.py ├── experimental ├── custom_transformer.py ├── embedding_extraction.py ├── lora.py ├── pfam_autoenc_example.py ├── pfam_autoenc_model.py ├── pfam_autoencoder.py ├── protbert_tokenize.py └── smiles.py ├── multihead ├── multihead.ipynb └── multihead_attention.py ├── readme.md ├── timeseries_attention ├── complex_trig_example.py ├── forecasting_model.py ├── img │ ├── complex_trig_example.png │ ├── sine_example.png │ └── sunspots_example.png ├── sine_example.py ├── sunspots.csv └── sunspots_example.py ├── timeseries_transformer ├── forecasting_model.py ├── img │ ├── sine_example.png │ ├── sine_extended.png │ └── sunspots_example.png ├── sine_example.py ├── sunspots.csv └── sunspots_example.py └── trl └── basic_rl.ipynb /.gitignore: -------------------------------------------------------------------------------- 1 | *__pycache__ 2 | .domino* 3 | .vscode* 4 | vscode_settings 5 | results 6 | *.pkl 7 | *.h5 -------------------------------------------------------------------------------- /biochem_transformer/pfam/pfam_example.py: -------------------------------------------------------------------------------- 1 | # Imports 2 | import torch 3 | import numpy as np 4 | import pandas as pd 5 | import torch.nn as nn 6 | from pfam_model import PFamModel 7 | from torch.optim.lr_scheduler import ExponentialLR 8 | from sklearn.model_selection import train_test_split 9 | from imblearn.over_sampling import RandomOverSampler 10 | from torch.utils.data import TensorDataset, DataLoader 11 | 12 | 13 | # Start Timer 14 | import time 15 | start = time.time() 16 | 17 | 18 | # Create a Dictionary of Amino Acids 19 | amino_dict = { 20 | "[PAD]": 0, "[UNK]": 1, "[CLS]": 2, 21 | "[SEP]": 3, "[MASK]": 4, 22 | "A":6, "B":27, "C":23, "D":14, "E":9, "F":19, 23 | "G":7, "H":22, "I":11, "J":1, "K":12, "L":5, 24 | "M":21, "N":17, "O":29, "P":16, "Q":18, "R":13, 25 | "S":10, "T":15, "U":26, "V":8, "W":24, "X":25, 26 | "Y":20, "Z":28 27 | } 28 | 29 | 30 | # A naive padding / truncate function 31 | def process_seqs(X, max_len): 32 | for idx, xx in enumerate(X): 33 | xx = [2] + xx 34 | if len(xx) >= max_len: 35 | xx = xx[:max_len] 36 | xx[max_len-1] = 3 37 | X[idx] = xx 38 | else: 39 | xx = xx + (max_len-len(xx))*[0] 40 | xx[max_len-1] = 3 41 | X[idx] = xx 42 | return X 43 | 44 | 45 | # One-Hot Encoding For Labels 46 | def to_onehot(Y, maxlen): 47 | ohvecs = [] 48 | for y in Y: 49 | onehot = np.zeros(maxlen) 50 | onehot[y] = 1 51 | ohvecs.append(onehot.tolist()) 52 | return ohvecs 53 | 54 | 55 | # Set Model Parameters 56 | LEARNING_RATE = 1E-6 # Learning Rate of Model 57 | LR_GAMMA = 1.000 # Learning Rate Decay Of Model 58 | MIN_LEN = 0 # Minimum Sequence Length 59 | MAX_LEN = 512 # Maximum Sequence Length 60 | EMBED_SIZE = 64 # Embedding Size 61 | NHEAD = 8 # Number of Multi-Attention Heads 62 | DIM_FF = 512 # Feed Forward Layer of Transformer 63 | DROPOUT = 0.1 # Transformer Dropout 64 | EPOCHS = 100 # Number of Epochs 65 | BATCH_SIZE = 1 # Batch Size 66 | NUMCLASSES = 30 # Number of Classes to Attempt (out of 50) 67 | EARLY_STOPPING = 200 # Number of Epochs before Early Stopping is invoked 68 | FROM_CHECKPOINT = False # Load Model from Checkpoint 69 | DEVICE="cuda:0" # Device for Primary Model 70 | 71 | 72 | # Process Train Data 73 | data = 
pd.read_csv("data/pfam_data.csv") 74 | data = data[data["Y"]MIN_LEN] 82 | data = data[data["length"] EARLY_STOPPING: 144 | break 145 | 146 | 147 | # Evaluate the model 148 | model = PFamModel(MAX_LEN, 149 | embed_size=EMBED_SIZE, 150 | nhead=NHEAD, 151 | dim_feedforward=DIM_FF, 152 | dropout=DROPOUT, 153 | num_classes=NUMCLASSES, 154 | device=DEVICE) 155 | model.load_state_dict(torch.load(f"./models/pfam_model-{NUMCLASSES}.h5")) 156 | model.to(DEVICE) 157 | model.eval() 158 | predictions = [] 159 | for xx, yy in test_dataloader: 160 | out = model(xx) 161 | true_out = out.detach().cpu().numpy() 162 | true_out = [np.argmax(y) for y in true_out] 163 | predictions = predictions + true_out 164 | 165 | 166 | # Output Results 167 | results = pd.DataFrame({"Y": Y_test, "Y_hat": predictions}) 168 | results["Correct"] = results.apply(lambda x: 1 if round(x["Y_hat"])==x["Y"] else 0, axis=1) 169 | results.to_csv(f"./results/pfam_results-{NUMCLASSES}class-{EMBED_SIZE}embed-{EPOCHS}epoch.csv", index=False) 170 | pd.DataFrame(status).to_csv(f"./results/pfam_losses-{NUMCLASSES}class-{EMBED_SIZE}embed-{EPOCHS}epoch.csv", index=False) 171 | 172 | 173 | # End Timer 174 | end = time.time() 175 | print(f"Total Time: {(end - start)/3_600}") 176 | pd.DataFrame({"Total Time": [(end - start)/3_600]}).to_csv(f"./results/pfam_timer-{NUMCLASSES}class-{EMBED_SIZE}embed-{EPOCHS}epoch.csv", index=False) 177 | -------------------------------------------------------------------------------- /biochem_transformer/pfam/pfam_model.py: -------------------------------------------------------------------------------- 1 | # Imports 2 | import torch, math 3 | import numpy as np 4 | from torch import nn, Tensor 5 | import torch.nn.functional as F 6 | from torch.nn.modules.transformer import TransformerEncoderLayer 7 | 8 | # Positional Encoding - https://pytorch.org/tutorials/beginner/transformer_tutorial.html 9 | class PositionalEncoding(nn.Module): 10 | def __init__(self, d_model: int, dropout: float = 0.1, max_len: int = 5000): 11 | super().__init__() 12 | self.dropout = nn.Dropout(p=dropout) 13 | position = torch.arange(max_len).unsqueeze(1) 14 | div_term = torch.exp(torch.arange(0, d_model, 2) * (-math.log(10000.0) / d_model)) 15 | pe = torch.zeros(max_len, 1, d_model) 16 | pe[:, 0, 0::2] = torch.sin(position * div_term) 17 | pe[:, 0, 1::2] = torch.cos(position * div_term) 18 | self.register_buffer('pe', pe) 19 | def forward(self, x: Tensor) -> Tensor: 20 | x = x + self.pe[:x.size(0)] 21 | return self.dropout(x) 22 | 23 | # A forcasting model 24 | class PFamModel(torch.nn.Module): 25 | def __init__(self, 26 | max_len=200, 27 | embed_size = 1024, 28 | nhead = 4, 29 | dim_feedforward = 2048, 30 | dropout = 0.1, 31 | num_classes=0, 32 | device = "cuda"): 33 | super(PFamModel, self).__init__() 34 | self.device = device 35 | self.max_len = max_len 36 | self.embed_size = embed_size 37 | self.transformer_encoder = TransformerEncoderLayer( 38 | d_model = self.embed_size, 39 | nhead = nhead, 40 | dim_feedforward = dim_feedforward, 41 | dropout = dropout, 42 | batch_first = True 43 | ) 44 | self.input_embedding = nn.Embedding(30, embed_size, padding_idx=0) 45 | self.position_encoder = PositionalEncoding(d_model=embed_size, 46 | dropout=dropout, 47 | max_len=max_len) 48 | self.conv_out = nn.Conv1d(max_len, 1, kernel_size=3, padding=1) 49 | self.linear = nn.Linear(embed_size, embed_size) 50 | self.linear_softmax = nn.Linear(embed_size, num_classes) 51 | self.softmax = nn.Softmax() 52 | def forward(self, x): 53 | src_mask = 
self._generate_square_subsequent_mask() 54 | src_mask.to(self.device) 55 | x = self.input_embedding(x) 56 | x = self.position_encoder(x) 57 | x = self.transformer_encoder(x, src_mask=src_mask) 58 | x = self.conv_out(x) 59 | x = self.linear(x) 60 | x = x.reshape(-1, self.embed_size) 61 | x = self.linear_softmax(x) 62 | return self.softmax(x) 63 | 64 | def _generate_square_subsequent_mask(self): 65 | return torch.triu( 66 | torch.full((self.max_len, self.max_len), float('-inf'), dtype=torch.float32, device=self.device), 67 | diagonal=1, 68 | ) 69 | -------------------------------------------------------------------------------- /biochem_transformer/pfam_rnn/pfam_example.py: -------------------------------------------------------------------------------- 1 | # Imports 2 | import torch 3 | import numpy as np 4 | import pandas as pd 5 | import torch.nn as nn 6 | from pfam_model import PFamModel 7 | from torch.optim.lr_scheduler import ExponentialLR 8 | from sklearn.model_selection import train_test_split 9 | from imblearn.over_sampling import RandomOverSampler 10 | from torch.utils.data import TensorDataset, DataLoader 11 | 12 | 13 | # Start Timer 14 | import time 15 | start = time.time() 16 | 17 | 18 | # Create a Dictionary of Amino Acids 19 | amino_dict = { 20 | "[PAD]": 0, "[UNK]": 1, "[CLS]": 2, 21 | "[SEP]": 3, "[MASK]": 4, 22 | "A":6, "B":27, "C":23, "D":14, "E":9, "F":19, 23 | "G":7, "H":22, "I":11, "J":1, "K":12, "L":5, 24 | "M":21, "N":17, "O":29, "P":16, "Q":18, "R":13, 25 | "S":10, "T":15, "U":26, "V":8, "W":24, "X":25, 26 | "Y":20, "Z":28 27 | } 28 | 29 | 30 | # A naive padding / truncate function 31 | def process_seqs(X, max_len): 32 | for idx, xx in enumerate(X): 33 | xx = [2] + xx 34 | if len(xx) >= max_len: 35 | xx = xx[:max_len] 36 | xx[max_len-1] = 3 37 | X[idx] = xx 38 | else: 39 | xx = xx + (max_len-len(xx))*[0] 40 | xx[max_len-1] = 3 41 | X[idx] = xx 42 | return X 43 | 44 | 45 | # One-Hot Encoding For Labels 46 | def to_onehot(Y, maxlen): 47 | ohvecs = [] 48 | for y in Y: 49 | onehot = np.zeros(maxlen) 50 | onehot[y] = 1 51 | ohvecs.append(onehot.tolist()) 52 | return ohvecs 53 | 54 | 55 | # Set Model Parameters 56 | LEARNING_RATE = 1E-6 # Learning Rate of Model 57 | LR_GAMMA = 1.000 # Learning Rate Decay Of Model 58 | MIN_LEN = 0 # Minimum Sequence Length 59 | MAX_LEN = 512 # Maximum Sequence Length 60 | EMBED_SIZE = 64 # Embedding Size 61 | DROPOUT = 0.1 # Transformer Dropout 62 | EPOCHS = 100 # Number of Epochs 63 | BATCH_SIZE = 1 # Batch Size 64 | NUMCLASSES = 30 # Number of Classes to Attempt (out of 50) 65 | EARLY_STOPPING = 200 # Number of Epochs before Early Stopping is invoked 66 | FROM_CHECKPOINT = False # Load Model from Checkpoint 67 | DEVICE="cuda:0" # Device for Primary Model 68 | 69 | 70 | # Process Train Data 71 | data = pd.read_csv("data/pfam_data.csv") 72 | data = data[data["Y"]MIN_LEN] 80 | data = data[data["length"] EARLY_STOPPING: 140 | break 141 | 142 | 143 | # Evaluate the model 144 | model = PFamModel(MAX_LEN, 145 | embed_size=EMBED_SIZE, 146 | dropout=DROPOUT, 147 | num_classes=NUMCLASSES, 148 | device=DEVICE) 149 | model.load_state_dict(torch.load(f"./models/pfam_model-{NUMCLASSES}.h5")) 150 | model.to(DEVICE) 151 | model.eval() 152 | predictions = [] 153 | for xx, yy in test_dataloader: 154 | out = model(xx) 155 | true_out = out.detach().cpu().numpy() 156 | true_out = [np.argmax(y) for y in true_out] 157 | predictions = predictions + true_out 158 | 159 | 160 | # Output Results 161 | results = pd.DataFrame({"Y": Y_test, "Y_hat": predictions}) 162 | 
results["Correct"] = results.apply(lambda x: 1 if round(x["Y_hat"])==x["Y"] else 0, axis=1) 163 | results.to_csv(f"./results/pfam_results-{NUMCLASSES}class-{EMBED_SIZE}embed-{EPOCHS}epoch.csv", index=False) 164 | pd.DataFrame(status).to_csv(f"./results/pfam_losses-{NUMCLASSES}class-{EMBED_SIZE}embed-{EPOCHS}epoch.csv", index=False) 165 | 166 | 167 | # End Timer 168 | end = time.time() 169 | print(f"Total Time: {(end - start)/3_600}") 170 | pd.DataFrame({"Total Time": [(end - start)/3_600]}).to_csv(f"./results/pfam_timer-{NUMCLASSES}class-{EMBED_SIZE}embed-{EPOCHS}epoch.csv", index=False) 171 | -------------------------------------------------------------------------------- /biochem_transformer/pfam_rnn/pfam_model.py: -------------------------------------------------------------------------------- 1 | # Imports 2 | import torch, math 3 | import numpy as np 4 | from torch import nn, Tensor 5 | import torch.nn.functional as F 6 | from torch.nn.modules.transformer import TransformerEncoderLayer 7 | 8 | # Positional Encoding - https://pytorch.org/tutorials/beginner/transformer_tutorial.html 9 | class PositionalEncoding(nn.Module): 10 | def __init__(self, d_model: int, dropout: float = 0.1, max_len: int = 5000): 11 | super().__init__() 12 | self.dropout = nn.Dropout(p=dropout) 13 | position = torch.arange(max_len).unsqueeze(1) 14 | div_term = torch.exp(torch.arange(0, d_model, 2) * (-math.log(10000.0) / d_model)) 15 | pe = torch.zeros(max_len, 1, d_model) 16 | pe[:, 0, 0::2] = torch.sin(position * div_term) 17 | pe[:, 0, 1::2] = torch.cos(position * div_term) 18 | self.register_buffer('pe', pe) 19 | def forward(self, x: Tensor) -> Tensor: 20 | x = x + self.pe[:x.size(0)] 21 | return self.dropout(x) 22 | 23 | # A forcasting model 24 | class PFamModel(torch.nn.Module): 25 | def __init__(self, 26 | max_len=200, 27 | embed_size = 1024, 28 | dropout = 0.1, 29 | num_classes=0, 30 | device = "cuda"): 31 | super(PFamModel, self).__init__() 32 | self.device = device 33 | self.max_len = max_len 34 | self.embed_size = embed_size 35 | self.input_embedding = nn.Embedding(30, embed_size, padding_idx=0) 36 | self.position_encoder = PositionalEncoding(d_model=embed_size, 37 | dropout=dropout, 38 | max_len=max_len) 39 | self.lstm = torch.nn.LSTM(embed_size, embed_size, batch_first=True) 40 | self.conv_out = nn.Conv1d(max_len, 1, kernel_size=3, padding=1) 41 | self.linear = nn.Linear(embed_size, embed_size) 42 | self.linear_softmax = nn.Linear(embed_size, num_classes) 43 | self.softmax = nn.Softmax() 44 | def forward(self, x): 45 | x = self.input_embedding(x) 46 | x = self.position_encoder(x) 47 | x = self.lstm(x)[0] 48 | x = self.conv_out(x) 49 | x = self.linear(x) 50 | x = x.reshape(-1, self.embed_size) 51 | x = self.linear_softmax(x) 52 | return self.softmax(x) 53 | -------------------------------------------------------------------------------- /diffeq/img/simple_derivative.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BrandenKeck/pytorch_fun/8d26c3be4f358083f2aa8487ec656e4cbbeca218/diffeq/img/simple_derivative.png -------------------------------------------------------------------------------- /diffeq/simple_derivative.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import matplotlib.pyplot as plt 3 | 4 | x = torch.linspace(0, 10, 1000, requires_grad = True) 5 | y = torch.sin(x)-torch.cos(x)**2 6 | Y = torch.sum(y) 7 | Y.backward() 8 | 9 | fig = plt.figure(figsize=(6, 6)) 10 | 
plt.plot(x.detach().numpy(), y.detach().numpy(), label="f(x)") 11 | plt.plot(x.detach().numpy(), x.grad.detach().numpy(), label="f'(x)") 12 | plt.legend() 13 | fig.savefig("./img/simple_derivative.png") -------------------------------------------------------------------------------- /experimental/custom_transformer.py: -------------------------------------------------------------------------------- 1 | import torch, math 2 | import torch.nn as nn 3 | from torch import Tensor 4 | import torch.nn.functional as F 5 | from torch.nn.modules.normalization import LayerNorm 6 | from multihead_attention import MultiHeadAttention 7 | 8 | # Positional Encoding - https://pytorch.org/tutorials/beginner/transformer_tutorial.html 9 | class PositionalEncoding(nn.Module): 10 | def __init__(self, d_model: int, dropout: float = 0.1, max_len: int = 5000): 11 | super().__init__() 12 | self.dropout = nn.Dropout(p=dropout) 13 | position = torch.arange(max_len).unsqueeze(1) 14 | div_term = torch.exp(torch.arange(0, d_model, 2) * (-math.log(10000.0) / d_model)) 15 | pe = torch.zeros(max_len, 1, d_model) 16 | pe[:, 0, 0::2] = torch.sin(position * div_term) 17 | pe[:, 0, 1::2] = torch.cos(position * div_term) 18 | self.register_buffer('pe', pe) 19 | def forward(self, x: Tensor) -> Tensor: 20 | x = x + self.pe[:x.size(0)] 21 | return self.dropout(x) 22 | 23 | 24 | class TransformerCustom(nn.Module): 25 | 26 | def __init__(self, 27 | d_model=16, 28 | nhead=2, 29 | max_len=512, 30 | dim_feedforward=128, 31 | dropout=0, 32 | layer_norm_eps=1E-5, 33 | bias=True, 34 | activation=F.relu, 35 | device="cpu"): 36 | super(TransformerCustom, self).__init__() 37 | self.max_len = max_len 38 | self.device=device 39 | self.activation=activation 40 | self.self_attn = MultiHeadAttention(d_model, nhead, bias, activation) 41 | 42 | # Input Embedding Component 43 | self.input_embedding = nn.Linear(1, d_model) 44 | 45 | # Positional Encoding Component 46 | self.position_encoder = PositionalEncoding(d_model=d_model, dropout=dropout, max_len=max_len) 47 | 48 | # Implementation of Feedforward model 49 | self.linear1 = nn.Linear(d_model, dim_feedforward, bias=bias) 50 | self.dropout = nn.Dropout(dropout) 51 | self.linear2 = nn.Linear(dim_feedforward, d_model, bias=bias) 52 | 53 | # Normalization and Dropout Components 54 | self.norm1 = LayerNorm(d_model, eps=layer_norm_eps, bias=bias) 55 | self.norm2 = LayerNorm(d_model, eps=layer_norm_eps, bias=bias) 56 | self.dropout1 = nn.Dropout(dropout) 57 | self.dropout2 = nn.Dropout(dropout) 58 | 59 | def forward(self, x): 60 | src_mask = self._generate_square_subsequent_mask() 61 | src_mask.to(self.device) 62 | x = self.input_embedding(x) 63 | x = self.position_encoder(x) 64 | x = self.norm1(x + self._sa_block(x, src_mask)) 65 | x = self.norm2(x + self._ff_block(x)) 66 | return x 67 | 68 | # self-attention block 69 | def _sa_block(self, x, mask): 70 | x = self.self_attn(x, x, x, mask=mask)#[0] 71 | return self.dropout1(x) 72 | 73 | # feed forward block 74 | def _ff_block(self, x): 75 | x = self.linear2(self.dropout(self.activation(self.linear1(x)))) 76 | return self.dropout2(x) 77 | 78 | # Masking Function 79 | def _generate_square_subsequent_mask(self): 80 | return torch.triu( 81 | torch.full((self.max_len, self.max_len), float('-inf'), dtype=torch.float32, device=self.device), 82 | diagonal=1, 83 | ) 84 | -------------------------------------------------------------------------------- /experimental/embedding_extraction.py: 
-------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | from transformers import BertModel 4 | 5 | class ProtEmbedding(nn.Module): 6 | def __init__(self, embed=nn.Embedding(30, 1024, padding_idx=0)): 7 | super(ProtEmbedding, self).__init__() 8 | self.embed = embed 9 | def forward(self, x): 10 | return self.embed(x) 11 | 12 | input_embedding = BertModel.from_pretrained("Rostlab/prot_bert").embeddings.word_embeddings 13 | model = ProtEmbedding(input_embedding) 14 | torch.save(model.state_dict(), "protembed.h5") 15 | -------------------------------------------------------------------------------- /experimental/lora.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | 4 | ### 5 | # https://www.linkedin.com/pulse/more-efficient-finetuning-implementing-lora-from-scratch-george-davis 6 | ### 7 | 8 | 9 | # let's start building out a LoRALinear layer 10 | class LoRALinear(nn.Module): 11 | """ 12 | This is a low-rank adapted linear layer that can be used to replace a standard linear layer. 13 | 14 | 15 | Args: 16 | module: The linear layer module to adapt. 17 | rank: The rank of the approximation. 18 | alpha: The alpha parameter. 19 | """ 20 | 21 | def __init__( 22 | self, 23 | module: nn.Module, 24 | # in_dim: int, 25 | # out_dim: int, 26 | rank: int = 4, 27 | alpha: float = 4.0 28 | ): 29 | # ensure the module is a linear layer 30 | assert isinstance(module, nn.Linear), "Module must be a linear layer." 31 | 32 | super().__init__() 33 | self.rank = rank # rank of the approximation 34 | self.alpha = alpha # alpha parameter 35 | self.scaling = self.alpha / self.rank # scaling factor 36 | self.in_dim = module.in_features # number of input features 37 | self.out_dim = module.out_features # number of output features 38 | 39 | # make sure that rank is at least 1 40 | assert self.rank >= 1, "Rank must be at least 1." 41 | 42 | # recreate the linear layer and freeze it 43 | # note: we will copy over the pretrained weights after initializing 44 | self.pretrained = nn.Linear(self.in_dim, self.out_dim, bias=True) 45 | self.pretrained.weight = nn.Parameter(module.weight.detach().clone()) 46 | self.pretrained.bias = nn.Parameter(module.bias.detach().clone()) 47 | self.pretrained.weight.requires_grad = False # freeze the weights 48 | self.pretrained.bias.requires_grad = False # freeze the bias 49 | 50 | # create the A and initialize with Kaiming 51 | self.A = nn.Linear(self.in_dim, rank, bias=False) 52 | nn.init.kaiming_uniform_(self.A.weight, a=math.sqrt(5)) 53 | 54 | # create B and initialize with zeros 55 | self.B = nn.Linear(rank, self.out_dim, bias=False) 56 | nn.init.zeros_(self.B.weight) 57 | 58 | # ensure that the weights in A and B are trainable 59 | self.A.weight.requires_grad = True 60 | self.B.weight.requires_grad = True 61 | 62 | def forward(self, x: torch.Tensor): 63 | """ 64 | Perform the forward pass of the layer. 65 | 66 | Args: 67 | x: The input tensor. 
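        Returns:
            The adapted output. The forward pass below computes the usual
            low-rank update
                y = pretrained(x) + (alpha / rank) * B(A(x)),
            i.e. the frozen pretrained projection plus a scaled low-rank
            correction (B is zero-initialised, so training starts from the
            pretrained behaviour).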
68 | """ 69 | pretrained_out = self.pretrained(x) # get the pretrained weights 70 | lora_out = self.A(x) # 71 | lora_out = self.B(lora_out) 72 | lora_out = lora_out * self.scaling 73 | return pretrained_out + lora_out 74 | -------------------------------------------------------------------------------- /experimental/pfam_autoenc_example.py: -------------------------------------------------------------------------------- 1 | # Imports 2 | import torch 3 | import numpy as np 4 | import pandas as pd 5 | import torch.nn as nn 6 | from pfam_model import PFamModel 7 | from pfam_autoencoder import PFamAutoencoder 8 | from torch.optim.lr_scheduler import ExponentialLR 9 | from sklearn.model_selection import train_test_split 10 | from imblearn.over_sampling import RandomOverSampler 11 | from torch.utils.data import TensorDataset, DataLoader 12 | 13 | 14 | # Create a Dictionary of Amino Acids 15 | amino_dict = { 16 | "[PAD]": 0, "[UNK]": 1, "[CLS]": 2, 17 | "[SEP]": 3, "[MASK]": 4, 18 | "A":6, "B":27, "C":23, "D":14, "E":9, "F":19, 19 | "G":7, "H":22, "I":11, "J":1, "K":12, "L":5, 20 | "M":21, "N":17, "O":29, "P":16, "Q":18, "R":13, 21 | "S":10, "T":15, "U":26, "V":8, "W":24, "X":25, 22 | "Y":20, "Z":28 23 | } 24 | 25 | 26 | # A naive padding / truncate function 27 | def process_seqs(X, max_len): 28 | for idx, xx in enumerate(X): 29 | xx = [2] + xx 30 | if len(xx) >= max_len: 31 | xx = xx[:max_len] 32 | xx[max_len-1] = 3 33 | X[idx] = xx 34 | else: 35 | xx = xx + (max_len-len(xx))*[0] 36 | xx[max_len-1] = 3 37 | X[idx] = xx 38 | return X 39 | 40 | 41 | # One-Hot Encoding For Labels 42 | def to_onehot(Y, maxlen): 43 | ohvecs = [] 44 | for y in Y: 45 | onehot = np.zeros(maxlen) 46 | onehot[y] = 1 47 | ohvecs.append(onehot.tolist()) 48 | return ohvecs 49 | 50 | 51 | # Set Model Parameters 52 | LEARNING_RATE = 1E-6 # Learning Rate of Model 53 | LR_GAMMA = 1.000 # Learning Rate Decay Of Model 54 | MIN_LEN = 0 # Minimum Sequence Length 55 | MAX_LEN = 512 # Maximum Sequence Length 56 | EMBED_SIZE = 256 # Embedding Size 57 | NHEAD = 8 # Number of Multi-Attention Heads 58 | DIM_FF = 512 # Feed Forward Layer of Transformer 59 | DROPOUT = 0.1 # Transformer Dropout 60 | EPOCHS = 300 # Number of Epochs 61 | BATCH_SIZE = 1 # Batch Size 62 | NUMCLASSES = 100 # Number of Classes to Attempt (out of 50) 63 | EARLY_STOPPING = 200 # Number of Epochs before Early Stopping is invoked 64 | FROM_CHECKPOINT = False # Load Model from Checkpoint 65 | USE_SOFTMAX = NUMCLASSES # Use softmax instead of Autoencoder 66 | DEVICE="cuda:0" # Device for Primary Model 67 | 68 | 69 | # Process Train Data 70 | data = pd.read_csv("data/pfam_data.csv") 71 | data = data[data["Y"]MIN_LEN] 79 | data = data[data["length"] EARLY_STOPPING: 183 | break 184 | 185 | 186 | # Evaluate the model 187 | model = PFamModel(MAX_LEN, 188 | embed_size=EMBED_SIZE, 189 | nhead=NHEAD, 190 | dim_feedforward=DIM_FF, 191 | dropout=DROPOUT, 192 | use_softmax=USE_SOFTMAX, 193 | device=DEVICE) 194 | model.load_state_dict(torch.load(f"./models/pfam_model-{NUMCLASSES}.h5")) 195 | model.to(DEVICE) 196 | model.eval() 197 | predictions = [] 198 | for xx, yy in test_dataloader: 199 | out = model(xx) 200 | if USE_SOFTMAX==0: 201 | true_out = autoencoder.decoder(out.detach().to(device=AUTODEVICE)) 202 | predictions = predictions + true_out.flatten().detach().cpu().tolist() 203 | else: 204 | true_out = out.detach().cpu().numpy() 205 | true_out = [np.argmax(y) for y in true_out] 206 | predictions = predictions + true_out 207 | 208 | 209 | # Output Results 210 | results = 
pd.DataFrame({"Y": Y_test, "Y_hat": predictions}) 211 | results["Correct"] = results.apply(lambda x: 1 if round(x["Y_hat"])==x["Y"] else 0, axis=1) 212 | results.to_csv(f"./results/pfam_results-{NUMCLASSES}class.csv", index=False) 213 | pd.DataFrame(status).to_csv(f"./results/pfam_losses-{NUMCLASSES}class.csv", index=False) 214 | -------------------------------------------------------------------------------- /experimental/pfam_autoenc_model.py: -------------------------------------------------------------------------------- 1 | # Imports 2 | import torch, math 3 | import numpy as np 4 | from torch import nn, Tensor 5 | import torch.nn.functional as F 6 | from torch.nn.modules.transformer import TransformerEncoderLayer 7 | 8 | # Positional Encoding - https://pytorch.org/tutorials/beginner/transformer_tutorial.html 9 | class PositionalEncoding(nn.Module): 10 | def __init__(self, d_model: int, dropout: float = 0.1, max_len: int = 5000): 11 | super().__init__() 12 | self.dropout = nn.Dropout(p=dropout) 13 | position = torch.arange(max_len).unsqueeze(1) 14 | div_term = torch.exp(torch.arange(0, d_model, 2) * (-math.log(10000.0) / d_model)) 15 | pe = torch.zeros(max_len, 1, d_model) 16 | pe[:, 0, 0::2] = torch.sin(position * div_term) 17 | pe[:, 0, 1::2] = torch.cos(position * div_term) 18 | self.register_buffer('pe', pe) 19 | def forward(self, x: Tensor) -> Tensor: 20 | x = x + self.pe[:x.size(0)] 21 | return self.dropout(x) 22 | 23 | # A forcasting model 24 | class PFamModel(torch.nn.Module): 25 | def __init__(self, 26 | max_len=200, 27 | embed_size = 1024, 28 | nhead = 4, 29 | dim_feedforward = 2048, 30 | dropout = 0.1, 31 | use_softmax=0, 32 | device = "cuda"): 33 | super(PFamModel, self).__init__() 34 | self.device = device 35 | self.max_len = max_len 36 | self.embed_size = embed_size 37 | self.use_softmax = use_softmax 38 | self.transformer_encoder = TransformerEncoderLayer( 39 | d_model = self.embed_size, 40 | nhead = nhead, 41 | dim_feedforward = dim_feedforward, 42 | dropout = dropout, 43 | batch_first = True 44 | ) 45 | self.input_embedding = nn.Embedding(30, embed_size, padding_idx=0) 46 | self.position_encoder = PositionalEncoding(d_model=embed_size, 47 | dropout=dropout, 48 | max_len=max_len) 49 | self.conv_out = nn.Conv1d(max_len, 1, kernel_size=3, padding=1) 50 | self.linear = nn.Linear(embed_size, embed_size) 51 | self.linear_softmax = nn.Linear(embed_size, use_softmax) 52 | self.softmax = nn.Softmax() 53 | def forward(self, x): 54 | src_mask = self._generate_square_subsequent_mask() 55 | src_mask.to(self.device) 56 | x = self.input_embedding(x) 57 | x = self.position_encoder(x) 58 | x = self.transformer_encoder(x, src_mask=src_mask) 59 | x = self.conv_out(x) 60 | x = self.linear(x) 61 | x = x.reshape(-1, self.embed_size) 62 | if self.use_softmax>0: 63 | x = self.linear_softmax(x) 64 | return self.softmax(x) 65 | else: return x 66 | 67 | def _generate_square_subsequent_mask(self): 68 | return torch.triu( 69 | torch.full((self.max_len, self.max_len), float('-inf'), dtype=torch.float32, device=self.device), 70 | diagonal=1, 71 | ) 72 | -------------------------------------------------------------------------------- /experimental/pfam_autoencoder.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | 4 | class PFamEncode(nn.Module): 5 | def __init__(self, embed_size=1024): 6 | super(PFamEncode, self).__init__() 7 | self.activation = nn.LeakyReLU() 8 | self.lin1 = nn.Linear(1, int(embed_size/32)) 9 | 
self.lin2 = nn.Linear(int(embed_size/32), int(embed_size/16)) 10 | self.lin3 = nn.Linear(int(embed_size/16), int(embed_size/8)) 11 | self.lin4 = nn.Linear(int(embed_size/8), int(embed_size/4)) 12 | self.lin5 = nn.Linear(int(embed_size/4), embed_size) 13 | def forward(self, x): 14 | x = self.activation(self.lin1(x)) 15 | x = self.activation(self.lin2(x)) 16 | x = self.activation(self.lin3(x)) 17 | x = self.activation(self.lin4(x)) 18 | x = self.activation(self.lin5(x)) 19 | return x 20 | 21 | 22 | class PFamDecode(nn.Module): 23 | def __init__(self, embed_size=1024): 24 | super(PFamDecode, self).__init__() 25 | assert embed_size%32==0, "Embedding size must be a multiple of 32" 26 | self.activation = nn.LeakyReLU() 27 | self.lin1 = nn.Linear(embed_size, int(embed_size/4)) 28 | self.lin2 = nn.Linear(int(embed_size/4), int(embed_size/8)) 29 | self.lin3 = nn.Linear(int(embed_size/8), int(embed_size/16)) 30 | self.lin4 = nn.Linear(int(embed_size/16), int(embed_size/32)) 31 | self.lin5 = nn.Linear(int(embed_size/32), 1) 32 | def forward(self, x): 33 | x = self.activation(self.lin1(x)) 34 | x = self.activation(self.lin2(x)) 35 | x = self.activation(self.lin3(x)) 36 | x = self.activation(self.lin4(x)) 37 | x = self.activation(self.lin5(x)) 38 | return x 39 | 40 | class PFamAutoencoder(nn.Module): 41 | def __init__(self, num_classes=1000, embed_size=1024): 42 | super(PFamAutoencoder, self).__init__() 43 | self.encoder = PFamEncode(embed_size) 44 | self.decoder = PFamDecode(embed_size) 45 | def forward(self, x): 46 | x = self.encoder(x) 47 | x = self.decoder(x) 48 | return x.flatten() 49 | -------------------------------------------------------------------------------- /experimental/protbert_tokenize.py: -------------------------------------------------------------------------------- 1 | import os, torch 2 | os.environ['CURL_CA_BUNDLE'] = "" 3 | os.environ['REQUESTS_CA_BUNDLE'] = "" 4 | 5 | # Tokenize 6 | from transformers import BertTokenizer 7 | tokenizer = BertTokenizer.from_pretrained("Rostlab/prot_bert", do_lower_case=False) 8 | tokens = tokenizer([ 9 | " ".join("ABCDEFGHIJKLMNOPQRSTUVWXYZ"), 10 | " [CLS] [UNK] [SEP] [MASK] [PAD] [PAD]" 11 | ])['input_ids'] 12 | tokenizer.pad_token 13 | tokenizer("[PAD] [PAD]") 14 | 15 | { 16 | "[PAD]": 0, "[UNK]": 1, "[CLS]": 2, 17 | "[SEP]": 3, "[MASK]": 4, 18 | "A":6, "B":27, "C":23, "D":14, "E":9, "F":19, 19 | "G":7, "H":22, "I":11, "J":1, "K":12, "L":5, 20 | "M":21, "N":17, "O":29, "P":16, "Q":18, "R":13, 21 | "S":10, "T":15, "U":26, "V":8, "W":24, "Y":20, "Z":28 22 | } 23 | 24 | # Embed 25 | model.embeddings.word_embeddings(torch.tensor(tokens)) 26 | -------------------------------------------------------------------------------- /experimental/smiles.py: -------------------------------------------------------------------------------- 1 | from rdkit import Chem 2 | import networkx as nx 3 | smiles = 'CC(C)(C)c1ccc2occ(CC(=O)Nc3ccccc3F)c2c1' 4 | mol = Chem.MolFromSmiles(smiles) 5 | adjacency_matrix = Chem.GetAdjacencyMatrix(mol, useBO = True) 6 | 7 | -------------------------------------------------------------------------------- /multihead/multihead.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## Setup" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 1, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | "# Imports\n", 17 | "import math, random, torch\n", 18 | "import numpy as np\n", 19 
| "import seaborn as sns\n", 20 | "import torch.nn as nn\n", 21 | "import torch.nn.functional as F\n", 22 | "from torchmetrics import ConfusionMatrix\n", 23 | "from torch.utils.data import TensorDataset, DataLoader\n", 24 | "\n", 25 | "# Custom Import\n", 26 | "from multihead_attention import MultiheadAttention" 27 | ] 28 | }, 29 | { 30 | "cell_type": "code", 31 | "execution_count": 2, 32 | "metadata": {}, 33 | "outputs": [], 34 | "source": [ 35 | "BATCH_SIZE = 8\n", 36 | "SEQ_LEN = 16\n", 37 | "NHEAD = 8\n", 38 | "EMBED_SIZE = 512\n", 39 | "DEVICE = \"cuda\"\n", 40 | "model = MultiheadAttention(EMBED_SIZE, NHEAD, device=DEVICE).to(DEVICE)" 41 | ] 42 | }, 43 | { 44 | "cell_type": "markdown", 45 | "metadata": {}, 46 | "source": [ 47 | "## Run Model (Forward Pass)" 48 | ] 49 | }, 50 | { 51 | "cell_type": "code", 52 | "execution_count": 3, 53 | "metadata": {}, 54 | "outputs": [ 55 | { 56 | "name": "stdout", 57 | "output_type": "stream", 58 | "text": [ 59 | "Attention Output: torch.Size([8, 16, 512])\n", 60 | "Attention Weights: torch.Size([64, 16, 16])\n" 61 | ] 62 | } 63 | ], 64 | "source": [ 65 | "X = torch.rand(BATCH_SIZE, SEQ_LEN, EMBED_SIZE).to(DEVICE) # (Batch Size, Seq. Len., Embed Size)\n", 66 | "Q, K, V = X, X, X # Queries, Keys and Values are the same for non-translation\n", 67 | "attn_output, attn_output_weights = model(Q, K, V)\n", 68 | "print(f\"Attention Output: {attn_output.shape}\")\n", 69 | "print(f\"Attention Weights: {attn_output_weights.shape}\")" 70 | ] 71 | }, 72 | { 73 | "cell_type": "markdown", 74 | "metadata": {}, 75 | "source": [ 76 | "## Attention Weights" 77 | ] 78 | }, 79 | { 80 | "cell_type": "code", 81 | "execution_count": 4, 82 | "metadata": {}, 83 | "outputs": [ 84 | { 85 | "data": { 86 | "text/plain": [ 87 | "" 88 | ] 89 | }, 90 | "execution_count": 4, 91 | "metadata": {}, 92 | "output_type": "execute_result" 93 | }, 94 | { 95 | "data": { 96 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAgMAAAGiCAYAAAB6c8WBAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy80BEi2AAAACXBIWXMAAA9hAAAPYQGoP6dpAAA7yklEQVR4nO3de1xVVf7/8fcB4WAoeCG5eEOl1FIxMQnNrGQ062tS30odJ7xM9c1xGpPJlCzJrNCayhpNp7vfmtKm0b5ONZoxWlkUCZrZxUuamgpeSDDMg3L2749+MXMOCGzYh3Nwv5491uMxrL3PZ61Tcx7nc9Zaey2HYRiGAACAbQX5uwMAAMC/SAYAALA5kgEAAGyOZAAAAJsjGQAAwOZIBgAAsDmSAQAAbI5kAAAAmyMZAADA5kgGAACwOZIBAAACxAcffKCRI0cqLi5ODodDb775Zq2vWb9+vfr16yen06mEhAS99NJLptslGQAAIECUlZUpMTFRixYtqtP9u3fv1jXXXKMrrrhCmzdv1p133qlbbrlFa9asMdWug4OKAAAIPA6HQytXrlRaWtoZ75kxY4befvttbd26tbJuzJgxOnbsmFavXl3nthgZAADAh1wul0pLSz2Ky+WyJHZubq5SU1M96oYPH67c3FxTcZpZ0hsLnDqyy+dtNI8b7PM2AAC+d7p8v0/jW/mdlL3wfzVnzhyPuqysLN1///0Njl1YWKjo6GiPuujoaJWWluqnn35S8+bN6xQnYJIBAAAChrvCslCZmZnKyMjwqHM6nZbFtwLJAAAAPuR0On325R8TE6OioiKPuqKiIkVERNR5VEAiGQAAoCrD7e8e1ElKSoreeecdj7q1a9cqJSXFVBwWEAIA4M3ttq6Y8OOPP2rz5s3avHmzpJ8fHdy8ebP27t0r6ecph/T09Mr7b7/9du3atUt33323vvnmGz399NN6/fXXNW3aNFPtMjIAAIAXw08jAxs3btQVV1xR+fcvaw3Gjx+vl156SQcPHqxMDCSpS5cuevvttzVt2jQ9+eST6tChg5577jkNHz7cVLsBs88ATxMAAOrK108TlB/40rJYoXEXWhbLVxgZAADAm8nh/aaOZAAAAG9NZAGhVUwnA0eOHNELL7yg3NxcFRYWSvr50YaBAwdqwoQJOvfccy3vJAAA8B1TawY+++wzDR8+XOecc45SU1Mrdz0qKipSTk6OTpw4oTVr1qh///41xnG5XFW2Ygw6vt/nmzCwZgAAzg4+XzOwp8CyWKGd+1kWy1dMJQOXXHKJEhMTtWTJEjkcDo9rhmHo9ttv15YtW2rdE/n++++vsjXjvdP/oNl3TzXRdfNIBgDg7ODzZOC7jZbFCo2v+QdyIDCVDDRv3lybNm1Sjx49qr3+zTff6KKLLtJPP/1UYxxGBgAADUEyYC1TawZiYmKUl5d3xmQgLy+vyoEJ1alua8ZT5UfMdAUAAN/haYIzu+uuu3TbbbcpPz9fQ4cOrbJm4Nlnn9Wf/vQnn3QUAIDG4q9Nh/zFVDIwZcoURUVF6YknntDTTz+tioqfT3UKDg5WUlKSXnrpJd10000+6SgAAPCNeu9AeOrUKR058vPQflRUlEJCQhrUEXYgBADUla/XDLh2fGxZLOd5Ay2L5Sv13nQoJCREsbGxVvYFAIDAwDQBAAA2567wdw8aFUcYAwBgc4wMAADgjWkCAABszmb7DDBNAACAzQXMyMDnfTN83kbuuQN8Gj/lcJ5P4wMAGgnTBAAA2BzTBAAAwE4YGQAAwIth2GufAZIBAAC82WzNANMEAADYHCMDAAB4s9kCQpIBAAC82WyagGQAAABvHFQEAADsxC8jAy6XSy6Xy6Ou3KhQqCPYH90BAMCTzaYJLB8Z2LdvnyZNmlTjPdnZ2YqMjPQoLx3fYXVXAACoH7fbutIEOAzDMKwM+Pnnn6tfv36qqDjzfEt1IwNbe47z+ciAYTh8Gp+zCQCgcZwu3+/T+Cc/WW5ZrLBLRlsWy1dMTxOsWrWqxuu7du2qNYbT6ZTT6fSoY4oAABAwbDZNYDoZSEtLk8PhUE0DCg6Hb3+BAwDgU01keN8qptcMxMbGasWKFXK73dWWgoICX/QTAAD4iOlkICkpSfn5+We8XtuoAQAAAc9mCwhNTxNMnz5dZWVlZ7yekJCgdevWNahTAAD4E6cW1mLw4ME1Xg8PD9eQIUPq3SEAANC42I4YAABvTWR43yokAwAAeOPRQgAAbM5mIwMcVAQAgM0FzMjAmuBwf3ehwebEXu7zNrIOrvd5GwBge0wTAABgc0wTAAAAO2FkAAAAb0wTAABgc0wTAAAAO2FkAAAAbzYbGSAZAADAm83WDDBNAACAzTEyAACAN6YJfM/lcsnlcnnUnTYq1MwR7I/uAADgiWmCmv3000/asGGDvvrqqyrXTp48qf/93/+tNUZ2drYiIyM9yvslX5rtCgAAvuF2W1eaAFPJwPbt29WzZ09ddtll6t27t4YMGaKDBw9WXi8pKdHEiRNrjZOZmamSkhKPMiTyQvO9BwAADWYqGZgxY4Z69eqlQ4cOadu2bWrZsqUGDRqkvXv3mmrU6XQqIiLCozBFAAAIGIbbutIEmFoz8PHHH+u9995TVFSUoqKi9I9//EO/+93vNHjwYK1bt07h4U3/5EEAAJrK8L5VTI0M/PTTT2rW7N/5g8Ph0OLFizVy5EgNGTJE27dvt7yDAADAt0yNDPTo0UMbN25Uz549PeoXLlwoSbr22mut6xkAAP7CyMCZXXfddXrttdeqvbZw4UKNHTtWhmFY0jEAAPzGMKwrTYCpZCAzM1PvvPPOGa8//fTTctssmwIAoKljB0IAALzZ7IctyQAAAN5slgxwUBEAADbHyAAAAN6ayGZBVgmYZOBbh6v2mxrIUNNY1VmT9LgUn7fxvwdyfd4GAAQ0pgkAALA5Pz5auGjRIsXHxyssLEzJycnKy8ur8f4FCxaoe/fuat68uTp27Khp06bp5MmTptokGQAAIEAsX75cGRkZysrKUkFBgRITEzV8+HAdOnSo2vtfffVVzZw5U1lZWfr666/1/PPPa/ny5brnnntMtUsyAACANz8dYfz444/r1ltv1cSJE3XBBRdoyZIlOuecc/TCCy9Ue//HH3+sQYMG6de//rXi4+M1bNgwjR07ttbRBG8kAwAAeLMwGXC5XCotLfUoLlfVdXLl5eXKz89XampqZV1QUJBSU1OVm1v9Wq6BAwcqPz+/8st/165deuedd3T11VeberskAwAA+FB2drYiIyM9SnZ2dpX7jhw5ooqKCkVHR3vUR0dHq7CwsNrYv/71r/XAAw/o0ksvVUhIiLp166bLL7+caQIAABrMcFtWMjMzVVJS4lEyMzMt6eb69ev18MMP6+mnn1ZBQYFWrFiht99+W3PnzjUVJ2AeLQQAIFAYbuseRXc6nXI6nbXeFxUVpeDgYBUVFXnUFxUVKSYmptrX3Hfffbr55pt1yy23SJJ69+6tsrIy3XbbbZo1a5aCgur2m5+RAQAAAkBoaKiSkpKUk5NTWed2u5WTk6OUlO
r3mDlx4kSVL/zg4GBJMnWKMCMDAAB489OmQxkZGRo/frz69++vAQMGaMGCBSorK9PEiRMlSenp6Wrfvn3lmoORI0fq8ccf10UXXaTk5GTt3LlT9913n0aOHFmZFNQFyQAAAN78tB3x6NGjdfjwYc2ePVuFhYXq27evVq9eXbmocO/evR4jAffee68cDofuvfde7d+/X+eee65Gjhyphx56yFS7DsPMOIKkr7/+Wp988olSUlLUo0cPffPNN3ryySflcrn0m9/8RldeeWWtMVwuV5XHKu7oPV7BjrpnMfVxNmxH7JDD522wHTGAQHe6fL9P459YfIdlsc6Z/GfLYvmKqTUDq1evVt++fXXXXXfpoosu0urVq3XZZZdp586d2rNnj4YNG6Z//etftcap7jGLLSXb6v0mAACwlNuwrjQBppKBBx54QNOnT9fRo0f14osv6te//rVuvfVWrV27Vjk5OZo+fbrmzZtXa5zqHrPoE9m93m8CAABL+WkHQn8xlQx8+eWXmjBhgiTppptu0vHjx3XDDTdUXh83bpy2bNlSaxyn06mIiAiP4uspAgAA6oxkoGYOx89z1kFBQQoLC1NkZGTltZYtW6qkpMS63gEAAJ8zlQzEx8drx44dlX/n5uaqU6dOlX/v3btXsbGx1vUOAAB/8OMRxv5g6tHCyZMnq6KiovLvXr16eVz/5z//WaenCQAACGhNZHjfKqaSgdtvv73G6w8//HCDOgMAABofmw4BAOCtiTwSaBWSAQAAvPlpB0J/4aAiAABsjpEBAAC8MU3gH4fdJ/3dBfx/I2Iu8nkb/yzc5PM2AKC+DJs9TcA0AQAANhcwIwMAAAQMpgkAALA5mz1NQDIAAIA3m40MsGYAAACbY2QAAABvNnuagGQAAABvTBMAAAA7sWRkwDAMORwOK0IBAOB/NnuawJKRAafTqa+//tqKUAAA+J/bsK40AaZGBjIyMqqtr6io0Lx589S2bVtJ0uOPP15jHJfLJZfL5RnDqFCwI9hMdwAAgAVMJQMLFixQYmKiWrVq5VFvGIa+/vprhYeH12m6IDs7W3PmzPGoOy/iPHWPPN9MdwAA8Am7nU3gMAyjzmMY8+bN0zPPPKPnnntOV155ZWV9SEiIPv/8c11wwQV1ilPdyMCYC0czMmAjHFQEoCFOl+/3afwfZ1xvWawW81dYFstXTK0ZmDlzppYvX67Jkyfrrrvu0qlTp+rVqNPpVEREhEchEQAAwD9MLyC8+OKLlZ+fr8OHD6t///7aunUrTxIAAM4uLCCsXYsWLbR06VItW7ZMqampqqiosLpfAAD4j80eLWzQPgNjxozRpZdeqvz8fHXu3NmqPgEA4F9N5Be9VRq86VCHDh3UoUMHK/oCAAD8gLMJAADwYjAyAACAzdksGeCgIgAAbI6RAQAAvNlsB8KASQaOu0/6uwtoRJe26+nzNjYc4vAsAPXENAEAALCTgBkZAAAgYNhsZIBkAAAALybO8DsrME0AAIDNMTIAAIA3pgkAALA5kgEAAOzNbtsRs2YAAACbY2QAAABvNhsZIBkAAMCbvXYjblgyUFZWptdff107d+5UbGysxo4dq7Zt29b6OpfLJZfL5VHnNtwKcjBrAQBAYzP17XvBBReouLhYkrRv3z716tVL06ZN09q1a5WVlaULLrhAu3fvrjVOdna2IiMjPcre49/V6w0AAGA1w21YVpoCU8nAN998o9OnT0uSMjMzFRcXpz179igvL0979uxRnz59NGvWrFrjZGZmqqSkxKN0ahlfrzcAAIDl3IZ1pQmo9zRBbm6ulixZosjISElSixYtNGfOHI0ZM6bW1zqdTjmdTo86pggAAPAP08mAw+GQJJ08eVKxsbEe19q3b6/Dhw9b0zMAAPyFBYQ1Gzp0qJo1a6bS0lJt27ZNvXr1qry2Z8+eOi0gBAAgkDWVuX6rmEoGsrKyPP5u0aKFx9//+Mc/NHjw4Ib3CgAANBqHESDnNF7eIdXfXcBZZsOhr/3dBQA+crp8v0/j//Dfl1sWq/Xf11sWy1fYdAgAAC9MEwAAYHc2W0DI83wAANgcIwMAAHgxbDYyEDDJwAn3KX93AWeZflEJPo1fcGSnT+MD8CObJQNMEwAAYHMBMzIAAECgYJoAAAC7s1kywDQBAAA2x8gAAABe7DZNwMgAAABeDLd1xaxFixYpPj5eYWFhSk5OVl5eXo33Hzt2TFOmTFFsbKycTqfOP/98vfPOO6baZGQAAAAv/hoZWL58uTIyMrRkyRIlJydrwYIFGj58uLZt26Z27dpVub+8vFy/+tWv1K5dO73xxhtq37699uzZo1atWplql2QAAIAA8fjjj+vWW2/VxIkTJUlLlizR22+/rRdeeEEzZ86scv8LL7yg4uJiffzxxwoJCZEkxcfHm26XaQIAALwZDsuKy+VSaWmpR3G5XFWaLC8vV35+vlJT/32Kb1BQkFJTU5Wbm1ttN1etWqWUlBRNmTJF0dHR6tWrlx5++GFVVFSYerskAwAAeLFyzUB2drYiIyM9SnZ2dpU2jxw5ooqKCkVHR3vUR0dHq7CwsNp+7tq1S2+88YYqKir0zjvv6L777tNjjz2mBx980NT7NTVNUFBQoNatW6tLly6SpJdffllLlizR3r171blzZ/3+97/XmDFjao3jcrmqZEVuw60gB7kJAODskpmZqYyMDI86p9NpSWy326127drpmWeeUXBwsJKSkrR//349+uijysrKqnMcU9++EydO1LfffitJeu655/Q///M/6t+/v2bNmqWLL75Yt956q1544YVa41SXJR38ca+ZrgAA4DOG22FZcTqdioiI8CjVJQNRUVEKDg5WUVGRR31RUZFiYmKq7WdsbKzOP/98BQcHV9b17NlThYWFKi8vr/P7NZUM7NixQ+edd54k6emnn9aTTz6pJ598UrfffrueeOIJ/eUvf9Fjjz1Wa5zMzEyVlJR4lNgWncx0BQAAn/HHo4WhoaFKSkpSTk5OZZ3b7VZOTo5SUlKqfc2gQYO0c+dOud3/bmj79u2KjY1VaGhonds2lQycc845OnLkiCRp//79GjBggMf15ORk7d69u9Y41WVJTBEAAOwuIyNDzz77rJYuXaqvv/5akydPVllZWeXTBenp6crMzKy8f/LkySouLtbUqVO1fft2vf3223r44Yc1ZcoUU+2aWjMwYsQILV68WM8995yGDBmiN954Q4mJiZXXX3/9dSUk+PbYWAAAfM0wHH5pd/To0Tp8+LBmz56twsJC9e3bV6tXr65cVLh3714FBf37x3PHjh21Zs0aTZs2TX369FH79u01depUzZgxw1S7DsMwjLrefODAAQ0aNEidOnVS//79tXjxYiUlJalnz57atm2bPvnkE61cuVJXX321qU5I0oC4IaZfA/hTwZGd/u4CYFuny/f7NP73yVdaFqvDp/+yLJavmBqbj4uL06ZNm5SSkqLVq1fLMAzl5eXp3XffVYcOHfTRRx/VKxEAAAD+Y2pkwJcYGUBTw8gA4D++HhnYd/FQy2J1/Cyn9pv8jO2IAQDwE
hg/kxsPyQAAAF4Mt38WEPoLz/MBAGBzjAwAAODFbiMDAZMMnHKf9nkbbtlsEgg+1btNvM/b+KL4O5+3AaAqu60ZYJoAAACbC5iRAQAAAgXTBAAA2Jy/tiP2F6YJAACwOUYGAADwYubo4bMByQAAAF7cTBMAAAA7YWQAAAAvdltASDIAAIAXHi0EAMDm2IGwBnfccYc+/PDDBjfqcrlUWlrqUdx2W7oJAECAMJUMLFq0SJdffrnOP/98zZ8/X4WFhfVqNDs7W5GRkR6lqOz7esUCAMBqhtthWWkKTD9N8O677+rqq6/Wn/70J3Xq1EmjRo3SW2+9Jbe77r/sMzMzVVJS4lGiwzuY7QoAAD7hNhyWlabAdDLQu3dvLViwQAcOHNArr7wil8ultLQ0dezYUbNmzdLOnTtrjeF0OhUREeFRghw85QgAgD/U+xs4JCREN910k1avXq1du3bp1ltv1V//+ld1797dyv4BANDoDMNhWWkKLPk53qlTJ91///3avXu3Vq9ebUVIAAD8xjCsK02BqWSgc+fOCg4OPuN1h8OhX/3qVw3uFAAAaDym9hnYvXu3r/oBAEDAaCoL/6zCpkMAAHhpKnP9VmEJPwAANsfIAAAAXprKwj+rkAwAAOCFNQN+Um6c9ncXgIDTvbXvd+bc9gNbgQPeWDMAAABsJWBGBgAACBRMEwAAYHM2Wz/INAEAAHbHyAAAAF6YJgAAwOZ4mgAAANgKIwMAAHhx+7sDjYxkAAAAL4aYJgAAADZiOhlYuHCh0tPTtWzZMknSyy+/rAsuuEA9evTQPffco9Ona99W2OVyqbS01KO4DbsNygAAApXbsK40BaamCR588EE98sgjGjZsmKZNm6Y9e/bo0Ucf1bRp0xQUFKQnnnhCISEhmjNnTo1xsrOzq9wTdU6c2rXw/T7sAADUxm2zaQKHYdT9oMaEhAQ98sgjuv766/X5558rKSlJS5cu1bhx4yRJK1eu1N13360dO3bUGMflcsnlcnnUJScMVZCDWQugsXFQEZqi0+X7fRo/J3q0ZbGGFi23LJavmBoZOHDggPr37y9JSkxMVFBQkPr27Vt5vV+/fjpw4ECtcZxOp5xOp0cdiQAAAP5h6hs4JiZGX331lSRpx44dqqioqPxbkr788ku1a9fO2h4CANDI3BaWpsDUyMC4ceOUnp6uUaNGKScnR3fffbfuuusuHT16VA6HQw899JBuuOEGX/UVAIBGYbdHC00lA3PmzFHz5s2Vm5urW2+9VTNnzlRiYqLuvvtunThxQiNHjtTcuXN91VcAAOADphYQ+tKF0cn+7gJgSywgRFPk6wWEq6PHWBbrqqJllsXyFXYgBADAS1OZ67cKS/gBALA5RgYAAPDCAkIAAGzOba9cgGkCAADsLmBGBk65K/zdBcCWukbG+ryNXSUHfd4GYCW7nU0QMMkAAACBIiCeuW9EJAMAAHjh0UIAAGArjAwAAODF7WDNAAAAtma3NQNMEwAAYHOMDAAA4MVuCwhJBgAA8GK3HQhNJwMHDx7U4sWLtWHDBh08eFBBQUHq2rWr0tLSNGHCBAUHB/uinwAAwEdMrRnYuHGjevbsqXfeeUenTp3Sjh07lJSUpPDwcN1111267LLLdPz48VrjuFwulZaWehS3YbdBGQBAoHLLYVkxa9GiRYqPj1dYWJiSk5OVl5dXp9ctW7ZMDodDaWlppts0lQzceeedmjZtmjZu3KgPP/xQL730krZv365ly5Zp165dOnHihO69995a42RnZysyMtKj/HCi0HTnAQDwBcPCYsby5cuVkZGhrKwsFRQUKDExUcOHD9ehQ4dqfN13332nu+66S4MHDzbZ4s8chmHUua/nnHOOtm7dqq5du0qS3G63wsLCtG/fPkVHR2vt2rWaMGGC9u/fX2Mcl8sll8vlUdev6+UKcvBwA3A24mwCWO10ec3fMw31StxvLIt14+7nq3znOZ1OOZ3OKvcmJyfr4osv1sKFCyX9/D3bsWNH3XHHHZo5c2a18SsqKnTZZZdp0qRJ+vDDD3Xs2DG9+eabpvpo6tu3Xbt2Onjw3x/qoqIinT59WhEREZKk8847T8XFxbXGcTqdioiI8CgkAgCAQOF2WFeqGw3Pzs6u0mZ5ebny8/OVmppaWRcUFKTU1FTl5uaesa8PPPCA2rVrp9/+9rf1fr+mFhCmpaXp9ttv16OPPiqn06m5c+dqyJAhat68uSRp27Ztat++fb07AwBAILByFVtmZqYyMjI86qobFThy5IgqKioUHR3tUR8dHa1vvvmm2tgbNmzQ888/r82bNzeoj6aSgQcffFAHDx7UyJEjVVFRoZSUFL3yyiuV1x0OR7XZDgAATYmVOxCeaUqgoY4fP66bb75Zzz77rKKiohoUy1Qy0KJFCy1fvlwnT57U6dOn1aJFC4/rw4YNa1BnAACwq6ioKAUHB6uoqMijvqioSDExMVXu//bbb/Xdd99p5MiRlXVu989jGs2aNdO2bdvUrVu3OrVdr4n6sLCwKokAAABnCyvXDNRVaGiokpKSlJOT8+9+uN3KyclRSkpKlft79OihL774Qps3b64s1157ra644gpt3rxZHTt2rHPb7EAIAIAXf+18k5GRofHjx6t///4aMGCAFixYoLKyMk2cOFGSlJ6ervbt2ys7O1thYWHq1auXx+tbtWolSVXqa0MyAABAgBg9erQOHz6s2bNnq7CwUH379tXq1asrFxXu3btXQUHWP31nap8BXzr/3P7+7gIAH2GfAVjN1/sM/KWDdfsM/M/3r9R+k58xMgAAgBfDZgcVsdMPAAA2FzAjAxUcVASctTpHRNd+UwPtKS2q/Sagjuz2jRQwyQAAAIHCbskA0wQAANgcIwMAAHgJiMfsGhHJAAAAXszsHHg2IBkAAMALawYAAICt1GtkoLy8XG+++aZyc3NVWFgoSYqJidHAgQM1atQohYaGWtpJAAAaEyMDtdi5c6d69uyp8ePHa9OmTXK73XK73dq0aZPS09N14YUXaufOnb7oKwAAjcKwsDQFpkcGJk+erN69e2vTpk2KiIjwuFZaWqr09HRNmTJFa9assayTAADAd0wnAx999JHy8vKqJAKSFBERoblz5yo5ObnGGC6XSy6Xy6POMNxyOFjCAADwP7s9TWD627dVq1b67rvvznj9u+++qzxP+Uyys7MVGRnpUX74ia1EAQCBwW1haQpMJwO33HKL0tPT9cQTT2jLli0qKipSUVGRtmzZoieeeEITJkzQbbfdVmOMzMxMlZSUeJTWzX2/dzkAAKjK9DTBAw88oPDwcD366KP64x//KIfj57EUwzAUExOjGTNm6O67764xhtPplNPp9KhjigAAECiaysI/qzgMw6j3e969e7fHo4VdunSpd0e6RfWr92sBgFML7eV0+X6fxn+o8zjLYs3a81fLYvlKg36Od+nSRSkpKUpJSalMBPbt26dJkyZZ0jkAAOB7lo/NFxcXa+nSpVaHBQCg0dhtAaHpNQOrVq2q8fquXbvq3RkAAAKB3dYMmE4G0tLS5HA4VNNSg18W
FQIA0BQ1lV/0VjE9TRAbG6sVK1ZUbkPsXQoKCnzRTwAA4COmk4GkpCTl5+ef8XptowYAAAQ6t8O60hSYniaYPn26ysrKzng9ISFB69ata1CnAADwJ7fNVg2YTgYGDx5c4/Xw8HANGTKk3h0CAACNy3Qy4Ctuo8LfXQDQhHVsGeXT+PuOH/FpfAQWe40LBFAyAABAoOBpAgAAYCuMDAAA4IUFhAAA2Jy9UgGmCQAAsD1GBgAA8MICwgYqKirSAw88YHVYAAAajVuGZaUpsDwZKCws1Jw5c6wOCwBAozEsLE2B6WmCLVu21Hh927Zt9e4MAABofKaTgb59+57xMKJf6ms7wtjlcsnlcnnUGYZbDgfrGQEA/seagVq0adNGzz77rHbv3l2l7Nq1S2+99VatMbKzsxUZGelRjv10qF5vAAAAqxkW/tMUmB4ZSEpK0oEDB9S5c+dqrx87dqzWI4wzMzOVkZHhUdcnfpDZrgAAAAuYTgZuv/32Go8w7tSpk1588cUaYzidTjmdTo86pggAAIHCbtMEppOB6667rsbrrVu31vjx4+vdIQAA/K2pPBJoFct/ju/bt0+TJk2yOiwAAPARy5OB4uJiLV261OqwAAA0GvYZqMWqVatqvL5r1656dwYAgEBgt2kC08lAWlraGfcZ+EVt+wwAAIDAYXqaIDY2VitWrJDb7a62FBQU+KKfAAA0GreFpSkwnQwkJSUpPz//jNdrGzUAACDQselQLaZPn17jPgMJCQlat25dgzoFAIA/NZVf9FYxnQwMHjy4xuvh4eEaMmRIvTsEAAAal+lkwFcqDLvlYQCakrgWbXzexoEfi33eBuqmqQzvWyVgkgEAAAKF3X6eciAAAAA2x8gAAABe3DZ7Ko5kAAAAL/ZKBZgmAADA9hgZAADAi93OJqj3yMD333+vH3/8sUr9qVOn9MEHHzSoUwAA+JPddiA0nQwcPHhQAwYMUOfOndWqVSulp6d7JAXFxcW64oorLO0kAADwHdPJwMyZMxUUFKRPP/1Uq1ev1ldffaUrrrhCP/zwQ+U9nE0AAGjK7HZQkek1A++9955Wrlyp/v37S5I++ugj3XjjjbryyiuVk5MjqfYjjF0ul1wul0edYbjlcLCeEQDgf6wZqEVJSYlat25d+bfT6dSKFSsUHx+vK664QocOHao1RnZ2tiIjIz1K6cnDZrsCAIBPsGagFl27dtWWLVs86po1a6a//e1v6tq1q/7rv/6r1hiZmZkqKSnxKBFh55rtCgAAsIDpZGDEiBF65plnqtT/khD07du31jUDTqdTERERHoUpAgBAoGDNQC0eeughnThxovpgzZrp73//u/bv39/gjgEA4C92Wwhv+ud4s2bNFBERccbrBw8e1Jw5cxrUKQAA7GrRokWKj49XWFiYkpOTlZeXd8Z7n332WQ0ePFitW7dW69atlZqaWuP9Z2L52HxxcbGWLl1qdVgAABqNW4ZlxYzly5crIyNDWVlZKigoUGJiooYPH37Gxfnr16/X2LFjtW7dOuXm5qpjx44aNmyY6RF6h2FyLGTVqlU1Xt+1a5f++Mc/qqKiwlRHOrXpbep+ADjbHPix2N9daDJOl/t2Onpkp9oXw9fVGzv+XuVxeqfTKafTWeXe5ORkXXzxxVq4cKEkye12q2PHjrrjjjs0c+bMWtuqqKhQ69attXDhQqWnp9e5j6bXDKSlpcnhcNQ4n1LbPgMAANhFdnZ2lenzrKws3X///R515eXlys/PV2ZmZmVdUFCQUlNTlZubW6e2Tpw4oVOnTqlNmzam+mh6miA2NlYrVqyQ2+2uthQUFJgNCQBAQLFyn4HqHqf/zy/8Xxw5ckQVFRWKjo72qI+OjlZhYWGd+j1jxgzFxcUpNTXV1Ps1PTKQlJSk/Px8jRo1qtrrtY0aAAAQ6KzcgfBMUwJWmzdvnpYtW6b169crLCzM1GtNJwPTp09XWVnZGa8nJCRo3bp1ZsMCAGBrUVFRCg4OVlFRkUd9UVGRYmJianztn/70J82bN0/vvfee+vTpY7pt09MEgwcP1lVXXXXG6+Hh4RoyZIjpjgAAECgMw7Cs1FVoaKiSkpIqz/mRfl5AmJOTo5SUlDO+7pFHHtHcuXO1evXqynODzDI9MuArdjsUAgC8xbRoXftNDVT44w+13wS/7RyYkZGh8ePHq3///howYIAWLFigsrIyTZw4UZKUnp6u9u3bKzs7W5I0f/58zZ49W6+++qri4+Mr1xa0aNFCLVq0qHO7AZMMAAAQKPx1wNDo0aN1+PBhzZ49W4WFherbt69Wr15duahw7969Cgr696D+4sWLVV5erhtuuMEjTnVPK9TE9D4DvtKhTS9/dwEAznpny8iAr/cZGNbxzNPhZr27b7VlsXyFkQEAALzYbeqaZAAAAC8BMmjeaDg3GAAAm2NkAAAAL0wT1MHRo0e1ZcsWJSYmqk2bNjpy5Iief/55uVwu3XjjjerZs6fV/QQAoNH462kCfzGdDOTl5WnYsGEqLS1Vq1attHbtWt14441q1qyZ3G635s2bpw0bNqhfv36+6C8AALCY6TUDs2bN0o033qiSkhLdc889SktL09ChQ7V9+3bt3LlTY8aM0dy5c33RVwAAGoXbMCwrTYHpfQbatGmjjz76SD179tSpU6cUFham3NxcDRgwQJJUUFCga6+9Vt9//72pjrDPAAD4HvsM1M3g9kMti/Xh/pzab/Iz09ME5eXlat68uSQpJCRE55xzjqKioiqvR0VF6ejRozXGcLlccrlcHnWG4ZbDwcMNAAA0NtPfvh07dtSuXbsq/162bJliY2Mr/z548KBHclCd7OxsRUZGepTjJ4+Y7QoAAD7hlmFZaQpMJwNjxozRoUOHKv++5pprKkcKJGnVqlWVUwZnkpmZqZKSEo/SMqzmBAIAgMZit2TA8rMJTpw4oeDgYDmdTlOvY80AAPgeawbq5pK4yy2L9cmB9ZbF8hXLJ+mPHj2qyZMnWx0WAAD4iOXJQHFxsZYuXWp1WAAAGo3dpglMP02watWqGq//5+JCAACaInYgrEVaWpocDkeNJzo5HI4GdQoAADQe09MEsbGxWrFihdxud7WloKDAF/0EAKDRGIZhWWkKTCcDSUlJys/PP+P12kYNAAAIdKwZqMX06dNVVlZ2xusJCQlat25dgzoFAAAaj+lkYPDgwTVeDw8P15AhQ+rdIQAA/M1uI9ymkwFfsdu/eADwh+jwVj5vo6jsmM/b8LWmMrxvFU4GAgDA5gJmZAAAgEDBPgMAANic22ZT1yQDAAB4sdvIAGsGAACwOUYGAADwYrdpAstGBrp27aodO3ZYFQ4AAL8xLPynKTA9MvDUU09VW7937169+OKLiomJkST94Q9/aFjPAABAo3AYJnf7CQoKUvv27dWsmWcesWfPHsXFxSkkJEQOh8P0UcbtW19o6n4AQGBqjE2HTpfv92n888/tb1ms7Yc3WhbLV0yPDNx222369NNP9eqrr6pnz56V9SEhIXr33Xd1wQUXWNpBAAAaW1MZ3reK6TUDS5Ys0ezZszV8+HA
tXLiwXo26XC6VlpZ6FMNw1ysWAABomHotILzuuuuUm5urlStXasSIESosLDT1+uzsbEVGRnqU4yeP1KcrAABYzm0YlpWmoN5PE7Rv317vvfeeLrvsMl100UWmDhrKzMxUSUmJR2kZFlXfrgAAYCmeJjDB4XAoMzNTw4YN04YNGxQbG1un1zmdTjmdTq9Y7H8EAIA/WPINnJSUpKlTp6p169bat2+fJk2aZEVYAAD8wjDclpWmwPKf48XFxVq6dKnVYQEAaDRuGZaVpsD0NMGqVatqvG52fwEAAAKNyS14mjzTyUBaWpocDkeN/6IcDkeDOgUAABqP6WmC2NhYrVixQm63u9pSUFDgi34CANBo7DZNYDoZSEpKUn5+/hmv1zZqAABAoDMMw7LSFJieJpg+fbrKysrOeD0hIUHr1q1rUKcAAEDjMX1Qka9wUBEAnB3OhoOKYltZd87OwWNfWRbLVxq06ZCVGmPLxqayE5QdOOT7Raa+/u99NrwHqXHeR2M4Gz7fZ8t/i3PPifR3FxrsbPj/kxls+wcAgM0FzMgAAACBIkBm0BsNyQAAAF6ayiOBVmGaAAAAm2NkAAAAL0wTAABgc43xhFsgaXAyYBiG1q9fr507dyo2NlbDhw9XSEiIFX0DAMAvGBmoxdVXX63XXntNkZGRKi4u1tVXX628vDxFRUXp6NGjOv/88/XBBx/o3HPP9UV/AQCAxUwvIFy9erVcLpck6d5779Xx48f17bff6tChQ9qzZ4/Cw8M1e/ZsyzsKAEBj4aAiE/71r38pOztbXbp0kSR16NBB8+fP15o1ayzpHAAA/sBBRXXgcPy8ZeYPP/ygbt26eVxLSEjQgQMHany9y+WqHF34hWG45XDwpCMAAI2tXt++EyZM0PXXX69Tp05p9+7dHtcKCwvVqlWrGl+fnZ2tyMhIj/Kj62h9ugIAgOXchmFZaQpMJwPjx49Xu3btFBkZqVGjRunEiRMe1//+97+rb9++NcbIzMxUSUmJR2nhbGu2KwAA+IRh4T9NgeVHGJeVlSk4OFhhYWGmXmflcZFn0lT+o9jB2XDi39nwHqSz56S8s+Hzfbb8t2gMvj4WOPyceMtilZ34zrJYvmL5JH1xcbF+97vfWR0WAIBGwzRBAxUXF2vp0qVWhwUAoNHwNEEtVq1aVeP1Xbt21bszAACg8ZlOBtLS0uRwOGrMdn559BAAgKbobFiDYobpaYLY2FitWLFCbre72lJQUOCLfgIA0GjsNk1gOhlISkpSfn7+Ga/XNmoAAECg82cysGjRIsXHxyssLEzJycnKy8ur8f6//e1v6tGjh8LCwtS7d2+98847pts0nQxMnz5dAwcOPOP1hIQErVu3znRHAACwu+XLlysjI0NZWVkqKChQYmKihg8frkOHDlV7/8cff6yxY8fqt7/9rTZt2qS0tDSlpaVp69atptq1fJ+B+mKfAXs5G57RPxveg3T2PNt+Nny+z5b/Fo3B1/sMNAttb1mssuO7qmzB73Q65XQ6q9ybnJysiy++WAsXLpQkud1udezYUXfccYdmzpxZ5f7Ro0errKxMb731VmXdJZdcor59+2rJkiV176TRBJ08edLIysoyTp48SRt+buNseA+0ETjxaSOw2jgb3kMgyMrKMiR5lKysrCr3uVwuIzg42Fi5cqVHfXp6unHttddWG7tjx47GE0884VE3e/Zso0+fPqb62CSTgZKSEkOSUVJSQht+buNseA+0ETjxaSOw2jgb3kMgOHnypFFSUuJRqkt+9u/fb0gyPv74Y4/66dOnGwMGDKg2dkhIiPHqq6961C1atMho166dqT7W69RCAABQN2eaEggknBkMAEAAiIqKUnBwsIqKijzqi4qKFBMTU+1rYmJiTN1/JiQDAAAEgNDQUCUlJSknJ6eyzu12KycnRykpKdW+JiUlxeN+SVq7du0Z7z+TJjlN4HQ6lZWV5dNhF9oIjPi0EVhtnA3vgTYCJ35jtdGUZGRkaPz48erfv78GDBigBQsWqKysTBMnTpQkpaenq3379srOzpYkTZ06VUOGDNFjjz2ma665RsuWLdPGjRv1zDPPmGo3YB4tBAAA0sKFC/Xoo4+qsLBQffv21VNPPaXk5GRJ0uWXX674+Hi99NJLlff/7W9/07333qvvvvtO5513nh555BFdffXVptokGQAAwOZYMwAAgM2RDAAAYHMkAwAA2BzJAAAANtckkwGzxzua8cEHH2jkyJGKi4uTw+HQm2++aVlsScrOztbFF1+sli1bql27dkpLS9O2bdssbWPx4sXq06ePIiIiFBERoZSUFP3zn/+0tA1v8+bNk8Ph0J133mlZzPvvv18Oh8Oj9OjRw7L4krR//3795je/Udu2bdW8eXP17t1bGzdutCx+fHx8lffgcDg0ZcoUy9qoqKjQfffdpy5duqh58+bq1q2b5s6da/lR4sePH9edd96pzp07q3nz5ho4cKA+++yzeser7bNmGIZmz56t2NhYNW/eXKmpqdqxY4elbaxYsULDhg1T27Zt5XA4tHnzZsvinzp1SjNmzFDv3r0VHh6uuLg4paen68CBA5a+h/vvv189evRQeHi4WrdurdTUVH366aeWtvGfbr/9djkcDi1YsMDSNiZMmFDlc3LVVVeZagP11+SSAbPHO5pVVlamxMRELVq0yJJ43t5//31NmTJFn3zyidauXatTp05p2LBhKisrs6yNDh06aN68ecrPz9fGjRt15ZVXatSoUfryyy8ta+M/ffbZZ/rLX/6iPn36WB77wgsv1MGDByvLhg0bLIv9ww8/aNCgQQoJCdE///lPffXVV3rsscfUunVry9r47LPPPPq/du1aSdKNN95oWRvz58/X4sWLtXDhQn399deaP3++HnnkEf35z3+2rA1JuuWWW7R27Vq9/PLL+uKLLzRs2DClpqZq//799YpX22ftkUce0VNPPaUlS5bo008/VXh4uIYPH66TJ09a1kZZWZkuvfRSzZ8/3/L3cOLECRUUFOi+++5TQUGBVqxYoW3btunaa6+1rA1JOv/887Vw4UJ98cUX2rBhg+Lj4zVs2DAdPnzYsjZ+sXLlSn3yySeKi4sz9R7q2sZVV13l8Xl57bXXTLeDejJ1kkEAGDBggDFlypTKvysqKoy4uDgjOzvb8rYkVTk9ymqHDh0yJBnvv/++T9tp3bq18dxzz1ke9/jx48Z5551nrF271hgyZIgxdepUy2JnZWUZiYmJlsXzNmPGDOPSSy/1WfzqTJ061ejWrZvhdrsti3nNNdcYkyZN8qi7/vrrjXHjxlnWxokTJ4zg4GDjrbfe8qjv16+fMWvWrAbH9/6sud1uIyYmxnj00Ucr644dO2Y4nU7jtddes6SN/7R7925DkrFp06Z6xa4t/i/y8vIMScaePXt81sYvB/+89957lrbx/fffG+3btze2bt1qdO7cucpJeQ1tY/z48caoUaPqHRMN06RGBsrLy5Wfn6/U1NTKuqCgIKWmpio3N9ePPau/kpISSVKbNm18Er+iokLLli1TWVmZ6e0p62LKlCm65p
prPP6bWGnHjh2Ki4tT165dNW7cOO3du9ey2KtWrVL//v114403ql27drrooov07LPPWhbfW3l5uV555RVNmjRJDod159YPHDhQOTk52r59uyTp888/14YNGzRixAjL2jh9+rQqKioUFhbmUd+8eXNLR2t+sXv3bhUWFnr8/yoyMlLJyclN9rMu/fx5dzgcatWqlU/il5eX65lnnlFkZKQSExMti+t2u3XzzTdr+vTpuvDCCy2L6239+vVq166dunfvrsmTJ+vo0aM+awuemtR2xEeOHFFFRYWio6M96qOjo/XNN9/4qVf153a7deedd2rQoEHq1auXpbG/+OILpaSk6OTJk2rRooVWrlypCy64wNI2li1bpoKCggbNG9ckOTlZL730krp3766DBw9qzpw5Gjx4sLZu3aqWLVs2OP6uXbu0ePFiZWRk6J577tFnn32mP/zhDwoNDdX48eMteAee3nzzTR07dkwTJkywNO7MmTNVWlqqHj16KDg4WBUVFXrooYc0btw4y9po2bKlUlJSNHfuXPXs2VPR0dF67bXXlJubq4SEBMva+UVhYaEkVftZ/+VaU3Py5EnNmDFDY8eOVUREhKWx33rrLY0ZM0YnTpxQbGys1q5dq6ioKMviz58/X82aNdMf/vAHy2J6u+qqq3T99derS5cu+vbbb3XPPfdoxIgRys3NVXBwsM/axc+aVDJwtpkyZYq2bt3qk19W3bt31+bNm1VSUqI33nhD48eP1/vvv29ZQrBv3z5NnTpVa9eurfJr0Sr/+cu2T58+Sk5OVufOnfX666/rt7/9bYPju91u9e/fXw8//LAk6aKLLtLWrVu1ZMkSnyQDzz//vEaMGFGv+daavP766/rrX/+qV199VRdeeKE2b96sO++8U3FxcZa+j5dfflmTJk1S+/btFRwcrH79+mns2LHKz8+3rI2z1alTp3TTTTfJMAwtXrzY8vhXXHGFNm/erCNHjujZZ5/VTTfdpE8//VTt2rVrcOz8/Hw9+eSTKigosHREy9uYMWMq/3fv3r3Vp08fdevWTevXr9fQoUN91i5+1qSmCepzvGOg+v3vf6+33npL69atU4cOHSyPHxoaqoSEBCUlJSk7O1uJiYl68sknLYufn5+vQ4cOqV+/fmrWrJmaNWum999/X0899ZSaNWumiooKy9r6RatWrXT++edr586dlsSLjY2tkhz17NnT0qmIX+zZs0fvvfeebrnlFstjT58+XTNnztSYMWPUu3dv3XzzzZo2bVrlQSZW6datm95//339+OOP2rdvn/Ly8nTq1Cl17drV0nYkVX6ez4bP+i+JwJ49e7R27VrLRwUkKTw8XAkJCbrkkkv0/PPPq1mzZnr++ectif3hhx/q0KFD6tSpU+Vnfc+ePfrjH/+o+Ph4S9qoTteuXRUVFWXZ5x01a1LJQH2Odww0hmHo97//vVauXKl//etf6tKlS6O063a75XK5LIs3dOhQffHFF9q8eXNl6d+/v8aNG6fNmzf7ZFjvxx9/1LfffqvY2FhL4g0aNKjKY53bt29X586dLYn/n1588UW1a9dO11xzjeWxT5w4oaAgz49ycHCw3G635W1JP3/xxMbG6ocfftCaNWs0atQoy9vo0qWLYmJiPD7rpaWl+vTTT5vMZ136dyKwY8cOvffee2rbtm2jtGvl5/3mm2/Wli1bPD7rcXFxmj59utasWWNJG9X5/vvvdfToUcs+76hZk5smqO14x4b68ccfPTLR3bt3a/PmzWrTpo06derU4PhTpkzRq6++qv/7v/9Ty5YtK+c/IyMj1bx58wbHl6TMzEyNGDFCnTp10vHjx/Xqq69q/fr1ln5wW7ZsWWWdQ3h4uNq2bWvZ+oe77rpLI0eOVOfOnXXgwAFlZWUpODhYY8eOtST+tGnTNHDgQD388MO66aablJeXp2eeecb00Z+1cbvdevHFFzV+/Hg1a2b9R27kyJF66KGH1KlTJ1144YXatGmTHn/8cU2aNMnSdtasWSPDMNS9e3ft3LlT06dPV48ePer92avts3bnnXfqwQcf1HnnnacuXbrovvvuU1xcnNLS0ixro7i4WHv37q189v+X5DAmJqZOIxA1xY+NjdUNN9yggoICvfXWW6qoqKj8vLdp00ahoaENfg9t27bVQw89pGuvvVaxsbE6cuSIFi1apP3795t6fLW2f0/eSUxISIhiYmLUvXt3S9po06aN5syZo//+7/9WTEyMvv32W919991KSEjQ8OHD69wGGsDPTzPUy5///GejU6dORmhoqDFgwADjk08+sSz2unXrDElVyvjx4y2JX11sScaLL75oSXzDMIxJkyYZnTt3NkJDQ41zzz3XGDp0qPHuu+9aFv9MrH60cPTo0UZsbKwRGhpqtG/f3hg9erSxc+dOy+IbhmH84x//MHr16mU4nU6jR48exjPPPGNpfMMwjDVr1hiSjG3btlke2zAMo7S01Jg6darRqVMnIywszOjatasxa9Ysw+VyWdrO8uXLja5duxqhoaFGTEyMMWXKFOPYsWP1jlfbZ83tdhv33XefER0dbTidTmPo0KGm/x3W1saLL75Y7fWsrKwGx//lccXqyrp16yx5Dz/99JNx3XXXGXFxcUZoaKgRGxtrXHvttUZeXp6l/5681efRwpraOHHihDFs2DDj3HPPNUJCQozOnTsbt956q1FYWGiqDdQfRxgDAGBzTWrNAAAAsB7JAAAANkcyAACAzZEMAABgcyQDAADYHMkAAAA2RzIAAIDNkQwAAGBzJAMAANgcyQAAADZHMgAAgM39P91yGp8XQ/9OAAAAAElFTkSuQmCC", 97 | "text/plain": [ 98 | "
" 99 | ] 100 | }, 101 | "metadata": {}, 102 | "output_type": "display_data" 103 | } 104 | ], 105 | "source": [ 106 | "sns.heatmap(torch.mean(attn_output_weights, dim=0).detach().cpu().numpy())" 107 | ] 108 | }, 109 | { 110 | "cell_type": "markdown", 111 | "metadata": {}, 112 | "source": [ 113 | "## Mask" 114 | ] 115 | }, 116 | { 117 | "cell_type": "code", 118 | "execution_count": 5, 119 | "metadata": {}, 120 | "outputs": [ 121 | { 122 | "name": "stdout", 123 | "output_type": "stream", 124 | "text": [ 125 | "torch.Size([8, 16, 16])\n", 126 | "torch.Size([64, 16, 16])\n" 127 | ] 128 | } 129 | ], 130 | "source": [ 131 | "# Get a standard mask from class function\n", 132 | "mask = model.get_mask(X).to(DEVICE)\n", 133 | "print(mask.shape)\n", 134 | "mask = mask.repeat(model.nheads, 1, 1)\n", 135 | "print(mask.shape)" 136 | ] 137 | }, 138 | { 139 | "cell_type": "markdown", 140 | "metadata": {}, 141 | "source": [ 142 | "## Creating Attention Inputs" 143 | ] 144 | }, 145 | { 146 | "cell_type": "code", 147 | "execution_count": 6, 148 | "metadata": {}, 149 | "outputs": [ 150 | { 151 | "name": "stdout", 152 | "output_type": "stream", 153 | "text": [ 154 | "torch.Size([8, 16, 512])\n", 155 | "torch.Size([8, 16, 512])\n", 156 | "torch.Size([64, 16, 64])\n" 157 | ] 158 | } 159 | ], 160 | "source": [ 161 | "# Create Q, K, V matrices and reshape\n", 162 | "bsz, tgt_len, _ = X.shape\n", 163 | "x = model.linear_q(X)\n", 164 | "print(x.shape)\n", 165 | "x = model.activation(X)\n", 166 | "print(x.shape)\n", 167 | "x = x.view(tgt_len, bsz * model.nheads, model.head_dim).transpose(0, 1)\n", 168 | "print(x.shape)" 169 | ] 170 | }, 171 | { 172 | "cell_type": "markdown", 173 | "metadata": {}, 174 | "source": [ 175 | "## The Attention Equation" 176 | ] 177 | }, 178 | { 179 | "cell_type": "code", 180 | "execution_count": 7, 181 | "metadata": {}, 182 | "outputs": [ 183 | { 184 | "name": "stdout", 185 | "output_type": "stream", 186 | "text": [ 187 | "torch.Size([64, 16, 64])\n", 188 | "torch.Size([128, 512])\n" 189 | ] 190 | } 191 | ], 192 | "source": [ 193 | "# Apply attention - Part 1\n", 194 | "x_scaled = x * math.sqrt(1.0 / float(model.embed_size))\n", 195 | "attn_output_weights = torch.baddbmm(\n", 196 | " mask, x_scaled, x.transpose(-2, -1)\n", 197 | ")\n", 198 | "attn_output_weights = F.softmax(attn_output_weights, dim=-1)\n", 199 | "attn_output = torch.bmm(attn_output_weights, x)\n", 200 | "print(attn_output.shape)\n", 201 | "attn_output = (\n", 202 | " attn_output.transpose(0, 1).contiguous().view(tgt_len * bsz, model.embed_size)\n", 203 | ")\n", 204 | "print(attn_output.shape)" 205 | ] 206 | }, 207 | { 208 | "cell_type": "markdown", 209 | "metadata": {}, 210 | "source": [ 211 | "## Post-Attention Linear Layer" 212 | ] 213 | }, 214 | { 215 | "cell_type": "code", 216 | "execution_count": 8, 217 | "metadata": {}, 218 | "outputs": [ 219 | { 220 | "name": "stdout", 221 | "output_type": "stream", 222 | "text": [ 223 | "torch.Size([128, 512])\n", 224 | "torch.Size([128, 512])\n", 225 | "torch.Size([8, 16, 512])\n" 226 | ] 227 | } 228 | ], 229 | "source": [ 230 | "# Apply attention - Part 2\n", 231 | "attn_output = model.linear_o(attn_output)\n", 232 | "print(attn_output.shape)\n", 233 | "attn_output = model.activation(attn_output)\n", 234 | "print(attn_output.shape)\n", 235 | "attn_output = attn_output.view(bsz, tgt_len, model.embed_size)\n", 236 | "print(attn_output.shape)" 237 | ] 238 | }, 239 | { 240 | "cell_type": "markdown", 241 | "metadata": {}, 242 | "source": [ 243 | "## Create a Dataset to Test the Model" 
244 | ] 245 | }, 246 | { 247 | "cell_type": "code", 248 | "execution_count": 9, 249 | "metadata": {}, 250 | "outputs": [], 251 | "source": [ 252 | "# Fibonacci Numbers\n", 253 | "def fibonacci(x):\n", 254 | " v1=0\n", 255 | " v2=1\n", 256 | " result = [None]*x\n", 257 | " for i in range(x):\n", 258 | " next_val = v1 + v2\n", 259 | " result[i] = v1\n", 260 | " v1 = v2\n", 261 | " v2 = next_val\n", 262 | " return result\n", 263 | "\n", 264 | "# Lucas numbers\n", 265 | "def lucas(x):\n", 266 | " result = [None]*x\n", 267 | " fib = fibonacci(x+2)\n", 268 | " for i in range(1, x+1):\n", 269 | " result[i-1] = fib[i-1] + fib[i+1]\n", 270 | " return result\n", 271 | "\n", 272 | "\n", 273 | "# Squares\n", 274 | "def squares(x):\n", 275 | " result = [xx**2 for xx in range(x)]\n", 276 | " return result\n", 277 | "\n", 278 | "# Get Sequences\n", 279 | "SS = 4\n", 280 | "MAX = 20\n", 281 | "fib = fibonacci(MAX)\n", 282 | "luc = lucas(MAX)\n", 283 | "sqr = squares(10*MAX)\n", 284 | "\n", 285 | "# Generate Data\n", 286 | "N = 200\n", 287 | "X, Y = [], []\n", 288 | "for n in range(N):\n", 289 | " seq = random.randint(0, 2)\n", 290 | " idx = random.randint(0, MAX-SS-1)\n", 291 | " if seq==0: X.append(fib[idx:idx+SS])\n", 292 | " elif seq==1: X.append(luc[idx:idx+SS])\n", 293 | " elif seq==2: X.append(sqr[idx:idx+SS])\n", 294 | " Y.append([0, 1] if seq==0 else [1, 0])" 295 | ] 296 | }, 297 | { 298 | "cell_type": "markdown", 299 | "metadata": {}, 300 | "source": [ 301 | "## Create A Model with Data Embedding" 302 | ] 303 | }, 304 | { 305 | "cell_type": "code", 306 | "execution_count": 10, 307 | "metadata": {}, 308 | "outputs": [], 309 | "source": [ 310 | "# Multihead Attention Modulde\n", 311 | "class TestModel(nn.Module):\n", 312 | "\n", 313 | " def __init__(self,\n", 314 | " max_len,\n", 315 | " max_embed,\n", 316 | " embed_size,\n", 317 | " nheads,\n", 318 | " num_classes = 2,\n", 319 | " device=\"cpu\",\n", 320 | " activation=F.relu):\n", 321 | " super(TestModel, self).__init__()\n", 322 | " self.embed_size = embed_size\n", 323 | " self.embedding = nn.Embedding(max_embed, embed_size)\n", 324 | " self.multihead = MultiheadAttention(embed_size, nheads, activation, device)\n", 325 | " self.conv_out = nn.Conv1d(max_len, 1, kernel_size=3, padding=1)\n", 326 | " self.linear = nn.Linear(embed_size, num_classes)\n", 327 | " self.softmax = nn.Softmax()\n", 328 | "\n", 329 | " def forward(self, q, k, v):\n", 330 | " q, k, v = self.embedding(q), self.embedding(k), self.embedding(v)\n", 331 | " attn_output, attn_output_weights = self.multihead(q, k, v)\n", 332 | " x = self.conv_out(attn_output)\n", 333 | " x = x.reshape(-1, self.embed_size) \n", 334 | " x = self.linear(x)\n", 335 | " x = self.softmax(x)\n", 336 | " return x, attn_output_weights" 337 | ] 338 | }, 339 | { 340 | "cell_type": "markdown", 341 | "metadata": {}, 342 | "source": [ 343 | "## Training Loop" 344 | ] 345 | }, 346 | { 347 | "cell_type": "code", 348 | "execution_count": 11, 349 | "metadata": {}, 350 | "outputs": [ 351 | { 352 | "name": "stderr", 353 | "output_type": "stream", 354 | "text": [ 355 | "c:\\Users\\kril\\AppData\\Local\\Programs\\Python\\Python310\\lib\\site-packages\\torch\\nn\\modules\\module.py:1518: UserWarning: Implicit dimension choice for softmax has been deprecated. 
Change the call to include dim=X as an argument.\n", 356 | " return self._call_impl(*args, **kwargs)\n" 357 | ] 358 | }, 359 | { 360 | "name": "stdout", 361 | "output_type": "stream", 362 | "text": [ 363 | "Final Loss: 62.6523494720459\n" 364 | ] 365 | } 366 | ], 367 | "source": [ 368 | "DEVICE = \"cuda\"\n", 369 | "EPOCHS = 200\n", 370 | "BATCH_SIZE = 1\n", 371 | "EMBED_SIZE = 512\n", 372 | "LEARNING_RATE = 4.12E-5\n", 373 | "test_model = TestModel(max_len=SS, \n", 374 | " max_embed=np.max(X)+1, \n", 375 | " embed_size=EMBED_SIZE, \n", 376 | " nheads=4,\n", 377 | " device=DEVICE).to(DEVICE)\n", 378 | "criterion = torch.nn.CrossEntropyLoss()\n", 379 | "optimizer = torch.optim.AdamW(test_model.parameters(), lr=LEARNING_RATE)\n", 380 | "train_dataset = TensorDataset(torch.Tensor(X).type(torch.LongTensor).to(DEVICE), \n", 381 | " torch.Tensor(Y).to(DEVICE))\n", 382 | "train_dataloader = DataLoader(train_dataset, batch_size=BATCH_SIZE)\n", 383 | "\n", 384 | "for epoch in range(EPOCHS):\n", 385 | " test_model.train()\n", 386 | " train_loss = 0\n", 387 | " for xx, yy in train_dataloader:\n", 388 | " optimizer.zero_grad()\n", 389 | " out, attn_weights = test_model(xx, xx, xx)\n", 390 | " loss = criterion(out, yy)\n", 391 | " train_loss = train_loss + loss.item()\n", 392 | " loss.backward()\n", 393 | " optimizer.step()\n", 394 | "\n", 395 | "print(f\"Final Loss: {train_loss}\")" 396 | ] 397 | }, 398 | { 399 | "cell_type": "code", 400 | "execution_count": 12, 401 | "metadata": {}, 402 | "outputs": [ 403 | { 404 | "data": { 405 | "text/plain": [ 406 | "tensor([[136, 5],\n", 407 | " [ 17, 42]])" 408 | ] 409 | }, 410 | "execution_count": 12, 411 | "metadata": {}, 412 | "output_type": "execute_result" 413 | } 414 | ], 415 | "source": [ 416 | "x = torch.Tensor(X).type(torch.LongTensor).to(DEVICE)\n", 417 | "y = torch.Tensor(Y).to(DEVICE)\n", 418 | "y_hat, _ = test_model(x, x, x)\n", 419 | "yam = torch.argmax(y.detach().cpu(), dim=1)\n", 420 | "yham = torch.argmax(y_hat.detach().cpu(), dim=1)\n", 421 | "confmat = ConfusionMatrix(task=\"binary\", num_classes=2)\n", 422 | "confmat(yham, yam)" 423 | ] 424 | }, 425 | { 426 | "cell_type": "code", 427 | "execution_count": 13, 428 | "metadata": {}, 429 | "outputs": [ 430 | { 431 | "data": { 432 | "text/plain": [ 433 | "0.83" 434 | ] 435 | }, 436 | "execution_count": 13, 437 | "metadata": {}, 438 | "output_type": "execute_result" 439 | } 440 | ], 441 | "source": [ 442 | "166/200" 443 | ] 444 | }, 445 | { 446 | "cell_type": "code", 447 | "execution_count": 14, 448 | "metadata": {}, 449 | "outputs": [ 450 | { 451 | "data": { 452 | "text/plain": [ 453 | "" 454 | ] 455 | }, 456 | "execution_count": 14, 457 | "metadata": {}, 458 | "output_type": "execute_result" 459 | }, 460 | { 461 | "data": { 462 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAgMAAAGiCAYAAAB6c8WBAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy80BEi2AAAACXBIWXMAAA9hAAAPYQGoP6dpAAAfqElEQVR4nO3df3DU1b3/8dcmkg0qRJnIBkK4sVoFL0JskBgoIBrJt3pj0xlritbEFPErjVxkRwrbIhFtWeoPRC8BKhWxc4eC8hVrJcaLEXS8xBtJzFSsQBExXr5sIGUMGGGD2f3+Id949ySELGz4LD3Ph/P5g7OfnM87E2fyzvt9zvm4wuFwWAAAwFoJTgcAAACcRTIAAIDlSAYAALAcyQAAAJYjGQAAwHIkAwAAWI5kAAAAy5EMAABgOZIBAAAsRzIAAIDlSAYAAIgT77zzjgoKCjR48GC5XC698sorp/yaLVu26Hvf+57cbrcuv/xyrV69OurnkgwAABAnWltbNWrUKFVUVPTo/k8//VS33HKLJk2apIaGBj3wwAO655579MYbb0T1XBcvKgIAIP64XC5t2LBBhYWFJ71nzpw52rhxo7Zv394x9pOf/ERffPGFqqqqevwsKgMAAPSiYDCow4cPR1zBYDAmc9fU1CgvLy9iLD8/XzU1NVHNc15MoomB4817nA4BJ/QdPN7pEACgW1+37evV+WP5O8m/9A9asGBBxFh5ebkefvjhM547EAjI4/FEjHk8Hh0+fFhHjx5V3759ezRP3CQDAADEjVB7zKby+Xzyer0RY263O2bzxwLJAAAAvcjtdvfaL/+0tDQ1NTVFjDU1Nal///49rgpIJAMAAHQWDjkdQY/k5uaqsrIyYmzTpk3Kzc2Nah4WEAIAYAqFYndF4csvv1RDQ4MaGhokfbN1sKGhQY2NjZK+aTkUFxd33H/fffdpz549+sUvfqEdO3Zo2bJlevHFFzVr1qyonktlAAAAQ9ihysC2bds0adKkjn///7UGJSUlWr16tfbv39+RGEjSpZdeqo0bN2rWrFl6+umnNWTIEP3+979Xfn5+VM+Nm3MG2E0QP9hNACDe9fZugrb/+1HM5koa/M8xm6u3UBkAAMAUZXn/XEcyAACA6RxZQBgrLCAEAMByVAYAADDF8NChcwHJAAAAJtoEAADAJlQGAAAwsZsAAAC7OXXokFNoEwAAYDkqAwAAmGgTAABgOcvaBCQDAACYLDtngDUDAABYjsoAAAAm2gQAAFjOsgWEtAkAALAclQEAAEy0CQAAsBxtAgAAYBMqAwAAGMJhu84ZIBkAAMBk2ZoB2gQAAFiOygAAACbLFhCSDAAAYLKsTUAyAACAiRcVAQAAm1AZAADARJsAAADLWbaAkDYBAACWozIAAICJNgEAAJajTQAAAGxCZQAAAJNllQGSAQAADLa9tZA2AQAAlqMyAACAiTYBAACWY2th95qbm7Vq1SrV1NQoEAhIktLS0jR27FjdfffduuSSS2IeJAAAZ5VllYGo1gy8//77uuKKK/TMM88oJSVFEyZM0IQJE5SSkqJnnnlGw4YN07Zt2045TzAY1OHDhyOuYDB42t8EAAA4fVFVBmbMmKEf//jHWrFihVwuV8Rn4XBY9913n2bMmKGamppu5/H7/VqwYEHE2LzZ/6r5v5gZTTgAAPQOy9oErnA4HO7pzX379tUHH3ygYcOGdfn5jh07dM011+jo0aPdzhMMBjtVAhKO7JPb7e5pKOhFfQePdzoEAOjW1237enX+o/+xLGZz9Z3885jN1VuiqgykpaWptrb2pMlAbW2tPB7PKedxu92dfvEfb2uOJhQAABAjUSUDDz74oO69917V1dXpxhtv7PjF39TUpOrqaq1cuVJPPPFErwQKAMBZY1mbIKpkoKysTKmpqXrqqae0bNkytbd/c0JTYmKisrOztXr1at1+++29EigAAGeNZbsJot5aWFRUpKKiIh0/flzNzd+U9lNTU9WnT5+YBwcAAHrfaR861KdPHw0aNCiWsQAAEB+oDAAAYDnL1gzwoiIAACxHZQAAABNtAgAALGdZm4BkAAAAk2WVAdYMAABgOSoDAACYaBMAAGA52gQAAMAmVAYAADBZVhkgGQAAwBQOOx3BWUWbAAAAy1EZAADARJsAAADLWZYM0CYAAMByVAYAADBx6BAAAJajTQAAgOXC4dhdUaqoqFBmZqaSk5OVk5Oj2trabu9fsmSJrrzySvXt21cZGRmaNWuWjh07FtUzSQYAAIgT69atk9frVXl5uerr6zVq1Cjl5+frwIEDXd6/Zs0azZ07V+Xl5fr444/13HPPad26dfrlL38Z1XNJBgAAMIVCsbuisHjxYk2bNk2lpaW66qqrtGLFCp1//vlatWpVl/dv3bpV48aN0x133KHMzExNnjxZU6ZMOWU1wUQyAACAKYbJQDAY1OHDhyOuYDDY6ZFtbW2qq6tTXl5ex1hCQoLy8vJUU1PTZZhjx45VXV1dxy//PXv2qLKyUjfffHNU3y7JAAAAvcjv9yslJSXi8vv9ne5rbm5We3u7PB5PxLjH41EgEOhy7jvuuEOPPPKIvv/976tPnz667LLLdP3119MmAADgjIVDMbt8Pp9aWloiLp/PF5Mwt2zZooULF2rZsmWqr6/Xyy+/rI0bN+rRRx+Nah62FgIAYAiHYveiIrfbLbfbfcr7UlNTlZiYqKampojxpqYmpaWldfk1Dz30kO666y7dc889kqSrr75ara2tuvfee/WrX/1KCQk9+5ufygAAAHEgKSlJ2dnZqq6u7hgLhUKqrq5Wbm5ul1/z1VdfdfqFn5iYKEkKR7GtkcoAAAAmhw4d8nq9Kikp0ejRozVmzBgtWbJEra2tKi0tlSQVFxcrPT29Y81BQUGBFi9erGuuuUY5OTnavXu3HnroIRUUFHQkBT1BMgAAgMmh44iLiop08OBBzZ8/X4FAQFlZWaqqqupYVNjY2BhRCZg3b55cLpfmzZunffv26ZJLLlFBQYF+85vfRPVcVziaOkIvOt68x+kQcELfweOdDgEAuvV1275enf+r5TNiNtf50/8tZnP1FioDAACYYriA8FxAMgAAgMmyFxWRDAAAYLIsGWBrIQAAlqMyAACAKT7W1p81JAMAAJhoEwAAAJtQGQAAwMTWQgAALOfQCYROoU0AAIDlqAwAAGCiTeCMnWP+1ekQcMJbA8Y6HQJOuOHQVqdDAKwUZjcBAACwSdxUBgAAiBu0CQAAsJxluwlIBgAAMFlWGWDNAAAAlqMyAACAybLdBCQDAACYaBMAAACbUBkAAMDEbgIAACxHmwAAANiEygAAAAbb3k1AMgAAgIk2AQAAsAmVAQAATJZVBkgGAAAwsbUQAADLWVYZYM0AAACWozIAAIAhbFllgGQAAACTZckAbQIAACxHZQAAABMnEAIAYDnaBAAAwCZUBgAAMFlWGSAZAADAEA7blQzQJgAAwHJUBgAAMNEmAADAciQDAADYzbbjiFkzAACA5agMAABgsqwyQDIAAIDJrtOIaRMAAGA7KgMAABhsW0
BIMgAAgMmyZIA2AQAAlqMyAACAybIFhCQDAAAYbFszQJsAAADLURkAAMBEmwAAALvZ1iYgGQAAwGRZZYA1AwAAWM6RykAwGFQwGIwYawu3K8mV6EQ4AABECFMZODOff/65fvazn3V7j9/vV0pKSsT1+y8+iXUoAACcnlAMr3NAzJOBQ4cO6YUXXuj2Hp/Pp5aWlojrnosui3UoAACgB6JuE7z66qvdfr5nz55TzuF2u+V2uyPGaBEAAOKFbW2CqJOBwsJCuVwuhcMn33bhcrnOKCgAABxlWTIQdZtg0KBBevnllxUKhbq86uvreyNOAADQS6JOBrKzs1VXV3fSz09VNQAAIN6FQ7G7zgVRJwOzZ8/W2LFjT/r55Zdfrs2bN59RUAAAOMnJZKCiokKZmZlKTk5WTk6Oamtru73/iy++UFlZmQYNGiS3260rrrhClZWVUT0z6jUD48eP7/bzCy64QBMnTox2WgAA4oZTf9GvW7dOXq9XK1asUE5OjpYsWaL8/Hzt3LlTAwcO7HR/W1ubbrrpJg0cOFDr169Xenq6PvvsM1100UVRPZfjiAEAiBOLFy/WtGnTVFpaKklasWKFNm7cqFWrVmnu3Lmd7l+1apUOHTqkrVu3qk+fPpKkzMzMqJ/LccQAAJjCrphdwWBQhw8fjrjMU3ilb/7Kr6urU15eXsdYQkKC8vLyVFNT02WYr776qnJzc1VWViaPx6MRI0Zo4cKFam9vj+rbJRkAAMAQyzUDXZ266/f7Oz2zublZ7e3t8ng8EeMej0eBQKDLOPfs2aP169ervb1dlZWVeuihh/Tkk0/q17/+dVTfL20CAAB6kc/nk9frjRgzD947XaFQSAMHDtSzzz6rxMREZWdna9++fXr88cdVXl7e43lIBgAAMIRDsTs8r6tTd7uSmpqqxMRENTU1RYw3NTUpLS2ty68ZNGiQ+vTpo8TEb0/xHT58uAKBgNra2pSUlNSjGGkTAABgcGJrYVJSkrKzs1VdXd0xFgqFVF1drdzc3C6/Zty4cdq9e7dCoW8ftGvXLg0aNKjHiYBEMgAAQNzwer1auXKlXnjhBX388ceaPn26WltbO3YXFBcXy+fzddw/ffp0HTp0SDNnztSuXbu0ceNGLVy4UGVlZVE9lzYBAACGcNiZd+wUFRXp4MGDmj9/vgKBgLKyslRVVdWxqLCxsVEJCd/+HZ+RkaE33nhDs2bN0siRI5Wenq6ZM2dqzpw5UT3XFY6Ts4O3f+dfnA4BJxz6qq/TIeCEGw5tdToEIC593bavV+f/75wbYjbXkP96K2Zz9RbaBAAAWI42AQAAhljuJjgXkAwAAGCIjwb62UMyAACAwbbKAGsGAACwHJUBAAAMtlUGSAYAADDYtmaANgEAAJajMgAAgIE2AQAAlnPqOGKn0CYAAMByVAYAADBE8+rhfwQkAwAAGEK0CQAAgE2oDAAAYLBtASHJAAAABrYWAgBgOU4gBAAAVqEyAACAgTYBAACWY2shAACwCpUBAAAMbC0EAMBy7CYAAABWoTIAAIDBtgWEJAMAABhsWzNAmwAAAMtRGQAAwGDbAkKSAQAADKwZcMiLoRSnQ8AJ14XbnQ4BJ9w+aIzTIeCEF/fXOh0CziLWDAAAAKvETWUAAIB4QZsAAADLWbZ+kDYBAAC2ozIAAICBNgEAAJZjNwEAALAKlQEAAAwhpwM4y0gGAAAwhEWbAAAAWITKAAAAhpBlBw2QDAAAYAhZ1iYgGQAAwMCaAQAAYBUqAwAAGNhaCACA5WgTAAAAq1AZAADAQJsAAADL2ZYM0CYAAMByVAYAADDYtoCQZAAAAEPIrlyANgEAALajMgAAgIF3EwAAYDnLXlpIMgAAgImthQAAwCpUBgAAMIRcrBkAAMBqtq0ZoE0AAIDlqAwAAGCwbQEhyQAAAAZOIAQAAFYhGQAAwBCSK2ZXtCoqKpSZmank5GTl5OSotra2R1+3du1auVwuFRYWRv1MkgEAAAzhGF7RWLdunbxer8rLy1VfX69Ro0YpPz9fBw4c6Pbr9u7dqwcffFDjx4+P8onfIBkAAKAXBYNBHT58OOIKBoNd3rt48WJNmzZNpaWluuqqq7RixQqdf/75WrVq1Unnb29v15133qkFCxboO9/5zmnFSDIAAIAh5Ird5ff7lZKSEnH5/f5Oz2xra1NdXZ3y8vI6xhISEpSXl6eampqTxvrII49o4MCBmjp16ml/v+wmAADAEMuthT6fT16vN2LM7XZ3uq+5uVnt7e3yeDwR4x6PRzt27Ohy7nfffVfPPfecGhoazihGkgEAAAyxPIHQ7XZ3+cv/TB05ckR33XWXVq5cqdTU1DOai2QAAIA4kJqaqsTERDU1NUWMNzU1KS0trdP9n3zyifbu3auCgoKOsVDom5rGeeedp507d+qyyy7r0bNZMwAAgCGWawZ6KikpSdnZ2aqurv42jlBI1dXVys3N7XT/sGHD9OGHH6qhoaHjuvXWWzVp0iQ1NDQoIyOjx8+mMgAAgMGp44i9Xq9KSko0evRojRkzRkuWLFFra6tKS0slScXFxUpPT5ff71dycrJGjBgR8fUXXXSRJHUaPxWSAQAA4kRRUZEOHjyo+fPnKxAIKCsrS1VVVR2LChsbG5WQEPuiviscDsfFmxrnZ97pdAg44bqj7U6HgBP+Pbnrvcg4+17c37NT4HB2fN22r1fn/92Qn8Zsrv/93/8es7l6C5UBAAAMYV5U1L2jR4/q3Xff1V//+tdOnx07dkx/+MMfTjlHV6cxfR3mr1EAAJwQVTKwa9cuDR8+XBMmTNDVV1+tiRMnav/+/R2ft7S0dCxy6E5XpzH9Z8tH0UcPAEAvCMXwOhdElQzMmTNHI0aM0IEDB7Rz507169dP48aNU2NjY1QP9fl8amlpibjGpfxzVHMAANBbbEsGolozsHXrVr355ptKTU1Vamqq/vznP+vnP/+5xo8fr82bN+uCCy7o0TxdncZ0nisxmlAAAECMRFUZOHr0qM4779v8weVyafny5SooKNDEiRO1a9eumAcIAMDZ5tQrjJ0SVWVg2LBh2rZtm4YPHx4xvnTpUknSrbfeGrvIAABwSDQnB/4jiKoy8KMf/Uh//OMfu/xs6dKlmjJliuLk2AIAAE6bbWsGokoGfD6fKisrT/r5smXLOl6SAAAAzg0cOgQAgMG2P2tJBgAAMNjW8OYVxgAAWI7KAAAABtt2E5AMAABgsG3NAG0CAAAsR2UAAACDbQsISQYAADCELEsHaBMAAGA5KgMAABhsW0BIMgAAgMGuJgHJAAAAndhWGWDNAAAAlqMyAACAgRMIAQCwHFsLAQCAVagMAABgsKsuQDIAAEAn7CYAAABWoTIAAIDBtgWEJAMAABjsSgVoEwAAYD0qAwAAGGxbQEgyAACAgTUDAABYzq5UgDUDAABYj8oAAAAG1gwAAGC5sGWNAtoEAABYjsoAAAAG2gQAAFjOtq2FtAkAALAclQEAAAx21QVIBgAA6IQ2AQAAsAqVAQAADOwmAADAcrYdOkQyAACAwbbKAGsGAACwXNxUB
rYcDzgdAk7YHDf/V+DQsS+dDgEnfDJiuNMh4CyiTQAAgOVoEwAAAKtQGQAAwBAK0yYAAMBqdqUCtAkAALAelQEAAAy2vZuAZAAAAINtWwtpEwAAYDkqAwAAGGw7Z4BkAAAAA2sGAACwHGsGAACAVagMAABgYM0AAACWC1t2HDFtAgAA4khFRYUyMzOVnJysnJwc1dbWnvTelStXavz48br44ot18cUXKy8vr9v7T4ZkAAAAQ0jhmF3RWLdunbxer8rLy1VfX69Ro0YpPz9fBw4c6PL+LVu2aMqUKdq8ebNqamqUkZGhyZMna9++fVE91xWOk1rIhPQbnQ4BJ8TF/xCQJB36+kunQ8AJlen9nA4B/8M/1b/Zq/MXDP2XmM21/m//R8FgMGLM7XbL7XZ3ujcnJ0fXXnutli5dKkkKhULKyMjQjBkzNHfu3FM+q729XRdffLGWLl2q4uLiHsdIZQAAgF7k9/uVkpIScfn9/k73tbW1qa6uTnl5eR1jCQkJysvLU01NTY+e9dVXX+n48eMaMGBAVDGygBAAAEMszxnw+Xzyer0RY11VBZqbm9Xe3i6PxxMx7vF4tGPHjh49a86cORo8eHBEQtETJAMAABhieQLhyVoCsbZo0SKtXbtWW7ZsUXJyclRfSzIAAEAcSE1NVWJiopqamiLGm5qalJaW1u3XPvHEE1q0aJHefPNNjRw5Mupns2YAAABDOByO2dVTSUlJys7OVnV1dcdYKBRSdXW1cnNzT/p1jz32mB599FFVVVVp9OjRp/X9UhkAAMDg1AmEXq9XJSUlGj16tMaMGaMlS5aotbVVpaWlkqTi4mKlp6d3LED87W9/q/nz52vNmjXKzMxUIBCQJF144YW68MILe/xckgEAAAxOvaioqKhIBw8e1Pz58xUIBJSVlaWqqqqORYWNjY1KSPi2qL98+XK1tbXptttui5invLxcDz/8cI+fyzkD6CQu/oeAJM4ZiCecMxBfevucgckZ/ytmc/3H51Uxm6u3UBkAAMAQy90E5wKSAQAADHFSND9r2E0AAIDlqAwAAGCgTQAAgOWc2k3gFNoEAABYjsoAAACGkGULCEkGAAAw2JUK0CYAAMB6VAYAADCwmwAAAMuRDAAAYDlOIAQAAFahMgAAgIE2AQAAluMEQgAAYBUqAwAAGGxbQEgyAACAwbY1A7QJAACwXNSVgY8//ljvvfeecnNzNWzYMO3YsUNPP/20gsGgfvrTn+qGG2445RzBYFDBYDBiLBQOKcFFbgIAcJ5tbYKofvtWVVUpKytLDz74oK655hpVVVVpwoQJ2r17tz777DNNnjxZb7311inn8fv9SklJibg+P7L3dL8HAABiKqRwzK5zQVTJwCOPPKLZs2fr73//u55//nndcccdmjZtmjZt2qTq6mrNnj1bixYtOuU8Pp9PLS0tEVdGv8zT/R4AAMAZiCoZ+Oijj3T33XdLkm6//XYdOXJEt912W8fnd955p/7yl7+cch63263+/ftHXLQIAADxIhzD/84FUa8ZcLlckqSEhAQlJycrJSWl47N+/fqppaUldtEBAOCAEGsGTi4zM1N/+9vfOv5dU1OjoUOHdvy7sbFRgwYNil10AAA4gMpAN6ZPn6729vaOf48YMSLi89dff71HuwkAAED8iCoZuO+++7r9fOHChWcUDAAA8cC2NgEnEAIAYDhXyvuxwhJ+AAAsR2UAAAADbQIAACxHmwAAAFiFygAAAAbaBAAAWI42AQAAsAqVAQAADOFwyOkQziqSAQAADCHL2gQkAwAAGMKWLSBkzQAAAJajMgAAgIE2AQAAlqNNAAAArEJlAAAAAycQAgBgOU4gBAAAVqEyAACAwbYFhCQDAAAYbNtaSJsAAADLURkAAMBAmwAAAMuxtRAAAMvZVhlgzQAAAJajMgAAgMG23QQkAwAAGGgTAAAAq1AZAADAwG4CAAAsx4uKAACAVagMAABgoE0AAIDl2E0AAACsQmUAAACDbQsISQYAADDQJgAAwHLhcDhmV7QqKiqUmZmp5ORk5eTkqLa2ttv7X3rpJQ0bNkzJycm6+uqrVVlZGfUzSQYAAIgT69atk9frVXl5uerr6zVq1Cjl5+frwIEDXd6/detWTZkyRVOnTtUHH3ygwsJCFRYWavv27VE91xWOk1rIhPQbnQ4BJ8TF/xCQJB36+kunQ8AJlen9nA4B/8M/1b/Zq/Ofl5Qes7laj+xRMBiMGHO73XK73Z3uzcnJ0bXXXqulS5dKkkKhkDIyMjRjxgzNnTu30/1FRUVqbW3Va6+91jF23XXXKSsrSytWrOh5kGHExLFjx8Ll5eXhY8eOOR0Kwvw84gk/i/jBz8IZ5eXlYX3zd1bHVV5e3um+YDAYTkxMDG/YsCFivLi4OHzrrbd2OXdGRkb4qaeeihibP39+eOTIkVHFSJsgRoLBoBYsWNAp+4Mz+HnED34W8YOfhTN8Pp9aWloiLp/P1+m+5uZmtbe3y+PxRIx7PB4FAoEu5w4EAlHdfzLsJgAAoBedrCUQT6gMAAAQB1JTU5WYmKimpqaI8aamJqWlpXX5NWlpaVHdfzIkAwAAxIGkpCRlZ2erurq6YywUCqm6ulq5ubldfk1ubm7E/ZK0adOmk95/MrQJYsTtdqu8vDzuS0G24OcRP/hZxA9+FvHP6/WqpKREo0eP1pgxY7RkyRK1traqtLRUklRcXKz09HT5/X5J0syZMzVx4kQ9+eSTuuWWW7R27Vpt27ZNzz77bFTPjZuthQAAQFq6dKkef/xxBQIBZWVl6ZlnnlFOTo4k6frrr1dmZqZWr17dcf9LL72kefPmae/evfrud7+rxx57TDfffHNUzyQZAADAcqwZAADAciQDAABYjmQAAADLkQwAAGA5koEYifaVk+gd77zzjgoKCjR48GC5XC698sorTodkJb/fr2uvvVb9+vXTwIEDVVhYqJ07dzodlrWWL1+ukSNHqn///urfv79yc3P1+uuvOx0W4gjJQAxE+8pJ9J7W1laNGjVKFRUVToditbfffltlZWV67733tGnTJh0/flyTJ09Wa2ur06FZaciQIVq0aJHq6uq0bds23XDDDfrhD3+ojz76yOnQECfYWhgD0b5yEmeHy+XShg0bVFhY6HQo1jt48KAGDhyot99+WxMmTHA6HEgaMGCAHn/8cU2dOtXpUBAHqAycoba2NtXV1SkvL69jLCEhQXl5eaqpqXEwMiB+tLS0SPrmFxCc1d7errVr16q1tTXqI2vxj4vjiM9Qd6+c3LFjh0NRAfEjFArpgQce0Lhx4zRixAinw7HWhx9+qNzcXB07dkwXXnihNmzYoKuuusrpsBAnSAYA9KqysjJt375d7777rtOhWO3KK69UQ0ODWlpatH79epWUlOjtt98mIYAkkoEzdjqvnARscf/99+u1117TO++8oyFDhjgdjtWSkpJ0+eWXS5Kys7P1/vvv6+mnn9bvfvc7hyNDPGDNwBk6nVdOAv/owuGw7r//fm3YsEFvvfWWLr30UqdDgiEUCikYDDodBuIElYEYONUrJ3H2
fPnll9q9e3fHvz/99FM1NDRowIABGjp0qIOR2aWsrExr1qzRn/70J/Xr10+BQECSlJKSor59+zocnX18Pp9+8IMfaOjQoTpy5IjWrFmjLVu26I033nA6NMQJthbGSHevnMTZs2XLFk2aNKnTeElJScQrP9G7XC5Xl+PPP/+87r777rMbDDR16lRVV1dr//79SklJ0ciRIzVnzhzddNNNToeGOEEyAACA5VgzAACA5UgGAACwHMkAAACWIxkAAMByJAMAAFiOZAAAAMuRDAAAYDmSAQAALEcyAACA5UgGAACwHMkAAACW+3973DP7NB+7mAAAAABJRU5ErkJggg==", 463 | "text/plain": [ 464 | "
" 465 | ] 466 | }, 467 | "metadata": {}, 468 | "output_type": "display_data" 469 | } 470 | ], 471 | "source": [ 472 | "sns.heatmap(torch.mean(attn_weights, dim=0).detach().cpu().numpy())" 473 | ] 474 | } 475 | ], 476 | "metadata": { 477 | "kernelspec": { 478 | "display_name": "Python 3", 479 | "language": "python", 480 | "name": "python3" 481 | }, 482 | "language_info": { 483 | "codemirror_mode": { 484 | "name": "ipython", 485 | "version": 3 486 | }, 487 | "file_extension": ".py", 488 | "mimetype": "text/x-python", 489 | "name": "python", 490 | "nbconvert_exporter": "python", 491 | "pygments_lexer": "ipython3", 492 | "version": "3.10.8" 493 | } 494 | }, 495 | "nbformat": 4, 496 | "nbformat_minor": 2 497 | } 498 | -------------------------------------------------------------------------------- /multihead/multihead_attention.py: -------------------------------------------------------------------------------- 1 | # Basic Multihead Attention Implementation 2 | # See: https://github.com/CyberZHG/torch-multi-head-attention/blob/master/torch_multi_head_attention/multi_head_attention.py 3 | # See: https://github.com/pytorch/pytorch/blob/a1a2023eb86805b1a3867dbda9c89be3cd63dd27/torch/nn/functional.py#L6081 4 | 5 | 6 | # Imports 7 | import math 8 | import torch 9 | import torch.nn as nn 10 | import torch.nn.functional as F 11 | 12 | 13 | # Multihead Attention Module 14 | class MultiheadAttention(nn.Module): 15 | 16 | def __init__(self, 17 | embed_size, 18 | nheads, 19 | activation=F.relu, 20 | device = "cpu"): 21 | super(MultiheadAttention, self).__init__() 22 | self.embed_size = embed_size 23 | self.nheads = nheads 24 | self.activation = activation 25 | self.head_dim = embed_size // nheads 26 | self.device = device 27 | self.linear_q = nn.Linear(embed_size, embed_size, bias=True) 28 | self.linear_k = nn.Linear(embed_size, embed_size, bias=True) 29 | self.linear_v = nn.Linear(embed_size, embed_size, bias=True) 30 | self.linear_o = nn.Linear(embed_size, embed_size, bias=True) 31 | 32 | def forward(self, q, k, v): 33 | 34 | # Get a standard mask from class function 35 | mask = self.get_mask(q) 36 | mask = mask.repeat(self.nheads, 1, 1).to(self.device) 37 | 38 | # Create Q, K, V matrices and reshape 39 | bsz, seq_len, _ = q.shape 40 | q, k, v = self.linear_q(q), self.linear_k(k), self.linear_v(v) 41 | q, k, v = self.activation(q), self.activation(k), self.activation(v) 42 | q = q.view(seq_len, bsz * self.nheads, self.head_dim).transpose(0, 1) 43 | k = k.view(seq_len, bsz * self.nheads, self.head_dim).transpose(0, 1) 44 | v = v.view(seq_len, bsz * self.nheads, self.head_dim).transpose(0, 1) 45 | 46 | # Apply attention 47 | q_scaled = q * math.sqrt(1.0 / float(self.embed_size)) 48 | attn_output_weights = torch.baddbmm( 49 | mask, q_scaled, k.transpose(-2, -1) 50 | ) 51 | attn_output_weights = F.softmax(attn_output_weights, dim=-1) 52 | attn_output = torch.bmm(attn_output_weights, v) 53 | attn_output = ( 54 | attn_output.transpose(0, 1).contiguous().view(seq_len * bsz, self.embed_size) 55 | ) 56 | attn_output = self.linear_o(attn_output) 57 | attn_output = self.activation(attn_output) 58 | attn_output = attn_output.view(bsz, seq_len, self.embed_size) 59 | 60 | # Return Outputs 61 | return attn_output, attn_output_weights 62 | 63 | # Method to generate a standard mask 64 | @staticmethod 65 | def get_mask(x): 66 | batch_size, seq_len, _ = x.shape 67 | return torch.tril((1e9)*torch.ones(seq_len, seq_len)).view(1, seq_len, seq_len).repeat(batch_size, 1, 1) - 1e9 68 | 
-------------------------------------------------------------------------------- /readme.md: -------------------------------------------------------------------------------- 1 | ## PyTorch Examples 2 | - *Biochem Graphormer / Lipophilicity (Unfinished)*: Using Graphormer to Predict Small Molecule Lipophilicity 3 | - *Biochem Transformer / PFam*: Using Transformer Encoder to Classify Protein Sequences 4 | - *Biochem Transformer / PFam_RNN*: Using LSTM to Classify Protein Sequences for Benchmarking 5 | - *DiffEq*: Experimentation with Autograd for Differential Equations 6 | - *Experimental*: Junkyard with ideas currently being fleshed out 7 | - *Multihead*: Notebook to help understand multihead attention 8 | - *Timeseries Attention*: Application of Basic Transformer Attention to Timeseries Forecasting 9 | - *Timeseries Transformer*: Application of Basic Transformer Encoder Layer to Timeseries Forecasting 10 | 11 | ### Notes: 12 | - Examples are not currently packaged; cd into a subfolder to run scripts 13 | - Timeseries Attention models are in timeseries_attention/forecasting_model.py; Examples are run from other scripts; 14 | - Timeseries Encoder models are in timeseries_transformer/forecasting_model.py; Examples are run from other scripts; 15 | - Protein Family Transformer Classifier models are in biochem_transformer/pfam/pfam_model.py; Example is run from the pfam_example.py script in this folder 16 | - Protein Family LSTM Classifier models are in biochem_transformer/pfam_rnn/pfam_model.py; Example is run from the pfam_example.py script in this folder 17 | -------------------------------------------------------------------------------- /timeseries_attention/complex_trig_example.py: -------------------------------------------------------------------------------- 1 | # Imports 2 | import torch 3 | import numpy as np 4 | import matplotlib.pyplot as plt 5 | from forecasting_model import ForecastingModel 6 | from torch.utils.data import TensorDataset, DataLoader 7 | 8 | 9 | # Get a noisy, more complex wave 10 | DATA_SIZE = 6000 11 | x = torch.linspace(0, 30, DATA_SIZE, requires_grad = True) 12 | y = torch.sin(x)-torch.cos(x)**2 13 | Y = torch.sum(y) 14 | Y.backward() 15 | x = x.grad + np.random.normal(0, 0.05, DATA_SIZE) 16 | x = x.detach().numpy() 17 | 18 | 19 | # Create a dataset 20 | seq_len = 1600 21 | X = np.array([x[ii:ii+seq_len] for ii in range(0, x.shape[0]-seq_len)]) 22 | Y = np.array([x[ii+seq_len] for ii in range(0, x.shape[0]-seq_len)]) 23 | 24 | 25 | # Training Loop 26 | EPOCHS = 10 27 | BATCH_SIZE = 2 28 | LEARNING_RATE = 1e-6 29 | model = ForecastingModel(seq_len).to("cuda") 30 | model.train() 31 | criterion = torch.nn.MSELoss() 32 | optimizer = torch.optim.Adam(model.parameters(), lr=LEARNING_RATE) 33 | dataset = TensorDataset(torch.Tensor(X).to("cuda"), torch.Tensor(Y).to("cuda")) 34 | dataloader = DataLoader(dataset, batch_size=BATCH_SIZE) 35 | for epoch in range(EPOCHS): 36 | for xx, yy in dataloader: 37 | optimizer.zero_grad() 38 | out = model(xx) 39 | loss = criterion(out, yy) 40 | loss.backward() 41 | optimizer.step() 42 | print(f"Epoch {epoch+1}/{EPOCHS}: Loss={loss}") 43 | 44 | 45 | # Prediction Loop 46 | FORCAST = 6000 47 | model.eval() 48 | for ff in range(FORCAST): 49 | xx = x[len(x)-seq_len:len(x)] 50 | yy = model(torch.Tensor(xx).reshape(1, xx.shape[0]).to("cuda")) 51 | x = np.concatenate((x, yy.detach().cpu().numpy().reshape(1,))) 52 | 53 | 54 | # Plot Predictions 55 | import matplotlib.pyplot as plt 56 | fig = plt.figure(figsize=(6, 6)) 57 | x_new = 
torch.linspace(30, 60, FORCAST, requires_grad = True) 58 | y_new = torch.sin(x_new)-torch.cos(x_new)**2 59 | Y_new = torch.sum(y_new) 60 | Y_new.backward() 61 | plt.plot(range(x[:DATA_SIZE].shape[0]), x[:DATA_SIZE], label="Training") 62 | plt.plot(range(x[:DATA_SIZE].shape[0], x.shape[0]), x[DATA_SIZE:DATA_SIZE+FORCAST], 'r--', label="Predicted") 63 | plt.plot(range(x[:DATA_SIZE].shape[0], x.shape[0]), x_new.grad.detach().numpy(), 'g-', label="Actual") 64 | plt.legend() 65 | fig.savefig("./img/complex_trig_example.png") 66 | -------------------------------------------------------------------------------- /timeseries_attention/forecasting_model.py: -------------------------------------------------------------------------------- 1 | # Imports 2 | import torch 3 | import numpy as np 4 | import torch.nn as nn 5 | import torch.nn.functional as F 6 | 7 | 8 | # A basic attention mechanism 9 | class Attention(torch.nn.Module): 10 | def __init__(self, seq_len=200, device="cuda"): 11 | super(Attention, self).__init__() 12 | self.device=device 13 | self.queries = nn.Linear(seq_len, seq_len) 14 | self.keys = nn.Linear(seq_len, seq_len) 15 | self.values = nn.Linear(seq_len, seq_len) 16 | def forward(self, x, mask=True): 17 | q = self.queries(x).reshape(x.shape[0], x.shape[1], 1) 18 | k = self.keys(x).reshape(x.shape[0], x.shape[1], 1) 19 | v = self.values(x).reshape(x.shape[0], x.shape[1], 1) 20 | scores = torch.bmm(q, k.transpose(-2, -1)) 21 | if mask: 22 | maskmat = torch.tril(torch.ones((x.shape[1], x.shape[1]))).to(self.device) 23 | scores = scores.masked_fill(maskmat == 0, -1e9) 24 | attention_weights = F.softmax(scores, dim=-1) 25 | output = torch.bmm(attention_weights, v) 26 | return output.reshape(output.shape[0], output.shape[1]) 27 | 28 | 29 | # A forecasting model 30 | class ForecastingModel(torch.nn.Module): 31 | def __init__(self, seq_len=200, ffdim=64, device="cuda"): 32 | super(ForecastingModel, self).__init__() 33 | self.relu = nn.ReLU() 34 | self.attention = Attention(seq_len, device=device) 35 | self.linear1 = nn.Linear(seq_len, int(ffdim)) 36 | self.linear2 = nn.Linear(int(ffdim), int(ffdim/2)) 37 | self.linear3 = nn.Linear(int(ffdim/2), int(ffdim/4)) 38 | self.outlayer = nn.Linear(int(ffdim/4), 1) 39 | def forward(self, x): 40 | x = self.attention(x) 41 | x = self.linear1(x) 42 | x = self.relu(x) 43 | x = self.linear2(x) 44 | x = self.relu(x) 45 | x = self.linear3(x) 46 | x = self.relu(x) 47 | return self.outlayer(x) 48 | -------------------------------------------------------------------------------- /timeseries_attention/img/complex_trig_example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BrandenKeck/pytorch_fun/8d26c3be4f358083f2aa8487ec656e4cbbeca218/timeseries_attention/img/complex_trig_example.png -------------------------------------------------------------------------------- /timeseries_attention/img/sine_example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BrandenKeck/pytorch_fun/8d26c3be4f358083f2aa8487ec656e4cbbeca218/timeseries_attention/img/sine_example.png -------------------------------------------------------------------------------- /timeseries_attention/img/sunspots_example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BrandenKeck/pytorch_fun/8d26c3be4f358083f2aa8487ec656e4cbbeca218/timeseries_attention/img/sunspots_example.png
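A minimal shape check for the Attention / ForecastingModel pair defined in forecasting_model.py above. This is a sketch, not a repo file; it assumes it sits in timeseries_attention/ and runs on CPU by overriding the default device="cuda". Each sample is a window of seq_len past values, and the model returns a single next-step prediction per sample.

import torch
from forecasting_model import Attention, ForecastingModel  # assumption: snippet lives in timeseries_attention/

seq_len, batch = 200, 4
x = torch.randn(batch, seq_len)               # (batch, seq_len): a window of past values per sample

attn = Attention(seq_len=seq_len, device="cpu")
print(attn(x).shape)                          # torch.Size([4, 200]) - one attended value per position

model = ForecastingModel(seq_len=seq_len, device="cpu")
print(model(x).shape)                         # torch.Size([4, 1]) - the forecast for the next step

This matches how the example scripts in this folder use the model: they slide a seq_len window over the series for training, then repeatedly feed the latest window back in to roll the forecast forward.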
-------------------------------------------------------------------------------- /timeseries_attention/sine_example.py: -------------------------------------------------------------------------------- 1 | # Imports 2 | import torch 3 | import numpy as np 4 | import matplotlib.pyplot as plt 5 | from forecasting_model import ForecastingModel 6 | from torch.utils.data import TensorDataset, DataLoader 7 | 8 | # Get a noisy sine wave 9 | DATA_SIZE = 1000 10 | x = np.sin(np.linspace(0, 10, DATA_SIZE)) 11 | x = x + np.random.normal(0, 0.05, DATA_SIZE) 12 | 13 | 14 | # Create a dataset 15 | seq_len = 200 16 | X = np.array([x[ii:ii+seq_len] for ii in range(0, x.shape[0]-seq_len)]) 17 | Y = np.array([x[ii+seq_len] for ii in range(0, x.shape[0]-seq_len)]) 18 | 19 | 20 | # Training Loop 21 | EPOCHS = 20 22 | BATCH_SIZE = 8 23 | LEARNING_RATE = 4.12e-5 24 | model = ForecastingModel(seq_len).to("cuda") 25 | model.train() 26 | criterion = torch.nn.MSELoss() 27 | optimizer = torch.optim.Adam(model.parameters(), lr=LEARNING_RATE) 28 | dataset = TensorDataset(torch.Tensor(X).to("cuda"), torch.Tensor(Y).to("cuda")) 29 | dataloader = DataLoader(dataset, batch_size=BATCH_SIZE) 30 | for epoch in range(EPOCHS): 31 | for xx, yy in dataloader: 32 | optimizer.zero_grad() 33 | out = model(xx) 34 | loss = criterion(out, yy) 35 | loss.backward() 36 | optimizer.step() 37 | print(f"Epoch {epoch+1}/{EPOCHS}: Loss={loss}") 38 | 39 | 40 | # New Prediction Loop 41 | FORCAST = 1000 42 | model.eval() 43 | for ff in range(FORCAST): 44 | xx = x[len(x)-seq_len:len(x)] 45 | yy = model(torch.Tensor(xx).reshape(1, xx.shape[0]).to("cuda")) 46 | x = np.concatenate((x, yy.detach().cpu().numpy().reshape(1,))) 47 | 48 | 49 | # Plot Predictions 50 | import matplotlib.pyplot as plt 51 | fig = plt.figure(figsize=(6, 6)) 52 | plt.plot(range(x[:DATA_SIZE].shape[0]), x[:DATA_SIZE], label="Training") 53 | plt.plot(range(x[:DATA_SIZE].shape[0], x.shape[0]), x[DATA_SIZE:DATA_SIZE+FORCAST], 'r--', label="Predicted") 54 | plt.plot(range(x[:DATA_SIZE].shape[0], x.shape[0]), np.sin(np.linspace(10, 20, DATA_SIZE)), 'g-', label="Actual") 55 | plt.legend() 56 | fig.savefig("./img/sine_example.png") -------------------------------------------------------------------------------- /timeseries_attention/sunspots.csv: -------------------------------------------------------------------------------- 1 | ,Date,Monthly Mean Total Sunspot Number 2 | 0,1749-01-31,96.7 3 | 1,1749-02-28,104.3 4 | 2,1749-03-31,116.7 5 | 3,1749-04-30,92.8 6 | 4,1749-05-31,141.7 7 | 5,1749-06-30,139.2 8 | 6,1749-07-31,158.0 9 | 7,1749-08-31,110.5 10 | 8,1749-09-30,126.5 11 | 9,1749-10-31,125.8 12 | 10,1749-11-30,264.3 13 | 11,1749-12-31,142.0 14 | 12,1750-01-31,122.2 15 | 13,1750-02-28,126.5 16 | 14,1750-03-31,148.7 17 | 15,1750-04-30,147.2 18 | 16,1750-05-31,150.0 19 | 17,1750-06-30,166.7 20 | 18,1750-07-31,142.3 21 | 19,1750-08-31,171.7 22 | 20,1750-09-30,152.0 23 | 21,1750-10-31,109.5 24 | 22,1750-11-30,105.5 25 | 23,1750-12-31,125.7 26 | 24,1751-01-31,116.7 27 | 25,1751-02-28,72.5 28 | 26,1751-03-31,75.5 29 | 27,1751-04-30,94.0 30 | 28,1751-05-31,101.2 31 | 29,1751-06-30,84.5 32 | 30,1751-07-31,110.5 33 | 31,1751-08-31,99.7 34 | 32,1751-09-30,39.2 35 | 33,1751-10-31,38.7 36 | 34,1751-11-30,47.5 37 | 35,1751-12-31,73.3 38 | 36,1752-01-31,58.3 39 | 37,1752-02-29,83.3 40 | 38,1752-03-31,118.3 41 | 39,1752-04-30,98.8 42 | 40,1752-05-31,99.5 43 | 41,1752-06-30,66.0 44 | 42,1752-07-31,130.7 45 | 43,1752-08-31,48.8 46 | 44,1752-09-30,45.2 47 | 45,1752-10-31,77.7 48 | 46,1752-11-30,62.7 49 | 
47,1752-12-31,66.7 50 | 48,1753-01-31,73.3 51 | 49,1753-02-28,53.3 52 | 50,1753-03-31,76.2 53 | 51,1753-04-30,63.3 54 | 52,1753-05-31,60.0 55 | 53,1753-06-30,52.8 56 | 54,1753-07-31,36.7 57 | 55,1753-08-31,65.0 58 | 56,1753-09-30,46.7 59 | 57,1753-10-31,41.7 60 | 58,1753-11-30,33.3 61 | 59,1753-12-31,11.2 62 | 60,1754-01-31,0.0 63 | 61,1754-02-28,5.0 64 | 62,1754-03-31,2.8 65 | 63,1754-04-30,22.8 66 | 64,1754-05-31,34.5 67 | 65,1754-06-30,44.5 68 | 66,1754-07-31,31.3 69 | 67,1754-08-31,20.5 70 | 68,1754-09-30,13.7 71 | 69,1754-10-31,40.2 72 | 70,1754-11-30,22.0 73 | 71,1754-12-31,7.0 74 | 72,1755-01-31,17.0 75 | 73,1755-02-28,18.7 76 | 74,1755-03-31,11.3 77 | 75,1755-04-30,10.8 78 | 76,1755-05-31,0.0 79 | 77,1755-06-30,0.0 80 | 78,1755-07-31,14.3 81 | 79,1755-08-31,5.3 82 | 80,1755-09-30,29.7 83 | 81,1755-10-31,39.5 84 | 82,1755-11-30,11.3 85 | 83,1755-12-31,33.3 86 | 84,1756-01-31,20.8 87 | 85,1756-02-29,11.8 88 | 86,1756-03-31,9.0 89 | 87,1756-04-30,15.7 90 | 88,1756-05-31,20.8 91 | 89,1756-06-30,21.5 92 | 90,1756-07-31,6.0 93 | 91,1756-08-31,10.7 94 | 92,1756-09-30,19.7 95 | 93,1756-10-31,23.8 96 | 94,1756-11-30,28.3 97 | 95,1756-12-31,15.7 98 | 96,1757-01-31,23.5 99 | 97,1757-02-28,35.3 100 | 98,1757-03-31,43.7 101 | 99,1757-04-30,50.0 102 | 100,1757-05-31,63.5 103 | 101,1757-06-30,21.3 104 | 102,1757-07-31,41.7 105 | 103,1757-08-31,85.5 106 | 104,1757-09-30,66.2 107 | 105,1757-10-31,54.2 108 | 106,1757-11-30,107.8 109 | 107,1757-12-31,55.8 110 | 108,1758-01-31,62.7 111 | 109,1758-02-28,86.7 112 | 110,1758-03-31,81.7 113 | 111,1758-04-30,120.5 114 | 112,1758-05-31,77.3 115 | 113,1758-06-30,75.0 116 | 114,1758-07-31,73.3 117 | 115,1758-08-31,64.5 118 | 116,1758-09-30,104.2 119 | 117,1758-10-31,62.8 120 | 118,1758-11-30,71.7 121 | 119,1758-12-31,71.7 122 | 120,1759-01-31,80.5 123 | 121,1759-02-28,73.3 124 | 122,1759-03-31,78.0 125 | 123,1759-04-30,78.3 126 | 124,1759-05-31,81.7 127 | 125,1759-06-30,83.3 128 | 126,1759-07-31,85.0 129 | 127,1759-08-31,118.8 130 | 128,1759-09-30,128.7 131 | 129,1759-10-31,99.5 132 | 130,1759-11-30,77.2 133 | 131,1759-12-31,95.0 134 | 132,1760-01-31,112.2 135 | 133,1760-02-29,99.2 136 | 134,1760-03-31,124.5 137 | 135,1760-04-30,97.2 138 | 136,1760-05-31,120.0 139 | 137,1760-06-30,80.5 140 | 138,1760-07-31,110.0 141 | 139,1760-08-31,126.0 142 | 140,1760-09-30,102.2 143 | 141,1760-10-31,84.3 144 | 142,1760-11-30,99.5 145 | 143,1760-12-31,101.7 146 | 144,1761-01-31,116.7 147 | 145,1761-02-28,151.7 148 | 146,1761-03-31,134.5 149 | 147,1761-04-30,119.5 150 | 148,1761-05-31,178.7 151 | 149,1761-06-30,165.5 152 | 150,1761-07-31,156.8 153 | 151,1761-08-31,151.8 154 | 152,1761-09-30,167.8 155 | 153,1761-10-31,147.8 156 | 154,1761-11-30,149.5 157 | 155,1761-12-31,76.7 158 | 156,1762-01-31,73.0 159 | 157,1762-02-28,121.3 160 | 158,1762-03-31,76.2 161 | 159,1762-04-30,100.3 162 | 160,1762-05-31,66.5 163 | 161,1762-06-30,128.5 164 | 162,1762-07-31,56.3 165 | 163,1762-08-31,112.8 166 | 164,1762-09-30,114.2 167 | 165,1762-10-31,115.5 168 | 166,1762-11-30,129.7 169 | 167,1762-12-31,128.7 170 | 168,1763-01-31,94.2 171 | 169,1763-02-28,53.2 172 | 170,1763-03-31,57.0 173 | 171,1763-04-30,54.8 174 | 172,1763-05-31,54.5 175 | 173,1763-06-30,59.7 176 | 174,1763-07-31,90.3 177 | 175,1763-08-31,44.2 178 | 176,1763-09-30,113.5 179 | 177,1763-10-31,77.2 180 | 178,1763-11-30,101.5 181 | 179,1763-12-31,102.3 182 | 180,1764-01-31,99.5 183 | 181,1764-02-29,99.5 184 | 182,1764-03-31,67.0 185 | 183,1764-04-30,57.3 186 | 184,1764-05-31,73.8 187 | 185,1764-06-30,50.0 188 | 
186,1764-07-31,50.0 189 | 187,1764-08-31,50.0 190 | 188,1764-09-30,47.0 191 | 189,1764-10-31,46.7 192 | 190,1764-11-30,43.3 193 | 191,1764-12-31,42.8 194 | 192,1765-01-31,40.0 195 | 193,1765-02-28,43.3 196 | 194,1765-03-31,41.7 197 | 195,1765-04-30,36.7 198 | 196,1765-05-31,33.7 199 | 197,1765-06-30,33.3 200 | 198,1765-07-31,45.0 201 | 199,1765-08-31,49.5 202 | 200,1765-09-30,26.7 203 | 201,1765-10-31,23.3 204 | 202,1765-11-30,23.3 205 | 203,1765-12-31,21.7 206 | 204,1766-01-31,20.0 207 | 205,1766-02-28,18.3 208 | 206,1766-03-31,61.0 209 | 207,1766-04-30,10.0 210 | 208,1766-05-31,44.7 211 | 209,1766-06-30,5.0 212 | 210,1766-07-31,5.5 213 | 211,1766-08-31,6.7 214 | 212,1766-09-30,7.2 215 | 213,1766-10-31,8.3 216 | 214,1766-11-30,9.5 217 | 215,1766-12-31,32.0 218 | 216,1767-01-31,45.7 219 | 217,1767-02-28,50.0 220 | 218,1767-03-31,71.7 221 | 219,1767-04-30,54.8 222 | 220,1767-05-31,49.7 223 | 221,1767-06-30,55.5 224 | 222,1767-07-31,36.5 225 | 223,1767-08-31,68.0 226 | 224,1767-09-30,71.2 227 | 225,1767-10-31,73.5 228 | 226,1767-11-30,91.2 229 | 227,1767-12-31,88.8 230 | 228,1768-01-31,89.2 231 | 229,1768-02-29,110.2 232 | 230,1768-03-31,77.2 233 | 231,1768-04-30,71.2 234 | 232,1768-05-31,129.5 235 | 233,1768-06-30,129.0 236 | 234,1768-07-31,87.7 237 | 235,1768-08-31,111.3 238 | 236,1768-09-30,124.7 239 | 237,1768-10-31,129.7 240 | 238,1768-11-30,151.0 241 | 239,1768-12-31,186.3 242 | 240,1769-01-31,123.2 243 | 241,1769-02-28,107.0 244 | 242,1769-03-31,107.2 245 | 243,1769-04-30,161.2 246 | 244,1769-05-31,122.7 247 | 245,1769-06-30,157.3 248 | 246,1769-07-31,197.7 249 | 247,1769-08-31,200.5 250 | 248,1769-09-30,248.0 251 | 249,1769-10-31,263.7 252 | 250,1769-11-30,246.8 253 | 251,1769-12-31,186.7 254 | 252,1770-01-31,173.3 255 | 253,1770-02-28,237.5 256 | 254,1770-03-31,133.5 257 | 255,1770-04-30,85.0 258 | 256,1770-05-31,116.8 259 | 257,1770-06-30,138.8 260 | 258,1770-07-31,183.0 261 | 259,1770-08-31,210.5 262 | 260,1770-09-30,174.0 263 | 261,1770-10-31,172.7 264 | 262,1770-11-30,220.3 265 | 263,1770-12-31,170.5 266 | 264,1771-01-31,60.0 267 | 265,1771-02-28,77.0 268 | 266,1771-03-31,77.8 269 | 267,1771-04-30,108.2 270 | 268,1771-05-31,254.5 271 | 269,1771-06-30,199.2 272 | 270,1771-07-31,112.8 273 | 271,1771-08-31,97.5 274 | 272,1771-09-30,169.0 275 | 273,1771-10-31,150.0 276 | 274,1771-11-30,166.2 277 | 275,1771-12-31,159.5 278 | 276,1772-01-31,168.2 279 | 277,1772-02-29,151.3 280 | 278,1772-03-31,51.8 281 | 279,1772-04-30,153.7 282 | 280,1772-05-31,63.3 283 | 281,1772-06-30,95.0 284 | 282,1772-07-31,128.8 285 | 283,1772-08-31,93.7 286 | 284,1772-09-30,84.2 287 | 285,1772-10-31,131.0 288 | 286,1772-11-30,102.2 289 | 287,1772-12-31,106.7 290 | 288,1773-01-31,91.0 291 | 289,1773-02-28,48.3 292 | 290,1773-03-31,85.3 293 | 291,1773-04-30,54.8 294 | 292,1773-05-31,68.5 295 | 293,1773-06-30,47.3 296 | 294,1773-07-31,46.2 297 | 295,1773-08-31,21.2 298 | 296,1773-09-30,48.8 299 | 297,1773-10-31,43.8 300 | 298,1773-11-30,68.2 301 | 299,1773-12-31,72.0 302 | 300,1774-01-31,78.0 303 | 301,1774-02-28,109.0 304 | 302,1774-03-31,92.8 305 | 303,1774-04-30,73.0 306 | 304,1774-05-31,85.5 307 | 305,1774-06-30,47.5 308 | 306,1774-07-31,29.2 309 | 307,1774-08-31,11.0 310 | 308,1774-09-30,13.2 311 | 309,1774-10-31,23.3 312 | 310,1774-11-30,29.5 313 | 311,1774-12-31,20.3 314 | 312,1775-01-31,7.3 315 | 313,1775-02-28,0.0 316 | 314,1775-03-31,19.3 317 | 315,1775-04-30,18.7 318 | 316,1775-05-31,6.5 319 | 317,1775-06-30,20.5 320 | 318,1775-07-31,1.7 321 | 319,1775-08-31,13.2 322 | 320,1775-09-30,5.3 323 | 
321,1775-10-31,9.3 324 | 322,1775-11-30,25.2 325 | 323,1775-12-31,13.2 326 | 324,1776-01-31,36.2 327 | 325,1776-02-29,19.3 328 | 326,1776-03-31,10.5 329 | 327,1776-04-30,36.3 330 | 328,1776-05-31,18.7 331 | 329,1776-06-30,31.7 332 | 330,1776-07-31,1.7 333 | 331,1776-08-31,40.3 334 | 332,1776-09-30,26.7 335 | 333,1776-10-31,50.0 336 | 334,1776-11-30,58.3 337 | 335,1776-12-31,66.7 338 | 336,1777-01-31,75.0 339 | 337,1777-02-28,60.8 340 | 338,1777-03-31,65.0 341 | 339,1777-04-30,159.2 342 | 340,1777-05-31,133.8 343 | 341,1777-06-30,134.5 344 | 342,1777-07-31,158.3 345 | 343,1777-08-31,186.7 346 | 344,1777-09-30,193.7 347 | 345,1777-10-31,177.5 348 | 346,1777-11-30,243.3 349 | 347,1777-12-31,262.2 350 | 348,1778-01-31,295.5 351 | 349,1778-02-28,182.2 352 | 350,1778-03-31,223.3 353 | 351,1778-04-30,241.7 354 | 352,1778-05-31,398.2 355 | 353,1778-06-30,286.0 356 | 354,1778-07-31,255.0 357 | 355,1778-08-31,233.3 358 | 356,1778-09-30,286.2 359 | 357,1778-10-31,260.5 360 | 358,1778-11-30,250.5 361 | 359,1778-12-31,175.0 362 | 360,1779-01-31,191.2 363 | 361,1779-02-28,276.2 364 | 362,1779-03-31,196.7 365 | 363,1779-04-30,241.7 366 | 364,1779-05-31,233.3 367 | 365,1779-06-30,189.5 368 | 366,1779-07-31,238.3 369 | 367,1779-08-31,186.7 370 | 368,1779-09-30,185.0 371 | 369,1779-10-31,206.7 372 | 370,1779-11-30,190.0 373 | 371,1779-12-31,183.3 374 | 372,1780-01-31,116.7 375 | 373,1780-02-29,163.3 376 | 374,1780-03-31,163.3 377 | 375,1780-04-30,158.3 378 | 376,1780-05-31,178.7 379 | 377,1780-06-30,146.7 380 | 378,1780-07-31,143.3 381 | 379,1780-08-31,143.3 382 | 380,1780-09-30,156.2 383 | 381,1780-10-31,128.3 384 | 382,1780-11-30,100.0 385 | 383,1780-12-31,97.8 386 | 384,1781-01-31,164.5 387 | 385,1781-02-28,124.5 388 | 386,1781-03-31,88.3 389 | 387,1781-04-30,113.8 390 | 388,1781-05-31,174.5 391 | 389,1781-06-30,162.8 392 | 390,1781-07-31,122.5 393 | 391,1781-08-31,110.0 394 | 392,1781-09-30,85.0 395 | 393,1781-10-31,45.5 396 | 394,1781-11-30,111.7 397 | 395,1781-12-31,58.7 398 | 396,1782-01-31,90.0 399 | 397,1782-02-28,62.5 400 | 398,1782-03-31,61.7 401 | 399,1782-04-30,68.3 402 | 400,1782-05-31,90.5 403 | 401,1782-06-30,63.3 404 | 402,1782-07-31,61.7 405 | 403,1782-08-31,73.3 406 | 404,1782-09-30,56.7 407 | 405,1782-10-31,38.7 408 | 406,1782-11-30,52.5 409 | 407,1782-12-31,50.0 410 | 408,1783-01-31,46.7 411 | 409,1783-02-28,64.5 412 | 410,1783-03-31,44.5 413 | 411,1783-04-30,47.2 414 | 412,1783-05-31,38.3 415 | 413,1783-06-30,42.0 416 | 414,1783-07-31,53.7 417 | 415,1783-08-31,33.3 418 | 416,1783-09-30,30.0 419 | 417,1783-10-31,13.3 420 | 418,1783-11-30,25.0 421 | 419,1783-12-31,17.5 422 | 420,1784-01-31,21.7 423 | 421,1784-02-29,13.3 424 | 422,1784-03-31,18.3 425 | 423,1784-04-30,16.7 426 | 424,1784-05-31,10.0 427 | 425,1784-06-30,15.0 428 | 426,1784-07-31,10.0 429 | 427,1784-08-31,16.7 430 | 428,1784-09-30,16.7 431 | 429,1784-10-31,13.3 432 | 430,1784-11-30,28.3 433 | 431,1784-12-31,23.3 434 | 432,1785-01-31,10.8 435 | 433,1785-02-28,13.3 436 | 434,1785-03-31,15.0 437 | 435,1785-04-30,26.2 438 | 436,1785-05-31,34.5 439 | 437,1785-06-30,43.8 440 | 438,1785-07-31,60.5 441 | 439,1785-08-31,33.3 442 | 440,1785-09-30,53.3 443 | 441,1785-10-31,78.7 444 | 442,1785-11-30,67.0 445 | 443,1785-12-31,45.5 446 | 444,1786-01-31,62.0 447 | 445,1786-02-28,79.3 448 | 446,1786-03-31,79.5 449 | 447,1786-04-30,142.3 450 | 448,1786-05-31,153.8 451 | 449,1786-06-30,98.3 452 | 450,1786-07-31,138.3 453 | 451,1786-08-31,149.5 454 | 452,1786-09-30,185.8 455 | 453,1786-10-31,187.2 456 | 454,1786-11-30,193.3 457 | 
455,1786-12-31,187.8 458 | 456,1787-01-31,224.5 459 | 457,1787-02-28,176.7 460 | 458,1787-03-31,145.7 461 | 459,1787-04-30,212.0 462 | 460,1787-05-31,224.7 463 | 461,1787-06-30,165.3 464 | 462,1787-07-31,213.3 465 | 463,1787-08-31,228.7 466 | 464,1787-09-30,262.2 467 | 465,1787-10-31,261.7 468 | 466,1787-11-30,235.8 469 | 467,1787-12-31,290.0 470 | 468,1788-01-31,230.0 471 | 469,1788-02-29,215.3 472 | 470,1788-03-31,238.8 473 | 471,1788-04-30,180.8 474 | 472,1788-05-31,188.3 475 | 473,1788-06-30,257.0 476 | 474,1788-07-31,235.8 477 | 475,1788-08-31,226.7 478 | 476,1788-09-30,235.0 479 | 477,1788-10-31,236.7 480 | 478,1788-11-30,157.8 481 | 479,1788-12-31,215.8 482 | 480,1789-01-31,190.0 483 | 481,1789-02-28,208.8 484 | 482,1789-03-31,200.0 485 | 483,1789-04-30,205.5 486 | 484,1789-05-31,205.8 487 | 485,1789-06-30,200.0 488 | 486,1789-07-31,195.0 489 | 487,1789-08-31,171.7 490 | 488,1789-09-30,186.7 491 | 489,1789-10-31,149.5 492 | 490,1789-11-30,223.3 493 | 491,1789-12-31,225.8 494 | 492,1790-01-31,171.7 495 | 493,1790-02-28,212.5 496 | 494,1790-03-31,160.5 497 | 495,1790-04-30,156.7 498 | 496,1790-05-31,155.0 499 | 497,1790-06-30,151.7 500 | 498,1790-07-31,115.5 501 | 499,1790-08-31,145.0 502 | 500,1790-09-30,128.8 503 | 501,1790-10-31,140.5 504 | 502,1790-11-30,136.7 505 | 503,1790-12-31,123.3 506 | 504,1791-01-31,121.2 507 | 505,1791-02-28,103.3 508 | 506,1791-03-31,123.3 509 | 507,1791-04-30,128.7 510 | 508,1791-05-31,122.8 511 | 509,1791-06-30,107.0 512 | 510,1791-07-31,118.3 513 | 511,1791-08-31,71.7 514 | 512,1791-09-30,110.8 515 | 513,1791-10-31,102.8 516 | 514,1791-11-30,111.7 517 | 515,1791-12-31,110.0 518 | 516,1792-01-31,96.7 519 | 517,1792-02-29,106.7 520 | 518,1792-03-31,105.0 521 | 519,1792-04-30,126.2 522 | 520,1792-05-31,103.3 523 | 521,1792-06-30,101.7 524 | 522,1792-07-31,76.3 525 | 523,1792-08-31,100.0 526 | 524,1792-09-30,98.3 527 | 525,1792-10-31,98.3 528 | 526,1792-11-30,95.0 529 | 527,1792-12-31,93.3 530 | 528,1793-01-31,93.3 531 | 529,1793-02-28,91.7 532 | 530,1793-03-31,92.5 533 | 531,1793-04-30,88.3 534 | 532,1793-05-31,87.2 535 | 533,1793-06-30,85.0 536 | 534,1793-07-31,83.3 537 | 535,1793-08-31,48.8 538 | 536,1793-09-30,40.0 539 | 537,1793-10-31,78.3 540 | 538,1793-11-30,73.3 541 | 539,1793-12-31,76.2 542 | 540,1794-01-31,75.0 543 | 541,1794-02-28,73.3 544 | 542,1794-03-31,63.3 545 | 543,1794-04-30,47.3 546 | 544,1794-05-31,92.8 547 | 545,1794-06-30,69.2 548 | 546,1794-07-31,68.3 549 | 547,1794-08-31,66.7 550 | 548,1794-09-30,18.5 551 | 549,1794-10-31,47.5 552 | 550,1794-11-30,112.3 553 | 551,1794-12-31,85.7 554 | 552,1795-01-31,35.7 555 | 553,1795-02-28,66.5 556 | 554,1795-03-31,21.0 557 | 555,1795-04-30,31.0 558 | 556,1795-05-31,51.7 559 | 557,1795-06-30,28.5 560 | 558,1795-07-31,21.5 561 | 559,1795-08-31,42.8 562 | 560,1795-09-30,22.5 563 | 561,1795-10-31,32.5 564 | 562,1795-11-30,41.7 565 | 563,1795-12-31,30.0 566 | 564,1796-01-31,36.7 567 | 565,1796-02-29,39.7 568 | 566,1796-03-31,26.2 569 | 567,1796-04-30,52.8 570 | 568,1796-05-31,35.0 571 | 569,1796-06-30,11.2 572 | 570,1796-07-31,44.8 573 | 571,1796-08-31,2.5 574 | 572,1796-09-30,30.7 575 | 573,1796-10-31,18.3 576 | 574,1796-11-30,14.0 577 | 575,1796-12-31,8.5 578 | 576,1797-01-31,24.0 579 | 577,1797-02-28,7.0 580 | 578,1797-03-31,6.7 581 | 579,1797-04-30,6.7 582 | 580,1797-05-31,12.2 583 | 581,1797-06-30,18.5 584 | 582,1797-07-31,7.2 585 | 583,1797-08-31,10.0 586 | 584,1797-09-30,9.5 587 | 585,1797-10-31,11.5 588 | 586,1797-11-30,9.7 589 | 587,1797-12-31,5.0 590 | 588,1798-01-31,3.3 591 | 
589,1798-02-28,6.7 592 | 590,1798-03-31,20.7 593 | 591,1798-04-30,1.8 594 | 592,1798-05-31,0.0 595 | 593,1798-06-30,0.0 596 | 594,1798-07-31,0.0 597 | 595,1798-08-31,5.0 598 | 596,1798-09-30,4.0 599 | 597,1798-10-31,2.5 600 | 598,1798-11-30,20.8 601 | 599,1798-12-31,16.5 602 | 600,1799-01-31,2.7 603 | 601,1799-02-28,21.0 604 | 602,1799-03-31,36.2 605 | 603,1799-04-30,14.0 606 | 604,1799-05-31,13.7 607 | 605,1799-06-30,17.7 608 | 606,1799-07-31,3.5 609 | 607,1799-08-31,0.0 610 | 608,1799-09-30,0.0 611 | 609,1799-10-31,7.7 612 | 610,1799-11-30,4.5 613 | 611,1799-12-31,14.3 614 | 612,1800-01-31,11.5 615 | 613,1800-02-28,15.5 616 | 614,1800-03-31,23.2 617 | 615,1800-04-30,0.0 618 | 616,1800-05-31,8.3 619 | 617,1800-06-30,39.5 620 | 618,1800-07-31,35.0 621 | 619,1800-08-31,32.5 622 | 620,1800-09-30,19.2 623 | 621,1800-10-31,20.5 624 | 622,1800-11-30,17.5 625 | 623,1800-12-31,66.8 626 | 624,1801-01-31,45.0 627 | 625,1801-02-28,48.3 628 | 626,1801-03-31,50.0 629 | 627,1801-04-30,51.7 630 | 628,1801-05-31,53.3 631 | 629,1801-06-30,52.0 632 | 630,1801-07-31,58.3 633 | 631,1801-08-31,64.5 634 | 632,1801-09-30,55.8 635 | 633,1801-10-31,54.3 636 | 634,1801-11-30,66.3 637 | 635,1801-12-31,80.3 638 | 636,1802-01-31,79.7 639 | 637,1802-02-28,78.3 640 | 638,1802-03-31,68.0 641 | 639,1802-04-30,70.0 642 | 640,1802-05-31,73.3 643 | 641,1802-06-30,76.7 644 | 642,1802-07-31,80.0 645 | 643,1802-08-31,83.3 646 | 644,1802-09-30,86.3 647 | 645,1802-10-31,64.2 648 | 646,1802-11-30,57.5 649 | 647,1802-12-31,83.3 650 | 648,1803-01-31,83.3 651 | 649,1803-02-28,84.7 652 | 650,1803-03-31,49.2 653 | 651,1803-04-30,41.7 654 | 652,1803-05-31,73.8 655 | 653,1803-06-30,60.0 656 | 654,1803-07-31,80.5 657 | 655,1803-08-31,56.8 658 | 656,1803-09-30,75.5 659 | 657,1803-10-31,90.5 660 | 658,1803-11-30,85.0 661 | 659,1803-12-31,80.0 662 | 660,1804-01-31,75.5 663 | 661,1804-02-29,80.5 664 | 662,1804-03-31,80.0 665 | 663,1804-04-30,84.3 666 | 664,1804-05-31,55.7 667 | 665,1804-06-30,58.0 668 | 666,1804-07-31,49.7 669 | 667,1804-08-31,71.8 670 | 668,1804-09-30,88.3 671 | 669,1804-10-31,103.8 672 | 670,1804-11-30,101.7 673 | 671,1804-12-31,100.0 674 | 672,1805-01-31,101.7 675 | 673,1805-02-28,73.5 676 | 674,1805-03-31,85.7 677 | 675,1805-04-30,62.5 678 | 676,1805-05-31,65.0 679 | 677,1805-06-30,67.5 680 | 678,1805-07-31,62.7 681 | 679,1805-08-31,71.2 682 | 680,1805-09-30,74.0 683 | 681,1805-10-31,49.0 684 | 682,1805-11-30,68.3 685 | 683,1805-12-31,63.8 686 | 684,1806-01-31,65.0 687 | 685,1806-02-28,49.3 688 | 686,1806-03-31,54.5 689 | 687,1806-04-30,46.2 690 | 688,1806-05-31,44.0 691 | 689,1806-06-30,42.7 692 | 690,1806-07-31,50.0 693 | 691,1806-08-31,43.8 694 | 692,1806-09-30,40.0 695 | 693,1806-10-31,45.0 696 | 694,1806-11-30,41.7 697 | 695,1806-12-31,40.0 698 | 696,1807-01-31,20.0 699 | 697,1807-02-28,20.3 700 | 698,1807-03-31,16.0 701 | 699,1807-04-30,39.7 702 | 700,1807-05-31,16.7 703 | 701,1807-06-30,20.0 704 | 702,1807-07-31,21.2 705 | 703,1807-08-31,20.0 706 | 704,1807-09-30,9.5 707 | 705,1807-10-31,13.3 708 | 706,1807-11-30,4.3 709 | 707,1807-12-31,0.0 710 | 708,1808-01-31,0.0 711 | 709,1808-02-29,7.5 712 | 710,1808-03-31,0.0 713 | 711,1808-04-30,20.5 714 | 712,1808-05-31,22.5 715 | 713,1808-06-30,22.5 716 | 714,1808-07-31,11.2 717 | 715,1808-08-31,13.3 718 | 716,1808-09-30,19.5 719 | 717,1808-10-31,7.8 720 | 718,1808-11-30,17.5 721 | 719,1808-12-31,20.5 722 | 720,1809-01-31,12.0 723 | 721,1809-02-28,15.3 724 | 722,1809-03-31,1.5 725 | 723,1809-04-30,4.2 726 | 724,1809-05-31,3.3 727 | 725,1809-06-30,12.8 728 | 
726,1809-07-31,0.5 729 | 727,1809-08-31,0.3 730 | 728,1809-09-30,0.7 731 | 729,1809-10-31,0.0 732 | 730,1809-11-30,0.0 733 | 731,1809-12-31,0.0 734 | 732,1810-01-31,0.0 735 | 733,1810-02-28,0.0 736 | 734,1810-03-31,0.0 737 | 735,1810-04-30,0.0 738 | 736,1810-05-31,0.0 739 | 737,1810-06-30,0.0 740 | 738,1810-07-31,0.0 741 | 739,1810-08-31,0.0 742 | 740,1810-09-30,0.0 743 | 741,1810-10-31,0.0 744 | 742,1810-11-30,0.0 745 | 743,1810-12-31,0.0 746 | 744,1811-01-31,0.0 747 | 745,1811-02-28,0.0 748 | 746,1811-03-31,0.0 749 | 747,1811-04-30,0.0 750 | 748,1811-05-31,0.0 751 | 749,1811-06-30,0.0 752 | 750,1811-07-31,11.0 753 | 751,1811-08-31,0.0 754 | 752,1811-09-30,4.0 755 | 753,1811-10-31,10.2 756 | 754,1811-11-30,1.3 757 | 755,1811-12-31,1.8 758 | 756,1812-01-31,18.8 759 | 757,1812-02-29,3.2 760 | 758,1812-03-31,1.2 761 | 759,1812-04-30,0.0 762 | 760,1812-05-31,1.7 763 | 761,1812-06-30,2.2 764 | 762,1812-07-31,0.8 765 | 763,1812-08-31,26.0 766 | 764,1812-09-30,8.7 767 | 765,1812-10-31,6.5 768 | 766,1812-11-30,13.2 769 | 767,1812-12-31,16.8 770 | 768,1813-01-31,0.0 771 | 769,1813-02-28,17.2 772 | 770,1813-03-31,3.2 773 | 771,1813-04-30,27.7 774 | 772,1813-05-31,9.2 775 | 773,1813-06-30,18.7 776 | 774,1813-07-31,30.5 777 | 775,1813-08-31,14.0 778 | 776,1813-09-30,25.5 779 | 777,1813-10-31,46.3 780 | 778,1813-11-30,27.8 781 | 779,1813-12-31,23.8 782 | 780,1814-01-31,37.0 783 | 781,1814-02-28,20.0 784 | 782,1814-03-31,9.5 785 | 783,1814-04-30,39.7 786 | 784,1814-05-31,9.7 787 | 785,1814-06-30,24.8 788 | 786,1814-07-31,30.8 789 | 787,1814-08-31,3.8 790 | 788,1814-09-30,13.5 791 | 789,1814-10-31,32.2 792 | 790,1814-11-30,24.2 793 | 791,1814-12-31,33.5 794 | 792,1815-01-31,32.0 795 | 793,1815-02-28,53.7 796 | 794,1815-03-31,43.7 797 | 795,1815-04-30,52.7 798 | 796,1815-05-31,16.3 799 | 797,1815-06-30,93.2 800 | 798,1815-07-31,59.2 801 | 799,1815-08-31,78.7 802 | 800,1815-09-30,52.5 803 | 801,1815-10-31,55.8 804 | 802,1815-11-30,62.0 805 | 803,1815-12-31,108.3 806 | 804,1816-01-31,43.8 807 | 805,1816-02-29,114.7 808 | 806,1816-03-31,122.8 809 | 807,1816-04-30,98.0 810 | 808,1816-05-31,73.8 811 | 809,1816-06-30,72.7 812 | 810,1816-07-31,64.7 813 | 811,1816-08-31,38.7 814 | 812,1816-09-30,79.7 815 | 813,1816-10-31,94.0 816 | 814,1816-11-30,63.5 817 | 815,1816-12-31,49.8 818 | 816,1817-01-31,60.7 819 | 817,1817-02-28,96.5 820 | 818,1817-03-31,160.3 821 | 819,1817-04-30,44.0 822 | 820,1817-05-31,35.3 823 | 821,1817-06-30,66.7 824 | 822,1817-07-31,83.3 825 | 823,1817-08-31,75.0 826 | 824,1817-09-30,61.2 827 | 825,1817-10-31,42.7 828 | 826,1817-11-30,48.2 829 | 827,1817-12-31,47.3 830 | 828,1818-01-31,58.1 831 | 829,1818-02-28,37.4 832 | 830,1818-03-31,42.4 833 | 831,1818-04-30,57.5 834 | 832,1818-05-31,88.5 835 | 833,1818-06-30,60.8 836 | 834,1818-07-31,46.8 837 | 835,1818-08-31,52.6 838 | 836,1818-09-30,43.4 839 | 837,1818-10-31,52.8 840 | 838,1818-11-30,18.3 841 | 839,1818-12-31,43.0 842 | 840,1819-01-31,54.7 843 | 841,1819-02-28,34.6 844 | 842,1819-03-31,6.3 845 | 843,1819-04-30,33.7 846 | 844,1819-05-31,32.6 847 | 845,1819-06-30,58.3 848 | 846,1819-07-31,52.5 849 | 847,1819-08-31,43.5 850 | 848,1819-09-30,24.8 851 | 849,1819-10-31,45.9 852 | 850,1819-11-30,41.8 853 | 851,1819-12-31,50.9 854 | 852,1820-01-31,32.0 855 | 853,1820-02-29,44.4 856 | 854,1820-03-31,7.5 857 | 855,1820-04-30,32.3 858 | 856,1820-05-31,48.9 859 | 857,1820-06-30,17.9 860 | 858,1820-07-31,34.1 861 | 859,1820-08-31,43.1 862 | 860,1820-09-30,8.6 863 | 861,1820-10-31,14.9 864 | 862,1820-11-30,13.2 865 | 863,1820-12-31,15.1 866 | 
864,1821-01-31,36.0 867 | 865,1821-02-28,7.0 868 | 866,1821-03-31,9.5 869 | 867,1821-04-30,15.2 870 | 868,1821-05-31,2.8 871 | 869,1821-06-30,3.0 872 | 870,1821-07-31,4.3 873 | 871,1821-08-31,8.0 874 | 872,1821-09-30,7.3 875 | 873,1821-10-31,31.4 876 | 874,1821-11-30,7.3 877 | 875,1821-12-31,0.4 878 | 876,1822-01-31,0.0 879 | 877,1822-02-28,1.5 880 | 878,1822-03-31,26.9 881 | 879,1822-04-30,21.7 882 | 880,1822-05-31,2.4 883 | 881,1822-06-30,9.3 884 | 882,1822-07-31,13.2 885 | 883,1822-08-31,3.4 886 | 884,1822-09-30,0.0 887 | 885,1822-10-31,0.6 888 | 886,1822-11-30,0.0 889 | 887,1822-12-31,0.0 890 | 888,1823-01-31,0.0 891 | 889,1823-02-28,0.0 892 | 890,1823-03-31,1.0 893 | 891,1823-04-30,0.0 894 | 892,1823-05-31,0.0 895 | 893,1823-06-30,0.0 896 | 894,1823-07-31,0.8 897 | 895,1823-08-31,0.0 898 | 896,1823-09-30,0.0 899 | 897,1823-10-31,0.0 900 | 898,1823-11-30,0.0 901 | 899,1823-12-31,34.0 902 | 900,1824-01-31,36.0 903 | 901,1824-02-29,18.0 904 | 902,1824-03-31,0.0 905 | 903,1824-04-30,32.3 906 | 904,1824-05-31,4.6 907 | 905,1824-06-30,0.0 908 | 906,1824-07-31,0.0 909 | 907,1824-08-31,2.3 910 | 908,1824-09-30,34.3 911 | 909,1824-10-31,42.0 912 | 910,1824-11-30,0.0 913 | 911,1824-12-31,1.4 914 | 912,1825-01-31,8.4 915 | 913,1825-02-28,25.9 916 | 914,1825-03-31,37.4 917 | 915,1825-04-30,6.2 918 | 916,1825-05-31,25.8 919 | 917,1825-06-30,25.6 920 | 918,1825-07-31,51.5 921 | 919,1825-08-31,42.8 922 | 920,1825-09-30,26.2 923 | 921,1825-10-31,26.0 924 | 922,1825-11-30,19.6 925 | 923,1825-12-31,36.7 926 | 924,1826-01-31,29.4 927 | 925,1826-02-28,30.2 928 | 926,1826-03-31,61.3 929 | 927,1826-04-30,40.0 930 | 928,1826-05-31,53.9 931 | 929,1826-06-30,61.7 932 | 930,1826-07-31,87.5 933 | 931,1826-08-31,66.0 934 | 932,1826-09-30,31.6 935 | 933,1826-10-31,84.4 936 | 934,1826-11-30,65.8 937 | 935,1826-12-31,113.6 938 | 936,1827-01-31,57.7 939 | 937,1827-02-28,79.0 940 | 938,1827-03-31,96.3 941 | 939,1827-04-30,76.7 942 | 940,1827-05-31,93.8 943 | 941,1827-06-30,94.4 944 | 942,1827-07-31,70.4 945 | 943,1827-08-31,89.4 946 | 944,1827-09-30,82.6 947 | 945,1827-10-31,93.5 948 | 946,1827-11-30,80.4 949 | 947,1827-12-31,76.8 950 | 948,1828-01-31,88.0 951 | 949,1828-02-29,107.3 952 | 950,1828-03-31,108.5 953 | 951,1828-04-30,101.9 954 | 952,1828-05-31,148.6 955 | 953,1828-06-30,163.4 956 | 954,1828-07-31,90.4 957 | 955,1828-08-31,127.3 958 | 956,1828-09-30,83.9 959 | 957,1828-10-31,91.1 960 | 958,1828-11-30,95.0 961 | 959,1828-12-31,78.1 962 | 960,1829-01-31,71.6 963 | 961,1829-02-28,82.3 964 | 962,1829-03-31,120.6 965 | 963,1829-04-30,158.4 966 | 964,1829-05-31,112.3 967 | 965,1829-06-30,123.2 968 | 966,1829-07-31,151.5 969 | 967,1829-08-31,129.3 970 | 968,1829-09-30,88.0 971 | 969,1829-10-31,95.3 972 | 970,1829-11-30,112.7 973 | 971,1829-12-31,94.1 974 | 972,1830-01-31,87.0 975 | 973,1830-02-28,120.1 976 | 974,1830-03-31,141.0 977 | 975,1830-04-30,177.3 978 | 976,1830-05-31,110.5 979 | 977,1830-06-30,108.4 980 | 978,1830-07-31,73.2 981 | 979,1830-08-31,84.6 982 | 980,1830-09-30,103.4 983 | 981,1830-10-31,140.6 984 | 982,1830-11-30,135.4 985 | 983,1830-12-31,136.8 986 | 984,1831-01-31,79.0 987 | 985,1831-02-28,83.6 988 | 986,1831-03-31,155.8 989 | 987,1831-04-30,90.9 990 | 988,1831-05-31,63.4 991 | 989,1831-06-30,55.6 992 | 990,1831-07-31,75.4 993 | 991,1831-08-31,91.6 994 | 992,1831-09-30,63.2 995 | 993,1831-10-31,77.1 996 | 994,1831-11-30,72.5 997 | 995,1831-12-31,48.3 998 | 996,1832-01-31,51.5 999 | 997,1832-02-29,92.6 1000 | 998,1832-03-31,91.9 1001 | 999,1832-04-30,44.8 1002 | 1000,1832-05-31,68.9 1003 | 
1001,1832-06-30,44.5 1004 | 1002,1832-07-31,23.3 1005 | 1003,1832-08-31,14.8 1006 | 1004,1832-09-30,13.7 1007 | 1005,1832-10-31,35.2 1008 | 1006,1832-11-30,23.8 1009 | 1007,1832-12-31,45.8 1010 | 1008,1833-01-31,18.7 1011 | 1009,1833-02-28,24.9 1012 | 1010,1833-03-31,19.6 1013 | 1011,1833-04-30,4.6 1014 | 1012,1833-05-31,21.5 1015 | 1013,1833-06-30,1.7 1016 | 1014,1833-07-31,11.6 1017 | 1015,1833-08-31,9.5 1018 | 1016,1833-09-30,19.3 1019 | 1017,1833-10-31,12.5 1020 | 1018,1833-11-30,9.8 1021 | 1019,1833-12-31,16.6 1022 | 1020,1834-01-31,8.2 1023 | 1021,1834-02-28,30.1 1024 | 1022,1834-03-31,6.5 1025 | 1023,1834-04-30,2.4 1026 | 1024,1834-05-31,14.8 1027 | 1025,1834-06-30,13.1 1028 | 1026,1834-07-31,14.4 1029 | 1027,1834-08-31,6.6 1030 | 1028,1834-09-30,19.2 1031 | 1029,1834-10-31,41.3 1032 | 1030,1834-11-30,50.9 1033 | 1031,1834-12-31,57.5 1034 | 1032,1835-01-31,12.4 1035 | 1033,1835-02-28,40.7 1036 | 1034,1835-03-31,32.7 1037 | 1035,1835-04-30,102.4 1038 | 1036,1835-05-31,72.6 1039 | 1037,1835-06-30,55.2 1040 | 1038,1835-07-31,99.6 1041 | 1039,1835-08-31,98.5 1042 | 1040,1835-09-30,168.0 1043 | 1041,1835-10-31,158.6 1044 | 1042,1835-11-30,166.7 1045 | 1043,1835-12-31,129.3 1046 | 1044,1836-01-31,147.6 1047 | 1045,1836-02-29,179.4 1048 | 1046,1836-03-31,163.7 1049 | 1047,1836-04-30,238.0 1050 | 1048,1836-05-31,185.7 1051 | 1049,1836-06-30,207.9 1052 | 1050,1836-07-31,194.6 1053 | 1051,1836-08-31,179.6 1054 | 1052,1836-09-30,158.5 1055 | 1053,1836-10-31,228.9 1056 | 1054,1836-11-30,201.5 1057 | 1055,1836-12-31,343.8 1058 | 1056,1837-01-31,313.4 1059 | 1057,1837-02-28,292.6 1060 | 1058,1837-03-31,224.3 1061 | 1059,1837-04-30,230.3 1062 | 1060,1837-05-31,186.1 1063 | 1061,1837-06-30,263.4 1064 | 1062,1837-07-31,271.2 1065 | 1063,1837-08-31,223.5 1066 | 1064,1837-09-30,160.4 1067 | 1065,1837-10-31,206.2 1068 | 1066,1837-11-30,178.4 1069 | 1067,1837-12-31,216.1 1070 | 1068,1838-01-31,241.5 1071 | 1069,1838-02-28,141.3 1072 | 1070,1838-03-31,234.5 1073 | 1071,1838-04-30,211.1 1074 | 1072,1838-05-31,229.4 1075 | 1073,1838-06-30,157.6 1076 | 1074,1838-07-31,180.4 1077 | 1075,1838-08-31,131.3 1078 | 1076,1838-09-30,122.7 1079 | 1077,1838-10-31,151.5 1080 | 1078,1838-11-30,129.2 1081 | 1079,1838-12-31,132.9 1082 | 1080,1839-01-31,175.9 1083 | 1081,1839-02-28,170.8 1084 | 1082,1839-03-31,129.5 1085 | 1083,1839-04-30,102.9 1086 | 1084,1839-05-31,89.6 1087 | 1085,1839-06-30,91.2 1088 | 1086,1839-07-31,141.3 1089 | 1087,1839-08-31,218.8 1090 | 1088,1839-09-30,221.1 1091 | 1089,1839-10-31,151.5 1092 | 1090,1839-11-30,114.7 1093 | 1091,1839-12-31,106.2 1094 | 1092,1840-01-31,135.4 1095 | 1093,1840-02-29,146.1 1096 | 1094,1840-03-31,112.9 1097 | 1095,1840-04-30,109.8 1098 | 1096,1840-05-31,115.4 1099 | 1097,1840-06-30,80.9 1100 | 1098,1840-07-31,101.0 1101 | 1099,1840-08-31,96.4 1102 | 1100,1840-09-30,123.3 1103 | 1101,1840-10-31,91.7 1104 | 1102,1840-11-30,90.4 1105 | 1103,1840-12-31,89.5 1106 | 1104,1841-01-31,40.1 1107 | 1105,1841-02-28,49.9 1108 | 1106,1841-03-31,49.6 1109 | 1107,1841-04-30,66.9 1110 | 1108,1841-05-31,112.4 1111 | 1109,1841-06-30,92.8 1112 | 1110,1841-07-31,51.4 1113 | 1111,1841-08-31,65.4 1114 | 1112,1841-09-30,60.9 1115 | 1113,1841-10-31,47.4 1116 | 1114,1841-11-30,32.9 1117 | 1115,1841-12-31,64.8 1118 | 1116,1842-01-31,34.0 1119 | 1117,1842-02-28,36.8 1120 | 1118,1842-03-31,36.2 1121 | 1119,1842-04-30,44.9 1122 | 1120,1842-05-31,41.4 1123 | 1121,1842-06-30,34.2 1124 | 1122,1842-07-31,21.0 1125 | 1123,1842-08-31,44.4 1126 | 1124,1842-09-30,30.6 1127 | 1125,1842-10-31,63.5 1128 | 
1126,1842-11-30,67.5 1129 | 1127,1842-12-31,29.3 1130 | 1128,1843-01-31,22.2 1131 | 1129,1843-02-28,5.9 1132 | 1130,1843-03-31,13.9 1133 | 1131,1843-04-30,15.8 1134 | 1132,1843-05-31,35.1 1135 | 1133,1843-06-30,17.6 1136 | 1134,1843-07-31,15.9 1137 | 1135,1843-08-31,19.6 1138 | 1136,1843-09-30,6.9 1139 | 1137,1843-10-31,8.9 1140 | 1138,1843-11-30,31.7 1141 | 1139,1843-12-31,21.2 1142 | 1140,1844-01-31,15.7 1143 | 1141,1844-02-29,24.5 1144 | 1142,1844-03-31,22.5 1145 | 1143,1844-04-30,34.6 1146 | 1144,1844-05-31,19.2 1147 | 1145,1844-06-30,6.2 1148 | 1146,1844-07-31,35.2 1149 | 1147,1844-08-31,39.7 1150 | 1148,1844-09-30,11.6 1151 | 1149,1844-10-31,35.9 1152 | 1150,1844-11-30,17.9 1153 | 1151,1844-12-31,36.0 1154 | 1152,1845-01-31,42.8 1155 | 1153,1845-02-28,72.8 1156 | 1154,1845-03-31,72.1 1157 | 1155,1845-04-30,95.0 1158 | 1156,1845-05-31,79.7 1159 | 1157,1845-06-30,51.8 1160 | 1158,1845-07-31,51.0 1161 | 1159,1845-08-31,53.9 1162 | 1160,1845-09-30,49.3 1163 | 1161,1845-10-31,67.8 1164 | 1162,1845-11-30,65.7 1165 | 1163,1845-12-31,99.5 1166 | 1164,1846-01-31,64.6 1167 | 1165,1846-02-28,84.9 1168 | 1166,1846-03-31,106.5 1169 | 1167,1846-04-30,115.5 1170 | 1168,1846-05-31,99.9 1171 | 1169,1846-06-30,108.4 1172 | 1170,1846-07-31,77.5 1173 | 1171,1846-08-31,91.3 1174 | 1172,1846-09-30,178.6 1175 | 1173,1846-10-31,93.4 1176 | 1174,1846-11-30,100.6 1177 | 1175,1846-12-31,109.1 1178 | 1176,1847-01-31,104.4 1179 | 1177,1847-02-28,74.9 1180 | 1178,1847-03-31,143.0 1181 | 1179,1847-04-30,74.6 1182 | 1180,1847-05-31,125.7 1183 | 1181,1847-06-30,142.2 1184 | 1182,1847-07-31,87.1 1185 | 1183,1847-08-31,234.3 1186 | 1184,1847-09-30,268.2 1187 | 1185,1847-10-31,300.6 1188 | 1186,1847-11-30,231.5 1189 | 1187,1847-12-31,182.6 1190 | 1188,1848-01-31,265.2 1191 | 1189,1848-02-29,186.4 1192 | 1190,1848-03-31,181.2 1193 | 1191,1848-04-30,178.6 1194 | 1192,1848-05-31,170.5 1195 | 1193,1848-06-30,214.9 1196 | 1194,1848-07-31,232.0 1197 | 1195,1848-08-31,220.9 1198 | 1196,1848-09-30,167.3 1199 | 1197,1848-10-31,220.6 1200 | 1198,1848-11-30,191.0 1201 | 1199,1848-12-31,265.8 1202 | 1200,1849-01-31,298.3 1203 | 1201,1849-02-28,250.3 1204 | 1202,1849-03-31,182.7 1205 | 1203,1849-04-30,194.7 1206 | 1204,1849-05-31,153.2 1207 | 1205,1849-06-30,154.2 1208 | 1206,1849-07-31,148.2 1209 | 1207,1849-08-31,128.6 1210 | 1208,1849-09-30,178.0 1211 | 1209,1849-10-31,135.7 1212 | 1210,1849-11-30,188.2 1213 | 1211,1849-12-31,184.3 1214 | 1212,1850-01-31,148.2 1215 | 1213,1850-02-28,169.8 1216 | 1214,1850-03-31,156.8 1217 | 1215,1850-04-30,83.8 1218 | 1216,1850-05-31,117.0 1219 | 1217,1850-06-30,133.0 1220 | 1218,1850-07-31,74.3 1221 | 1219,1850-08-31,117.0 1222 | 1220,1850-09-30,163.7 1223 | 1221,1850-10-31,134.9 1224 | 1222,1850-11-30,104.1 1225 | 1223,1850-12-31,115.9 1226 | 1224,1851-01-31,143.5 1227 | 1225,1851-02-28,200.1 1228 | 1226,1851-03-31,122.8 1229 | 1227,1851-04-30,107.3 1230 | 1228,1851-05-31,118.9 1231 | 1229,1851-06-30,120.0 1232 | 1230,1851-07-31,68.6 1233 | 1231,1851-08-31,109.0 1234 | 1232,1851-09-30,128.9 1235 | 1233,1851-10-31,118.7 1236 | 1234,1851-11-30,96.8 1237 | 1235,1851-12-31,135.5 1238 | 1236,1852-01-31,129.9 1239 | 1237,1852-02-29,126.2 1240 | 1238,1852-03-31,116.2 1241 | 1239,1852-04-30,124.3 1242 | 1240,1852-05-31,104.3 1243 | 1241,1852-06-30,89.0 1244 | 1242,1852-07-31,80.0 1245 | 1243,1852-08-31,75.3 1246 | 1244,1852-09-30,71.2 1247 | 1245,1852-10-31,127.9 1248 | 1246,1852-11-30,103.1 1249 | 1247,1852-12-31,86.1 1250 | 1248,1853-01-31,77.9 1251 | 1249,1853-02-28,81.5 1252 | 1250,1853-03-31,71.6 
1253 | 1251,1853-04-30,90.4 1254 | 1252,1853-05-31,65.9 1255 | 1253,1853-06-30,76.0 1256 | 1254,1853-07-31,87.2 1257 | 1255,1853-08-31,95.8 1258 | 1256,1853-09-30,63.7 1259 | 1257,1853-10-31,80.5 1260 | 1258,1853-11-30,54.7 1261 | 1259,1853-12-31,44.5 1262 | 1260,1854-01-31,29.3 1263 | 1261,1854-02-28,38.0 1264 | 1262,1854-03-31,39.4 1265 | 1263,1854-04-30,50.3 1266 | 1264,1854-05-31,45.6 1267 | 1265,1854-06-30,40.1 1268 | 1266,1854-07-31,35.5 1269 | 1267,1854-08-31,30.1 1270 | 1268,1854-09-30,42.6 1271 | 1269,1854-10-31,24.1 1272 | 1270,1854-11-30,53.5 1273 | 1271,1854-12-31,41.1 1274 | 1272,1855-01-31,23.4 1275 | 1273,1855-02-28,21.6 1276 | 1274,1855-03-31,33.0 1277 | 1275,1855-04-30,8.4 1278 | 1276,1855-05-31,17.3 1279 | 1277,1855-06-30,10.0 1280 | 1278,1855-07-31,0.7 1281 | 1279,1855-08-31,5.9 1282 | 1280,1855-09-30,0.0 1283 | 1281,1855-10-31,18.3 1284 | 1282,1855-11-30,7.9 1285 | 1283,1855-12-31,5.8 1286 | 1284,1856-01-31,1.0 1287 | 1285,1856-02-29,9.3 1288 | 1286,1856-03-31,0.7 1289 | 1287,1856-04-30,12.4 1290 | 1288,1856-05-31,0.0 1291 | 1289,1856-06-30,9.8 1292 | 1290,1856-07-31,8.8 1293 | 1291,1856-08-31,11.3 1294 | 1292,1856-09-30,8.4 1295 | 1293,1856-10-31,8.5 1296 | 1294,1856-11-30,14.7 1297 | 1295,1856-12-31,13.7 1298 | 1296,1857-01-31,26.0 1299 | 1297,1857-02-28,14.2 1300 | 1298,1857-03-31,10.0 1301 | 1299,1857-04-30,21.1 1302 | 1300,1857-05-31,54.2 1303 | 1301,1857-06-30,30.5 1304 | 1302,1857-07-31,42.2 1305 | 1303,1857-08-31,32.0 1306 | 1304,1857-09-30,80.6 1307 | 1305,1857-10-31,77.1 1308 | 1306,1857-11-30,59.6 1309 | 1307,1857-12-31,70.7 1310 | 1308,1858-01-31,74.2 1311 | 1309,1858-02-28,66.3 1312 | 1310,1858-03-31,109.2 1313 | 1311,1858-04-30,72.8 1314 | 1312,1858-05-31,78.6 1315 | 1313,1858-06-30,84.5 1316 | 1314,1858-07-31,107.7 1317 | 1315,1858-08-31,104.9 1318 | 1316,1858-09-30,152.2 1319 | 1317,1858-10-31,173.2 1320 | 1318,1858-11-30,98.6 1321 | 1319,1858-12-31,127.0 1322 | 1320,1859-01-31,159.0 1323 | 1321,1859-02-28,166.4 1324 | 1322,1859-03-31,171.5 1325 | 1323,1859-04-30,162.9 1326 | 1324,1859-05-31,172.8 1327 | 1325,1859-06-30,165.2 1328 | 1326,1859-07-31,180.7 1329 | 1327,1859-08-31,203.0 1330 | 1328,1859-09-30,200.9 1331 | 1329,1859-10-31,217.8 1332 | 1330,1859-11-30,184.6 1333 | 1331,1859-12-31,153.9 1334 | 1332,1860-01-31,156.6 1335 | 1333,1860-02-29,167.9 1336 | 1334,1860-03-31,188.0 1337 | 1335,1860-04-30,135.7 1338 | 1336,1860-05-31,203.5 1339 | 1337,1860-06-30,206.3 1340 | 1338,1860-07-31,221.9 1341 | 1339,1860-08-31,190.5 1342 | 1340,1860-09-30,175.2 1343 | 1341,1860-10-31,171.1 1344 | 1342,1860-11-30,186.0 1345 | 1343,1860-12-31,181.7 1346 | 1344,1861-01-31,118.5 1347 | 1345,1861-02-28,147.5 1348 | 1346,1861-03-31,192.0 1349 | 1347,1861-04-30,187.1 1350 | 1348,1861-05-31,107.8 1351 | 1349,1861-06-30,167.5 1352 | 1350,1861-07-31,148.2 1353 | 1351,1861-08-31,156.7 1354 | 1352,1861-09-30,151.9 1355 | 1353,1861-10-31,127.7 1356 | 1354,1861-11-30,102.0 1357 | 1355,1861-12-31,152.8 1358 | 1356,1862-01-31,119.9 1359 | 1357,1862-02-28,122.4 1360 | 1358,1862-03-31,82.7 1361 | 1359,1862-04-30,102.1 1362 | 1360,1862-05-31,122.4 1363 | 1361,1862-06-30,159.7 1364 | 1362,1862-07-31,139.5 1365 | 1363,1862-08-31,118.7 1366 | 1364,1862-09-30,126.4 1367 | 1365,1862-10-31,79.8 1368 | 1366,1862-11-30,96.0 1369 | 1367,1862-12-31,77.6 1370 | 1368,1863-01-31,91.8 1371 | 1369,1863-02-28,107.7 1372 | 1370,1863-03-31,126.0 1373 | 1371,1863-04-30,77.1 1374 | 1372,1863-05-31,102.1 1375 | 1373,1863-06-30,77.6 1376 | 1374,1863-07-31,62.0 1377 | 1375,1863-08-31,91.3 1378 | 
1376,1863-09-30,41.7 1379 | 1377,1863-10-31,75.7 1380 | 1378,1863-11-30,71.6 1381 | 1379,1863-12-31,78.1 1382 | 1380,1864-01-31,109.7 1383 | 1381,1864-02-29,89.5 1384 | 1382,1864-03-31,125.9 1385 | 1383,1864-04-30,68.1 1386 | 1384,1864-05-31,77.1 1387 | 1385,1864-06-30,109.9 1388 | 1386,1864-07-31,103.8 1389 | 1387,1864-08-31,104.1 1390 | 1388,1864-09-30,54.2 1391 | 1389,1864-10-31,64.4 1392 | 1390,1864-11-30,109.3 1393 | 1391,1864-12-31,54.3 1394 | 1392,1865-01-31,92.5 1395 | 1393,1865-02-28,74.6 1396 | 1394,1865-03-31,75.0 1397 | 1395,1865-04-30,55.9 1398 | 1396,1865-05-31,65.6 1399 | 1397,1865-06-30,63.8 1400 | 1398,1865-07-31,50.8 1401 | 1399,1865-08-31,71.8 1402 | 1400,1865-09-30,41.0 1403 | 1401,1865-10-31,32.5 1404 | 1402,1865-11-30,46.7 1405 | 1403,1865-12-31,24.3 1406 | 1404,1866-01-31,60.0 1407 | 1405,1866-02-28,72.9 1408 | 1406,1866-03-31,46.6 1409 | 1407,1866-04-30,33.5 1410 | 1408,1866-05-31,24.6 1411 | 1409,1866-06-30,31.4 1412 | 1410,1866-07-31,17.6 1413 | 1411,1866-08-31,24.2 1414 | 1412,1866-09-30,13.9 1415 | 1413,1866-10-31,26.8 1416 | 1414,1866-11-30,17.2 1417 | 1415,1866-12-31,2.9 1418 | 1416,1867-01-31,0.0 1419 | 1417,1867-02-28,1.4 1420 | 1418,1867-03-31,17.4 1421 | 1419,1867-04-30,9.7 1422 | 1420,1867-05-31,5.5 1423 | 1421,1867-06-30,2.8 1424 | 1422,1867-07-31,9.5 1425 | 1423,1867-08-31,9.1 1426 | 1424,1867-09-30,18.7 1427 | 1425,1867-10-31,25.5 1428 | 1426,1867-11-30,18.1 1429 | 1427,1867-12-31,47.9 1430 | 1428,1868-01-31,25.9 1431 | 1429,1868-02-29,26.3 1432 | 1430,1868-03-31,44.0 1433 | 1431,1868-04-30,61.1 1434 | 1432,1868-05-31,44.5 1435 | 1433,1868-06-30,51.8 1436 | 1434,1868-07-31,48.3 1437 | 1435,1868-08-31,57.3 1438 | 1436,1868-09-30,78.7 1439 | 1437,1868-10-31,102.8 1440 | 1438,1868-11-30,98.7 1441 | 1439,1868-12-31,112.8 1442 | 1440,1869-01-31,101.6 1443 | 1441,1869-02-28,99.9 1444 | 1442,1869-03-31,88.0 1445 | 1443,1869-04-30,68.4 1446 | 1444,1869-05-31,173.4 1447 | 1445,1869-06-30,180.7 1448 | 1446,1869-07-31,98.8 1449 | 1447,1869-08-31,132.7 1450 | 1448,1869-09-30,134.5 1451 | 1449,1869-10-31,99.0 1452 | 1450,1869-11-30,130.2 1453 | 1451,1869-12-31,174.1 1454 | 1452,1870-01-31,129.0 1455 | 1453,1870-02-28,191.7 1456 | 1454,1870-03-31,262.7 1457 | 1455,1870-04-30,266.9 1458 | 1456,1870-05-31,293.6 1459 | 1457,1870-06-30,226.3 1460 | 1458,1870-07-31,220.9 1461 | 1459,1870-08-31,256.5 1462 | 1460,1870-09-30,226.8 1463 | 1461,1870-10-31,244.3 1464 | 1462,1870-11-30,246.0 1465 | 1463,1870-12-31,216.7 1466 | 1464,1871-01-31,147.2 1467 | 1465,1871-02-28,209.0 1468 | 1466,1871-03-31,238.7 1469 | 1467,1871-04-30,270.9 1470 | 1468,1871-05-31,242.6 1471 | 1469,1871-06-30,152.9 1472 | 1470,1871-07-31,171.8 1473 | 1471,1871-08-31,183.7 1474 | 1472,1871-09-30,133.9 1475 | 1473,1871-10-31,148.5 1476 | 1474,1871-11-30,175.7 1477 | 1475,1871-12-31,150.8 1478 | 1476,1872-01-31,132.7 1479 | 1477,1872-02-29,200.3 1480 | 1478,1872-03-31,147.5 1481 | 1479,1872-04-30,170.4 1482 | 1480,1872-05-31,179.5 1483 | 1481,1872-06-30,183.3 1484 | 1482,1872-07-31,176.0 1485 | 1483,1872-08-31,155.0 1486 | 1484,1872-09-30,191.1 1487 | 1485,1872-10-31,171.2 1488 | 1486,1872-11-30,186.8 1489 | 1487,1872-12-31,139.9 1490 | 1488,1873-01-31,144.6 1491 | 1489,1873-02-28,178.4 1492 | 1490,1873-03-31,164.0 1493 | 1491,1873-04-30,127.1 1494 | 1492,1873-05-31,80.0 1495 | 1493,1873-06-30,74.8 1496 | 1494,1873-07-31,111.7 1497 | 1495,1873-08-31,113.8 1498 | 1496,1873-09-30,78.7 1499 | 1497,1873-10-31,78.6 1500 | 1498,1873-11-30,92.5 1501 | 1499,1873-12-31,82.0 1502 | 1500,1874-01-31,101.4 1503 | 
1501,1874-02-28,107.1 1504 | 1502,1874-03-31,77.5 1505 | 1503,1874-04-30,53.5 1506 | 1504,1874-05-31,74.6 1507 | 1505,1874-06-30,63.8 1508 | 1506,1874-07-31,113.2 1509 | 1507,1874-08-31,102.3 1510 | 1508,1874-09-30,46.8 1511 | 1509,1874-10-31,57.4 1512 | 1510,1874-11-30,48.2 1513 | 1511,1874-12-31,48.8 1514 | 1512,1875-01-31,24.4 1515 | 1513,1875-02-28,35.9 1516 | 1514,1875-03-31,56.4 1517 | 1515,1875-04-30,48.6 1518 | 1516,1875-05-31,19.2 1519 | 1517,1875-06-30,39.9 1520 | 1518,1875-07-31,20.9 1521 | 1519,1875-08-31,24.4 1522 | 1520,1875-09-30,4.0 1523 | 1521,1875-10-31,21.3 1524 | 1522,1875-11-30,29.6 1525 | 1523,1875-12-31,16.5 1526 | 1524,1876-01-31,23.9 1527 | 1525,1876-02-29,25.1 1528 | 1526,1876-03-31,51.1 1529 | 1527,1876-04-30,3.9 1530 | 1528,1876-05-31,8.5 1531 | 1529,1876-06-30,2.7 1532 | 1530,1876-07-31,25.5 1533 | 1531,1876-08-31,14.8 1534 | 1532,1876-09-30,16.6 1535 | 1533,1876-10-31,23.9 1536 | 1534,1876-11-30,16.6 1537 | 1535,1876-12-31,13.6 1538 | 1536,1877-01-31,40.6 1539 | 1537,1877-02-28,14.5 1540 | 1538,1877-03-31,19.9 1541 | 1539,1877-04-30,26.3 1542 | 1540,1877-05-31,36.1 1543 | 1541,1877-06-30,23.6 1544 | 1542,1877-07-31,10.0 1545 | 1543,1877-08-31,10.5 1546 | 1544,1877-09-30,28.1 1547 | 1545,1877-10-31,11.2 1548 | 1546,1877-11-30,23.6 1549 | 1547,1877-12-31,3.5 1550 | 1548,1878-01-31,5.5 1551 | 1549,1878-02-28,11.0 1552 | 1550,1878-03-31,13.1 1553 | 1551,1878-04-30,0.2 1554 | 1552,1878-05-31,9.9 1555 | 1553,1878-06-30,10.7 1556 | 1554,1878-07-31,0.2 1557 | 1555,1878-08-31,0.0 1558 | 1556,1878-09-30,8.8 1559 | 1557,1878-10-31,1.9 1560 | 1558,1878-11-30,6.8 1561 | 1559,1878-12-31,0.9 1562 | 1560,1879-01-31,1.6 1563 | 1561,1879-02-28,0.9 1564 | 1562,1879-03-31,0.0 1565 | 1563,1879-04-30,10.4 1566 | 1564,1879-05-31,4.0 1567 | 1565,1879-06-30,8.0 1568 | 1566,1879-07-31,12.6 1569 | 1567,1879-08-31,17.9 1570 | 1568,1879-09-30,10.1 1571 | 1569,1879-10-31,20.4 1572 | 1570,1879-11-30,21.9 1573 | 1571,1879-12-31,12.2 1574 | 1572,1880-01-31,40.1 1575 | 1573,1880-02-29,45.4 1576 | 1574,1880-03-31,32.1 1577 | 1575,1880-04-30,32.4 1578 | 1576,1880-05-31,39.1 1579 | 1577,1880-06-30,56.9 1580 | 1578,1880-07-31,36.5 1581 | 1579,1880-08-31,80.3 1582 | 1580,1880-09-30,110.1 1583 | 1581,1880-10-31,71.7 1584 | 1582,1880-11-30,51.1 1585 | 1583,1880-12-31,49.4 1586 | 1584,1881-01-31,60.6 1587 | 1585,1881-02-28,88.6 1588 | 1586,1881-03-31,85.8 1589 | 1587,1881-04-30,86.1 1590 | 1588,1881-05-31,72.5 1591 | 1589,1881-06-30,100.8 1592 | 1590,1881-07-31,128.2 1593 | 1591,1881-08-31,97.3 1594 | 1592,1881-09-30,88.6 1595 | 1593,1881-10-31,107.3 1596 | 1594,1881-11-30,91.5 1597 | 1595,1881-12-31,78.8 1598 | 1596,1882-01-31,75.0 1599 | 1597,1882-02-28,115.9 1600 | 1598,1882-03-31,111.5 1601 | 1599,1882-04-30,159.6 1602 | 1600,1882-05-31,106.9 1603 | 1601,1882-06-30,75.4 1604 | 1602,1882-07-31,75.7 1605 | 1603,1882-08-31,67.5 1606 | 1604,1882-09-30,96.1 1607 | 1605,1882-10-31,98.6 1608 | 1606,1882-11-30,140.6 1609 | 1607,1882-12-31,69.7 1610 | 1608,1883-01-31,101.0 1611 | 1609,1883-02-28,78.2 1612 | 1610,1883-03-31,71.3 1613 | 1611,1883-04-30,136.8 1614 | 1612,1883-05-31,52.5 1615 | 1613,1883-06-30,127.2 1616 | 1614,1883-07-31,134.4 1617 | 1615,1883-08-31,76.6 1618 | 1616,1883-09-30,87.8 1619 | 1617,1883-10-31,139.6 1620 | 1618,1883-11-30,140.8 1621 | 1619,1883-12-31,126.5 1622 | 1620,1884-01-31,152.6 1623 | 1621,1884-02-29,144.8 1624 | 1622,1884-03-31,145.8 1625 | 1623,1884-04-30,126.9 1626 | 1624,1884-05-31,110.9 1627 | 1625,1884-06-30,85.3 1628 | 1626,1884-07-31,88.5 1629 | 1627,1884-08-31,93.1 
1630 | 1628,1884-09-30,103.1 1631 | 1629,1884-10-31,79.6 1632 | 1630,1884-11-30,60.9 1633 | 1631,1884-12-31,78.6 1634 | 1632,1885-01-31,71.4 1635 | 1633,1885-02-28,119.7 1636 | 1634,1885-03-31,82.9 1637 | 1635,1885-04-30,91.7 1638 | 1636,1885-05-31,121.6 1639 | 1637,1885-06-30,139.5 1640 | 1638,1885-07-31,110.9 1641 | 1639,1885-08-31,83.5 1642 | 1640,1885-09-30,66.0 1643 | 1641,1885-10-31,64.5 1644 | 1642,1885-11-30,51.6 1645 | 1643,1885-12-31,36.3 1646 | 1644,1886-01-31,49.8 1647 | 1645,1886-02-28,43.2 1648 | 1646,1886-03-31,95.5 1649 | 1647,1886-04-30,72.9 1650 | 1648,1886-05-31,51.2 1651 | 1649,1886-06-30,45.3 1652 | 1650,1886-07-31,50.4 1653 | 1651,1886-08-31,28.1 1654 | 1652,1886-09-30,35.7 1655 | 1653,1886-10-31,14.4 1656 | 1654,1886-11-30,0.6 1657 | 1655,1886-12-31,21.7 1658 | 1656,1887-01-31,17.0 1659 | 1657,1887-02-28,22.0 1660 | 1658,1887-03-31,7.1 1661 | 1659,1887-04-30,11.6 1662 | 1660,1887-05-31,33.4 1663 | 1661,1887-06-30,26.1 1664 | 1662,1887-07-31,38.9 1665 | 1663,1887-08-31,35.7 1666 | 1664,1887-09-30,12.4 1667 | 1665,1887-10-31,11.0 1668 | 1666,1887-11-30,11.5 1669 | 1667,1887-12-31,34.5 1670 | 1668,1888-01-31,21.2 1671 | 1669,1888-02-29,11.8 1672 | 1670,1888-03-31,13.1 1673 | 1671,1888-04-30,8.5 1674 | 1672,1888-05-31,11.7 1675 | 1673,1888-06-30,11.8 1676 | 1674,1888-07-31,5.0 1677 | 1675,1888-08-31,4.6 1678 | 1676,1888-09-30,14.6 1679 | 1677,1888-10-31,3.4 1680 | 1678,1888-11-30,17.9 1681 | 1679,1888-12-31,11.1 1682 | 1680,1889-01-31,1.3 1683 | 1681,1889-02-28,14.2 1684 | 1682,1889-03-31,11.1 1685 | 1683,1889-04-30,7.3 1686 | 1684,1889-05-31,4.0 1687 | 1685,1889-06-30,10.8 1688 | 1686,1889-07-31,15.8 1689 | 1687,1889-08-31,34.3 1690 | 1688,1889-09-30,10.9 1691 | 1689,1889-10-31,3.4 1692 | 1690,1889-11-30,0.3 1693 | 1691,1889-12-31,11.2 1694 | 1692,1890-01-31,8.8 1695 | 1693,1890-02-28,1.1 1696 | 1694,1890-03-31,8.5 1697 | 1695,1890-04-30,2.7 1698 | 1696,1890-05-31,7.9 1699 | 1697,1890-06-30,2.2 1700 | 1698,1890-07-31,19.3 1701 | 1699,1890-08-31,14.2 1702 | 1700,1890-09-30,28.6 1703 | 1701,1890-10-31,18.6 1704 | 1702,1890-11-30,15.9 1705 | 1703,1890-12-31,13.1 1706 | 1704,1891-01-31,22.5 1707 | 1705,1891-02-28,36.9 1708 | 1706,1891-03-31,17.2 1709 | 1707,1891-04-30,34.2 1710 | 1708,1891-05-31,68.5 1711 | 1709,1891-06-30,80.4 1712 | 1710,1891-07-31,98.0 1713 | 1711,1891-08-31,55.1 1714 | 1712,1891-09-30,89.7 1715 | 1713,1891-10-31,86.0 1716 | 1714,1891-11-30,69.8 1717 | 1715,1891-12-31,54.3 1718 | 1716,1892-01-31,115.2 1719 | 1717,1892-02-29,126.1 1720 | 1718,1892-03-31,83.3 1721 | 1719,1892-04-30,116.0 1722 | 1720,1892-05-31,132.7 1723 | 1721,1892-06-30,127.3 1724 | 1722,1892-07-31,127.6 1725 | 1723,1892-08-31,169.0 1726 | 1724,1892-09-30,104.7 1727 | 1725,1892-10-31,117.5 1728 | 1726,1892-11-30,108.9 1729 | 1727,1892-12-31,131.0 1730 | 1728,1893-01-31,125.0 1731 | 1729,1893-02-28,121.6 1732 | 1730,1893-03-31,109.5 1733 | 1731,1893-04-30,146.8 1734 | 1732,1893-05-31,141.3 1735 | 1733,1893-06-30,149.8 1736 | 1734,1893-07-31,147.6 1737 | 1735,1893-08-31,215.4 1738 | 1736,1893-09-30,129.9 1739 | 1737,1893-10-31,133.3 1740 | 1738,1893-11-30,125.2 1741 | 1739,1893-12-31,156.4 1742 | 1740,1894-01-31,138.6 1743 | 1741,1894-02-28,141.0 1744 | 1742,1894-03-31,87.1 1745 | 1743,1894-04-30,136.0 1746 | 1744,1894-05-31,168.7 1747 | 1745,1894-06-30,164.8 1748 | 1746,1894-07-31,176.7 1749 | 1747,1894-08-31,117.1 1750 | 1748,1894-09-30,110.0 1751 | 1749,1894-10-31,125.7 1752 | 1750,1894-11-30,94.3 1753 | 1751,1894-12-31,100.1 1754 | 1752,1895-01-31,105.4 1755 | 1753,1895-02-28,112.0 
1756 | 1754,1895-03-31,101.6 1757 | 1755,1895-04-30,128.2 1758 | 1756,1895-05-31,112.5 1759 | 1757,1895-06-30,119.1 1760 | 1758,1895-07-31,79.7 1761 | 1759,1895-08-31,114.9 1762 | 1760,1895-09-30,96.1 1763 | 1761,1895-10-31,113.2 1764 | 1762,1895-11-30,78.6 1765 | 1763,1895-12-31,117.9 1766 | 1764,1896-01-31,48.4 1767 | 1765,1896-02-29,95.7 1768 | 1766,1896-03-31,86.5 1769 | 1767,1896-04-30,73.0 1770 | 1768,1896-05-31,46.1 1771 | 1769,1896-06-30,81.7 1772 | 1770,1896-07-31,75.2 1773 | 1771,1896-08-31,45.4 1774 | 1772,1896-09-30,102.2 1775 | 1773,1896-10-31,47.9 1776 | 1774,1896-11-30,63.3 1777 | 1775,1896-12-31,71.0 1778 | 1776,1897-01-31,67.6 1779 | 1777,1897-02-28,49.0 1780 | 1778,1897-03-31,48.7 1781 | 1779,1897-04-30,51.7 1782 | 1780,1897-05-31,33.3 1783 | 1781,1897-06-30,18.9 1784 | 1782,1897-07-31,46.1 1785 | 1783,1897-08-31,36.4 1786 | 1784,1897-09-30,80.4 1787 | 1785,1897-10-31,23.9 1788 | 1786,1897-11-30,14.0 1789 | 1787,1897-12-31,55.5 1790 | 1788,1898-01-31,50.4 1791 | 1789,1898-02-28,60.7 1792 | 1790,1898-03-31,63.9 1793 | 1791,1898-04-30,24.2 1794 | 1792,1898-05-31,43.0 1795 | 1793,1898-06-30,37.1 1796 | 1794,1898-07-31,15.0 1797 | 1795,1898-08-31,52.3 1798 | 1796,1898-09-30,58.1 1799 | 1797,1898-10-31,57.1 1800 | 1798,1898-11-30,51.6 1801 | 1799,1898-12-31,21.1 1802 | 1800,1899-01-31,32.4 1803 | 1801,1899-02-28,15.3 1804 | 1802,1899-03-31,30.3 1805 | 1803,1899-04-30,23.6 1806 | 1804,1899-05-31,12.9 1807 | 1805,1899-06-30,34.1 1808 | 1806,1899-07-31,22.5 1809 | 1807,1899-08-31,4.9 1810 | 1808,1899-09-30,14.0 1811 | 1809,1899-10-31,21.7 1812 | 1810,1899-11-30,13.0 1813 | 1811,1899-12-31,17.6 1814 | 1812,1900-01-31,15.7 1815 | 1813,1900-02-28,22.8 1816 | 1814,1900-03-31,14.4 1817 | 1815,1900-04-30,26.8 1818 | 1816,1900-05-31,25.3 1819 | 1817,1900-06-30,20.1 1820 | 1818,1900-07-31,13.9 1821 | 1819,1900-08-31,7.1 1822 | 1820,1900-09-30,13.9 1823 | 1821,1900-10-31,21.6 1824 | 1822,1900-11-30,7.5 1825 | 1823,1900-12-31,0.5 1826 | 1824,1901-01-31,0.4 1827 | 1825,1901-02-28,4.0 1828 | 1826,1901-03-31,7.4 1829 | 1827,1901-04-30,0.0 1830 | 1828,1901-05-31,17.0 1831 | 1829,1901-06-30,9.7 1832 | 1830,1901-07-31,1.2 1833 | 1831,1901-08-31,1.7 1834 | 1832,1901-09-30,0.9 1835 | 1833,1901-10-31,6.3 1836 | 1834,1901-11-30,6.4 1837 | 1835,1901-12-31,0.0 1838 | 1836,1902-01-31,9.3 1839 | 1837,1902-02-28,0.0 1840 | 1838,1902-03-31,20.7 1841 | 1839,1902-04-30,0.0 1842 | 1840,1902-05-31,4.7 1843 | 1841,1902-06-30,2.4 1844 | 1842,1902-07-31,1.5 1845 | 1843,1902-08-31,3.8 1846 | 1844,1902-09-30,12.6 1847 | 1845,1902-10-31,27.2 1848 | 1846,1902-11-30,17.2 1849 | 1847,1902-12-31,1.8 1850 | 1848,1903-01-31,13.9 1851 | 1849,1903-02-28,28.4 1852 | 1850,1903-03-31,22.5 1853 | 1851,1903-04-30,43.5 1854 | 1852,1903-05-31,24.3 1855 | 1853,1903-06-30,27.2 1856 | 1854,1903-07-31,46.4 1857 | 1855,1903-08-31,48.0 1858 | 1856,1903-09-30,18.5 1859 | 1857,1903-10-31,64.8 1860 | 1858,1903-11-30,74.2 1861 | 1859,1903-12-31,76.2 1862 | 1860,1904-01-31,52.6 1863 | 1861,1904-02-29,40.8 1864 | 1862,1904-03-31,61.9 1865 | 1863,1904-04-30,71.6 1866 | 1864,1904-05-31,65.7 1867 | 1865,1904-06-30,69.8 1868 | 1866,1904-07-31,84.3 1869 | 1867,1904-08-31,97.1 1870 | 1868,1904-09-30,50.2 1871 | 1869,1904-10-31,90.4 1872 | 1870,1904-11-30,63.4 1873 | 1871,1904-12-31,91.1 1874 | 1872,1905-01-31,91.2 1875 | 1873,1905-02-28,143.0 1876 | 1874,1905-03-31,94.3 1877 | 1875,1905-04-30,65.5 1878 | 1876,1905-05-31,79.9 1879 | 1877,1905-06-30,81.6 1880 | 1878,1905-07-31,121.6 1881 | 1879,1905-08-31,98.1 1882 | 1880,1905-09-30,91.8 1883 | 
1881,1905-10-31,131.2 1884 | 1882,1905-11-30,178.7 1885 | 1883,1905-12-31,92.5 1886 | 1884,1906-01-31,75.8 1887 | 1885,1906-02-28,52.3 1888 | 1886,1906-03-31,107.4 1889 | 1887,1906-04-30,92.2 1890 | 1888,1906-05-31,96.2 1891 | 1889,1906-06-30,105.3 1892 | 1890,1906-07-31,172.7 1893 | 1891,1906-08-31,79.6 1894 | 1892,1906-09-30,93.5 1895 | 1893,1906-10-31,29.7 1896 | 1894,1906-11-30,64.8 1897 | 1895,1906-12-31,108.0 1898 | 1896,1907-01-31,127.4 1899 | 1897,1907-02-28,180.3 1900 | 1898,1907-03-31,101.1 1901 | 1899,1907-04-30,87.6 1902 | 1900,1907-05-31,71.4 1903 | 1901,1907-06-30,67.2 1904 | 1902,1907-07-31,82.9 1905 | 1903,1907-08-31,90.5 1906 | 1904,1907-09-30,141.7 1907 | 1905,1907-10-31,109.1 1908 | 1906,1907-11-30,102.5 1909 | 1907,1907-12-31,78.9 1910 | 1908,1908-01-31,65.4 1911 | 1909,1908-02-29,56.6 1912 | 1910,1908-03-31,47.9 1913 | 1911,1908-04-30,96.1 1914 | 1912,1908-05-31,68.0 1915 | 1913,1908-06-30,80.2 1916 | 1914,1908-07-31,65.8 1917 | 1915,1908-08-31,150.9 1918 | 1916,1908-09-30,144.9 1919 | 1917,1908-10-31,53.9 1920 | 1918,1908-11-30,75.9 1921 | 1919,1908-12-31,65.9 1922 | 1920,1909-01-31,94.6 1923 | 1921,1909-02-28,77.6 1924 | 1922,1909-03-31,110.5 1925 | 1923,1909-04-30,53.7 1926 | 1924,1909-05-31,60.0 1927 | 1925,1909-06-30,37.8 1928 | 1926,1909-07-31,59.6 1929 | 1927,1909-08-31,38.5 1930 | 1928,1909-09-30,64.7 1931 | 1929,1909-10-31,97.4 1932 | 1930,1909-11-30,93.0 1933 | 1931,1909-12-31,90.2 1934 | 1932,1910-01-31,44.0 1935 | 1933,1910-02-28,52.5 1936 | 1934,1910-03-31,35.7 1937 | 1935,1910-04-30,14.1 1938 | 1936,1910-05-31,36.9 1939 | 1937,1910-06-30,20.5 1940 | 1938,1910-07-31,23.5 1941 | 1939,1910-08-31,19.3 1942 | 1940,1910-09-30,43.7 1943 | 1941,1910-10-31,63.9 1944 | 1942,1910-11-30,8.3 1945 | 1943,1910-12-31,9.6 1946 | 1944,1911-01-31,5.6 1947 | 1945,1911-02-28,15.0 1948 | 1946,1911-03-31,13.0 1949 | 1947,1911-04-30,27.6 1950 | 1948,1911-05-31,15.1 1951 | 1949,1911-06-30,3.7 1952 | 1950,1911-07-31,5.9 1953 | 1951,1911-08-31,6.7 1954 | 1952,1911-09-30,6.7 1955 | 1953,1911-10-31,4.5 1956 | 1954,1911-11-30,7.0 1957 | 1955,1911-12-31,3.7 1958 | 1956,1912-01-31,0.4 1959 | 1957,1912-02-29,0.0 1960 | 1958,1912-03-31,8.2 1961 | 1959,1912-04-30,7.6 1962 | 1960,1912-05-31,7.4 1963 | 1961,1912-06-30,6.9 1964 | 1962,1912-07-31,4.9 1965 | 1963,1912-08-31,0.5 1966 | 1964,1912-09-30,15.9 1967 | 1965,1912-10-31,7.6 1968 | 1966,1912-11-30,1.9 1969 | 1967,1912-12-31,10.6 1970 | 1968,1913-01-31,3.8 1971 | 1969,1913-02-28,4.8 1972 | 1970,1913-03-31,0.8 1973 | 1971,1913-04-30,1.6 1974 | 1972,1913-05-31,0.0 1975 | 1973,1913-06-30,0.0 1976 | 1974,1913-07-31,2.9 1977 | 1975,1913-08-31,0.4 1978 | 1976,1913-09-30,2.0 1979 | 1977,1913-10-31,5.2 1980 | 1978,1913-11-30,1.2 1981 | 1979,1913-12-31,6.3 1982 | 1980,1914-01-31,4.7 1983 | 1981,1914-02-28,4.4 1984 | 1982,1914-03-31,5.3 1985 | 1983,1914-04-30,28.9 1986 | 1984,1914-05-31,8.7 1987 | 1985,1914-06-30,19.1 1988 | 1986,1914-07-31,9.1 1989 | 1987,1914-08-31,12.9 1990 | 1988,1914-09-30,21.2 1991 | 1989,1914-10-31,13.7 1992 | 1990,1914-11-30,27.3 1993 | 1991,1914-12-31,37.3 1994 | 1992,1915-01-31,38.5 1995 | 1993,1915-02-28,70.4 1996 | 1994,1915-03-31,64.8 1997 | 1995,1915-04-30,68.9 1998 | 1996,1915-05-31,55.1 1999 | 1997,1915-06-30,114.7 2000 | 1998,1915-07-31,119.4 2001 | 1999,1915-08-31,116.0 2002 | 2000,1915-09-30,82.4 2003 | 2001,1915-10-31,89.1 2004 | 2002,1915-11-30,70.9 2005 | 2003,1915-12-31,57.5 2006 | 2004,1916-01-31,75.5 2007 | 2005,1916-02-29,92.4 2008 | 2006,1916-03-31,111.7 2009 | 2007,1916-04-30,119.8 2010 | 
2008,1916-05-31,124.0 2011 | 2009,1916-06-30,112.9 2012 | 2010,1916-07-31,89.3 2013 | 2011,1916-08-31,58.6 2014 | 2012,1916-09-30,75.1 2015 | 2013,1916-10-31,84.5 2016 | 2014,1916-11-30,109.5 2017 | 2015,1916-12-31,88.3 2018 | 2016,1917-01-31,124.5 2019 | 2017,1917-02-28,119.9 2020 | 2018,1917-03-31,158.0 2021 | 2019,1917-04-30,124.6 2022 | 2020,1917-05-31,190.2 2023 | 2021,1917-06-30,191.4 2024 | 2022,1917-07-31,199.7 2025 | 2023,1917-08-31,257.7 2026 | 2024,1917-09-30,215.6 2027 | 2025,1917-10-31,120.3 2028 | 2026,1917-11-30,160.7 2029 | 2027,1917-12-31,215.5 2030 | 2028,1918-01-31,160.0 2031 | 2029,1918-02-28,108.7 2032 | 2030,1918-03-31,120.4 2033 | 2031,1918-04-30,134.1 2034 | 2032,1918-05-31,127.9 2035 | 2033,1918-06-30,99.0 2036 | 2034,1918-07-31,179.5 2037 | 2035,1918-08-31,169.6 2038 | 2036,1918-09-30,133.2 2039 | 2037,1918-10-31,141.7 2040 | 2038,1918-11-30,139.0 2041 | 2039,1918-12-31,98.5 2042 | 2040,1919-01-31,80.2 2043 | 2041,1919-02-28,132.5 2044 | 2042,1919-03-31,110.9 2045 | 2043,1919-04-30,86.3 2046 | 2044,1919-05-31,146.9 2047 | 2045,1919-06-30,185.3 2048 | 2046,1919-07-31,107.8 2049 | 2047,1919-08-31,115.1 2050 | 2048,1919-09-30,91.1 2051 | 2049,1919-10-31,88.0 2052 | 2050,1919-11-30,69.9 2053 | 2051,1919-12-31,58.0 2054 | 2052,1920-01-31,85.3 2055 | 2053,1920-02-29,89.8 2056 | 2054,1920-03-31,116.9 2057 | 2055,1920-04-30,24.7 2058 | 2056,1920-05-31,55.4 2059 | 2057,1920-06-30,64.6 2060 | 2058,1920-07-31,45.7 2061 | 2059,1920-08-31,31.9 2062 | 2060,1920-09-30,60.5 2063 | 2061,1920-10-31,82.7 2064 | 2062,1920-11-30,45.3 2065 | 2063,1920-12-31,49.8 2066 | 2064,1921-01-31,52.5 2067 | 2065,1921-02-28,47.1 2068 | 2066,1921-03-31,44.5 2069 | 2067,1921-04-30,54.0 2070 | 2068,1921-05-31,37.0 2071 | 2069,1921-06-30,56.2 2072 | 2070,1921-07-31,69.7 2073 | 2071,1921-08-31,38.0 2074 | 2072,1921-09-30,29.6 2075 | 2073,1921-10-31,30.3 2076 | 2074,1921-11-30,29.6 2077 | 2075,1921-12-31,33.9 2078 | 2076,1922-01-31,19.7 2079 | 2077,1922-02-28,43.9 2080 | 2078,1922-03-31,91.3 2081 | 2079,1922-04-30,18.3 2082 | 2080,1922-05-31,13.4 2083 | 2081,1922-06-30,9.8 2084 | 2082,1922-07-31,18.3 2085 | 2083,1922-08-31,10.9 2086 | 2084,1922-09-30,7.8 2087 | 2085,1922-10-31,10.4 2088 | 2086,1922-11-30,12.2 2089 | 2087,1922-12-31,29.0 2090 | 2088,1923-01-31,7.5 2091 | 2089,1923-02-28,2.5 2092 | 2090,1923-03-31,5.5 2093 | 2091,1923-04-30,10.2 2094 | 2092,1923-05-31,5.3 2095 | 2093,1923-06-30,15.3 2096 | 2094,1923-07-31,5.9 2097 | 2095,1923-08-31,0.8 2098 | 2096,1923-09-30,22.1 2099 | 2097,1923-10-31,19.5 2100 | 2098,1923-11-30,16.7 2101 | 2099,1923-12-31,4.6 2102 | 2100,1924-01-31,0.8 2103 | 2101,1924-02-29,8.5 2104 | 2102,1924-03-31,3.0 2105 | 2103,1924-04-30,18.9 2106 | 2104,1924-05-31,34.6 2107 | 2105,1924-06-30,40.0 2108 | 2106,1924-07-31,46.9 2109 | 2107,1924-08-31,32.1 2110 | 2108,1924-09-30,41.9 2111 | 2109,1924-10-31,42.6 2112 | 2110,1924-11-30,37.5 2113 | 2111,1924-12-31,27.5 2114 | 2112,1925-01-31,9.1 2115 | 2113,1925-02-28,38.6 2116 | 2114,1925-03-31,30.0 2117 | 2115,1925-04-30,53.0 2118 | 2116,1925-05-31,71.4 2119 | 2117,1925-06-30,79.1 2120 | 2118,1925-07-31,64.2 2121 | 2119,1925-08-31,63.3 2122 | 2120,1925-09-30,100.3 2123 | 2121,1925-10-31,115.3 2124 | 2122,1925-11-30,97.6 2125 | 2123,1925-12-31,164.4 2126 | 2124,1926-01-31,119.7 2127 | 2125,1926-02-28,116.4 2128 | 2126,1926-03-31,104.2 2129 | 2127,1926-04-30,64.2 2130 | 2128,1926-05-31,107.0 2131 | 2129,1926-06-30,122.4 2132 | 2130,1926-07-31,87.2 2133 | 2131,1926-08-31,102.7 2134 | 2132,1926-09-30,101.4 2135 | 2133,1926-10-31,119.3 
2136 | 2134,1926-11-30,100.8 2137 | 2135,1926-12-31,132.4 2138 | 2136,1927-01-31,136.0 2139 | 2137,1927-02-28,155.1 2140 | 2138,1927-03-31,116.1 2141 | 2139,1927-04-30,155.8 2142 | 2140,1927-05-31,131.9 2143 | 2141,1927-06-30,98.5 2144 | 2142,1927-07-31,91.6 2145 | 2143,1927-08-31,89.6 2146 | 2144,1927-09-30,114.1 2147 | 2145,1927-10-31,105.2 2148 | 2146,1927-11-30,112.1 2149 | 2147,1927-12-31,75.3 2150 | 2148,1928-01-31,139.2 2151 | 2149,1928-02-29,122.4 2152 | 2150,1928-03-31,142.4 2153 | 2151,1928-04-30,134.3 2154 | 2152,1928-05-31,128.3 2155 | 2153,1928-06-30,152.4 2156 | 2154,1928-07-31,163.4 2157 | 2155,1928-08-31,139.6 2158 | 2156,1928-09-30,149.6 2159 | 2157,1928-10-31,102.3 2160 | 2158,1928-11-30,83.9 2161 | 2159,1928-12-31,98.3 2162 | 2160,1929-01-31,114.8 2163 | 2161,1929-02-28,104.7 2164 | 2162,1929-03-31,83.8 2165 | 2163,1929-04-30,87.9 2166 | 2164,1929-05-31,97.0 2167 | 2165,1929-06-30,119.9 2168 | 2166,1929-07-31,117.1 2169 | 2167,1929-08-31,109.6 2170 | 2168,1929-09-30,57.3 2171 | 2169,1929-10-31,90.1 2172 | 2170,1929-11-30,135.1 2173 | 2171,1929-12-31,179.9 2174 | 2172,1930-01-31,108.8 2175 | 2173,1930-02-28,83.1 2176 | 2174,1930-03-31,58.4 2177 | 2175,1930-04-30,63.7 2178 | 2176,1930-05-31,61.4 2179 | 2177,1930-06-30,48.0 2180 | 2178,1930-07-31,36.5 2181 | 2179,1930-08-31,41.5 2182 | 2180,1930-09-30,53.5 2183 | 2181,1930-10-31,57.2 2184 | 2182,1930-11-30,59.4 2185 | 2183,1930-12-31,43.0 2186 | 2184,1931-01-31,24.4 2187 | 2185,1931-02-28,71.8 2188 | 2186,1931-03-31,50.1 2189 | 2187,1931-04-30,52.0 2190 | 2188,1931-05-31,40.8 2191 | 2189,1931-06-30,25.5 2192 | 2190,1931-07-31,29.0 2193 | 2191,1931-08-31,21.6 2194 | 2192,1931-09-30,31.7 2195 | 2193,1931-10-31,16.6 2196 | 2194,1931-11-30,31.2 2197 | 2195,1931-12-31,29.7 2198 | 2196,1932-01-31,20.1 2199 | 2197,1932-02-29,17.6 2200 | 2198,1932-03-31,18.8 2201 | 2199,1932-04-30,18.7 2202 | 2200,1932-05-31,29.7 2203 | 2201,1932-06-30,36.9 2204 | 2202,1932-07-31,16.0 2205 | 2203,1932-08-31,11.3 2206 | 2204,1932-09-30,6.6 2207 | 2205,1932-10-31,14.8 2208 | 2206,1932-11-30,13.7 2209 | 2207,1932-12-31,18.5 2210 | 2208,1933-01-31,20.4 2211 | 2209,1933-02-28,36.9 2212 | 2210,1933-03-31,16.8 2213 | 2211,1933-04-30,4.9 2214 | 2212,1933-05-31,5.3 2215 | 2213,1933-06-30,8.7 2216 | 2214,1933-07-31,4.7 2217 | 2215,1933-08-31,0.4 2218 | 2216,1933-09-30,8.5 2219 | 2217,1933-10-31,5.0 2220 | 2218,1933-11-30,1.0 2221 | 2219,1933-12-31,0.4 2222 | 2220,1934-01-31,5.7 2223 | 2221,1934-02-28,12.9 2224 | 2222,1934-03-31,7.2 2225 | 2223,1934-04-30,18.8 2226 | 2224,1934-05-31,32.9 2227 | 2225,1934-06-30,11.2 2228 | 2226,1934-07-31,15.3 2229 | 2227,1934-08-31,13.9 2230 | 2228,1934-09-30,6.7 2231 | 2229,1934-10-31,9.5 2232 | 2230,1934-11-30,14.5 2233 | 2231,1934-12-31,25.7 2234 | 2232,1935-01-31,31.1 2235 | 2233,1935-02-28,34.1 2236 | 2234,1935-03-31,38.5 2237 | 2235,1935-04-30,20.4 2238 | 2236,1935-05-31,45.4 2239 | 2237,1935-06-30,76.2 2240 | 2238,1935-07-31,56.5 2241 | 2239,1935-08-31,50.2 2242 | 2240,1935-09-30,70.1 2243 | 2241,1935-10-31,88.7 2244 | 2242,1935-11-30,107.0 2245 | 2243,1935-12-31,102.5 2246 | 2244,1936-01-31,104.7 2247 | 2245,1936-02-29,123.9 2248 | 2246,1936-03-31,128.4 2249 | 2247,1936-04-30,124.8 2250 | 2248,1936-05-31,90.9 2251 | 2249,1936-06-30,116.7 2252 | 2250,1936-07-31,87.2 2253 | 2251,1936-08-31,145.0 2254 | 2252,1936-09-30,126.7 2255 | 2253,1936-10-31,148.2 2256 | 2254,1936-11-30,192.3 2257 | 2255,1936-12-31,205.6 2258 | 2256,1937-01-31,220.9 2259 | 2257,1937-02-28,214.1 2260 | 2258,1937-03-31,139.7 2261 | 
2259,1937-04-30,182.2 2262 | 2260,1937-05-31,194.4 2263 | 2261,1937-06-30,217.1 2264 | 2262,1937-07-31,241.8 2265 | 2263,1937-08-31,229.5 2266 | 2264,1937-09-30,167.9 2267 | 2265,1937-10-31,208.2 2268 | 2266,1937-11-30,123.9 2269 | 2267,1937-12-31,147.9 2270 | 2268,1938-01-31,164.0 2271 | 2269,1938-02-28,198.6 2272 | 2270,1938-03-31,144.2 2273 | 2271,1938-04-30,168.4 2274 | 2272,1938-05-31,212.3 2275 | 2273,1938-06-30,162.6 2276 | 2274,1938-07-31,275.6 2277 | 2275,1938-08-31,192.8 2278 | 2276,1938-09-30,149.3 2279 | 2277,1938-10-31,165.3 2280 | 2278,1938-11-30,203.6 2281 | 2279,1938-12-31,154.7 2282 | 2280,1939-01-31,133.9 2283 | 2281,1939-02-28,129.0 2284 | 2282,1939-03-31,107.8 2285 | 2283,1939-04-30,181.9 2286 | 2284,1939-05-31,197.3 2287 | 2285,1939-06-30,168.3 2288 | 2286,1939-07-31,162.6 2289 | 2287,1939-08-31,176.3 2290 | 2288,1939-09-30,187.7 2291 | 2289,1939-10-31,146.9 2292 | 2290,1939-11-30,113.7 2293 | 2291,1939-12-31,70.2 2294 | 2292,1940-01-31,84.1 2295 | 2293,1940-02-29,99.1 2296 | 2294,1940-03-31,138.9 2297 | 2295,1940-04-30,101.1 2298 | 2296,1940-05-31,90.6 2299 | 2297,1940-06-30,139.8 2300 | 2298,1940-07-31,112.5 2301 | 2299,1940-08-31,175.9 2302 | 2300,1940-09-30,110.8 2303 | 2301,1940-10-31,91.5 2304 | 2302,1940-11-30,97.4 2305 | 2303,1940-12-31,113.8 2306 | 2304,1941-01-31,76.1 2307 | 2305,1941-02-28,74.3 2308 | 2306,1941-03-31,77.5 2309 | 2307,1941-04-30,54.6 2310 | 2308,1941-05-31,49.0 2311 | 2309,1941-06-30,99.6 2312 | 2310,1941-07-31,111.4 2313 | 2311,1941-08-31,100.1 2314 | 2312,1941-09-30,109.8 2315 | 2313,1941-10-31,77.2 2316 | 2314,1941-11-30,64.0 2317 | 2315,1941-12-31,56.2 2318 | 2316,1942-01-31,59.5 2319 | 2317,1942-02-28,87.9 2320 | 2318,1942-03-31,90.4 2321 | 2319,1942-04-30,101.2 2322 | 2320,1942-05-31,41.8 2323 | 2321,1942-06-30,18.9 2324 | 2322,1942-07-31,29.6 2325 | 2323,1942-08-31,33.7 2326 | 2324,1942-09-30,28.7 2327 | 2325,1942-10-31,32.0 2328 | 2326,1942-11-30,51.3 2329 | 2327,1942-12-31,37.6 2330 | 2328,1943-01-31,20.7 2331 | 2329,1943-02-28,48.1 2332 | 2330,1943-03-31,45.7 2333 | 2331,1943-04-30,43.5 2334 | 2332,1943-05-31,23.6 2335 | 2333,1943-06-30,12.7 2336 | 2334,1943-07-31,22.0 2337 | 2335,1943-08-31,32.3 2338 | 2336,1943-09-30,16.7 2339 | 2337,1943-10-31,13.0 2340 | 2338,1943-11-30,17.0 2341 | 2339,1943-12-31,31.4 2342 | 2340,1944-01-31,6.1 2343 | 2341,1944-02-29,0.8 2344 | 2342,1944-03-31,18.3 2345 | 2343,1944-04-30,0.4 2346 | 2344,1944-05-31,4.1 2347 | 2345,1944-06-30,8.2 2348 | 2346,1944-07-31,8.5 2349 | 2347,1944-08-31,27.9 2350 | 2348,1944-09-30,23.7 2351 | 2349,1944-10-31,28.1 2352 | 2350,1944-11-30,17.9 2353 | 2351,1944-12-31,47.4 2354 | 2352,1945-01-31,30.8 2355 | 2353,1945-02-28,21.2 2356 | 2354,1945-03-31,35.9 2357 | 2355,1945-04-30,53.4 2358 | 2356,1945-05-31,51.0 2359 | 2357,1945-06-30,60.2 2360 | 2358,1945-07-31,71.0 2361 | 2359,1945-08-31,43.1 2362 | 2360,1945-09-30,58.1 2363 | 2361,1945-10-31,114.5 2364 | 2362,1945-11-30,76.6 2365 | 2363,1945-12-31,45.7 2366 | 2364,1946-01-31,79.3 2367 | 2365,1946-02-28,143.8 2368 | 2366,1946-03-31,127.7 2369 | 2367,1946-04-30,126.1 2370 | 2368,1946-05-31,141.6 2371 | 2369,1946-06-30,122.6 2372 | 2370,1946-07-31,193.7 2373 | 2371,1946-08-31,178.7 2374 | 2372,1946-09-30,157.4 2375 | 2373,1946-10-31,170.5 2376 | 2374,1946-11-30,206.3 2377 | 2375,1946-12-31,202.9 2378 | 2376,1947-01-31,163.7 2379 | 2377,1947-02-28,188.9 2380 | 2378,1947-03-31,183.8 2381 | 2379,1947-04-30,212.1 2382 | 2380,1947-05-31,285.0 2383 | 2381,1947-06-30,232.1 2384 | 2382,1947-07-31,223.5 2385 | 2383,1947-08-31,267.4 2386 
| 2384,1947-09-30,239.9 2387 | 2385,1947-10-31,231.7 2388 | 2386,1947-11-30,181.3 2389 | 2387,1947-12-31,164.9 2390 | 2388,1948-01-31,153.6 2391 | 2389,1948-02-29,122.0 2392 | 2390,1948-03-31,134.3 2393 | 2391,1948-04-30,268.5 2394 | 2392,1948-05-31,246.4 2395 | 2393,1948-06-30,237.5 2396 | 2394,1948-07-31,201.4 2397 | 2395,1948-08-31,223.7 2398 | 2396,1948-09-30,202.9 2399 | 2397,1948-10-31,192.9 2400 | 2398,1948-11-30,135.6 2401 | 2399,1948-12-31,195.3 2402 | 2400,1949-01-31,168.6 2403 | 2401,1949-02-28,258.0 2404 | 2402,1949-03-31,223.0 2405 | 2403,1949-04-30,208.1 2406 | 2404,1949-05-31,150.4 2407 | 2405,1949-06-30,172.4 2408 | 2406,1949-07-31,178.3 2409 | 2407,1949-08-31,175.3 2410 | 2408,1949-09-30,205.8 2411 | 2409,1949-10-31,186.3 2412 | 2410,1949-11-30,203.2 2413 | 2411,1949-12-31,166.6 2414 | 2412,1950-01-31,143.9 2415 | 2413,1950-02-28,134.3 2416 | 2414,1950-03-31,155.4 2417 | 2415,1950-04-30,160.6 2418 | 2416,1950-05-31,150.5 2419 | 2417,1950-06-30,118.3 2420 | 2418,1950-07-31,128.9 2421 | 2419,1950-08-31,120.6 2422 | 2420,1950-09-30,72.7 2423 | 2421,1950-10-31,87.0 2424 | 2422,1950-11-30,77.7 2425 | 2423,1950-12-31,76.7 2426 | 2424,1951-01-31,85.0 2427 | 2425,1951-02-28,84.8 2428 | 2426,1951-03-31,79.3 2429 | 2427,1951-04-30,131.7 2430 | 2428,1951-05-31,153.7 2431 | 2429,1951-06-30,142.4 2432 | 2430,1951-07-31,87.2 2433 | 2431,1951-08-31,86.5 2434 | 2432,1951-09-30,117.7 2435 | 2433,1951-10-31,73.3 2436 | 2434,1951-11-30,74.2 2437 | 2435,1951-12-31,65.0 2438 | 2436,1952-01-31,57.9 2439 | 2437,1952-02-29,32.4 2440 | 2438,1952-03-31,31.5 2441 | 2439,1952-04-30,41.7 2442 | 2440,1952-05-31,33.6 2443 | 2441,1952-06-30,52.0 2444 | 2442,1952-07-31,56.1 2445 | 2443,1952-08-31,77.8 2446 | 2444,1952-09-30,40.4 2447 | 2445,1952-10-31,34.2 2448 | 2446,1952-11-30,31.8 2449 | 2447,1952-12-31,48.9 2450 | 2448,1953-01-31,37.9 2451 | 2449,1953-02-28,5.9 2452 | 2450,1953-03-31,14.7 2453 | 2451,1953-04-30,39.6 2454 | 2452,1953-05-31,18.3 2455 | 2453,1953-06-30,31.5 2456 | 2454,1953-07-31,12.7 2457 | 2455,1953-08-31,33.6 2458 | 2456,1953-09-30,28.0 2459 | 2457,1953-10-31,12.3 2460 | 2458,1953-11-30,2.5 2461 | 2459,1953-12-31,3.9 2462 | 2460,1954-01-31,0.4 2463 | 2461,1954-02-28,0.8 2464 | 2462,1954-03-31,15.8 2465 | 2463,1954-04-30,2.7 2466 | 2464,1954-05-31,1.2 2467 | 2465,1954-06-30,0.4 2468 | 2466,1954-07-31,7.3 2469 | 2467,1954-08-31,12.6 2470 | 2468,1954-09-30,2.3 2471 | 2469,1954-10-31,10.5 2472 | 2470,1954-11-30,13.4 2473 | 2471,1954-12-31,11.3 2474 | 2472,1955-01-31,33.4 2475 | 2473,1955-02-28,29.9 2476 | 2474,1955-03-31,7.3 2477 | 2475,1955-04-30,16.4 2478 | 2476,1955-05-31,41.3 2479 | 2477,1955-06-30,45.2 2480 | 2478,1955-07-31,38.2 2481 | 2479,1955-08-31,58.0 2482 | 2480,1955-09-30,60.8 2483 | 2481,1955-10-31,83.0 2484 | 2482,1955-11-30,126.3 2485 | 2483,1955-12-31,108.8 2486 | 2484,1956-01-31,104.1 2487 | 2485,1956-02-29,175.6 2488 | 2486,1956-03-31,167.7 2489 | 2487,1956-04-30,156.7 2490 | 2488,1956-05-31,193.4 2491 | 2489,1956-06-30,165.1 2492 | 2490,1956-07-31,182.7 2493 | 2491,1956-08-31,240.2 2494 | 2492,1956-09-30,245.4 2495 | 2493,1956-10-31,219.9 2496 | 2494,1956-11-30,285.0 2497 | 2495,1956-12-31,272.0 2498 | 2496,1957-01-31,233.7 2499 | 2497,1957-02-28,184.5 2500 | 2498,1957-03-31,222.8 2501 | 2499,1957-04-30,248.0 2502 | 2500,1957-05-31,233.0 2503 | 2501,1957-06-30,284.3 2504 | 2502,1957-07-31,265.1 2505 | 2503,1957-08-31,223.7 2506 | 2504,1957-09-30,334.0 2507 | 2505,1957-10-31,359.4 2508 | 2506,1957-11-30,298.6 2509 | 2507,1957-12-31,339.0 2510 | 2508,1958-01-31,286.7 
2511 | 2509,1958-02-28,233.6 2512 | 2510,1958-03-31,270.0 2513 | 2511,1958-04-30,277.6 2514 | 2512,1958-05-31,248.2 2515 | 2513,1958-06-30,242.9 2516 | 2514,1958-07-31,271.0 2517 | 2515,1958-08-31,283.5 2518 | 2516,1958-09-30,285.1 2519 | 2517,1958-10-31,256.9 2520 | 2518,1958-11-30,215.6 2521 | 2519,1958-12-31,265.7 2522 | 2520,1959-01-31,307.7 2523 | 2521,1959-02-28,202.6 2524 | 2522,1959-03-31,263.0 2525 | 2523,1959-04-30,231.3 2526 | 2524,1959-05-31,243.6 2527 | 2525,1959-06-30,238.9 2528 | 2526,1959-07-31,211.9 2529 | 2527,1959-08-31,282.6 2530 | 2528,1959-09-30,205.6 2531 | 2529,1959-10-31,157.7 2532 | 2530,1959-11-30,175.6 2533 | 2531,1959-12-31,177.1 2534 | 2532,1960-01-31,207.2 2535 | 2533,1960-02-29,149.9 2536 | 2534,1960-03-31,144.6 2537 | 2535,1960-04-30,172.7 2538 | 2536,1960-05-31,169.3 2539 | 2537,1960-06-30,156.0 2540 | 2538,1960-07-31,172.4 2541 | 2539,1960-08-31,190.0 2542 | 2540,1960-09-30,180.1 2543 | 2541,1960-10-31,117.3 2544 | 2542,1960-11-30,126.9 2545 | 2543,1960-12-31,121.2 2546 | 2544,1961-01-31,82.1 2547 | 2545,1961-02-28,65.4 2548 | 2546,1961-03-31,75.2 2549 | 2547,1961-04-30,86.9 2550 | 2548,1961-05-31,72.3 2551 | 2549,1961-06-30,109.5 2552 | 2550,1961-07-31,99.3 2553 | 2551,1961-08-31,79.2 2554 | 2552,1961-09-30,90.1 2555 | 2553,1961-10-31,53.7 2556 | 2554,1961-11-30,46.5 2557 | 2555,1961-12-31,56.9 2558 | 2556,1962-01-31,55.1 2559 | 2557,1962-02-28,71.7 2560 | 2558,1962-03-31,64.9 2561 | 2559,1962-04-30,65.9 2562 | 2560,1962-05-31,61.9 2563 | 2561,1962-06-30,59.6 2564 | 2562,1962-07-31,31.4 2565 | 2563,1962-08-31,31.5 2566 | 2564,1962-09-30,72.7 2567 | 2565,1962-10-31,56.1 2568 | 2566,1962-11-30,38.8 2569 | 2567,1962-12-31,33.2 2570 | 2568,1963-01-31,28.7 2571 | 2569,1963-02-28,35.2 2572 | 2570,1963-03-31,24.8 2573 | 2571,1963-04-30,41.7 2574 | 2572,1963-05-31,61.1 2575 | 2573,1963-06-30,51.2 2576 | 2574,1963-07-31,28.7 2577 | 2575,1963-08-31,47.5 2578 | 2576,1963-09-30,55.2 2579 | 2577,1963-10-31,50.1 2580 | 2578,1963-11-30,33.7 2581 | 2579,1963-12-31,21.6 2582 | 2580,1964-01-31,22.6 2583 | 2581,1964-02-29,25.3 2584 | 2582,1964-03-31,24.1 2585 | 2583,1964-04-30,12.9 2586 | 2584,1964-05-31,14.3 2587 | 2585,1964-06-30,13.5 2588 | 2586,1964-07-31,4.8 2589 | 2587,1964-08-31,13.8 2590 | 2588,1964-09-30,7.0 2591 | 2589,1964-10-31,9.2 2592 | 2590,1964-11-30,11.1 2593 | 2591,1964-12-31,22.1 2594 | 2592,1965-01-31,25.4 2595 | 2593,1965-02-28,20.8 2596 | 2594,1965-03-31,17.5 2597 | 2595,1965-04-30,10.2 2598 | 2596,1965-05-31,34.5 2599 | 2597,1965-06-30,23.3 2600 | 2598,1965-07-31,17.3 2601 | 2599,1965-08-31,13.3 2602 | 2600,1965-09-30,24.5 2603 | 2601,1965-10-31,29.1 2604 | 2602,1965-11-30,22.8 2605 | 2603,1965-12-31,24.7 2606 | 2604,1966-01-31,40.3 2607 | 2605,1966-02-28,35.3 2608 | 2606,1966-03-31,36.4 2609 | 2607,1966-04-30,69.0 2610 | 2608,1966-05-31,64.2 2611 | 2609,1966-06-30,67.7 2612 | 2610,1966-07-31,80.2 2613 | 2611,1966-08-31,72.6 2614 | 2612,1966-09-30,71.1 2615 | 2613,1966-10-31,81.2 2616 | 2614,1966-11-30,81.0 2617 | 2615,1966-12-31,99.8 2618 | 2616,1967-01-31,157.0 2619 | 2617,1967-02-28,132.6 2620 | 2618,1967-03-31,158.3 2621 | 2619,1967-04-30,98.4 2622 | 2620,1967-05-31,122.5 2623 | 2621,1967-06-30,95.4 2624 | 2622,1967-07-31,129.5 2625 | 2623,1967-08-31,151.8 2626 | 2624,1967-09-30,108.7 2627 | 2625,1967-10-31,125.0 2628 | 2626,1967-11-30,133.6 2629 | 2627,1967-12-31,179.0 2630 | 2628,1968-01-31,172.5 2631 | 2629,1968-02-29,158.5 2632 | 2630,1968-03-31,130.5 2633 | 2631,1968-04-30,115.0 2634 | 2632,1968-05-31,180.0 2635 | 2633,1968-06-30,156.2 2636 
| 2634,1968-07-31,136.2 2637 | 2635,1968-08-31,154.8 2638 | 2636,1968-09-30,166.0 2639 | 2637,1968-10-31,152.5 2640 | 2638,1968-11-30,121.7 2641 | 2639,1968-12-31,155.5 2642 | 2640,1969-01-31,147.8 2643 | 2641,1969-02-28,170.5 2644 | 2642,1969-03-31,192.3 2645 | 2643,1969-04-30,151.1 2646 | 2644,1969-05-31,169.9 2647 | 2645,1969-06-30,150.1 2648 | 2646,1969-07-31,137.1 2649 | 2647,1969-08-31,138.8 2650 | 2648,1969-09-30,129.3 2651 | 2649,1969-10-31,135.4 2652 | 2650,1969-11-30,132.4 2653 | 2651,1969-12-31,138.6 2654 | 2652,1970-01-31,157.9 2655 | 2653,1970-02-28,180.8 2656 | 2654,1970-03-31,145.7 2657 | 2655,1970-04-30,155.1 2658 | 2656,1970-05-31,180.5 2659 | 2657,1970-06-30,151.3 2660 | 2658,1970-07-31,159.3 2661 | 2659,1970-08-31,131.7 2662 | 2660,1970-09-30,140.8 2663 | 2661,1970-10-31,122.6 2664 | 2662,1970-11-30,134.8 2665 | 2663,1970-12-31,118.2 2666 | 2664,1971-01-31,129.2 2667 | 2665,1971-02-28,111.8 2668 | 2666,1971-03-31,85.9 2669 | 2667,1971-04-30,101.6 2670 | 2668,1971-05-31,81.5 2671 | 2669,1971-06-30,70.7 2672 | 2670,1971-07-31,114.7 2673 | 2671,1971-08-31,87.0 2674 | 2672,1971-09-30,71.3 2675 | 2673,1971-10-31,73.4 2676 | 2674,1971-11-30,89.5 2677 | 2675,1971-12-31,116.5 2678 | 2676,1972-01-31,87.0 2679 | 2677,1972-02-29,125.3 2680 | 2678,1972-03-31,113.5 2681 | 2679,1972-04-30,89.6 2682 | 2680,1972-05-31,113.9 2683 | 2681,1972-06-30,124.7 2684 | 2682,1972-07-31,108.3 2685 | 2683,1972-08-31,108.9 2686 | 2684,1972-09-30,90.7 2687 | 2685,1972-10-31,86.9 2688 | 2686,1972-11-30,59.2 2689 | 2687,1972-12-31,64.3 2690 | 2688,1973-01-31,61.8 2691 | 2689,1973-02-28,60.9 2692 | 2690,1973-03-31,65.4 2693 | 2691,1973-04-30,81.8 2694 | 2692,1973-05-31,60.3 2695 | 2693,1973-06-30,56.1 2696 | 2694,1973-07-31,33.2 2697 | 2695,1973-08-31,36.6 2698 | 2696,1973-09-30,84.1 2699 | 2697,1973-10-31,43.7 2700 | 2698,1973-11-30,34.3 2701 | 2699,1973-12-31,33.3 2702 | 2700,1974-01-31,39.4 2703 | 2701,1974-02-28,37.3 2704 | 2702,1974-03-31,30.9 2705 | 2703,1974-04-30,57.5 2706 | 2704,1974-05-31,56.3 2707 | 2705,1974-06-30,51.5 2708 | 2706,1974-07-31,79.1 2709 | 2707,1974-08-31,47.9 2710 | 2708,1974-09-30,57.2 2711 | 2709,1974-10-31,67.2 2712 | 2710,1974-11-30,35.9 2713 | 2711,1974-12-31,29.6 2714 | 2712,1975-01-31,27.3 2715 | 2713,1975-02-28,16.7 2716 | 2714,1975-03-31,16.9 2717 | 2715,1975-04-30,7.7 2718 | 2716,1975-05-31,13.1 2719 | 2717,1975-06-30,16.7 2720 | 2718,1975-07-31,40.4 2721 | 2719,1975-08-31,56.7 2722 | 2720,1975-09-30,20.3 2723 | 2721,1975-10-31,13.6 2724 | 2722,1975-11-30,27.9 2725 | 2723,1975-12-31,11.6 2726 | 2724,1976-01-31,11.9 2727 | 2725,1976-02-29,6.4 2728 | 2726,1976-03-31,31.5 2729 | 2727,1976-04-30,27.3 2730 | 2728,1976-05-31,18.2 2731 | 2729,1976-06-30,17.9 2732 | 2730,1976-07-31,2.9 2733 | 2731,1976-08-31,24.1 2734 | 2732,1976-09-30,20.0 2735 | 2733,1976-10-31,29.7 2736 | 2734,1976-11-30,7.9 2737 | 2735,1976-12-31,22.3 2738 | 2736,1977-01-31,23.8 2739 | 2737,1977-02-28,33.3 2740 | 2738,1977-03-31,13.0 2741 | 2739,1977-04-30,19.0 2742 | 2740,1977-05-31,27.0 2743 | 2741,1977-06-30,54.9 2744 | 2742,1977-07-31,30.6 2745 | 2743,1977-08-31,43.0 2746 | 2744,1977-09-30,62.4 2747 | 2745,1977-10-31,62.1 2748 | 2746,1977-11-30,41.6 2749 | 2747,1977-12-31,61.4 2750 | 2748,1978-01-31,73.7 2751 | 2749,1978-02-28,132.6 2752 | 2750,1978-03-31,108.4 2753 | 2751,1978-04-30,141.2 2754 | 2752,1978-05-31,117.1 2755 | 2753,1978-06-30,134.6 2756 | 2754,1978-07-31,99.7 2757 | 2755,1978-08-31,82.4 2758 | 2756,1978-09-30,195.7 2759 | 2757,1978-10-31,177.1 2760 | 2758,1978-11-30,138.5 2761 | 
2759,1978-12-31,173.9 2762 | 2760,1979-01-31,235.9 2763 | 2761,1979-02-28,194.7 2764 | 2762,1979-03-31,195.3 2765 | 2763,1979-04-30,143.7 2766 | 2764,1979-05-31,190.3 2767 | 2765,1979-06-30,211.7 2768 | 2766,1979-07-31,225.7 2769 | 2767,1979-08-31,201.4 2770 | 2768,1979-09-30,266.9 2771 | 2769,1979-10-31,263.6 2772 | 2770,1979-11-30,259.5 2773 | 2771,1979-12-31,249.6 2774 | 2772,1980-01-31,226.1 2775 | 2773,1980-02-29,219.4 2776 | 2774,1980-03-31,178.7 2777 | 2775,1980-04-30,232.2 2778 | 2776,1980-05-31,254.7 2779 | 2777,1980-06-30,222.7 2780 | 2778,1980-07-31,192.9 2781 | 2779,1980-08-31,191.7 2782 | 2780,1980-09-30,219.6 2783 | 2781,1980-10-31,233.3 2784 | 2782,1980-11-30,209.5 2785 | 2783,1980-12-31,246.9 2786 | 2784,1981-01-31,156.6 2787 | 2785,1981-02-28,189.9 2788 | 2786,1981-03-31,196.6 2789 | 2787,1981-04-30,225.3 2790 | 2788,1981-05-31,194.7 2791 | 2789,1981-06-30,131.6 2792 | 2790,1981-07-31,205.3 2793 | 2791,1981-08-31,242.5 2794 | 2792,1981-09-30,245.3 2795 | 2793,1981-10-31,216.2 2796 | 2794,1981-11-30,186.0 2797 | 2795,1981-12-31,195.4 2798 | 2796,1982-01-31,149.8 2799 | 2797,1982-02-28,230.9 2800 | 2798,1982-03-31,221.1 2801 | 2799,1982-04-30,170.3 2802 | 2800,1982-05-31,119.3 2803 | 2801,1982-06-30,163.7 2804 | 2802,1982-07-31,139.4 2805 | 2803,1982-08-31,161.9 2806 | 2804,1982-09-30,167.4 2807 | 2805,1982-10-31,134.3 2808 | 2806,1982-11-30,127.5 2809 | 2807,1982-12-31,169.0 2810 | 2808,1983-01-31,115.5 2811 | 2809,1983-02-28,73.1 2812 | 2810,1983-03-31,88.7 2813 | 2811,1983-04-30,109.6 2814 | 2812,1983-05-31,132.5 2815 | 2813,1983-06-30,131.5 2816 | 2814,1983-07-31,108.9 2817 | 2815,1983-08-31,96.0 2818 | 2816,1983-09-30,69.9 2819 | 2817,1983-10-31,72.5 2820 | 2818,1983-11-30,45.7 2821 | 2819,1983-12-31,45.6 2822 | 2820,1984-01-31,74.8 2823 | 2821,1984-02-29,110.2 2824 | 2822,1984-03-31,116.7 2825 | 2823,1984-04-30,90.4 2826 | 2824,1984-05-31,96.9 2827 | 2825,1984-06-30,65.1 2828 | 2826,1984-07-31,55.7 2829 | 2827,1984-08-31,35.0 2830 | 2828,1984-09-30,22.6 2831 | 2829,1984-10-31,12.6 2832 | 2830,1984-11-30,26.5 2833 | 2831,1984-12-31,21.4 2834 | 2832,1985-01-31,17.8 2835 | 2833,1985-02-28,20.7 2836 | 2834,1985-03-31,16.9 2837 | 2835,1985-04-30,20.4 2838 | 2836,1985-05-31,32.4 2839 | 2837,1985-06-30,28.3 2840 | 2838,1985-07-31,39.9 2841 | 2839,1985-08-31,10.1 2842 | 2840,1985-09-30,4.3 2843 | 2841,1985-10-31,22.0 2844 | 2842,1985-11-30,17.9 2845 | 2843,1985-12-31,15.8 2846 | 2844,1986-01-31,2.8 2847 | 2845,1986-02-28,27.9 2848 | 2846,1986-03-31,13.8 2849 | 2847,1986-04-30,22.4 2850 | 2848,1986-05-31,16.1 2851 | 2849,1986-06-30,0.6 2852 | 2850,1986-07-31,18.1 2853 | 2851,1986-08-31,9.9 2854 | 2852,1986-09-30,5.1 2855 | 2853,1986-10-31,40.1 2856 | 2854,1986-11-30,15.4 2857 | 2855,1986-12-31,5.8 2858 | 2856,1987-01-31,9.8 2859 | 2857,1987-02-28,3.4 2860 | 2858,1987-03-31,17.4 2861 | 2859,1987-04-30,46.0 2862 | 2860,1987-05-31,39.1 2863 | 2861,1987-06-30,18.8 2864 | 2862,1987-07-31,38.2 2865 | 2863,1987-08-31,47.9 2866 | 2864,1987-09-30,42.2 2867 | 2865,1987-10-31,63.4 2868 | 2866,1987-11-30,48.8 2869 | 2867,1987-12-31,29.1 2870 | 2868,1988-01-31,70.5 2871 | 2869,1988-02-29,45.4 2872 | 2870,1988-03-31,91.2 2873 | 2871,1988-04-30,108.8 2874 | 2872,1988-05-31,74.2 2875 | 2873,1988-06-30,124.3 2876 | 2874,1988-07-31,131.4 2877 | 2875,1988-08-31,139.4 2878 | 2876,1988-09-30,142.7 2879 | 2877,1988-10-31,156.5 2880 | 2878,1988-11-30,156.8 2881 | 2879,1988-12-31,231.2 2882 | 2880,1989-01-31,210.1 2883 | 2881,1989-02-28,208.7 2884 | 2882,1989-03-31,170.4 2885 | 2883,1989-04-30,166.3 
2886 | 2884,1989-05-31,195.4 2887 | 2885,1989-06-30,284.5 2888 | 2886,1989-07-31,180.5 2889 | 2887,1989-08-31,232.0 2890 | 2888,1989-09-30,225.1 2891 | 2889,1989-10-31,212.2 2892 | 2890,1989-11-30,238.2 2893 | 2891,1989-12-31,211.4 2894 | 2892,1990-01-31,227.4 2895 | 2893,1990-02-28,171.8 2896 | 2894,1990-03-31,191.7 2897 | 2895,1990-04-30,189.7 2898 | 2896,1990-05-31,175.2 2899 | 2897,1990-06-30,153.3 2900 | 2898,1990-07-31,191.1 2901 | 2899,1990-08-31,252.1 2902 | 2900,1990-09-30,169.1 2903 | 2901,1990-10-31,199.4 2904 | 2902,1990-11-30,178.8 2905 | 2903,1990-12-31,197.1 2906 | 2904,1991-01-31,195.3 2907 | 2905,1991-02-28,240.3 2908 | 2906,1991-03-31,197.0 2909 | 2907,1991-04-30,197.6 2910 | 2908,1991-05-31,166.9 2911 | 2909,1991-06-30,224.7 2912 | 2910,1991-07-31,240.2 2913 | 2911,1991-08-31,240.8 2914 | 2912,1991-09-30,168.9 2915 | 2913,1991-10-31,197.1 2916 | 2914,1991-11-30,159.5 2917 | 2915,1991-12-31,212.6 2918 | 2916,1992-01-31,198.3 2919 | 2917,1992-02-29,230.7 2920 | 2918,1992-03-31,151.0 2921 | 2919,1992-04-30,142.2 2922 | 2920,1992-05-31,94.3 2923 | 2921,1992-06-30,98.5 2924 | 2922,1992-07-31,114.2 2925 | 2923,1992-08-31,91.9 2926 | 2924,1992-09-30,94.0 2927 | 2925,1992-10-31,133.4 2928 | 2926,1992-11-30,129.6 2929 | 2927,1992-12-31,122.0 2930 | 2928,1993-01-31,81.4 2931 | 2929,1993-02-28,127.8 2932 | 2930,1993-03-31,102.4 2933 | 2931,1993-04-30,94.4 2934 | 2932,1993-05-31,78.8 2935 | 2933,1993-06-30,69.6 2936 | 2934,1993-07-31,80.4 2937 | 2935,1993-08-31,62.5 2938 | 2936,1993-09-30,31.2 2939 | 2937,1993-10-31,71.1 2940 | 2938,1993-11-30,48.2 2941 | 2939,1993-12-31,68.4 2942 | 2940,1994-01-31,84.9 2943 | 2941,1994-02-28,54.9 2944 | 2942,1994-03-31,47.5 2945 | 2943,1994-04-30,27.4 2946 | 2944,1994-05-31,29.8 2947 | 2945,1994-06-30,39.7 2948 | 2946,1994-07-31,50.6 2949 | 2947,1994-08-31,34.3 2950 | 2948,1994-09-30,40.5 2951 | 2949,1994-10-31,67.1 2952 | 2950,1994-11-30,29.5 2953 | 2951,1994-12-31,32.2 2954 | 2952,1995-01-31,32.6 2955 | 2953,1995-02-28,45.8 2956 | 2954,1995-03-31,46.3 2957 | 2955,1995-04-30,21.6 2958 | 2956,1995-05-31,19.4 2959 | 2957,1995-06-30,22.5 2960 | 2958,1995-07-31,20.4 2961 | 2959,1995-08-31,18.2 2962 | 2960,1995-09-30,15.7 2963 | 2961,1995-10-31,30.6 2964 | 2962,1995-11-30,14.0 2965 | 2963,1995-12-31,14.9 2966 | 2964,1996-01-31,13.3 2967 | 2965,1996-02-29,7.7 2968 | 2966,1996-03-31,12.6 2969 | 2967,1996-04-30,6.8 2970 | 2968,1996-05-31,7.6 2971 | 2969,1996-06-30,16.5 2972 | 2970,1996-07-31,11.8 2973 | 2971,1996-08-31,19.7 2974 | 2972,1996-09-30,3.0 2975 | 2973,1996-10-31,0.7 2976 | 2974,1996-11-30,24.9 2977 | 2975,1996-12-31,14.0 2978 | 2976,1997-01-31,7.4 2979 | 2977,1997-02-28,11.0 2980 | 2978,1997-03-31,12.1 2981 | 2979,1997-04-30,23.0 2982 | 2980,1997-05-31,25.4 2983 | 2981,1997-06-30,20.8 2984 | 2982,1997-07-31,12.9 2985 | 2983,1997-08-31,35.7 2986 | 2984,1997-09-30,59.7 2987 | 2985,1997-10-31,32.8 2988 | 2986,1997-11-30,50.4 2989 | 2987,1997-12-31,55.5 2990 | 2988,1998-01-31,44.5 2991 | 2989,1998-02-28,50.2 2992 | 2990,1998-03-31,82.0 2993 | 2991,1998-04-30,70.6 2994 | 2992,1998-05-31,74.0 2995 | 2993,1998-06-30,90.5 2996 | 2994,1998-07-31,96.7 2997 | 2995,1998-08-31,121.1 2998 | 2996,1998-09-30,132.0 2999 | 2997,1998-10-31,78.5 3000 | 2998,1998-11-30,97.3 3001 | 2999,1998-12-31,119.2 3002 | 3000,1999-01-31,86.0 3003 | 3001,1999-02-28,98.0 3004 | 3002,1999-03-31,103.5 3005 | 3003,1999-04-30,93.6 3006 | 3004,1999-05-31,149.6 3007 | 3005,1999-06-30,207.2 3008 | 3006,1999-07-31,173.5 3009 | 3007,1999-08-31,142.3 3010 | 3008,1999-09-30,106.3 3011 | 
3009,1999-10-31,168.7 3012 | 3010,1999-11-30,188.3 3013 | 3011,1999-12-31,116.8 3014 | 3012,2000-01-31,133.1 3015 | 3013,2000-02-29,165.7 3016 | 3014,2000-03-31,217.7 3017 | 3015,2000-04-30,191.5 3018 | 3016,2000-05-31,165.9 3019 | 3017,2000-06-30,188.0 3020 | 3018,2000-07-31,244.3 3021 | 3019,2000-08-31,180.5 3022 | 3020,2000-09-30,156.0 3023 | 3021,2000-10-31,141.6 3024 | 3022,2000-11-30,158.1 3025 | 3023,2000-12-31,143.3 3026 | 3024,2001-01-31,142.6 3027 | 3025,2001-02-28,121.5 3028 | 3026,2001-03-31,165.8 3029 | 3027,2001-04-30,161.7 3030 | 3028,2001-05-31,142.1 3031 | 3029,2001-06-30,202.9 3032 | 3030,2001-07-31,123.0 3033 | 3031,2001-08-31,161.5 3034 | 3032,2001-09-30,238.2 3035 | 3033,2001-10-31,194.1 3036 | 3034,2001-11-30,176.6 3037 | 3035,2001-12-31,213.4 3038 | 3036,2002-01-31,184.6 3039 | 3037,2002-02-28,170.2 3040 | 3038,2002-03-31,147.1 3041 | 3039,2002-04-30,186.9 3042 | 3040,2002-05-31,187.5 3043 | 3041,2002-06-30,128.8 3044 | 3042,2002-07-31,161.0 3045 | 3043,2002-08-31,175.6 3046 | 3044,2002-09-30,187.9 3047 | 3045,2002-10-31,151.2 3048 | 3046,2002-11-30,147.2 3049 | 3047,2002-12-31,135.3 3050 | 3048,2003-01-31,133.5 3051 | 3049,2003-02-28,75.7 3052 | 3050,2003-03-31,100.7 3053 | 3051,2003-04-30,97.9 3054 | 3052,2003-05-31,86.8 3055 | 3053,2003-06-30,118.7 3056 | 3054,2003-07-31,128.3 3057 | 3055,2003-08-31,115.4 3058 | 3056,2003-09-30,78.5 3059 | 3057,2003-10-31,97.8 3060 | 3058,2003-11-30,82.9 3061 | 3059,2003-12-31,72.2 3062 | 3060,2004-01-31,60.6 3063 | 3061,2004-02-29,74.6 3064 | 3062,2004-03-31,74.8 3065 | 3063,2004-04-30,59.2 3066 | 3064,2004-05-31,72.8 3067 | 3065,2004-06-30,66.5 3068 | 3066,2004-07-31,83.8 3069 | 3067,2004-08-31,69.7 3070 | 3068,2004-09-30,48.8 3071 | 3069,2004-10-31,74.2 3072 | 3070,2004-11-30,70.1 3073 | 3071,2004-12-31,28.9 3074 | 3072,2005-01-31,48.1 3075 | 3073,2005-02-28,43.5 3076 | 3074,2005-03-31,39.6 3077 | 3075,2005-04-30,38.7 3078 | 3076,2005-05-31,61.9 3079 | 3077,2005-06-30,56.8 3080 | 3078,2005-07-31,62.4 3081 | 3079,2005-08-31,60.5 3082 | 3080,2005-09-30,37.2 3083 | 3081,2005-10-31,13.2 3084 | 3082,2005-11-30,27.5 3085 | 3083,2005-12-31,59.3 3086 | 3084,2006-01-31,20.9 3087 | 3085,2006-02-28,5.7 3088 | 3086,2006-03-31,17.3 3089 | 3087,2006-04-30,50.3 3090 | 3088,2006-05-31,37.2 3091 | 3089,2006-06-30,24.5 3092 | 3090,2006-07-31,22.2 3093 | 3091,2006-08-31,20.8 3094 | 3092,2006-09-30,23.7 3095 | 3093,2006-10-31,14.9 3096 | 3094,2006-11-30,35.7 3097 | 3095,2006-12-31,22.3 3098 | 3096,2007-01-31,29.3 3099 | 3097,2007-02-28,18.4 3100 | 3098,2007-03-31,7.2 3101 | 3099,2007-04-30,5.4 3102 | 3100,2007-05-31,19.5 3103 | 3101,2007-06-30,21.3 3104 | 3102,2007-07-31,15.1 3105 | 3103,2007-08-31,9.8 3106 | 3104,2007-09-30,4.0 3107 | 3105,2007-10-31,1.5 3108 | 3106,2007-11-30,2.8 3109 | 3107,2007-12-31,17.3 3110 | 3108,2008-01-31,4.1 3111 | 3109,2008-02-29,2.9 3112 | 3110,2008-03-31,15.5 3113 | 3111,2008-04-30,3.6 3114 | 3112,2008-05-31,4.6 3115 | 3113,2008-06-30,5.2 3116 | 3114,2008-07-31,0.6 3117 | 3115,2008-08-31,0.3 3118 | 3116,2008-09-30,1.2 3119 | 3117,2008-10-31,4.2 3120 | 3118,2008-11-30,6.6 3121 | 3119,2008-12-31,1.0 3122 | 3120,2009-01-31,1.3 3123 | 3121,2009-02-28,1.2 3124 | 3122,2009-03-31,0.6 3125 | 3123,2009-04-30,1.2 3126 | 3124,2009-05-31,2.9 3127 | 3125,2009-06-30,6.3 3128 | 3126,2009-07-31,5.5 3129 | 3127,2009-08-31,0.0 3130 | 3128,2009-09-30,7.1 3131 | 3129,2009-10-31,7.7 3132 | 3130,2009-11-30,6.9 3133 | 3131,2009-12-31,16.3 3134 | 3132,2010-01-31,19.5 3135 | 3133,2010-02-28,28.5 3136 | 3134,2010-03-31,24.0 3137 | 
3135,2010-04-30,10.4 3138 | 3136,2010-05-31,13.9 3139 | 3137,2010-06-30,18.8 3140 | 3138,2010-07-31,25.2 3141 | 3139,2010-08-31,29.6 3142 | 3140,2010-09-30,36.4 3143 | 3141,2010-10-31,33.6 3144 | 3142,2010-11-30,34.4 3145 | 3143,2010-12-31,24.5 3146 | 3144,2011-01-31,27.3 3147 | 3145,2011-02-28,48.3 3148 | 3146,2011-03-31,78.6 3149 | 3147,2011-04-30,76.1 3150 | 3148,2011-05-31,58.2 3151 | 3149,2011-06-30,56.1 3152 | 3150,2011-07-31,64.5 3153 | 3151,2011-08-31,65.8 3154 | 3152,2011-09-30,120.1 3155 | 3153,2011-10-31,125.7 3156 | 3154,2011-11-30,139.1 3157 | 3155,2011-12-31,109.3 3158 | 3156,2012-01-31,94.4 3159 | 3157,2012-02-29,47.8 3160 | 3158,2012-03-31,86.6 3161 | 3159,2012-04-30,85.9 3162 | 3160,2012-05-31,96.5 3163 | 3161,2012-06-30,92.0 3164 | 3162,2012-07-31,100.1 3165 | 3163,2012-08-31,94.8 3166 | 3164,2012-09-30,93.7 3167 | 3165,2012-10-31,76.5 3168 | 3166,2012-11-30,87.6 3169 | 3167,2012-12-31,56.8 3170 | 3168,2013-01-31,96.1 3171 | 3169,2013-02-28,60.9 3172 | 3170,2013-03-31,78.3 3173 | 3171,2013-04-30,107.3 3174 | 3172,2013-05-31,120.2 3175 | 3173,2013-06-30,76.7 3176 | 3174,2013-07-31,86.2 3177 | 3175,2013-08-31,91.8 3178 | 3176,2013-09-30,54.5 3179 | 3177,2013-10-31,114.4 3180 | 3178,2013-11-30,113.9 3181 | 3179,2013-12-31,124.2 3182 | 3180,2014-01-31,117.0 3183 | 3181,2014-02-28,146.1 3184 | 3182,2014-03-31,128.7 3185 | 3183,2014-04-30,112.5 3186 | 3184,2014-05-31,112.5 3187 | 3185,2014-06-30,102.9 3188 | 3186,2014-07-31,100.2 3189 | 3187,2014-08-31,106.9 3190 | 3188,2014-09-30,130.0 3191 | 3189,2014-10-31,90.0 3192 | 3190,2014-11-30,103.6 3193 | 3191,2014-12-31,112.9 3194 | 3192,2015-01-31,93.0 3195 | 3193,2015-02-28,66.7 3196 | 3194,2015-03-31,54.5 3197 | 3195,2015-04-30,75.3 3198 | 3196,2015-05-31,88.8 3199 | 3197,2015-06-30,66.5 3200 | 3198,2015-07-31,65.8 3201 | 3199,2015-08-31,64.4 3202 | 3200,2015-09-30,78.6 3203 | 3201,2015-10-31,63.6 3204 | 3202,2015-11-30,62.2 3205 | 3203,2015-12-31,58.0 3206 | 3204,2016-01-31,57.0 3207 | 3205,2016-02-29,56.4 3208 | 3206,2016-03-31,54.1 3209 | 3207,2016-04-30,37.9 3210 | 3208,2016-05-31,51.5 3211 | 3209,2016-06-30,20.5 3212 | 3210,2016-07-31,32.4 3213 | 3211,2016-08-31,50.2 3214 | 3212,2016-09-30,44.6 3215 | 3213,2016-10-31,33.4 3216 | 3214,2016-11-30,21.4 3217 | 3215,2016-12-31,18.5 3218 | 3216,2017-01-31,26.1 3219 | 3217,2017-02-28,26.4 3220 | 3218,2017-03-31,17.7 3221 | 3219,2017-04-30,32.3 3222 | 3220,2017-05-31,18.9 3223 | 3221,2017-06-30,19.2 3224 | 3222,2017-07-31,17.8 3225 | 3223,2017-08-31,32.6 3226 | 3224,2017-09-30,43.7 3227 | 3225,2017-10-31,13.2 3228 | 3226,2017-11-30,5.7 3229 | 3227,2017-12-31,8.2 3230 | 3228,2018-01-31,6.8 3231 | 3229,2018-02-28,10.7 3232 | 3230,2018-03-31,2.5 3233 | 3231,2018-04-30,8.9 3234 | 3232,2018-05-31,13.1 3235 | 3233,2018-06-30,15.6 3236 | 3234,2018-07-31,1.6 3237 | 3235,2018-08-31,8.7 3238 | 3236,2018-09-30,3.3 3239 | 3237,2018-10-31,4.9 3240 | 3238,2018-11-30,4.9 3241 | 3239,2018-12-31,3.1 3242 | 3240,2019-01-31,7.7 3243 | 3241,2019-02-28,0.8 3244 | 3242,2019-03-31,9.4 3245 | 3243,2019-04-30,9.1 3246 | 3244,2019-05-31,9.9 3247 | 3245,2019-06-30,1.2 3248 | 3246,2019-07-31,0.9 3249 | 3247,2019-08-31,0.5 3250 | 3248,2019-09-30,1.1 3251 | 3249,2019-10-31,0.4 3252 | 3250,2019-11-30,0.5 3253 | 3251,2019-12-31,1.5 3254 | 3252,2020-01-31,6.2 3255 | 3253,2020-02-29,0.2 3256 | 3254,2020-03-31,1.5 3257 | 3255,2020-04-30,5.2 3258 | 3256,2020-05-31,0.2 3259 | 3257,2020-06-30,5.8 3260 | 3258,2020-07-31,6.1 3261 | 3259,2020-08-31,7.5 3262 | 3260,2020-09-30,0.6 3263 | 3261,2020-10-31,14.4 3264 | 
3262,2020-11-30,34.0 3265 | 3263,2020-12-31,21.8 3266 | 3264,2021-01-31,10.4 3267 | -------------------------------------------------------------------------------- /timeseries_attention/sunspots_example.py: -------------------------------------------------------------------------------- 1 | # Imports 2 | import torch 3 | import numpy as np 4 | import pandas as pd 5 | import matplotlib.pyplot as plt 6 | from forecasting_model import ForecastingModel 7 | from torch.utils.data import TensorDataset, DataLoader 8 | from sklearn.metrics import (mean_squared_error, 9 | mean_absolute_error) 10 | 11 | # Create a dataset 12 | seq_len = 200 13 | data = list(pd.read_csv("sunspots.csv")["Monthly Mean Total Sunspot Number"])[1000:] 14 | x = np.array(data[:2000]) 15 | forecast = np.array(data[2000:]) 16 | X = np.array([x[ii:ii+seq_len] for ii in range(0, x.shape[0]-seq_len)]) 17 | Y = np.array([x[ii+seq_len] for ii in range(0, x.shape[0]-seq_len)]) 18 | 19 | 20 | # Training Loop 21 | device = "cpu" 22 | EPOCHS = 100 23 | BATCH_SIZE = 16 24 | LEARNING_RATE = 4.12e-6 25 | model = ForecastingModel(seq_len, device=device).to(device) 26 | model.train() 27 | criterion = torch.nn.MSELoss() 28 | optimizer = torch.optim.Adam(model.parameters(), lr=LEARNING_RATE) 29 | dataset = TensorDataset(torch.Tensor(X).to(device), torch.Tensor(Y).to(device)) 30 | dataloader = DataLoader(dataset, batch_size=BATCH_SIZE) 31 | for epoch in range(EPOCHS): 32 | for xx, yy in dataloader: 33 | optimizer.zero_grad() 34 | out = model(xx) 35 | loss = criterion(out, yy) 36 | loss.backward() 37 | optimizer.step() 38 | print(f"Epoch {epoch+1}/{EPOCHS}: Loss={loss}") 39 | 40 | 41 | # Prediction Loop 42 | model.eval() 43 | for ff in range(len(forecast)): 44 | xx = x[len(x)-seq_len:len(x)] 45 | yy = model(torch.Tensor(xx).reshape(1, xx.shape[0]).to(device)) 46 | x = np.concatenate((x, yy.detach().cpu().numpy().reshape(1,))) 47 | 48 | 49 | # Plot Predictions 50 | import matplotlib.pyplot as plt 51 | fig = plt.figure(figsize=(6, 6)) 52 | plt.plot(range(2000), data[:2000], label="Training") 53 | plt.plot(range(2000, len(data)), forecast, 'g-', label="Actual") 54 | plt.plot(range(2000, len(data)), x[2000:], 'r--', label="Predicted") 55 | plt.legend() 56 | fig.savefig("./img/sunspots_example.png") 57 | 58 | 59 | # Export Metrics 60 | print(f"MSE: {mean_squared_error(forecast, x[2000:])}") 61 | print(f"MAE: {mean_absolute_error(forecast, x[2000:])}") -------------------------------------------------------------------------------- /timeseries_transformer/forecasting_model.py: -------------------------------------------------------------------------------- 1 | # Imports 2 | import torch, math 3 | import numpy as np 4 | from torch import nn, Tensor 5 | import torch.nn.functional as F 6 | from torch.nn.modules.transformer import TransformerEncoderLayer 7 | 8 | # Positional Encoding - https://pytorch.org/tutorials/beginner/transformer_tutorial.html 9 | class PositionalEncoding(nn.Module): 10 | def __init__(self, d_model: int, dropout: float = 0.1, max_len: int = 5000): 11 | super().__init__() 12 | self.dropout = nn.Dropout(p=dropout) 13 | position = torch.arange(max_len).unsqueeze(1) 14 | div_term = torch.exp(torch.arange(0, d_model, 2) * (-math.log(10000.0) / d_model)) 15 | pe = torch.zeros(max_len, 1, d_model) 16 | pe[:, 0, 0::2] = torch.sin(position * div_term) 17 | pe[:, 0, 1::2] = torch.cos(position * div_term) 18 | self.register_buffer('pe', pe) 19 | def forward(self, x: Tensor) -> Tensor: 20 | x = x + self.pe[:x.size(1)].transpose(0, 1) # index by sequence position: inputs here are batch-first (batch, seq_len, d_model) 21 | return self.dropout(x) 22
| 23 | # A forecasting model 24 | class ForecastingModel(torch.nn.Module): 25 | def __init__(self, 26 | seq_len=200, 27 | embed_size = 16, 28 | nhead = 4, 29 | dim_feedforward = 2048, 30 | dropout = 0.1, 31 | conv1d_emb = True, 32 | conv1d_kernel_size = 3, 33 | device = "cuda"): 34 | super(ForecastingModel, self).__init__() 35 | 36 | # Set Class-level Parameters 37 | self.device = device 38 | self.conv1d_emb = conv1d_emb 39 | self.conv1d_kernel_size = conv1d_kernel_size 40 | self.seq_len = seq_len 41 | self.embed_size = embed_size 42 | 43 | # Input Embedding Component 44 | if conv1d_emb: 45 | if conv1d_kernel_size%2==0: 46 | raise Exception("conv1d_kernel_size must be an odd number to preserve dimensions.") 47 | self.conv1d_padding = conv1d_kernel_size - 1 48 | self.input_embedding = nn.Conv1d(1, embed_size, kernel_size=conv1d_kernel_size) 49 | else: self.input_embedding = nn.Linear(1, embed_size) 50 | 51 | # Positional Encoder Component (See Code Copied from PyTorch Above) 52 | self.position_encoder = PositionalEncoding(d_model=embed_size, 53 | dropout=dropout, 54 | max_len=seq_len) 55 | 56 | # Transformer Encoder Layer Component 57 | self.transformer_encoder = TransformerEncoderLayer( 58 | d_model = embed_size, 59 | nhead = nhead, 60 | dim_feedforward = dim_feedforward, 61 | dropout = dropout, 62 | batch_first = True 63 | ) 64 | 65 | # Regression Component 66 | self.linear1 = nn.Linear(seq_len*embed_size, int(dim_feedforward)) 67 | self.linear2 = nn.Linear(int(dim_feedforward), int(dim_feedforward/2)) 68 | self.linear3 = nn.Linear(int(dim_feedforward/2), int(dim_feedforward/4)) 69 | self.linear4 = nn.Linear(int(dim_feedforward/4), int(dim_feedforward/16)) 70 | self.linear5 = nn.Linear(int(dim_feedforward/16), int(dim_feedforward/64)) 71 | self.outlayer = nn.Linear(int(dim_feedforward/64), 1) 72 | 73 | # Basic Components 74 | self.relu = nn.ReLU() 75 | self.dropout = nn.Dropout(dropout) 76 | 77 | # Model Forward Pass 78 | def forward(self, x): 79 | src_mask = self._generate_square_subsequent_mask() 80 | src_mask = src_mask.to(self.device) 81 | if self.conv1d_emb: 82 | x = F.pad(x, (0, 0, self.conv1d_padding, 0), "constant", -1) # left-pad the sequence so the Conv1d embedding output keeps length seq_len 83 | x = self.input_embedding(x.transpose(1, 2)) 84 | x = x.transpose(1, 2) 85 | else: 86 | x = self.input_embedding(x) 87 | x = self.position_encoder(x) 88 | x = self.transformer_encoder(x, src_mask=src_mask).reshape((-1, self.seq_len*self.embed_size)) 89 | x = self.linear1(x) 90 | x = self.relu(x) 91 | x = self.dropout(x) 92 | x = self.linear2(x) 93 | x = self.relu(x) 94 | x = self.dropout(x) 95 | x = self.linear3(x) 96 | x = self.relu(x) 97 | x = self.dropout(x) 98 | x = self.linear4(x) 99 | x = self.relu(x) 100 | x = self.dropout(x) 101 | x = self.linear5(x) 102 | x = self.relu(x) 103 | return self.outlayer(x) 104 | 105 | # Function Copied from PyTorch Library to create upper-triangular source mask 106 | def _generate_square_subsequent_mask(self): 107 | return torch.triu( 108 | torch.full((self.seq_len, self.seq_len), float('-inf'), dtype=torch.float32, device=self.device), 109 | diagonal=1, 110 | ) 111 | -------------------------------------------------------------------------------- /timeseries_transformer/img/sine_example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BrandenKeck/pytorch_fun/8d26c3be4f358083f2aa8487ec656e4cbbeca218/timeseries_transformer/img/sine_example.png --------------------------------------------------------------------------------
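Editor's note (not part of the repository): a minimal smoke test for the ForecastingModel defined in timeseries_transformer/forecasting_model.py above. It assumes the script is run from inside the timeseries_transformer/ directory, uses illustrative hyperparameter values, and simply checks that a (batch, seq_len, 1) window comes back as a (batch, 1) one-step prediction.

# Editorial sketch: shape check for ForecastingModel (illustrative values, CPU only)
import torch
from forecasting_model import ForecastingModel

seq_len = 32                              # short window keeps the test fast
model = ForecastingModel(seq_len=seq_len,
                         embed_size=16,   # must be divisible by nhead
                         nhead=4,
                         dim_feedforward=256,
                         dropout=0.1,
                         conv1d_emb=True,
                         conv1d_kernel_size=3,
                         device="cpu")
model.eval()
with torch.no_grad():
    x = torch.randn(8, seq_len, 1)        # (batch, seq_len, 1) input window
    y = model(x)
print(y.shape)                            # expected: torch.Size([8, 1])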
/timeseries_transformer/img/sine_extended.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BrandenKeck/pytorch_fun/8d26c3be4f358083f2aa8487ec656e4cbbeca218/timeseries_transformer/img/sine_extended.png -------------------------------------------------------------------------------- /timeseries_transformer/img/sunspots_example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BrandenKeck/pytorch_fun/8d26c3be4f358083f2aa8487ec656e4cbbeca218/timeseries_transformer/img/sunspots_example.png -------------------------------------------------------------------------------- /timeseries_transformer/sine_example.py: -------------------------------------------------------------------------------- 1 | # Imports 2 | import torch 3 | import numpy as np 4 | import matplotlib.pyplot as plt 5 | from forecasting_model import ForecastingModel 6 | from torch.optim.lr_scheduler import ExponentialLR 7 | from torch.utils.data import TensorDataset, DataLoader 8 | 9 | # Get a noisy sin wave 10 | DATA_SIZE = 1000 11 | x = np.sin(np.linspace(0, 10, DATA_SIZE)) 12 | x = x + np.random.normal(0, 0.05, DATA_SIZE) 13 | 14 | 15 | # Create a dataset 16 | seq_len = 200 17 | X = np.array([x[ii:ii+seq_len] for ii in range(0, x.shape[0]-seq_len)]).reshape((-1, seq_len, 1)) 18 | Y = np.array([x[ii+seq_len] for ii in range(0, x.shape[0]-seq_len)]).reshape((-1, 1)) 19 | 20 | 21 | # Training Loop 22 | device="cuda" 23 | EPOCHS = 30 24 | BATCH_SIZE = 1 25 | LEARNING_RATE = 2.2e-6 26 | model = ForecastingModel(seq_len, embed_size=8, nhead=2, dim_feedforward=1024, dropout=0, device=device) 27 | model.to(device) 28 | model.train() 29 | criterion = torch.nn.HuberLoss() 30 | optimizer = torch.optim.AdamW(model.parameters(), lr=LEARNING_RATE) 31 | scheduler = ExponentialLR(optimizer, gamma=0.9) 32 | dataset = TensorDataset(torch.Tensor(X).to(device), torch.Tensor(Y).to(device)) 33 | dataloader = DataLoader(dataset, batch_size=BATCH_SIZE) 34 | for epoch in range(EPOCHS): 35 | for xx, yy in dataloader: 36 | optimizer.zero_grad() 37 | out = model(xx) 38 | loss = criterion(out, yy) 39 | loss.backward() 40 | optimizer.step() 41 | scheduler.step() 42 | print(f"Epoch {epoch+1}/{EPOCHS}: Loss={loss}") 43 | 44 | 45 | # Prediction Loop 46 | FORCAST = 1000 47 | model.eval() 48 | for ff in range(FORCAST): 49 | xx = x[len(x)-seq_len:len(x)] 50 | yy = model(torch.Tensor(xx).reshape(1, xx.shape[0], 1).to(device)) 51 | x = np.concatenate((x, yy.detach().cpu().numpy().reshape(1,))) 52 | 53 | 54 | # Plot Predictions 55 | import matplotlib.pyplot as plt 56 | fig = plt.figure(figsize=(6, 6)) 57 | plt.plot(range(x[:DATA_SIZE].shape[0]), x[:DATA_SIZE], label="Training") 58 | plt.plot(range(x[:DATA_SIZE].shape[0], x.shape[0]), x[DATA_SIZE:DATA_SIZE+FORCAST], 'r--', label="Predicted") 59 | plt.plot(range(x[:DATA_SIZE].shape[0], x.shape[0]), np.sin(np.linspace(10, 20, DATA_SIZE)), 'g-', label="Actual") 60 | plt.legend() 61 | fig.savefig("./img/sine_example.png") 62 | 63 | 64 | # Prediction Loop 65 | FORCAST_EXTENDED = 3000 66 | model.eval() 67 | for ff in range(FORCAST_EXTENDED): 68 | xx = x[len(x)-seq_len:len(x)] 69 | yy = model(torch.Tensor(xx).reshape(1, xx.shape[0], 1).to(device)) 70 | x = np.concatenate((x, yy.detach().cpu().numpy().reshape(1,))) 71 | 72 | 73 | # Plot Predictions 74 | import matplotlib.pyplot as plt 75 | fig = plt.figure(figsize=(12, 6)) 76 | plt.plot(range(x[:DATA_SIZE].shape[0]), x[:DATA_SIZE], 
label="Training") 77 | plt.plot(range(x[:DATA_SIZE].shape[0], x.shape[0]), x[DATA_SIZE:DATA_SIZE+FORCAST+FORCAST_EXTENDED], 'r--', label="Predicted") 78 | plt.plot(range(x[:DATA_SIZE].shape[0], x.shape[0]), np.sin(np.linspace(10, 50, FORCAST+FORCAST_EXTENDED)), 'g-', label="Actual") 79 | plt.legend() 80 | fig.savefig("./img/sine_extended.png") -------------------------------------------------------------------------------- /timeseries_transformer/sunspots_example.py: -------------------------------------------------------------------------------- 1 | # Imports 2 | import torch 3 | import numpy as np 4 | import pandas as pd 5 | import matplotlib.pyplot as plt 6 | from forecasting_model import ForecastingModel 7 | from torch.optim.lr_scheduler import ExponentialLR 8 | from torch.utils.data import TensorDataset, DataLoader 9 | from sklearn.metrics import (mean_squared_error, 10 | mean_absolute_error) 11 | 12 | 13 | # Create a dataset 14 | seq_len = 200 15 | data = list(pd.read_csv("sunspots.csv")["Monthly Mean Total Sunspot Number"])[1000:] 16 | x = np.array(data[:2000]) 17 | forecast = np.array(data[2000:]) 18 | X = np.array([x[ii:ii+seq_len] for ii in range(0, x.shape[0]-seq_len)]).reshape((-1, seq_len, 1)) 19 | Y = np.array([x[ii+seq_len] for ii in range(0, x.shape[0]-seq_len)]).reshape((-1, 1)) 20 | 21 | 22 | # Training Loop 23 | device = "cuda:0" 24 | EPOCHS = 30 25 | BATCH_SIZE = 1 26 | LEARNING_RATE = 6.6E-6 27 | model = ForecastingModel(seq_len, embed_size=8, nhead=2, 28 | dim_feedforward=1024, dropout=0, 29 | conv1d_emb=False, conv1d_kernel_size=5, device=device).to(device) 30 | model.train() 31 | criterion = torch.nn.HuberLoss() 32 | optimizer = torch.optim.AdamW(model.parameters(), lr=LEARNING_RATE) 33 | scheduler = ExponentialLR(optimizer, gamma=0.9) 34 | dataset = TensorDataset(torch.Tensor(X).to(device), torch.Tensor(Y).to(device)) 35 | dataloader = DataLoader(dataset, batch_size=BATCH_SIZE) 36 | for epoch in range(EPOCHS): 37 | for xx, yy in dataloader: 38 | optimizer.zero_grad() 39 | out = model(xx) 40 | loss = criterion(out, yy) 41 | loss.backward() 42 | optimizer.step() 43 | scheduler.step() 44 | print(f"Epoch {epoch+1}/{EPOCHS}: Loss={loss}") 45 | 46 | 47 | # Prediction Loop 48 | model.eval() 49 | for ff in range(len(forecast)): 50 | xx = x[len(x)-seq_len:len(x)] 51 | yy = model(torch.Tensor(xx).reshape((1, seq_len, 1)).to(device)) 52 | x = np.concatenate((x, yy.detach().cpu().numpy().reshape(1,))) 53 | 54 | 55 | # Plot Predictions 56 | import matplotlib.pyplot as plt 57 | fig = plt.figure(figsize=(6, 6)) 58 | plt.plot(range(2000), data[:2000], label="Training") 59 | plt.plot(range(2000, len(data)), forecast, 'g-', label="Actual") 60 | plt.plot(range(2000, len(data)), x[2000:], 'r--', label="Predicted") 61 | plt.legend() 62 | fig.savefig("./img/sunspots_example.png") 63 | 64 | 65 | # Export Metrics 66 | print(f"MSE: {mean_squared_error(forecast, x[2000:])}") 67 | print(f"MAE: {mean_absolute_error(forecast, x[2000:])}") 68 | --------------------------------------------------------------------------------
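Editor's note (not part of the repository): the sine and sunspots scripts above all use the same autoregressive prediction pattern, repeatedly feeding the model the last seq_len values (including its own earlier predictions) to produce the next step. A minimal sketch of that loop factored into a helper follows; the function name and its arguments are illustrative, and it mirrors the timeseries_transformer scripts, which expect a (batch, seq_len, 1) input window.

# Editorial sketch: rolling one-step-ahead forecast, as used in the scripts above
import numpy as np
import torch

def rolling_forecast(model, history, steps, seq_len, device="cpu"):
    """Append `steps` one-step predictions to `history`, feeding each
    prediction back in as part of the next input window."""
    x = np.asarray(history, dtype=np.float32)
    model.eval()
    with torch.no_grad():
        for _ in range(steps):
            window = torch.tensor(x[-seq_len:], dtype=torch.float32)
            window = window.reshape(1, seq_len, 1).to(device)   # (batch, seq_len, 1)
            pred = model(window).detach().cpu().numpy().reshape(1,)
            x = np.concatenate((x, pred))
    return x[len(history):]                                     # only the new predictions

# Usage mirroring sunspots_example.py (assumes model, x, and forecast already exist):
# preds = rolling_forecast(model, x, steps=len(forecast), seq_len=200, device="cuda:0")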