├── .gitignore
├── README.md
├── policy_gradient
│   ├── Multi_armed_bandit.py
│   └── Multi_armed_bandit2.py
├── elman_rnn
│   └── Elman_RNN.py
├── vanilla_rnn
│   └── Vanilla_RNN.py
└── timeseries
    ├── LSTM.py
    └── monthly-lake-erie-levels-1921-19.csv

/.gitignore:
--------------------------------------------------------------------------------
.ipynb_checkpoints
.idea
*/.ipynb_checkpoints/*

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# pytorch_tutorials

- elman_rnn
  Implementation of an Elman recurrent neural network
  https://www.cpuheater.com/deep-learning/introduction-to-recurrent-neural-networks-in-pytorch/
- policy_gradient
  Multi-armed bandits solved with policy gradients
- timeseries
  LSTM & GRU time-series predictions
- vanilla_rnn
  Using an RNN to learn the sin function

Work in progress!

--------------------------------------------------------------------------------
/policy_gradient/Multi_armed_bandit.py:
--------------------------------------------------------------------------------
import numpy as np
import torch


torch.manual_seed(1)
np.random.seed(1)

def pull_arm(bandit):
    # Reward is +1 when a standard-normal draw exceeds the arm's threshold,
    # so arms with lower thresholds pay off more often.
    result = np.random.randn(1)
    if result > bandit:
        return 1
    else:
        return -1

bandits = [0.1, 1, -0.4, -5]
num_bandits = len(bandits)

# One trainable weight per arm; the greedy policy picks the largest weight.
w = torch.ones(num_bandits, requires_grad=True, dtype=torch.float)
rewards = np.zeros(num_bandits)

lr = 0.001

for t in range(1000):
    # Epsilon-greedy exploration: pick a random arm 10% of the time.
    if np.random.rand(1) < 0.1:
        action = np.random.randint(0, num_bandits)
    else:
        action = torch.argmax(w).item()
    reward = pull_arm(bandits[action])
    # Policy-gradient style loss: raise the weight of actions that paid off.
    loss = -torch.log(w[action]) * reward
    rewards[action] += reward
    loss.backward()
    with torch.no_grad():
        w -= lr * w.grad
        w.grad.zero_()

print(rewards)

--------------------------------------------------------------------------------
/policy_gradient/Multi_armed_bandit2.py:
--------------------------------------------------------------------------------
import numpy as np
import torch
import torch.nn as nn


class Policy(nn.Module):
    def __init__(self, num_bandits, num_actions):
        super(Policy, self).__init__()
        self.num_bandits = num_bandits
        self.linear = nn.Linear(num_bandits, num_actions, bias=False)
        self.linear.weight = torch.nn.Parameter(torch.ones(num_actions, num_bandits))

    def forward(self, input):
        # The state (bandit index) enters the linear layer as a one-hot vector.
        one_hot = self.to_one_hot(input, self.num_bandits)
        result = torch.sigmoid(self.linear(one_hot))
        return result

    def to_one_hot(self, action, depth):
        ones = torch.eye(depth)
        return ones[action, :]

    def pull_arm(self, bandit, action):
        # Reward is +1 when the arm's value exceeds a standard-normal draw,
        # so arms with higher values pay off more often.
        result = np.random.randn(1)
        if bandit[action] > result:
            return 1
        else:
            return -1


bandits = [[-5, -1, 0, 1], [-1, -5, 1, 0], [0, 1, -1, -5]]
num_bandits = len(bandits)
num_actions = len(bandits[0])

policy = Policy(num_bandits, num_actions)

optimizer = torch.optim.SGD(policy.parameters(), lr=0.001)

rewards = np.zeros([num_bandits, num_actions])

for t in range(1000):
    # A random state selects which bandit (row) we face this round.
    state = np.random.randint(0, num_bandits)
    result = policy(state)
    # Epsilon-greedy exploration: pick a random arm 25% of the time.
    if np.random.rand(1) < 0.25:
        action = np.random.randint(0, num_actions)
    else:
        action = np.argmax(result.data.numpy())
    reward = policy.pull_arm(bandits[state], action)
    loss = -torch.log(result[action]) * reward
    rewards[state, action] += reward
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()

print(rewards)
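
# Added sanity check (a sketch, not part of the original script): pull_arm
# rewards arms whose value exceeds a standard-normal draw, so after training
# the greedy action for each state should match that row's largest value.
for s in range(num_bandits):
    probs = policy(s).data.numpy()
    print("state", s, "greedy arm", int(np.argmax(probs)),
          "best arm", int(np.argmax(bandits[s])))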
--------------------------------------------------------------------------------
/elman_rnn/Elman_RNN.py:
--------------------------------------------------------------------------------
import torch
import numpy as np
import pylab as pl


torch.manual_seed(1)
np.random.seed(1)

# input_size is the concatenated input + hidden width (1 + hidden_size).
input_size, hidden_size, output_size = 7, 6, 1
epochs = 200
seq_length = 20
lr = 0.1

# Learn to predict sin(t) one step ahead: y is x shifted by one time step.
data_time_steps = np.linspace(2, 10, seq_length + 1)
data = np.sin(data_time_steps)
data.resize((seq_length + 1, 1))

x = torch.tensor(data[:-1], requires_grad=False, dtype=torch.float)
y = torch.tensor(data[1:], requires_grad=False, dtype=torch.float)

w1 = (torch.randn(input_size, hidden_size) * 0.4).requires_grad_()
w2 = (torch.randn(hidden_size, output_size) * 0.3).requires_grad_()

def forward(input, context_state, w1, w2):
    # Elman step: concatenate the input with the previous context (hidden) state.
    xh = torch.cat((input, context_state), 1)
    context_state = torch.tanh(xh.mm(w1))
    out = context_state.mm(w2)
    return (out, context_state)

for i in range(epochs):
    total_loss = 0
    context_state = torch.zeros((1, hidden_size), dtype=torch.float)
    for j in range(x.size(0)):
        input = x[j:(j+1)]
        target = y[j:(j+1)]
        (pred, context_state) = forward(input, context_state, w1, w2)
        loss = (pred - target).pow(2).sum() / 2
        total_loss += loss.item()
        loss.backward()
        with torch.no_grad():
            w1 -= lr * w1.grad
            w2 -= lr * w2.grad
            w1.grad.zero_()
            w2.grad.zero_()
        # Detach the context state so gradients do not flow across time steps.
        context_state = context_state.detach()
    if i % 10 == 0:
        print("Epoch: {} loss {}".format(i, total_loss))

context_state = torch.zeros((1, hidden_size), dtype=torch.float)
predictions = []

for i in range(x.size(0)):
    input = x[i:i+1]
    (pred, context_state) = forward(input, context_state, w1, w2)
    predictions.append(pred.data.numpy().ravel()[0])

pl.scatter(data_time_steps[:-1], x.data.numpy(), s=90, label="Actual")
pl.scatter(data_time_steps[1:], predictions, label="Predicted")
pl.legend()
pl.show()
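
# Added sketch (not in the original script): run the trained cell closed-loop,
# feeding each prediction back as the next input, to regenerate the sine wave
# from the first true value alone.
context_state = torch.zeros((1, hidden_size), dtype=torch.float)
seq = [x[0:1]]
for _ in range(seq_length - 1):
    out, context_state = forward(seq[-1], context_state, w1, w2)
    seq.append(out.detach())
print([round(float(v), 3) for v in torch.cat(seq).ravel()])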
--------------------------------------------------------------------------------
/vanilla_rnn/Vanilla_RNN.py:
--------------------------------------------------------------------------------
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
import pylab as pl
import torch.nn.init as init

dtype = torch.float

num_time_steps = 10
input_size = 1
hidden_size = 16
output_size = 1
lr = 0.01

# Learn to predict sin(t) one step ahead: y is x shifted by one time step.
time_steps = np.linspace(0, 10, num_time_steps)
data = np.sin(time_steps)
data = data.reshape(num_time_steps, 1)
x = torch.tensor(data[:-1], dtype=dtype).view(1, num_time_steps - 1, 1)
y = torch.tensor(data[1:], dtype=dtype).view(1, num_time_steps - 1, 1)

class Net(nn.Module):
    def __init__(self, input_size, hidden_size, output_size):
        super(Net, self).__init__()

        self.rnn = nn.RNN(
            input_size=input_size,
            hidden_size=hidden_size,
            num_layers=1,
            batch_first=True,
        )
        for p in self.rnn.parameters():
            init.normal_(p, mean=0.0, std=0.001)
        self.linear = nn.Linear(hidden_size, output_size)

    def forward(self, x, hidden_prev=None):
        out, hidden_prev = self.rnn(x, hidden_prev)
        # Flatten (batch, seq, hidden) to (batch * seq, hidden) for the linear layer.
        out = out.view(-1, hidden_size)
        out = self.linear(out)
        return out, hidden_prev

model = Net(input_size, hidden_size, output_size)
criterion = nn.MSELoss()
optimizer = optim.Adam(model.parameters(), lr)

for iter in range(1000):
    output, _ = model(x)
    loss = criterion(output, y.view(-1, 1))
    model.zero_grad()
    loss.backward()
    optimizer.step()
    if iter % 100 == 0:
        print("Iteration: {} loss {}".format(iter, loss.item()))

# Closed-loop prediction: feed each output back in as the next input.
predictions = []
input = x[:, 0, :]
hidden_prev = None
for _ in range(x.shape[1]):
    input = input.view(1, 1, 1)
    (pred, hidden_prev) = model(input, hidden_prev)
    input = pred
    predictions.append(pred.data.numpy().ravel()[0])

x = x.data.numpy().ravel()
pl.scatter(time_steps[:-1], x, s=90)
pl.plot(time_steps[:-1], x)
pl.scatter(time_steps[1:], predictions)
pl.show()
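
# Added sketch (not part of the original script): score the closed-loop
# rollout against the true next sin values with a simple mean-squared error.
true_next = np.sin(time_steps[1:])
mse = np.mean((np.array(predictions) - true_next) ** 2)
print("closed-loop MSE vs. true sin(t): {:.6f}".format(mse))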
--------------------------------------------------------------------------------
/timeseries/LSTM.py:
--------------------------------------------------------------------------------
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import torch
import torch.nn as nn
from sklearn.preprocessing import MinMaxScaler

training_set = pd.read_csv('./monthly-lake-erie-levels-1921-19.csv')
training_set = training_set.iloc[:, 1:2].values


plt.plot(training_set, label='Monthly Lake Erie levels')
plt.legend()
plt.show()


# Scale the series to [0, 1] before training.
sc = MinMaxScaler()
training_data = sc.fit_transform(training_set)


def sliding_windows(data, seq_length):
    # Each sample is seq_length consecutive values; the target is the next one.
    x = []
    y = []

    for i in range(len(data) - seq_length - 1):
        _x = data[i:(i + seq_length)]
        _y = data[i + seq_length]
        x.append(_x)
        y.append(_y)

    return np.array(x), np.array(y)


seq_length = 7
learning_rate = 0.01
input_size = 1
hidden_size = 5
num_classes = 1
num_layers = 1
num_epochs = 400

x, y = sliding_windows(training_data, seq_length)

# Chronological 70/30 train/test split.
train_size = int(len(y) * 0.7)
test_size = len(y) - train_size
trainX = torch.tensor(x[0:train_size], dtype=torch.float)
testX = torch.tensor(x[train_size:len(x)], dtype=torch.float)
trainY = torch.tensor(y[0:train_size], dtype=torch.float)
testY = torch.tensor(y[train_size:len(y)], dtype=torch.float)


class LSTM(nn.Module):

    def __init__(self, num_classes, input_size, hidden_size, num_layers):
        super(LSTM, self).__init__()
        self.num_classes = num_classes
        self.num_layers = num_layers
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.seq_length = seq_length
        self.lstm = nn.LSTM(input_size=input_size, hidden_size=hidden_size,
                            num_layers=num_layers, batch_first=True)
        self.fc = nn.Linear(hidden_size, num_classes)

    def forward(self, x):
        # Zero initial hidden and cell states, one per layer and batch element.
        h_0 = torch.zeros(self.num_layers, x.size(0), self.hidden_size)
        c_0 = torch.zeros(self.num_layers, x.size(0), self.hidden_size)
        # Propagate the input through the LSTM; h_out is the final hidden state.
        ula, (h_out, _) = self.lstm(x, (h_0, c_0))
        h_out = h_out.view(-1, self.hidden_size)
        out = self.fc(h_out)
        return out


lstm = LSTM(num_classes, input_size, hidden_size, num_layers)

criterion = torch.nn.MSELoss()  # mean-squared error for regression
optimizer = torch.optim.Adam(lstm.parameters(), lr=learning_rate)

# Train the model
for epoch in range(num_epochs):
    outputs = lstm(trainX)
    optimizer.zero_grad()
    loss = criterion(outputs, trainY)
    loss.backward()
    optimizer.step()
    if epoch % 50 == 0:
        print("Epoch: %d, loss: %1.5f" % (epoch, loss.item()))


lstm.eval()
with torch.no_grad():
    test_predict = lstm(testX)

test_predict = test_predict.numpy()
testY = testY.numpy()
plt.plot(testY)
plt.plot(test_predict)
plt.show()
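
# Added sketch (not part of the original script): the plot above is in the
# MinMaxScaler's [0, 1] range; sc.inverse_transform maps the predictions back
# to the original lake-level units.
test_predict_levels = sc.inverse_transform(test_predict)
testY_levels = sc.inverse_transform(testY)
plt.plot(testY_levels, label='actual')
plt.plot(test_predict_levels, label='predicted')
plt.legend()
plt.show()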
102 | "1929-05",19.184 103 | "1929-06",19.184 104 | "1929-07",18.956 105 | "1929-08",18.254 106 | "1929-09",17.514 107 | "1929-10",16.660 108 | "1929-11",16.338 109 | "1929-12",16.319 110 | "1930-01",17.457 111 | "1930-02",17.173 112 | "1930-03",17.856 113 | "1930-04",18.596 114 | "1930-05",18.558 115 | "1930-06",18.159 116 | "1930-07",17.685 117 | "1930-08",16.812 118 | "1930-09",16.072 119 | "1930-10",15.332 120 | "1930-11",14.478 121 | "1930-12",14.213 122 | "1931-01",13.738 123 | "1931-02",13.169 124 | "1931-03",12.581 125 | "1931-04",13.245 126 | "1931-05",13.852 127 | "1931-06",14.175 128 | "1931-07",14.288 129 | "1931-08",13.985 130 | "1931-09",13.435 131 | "1931-10",12.884 132 | "1931-11",12.429 133 | "1931-12",12.410 134 | "1932-01",13.397 135 | "1932-02",13.909 136 | "1932-03",13.833 137 | "1932-04",14.099 138 | "1932-05",14.687 139 | "1932-06",14.611 140 | "1932-07",14.383 141 | "1932-08",13.909 142 | "1932-09",13.359 143 | "1932-10",12.296 144 | "1932-11",12.106 145 | "1932-12",11.803 146 | "1933-01",12.353 147 | "1933-02",12.220 148 | "1933-03",12.827 149 | "1933-04",14.250 150 | "1933-05",15.085 151 | "1933-06",14.953 152 | "1933-07",14.440 153 | "1933-08",13.890 154 | "1933-09",13.036 155 | "1933-10",12.201 156 | "1933-11",11.404 157 | "1933-12",11.309 158 | "1934-01",10.987 159 | "1934-02",10.361 160 | "1934-03",10.304 161 | "1934-04",11.347 162 | "1934-05",11.784 163 | "1934-06",11.841 164 | "1934-07",11.841 165 | "1934-08",11.651 166 | "1934-09",11.404 167 | "1934-10",10.873 168 | "1934-11",10.209 169 | "1934-12",10.076 170 | "1935-01",10.247 171 | "1935-02",10.133 172 | "1935-03",10.740 173 | "1935-04",11.556 174 | "1935-05",12.201 175 | "1935-06",12.505 176 | "1935-07",12.732 177 | "1935-08",12.201 178 | "1935-09",12.068 179 | "1935-10",11.290 180 | "1935-11",11.139 181 | "1935-12",11.101 182 | "1936-01",10.342 183 | "1936-02",10.000 184 | "1936-03",11.347 185 | "1936-04",12.770 186 | "1936-05",13.321 187 | "1936-06",13.340 188 | "1936-07",13.188 189 | "1936-08",12.676 190 | "1936-09",12.315 191 | "1936-10",12.049 192 | "1936-11",11.594 193 | "1936-12",11.252 194 | "1937-01",12.467 195 | "1937-02",13.491 196 | "1937-03",13.491 197 | "1937-04",14.156 198 | "1937-05",15.256 199 | "1937-06",15.598 200 | "1937-07",16.034 201 | "1937-08",15.598 202 | "1937-09",14.516 203 | "1937-10",13.435 204 | "1937-11",12.638 205 | "1937-12",12.106 206 | "1938-01",12.144 207 | "1938-02",12.979 208 | "1938-03",11.917 209 | "1938-04",15.066 210 | "1938-05",15.199 211 | "1938-06",15.427 212 | "1938-07",15.427 213 | "1938-08",15.408 214 | "1938-09",14.706 215 | "1938-10",14.137 216 | "1938-11",13.302 217 | "1938-12",12.979 218 | "1939-01",13.036 219 | "1939-02",12.903 220 | "1939-03",13.719 221 | "1939-04",14.801 222 | "1939-05",15.541 223 | "1939-06",15.617 224 | "1939-07",15.503 225 | "1939-08",15.218 226 | "1939-09",14.592 227 | "1939-10",13.852 228 | "1939-11",13.416 229 | "1939-12",13.055 230 | "1940-01",12.315 231 | "1940-02",12.239 232 | "1940-03",12.562 233 | "1940-04",13.890 234 | "1940-05",14.687 235 | "1940-06",15.313 236 | "1940-07",15.408 237 | "1940-08",15.028 238 | "1940-09",14.782 239 | "1940-10",14.213 240 | "1940-11",13.435 241 | "1940-12",13.662 242 | "1941-01",14.288 243 | "1941-02",13.491 244 | "1941-03",13.150 245 | "1941-04",13.586 246 | "1941-05",13.871 247 | "1941-06",14.175 248 | "1941-07",14.099 249 | "1941-08",13.719 250 | "1941-09",13.093 251 | "1941-10",12.562 252 | "1941-11",12.030 253 | "1941-12",12.144 254 | "1942-01",11.860 255 | "1942-02",12.410 256 | 
"1942-03",12.770 257 | "1942-04",14.630 258 | "1942-05",15.218 259 | "1942-06",15.920 260 | "1942-07",15.882 261 | "1942-08",15.750 262 | "1942-09",15.275 263 | "1942-10",14.801 264 | "1942-11",14.554 265 | "1942-12",14.345 266 | "1943-01",15.104 267 | "1943-02",14.554 268 | "1943-03",14.953 269 | "1943-04",15.958 270 | "1943-05",17.590 271 | "1943-06",18.805 272 | "1943-07",18.805 273 | "1943-08",18.330 274 | "1943-09",17.476 275 | "1943-10",16.812 276 | "1943-11",16.224 277 | "1943-12",15.541 278 | "1944-01",14.744 279 | "1944-02",14.478 280 | "1944-03",14.725 281 | "1944-04",16.319 282 | "1944-05",17.324 283 | "1944-06",17.609 284 | "1944-07",17.211 285 | "1944-08",16.546 286 | "1944-09",15.977 287 | "1944-10",15.427 288 | "1944-11",14.972 289 | "1944-12",14.535 290 | "1945-01",14.213 291 | "1945-02",13.719 292 | "1945-03",15.009 293 | "1945-04",16.319 294 | "1945-05",17.078 295 | "1945-06",17.913 296 | "1945-07",18.159 297 | "1945-08",17.704 298 | "1945-09",17.059 299 | "1945-10",17.268 300 | "1945-11",16.546 301 | "1945-12",16.072 302 | "1946-01",15.996 303 | "1946-02",15.294 304 | "1946-03",15.901 305 | "1946-04",16.300 306 | "1946-05",16.546 307 | "1946-06",17.495 308 | "1946-07",17.666 309 | "1946-08",16.964 310 | "1946-09",16.148 311 | "1946-10",15.427 312 | "1946-11",14.839 313 | "1946-12",14.269 314 | "1947-01",14.118 315 | "1947-02",14.156 316 | "1947-03",14.080 317 | "1947-04",16.414 318 | "1947-05",18.216 319 | "1947-06",19.298 320 | "1947-07",18.824 321 | "1947-08",18.368 322 | "1947-09",17.875 323 | "1947-10",16.869 324 | "1947-11",16.129 325 | "1947-12",15.712 326 | "1948-01",15.617 327 | "1948-02",15.161 328 | "1948-03",16.148 329 | "1948-04",17.609 330 | "1948-05",18.406 331 | "1948-06",18.463 332 | "1948-07",18.254 333 | "1948-08",17.609 334 | "1948-09",16.812 335 | "1948-10",15.712 336 | "1948-11",15.066 337 | "1948-12",14.763 338 | "1949-01",14.877 339 | "1949-02",15.370 340 | "1949-03",15.731 341 | "1949-04",15.996 342 | "1949-05",16.224 343 | "1949-06",16.186 344 | "1949-07",16.015 345 | "1949-08",15.446 346 | "1949-09",14.573 347 | "1949-10",14.080 348 | "1949-11",13.226 349 | "1949-12",12.979 350 | "1950-01",14.459 351 | "1950-02",15.655 352 | "1950-03",15.636 353 | "1950-04",17.154 354 | "1950-05",17.381 355 | "1950-06",17.116 356 | "1950-07",16.736 357 | "1950-08",16.167 358 | "1950-09",15.920 359 | "1950-10",15.408 360 | "1950-11",15.066 361 | "1950-12",15.769 362 | "1951-01",15.787 363 | "1951-02",15.863 364 | "1951-03",17.154 365 | "1951-04",18.178 366 | "1951-05",18.653 367 | "1951-06",18.615 368 | "1951-07",18.406 369 | "1951-08",17.837 370 | "1951-09",17.078 371 | "1951-10",16.471 372 | "1951-11",16.224 373 | "1951-12",16.395 374 | "1952-01",17.400 375 | "1952-02",18.672 376 | "1952-03",19.089 377 | "1952-04",19.829 378 | "1952-05",20.000 379 | "1952-06",19.943 380 | "1952-07",19.526 381 | "1952-08",18.975 382 | "1952-09",18.463 383 | "1952-10",17.268 384 | "1952-11",16.414 385 | "1952-12",16.414 386 | "1953-01",16.755 387 | "1953-02",16.736 388 | "1953-03",17.173 389 | "1953-04",17.647 390 | "1953-05",18.216 391 | "1953-06",18.767 392 | "1953-07",18.539 393 | "1953-08",18.273 394 | "1953-09",17.419 395 | "1953-10",16.679 396 | "1953-11",15.996 397 | "1953-12",15.465 398 | "1954-01",15.408 399 | "1954-02",15.123 400 | "1954-03",16.072 401 | "1954-04",17.685 402 | "1954-05",18.235 403 | "1954-06",18.064 404 | "1954-07",17.818 405 | "1954-08",17.438 406 | "1954-09",16.812 407 | "1954-10",17.116 408 | "1954-11",17.211 409 | "1954-12",17.097 410 | 
"1955-01",17.495 411 | "1955-02",17.097 412 | "1955-03",18.121 413 | "1955-04",18.748 414 | "1955-05",18.767 415 | "1955-06",18.539 416 | "1955-07",18.102 417 | "1955-08",17.818 418 | "1955-09",17.002 419 | "1955-10",16.357 420 | "1955-11",15.560 421 | "1955-12",15.313 422 | "1956-01",14.782 423 | "1956-02",13.719 424 | "1956-03",14.839 425 | "1956-04",15.825 426 | "1956-05",17.324 427 | "1956-06",17.723 428 | "1956-07",17.685 429 | "1956-08",17.514 430 | "1956-09",16.907 431 | "1956-10",15.863 432 | "1956-11",14.934 433 | "1956-12",14.706 434 | "1957-01",14.383 435 | "1957-02",14.345 436 | "1957-03",14.763 437 | "1957-04",15.958 438 | "1957-05",16.698 439 | "1957-06",16.793 440 | "1957-07",17.230 441 | "1957-08",16.509 442 | "1957-09",15.787 443 | "1957-10",14.991 444 | "1957-11",14.080 445 | "1957-12",14.326 446 | "1958-01",14.497 447 | "1958-02",13.510 448 | "1958-03",13.662 449 | "1958-04",14.042 450 | "1958-05",14.269 451 | "1958-06",14.478 452 | "1958-07",14.972 453 | "1958-08",14.972 454 | "1958-09",14.573 455 | "1958-10",13.776 456 | "1958-11",13.017 457 | "1958-12",12.600 458 | "1959-01",12.296 459 | "1959-02",13.131 460 | "1959-03",13.966 461 | "1959-04",15.028 462 | "1959-05",15.844 463 | "1959-06",15.769 464 | "1959-07",15.237 465 | "1959-08",14.801 466 | "1959-09",14.137 467 | "1959-10",13.890 468 | "1959-11",13.416 469 | "1959-12",13.871 470 | "1960-01",14.478 471 | "1960-02",14.725 472 | "1960-03",14.763 473 | "1960-04",15.806 474 | "1960-05",16.565 475 | "1960-06",17.097 476 | "1960-07",17.306 477 | "1960-08",17.211 478 | "1960-09",16.717 479 | "1960-10",15.787 480 | "1960-11",14.801 481 | "1960-12",14.231 482 | "1961-01",13.966 483 | "1961-02",14.004 484 | "1961-03",15.313 485 | "1961-04",16.357 486 | "1961-05",17.742 487 | "1961-06",17.609 488 | "1961-07",17.249 489 | "1961-08",17.078 490 | "1961-09",16.509 491 | "1961-10",15.465 492 | "1961-11",14.706 493 | "1961-12",14.213 494 | "1962-01",13.662 495 | "1962-02",13.928 496 | "1962-03",14.516 497 | "1962-04",15.180 498 | "1962-05",15.351 499 | "1962-06",15.579 500 | "1962-07",15.446 501 | "1962-08",15.199 502 | "1962-09",14.725 503 | "1962-10",14.383 504 | "1962-11",14.231 505 | "1962-12",13.776 506 | "1963-01",13.150 507 | "1963-02",12.713 508 | "1963-03",13.283 509 | "1963-04",14.478 510 | "1963-05",14.858 511 | "1963-06",14.782 512 | "1963-07",14.307 513 | "1963-08",14.023 514 | "1963-09",13.529 515 | "1963-10",12.922 516 | "1963-11",12.410 517 | "1963-12",11.936 518 | "1964-01",11.784 519 | "1964-02",11.992 520 | "1964-03",12.619 521 | "1964-04",13.662 522 | "1964-05",14.231 523 | "1964-06",14.099 524 | "1964-07",13.833 525 | "1964-08",13.416 526 | "1964-09",12.922 527 | "1964-10",11.974 528 | "1964-11",11.461 529 | "1964-12",11.423 530 | "1965-01",11.860 531 | "1965-02",12.163 532 | "1965-03",13.226 533 | "1965-04",13.966 534 | "1965-05",14.535 535 | "1965-06",14.516 536 | "1965-07",14.231 537 | "1965-08",13.966 538 | "1965-09",13.738 539 | "1965-10",13.226 540 | "1965-11",12.998 541 | "1965-12",13.131 542 | "1966-01",13.700 543 | "1966-02",13.814 544 | "1966-03",14.383 545 | "1966-04",15.047 546 | "1966-05",15.693 547 | "1966-06",15.844 548 | "1966-07",15.712 549 | "1966-08",15.313 550 | "1966-09",14.763 551 | "1966-10",13.548 552 | "1966-11",13.586 553 | "1966-12",14.516 554 | "1967-01",14.383 555 | "1967-02",14.497 556 | "1967-03",14.744 557 | "1967-04",15.806 558 | "1967-05",16.527 559 | "1967-06",16.546 560 | "1967-07",16.717 561 | "1967-08",16.433 562 | "1967-09",15.769 563 | "1967-10",15.275 564 | 
"1967-11",15.123 565 | "1967-12",15.541 566 | "1968-01",15.825 567 | "1968-02",16.376 568 | "1968-03",16.357 569 | "1968-04",16.907 570 | "1968-05",17.021 571 | "1968-06",17.419 572 | "1968-07",17.533 573 | "1968-08",17.268 574 | "1968-09",16.603 575 | "1968-10",15.825 576 | "1968-11",15.446 577 | "1968-12",15.636 578 | "1969-01",15.693 579 | "1969-02",16.755 580 | "1969-03",16.509 581 | "1969-04",17.723 582 | "1969-05",18.691 583 | "1969-06",19.127 584 | "1969-07",19.564 585 | "1969-08",19.203 586 | "1969-09",18.216 587 | "1969-10",17.211 588 | "1969-11",16.660 589 | "1969-12",16.831 590 | "1970-01",15.769 591 | "1970-02",15.731 592 | "1970-03",15.996 593 | "1970-04",17.021 594 | "1970-05",17.552 595 | "1970-06",17.837 596 | "1970-07",17.856 597 | "1970-08",17.571 598 | "1970-09",17.078 599 | "1970-10",16.660 600 | "1970-11",16.433 601 | "1970-12",16.584 602 | --------------------------------------------------------------------------------