├── .gitignore ├── Architectures ├── CvGan.py ├── DCGAN.py ├── GAN_Conv.py ├── GAN_con_lin.py ├── Lin_GAN.py ├── RGAN.py └── __init__.py ├── Datasets └── VIX.csv ├── Experiments ├── Experimental Report.ipynb ├── Test.ipynb ├── Test_Emilien.ipynb ├── Tests new framework.ipynb └── __init__.py ├── GAN.py ├── GAN_synthetic.py ├── Helpers ├── Batch_Generator.py ├── __init__.py ├── statistical_tests.py └── utils.py ├── README.md └── __init__.py /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | Helpers/.DS_Store 3 | Helpers/__pycache__/ 4 | Architectures/__pycache__/ 5 | Experiments/.ipynb_checkpoints/ 6 | __pycache__/ 7 | -------------------------------------------------------------------------------- /Architectures/CvGan.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pickle 3 | 4 | import torch.nn as nn 5 | import torch.nn.functional as F 6 | import torch 7 | 8 | import sys 9 | sys.path.append('../') 10 | 11 | from Helpers.utils import get_data 12 | from GAN import GAN 13 | 14 | 15 | class Interpolate(nn.Module): 16 | def __init__(self, size): 17 | super(Interpolate, self).__init__() 18 | self.interp = nn.functional.interpolate 19 | self.size = size 20 | #self.mode = mode 21 | 22 | def forward(self, x): 23 | x = self.interp(x, size=self.size) 24 | return x 25 | 26 | 27 | Upsample = Interpolate 28 | 29 | 30 | class Generator(nn.Module): 31 | # Deconv block : conv1d, reakyrelu upslamping 32 | def __init__(self, window=60, kernel_size=5, PRIOR_N=2, PRIOR_STD=10): 33 | super().__init__() 34 | self.window = window 35 | self.PRIOR_N = PRIOR_N 36 | self.PRIOR_STD = PRIOR_STD 37 | self.kernel_size = kernel_size 38 | self.padding = int((self.kernel_size-1)/2) 39 | self.lin1 = nn.Linear(self.PRIOR_N, 15) 40 | self.bn1 = nn.BatchNorm1d(15) 41 | self.conv1 = nn.Conv1d(1, 32, kernel_size=self.kernel_size, padding=self.padding) 42 | 43 | self.bn2 = nn.BatchNorm1d(32) 44 | self.up1 = Upsample(size=30) 45 | self.conv2 = nn.Conv1d(32, 32, kernel_size=self.kernel_size, padding=self.padding) 46 | self.bn3 = nn.BatchNorm1d(32) 47 | self.up2 = Upsample(size=60) 48 | self.conv3 = nn.Conv1d(32, 32, kernel_size=self.kernel_size, padding=self.padding) 49 | self.bn4 = nn.BatchNorm1d(32) 50 | self.up3 = Upsample(size=120) 51 | self.bn5 = nn.BatchNorm1d(120) 52 | 53 | self.conv4 = nn.Conv1d(32, 1, kernel_size=1) 54 | self.lin2 = nn.Linear(120, self.window) 55 | 56 | def __call__(self, x): 57 | x = self.lin1(x) 58 | x = x.view(x.size()[0], 15, 1) 59 | x = self.bn1(x) 60 | x = F.leaky_relu(x, negative_slope=0.2) 61 | x = x.view(x.size()[0], 1, 15) 62 | x = self.up1(x) 63 | x = F.leaky_relu(self.bn2(self.conv1(x)), negative_slope=0.2) 64 | x = F.leaky_relu(self.bn3(self.conv2(x)), negative_slope=0.2) 65 | x = self.up2(x) 66 | x = F.leaky_relu(self.bn4(self.conv3(x)), negative_slope=0.2) 67 | x = self.up3(x) 68 | x = self.conv4(x) 69 | x = x.view(x.size()[0], x.size()[2]) 70 | x = F.leaky_relu(self.bn5(x), negative_slope=0.2) 71 | x = x.view(x.size()[0], x.size()[-1]) 72 | x = F.leaky_relu(self.lin2(x), negative_slope=0.5) 73 | 74 | return x 75 | 76 | def generate(self, batchlen): 77 | z = torch.normal(torch.zeros(batchlen, 1, self.PRIOR_N), self.PRIOR_STD) 78 | return self.__call__(z) 79 | 80 | 81 | class Discriminator(nn.Module): 82 | def __init__(self, window=60): 83 | super().__init__() 84 | self.window = window 85 | self.maxpool = nn.MaxPool1d(kernel_size=2) 86 | self.conv1 = 
nn.Conv1d(in_channels=1, out_channels=32, kernel_size=3, padding=1) 87 | self.conv2 = nn.Conv1d(in_channels=32, out_channels=32, kernel_size=3, padding=1) 88 | self.conv3 = nn.Conv1d(in_channels=32, out_channels=32, kernel_size=3, padding=1) 89 | self.conv4 = nn.Conv1d(in_channels=32, out_channels=32, kernel_size=3, padding=1) 90 | self.fc1 = nn.Linear(32*int(self.window/8), 50) 91 | self.fc2 = nn.Linear(50, 15) 92 | self.fc3 = nn.Linear(15, 1) 93 | 94 | def __call__(self, x): 95 | x = x.view(x.size()[0], 1, x.size()[1]) 96 | x = F.leaky_relu(self.conv1(x), negative_slope=0.2) 97 | x = self.maxpool(x) 98 | x = F.leaky_relu(self.conv2(x), negative_slope=0.2) 99 | x = self.maxpool(x) 100 | x = F.leaky_relu(self.conv3(x), negative_slope=0.2) 101 | x = self.maxpool(x) 102 | 103 | x = F.leaky_relu(self.conv4(x), negative_slope=0.2) 104 | x = x.view(x.size()[0], 32*int(self.window/8)) 105 | x = F.leaky_relu(self.fc1(x), negative_slope=0.2) 106 | x = F.leaky_relu(self.fc2(x), negative_slope=0.2) 107 | x = F.leaky_relu(self.fc3(x), negative_slope=0.2) 108 | return x 109 | 110 | 111 | if __name__=='__main__': 112 | param = { 113 | 'serie': get_data('VIX.csv'), 114 | 'window': 60, 115 | 'frame': 10, 116 | 'is_notebook': False, 117 | 'batchlen_plot': 10, 118 | 'Generator': Generator, 119 | 'Discriminator': Discriminator 120 | } 121 | training_param = { 122 | 'N_ITER': 2001, 123 | 'TRAIN_RATIO': 10, 124 | 'BATCHLEN': 30, 125 | # Depth and Withdraw of Hidden Layers 126 | 'generator_args': { 127 | # Random Noise used by the Generator 128 | 'PRIOR_N': 20, 129 | 'PRIOR_STD': 500., 130 | 'kernel_size': 5 131 | }, 132 | # 'WDTH': 1000, 133 | # 'DPTH': 5}, 134 | 'discriminator_args': {}, 135 | # 'WDTH': 100, 136 | # 'DPTH': 3}, 137 | 138 | # Adam Optimizer parameters for G/D 139 | 'lr_G': 1e-4, 140 | 'betas_G': (0.5, 0.9), 141 | 'lr_D': 1e-4, 142 | 'betas_D': (0.5, 0.9), 143 | 'plot': False, 144 | 'frame_plot': 100, 145 | 'time_max': 1800, 146 | 'save_model': True, 147 | 'save_name': 'CG2_'+str(int(np.random.uniform()*1e9)) 148 | } 149 | param.update(training_param) 150 | if param['save_model']: 151 | pickle.dump(param, open('Parameters/'+param['save_name']+'.pk', 'wb')) 152 | param.update(training_param) 153 | GAN(**param) 154 | -------------------------------------------------------------------------------- /Architectures/DCGAN.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pickle 3 | 4 | import torch.nn as nn 5 | import torch 6 | 7 | import sys 8 | sys.path.append('../') 9 | 10 | from Helpers.utils import get_data 11 | from GAN import GAN 12 | 13 | 14 | # Define the generator 15 | class Generator(nn.Module): 16 | def __init__(self, window, WDTH=0, PRIOR_N=10, PRIOR_CHANNEL=20, 17 | PRIOR_STD=1.): 18 | super().__init__() 19 | self.PRIOR_N = PRIOR_N 20 | self.PRIOR_STD = PRIOR_STD 21 | self.PRIOR_CHANNEL = PRIOR_CHANNEL 22 | self.WDTH = WDTH 23 | self.window = window 24 | self.main = nn.Sequential( 25 | nn.ConvTranspose1d(PRIOR_CHANNEL, 8*WDTH, 4, 1, 0, bias=False), # Kernel size/stride/padding 26 | nn.BatchNorm1d(8*WDTH), 27 | nn.ReLU(True), 28 | nn.ConvTranspose1d(8*WDTH, 4*WDTH, 4, 2, 1, bias=False), 29 | nn.BatchNorm1d(4*WDTH), 30 | nn.ReLU(True), 31 | nn.ConvTranspose1d(4*WDTH, 2*WDTH, 4, 2, 1, bias=False), 32 | nn.BatchNorm1d(2*WDTH), 33 | nn.ReLU(True), 34 | nn.ConvTranspose1d(2*WDTH, WDTH, 4, 2, 1, bias=False), 35 | nn.BatchNorm1d(WDTH), 36 | nn.ReLU(True), 37 | nn.ConvTranspose1d(WDTH, 1, 4, 2, 1, bias=False) 38 | #,nn.Tanh() 
39 | ) 40 | 41 | def generate(self, batchlen): 42 | z = torch.normal(torch.zeros(batchlen, self.PRIOR_CHANNEL, self.PRIOR_N), self.PRIOR_STD) 43 | z = self.main(z) 44 | return z[:, 0, :self.window] 45 | 46 | 47 | # Define the discriminator. 48 | class Discriminator(nn.Module): 49 | def __init__(self, window, WDTH=20): 50 | super().__init__() 51 | self.WDTH = WDTH 52 | self.main = nn.Sequential( 53 | nn.Conv1d(1, WDTH, 4, 2, 1, bias=False), 54 | nn.LeakyReLU(0.2, inplace=True), 55 | nn.Conv1d(WDTH, 2*WDTH, 4, 2, 1, bias=False), 56 | nn.BatchNorm1d(2*WDTH), 57 | nn.LeakyReLU(0.2, inplace=True), 58 | nn.Conv1d(2*WDTH, 4*WDTH, 4, 2, 1, bias=False), 59 | nn.BatchNorm1d(4*WDTH), 60 | nn.LeakyReLU(0.2, inplace=True), 61 | nn.Conv1d(4*WDTH, 8*WDTH, 4, 2, 1, bias=False), 62 | nn.BatchNorm1d(8*WDTH), 63 | nn.LeakyReLU(0.2, inplace=True), 64 | nn.Conv1d(8*WDTH, 1, 4, 1, 0, bias=False), 65 | nn.Linear(4, 1) 66 | # , nn.Sigmoid() 67 | ) 68 | 69 | def __call__(self, x): 70 | x = x.unsqueeze(1) 71 | return self.main(x)[:, 0, :] 72 | 73 | 74 | def random_xp(n_xp): 75 | window = [60, 125, 250] 76 | PRIOR_N = [5] 77 | BATCHLEN = [30, 50, 100] 78 | Prior_channel = [1, 3, 5, 10, 20] 79 | Train_ratio = [1, 3, 5, 10] 80 | PRIOR_STD = [1., 10., 20., 100.] 81 | WDTH_G = [10, 50, 100, 200, 500, 1000] 82 | for i in range(n_xp): 83 | param['window'] = np.random.choice(window) 84 | param['BATCHLEN'] = np.random.choice(BATCHLEN) 85 | param['generator_args']['PRIOR_N'] = np.random.choice(PRIOR_N) 86 | param['generator_args']['PRIOR_STD'] = np.random.choice(PRIOR_STD) 87 | param['TRAIN_RATIO'] = np.random.choice(Train_ratio) 88 | param['generator_args']['WDTH'] = np.random.choice(WDTH_G) 89 | param['generator_args']['PRIOR_CHANNEL'] = np.random.choice(Prior_channel) 90 | WDTH_D = [x for x in WDTH_G if x <= param['generator_args']['WDTH']] 91 | param['discriminator_args']['WDTH'] = np.random.choice(WDTH_D) 92 | param['save_name'] = 'Lin_G_'+str(int(np.random.uniform()*1e9)) 93 | print('Iteration %f' % i) 94 | print((param['window'], param['BATCHLEN'], param['TRAIN_RATIO'])) 95 | print(param['generator_args']) 96 | print(param['discriminator_args']) 97 | if param['save_model']: 98 | pickle.dump(param, open('Parameters/'+param['save_name']+'.pk', 'wb')) 99 | GAN(**param) 100 | 101 | 102 | if __name__ == '__main__': 103 | param = { 104 | 'serie': get_data('VIX.csv'), 105 | 'window': 125, 106 | 'frame': 20, 107 | 'is_notebook': False, 108 | 'batchlen_plot': 20, 109 | 'Generator': Generator, 110 | 'Discriminator': Discriminator 111 | } 112 | training_param = { 113 | 'N_ITER': 2001, 114 | 'TRAIN_RATIO': 5, 115 | 'BATCHLEN': 50, 116 | # Random Noise used by the Generator 117 | 'generator_args': { 118 | 'PRIOR_N': 5, 119 | 'PRIOR_STD': 100., 120 | 'WDTH': 100, 121 | 'PRIOR_CHANNEL': 3, 122 | }, 123 | 'discriminator_args': {'WDTH': 40}, 124 | # Adam Optimizer parameters for G/D 125 | 'lr_G': 1e-4, 126 | 'betas_G': (0.5, 0.9), 127 | 'lr_D': 1e-4, 128 | 'betas_D': (0.5, 0.9), 129 | 'plot': False, 130 | 'frame_plot': 100, 131 | 'time_max': 1800, 132 | 'save_model': True, 133 | 'save_name': 'CG_'+str(int(np.random.uniform()*1e9)) 134 | } 135 | param.update(training_param) 136 | 137 | # random_xp(2000) 138 | # if param['save_model']: 139 | # pickle.dump(param, open('Parameters/'+param['save_name']+'.pk', 'wb')) 140 | pickle.dump(param, open('Parameters/'+param['save_name']+'.pk', 'wb')) 141 | GAN(**param) 142 | -------------------------------------------------------------------------------- /Architectures/GAN_Conv.py: 
-------------------------------------------------------------------------------- 1 | import torch.nn as nn 2 | import torch.nn.functional as F 3 | import torch 4 | 5 | import sys 6 | sys.path.append('../') 7 | 8 | from Helpers.utils import get_data 9 | from GAN import GAN 10 | 11 | 12 | class Generator(nn.Module): 13 | def __init__(self, window, PRIOR_N=10, PRIOR_STD=1.): 14 | super().__init__() 15 | self.PRIOR_N = PRIOR_N 16 | self.PRIOR_STD = PRIOR_STD 17 | self.window = window 18 | self.conv1 = nn.Conv1d(in_channels=1, out_channels=4, 19 | kernel_size=3, padding=1) 20 | self.conv2 = nn.Conv1d(in_channels=4, out_channels=16, 21 | kernel_size=3, padding=1) 22 | self.conv3 = nn.Conv1d(in_channels=16, out_channels=30, 23 | kernel_size=3, padding=1) 24 | self.conv4 = nn.Conv1d(in_channels=30, out_channels=self.window, 25 | kernel_size=3, padding=1) 26 | # in_channels here must be the number of random noise sources (PRIOR_N) 27 | self.conv5 = nn.Conv1d(in_channels=self.PRIOR_N, out_channels=5, 28 | kernel_size=3, padding=1) 29 | self.conv6 = nn.Conv1d(in_channels=5, out_channels=1, 30 | kernel_size=3, padding=1) 31 | self.bn = nn.BatchNorm1d(window) 32 | 33 | def __call__(self, z): 34 | z = F.relu(self.conv1(z)) 35 | z = F.relu(self.conv2(z)) 36 | z = F.relu(self.conv3(z)) 37 | z = F.relu(self.conv4(z)) 38 | z = z.view(z.size()[0], self.PRIOR_N, self.window) # the last dimension becomes window 39 | 40 | z = F.relu(self.conv5(z)) 41 | z = self.conv6(z) 42 | 43 | return self.bn(z.view(-1, self.window)) 44 | 45 | def generate(self, batchlen): 46 | z = torch.normal(torch.zeros(batchlen, 1, self.PRIOR_N), self.PRIOR_STD) 47 | return self.__call__(z) 48 | 49 | 50 | class Discriminator(nn.Module): 51 | def __init__(self, window): 52 | super().__init__() 53 | self.window = window 54 | self.conv1 = nn.Conv1d(in_channels=1, out_channels=3, 55 | kernel_size=3, padding=1,) 56 | self.conv2 = nn.Conv1d(in_channels=3, out_channels=6, 57 | kernel_size=3, padding=1,) 58 | self.conv3 = nn.Conv1d(in_channels=6, out_channels=3, 59 | kernel_size=3, padding=1,) 60 | self.conv4 = nn.Conv1d(in_channels=3, out_channels=1, 61 | kernel_size=3, padding=1,) 62 | self.fc1 = nn.Linear(self.window, 30) 63 | self.fc2 = nn.Linear(30, 15) 64 | self.fc3 = nn.Linear(15, 1) 65 | self.bn = nn.BatchNorm1d(window) 66 | self.fc_single = nn.Linear(self.window, 1) 67 | 68 | def __call__(self, z): 69 | z = z.view(z.size()[0], 1, z.size()[-1]) 70 | z = F.relu(self.conv1(z)) 71 | z = F.relu(self.conv2(z)) 72 | z = F.relu(self.conv3(z)) 73 | z = F.relu(self.conv4(z)) 74 | 75 | z = self.bn(z.view(-1, self.window)) 76 | z = F.relu(self.fc_single(z)) 77 | # z = F.relu(self.fc1(z)) 78 | # z = F.relu(self.fc2(z)) 79 | # z = self.fc3(z) 80 | return z 81 | 82 | 83 | if __name__ == '__main__': 84 | param = { 85 | 'serie': get_data('VIX.csv'), 86 | 'window': 60, 87 | 'frame': 10, 88 | 'frame_plot': 20, 89 | 'is_notebook': False, 90 | 'batchlen_plot': 10, 91 | 'Generator': Generator, 92 | 'Discriminator': Discriminator 93 | } 94 | training_param = { 95 | 'N_ITER': 1001, 96 | 'TRAIN_RATIO': 10, 97 | 'BATCHLEN': 50, 98 | # Depth and width of hidden layers 99 | 'generator_args': { 100 | # Random Noise used by the Generator 101 | 'PRIOR_N': 200, 102 | 'PRIOR_STD': 10.}, 103 | 'discriminator_args': {}, 104 | # Adam Optimizer parameters for G/D 105 | 'lr_G': 1e-4, 106 | 'betas_G': (0.5, 0.9), 107 | 'lr_D': 1e-4, 108 | 'betas_D': (0.5, 0.9) 109 | } 110 | 111 | param.update(training_param) 112 | GAN(**param) 113 |
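A rough shape sanity check for the convolutional generator/discriminator pair defined in GAN_Conv.py above: instantiate both networks with the same settings as the generator_args in its __main__ block and push one generated batch through the discriminator. This sketch is not part of the repository; it assumes it is run from inside the Architectures/ directory so that the sys.path.append('../') imports at the top of GAN_Conv.py resolve.

import torch
from GAN_Conv import Generator, Discriminator  # assumption: working directory is Architectures/

torch.manual_seed(0)
G = Generator(window=60, PRIOR_N=200, PRIOR_STD=10.)  # same values as generator_args above
D = Discriminator(window=60)

fake = G.generate(batchlen=8)   # expected shape: (8, 60), one 60-step series per sample
score = D(fake)                 # expected shape: (8, 1), one real/fake score per series
print(fake.shape, score.shape)

The batch size has to be larger than 1 here because both networks apply BatchNorm1d in training mode.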
-------------------------------------------------------------------------------- /Architectures/GAN_con_lin.py: -------------------------------------------------------------------------------- 1 | import torch.nn as nn 2 | import torch.nn.functional as F 3 | import torch 4 | 5 | import sys 6 | sys.path.append('../') 7 | 8 | from Helpers.utils import get_data 9 | from GAN import GAN 10 | 11 | 12 | class Generator(nn.Module): 13 | def __init__(self, window=60, PRIOR_N=10, PRIOR_STD=1., DPTH = 0,WDTH =100): 14 | super().__init__() 15 | self.PRIOR_N = PRIOR_N 16 | self.PRIOR_STD = PRIOR_STD 17 | self.window = window 18 | self.fc1 = nn.Linear(PRIOR_N, WDTH) 19 | 20 | self.hidden_layers = [] 21 | for _ in range(DPTH): 22 | self.hidden_layers.append(nn.Linear(WDTH, WDTH)) 23 | self.hidden_layers = nn.ModuleList(self.hidden_layers) 24 | 25 | self.fc2 = nn.Linear(WDTH, window) 26 | 27 | # In_channels de conv6 doit être le nombre sources de random 28 | self.conv1 = nn.Conv1d(in_channels=1, out_channels=3, 29 | kernel_size=3, padding=1) 30 | self.conv2 = nn.Conv1d(in_channels=3, out_channels=6, 31 | kernel_size=3, padding=1) 32 | self.conv3 = nn.Conv1d(in_channels=6, out_channels=3, 33 | kernel_size=3, padding=1) 34 | self.conv4 = nn.Conv1d(in_channels=3, out_channels=1, 35 | kernel_size=3, padding=1) 36 | 37 | def __call__(self, z): 38 | h = F.relu(self.fc1(z)) 39 | for hidden_layer in self.hidden_layers: 40 | h = F.relu(hidden_layer(h)) 41 | h = self.fc2(h) 42 | h = h.view(h.size()[0], 1, h.size()[-1]) 43 | h = F.leaky_relu(self.conv1(h), negative_slope=0.2) 44 | h = F.leaky_relu(self.conv2(h), negative_slope=0.2) 45 | h = F.leaky_relu(self.conv3(h), negative_slope=0.2) 46 | h = F.leaky_relu(self.conv4(h), negative_slope=0.2) 47 | h = h.view(h.size()[0], self.window) 48 | return h 49 | 50 | def generate(self, batchlen): 51 | z = torch.normal(torch.zeros(batchlen, 1, self.PRIOR_N), self.PRIOR_STD) 52 | return self.__call__(z) 53 | 54 | 55 | class Discriminator(nn.Module): 56 | def __init__(self, window): 57 | super().__init__() 58 | self.window = window 59 | self.conv1 = nn.Conv1d(in_channels=1, out_channels=3, 60 | kernel_size=3, padding=1,) 61 | self.conv2 = nn.Conv1d(in_channels=3, out_channels=6, 62 | kernel_size=3, padding=1,) 63 | self.conv3 = nn.Conv1d(in_channels=6, out_channels=3, 64 | kernel_size=3, padding=1,) 65 | self.conv4 = nn.Conv1d(in_channels=3, out_channels=1, 66 | kernel_size=3, padding=1,) 67 | self.fc1 = nn.Linear(self.window, 30) 68 | self.fc2 = nn.Linear(30, 15) 69 | self.fc3 = nn.Linear(15, 1) 70 | 71 | def __call__(self, z): 72 | 73 | h = z.view(z.size()[0], 1, z.size()[-1]) 74 | h = F.relu(self.conv1(h)) 75 | h = F.relu(self.conv2(h)) 76 | h = F.relu(self.conv3(h)) 77 | h = F.relu(self.conv4(h)) 78 | 79 | h = h.view(-1, self.window) 80 | h = F.relu(self.fc1(h)) 81 | h = F.relu(self.fc2(h)) 82 | h = self.fc3(h) 83 | return h 84 | 85 | 86 | if __name__ == '__main__': 87 | param = { 88 | 'serie': get_data('VIX.csv'), 89 | 'window': 60, 90 | 'frame': 100, 91 | 'frame_plot': 100, 92 | 'is_notebook': False, 93 | 'batchlen_plot': 10, 94 | 'Generator': Generator, 95 | 'Discriminator': Discriminator 96 | } 97 | training_param = { 98 | 'N_ITER': 2001, 99 | 'TRAIN_RATIO': 10, 100 | 'BATCHLEN': 30, 101 | # Depth and Withdraw of Hidden Layers 102 | 'generator_args': { 103 | # Random Noise used by the Generator 104 | 'PRIOR_N': 20, 105 | 'PRIOR_STD': 500., 106 | 'WDTH': 100, 107 | 'DPTH': 1 108 | }, 109 | 'discriminator_args': {}, 110 | # Adam Optimizer parameters for G/D 111 | 'lr_G': 
1e-4, 112 | 'betas_G': (0.5, 0.9), 113 | 'lr_D': 1e-4, 114 | 'betas_D': (0.5, 0.9) 115 | } 116 | 117 | param.update(training_param) 118 | GAN(**param) 119 | -------------------------------------------------------------------------------- /Architectures/Lin_GAN.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pickle 3 | 4 | import torch.nn as nn 5 | import torch.nn.functional as F 6 | import torch 7 | 8 | import sys 9 | sys.path.append('../') 10 | 11 | from Helpers.utils import get_data, load_models 12 | from GAN import GAN 13 | 14 | 15 | # Define the generator 16 | class Generator(nn.Module): 17 | def __init__(self, window, WDTH=100, PRIOR_N=10, DPTH=1, PRIOR_STD=100.): 18 | super().__init__() 19 | self.PRIOR_N = PRIOR_N 20 | self.PRIOR_STD = PRIOR_STD 21 | self.fc1 = nn.Linear(PRIOR_N, WDTH) 22 | self.hidden_layers = [] 23 | self.window = window 24 | for _ in range(DPTH): 25 | self.hidden_layers.append(nn.Linear(WDTH, WDTH)) 26 | self.hidden_layers = nn.ModuleList(self.hidden_layers) 27 | self.fc2 = nn.Linear(WDTH, window) 28 | self.bn = nn.BatchNorm1d(WDTH) 29 | self.bn_out = nn.BatchNorm1d(window) 30 | 31 | def __call__(self, z): 32 | h = F.relu(self.bn(self.fc1(z))) 33 | for hidden_layer in self.hidden_layers: 34 | h = F.relu(self.bn(hidden_layer(h))) 35 | return self.fc2(h) 36 | # return self.bn_out(self.fc2(h)) 37 | 38 | def generate(self, batchlen): 39 | z = torch.normal(torch.zeros(batchlen, self.PRIOR_N), self.PRIOR_STD) 40 | return self.__call__(z) 41 | 42 | 43 | # Define the discriminator. 44 | class Discriminator(nn.Module): 45 | def __init__(self, window, WDTH=0, DPTH=0): 46 | super().__init__() 47 | self.fc1 = nn.Linear(window, WDTH) 48 | self.hidden_layers = [] 49 | for _ in range(DPTH): 50 | self.hidden_layers.append(nn.Linear(WDTH, WDTH)) 51 | self.hidden_layers = nn.ModuleList(self.hidden_layers) 52 | self.fc2 = nn.Linear(WDTH, 1) 53 | 54 | def __call__(self, x): 55 | h = F.relu(self.fc1(x)) 56 | for hidden_layer in self.hidden_layers: 57 | h = F.relu(hidden_layer(h)) 58 | return self.fc2(h) 59 | 60 | 61 | def random_xp(n_xp): 62 | window = [60, 125, 250] 63 | PRIOR_N = [1, 10, 20, 50, 100, 200] 64 | BATCHLEN = [10, 30, 50, 100] 65 | Train_ratio = [1, 3, 5, 10] 66 | PRIOR_STD = [1., 10, 20, 100] 67 | WDTH_G = [10, 50, 100, 200, 500, 1000] 68 | DPTH_G = [3, 5, 10, 15, 20] 69 | WDTH_D = [10, 50, 100, 200, 1000] 70 | DPTH_D = [1, 2, 3, 5] 71 | for i in range(n_xp): 72 | param['window'] = np.random.choice(window) 73 | param['BATCHLEN'] = np.random.choice(BATCHLEN) 74 | param['generator_args']['PRIOR_N'] = np.random.choice(PRIOR_N) 75 | param['generator_args']['PRIOR_STD'] = np.random.choice(PRIOR_STD) 76 | param['TRAIN_RATIO'] = np.random.choice(Train_ratio) 77 | param['generator_args']['WDTH'] = np.random.choice(WDTH_G) 78 | param['generator_args']['DPTH'] = np.random.choice(DPTH_G) 79 | param['discriminator_args']['WDTH'] = np.random.choice(WDTH_D) 80 | param['discriminator_args']['DPTH'] = np.random.choice(DPTH_D) 81 | param['save_name'] = 'Lin_G_'+str(int(np.random.uniform()*1e9)) 82 | print('Iteration %f' % i) 83 | print((param['window'], param['BATCHLEN'], param['TRAIN_RATIO'])) 84 | print(param['generator_args']) 85 | print(param['discriminator_args']) 86 | if param['save_model']: 87 | pickle.dump(param, open('Parameters/'+param['save_name']+'.pk', 'wb')) 88 | GAN(**param) 89 | 90 | 91 | if __name__ == '__main__': 92 | import matplotlib.pyplot as plt 93 | 94 | param = { 95 | 'serie': 
get_data('../Datasets/VIX.csv'), 96 | 'window': 60, 97 | 'frame': 20, 98 | 'frame_plot': 50, 99 | 'is_notebook': False, 100 | 'batchlen_plot': 100, 101 | 'Generator': Generator, 102 | 'Discriminator': Discriminator 103 | } 104 | training_param = { 105 | 'N_ITER': 500, 106 | 'TRAIN_RATIO': 5, 107 | 'BATCHLEN': 100, 108 | # Depth and Withdraw of Hidden Layers 109 | 'generator_args': { 110 | # Random Noise used by the Generator 111 | 'PRIOR_N': 50, 112 | 'PRIOR_STD': 10., 113 | 'WDTH': 1000, 114 | 'DPTH': 30 115 | }, 116 | 'discriminator_args': { 117 | 'WDTH': 100, 118 | 'DPTH': 5 119 | }, 120 | # Adam Optimizer parameters for G/D 121 | 'lr_G': 1e-4, 122 | 'betas_G': (0.5, 0.9), 123 | 'lr_D': 1e-4, 124 | 'betas_D': (0.5, 0.9), 125 | 'time_max': 600, 126 | 'save_model': False, 127 | 'save_name': 'Lin_G_'+str(int(np.random.uniform()*1e9)), 128 | 'plot': False 129 | } 130 | param.update(training_param) 131 | if param['save_model']: 132 | pickle.dump(param, open('Parameters/'+param['save_name']+'.pk', 'wb')) 133 | GAN(**param) 134 | 135 | # random_xp(2000) 136 | name = 'Lin_G_711663071' 137 | G, D, param_name = load_models(name, Generator, Discriminator) 138 | print(param) 139 | plt.plot(G.generate(30).detach().numpy().T) 140 | plt.show() 141 | 142 | -------------------------------------------------------------------------------- /Architectures/RGAN.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pickle 3 | 4 | import torch.nn as nn 5 | import torch 6 | 7 | import sys 8 | sys.path.append('../') 9 | 10 | from Helpers.utils import get_data 11 | from GAN import GAN 12 | 13 | 14 | # Define the generator 15 | class Generator(nn.Module): 16 | def __init__(self, window=60, PRIOR_N=10, PRIOR_STD=1., nlayers=2, hidden_size=20): 17 | super().__init__() 18 | self.window = window 19 | self.PRIOR_N = PRIOR_N 20 | self.PRIOR_STD = PRIOR_STD 21 | self.nlayers = nlayers 22 | self.hidden_size = hidden_size 23 | self.fc_in = nn.Linear(PRIOR_N, window) 24 | self.rnn = nn.LSTM(input_size=1, hidden_size=hidden_size, num_layers=nlayers, 25 | batch_first=True) 26 | self.fc_out = nn.Linear(hidden_size, 1) 27 | 28 | def __call__(self, batchlen, input): 29 | output, (hx, cx) = self.rnn(input) 30 | return self.fc_out(output)[:, :, 0] 31 | 32 | def generate(self, batchlen): 33 | input_ = torch.normal(torch.zeros(batchlen, self.PRIOR_N), self.PRIOR_STD) 34 | input_ = self.fc_in(input_).unsqueeze(2) 35 | return self.__call__(batchlen, input_) 36 | 37 | 38 | # Define the discriminator. 
39 | class Discriminator(nn.Module): 40 | def __init__(self, window=0, nlayers=1, hidden_size=1): 41 | super().__init__() 42 | self.nlayers = nlayers 43 | self.hidden_size = hidden_size 44 | self.rnn = nn.LSTM(input_size=1, hidden_size=hidden_size, num_layers=nlayers, 45 | batch_first=True) 46 | self.fc_hidden = nn.Linear(hidden_size, 1) 47 | self.fc_out = nn.Linear(window, 1) 48 | 49 | def __call__(self, x): 50 | input = x.unsqueeze(2) 51 | output, (hx, cx) = self.rnn(input) 52 | output = self.fc_hidden(output)[:, :, 0] 53 | return self.fc_out(output) 54 | # return torch.sigmoid(output) 55 | 56 | 57 | def random_xp(n_xp): 58 | window = [60, 125, 250] 59 | PRIOR_N = [60] 60 | Train_ratio = [1, 3, 5, 10, 20, 50] 61 | PRIOR_STD = [1., 10, 20, 100] 62 | N_LAYERS = [1, 2, 3, 5, 10] 63 | for i in range(n_xp): 64 | param['window'] = np.random.choice(window) 65 | param['generator_args']['PRIOR_N'] = np.random.choice(PRIOR_N) 66 | param['generator_args']['PRIOR_STD'] = np.random.choice(PRIOR_STD) 67 | param['TRAIN_RATIO'] = np.random.choice(Train_ratio) 68 | param['generator_args']['nlayers'] = np.random.choice(N_LAYERS) 69 | param['discriminator_args']['nlayers'] = np.random.choice(N_LAYERS) 70 | param['save_name'] = 'RGAN_'+str(int(np.random.uniform()*1e9)) 71 | print('Iteration %f' % i) 72 | print((param['window'], param['TRAIN_RATIO'])) 73 | print(param['generator_args']) 74 | print(param['discriminator_args']) 75 | if param['save_model']: 76 | pickle.dump(param, open('Parameters/'+param['save_name']+'.pk', 'wb')) 77 | GAN(**param) 78 | 79 | 80 | if __name__ == '__main__': 81 | param = { 82 | 'serie': get_data('VIX.csv'), 83 | 'window': 60, 84 | 'frame': 10, 85 | 'frame_plot': 100, 86 | 'is_notebook': False, 87 | 'batchlen_plot': 5, 88 | 'Generator': Generator, 89 | 'Discriminator': Discriminator, 90 | 'BATCHLEN': 50 91 | } 92 | training_param = { 93 | 'N_ITER': 1001, 94 | 'TRAIN_RATIO': 5, 95 | # Random Noise used by the Generator 96 | 'generator_args': { 97 | 'PRIOR_N': 300, 98 | 'PRIOR_STD': 10, 99 | 'nlayers': 1, 100 | 'hidden_size': 50 101 | }, 102 | # Depth and Withdraw of Hidden Layers 103 | 'discriminator_args': { 104 | 'nlayers': 1, 105 | 'hidden_size': 10 106 | }, 107 | # Adam Optimizer parameters for G/D 108 | 'lr_G': 1e-4, 109 | 'betas_G': (0.5, 0.9), 110 | 'lr_D': 1e-4, 111 | 'betas_D': (0.5, 0.9), 112 | # 'loss': utils.negative_cross_entropy, 113 | # 'argloss_real': torch.ones(param['BATCHLEN'], dtype=torch.int64), 114 | # 'argloss_fake': torch.zeros(param['BATCHLEN'], dtype=torch.int64), 115 | # 'argloss_gen': torch.ones(param['BATCHLEN'], dtype=torch.int64), 116 | 'save_model': True, 117 | 'save_name': 'RGAN_'+str(int(np.random.uniform()*1e9)), 118 | 'plot': False, 119 | 'time_max': 7200 120 | } 121 | 122 | param.update(training_param) 123 | 124 | # random_xp(100) 125 | if param['save_model']: 126 | pickle.dump(param, open('Parameters/'+param['save_name']+'.pk', 'wb')) 127 | GAN(**param) 128 | 129 | -------------------------------------------------------------------------------- /Architectures/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DBaudry/Generating_Financial_Time_Series_with_GAN/d00a5199996d3fed88efdc19218209d04719140a/Architectures/__init__.py -------------------------------------------------------------------------------- /Datasets/VIX.csv: -------------------------------------------------------------------------------- 1 | DATE,VIX 2 | 31/03/2014,13.88 3 | 01/04/2014,13.1 4 | 
02/04/2014,13.09 5 | 03/04/2014,13.37 6 | 04/04/2014,13.96 7 | 07/04/2014,15.57 8 | 08/04/2014,14.89 9 | 09/04/2014,13.82 10 | 10/04/2014,15.89 11 | 11/04/2014,17.03 12 | 14/04/2014,16.11 13 | 15/04/2014,15.61 14 | 16/04/2014,14.18 15 | 17/04/2014,13.36 16 | 21/04/2014,13.25 17 | 22/04/2014,13.19 18 | 23/04/2014,13.27 19 | 24/04/2014,13.32 20 | 25/04/2014,14.06 21 | 28/04/2014,13.97 22 | 29/04/2014,13.71 23 | 30/04/2014,13.41 24 | 01/05/2014,13.25 25 | 02/05/2014,12.91 26 | 05/05/2014,13.29 27 | 06/05/2014,13.8 28 | 07/05/2014,13.4 29 | 08/05/2014,13.43 30 | 09/05/2014,12.92 31 | 12/05/2014,12.23 32 | 13/05/2014,12.13 33 | 14/05/2014,12.17 34 | 15/05/2014,13.17 35 | 16/05/2014,12.44 36 | 19/05/2014,12.42 37 | 20/05/2014,12.96 38 | 21/05/2014,11.91 39 | 22/05/2014,12.03 40 | 23/05/2014,11.36 41 | 27/05/2014,11.51 42 | 28/05/2014,11.68 43 | 29/05/2014,11.57 44 | 30/05/2014,11.4 45 | 02/06/2014,11.58 46 | 03/06/2014,11.87 47 | 04/06/2014,12.08 48 | 05/06/2014,11.68 49 | 06/06/2014,10.73 50 | 09/06/2014,11.15 51 | 10/06/2014,10.99 52 | 11/06/2014,11.6 53 | 12/06/2014,12.56 54 | 13/06/2014,12.18 55 | 16/06/2014,12.65 56 | 17/06/2014,12.06 57 | 18/06/2014,10.61 58 | 19/06/2014,10.62 59 | 20/06/2014,10.85 60 | 23/06/2014,10.98 61 | 24/06/2014,12.13 62 | 25/06/2014,11.59 63 | 26/06/2014,11.63 64 | 27/06/2014,11.26 65 | 30/06/2014,11.57 66 | 01/07/2014,11.15 67 | 02/07/2014,10.82 68 | 03/07/2014,10.32 69 | 07/07/2014,11.33 70 | 08/07/2014,11.98 71 | 09/07/2014,11.65 72 | 10/07/2014,12.59 73 | 11/07/2014,12.08 74 | 14/07/2014,11.82 75 | 15/07/2014,11.96 76 | 16/07/2014,11 77 | 17/07/2014,14.54 78 | 18/07/2014,12.06 79 | 21/07/2014,12.81 80 | 22/07/2014,12.24 81 | 23/07/2014,11.52 82 | 24/07/2014,11.84 83 | 25/07/2014,12.69 84 | 28/07/2014,12.56 85 | 29/07/2014,13.28 86 | 30/07/2014,13.33 87 | 31/07/2014,16.95 88 | 01/08/2014,17.03 89 | 04/08/2014,15.12 90 | 05/08/2014,16.87 91 | 06/08/2014,16.37 92 | 07/08/2014,16.66 93 | 08/08/2014,15.77 94 | 11/08/2014,14.23 95 | 12/08/2014,14.13 96 | 13/08/2014,12.9 97 | 14/08/2014,12.42 98 | 15/08/2014,13.15 99 | 18/08/2014,12.32 100 | 19/08/2014,12.21 101 | 20/08/2014,11.78 102 | 21/08/2014,11.76 103 | 22/08/2014,11.47 104 | 25/08/2014,11.7 105 | 26/08/2014,11.63 106 | 27/08/2014,11.78 107 | 28/08/2014,12.05 108 | 29/08/2014,11.98 109 | 02/09/2014,12.25 110 | 03/09/2014,12.36 111 | 04/09/2014,12.64 112 | 05/09/2014,12.09 113 | 08/09/2014,12.66 114 | 09/09/2014,13.5 115 | 10/09/2014,12.88 116 | 11/09/2014,12.8 117 | 12/09/2014,13.31 118 | 15/09/2014,14.12 119 | 16/09/2014,12.73 120 | 17/09/2014,12.65 121 | 18/09/2014,12.03 122 | 19/09/2014,12.11 123 | 22/09/2014,13.69 124 | 23/09/2014,14.93 125 | 24/09/2014,13.27 126 | 25/09/2014,15.64 127 | 26/09/2014,14.85 128 | 29/09/2014,15.98 129 | 30/09/2014,16.31 130 | 01/10/2014,16.71 131 | 02/10/2014,16.16 132 | 03/10/2014,14.55 133 | 06/10/2014,15.46 134 | 07/10/2014,17.2 135 | 08/10/2014,15.11 136 | 09/10/2014,18.76 137 | 10/10/2014,21.24 138 | 13/10/2014,24.64 139 | 14/10/2014,22.79 140 | 15/10/2014,26.25 141 | 16/10/2014,25.2 142 | 17/10/2014,21.99 143 | 20/10/2014,18.57 144 | 21/10/2014,16.08 145 | 22/10/2014,17.87 146 | 23/10/2014,16.53 147 | 24/10/2014,16.11 148 | 27/10/2014,16.04 149 | 28/10/2014,14.39 150 | 29/10/2014,15.15 151 | 30/10/2014,14.52 152 | 31/10/2014,14.03 153 | 03/11/2014,14.73 154 | 04/11/2014,14.89 155 | 05/11/2014,14.17 156 | 06/11/2014,13.67 157 | 07/11/2014,13.12 158 | 10/11/2014,12.67 159 | 11/11/2014,12.92 160 | 12/11/2014,13.02 161 | 13/11/2014,13.79 162 | 14/11/2014,13.31 163 | 
17/11/2014,13.99 164 | 18/11/2014,13.86 165 | 19/11/2014,13.96 166 | 20/11/2014,13.58 167 | 21/11/2014,12.9 168 | 24/11/2014,12.62 169 | 25/11/2014,12.25 170 | 26/11/2014,12.07 171 | 28/11/2014,13.33 172 | 01/12/2014,14.29 173 | 02/12/2014,12.85 174 | 03/12/2014,12.47 175 | 04/12/2014,12.38 176 | 05/12/2014,11.82 177 | 08/12/2014,14.21 178 | 09/12/2014,14.89 179 | 10/12/2014,18.53 180 | 11/12/2014,20.08 181 | 12/12/2014,21.08 182 | 15/12/2014,20.42 183 | 16/12/2014,23.57 184 | 17/12/2014,19.44 185 | 18/12/2014,16.81 186 | 19/12/2014,16.49 187 | 22/12/2014,15.25 188 | 23/12/2014,14.8 189 | 24/12/2014,14.37 190 | 26/12/2014,14.5 191 | 29/12/2014,15.06 192 | 30/12/2014,15.92 193 | 31/12/2014,19.2 194 | 02/01/2015,17.79 195 | 05/01/2015,19.92 196 | 06/01/2015,21.12 197 | 07/01/2015,19.31 198 | 08/01/2015,17.01 199 | 09/01/2015,17.55 200 | 12/01/2015,19.6 201 | 13/01/2015,20.56 202 | 14/01/2015,21.48 203 | 15/01/2015,22.39 204 | 16/01/2015,20.95 205 | 20/01/2015,19.89 206 | 21/01/2015,18.85 207 | 22/01/2015,16.4 208 | 23/01/2015,16.66 209 | 26/01/2015,15.52 210 | 27/01/2015,17.22 211 | 28/01/2015,20.44 212 | 29/01/2015,18.76 213 | 30/01/2015,20.97 214 | 02/02/2015,19.43 215 | 03/02/2015,17.33 216 | 04/02/2015,18.33 217 | 05/02/2015,16.85 218 | 06/02/2015,17.29 219 | 09/02/2015,18.55 220 | 10/02/2015,17.23 221 | 11/02/2015,16.96 222 | 12/02/2015,15.34 223 | 13/02/2015,14.69 224 | 17/02/2015,15.8 225 | 18/02/2015,15.45 226 | 19/02/2015,15.29 227 | 20/02/2015,14.3 228 | 23/02/2015,14.56 229 | 24/02/2015,13.69 230 | 25/02/2015,13.84 231 | 26/02/2015,13.91 232 | 27/02/2015,13.34 233 | 02/03/2015,13.04 234 | 03/03/2015,13.86 235 | 04/03/2015,14.23 236 | 05/03/2015,14.04 237 | 06/03/2015,15.2 238 | 09/03/2015,15.06 239 | 10/03/2015,16.69 240 | 11/03/2015,16.87 241 | 12/03/2015,15.42 242 | 13/03/2015,16 243 | 16/03/2015,15.61 244 | 17/03/2015,15.66 245 | 18/03/2015,13.97 246 | 19/03/2015,14.07 247 | 20/03/2015,13.02 248 | 23/03/2015,13.41 249 | 24/03/2015,13.62 250 | 25/03/2015,15.44 251 | 26/03/2015,15.8 252 | 27/03/2015,15.07 253 | 30/03/2015,14.51 254 | 31/03/2015,15.29 255 | 01/04/2015,15.11 256 | 02/04/2015,14.67 257 | 06/04/2015,14.74 258 | 07/04/2015,14.78 259 | 08/04/2015,13.98 260 | 09/04/2015,13.09 261 | 10/04/2015,12.58 262 | 13/04/2015,13.94 263 | 14/04/2015,13.67 264 | 15/04/2015,12.84 265 | 16/04/2015,12.6 266 | 17/04/2015,13.89 267 | 20/04/2015,13.3 268 | 21/04/2015,13.25 269 | 22/04/2015,12.71 270 | 23/04/2015,12.48 271 | 24/04/2015,12.29 272 | 27/04/2015,13.12 273 | 28/04/2015,12.41 274 | 29/04/2015,13.39 275 | 30/04/2015,14.55 276 | 01/05/2015,12.7 277 | 04/05/2015,12.85 278 | 05/05/2015,14.31 279 | 06/05/2015,15.15 280 | 07/05/2015,15.13 281 | 08/05/2015,12.86 282 | 11/05/2015,13.85 283 | 12/05/2015,13.86 284 | 13/05/2015,13.76 285 | 14/05/2015,12.74 286 | 15/05/2015,12.38 287 | 18/05/2015,12.73 288 | 19/05/2015,12.85 289 | 20/05/2015,12.88 290 | 21/05/2015,12.11 291 | 22/05/2015,12.13 292 | 26/05/2015,14.06 293 | 27/05/2015,13.27 294 | 28/05/2015,13.31 295 | 29/05/2015,13.84 296 | 01/06/2015,13.97 297 | 02/06/2015,14.24 298 | 03/06/2015,13.66 299 | 04/06/2015,14.71 300 | 05/06/2015,14.21 301 | 08/06/2015,15.29 302 | 09/06/2015,14.47 303 | 10/06/2015,13.22 304 | 11/06/2015,12.85 305 | 12/06/2015,13.78 306 | 15/06/2015,15.39 307 | 16/06/2015,14.81 308 | 17/06/2015,14.5 309 | 18/06/2015,13.19 310 | 19/06/2015,13.96 311 | 22/06/2015,12.74 312 | 23/06/2015,12.11 313 | 24/06/2015,13.26 314 | 25/06/2015,14.01 315 | 26/06/2015,14.02 316 | 29/06/2015,18.85 317 | 30/06/2015,18.23 318 | 
01/07/2015,16.09 319 | 02/07/2015,16.79 320 | 06/07/2015,17.01 321 | 07/07/2015,16.09 322 | 08/07/2015,19.66 323 | 09/07/2015,19.97 324 | 10/07/2015,16.83 325 | 13/07/2015,13.9 326 | 14/07/2015,13.37 327 | 15/07/2015,13.23 328 | 16/07/2015,12.11 329 | 17/07/2015,11.95 330 | 20/07/2015,12.25 331 | 21/07/2015,12.22 332 | 22/07/2015,12.12 333 | 23/07/2015,12.64 334 | 24/07/2015,13.74 335 | 27/07/2015,15.6 336 | 28/07/2015,13.44 337 | 29/07/2015,12.5 338 | 30/07/2015,12.13 339 | 31/07/2015,12.12 340 | 03/08/2015,12.56 341 | 04/08/2015,13 342 | 05/08/2015,12.51 343 | 06/08/2015,13.77 344 | 07/08/2015,13.39 345 | 10/08/2015,12.23 346 | 11/08/2015,13.71 347 | 12/08/2015,13.61 348 | 13/08/2015,13.49 349 | 14/08/2015,12.83 350 | 17/08/2015,13.02 351 | 18/08/2015,13.79 352 | 19/08/2015,15.25 353 | 20/08/2015,19.14 354 | 21/08/2015,28.03 355 | 24/08/2015,40.74 356 | 25/08/2015,36.02 357 | 26/08/2015,30.32 358 | 27/08/2015,26.1 359 | 28/08/2015,26.05 360 | 31/08/2015,28.43 361 | 01/09/2015,31.4 362 | 02/09/2015,26.09 363 | 03/09/2015,25.61 364 | 04/09/2015,27.8 365 | 08/09/2015,24.9 366 | 09/09/2015,26.23 367 | 10/09/2015,24.37 368 | 11/09/2015,23.2 369 | 14/09/2015,24.25 370 | 15/09/2015,22.54 371 | 16/09/2015,21.35 372 | 17/09/2015,21.14 373 | 18/09/2015,22.28 374 | 21/09/2015,20.14 375 | 22/09/2015,22.44 376 | 23/09/2015,22.13 377 | 24/09/2015,23.47 378 | 25/09/2015,23.62 379 | 28/09/2015,27.63 380 | 29/09/2015,26.83 381 | 30/09/2015,24.5 382 | 01/10/2015,22.55 383 | 02/10/2015,20.94 384 | 05/10/2015,19.54 385 | 06/10/2015,19.4 386 | 07/10/2015,18.4 387 | 08/10/2015,17.42 388 | 09/10/2015,17.08 389 | 12/10/2015,16.17 390 | 13/10/2015,17.67 391 | 14/10/2015,18.03 392 | 15/10/2015,16.05 393 | 16/10/2015,15.05 394 | 19/10/2015,14.98 395 | 20/10/2015,15.75 396 | 21/10/2015,16.7 397 | 22/10/2015,14.45 398 | 23/10/2015,14.46 399 | 26/10/2015,15.29 400 | 27/10/2015,15.43 401 | 28/10/2015,14.33 402 | 29/10/2015,14.61 403 | 30/10/2015,15.07 404 | 02/11/2015,14.15 405 | 03/11/2015,14.54 406 | 04/11/2015,15.51 407 | 05/11/2015,15.05 408 | 06/11/2015,14.33 409 | 09/11/2015,16.52 410 | 10/11/2015,15.29 411 | 11/11/2015,16.06 412 | 12/11/2015,18.37 413 | 13/11/2015,20.08 414 | 16/11/2015,18.16 415 | 17/11/2015,18.84 416 | 18/11/2015,16.85 417 | 19/11/2015,16.99 418 | 20/11/2015,15.47 419 | 23/11/2015,15.62 420 | 24/11/2015,15.93 421 | 25/11/2015,15.19 422 | 27/11/2015,15.12 423 | 30/11/2015,16.13 424 | 01/12/2015,14.67 425 | 02/12/2015,15.91 426 | 03/12/2015,18.11 427 | 04/12/2015,14.81 428 | 07/12/2015,15.84 429 | 08/12/2015,17.6 430 | 09/12/2015,19.61 431 | 10/12/2015,19.34 432 | 11/12/2015,24.39 433 | 14/12/2015,22.73 434 | 15/12/2015,20.95 435 | 16/12/2015,17.86 436 | 17/12/2015,18.94 437 | 18/12/2015,20.7 438 | 21/12/2015,18.7 439 | 22/12/2015,16.6 440 | 23/12/2015,15.57 441 | 24/12/2015,15.74 442 | 28/12/2015,16.91 443 | 29/12/2015,16.08 444 | 30/12/2015,17.29 445 | 31/12/2015,18.21 446 | 04/01/2016,20.7 447 | 05/01/2016,19.34 448 | 06/01/2016,20.59 449 | 07/01/2016,24.99 450 | 08/01/2016,27.01 451 | 11/01/2016,24.3 452 | 12/01/2016,22.47 453 | 13/01/2016,25.22 454 | 14/01/2016,23.95 455 | 15/01/2016,27.02 456 | 19/01/2016,26.05 457 | 20/01/2016,27.59 458 | 21/01/2016,26.69 459 | 22/01/2016,22.34 460 | 25/01/2016,24.15 461 | 26/01/2016,22.5 462 | 27/01/2016,23.11 463 | 28/01/2016,22.42 464 | 29/01/2016,20.2 465 | 01/02/2016,19.98 466 | 02/02/2016,21.98 467 | 03/02/2016,21.65 468 | 04/02/2016,21.84 469 | 05/02/2016,23.38 470 | 08/02/2016,26 471 | 09/02/2016,26.54 472 | 10/02/2016,26.29 473 | 
11/02/2016,28.14 474 | 12/02/2016,25.4 475 | 16/02/2016,24.11 476 | 17/02/2016,22.31 477 | 18/02/2016,21.64 478 | 19/02/2016,20.53 479 | 22/02/2016,19.38 480 | 23/02/2016,20.98 481 | 24/02/2016,20.72 482 | 25/02/2016,19.11 483 | 26/02/2016,19.81 484 | 29/02/2016,20.55 485 | 01/03/2016,17.7 486 | 02/03/2016,17.09 487 | 03/03/2016,16.7 488 | 04/03/2016,16.86 489 | 07/03/2016,17.35 490 | 08/03/2016,18.67 491 | 09/03/2016,18.34 492 | 10/03/2016,18.05 493 | 11/03/2016,16.5 494 | 14/03/2016,16.92 495 | 15/03/2016,16.84 496 | 16/03/2016,14.99 497 | 17/03/2016,14.44 498 | 18/03/2016,14.02 499 | 21/03/2016,13.79 500 | 22/03/2016,14.17 501 | 23/03/2016,14.94 502 | 24/03/2016,14.74 503 | 28/03/2016,15.24 504 | 29/03/2016,13.82 505 | 30/03/2016,13.56 506 | 31/03/2016,13.95 507 | 01/04/2016,13.1 508 | 04/04/2016,14.12 509 | 05/04/2016,15.42 510 | 06/04/2016,14.09 511 | 07/04/2016,16.16 512 | 08/04/2016,15.36 513 | 11/04/2016,16.26 514 | 12/04/2016,14.85 515 | 13/04/2016,13.84 516 | 14/04/2016,13.72 517 | 15/04/2016,13.62 518 | 18/04/2016,13.35 519 | 19/04/2016,13.24 520 | 20/04/2016,13.28 521 | 21/04/2016,13.95 522 | 22/04/2016,13.22 523 | 25/04/2016,14.08 524 | 26/04/2016,13.96 525 | 27/04/2016,13.77 526 | 28/04/2016,15.22 527 | 29/04/2016,15.7 528 | 02/05/2016,14.68 529 | 03/05/2016,15.6 530 | 04/05/2016,16.05 531 | 05/05/2016,15.91 532 | 06/05/2016,14.72 533 | 09/05/2016,14.57 534 | 10/05/2016,13.63 535 | 11/05/2016,14.69 536 | 12/05/2016,14.41 537 | 13/05/2016,15.04 538 | 16/05/2016,14.68 539 | 17/05/2016,15.57 540 | 18/05/2016,15.95 541 | 19/05/2016,16.33 542 | 20/05/2016,15.2 543 | 23/05/2016,15.82 544 | 24/05/2016,14.42 545 | 25/05/2016,13.9 546 | 26/05/2016,13.43 547 | 27/05/2016,13.12 548 | 31/05/2016,14.19 549 | 01/06/2016,14.2 550 | 02/06/2016,13.63 551 | 03/06/2016,13.47 552 | 06/06/2016,13.65 553 | 07/06/2016,14.05 554 | 08/06/2016,14.08 555 | 09/06/2016,14.64 556 | 10/06/2016,17.03 557 | 13/06/2016,20.97 558 | 14/06/2016,20.5 559 | 15/06/2016,20.14 560 | 16/06/2016,19.37 561 | 17/06/2016,19.41 562 | 20/06/2016,18.37 563 | 21/06/2016,18.48 564 | 22/06/2016,21.17 565 | 23/06/2016,17.25 566 | 24/06/2016,25.76 567 | 27/06/2016,23.85 568 | 28/06/2016,18.75 569 | 29/06/2016,16.64 570 | 30/06/2016,15.63 571 | 01/07/2016,14.77 572 | 05/07/2016,15.58 573 | 06/07/2016,14.96 574 | 07/07/2016,14.76 575 | 08/07/2016,13.2 576 | 11/07/2016,13.54 577 | 12/07/2016,13.55 578 | 13/07/2016,13.04 579 | 14/07/2016,12.82 580 | 15/07/2016,12.67 581 | 18/07/2016,12.44 582 | 19/07/2016,11.97 583 | 20/07/2016,11.77 584 | 21/07/2016,12.74 585 | 22/07/2016,12.02 586 | 25/07/2016,12.87 587 | 26/07/2016,13.05 588 | 27/07/2016,12.83 589 | 28/07/2016,12.72 590 | 29/07/2016,11.87 591 | 01/08/2016,12.44 592 | 02/08/2016,13.37 593 | 03/08/2016,12.86 594 | 04/08/2016,12.42 595 | 05/08/2016,11.39 596 | 08/08/2016,11.5 597 | 09/08/2016,11.66 598 | 10/08/2016,12.05 599 | 11/08/2016,11.68 600 | 12/08/2016,11.55 601 | 15/08/2016,11.81 602 | 16/08/2016,12.64 603 | 17/08/2016,12.19 604 | 18/08/2016,11.43 605 | 19/08/2016,11.34 606 | 22/08/2016,12.27 607 | 23/08/2016,12.38 608 | 24/08/2016,13.45 609 | 25/08/2016,13.63 610 | 26/08/2016,13.65 611 | 29/08/2016,12.94 612 | 30/08/2016,13.12 613 | 31/08/2016,13.42 614 | 01/09/2016,13.48 615 | 02/09/2016,11.98 616 | 06/09/2016,12.02 617 | 07/09/2016,11.94 618 | 08/09/2016,12.51 619 | 09/09/2016,17.5 620 | 12/09/2016,15.16 621 | 13/09/2016,17.85 622 | 14/09/2016,18.14 623 | 15/09/2016,16.3 624 | 16/09/2016,15.37 625 | 19/09/2016,15.53 626 | 20/09/2016,15.92 627 | 21/09/2016,13.3 628 | 
22/09/2016,12.02 629 | 23/09/2016,12.29 630 | 26/09/2016,14.5 631 | 27/09/2016,13.1 632 | 28/09/2016,12.39 633 | 29/09/2016,14.02 634 | 30/09/2016,13.29 635 | 03/10/2016,13.57 636 | 04/10/2016,13.63 637 | 05/10/2016,12.99 638 | 06/10/2016,12.84 639 | 07/10/2016,13.48 640 | 10/10/2016,13.38 641 | 11/10/2016,15.36 642 | 12/10/2016,15.91 643 | 13/10/2016,16.69 644 | 14/10/2016,16.12 645 | 17/10/2016,16.21 646 | 18/10/2016,15.28 647 | 19/10/2016,14.41 648 | 20/10/2016,13.75 649 | 21/10/2016,13.34 650 | 24/10/2016,13.02 651 | 25/10/2016,13.46 652 | 26/10/2016,14.24 653 | 27/10/2016,15.36 654 | 28/10/2016,16.19 655 | 31/10/2016,17.06 656 | 01/11/2016,18.56 657 | 02/11/2016,19.32 658 | 03/11/2016,22.08 659 | 04/11/2016,22.51 660 | 07/11/2016,18.71 661 | 08/11/2016,18.74 662 | 09/11/2016,14.38 663 | 10/11/2016,14.74 664 | 11/11/2016,14.17 665 | 14/11/2016,14.48 666 | 15/11/2016,13.37 667 | 16/11/2016,13.72 668 | 17/11/2016,13.35 669 | 18/11/2016,12.85 670 | 21/11/2016,12.42 671 | 22/11/2016,12.41 672 | 23/11/2016,12.43 673 | 25/11/2016,12.34 674 | 28/11/2016,13.15 675 | 29/11/2016,12.9 676 | 30/11/2016,13.33 677 | 01/12/2016,14.07 678 | 02/12/2016,14.12 679 | 05/12/2016,12.14 680 | 06/12/2016,11.79 681 | 07/12/2016,12.22 682 | 08/12/2016,12.64 683 | 09/12/2016,11.75 684 | 12/12/2016,12.64 685 | 13/12/2016,12.72 686 | 14/12/2016,13.19 687 | 15/12/2016,12.79 688 | 16/12/2016,12.2 689 | 19/12/2016,11.71 690 | 20/12/2016,11.45 691 | 21/12/2016,11.27 692 | 22/12/2016,11.43 693 | 23/12/2016,11.44 694 | 27/12/2016,11.99 695 | 28/12/2016,12.95 696 | 29/12/2016,13.37 697 | 30/12/2016,14.04 698 | 03/01/2017,12.85 699 | 04/01/2017,11.85 700 | 05/01/2017,11.67 701 | 06/01/2017,11.32 702 | 09/01/2017,11.56 703 | 10/01/2017,11.49 704 | 11/01/2017,11.26 705 | 12/01/2017,11.54 706 | 13/01/2017,11.23 707 | 17/01/2017,11.87 708 | 18/01/2017,12.48 709 | 19/01/2017,12.78 710 | 20/01/2017,11.54 711 | 23/01/2017,11.77 712 | 24/01/2017,11.07 713 | 25/01/2017,10.81 714 | 26/01/2017,10.63 715 | 27/01/2017,10.58 716 | 30/01/2017,11.88 717 | 31/01/2017,11.99 718 | 01/02/2017,11.81 719 | 02/02/2017,11.93 720 | 03/02/2017,10.97 721 | 06/02/2017,11.37 722 | 07/02/2017,11.29 723 | 08/02/2017,11.45 724 | 09/02/2017,10.88 725 | 10/02/2017,10.85 726 | 13/02/2017,11.07 727 | 14/02/2017,10.74 728 | 15/02/2017,11.97 729 | 16/02/2017,11.76 730 | 17/02/2017,11.49 731 | 21/02/2017,11.57 732 | 22/02/2017,11.74 733 | 23/02/2017,11.71 734 | 24/02/2017,11.47 735 | 27/02/2017,12.09 736 | 28/02/2017,12.92 737 | 01/03/2017,12.54 738 | 02/03/2017,11.81 739 | 03/03/2017,10.96 740 | 06/03/2017,11.24 741 | 07/03/2017,11.45 742 | 08/03/2017,11.86 743 | 09/03/2017,12.3 744 | 10/03/2017,11.66 745 | 13/03/2017,11.35 746 | 14/03/2017,12.3 747 | 15/03/2017,11.63 748 | 16/03/2017,11.21 749 | 17/03/2017,11.28 750 | 20/03/2017,11.34 751 | 21/03/2017,12.47 752 | 22/03/2017,12.81 753 | 23/03/2017,13.12 754 | 24/03/2017,12.96 755 | 27/03/2017,12.5 756 | 28/03/2017,11.53 757 | 29/03/2017,11.42 758 | 30/03/2017,11.54 759 | 31/03/2017,12.37 760 | 03/04/2017,12.38 761 | 04/04/2017,11.79 762 | 05/04/2017,12.89 763 | 06/04/2017,12.39 764 | 07/04/2017,12.87 765 | 10/04/2017,14.05 766 | 11/04/2017,15.07 767 | 12/04/2017,15.77 768 | 13/04/2017,15.96 769 | 17/04/2017,14.66 770 | 18/04/2017,14.42 771 | 19/04/2017,14.93 772 | 20/04/2017,14.15 773 | 21/04/2017,14.63 774 | 24/04/2017,10.84 775 | 25/04/2017,10.76 776 | 26/04/2017,10.85 777 | 27/04/2017,10.36 778 | 28/04/2017,10.82 779 | 01/05/2017,10.11 780 | 02/05/2017,10.59 781 | 03/05/2017,10.68 782 | 04/05/2017,10.46 
783 | 05/05/2017,10.57 784 | 08/05/2017,9.77 785 | 09/05/2017,9.96 786 | 10/05/2017,10.21 787 | 11/05/2017,10.6 788 | 12/05/2017,10.4 789 | 15/05/2017,10.42 790 | 16/05/2017,10.65 791 | 17/05/2017,15.59 792 | 18/05/2017,14.66 793 | 19/05/2017,12.04 794 | 22/05/2017,10.93 795 | 23/05/2017,10.72 796 | 24/05/2017,10.02 797 | 25/05/2017,9.99 798 | 26/05/2017,9.81 799 | 30/05/2017,10.38 800 | 31/05/2017,10.41 801 | 01/06/2017,9.89 802 | 02/06/2017,9.75 803 | 05/06/2017,10.07 804 | 06/06/2017,10.45 805 | 07/06/2017,10.39 806 | 08/06/2017,10.16 807 | 09/06/2017,10.7 808 | 12/06/2017,11.46 809 | 13/06/2017,10.42 810 | 14/06/2017,10.64 811 | 15/06/2017,10.9 812 | 16/06/2017,10.38 813 | 19/06/2017,10.37 814 | 20/06/2017,10.86 815 | 21/06/2017,10.75 816 | 22/06/2017,10.48 817 | 23/06/2017,10.02 818 | 26/06/2017,9.9 819 | 27/06/2017,11.06 820 | 28/06/2017,10.03 821 | 29/06/2017,11.44 822 | 30/06/2017,11.18 823 | 03/07/2017,11.22 824 | 05/07/2017,11.07 825 | 06/07/2017,12.54 826 | 07/07/2017,11.19 827 | 10/07/2017,11.11 828 | 11/07/2017,10.89 829 | 12/07/2017,10.3 830 | 13/07/2017,9.9 831 | 14/07/2017,9.51 832 | 17/07/2017,9.82 833 | 18/07/2017,9.89 834 | 19/07/2017,9.79 835 | 20/07/2017,9.58 836 | 21/07/2017,9.36 837 | 24/07/2017,9.43 838 | 25/07/2017,9.43 839 | 26/07/2017,9.6 840 | 27/07/2017,10.11 841 | 28/07/2017,10.29 842 | 31/07/2017,10.26 843 | 01/08/2017,10.09 844 | 02/08/2017,10.28 845 | 03/08/2017,10.44 846 | 04/08/2017,10.03 847 | 07/08/2017,9.93 848 | 08/08/2017,10.96 849 | 09/08/2017,11.11 850 | 10/08/2017,16.04 851 | 11/08/2017,15.51 852 | 14/08/2017,12.33 853 | 15/08/2017,12.04 854 | 16/08/2017,11.74 855 | 17/08/2017,15.55 856 | 18/08/2017,14.26 857 | 21/08/2017,13.19 858 | 22/08/2017,11.35 859 | 23/08/2017,12.25 860 | 24/08/2017,12.23 861 | 25/08/2017,11.28 862 | 28/08/2017,11.32 863 | 29/08/2017,11.7 864 | 30/08/2017,11.22 865 | 31/08/2017,10.59 866 | 01/09/2017,10.13 867 | 05/09/2017,12.23 868 | 06/09/2017,11.63 869 | 07/09/2017,11.55 870 | 08/09/2017,12.12 871 | 11/09/2017,10.73 872 | 12/09/2017,10.58 873 | 13/09/2017,10.5 874 | 14/09/2017,10.44 875 | 15/09/2017,10.17 876 | 18/09/2017,10.15 877 | 19/09/2017,10.18 878 | 20/09/2017,9.78 879 | 21/09/2017,9.67 880 | 22/09/2017,9.59 881 | 25/09/2017,10.21 882 | 26/09/2017,10.17 883 | 27/09/2017,9.87 884 | 28/09/2017,9.55 885 | 29/09/2017,9.51 886 | 02/10/2017,9.45 887 | 03/10/2017,9.51 888 | 04/10/2017,9.63 889 | 05/10/2017,9.19 890 | 06/10/2017,9.65 891 | 09/10/2017,10.33 892 | 10/10/2017,10.08 893 | 11/10/2017,9.85 894 | 12/10/2017,9.91 895 | 13/10/2017,9.61 896 | 16/10/2017,9.91 897 | 17/10/2017,10.31 898 | 18/10/2017,10.07 899 | 19/10/2017,10.05 900 | 20/10/2017,9.97 901 | 23/10/2017,11.07 902 | 24/10/2017,11.16 903 | 25/10/2017,11.23 904 | 26/10/2017,11.3 905 | 27/10/2017,9.8 906 | 30/10/2017,10.5 907 | 31/10/2017,10.18 908 | 01/11/2017,10.2 909 | 02/11/2017,9.93 910 | 03/11/2017,9.14 911 | 06/11/2017,9.4 912 | 07/11/2017,9.89 913 | 08/11/2017,9.78 914 | 09/11/2017,10.5 915 | 10/11/2017,11.29 916 | 13/11/2017,11.5 917 | 14/11/2017,11.59 918 | 15/11/2017,13.13 919 | 16/11/2017,11.76 920 | 17/11/2017,11.43 921 | 20/11/2017,10.65 922 | 21/11/2017,9.73 923 | 22/11/2017,9.88 924 | 24/11/2017,9.67 925 | 27/11/2017,9.87 926 | 28/11/2017,10.03 927 | 29/11/2017,10.7 928 | 30/11/2017,11.28 929 | 01/12/2017,11.43 930 | 04/12/2017,11.68 931 | 05/12/2017,11.33 932 | 06/12/2017,11.02 933 | 07/12/2017,10.16 934 | 08/12/2017,9.58 935 | 11/12/2017,9.34 936 | 12/12/2017,9.92 937 | 13/12/2017,10.18 938 | 14/12/2017,10.49 939 | 15/12/2017,9.42 940 | 
18/12/2017,9.53 941 | 19/12/2017,10.03 942 | 20/12/2017,9.72 943 | 21/12/2017,9.62 944 | 22/12/2017,9.9 945 | 26/12/2017,10.25 946 | 27/12/2017,10.47 947 | 28/12/2017,10.18 948 | 29/12/2017,11.04 949 | 02/01/2018,9.77 950 | 03/01/2018,9.15 951 | 04/01/2018,9.22 952 | 05/01/2018,9.22 953 | 08/01/2018,9.52 954 | 09/01/2018,10.08 955 | 10/01/2018,9.82 956 | 11/01/2018,9.88 957 | 12/01/2018,10.16 958 | 16/01/2018,11.66 959 | 17/01/2018,11.91 960 | 18/01/2018,12.22 961 | 19/01/2018,11.27 962 | 22/01/2018,11.03 963 | 23/01/2018,11.1 964 | 24/01/2018,11.47 965 | 25/01/2018,11.58 966 | 26/01/2018,11.08 967 | 29/01/2018,13.84 968 | 30/01/2018,14.79 969 | 31/01/2018,13.54 970 | 01/02/2018,13.47 971 | 02/02/2018,17.31 972 | 05/02/2018,37.32 973 | 06/02/2018,29.98 974 | 07/02/2018,27.73 975 | 08/02/2018,33.46 976 | 09/02/2018,29.06 977 | 12/02/2018,25.61 978 | 13/02/2018,24.97 979 | 14/02/2018,19.26 980 | 15/02/2018,19.13 981 | 16/02/2018,19.46 982 | 20/02/2018,20.6 983 | 21/02/2018,20.02 984 | 22/02/2018,18.72 985 | 23/02/2018,16.49 986 | 26/02/2018,15.8 987 | 27/02/2018,18.59 988 | 28/02/2018,19.85 989 | 01/03/2018,22.47 990 | 02/03/2018,19.59 991 | 05/03/2018,18.73 992 | 06/03/2018,18.36 993 | 07/03/2018,17.76 994 | 08/03/2018,16.54 995 | 09/03/2018,14.64 996 | 12/03/2018,15.78 997 | 13/03/2018,16.35 998 | 14/03/2018,17.23 999 | 15/03/2018,16.59 1000 | 16/03/2018,15.8 1001 | 19/03/2018,19.02 1002 | 20/03/2018,18.2 1003 | 21/03/2018,17.86 1004 | 22/03/2018,23.34 1005 | 23/03/2018,24.87 1006 | 26/03/2018,21.03 1007 | 27/03/2018,22.5 1008 | 28/03/2018,22.87 1009 | 29/03/2018,19.97 1010 | 02/04/2018,23.62 1011 | 03/04/2018,21.1 1012 | 04/04/2018,20.06 1013 | 05/04/2018,18.94 1014 | 06/04/2018,21.49 1015 | 09/04/2018,21.77 1016 | 10/04/2018,20.47 1017 | 11/04/2018,20.24 1018 | 12/04/2018,18.49 1019 | 13/04/2018,17.41 1020 | 16/04/2018,16.56 1021 | 17/04/2018,15.25 1022 | 18/04/2018,15.6 1023 | 19/04/2018,15.96 1024 | 20/04/2018,16.88 1025 | 23/04/2018,16.34 1026 | 24/04/2018,18.02 1027 | 25/04/2018,17.84 1028 | 26/04/2018,16.24 1029 | 27/04/2018,15.41 1030 | 30/04/2018,15.93 1031 | 01/05/2018,15.49 1032 | 02/05/2018,15.97 1033 | 03/05/2018,15.9 1034 | 04/05/2018,14.77 1035 | 07/05/2018,14.75 1036 | 08/05/2018,14.71 1037 | 09/05/2018,13.42 1038 | 10/05/2018,13.23 1039 | 11/05/2018,12.65 1040 | 14/05/2018,12.93 1041 | 15/05/2018,14.63 1042 | 16/05/2018,13.42 1043 | 17/05/2018,13.43 1044 | 18/05/2018,13.42 1045 | 21/05/2018,13.08 1046 | 22/05/2018,13.22 1047 | 23/05/2018,12.58 1048 | 24/05/2018,12.53 1049 | 25/05/2018,13.22 1050 | 29/05/2018,17.02 1051 | 30/05/2018,14.94 1052 | 31/05/2018,15.43 1053 | 01/06/2018,13.46 1054 | 04/06/2018,12.74 1055 | 05/06/2018,12.4 1056 | 06/06/2018,11.64 1057 | 07/06/2018,12.13 1058 | 08/06/2018,12.18 1059 | 11/06/2018,12.35 1060 | 12/06/2018,12.34 1061 | 13/06/2018,12.94 1062 | 14/06/2018,12.12 1063 | 15/06/2018,11.98 1064 | 18/06/2018,12.31 1065 | 19/06/2018,13.35 1066 | 20/06/2018,12.79 1067 | 21/06/2018,14.64 1068 | 22/06/2018,13.77 1069 | 25/06/2018,17.33 1070 | 26/06/2018,15.92 1071 | 27/06/2018,17.91 1072 | 28/06/2018,16.85 1073 | 29/06/2018,16.09 1074 | 02/07/2018,15.6 1075 | 03/07/2018,16.14 1076 | 05/07/2018,14.97 1077 | 06/07/2018,13.37 1078 | 09/07/2018,12.69 1079 | 10/07/2018,12.64 1080 | 11/07/2018,13.63 1081 | 12/07/2018,12.58 1082 | 13/07/2018,12.18 1083 | 16/07/2018,12.83 1084 | 17/07/2018,12.06 1085 | 18/07/2018,12.1 1086 | 19/07/2018,12.87 1087 | 20/07/2018,12.86 1088 | 23/07/2018,12.62 1089 | 24/07/2018,12.41 1090 | 25/07/2018,12.29 1091 | 
26/07/2018,12.14 1092 | 27/07/2018,13.03 1093 | 30/07/2018,14.26 1094 | 31/07/2018,12.83 1095 | 01/08/2018,13.15 1096 | 02/08/2018,12.19 1097 | 03/08/2018,11.64 1098 | 06/08/2018,11.27 1099 | 07/08/2018,10.93 1100 | 08/08/2018,10.85 1101 | 09/08/2018,11.27 1102 | 10/08/2018,13.16 1103 | 13/08/2018,14.78 1104 | 14/08/2018,13.31 1105 | 15/08/2018,14.64 1106 | 16/08/2018,13.45 1107 | 17/08/2018,12.64 1108 | 20/08/2018,12.49 1109 | 21/08/2018,12.86 1110 | 22/08/2018,12.25 1111 | 23/08/2018,12.41 1112 | 24/08/2018,11.99 1113 | 27/08/2018,12.16 1114 | 28/08/2018,12.5 1115 | 29/08/2018,12.25 1116 | 30/08/2018,13.53 1117 | 31/08/2018,12.86 1118 | 04/09/2018,13.16 1119 | 05/09/2018,13.91 1120 | 06/09/2018,14.65 1121 | 07/09/2018,14.88 1122 | 10/09/2018,14.16 1123 | 11/09/2018,13.22 1124 | 12/09/2018,13.14 1125 | 13/09/2018,12.37 1126 | 14/09/2018,12.07 1127 | 17/09/2018,13.68 1128 | 18/09/2018,12.79 1129 | 19/09/2018,11.75 1130 | 20/09/2018,11.8 1131 | 21/09/2018,11.68 1132 | 24/09/2018,12.2 1133 | 25/09/2018,12.42 1134 | 26/09/2018,12.89 1135 | 27/09/2018,12.41 1136 | 28/09/2018,12.12 1137 | 01/10/2018,12 1138 | 02/10/2018,12.05 1139 | 03/10/2018,11.61 1140 | 04/10/2018,14.22 1141 | 05/10/2018,14.82 1142 | 08/10/2018,15.69 1143 | 09/10/2018,15.95 1144 | 10/10/2018,22.96 1145 | 11/10/2018,24.98 1146 | 12/10/2018,21.31 1147 | 15/10/2018,21.3 1148 | 16/10/2018,17.62 1149 | 17/10/2018,17.4 1150 | 18/10/2018,20.06 1151 | 19/10/2018,19.89 1152 | 22/10/2018,19.64 1153 | 23/10/2018,20.71 1154 | 24/10/2018,25.23 1155 | 25/10/2018,24.22 1156 | 26/10/2018,24.16 1157 | 29/10/2018,24.7 1158 | 30/10/2018,23.35 1159 | 31/10/2018,21.23 1160 | 01/11/2018,19.34 1161 | 02/11/2018,19.51 1162 | 05/11/2018,19.96 1163 | 06/11/2018,19.91 1164 | 07/11/2018,16.36 1165 | 08/11/2018,16.72 1166 | 09/11/2018,17.36 1167 | 12/11/2018,20.45 1168 | 13/11/2018,20.02 1169 | 14/11/2018,21.25 1170 | 15/11/2018,19.98 1171 | 16/11/2018,18.14 1172 | 19/11/2018,20.1 1173 | 20/11/2018,22.48 1174 | 21/11/2018,20.8 1175 | 23/11/2018,21.52 1176 | 26/11/2018,18.9 1177 | 27/11/2018,19.02 1178 | 28/11/2018,18.49 1179 | 29/11/2018,18.79 1180 | 30/11/2018,18.07 1181 | 03/12/2018,16.44 1182 | 04/12/2018,20.74 1183 | 06/12/2018,21.19 1184 | 07/12/2018,23.23 1185 | 10/12/2018,22.64 1186 | 11/12/2018,21.76 1187 | 12/12/2018,21.46 1188 | 13/12/2018,20.65 1189 | 14/12/2018,21.63 1190 | 17/12/2018,24.52 1191 | 18/12/2018,25.58 1192 | 19/12/2018,25.58 1193 | 20/12/2018,28.38 1194 | 21/12/2018,30.11 1195 | 24/12/2018,36.07 1196 | 26/12/2018,30.41 1197 | 27/12/2018,29.96 1198 | 28/12/2018,28.34 1199 | 31/12/2018,25.42 1200 | 02/01/2019,23.22 1201 | 03/01/2019,25.45 1202 | 04/01/2019,21.38 1203 | 07/01/2019,21.4 1204 | 08/01/2019,20.47 1205 | 09/01/2019,19.98 1206 | 10/01/2019,19.5 1207 | 11/01/2019,18.19 1208 | 14/01/2019,19.07 1209 | 15/01/2019,18.6 1210 | 16/01/2019,19.04 1211 | 17/01/2019,18.06 1212 | 18/01/2019,17.8 1213 | 22/01/2019,20.8 1214 | 23/01/2019,19.52 1215 | 24/01/2019,18.89 1216 | 25/01/2019,17.42 1217 | 28/01/2019,18.87 1218 | 29/01/2019,19.13 1219 | 30/01/2019,17.66 1220 | 31/01/2019,16.57 1221 | 01/02/2019,16.14 1222 | 04/02/2019,15.73 1223 | 05/02/2019,15.57 1224 | 06/02/2019,15.38 1225 | 07/02/2019,16.37 1226 | 08/02/2019,15.72 1227 | 11/02/2019,15.97 1228 | 12/02/2019,15.43 1229 | 13/02/2019,15.65 1230 | 14/02/2019,16.22 1231 | 15/02/2019,14.91 1232 | 19/02/2019,14.88 1233 | 20/02/2019,14.02 1234 | 21/02/2019,14.46 1235 | 22/02/2019,13.51 1236 | 25/02/2019,14.85 1237 | 26/02/2019,15.17 1238 | 27/02/2019,14.7 1239 | 28/02/2019,14.78 
1240 | 01/03/2019,13.57 1241 | 04/03/2019,14.63 1242 | 05/03/2019,14.74 1243 | 06/03/2019,15.74 1244 | 07/03/2019,16.59 1245 | 08/03/2019,16.05 1246 | 11/03/2019,14.33 1247 | 12/03/2019,13.77 1248 | 13/03/2019,13.41 1249 | 14/03/2019,13.5 1250 | 15/03/2019,12.88 1251 | 18/03/2019,13.1 1252 | 19/03/2019,13.56 1253 | 20/03/2019,13.91 1254 | 21/03/2019,13.63 1255 | 22/03/2019,16.48 1256 | 25/03/2019,16.33 1257 | 26/03/2019,14.68 1258 | 27/03/2019,15.15 1259 | 28/03/2019,14.43 1260 | 29/03/2019,13.71 1261 | -------------------------------------------------------------------------------- /Experiments/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DBaudry/Generating_Financial_Time_Series_with_GAN/d00a5199996d3fed88efdc19218209d04719140a/Experiments/__init__.py -------------------------------------------------------------------------------- /GAN.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import torch 3 | from Helpers import utils 4 | 5 | import matplotlib.pyplot as plt 6 | from Helpers.utils import generate_batch 7 | from copy import copy 8 | from time import time 9 | 10 | if torch.cuda.is_available(): 11 | loadmap = {'cuda:0': 'gpu'} 12 | else: 13 | loadmap = {'cuda:0': 'cpu'} 14 | 15 | 16 | def GAN(serie, window, Generator, Discriminator , generator_args, discriminator_args, 17 | TRAIN_RATIO=10, N_ITER=40001, BATCHLEN=128, 18 | frame=1000, frame_plot=1000, is_notebook=True, batchlen_plot=5, 19 | lr_G=1e-3, betas_G=(0.5, 0.9), lr_D=1e-3, betas_D=(0.5, 0.9), 20 | loss=utils.softplus_loss, argloss_real=-1, argloss_fake=1, argloss_gen=1, 21 | save_model=False, save_name='model', tol=1e-6, plot=True, time_max=3000): 22 | """ 23 | serie: Input Financial Time Serie 24 | TRAIN_RATIO : int, number of times to train the discriminator between two generator steps 25 | N_ITER : int, total number of training iterations for the generator 26 | BATCHLEN : int, Batch size to use 27 | WDTH_G : int, width of the generator (number of neurons in hidden layers) 28 | DPTH_G : int, number of hidden FC layers of the generator 29 | WDTH_D : int, width of the discriminator (number of neurons in hidden layers) 30 | DPTH_D : int, number of hidden FC layers of the discriminator 31 | PRIOR_N : int, dimension of input noise 32 | PRIOR_STD : float, standard deviation of p(z) 33 | frame : int, display data each 'frame' iteration 34 | """ 35 | t0 = time() 36 | diff_loss_mean = 1 37 | prev_disc_loss, prev_gen_loss = 1, 1 38 | 39 | # if is_notebook: 40 | # from tqdm import tqdm_notebook as tqdm 41 | # else: 42 | # from tqdm import tqdm 43 | 44 | G = Generator(window, **generator_args) 45 | solver_G = torch.optim.Adam(G.parameters(), lr=lr_G, betas=betas_G) 46 | D = Discriminator(window, **discriminator_args) 47 | solver_D = torch.optim.Adam(D.parameters(), lr=lr_D, betas=betas_D) 48 | 49 | m, sd = np.mean(serie), np.std(serie) 50 | 51 | for i in range(N_ITER): #tqdm(range(N_ITER)): 52 | 53 | # train the discriminator 54 | for _ in range(TRAIN_RATIO): 55 | D.zero_grad() 56 | real_batch = (generate_batch(serie, window, BATCHLEN)-m)/sd 57 | fake_batch = G.generate(BATCHLEN) 58 | h_real = D(real_batch) 59 | h_fake = D(fake_batch) 60 | loss_real = loss(h_real, argloss_real) 61 | loss_fake = loss(h_fake, argloss_fake) 62 | disc_loss = loss_real + loss_fake 63 | disc_loss.backward() 64 | solver_D.step() 65 | 66 | # train the generator 67 | G.zero_grad() 68 | fake_batch = G.generate(BATCHLEN) 
69 | # Compute here the generator loss, using fake_batch 70 | h_fake = D(fake_batch) 71 | gen_loss = - loss(h_fake, argloss_gen) 72 | gen_loss.backward() 73 | solver_G.step() 74 | 75 | diff_loss_mean = 0.9 * diff_loss_mean +\ 76 | 0.1 * (torch.abs(disc_loss - prev_disc_loss) + torch.abs(gen_loss - prev_gen_loss)) 77 | prev_disc_loss, prev_gen_loss = copy(disc_loss), copy(gen_loss) 78 | 79 | if diff_loss_mean < tol or time()-t0 > time_max: 80 | if save_model: 81 | torch.save(G.state_dict(), 'Generator/'+save_name+'.pth') 82 | torch.save(D.state_dict(), 'Discriminator/'+save_name+'.pth') 83 | return None 84 | 85 | if i % frame == 0: 86 | print('step {}: discriminator: {:.3e}, generator: {:.3e}'.format(i, float(disc_loss), float(gen_loss))) 87 | if save_model: 88 | torch.save(G.state_dict(), 'Generator/'+save_name+'.pth') 89 | torch.save(D.state_dict(), 'Discriminator/'+save_name+'.pth') 90 | 91 | if plot and i % frame_plot == 0: 92 | # plot the result 93 | real_batch = (generate_batch(serie, window, batchlen_plot)-m)/sd 94 | fake_batch = G.generate(batchlen_plot).detach() 95 | fig, axs = plt.subplots(2) 96 | fig.suptitle('Real Batch vs Generated Batch') 97 | axs[0].plot(real_batch.numpy().T) 98 | axs[1].plot(fake_batch.numpy().T) 99 | plt.show() 100 | return G, D 101 | 102 | -------------------------------------------------------------------------------- /GAN_synthetic.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import torch 3 | from Helpers import utils 4 | 5 | import matplotlib.pyplot as plt 6 | from copy import copy 7 | from time import time 8 | 9 | if torch.cuda.is_available(): 10 | loadmap = {'cuda:0': 'gpu'} 11 | else: 12 | loadmap = {'cuda:0': 'cpu'} 13 | 14 | 15 | def GAN(Generator, Discriminator , generator_args, discriminator_args, 16 | generate_batch, param_gen_batch, 17 | TRAIN_RATIO=10, N_ITER=40001, BATCHLEN=128, 18 | frame=1000, frame_plot=1000, is_notebook=True, batchlen_plot=5, 19 | lr_G=1e-3, betas_G=(0.5, 0.9), lr_D=1e-3, betas_D=(0.5, 0.9), 20 | loss=utils.softplus_loss, argloss_real=-1, argloss_fake=1, argloss_gen=1, 21 | save_model=False, save_name='model', tol=1e-6, plot=True, time_max=3000): 22 | """ 23 | serie: Input Financial Time Serie 24 | TRAIN_RATIO : int, number of times to train the discriminator between two generator steps 25 | N_ITER : int, total number of training iterations for the generator 26 | BATCHLEN : int, Batch size to use 27 | WDTH_G : int, width of the generator (number of neurons in hidden layers) 28 | DPTH_G : int, number of hidden FC layers of the generator 29 | WDTH_D : int, width of the discriminator (number of neurons in hidden layers) 30 | DPTH_D : int, number of hidden FC layers of the discriminator 31 | PRIOR_N : int, dimension of input noise 32 | PRIOR_STD : float, standard deviation of p(z) 33 | frame : int, display data each 'frame' iteration 34 | """ 35 | t0 = time() 36 | diff_loss_mean = 1 37 | prev_disc_loss, prev_gen_loss = 1, 1 38 | 39 | # if is_notebook: 40 | # from tqdm import tqdm_notebook as tqdm 41 | # else: 42 | # from tqdm import tqdm 43 | T = param_gen_batch['T'] 44 | G = Generator(T, **generator_args) 45 | solver_G = torch.optim.Adam(G.parameters(), lr=lr_G, betas=betas_G) 46 | D = Discriminator(T, **discriminator_args) 47 | solver_D = torch.optim.Adam(D.parameters(), lr=lr_D, betas=betas_D) 48 | for i in range(N_ITER): #tqdm(range(N_ITER)): 49 | # train the discriminator 50 | for _ in range(TRAIN_RATIO): 51 | D.zero_grad() 52 | real_batch = 
generate_batch(**param_gen_batch) 53 | fake_batch = G.generate(BATCHLEN) 54 | h_real = D(real_batch) 55 | h_fake = D(fake_batch) 56 | loss_real = loss(h_real, argloss_real) 57 | loss_fake = loss(h_fake, argloss_fake) 58 | disc_loss = loss_real + loss_fake 59 | disc_loss.backward() 60 | solver_D.step() 61 | 62 | # train the generator 63 | G.zero_grad() 64 | fake_batch = G.generate(BATCHLEN) 65 | # Compute here the generator loss, using fake_batch 66 | h_fake = D(fake_batch) 67 | gen_loss = - loss(h_fake, argloss_gen) 68 | gen_loss.backward() 69 | solver_G.step() 70 | 71 | diff_loss_mean = 0.9 * diff_loss_mean +\ 72 | 0.1 * (torch.abs(disc_loss - prev_disc_loss) + torch.abs(gen_loss - prev_gen_loss)) 73 | prev_disc_loss, prev_gen_loss = copy(disc_loss), copy(gen_loss) 74 | 75 | if diff_loss_mean < tol or time()-t0 > time_max: 76 | if save_model: 77 | torch.save(G.state_dict(), 'Generator/'+save_name+'.pth') 78 | torch.save(D.state_dict(), 'Discriminator/'+save_name+'.pth') 79 | return None 80 | 81 | if i % frame == 0: 82 | print('step {}: discriminator: {:.3e}, generator: {:.3e}'.format(i, float(disc_loss), float(gen_loss))) 83 | if save_model: 84 | torch.save(G.state_dict(), 'Generator/'+save_name+'.pth') 85 | torch.save(D.state_dict(), 'Discriminator/'+save_name+'.pth') 86 | 87 | if plot and i % frame_plot == 0: 88 | # plot the result 89 | real_batch = generate_batch(**param_gen_batch) 90 | fake_batch = G.generate(batchlen_plot).detach() 91 | fig, axs = plt.subplots(2) 92 | fig.suptitle('Real Batch vs Generated Batch') 93 | axs[0].plot(real_batch.numpy().T) 94 | axs[1].plot(fake_batch.numpy().T) 95 | plt.show() 96 | return G, D 97 | 98 | -------------------------------------------------------------------------------- /Helpers/Batch_Generator.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import matplotlib.pyplot as plt 3 | import torch 4 | 5 | 6 | def generate_BS(r, sigma, T, batchsize): 7 | """ 8 | :param S0: price in t0 9 | :param r: drift 10 | :param sigma: volatility 11 | :param T: Time horizon 12 | :param batchsize: batch size 13 | :return: Black Scholes sample, computed by annualizing the provided drift and vol 14 | """ 15 | mu = (r - sigma**2/2)/250 16 | dl = sigma/np.sqrt(250)*np.random.normal(size=(batchsize, T)) + mu 17 | # batch = S0 * np.cumprod(np.exp(dl), axis=1) 18 | return torch.tensor(dl).float() 19 | 20 | 21 | if __name__ == '__main__': 22 | s = generate_BS(100, 0.02, 0.15, 1000, 200) 23 | plt.plot(s.T) 24 | plt.show() 25 | -------------------------------------------------------------------------------- /Helpers/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /Helpers/statistical_tests.py: -------------------------------------------------------------------------------- 1 | from sklearn import svm 2 | import numpy as np 3 | from .utils import generate_batch, get_data, load_models 4 | """ 5 | Implement some unit test to check if a time serie verifies some facts that are generally true 6 | for financial time series 7 | """ 8 | 9 | from statsmodels.tsa.stattools import adfuller, acf 10 | from scipy.stats import skew, kurtosis 11 | 12 | 13 | def is_stationnary(serie, threshold, display_stats=True): 14 | result = adfuller(serie) 15 | if display_stats: 16 | print('ADF Statistic: %f' % result[0]) 17 | print('p-value: %f' % result[1]) 18 | print('Critical Values:') 19 | for key, value 
in result[4].items(): 20 | print('\t%s: %.3f' % (key, value)) 21 | if result[0] < result[4][threshold]: 22 | return True 23 | return False 24 | 25 | 26 | def check_autocorel(serie, nlags, alpha, qstat=True, score_min=0.8, display_stats=True): 27 | result = acf(serie, nlags=nlags, alpha=alpha, qstat=qstat) 28 | if display_stats: 29 | print('Autocorrelations: {}'.format(result[0])) 30 | print('Confidence intervals: {}'.format(result[1])) 31 | print('Q stats of Ljung Box test: {}'.format(result[2])) 32 | print('p-values: {}'.format(result[3])) 33 | score = sum(result[3] < alpha)/(len(result[3])-1) 34 | if score > score_min: 35 | return True, score 36 | return False, score, np.where(result[3] > alpha) 37 | 38 | 39 | def get_moments(serie, display=True): 40 | mean = serie.mean() 41 | vol = serie.std() 42 | skw = skew(serie) 43 | kurt = kurtosis(serie) 44 | if display: 45 | print('Empirical Moments Statistics') 46 | print('Mean: %f' % mean) 47 | print('Volatility: %f' % vol) 48 | print('Skewness: %f' % skw) 49 | print('Excess kurtosis (compared with normal): %f' % kurt) 50 | return mean, vol, skw, kurt 51 | 52 | 53 | def compare_moments(base_stats, generated_serie, display=False): 54 | M1 = get_moments(generated_serie, display=display) 55 | var = [M1[i]-base_stats[i] for i in range(len(base_stats))] 56 | 57 | pos_skew = (M1[2] > 0) 58 | is_leptokurtic = (M1[3] > 0) 59 | 60 | if display: 61 | print('Variations between Moments Statistics of the two series: {}'.format(var)) 62 | print('Positiveness of the skewness: %i' % pos_skew) 63 | print('Leptokurticity: %i' % is_leptokurtic) 64 | 65 | return (pos_skew, is_leptokurtic), M1, var 66 | 67 | 68 | def compute_all_stats(base_stats, serie, nlags=10, alpha=0.10, score_min=0.8, display=True): 69 | """ 70 | :param base_stats: Statistics of the original serie (get_moments style) 71 | :param serie: New serie to analyze 72 | :param nlags: Number of lags to consider in autocorrelation 73 | :param alpha: Percentage for confidence intervals in all stats 74 | :param score_min: Minimum number of non significant autocorrelations (under the threshold alpha) to 75 | set the non autocorrelation property to True 76 | :param display: Display all statistics or not 77 | :return: Results for Stationnarity test, autocorrelation test (for the serie and its square), 78 | """ 79 | str_alpha = str(int(100 * alpha))+'%' 80 | statn = is_stationnary(serie, str_alpha, display_stats=display) 81 | autocor = check_autocorel(serie, nlags, alpha, 82 | qstat=True, score_min=score_min, display_stats=display) 83 | square_autocor = check_autocorel(serie**2, nlags, alpha, 84 | qstat=True, score_min=score_min, display_stats=display) 85 | res_moments = compare_moments(base_stats, serie, display=display) 86 | return statn, autocor, square_autocor, res_moments 87 | 88 | 89 | def check_SVM(serie, generator, batchlen_train, batchlen_test): 90 | real_batch = generate_batch(serie, generator.window, batchlen_train).detach().numpy() 91 | fake_batch = generator.generate(batchlen_train).detach().numpy() 92 | label_real = np.ones(batchlen_train) 93 | label_fake = np.zeros(batchlen_train) 94 | X = np.concatenate([real_batch, fake_batch], axis=0) 95 | y = np.concatenate([label_real, label_fake], axis=0).astype('int') 96 | clf = svm.SVC() 97 | clf.fit(X, y) 98 | test_real = generate_batch(serie, generator.window, batchlen_test).detach().numpy() 99 | test_fake = generator.generate(batchlen_test).detach().numpy() 100 | test_r_label = np.ones(batchlen_test) 101 | test_f_label = np.zeros(batchlen_test) 102 | 
test_X = np.concatenate([test_real, test_fake], axis=0) 103 | test_y = np.concatenate([test_r_label, test_f_label], axis=0).astype('int')  # cast the labels (not the features) to int, as in training 104 | return clf.score(test_X, test_y) 105 | 106 | 107 | if __name__ == '__main__': 108 | from ..Architectures.Lin_GAN import Generator as LG 109 | from ..Architectures.Lin_GAN import Discriminator as LD 110 | import matplotlib.pyplot as plt 111 | 112 | serie = get_data('Datasets/VIX.csv') 113 | # batch = generate_batch(serie, 250, 2).numpy() 114 | # M0 = get_moments(batch[0]) 115 | # compare_moments(M0, batch[1], display=True) 116 | # compute_all_stats(M0, serie, 117 | # nlags=10, alpha=0.10, score_min=0.8, display=True) 118 | 119 | name = 'Lin_G_268169440' 120 | G, D, param_name = load_models(name, LG, LD) 121 | print(check_SVM(serie, G, 500, 500)) 122 | plt.plot(G.generate(100).detach().numpy().T) 123 | plt.show() 124 | -------------------------------------------------------------------------------- /Helpers/utils.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import numpy as np 3 | import torch 4 | import torch.nn.functional as F 5 | import pickle 6 | from scipy.stats import gaussian_kde, entropy 7 | from tqdm import tqdm 8 | import matplotlib.pyplot as plt 9 | 10 | 11 | def get_data(name, array=True): 12 | """ 13 | Load a dataset and return its log-returns time series. 14 | """ 15 | df = pd.read_csv(name, index_col=0, parse_dates=True, dayfirst=True) 16 | df.iloc[:, 0] = df.iloc[:, 0].astype('float') 17 | r = np.log(df).diff()[1:] 18 | dt = [(df.index[t+1]-df.index[t]).days for t in range(r.shape[0])] 19 | r = 1/np.array(dt) * r.iloc[:, 0] 20 | if array: 21 | return np.array(r) 22 | return r 23 | 24 | 25 | def generate_batch(serie, length, BATCHLEN, add_noise=True, noise_lvl=0.2, proba_noise=0.5): 26 | """ 27 | Returns random sequences of the given length from a numpy array.
28 | """ 29 | if add_noise: 30 | sd = np.std(serie) 31 | sigma = noise_lvl * sd 32 | results = np.zeros((BATCHLEN, length)) 33 | for i in range(BATCHLEN): 34 | random_start = np.random.choice(serie.shape[0]-length) 35 | results[i] = serie[random_start: random_start+length] 36 | if add_noise: 37 | u = np.random.binomial(n=1, p=proba_noise) 38 | results[i] += u*sigma*np.random.normal(size=length) 39 | return torch.tensor(results).float() 40 | 41 | 42 | def softplus_loss(h, sgn): 43 | return torch.mean(F.softplus(h*sgn)) 44 | 45 | 46 | def negative_cross_entropy(h, target): 47 | return torch.mean(F.cross_entropy(h, target)) 48 | 49 | 50 | def KL_div(Gen, serie, length, batchlen): 51 | real_batch = generate_batch(serie, length, batchlen).detach().numpy().flatten() 52 | real_batch = (real_batch-np.mean(serie))/np.std(serie) 53 | fake_batch = Gen.generate(batchlen).detach().numpy().flatten() 54 | r_kde = gaussian_kde(real_batch) 55 | f_kde = gaussian_kde(fake_batch) 56 | eval_points = np.linspace(-6, 6, 500) 57 | pdf_values_real = r_kde.pdf(eval_points) 58 | pdf_values_fake = f_kde.pdf(eval_points) 59 | kl_div = entropy(pdf_values_real, pdf_values_fake) 60 | return kl_div 61 | 62 | 63 | def get_KL_div_list(data, label, batchlen, name_wdw, gen, disc): 64 | corresp = {'3M': 60, '6M': 125, 'Y': 250} 65 | name_order = [] 66 | KL_div_list = [] 67 | for name in tqdm(name_wdw[label]): 68 | G, D, param_name = load_models(name, gen, disc) 69 | name_order.append(name) 70 | KL_div_list.append(KL_div(G, data, corresp[label], batchlen)) 71 | return name_order, KL_div_list 72 | 73 | 74 | def compare_plots(name, serie, batchlen, window, Gen, Disc): 75 | G, D, param_name = load_models(name, Gen, Disc) 76 | print(param_name) 77 | real_batch = (generate_batch(serie, window, batchlen)-np.mean(serie))/np.std(serie) 78 | fake_batch = G.generate(batchlen).detach() 79 | random_batch = np.random.normal(size=(window, batchlen)) 80 | 81 | fig, ax = plt.subplots(3, figsize=(20, 20), sharey=True) 82 | fig.suptitle('Real Batch vs Generated Batch and Random Batch') 83 | ax[0].plot(real_batch.detach().numpy().T) 84 | ax[0].set_title('Batch from Real Serie') 85 | ax[1].plot(fake_batch.detach().numpy().T) 86 | ax[1].set_title('Generated Batch') 87 | ax[2].plot(random_batch) 88 | ax[2].set_title('Random Batch with the Standard deviation of the original serie') 89 | plt.show() 90 | 91 | 92 | def load_models(name, Generator, Discriminator): 93 | print(name) 94 | param = pickle.load(open('Parameters/'+name+'.pk', 'rb')) 95 | if 'window' not in param.keys(): 96 | param['window'] = param['param_gen_batch']['T'] 97 | G = Generator(param['window'], **param['generator_args']) 98 | G.load_state_dict(torch.load('Generator/'+name+'.pth')) 99 | D = Discriminator(param['window'], **param['discriminator_args']) 100 | D.load_state_dict(torch.load('Discriminator/'+name+'.pth')) 101 | return G, D, param 102 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Generating Financial Time Series with Generative Adversarial Networks 2 | 3 | This project is part of the "Machine Learning for Finance" course conducted by Romuald Elie at ENSAE Paris. In this project we explored 4 | different Generative Adversarial Networks architectures in order to generate financial Time Series. For a complete description of the methodology please refer 5 | to our pdf report. 
6 | 7 | The lack of available data sometimes makes the training of Machine Learning algorithms difficult in Finance. Hence, there is great interest 8 | in the development of Time Series generators as a data augmentation method. However, Financial Time Series exhibit properties that are often difficult to reproduce with simple models. A good generator needs to capture both the overall distribution of the data (its moments, for instance) 9 | and its temporal structure (autocorrelation patterns). 10 | 11 | Generative Adversarial Networks were introduced by Goodfellow et al. in 2014. Their innovative approach was to propose a game-theoretic framework, training two neural networks simultaneously in an adversarial setting. The GAN architecture has produced impressive results in image-related fields (face generation, for instance), but to our knowledge it has not yet been used successfully in finance. Our aim was to exhibit some properties of this framework for this kind of problem. 12 | 13 | Along with the Neural Networks, we implement several methods for the training process and for output evaluation. 14 | They can be divided into the following categories: 15 | * Generation of Training Samples (utils file) 16 | * Implementation of Generator and Discriminator Neural Networks, following a given architecture (Lin_GAN, DCGAN, RGAN, CvGan,...) 17 | * Implementation of a general training framework for all architectures (GAN file) 18 | * Statistical tests to verify that the statistical properties of the generated samples match those of samples from the true distribution 19 | 20 | We also provide some pre-trained models that can be loaded using functions from the utils module; a minimal usage sketch is given below. An experimental report is also provided in Notebook format. Our experiments were conducted in order to sample from the distribution of the CBOE VIX Index from 2014 to 2019, a period during which stationarity holds. This distribution is interesting because it combines Gaussian-like behaviour with recurring random spikes, so generated samples are very easy to evaluate visually. 21 | -------------------------------------------------------------------------------- /__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DBaudry/Generating_Financial_Time_Series_with_GAN/d00a5199996d3fed88efdc19218209d04719140a/__init__.py --------------------------------------------------------------------------------
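A note for readers connecting the README's description of adversarial training with the code in GAN.py above: with the default arguments of the GAN() routine (loss=utils.softplus_loss, argloss_real=-1, argloss_fake=1, argloss_gen=1), the quantities minimised at each discriminator and generator step can be written as follows. This is only a restatement of the training loop, not new behaviour:

$$
\mathcal{L}_D = \mathbb{E}_{x\sim p_{\text{data}}}\big[\operatorname{softplus}(-D(x))\big] + \mathbb{E}_{z\sim p(z)}\big[\operatorname{softplus}(D(G(z)))\big],
\qquad
\mathcal{L}_G = -\,\mathbb{E}_{z\sim p(z)}\big[\operatorname{softplus}(D(G(z)))\big].
$$

Since $\operatorname{softplus}(h) = \log(1+e^{h})$, these are exactly the original minimax GAN losses $\mathcal{L}_D = -\mathbb{E}[\log\sigma(D(x))] - \mathbb{E}[\log(1-\sigma(D(G(z))))]$ and $\mathcal{L}_G = \mathbb{E}[\log(1-\sigma(D(G(z))))]$, where $\sigma$ is the sigmoid; in particular the generator step uses the saturating form of the loss rather than the non-saturating $-\log\sigma(D(G(z)))$ variant.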
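The README states that pre-trained models can be loaded through the utils module. The snippet below is a minimal, hypothetical usage sketch assembled only from functions defined in this repository (get_data, load_models, compare_plots, get_moments, compute_all_stats, check_SVM). It assumes the script is run from the repository root, that a saved run named 'Lin_G_268169440' (the name used in statistical_tests.py) exists under the Parameters/, Generator/ and Discriminator/ folders, and that the Lin_GAN Generator exposes a generate(batchlen) method like the other architectures; it is a sketch, not part of the original codebase.

```python
# Minimal usage sketch (assumptions: run from the repository root; a saved run
# named 'Lin_G_268169440' exists under Parameters/, Generator/ and Discriminator/;
# the Lin_GAN Generator exposes generate(batchlen) like the other architectures).
from Helpers.utils import get_data, load_models, compare_plots
from Helpers.statistical_tests import get_moments, compute_all_stats, check_SVM
from Architectures.Lin_GAN import Generator, Discriminator

name = 'Lin_G_268169440'                      # placeholder for any saved run
serie = get_data('Datasets/VIX.csv')          # daily log-returns of the VIX

# Reload the trained generator/discriminator and their training parameters
G, D, param = load_models(name, Generator, Discriminator)

# Visual check: real batch vs generated batch vs white noise of matching std
compare_plots(name, serie, batchlen=10, window=param['window'],
              Gen=Generator, Disc=Discriminator)

# Statistical checks on one generated path against the real series' moments
base_stats = get_moments(serie, display=False)
fake_batch = G.generate(100).detach().numpy()
compute_all_stats(base_stats, fake_batch[0], nlags=10, alpha=0.10, display=True)

# Adversarial validation: a score near 0.5 means the SVM cannot tell real
# windows from generated ones
print(check_SVM(serie, G, batchlen_train=500, batchlen_test=500))
```

The stationarity, autocorrelation and moment checks operate on a single path, which is why only the first generated series of the batch is passed to compute_all_stats, while check_SVM compares whole batches of real and generated windows.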