├── tensornetworks
│   ├── __init__.py
│   ├── PositiveMPS.py
│   ├── RealBorn.py
│   ├── ComplexBorn.py
│   ├── RealLPS.py
│   ├── ComplexLPS.py
│   └── MPSClass.py
├── poster.png
├── LICENSE
├── .gitignore
├── hmm
│   └── runHMM.py
├── fittensor.ipynb
├── README.md
├── fitdataset.ipynb
└── datasets
    └── tumor

/tensornetworks/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | 
3 | 
4 | 
--------------------------------------------------------------------------------
/poster.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/glivan/tensor_networks_for_probabilistic_modeling/HEAD/poster.png
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 | 
3 | Copyright (c) 2019 Ivan Glasser
4 | 
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 | 
6 | # C extensions
7 | *.so
8 | 
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | MANIFEST
27 | 
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 | 
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 | 
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 | .pytest_cache/
49 | 
50 | # Translations
51 | *.mo
52 | *.pot
53 | 
54 | # Django stuff:
55 | *.log
56 | local_settings.py
57 | db.sqlite3
58 | 
59 | # Flask stuff:
60 | instance/
61 | .webassets-cache
62 | 
63 | # Scrapy stuff:
64 | .scrapy
65 | 
66 | # Sphinx documentation
67 | docs/_build/
68 | 
69 | # PyBuilder
70 | target/
71 | 
72 | # Jupyter Notebook
73 | .ipynb_checkpoints
74 | 
75 | # pyenv
76 | .python-version
77 | 
78 | # celery beat schedule file
79 | celerybeat-schedule
80 | 
81 | # SageMath parsed files
82 | *.sage.py
83 | 
84 | # Environments
85 | .env
86 | .venv
87 | env/
88 | venv/
89 | ENV/
90 | env.bak/
91 | venv.bak/
92 | 
93 | # Spyder project settings
94 | .spyderproject
95 | .spyproject
96 | 
97 | # Rope project settings
98 | .ropeproject
99 | 
100 | # mkdocs documentation
101 | /site
102 | 
103 | # mypy
104 | .mypy_cache/
105 | 
--------------------------------------------------------------------------------
/hmm/runHMM.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | 
3 | import numpy as np
4 | import sys
5 | import pickle
6 | import time
7 | import pomegranate
8 | 
9 | def init(datasetload_init='lymphography',
10 |          bond_dimension_init='2',n_iter_init='100'):
11 |     """Initialize parameters:
12 |     ----------
13 |     datasetload : str, path of dataset
14 |     bond_dimension : int, number of hidden states
15 |     n_iter : int, number of iterations over the training dataset to perform
16 |     """
17 |     global datasetload
18 |     global bond_dimension
19 |     global n_iter
20 | 
21 |     datasetload=str(datasetload_init)
22 |     bond_dimension=int(bond_dimension_init)
23 |     n_iter=int(n_iter_init)
24 | 
25 | def run():
26 |     # Load dataset
27 |     path='datasets/'
28 |     with open(path+datasetload, 'rb') as f:
29 |         a=pickle.load(f)
30 |     X=a[0]
31 |     X=X.astype(int)
32 | 
33 |     # Create HMM
34 |     D=bond_dimension
35 |     N=X.shape[1]
36 |     d=np.max(X)+1
37 |     list_of_states=[]
38 |     for i in range(N):
39 |         list_of_states.append([])
40 |         for u in range(bond_dimension):
41 |             dictionary=dict()
42 |             for l in range(d):
43 |                 dictionary[str(l)] = np.random.rand()
44 |             list_of_states[i].append(pomegranate.State(pomegranate.DiscreteDistribution(dictionary)))
45 |     model = pomegranate.HiddenMarkovModel()
46 |     for i in range(N-1):
47 |         for d1 in range(D):
48 |             for d2 in range(D):
49 |                 model.add_transition(list_of_states[i][d1],list_of_states[i+1][d2],np.random.rand())
50 |     for d1 in range(D):
51 |         model.add_transition(model.start,list_of_states[0][d1],np.random.rand())
52 |     for d1 in range(D):
53 |         model.add_transition(list_of_states[N-1][d1],model.end,np.random.rand())
54 |     model.bake()
55 | 
56 |     # Train HMM
57 |     begin = time.time()
58 |     sequencetrain=[[str(i) for i in v] for v in X]
59 |     np.random.seed()
60 |     model.fit(sequencetrain,algorithm='baum-welch',stop_threshold=1e-50,min_iterations=1000,\
61 |               max_iterations=n_iter)
62 | 
63 |     u=0
64 |     for i in sequencetrain:
65 |         u+=model.log_probability(i)
66 |     accuracy=-u/len(sequencetrain)
67 | 
68 |     time_elapsed = time.time()-begin
69 | 
70 |     print("Negative log likelihood = %.3f" % (accuracy))
71 |     print("Time elapsed = %.2fs" %(time_elapsed))
72 | 
73 | if __name__ == '__main__':
74 |     # Main program: initialize with options from the command line and run
75 |     init(*sys.argv[1:])
76 |     run()
77 | 
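# Usage sketch (editorial note, not part of the original script): init() and
# run() can also be called directly from an interactive session. init() takes
# string arguments, since they normally arrive via sys.argv, and run() opens
# 'datasets/<name>' relative to the current working directory, so this assumes
# the interpreter was started from the repository root:
#
#   >>> init('lymphography', '2', '100')   # dataset, hidden states, epochs
#   >>> run()                              # trains and prints NLL and time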
--------------------------------------------------------------------------------
/fittensor.ipynb:
--------------------------------------------------------------------------------
1 | {
2 |  "cells": [
3 |   {
4 |    "cell_type": "markdown",
5 |    "metadata": {},
6 |    "source": [
7 |     "# Approximating a non-negative tensor with tensor networks\n",
8 |     "Training a tensor network to approximate a probability mass function given as a non-negative tensor."
9 |    ]
10 |   },
11 |   {
12 |    "cell_type": "markdown",
13 |    "metadata": {},
14 |    "source": [
15 |     "First import the dependencies and a tensor network model."
16 |    ]
17 |   },
18 |   {
19 |    "cell_type": "code",
20 |    "execution_count": 1,
21 |    "metadata": {
22 |     "collapsed": false
23 |    },
24 |    "outputs": [],
25 |    "source": [
26 |     "import numpy as np\n",
27 |     "from tensornetworks.PositiveMPS import PositiveMPS\n",
28 |     "from tensornetworks.RealBorn import RealBorn\n",
29 |     "from tensornetworks.ComplexBorn import ComplexBorn\n",
30 |     "from tensornetworks.RealLPS import RealLPS\n",
31 |     "from tensornetworks.ComplexLPS import ComplexLPS"
32 |    ]
33 |   },
34 |   {
35 |    "cell_type": "markdown",
36 |    "metadata": {},
37 |    "source": [
38 |     "Create a random non-negative tensor with entries summing up to one."
39 |    ]
40 |   },
41 |   {
42 |    "cell_type": "code",
43 |    "execution_count": 2,
44 |    "metadata": {
45 |     "collapsed": false
46 |    },
47 |    "outputs": [],
48 |    "source": [
49 |     "X=np.random.rand(10,10)\n",
50 |     "X=X/float(np.sum(X))"
51 |    ]
52 |   },
53 |   {
54 |    "cell_type": "markdown",
55 |    "metadata": {},
56 |    "source": [
57 |     "Create a tensor network model (here a real Born machine of Born-rank 3) with a maximum of 1000 optimization iterations."
58 |    ]
59 |   },
60 |   {
61 |    "cell_type": "code",
62 |    "execution_count": 3,
63 |    "metadata": {
64 |     "collapsed": false
65 |    },
66 |    "outputs": [],
67 |    "source": [
68 |     "mps = RealBorn(D=3, n_iter=1000)"
69 |    ]
70 |   },
71 |   {
72 |    "cell_type": "markdown",
73 |    "metadata": {},
74 |    "source": [
75 |     "Fit the model to the tensor."
76 |    ]
77 |   },
78 |   {
79 |    "cell_type": "code",
80 |    "execution_count": 4,
81 |    "metadata": {
82 |     "collapsed": false
83 |    },
84 |    "outputs": [
85 |     {
86 |      "name": "stdout",
87 |      "output_type": "stream",
88 |      "text": [
89 |       "KL divergence = 0.050906, time = 3.22s\n"
90 |      ]
91 |     },
92 |     {
93 |      "data": {
94 |       "text/plain": [
95 |        ""
96 |       ]
97 |      },
98 |      "execution_count": 4,
99 |      "metadata": {},
100 |      "output_type": "execute_result"
101 |     }
102 |    ],
103 |    "source": [
104 |     "mps.fit_tensor(X)"
105 |    ]
106 |   },
107 |   {
108 |    "cell_type": "markdown",
109 |    "metadata": {},
110 |    "source": [
111 |     "Evaluate the KL-divergence between the tensor X and the fitted model."
112 |    ]
113 |   },
114 |   {
115 |    "cell_type": "code",
116 |    "execution_count": 5,
117 |    "metadata": {
118 |     "collapsed": false
119 |    },
120 |    "outputs": [
121 |     {
122 |      "data": {
123 |       "text/plain": [
124 |        "0.050905767947340599"
125 |       ]
126 |      },
127 |      "execution_count": 5,
128 |      "metadata": {},
129 |      "output_type": "execute_result"
130 |     }
131 |    ],
132 |    "source": [
133 |     "mps.distance(X)"
134 |    ]
135 |   },
136 |   {
137 |    "cell_type": "markdown",
138 |    "metadata": {},
139 |    "source": [
140 |     "Now repeat the procedure, this time with a complex Born machine of Born-rank 3."
141 |    ]
142 |   },
143 |   {
144 |    "cell_type": "code",
145 |    "execution_count": 6,
146 |    "metadata": {
147 |     "collapsed": false
148 |    },
149 |    "outputs": [
150 |     {
151 |      "name": "stdout",
152 |      "output_type": "stream",
153 |      "text": [
154 |       "KL divergence = 0.006638, time = 10.68s\n"
155 |      ]
156 |     },
157 |     {
158 |      "data": {
159 |       "text/plain": [
160 |        ""
161 |       ]
162 |      },
163 |      "execution_count": 6,
164 |      "metadata": {},
165 |      "output_type": "execute_result"
166 |     }
167 |    ],
168 |    "source": [
169 |     "mps2 = ComplexBorn(D=3, n_iter=1000)\n",
170 |     "mps2.fit_tensor(X)"
171 |    ]
172 |   }
173 |  ],
174 |  "metadata": {
175 |   "anaconda-cloud": {},
176 |   "celltoolbar": "Raw Cell Format",
177 |   "kernelspec": {
178 |    "display_name": "Python [conda root]",
179 |    "language": "python",
180 |    "name": "conda-root-py"
181 |   },
182 |   "language_info": {
183 |    "codemirror_mode": {
184 |     "name": "ipython",
185 |     "version": 2
186 |    },
187 |    "file_extension": ".py",
188 |    "mimetype": "text/x-python",
189 |    "name": "python",
190 |    "nbconvert_exporter": "python",
191 |    "pygments_lexer": "ipython2",
192 |    "version": "2.7.13"
193 |   }
194 |  },
195 |  "nbformat": 4,
196 |  "nbformat_minor": 2
197 | }
198 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Expressive power of tensor-network factorizations for probabilistic modeling
2 | This is the code accompanying the paper "Expressive power of tensor-network factorizations for probabilistic modeling" (Advances in Neural Information Processing Systems 32, proceedings of the NeurIPS 2019 Conference), which allows for reproduction of its numerical results. If you use this code or these results, please cite [1].
3 | 
4 | ## Prerequisites
5 | A working Python 2.7 or 3.4+ installation with the following Python libraries (all included in Anaconda):
6 | ```
7 | numpy, scikit-learn
8 | ```
9 | The scripts also use the standard-library modules os, sys, pickle and time, which require no installation. For training a hidden Markov model the Python library [pomegranate](https://github.com/jmschrei/pomegranate) [2] and its dependencies must also be installed. This is not necessary for running the tensor-network algorithms.
10 | 
11 | ## Overview of the code
12 | ### Datasets
13 | The preprocessed datasets are included in the `datasets` folder.
14 | Preprocessing transformed the categorical data into a numpy array of integers. Each row corresponds to a training example and each column is an integer feature taking values between 0 and d-1, where d is the number of different categories. As this work is only concerned with the expressivity of the different models, only training sets are used.
15 | 
16 | Included datasets:
17 | From the R package TraMineR:
18 | - Family life states from the Swiss Household Panel biographical survey: `biofam` [3]
19 | 
20 | From the UCI Machine Learning Repository [4]:
21 | - SPECT Heart Data Set: `spect`
22 | - Lymphography Data Set: `lymphography` [5]
23 | - Primary Tumor Data Set: `tumor` [6]
24 | - Congressional Voting Records Data Set: `votes`
25 | - Solar Flare Data Set: `flare`
26 | 
27 | ### Tensor networks
28 | The `tensornetworks` folder includes a generic tensor network class `MPSClass.py` as well as classes for positive MPS, Born machine MPS and Locally Purified States (LPS) with real or complex tensor elements. These classes include simple methods for performing maximum likelihood estimation on a dataset using batch gradient descent. The training is done by computing the gradients of the log-likelihood over all tensors for each minibatch of training examples and then updating all tensors at once in a gradient-descent optimization scheme (see the sketch below). This is different from a DMRG-like algorithm, where only one or two tensors are updated at a time. For this reason canonical forms (which would be different for each class of tensor network) are not used, but they might be required for numerical stability on much larger datasets. The bond dimension/rank is fixed to the same value for all tensors. Code for approximating a given non-negative tensor representing a probability mass function is also available. Note that the code is not optimized for speed and performance, but is rather a tool demonstrating how the algorithms work.
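As an illustration, one epoch of this batch gradient-descent scheme looks schematically as follows. This is a minimal sketch, not the exact implementation (`MPSClass.py` is not reproduced here): it assumes a model object exposing the attributes `w`, `learning_rate` and `batch_size` and the method `_likelihood_derivative` of the classes in `tensornetworks`, which returns the gradient of the negative log-likelihood averaged over a minibatch, including the contribution of the normalization constant.

```python
import numpy as np

def train_epoch(model, X, rng=np.random):
    # One pass over the dataset X in shuffled minibatches.
    perm = rng.permutation(X.shape[0])
    for start in range(0, X.shape[0], model.batch_size):
        batch = X[perm[start:start + model.batch_size]]
        # Average gradient of the negative log-likelihood on this minibatch
        grad = model._likelihood_derivative(batch)
        # Plain gradient-descent step: all tensors are updated at once,
        # in contrast to DMRG-style single-tensor sweeps
        model.w = model.w - model.learning_rate * grad
```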
29 | 
30 | ### HMM
31 | We include in the `hmm` folder a simple script that defines a hidden Markov model corresponding to an MPS of a certain bond dimension. This script requires the pomegranate library.
32 | 
33 | ## Running the code
34 | We provide a Jupyter notebook `fitdataset.ipynb` that explains how to create a model, load a dataset and train the model on the dataset. We also provide a Jupyter notebook `fittensor.ipynb` that explains how to train the model to approximate a given non-negative tensor.
35 | 
36 | Input parameters of a tensor network (all parameters are optional):
37 | - D : int [default: 2], bond dimension/rank of the tensor networks.
38 | - learning_rate : float [default: 1.0], learning rate for gradient descent.
39 | - batch_size : int [default: 20], number of training examples per minibatch.
40 | - n_iter : int [default: 10], number of epochs over the training dataset to perform, or number of iterations of the optimization for approximating a given tensor.
41 | - random_state : int or numpy.RandomState [default: None], a random number generator instance to define the state of the random permutations generator. If an integer is given, it fixes the seed. Defaults to the global numpy random number generator.
42 | - verbose : int [default: 0], the verbosity level. Zero means silent mode.
43 | - mu : int [default: 2], only for real and complex LPS: the dimension of the purification index.
44 | 
45 | Experiments in the paper used the following parameters:
46 | - batch_size was set to 20.
47 | - learning_rate was chosen using a grid search over powers of 10 from 10^-5 to 10^5.
48 | - n_iter was set to a maximum of 20000.
49 | Each data point indicated in the paper is the lowest negative log-likelihood obtained from 10 trials with different initial tensors.
50 | 
51 | For approximating a given non-negative tensor, the optimization is performed by a limited-memory BFGS algorithm. The batch size and learning rate parameters are not used, and experiments in the paper used a maximum number of iterations n_iter of 10000.
52 | 
53 | We also include code to train a hidden Markov model corresponding to an MPS with positive tensors. The training is performed using the Baum-Welch algorithm by running, from the repository root (so that the relative `datasets/` path resolves),
54 | ```
55 | python hmm/runHMM.py lymphography 2 100
56 | ```
57 | Input parameters (all parameters are optional):
58 | - datasetload : str [default: lymphography], name of the dataset file, which should be located in the `datasets/` folder.
59 | - bond_dimension : int [default: 2], bond dimension/rank (here the number of hidden states per variable).
60 | - n_iter : int [default: 100], number of epochs over the training dataset to perform.
61 | 
62 | ## References
63 | [1] Glasser, I., Sweke, R., Pancotti, N., Eisert, J., Cirac, J.
I. (2019) Expressive power of tensor-network factorizations for probabilistic modeling. Advances in Neural Information Processing Systems 32 (Proceedings of the NeurIPS 2019 Conference). [https://papers.nips.cc/paper/8429-expressive-power-of-tensor-network-factorizations-for-probabilistic-modeling]. See also extended version at [arxiv:1907.03741](https://arxiv.org/abs/1907.03741). 64 | [2] Schreiber, J. (2018). Pomegranate: fast and flexible probabilistic modeling in python. Journal of Machine Learning Research, 18(164), 1-6. 65 | [3] Müller, N. S., M. Studer, G. Ritschard (2007). Classification de parcours de vie à l'aide de l'optimal matching. In XIVe Rencontre de la Société francophone de classification (SFC 2007), Paris, 5 - 7 septembre 2007, pp. 157–160. 66 | [4] Dua, D. and Graff, C. (2019). UCI Machine Learning Repository [http://archive.ics.uci.edu/ml]. Irvine, CA: University of California, School of Information and Computer Science. 67 | [5] This lymphography domain was obtained from the University Medical Centre, Institute of Oncology, Ljubljana, Slovenia. Thanks go to M. Zwitter and M. Soklic for providing the data. 68 | [6] This primary tumor domain was obtained from the University Medical Centre, Institute of Oncology, Ljubljana, Slovenia. Thanks go to M. Zwitter and M. Soklic for providing the data. 69 | -------------------------------------------------------------------------------- /tensornetworks/PositiveMPS.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from .MPSClass import TN 4 | import numpy as np 5 | from sklearn.externals.six.moves import xrange 6 | 7 | class PositiveMPS(TN): 8 | """Matrix Product States with non-negative parameters 9 | Parametrization using the square of real parameters. 10 | Parameters 11 | ---------- 12 | D : int, optional 13 | Rank/Bond dimension of the MPS 14 | learning_rate : float, optional 15 | Learning rate of the gradient descent algorithm 16 | batch_size : int, optional 17 | Number of examples per minibatch. 18 | n_iter : int, optional 19 | Number of iterations (epochs) over the training dataset to perform 20 | during training. 21 | random_state : integer or numpy.RandomState, optional 22 | A random number generator instance to define the state of the 23 | random permutations generator. If an integer is given, it fixes the 24 | seed. Defaults to the global numpy random number generator. 25 | verbose : int, optional 26 | The verbosity level. The default, zero, means silent mode. 
27 | ---------- 28 | Attributes 29 | ---------- 30 | w : numpy array, shape (m_parameters) 31 | Parameters of the tensor network 32 | norm : float 33 | normalization constant for the probability distribution 34 | n_samples : int 35 | number of training samples 36 | n_features : int 37 | number of features in the dataset 38 | d : int 39 | physical dimension (dimension of the features) 40 | m_parameters : int 41 | number of parameters in the network 42 | history : list 43 | saves the training accuracies during training 44 | """ 45 | def __init__(self, D=4, learning_rate=0.1, batch_size=10, 46 | n_iter=100, random_state=None, verbose=False): 47 | self.D = D 48 | self.learning_rate = float(learning_rate) 49 | self.batch_size = batch_size 50 | self.n_iter = n_iter 51 | self.random_state = random_state 52 | self.verbose = verbose 53 | 54 | def _probability(self, x): 55 | """Unnormalized probability of one configuration P(x) 56 | Parameters 57 | ---------- 58 | x : numpy array, shape (n_features,) 59 | One configuration 60 | Returns 61 | ------- 62 | probability : float 63 | """ 64 | w2 = np.reshape(self.w,(self.n_features,self.d,self.D,self.D)) 65 | tmp = np.square(w2[0,x[0],0,:]) #First tensor 66 | for i in xrange(1,self.n_features-1): 67 | tmp = np.dot(tmp,np.square(w2[i,x[i],:,:])) #MPS contraction 68 | probability = np.inner(tmp,np.square(w2[self.n_features-1, 69 | x[self.n_features-1],:,0])) 70 | return probability 71 | 72 | def _computenorm(self): 73 | """Compute norm of probability distribution 74 | Returns 75 | ------- 76 | norm : float 77 | """ 78 | w2 = np.reshape(self.w,(self.n_features,self.d,self.D,self.D)) 79 | tmp = np.sum(np.square(w2[0,:,0,:]),0) #First tensor 80 | for i in xrange(1,self.n_features-1): 81 | tmp = np.dot(tmp,np.sum(np.square(w2[i,:,:,:]),0)) #MPS contraction 82 | norm = np.inner(tmp,np.sum(np.square(w2[self.n_features-1,:,:,0]),0)) 83 | return norm 84 | 85 | def _derivative(self, x): 86 | """Compute the derivative of P(x) 87 | Parameters 88 | ---------- 89 | x : numpy array, shape (n_features,) 90 | One configuration 91 | Returns 92 | ------- 93 | derivative : numpy array, shape (m_parameters,) 94 | """ 95 | w2 = np.reshape(self.w,(self.n_features,self.d,self.D,self.D)) 96 | derivative = np.zeros((self.n_features,self.d,self.D,self.D)) 97 | 98 | #Store intermediate tensor contractions for the derivatives: 99 | #left to right and right to left 100 | #tmp stores the contraction of the first i+1 tensors from the left 101 | #in tmp[i,:,:], tmp2 the remaining tensors on the right 102 | #the mps contracted is the remaining contraction tmp[i-1]w[i]tmp2[i+1] 103 | tmp = np.zeros((self.n_features,self.D)) 104 | tmp2 = np.zeros((self.n_features,self.D)) 105 | tmp[0,:] = np.square(w2[0,x[0],0,:]) 106 | for i in xrange(1,self.n_features-1): 107 | tmp[i,:] = np.dot(tmp[i-1,:],np.square(w2[i,x[i],:,:])) 108 | tmp[self.n_features-1,:] = np.inner(tmp[self.n_features-2,:], 109 | np.square(w2[self.n_features-1,x[self.n_features-1],:,0])) 110 | tmp2[self.n_features-1,:] = np.square(w2[self.n_features-1, 111 | x[self.n_features-1],:,0]) 112 | for i in xrange(self.n_features-2,-1,-1): 113 | tmp2[i,:] = np.dot(np.square(w2[i,x[i],:]),tmp2[i+1,:]) 114 | tmp2[0,:] = np.inner(np.square(w2[0,x[0],0,:]),tmp2[1,:]) 115 | 116 | #The derivative of each tensor is the contraction of the other tensors 117 | derivative[0,x[0],0,:] = np.multiply(tmp2[1,:],2*(w2[0,x[0],0,:])) 118 | derivative[self.n_features-1,x[self.n_features-1],:,0] = \ 119 | np.multiply(tmp[self.n_features-2,:], 120 | 
2*(w2[self.n_features-1,x[self.n_features-1],:,0])) 121 | for i in xrange(1,self.n_features-1): 122 | derivative[i,x[i],:,:]=np.multiply(np.outer(tmp[i-1,:], 123 | tmp2[i+1,:]),2*(w2[i,x[i],:])) 124 | 125 | return derivative.reshape(self.m_parameters) 126 | 127 | def _derivativenorm(self): 128 | """Compute the derivative of the norm 129 | Returns 130 | ------- 131 | derivative : numpy array, shape (m_parameters,) 132 | """ 133 | w2 = np.reshape(self.w,(self.n_features,self.d,self.D,self.D)) 134 | derivative = np.zeros((self.n_features,self.d,self.D,self.D)) 135 | 136 | tmp=np.zeros((self.n_features,self.D)) 137 | tmp2=np.zeros((self.n_features,self.D)) 138 | tmp[0,:]=np.sum(np.square(w2[0,:,0,:]),0) 139 | for i in xrange(1,self.n_features-1): 140 | tmp[i,:]=np.dot(tmp[i-1,:],np.sum(np.square(w2[i,:,:,:]),0)) 141 | tmp[self.n_features-1,:]=np.inner(tmp[self.n_features-2,:], 142 | np.sum(np.square(w2[self.n_features-1,:,:,0]),0)) 143 | tmp2[self.n_features-1,:]=np.sum(np.square(w2[self.n_features-1,:,:,0]),0) 144 | for i in xrange(self.n_features-2,-1,-1): 145 | tmp2[i,:]=np.dot(np.sum(np.square(w2[i,:,:,:]),0),tmp2[i+1,:]) 146 | tmp2[0,:]=np.inner(np.sum(np.square(w2[0,:,0,:]),0),tmp2[1,:]) 147 | 148 | for j in xrange(self.d): 149 | derivative[0,j,0,:]=np.multiply(tmp2[1,:],2*(w2[0,j,0,:])) 150 | derivative[self.n_features-1,j,:,0]=\ 151 | np.multiply(tmp[self.n_features-2,:],2*(w2[self.n_features-1,j,:,0])) 152 | for i in xrange(1,self.n_features-1): 153 | temp3=np.outer(tmp[i-1,:],tmp2[i+1,:]) 154 | for j in xrange(self.d): 155 | derivative[i,j,:,:]=np.multiply(temp3,2*(w2[i,j,:,:])) 156 | return derivative.reshape(self.m_parameters) 157 | 158 | -------------------------------------------------------------------------------- /tensornetworks/RealBorn.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from .MPSClass import TN 4 | import numpy as np 5 | from sklearn.externals.six.moves import xrange 6 | 7 | class RealBorn(TN): 8 | """Born machine with real parameters 9 | Probability is the square of the MPS 10 | Parameters 11 | ---------- 12 | D : int, optional 13 | Rank/Bond dimension of the MPS 14 | learning_rate : float, optional 15 | Learning rate of the gradient descent algorithm 16 | batch_size : int, optional 17 | Number of examples per minibatch. 18 | n_iter : int, optional 19 | Number of iterations (epochs) over the training dataset to perform 20 | during training. 21 | random_state : integer or numpy.RandomState, optional 22 | A random number generator instance to define the state of the 23 | random permutations generator. If an integer is given, it fixes the 24 | seed. Defaults to the global numpy random number generator. 25 | verbose : int, optional 26 | The verbosity level. The default, zero, means silent mode. 
27 | ---------- 28 | Attributes 29 | ---------- 30 | w : numpy array, shape (m_parameters) 31 | Parameters of the tensor network 32 | norm : float 33 | normalization constant for the probability distribution 34 | n_samples : int 35 | number of training samples 36 | n_features : int 37 | number of features in the dataset 38 | d : int 39 | physical dimension (dimension of the features) 40 | m_parameters : int 41 | number of parameters in the network 42 | history : list 43 | saves the training accuracies during training 44 | """ 45 | def __init__(self, D=4, learning_rate=0.1, batch_size=10, 46 | n_iter=100, random_state=None, verbose=False): 47 | self.D = D 48 | self.learning_rate = float(learning_rate) 49 | self.batch_size = batch_size 50 | self.n_iter = n_iter 51 | self.random_state = random_state 52 | self.verbose = verbose 53 | 54 | def _probability(self, x): 55 | """Unnormalized probability of one configuration P(x) 56 | Parameters 57 | ---------- 58 | x : numpy array, shape (n_features,) 59 | One configuration 60 | Returns 61 | ------- 62 | probability : float 63 | """ 64 | w2 = np.reshape(self.w,(self.n_features,self.d,self.D,self.D)) 65 | 66 | tmp = w2[0,x[0],0,:] #First tensor 67 | for i in xrange(1,self.n_features-1): 68 | tmp = np.dot(tmp,w2[i,x[i],:,:]) #MPS contraction 69 | probability = np.inner(tmp, 70 | w2[self.n_features-1,x[self.n_features-1],:,0])**2 71 | return probability 72 | 73 | def _computenorm(self): 74 | """Compute norm of probability distribution 75 | Returns 76 | ------- 77 | norm : float 78 | """ 79 | w2 = np.reshape(self.w,(self.n_features,self.d,self.D,self.D)) 80 | tmp = np.tensordot(w2[0,:,0,:],w2[0,:,0,:],axes=([0],[0])).reshape(self.D*self.D) #First tensor 81 | for i in xrange(1,self.n_features-1): 82 | tmp = np.dot(tmp,np.tensordot(w2[i,:,:,:],w2[i,:,:,:], 83 | axes=([0],[0])).transpose((0,2,1,3)).reshape(self.D*self.D,self.D*self.D)) #MPS contraction 84 | norm = np.inner(tmp,np.tensordot(w2[self.n_features-1,:,:,0], 85 | w2[self.n_features-1,:,:,0],axes=([0],[0])).reshape(self.D*self.D)) 86 | return norm 87 | 88 | def _derivative(self, x): 89 | """Compute the derivative of P(x) 90 | Parameters 91 | ---------- 92 | x : numpy array, shape (n_features,) 93 | One configuration 94 | Returns 95 | ------- 96 | derivative : numpy array, shape (m_parameters,) 97 | """ 98 | w2 = np.reshape(self.w,(self.n_features,self.d,self.D,self.D)) 99 | derivative = np.zeros((self.n_features,self.d,self.D,self.D)) 100 | 101 | #Store intermediate tensor contractions for the derivatives: 102 | #left to right and right to left 103 | #tmp stores the contraction of the first i+1 tensors from the left 104 | #in tmp[i,:,:], tmp2 the remaining tensors on the right 105 | #the mps contracted is the remaining contraction tmp[i-1]w[i]tmp2[i+1] 106 | tmp=np.zeros((self.n_features,self.D)) 107 | tmp2=np.zeros((self.n_features,self.D)) 108 | tmp[0,:]=w2[0,x[0],0,:] 109 | for i in xrange(1,self.n_features-1): 110 | tmp[i,:]=np.dot(tmp[i-1,:],w2[i,x[i],:,:]) 111 | mpscontracted=np.inner(tmp[self.n_features-2,:], 112 | w2[self.n_features-1,x[self.n_features-1],:,0]) 113 | 114 | tmp[self.n_features-1,:]=np.inner(tmp[self.n_features-2,:], 115 | w2[self.n_features-1,x[self.n_features-1],:,0]) 116 | tmp2[self.n_features-1,:]=w2[self.n_features-1,x[self.n_features-1],:,0] 117 | for i in xrange(self.n_features-2,-1,-1): 118 | tmp2[i,:]=np.dot(w2[i,x[i],:,:],tmp2[i+1,:]) 119 | tmp2[0,:]=np.inner(w2[0,x[0],0,:],tmp2[1,:]) 120 | 121 | #The derivative of each tensor is the contraction of the other tensors 
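#(Editorial note: P(x) = f(x)^2 with f(x) the scalar MPS contraction stored
#in mpscontracted, so the chain rule gives dP/dA_i = 2 f(x) df/dA_i, where
#df/dA_i is the outer product of the cached left environment tmp[i-1,:] and
#right environment tmp2[i+1,:].)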
122 | derivative[0,x[0],0,:]=2*tmp2[1,:]*mpscontracted 123 | derivative[self.n_features-1,x[self.n_features-1],:,0]=\ 124 | 2*tmp[self.n_features-2,:]*mpscontracted 125 | for i in xrange(1,self.n_features-1): 126 | derivative[i,x[i],:,:]=2*np.outer(tmp[i-1,:],tmp2[i+1,:])*mpscontracted 127 | 128 | return derivative.reshape(self.m_parameters) 129 | 130 | def _derivativenorm(self): 131 | """Compute the derivative of the norm 132 | Returns 133 | ------- 134 | derivative : numpy array, shape (m_parameters,) 135 | """ 136 | w2=np.reshape(self.w,(self.n_features,self.d,self.D,self.D)) 137 | derivative=np.zeros((self.n_features,self.d,self.D,self.D)) 138 | 139 | tmp=np.zeros((self.n_features,self.D*self.D)) 140 | tmp2=np.zeros((self.n_features,self.D*self.D)) 141 | tmp[0,:]=np.tensordot(w2[0,:,0,:],w2[0,:,0,:],axes=([0],[0])).reshape(self.D*self.D) 142 | for i in xrange(1,self.n_features-1): 143 | tmp[i,:]=np.dot(tmp[i-1,:],np.tensordot(w2[i,:,:,:],w2[i,:,:,:], 144 | axes=([0],[0])).transpose((0,2,1,3)).reshape(self.D*self.D,self.D*self.D)) 145 | tmp[self.n_features-1,:]=np.inner(tmp[self.n_features-2,:], 146 | np.tensordot(w2[self.n_features-1,:,:,0],w2[self.n_features-1,:,:,0],axes=([0],[0])).reshape(self.D*self.D)) 147 | 148 | tmp2[self.n_features-1,:]=np.tensordot(w2[self.n_features-1,:,:,0], 149 | w2[self.n_features-1,:,:,0],axes=([0],[0])).reshape(self.D*self.D) 150 | for i in xrange(self.n_features-2,-1,-1): 151 | tmp2[i,:]=np.dot(np.tensordot(w2[i,:,:,:], 152 | w2[i,:,:,:],axes=([0],[0])).transpose((0,2,1,3)).reshape(self.D*self.D,self.D*self.D),tmp2[i+1,:]) 153 | tmp2[0,:]=np.inner(np.tensordot(w2[0,:,0,:],w2[0,:,0,:], 154 | axes=([0],[0])).reshape(self.D*self.D),tmp2[1,:]) 155 | 156 | 157 | for j in xrange(self.d): 158 | derivative[0,j,0,:]=2*np.dot(tmp2[1,:].reshape(self.D,self.D), 159 | w2[0,j,0,:]) 160 | derivative[self.n_features-1,j,:,0]=2*np.dot(tmp[self.n_features-2,:].reshape(self.D,self.D), 161 | w2[self.n_features-1,j,:,0]) 162 | for i in xrange(1,self.n_features-1): 163 | temp1=tmp[i-1,:].reshape(self.D,self.D) 164 | temp2=tmp2[i+1,:].reshape(self.D,self.D) 165 | 166 | for j in xrange(self.d): 167 | temp3=np.dot(np.dot(temp1,w2[i,j,:,:]),temp2.transpose()) 168 | derivative[i,j,:,:]=2*np.copy(temp3) 169 | 170 | return derivative.reshape(self.m_parameters) -------------------------------------------------------------------------------- /fitdataset.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Training tensor networks to approximate the distribution given by a dataset\n", 8 | "Simple notebook demonstrating how to train a tensor network to learn the distribution of a dataset." 9 | ] 10 | }, 11 | { 12 | "cell_type": "markdown", 13 | "metadata": {}, 14 | "source": [ 15 | "First import the dependencies and a tensor network model." 16 | ] 17 | }, 18 | { 19 | "cell_type": "code", 20 | "execution_count": 1, 21 | "metadata": { 22 | "collapsed": false 23 | }, 24 | "outputs": [], 25 | "source": [ 26 | "import numpy as np\n", 27 | "import pickle\n", 28 | "from tensornetworks.PositiveMPS import PositiveMPS\n", 29 | "from tensornetworks.RealBorn import RealBorn\n", 30 | "from tensornetworks.ComplexBorn import ComplexBorn\n", 31 | "from tensornetworks.RealLPS import RealLPS\n", 32 | "from tensornetworks.ComplexLPS import ComplexLPS" 33 | ] 34 | }, 35 | { 36 | "cell_type": "markdown", 37 | "metadata": {}, 38 | "source": [ 39 | "Now load a dataset. 
A dataset needs to be an integer numpy array, with each row corresponding to a training example and each column to a categorical variable taking values in 0 to d-1." 40 | ] 41 | }, 42 | { 43 | "cell_type": "code", 44 | "execution_count": 2, 45 | "metadata": { 46 | "collapsed": false 47 | }, 48 | "outputs": [], 49 | "source": [ 50 | "with open('datasets/lymphography', 'rb') as f:\n", 51 | " a=pickle.load(f)\n", 52 | "X=a[0]\n", 53 | "X=X.astype(int)" 54 | ] 55 | }, 56 | { 57 | "cell_type": "markdown", 58 | "metadata": {}, 59 | "source": [ 60 | "Create a tensor network model (here we use a real Born machine) with bond dimension 2. Specify a learning rate, size of batch and number of epochs of training. Use 'verbose=True' to print the details during training." 61 | ] 62 | }, 63 | { 64 | "cell_type": "code", 65 | "execution_count": 3, 66 | "metadata": { 67 | "collapsed": false 68 | }, 69 | "outputs": [], 70 | "source": [ 71 | "mps = RealBorn(D=2, learning_rate=0.2, batch_size=20, n_iter=30, verbose=True) " 72 | ] 73 | }, 74 | { 75 | "cell_type": "markdown", 76 | "metadata": {}, 77 | "source": [ 78 | "Fit the model to the data" 79 | ] 80 | }, 81 | { 82 | "cell_type": "code", 83 | "execution_count": 4, 84 | "metadata": { 85 | "collapsed": false 86 | }, 87 | "outputs": [ 88 | { 89 | "name": "stdout", 90 | "output_type": "stream", 91 | "text": [ 92 | "Iteration 1, likelihood = 26.820, time = 0.24s\n", 93 | "Iteration 2, likelihood = 21.811, time = 0.23s\n", 94 | "Iteration 3, likelihood = 19.467, time = 0.27s\n", 95 | "Iteration 4, likelihood = 18.320, time = 0.25s\n", 96 | "Iteration 5, likelihood = 17.542, time = 0.23s\n", 97 | "Iteration 6, likelihood = 17.621, time = 0.20s\n", 98 | "Iteration 7, likelihood = 17.561, time = 0.21s\n", 99 | "Iteration 8, likelihood = 17.001, time = 0.21s\n", 100 | "Iteration 9, likelihood = 16.481, time = 0.21s\n", 101 | "Iteration 10, likelihood = 16.209, time = 0.23s\n", 102 | "Iteration 11, likelihood = 15.964, time = 0.21s\n", 103 | "Iteration 12, likelihood = 15.826, time = 0.22s\n", 104 | "Iteration 13, likelihood = 15.704, time = 0.21s\n", 105 | "Iteration 14, likelihood = 15.628, time = 0.19s\n", 106 | "Iteration 15, likelihood = 15.411, time = 0.19s\n", 107 | "Iteration 16, likelihood = 15.338, time = 0.18s\n", 108 | "Iteration 17, likelihood = 15.296, time = 0.23s\n", 109 | "Iteration 18, likelihood = 15.252, time = 0.18s\n", 110 | "Iteration 19, likelihood = 15.222, time = 0.21s\n", 111 | "Iteration 20, likelihood = 15.209, time = 0.24s\n", 112 | "Iteration 21, likelihood = 15.149, time = 0.23s\n", 113 | "Iteration 22, likelihood = 15.159, time = 0.24s\n", 114 | "Iteration 23, likelihood = 15.086, time = 0.20s\n", 115 | "Iteration 24, likelihood = 15.070, time = 0.21s\n", 116 | "Iteration 25, likelihood = 14.991, time = 0.18s\n", 117 | "Iteration 26, likelihood = 15.018, time = 0.20s\n", 118 | "Iteration 27, likelihood = 14.937, time = 0.17s\n", 119 | "Iteration 28, likelihood = 14.968, time = 0.24s\n", 120 | "Iteration 29, likelihood = 14.867, time = 0.19s\n", 121 | "Iteration 30, likelihood = 14.870, time = 0.20s\n" 122 | ] 123 | }, 124 | { 125 | "data": { 126 | "text/plain": [ 127 | "" 128 | ] 129 | }, 130 | "execution_count": 4, 131 | "metadata": {}, 132 | "output_type": "execute_result" 133 | } 134 | ], 135 | "source": [ 136 | "mps.fit(X)" 137 | ] 138 | }, 139 | { 140 | "cell_type": "markdown", 141 | "metadata": {}, 142 | "source": [ 143 | "Finally evaluate the negative log-likelihood of the fitted model." 
144 | ] 145 | }, 146 | { 147 | "cell_type": "code", 148 | "execution_count": 5, 149 | "metadata": { 150 | "collapsed": false 151 | }, 152 | "outputs": [ 153 | { 154 | "data": { 155 | "text/plain": [ 156 | "14.869611163267741" 157 | ] 158 | }, 159 | "execution_count": 5, 160 | "metadata": {}, 161 | "output_type": "execute_result" 162 | } 163 | ], 164 | "source": [ 165 | "mps.likelihood(X)" 166 | ] 167 | }, 168 | { 169 | "cell_type": "markdown", 170 | "metadata": {}, 171 | "source": [ 172 | "Now create a tensor network model which is a complex LPS with bond dimension 2 and purification dimension of 2." 173 | ] 174 | }, 175 | { 176 | "cell_type": "code", 177 | "execution_count": 6, 178 | "metadata": { 179 | "collapsed": true 180 | }, 181 | "outputs": [], 182 | "source": [ 183 | "mps2 = ComplexLPS(D=2, learning_rate=0.5, batch_size=20, n_iter=30, verbose=True, mu=2) " 184 | ] 185 | }, 186 | { 187 | "cell_type": "markdown", 188 | "metadata": {}, 189 | "source": [ 190 | "Fit the model to the data" 191 | ] 192 | }, 193 | { 194 | "cell_type": "code", 195 | "execution_count": 7, 196 | "metadata": { 197 | "collapsed": false 198 | }, 199 | "outputs": [ 200 | { 201 | "name": "stdout", 202 | "output_type": "stream", 203 | "text": [ 204 | "Iteration 1, likelihood = 30.569, time = 0.24s\n", 205 | "Iteration 2, likelihood = 25.780, time = 0.42s\n", 206 | "Iteration 3, likelihood = 22.945, time = 0.44s\n", 207 | "Iteration 4, likelihood = 21.134, time = 0.37s\n", 208 | "Iteration 5, likelihood = 19.839, time = 0.39s\n", 209 | "Iteration 6, likelihood = 18.927, time = 0.47s\n", 210 | "Iteration 7, likelihood = 18.204, time = 0.43s\n", 211 | "Iteration 8, likelihood = 17.725, time = 0.41s\n", 212 | "Iteration 9, likelihood = 17.249, time = 0.39s\n", 213 | "Iteration 10, likelihood = 16.887, time = 0.45s\n", 214 | "Iteration 11, likelihood = 16.615, time = 0.45s\n", 215 | "Iteration 12, likelihood = 16.409, time = 0.41s\n", 216 | "Iteration 13, likelihood = 16.209, time = 0.55s\n", 217 | "Iteration 14, likelihood = 16.068, time = 0.45s\n", 218 | "Iteration 15, likelihood = 15.951, time = 0.67s\n", 219 | "Iteration 16, likelihood = 15.867, time = 0.52s\n", 220 | "Iteration 17, likelihood = 15.798, time = 0.52s\n", 221 | "Iteration 18, likelihood = 15.719, time = 0.58s\n", 222 | "Iteration 19, likelihood = 15.627, time = 0.49s\n", 223 | "Iteration 20, likelihood = 15.558, time = 0.56s\n", 224 | "Iteration 21, likelihood = 15.512, time = 0.59s\n", 225 | "Iteration 22, likelihood = 15.468, time = 0.55s\n", 226 | "Iteration 23, likelihood = 15.458, time = 0.51s\n", 227 | "Iteration 24, likelihood = 15.448, time = 0.56s\n", 228 | "Iteration 25, likelihood = 15.364, time = 0.60s\n", 229 | "Iteration 26, likelihood = 15.266, time = 0.58s\n", 230 | "Iteration 27, likelihood = 15.228, time = 0.53s\n", 231 | "Iteration 28, likelihood = 15.212, time = 0.58s\n", 232 | "Iteration 29, likelihood = 15.183, time = 0.56s\n", 233 | "Iteration 30, likelihood = 15.181, time = 0.53s\n" 234 | ] 235 | }, 236 | { 237 | "data": { 238 | "text/plain": [ 239 | "" 240 | ] 241 | }, 242 | "execution_count": 7, 243 | "metadata": {}, 244 | "output_type": "execute_result" 245 | } 246 | ], 247 | "source": [ 248 | "mps2.fit(X)" 249 | ] 250 | }, 251 | { 252 | "cell_type": "markdown", 253 | "metadata": {}, 254 | "source": [ 255 | "Finally evaluate the negative log-likelihood of the fitted model." 
256 | ] 257 | }, 258 | { 259 | "cell_type": "code", 260 | "execution_count": 8, 261 | "metadata": { 262 | "collapsed": false 263 | }, 264 | "outputs": [ 265 | { 266 | "data": { 267 | "text/plain": [ 268 | "15.181376744658168" 269 | ] 270 | }, 271 | "execution_count": 8, 272 | "metadata": {}, 273 | "output_type": "execute_result" 274 | } 275 | ], 276 | "source": [ 277 | "mps2.likelihood(X)" 278 | ] 279 | } 280 | ], 281 | "metadata": { 282 | "anaconda-cloud": {}, 283 | "celltoolbar": "Raw Cell Format", 284 | "kernelspec": { 285 | "display_name": "Python [conda root]", 286 | "language": "python", 287 | "name": "conda-root-py" 288 | }, 289 | "language_info": { 290 | "codemirror_mode": { 291 | "name": "ipython", 292 | "version": 2 293 | }, 294 | "file_extension": ".py", 295 | "mimetype": "text/x-python", 296 | "name": "python", 297 | "nbconvert_exporter": "python", 298 | "pygments_lexer": "ipython2", 299 | "version": "2.7.13" 300 | } 301 | }, 302 | "nbformat": 4, 303 | "nbformat_minor": 2 304 | } 305 | -------------------------------------------------------------------------------- /tensornetworks/ComplexBorn.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | 4 | from .MPSClass import TN 5 | import numpy as np 6 | from sklearn.externals.six.moves import xrange 7 | 8 | 9 | class ComplexBorn(TN): 10 | """Born machine with complex parameters 11 | Probability is the absolute value squared of the MPS 12 | Parameters 13 | ---------- 14 | D : int, optional 15 | Rank/Bond dimension of the MPS 16 | learning_rate : float, optional 17 | Learning rate of the gradient descent algorithm 18 | batch_size : int, optional 19 | Number of examples per minibatch. 20 | n_iter : int, optional 21 | Number of iterations (epochs) over the training dataset to perform 22 | during training. 23 | random_state : integer or numpy.RandomState, optional 24 | A random number generator instance to define the state of the 25 | random permutations generator. If an integer is given, it fixes the 26 | seed. Defaults to the global numpy random number generator. 27 | verbose : int, optional 28 | The verbosity level. The default, zero, means silent mode. 
29 | ---------- 30 | Attributes 31 | ---------- 32 | w : numpy array, shape (m_parameters) 33 | Parameters of the tensor network 34 | norm : float 35 | normalization constant for the probability distribution 36 | n_samples : int 37 | number of training samples 38 | n_features : int 39 | number of features in the dataset 40 | d : int 41 | physical dimension (dimension of the features) 42 | m_parameters : int 43 | number of parameters in the network 44 | history : list 45 | saves the training accuracies during training 46 | """ 47 | def __init__(self, D=4, learning_rate=0.1, batch_size=10, 48 | n_iter=100, random_state=None, verbose=False): 49 | self.D = D 50 | self.learning_rate = float(learning_rate) 51 | self.batch_size = batch_size 52 | self.n_iter = n_iter 53 | self.random_state = random_state 54 | self.verbose = verbose 55 | 56 | def _probability(self, x): 57 | """Unnormalized probability of one configuration P(x) 58 | Parameters 59 | ---------- 60 | x : numpy array, shape (n_features,) 61 | One configuration 62 | Returns 63 | ------- 64 | probability : float 65 | """ 66 | w2 = np.reshape(self.w,(self.n_features,self.d,self.D,self.D)) 67 | 68 | tmp = w2[0,x[0],0,:] #First tensor 69 | for i in xrange(1,self.n_features-1): 70 | tmp = np.dot(tmp,w2[i,x[i],:,:]) #MPS contraction 71 | output = np.inner(tmp,w2[self.n_features-1,x[self.n_features-1],:,0]) 72 | probability = np.abs(output)**2 73 | return probability 74 | 75 | def _computenorm(self): 76 | """Compute norm of probability distribution 77 | Returns 78 | ------- 79 | norm : float 80 | """ 81 | w2=np.reshape(self.w,(self.n_features,self.d,self.D,self.D)) 82 | tmp = np.tensordot(w2[0,:,0,:],np.conj(w2[0,:,0,:]), 83 | axes=([0],[0])).reshape(self.D*self.D) 84 | for i in xrange(1,self.n_features-1): 85 | tmp = np.dot(tmp,np.tensordot(w2[i,:,:,:],np.conj(w2[i,:,:,:]), 86 | axes=([0],[0])).transpose((0,2,1,3)).reshape(self.D*self.D,self.D*self.D)) 87 | norm = np.abs(np.inner(tmp,np.tensordot(w2[self.n_features-1,:,:,0], 88 | np.conj(w2[self.n_features-1,:,:,0]), 89 | axes=([0],[0])).reshape(self.D*self.D))) 90 | return norm 91 | 92 | def _derivative(self, x): 93 | """Compute the derivative of P(x) 94 | Parameters 95 | ---------- 96 | x : numpy array, shape (n_features,) 97 | One configuration 98 | Returns 99 | ------- 100 | derivative : numpy array, shape (m_parameters,) 101 | """ 102 | w2=np.reshape(self.w,(self.n_features,self.d,self.D,self.D)) 103 | derivative=np.zeros((self.n_features,self.d,self.D,self.D),dtype=np.complex128) 104 | 105 | #Store intermediate tensor contractions for the derivatives: 106 | #left to right and right to left 107 | #tmp stores the contraction of the first i+1 tensors from the left 108 | #in tmp[i,:,:], tmp2 the remaining tensors on the right 109 | #the mps contracted is the remaining contraction tmp[i-1]w[i]tmp2[i+1] 110 | tmp=np.zeros((self.n_features,self.D),dtype=np.complex128) 111 | tmp2=np.zeros((self.n_features,self.D),dtype=np.complex128) 112 | tmp[0,:]=w2[0,x[0],0,:] 113 | for i in xrange(1,self.n_features-1): 114 | tmp[i,:]=np.dot(tmp[i-1,:],w2[i,x[i],:,:]) 115 | mpscontracted=np.inner(tmp[self.n_features-2,:],w2[self.n_features-1, 116 | x[self.n_features-1],:,0]) 117 | 118 | tmp[self.n_features-1,:]=np.inner(tmp[self.n_features-2,:], 119 | w2[self.n_features-1,x[self.n_features-1],:,0]) 120 | tmp2[self.n_features-1,:]=w2[self.n_features-1,x[self.n_features-1],:,0] 121 | for i in xrange(self.n_features-2,-1,-1): 122 | tmp2[i,:]=np.dot(w2[i,x[i],:,:],tmp2[i+1,:]) 123 | 
tmp2[0,:]=np.inner(w2[0,x[0],0,:],tmp2[1,:]) 124 | 125 | #The derivative of each tensor is the contraction of the other tensors 126 | derivative[0,x[0],0,:]=2*np.conj(tmp2[1,:])*mpscontracted 127 | derivative[self.n_features-1, 128 | x[self.n_features-1],:,0]=2*np.conj(tmp[self.n_features-2,:])*mpscontracted 129 | for i in xrange(1,self.n_features-1): 130 | derivative[i,x[i],:,:]=2*np.conj(np.outer(tmp[i-1,:], 131 | tmp2[i+1,:]))*mpscontracted 132 | 133 | return derivative.reshape(self.m_parameters) 134 | 135 | def _derivativenorm(self): 136 | """Compute the derivative of the norm 137 | Returns 138 | ------- 139 | derivative : numpy array, shape (m_parameters,) 140 | """ 141 | 142 | w2=np.reshape(self.w,(self.n_features,self.d,self.D,self.D)) 143 | derivative=np.zeros((self.n_features,self.d,self.D,self.D),dtype=np.complex128) 144 | 145 | tmp=np.zeros((self.n_features,self.D*self.D),dtype=np.complex128) 146 | tmp2=np.zeros((self.n_features,self.D*self.D),dtype=np.complex128) 147 | tmp[0,:]=np.tensordot(w2[0,:,0,:],np.conj(w2[0,:,0,:]),axes=([0],[0])).reshape(self.D*self.D) 148 | for i in xrange(1,self.n_features-1): 149 | tmp[i,:]=np.dot(tmp[i-1,:],np.tensordot(w2[i,:,:,:],np.conj(w2[i,:,:,:]), 150 | axes=([0],[0])).transpose((0,2,1,3)).reshape(self.D*self.D,self.D*self.D)) 151 | tmp[self.n_features-1,:]=np.inner(tmp[self.n_features-2,:], 152 | np.tensordot(w2[self.n_features-1,:,:,0], 153 | np.conj(w2[self.n_features-1,:,:,0]), 154 | axes=([0],[0])).reshape(self.D*self.D)) 155 | 156 | tmp2[self.n_features-1,:]=np.tensordot(w2[self.n_features-1,:,:,0], 157 | np.conj(w2[self.n_features-1,:,:,0]), 158 | axes=([0],[0])).reshape(self.D*self.D) 159 | for i in xrange(self.n_features-2,-1,-1): 160 | tmp2[i,:]=np.dot(np.tensordot(w2[i,:,:,:],np.conj(w2[i,:,:,:]), 161 | axes=([0],[0])).transpose((0,2,1,3)).reshape(self.D*self.D, 162 | self.D*self.D),tmp2[i+1,:]) 163 | tmp2[0,:]=np.inner(np.tensordot(w2[0,:,0,:],np.conj(w2[0,:,0,:]), 164 | axes=([0],[0])).reshape(self.D*self.D),tmp2[1,:]) 165 | 166 | 167 | for j in xrange(self.d): 168 | derivative[0,j,0,:]=2*np.dot(w2[0,j,0,:], 169 | tmp2[1,:].reshape(self.D,self.D)) 170 | derivative[self.n_features-1,j,:,0]=2*np.dot(w2[self.n_features-1,j,:,0], 171 | tmp[self.n_features-2,:].reshape(self.D,self.D)) 172 | for i in xrange(1,self.n_features-1): 173 | temp1=tmp[i-1,:].reshape(self.D,self.D) 174 | temp2=tmp2[i+1,:].reshape(self.D,self.D) 175 | 176 | for j in xrange(self.d): 177 | temp3=np.dot(np.dot(temp1.transpose(),w2[i,j,:,:]),temp2) 178 | derivative[i,j,:,:]=2*np.copy(temp3) 179 | 180 | return derivative.reshape(self.m_parameters) 181 | 182 | 183 | def _weightinitialization(self, rng): 184 | """Initialize weights w randomly 185 | Parameters 186 | ---------- 187 | rng : random number generation 188 | """ 189 | self.w=np.asarray(rng.normal(0, 1, self.m_parameters))\ 190 | +1j*np.asarray(rng.normal(0, 1, self.m_parameters)) 191 | 192 | def _weightinitialization2(self,rng): 193 | """Initialize weights w randomly 194 | Parameters 195 | ---------- 196 | rng : random number generation 197 | """ 198 | self.m_parameters2=(self.n_features-2)*self.d*self.D*self.D+2*self.D*self.d 199 | return np.asarray(rng.normal(0, 1, self.m_parameters2))\ 200 | +1j*np.asarray(rng.normal(0, 1, self.m_parameters2)) 201 | 202 | def _likelihood_derivative(self, v): 203 | """Compute derivative of log-likelihood of configurations in v 204 | Parameters 205 | ---------- 206 | v : numpy array, shape (n_samples,n_features) 207 | Configurations 208 | Returns 209 | ------- 210 | 
update_w : numpy array, shape (m_parameters,) 211 | array of derivatives of the log-likelihood 212 | """ 213 | update_w=np.zeros(self.m_parameters,dtype=np.complex128) 214 | for n in xrange(v.shape[0]): 215 | update_w -= self._logderivative(v[n,:]) 216 | update_w += v.shape[0]*self._logderivativenorm() 217 | update_w /= v.shape[0] 218 | return update_w -------------------------------------------------------------------------------- /tensornetworks/RealLPS.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from .MPSClass import TN 4 | import numpy as np 5 | from sklearn.externals.six.moves import xrange 6 | 7 | 8 | class RealLPS(TN): 9 | """Locally purified states (LPS) with real elements 10 | Parameters 11 | ---------- 12 | D : int, optional 13 | Rank/Bond dimension of the MPS 14 | learning_rate : float, optional 15 | Learning rate of the gradient descent algorithm 16 | batch_size : int, optional 17 | Number of examples per minibatch. 18 | n_iter : int, optional 19 | Number of iterations (epochs) over the training dataset to perform 20 | during training. 21 | random_state : integer or numpy.RandomState, optional 22 | A random number generator instance to define the state of the 23 | random permutations generator. If an integer is given, it fixes the 24 | seed. Defaults to the global numpy random number generator. 25 | verbose : int, optional 26 | The verbosity level. The default, zero, means silent mode. 27 | mu : int, optional 28 | Dimension of the purification link 29 | ---------- 30 | Attributes 31 | ---------- 32 | w : numpy array, shape (m_parameters) 33 | Parameters of the tensor network 34 | norm : float 35 | normalization constant for the probability distribution 36 | n_samples : int 37 | number of training samples 38 | n_features : int 39 | number of features in the dataset 40 | d : int 41 | physical dimension (dimension of the features) 42 | m_parameters : int 43 | number of parameters in the network 44 | history : list 45 | saves the training accuracies during training 46 | """ 47 | def __init__(self, D=4, learning_rate=0.1, batch_size=10, 48 | n_iter=100, random_state=None, verbose=False, mu=2): 49 | self.D = D 50 | self.learning_rate = float(learning_rate) 51 | self.batch_size = batch_size 52 | self.n_iter = n_iter 53 | self.random_state = random_state 54 | self.verbose = verbose 55 | self.mu = mu 56 | 57 | def _probability(self, x): 58 | """Unnormalized probability of one configuration P(x) 59 | Parameters 60 | ---------- 61 | x : numpy array, shape (n_features,) 62 | One configuration 63 | Returns 64 | ------- 65 | probability : float 66 | """ 67 | w2 = np.reshape(self.w,(self.n_features,self.d,self.D,self.D,self.mu)) 68 | 69 | tmp = w2[0,x[0],0,:,:] 70 | tmp2 = np.einsum('ij,kj->ik',tmp,np.conjugate(tmp)).reshape(self.D*self.D) 71 | for i in xrange(1,self.n_features-1): 72 | tmp = np.einsum('imj,klj->ikml',w2[i,x[i],:,:,:], 73 | np.conjugate(w2[i,x[i],:,:,:])).reshape((self.D*self.D,self.D*self.D)) 74 | tmp2 = np.dot(tmp2,tmp) 75 | 76 | tmp = np.einsum('ij,kj->ik',w2[self.n_features-1,x[self.n_features-1],:,0,:], 77 | np.conjugate(w2[self.n_features-1, 78 | x[self.n_features-1],:,0,:])).reshape(self.D*self.D) 79 | probability = np.abs(np.inner(tmp2,tmp)) 80 | return probability 81 | 82 | 83 | def _computenorm(self): 84 | """Compute norm of probability distribution 85 | Returns 86 | ------- 87 | norm : float 88 | """ 89 | w2 = np.reshape(self.w,(self.n_features,self.d,self.D,self.D,self.mu)) 90 | 91 | tmp2 
= np.einsum('ijk,ilk->jl',w2[0,:,0,:,:], 92 | np.conj(w2[0,:,0,:,:])).reshape(self.D*self.D) 93 | for i in xrange(1,self.n_features-1): 94 | tmp = np.einsum('pimj,pklj->ikml',w2[i,:,:,:,:], 95 | np.conjugate(w2[i,:,:,:,:])).reshape((self.D*self.D,self.D*self.D)) 96 | tmp2 = np.dot(tmp2,tmp) 97 | tmp = np.einsum('ijk,ilk->jl',w2[self.n_features-1,:,:,0,:], 98 | np.conjugate(w2[self.n_features-1,:,:,0,:])).reshape(self.D*self.D) 99 | norm = np.abs(np.inner(tmp2,tmp)) 100 | return norm 101 | 102 | def _derivative(self, x): 103 | """Compute the derivative of P(x) 104 | Parameters 105 | ---------- 106 | x : numpy array, shape (n_features,) 107 | One configuration 108 | Returns 109 | ------- 110 | derivative : numpy array, shape (m_parameters,) 111 | """ 112 | w2=np.reshape(self.w,(self.n_features,self.d,self.D,self.D,self.mu)) 113 | derivative=np.zeros((self.n_features,self.d,self.D,self.D,self.mu),dtype=np.float64) 114 | 115 | #Store intermediate tensor contractions for the derivatives: 116 | #left to right and right to left 117 | #tmp stores the contraction of the first i+1 tensors from the left 118 | #in tmp[i,:,:], tmp2 the remaining tensors on the right 119 | #the mps contracted is the remaining contraction tmp[i-1]w[i]tmp2[i+1] 120 | tmp=np.zeros((self.n_features,self.D*self.D),dtype=np.float64) 121 | tmp2=np.zeros((self.n_features,self.D*self.D),dtype=np.float64) 122 | tmp[0,:] = np.einsum('ij,kj->ik',w2[0,x[0],0,:,:], 123 | np.conjugate(w2[0,x[0],0,:,:])).reshape(self.D*self.D) 124 | for i in xrange(1,self.n_features-1): 125 | newtmp = np.einsum('imj,klj->ikml',w2[i,x[i],:,:,:], 126 | np.conjugate(w2[i,x[i],:,:,:])).reshape((self.D*self.D,self.D*self.D)) 127 | tmp[i,:]=np.dot(tmp[i-1,:],newtmp) 128 | newtmp = np.einsum('ij,kj->ik',w2[self.n_features-1, 129 | x[self.n_features-1],:,0,:],np.conjugate(w2[self.n_features-1, 130 | x[self.n_features-1],:,0,:])).reshape(self.D*self.D) 131 | mpscontracted=np.inner(tmp[self.n_features-2,:],newtmp) 132 | tmp[self.n_features-1,:]=mpscontracted 133 | 134 | 135 | tmp2[self.n_features-1,:]=newtmp 136 | for i in xrange(self.n_features-2,-1,-1): 137 | newtmp = np.einsum('imj,klj->ikml',w2[i,x[i],:,:,:], 138 | np.conjugate(w2[i,x[i],:,:,:])).reshape((self.D*self.D,self.D*self.D)) 139 | tmp2[i,:]=np.dot(newtmp,tmp2[i+1,:]) 140 | newtmp=np.einsum('ij,kj->ik',w2[0,x[0],0,:,:],np.conjugate(w2[0,x[0],0,:,:])).reshape(self.D*self.D) 141 | tmp2[0,:]=np.inner(newtmp,tmp2[1,:]) 142 | 143 | #Now for each tensor, the derivative is the contraction of the rest of the tensors 144 | 145 | derivative[0,x[0],0,:,:]=2*np.einsum('ij,il->lj', 146 | w2[0,x[0],0,:,:],tmp2[1,:].reshape(self.D,self.D)) 147 | derivative[self.n_features-1,x[self.n_features-1],:,0,:]=\ 148 | 2*np.einsum('ij,il->lj',w2[self.n_features-1, 149 | x[self.n_features-1],:,0,:],tmp[self.n_features-2,:].reshape(self.D,self.D)) 150 | for i in xrange(1,self.n_features-1): 151 | temp1=tmp[i-1,:].reshape(self.D,self.D) 152 | temp2=tmp2[i+1,:].reshape(self.D,self.D) 153 | derivative[i,x[i],:,:,:]=2*np.einsum('ikm,ij,kl->jlm',w2[i,x[i],:,:,:],temp1,temp2) 154 | 155 | return derivative.reshape(self.m_parameters) 156 | 157 | def _derivativenorm(self): 158 | """Compute the derivative of the norm 159 | Returns 160 | ------- 161 | derivative : numpy array, shape (m_parameters,) 162 | """ 163 | 164 | w2=np.reshape(self.w,(self.n_features,self.d,self.D,self.D,self.mu)) 165 | derivative=np.zeros((self.n_features,self.d,self.D,self.D,self.mu),dtype=np.float64) 166 | 167 | 
tmp=np.zeros((self.n_features,self.D*self.D),dtype=np.float64) 168 | tmp2=np.zeros((self.n_features,self.D*self.D),dtype=np.float64) 169 | 170 | tmp[0,:] = np.einsum('ijk,ilk->jl',w2[0,:,0,:,:],np.conj(w2[0,:,0,:,:])).reshape(self.D*self.D) 171 | for i in xrange(1,self.n_features-1): 172 | newtmp = np.einsum('pimj,pklj->ikml',w2[i,:,:,:,:], 173 | np.conjugate(w2[i,:,:,:,:])).reshape((self.D*self.D,self.D*self.D)) 174 | tmp[i,:] = np.dot(tmp[i-1,:],newtmp) 175 | newtmp = np.einsum('ijk,ilk->jl',w2[self.n_features-1,:,:,0,:], 176 | np.conjugate(w2[self.n_features-1,:,:,0,:])).reshape(self.D*self.D) 177 | mpscontracted=np.inner(tmp[self.n_features-2,:],newtmp) 178 | tmp[self.n_features-1,:]=mpscontracted 179 | 180 | tmp2[self.n_features-1,:]=newtmp 181 | for i in xrange(self.n_features-2,-1,-1): 182 | newtmp = np.einsum('pimj,pklj->ikml',w2[i,:,:,:,:], 183 | np.conjugate(w2[i,:,:,:,:])).reshape((self.D*self.D,self.D*self.D)) 184 | tmp2[i,:] = np.dot(newtmp,tmp2[i+1,:]) 185 | newtmp=np.einsum('ijk,ilk->jl',w2[0,:,0,:,:], 186 | np.conj(w2[0,:,0,:,:])).reshape(self.D*self.D) 187 | tmp2[0,:]=np.inner(newtmp,tmp2[1,:]) 188 | 189 | for j in xrange(self.d): 190 | derivative[0,j,0,:,:]=2*np.einsum('ij,il->lj',w2[0,j,0,:,:], 191 | tmp2[1,:].reshape(self.D,self.D)) 192 | derivative[self.n_features-1,j,:,0,:]=\ 193 | 2*np.einsum('ij,il->lj',w2[self.n_features-1,j,:,0,:], 194 | tmp[self.n_features-2,:].reshape(self.D,self.D)) 195 | for i in xrange(1,self.n_features-1): 196 | temp1=tmp[i-1,:].reshape(self.D,self.D) 197 | temp2=tmp2[i+1,:].reshape(self.D,self.D) 198 | for j in xrange(self.d): 199 | derivative[i,j,:,:,:]=2*np.einsum('ikm,ij,kl->jlm', 200 | w2[i,j,:,:,:],temp1,temp2) 201 | 202 | return derivative.reshape(self.m_parameters) 203 | 204 | 205 | def _weightinitialization(self,rng): 206 | """Initialize weights w randomly 207 | Parameters 208 | ---------- 209 | rng : random number generation 210 | """ 211 | self.m_parameters = self.n_features*self.d*self.D*self.D*self.mu 212 | self.w=np.asarray(rng.normal(0, 1, self.m_parameters)) 213 | 214 | 215 | def _weightinitialization2(self,rng): 216 | """Initialize weights w randomly 217 | Parameters 218 | ---------- 219 | rng : random number generation 220 | """ 221 | self.m_parameters2 = (self.n_features-2)*self.d*self.D*self.D*self.mu+self.d*self.D*self.mu*2 222 | return np.asarray(rng.rand(self.m_parameters2)) 223 | 224 | def _padding_function(self, w): 225 | """Reshaping function to add to the input parameters the unused parameters 226 | at the boundary conditions. 227 | Parameters 228 | ---------- 229 | w : numpy array, shape (m_parameters2,) 230 | Returns 231 | ------- 232 | w : numpy array, shape (m_parameters,) 233 | """ 234 | new_w=np.zeros((self.n_features,self.d,self.D,self.D,self.mu),dtype=w.dtype) 235 | new_w[0,:,0,:,:]=w[0:self.D*self.d*self.mu].reshape(self.d,self.D,self.mu) 236 | new_w[1:self.n_features-1,:,:,:,:]=w[self.D*self.d*self.mu*2:].reshape((self.n_features-2,self.d,self.D,self.D,self.mu)) 237 | new_w[self.n_features-1,:,:,0,:]=w[self.D*self.d*self.mu:self.D*self.d*self.mu*2].reshape(self.d,self.D,self.mu) 238 | return new_w.reshape(self.m_parameters) 239 | 240 | def _unpadding_function(self, w): 241 | """Reshaping function to remove the unused parameters of the boundary conditions. 
242 | Parameters 243 | ---------- 244 | w : numpy array, shape (m_parameters,) 245 | Returns 246 | ------- 247 | w : numpy array, shape (m_parameters2,) 248 | """ 249 | w=w.reshape((self.n_features,self.d,self.D,self.D,self.mu)) 250 | new_w=np.zeros(self.m_parameters2,dtype=w.dtype) 251 | new_w[0:self.D*self.d*self.mu]=w[0,:,0,:,:].reshape(self.d*self.D*self.mu) 252 | new_w[self.D*self.d*self.mu:self.D*self.d*self.mu*2]=w[self.n_features-1,:,:,0,:].reshape(self.d*self.D*self.mu) 253 | new_w[self.D*self.d*self.mu*2:]=w[1:self.n_features-1,:,:,:,:].reshape((self.n_features-2)*self.d*self.D*self.D*self.mu) 254 | return new_w 255 | 256 | def _likelihood_derivative(self, v): 257 | """Compute derivative of log-likelihood of configurations in v 258 | Parameters 259 | ---------- 260 | v : numpy array, shape (n_samples,n_features) 261 | Configurations 262 | Returns 263 | ------- 264 | update_w : numpy array, shape (m_parameters,) 265 | array of derivatives of the log-likelihood 266 | """ 267 | update_w=np.zeros(self.m_parameters,dtype=np.float64) 268 | for n in xrange(v.shape[0]): 269 | update_w -= self._logderivative(v[n,:]) 270 | update_w += v.shape[0]*self._logderivativenorm() 271 | update_w /= v.shape[0] 272 | return update_w -------------------------------------------------------------------------------- /tensornetworks/ComplexLPS.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from .MPSClass import TN 4 | import numpy as np 5 | from sklearn.externals.six.moves import xrange 6 | 7 | 8 | class ComplexLPS(TN): 9 | """Locally purified states (LPS) with complex elements 10 | Parameters 11 | ---------- 12 | D : int, optional 13 | Rank/Bond dimension of the MPS 14 | learning_rate : float, optional 15 | Learning rate of the gradient descent algorithm 16 | batch_size : int, optional 17 | Number of examples per minibatch. 18 | n_iter : int, optional 19 | Number of iterations (epochs) over the training dataset to perform 20 | during training. 21 | random_state : integer or numpy.RandomState, optional 22 | A random number generator instance to define the state of the 23 | random permutations generator and of the initial parameters. 24 | If an integer is given, it fixes the 25 | seed. Defaults to the global numpy random number generator. 26 | verbose : int, optional 27 | The verbosity level. The default, zero, means silent mode. 
28 | mu : int, optional 29 | Dimension of the purification link 30 | ---------- 31 | Attributes 32 | ---------- 33 | w : numpy array, shape (m_parameters) 34 | Parameters of the tensor network 35 | norm : float 36 | normalization constant for the probability distribution 37 | n_samples : int 38 | number of training samples 39 | n_features : int 40 | number of features in the dataset 41 | d : int 42 | physical dimension (dimension of the features) 43 | m_parameters : int 44 | number of parameters in the network 45 | history : list 46 | saves the training accuracies during training 47 | """ 48 | def __init__(self, D=4, learning_rate=0.1, batch_size=10, 49 | n_iter=100, random_state=None, verbose=False, mu=2): 50 | self.D = D 51 | self.learning_rate = float(learning_rate) 52 | self.batch_size = batch_size 53 | self.n_iter = n_iter 54 | self.random_state = random_state 55 | self.verbose = verbose 56 | self.mu = mu 57 | 58 | def _probability(self, x): 59 | """Unnormalized probability of one configuration P(x) 60 | Parameters 61 | ---------- 62 | x : numpy array, shape (n_features,) 63 | One configuration 64 | Returns 65 | ------- 66 | probability : float 67 | """ 68 | w2 = np.reshape(self.w,(self.n_features,self.d,self.D,self.D,self.mu)) 69 | 70 | tmp = w2[0,x[0],0,:,:] 71 | tmp2 = np.einsum('ij,kj->ik',tmp,np.conjugate(tmp)).reshape(self.D*self.D) 72 | for i in xrange(1,self.n_features-1): 73 | tmp = np.einsum('imj,klj->ikml',w2[i,x[i],:,:,:], 74 | np.conjugate(w2[i,x[i],:,:,:])).reshape((self.D*self.D,self.D*self.D)) 75 | tmp2 = np.dot(tmp2,tmp) 76 | 77 | tmp = np.einsum('ij,kj->ik',w2[self.n_features-1,x[self.n_features-1],:,0,:], 78 | np.conjugate(w2[self.n_features-1, 79 | x[self.n_features-1],:,0,:])).reshape(self.D*self.D) 80 | probability = np.abs(np.inner(tmp2,tmp)) 81 | return probability 82 | 83 | def _computenorm(self): 84 | """Compute norm of probability distribution 85 | Returns 86 | ------- 87 | norm : float 88 | """ 89 | w2 = np.reshape(self.w,(self.n_features,self.d,self.D,self.D,self.mu)) 90 | 91 | tmp2 = np.einsum('ijk,ilk->jl',w2[0,:,0,:,:], 92 | np.conj(w2[0,:,0,:,:])).reshape(self.D*self.D) 93 | for i in xrange(1,self.n_features-1): 94 | tmp = np.einsum('pimj,pklj->ikml',w2[i,:,:,:,:], 95 | np.conjugate(w2[i,:,:,:,:])).reshape((self.D*self.D,self.D*self.D)) 96 | tmp2 = np.dot(tmp2,tmp) 97 | tmp = np.einsum('ijk,ilk->jl',w2[self.n_features-1,:,:,0,:], 98 | np.conjugate(w2[self.n_features-1,:,:,0,:])).reshape(self.D*self.D) 99 | norm = np.abs(np.inner(tmp2,tmp)) 100 | return norm 101 | 102 | def _derivative(self, x): 103 | """Compute the derivative of P(x) 104 | Parameters 105 | ---------- 106 | x : numpy array, shape (n_features,) 107 | One configuration 108 | Returns 109 | ------- 110 | derivative : numpy array, shape (m_parameters,) 111 | """ 112 | w2=np.reshape(self.w,(self.n_features,self.d,self.D,self.D,self.mu)) 113 | derivative=np.zeros((self.n_features,self.d,self.D,self.D,self.mu), 114 | dtype=np.complex128) 115 | 116 | #Store intermediate tensor contractions for the derivatives, 117 | #sweeping once from left to right and once from right to left: 118 | #tmp[i,:] stores the contraction of the first i+1 tensors from the left, 119 | #tmp2[i,:] the contraction of the remaining tensors from the right, 120 | #so the full contraction factorizes as tmp[i-1]*w[i]*tmp2[i+1] 121 | tmp=np.zeros((self.n_features,self.D*self.D),dtype=np.complex128) 122 | tmp2=np.zeros((self.n_features,self.D*self.D),dtype=np.complex128) 123 | tmp[0,:] = np.einsum('ij,kj->ik',w2[0,x[0],0,:,:], 124 | 
np.conjugate(w2[0,x[0],0,:,:])).reshape(self.D*self.D) 125 | for i in xrange(1,self.n_features-1): 126 | newtmp = np.einsum('imj,klj->ikml',w2[i,x[i],:,:,:], 127 | np.conjugate(w2[i,x[i],:,:,:])).reshape((self.D*self.D,self.D*self.D)) 128 | tmp[i,:]=np.dot(tmp[i-1,:],newtmp) 129 | newtmp = np.einsum('ij,kj->ik',w2[self.n_features-1,x[self.n_features-1],:,0,:], 130 | np.conjugate(w2[self.n_features-1,x[self.n_features-1],:,0,:])).reshape(self.D*self.D) 131 | mpscontracted=np.inner(tmp[self.n_features-2,:],newtmp) 132 | tmp[self.n_features-1,:]=mpscontracted 133 | 134 | 135 | tmp2[self.n_features-1,:]=newtmp 136 | for i in xrange(self.n_features-2,-1,-1): 137 | newtmp = np.einsum('imj,klj->ikml',w2[i,x[i],:,:,:], 138 | np.conjugate(w2[i,x[i],:,:,:])).reshape((self.D*self.D,self.D*self.D)) 139 | tmp2[i,:]=np.dot(newtmp,tmp2[i+1,:]) 140 | newtmp=np.einsum('ij,kj->ik',w2[0,x[0],0,:,:], 141 | np.conjugate(w2[0,x[0],0,:,:])).reshape(self.D*self.D) 142 | tmp2[0,:]=np.inner(newtmp,tmp2[1,:]) 143 | 144 | #Now for each tensor, the derivative is the contraction of the rest of the tensors 145 | 146 | derivative[0,x[0],0,:,:]=2*np.einsum('ij,il->lj', 147 | w2[0,x[0],0,:,:],tmp2[1,:].reshape(self.D,self.D)) 148 | derivative[self.n_features-1,x[self.n_features-1],:,0,:]=\ 149 | 2*np.einsum('ij,il->lj',w2[self.n_features-1,x[self.n_features-1],:,0,:], 150 | tmp[self.n_features-2,:].reshape(self.D,self.D)) 151 | for i in xrange(1,self.n_features-1): 152 | temp1=tmp[i-1,:].reshape(self.D,self.D) 153 | temp2=tmp2[i+1,:].reshape(self.D,self.D) 154 | derivative[i,x[i],:,:,:]=2*np.einsum('ikm,ij,kl->jlm',w2[i,x[i],:,:,:],temp1,temp2) 155 | 156 | return derivative.reshape(self.m_parameters) 157 | 158 | def _derivativenorm(self): 159 | """Compute the derivative of the norm 160 | Returns 161 | ------- 162 | derivative : numpy array, shape (m_parameters,) 163 | """ 164 | 165 | w2=np.reshape(self.w,(self.n_features,self.d,self.D,self.D,self.mu)) 166 | derivative=np.zeros((self.n_features,self.d,self.D,self.D,self.mu),dtype=np.complex128) 167 | 168 | tmp=np.zeros((self.n_features,self.D*self.D),dtype=np.complex128) 169 | tmp2=np.zeros((self.n_features,self.D*self.D),dtype=np.complex128) 170 | 171 | tmp[0,:] = np.einsum('ijk,ilk->jl',w2[0,:,0,:,:], 172 | np.conj(w2[0,:,0,:,:])).reshape(self.D*self.D) 173 | for i in xrange(1,self.n_features-1): 174 | newtmp = np.einsum('pimj,pklj->ikml',w2[i,:,:,:,:], 175 | np.conjugate(w2[i,:,:,:,:])).reshape((self.D*self.D,self.D*self.D)) 176 | tmp[i,:] = np.dot(tmp[i-1,:],newtmp) 177 | newtmp = np.einsum('ijk,ilk->jl',w2[self.n_features-1,:,:,0,:], 178 | np.conjugate(w2[self.n_features-1,:,:,0,:])).reshape(self.D*self.D) 179 | mpscontracted=np.inner(tmp[self.n_features-2,:],newtmp) 180 | tmp[self.n_features-1,:]=mpscontracted 181 | 182 | tmp2[self.n_features-1,:]=newtmp 183 | for i in xrange(self.n_features-2,-1,-1): 184 | newtmp = np.einsum('pimj,pklj->ikml',w2[i,:,:,:,:], 185 | np.conjugate(w2[i,:,:,:,:])).reshape((self.D*self.D,self.D*self.D)) 186 | tmp2[i,:] = np.dot(newtmp,tmp2[i+1,:]) 187 | newtmp=np.einsum('ijk,ilk->jl',w2[0,:,0,:,:],np.conj(w2[0,:,0,:,:])).reshape(self.D*self.D) 188 | tmp2[0,:]=np.inner(newtmp,tmp2[1,:]) 189 | 190 | for j in xrange(self.d): 191 | derivative[0,j,0,:,:]=2*np.einsum('ij,il->lj',w2[0,j,0,:,:], 192 | tmp2[1,:].reshape(self.D,self.D)) 193 | derivative[self.n_features-1,j,:,0,:]=\ 194 | 2*np.einsum('ij,il->lj',w2[self.n_features-1,j,:,0,:], 195 | tmp[self.n_features-2,:].reshape(self.D,self.D)) 196 | for i in xrange(1,self.n_features-1): 197 | 
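            #middle tensors: contract the cached left environment tmp[i-1] and
            #right environment tmp2[i+1] with the site tensor for each physical index j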
temp1=tmp[i-1,:].reshape(self.D,self.D) 198 | temp2=tmp2[i+1,:].reshape(self.D,self.D) 199 | for j in xrange(self.d): 200 | derivative[i,j,:,:,:]=2*np.einsum('ikm,ij,kl->jlm',w2[i,j,:,:,:],temp1,temp2) 201 | 202 | return derivative.reshape(self.m_parameters) 203 | 204 | 205 | def _weightinitialization(self,rng): 206 | """Initialize weights w randomly 207 | Parameters 208 | ---------- 209 | rng : random number generation 210 | """ 211 | self.m_parameters = self.n_features*self.d*self.D*self.D*self.mu 212 | self.w=np.asarray(rng.normal(0, 1, self.m_parameters))\ 213 | +1j*np.asarray(rng.normal(0, 1, self.m_parameters)) 214 | 215 | def _weightinitialization2(self,rng): 216 | """Initialize weights w randomly 217 | Parameters 218 | ---------- 219 | rng : random number generation 220 | """ 221 | self.m_parameters2 = (self.n_features-2)*self.d*self.D*self.D*self.mu+self.d*self.D*self.mu*2 222 | return np.asarray(rng.normal(0, 1, self.m_parameters2))\ 223 | +1j*np.asarray(rng.normal(0, 1, self.m_parameters2)) 224 | 225 | def _padding_function(self, w): 226 | """Reshaping function to add to the input parameters the unused parameters 227 | at the boundary conditions. 228 | Parameters 229 | ---------- 230 | w : numpy array, shape (m_parameters2,) 231 | Returns 232 | ------- 233 | w : numpy array, shape (m_parameters,) 234 | """ 235 | new_w=np.zeros((self.n_features,self.d,self.D,self.D,self.mu),dtype=w.dtype) 236 | new_w[0,:,0,:,:]=w[0:self.D*self.d*self.mu].reshape(self.d,self.D,self.mu) 237 | new_w[1:self.n_features-1,:,:,:,:]=w[self.D*self.d*self.mu*2:].reshape((self.n_features-2,self.d,self.D,self.D,self.mu)) 238 | new_w[self.n_features-1,:,:,0,:]=w[self.D*self.d*self.mu:self.D*self.d*self.mu*2].reshape(self.d,self.D,self.mu) 239 | return new_w.reshape(self.m_parameters) 240 | 241 | def _unpadding_function(self, w): 242 | """Reshaping function to remove the unused parameters of the boundary conditions. 
243 | Parameters 244 | ---------- 245 | w : numpy array, shape (m_parameters,) 246 | Returns 247 | ------- 248 | w : numpy array, shape (m_parameters2,) 249 | """ 250 | w=w.reshape((self.n_features,self.d,self.D,self.D,self.mu)) 251 | new_w=np.zeros(self.m_parameters2,dtype=w.dtype) 252 | new_w[0:self.D*self.d*self.mu]=w[0,:,0,:,:].reshape(self.d*self.D*self.mu) 253 | new_w[self.D*self.d*self.mu:self.D*self.d*self.mu*2]=w[self.n_features-1,:,:,0,:].reshape(self.d*self.D*self.mu) 254 | new_w[self.D*self.d*self.mu*2:]=w[1:self.n_features-1,:,:,:,:].reshape((self.n_features-2)*self.d*self.D*self.D*self.mu) 255 | return new_w 256 | 257 | def _likelihood_derivative(self, v): 258 | """Compute derivative of log-likelihood of configurations in v 259 | Parameters 260 | ---------- 261 | v : numpy array, shape (n_samples,n_features) 262 | Configurations 263 | Returns 264 | ------- 265 | update_w : numpy array, shape (m_parameters,) 266 | array of derivatives of the log-likelihood 267 | """ 268 | update_w=np.zeros(self.m_parameters,dtype=np.complex128) 269 | for n in xrange(v.shape[0]): 270 | update_w -= self._logderivative(v[n,:]) 271 | update_w += v.shape[0]*self._logderivativenorm() 272 | update_w /= v.shape[0] 273 | return update_w -------------------------------------------------------------------------------- /tensornetworks/MPSClass.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import time 4 | import numpy as np 5 | from sklearn.externals.six.moves import xrange 6 | from sklearn.utils import check_array 7 | from sklearn.utils import check_random_state 8 | from functools import partial 9 | from scipy.optimize import minimize 10 | import itertools 11 | 12 | class TN(): 13 | """Generic Tensor Network Class. 14 | This class should not be used directly. Use derived classes instead. 15 | Parameters 16 | ---------- 17 | D : int, optional 18 | Rank/Bond dimension of the tensor network 19 | learning_rate : float, optional 20 | Learning rate of the gradient descent algorithm 21 | batch_size : int, optional 22 | Number of examples per minibatch. 23 | n_iter : int, optional 24 | Number of iterations (epochs) over the training dataset to perform 25 | during training. 26 | random_state : integer or numpy.RandomState, optional 27 | A random number generator instance to define the state of the 28 | random permutations generator. If an integer is given, it fixes the 29 | seed. Defaults to the global numpy random number generator. 30 | verbose : int, optional 31 | The verbosity level. The default, zero, means silent mode. 
32 | ---------- 33 | Attributes 34 | ---------- 35 | w : numpy array, shape (m_parameters) 36 | Parameters of the tensor network 37 | norm : float 38 | normalization constant for the probability distribution 39 | n_samples : int 40 | number of training samples 41 | n_features : int 42 | number of features in the dataset 43 | d : int 44 | physical dimension (dimension of the categorical features) 45 | m_parameters : int 46 | number of parameters in the network 47 | history : list 48 | saves the training accuracies during training 49 | """ 50 | 51 | def __init__(self, D=4, learning_rate=0.1, batch_size=10, 52 | n_iter=100, random_state=None, verbose=False): 53 | self.D = D 54 | self.learning_rate = float(learning_rate) 55 | self.batch_size = batch_size 56 | self.n_iter = n_iter 57 | self.random_state = random_state 58 | self.verbose = verbose 59 | 60 | def _probability(self, x): 61 | """Unnormalized probability of one configuration P(x) 62 | Parameters 63 | ---------- 64 | x : numpy array, shape (n_features,) 65 | One configuration 66 | Returns 67 | ------- 68 | probability : float 69 | """ 70 | pass 71 | 72 | def _computenorm(self): 73 | """Compute norm of probability distribution 74 | Returns 75 | ------- 76 | norm : float 77 | """ 78 | pass 79 | 80 | def _derivative(self, x): 81 | """Compute the derivatives of P(x) 82 | Parameters 83 | ---------- 84 | x : numpy array, shape (n_features,) 85 | One configuration 86 | Returns 87 | ------- 88 | derivative : numpy array, shape (m_parameters,) 89 | """ 90 | pass 91 | 92 | def _derivativenorm(self): 93 | """Compute the derivatives of the norm 94 | Returns 95 | ------- 96 | derivative : numpy array, shape (m_parameters,) 97 | """ 98 | pass 99 | 100 | def _logderivative(self, x): 101 | """Compute the logderivatives of P(x) 102 | Parameters 103 | ---------- 104 | x : numpy array, shape (n_features,) 105 | One configuration 106 | Returns 107 | ------- 108 | derivative : numpy array, shape (m_parameters,) 109 | """ 110 | derivative=self._derivative(x)/self._probability(x) 111 | return derivative 112 | 113 | def _logderivativenorm(self): 114 | """Compute the logderivatives of the norm 115 | Returns 116 | ------- 117 | derivative : numpy array, shape (m_parameters,) 118 | """ 119 | derivative=self._derivativenorm()/self.norm 120 | return derivative 121 | 122 | def _fit(self, v): 123 | """Inner fit for one mini-batch of training 124 | Updates the parameters and recomputes the norm 125 | Parameters 126 | ---------- 127 | v : numpy array, shape (n_samples, n_features) 128 | The data to use for training. 
129 | """ 130 | update_w = self._likelihood_derivative(v) 131 | self.w -= self.learning_rate * update_w 132 | self.norm = self._computenorm() 133 | 134 | def _likelihood_derivative(self, v): 135 | """Compute derivatives of log-likelihood of configurations in v 136 | Parameters 137 | ---------- 138 | v : numpy array, shape (n_samples,n_features) 139 | Configurations 140 | Returns 141 | ------- 142 | update_w : numpy array, shape (m_parameters,) 143 | array of derivatives of the log-likelihood 144 | """ 145 | update_w=np.zeros(self.m_parameters) 146 | for n in xrange(v.shape[0]): 147 | update_w -= self._logderivative(v[n,:]) 148 | update_w += v.shape[0]*self._logderivativenorm() 149 | update_w /= v.shape[0] 150 | return update_w 151 | 152 | def likelihood(self, v, w=None): 153 | """Compute averaged negative log-likelihood of configurations in v 154 | Parameters 155 | ---------- 156 | v : numpy array, shape (n_samples,n_features) 157 | dataset to compute the likelihood of 158 | w : parameters of tensor network (optional) 159 | Returns 160 | ------- 161 | loglikelihood : float 162 | averaged negative log-likelihood of the data in v 163 | """ 164 | loglikelihood=0 165 | if w is not None: 166 | self.w=w 167 | self.norm=self._computenorm() 168 | for n in xrange(v.shape[0]): 169 | loglikelihood+=np.log(max(self._probability(v[n,:])/self.norm,10** (-50))) 170 | return -loglikelihood/v.shape[0] 171 | 172 | def distance(self, X, w=None): 173 | """Compute distance (here KL-divergence) between tensor X and tensor network 174 | Parameters 175 | ---------- 176 | X : array-like, shape (d, d, d, d,...) (dimension d^n_features) 177 | Tensor to fit 178 | w : parameters of tensor network (optional) 179 | Returns 180 | ------- 181 | distance : float 182 | KL-divergence between tensor X and the tensor network 183 | """ 184 | distance=0 185 | epsilon=10**(-10) 186 | if w is not None: 187 | self.w=self._padding_function(w) 188 | self.norm=self._computenorm() 189 | for i in itertools.product(np.arange(0,self.d), repeat = self.n_features): 190 | var=np.array(i) 191 | b=self._probability(var)/self.norm 192 | a=X[tuple(var)] 193 | if a 0: 299 | end = start + this_n 300 | if n_samples is not None: 301 | end = min(n_samples, end) 302 | yield array_rand[np.arange(start, end)] 303 | start = end 304 | 305 | def fit(self, X, w_init=None): 306 | """Fit the model to the data X, with parameters initialized at w_init 307 | Parameters 308 | ---------- 309 | X : {numpy array, integer matrix} shape (n_samples, n_features) 310 | Training data. 311 | w_init : {numpy array, float or complex} shape (m_parameters,) (optional) 312 | Initial value of the parameters 313 | Returns 314 | ------- 315 | self : TN 316 | The fitted model. 
317 | """ 318 | 319 | # Some initial checks of the data, initialize random number generator 320 | X = check_array(X, dtype=np.int64) 321 | rng = check_random_state(self.random_state) 322 | 323 | # Initialize parameters of MPS 324 | self.n_samples = X.shape[0] 325 | self.n_features = X.shape[1] 326 | self.d = np.max(X)+1 327 | self.m_parameters = self.n_features*self.d*self.D*self.D 328 | if w_init is None: 329 | self._weightinitialization(rng) 330 | else: 331 | self.w=w_init 332 | self.norm=self._computenorm() 333 | self.history=[] 334 | 335 | n_batches = int(np.ceil(float(self.n_samples) / self.batch_size)) 336 | begin = time.time() 337 | for iteration in xrange(1, self.n_iter + 1): 338 | batch_slices = list(self._gen_even_slices(self.batch_size, 339 | n_batches, self.n_samples, rng)) 340 | for batch_slice in batch_slices: 341 | self._fit(X[batch_slice]) 342 | 343 | end = time.time() 344 | 345 | 346 | if self.verbose: 347 | train_likelihood=self.likelihood(X) 348 | print("Iteration %d, likelihood = %.3f," 349 | " time = %.2fs" 350 | % (iteration,train_likelihood, 351 | end - begin)) 352 | self.history.append(train_likelihood) 353 | begin = end 354 | 355 | return self 356 | 357 | 358 | def fit_tensor(self, X, w_init=None): 359 | """Fit the model to the tensor X, with parameters initialized at w_init 360 | Parameters 361 | ---------- 362 | X : {numpy array, non-negative tensor} shape (d, d, d, d,...) (dimension d^n_features) 363 | Tensor to be approximated 364 | w_init : {numpy array, float or complex} shape (m_parameters,) (optional) 365 | Initial value of the parameters 366 | Returns 367 | ------- 368 | self : TN 369 | The fitted model. 370 | """ 371 | 372 | # Some initial checks of the data, initialize random number generator 373 | rng = check_random_state(self.random_state) 374 | if np.abs(np.sum(X)-1)>10**(-15): 375 | print("Input tensor has been normalized") 376 | X=X/np.sum(X) #Tensor needs to be normalized to be a probability mass function 377 | 378 | # Initialize parameters of MPS 379 | self.d = int(X.shape[1]) 380 | self.n_features = len(X.shape) 381 | 382 | self.n_samples = self.d**self.n_features 383 | self.m_parameters = self.n_features*self.d*self.D*self.D 384 | 385 | if w_init is None: 386 | self._weightinitialization(rng) 387 | else: 388 | self.w=w_init 389 | self.norm=self._computenorm() 390 | self.history=[] 391 | 392 | begin = time.time() 393 | 394 | distancepartial = partial(self._function_real_to_complex,self.distance,X) 395 | derivativedistancepartial = partial(self._function_real_to_complex,self._derivativedistance,X) 396 | 397 | initial_value=self._weightinitialization2(rng) 398 | 399 | res=minimize(fun=distancepartial,jac=derivativedistancepartial,x0=initial_value.view(np.float64),\ 400 | method='L-BFGS-B',options={'maxiter': self.n_iter},tol=10**(-16)) 401 | 402 | self.w=self._padding_function(res.x.view(self.w.dtype)) 403 | self.norm=self._computenorm() 404 | 405 | end = time.time() 406 | print("KL divergence = %.6f, time = %.2fs" % (self.distance(X),end - begin)) 407 | return self 408 | 409 | -------------------------------------------------------------------------------- /datasets/tumor: -------------------------------------------------------------------------------- 1 | (lp0 2 | cnumpy.core.multiarray 3 | _reconstruct 4 | p1 5 | (cnumpy 6 | ndarray 7 | p2 8 | (I0 9 | tp3 10 | S'b' 11 | p4 12 | tp5 13 | Rp6 14 | (I1 15 | (L339L 16 | L17L 17 | tp7 18 | cnumpy 19 | dtype 20 | p8 21 | (S'O8' 22 | p9 23 | I0 24 | I1 25 | tp10 26 | Rp11 27 | (I3 28 | S'|' 29 | p12 30 
| NNNI-1 31 | I-1 32 | I63 33 | tp13 34 | bI01 35 | (lp14 36 | L2L 37 | aL1L 38 | aL1L 39 | aL3L 40 | aL1L 41 | aL0L 42 | aL1L 43 | aL1L 44 | aL0L 45 | aL1L 46 | aL1L 47 | aL1L 48 | aL1L 49 | aL1L 50 | aL1L 51 | aL1L 52 | aL0L 53 | aL2L 54 | aL1L 55 | aL1L 56 | aL2L 57 | aL1L 58 | aL1L 59 | aL1L 60 | aL1L 61 | aL1L 62 | aL0L 63 | aL1L 64 | aL1L 65 | aL1L 66 | aL1L 67 | aL1L 68 | aL1L 69 | aL0L 70 | aL1L 71 | aL0L 72 | aL3L 73 | aL2L 74 | aL0L 75 | aL1L 76 | aL1L 77 | aL0L 78 | aL1L 79 | aL1L 80 | aL1L 81 | aL1L 82 | aL1L 83 | aL1L 84 | aL1L 85 | aL0L 86 | aL1L 87 | aL2L 88 | aL0L 89 | aL3L 90 | aL2L 91 | aL1L 92 | aL1L 93 | aL1L 94 | aL1L 95 | aL1L 96 | aL1L 97 | aL0L 98 | aL1L 99 | aL1L 100 | aL1L 101 | aL1L 102 | aL0L 103 | aL0L 104 | aL1L 105 | aL1L 106 | aL1L 107 | aL2L 108 | aL1L 109 | aL1L 110 | aL1L 111 | aL0L 112 | aL1L 113 | aL1L 114 | aL1L 115 | aL1L 116 | aL1L 117 | aL1L 118 | aL1L 119 | aL1L 120 | aL0L 121 | aL2L 122 | aL1L 123 | aL3L 124 | aL2L 125 | aL1L 126 | aL1L 127 | aL1L 128 | aL1L 129 | aL1L 130 | aL0L 131 | aL1L 132 | aL1L 133 | aL1L 134 | aL1L 135 | aL1L 136 | aL1L 137 | aL1L 138 | aL1L 139 | aL1L 140 | aL1L 141 | aL3L 142 | aL1L 143 | aL1L 144 | aL1L 145 | aL1L 146 | aL0L 147 | aL0L 148 | aL1L 149 | aL0L 150 | aL1L 151 | aL1L 152 | aL1L 153 | aL1L 154 | aL0L 155 | aL1L 156 | aL0L 157 | aL1L 158 | aL3L 159 | aL1L 160 | aL1L 161 | aL1L 162 | aL1L 163 | aL1L 164 | aL0L 165 | aL1L 166 | aL1L 167 | aL1L 168 | aL1L 169 | aL1L 170 | aL1L 171 | aL1L 172 | aL1L 173 | aL1L 174 | aL1L 175 | aL0L 176 | aL1L 177 | aL1L 178 | aL1L 179 | aL1L 180 | aL0L 181 | aL1L 182 | aL1L 183 | aL1L 184 | aL1L 185 | aL1L 186 | aL1L 187 | aL1L 188 | aL1L 189 | aL0L 190 | aL1L 191 | aL1L 192 | aL3L 193 | aL0L 194 | aL1L 195 | aL1L 196 | aL1L 197 | aL0L 198 | aL1L 199 | aL1L 200 | aL1L 201 | aL0L 202 | aL0L 203 | aL0L 204 | aL1L 205 | aL0L 206 | aL2L 207 | aL1L 208 | aL1L 209 | aL3L 210 | aL1L 211 | aL1L 212 | aL1L 213 | aL1L 214 | aL1L 215 | aL1L 216 | aL1L 217 | aL1L 218 | aL1L 219 | aL1L 220 | aL1L 221 | aL1L 222 | aL0L 223 | aL1L 224 | aL1L 225 | aL1L 226 | aL3L 227 | aL1L 228 | aL1L 229 | aL1L 230 | aL1L 231 | aL0L 232 | aL1L 233 | aL1L 234 | aL1L 235 | aL1L 236 | aL1L 237 | aL1L 238 | aL1L 239 | aL1L 240 | aL1L 241 | aL0L 242 | aL1L 243 | aL1L 244 | aL1L 245 | aL1L 246 | aL0L 247 | aL0L 248 | aL1L 249 | aL1L 250 | aL1L 251 | aL1L 252 | aL1L 253 | aL1L 254 | aL1L 255 | aL1L 256 | aL1L 257 | aL1L 258 | aL1L 259 | aL1L 260 | aL3L 261 | aL1L 262 | aL1L 263 | aL1L 264 | aL1L 265 | aL1L 266 | aL1L 267 | aL1L 268 | aL1L 269 | aL1L 270 | aL1L 271 | aL0L 272 | aL1L 273 | aL1L 274 | aL1L 275 | aL0L 276 | aL1L 277 | aL1L 278 | aL1L 279 | aL1L 280 | aL1L 281 | aL1L 282 | aL1L 283 | aL1L 284 | aL1L 285 | aL1L 286 | aL0L 287 | aL0L 288 | aL1L 289 | aL1L 290 | aL1L 291 | aL1L 292 | aL1L 293 | aL1L 294 | aL3L 295 | aL0L 296 | aL0L 297 | aL1L 298 | aL1L 299 | aL0L 300 | aL0L 301 | aL1L 302 | aL1L 303 | aL1L 304 | aL1L 305 | aL0L 306 | aL1L 307 | aL1L 308 | aL2L 309 | aL1L 310 | aL1L 311 | aL3L 312 | aL1L 313 | aL1L 314 | aL1L 315 | aL1L 316 | aL1L 317 | aL0L 318 | aL1L 319 | aL1L 320 | aL1L 321 | aL1L 322 | aL1L 323 | aL1L 324 | aL0L 325 | aL1L 326 | aL1L 327 | aL1L 328 | aL3L 329 | aL0L 330 | aL1L 331 | aL1L 332 | aL1L 333 | aL1L 334 | aL1L 335 | aL1L 336 | aL1L 337 | aL1L 338 | aL0L 339 | aL0L 340 | aL1L 341 | aL1L 342 | aL2L 343 | aL1L 344 | aL1L 345 | aL3L 346 | aL1L 347 | aL1L 348 | aL0L 349 | aL1L 350 | aL1L 351 | aL0L 352 | aL1L 353 | aL1L 354 | aL1L 355 | aL1L 356 | aL1L 357 | aL1L 358 | aL0L 359 | 
aL1L 360 | aL2L 361 | aL3L 362 | aL0L 363 | aL1L 364 | aL1L 365 | aL1L 366 | aL1L 367 | aL1L 368 | aL1L 369 | aL1L 370 | aL1L 371 | aL1L 372 | aL1L 373 | aL1L 374 | aL0L 375 | aL1L 376 | aL2L 377 | aL0L 378 | aL1L 379 | aL3L 380 | aL1L 381 | aL1L 382 | aL1L 383 | aL1L 384 | aL1L 385 | aL0L 386 | aL1L 387 | aL1L 388 | aL1L 389 | aL1L 390 | aL1L 391 | aL1L 392 | aL0L 393 | aL0L 394 | aL1L 395 | aL1L 396 | aL0L 397 | aL0L 398 | aL0L 399 | aL1L 400 | aL1L 401 | aL0L 402 | aL1L 403 | aL1L 404 | aL1L 405 | aL1L 406 | aL1L 407 | aL1L 408 | aL1L 409 | aL1L 410 | aL2L 411 | aL1L 412 | aL1L 413 | aL2L 414 | aL0L 415 | aL1L 416 | aL1L 417 | aL1L 418 | aL1L 419 | aL1L 420 | aL1L 421 | aL1L 422 | aL1L 423 | aL1L 424 | aL1L 425 | aL1L 426 | aL0L 427 | aL1L 428 | aL0L 429 | aL0L 430 | aL2L 431 | aL1L 432 | aL1L 433 | aL1L 434 | aL1L 435 | aL1L 436 | aL0L 437 | aL1L 438 | aL1L 439 | aL1L 440 | aL1L 441 | aL1L 442 | aL0L 443 | aL1L 444 | aL2L 445 | aL1L 446 | aL0L 447 | aL3L 448 | aL1L 449 | aL1L 450 | aL1L 451 | aL1L 452 | aL1L 453 | aL0L 454 | aL1L 455 | aL1L 456 | aL1L 457 | aL1L 458 | aL1L 459 | aL1L 460 | aL0L 461 | aL1L 462 | aL1L 463 | aL1L 464 | aL0L 465 | aL1L 466 | aL1L 467 | aL1L 468 | aL0L 469 | aL0L 470 | aL1L 471 | aL1L 472 | aL1L 473 | aL1L 474 | aL1L 475 | aL1L 476 | aL1L 477 | aL1L 478 | aL1L 479 | aL1L 480 | aL1L 481 | aL3L 482 | aL1L 483 | aL1L 484 | aL1L 485 | aL1L 486 | aL1L 487 | aL1L 488 | aL1L 489 | aL1L 490 | aL1L 491 | aL0L 492 | aL0L 493 | aL1L 494 | aL0L 495 | aL1L 496 | aL0L 497 | aL1L 498 | aL3L 499 | aL0L 500 | aL1L 501 | aL1L 502 | aL1L 503 | aL1L 504 | aL1L 505 | aL1L 506 | aL1L 507 | aL1L 508 | aL1L 509 | aL1L 510 | aL0L 511 | aL1L 512 | aL2L 513 | aL1L 514 | aL1L 515 | aL2L 516 | aL1L 517 | aL1L 518 | aL1L 519 | aL0L 520 | aL0L 521 | aL1L 522 | aL1L 523 | aL1L 524 | aL1L 525 | aL1L 526 | aL1L 527 | aL1L 528 | aL1L 529 | aL1L 530 | aL1L 531 | aL1L 532 | aL1L 533 | aL1L 534 | aL1L 535 | aL1L 536 | aL1L 537 | aL0L 538 | aL1L 539 | aL1L 540 | aL0L 541 | aL0L 542 | aL0L 543 | aL0L 544 | aL1L 545 | aL1L 546 | aL2L 547 | aL0L 548 | aL1L 549 | aL3L 550 | aL1L 551 | aL1L 552 | aL1L 553 | aL1L 554 | aL0L 555 | aL0L 556 | aL1L 557 | aL1L 558 | aL1L 559 | aL1L 560 | aL1L 561 | aL1L 562 | aL1L 563 | aL2L 564 | aL0L 565 | aL0L 566 | aL1L 567 | aL0L 568 | aL1L 569 | aL1L 570 | aL1L 571 | aL1L 572 | aL1L 573 | aL1L 574 | aL1L 575 | aL1L 576 | aL1L 577 | aL1L 578 | aL0L 579 | aL1L 580 | aL1L 581 | aL0L 582 | aL3L 583 | aL2L 584 | aL0L 585 | aL0L 586 | aL1L 587 | aL1L 588 | aL1L 589 | aL1L 590 | aL0L 591 | aL1L 592 | aL1L 593 | aL1L 594 | aL1L 595 | aL0L 596 | aL1L 597 | aL1L 598 | aL1L 599 | aL1L 600 | aL3L 601 | aL1L 602 | aL1L 603 | aL1L 604 | aL1L 605 | aL1L 606 | aL1L 607 | aL1L 608 | aL1L 609 | aL1L 610 | aL1L 611 | aL0L 612 | aL1L 613 | aL1L 614 | aL1L 615 | aL0L 616 | aL1L 617 | aL2L 618 | aL0L 619 | aL1L 620 | aL1L 621 | aL1L 622 | aL1L 623 | aL0L 624 | aL1L 625 | aL1L 626 | aL1L 627 | aL1L 628 | aL1L 629 | aL1L 630 | aL1L 631 | aL1L 632 | aL1L 633 | aL1L 634 | aL3L 635 | aL1L 636 | aL1L 637 | aL1L 638 | aL1L 639 | aL1L 640 | aL0L 641 | aL1L 642 | aL1L 643 | aL1L 644 | aL0L 645 | aL1L 646 | aL0L 647 | aL1L 648 | aL1L 649 | aL1L 650 | aL1L 651 | aL3L 652 | aL1L 653 | aL1L 654 | aL1L 655 | aL1L 656 | aL1L 657 | aL1L 658 | aL1L 659 | aL1L 660 | aL1L 661 | aL1L 662 | aL0L 663 | aL1L 664 | aL1L 665 | aL1L 666 | aL1L 667 | aL3L 668 | aL2L 669 | aL0L 670 | aL1L 671 | aL1L 672 | aL1L 673 | aL1L 674 | aL1L 675 | aL1L 676 | aL0L 677 | aL0L 678 | aL1L 679 | aL1L 680 | aL1L 681 | aL1L 682 | 
aL1L 683 | aL1L 684 | aL1L 685 | aL3L 686 | aL1L 687 | aL1L 688 | aL1L 689 | aL1L 690 | aL0L 691 | aL1L 692 | aL1L 693 | aL1L 694 | aL1L 695 | aL1L 696 | aL1L 697 | aL1L 698 | aL1L 699 | aL0L 700 | aL1L 701 | aL3L 702 | aL2L 703 | aL0L 704 | aL1L 705 | aL0L 706 | aL0L 707 | aL1L 708 | aL1L 709 | aL1L 710 | aL1L 711 | aL1L 712 | aL1L 713 | aL1L 714 | aL0L 715 | aL1L 716 | aL2L 717 | aL1L 718 | aL0L 719 | aL3L 720 | aL1L 721 | aL1L 722 | aL1L 723 | aL0L 724 | aL0L 725 | aL1L 726 | aL1L 727 | aL1L 728 | aL1L 729 | aL1L 730 | aL1L 731 | aL1L 732 | aL1L 733 | aL2L 734 | aL1L 735 | aL1L 736 | aL3L 737 | aL1L 738 | aL1L 739 | aL1L 740 | aL1L 741 | aL0L 742 | aL0L 743 | aL1L 744 | aL1L 745 | aL1L 746 | aL1L 747 | aL1L 748 | aL1L 749 | aL0L 750 | aL0L 751 | aL0L 752 | aL1L 753 | aL2L 754 | aL0L 755 | aL0L 756 | aL0L 757 | aL1L 758 | aL1L 759 | aL0L 760 | aL0L 761 | aL1L 762 | aL1L 763 | aL1L 764 | aL0L 765 | aL0L 766 | aL0L 767 | aL2L 768 | aL0L 769 | aL1L 770 | aL3L 771 | aL1L 772 | aL1L 773 | aL1L 774 | aL1L 775 | aL1L 776 | aL0L 777 | aL1L 778 | aL1L 779 | aL1L 780 | aL1L 781 | aL1L 782 | aL1L 783 | aL0L 784 | aL1L 785 | aL0L 786 | aL1L 787 | aL3L 788 | aL1L 789 | aL1L 790 | aL1L 791 | aL0L 792 | aL0L 793 | aL0L 794 | aL1L 795 | aL1L 796 | aL1L 797 | aL0L 798 | aL1L 799 | aL1L 800 | aL0L 801 | aL1L 802 | aL0L 803 | aL1L 804 | aL3L 805 | aL0L 806 | aL1L 807 | aL0L 808 | aL0L 809 | aL1L 810 | aL1L 811 | aL1L 812 | aL1L 813 | aL1L 814 | aL1L 815 | aL1L 816 | aL0L 817 | aL1L 818 | aL1L 819 | aL1L 820 | aL3L 821 | aL2L 822 | aL1L 823 | aL1L 824 | aL1L 825 | aL1L 826 | aL0L 827 | aL1L 828 | aL1L 829 | aL1L 830 | aL1L 831 | aL1L 832 | aL1L 833 | aL1L 834 | aL0L 835 | aL1L 836 | aL0L 837 | aL1L 838 | aL3L 839 | aL1L 840 | aL1L 841 | aL0L 842 | aL1L 843 | aL1L 844 | aL1L 845 | aL1L 846 | aL1L 847 | aL1L 848 | aL0L 849 | aL1L 850 | aL1L 851 | aL1L 852 | aL1L 853 | aL1L 854 | aL1L 855 | aL1L 856 | aL0L 857 | aL1L 858 | aL1L 859 | aL1L 860 | aL1L 861 | aL1L 862 | aL1L 863 | aL1L 864 | aL1L 865 | aL1L 866 | aL1L 867 | aL1L 868 | aL0L 869 | aL1L 870 | aL1L 871 | aL1L 872 | aL3L 873 | aL1L 874 | aL1L 875 | aL1L 876 | aL0L 877 | aL0L 878 | aL1L 879 | aL1L 880 | aL1L 881 | aL1L 882 | aL1L 883 | aL1L 884 | aL1L 885 | aL1L 886 | aL2L 887 | aL1L 888 | aL1L 889 | aL0L 890 | aL1L 891 | aL1L 892 | aL0L 893 | aL1L 894 | aL1L 895 | aL1L 896 | aL1L 897 | aL1L 898 | aL1L 899 | aL1L 900 | aL1L 901 | aL1L 902 | aL1L 903 | aL1L 904 | aL1L 905 | aL1L 906 | aL3L 907 | aL1L 908 | aL1L 909 | aL1L 910 | aL1L 911 | aL1L 912 | aL1L 913 | aL1L 914 | aL1L 915 | aL1L 916 | aL0L 917 | aL1L 918 | aL1L 919 | aL1L 920 | aL2L 921 | aL1L 922 | aL3L 923 | aL2L 924 | aL1L 925 | aL1L 926 | aL1L 927 | aL1L 928 | aL1L 929 | aL0L 930 | aL1L 931 | aL1L 932 | aL1L 933 | aL1L 934 | aL1L 935 | aL1L 936 | aL0L 937 | aL2L 938 | aL1L 939 | aL3L 940 | aL2L 941 | aL1L 942 | aL1L 943 | aL1L 944 | aL1L 945 | aL1L 946 | aL1L 947 | aL1L 948 | aL1L 949 | aL1L 950 | aL1L 951 | aL1L 952 | aL1L 953 | aL0L 954 | aL1L 955 | aL1L 956 | aL3L 957 | aL3L 958 | aL1L 959 | aL1L 960 | aL1L 961 | aL1L 962 | aL1L 963 | aL1L 964 | aL1L 965 | aL1L 966 | aL0L 967 | aL1L 968 | aL1L 969 | aL1L 970 | aL1L 971 | aL1L 972 | aL0L 973 | aL3L 974 | aL3L 975 | aL0L 976 | aL1L 977 | aL1L 978 | aL1L 979 | aL1L 980 | aL1L 981 | aL1L 982 | aL0L 983 | aL1L 984 | aL1L 985 | aL1L 986 | aL1L 987 | aL1L 988 | aL2L 989 | aL1L 990 | aL1L 991 | aL3L 992 | aL1L 993 | aL1L 994 | aL1L 995 | aL1L 996 | aL1L 997 | aL0L 998 | aL1L 999 | aL1L 1000 | aL1L 1001 | aL1L 1002 | aL1L 1003 | aL1L 1004 | aL0L 
1005 | aL1L 1006 | aL1L 1007 | aL1L 1008 | aL3L 1009 | aL1L 1010 | aL1L 1011 | aL1L 1012 | aL0L 1013 | aL0L 1014 | aL1L 1015 | aL1L 1016 | aL1L 1017 | aL1L 1018 | aL1L 1019 | aL1L 1020 | aL1L 1021 | aL1L 1022 | aL1L 1023 | aL0L 1024 | aL3L 1025 | aL2L 1026 | aL0L 1027 | aL1L 1028 | aL1L 1029 | aL1L 1030 | aL0L 1031 | aL1L 1032 | aL0L 1033 | aL1L 1034 | aL1L 1035 | aL1L 1036 | aL1L 1037 | aL0L 1038 | aL0L 1039 | aL1L 1040 | aL0L 1041 | aL1L 1042 | aL0L 1043 | aL1L 1044 | aL1L 1045 | aL0L 1046 | aL0L 1047 | aL1L 1048 | aL1L 1049 | aL1L 1050 | aL1L 1051 | aL1L 1052 | aL1L 1053 | aL1L 1054 | aL0L 1055 | aL0L 1056 | aL1L 1057 | aL1L 1058 | aL1L 1059 | aL3L 1060 | aL1L 1061 | aL1L 1062 | aL1L 1063 | aL1L 1064 | aL1L 1065 | aL1L 1066 | aL1L 1067 | aL1L 1068 | aL1L 1069 | aL1L 1070 | aL0L 1071 | aL1L 1072 | aL1L 1073 | aL1L 1074 | aL0L 1075 | aL3L 1076 | aL2L 1077 | aL1L 1078 | aL1L 1079 | aL1L 1080 | aL1L 1081 | aL1L 1082 | aL1L 1083 | aL1L 1084 | aL1L 1085 | aL1L 1086 | aL0L 1087 | aL1L 1088 | aL0L 1089 | aL1L 1090 | aL1L 1091 | aL0L 1092 | aL1L 1093 | aL1L 1094 | aL0L 1095 | aL1L 1096 | aL1L 1097 | aL1L 1098 | aL1L 1099 | aL1L 1100 | aL1L 1101 | aL1L 1102 | aL1L 1103 | aL1L 1104 | aL1L 1105 | aL1L 1106 | aL1L 1107 | aL2L 1108 | aL0L 1109 | aL0L 1110 | aL0L 1111 | aL1L 1112 | aL1L 1113 | aL1L 1114 | aL1L 1115 | aL1L 1116 | aL1L 1117 | aL1L 1118 | aL1L 1119 | aL0L 1120 | aL0L 1121 | aL1L 1122 | aL1L 1123 | aL1L 1124 | aL1L 1125 | aL0L 1126 | aL3L 1127 | aL2L 1128 | aL0L 1129 | aL1L 1130 | aL1L 1131 | aL1L 1132 | aL1L 1133 | aL0L 1134 | aL1L 1135 | aL1L 1136 | aL1L 1137 | aL1L 1138 | aL1L 1139 | aL0L 1140 | aL1L 1141 | aL1L 1142 | aL0L 1143 | aL1L 1144 | aL3L 1145 | aL0L 1146 | aL1L 1147 | aL1L 1148 | aL1L 1149 | aL1L 1150 | aL1L 1151 | aL1L 1152 | aL1L 1153 | aL0L 1154 | aL1L 1155 | aL1L 1156 | aL1L 1157 | aL1L 1158 | aL2L 1159 | aL1L 1160 | aL1L 1161 | aL0L 1162 | aL1L 1163 | aL1L 1164 | aL1L 1165 | aL1L 1166 | aL0L 1167 | aL1L 1168 | aL1L 1169 | aL1L 1170 | aL1L 1171 | aL1L 1172 | aL1L 1173 | aL1L 1174 | aL1L 1175 | aL1L 1176 | aL1L 1177 | aL1L 1178 | aL1L 1179 | aL1L 1180 | aL1L 1181 | aL1L 1182 | aL1L 1183 | aL0L 1184 | aL1L 1185 | aL1L 1186 | aL1L 1187 | aL1L 1188 | aL1L 1189 | aL1L 1190 | aL1L 1191 | aL1L 1192 | aL2L 1193 | aL1L 1194 | aL1L 1195 | aL2L 1196 | aL1L 1197 | aL1L 1198 | aL1L 1199 | aL1L 1200 | aL0L 1201 | aL0L 1202 | aL1L 1203 | aL1L 1204 | aL1L 1205 | aL1L 1206 | aL1L 1207 | aL1L 1208 | aL1L 1209 | aL2L 1210 | aL1L 1211 | aL1L 1212 | aL1L 1213 | aL1L 1214 | aL1L 1215 | aL1L 1216 | aL1L 1217 | aL0L 1218 | aL1L 1219 | aL1L 1220 | aL1L 1221 | aL1L 1222 | aL1L 1223 | aL1L 1224 | aL1L 1225 | aL1L 1226 | aL1L 1227 | aL1L 1228 | aL1L 1229 | aL2L 1230 | aL1L 1231 | aL1L 1232 | aL1L 1233 | aL1L 1234 | aL0L 1235 | aL0L 1236 | aL1L 1237 | aL1L 1238 | aL1L 1239 | aL1L 1240 | aL0L 1241 | aL0L 1242 | aL0L 1243 | aL1L 1244 | aL0L 1245 | aL1L 1246 | aL3L 1247 | aL0L 1248 | aL1L 1249 | aL1L 1250 | aL1L 1251 | aL1L 1252 | aL1L 1253 | aL1L 1254 | aL1L 1255 | aL1L 1256 | aL1L 1257 | aL1L 1258 | aL1L 1259 | aL1L 1260 | aL1L 1261 | aL1L 1262 | aL1L 1263 | aL0L 1264 | aL1L 1265 | aL1L 1266 | aL1L 1267 | aL0L 1268 | aL0L 1269 | aL1L 1270 | aL1L 1271 | aL1L 1272 | aL1L 1273 | aL1L 1274 | aL1L 1275 | aL1L 1276 | aL1L 1277 | aL1L 1278 | aL0L 1279 | aL0L 1280 | aL3L 1281 | aL0L 1282 | aL1L 1283 | aL1L 1284 | aL0L 1285 | aL0L 1286 | aL1L 1287 | aL1L 1288 | aL1L 1289 | aL0L 1290 | aL0L 1291 | aL1L 1292 | aL1L 1293 | aL1L 1294 | aL2L 1295 | aL1L 1296 | aL3L 1297 | aL2L 1298 | aL1L 1299 | aL1L 1300 | aL1L 
1301 | aL1L 1302 | aL1L 1303 | aL1L 1304 | aL1L 1305 | aL1L 1306 | aL1L 1307 | aL1L 1308 | aL1L 1309 | aL0L 1310 | aL0L 1311 | aL2L 1312 | aL0L 1313 | aL1L 1314 | aL3L 1315 | aL0L 1316 | aL1L 1317 | aL0L 1318 | aL1L 1319 | aL1L 1320 | aL1L 1321 | aL1L 1322 | aL1L 1323 | aL1L 1324 | aL1L 1325 | aL1L 1326 | aL1L 1327 | aL1L 1328 | aL2L 1329 | aL1L 1330 | aL3L 1331 | aL3L 1332 | aL1L 1333 | aL1L 1334 | aL1L 1335 | aL1L 1336 | aL1L 1337 | aL0L 1338 | aL1L 1339 | aL1L 1340 | aL1L 1341 | aL1L 1342 | aL1L 1343 | aL1L 1344 | aL0L 1345 | aL2L 1346 | aL0L 1347 | aL3L 1348 | aL2L 1349 | aL1L 1350 | aL1L 1351 | aL1L 1352 | aL1L 1353 | aL1L 1354 | aL1L 1355 | aL1L 1356 | aL1L 1357 | aL0L 1358 | aL1L 1359 | aL1L 1360 | aL1L 1361 | aL1L 1362 | aL2L 1363 | aL1L 1364 | aL1L 1365 | aL3L 1366 | aL1L 1367 | aL1L 1368 | aL0L 1369 | aL1L 1370 | aL1L 1371 | aL0L 1372 | aL1L 1373 | aL1L 1374 | aL1L 1375 | aL1L 1376 | aL1L 1377 | aL0L 1378 | aL0L 1379 | aL1L 1380 | aL1L 1381 | aL1L 1382 | aL3L 1383 | aL0L 1384 | aL1L 1385 | aL1L 1386 | aL1L 1387 | aL1L 1388 | aL1L 1389 | aL1L 1390 | aL1L 1391 | aL1L 1392 | aL1L 1393 | aL1L 1394 | aL1L 1395 | aL1L 1396 | aL1L 1397 | aL0L 1398 | aL1L 1399 | aL3L 1400 | aL1L 1401 | aL1L 1402 | aL0L 1403 | aL1L 1404 | aL1L 1405 | aL0L 1406 | aL1L 1407 | aL1L 1408 | aL1L 1409 | aL1L 1410 | aL1L 1411 | aL1L 1412 | aL0L 1413 | aL1L 1414 | aL0L 1415 | aL0L 1416 | aL0L 1417 | aL0L 1418 | aL1L 1419 | aL1L 1420 | aL1L 1421 | aL1L 1422 | aL1L 1423 | aL1L 1424 | aL1L 1425 | aL0L 1426 | aL0L 1427 | aL1L 1428 | aL1L 1429 | aL1L 1430 | aL2L 1431 | aL1L 1432 | aL1L 1433 | aL3L 1434 | aL1L 1435 | aL1L 1436 | aL1L 1437 | aL1L 1438 | aL1L 1439 | aL0L 1440 | aL1L 1441 | aL1L 1442 | aL1L 1443 | aL1L 1444 | aL1L 1445 | aL1L 1446 | aL0L 1447 | aL1L 1448 | aL0L 1449 | aL1L 1450 | aL2L 1451 | aL0L 1452 | aL1L 1453 | aL1L 1454 | aL1L 1455 | aL1L 1456 | aL1L 1457 | aL1L 1458 | aL0L 1459 | aL1L 1460 | aL1L 1461 | aL1L 1462 | aL1L 1463 | aL1L 1464 | aL2L 1465 | aL0L 1466 | aL1L 1467 | aL0L 1468 | aL1L 1469 | aL1L 1470 | aL1L 1471 | aL1L 1472 | aL1L 1473 | aL1L 1474 | aL1L 1475 | aL1L 1476 | aL1L 1477 | aL1L 1478 | aL1L 1479 | aL1L 1480 | aL0L 1481 | aL2L 1482 | aL1L 1483 | aL1L 1484 | aL0L 1485 | aL1L 1486 | aL1L 1487 | aL1L 1488 | aL1L 1489 | aL0L 1490 | aL1L 1491 | aL1L 1492 | aL1L 1493 | aL1L 1494 | aL1L 1495 | aL1L 1496 | aL1L 1497 | aL1L 1498 | aL1L 1499 | aL0L 1500 | aL1L 1501 | aL0L 1502 | aL0L 1503 | aL1L 1504 | aL1L 1505 | aL1L 1506 | aL1L 1507 | aL1L 1508 | aL1L 1509 | aL1L 1510 | aL1L 1511 | aL1L 1512 | aL1L 1513 | aL1L 1514 | aL1L 1515 | aL1L 1516 | aL1L 1517 | aL1L 1518 | aL0L 1519 | aL0L 1520 | aL1L 1521 | aL1L 1522 | aL1L 1523 | aL1L 1524 | aL1L 1525 | aL1L 1526 | aL1L 1527 | aL1L 1528 | aL1L 1529 | aL1L 1530 | aL1L 1531 | aL1L 1532 | aL2L 1533 | aL1L 1534 | aL3L 1535 | aL2L 1536 | aL1L 1537 | aL1L 1538 | aL0L 1539 | aL1L 1540 | aL1L 1541 | aL0L 1542 | aL0L 1543 | aL1L 1544 | aL1L 1545 | aL0L 1546 | aL1L 1547 | aL0L 1548 | aL1L 1549 | aL1L 1550 | aL0L 1551 | aL1L 1552 | aL3L 1553 | aL0L 1554 | aL1L 1555 | aL1L 1556 | aL1L 1557 | aL1L 1558 | aL1L 1559 | aL1L 1560 | aL1L 1561 | aL1L 1562 | aL1L 1563 | aL1L 1564 | aL1L 1565 | aL0L 1566 | aL1L 1567 | aL0L 1568 | aL3L 1569 | aL2L 1570 | aL1L 1571 | aL1L 1572 | aL1L 1573 | aL0L 1574 | aL1L 1575 | aL0L 1576 | aL1L 1577 | aL0L 1578 | aL1L 1579 | aL1L 1580 | aL1L 1581 | aL1L 1582 | aL1L 1583 | aL1L 1584 | aL1L 1585 | aL1L 1586 | aL3L 1587 | aL1L 1588 | aL1L 1589 | aL1L 1590 | aL0L 1591 | aL1L 1592 | aL1L 1593 | aL1L 1594 | aL1L 1595 | aL1L 1596 | aL1L 
1597 | aL1L 1598 | aL1L 1599 | aL1L 1600 | aL1L 1601 | aL1L 1602 | aL1L 1603 | aL3L 1604 | aL1L 1605 | aL1L 1606 | aL1L 1607 | aL1L 1608 | aL1L 1609 | aL1L 1610 | aL1L 1611 | aL1L 1612 | aL1L 1613 | aL1L 1614 | aL0L 1615 | aL1L 1616 | aL1L 1617 | aL1L 1618 | aL0L 1619 | aL3L 1620 | aL2L 1621 | aL1L 1622 | aL1L 1623 | aL1L 1624 | aL0L 1625 | aL1L 1626 | aL0L 1627 | aL0L 1628 | aL1L 1629 | aL1L 1630 | aL1L 1631 | aL1L 1632 | aL0L 1633 | aL0L 1634 | aL1L 1635 | aL0L 1636 | aL1L 1637 | aL3L 1638 | aL1L 1639 | aL1L 1640 | aL1L 1641 | aL0L 1642 | aL0L 1643 | aL0L 1644 | aL1L 1645 | aL1L 1646 | aL1L 1647 | aL1L 1648 | aL1L 1649 | aL1L 1650 | aL0L 1651 | aL1L 1652 | aL1L 1653 | aL1L 1654 | aL2L 1655 | aL1L 1656 | aL1L 1657 | aL1L 1658 | aL1L 1659 | aL1L 1660 | aL1L 1661 | aL1L 1662 | aL1L 1663 | aL1L 1664 | aL0L 1665 | aL1L 1666 | aL1L 1667 | aL1L 1668 | aL1L 1669 | aL0L 1670 | aL3L 1671 | aL2L 1672 | aL1L 1673 | aL1L 1674 | aL1L 1675 | aL1L 1676 | aL1L 1677 | aL1L 1678 | aL1L 1679 | aL1L 1680 | aL1L 1681 | aL0L 1682 | aL1L 1683 | aL1L 1684 | aL1L 1685 | aL2L 1686 | aL0L 1687 | aL0L 1688 | aL3L 1689 | aL1L 1690 | aL1L 1691 | aL0L 1692 | aL1L 1693 | aL1L 1694 | aL0L 1695 | aL1L 1696 | aL1L 1697 | aL1L 1698 | aL1L 1699 | aL1L 1700 | aL0L 1701 | aL0L 1702 | aL0L 1703 | aL0L 1704 | aL3L 1705 | aL2L 1706 | aL0L 1707 | aL1L 1708 | aL1L 1709 | aL0L 1710 | aL0L 1711 | aL1L 1712 | aL1L 1713 | aL1L 1714 | aL1L 1715 | aL1L 1716 | aL1L 1717 | aL1L 1718 | aL1L 1719 | aL1L 1720 | aL0L 1721 | aL3L 1722 | aL2L 1723 | aL0L 1724 | aL1L 1725 | aL1L 1726 | aL1L 1727 | aL1L 1728 | aL1L 1729 | aL1L 1730 | aL1L 1731 | aL1L 1732 | aL1L 1733 | aL1L 1734 | aL1L 1735 | aL1L 1736 | aL2L 1737 | aL0L 1738 | aL0L 1739 | aL1L 1740 | aL1L 1741 | aL1L 1742 | aL1L 1743 | aL1L 1744 | aL1L 1745 | aL1L 1746 | aL1L 1747 | aL1L 1748 | aL0L 1749 | aL1L 1750 | aL1L 1751 | aL1L 1752 | aL1L 1753 | aL0L 1754 | aL0L 1755 | aL3L 1756 | aL2L 1757 | aL1L 1758 | aL1L 1759 | aL1L 1760 | aL1L 1761 | aL1L 1762 | aL0L 1763 | aL1L 1764 | aL1L 1765 | aL1L 1766 | aL0L 1767 | aL1L 1768 | aL0L 1769 | aL1L 1770 | aL1L 1771 | aL1L 1772 | aL1L 1773 | aL3L 1774 | aL1L 1775 | aL1L 1776 | aL0L 1777 | aL0L 1778 | aL0L 1779 | aL1L 1780 | aL1L 1781 | aL1L 1782 | aL1L 1783 | aL1L 1784 | aL1L 1785 | aL1L 1786 | aL1L 1787 | aL1L 1788 | aL1L 1789 | aL1L 1790 | aL3L 1791 | aL1L 1792 | aL1L 1793 | aL0L 1794 | aL1L 1795 | aL0L 1796 | aL0L 1797 | aL1L 1798 | aL1L 1799 | aL1L 1800 | aL1L 1801 | aL1L 1802 | aL1L 1803 | aL0L 1804 | aL1L 1805 | aL0L 1806 | aL1L 1807 | aL3L 1808 | aL0L 1809 | aL1L 1810 | aL1L 1811 | aL1L 1812 | aL1L 1813 | aL1L 1814 | aL1L 1815 | aL1L 1816 | aL1L 1817 | aL1L 1818 | aL1L 1819 | aL1L 1820 | aL1L 1821 | aL1L 1822 | aL1L 1823 | aL1L 1824 | aL3L 1825 | aL0L 1826 | aL1L 1827 | aL1L 1828 | aL0L 1829 | aL1L 1830 | aL1L 1831 | aL1L 1832 | aL1L 1833 | aL1L 1834 | aL1L 1835 | aL1L 1836 | aL1L 1837 | aL1L 1838 | aL0L 1839 | aL0L 1840 | aL2L 1841 | aL2L 1842 | aL1L 1843 | aL1L 1844 | aL1L 1845 | aL1L 1846 | aL1L 1847 | aL1L 1848 | aL1L 1849 | aL1L 1850 | aL1L 1851 | aL0L 1852 | aL1L 1853 | aL1L 1854 | aL1L 1855 | aL2L 1856 | aL1L 1857 | aL1L 1858 | aL2L 1859 | aL1L 1860 | aL1L 1861 | aL0L 1862 | aL1L 1863 | aL0L 1864 | aL0L 1865 | aL1L 1866 | aL1L 1867 | aL1L 1868 | aL1L 1869 | aL1L 1870 | aL1L 1871 | aL0L 1872 | aL1L 1873 | aL0L 1874 | aL0L 1875 | aL3L 1876 | aL1L 1877 | aL1L 1878 | aL0L 1879 | aL1L 1880 | aL1L 1881 | aL1L 1882 | aL1L 1883 | aL1L 1884 | aL0L 1885 | aL1L 1886 | aL1L 1887 | aL1L 1888 | aL1L 1889 | aL0L 1890 | aL0L 1891 | aL1L 1892 | aL3L 
1893 | aL1L 1894 | aL1L 1895 | aL1L 1896 | aL0L 1897 | aL1L 1898 | aL1L 1899 | aL1L 1900 | aL1L 1901 | aL1L 1902 | aL1L 1903 | aL1L 1904 | aL1L 1905 | aL1L 1906 | aL1L 1907 | aL1L 1908 | aL1L 1909 | aL3L 1910 | aL1L 1911 | aL1L 1912 | aL0L 1913 | aL0L 1914 | aL0L 1915 | aL1L 1916 | aL1L 1917 | aL1L 1918 | aL1L 1919 | aL0L 1920 | aL1L 1921 | aL1L 1922 | aL1L 1923 | aL1L 1924 | aL0L 1925 | aL0L 1926 | aL2L 1927 | aL0L 1928 | aL1L 1929 | aL1L 1930 | aL1L 1931 | aL1L 1932 | aL1L 1933 | aL1L 1934 | aL1L 1935 | aL1L 1936 | aL1L 1937 | aL1L 1938 | aL1L 1939 | aL1L 1940 | aL1L 1941 | aL0L 1942 | aL1L 1943 | aL3L 1944 | aL1L 1945 | aL1L 1946 | aL0L 1947 | aL0L 1948 | aL1L 1949 | aL1L 1950 | aL1L 1951 | aL1L 1952 | aL1L 1953 | aL1L 1954 | aL1L 1955 | aL0L 1956 | aL0L 1957 | aL1L 1958 | aL0L 1959 | aL3L 1960 | aL2L 1961 | aL1L 1962 | aL1L 1963 | aL1L 1964 | aL1L 1965 | aL0L 1966 | aL0L 1967 | aL1L 1968 | aL1L 1969 | aL1L 1970 | aL1L 1971 | aL1L 1972 | aL0L 1973 | aL1L 1974 | aL1L 1975 | aL1L 1976 | aL0L 1977 | aL2L 1978 | aL0L 1979 | aL1L 1980 | aL1L 1981 | aL1L 1982 | aL1L 1983 | aL1L 1984 | aL1L 1985 | aL1L 1986 | aL0L 1987 | aL1L 1988 | aL1L 1989 | aL1L 1990 | aL1L 1991 | aL1L 1992 | aL0L 1993 | aL1L 1994 | aL2L 1995 | aL1L 1996 | aL1L 1997 | aL1L 1998 | aL1L 1999 | aL1L 2000 | aL1L 2001 | aL1L 2002 | aL1L 2003 | aL1L 2004 | aL0L 2005 | aL1L 2006 | aL0L 2007 | aL1L 2008 | aL0L 2009 | aL0L 2010 | aL3L 2011 | aL2L 2012 | aL1L 2013 | aL1L 2014 | aL0L 2015 | aL1L 2016 | aL1L 2017 | aL1L 2018 | aL1L 2019 | aL1L 2020 | aL1L 2021 | aL1L 2022 | aL1L 2023 | aL1L 2024 | aL1L 2025 | aL1L 2026 | aL1L 2027 | aL1L 2028 | aL3L 2029 | aL0L 2030 | aL1L 2031 | aL0L 2032 | aL0L 2033 | aL0L 2034 | aL0L 2035 | aL0L 2036 | aL1L 2037 | aL1L 2038 | aL1L 2039 | aL1L 2040 | aL1L 2041 | aL0L 2042 | aL1L 2043 | aL1L 2044 | aL1L 2045 | aL0L 2046 | aL1L 2047 | aL1L 2048 | aL1L 2049 | aL1L 2050 | aL0L 2051 | aL1L 2052 | aL1L 2053 | aL1L 2054 | aL1L 2055 | aL1L 2056 | aL1L 2057 | aL1L 2058 | aL1L 2059 | aL1L 2060 | aL1L 2061 | aL1L 2062 | aL3L 2063 | aL1L 2064 | aL1L 2065 | aL1L 2066 | aL1L 2067 | aL1L 2068 | aL0L 2069 | aL1L 2070 | aL1L 2071 | aL1L 2072 | aL1L 2073 | aL1L 2074 | aL1L 2075 | aL0L 2076 | aL1L 2077 | aL0L 2078 | aL0L 2079 | aL3L 2080 | aL1L 2081 | aL1L 2082 | aL1L 2083 | aL1L 2084 | aL1L 2085 | aL1L 2086 | aL1L 2087 | aL1L 2088 | aL1L 2089 | aL0L 2090 | aL2L 2091 | aL0L 2092 | aL1L 2093 | aL2L 2094 | aL1L 2095 | aL1L 2096 | aL3L 2097 | aL1L 2098 | aL1L 2099 | aL0L 2100 | aL0L 2101 | aL0L 2102 | aL1L 2103 | aL1L 2104 | aL1L 2105 | aL1L 2106 | aL1L 2107 | aL1L 2108 | aL1L 2109 | aL0L 2110 | aL2L 2111 | aL0L 2112 | aL1L 2113 | aL2L 2114 | aL1L 2115 | aL1L 2116 | aL1L 2117 | aL1L 2118 | aL1L 2119 | aL0L 2120 | aL1L 2121 | aL1L 2122 | aL1L 2123 | aL1L 2124 | aL1L 2125 | aL1L 2126 | aL1L 2127 | aL1L 2128 | aL1L 2129 | aL1L 2130 | aL3L 2131 | aL1L 2132 | aL1L 2133 | aL0L 2134 | aL1L 2135 | aL1L 2136 | aL1L 2137 | aL1L 2138 | aL1L 2139 | aL0L 2140 | aL0L 2141 | aL0L 2142 | aL0L 2143 | aL0L 2144 | aL2L 2145 | aL0L 2146 | aL0L 2147 | aL1L 2148 | aL1L 2149 | aL1L 2150 | aL1L 2151 | aL1L 2152 | aL1L 2153 | aL1L 2154 | aL1L 2155 | aL1L 2156 | aL0L 2157 | aL1L 2158 | aL1L 2159 | aL1L 2160 | aL1L 2161 | aL1L 2162 | aL1L 2163 | aL3L 2164 | aL3L 2165 | aL1L 2166 | aL1L 2167 | aL1L 2168 | aL1L 2169 | aL1L 2170 | aL1L 2171 | aL1L 2172 | aL1L 2173 | aL0L 2174 | aL0L 2175 | aL1L 2176 | aL1L 2177 | aL1L 2178 | aL2L 2179 | aL1L 2180 | aL1L 2181 | aL3L 2182 | aL1L 2183 | aL1L 2184 | aL0L 2185 | aL0L 2186 | aL1L 2187 | aL0L 2188 | aL1L 
2189 | aL1L 2190 | aL1L 2191 | aL1L 2192 | aL1L 2193 | aL0L 2194 | aL0L 2195 | aL0L 2196 | aL1L 2197 | aL1L 2198 | aL0L 2199 | aL0L 2200 | aL1L 2201 | aL1L 2202 | aL1L 2203 | aL1L 2204 | aL1L 2205 | aL1L 2206 | aL1L 2207 | aL1L 2208 | aL0L 2209 | aL1L 2210 | aL1L 2211 | aL0L 2212 | aL2L 2213 | aL1L 2214 | aL1L 2215 | aL1L 2216 | aL1L 2217 | aL1L 2218 | aL1L 2219 | aL1L 2220 | aL1L 2221 | aL0L 2222 | aL1L 2223 | aL1L 2224 | aL1L 2225 | aL1L 2226 | aL1L 2227 | aL1L 2228 | aL0L 2229 | aL1L 2230 | aL1L 2231 | aL1L 2232 | aL3L 2233 | aL0L 2234 | aL1L 2235 | aL1L 2236 | aL1L 2237 | aL1L 2238 | aL1L 2239 | aL0L 2240 | aL1L 2241 | aL1L 2242 | aL1L 2243 | aL1L 2244 | aL0L 2245 | aL1L 2246 | aL1L 2247 | aL1L 2248 | aL1L 2249 | aL3L 2250 | aL1L 2251 | aL1L 2252 | aL0L 2253 | aL1L 2254 | aL1L 2255 | aL0L 2256 | aL1L 2257 | aL1L 2258 | aL1L 2259 | aL1L 2260 | aL1L 2261 | aL0L 2262 | aL0L 2263 | aL1L 2264 | aL0L 2265 | aL3L 2266 | aL2L 2267 | aL1L 2268 | aL1L 2269 | aL1L 2270 | aL1L 2271 | aL1L 2272 | aL0L 2273 | aL1L 2274 | aL1L 2275 | aL1L 2276 | aL0L 2277 | aL1L 2278 | aL1L 2279 | aL1L 2280 | aL1L 2281 | aL0L 2282 | aL1L 2283 | aL3L 2284 | aL1L 2285 | aL1L 2286 | aL1L 2287 | aL1L 2288 | aL1L 2289 | aL1L 2290 | aL0L 2291 | aL1L 2292 | aL1L 2293 | aL1L 2294 | aL1L 2295 | aL1L 2296 | aL0L 2297 | aL1L 2298 | aL0L 2299 | aL1L 2300 | aL3L 2301 | aL1L 2302 | aL1L 2303 | aL1L 2304 | aL0L 2305 | aL0L 2306 | aL1L 2307 | aL1L 2308 | aL1L 2309 | aL1L 2310 | aL1L 2311 | aL1L 2312 | aL0L 2313 | aL0L 2314 | aL1L 2315 | aL1L 2316 | aL1L 2317 | aL3L 2318 | aL1L 2319 | aL1L 2320 | aL1L 2321 | aL1L 2322 | aL0L 2323 | aL1L 2324 | aL1L 2325 | aL1L 2326 | aL1L 2327 | aL1L 2328 | aL1L 2329 | aL1L 2330 | aL1L 2331 | aL2L 2332 | aL1L 2333 | aL1L 2334 | aL0L 2335 | aL1L 2336 | aL1L 2337 | aL1L 2338 | aL1L 2339 | aL1L 2340 | aL1L 2341 | aL1L 2342 | aL1L 2343 | aL1L 2344 | aL0L 2345 | aL1L 2346 | aL1L 2347 | aL1L 2348 | aL1L 2349 | aL0L 2350 | aL1L 2351 | aL0L 2352 | aL1L 2353 | aL1L 2354 | aL1L 2355 | aL1L 2356 | aL1L 2357 | aL0L 2358 | aL1L 2359 | aL1L 2360 | aL1L 2361 | aL1L 2362 | aL1L 2363 | aL1L 2364 | aL1L 2365 | aL1L 2366 | aL1L 2367 | aL3L 2368 | aL3L 2369 | aL1L 2370 | aL1L 2371 | aL1L 2372 | aL0L 2373 | aL1L 2374 | aL1L 2375 | aL1L 2376 | aL1L 2377 | aL1L 2378 | aL1L 2379 | aL1L 2380 | aL1L 2381 | aL1L 2382 | aL1L 2383 | aL0L 2384 | aL3L 2385 | aL3L 2386 | aL1L 2387 | aL1L 2388 | aL1L 2389 | aL1L 2390 | aL1L 2391 | aL1L 2392 | aL1L 2393 | aL1L 2394 | aL1L 2395 | aL1L 2396 | aL1L 2397 | aL1L 2398 | aL0L 2399 | aL1L 2400 | aL0L 2401 | aL1L 2402 | aL3L 2403 | aL1L 2404 | aL1L 2405 | aL1L 2406 | aL1L 2407 | aL1L 2408 | aL1L 2409 | aL1L 2410 | aL1L 2411 | aL1L 2412 | aL1L 2413 | aL0L 2414 | aL1L 2415 | aL1L 2416 | aL1L 2417 | aL0L 2418 | aL1L 2419 | aL3L 2420 | aL0L 2421 | aL1L 2422 | aL0L 2423 | aL1L 2424 | aL1L 2425 | aL1L 2426 | aL0L 2427 | aL1L 2428 | aL1L 2429 | aL1L 2430 | aL1L 2431 | aL0L 2432 | aL1L 2433 | aL1L 2434 | aL1L 2435 | aL1L 2436 | aL1L 2437 | aL1L 2438 | aL1L 2439 | aL1L 2440 | aL1L 2441 | aL0L 2442 | aL1L 2443 | aL1L 2444 | aL1L 2445 | aL1L 2446 | aL1L 2447 | aL1L 2448 | aL1L 2449 | aL0L 2450 | aL1L 2451 | aL0L 2452 | aL0L 2453 | aL0L 2454 | aL0L 2455 | aL1L 2456 | aL1L 2457 | aL1L 2458 | aL1L 2459 | aL1L 2460 | aL1L 2461 | aL1L 2462 | aL1L 2463 | aL1L 2464 | aL1L 2465 | aL1L 2466 | aL1L 2467 | aL1L 2468 | aL1L 2469 | aL1L 2470 | aL3L 2471 | aL1L 2472 | aL1L 2473 | aL1L 2474 | aL1L 2475 | aL1L 2476 | aL1L 2477 | aL0L 2478 | aL1L 2479 | aL1L 2480 | aL1L 2481 | aL1L 2482 | aL1L 2483 | aL1L 2484 | aL1L 
2485 | aL1L 2486 | aL2L 2487 | aL2L 2488 | aL1L 2489 | aL1L 2490 | aL1L 2491 | aL0L 2492 | aL0L 2493 | aL1L 2494 | aL1L 2495 | aL1L 2496 | aL1L 2497 | aL0L 2498 | aL1L 2499 | aL0L 2500 | aL1L 2501 | aL2L 2502 | aL0L 2503 | aL3L 2504 | aL2L 2505 | aL1L 2506 | aL1L 2507 | aL1L 2508 | aL0L 2509 | aL0L 2510 | aL1L 2511 | aL1L 2512 | aL1L 2513 | aL0L 2514 | aL1L 2515 | aL1L 2516 | aL0L 2517 | aL0L 2518 | aL2L 2519 | aL0L 2520 | aL0L 2521 | aL3L 2522 | aL1L 2523 | aL1L 2524 | aL1L 2525 | aL1L 2526 | aL1L 2527 | aL1L 2528 | aL1L 2529 | aL1L 2530 | aL0L 2531 | aL1L 2532 | aL1L 2533 | aL1L 2534 | aL1L 2535 | aL0L 2536 | aL0L 2537 | aL1L 2538 | aL3L 2539 | aL1L 2540 | aL1L 2541 | aL1L 2542 | aL1L 2543 | aL0L 2544 | aL0L 2545 | aL1L 2546 | aL0L 2547 | aL1L 2548 | aL1L 2549 | aL1L 2550 | aL1L 2551 | aL0L 2552 | aL2L 2553 | aL1L 2554 | aL3L 2555 | aL3L 2556 | aL1L 2557 | aL1L 2558 | aL1L 2559 | aL1L 2560 | aL1L 2561 | aL0L 2562 | aL1L 2563 | aL1L 2564 | aL1L 2565 | aL1L 2566 | aL1L 2567 | aL1L 2568 | aL1L 2569 | aL2L 2570 | aL1L 2571 | aL1L 2572 | aL2L 2573 | aL1L 2574 | aL1L 2575 | aL0L 2576 | aL1L 2577 | aL1L 2578 | aL0L 2579 | aL1L 2580 | aL1L 2581 | aL1L 2582 | aL1L 2583 | aL1L 2584 | aL0L 2585 | aL0L 2586 | aL1L 2587 | aL1L 2588 | aL1L 2589 | aL2L 2590 | aL0L 2591 | aL1L 2592 | aL1L 2593 | aL0L 2594 | aL1L 2595 | aL0L 2596 | aL1L 2597 | aL1L 2598 | aL1L 2599 | aL0L 2600 | aL1L 2601 | aL0L 2602 | aL1L 2603 | aL2L 2604 | aL1L 2605 | aL3L 2606 | aL2L 2607 | aL1L 2608 | aL1L 2609 | aL1L 2610 | aL1L 2611 | aL1L 2612 | aL0L 2613 | aL1L 2614 | aL1L 2615 | aL1L 2616 | aL1L 2617 | aL1L 2618 | aL0L 2619 | aL0L 2620 | aL0L 2621 | aL1L 2622 | aL3L 2623 | aL2L 2624 | aL0L 2625 | aL1L 2626 | aL0L 2627 | aL0L 2628 | aL1L 2629 | aL1L 2630 | aL1L 2631 | aL1L 2632 | aL1L 2633 | aL1L 2634 | aL1L 2635 | aL0L 2636 | aL1L 2637 | aL1L 2638 | aL0L 2639 | aL0L 2640 | aL3L 2641 | aL1L 2642 | aL1L 2643 | aL1L 2644 | aL1L 2645 | aL1L 2646 | aL1L 2647 | aL1L 2648 | aL1L 2649 | aL0L 2650 | aL1L 2651 | aL1L 2652 | aL1L 2653 | aL1L 2654 | aL1L 2655 | aL1L 2656 | aL3L 2657 | aL2L 2658 | aL1L 2659 | aL1L 2660 | aL0L 2661 | aL0L 2662 | aL0L 2663 | aL0L 2664 | aL1L 2665 | aL0L 2666 | aL1L 2667 | aL1L 2668 | aL1L 2669 | aL0L 2670 | aL0L 2671 | aL1L 2672 | aL1L 2673 | aL1L 2674 | aL0L 2675 | aL1L 2676 | aL1L 2677 | aL1L 2678 | aL1L 2679 | aL1L 2680 | aL1L 2681 | aL1L 2682 | aL1L 2683 | aL1L 2684 | aL0L 2685 | aL1L 2686 | aL1L 2687 | aL1L 2688 | aL0L 2689 | aL1L 2690 | aL3L 2691 | aL0L 2692 | aL1L 2693 | aL1L 2694 | aL1L 2695 | aL1L 2696 | aL1L 2697 | aL1L 2698 | aL1L 2699 | aL1L 2700 | aL1L 2701 | aL1L 2702 | aL1L 2703 | aL1L 2704 | aL1L 2705 | aL1L 2706 | aL0L 2707 | aL3L 2708 | aL2L 2709 | aL1L 2710 | aL1L 2711 | aL1L 2712 | aL1L 2713 | aL0L 2714 | aL1L 2715 | aL0L 2716 | aL0L 2717 | aL1L 2718 | aL1L 2719 | aL1L 2720 | aL0L 2721 | aL0L 2722 | aL1L 2723 | aL0L 2724 | aL3L 2725 | aL2L 2726 | aL1L 2727 | aL1L 2728 | aL1L 2729 | aL1L 2730 | aL1L 2731 | aL1L 2732 | aL1L 2733 | aL1L 2734 | aL0L 2735 | aL1L 2736 | aL1L 2737 | aL1L 2738 | aL1L 2739 | aL2L 2740 | aL1L 2741 | aL3L 2742 | aL3L 2743 | aL0L 2744 | aL1L 2745 | aL0L 2746 | aL1L 2747 | aL1L 2748 | aL1L 2749 | aL1L 2750 | aL1L 2751 | aL1L 2752 | aL1L 2753 | aL1L 2754 | aL1L 2755 | aL1L 2756 | aL1L 2757 | aL0L 2758 | aL1L 2759 | aL0L 2760 | aL0L 2761 | aL1L 2762 | aL1L 2763 | aL1L 2764 | aL1L 2765 | aL1L 2766 | aL1L 2767 | aL1L 2768 | aL0L 2769 | aL0L 2770 | aL1L 2771 | aL0L 2772 | aL1L 2773 | aL1L 2774 | aL1L 2775 | aL1L 2776 | aL0L 2777 | aL1L 2778 | aL1L 2779 | aL1L 2780 | aL0L 
2781 | aL0L 2782 | aL1L 2783 | aL1L 2784 | aL1L 2785 | aL1L 2786 | aL1L 2787 | aL1L 2788 | aL1L 2789 | aL1L 2790 | aL1L 2791 | aL0L 2792 | aL1L 2793 | aL3L 2794 | aL0L 2795 | aL1L 2796 | aL1L 2797 | aL1L 2798 | aL1L 2799 | aL1L 2800 | aL1L 2801 | aL1L 2802 | aL1L 2803 | aL1L 2804 | aL1L 2805 | aL1L 2806 | aL1L 2807 | aL1L 2808 | aL1L 2809 | aL3L 2810 | aL2L 2811 | aL1L 2812 | aL1L 2813 | aL1L 2814 | aL1L 2815 | aL1L 2816 | aL1L 2817 | aL1L 2818 | aL1L 2819 | aL0L 2820 | aL0L 2821 | aL1L 2822 | aL1L 2823 | aL1L 2824 | aL1L 2825 | aL0L 2826 | aL0L 2827 | aL0L 2828 | aL1L 2829 | aL1L 2830 | aL1L 2831 | aL1L 2832 | aL1L 2833 | aL1L 2834 | aL1L 2835 | aL1L 2836 | aL0L 2837 | aL1L 2838 | aL1L 2839 | aL1L 2840 | aL1L 2841 | aL1L 2842 | aL1L 2843 | aL1L 2844 | aL3L 2845 | aL1L 2846 | aL1L 2847 | aL1L 2848 | aL1L 2849 | aL1L 2850 | aL1L 2851 | aL0L 2852 | aL1L 2853 | aL1L 2854 | aL1L 2855 | aL1L 2856 | aL0L 2857 | aL1L 2858 | aL2L 2859 | aL0L 2860 | aL1L 2861 | aL3L 2862 | aL1L 2863 | aL1L 2864 | aL0L 2865 | aL1L 2866 | aL1L 2867 | aL1L 2868 | aL1L 2869 | aL1L 2870 | aL1L 2871 | aL0L 2872 | aL1L 2873 | aL1L 2874 | aL1L 2875 | aL1L 2876 | aL1L 2877 | aL1L 2878 | aL1L 2879 | aL0L 2880 | aL1L 2881 | aL0L 2882 | aL1L 2883 | aL1L 2884 | aL1L 2885 | aL1L 2886 | aL1L 2887 | aL1L 2888 | aL1L 2889 | aL1L 2890 | aL0L 2891 | aL1L 2892 | aL2L 2893 | aL0L 2894 | aL0L 2895 | aL2L 2896 | aL1L 2897 | aL1L 2898 | aL1L 2899 | aL1L 2900 | aL1L 2901 | aL1L 2902 | aL1L 2903 | aL1L 2904 | aL0L 2905 | aL1L 2906 | aL1L 2907 | aL1L 2908 | aL1L 2909 | aL1L 2910 | aL0L 2911 | aL3L 2912 | aL2L 2913 | aL1L 2914 | aL1L 2915 | aL1L 2916 | aL1L 2917 | aL1L 2918 | aL1L 2919 | aL0L 2920 | aL0L 2921 | aL1L 2922 | aL0L 2923 | aL0L 2924 | aL0L 2925 | aL1L 2926 | aL1L 2927 | aL1L 2928 | aL1L 2929 | aL1L 2930 | aL0L 2931 | aL1L 2932 | aL1L 2933 | aL0L 2934 | aL1L 2935 | aL1L 2936 | aL1L 2937 | aL1L 2938 | aL1L 2939 | aL0L 2940 | aL0L 2941 | aL0L 2942 | aL0L 2943 | aL0L 2944 | aL0L 2945 | aL1L 2946 | aL0L 2947 | aL1L 2948 | aL1L 2949 | aL1L 2950 | aL1L 2951 | aL0L 2952 | aL1L 2953 | aL1L 2954 | aL1L 2955 | aL1L 2956 | aL0L 2957 | aL1L 2958 | aL1L 2959 | aL1L 2960 | aL1L 2961 | aL0L 2962 | aL0L 2963 | aL0L 2964 | aL1L 2965 | aL1L 2966 | aL1L 2967 | aL1L 2968 | aL1L 2969 | aL1L 2970 | aL1L 2971 | aL1L 2972 | aL0L 2973 | aL1L 2974 | aL1L 2975 | aL1L 2976 | aL1L 2977 | aL2L 2978 | aL1L 2979 | aL1L 2980 | aL3L 2981 | aL0L 2982 | aL1L 2983 | aL0L 2984 | aL1L 2985 | aL1L 2986 | aL1L 2987 | aL1L 2988 | aL1L 2989 | aL1L 2990 | aL1L 2991 | aL1L 2992 | aL1L 2993 | aL1L 2994 | aL0L 2995 | aL1L 2996 | aL3L 2997 | aL2L 2998 | aL0L 2999 | aL1L 3000 | aL1L 3001 | aL1L 3002 | aL1L 3003 | aL1L 3004 | aL0L 3005 | aL1L 3006 | aL1L 3007 | aL0L 3008 | aL0L 3009 | aL0L 3010 | aL1L 3011 | aL2L 3012 | aL0L 3013 | aL0L 3014 | aL0L 3015 | aL0L 3016 | aL1L 3017 | aL1L 3018 | aL1L 3019 | aL0L 3020 | aL0L 3021 | aL1L 3022 | aL1L 3023 | aL1L 3024 | aL1L 3025 | aL1L 3026 | aL0L 3027 | aL1L 3028 | aL1L 3029 | aL1L 3030 | aL1L 3031 | aL3L 3032 | aL1L 3033 | aL1L 3034 | aL0L 3035 | aL0L 3036 | aL0L 3037 | aL1L 3038 | aL1L 3039 | aL1L 3040 | aL1L 3041 | aL1L 3042 | aL1L 3043 | aL1L 3044 | aL0L 3045 | aL1L 3046 | aL1L 3047 | aL1L 3048 | aL2L 3049 | aL1L 3050 | aL1L 3051 | aL0L 3052 | aL1L 3053 | aL0L 3054 | aL0L 3055 | aL1L 3056 | aL1L 3057 | aL1L 3058 | aL1L 3059 | aL1L 3060 | aL0L 3061 | aL1L 3062 | aL2L 3063 | aL0L 3064 | aL1L 3065 | aL3L 3066 | aL0L 3067 | aL1L 3068 | aL0L 3069 | aL1L 3070 | aL1L 3071 | aL1L 3072 | aL1L 3073 | aL1L 3074 | aL1L 3075 | aL1L 3076 | aL1L 
3077 | aL0L 3078 | aL0L 3079 | aL2L 3080 | aL0L 3081 | aL1L 3082 | aL2L 3083 | aL1L 3084 | aL1L 3085 | aL0L 3086 | aL1L 3087 | aL1L 3088 | aL0L 3089 | aL1L 3090 | aL1L 3091 | aL1L 3092 | aL1L 3093 | aL1L 3094 | aL1L 3095 | aL0L 3096 | aL2L 3097 | aL1L 3098 | aL1L 3099 | aL3L 3100 | aL1L 3101 | aL1L 3102 | aL0L 3103 | aL1L 3104 | aL1L 3105 | aL1L 3106 | aL1L 3107 | aL1L 3108 | aL1L 3109 | aL1L 3110 | aL1L 3111 | aL1L 3112 | aL1L 3113 | aL1L 3114 | aL1L 3115 | aL1L 3116 | aL3L 3117 | aL1L 3118 | aL1L 3119 | aL1L 3120 | aL1L 3121 | aL0L 3122 | aL1L 3123 | aL1L 3124 | aL1L 3125 | aL1L 3126 | aL1L 3127 | aL1L 3128 | aL1L 3129 | aL0L 3130 | aL1L 3131 | aL1L 3132 | aL1L 3133 | aL2L 3134 | aL1L 3135 | aL1L 3136 | aL1L 3137 | aL1L 3138 | aL1L 3139 | aL0L 3140 | aL1L 3141 | aL1L 3142 | aL1L 3143 | aL1L 3144 | aL1L 3145 | aL1L 3146 | aL0L 3147 | aL1L 3148 | aL0L 3149 | aL2L 3150 | aL2L 3151 | aL0L 3152 | aL0L 3153 | aL1L 3154 | aL1L 3155 | aL1L 3156 | aL1L 3157 | aL1L 3158 | aL1L 3159 | aL1L 3160 | aL1L 3161 | aL1L 3162 | aL0L 3163 | aL1L 3164 | aL1L 3165 | aL0L 3166 | aL1L 3167 | aL3L 3168 | aL0L 3169 | aL1L 3170 | aL0L 3171 | aL1L 3172 | aL1L 3173 | aL1L 3174 | aL0L 3175 | aL1L 3176 | aL1L 3177 | aL1L 3178 | aL1L 3179 | aL0L 3180 | aL1L 3181 | aL1L 3182 | aL1L 3183 | aL1L 3184 | aL3L 3185 | aL0L 3186 | aL1L 3187 | aL1L 3188 | aL1L 3189 | aL1L 3190 | aL0L 3191 | aL1L 3192 | aL1L 3193 | aL1L 3194 | aL1L 3195 | aL1L 3196 | aL1L 3197 | aL1L 3198 | aL2L 3199 | aL1L 3200 | aL0L 3201 | aL2L 3202 | aL0L 3203 | aL1L 3204 | aL0L 3205 | aL1L 3206 | aL1L 3207 | aL1L 3208 | aL1L 3209 | aL1L 3210 | aL1L 3211 | aL0L 3212 | aL1L 3213 | aL1L 3214 | aL1L 3215 | aL1L 3216 | aL0L 3217 | aL3L 3218 | aL2L 3219 | aL1L 3220 | aL1L 3221 | aL0L 3222 | aL1L 3223 | aL1L 3224 | aL0L 3225 | aL1L 3226 | aL1L 3227 | aL1L 3228 | aL1L 3229 | aL1L 3230 | aL0L 3231 | aL0L 3232 | aL1L 3233 | aL1L 3234 | aL1L 3235 | aL1L 3236 | aL1L 3237 | aL1L 3238 | aL1L 3239 | aL0L 3240 | aL0L 3241 | aL1L 3242 | aL1L 3243 | aL1L 3244 | aL1L 3245 | aL1L 3246 | aL1L 3247 | aL1L 3248 | aL1L 3249 | aL1L 3250 | aL0L 3251 | aL3L 3252 | aL2L 3253 | aL1L 3254 | aL1L 3255 | aL1L 3256 | aL1L 3257 | aL1L 3258 | aL1L 3259 | aL1L 3260 | aL1L 3261 | aL0L 3262 | aL0L 3263 | aL1L 3264 | aL0L 3265 | aL0L 3266 | aL1L 3267 | aL0L 3268 | aL1L 3269 | aL3L 3270 | aL1L 3271 | aL1L 3272 | aL0L 3273 | aL1L 3274 | aL1L 3275 | aL0L 3276 | aL1L 3277 | aL1L 3278 | aL1L 3279 | aL0L 3280 | aL0L 3281 | aL0L 3282 | aL1L 3283 | aL1L 3284 | aL0L 3285 | aL2L 3286 | aL2L 3287 | aL1L 3288 | aL1L 3289 | aL1L 3290 | aL1L 3291 | aL1L 3292 | aL1L 3293 | aL0L 3294 | aL1L 3295 | aL1L 3296 | aL1L 3297 | aL1L 3298 | aL1L 3299 | aL1L 3300 | aL1L 3301 | aL1L 3302 | aL2L 3303 | aL2L 3304 | aL1L 3305 | aL1L 3306 | aL1L 3307 | aL0L 3308 | aL1L 3309 | aL1L 3310 | aL0L 3311 | aL1L 3312 | aL1L 3313 | aL1L 3314 | aL1L 3315 | aL0L 3316 | aL1L 3317 | aL1L 3318 | aL0L 3319 | aL0L 3320 | aL2L 3321 | aL0L 3322 | aL1L 3323 | aL1L 3324 | aL1L 3325 | aL1L 3326 | aL1L 3327 | aL1L 3328 | aL1L 3329 | aL1L 3330 | aL1L 3331 | aL1L 3332 | aL1L 3333 | aL1L 3334 | aL1L 3335 | aL1L 3336 | aL1L 3337 | aL3L 3338 | aL1L 3339 | aL1L 3340 | aL1L 3341 | aL0L 3342 | aL1L 3343 | aL1L 3344 | aL1L 3345 | aL1L 3346 | aL1L 3347 | aL1L 3348 | aL1L 3349 | aL1L 3350 | aL1L 3351 | aL2L 3352 | aL0L 3353 | aL1L 3354 | aL1L 3355 | aL1L 3356 | aL1L 3357 | aL1L 3358 | aL0L 3359 | aL0L 3360 | aL1L 3361 | aL1L 3362 | aL1L 3363 | aL1L 3364 | aL1L 3365 | aL1L 3366 | aL1L 3367 | aL1L 3368 | aL2L 3369 | aL0L 3370 | aL1L 3371 | aL0L 3372 | aL1L 
3373 | aL1L 3374 | aL1L 3375 | aL1L 3376 | aL0L 3377 | aL1L 3378 | aL1L 3379 | aL1L 3380 | aL1L 3381 | aL1L 3382 | aL1L 3383 | aL1L 3384 | aL0L 3385 | aL1L 3386 | aL0L 3387 | aL1L 3388 | aL3L 3389 | aL0L 3390 | aL1L 3391 | aL1L 3392 | aL1L 3393 | aL1L 3394 | aL1L 3395 | aL1L 3396 | aL1L 3397 | aL1L 3398 | aL1L 3399 | aL1L 3400 | aL1L 3401 | aL1L 3402 | aL2L 3403 | aL0L 3404 | aL1L 3405 | aL3L 3406 | aL1L 3407 | aL1L 3408 | aL0L 3409 | aL0L 3410 | aL1L 3411 | aL1L 3412 | aL1L 3413 | aL1L 3414 | aL1L 3415 | aL1L 3416 | aL1L 3417 | aL1L 3418 | aL1L 3419 | aL1L 3420 | aL1L 3421 | aL1L 3422 | aL3L 3423 | aL0L 3424 | aL1L 3425 | aL1L 3426 | aL0L 3427 | aL1L 3428 | aL0L 3429 | aL1L 3430 | aL1L 3431 | aL0L 3432 | aL0L 3433 | aL0L 3434 | aL0L 3435 | aL0L 3436 | aL1L 3437 | aL1L 3438 | aL1L 3439 | aL3L 3440 | aL1L 3441 | aL1L 3442 | aL1L 3443 | aL1L 3444 | aL0L 3445 | aL0L 3446 | aL1L 3447 | aL1L 3448 | aL1L 3449 | aL0L 3450 | aL1L 3451 | aL0L 3452 | aL0L 3453 | aL0L 3454 | aL1L 3455 | aL1L 3456 | aL3L 3457 | aL0L 3458 | aL1L 3459 | aL1L 3460 | aL1L 3461 | aL1L 3462 | aL1L 3463 | aL1L 3464 | aL1L 3465 | aL1L 3466 | aL1L 3467 | aL0L 3468 | aL1L 3469 | aL1L 3470 | aL1L 3471 | aL1L 3472 | aL1L 3473 | aL3L 3474 | aL0L 3475 | aL1L 3476 | aL1L 3477 | aL1L 3478 | aL1L 3479 | aL1L 3480 | aL1L 3481 | aL0L 3482 | aL1L 3483 | aL1L 3484 | aL1L 3485 | aL1L 3486 | aL1L 3487 | aL2L 3488 | aL0L 3489 | aL3L 3490 | aL2L 3491 | aL1L 3492 | aL1L 3493 | aL1L 3494 | aL1L 3495 | aL1L 3496 | aL0L 3497 | aL1L 3498 | aL1L 3499 | aL1L 3500 | aL0L 3501 | aL1L 3502 | aL0L 3503 | aL1L 3504 | aL2L 3505 | aL0L 3506 | aL1L 3507 | aL3L 3508 | aL1L 3509 | aL1L 3510 | aL1L 3511 | aL1L 3512 | aL0L 3513 | aL1L 3514 | aL1L 3515 | aL1L 3516 | aL1L 3517 | aL1L 3518 | aL1L 3519 | aL1L 3520 | aL1L 3521 | aL0L 3522 | aL0L 3523 | aL1L 3524 | aL3L 3525 | aL1L 3526 | aL1L 3527 | aL1L 3528 | aL1L 3529 | aL1L 3530 | aL1L 3531 | aL1L 3532 | aL1L 3533 | aL1L 3534 | aL0L 3535 | aL1L 3536 | aL1L 3537 | aL1L 3538 | aL1L 3539 | aL0L 3540 | aL3L 3541 | aL3L 3542 | aL1L 3543 | aL1L 3544 | aL1L 3545 | aL1L 3546 | aL0L 3547 | aL1L 3548 | aL1L 3549 | aL1L 3550 | aL1L 3551 | aL1L 3552 | aL1L 3553 | aL1L 3554 | aL1L 3555 | aL1L 3556 | aL0L 3557 | aL1L 3558 | aL0L 3559 | aL1L 3560 | aL1L 3561 | aL1L 3562 | aL1L 3563 | aL1L 3564 | aL1L 3565 | aL1L 3566 | aL1L 3567 | aL1L 3568 | aL1L 3569 | aL1L 3570 | aL0L 3571 | aL1L 3572 | aL1L 3573 | aL1L 3574 | aL1L 3575 | aL0L 3576 | aL1L 3577 | aL1L 3578 | aL1L 3579 | aL1L 3580 | aL0L 3581 | aL0L 3582 | aL1L 3583 | aL1L 3584 | aL1L 3585 | aL1L 3586 | aL1L 3587 | aL1L 3588 | aL0L 3589 | aL2L 3590 | aL1L 3591 | aL1L 3592 | aL3L 3593 | aL1L 3594 | aL1L 3595 | aL0L 3596 | aL1L 3597 | aL1L 3598 | aL1L 3599 | aL1L 3600 | aL1L 3601 | aL1L 3602 | aL1L 3603 | aL1L 3604 | aL1L 3605 | aL1L 3606 | aL2L 3607 | aL0L 3608 | aL0L 3609 | aL2L 3610 | aL0L 3611 | aL1L 3612 | aL0L 3613 | aL0L 3614 | aL1L 3615 | aL0L 3616 | aL1L 3617 | aL1L 3618 | aL1L 3619 | aL1L 3620 | aL1L 3621 | aL1L 3622 | aL0L 3623 | aL1L 3624 | aL1L 3625 | aL1L 3626 | aL3L 3627 | aL1L 3628 | aL1L 3629 | aL1L 3630 | aL0L 3631 | aL0L 3632 | aL1L 3633 | aL1L 3634 | aL1L 3635 | aL1L 3636 | aL1L 3637 | aL1L 3638 | aL1L 3639 | aL1L 3640 | aL1L 3641 | aL0L 3642 | aL2L 3643 | aL2L 3644 | aL1L 3645 | aL1L 3646 | aL1L 3647 | aL0L 3648 | aL1L 3649 | aL0L 3650 | aL1L 3651 | aL1L 3652 | aL0L 3653 | aL1L 3654 | aL1L 3655 | aL0L 3656 | aL0L 3657 | aL1L 3658 | aL0L 3659 | aL3L 3660 | aL3L 3661 | aL0L 3662 | aL1L 3663 | aL0L 3664 | aL1L 3665 | aL1L 3666 | aL1L 3667 | aL1L 3668 | aL1L 
3669 | aL1L 3670 | aL1L 3671 | aL1L 3672 | aL1L 3673 | aL1L 3674 | aL1L 3675 | aL0L 3676 | aL0L 3677 | aL0L 3678 | aL1L 3679 | aL1L 3680 | aL1L 3681 | aL1L 3682 | aL1L 3683 | aL1L 3684 | aL1L 3685 | aL1L 3686 | aL0L 3687 | aL1L 3688 | aL1L 3689 | aL1L 3690 | aL1L 3691 | aL1L 3692 | aL1L 3693 | aL1L 3694 | aL0L 3695 | aL0L 3696 | aL1L 3697 | aL0L 3698 | aL1L 3699 | aL1L 3700 | aL1L 3701 | aL1L 3702 | aL1L 3703 | aL1L 3704 | aL1L 3705 | aL1L 3706 | aL1L 3707 | aL1L 3708 | aL0L 3709 | aL0L 3710 | aL1L 3711 | aL3L 3712 | aL0L 3713 | aL1L 3714 | aL1L 3715 | aL1L 3716 | aL1L 3717 | aL1L 3718 | aL1L 3719 | aL1L 3720 | aL1L 3721 | aL1L 3722 | aL1L 3723 | aL1L 3724 | aL1L 3725 | aL2L 3726 | aL0L 3727 | aL0L 3728 | aL3L 3729 | aL1L 3730 | aL1L 3731 | aL1L 3732 | aL1L 3733 | aL1L 3734 | aL1L 3735 | aL1L 3736 | aL1L 3737 | aL1L 3738 | aL1L 3739 | aL1L 3740 | aL1L 3741 | aL0L 3742 | aL2L 3743 | aL1L 3744 | aL2L 3745 | aL2L 3746 | aL1L 3747 | aL1L 3748 | aL1L 3749 | aL0L 3750 | aL1L 3751 | aL1L 3752 | aL1L 3753 | aL1L 3754 | aL1L 3755 | aL1L 3756 | aL1L 3757 | aL0L 3758 | aL1L 3759 | aL1L 3760 | aL1L 3761 | aL1L 3762 | aL3L 3763 | aL1L 3764 | aL1L 3765 | aL1L 3766 | aL1L 3767 | aL0L 3768 | aL0L 3769 | aL1L 3770 | aL1L 3771 | aL1L 3772 | aL1L 3773 | aL1L 3774 | aL0L 3775 | aL0L 3776 | aL1L 3777 | aL0L 3778 | aL1L 3779 | aL1L 3780 | aL1L 3781 | aL1L 3782 | aL0L 3783 | aL1L 3784 | aL1L 3785 | aL1L 3786 | aL1L 3787 | aL1L 3788 | aL0L 3789 | aL0L 3790 | aL1L 3791 | aL0L 3792 | aL1L 3793 | aL2L 3794 | aL1L 3795 | aL1L 3796 | aL0L 3797 | aL1L 3798 | aL1L 3799 | aL1L 3800 | aL1L 3801 | aL1L 3802 | aL0L 3803 | aL1L 3804 | aL1L 3805 | aL1L 3806 | aL1L 3807 | aL1L 3808 | aL1L 3809 | aL1L 3810 | aL1L 3811 | aL0L 3812 | aL0L 3813 | aL0L 3814 | aL1L 3815 | aL1L 3816 | aL1L 3817 | aL1L 3818 | aL1L 3819 | aL1L 3820 | aL1L 3821 | aL1L 3822 | aL0L 3823 | aL1L 3824 | aL1L 3825 | aL1L 3826 | aL1L 3827 | aL2L 3828 | aL0L 3829 | aL1L 3830 | aL0L 3831 | aL1L 3832 | aL1L 3833 | aL1L 3834 | aL1L 3835 | aL1L 3836 | aL0L 3837 | aL1L 3838 | aL1L 3839 | aL1L 3840 | aL1L 3841 | aL1L 3842 | aL1L 3843 | aL1L 3844 | aL2L 3845 | aL0L 3846 | aL0L 3847 | aL2L 3848 | aL1L 3849 | aL1L 3850 | aL1L 3851 | aL1L 3852 | aL1L 3853 | aL1L 3854 | aL1L 3855 | aL1L 3856 | aL1L 3857 | aL0L 3858 | aL1L 3859 | aL1L 3860 | aL1L 3861 | aL2L 3862 | aL1L 3863 | aL1L 3864 | aL2L 3865 | aL1L 3866 | aL1L 3867 | aL1L 3868 | aL1L 3869 | aL1L 3870 | aL0L 3871 | aL1L 3872 | aL1L 3873 | aL1L 3874 | aL1L 3875 | aL1L 3876 | aL1L 3877 | aL0L 3878 | aL1L 3879 | aL0L 3880 | aL1L 3881 | aL0L 3882 | aL1L 3883 | aL1L 3884 | aL1L 3885 | aL1L 3886 | aL0L 3887 | aL0L 3888 | aL1L 3889 | aL1L 3890 | aL1L 3891 | aL1L 3892 | aL1L 3893 | aL1L 3894 | aL0L 3895 | aL2L 3896 | aL1L 3897 | aL1L 3898 | aL3L 3899 | aL1L 3900 | aL1L 3901 | aL1L 3902 | aL1L 3903 | aL1L 3904 | aL0L 3905 | aL1L 3906 | aL1L 3907 | aL1L 3908 | aL1L 3909 | aL1L 3910 | aL1L 3911 | aL1L 3912 | aL2L 3913 | aL0L 3914 | aL3L 3915 | aL2L 3916 | aL0L 3917 | aL1L 3918 | aL1L 3919 | aL1L 3920 | aL1L 3921 | aL0L 3922 | aL1L 3923 | aL1L 3924 | aL1L 3925 | aL1L 3926 | aL1L 3927 | aL0L 3928 | aL0L 3929 | aL2L 3930 | aL1L 3931 | aL1L 3932 | aL3L 3933 | aL1L 3934 | aL1L 3935 | aL1L 3936 | aL1L 3937 | aL1L 3938 | aL0L 3939 | aL1L 3940 | aL1L 3941 | aL1L 3942 | aL1L 3943 | aL1L 3944 | aL1L 3945 | aL0L 3946 | aL1L 3947 | aL1L 3948 | aL1L 3949 | aL0L 3950 | aL1L 3951 | aL1L 3952 | aL1L 3953 | aL1L 3954 | aL0L 3955 | aL0L 3956 | aL1L 3957 | aL1L 3958 | aL1L 3959 | aL1L 3960 | aL1L 3961 | aL1L 3962 | aL0L 3963 | aL1L 3964 | aL0L 
3965 | aL1L 3966 | aL1L 3967 | aL1L 3968 | aL1L 3969 | aL0L 3970 | aL1L 3971 | aL1L 3972 | aL1L 3973 | aL1L 3974 | aL1L 3975 | aL0L 3976 | aL0L 3977 | aL0L 3978 | aL0L 3979 | aL1L 3980 | aL1L 3981 | aL0L 3982 | aL1L 3983 | aL3L 3984 | aL1L 3985 | aL1L 3986 | aL1L 3987 | aL1L 3988 | aL0L 3989 | aL0L 3990 | aL1L 3991 | aL1L 3992 | aL1L 3993 | aL1L 3994 | aL1L 3995 | aL1L 3996 | aL0L 3997 | aL1L 3998 | aL0L 3999 | aL3L 4000 | aL2L 4001 | aL1L 4002 | aL1L 4003 | aL1L 4004 | aL1L 4005 | aL1L 4006 | aL1L 4007 | aL1L 4008 | aL1L 4009 | aL1L 4010 | aL0L 4011 | aL1L 4012 | aL0L 4013 | aL1L 4014 | aL1L 4015 | aL0L 4016 | aL0L 4017 | aL0L 4018 | aL1L 4019 | aL1L 4020 | aL1L 4021 | aL1L 4022 | aL1L 4023 | aL1L 4024 | aL1L 4025 | aL1L 4026 | aL0L 4027 | aL1L 4028 | aL1L 4029 | aL1L 4030 | aL1L 4031 | aL2L 4032 | aL1L 4033 | aL1L 4034 | aL3L 4035 | aL1L 4036 | aL1L 4037 | aL1L 4038 | aL0L 4039 | aL0L 4040 | aL1L 4041 | aL1L 4042 | aL1L 4043 | aL1L 4044 | aL1L 4045 | aL1L 4046 | aL1L 4047 | aL0L 4048 | aL0L 4049 | aL0L 4050 | aL3L 4051 | aL3L 4052 | aL1L 4053 | aL1L 4054 | aL1L 4055 | aL1L 4056 | aL0L 4057 | aL0L 4058 | aL1L 4059 | aL1L 4060 | aL1L 4061 | aL1L 4062 | aL1L 4063 | aL1L 4064 | aL1L 4065 | aL1L 4066 | aL0L 4067 | aL0L 4068 | aL2L 4069 | aL0L 4070 | aL1L 4071 | aL1L 4072 | aL1L 4073 | aL1L 4074 | aL1L 4075 | aL1L 4076 | aL0L 4077 | aL1L 4078 | aL1L 4079 | aL0L 4080 | aL1L 4081 | aL1L 4082 | aL1L 4083 | aL0L 4084 | aL3L 4085 | aL2L 4086 | aL1L 4087 | aL1L 4088 | aL0L 4089 | aL1L 4090 | aL0L 4091 | aL0L 4092 | aL1L 4093 | aL1L 4094 | aL1L 4095 | aL1L 4096 | aL1L 4097 | aL0L 4098 | aL0L 4099 | aL1L 4100 | aL0L 4101 | aL3L 4102 | aL2L 4103 | aL1L 4104 | aL1L 4105 | aL1L 4106 | aL1L 4107 | aL1L 4108 | aL1L 4109 | aL1L 4110 | aL1L 4111 | aL1L 4112 | aL1L 4113 | aL1L 4114 | aL0L 4115 | aL1L 4116 | aL1L 4117 | aL0L 4118 | aL1L 4119 | aL3L 4120 | aL1L 4121 | aL1L 4122 | aL1L 4123 | aL1L 4124 | aL1L 4125 | aL0L 4126 | aL1L 4127 | aL1L 4128 | aL1L 4129 | aL1L 4130 | aL1L 4131 | aL1L 4132 | aL0L 4133 | aL1L 4134 | aL0L 4135 | aL3L 4136 | aL2L 4137 | aL1L 4138 | aL1L 4139 | aL1L 4140 | aL0L 4141 | aL1L 4142 | aL1L 4143 | aL1L 4144 | aL1L 4145 | aL1L 4146 | aL0L 4147 | aL1L 4148 | aL0L 4149 | aL0L 4150 | aL1L 4151 | aL1L 4152 | aL1L 4153 | aL2L 4154 | aL1L 4155 | aL1L 4156 | aL1L 4157 | aL0L 4158 | aL0L 4159 | aL0L 4160 | aL1L 4161 | aL1L 4162 | aL1L 4163 | aL1L 4164 | aL1L 4165 | aL1L 4166 | aL0L 4167 | aL0L 4168 | aL1L 4169 | aL1L 4170 | aL2L 4171 | aL0L 4172 | aL1L 4173 | aL1L 4174 | aL1L 4175 | aL1L 4176 | aL1L 4177 | aL1L 4178 | aL1L 4179 | aL1L 4180 | aL1L 4181 | aL1L 4182 | aL0L 4183 | aL1L 4184 | aL2L 4185 | aL0L 4186 | aL3L 4187 | aL2L 4188 | aL1L 4189 | aL1L 4190 | aL1L 4191 | aL1L 4192 | aL1L 4193 | aL0L 4194 | aL1L 4195 | aL1L 4196 | aL1L 4197 | aL1L 4198 | aL1L 4199 | aL0L 4200 | aL0L 4201 | aL1L 4202 | aL1L 4203 | aL1L 4204 | aL0L 4205 | aL1L 4206 | aL1L 4207 | aL1L 4208 | aL0L 4209 | aL0L 4210 | aL1L 4211 | aL1L 4212 | aL1L 4213 | aL1L 4214 | aL0L 4215 | aL1L 4216 | aL1L 4217 | aL1L 4218 | aL1L 4219 | aL1L 4220 | aL1L 4221 | aL3L 4222 | aL1L 4223 | aL1L 4224 | aL0L 4225 | aL1L 4226 | aL1L 4227 | aL1L 4228 | aL1L 4229 | aL1L 4230 | aL1L 4231 | aL1L 4232 | aL1L 4233 | aL0L 4234 | aL0L 4235 | aL1L 4236 | aL1L 4237 | aL1L 4238 | aL3L 4239 | aL1L 4240 | aL1L 4241 | aL1L 4242 | aL1L 4243 | aL0L 4244 | aL1L 4245 | aL1L 4246 | aL1L 4247 | aL1L 4248 | aL1L 4249 | aL1L 4250 | aL1L 4251 | aL0L 4252 | aL1L 4253 | aL0L 4254 | aL1L 4255 | aL2L 4256 | aL1L 4257 | aL1L 4258 | aL1L 4259 | aL0L 4260 | aL1L 
4261 | aL0L 4262 | aL1L 4263 | aL1L 4264 | aL1L 4265 | aL1L 4266 | aL1L 4267 | aL0L 4268 | aL0L 4269 | aL1L 4270 | aL0L 4271 | aL1L 4272 | aL3L 4273 | aL0L 4274 | aL1L 4275 | aL1L 4276 | aL1L 4277 | aL1L 4278 | aL1L 4279 | aL1L 4280 | aL1L 4281 | aL1L 4282 | aL1L 4283 | aL1L 4284 | aL0L 4285 | aL1L 4286 | aL2L 4287 | aL1L 4288 | aL1L 4289 | aL3L 4290 | aL1L 4291 | aL1L 4292 | aL1L 4293 | aL1L 4294 | aL1L 4295 | aL0L 4296 | aL1L 4297 | aL1L 4298 | aL1L 4299 | aL1L 4300 | aL1L 4301 | aL1L 4302 | aL1L 4303 | aL1L 4304 | aL0L 4305 | aL2L 4306 | aL2L 4307 | aL1L 4308 | aL1L 4309 | aL0L 4310 | aL1L 4311 | aL0L 4312 | aL1L 4313 | aL1L 4314 | aL1L 4315 | aL1L 4316 | aL1L 4317 | aL1L 4318 | aL1L 4319 | aL0L 4320 | aL2L 4321 | aL1L 4322 | aL1L 4323 | aL3L 4324 | aL1L 4325 | aL1L 4326 | aL0L 4327 | aL1L 4328 | aL1L 4329 | aL1L 4330 | aL1L 4331 | aL1L 4332 | aL1L 4333 | aL1L 4334 | aL1L 4335 | aL1L 4336 | aL1L 4337 | aL1L 4338 | aL1L 4339 | aL1L 4340 | aL3L 4341 | aL1L 4342 | aL1L 4343 | aL1L 4344 | aL0L 4345 | aL1L 4346 | aL1L 4347 | aL1L 4348 | aL0L 4349 | aL1L 4350 | aL1L 4351 | aL0L 4352 | aL1L 4353 | aL1L 4354 | aL2L 4355 | aL0L 4356 | aL1L 4357 | aL0L 4358 | aL0L 4359 | aL1L 4360 | aL1L 4361 | aL1L 4362 | aL1L 4363 | aL1L 4364 | aL1L 4365 | aL1L 4366 | aL1L 4367 | aL1L 4368 | aL1L 4369 | aL1L 4370 | aL1L 4371 | aL1L 4372 | aL0L 4373 | aL1L 4374 | aL3L 4375 | aL1L 4376 | aL1L 4377 | aL0L 4378 | aL0L 4379 | aL1L 4380 | aL1L 4381 | aL1L 4382 | aL1L 4383 | aL1L 4384 | aL1L 4385 | aL1L 4386 | aL0L 4387 | aL0L 4388 | aL1L 4389 | aL1L 4390 | aL1L 4391 | aL0L 4392 | aL1L 4393 | aL1L 4394 | aL1L 4395 | aL1L 4396 | aL0L 4397 | aL1L 4398 | aL1L 4399 | aL0L 4400 | aL1L 4401 | aL1L 4402 | aL1L 4403 | aL1L 4404 | aL1L 4405 | aL1L 4406 | aL0L 4407 | aL0L 4408 | aL1L 4409 | aL0L 4410 | aL1L 4411 | aL1L 4412 | aL1L 4413 | aL1L 4414 | aL1L 4415 | aL1L 4416 | aL2L 4417 | aL0L 4418 | aL0L 4419 | aL1L 4420 | aL1L 4421 | aL1L 4422 | aL2L 4423 | aL1L 4424 | aL1L 4425 | aL3L 4426 | aL1L 4427 | aL1L 4428 | aL1L 4429 | aL1L 4430 | aL1L 4431 | aL1L 4432 | aL1L 4433 | aL1L 4434 | aL1L 4435 | aL1L 4436 | aL1L 4437 | aL0L 4438 | aL0L 4439 | aL2L 4440 | aL0L 4441 | aL1L 4442 | aL3L 4443 | aL1L 4444 | aL1L 4445 | aL1L 4446 | aL0L 4447 | aL0L 4448 | aL0L 4449 | aL1L 4450 | aL1L 4451 | aL1L 4452 | aL1L 4453 | aL1L 4454 | aL1L 4455 | aL0L 4456 | aL1L 4457 | aL0L 4458 | aL1L 4459 | aL0L 4460 | aL0L 4461 | aL1L 4462 | aL1L 4463 | aL1L 4464 | aL1L 4465 | aL1L 4466 | aL1L 4467 | aL1L 4468 | aL1L 4469 | aL1L 4470 | aL1L 4471 | aL1L 4472 | aL1L 4473 | aL1L 4474 | aL0L 4475 | aL0L 4476 | aL0L 4477 | aL0L 4478 | aL1L 4479 | aL1L 4480 | aL1L 4481 | aL1L 4482 | aL1L 4483 | aL1L 4484 | aL0L 4485 | aL0L 4486 | aL0L 4487 | aL1L 4488 | aL1L 4489 | aL1L 4490 | aL1L 4491 | aL1L 4492 | aL1L 4493 | aL3L 4494 | aL1L 4495 | aL1L 4496 | aL1L 4497 | aL1L 4498 | aL1L 4499 | aL1L 4500 | aL1L 4501 | aL1L 4502 | aL1L 4503 | aL1L 4504 | aL0L 4505 | aL1L 4506 | aL1L 4507 | aL2L 4508 | aL1L 4509 | aL1L 4510 | aL2L 4511 | aL1L 4512 | aL1L 4513 | aL0L 4514 | aL1L 4515 | aL1L 4516 | aL0L 4517 | aL1L 4518 | aL1L 4519 | aL1L 4520 | aL1L 4521 | aL1L 4522 | aL1L 4523 | aL0L 4524 | aL2L 4525 | aL0L 4526 | aL1L 4527 | aL3L 4528 | aL1L 4529 | aL1L 4530 | aL1L 4531 | aL0L 4532 | aL0L 4533 | aL1L 4534 | aL1L 4535 | aL1L 4536 | aL1L 4537 | aL1L 4538 | aL1L 4539 | aL0L 4540 | aL0L 4541 | aL2L 4542 | aL1L 4543 | aL1L 4544 | aL0L 4545 | aL0L 4546 | aL1L 4547 | aL1L 4548 | aL1L 4549 | aL1L 4550 | aL1L 4551 | aL1L 4552 | aL1L 4553 | aL1L 4554 | aL1L 4555 | aL1L 4556 | aL1L 
4557 | aL1L 4558 | aL2L 4559 | aL1L 4560 | aL1L 4561 | aL2L 4562 | aL1L 4563 | aL1L 4564 | aL0L 4565 | aL0L 4566 | aL0L 4567 | aL0L 4568 | aL1L 4569 | aL1L 4570 | aL1L 4571 | aL1L 4572 | aL1L 4573 | aL0L 4574 | aL0L 4575 | aL1L 4576 | aL0L 4577 | aL0L 4578 | aL3L 4579 | aL1L 4580 | aL1L 4581 | aL1L 4582 | aL0L 4583 | aL1L 4584 | aL0L 4585 | aL1L 4586 | aL1L 4587 | aL1L 4588 | aL1L 4589 | aL1L 4590 | aL1L 4591 | aL1L 4592 | aL1L 4593 | aL1L 4594 | aL1L 4595 | aL2L 4596 | aL1L 4597 | aL1L 4598 | aL1L 4599 | aL1L 4600 | aL0L 4601 | aL0L 4602 | aL1L 4603 | aL1L 4604 | aL1L 4605 | aL1L 4606 | aL1L 4607 | aL1L 4608 | aL1L 4609 | aL1L 4610 | aL1L 4611 | aL3L 4612 | aL3L 4613 | aL1L 4614 | aL1L 4615 | aL1L 4616 | aL1L 4617 | aL0L 4618 | aL1L 4619 | aL1L 4620 | aL1L 4621 | aL1L 4622 | aL1L 4623 | aL1L 4624 | aL1L 4625 | aL0L 4626 | aL2L 4627 | aL0L 4628 | aL1L 4629 | aL1L 4630 | aL1L 4631 | aL1L 4632 | aL1L 4633 | aL0L 4634 | aL0L 4635 | aL0L 4636 | aL1L 4637 | aL1L 4638 | aL1L 4639 | aL1L 4640 | aL1L 4641 | aL1L 4642 | aL0L 4643 | aL2L 4644 | aL0L 4645 | aL0L 4646 | aL1L 4647 | aL1L 4648 | aL1L 4649 | aL1L 4650 | aL1L 4651 | aL1L 4652 | aL1L 4653 | aL1L 4654 | aL1L 4655 | aL0L 4656 | aL1L 4657 | aL1L 4658 | aL1L 4659 | aL1L 4660 | aL1L 4661 | aL0L 4662 | aL0L 4663 | aL3L 4664 | aL1L 4665 | aL1L 4666 | aL1L 4667 | aL0L 4668 | aL1L 4669 | aL1L 4670 | aL1L 4671 | aL1L 4672 | aL0L 4673 | aL1L 4674 | aL1L 4675 | aL1L 4676 | aL1L 4677 | aL1L 4678 | aL0L 4679 | aL3L 4680 | aL2L 4681 | aL0L 4682 | aL1L 4683 | aL0L 4684 | aL0L 4685 | aL1L 4686 | aL0L 4687 | aL1L 4688 | aL1L 4689 | aL1L 4690 | aL1L 4691 | aL1L 4692 | aL0L 4693 | aL0L 4694 | aL0L 4695 | aL0L 4696 | aL0L 4697 | aL1L 4698 | aL1L 4699 | aL1L 4700 | aL1L 4701 | aL1L 4702 | aL1L 4703 | aL1L 4704 | aL1L 4705 | aL1L 4706 | aL0L 4707 | aL1L 4708 | aL1L 4709 | aL1L 4710 | aL1L 4711 | aL1L 4712 | aL0L 4713 | aL1L 4714 | aL0L 4715 | aL1L 4716 | aL1L 4717 | aL1L 4718 | aL1L 4719 | aL0L 4720 | aL0L 4721 | aL1L 4722 | aL1L 4723 | aL1L 4724 | aL1L 4725 | aL1L 4726 | aL1L 4727 | aL1L 4728 | aL2L 4729 | aL0L 4730 | aL0L 4731 | aL1L 4732 | aL1L 4733 | aL1L 4734 | aL1L 4735 | aL1L 4736 | aL1L 4737 | aL1L 4738 | aL1L 4739 | aL1L 4740 | aL0L 4741 | aL1L 4742 | aL1L 4743 | aL1L 4744 | aL1L 4745 | aL1L 4746 | aL0L 4747 | aL3L 4748 | aL2L 4749 | aL0L 4750 | aL0L 4751 | aL0L 4752 | aL0L 4753 | aL0L 4754 | aL0L 4755 | aL1L 4756 | aL1L 4757 | aL1L 4758 | aL1L 4759 | aL1L 4760 | aL0L 4761 | aL0L 4762 | aL2L 4763 | aL1L 4764 | aL1L 4765 | aL3L 4766 | aL1L 4767 | aL1L 4768 | aL1L 4769 | aL0L 4770 | aL0L 4771 | aL1L 4772 | aL0L 4773 | aL1L 4774 | aL1L 4775 | aL1L 4776 | aL1L 4777 | aL1L 4778 | aL0L 4779 | aL0L 4780 | aL0L 4781 | aL1L 4782 | aL2L 4783 | aL1L 4784 | aL1L 4785 | aL0L 4786 | aL1L 4787 | aL1L 4788 | aL1L 4789 | aL1L 4790 | aL1L 4791 | aL0L 4792 | aL0L 4793 | aL0L 4794 | aL1L 4795 | aL1L 4796 | aL2L 4797 | aL1L 4798 | aL3L 4799 | aL3L 4800 | aL1L 4801 | aL1L 4802 | aL1L 4803 | aL1L 4804 | aL0L 4805 | aL0L 4806 | aL1L 4807 | aL1L 4808 | aL1L 4809 | aL0L 4810 | aL1L 4811 | aL1L 4812 | aL1L 4813 | aL2L 4814 | aL1L 4815 | aL3L 4816 | aL2L 4817 | aL0L 4818 | aL1L 4819 | aL1L 4820 | aL0L 4821 | aL1L 4822 | aL1L 4823 | aL1L 4824 | aL1L 4825 | aL1L 4826 | aL1L 4827 | aL1L 4828 | aL1L 4829 | aL1L 4830 | aL1L 4831 | aL1L 4832 | aL1L 4833 | aL0L 4834 | aL1L 4835 | aL1L 4836 | aL1L 4837 | aL1L 4838 | aL0L 4839 | aL1L 4840 | aL1L 4841 | aL1L 4842 | aL1L 4843 | aL1L 4844 | aL1L 4845 | aL1L 4846 | aL1L 4847 | aL1L 4848 | aL1L 4849 | aL1L 4850 | aL3L 4851 | aL1L 4852 | aL1L 
4853 | aL1L 4854 | aL1L 4855 | aL1L 4856 | aL0L 4857 | aL1L 4858 | aL1L 4859 | aL1L 4860 | aL1L 4861 | aL1L 4862 | aL1L 4863 | aL1L 4864 | aL2L 4865 | aL1L 4866 | aL1L 4867 | aL3L 4868 | aL0L 4869 | aL1L 4870 | aL0L 4871 | aL0L 4872 | aL1L 4873 | aL0L 4874 | aL1L 4875 | aL1L 4876 | aL1L 4877 | aL1L 4878 | aL1L 4879 | aL0L 4880 | aL0L 4881 | aL1L 4882 | aL1L 4883 | aL1L 4884 | aL3L 4885 | aL1L 4886 | aL1L 4887 | aL0L 4888 | aL1L 4889 | aL1L 4890 | aL0L 4891 | aL1L 4892 | aL1L 4893 | aL1L 4894 | aL1L 4895 | aL1L 4896 | aL1L 4897 | aL0L 4898 | aL1L 4899 | aL1L 4900 | aL0L 4901 | aL2L 4902 | aL1L 4903 | aL1L 4904 | aL1L 4905 | aL1L 4906 | aL1L 4907 | aL0L 4908 | aL0L 4909 | aL1L 4910 | aL1L 4911 | aL1L 4912 | aL1L 4913 | aL0L 4914 | aL1L 4915 | aL1L 4916 | aL1L 4917 | aL1L 4918 | aL3L 4919 | aL0L 4920 | aL1L 4921 | aL0L 4922 | aL0L 4923 | aL0L 4924 | aL0L 4925 | aL1L 4926 | aL1L 4927 | aL1L 4928 | aL1L 4929 | aL1L 4930 | aL0L 4931 | aL0L 4932 | aL2L 4933 | aL0L 4934 | aL1L 4935 | aL0L 4936 | aL1L 4937 | aL1L 4938 | aL0L 4939 | aL1L 4940 | aL1L 4941 | aL0L 4942 | aL1L 4943 | aL1L 4944 | aL1L 4945 | aL1L 4946 | aL1L 4947 | aL0L 4948 | aL0L 4949 | aL2L 4950 | aL1L 4951 | aL3L 4952 | aL2L 4953 | aL0L 4954 | aL1L 4955 | aL1L 4956 | aL0L 4957 | aL1L 4958 | aL0L 4959 | aL1L 4960 | aL1L 4961 | aL1L 4962 | aL1L 4963 | aL1L 4964 | aL1L 4965 | aL1L 4966 | aL1L 4967 | aL1L 4968 | aL1L 4969 | aL3L 4970 | aL1L 4971 | aL1L 4972 | aL1L 4973 | aL1L 4974 | aL0L 4975 | aL1L 4976 | aL1L 4977 | aL1L 4978 | aL1L 4979 | aL1L 4980 | aL1L 4981 | aL1L 4982 | aL1L 4983 | aL1L 4984 | aL1L 4985 | aL1L 4986 | aL3L 4987 | aL1L 4988 | aL1L 4989 | aL1L 4990 | aL1L 4991 | aL1L 4992 | aL1L 4993 | aL1L 4994 | aL1L 4995 | aL1L 4996 | aL1L 4997 | aL0L 4998 | aL1L 4999 | aL1L 5000 | aL1L 5001 | aL0L 5002 | aL1L 5003 | aL2L 5004 | aL1L 5005 | aL1L 5006 | aL1L 5007 | aL1L 5008 | aL1L 5009 | aL0L 5010 | aL1L 5011 | aL1L 5012 | aL1L 5013 | aL1L 5014 | aL1L 5015 | aL1L 5016 | aL1L 5017 | aL2L 5018 | aL0L 5019 | aL1L 5020 | aL3L 5021 | aL1L 5022 | aL1L 5023 | aL0L 5024 | aL1L 5025 | aL0L 5026 | aL0L 5027 | aL1L 5028 | aL1L 5029 | aL1L 5030 | aL1L 5031 | aL1L 5032 | aL1L 5033 | aL0L 5034 | aL1L 5035 | aL0L 5036 | aL0L 5037 | aL3L 5038 | aL1L 5039 | aL1L 5040 | aL0L 5041 | aL1L 5042 | aL1L 5043 | aL1L 5044 | aL1L 5045 | aL1L 5046 | aL1L 5047 | aL0L 5048 | aL1L 5049 | aL1L 5050 | aL1L 5051 | aL1L 5052 | aL0L 5053 | aL1L 5054 | aL0L 5055 | aL1L 5056 | aL1L 5057 | aL0L 5058 | aL1L 5059 | aL1L 5060 | aL1L 5061 | aL1L 5062 | aL0L 5063 | aL1L 5064 | aL1L 5065 | aL0L 5066 | aL1L 5067 | aL1L 5068 | aL1L 5069 | aL1L 5070 | aL1L 5071 | aL0L 5072 | aL0L 5073 | aL1L 5074 | aL1L 5075 | aL1L 5076 | aL1L 5077 | aL1L 5078 | aL1L 5079 | aL1L 5080 | aL1L 5081 | aL1L 5082 | aL1L 5083 | aL1L 5084 | aL1L 5085 | aL1L 5086 | aL1L 5087 | aL1L 5088 | aL3L 5089 | aL1L 5090 | aL1L 5091 | aL0L 5092 | aL1L 5093 | aL1L 5094 | aL1L 5095 | aL1L 5096 | aL1L 5097 | aL1L 5098 | aL1L 5099 | aL1L 5100 | aL1L 5101 | aL1L 5102 | aL2L 5103 | aL1L 5104 | aL0L 5105 | aL2L 5106 | aL1L 5107 | aL1L 5108 | aL1L 5109 | aL1L 5110 | aL1L 5111 | aL1L 5112 | aL1L 5113 | aL1L 5114 | aL1L 5115 | aL1L 5116 | aL1L 5117 | aL0L 5118 | aL1L 5119 | aL2L 5120 | aL1L 5121 | aL3L 5122 | aL2L 5123 | aL0L 5124 | aL1L 5125 | aL1L 5126 | aL1L 5127 | aL1L 5128 | aL1L 5129 | aL1L 5130 | aL1L 5131 | aL1L 5132 | aL1L 5133 | aL1L 5134 | aL1L 5135 | aL0L 5136 | aL2L 5137 | aL1L 5138 | aL1L 5139 | aL2L 5140 | aL1L 5141 | aL1L 5142 | aL1L 5143 | aL1L 5144 | aL1L 5145 | aL0L 5146 | aL1L 5147 | aL1L 5148 | aL1L 
5149 | aL1L 5150 | aL1L 5151 | aL0L 5152 | aL0L 5153 | aL1L 5154 | aL0L 5155 | aL1L 5156 | aL2L 5157 | aL1L 5158 | aL1L 5159 | aL1L 5160 | aL1L 5161 | aL0L 5162 | aL0L 5163 | aL1L 5164 | aL1L 5165 | aL1L 5166 | aL1L 5167 | aL1L 5168 | aL1L 5169 | aL1L 5170 | aL1L 5171 | aL1L 5172 | aL3L 5173 | aL3L 5174 | aL0L 5175 | aL1L 5176 | aL0L 5177 | aL1L 5178 | aL1L 5179 | aL1L 5180 | aL1L 5181 | aL1L 5182 | aL1L 5183 | aL1L 5184 | aL1L 5185 | aL1L 5186 | aL1L 5187 | aL1L 5188 | aL1L 5189 | aL1L 5190 | aL0L 5191 | aL1L 5192 | aL1L 5193 | aL1L 5194 | aL1L 5195 | aL0L 5196 | aL0L 5197 | aL1L 5198 | aL1L 5199 | aL1L 5200 | aL1L 5201 | aL1L 5202 | aL1L 5203 | aL1L 5204 | aL1L 5205 | aL1L 5206 | aL3L 5207 | aL3L 5208 | aL0L 5209 | aL1L 5210 | aL1L 5211 | aL1L 5212 | aL1L 5213 | aL1L 5214 | aL1L 5215 | aL1L 5216 | aL1L 5217 | aL0L 5218 | aL0L 5219 | aL1L 5220 | aL1L 5221 | aL2L 5222 | aL1L 5223 | aL1L 5224 | aL2L 5225 | aL1L 5226 | aL1L 5227 | aL0L 5228 | aL1L 5229 | aL1L 5230 | aL1L 5231 | aL1L 5232 | aL1L 5233 | aL1L 5234 | aL1L 5235 | aL1L 5236 | aL0L 5237 | aL0L 5238 | aL2L 5239 | aL0L 5240 | aL1L 5241 | aL0L 5242 | aL1L 5243 | aL1L 5244 | aL1L 5245 | aL1L 5246 | aL1L 5247 | aL1L 5248 | aL0L 5249 | aL1L 5250 | aL1L 5251 | aL1L 5252 | aL1L 5253 | aL1L 5254 | aL1L 5255 | aL1L 5256 | aL0L 5257 | aL1L 5258 | aL0L 5259 | aL1L 5260 | aL1L 5261 | aL1L 5262 | aL1L 5263 | aL0L 5264 | aL1L 5265 | aL1L 5266 | aL1L 5267 | aL1L 5268 | aL1L 5269 | aL1L 5270 | aL1L 5271 | aL1L 5272 | aL1L 5273 | aL0L 5274 | aL1L 5275 | aL3L 5276 | aL1L 5277 | aL1L 5278 | aL1L 5279 | aL1L 5280 | aL1L 5281 | aL0L 5282 | aL1L 5283 | aL1L 5284 | aL1L 5285 | aL1L 5286 | aL1L 5287 | aL1L 5288 | aL1L 5289 | aL2L 5290 | aL1L 5291 | aL1L 5292 | aL3L 5293 | aL1L 5294 | aL1L 5295 | aL1L 5296 | aL1L 5297 | aL1L 5298 | aL1L 5299 | aL1L 5300 | aL1L 5301 | aL1L 5302 | aL1L 5303 | aL1L 5304 | aL1L 5305 | aL0L 5306 | aL1L 5307 | aL0L 5308 | aL0L 5309 | aL2L 5310 | aL1L 5311 | aL1L 5312 | aL0L 5313 | aL1L 5314 | aL1L 5315 | aL1L 5316 | aL1L 5317 | aL1L 5318 | aL0L 5319 | aL0L 5320 | aL0L 5321 | aL0L 5322 | aL1L 5323 | aL1L 5324 | aL1L 5325 | aL1L 5326 | aL1L 5327 | aL1L 5328 | aL1L 5329 | aL1L 5330 | aL1L 5331 | aL1L 5332 | aL1L 5333 | aL1L 5334 | aL1L 5335 | aL1L 5336 | aL1L 5337 | aL1L 5338 | aL1L 5339 | aL0L 5340 | aL1L 5341 | aL0L 5342 | aL0L 5343 | aL0L 5344 | aL1L 5345 | aL1L 5346 | aL1L 5347 | aL1L 5348 | aL1L 5349 | aL1L 5350 | aL1L 5351 | aL1L 5352 | aL0L 5353 | aL1L 5354 | aL1L 5355 | aL1L 5356 | aL1L 5357 | aL1L 5358 | aL1L 5359 | aL1L 5360 | aL3L 5361 | aL1L 5362 | aL1L 5363 | aL1L 5364 | aL0L 5365 | aL1L 5366 | aL1L 5367 | aL1L 5368 | aL1L 5369 | aL1L 5370 | aL1L 5371 | aL1L 5372 | aL1L 5373 | aL0L 5374 | aL1L 5375 | aL1L 5376 | aL1L 5377 | aL3L 5378 | aL0L 5379 | aL1L 5380 | aL1L 5381 | aL1L 5382 | aL1L 5383 | aL1L 5384 | aL1L 5385 | aL1L 5386 | aL1L 5387 | aL1L 5388 | aL1L 5389 | aL1L 5390 | aL1L 5391 | aL1L 5392 | aL1L 5393 | aL1L 5394 | aL3L 5395 | aL1L 5396 | aL1L 5397 | aL1L 5398 | aL0L 5399 | aL0L 5400 | aL1L 5401 | aL1L 5402 | aL1L 5403 | aL1L 5404 | aL1L 5405 | aL1L 5406 | aL1L 5407 | aL1L 5408 | aL1L 5409 | aL1L 5410 | aL1L 5411 | aL3L 5412 | aL1L 5413 | aL1L 5414 | aL0L 5415 | aL1L 5416 | aL1L 5417 | aL0L 5418 | aL1L 5419 | aL1L 5420 | aL1L 5421 | aL1L 5422 | aL1L 5423 | aL1L 5424 | aL0L 5425 | aL1L 5426 | aL0L 5427 | aL1L 5428 | aL3L 5429 | aL1L 5430 | aL1L 5431 | aL1L 5432 | aL0L 5433 | aL1L 5434 | aL1L 5435 | aL1L 5436 | aL1L 5437 | aL1L 5438 | aL1L 5439 | aL1L 5440 | aL1L 5441 | aL1L 5442 | aL2L 5443 | aL1L 5444 | aL1L 
5445 | aL3L 5446 | aL1L 5447 | aL1L 5448 | aL1L 5449 | aL1L 5450 | aL1L 5451 | aL0L 5452 | aL1L 5453 | aL1L 5454 | aL1L 5455 | aL1L 5456 | aL1L 5457 | aL1L 5458 | aL1L 5459 | aL1L 5460 | aL0L 5461 | aL3L 5462 | aL3L 5463 | aL0L 5464 | aL1L 5465 | aL1L 5466 | aL1L 5467 | aL1L 5468 | aL1L 5469 | aL1L 5470 | aL1L 5471 | aL1L 5472 | aL1L 5473 | aL1L 5474 | aL1L 5475 | aL1L 5476 | aL2L 5477 | aL0L 5478 | aL1L 5479 | aL0L 5480 | aL1L 5481 | aL1L 5482 | aL1L 5483 | aL1L 5484 | aL1L 5485 | aL0L 5486 | aL1L 5487 | aL0L 5488 | aL1L 5489 | aL1L 5490 | aL1L 5491 | aL1L 5492 | aL1L 5493 | aL2L 5494 | aL1L 5495 | aL3L 5496 | aL2L 5497 | aL1L 5498 | aL1L 5499 | aL1L 5500 | aL0L 5501 | aL1L 5502 | aL0L 5503 | aL1L 5504 | aL1L 5505 | aL1L 5506 | aL0L 5507 | aL1L 5508 | aL0L 5509 | aL1L 5510 | aL1L 5511 | aL1L 5512 | aL1L 5513 | aL3L 5514 | aL1L 5515 | aL1L 5516 | aL1L 5517 | aL0L 5518 | aL0L 5519 | aL1L 5520 | aL1L 5521 | aL1L 5522 | aL1L 5523 | aL1L 5524 | aL1L 5525 | aL1L 5526 | aL1L 5527 | aL1L 5528 | aL0L 5529 | aL1L 5530 | aL3L 5531 | aL0L 5532 | aL1L 5533 | aL0L 5534 | aL1L 5535 | aL0L 5536 | aL1L 5537 | aL1L 5538 | aL1L 5539 | aL1L 5540 | aL1L 5541 | aL1L 5542 | aL1L 5543 | aL0L 5544 | aL1L 5545 | aL1L 5546 | aL1L 5547 | aL0L 5548 | aL1L 5549 | aL1L 5550 | aL1L 5551 | aL0L 5552 | aL0L 5553 | aL1L 5554 | aL1L 5555 | aL1L 5556 | aL1L 5557 | aL1L 5558 | aL1L 5559 | aL1L 5560 | aL1L 5561 | aL1L 5562 | aL0L 5563 | aL0L 5564 | aL2L 5565 | aL0L 5566 | aL1L 5567 | aL1L 5568 | aL0L 5569 | aL1L 5570 | aL1L 5571 | aL1L 5572 | aL1L 5573 | aL1L 5574 | aL0L 5575 | aL1L 5576 | aL0L 5577 | aL1L 5578 | aL0L 5579 | aL1L 5580 | aL1L 5581 | aL0L 5582 | aL0L 5583 | aL1L 5584 | aL0L 5585 | aL1L 5586 | aL1L 5587 | aL1L 5588 | aL1L 5589 | aL0L 5590 | aL1L 5591 | aL1L 5592 | aL1L 5593 | aL1L 5594 | aL1L 5595 | aL1L 5596 | aL0L 5597 | aL3L 5598 | aL2L 5599 | aL1L 5600 | aL1L 5601 | aL1L 5602 | aL1L 5603 | aL1L 5604 | aL1L 5605 | aL1L 5606 | aL0L 5607 | aL1L 5608 | aL1L 5609 | aL1L 5610 | aL1L 5611 | aL1L 5612 | aL1L 5613 | aL0L 5614 | aL1L 5615 | aL1L 5616 | aL0L 5617 | aL1L 5618 | aL1L 5619 | aL1L 5620 | aL1L 5621 | aL1L 5622 | aL1L 5623 | aL1L 5624 | aL1L 5625 | aL1L 5626 | aL1L 5627 | aL1L 5628 | aL1L 5629 | aL1L 5630 | aL0L 5631 | aL1L 5632 | aL3L 5633 | aL0L 5634 | aL1L 5635 | aL1L 5636 | aL1L 5637 | aL1L 5638 | aL1L 5639 | aL1L 5640 | aL1L 5641 | aL1L 5642 | aL1L 5643 | aL1L 5644 | aL1L 5645 | aL1L 5646 | aL2L 5647 | aL1L 5648 | aL1L 5649 | aL3L 5650 | aL1L 5651 | aL1L 5652 | aL1L 5653 | aL1L 5654 | aL0L 5655 | aL0L 5656 | aL1L 5657 | aL1L 5658 | aL1L 5659 | aL1L 5660 | aL1L 5661 | aL1L 5662 | aL1L 5663 | aL1L 5664 | aL1L 5665 | aL0L 5666 | aL3L 5667 | aL0L 5668 | aL1L 5669 | aL0L 5670 | aL1L 5671 | aL0L 5672 | aL0L 5673 | aL1L 5674 | aL1L 5675 | aL1L 5676 | aL1L 5677 | aL1L 5678 | aL1L 5679 | aL0L 5680 | aL2L 5681 | aL1L 5682 | aL1L 5683 | aL1L 5684 | aL1L 5685 | aL1L 5686 | aL1L 5687 | aL1L 5688 | aL1L 5689 | aL1L 5690 | aL1L 5691 | aL1L 5692 | aL1L 5693 | aL0L 5694 | aL0L 5695 | aL1L 5696 | aL1L 5697 | aL1L 5698 | aL1L 5699 | aL1L 5700 | aL3L 5701 | aL0L 5702 | aL1L 5703 | aL1L 5704 | aL0L 5705 | aL1L 5706 | aL1L 5707 | aL1L 5708 | aL0L 5709 | aL0L 5710 | aL0L 5711 | aL0L 5712 | aL0L 5713 | aL1L 5714 | aL2L 5715 | aL1L 5716 | aL1L 5717 | aL1L 5718 | aL1L 5719 | aL1L 5720 | aL1L 5721 | aL1L 5722 | aL1L 5723 | aL1L 5724 | aL1L 5725 | aL1L 5726 | aL0L 5727 | aL0L 5728 | aL0L 5729 | aL1L 5730 | aL1L 5731 | aL1L 5732 | aL0L 5733 | aL1L 5734 | aL3L 5735 | aL0L 5736 | aL1L 5737 | aL0L 5738 | aL1L 5739 | aL1L 5740 | aL1L 
5741 | aL1L 5742 | aL1L 5743 | aL1L 5744 | aL1L 5745 | aL1L 5746 | aL1L 5747 | aL1L 5748 | aL2L 5749 | aL0L 5750 | aL1L 5751 | aL0L 5752 | aL1L 5753 | aL1L 5754 | aL1L 5755 | aL1L 5756 | aL0L 5757 | aL0L 5758 | aL1L 5759 | aL1L 5760 | aL1L 5761 | aL1L 5762 | aL1L 5763 | aL1L 5764 | aL0L 5765 | aL1L 5766 | aL1L 5767 | aL1L 5768 | aL3L 5769 | aL0L 5770 | aL1L 5771 | aL1L 5772 | aL1L 5773 | aL1L 5774 | aL1L 5775 | aL1L 5776 | aL1L 5777 | aL1L 5778 | aL1L 5779 | aL0L 5780 | aL1L 5781 | aL1L 5782 | aL2L 5783 | aL1L 5784 | aL1L 5785 | aL3L 5786 | aL1L 5787 | aL1L 5788 | aL1L 5789 | aL1L 5790 | aL1L 5791 | aL1L 5792 | aL1L 5793 | aL1L 5794 | aL1L 5795 | aL1L 5796 | aL1L 5797 | aL0L 5798 | aL0L 5799 | atp15 5800 | ba. --------------------------------------------------------------------------------
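A minimal sketch of how this pickled file could be inspected, assuming the tuple layout used elsewhere in this repository (data matrix as the first element); the path and the latin1 encoding workaround (common for pickles written under Python 2) are assumptions, not something the file itself guarantees:

    import pickle
    import numpy as np

    # Read the protocol-0 pickle stream shown above; encoding='latin1'
    # is a common workaround for pickles written under Python 2.
    with open('datasets/tumor', 'rb') as f:
        a = pickle.load(f, encoding='latin1')

    # Assumed layout: the first element holds the sample matrix, with one
    # categorical integer entry per variable (values 0-3 in this file).
    X = np.asarray(a[0]).astype(int)
    print(X.shape, X.min(), X.max())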