├── A_opt.py ├── A_optimal_diagonals.ipynb ├── LICENSE ├── README.md ├── analysis.ipynb ├── diffnet.py ├── docs ├── developers.pdf └── developers.tex ├── examples.py ├── examples ├── const_rel_error_net.pkl ├── random_net.pkl └── uniform_net.pkl ├── graph.py ├── issues └── 001 │ ├── issue.ipynb │ └── sij_sym.npy ├── netbfe.ipynb ├── netbfe.py └── test_diffnet.py /A_opt.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import cvxopt 3 | from cvxopt import blas, lapack, solvers, matrix, spmatrix, misc 4 | 5 | def upper_index( i, j, K): 6 | ''' 7 | Return the position of the tuple (i,j) in the sequence (0,0), (0,1), ... 8 | (0,K-1), (1,1), ... (K-1,K-1). i.e., 0 tol: 495 | print 'dx=' 496 | print dx 497 | print x 498 | print xp 499 | if dy > tol: 500 | print 'dy=' 501 | print dy 502 | print y 503 | print yp 504 | if dz > tol: 505 | print 'dz=' 506 | print dz 507 | print z 508 | print zp 509 | if dx > tol or dy > tol or dz > tol: 510 | for i, (r, rti) in enumerate( zip(ris, rtis)): 511 | print 'r[%d]=' % i 512 | print r 513 | print 'rti[%d]=' % i 514 | print rti 515 | print 'rti.T*r=' 516 | print rti.T*r 517 | for i, d in enumerate( ds): 518 | print 'd[%d]=%g' % (i, d) 519 | print 'x0, y0, z0=' 520 | print x0 521 | print y0 522 | print z0 523 | print Bm0 524 | 525 | ### 526 | # END of kkt_solver. 527 | ### 528 | 529 | return kkt_solver 530 | 531 | def Aopt_Gfunc( si2, x, y, alpha=1.0, beta=0.0, trans='N'): 532 | ''' 533 | Compute 534 | 535 | y := alpha G x + beta y if trans=='N' 536 | 537 | and 538 | 539 | y := alpha G^t x + beta y if trans!='N' 540 | 541 | Let x = (n11, n12, ... nKK, u1, u2, ..., uK)^t 542 | a G x + b y = -a ( n11, n12, ..., nKK, 543 | vec( F(n), 0, 544 | 0, u1 ), 545 | vec( F(n), 0, 546 | 0, u2 ), 547 | ... ) + b y 548 | 549 | Let x = (x11, x12, ...., xKK, x_1, x_2, ... x_K), where 550 | x_i are (K+1)x(K+1) matrices. 551 | a Gt x + b y = -a ( x11 + vec( V11).vec( sum_i x_i), 552 | x12 + vec( V12).vec( sum_i x_i), 553 | ... 554 | xKK + vec( VKK).vec( sum_i x_i), 555 | x_{1,K+1,K+1}, 556 | ... 557 | x_{K,K+1,K+1} ) + b y 558 | ''' 559 | K = si2.size[0] 560 | hkkp1 = K*(K+1)/2 561 | kp1sqr = (K+1)*(K+1) 562 | if 'N'==trans: 563 | u = alpha*x[-K:] 564 | n = tri2symm( x[:-K], K) 565 | F = Fisher_matrix( si2, alpha*n) 566 | y[:hkkp1] = -alpha*x[:hkkp1] + beta*y[:hkkp1] 567 | Fu = matrix( 0., (K+1, K+1)) 568 | Fu[:K,:K] = F 569 | start = hkkp1 570 | for i in xrange(K): 571 | Fu[K,K] = u[i] 572 | y[start:start+kp1sqr] = -Fu[:] + beta*y[start:start+kp1sqr] 573 | start += kp1sqr 574 | if 'T'==trans: 575 | xab = alpha*x[:hkkp1] 576 | xKK = alpha*x[hkkp1+kp1sqr-1::kp1sqr] 577 | start = hkkp1 578 | xsum = matrix( 0., (kp1sqr, 1)) 579 | for i in xrange(K): 580 | xsum += x[start:start+kp1sqr] 581 | start += kp1sqr 582 | xsum *= alpha 583 | xsum = matrix( xsum, (K+1, K+1)) 584 | ab = 0 585 | for a in xrange(K): 586 | xab[ab] += si2[a,a]*xsum[a,a] 587 | ab += 1 588 | for b in xrange(a+1, K): 589 | xab[ab] += si2[b,a]*(xsum[a,a] + xsum[b,b] - 2*xsum[b,a]) 590 | ab += 1 591 | y[:hkkp1] = -xab + beta*y[:hkkp1] 592 | y[hkkp1:] = -xKK + beta*y[hkkp1:] 593 | 594 | def Aopt_GhA( si2, nsofar=None, di2=None, G_as_function=False): 595 | '''Return the G, h, and A matrix for the cone programming to solve the 596 | A-optimal problem. 
597 | 598 | Args: 599 | 600 | si2: symmetric KxK matrix, si2[i,j] = 1/s_{ij}^2 601 | 602 | nsofar: symmetric KxK matrix, n_{ij} for existing samples 603 | 604 | di2: length K vector, di2[i] = 1/delta_i^2 for the uncertainty on 605 | quantity x_i as measured by other means. 606 | 607 | Returns: 608 | 609 | G: a K*(K+1)/2 + K*(K+1)^2 by K(K+1)/2 + K matrix or a function 610 | (if G_as_function==True). 611 | 612 | h: a vector of length K*(K+1)/2 + K*(K+1)^2. 613 | 614 | A: A 1 by K*(K+1)/2 + K matrix. 615 | 616 | ''' 617 | K = si2.size[0] 618 | 619 | if G_as_function: 620 | def G( x, y, alpha=1., beta=0., trans='N'): 621 | return Aopt_Gfunc( si2, x, y, alpha, beta, trans) 622 | else: 623 | nrows, ncols = K*(K+1)/2 + K*(K+1)*(K+1), K*(K+1)/2 + K 624 | 625 | Gs = [] 626 | # Gs = np.zeros( nrows*ncols) 627 | # -I_{K*(K+1)/2} identity matrix 628 | Gs.extend( [ ( i, i, -1. ) for i in xrange(K*(K+1)/2) ]) 629 | # Gs[:K*(K+1)/2*nrows:nrows+1] = -1. 630 | 631 | skip = K*(K+1)/2 632 | # vec( [ V_{ij}, 0; 0, 0 ]) 633 | col = 0 634 | for i in xrange(K): 635 | # Skip the first K*(K+1)/2 rows 636 | Gs.extend( [ (skip+i*(K+2)+t*(K+1)*(K+1), col, -si2[i,i]) 637 | for t in xrange( K) ]) 638 | # offset = col*nrows + K*(K+1)/2 639 | # Gs[offset+i*(K+2) : (col+1)*nrows : (K+1)*(K+1)] = si2[i,i] 640 | col += 1 641 | for j in xrange(i+1, K): 642 | Gs.extend( [ (skip+i*(K+2) + t*(K+1)*(K+1), col, -si2[i,j]) 643 | for t in xrange(K) ]) 644 | Gs.extend( [ (skip+j*(K+2) + t*(K+1)*(K+1), col, -si2[i,j]) 645 | for t in xrange(K) ]) 646 | Gs.extend( [ (skip+i*(K+1) + j + t*(K+1)*(K+1), col, si2[i,j]) 647 | for t in xrange(K) ]) 648 | Gs.extend( [ (skip+j*(K+1) + i + t*(K+1)*(K+1), col, si2[i,j]) 649 | for t in xrange(K) ]) 650 | col += 1 651 | 652 | # vec( [ 0, 0; 0, 1 ]) 653 | 654 | Gs.extend( [ (skip+(i+1)*(K+1)*(K+1)-1, K*(K+1)/2+i, -1.) 655 | for i in xrange(K) ]) 656 | I, J, X = [ [ ijx[p] for ijx in Gs ] for p in xrange(3) ] 657 | G = spmatrix( X, I, J, (nrows, ncols)) 658 | 659 | # h vector. 660 | h = matrix( 0., (K*(K+1)/2 + K*(K+1)*(K+1), 1)) 661 | # F := Fisher matrix 662 | if nsofar is None: 663 | F = matrix( 0., (K, K)) 664 | else: 665 | F = Fisher_matrix( si2, nsofar) 666 | if di2 is not None: F[::K+1] += matrix(di2[:]) 667 | elif np.all( np.diag( si2) == 0): 668 | # If the diagonal elements of 1/s[i,i]^2 are all zero, the Fisher 669 | # information matrix is singular, and the quantities are determined 670 | # up to a constant. In this case, we constrain the 671 | # mean of the quantities. This corresponds to adding a constant 672 | # omega to the Fisher matrix. The optimal allocation does not 673 | # depend on the value of omega. 674 | omega = 1. 675 | F += omega 676 | 677 | row = K*(K+1)/2 678 | for i in xrange(K): 679 | for j in xrange(K): 680 | h[row + j*(K+1) : row + (j+1)*(K+1)-1] = F[:,j] 681 | h[row + (i+1)*(K+1)-1] = 1. # e_i^t 682 | h[row + K*(K+1) + i] = 1. # e_i 683 | row += (K+1)*(K+1) 684 | h = matrix( h, (len(h), 1)) 685 | 686 | # A matrix 687 | A = matrix( np.concatenate( [ np.ones( K*(K+1)/2), np.zeros( K) ]), 688 | (1, K*(K+1)/2 + K)) 689 | 690 | return G, h, A 691 | 692 | def A_optimize_fast( sij, N=1., nsofar=None, delta=None, 693 | only_include_measurements=None, 694 | maxiters=100, feastol=1e-6): 695 | ''' 696 | Find the A-optimal of the difference network that minimizes the trace of 697 | the covariance matrix. This corresponds to minimizing the average error. 
698 | 699 | In an iterative optimization of the difference network, the 700 | optimal allocation is updated with the estimate of s_{ij}, and we 701 | need to allocate the next iteration of sampling based on what has 702 | already been sampled for each pair. 703 | 704 | This implementation uses a customized KKT solver. The time complexity is 705 | O(K^5), memory complexity is O(K^4). 706 | 707 | Args: 708 | 709 | sij: KxK symmetric matrix, where the measurement variance of the 710 | difference between i and j is proportional to s[i][j]^2 = 711 | s[j][i]^2, and the measurement variance of i is proportional to 712 | s[i][i]^2. 713 | 714 | nadd: float, Nadd gives the additional number of samples to be collected in 715 | the next iteration. 716 | 717 | nsofar: KxK symmetric matrix, where nsofar[i,j] is the number of samples 718 | that has already been collected for (i,j) pair. 719 | 720 | delta: a length K vector. delta[i] is the measurement uncertainty on the 721 | quantity x[i] from an independent experiment; if no independent experiment 722 | provides a value for x[i], delta[i] is set to numpy.infty. 723 | 724 | only_include_measurements: set of pairs, if not None, indicate which 725 | pairs should be considered in the optimal network. Any pair (i,j) not in 726 | the set will be excluded in the allocation (i.e. dn[i,j] = 0). The pair 727 | (i,j) in the set must be ordered so that i<=j. 728 | 729 | Return: 730 | 731 | KxK symmetric matrix of float, the (i,j) element of which gives the 732 | number of samples to be allocated to the measurement of (i,j) difference 733 | in the next iteration. 734 | ''' 735 | si2 = cvxopt.div( 1., sij**2) 736 | K = si2.size[0] 737 | 738 | if delta is not None: 739 | di2 = np.array( [ 1./delta[i]**2 if delta[i] is not None else 0. 740 | for i in xrange(K)]) 741 | else: 742 | di2 = None 743 | 744 | if only_include_measurements is not None: 745 | for i in xrange(K): 746 | for j in xrange(i, K): 747 | if not (i,j) in only_include_measurements: 748 | # Set the s[i,j] to infinity, thus excluding the pair. 749 | si2[i,j] = si2[j,i] = 0. 750 | 751 | Gm, hv, Am = Aopt_GhA( si2, nsofar, di2=di2, G_as_function=True) 752 | dims = dict( l = K*(K+1)/2, 753 | q = [], 754 | s = [K+1]*K ) 755 | 756 | cv = matrix( np.concatenate( [ np.zeros( K*(K+1)/2), np.ones( K) ]), 757 | (K*(K+1)/2 + K, 1)) 758 | bv = matrix( float(N), (1, 1)) 759 | 760 | def default_kkt_solver( W): 761 | return misc.kkt_ldl( Gm, dims, Am)(W) 762 | 763 | sol = solvers.conelp( cv, Gm, hv, dims, Am, bv, 764 | options=dict(maxiters=maxiters, 765 | feastol=feastol), 766 | kktsolver=lambda W: Aopt_KKT_solver( si2, W)) 767 | 768 | return conelp_solution_to_nij( sol['x'], K) 769 | 770 | def A_optimize_sdp( sij): 771 | ''' 772 | Find the A-optimal of the difference network that minimizes the trace of 773 | the covariance matrix. This corresponds to minimizing the average error. 774 | 775 | Args: 776 | 777 | sij: KxK symmetric matrix, where the measurement variance of the 778 | difference between i and j is proportional to s[i][j]^2 = 779 | s[j][i]^2, and the measurement variance of i is proportional to 780 | s[i][i]^2. 781 | 782 | Return: 783 | 784 | nij: symmetric matrix, where n[i][j] is the fraction of measurements 785 | to be performed for the difference between i and j, satisfying 786 | \sum_i n[i][i] + \sum_{i= 0 805 | # for k = 1,2,...,K 806 | # where M = K*(K+1)/2 are the number of types of measurements. 807 | # m index the measurements, m = (i,j). 
808 | # v_m is a length K measurement vector, where 809 | # v_{(i,i), a} = s_{ii}^{-1}\delta_{i,a} 810 | # v_{(i,j), a} = s_{ij]^{-1}\delta_{i,a} - s_{ij}^{-1}\delta_{j,a} 811 | # The matrix U_m = v_m.v_m^t is 812 | # U_{(i,i), (a,b)} = s_{ii}^{-2}\delta_{i,a}\delta_{i,b} 813 | # U_{(i,j), (a,b)} 814 | # = s_{ij}^{-2}(\delta_{i,a}\delta_{i,b} + \delta_{j,a}\delta_{j,b}) 815 | # - s_{ij}^{-2}(\delta_{i,a}\delta_{j,b} + \delta_{j,a}\delta_{i,b}) 816 | 817 | # G matrix, of dimension ((K+1)*(K+1), (M+K)). Each column is a 818 | # column-major vector representing the KxK matrix of U_m augmented 819 | # by a length K vector, hence the dimension (K+1)x(K+1). 820 | # Gs = [ matrix( 0., ((K+1)*(K+1), (M+K))) for k in xrange( K) ] 821 | G0 = [] 822 | hs = [ matrix( 0., (K+1, K+1)) for k in xrange( K) ] 823 | 824 | for i in xrange( K): 825 | # The index of matrix element (i,i) in column-major representation 826 | # of a (K+1)x(K+1) matrix is i*(K+1 + 1) 827 | # Gs[0][i*(K+2), i] = 1./(sij[i,i]*sij[i,i]) 828 | G0.append( (i*(K+2), i, -1./(sij[i,i]*sij[i,i]))) 829 | for j in xrange( i+1, K): 830 | m = measurement_index( i, j, K) 831 | # The index of matrix element (i,j) in column-major representation 832 | # of a (K+1)x(K+1) matrix is j*(K+1) + i 833 | v2 = 1./(sij[i,j]*sij[i,j]) 834 | # Gs[0][j*(K+1) + i, m] = Gs[0][i*(K+1) + j, m] = -v2 835 | G0.append( (j*(K+1) + i, m, v2)) 836 | G0.append( (i*(K+1) + j, m, v2)) 837 | # Gs[0][i*(K+2), m] = Gs[0][j*(K+2), m] = v2 838 | G0.append( (i*(K+2), m, -v2)) 839 | G0.append( (j*(K+2), m, -v2)) 840 | 841 | # G.(x, u) + h >=0 <=> -G.(x, u) <= h 842 | # Gs[0] *= -1. 843 | 844 | Gs = [] 845 | for k in xrange( K): 846 | # if (k>0): Gs[k][:,:M] = Gs[0][:,:M] 847 | # for the term u_k [ [0, 0], [0, 1] ] 848 | # Gs[k][-1, M+k] = -1. 849 | I = [ i for i, j, x in G0 ] + [ (K+1)*(K+1) - 1 ] 850 | J = [ j for i, j, x in G0 ] + [ M + k ] 851 | X = [ x for i, j, x in G0 ] + [ -1. ] 852 | Gs.append( spmatrix(X, I, J, ((K+1)*(K+1), M+K))) 853 | hs[k][k,-1] = hs[k][-1,k] = 1. 854 | 855 | # The constraint n >= 0, as G0.x <= h0 856 | # G0 = matrix( np.diag(np.concatenate( [ -np.ones( M), np.zeros( K) ]))) 857 | G0 = spmatrix( -np.ones( M), range( M), range( M), (M+K, M+K)) 858 | h0 = matrix( np.zeros( M + K)) 859 | 860 | # The constraint \sum_m n_m = 1. 861 | # A = matrix( [1.]*M + [0.]*K, (1, M + K) ) 862 | A = spmatrix( np.ones( M), np.zeros( M, dtype=int), range( M), (1, M+K)) 863 | b = matrix( 1., (1, 1) ) 864 | 865 | sol = cvxopt.solvers.sdp( c, G0, h0, Gs, hs, A, b) 866 | n = solution_to_nij( sol, K) 867 | 868 | return n 869 | 870 | def update_A_optimal_sdp( sij, nadd, nsofar, only_include_measurements=None): 871 | ''' 872 | In an iterative optimization of the difference network, the 873 | optimal allocation is updated with the estimate of s_{ij}, and we 874 | need to allocate the next iteration of sampling based on what has 875 | already been sampled for each pair. 876 | 877 | Args: 878 | 879 | sij: KxK symmetric matrix, where the measurement variance of the 880 | difference between i and j is proportional to s[i][j]^2 = 881 | s[j][i]^2, and the measurement variance of i is proportional to 882 | s[i][i]^2. 883 | nadd: float, Nadd gives the additional number of samples to be collected in 884 | the next iteration. 885 | nsofar: KxK symmetric matrix, where nsofar[i,j] is the number of samples 886 | that has already been collected for (i,j) pair. 887 | only_include_measurements: set of pairs, if not None, indicate which 888 | pairs should be considered in the optimal network. 
Any pair (i,j) not in 889 | the set will be excluded in the allocation (i.e. dn[i,j] = 0). The pair 890 | (i,j) in the set must be ordered so that i<=j. 891 | 892 | Return: 893 | 894 | KxK symmetric matrix of float, the (i,j) element of which gives the 895 | number of samples to be allocated to the measurement of (i,j) difference 896 | in the next iteration. 897 | ''' 898 | if not isinstance( sij, matrix): sij = matrix( sij) 899 | assert( sij.size[0] == sij.size[1]) 900 | K = sij.size[0] 901 | if only_include_measurements is None: 902 | M = K*(K+1)/2 903 | else: 904 | M = len(only_include_measurements) 905 | measure_indices = dict() 906 | for mid, (i,j) in enumerate( only_include_measurements): 907 | measure_indices[(i,j)] = mid 908 | 909 | # x = ( n, u ), where u=(u_1,u_2,...,u_K) is the dual variables. 910 | # We will minimize \sum_k u_k = c.x 911 | c = matrix( [0.]*M + [1.]*K ) 912 | 913 | # Subject to the following constraints 914 | # \sum_{m=1}^M (n_m + dn_m) [ [ v_m.v_m^t, 0 ], [0, 0] ] 915 | # + u_k [ [0, 0], [0, 1] ] + [ [0, e_k], [e_k^t, 0] ] >= 0 916 | # for k = 1,2,...,K 917 | # where M = K*(K+1)/2 are the number of types of measurements. 918 | # m index the measurements, m = (i,j). 919 | # v_m is a length K measurement vector, where 920 | # v_{(i,i), a} = s_{ii}^{-1}\delta_{i,a} 921 | # v_{(i,j), a} = s_{ij]^{-1}\delta_{i,a} - s_{ij}^{-1}\delta_{j,a} 922 | # The matrix U_m = v_m.v_m^t is 923 | # U_{(i,i), (a,b)} = s_{ii}^{-2}\delta_{i,a}\delta_{i,b} 924 | # U_{(i,j), (a,b)} 925 | # = s_{ij}^{-2}(\delta_{i,a}\delta_{i,b} + \delta_{j,a}\delta_{j,b}) 926 | # - s_{ij}^{-2}(\delta_{i,a}\delta_{j,b} + \delta_{j,a}\delta_{i,b}) 927 | # where \delta_{i,a} = 1 if i==a else 0 is the Kronecker delta. 928 | 929 | # G matrix, of dimension ((K+1)*(K+1), (M+K)). Each column is a 930 | # column-major vector representing the KxK matrix of U_m augmented 931 | # by a length K vector, hence the dimension (K+1)x(K+1). 932 | # Gs = [ matrix( 0., ((K+1)*(K+1), (M+K))) for k in xrange( K) ] 933 | G0 = [] 934 | hs = [ matrix( 0., (K+1, K+1)) for k in xrange( K) ] 935 | 936 | for i in xrange( K): 937 | # The index of matrix element (i,i) in column-major representation 938 | # of a (K+1)x(K+1) matrix is i*(K+1 + 1) 939 | v2 = 1./(sij[i,i]*sij[i,i]) 940 | if (only_include_measurements is not None): 941 | m = measure_indices.get( (i,i), None) 942 | if m is not None: 943 | # Gs[0][i*(K+2), m] = v2 944 | G0.append( (i*(K+2), m, -v2)) 945 | else: 946 | # Gs[0][i*(K+2), i] = v2 947 | G0.append( (i*(K+2), i, -v2)) 948 | hs[0][i,i] += nsofar[i,i]*v2 949 | for j in xrange( i+1, K): 950 | # The index of matrix element (i,j) in column-major representation 951 | # of a (K+1)x(K+1) matrix is j*(K+1) + i 952 | v2 = 1./(sij[i,j]*sij[i,j]) 953 | nv2 = nsofar[i,j]*v2 954 | hs[0][i,j] = hs[0][j,i] = -nv2 955 | hs[0][i,i] += nv2 956 | hs[0][j,j] += nv2 957 | if (only_include_measurements is not None): 958 | m = measure_indices.get( (i,j), None) 959 | if m is None: continue 960 | else: 961 | m = measurement_index( i, j, K) 962 | # Gs[0][j*(K+1) + i, m] = Gs[0][i*(K+1) + j, m] = -v2 963 | G0.append( (j*(K+1) + i, m, v2)) 964 | G0.append( (i*(K+1) + j, m, v2)) 965 | # Gs[0][i*(K+2), m] = Gs[0][j*(K+2), m] = v2 966 | G0.append( (i*(K+2), m, -v2)) 967 | G0.append( (j*(K+2), m, -v2)) 968 | 969 | # G.(x, u) + h >=0 <=> -G.(x, u) <= h 970 | # Gs[0] *= -1. 
971 | 972 | Gs = [] 973 | for k in xrange( K): 974 | if (k>0): 975 | # Gs[k][:,:M] = Gs[0][:,:M] 976 | hs[k][:K,:K] = hs[0][:K,:K] 977 | # for the term u_k [ [0, 0], [0, 1] ] 978 | # Gs[k][-1, M+k] = -1. 979 | I = [ i for i, j, x in G0 ] + [ (K+1)*(K+1) - 1 ] 980 | J = [ j for i, j, x in G0 ] + [ M + k ] 981 | X = [ x for i, j, x in G0 ] + [ -1. ] 982 | Gs.append( spmatrix(X, I, J, ((K+1)*(K+1), M+K))) 983 | hs[k][k,-1] = hs[k][-1,k] = 1. 984 | 985 | # The constraint dn >= 0, as G0.x <= h0 986 | # G0 = matrix( np.diag(np.concatenate( [ -np.ones( M), np.zeros( K) ]))) 987 | G0 = spmatrix( -np.ones( M), range(M), range(M), (M+K, M+K)) 988 | h0 = matrix( np.zeros( M + K)) 989 | 990 | # The constraint \sum_m dn_m = nadd. 991 | # A = matrix( [1.]*M + [0.]*K, (1, M + K) ) 992 | A = spmatrix( np.ones( M), np.zeros( M, dtype=int), range( M), (1, M+K)) 993 | b = matrix( float(nadd), (1, 1) ) 994 | 995 | sol = cvxopt.solvers.sdp( c, G0, h0, Gs, hs, A, b) 996 | dn = solution_to_nij( sol, K, only_include_measurements and measure_indices) 997 | 998 | return dn 999 | 1000 | def test_kkt_solver( ntrials=5, tol=1e-6): 1001 | K = 5 1002 | sij = matrix( np.random.rand( K*K), (K, K)) 1003 | sij = 0.5*(sij + sij.T) 1004 | 1005 | si2 = cvxopt.div( 1., sij**2) 1006 | G, h, A = Aopt_GhA( si2) 1007 | K = si2.size[0] 1008 | 1009 | dims = dict( l = K*(K+1)/2, 1010 | q = [], 1011 | s = [K+1]*K ) 1012 | 1013 | def default_solver( W): 1014 | return misc.kkt_ldl( G, dims, A)(W) 1015 | 1016 | def my_solver( W): 1017 | return Aopt_KKT_solver( si2, W) 1018 | 1019 | success = True 1020 | for t in xrange( ntrials): 1021 | x = matrix( 1*(np.random.rand( K*(K+1)/2+K) - 0.5), (K*(K+1)/2+K, 1)) 1022 | y = matrix( np.random.rand( 1), (1,1)) 1023 | z = matrix( 0.0, (K*(K+1)/2 + K*(K+1)*(K+1), 1)) 1024 | z[:K*(K+1)/2] = 5.*(np.random.rand( K*(K+1)/2) - 0.5) 1025 | offset = K*(K+1)/2 1026 | for i in xrange(K): 1027 | r = 10*(np.random.rand( (K+1)*(K+2)/2) - 0.3) 1028 | p = 0 1029 | for a in xrange(K+1): 1030 | for b in xrange(a, K+1): 1031 | z[offset + a*(K+1) + b] = r[p] 1032 | z[offset + b*(K+1) + a] = r[p] 1033 | p+=1 1034 | offset += (K+1)*(K+1) 1035 | 1036 | ds = matrix( 10*np.random.rand( K*(K+1)/2), (K*(K+1)/2, 1)) 1037 | rs = [ matrix(np.random.rand( (K+1)*(K+1)) - 0.3, (K+1, K+1)) 1038 | for i in xrange(K) ] 1039 | W = dict( d=ds, 1040 | di=cvxopt.div(1., ds), 1041 | r=rs, 1042 | rti=[ matrix( np.linalg.inv( np.array(r)), (K+1,K+1)).T 1043 | for r in rs ], 1044 | beta=[], 1045 | v=[]) 1046 | xp = x[:] 1047 | yp = y[:] 1048 | zp = z[:] 1049 | 1050 | default_f = default_solver( W) 1051 | my_f = my_solver( W) 1052 | default_f( x, y, z) 1053 | my_f( xp, yp, zp) 1054 | 1055 | dx = xp - x 1056 | dy = yp - y 1057 | offset = K*(K+1)/2 1058 | for i in xrange(K): 1059 | symmetrize_matrix( zp, K+1, offset) 1060 | symmetrize_matrix( z, K+1, offset) 1061 | offset += (K+1)*(K+1) 1062 | dz = zp - z 1063 | 1064 | dx, dy, dz = np.max(np.abs(dx)), np.max(np.abs(dy)), np.max(np.abs(dz)) 1065 | 1066 | if tol < np.max( [dx, dy, dz]): 1067 | print 'KKT solver FAILS: max(dx=%g, dy=%g, dz=%g) > tol = %g' % \ 1068 | (dx, dy, dz, tol) 1069 | success = False 1070 | print 'KKT solver succeeds: dx=%g, dy=%g, dz=%g' % (dx, dy, dz) 1071 | return success 1072 | 1073 | def test_Gfunc( ntrials=10, tol=1e-10): 1074 | K = 5 1075 | sij = matrix( np.random.rand( K*K), (K, K)) 1076 | sij = 0.5*(sij.T + sij) 1077 | si2 = cvxopt.div( 1., sij**2) 1078 | alpha = 1.5 1079 | beta = 0.25 1080 | G, h, A = Aopt_GhA( si2) 1081 | 1082 | success = True 1083 | 1084 | for i 
in xrange( ntrials): 1085 | trans = 'N' 1086 | nx = K*(K+1)/2+K 1087 | ny = K*(K+1)/2+K*(K+1)*(K+1) 1088 | x = matrix( np.random.rand( nx), (nx, 1)) 1089 | y = matrix( 1.e6*np.random.rand( ny), (ny, 1)) 1090 | 1091 | yp = y[:] 1092 | Aopt_Gfunc( si2, x, y, alpha, beta, trans) 1093 | blas.gemv( matrix(G), x, yp, 'N', alpha, beta) 1094 | 1095 | dy = np.max(np.abs(y - yp)) 1096 | if (dy > tol): 1097 | success = False 1098 | print 'G function FAILS for trans=N: dy=%g' % dy 1099 | else: 1100 | print 'G function succeeds for trans=N: dy=%g' % dy 1101 | 1102 | trans = 'T' 1103 | nx = K*(K+1)/2 + K*(K+1)*(K+1) 1104 | ny = K*(K+1)/2 + K 1105 | x = matrix( np.random.rand( nx), (nx, 1)) 1106 | y = matrix( 1.e6*np.random.rand( ny), (ny, 1)) 1107 | 1108 | for i in xrange(K): 1109 | start = K*(K+1)/2 + i*(K+1)*(K+1) 1110 | for a in xrange(K+1): 1111 | for b in xrange(a+1, K+1): 1112 | x[start+a*(K+1)+b] = x[start+b*(K+1)+a] 1113 | 1114 | yp = y[:] 1115 | Aopt_Gfunc( si2, x, y, alpha, beta, trans) 1116 | blas.gemv( matrix(G), x, yp, 'T', alpha, beta) 1117 | 1118 | dy = np.max(np.abs(y - yp)) 1119 | if (dy > tol): 1120 | success = False 1121 | print 'G function FAILS for trans=T: dy=%g' % dy 1122 | else: 1123 | print 'G function succeeds for trans=T: dy=%g' % dy 1124 | 1125 | return success 1126 | 1127 | def test_sumdR2( ntrials=10, tol=1e-9): 1128 | K = 40 1129 | import time 1130 | 1131 | tnaive = tfast = 0. 1132 | 1133 | success = True 1134 | for t in xrange(ntrials): 1135 | Ris = [ matrix(np.random.rand(K*K), (K,K)) for i in xrange(K) ] 1136 | for i in xrange(K): 1137 | Ris[i] = 0.5*(Ris[i].T + Ris[i]) 1138 | 1139 | tstart = time.time() 1140 | ddR2 = sumdR2( Ris, K) 1141 | tend = time.time() 1142 | tnaive += (tend - tstart) 1143 | 1144 | tstart = time.time() 1145 | ddR2p = sumdR2_aligned( Ris, K) 1146 | tend = time.time() 1147 | tfast += (tend - tstart) 1148 | 1149 | delta = np.max(np.abs(ddR2 - ddR2p)) 1150 | if (delta > tol): 1151 | success = False 1152 | print 'sum dR test FAILED: delta=%g > tol=%g' % (delta, tol) 1153 | else: 1154 | print 'sum dR test succeeds: delta=%g' % delta 1155 | print 'Timing for naive sum dR: %f seconds per call.' % (tnaive/ntrials) 1156 | print 'Timing for aligned sum dR: %f seconds per call.' % (tfast/ntrials) 1157 | return success 1158 | 1159 | def test_relative_only(): 1160 | K = 5 1161 | np.random.seed( 1) 1162 | sij = matrix( np.random.rand( K*K), (K,K)) 1163 | sij = 0.5*(sij + sij.T) 1164 | for i in range(K): sij[i,i] = np.inf 1165 | nij = A_optimize_fast( sij) 1166 | print nij 1167 | 1168 | def unit_test(): 1169 | test_Gfunc( ntrials=100) 1170 | test_kkt_solver( ntrials=100) 1171 | test_sumdR2() 1172 | 1173 | if __name__ == '__main__': 1174 | test_relative_only() 1175 | K = 200 1176 | sij = matrix( np.random.rand( K*K), (K, K)) 1177 | nsofar = matrix( 0.2*np.random.rand( K*K), (K, K)) 1178 | sij = 0.5*(sij + sij.T) 1179 | nsofar = 0.5*(nsofar + nsofar.T) 1180 | 1181 | if (False): 1182 | connectivity = 5 1183 | only_include_measurements = set() 1184 | for i in xrange( K): 1185 | js = i + np.floor((K-i)*np.random.rand(connectivity)).astype('int') 1186 | for j in js: 1187 | only_include_measurements.add( (i,j)) 1188 | else: 1189 | only_include_measurements = None 1190 | 1191 | N = 1. 1192 | 1193 | import time 1194 | tstart = time.time() 1195 | nij = A_optimize_fast( sij, N, nsofar, only_include_measurements) 1196 | tend = time.time() 1197 | tlapse = tend - tstart 1198 | print 'Fast A-optimize took %g seconds.' 
% tlapse 1199 | # print nij 1200 | 1201 | if (K>=80): 1202 | import sys 1203 | sys.exit() 1204 | 1205 | tstart = time.time() 1206 | nij0 = update_A_optimal_sdp( sij, N, nsofar, only_include_measurements) 1207 | tend = time.time() 1208 | tlapse = tend - tstart 1209 | print 'SDP A-optimize took %g seconds.' % tlapse 1210 | 1211 | print 'dn=', np.max(np.abs( nij0 - nij)) 1212 | -------------------------------------------------------------------------------- /A_optimal_diagonals.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "import diffnet as dn\n", 12 | "from cvxopt import matrix" 13 | ] 14 | }, 15 | { 16 | "cell_type": "code", 17 | "execution_count": 2, 18 | "metadata": { 19 | "collapsed": true 20 | }, 21 | "outputs": [], 22 | "source": [ 23 | "import numpy as np" 24 | ] 25 | }, 26 | { 27 | "cell_type": "code", 28 | "execution_count": 3, 29 | "metadata": { 30 | "collapsed": true 31 | }, 32 | "outputs": [], 33 | "source": [ 34 | "K = 5\n", 35 | "s = np.random.rand( K, K)\n", 36 | "s = 0.5*(s.T + s)\n", 37 | "# for i in range(K): s[i,i] = np.inf" 38 | ] 39 | }, 40 | { 41 | "cell_type": "code", 42 | "execution_count": 4, 43 | "metadata": {}, 44 | "outputs": [ 45 | { 46 | "data": { 47 | "text/plain": [ 48 | "array([[0.31606839, 0.89819236, 0.89703422, 0.30095392, 0.54763611],\n", 49 | " [0.89819236, 0.70860277, 0.77928054, 0.47375313, 0.64210208],\n", 50 | " [0.89703422, 0.77928054, 0.8733601 , 0.08316998, 0.12200628],\n", 51 | " [0.30095392, 0.47375313, 0.08316998, 0.41130463, 0.29626953],\n", 52 | " [0.54763611, 0.64210208, 0.12200628, 0.29626953, 0.63852698]])" 53 | ] 54 | }, 55 | "execution_count": 4, 56 | "metadata": {}, 57 | "output_type": "execute_result" 58 | } 59 | ], 60 | "source": [ 61 | "s" 62 | ] 63 | }, 64 | { 65 | "cell_type": "code", 66 | "execution_count": 5, 67 | "metadata": {}, 68 | "outputs": [ 69 | { 70 | "name": "stdout", 71 | "output_type": "stream", 72 | "text": [ 73 | " pcost dcost gap pres dres k/t\n", 74 | " 0: 0.0000e+00 -0.0000e+00 1e+02 4e+00 7e+02 1e+00\n", 75 | " 1: 3.3375e+00 3.6649e+00 1e+01 6e-01 1e+02 5e-01\n", 76 | " 2: 3.4839e+00 3.7049e+00 8e+00 4e-01 6e+01 3e-01\n", 77 | " 3: 3.4750e+00 3.5654e+00 3e+00 1e-01 2e+01 1e-01\n", 78 | " 4: 3.3033e+00 3.3647e+00 2e+00 7e-02 1e+01 8e-02\n", 79 | " 5: 3.3956e+00 3.4196e+00 1e+00 4e-02 6e+00 3e-02\n", 80 | " 6: 3.2196e+00 3.2352e+00 8e-01 2e-02 3e+00 2e-02\n", 81 | " 7: 3.1761e+00 3.1793e+00 2e-01 3e-03 6e-01 4e-03\n", 82 | " 8: 3.1702e+00 3.1708e+00 3e-02 6e-04 1e-01 7e-04\n", 83 | " 9: 3.1686e+00 3.1686e+00 4e-03 8e-05 1e-02 1e-04\n", 84 | "10: 3.1683e+00 3.1683e+00 4e-04 7e-06 1e-03 8e-06\n", 85 | "11: 3.1683e+00 3.1683e+00 8e-06 2e-07 3e-05 2e-07\n", 86 | "12: 3.1683e+00 3.1683e+00 6e-07 1e-08 2e-06 1e-08\n", 87 | "13: 3.1683e+00 3.1683e+00 4e-08 8e-10 1e-07 1e-09\n", 88 | "Optimal solution found.\n" 89 | ] 90 | } 91 | ], 92 | "source": [ 93 | "n = dn.A_optimize( matrix(s))" 94 | ] 95 | }, 96 | { 97 | "cell_type": "code", 98 | "execution_count": 6, 99 | "metadata": {}, 100 | "outputs": [ 101 | { 102 | "name": "stdout", 103 | "output_type": "stream", 104 | "text": [ 105 | "[ 1.41e-01 1.94e-10 -3.02e-11 8.84e-02 4.21e-10]\n", 106 | "[ 1.94e-10 1.05e-01 9.93e-11 2.05e-01 5.44e-10]\n", 107 | "[-3.02e-11 9.93e-11 3.05e-11 6.61e-02 6.85e-02]\n", 108 | "[ 8.84e-02 2.05e-01 6.61e-02 3.26e-01 7.75e-11]\n", 109 | "[ 
4.21e-10 5.44e-10 6.85e-02 7.75e-11 8.75e-10]\n", 110 | "\n" 111 | ] 112 | } 113 | ], 114 | "source": [ 115 | "print(n)" 116 | ] 117 | }, 118 | { 119 | "cell_type": "code", 120 | "execution_count": 7, 121 | "metadata": {}, 122 | "outputs": [ 123 | { 124 | "data": { 125 | "text/plain": [ 126 | "3.168258731683343" 127 | ] 128 | }, 129 | "execution_count": 7, 130 | "metadata": {}, 131 | "output_type": "execute_result" 132 | } 133 | ], 134 | "source": [ 135 | "np.trace(dn.covariance( n/s**2))" 136 | ] 137 | }, 138 | { 139 | "cell_type": "code", 140 | "execution_count": 8, 141 | "metadata": {}, 142 | "outputs": [ 143 | { 144 | "data": { 145 | "text/plain": [ 146 | "array([[0.48064717, 0.12440851, 0.15276036, 0.15276036, 0.15276036],\n", 147 | " [0.12440851, 1.13881256, 0.30465855, 0.30465855, 0.30465855],\n", 148 | " [0.15276036, 0.30465855, 0.47877161, 0.37408816, 0.47877161],\n", 149 | " [0.15276036, 0.30465855, 0.37408816, 0.37408816, 0.37408816],\n", 150 | " [0.15276036, 0.30465855, 0.47877161, 0.37408816, 0.69593923]])" 151 | ] 152 | }, 153 | "execution_count": 8, 154 | "metadata": {}, 155 | "output_type": "execute_result" 156 | } 157 | ], 158 | "source": [ 159 | "dn.covariance( n/s**2)" 160 | ] 161 | }, 162 | { 163 | "cell_type": "markdown", 164 | "metadata": {}, 165 | "source": [ 166 | "Clearly, the diagonal elements are not the same." 167 | ] 168 | }, 169 | { 170 | "cell_type": "code", 171 | "execution_count": 9, 172 | "metadata": { 173 | "collapsed": true 174 | }, 175 | "outputs": [], 176 | "source": [ 177 | "s = np.ones( (K, K)) + 0.1*(np.random.rand( K, K) - 0.5)\n", 178 | "s = 0.5*(s + s.T)" 179 | ] 180 | }, 181 | { 182 | "cell_type": "code", 183 | "execution_count": 10, 184 | "metadata": {}, 185 | "outputs": [ 186 | { 187 | "data": { 188 | "text/plain": [ 189 | "array([[0.97351052, 0.96050154, 1.01524271, 1.00311652, 1.00580902],\n", 190 | " [0.96050154, 0.95744738, 1.00080002, 1.03008442, 0.97986122],\n", 191 | " [1.01524271, 1.00080002, 1.01532568, 1.01995957, 1.00801957],\n", 192 | " [1.00311652, 1.03008442, 1.01995957, 1.02903125, 1.01919704],\n", 193 | " [1.00580902, 0.97986122, 1.00801957, 1.01919704, 0.99439068]])" 194 | ] 195 | }, 196 | "execution_count": 10, 197 | "metadata": {}, 198 | "output_type": "execute_result" 199 | } 200 | ], 201 | "source": [ 202 | "s" 203 | ] 204 | }, 205 | { 206 | "cell_type": "code", 207 | "execution_count": 11, 208 | "metadata": {}, 209 | "outputs": [ 210 | { 211 | "name": "stdout", 212 | "output_type": "stream", 213 | "text": [ 214 | " pcost dcost gap pres dres k/t\n", 215 | " 0: 0.0000e+00 -0.0000e+00 1e+02 4e+00 2e+01 1e+00\n", 216 | " 1: 4.1070e+00 4.4992e+00 2e+01 1e+00 4e+00 6e-01\n", 217 | " 2: 8.1702e+00 1.0088e+01 2e+02 2e+00 7e+00 2e+00\n", 218 | " 3: 1.3893e+01 1.4048e+01 1e+01 3e-01 1e+00 2e-01\n", 219 | " 4: 1.6870e+01 1.6949e+01 6e+00 1e-01 5e-01 1e-01\n", 220 | " 5: 1.8994e+01 1.9039e+01 3e+00 5e-02 2e-01 6e-02\n", 221 | " 6: 2.0385e+01 2.0392e+01 4e-01 7e-03 3e-02 8e-03\n", 222 | " 7: 2.0644e+01 2.0645e+01 5e-02 1e-03 4e-03 1e-03\n", 223 | " 8: 2.0685e+01 2.0685e+01 2e-03 3e-05 1e-04 4e-05\n", 224 | " 9: 2.0686e+01 2.0686e+01 1e-04 2e-06 7e-06 2e-06\n", 225 | "10: 2.0686e+01 2.0686e+01 4e-06 7e-08 3e-07 9e-08\n", 226 | "Optimal solution found.\n" 227 | ] 228 | } 229 | ], 230 | "source": [ 231 | "n = dn.A_optimize( matrix( s))" 232 | ] 233 | }, 234 | { 235 | "cell_type": "code", 236 | "execution_count": 12, 237 | "metadata": {}, 238 | "outputs": [ 239 | { 240 | "data": { 241 | "text/plain": [ 242 | "array([[4.04384771, 0.9226813 
, 0.87485767, 0.94890855, 0.85672638],\n", 243 | " [0.9226813 , 3.97632199, 0.89016928, 0.82843275, 0.90830927],\n", 244 | " [0.87485767, 0.89016928, 4.23165798, 0.98485259, 0.94333631],\n", 245 | " [0.94890855, 0.82843275, 0.98485259, 4.29673937, 0.9415661 ],\n", 246 | " [0.85672638, 0.90830927, 0.94333631, 0.9415661 , 4.13744 ]])" 247 | ] 248 | }, 249 | "execution_count": 12, 250 | "metadata": {}, 251 | "output_type": "execute_result" 252 | } 253 | ], 254 | "source": [ 255 | "dn.covariance( n/s**2)" 256 | ] 257 | }, 258 | { 259 | "cell_type": "markdown", 260 | "metadata": {}, 261 | "source": [ 262 | "The observed approximately equal diagonal elements may be attributable to similar $s_{ij}$ values in all the relative binding free energy calculations." 263 | ] 264 | }, 265 | { 266 | "cell_type": "code", 267 | "execution_count": null, 268 | "metadata": { 269 | "collapsed": true 270 | }, 271 | "outputs": [], 272 | "source": [] 273 | } 274 | ], 275 | "metadata": { 276 | "kernelspec": { 277 | "display_name": "Python 2", 278 | "language": "python", 279 | "name": "python2" 280 | }, 281 | "language_info": { 282 | "codemirror_mode": { 283 | "name": "ipython", 284 | "version": 2 285 | }, 286 | "file_extension": ".py", 287 | "mimetype": "text/x-python", 288 | "name": "python", 289 | "nbconvert_exporter": "python", 290 | "pygments_lexer": "ipython2", 291 | "version": "2.7.15" 292 | } 293 | }, 294 | "nbformat": 4, 295 | "nbformat_minor": 2 296 | } 297 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 forcefield 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # DiffNet 2 | 3 | DiffNet is a Python tool for finding optimal allocations of sampling 4 | in computational or experimental measurements of the individual 5 | quantities and their pairwise differences, so as to minimize the covariance 6 | in the estimated quantities. 7 | 8 | ## Prerequisites 9 | 10 | DiffNet depends on [CVXOPT](http://cvxopt.org) and [networkx](https://networkx.github.io/). 
You can install these two libraries using 11 | anaconda: 12 | 13 | ``` 14 | conda install -c conda-forge cvxopt 15 | conda install -c anaconda networkx 16 | ``` 17 | 18 | ## Civil matters 19 | 20 | DiffNet is free open source software. NO WARRANTY, Use AS IS. 21 | 22 | Copyright (C) 2018-2020 Huafeng Xu 23 | 24 | If you use DiffNet in a published work, please cite 25 | 26 | Huafeng Xu, Optimal measurement network of pairwise differences, J. Chem. Inf. Model. 59, 4720-4728, 2019, https://doi.org/10.1021/acs.jcim.9b00528. 27 | 28 | ## How to use 29 | 30 | Some examples are provided in [examples.py](https://github.com/forcefield/DiffNet/blob/master/examples.py). 31 | 32 | The following outlines an example application of the DiffNet: the 33 | calculation of binding free energies of a set of molecules from 34 | individual (a.k.a. absolute) and relative binding free energy 35 | calculations. (Underscored function names __func__ indicate 36 | user-defined external functions.) 37 | 38 | NOTE: For large networks (number of nodes greater than 200), diffnet does 39 | not scale well in memory. The users may want to replace A_optimize() with 40 | the sparse approximation sparse_A_optimal_network() in such cases. 41 | 42 | ### Binding free energy calculations 43 | 44 | ``` 45 | import numpy as np 46 | from diffnet import A_optimize, update_A_optimal 47 | from diffnet import round_to_integers 48 | from diffnet import MLestimate, covariance 49 | 50 | mols = __get_set_of_molecules__(...) # load up the molecules 51 | nmols = len(mols) 52 | 53 | # Optionally, experimental values for reference molecules may be 54 | # incorporated 55 | if __experimental_values__: 56 | dgexp = np.array( [ None ]*nmols) 57 | for i, dg in __experimental_values__: 58 | dgexp[i] = dg 59 | else: 60 | dgexp = None 61 | 62 | nsofar = np.zeros( (nmols, nmols)) 63 | # Initialize s_{ij} with random numbers 64 | sij = np.random.rand( nmols, nmols) 65 | sij = 0.5*(sij + sij.T) # symmetrize the fluctuation matrix 66 | converged = False 67 | while not converged: 68 | # Update the A-optimal difference network given the current estimate of sij 69 | # the numbers of samples so far, and the total number of samples for 70 | # the next iteration 71 | nij = A_optimize(sij, ndelta, nsofar) 72 | # nij = A_optimize( sij) # call this if not using iterative optimization. 73 | # nij[nij < ncut] = 0 # Omit pairs with impractically small allocations. 74 | nij = round_to_integers( nij) 75 | for i in xrange(nmols): 76 | if nij[i,i] > 0: 77 | __individual_free_energy__( mols[i], nsamples=nij[i,i], ...) 78 | for j in xrange(i+1, nmols): 79 | if nij[i,j] == 0: continue 80 | # Compute the relative free energy between i and j, 81 | # using nij[i,j] number of samples. 82 | __relative_free_energy__(mols[i], mols[j], nsamples=nij[i,j], 83 | ...) 84 | nsofar += nij 85 | # Get ALL the past free energy results once the calculations are done 86 | fe_results = __get_free_energy_results__(...) 87 | dgij = np.zeros( (nmols, nmols)) 88 | invsij2 = np.zeros( (nmols, nmols)) 89 | # Loop over pairs (i, j) between which the free energy have been 90 | # computed in the past. 91 | for i, j, dg, var in fe_results: 92 | # Update the s_{ij} estimate from the calculated variance. 
93 | # \sigma_e = s_e/\sqrt(n_e) => s_e = \sigma_e \sqrt(n_e) 94 | sij[i,j] = sij[j,i] = np.sqrt( var*nsofar[i,j]) 95 | invsij2[i,j] = invsij2[j,i] = 1./var 96 | dgij[i,j] = dg 97 | if (i!=j): dgij[j,i] = -dg 98 | 99 | # Use Maximum-likelihood estimator to derive the individual free energies 100 | # from the pairwise differences and their variances. 101 | dgi, vec = MLestimate( dgij, invsij2, dgexp) 102 | covar = covariance( sij, nsofar) 103 | 104 | converged = __check_convergence__(...) 105 | 106 | __report_free_energy__( dgi, covar, ...) 107 | 108 | ``` 109 | 110 | ### Binding free energy calculations with experimental data for some reference molecules 111 | 112 | Please refer to 113 | [netbfe.py](https://github.com/forcefield/DiffNet/blob/master/netbfe.py) 114 | and the Jupyter notebook 115 | [netbfe.ipynb](https://github.com/forcefield/DiffNet/blob/master/netbfe.ipynb) 116 | to see how to use DiffNet for binding free energy calculations when 117 | experimental binding free energies are available for some reference 118 | molecules. 119 | 120 | 121 | -------------------------------------------------------------------------------- /diffnet.py: -------------------------------------------------------------------------------- 1 | __doc__ = ''' 2 | DiffNet 3 | ------- 4 | 5 | DiffNet is a Python tool for finding optimal allocations of sampling 6 | in computational or experimental measurements of the individual 7 | quantities and their pairwise differences, so as to minimize the covariance 8 | in the estimated quantities. 9 | 10 | License 11 | ------- 12 | 13 | Released as free software. NO WARRANTY. Use AS IS. 14 | 15 | Copyright (C) 2018-2020 16 | Huafeng Xu 17 | 18 | Please cite https://doi.org/10.1021/acs.jcim.9b00528 if you use this 19 | in a publication. ''' 20 | 21 | import numpy as np 22 | from scipy import linalg 23 | import cvxopt 24 | from cvxopt import matrix, spmatrix 25 | import heapq 26 | import networkx as nx 27 | 28 | import graph 29 | 30 | try: 31 | from scipy.linalg import null_space 32 | except ImportError: 33 | from scipy.linalg import svd 34 | def null_space( A, rcond=None): 35 | u, s, vh = svd(A, full_matrices=True) 36 | M, N = u.shape[0], vh.shape[1] 37 | if rcond is None: 38 | rcond = np.finfo(s.dtype).eps * max(M, N) 39 | tol = np.amax(s) * rcond 40 | num = np.sum(s > tol, dtype=int) 41 | Q = vh[num:,:].T.conj() 42 | return Q 43 | 44 | from A_opt import solution_to_nij, measurement_index 45 | from A_opt import A_optimize_fast, update_A_optimal_sdp, A_optimize_sdp 46 | 47 | def sum_upper_triangle( x): 48 | ''' 49 | Return the sum of the upper triangle elements of the square matrix x. 50 | ''' 51 | if not isinstance(x, matrix): x = matrix( x) 52 | s = 0. 53 | for i in xrange( x.size[0]): 54 | for j in xrange( i, x.size[1]): 55 | s += x[i,j] 56 | return s 57 | 58 | def lndetC( sij, x, hessian=False): 59 | ''' 60 | f = ln det C = ln det F^{-1} = -ln det F 61 | where F = \sum_m^M x_m v_m.v_m^t 62 | 63 | By Jacob's formula 64 | 65 | df/dx_m = -tr(F^{-1}.(v_m.v_m^t)) 66 | 67 | The second derivative is 68 | 69 | d^2 f/dx_a dx_b = -tr( dC/dx_b.v_a.v_a^t) 70 | = tr( C.dF/dx_b.C.v_a.v_a^t) 71 | = tr( C.v_b.v_b^t.C.v_a.v_a^t) 72 | 73 | Return: 74 | tuple (f, d/dx f) if hessian is false 75 | tuple (f, d/dx f, d^2/dx^2 f) if hessian is true. 
76 | ''' 77 | if not isinstance( sij, matrix): sij = matrix( sij) 78 | K = sij.size[0] 79 | M = K*(K+1)/2 80 | F = matrix( 0., (K, K)) 81 | for i in xrange( K): 82 | # n_{ii}*v_{ii}.v_{ii}^t 83 | F[i,i] += x[i]/(sij[i,i]*sij[i,i]) 84 | for j in xrange( i+1, K): 85 | m = measurement_index( i, j, K) 86 | v2 = x[m]/(sij[i,j]*sij[i,j]) 87 | F[i,i] += v2 88 | F[j,j] += v2 89 | F[i,j] = F[j,i] = -v2 90 | C = linalg.inv( F) 91 | fval = -np.log(linalg.det( F)) 92 | df = matrix( 0., (1, M)) 93 | for i in xrange( K): 94 | df[i] = -C[i,i]/(sij[i,i]*sij[i,i]) 95 | for j in xrange( i+1, K): 96 | m = measurement_index( i, j, K) 97 | df[m] = (2*C[i,j] - C[i,i] - C[j,j])/(sij[i,j]*sij[i,j]) 98 | if not hessian: 99 | return (fval, df) 100 | # Compute the Hessian 101 | d2f = matrix( 0., (M, M)) 102 | for i in xrange( K): 103 | for j in xrange( i, K): 104 | # d^2/dx_i dx_j = C_{ij}^2/(s_{ii}^2 s_{jj}^2) 105 | d2f[i, j] = C[i,j]*C[i,j]/(sij[i,i]*sij[i,i]*sij[j,j]*sij[j,j]) 106 | d2f[j, i] = d2f[i, j] 107 | for i2 in xrange( K): 108 | for j2 in xrange( i2+1, K): 109 | m2 = measurement_index( i2, j2, K) 110 | # d^2/dx_id_x(i',j') = (C_{ii'}-C_{ji'})^2/(s_{i'i'}^2 s_{ij}^2) 111 | dC = C[i2,i] - C[j2,i] 112 | d2f[i, m2] = dC*dC/(sij[i,i]*sij[i,i]*sij[i2,j2]*sij[i2,j2]) 113 | d2f[m2, i] = d2f[i, m2] 114 | for j in xrange( i+1, K): 115 | m = measurement_index( i, j, K) 116 | invs2 = 1/(sij[i,j]*sij[i,j]) 117 | for i2 in xrange( i, K): 118 | for j2 in xrange( i2+1, K): 119 | m2 = measurement_index( i2, j2, K) 120 | # d^2/dx_{ij}dx_{i'j'} = 121 | # (C_{ii'}+C_{jj'}-C_{ji'}-C_{ij'})^2/(s_{i'j'}^2 s_{ij}^2) 122 | dC = C[i,i2] + C[j,j2] - C[j,i2] - C[i,j2] 123 | d2f[m,m2] = dC*dC*invs2/(sij[i2,j2]*sij[i2,j2]) 124 | d2f[m2,m] = d2f[m,m2] 125 | return (fval, df, d2f) 126 | 127 | def A_optimize( sij, nadd=1., nsofar=None, delta=None, 128 | only_include_measurements=None, 129 | method='conelp'): 130 | ''' 131 | Find the A-optimal of the difference network that minimizes the trace of 132 | the covariance matrix. This corresponds to minimizing the average error. 133 | 134 | In an iterative optimization of the difference network, the 135 | optimal allocation is updated with the estimate of s_{ij}, and we 136 | need to allocate the next iteration of sampling based on what has 137 | already been sampled for each pair. 138 | 139 | Args: 140 | 141 | sij: KxK symmetric matrix, where the measurement variance of the 142 | difference between i and j is proportional to s[i][j]^2 = 143 | s[j][i]^2, and the measurement variance of i is proportional to 144 | s[i][i]^2. 145 | 146 | nadd: float, Nadd gives the additional number of samples to be collected in 147 | the next iteration. 148 | 149 | nsofar: KxK symmetric matrix, where nsofar[i,j] is the number of samples 150 | that has already been collected for (i,j) pair. 151 | 152 | delta: a length K vector. delta[i] is the measurement uncertainty on the 153 | quantity x[i] from an independent experiment; if no independent experiment 154 | provides a value for x[i], delta[i] can be set to None or numpy.infty. 155 | 156 | only_include_measurements: set of pairs, if not None, indicate which 157 | pairs should be considered in the optimal network. Any pair (i,j) not in 158 | the set will be excluded in the allocation (i.e. dn[i,j] = 0). The pair 159 | (i,j) in the set must be ordered so that i<=j. 160 | 161 | method: 'conelp' (DEFAULT, with fast KKT solver), 'sdp' 162 | (Semidefinite programming). 
163 | 164 | Return: 165 | 166 | KxK symmetric matrix of float, the (i,j) element of which gives the 167 | number of samples to be allocated to the measurement of (i,j) difference 168 | in the next iteration. 169 | 170 | ''' 171 | if 'conelp'==method: 172 | nij = A_optimize_fast( sij, nadd, nsofar, delta, 173 | only_include_measurements) 174 | else: 175 | if delta is not None: 176 | raise ValueError, 'Currently delta values are only supported in A-optimal by the conelp method.' 177 | if nsofar is None: 178 | nij = A_optimize_sdp( sij) 179 | nij *= nadd 180 | else: 181 | nij = update_A_optimal_sdp( sij, nadd, nsofar, 182 | only_include_measurements) 183 | return nij 184 | 185 | def D_optimize( sij): 186 | ''' 187 | Find the D-optimal of the difference network that minimizes the log of 188 | the determinant of the covariance matrix. This corresponds to minimize 189 | the volume of the confidence ellipsoid for a fixed confidence level. 190 | 191 | Args: 192 | 193 | sij: KxK symmetric matrix, where the measurement variance of the 194 | difference between i and j is proportional to s[i][j]^2 = 195 | s[j][i]^2, and the measurement variance of i is proportional to 196 | s[i][i]^2. 197 | 198 | Return: 199 | 200 | nij: symmetric matrix, where n[i][j] is the fraction of measurements 201 | to be performed for the difference between i and j, satisfying 202 | \sum_i n[i][i] + \sum_{i= 0, formulated as G.x <= h 225 | G = matrix( np.diag( -np.ones( M))) 226 | h = matrix( np.zeros( M)) 227 | 228 | # The constraint \sum_m n_m = 1. 229 | A = matrix( [1.]*M, (1, M)) 230 | b = matrix( 1., (1, 1)) 231 | 232 | sol = cvxopt.solvers.cp( F, G, h, A=A, b=b) 233 | 234 | n = solution_to_nij( sol, K) 235 | return n 236 | 237 | def constant_relative_error( si): 238 | ''' 239 | Construct a difference network with constant relative error, such that 240 | s_{ij} = s_i - s_j, from the given $s_i$. 241 | 242 | Return: 243 | 244 | sij = s_i - s_j 245 | ''' 246 | K = len(si) 247 | si = np.sort( si) 248 | sij = np.diag( si) 249 | for i in xrange( K): 250 | for j in xrange( i+1, K): 251 | sij[i,j] = sij[j,i] = si[j] - si[i] 252 | return matrix( sij) 253 | 254 | def A_optimize_const_relative_error( si): 255 | ''' 256 | Find the A-optimal of the difference network where s_{ij} = |s_i - s_j|. 257 | ''' 258 | K = len(si) 259 | si = np.sort( si) 260 | 261 | nij = np.zeros( (K, K), dtype=float) 262 | N = nij[0,0] = np.sqrt( K)*si[0] 263 | for i in xrange(K-1): 264 | nij[i+1, i] = nij[i, i+1] = np.sqrt(K - (i+1))*(si[i+1] - si[i]) 265 | N += nij[i, i+1] 266 | 267 | nij = matrix( nij/N) 268 | assert( abs(sum_upper_triangle( nij) - 1) < 1e-10) 269 | return nij 270 | 271 | def D_optimize_const_relative_error( si): 272 | ''' 273 | Find the D-optimal of the difference network where s_{ij} = |s_i - s_j|. 274 | ''' 275 | K = len(si) 276 | si = np.sort( si) 277 | 278 | iK = 1./K 279 | nij = np.zeros( (K, K), dtype=float) 280 | nij[0,0] = iK 281 | for i in xrange(K-1): 282 | nij[i,i+1] = nij[i+1,i] = iK 283 | 284 | return matrix( nij) 285 | 286 | def E_optimize( sij): 287 | ''' 288 | Find the E-optimal of the difference network that minimizes the largest 289 | eigenvalue of the covariance matrix. This is equivalent to minimizing 290 | the diameter of the confidence ellipsoid. 291 | 292 | Args: 293 | 294 | sij: KxK symmetric matrix, where the measurement variance of the 295 | difference between i and j is proportional to s[i][j]^2 = 296 | s[j][i]^2, and the measurement variance of i is proportional to 297 | s[i][i]^2. 
298 | 299 | Return: 300 | 301 | nij: symmetric matrix, where n[i][j] is the fraction of measurements 302 | to be performed for the difference between i and j, satisfying 303 | \sum_i n[i][i] + \sum_{i= 0 321 | 322 | # G matrix, of dimension (K*K, M+1). 323 | # G[i*K + j] = (v_m.v_m^t)[i,j] 324 | G = matrix( 0., (K*K, M+1)) 325 | h = matrix( 0., (K, K)) 326 | for i in xrange( K): 327 | G[i*(K+1), i] = 1./(sij[i,i]*sij[i,i]) 328 | G[i*(K+1), M] = 1. # The column-major identity matrix for t. 329 | for j in xrange( i+1, K): 330 | m = measurement_index( i, j, K) 331 | v2 = 1./(sij[i,j]*sij[i,j]) 332 | G[j*K + i, m] = G[i*K + j, m] = -v2 333 | G[i*(K+1), m] = G[j*(K+1), m] = v2 334 | 335 | # G.(x,t) >= 0 <=> -G.(x,t) + s = 0 & s >= 0 336 | G *= -1. 337 | 338 | # The constraint n >= 0. 339 | G0 = matrix( np.diag(np.concatenate( [ -np.ones( M), np.zeros( 1) ]))) 340 | h0 = matrix( np.zeros( M + 1)) 341 | 342 | # The constraint \sum_m n_m = 1. 343 | A = matrix( [1.]*M + [0.], (1, M + 1) ) 344 | b = matrix( 1., (1, 1) ) 345 | 346 | sol = cvxopt.solvers.sdp( c, G0, h0, [ G ], [ h ], A, b) 347 | n = solution_to_nij( sol, K) 348 | 349 | return n 350 | 351 | def Dijkstra_shortest_path( sij): 352 | ''' 353 | Find the shortest path tree from the origin to every node, where the 354 | distance between nodes (i, j) are given by sij[i,j], and the distance 355 | between node i and the origin is sij[i,i]. 356 | 357 | This implementation follows https://en.wikipedia.org/wiki/Dijkstra%27s_algorithm. 358 | ''' 359 | K = sij.size[0] 360 | dist = np.inf*np.ones( K+1, dtype=float) 361 | prev = -1*np.ones( K+1, dtype=int) 362 | 363 | # The origin has index K. 364 | dist[K] = 0 365 | q = [(0, K)] 366 | 367 | while q: 368 | d, u = heapq.heappop( q) 369 | for v in xrange( K): 370 | suv = sij[u,v] if u != K else sij[v,v] 371 | dp = d + suv 372 | if dp < dist[v]: 373 | dist[v] = dp 374 | prev[v] = u 375 | heapq.heappush( q, (dp, v)) 376 | 377 | return dist, prev 378 | 379 | def E_optimal_tree( sij): 380 | ''' 381 | Construct a tree where each weighted edge represents a difference 382 | measurement--and the weight is the fraction of measurements allocated 383 | to the corresponding edege--so that the measurements minimizes the 384 | largest eigenvalue of the covariance matrix. 385 | 386 | Args: 387 | 388 | sij: KxK symmetric matrix, where the measurement variance of the 389 | difference between i and j is proportional to s[i][j]^2 = 390 | s[j][i]^2, and the measurement variance of i is proportional to 391 | s[i][i]^2. 392 | 393 | Return: 394 | 395 | nij: symmetric matrix, where n[i][j] is the fraction of measurements 396 | to be performed for the difference between i and j, satisfying 397 | \sum_i n[i][i] + \sum_{i epsilon: 736 | return 1./n 737 | else: 738 | return large 739 | else: 740 | def weight( i,j): 741 | return sij[i,j] 742 | 743 | # Next, get the k-connected graph that approximately minimizes the 744 | # sum of 1/n_{ij}. 
745 | G = nx.Graph() 746 | G.add_nodes_from( range( K)) 747 | G.add_node( 'O') 748 | edges = [] 749 | 750 | for i in xrange(K): 751 | edges.append( ('O', i, weight( i,i))) 752 | for j in xrange(i+1, K): 753 | edges.append( (i, j, weight(i,j))) 754 | edges = list(nx.k_edge_augmentation( G, k=connectivity, partial=True)) 755 | 756 | # Include only the edges that guarantee k-connectivity and nothing else 757 | only_include_measurements = set([]) 758 | for i, j in edges: 759 | if 'O'==i: 760 | only_include_measurements.add( (j,j)) 761 | elif 'O'==j: 762 | only_include_measurements.add( (i,i)) 763 | else: 764 | if i0 and dij[i,i] [... truncated; binary pickle contents of the examples/ .pkl files omitted ...] -------------------------------------------------------------------------------- /graph.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import networkx as nx 3 | from cvxopt import matrix 4 | 5 | import sys 6 | 7 | def diffnet_to_graph( A, origins='O'): 8 | ''' 9 | Construct a graph of K+1 nodes from the KxK symmetric matrix A, 10 | where the weight of edge (i,j) is given by A[i][j], and the weight 11 | of edge (K=origin, i) is given by A[i][i]. 12 | ''' 13 | A = np.array( A) 14 | g = nx.from_numpy_matrix( A) 15 | if type(origins)==list: 16 | originIDs = list(set( origins)) 17 | originIDs.sort() 18 | for o in originIDs: 19 | g.add_node( o) 20 | else: 21 | if origins is None: origins = 'O' 22 | g.add_node( origins) 23 | origins = [ origins ]*A.shape[0] 24 | for i in xrange( A.shape[0]): 25 | if A[i][i] != 0: 26 | g.remove_edge( i, i) 27 | g.add_edge( origins[i], i, weight=A[i][i]) 28 | return g 29 | 30 | def diffnet_connectivity( n, src=None, tgt=None): 31 | ''' 32 | Compute the connectivity in the difference network of n[i,j]. 33 | If src and tgt are given, compute the local connectivity between 34 | the src and tgt nodes. 35 | ''' 36 | g = nx.from_numpy_matrix( n) 37 | return nx.edge_connectivity( g, src, tgt) 38 | 39 | def scale_edge_weight( g, src='weight', dest='scale', beta=0.5): 40 | ''' 41 | Scale the edge weights from between 0 to inf to betweeen 0 and 1. 42 | ''' 43 | scales = dict() 44 | weights = np.array( 45 | [ g.get_edge_data( e[0], e[1], src)[src] for e in g.edges() ]) 46 | wm = np.median( weights) 47 | # Scale the weights to 2/pi arctan( alpha x) so that the median is scaled 48 | # to beta.
49 | alpha = np.tan(0.5*beta*np.pi)/wm 50 | weights = np.arctan( weights*alpha)*2/np.pi 51 | nx.set_edge_attributes( g, dict( zip(g.edges(), weights)), dest) 52 | 53 | def draw_diffnet_graph( g, pos=None, ax=None, 54 | widthscale=None, nodescale=2.5, node_color=None, 55 | origins=['O']): 56 | ''' 57 | Draw a graph representing the difference network. 58 | 59 | Args: 60 | g: nx.Graph - the graph representing the difference network. 61 | pos: Kx2 numpy array or dict - the coordinates to place the nodes 62 | in the graph. If numpy array, pos[i] is the coordinates for node i, 63 | excluding origin. If dict, pos[i] is the coordinate of node i, including 64 | origin. If None, use a spring layout. 65 | 66 | Returns: 67 | pos: dict - pos[i] gives the positions of the node i. 68 | ''' 69 | K = g.number_of_nodes() - len(origins) 70 | 71 | if isinstance( pos, np.ndarray): 72 | mypos = dict( [(i, pos[i]) for i in xrange(K)]) 73 | if (len(pos) == K): 74 | for d, o in enumerate(origins): 75 | mypos.update( {o : (-1.0*d, -1.0*d)}) 76 | else: 77 | for d, o in enumerate(origins): 78 | mypos.update( {o : pos[K+d]}) 79 | elif type( pos) == dict: 80 | mypos = pos 81 | else: 82 | scale_edge_weight( g, src='weight', dest='scale') 83 | mypos = nx.spring_layout( g, weight='scale', k=2.5/np.power( K, 0.1)) 84 | 85 | node_size = nodescale*K 86 | if node_color is None: 87 | node_color = 'red' 88 | nx.draw_networkx_nodes( g, mypos, nodelist=range(K), 89 | node_size=node_size, 90 | node_color=node_color, 91 | ax=ax) 92 | nodeO = nx.draw_networkx_nodes( g, mypos, nodelist=origins, 93 | node_size=node_size*2, 94 | node_color='#FFFFFF', 95 | width=2., 96 | ax=ax) 97 | if node_color is None or len(node_color)<=K: 98 | nodeO.set_edgecolor( 'red') 99 | else: 100 | nodeO.set_edgecolor( node_color[K:]) 101 | 102 | if widthscale is None: 103 | widthscale = 5.*K 104 | 105 | weights = np.array( [ w for u, v, w in list(g.edges( data='weight')) ]) 106 | weights[weights<0] = 0 # Set negative numbers to 0. 
107 | width = weights*widthscale 108 | nx.draw_networkx_edges( g, mypos, 109 | width=width, 110 | ax=ax) 111 | return mypos 112 | 113 | 114 | 115 | 116 | -------------------------------------------------------------------------------- /issues/001/issue.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "name": "stdout", 10 | "output_type": "stream", 11 | "text": [ 12 | " pcost dcost gap pres dres k/t\n", 13 | " 0: 0.0000e+00 -1.1000e+01 2e+03 3e+00 2e+01 1e+00\n", 14 | " 1: 4.1596e+00 2.9899e+00 3e+02 3e-01 2e+00 1e-01\n", 15 | " 2: 7.8393e+00 6.7918e+00 3e+02 4e-01 2e+00 3e-01\n", 16 | " 3: 1.1229e+01 1.1102e+01 4e+01 5e-02 3e-01 7e-02\n", 17 | " 4: 1.1823e+01 1.1803e+01 3e+01 3e-02 1e-01 8e-02\n", 18 | " 5: 1.1859e+01 1.1860e+01 3e+00 3e-03 2e-02 1e-02\n", 19 | " 6: 1.1508e+01 1.1511e+01 3e+00 2e-03 1e-02 1e-02\n", 20 | " 7: 1.1508e+01 1.1511e+01 2e+00 1e-03 7e-03 8e-03\n", 21 | " 8: 1.1408e+01 1.1410e+01 6e-01 3e-04 2e-03 3e-03\n", 22 | " 9: 1.1298e+01 1.1299e+01 4e-01 2e-04 1e-03 2e-03\n", 23 | "10: 1.1288e+01 1.1288e+01 2e-01 7e-05 4e-04 7e-04\n", 24 | "11: 1.1265e+01 1.1265e+01 4e-02 2e-05 8e-05 2e-04\n", 25 | "12: 1.1262e+01 1.1262e+01 7e-03 3e-06 2e-05 3e-05\n", 26 | "13: 1.1261e+01 1.1261e+01 2e-03 8e-07 4e-06 9e-06\n", 27 | "14: 1.1261e+01 1.1261e+01 2e-04 8e-08 4e-07 9e-07\n", 28 | "15: 1.1261e+01 1.1261e+01 6e-05 2e-08 1e-07 3e-07\n", 29 | "16: 1.1261e+01 1.1261e+01 3e-05 1e-08 6e-08 1e-07\n", 30 | "17: 1.1261e+01 1.1261e+01 1e-06 5e-10 2e-09 5e-09\n", 31 | "Optimal solution found.\n" 32 | ] 33 | }, 34 | { 35 | "data": { 36 | "text/plain": [ 37 | "{0: array([0.06987596, 0.02819742]),\n", 38 | " 1: array([ 0.59746333, -0.18016295]),\n", 39 | " 2: array([-0.41300944, 0.01032677]),\n", 40 | " 3: array([-0.28480243, 0.28512536]),\n", 41 | " 4: array([-0.15666635, 0.6053899 ]),\n", 42 | " 5: array([ 0.3393108 , -0.49603989]),\n", 43 | " 6: array([0.44970797, 0.0470038 ]),\n", 44 | " 7: array([0.53206497, 0.37780336]),\n", 45 | " 8: array([-0.28564302, -0.47490033]),\n", 46 | " 9: array([ 0.01046237, -0.40003966]),\n", 47 | " 10: array([0.14123584, 0.51371975]),\n", 48 | " 'O': array([-1. 
, -0.31642354])}"
49 |       ]
50 |      },
51 |      "execution_count": 1,
52 |      "metadata": {},
53 |      "output_type": "execute_result"
54 |     },
55 |     {
56 |      "data": {
57 |       "image/png": "<base64-encoded matplotlib PNG figure omitted>",
58 |       "text/plain": [
59 |        ""
60 |       ]
61 |      },
62 |      "metadata": {},
63 |      "output_type":
PySc/PgMXbsWDIwMKDevXtT48aNKSEhgbZv305B\nQUFERHT37l3asmULbd++nUQiEfXp04cuX75M7u7uKsdWRjQlIRaLSSgUEhFRYWEhJwl06NCBpk6d\nSvr6+pSfn68xgeTk5FClSpUoNjaWzM3NSU9PjzIzM4mIKDo6mqKjo8v4BOXhQRJCMihxTUpOM4no\nBBHdr7DZ/nlYQRKC5iQlHR2i4cPLPYeVlRWlpaWVe5zyQqDpt7MyTyQQVCeiOCKqTEQHSEL+PxBR\nCyJ6TESNAbxXYxyL4nFqENEpIrpGRDWJqBMRpRJRQwDPv8TcREQ+Pj64fv26Ok15/Jtw7x7R8uWU\nd+cO/X71Kr0KCKCN164RANq/fz/Z29vTtm3baOvWrfTq1Svq0KEDBQYGUq1atVhCUEUC69ato5yc\nHOrRo4dKAsnKyqKsrCxiGIbEYvH/99MpN9YQ0UDifukWEdF6Ihr2VVdUcdDV1SVDQ0MyMjJiS8nP\nhoaGdOvWLbpx4wa5u7vT4MGDyczMjIyMjCg5OZnmzJlDSUlJtPOHH6jT9eukDZAIoCIiKhQISH/B\nAqIxYypkre7u7nTx4kUyMzOrkPFKQiAQ3ADgo7KhOnt8FVGI6BhJ9kdHlLq+qPj6GjXHiS5uv6jU\n9Yji60e/1Nzgz5T+m1i4UOLhJBIBxQfO2UQYJxTC1NT0ix9y/xfKGRVnJqe+whpEIhFMTU1hZGQE\nBwcHeHt7o2nTpvD390fPnj0REhKCESNGYNKkSZg1axZCQkJAJIk7+uuvv3DmzBlcv34dDx8+xJs3\nb/Dx40e1nUEYhsHUqVNBROjQoQPS09Nx/vx52Nvbg0gSsFxYWIj6Bga4UqcOzhBhg7Y2PEgzUUBV\n8Pf3VymvUVaQmmdKX8VSEggEjkT0jIheElF1AEyJOmOSbKUJiKgygGwl4xgSURoRMURkAyCzRJ2w\neI5qxXM8r8i5peAtpf8Y7t0jql+fKDdXriqHiOrTf3tbqaJQVktJW1ubLC0tyc7OjipVqiRnkUgt\nEX19fdq2bRvFxcXRmDFjqEePHmzd1atXadGiRWRoaEiRkZH07t07ysjIoLCwMIXrff36NXl5eVGn\nTp1o8+bNFfEIiIho1qxZNGXKFKpXrx69fv2aUlJSaPHixTRq1Ch68OABeXh4UFhYGN25c4dmzZpF\nrVu3JqFQSPfv3ydXV9dyzx8eHk5+fn7UuXPnCrgbWahrKX2tM6WWxf8eL0kKREQAMgUCwUUiakNE\nDYjobyXjNCQi/eJxMktWAGAEAsFxIgojybacdAuvoubm8V/E8uVEBQWcVTpEFE7/3G2lioCOjg7p\n6+uTnp4e6evryxSua6WvJyYm0vLly2kFoPDMpICIViqYv7CwkJKTk+ndu3fUqlUr6ty5M3Xp0oWM\njIxk2vTu3ZsuXbpEW7ZsoT59+siMUa1aNerRowedOnWKpk6dSnl5eWRhYUGDBw/mPOMTi8XUt29f\nsrS0pOXLl5f52ZVGdnY2hYSEUEpKCjvu8uXLaXjxedHly5fJ1dWVrl27Ri1btqRWrVrR4MGDad26\nddSyZUtKSkpizxnLim/B2eFrkZLUNSRBQf0TkhBDDVJODOqMQ8XjVPTcPP6LePyYSMGZjRb975fr\nW4CUIEoTgI6ODt28eZOaNGlCFhYWapFF6Wtr1qyhzp07U2pqKmtB/Pjjj7Rz584yrzclJYXq1KlD\nAOg+SbzsZpKE7LVIYiEVFF+/T0S2trb09u1bzrEA0MmTJ+nkyZNkaGhI3bp1o/79+1PDhg0pKCiI\nYmNjaefOndSjRw/O/gKBgFq1akWtWrWiM2fOUNeuXalRo0YUGRlJ7dq1kyGnqKgounTpEsXFxZGx\nsXGZ77/k2t+9e0f6+vr09OlT2rx5MwmFQmIYhnbs2EH9+vUjU1NTunz5Mnl7e9OOHTto2rRpRES0\nYsUKOnr0KL1584ZCQkLo999/L9davv/+e7pw4UK576k8+FqkZFr8ryK/TOl1VadrZRmnoubm8V+E\niwvRxYucxFREEi8ZZRAIBKSrq0v6+vpkbGxMZmZmZGFhQUZGRnIEsHPnTsrPz6dJkyaptDamTZtG\ny5Ytk2mn7Fvyr7/+SqmpqbRixQqNH8GnT58oKyuLOnXqREREaWlpNHnyZIqNjaWioiLS0tL8NSIW\ni6l3796UnJzMXltMEi+7cJKQ/WOSWEjS7dGUlBQ6efIkXblyhRYsWEAfP3I7zWZnZ1NMTAzFxMSQ\nrq4uFRUV0ZIlSxQSUmk0b96cfH19acSIEbRw4UKKjIykqVOnUseOHenSpUs0Y8YMioqKonr16ml8\n36WRmZlJnz9/JhsbGzp+/Dh17dqVGIah/fv3U0pKCoWFhVHr1q3p2LFjdPnyZWrRogUxDEO+vr5E\nJPkicuzYMfLw8KCYmBjq168ftW7duszrqVatGm3ZsqXc91UuqHPwVN5CRGtJcpgYqqA+qrh+oopx\nfiluN1NBfVhx/ZqKnLt43OtEdN3e3l7jAz4e/2DcvStxcuA4fM8VCtGjZk1s2bIFiYmJSE5OxtWr\nV7F161ZMmjQJAQEBCoNEHRwcEBAQgEmTJmH79u24d+8e6tWrB2dnZ7WWFRwcrNFtpKenw9LSEqmp\nqRo/gqioKPz999/sZ4ZhYGJiAiJCaGhomRKCSg/1NS2dOnUCACQlJUFfXx99+/ZFpUqV1Orr4+OD\nZcuWqfUMpk+fjrNnzwKQBJUGBASgVq1asLKyQqtWrdRWkVUEhmHw9u1bVq12z5490NbWhqGhocyz\njomJgVAoRK1atSAQCBAaGgo3Nze58WbPng0iSZaLsqSXkiItLQ2enp5l7q8M9C1ldCCi+cW/GGMV\n1K8orh+mYpzw4nbLFdSPK66fW9FzSwvvffcfRCnvO7FAAOjrI2HoUIwYMQIDBw7EgAED5NRVpcjI\nyEBcXByio6MxfPhwNGvWjPNFKhAIoKOjg6CgIERFReHQoUN4+fKl3Es/Pz8foaGhGt9GREQEIiMj\nNeqTm5uLjh07yq1BmrmaiBTKiCvC0aNHFSZoLVmsrKw4r0vJIjw8HKGhocjPz8eff/6JLl26qOUJ\nqaWlhcDAQOzZs0ehVlJsbCzmz5/PfmYYBm3btoWOjg5cXFywY8eOMmfL+Pz5M5KSklhi27x5M4RC\nIczMzHDp0iW59rt27YJQKIRQKET9+vURxpFSiGEY+Pj4gIjQrBzZHRiGga2tbYVlHi+Jb42UQot/\nIaIV1EtdtlupGKd1cbtjCuql7uKDKnpuaeFJ6T+Ku3eBsDAwzZrhaLVqbALMW7duYdiwYThx4gSa\nNm2KzZs3q/UHzTAMkpKScPToUSxYsADBwcEwMDDgfFmbmJigUaNGGDJkCFasWIFt27Zh0qRJGt/C\ny5cvYW1tLSfrrQzR0dGcirrTpk0DkUROQSAQYO/evWqN9+bNG1haWnKSRel7Hzp0KGe7evXqQSwW\n49Wr
VzA0NJTJpP7s2TNUq1ZNoZZT6WJubo6hQ4ciLi5O5ueWlpaGHj16sJ83b94MIsLBgwdx9+5d\n9OzZE25uboiJiVFb4kJqHZWUIFm5ciVLwMpcsaU5BwUCgcKkqSkpKazOlKZKtyXh4eGBDx8+lLm/\nInxrpFS9+JfgBREJS9UZE1EWSTxsDVWMY1TcLouIjEvVCYvHBxE5VvTc0sKTEo8BAwbI/NE+f/4c\ngwYNwrt37/Dbb7+hffv2ZRJeq1evHmrUqIFHjx5h9+7diIyMRJcuXeDk5MRJVra2tmjbti3GjRuH\n33//HTdv3kRubq7SOfr06aN2JuiioiL4+/tzWgR79+4FEeGnn36CjY0N9PX1ceXKFaXjFRQUoFGj\nRgoJovQ97t27F927d+dsu337dgDAoEGDMGLECACSLUpvb2+Ympri0qVLePToESZPnszG+qgqTk5O\n+PXXX/HixQsAQMuWLQEACQkJMDQ0RHh4uMz9xMfHo0+fPnBxccGGDRuUxiRlZGQgKSlJhvikW252\ndnYqf18CAgLQokULtr10jaUhzVmopaWFt2/fKh1T2VxlEUlUhW+KlCBrkagVwEqSLA01OcaRWkML\nS13ng2d5fBVs27YNf/zxh8y11NRUDBw4EG/evEFCQgI6dOiAGTNmaCSlLSUlLmRnZ+PatWvYuHEj\nWrVqBR8fH1hbW8u9WEUiEWrWrIkePXpgxowZ2LdvH548ecJuFd25cwcODg5qBXXu3r0ba9as4ax7\n8uQJiAj+/v64fv06DAwMYG1trVTPaOzYsQoJISgoSO7avXv38OTJE84tOQcHB+Tm5uLJkycwNDTE\nvXv3UKtWLZibm+P69esy84rFYpw6dQoDBgyAkZGRWgTVtGlT/PDDD7h//z58fHzg4eGh8KwmISEB\nAwYMgJOTE6Kjo2V+5gzDIDExUcY6YhgGkyZNAhHB0dFRIcGUbG9paYn+/fvDysoKenp6sLe3x5Mn\nTzjbd+nSBUSk8HdJFYYPH6625asJvkVSqk5EKcU/9P1ENJskaYJAEkcbi1LtQUTgGMeiuD1I4sI9\nu3g8FI9fvbxzKys8KfFITU3lPNPJzMxEWFgYHjx4AIZhEBMTgyZNmrBnIKqgjJRKYtKkSUhMTGTX\n8vfff2PJkiUIDQ1F/fr1YWhoKPeSNTAwQL169TBw4EC4urpi4sSJSuU1GIZBQECAQstLLBbDwMAA\ndnZ2AIADBw5AIBDA3d2dPbwvif379yskgLp162LPnj1yVpOUBEaOHMnZT3rm06VLF1SqVAlWVla4\nffu20meXlZWFrVu3ok2bNmpt8QmFQohEIqxatUrlNt2zZ88QGhoKR0dHrFixAikpKXLWkVgsZqUy\n3NzckJSUpHRMQKKPRURo3bo1goKCcPbsWRgZGcHW1pZTTys7O5tV5504caLK8UtjwYIFWFhBwoEl\n8c2RkmRNVJWINpEki0IBEb0ioqVEVImjLScpFddVKu73qnicZCLaSER2FTG3ssKTEg9ActDPdXZU\nUFCAUaNG4eLFiwAkW0qDBg3CoEGDkJ6ernRMHx8ftUhpwIABSr2/xGIxnj17hv379+O3335Dz549\n4erqKiccR8VnGS1btsTIkSOxfv16XLlyBVlZWfj7778RFRWldB0//PADiIjdylyyZAmICH5+fjKW\n2LNnz2Bqasr50jczM8Pz588xf/58OUtIivT0dM7+ZmZmuHPnDuzt7SEQCNhnri4SExMxb948uLu7\nq2U9fffddxg7dqxKyfDnz5+jf//+sLe3x+LFi1l59sLCQgwYMIAlYkWOMaWxdetWODg4wMzMjNXI\nunTpEkxNTVG5cmVWKr4krly5AoFAAIFAoJKoS2Pv3r3slmhF4pskpX9D4UmJBwBERkYqfDkxDIPp\n06fLSEufOXMGTZo0QUxMjEJHCB8fH7i4uKicW1N3cCny8vJw+/ZtxMTEwMbGBj4+PrCzs5N7+QoE\nAhgaGsLf3x9ThbwLSgAAIABJREFUpkzBrl27EB8fL2cphIaGguh/3nAMw7AiflJX8dzcXHh7eyt8\n0UufkXQsaWnTpo3MXPPmzePsb2JiAjs7O7Rp0wZTp04t03NhGAY3b97EqFGjULlyZbUIysvLCwsX\nLkRycrLMWJ8+fUJycjK7bRcREQEHBwdERUWxHou+vr6c1qQiDB8+HO3atQMRyagZX79+Hebm5rCw\nsOA8A5Jul1paWmrkKXjjxg0EBASo3V5d8KTEkxKPL4i4uDjMnTtXaZvly5djw4YN7Oe8vDzMmDED\nHTp0QEJCglz7L01KJbFnzx40b94cAPDhwwecO3cOK1euxNChQ1G7dm3o6OjIvYh1dXVRu3Zt9OvX\nD/PmzWO945YtW8aOW1hYCH9/fxAR5s6di2HDhil8sY8bN47t17RpU5m64cOHy6w3NzdXYczX6dOn\ncfPmTZiZmWn0sudCQUEBDh06BFtbW7XISSgUon379ti2bRuePHkiI9ooxbNnz/D999+DSOJMUZrI\nVKFevXro3r07zM3N5SzkO3fuwMrKCmZmZrh8+bJMXVFREZydnUFEMp6EqvD+/Xt4eHhotEZ1wJMS\nT0o8viAKCwvRrVs3le127NiBuXPnylhHjx49Qvv27fHbb78hPz+fvV63bl2VpJSVlYVhw4aVfeHF\nkL6wuDzmgoOD8ebNG7x+/RqHDx/GnDlz0KdPH3h6enI6Hejo6KBp06b46aefsHr1ahw/fhweHh5K\nX+aNGzeW2eL77rvvZOpLEp0UW7du5RxL+sL19/fHrFmzyv1sVqxYAYFAgP3796NJkyaoW7euWgRl\nYmKCQYMG4ezZsyx5ZGRksITr7++PcePGwd7eHtOnT1fL7TonJwdaWlrw8/NTaL3Ex8fDxsYGxsbG\nOH/+vEzdq1evoKWlBSKJO7s6+FKxSjwp8aTE4wujb9++MlLhinDixAlMmDBBZguFYRhs3rwZTZs2\nxblz5wBISKlmzZpKx7p//75KC01drF27Fl27dpW59vz5cwwePFhhn4KCAty/fx87d+7E6NGjWQtK\nnZe2tFhaWuLNmzfsmBkZGXJtjh49Kje3WCyGq6sr55iXLl1CXFwcLCwsNIrDKo27d+9CV1cXEyZM\nAACsX78e27Ztw9OnTzF9+nQ4OjqqdY/VqlXD2LFjUatWLRAR+vXrx25/pqenY+rUqbC3t8fkyZOV\nnjVevHgR2traqFKlitKfe0JCAuzs7GBgYCCTEQIAVq1aBSKCvr4+pyXHBU9PT5VnoJqCJyWelHh8\nYWzatAn79+9Xq+2NGzcQHh4u582WlpaGkJAQhIaGwsvLSyUpHTp0SM4dvazIzc2Fra0tHj9+zF4b\nMWIE4uPj1R6jSpUqMDQ0xKdPn3Dp0iWsXbsWw4YNg4GBgcIXtoODA3r16oVZs2bh4MGDOHjwoFyb\n58+fy81148YNNr0Rl+XFMAxatmxZZs+xnJwcuLu7w8fHh7Vg7
969i5EjR7JtGIbBhQsXEBwcDGNj\nY7UIytraGitXrsT79+9l5vv48SN+/fVX2NvbY8KECUhJSZFb08KFC1G7dm0QkUpHjufPn6NatWrQ\n09PDkSNHZOqk1lqDBg3UehaBgYG4du2aWm3VBU9KPCnx+MJ4+/atRltpT58+xaBBg/Dx40e5ulOn\nTrFuvsq2TZYtW4arV6+Wab1cmD17NmsZpaamIigoSKP+7du3BxHh6dOnACQv7eDgYIUvaBsbGzbr\ngKKipaWFkydPynxTv3z5MkxNTeHt7Y22bdty9tu7dy9OnTqF7777TmUQMRd++uknGBkZycT/FBUV\nwc/Pj/1cWFiIpKQkZGdnIzc3F3/88QcCAgI4PRu5tjm7deuGAwcOyGxdZmRkICoqCvb29hgzZoxM\n0GuPHj3Qtm1b6OnpyWz1KsLr16/h5OQEHR0dGUebjIwMNkaLa2u0NCIiIjgzeZQHPCnxpMTjK0CR\na7gipKSkICQkhDM+pU6dOrC0tERAQAD7ki+NMWPGqO1KrA4+fvwIS0tLvH37FpGRkYiLi9Oo//jx\n40FE2LdvHwBgw4YNCl/K5ubmyMnJQVFRER4/fow9e/Zg2rRpCrfkpCRWr149aGtrw9HREWfOnMHN\nmzc5ScDJyQl5eXlo1KgRVq5cqdF9HDhwAESE33//Xa6uTZs2KCgowPv37xXGdr179w6LFy9Wei+l\ntzBHjBiBa9eusb8/mZmZmD9/PhwcHDBixAi8efMGVatWRcuWLTXKZ/f27Vu4urpCS0tLhliOHTsG\nIkmAtbIgZwBYtGiRTO6/igBPSjwp8fgKmDBhgsYphT5//ozBgwfLBT56e3vD1dUVDx8+RLt27TBr\n1iy5b8fBwcEVfgA9btw4jBkzBp07d9a4r9T5YPr06bh9+7ZCK0i6nTdw4EC59ZfO5uDt7Y0xY8bA\nz88P5ubmcmMJi2XoueZZtmwZYmNjYW9vr5ZlAUjilSwsLBAUFMT5bEePHo0jR46w8UaKcOfOHTbL\nRqtWrWBjY6MWQbm6umL27NnsOVt2djaWLFmCqlWrsudTkydPVvMnIkFKSgo8PT0hFAqxdetW9nr/\n/v3ZMZX9Hv35559yaZXKC56UeFLi8RVw5swZLF68WON++fn5iIiIkMkKLSUlQLINtnHjRjRt2hQX\nLlxg21SEO3hpJCYmwtDQELt27dK47927d0FECAwMZN2PSxeRSIRTp06hY8eOICK5oNzS3m1SJ4Mj\nR45AT08Pvr6+iI2NxbJlyzB48GA0aNCAM2uFdK7evXvDzs4OY8aMYWOGFKGoqAgtW7ZEtWrVON3J\n379/j3Xr1mH16tVKn8Ply5dhbm4OoVCIjRs3smMfO3YMffr0gb6+vkpyEggEaN26NWJiYpCZmYkd\nO3awW25t27ZVaD0rQnp6OurWrQuBQMCGJhQUFLBkqSxA9tatW/D399doPlXgSYknJR5fAfn5+RrF\ngJSEWCzG1KlT8ddffwGQbN9JSUmK1NRUBAcHY8iQIfjw4cMXIaWCggLY2dmVyasvPz8f2traSnPK\nzZkzB4Bke6pOnTogIuzcuROAhHxLOwxs2LABBw4cgI6ODtq2bcuZc04sFmPMmDFqb5W1aNECERER\nWLduHS5fvsx6oc2ZMwcikUhu2zI/Px9JSUnIycnBy5cvERISovAZnD59GkZGRtDS0lLohPL582ds\n2rSJTaqqqhgaGsLd3R01a9aEUCjE0qVL4eTkhP79+8s4pqjCx48f0aBBAxARmw3i/v37EAqFEAgE\ncrFNJftx6TaVBzwp8aTE4yshKCioXMJqS5YswebNm1GnTh2FL4KTJ0+iQYMGaNeuXYVv38XExGD2\n7Nmws7PTKIGsFMoCTQMCAmQCPpOSkmBnZwddXV1cvHgRycnJcn1+++03aGlpoWPHjkodFrKzsznn\n1tbWxqJFi2BpaQkvLy92G6x0sbW1hUAgQLNmzbBz5048ePAAhYWFSE9PlxEClHr1ceHw4cPQ09OD\nnp4eDh8+rNbzevnyJWbNmoUaNWqoRVDa2tqYOHEibt++jU2bNsHFxQVBQUF48OCBWvN9/vwZTZo0\nARFh0aJFAIAZM2aASJKqSVFy3oqOVeJJiSclHl8Ja9askXPB1RRbt25FlSpVlH47jYuLQ5s2bdCx\nY0c8e/asXPNJwTAM/P39UVBQgMDAQJkMFOpAmmON62Xq4OAg5wYNALdv34aRkREsLS2xbds2zjOj\nbt26qXUmtHHjRs65+/bti+3bt8Pd3R1isRgfP37E+fPnsXr1avz0009o2LAhZ0JWbW1t1KpVC337\n9sXcuXNx+PBhvH79GoGBgXIxaX/88Qe0tLRgZGSE06dPa/TcAMmzv3LlCsLDwzVSz128eDFWrVoF\nNzc39OjRQ2UuPkASdN26dWsQEWbPng2GYdgYKkVBubVr1y6TUrEi8KTEkxKPr4SXL18iIiKi3ONU\nr14dlpaWCpOt7tmzB/v378eDBw/Qpk0bzJ49Wy0JCmU4fPgweyZ28eJF1KxZU22p7/fv3ytM/aOt\nra1UXyk2NhZCoZBTfqN3795qC+cVFRWxL9fS5erVq3B2dmY9A0uif//+MDMzw5UrVxAbG4vIyEh0\n794dXl5enCmW9PT04OHhgWHDhmHVqlWYOHEihEIhzM3NVepIqYOyqOd27NgRo0ePhoeHBzp37qxS\nAyk3N5dNATVt2jS8e/eOvVeu88TOnTtXyL1JwZMST0o8viLK4rlWGrVr14ajoyOGDx/OuY22YMEC\n9luxWCzG+vXr0axZM43duEuiS5cuMlH+vr6+agUEi8ViBAQEKHxpqhMLI800UPr8R1OZ8aNHj3Ku\noUWLFtiwYQO8vb1ltqGk1plUDj0pKUnmeRcWFiI+Ph67du3ClClT0KlTJ7k0SNJibW2N9u3bY/z4\n8diyZQtu375dpi3QkkhPT8ePP/6odqYMMzMztGnTBs7OzujQoYPCcyJAkn9Rmhh24sSJ2LJlC4gk\nWTlKW7WjRo0qk/OLIvCkxJMSj6+I0aNHl3tLrXbt2nB3d0dCQgJCQ0PlvMHCw8PltpBSUlLQr18/\nDB06lDMoVxkuXbqEKVOmyFw7ePAgGjVqpLLv3LlzFb4ke/ToofZZROm0PX369NHoHqRo06YN51r2\n798Pe3t7xMbGApBkPTAxMcHgwYORmpqqdszX+/fvERgYiCFDhoCIYGRkhAYNGsDKyorTinFzc8OP\nP/6ImTNn4sCBA3j27JnaFigA9OvXDz4+PnBwcNBIPdfGxga2trZo0qSJjNdmSRQUFKBnz54gIowc\nOZINgK5du7ZMuyVLllRYSiuAJyWelHh8VRw7doz1biorvLy84O7uDgBITk5GSEiITHS/Ms+748eP\nw9fXF7t27VKbEHr37i2X2kYsFsPNzU0usWdJnDt3TmEGAycnJxmVVVWwtLSU6f/bb7+p3bck7ty5\nw3m25ebmhuXLl6Nh
w4YoKChAgwYN4OLigoSEBLXjmADJ+Y+UGJycnPDq1Su27t27dzhx4gQWLVqE\nkJAQ+Pj4cLqAGxkZoX79+ggNDcXSpUtx6tQphaTo7OwMNzc31uuvLOq5JiYmqFmzJg4dOiT3O1FY\nWIh+/fqBSCIzIo37Kumuf+DAgQpJ/isFT0o8KfH4isjNzcWPP/5YrjG8vLxkJAMyMjIQGhrKugCr\ncgfPycnB5MmT0alTJ87ccSXx8OFDOXkIKTZt2qTw8DslJUWpt93x48eVzisFwzCYNm2aXP/yiMuF\nhIRwrmnFihWwsbFB3759oaOjo7FTglgsZiU4atasqZb0RFFRERISErBv3z7MmDED3bt3h4uLC6dz\nxXfffQc/Pz+MHj0aGzduxIkTJ0AkSaAqjXkqCU3VcwUCAaysrDBz5kyZM0ixWMzqWEnPmoRCIfv7\ndufOHbRr106jZ6UMPCnxpMTjK6Nnz57lOk8oTUqA5AxgxIgRuHLlitoxSvfv34efnx/mzp2r0BEi\nLCxM4XZjfn4+qlativv378tcLyoqYj24FBV1vBAZhsEvv/zC2V9bW1uplaYMiYmJnBaKtbU1S1ia\nps4paVHY29vLZEcoC3JycnDjxg1s3rwZY8eORZs2bZSSfJs2bTBt2jTs2bMHjx8/ljtv01Q9V1tb\nG126dGHVaEvKs0uDn6tUqQKxWIxPnz6pTBCsCXhS4kmJx1fGsmXL5GQDNAEXKQGSF8fIkSPRvXt3\ntccSi8WIjo5Gs2bN5A6+k5KSVBLcwoUL5dpwWTbS0rBhQxCRyjMIhmHYoFdpypuSxdjYGBYWFpwi\niOpgypQpnOszMjLSmPDy8vLQpUsXEBGaNWuGEydOaJzuR12kp6fjzJkzWL58Oby9vaGnp8e5Hamn\npwdvb28EBwdjwYIFOHbsGN6+fQuxWMyq53Kdc3GVatWqYcGCBXj79i2rUitNBzVo0CAAkizwFRWr\nxJMST0o8vjISEhLw888/l7m/p6enQsXPixcvolu3btiyZYtGYyYnJ6NPnz4IDw9nHSfGjx+Pmzdv\nKu33+fNnVK5cmc3Hdvz4cYXxSO7u7jhz5gyIlDsqiMVi/PTTTyAiLFy4EIsXL5YZp0qVKjh69ChE\nIhGcnZ3LpOcjXTfXOocPH6526pzs7GzWeaJ9+/bIzs5GTk5Ohafe4YKfnx9q1qyJ7t2748WLFzh4\n8CCioqIQFBQEDw8PVrSvZLGwsEDz5s0xYsQIrF69GvPnz0eXLl3U8uATCARo27Yt65Un/TmfPn0a\n3t7eGivlKgJPSjwp8fjKYBimXK7hnp6eqFWrFmfd9u3bceTIEfz+++9YvHixxt9ejx49Cl9fX2ze\nvFlti+uXX37BmDFjkJiYqPDbt6GhIR4+fIisrCwIBAJ4enpyjlVUVMSeX6xYsQIA5KTSW7RoAQCI\njo4GEaFJkyZl2g5dvXo151r79esHMzMzlfE8nz59gq+vL4gI3bt3l3GIaNWqlUZedJpCLBbD2NgY\nJiYmWLp0KWeb/Px83L17F9u2bcPEiRPRoUMHhd559vb28PT0hJ2dnVrWU8kYrR/09XHUwQEZtWsD\nYWHA3bvlujeelHhS4vH/gPDwcBlVVU2gjJRmzZrFZiOPjY3FlClTNH45Zmdno1mzZmjcuLFK6QJA\n4lVmaWnJ5k7jKtu3b2fbOzs7Q1tbW86rTXouIxAIsG7dOvZ6q1atZMYaOnQoW/fzzz+zlpcmBJyd\nnY2XL1/CxcVFbq1CoRBDhgyRU9stibS0NDZB7IABA+SCeIcOHapR7jlNER8fz65XFXmWxqdPn3Dx\n4kWsWbMG4eHhaNq0KWeWdXXKaCJkE6GASEITIhGgrw+UUUAR4EmJJyUe/y84dOgQ1q9fX6a+tWrV\nUkhKYWFhMnngLl++jIiICI3cmnNzcxEQEIA7d+7Az88PCxYsUJk5wcvLS+GLq7S7cNeuXUFEuFvi\nG7U0JkYoFMppFZX+9i7NywZILIZu3bqBSJJ9QB0kJyfjw4cPAIC9e/dyrrlVq1YwNjaWc+IAJDpE\nbm5uIJJ4AXKR/saNGzXeQtUEGzduhLW1NYyNjTUOIuYCwzBITEzEkSNHMG/ePPTv3x916tThzFoh\nLR7FhASuoq9fZouJJyWelHj8PyAzM7PMAaC1atVSuP3F5Zjw6NEjDB48WC6gVhGio6PZLNZisRir\nV69G8+bNFaaSkQrfcRVvb2+5ZKnTp08HEbEeanl5eejUqRNEIhGbFVyKrKwsuTGl2dKlyM7Oxg8/\n/AAiQkxMjML7ysrKQlJSksxLfOTIkQrdpXv27InevXvLjPHixQtUr14dRIRffvlFoXV2//79crmt\nq0JYWBgcHR3Rtm3bLzYHILFeHz58iK1bt6Jbt24y6Z7WlLSQSheRSLKVVwbwpMSTEo//J3Tr1k3t\n3G0loSkpARJPupCQEIWKqFIUFRXB399f7tv327dvERQUhBEjRsgEvT5//hxmZmacL3VTU1NOd/J9\n+/aBSKKHlJOTg/bt20NbW5sz99zt27flxuXyuHv37h0cHBygra2Ns2fPytW/fftWLpNFbGwsiAiT\nJ0/mXL+bmxsMDQ3Z+R4+fIgqVaqA6H8yG8qeY+vWrZW2KQ88PT1hYWGBmTNnfrE5FOH169fo3r07\nzgoE3IQkLRqo4JYET0o8KfH4f8KCBQvKFGvj4eHBSUpFRUVK9Xw+ffqE0NBQPHnyRGGbPXv2YM2a\nNQrrY2Nj4evri7179yI3Nxc+Pj4KraQ///yTc4ynT5+CiODn54fWrVtDV1dXzvqR4o8//pAZU0tL\nS2FM1YMHD2Bqagpzc3P2XC0zMxOJiYlyJPvu3TtUrlwZ3bp1A8Mw6N27N+c9tG7dGgMHDsStW7dY\nJw51JdTbtm2r0bapuvj8+TPr+XbmzJkKH58Lnz59wqpVq1CnTh02S0c0EQp5S+mfU3hS4qEK9+/f\nL1M8i4eHB7y8vOSuv3z5Ui5HXWnk5uYiPDwc169fl6tjGAYdO3ZUqfmUlZWF8ePHy+WjK1nGjh2r\nsL9YLIaBgQF0dHSgr6+vNLvDzJkzZcZ1cXFRurYTJ05AS0sLjo6OuHfvHqdKrFgsRtu2bVG1alX2\nbOnFixec5yffffcd9PT0YGJiwnnepQzjx4/HtWvX1G6vLk6dOgUdHR1oa2uXS59LFRITE7FgwQK4\nubmxJCgUCuHl5YVVq1Yh9+pVydkRf6b0zyg8KfFQhbK6hisipdOnT6ulc1RUVISJEyfKkcGpU6cw\na9Ystdawc+dOhYRkaGio1EL4+PEjm5ft4MGDSucpHTjbsWNHlWtbsWIFiAiNGjXiFP9btGgRhEKh\n3Daf1JOvdBEKhRAKhdizZ4/KuUtiz549altVmiAqKgq2trZqJcTVBAzD4N69e4iMjJT5wiEQCFCr\nVi2sXLlSnuQXLpQQkEgE1kLive++zcKTEg91EBYWJpfsVBXc3d05SWnTpk04d
eqUWmMwDIP58+fL\nuGr36NGDtRyU4dGjRwqTfVaqVAlubm4KM1a8f/8edevWZbWAVG0/1a9fX20LjGEYJCUlISMjAxMn\nTgQRoVevXjLecTdv3oS2tjamTp0q1//jx48KRfR0dXVlkt6qg9evX38RWfrAwEBYWVlh/Pjx5R6r\nqKgI586dw8iRI1nZDalV5ObmhiVLlqj+/bx7V7JV16wZH6f0LReelHiog7179yr1GOOCu7u7nHwA\nAERGRuLFixcajbVx40YsXboUt27dwrhx41S2z87OViiWR0To3Lkz1q9fz+kVlpqaCk9PT5ibm7NW\niTI9JYZh5JwooqOjOdtmZGQgKSmJ9YYTi8Xo0aMHiIjd0szKyoKLiwsaNmyo0MFkyZIlnPdVo0YN\npYSoaP2K5NHLCoZhYGlpCYFAgEOHDpVpjOzsbOzfvx/BwcEwMTEBEbHnRM7OzpgzZ45a8WlfCt8c\nKRFRIyKKJaIPRJRDRHeJaBQRiTQYowoRjSCiI0T0kojyieg9EZ0goq4K+jRX9IdWXOZoch88KfFQ\nB58+fUL//v016qOIlAYOHFgmb75Dhw7By8sLr1+/VtlWUYZtIsLUqVNx6NAhNG7cGNbW1rh16xbb\nTxrbY2lpiVu3brHphkJDQxXOlZaWJjdH6czdDMPgzZs3nDIYOTk5bEDvpk2bEBoaChMTE6WZ0fPz\n8zmzUgiFQhgaGqqtqyRF586dOc+1yornz5+z1ow6Vq0UaWlp2LRpEwIDA9mUQtIzNAcHB0ydOhXx\n8fEVts7y4JsiJSLqRERFRJRFRBuIaD4RPSr+xditwThzivs8J6JNRDSbiLYTUV7x9UUcfaSkdIaI\npnOU1prcC09KPNRFly5dNAqAVERKZd0qevHiBQIDAzFy5EilsukbN25USEgtW7Zk7yErKwstW7ZE\n1apV8fr1a7x58wbOzs6wtrZmg1Hfv38PIsIPP/ygcL6LFy/KzVNyC620dcSFlJQUfP/992wsUsnt\nSi4sWrRI4T3a2tpq7Jgyc+bMciXfLY3t27fDxMREYfB0STx9+hQLFy5E06ZNIRAIIBKJ2ESq3333\nHUaPHo2rV69WWCLVisI3Q0pEZEJEqcVWjU+J63pEFFf8i9FLzbG6ElEzjuuuRJRRPFbdUnVSUppe\nEffDkxIPdTFr1ixcvXpV7fZubm6cpKSpxSVFREQE4uPjER8fj8GDB8vInktx9+5dTrkH6QuudPxT\nVlYWLCws4O3tDQsLC1SpUkUu7Y6dnR0MDAwUpkHatGmTzDxGRkZgGAZisRiJiYlqBwOfPHkSAoEA\n2traePjwIWcbhmEwY8YM1nKoXbs2570aGRlppNx7/PhxzJ49W+32qhAREQFLS0tOUT2GYXDt2jVM\nmTIFHh4eIJJoLUlTCJmbmyMsLAynT5+ukCwQXwrqkpKQvjy6E5EVEe0EcF16EUAeEU0p/jhMnYEA\n7ANwluP6QyLaVfyxeblWy4NHBaF9+/Z05MgRjfoIBAKZz/n5+aSjo6Px3GlpaZSamkqurq7k6upK\n06ZNo4iICEpNTWXbZGZmUvfu3Sk3N1euv1AopJ07d5K1tbXMdUNDQ+rduzclJCQQALK3t6esrCyZ\nNp6enpSTk0PPnz/nXNvjx49lPteoUYM+f/5MKSkpZGtrS8bGxirvTywW04wZM8jW1pYYhiF/f39K\nS0uTaQOAxo8fT9OmTaMaNWrQ+fPnaeXKlZzjiUQiWr58ucp5pfDx8aFr166p3V4V4uLi6OPHj9Sk\nSRMiIiooKKDjx49TeHg4Va1alerVq0erVq2i9+/fE5Hk59O+fXv666+/6N27dxQdHU3NmzcnkUhU\nYWv6f4M6zFWeQkRbSfJtJIijTouIsomokIh0yznPyuJ5Rpa63rz4+lYiGk5EvxDRQCJyLss8vKXE\nQ12IxWJ06dJF7fZubm6oU6eOzLXHjx+XSSI8MjISFy9elLn24cMHDBo0CM+ePQPDMPjxxx8Vbmkp\nsgIePnyI7777DkKhEDdv3kRiYiJ69uyJ0aNHs5bYhAkTQETYu3cv5xjSHHnSEhgYyGnFKcOvv/4K\nLS0tXL16lbW8GjZsyMb3FBUVISwsDEQELy8vGYtPmlOvdDEyMtJoHRXl7JCbm8s6JKxcuRK9evVi\nHRXs7e1RvXp1iEQiaGtro1OnTti5cyeysrIqZO6vCfqGtu+uEce2Won6+8X1ruWYw4SI3hERU3oc\nUu7osIeIzDWZiyclHpogJCQE79+/V6utq6urHCkdO3ZMY7XTrKwsdOrUibMuJycHP/30E+tazVX8\n/f05t97u3buHypUro0aNGggJCZFJlHrw4EE0btwYBw4cwLZt20CkOJGqdAtKWrjcuJXh4sWLEIlE\nMoKCUnE/qfqvNJND/fr15RwHEhISODWJ9PT0NNqS69u3LxITEzVae2kkJiZi3LhxMo4X0ng1HR0d\nCIVCtGrVCuvXr9fIAeJbxLdESgnFD9xJQf3F4vqGZRxfQER/FI+xkqPenYgmEJEHERkRkSURtSOi\nm8V9LhACXwheAAAgAElEQVSRUMUcYUR0nYiu29vbV8TPh8d/BNu3b8euXbvUauvq6gpvb2+Za2vW\nrJGzeFRhyZIlSt2KL1++rDBZqb29Pae43s2bN2FhYQF3d3ckJyfjxYsXsLa2lvnGnpmZidGjR7Pi\neFwSEWKxWC7DwrZt29S+t0+fPsHBwUFO14hhGAQFBYHof7LeLVq0UHg+FRERwXn/xsbGamdTWLZs\nmcKUS4rAMAzu37+PWbNmoV69eqBibzmRSAQrKys2TqxBgwZYunRphQnsfQuoUFIiifu1wm9WHGVr\nib6qSEnq7NBAnbVw9F9U3P8cabAFWGxdPS/u20ndfrylxEMTpKens9LSqsBFShMmTNAouLOgoECh\npQNItvCqVavG+Xerra0tJ50OAFeuXIGZmRlq164t4zrdu3dvzniky5cvQyAQwMrKSu7g/c6dO3Lz\nqpuyh2EY9OrVCxYWFkhKSpKrT09Ph6mpKYgInp6enFkfpEhLS2O3yEoWkUikMilryfucNGmSynZF\nRUU4f/48xo4dCycnJxARKleujI4dO8Lf358NOLa1tcWsWbM4k93+G1DRpPQ3SVy41S3zSvT9Ytt3\nJHEtBxGdJSKjMvSfWdx/obp9eFLioSk6d+6slntuzZo15UgpODhYI9feLVu2YPPmzZx1DMMgMDBQ\n4ZfJJUuWyPW5cOECjI2NUa9ePbltyNu3b6NatWqc7uaenp4QCARo2rQpbt26hcLCQiQlJXHKYagb\n77N582YQcacw+vjxIxo1asSeDYlEIpw4cULpeHPnzuV8DkZGRmolXM3NzUX79u0563JycnDgwAEM\nHDiQjY9ycXHBgAED0KdPH1StWhVEhO+//x6GhoYgIjx48ECt5/BPRYWSUnkKfSFHByJaXDzuKSIy\nKOPaRhaPsUbdPjwp8dAU06ZNw+3b
t1W2q1mzJkr/fmniDs4wDPz9/RW+UOfPn6+QkNzc3OSsq1On\nTsHQ0BCNGjVSSBxt27bl3H7r27cviAgHDhxAYGAghg4diqysLCxfvlxmXmtra7XuLSEhAYaGhggP\nD5erS01NRZ06dUBEGDRoEB48eABzc3OYmJhwivlJkZubyykjLhAIEBUVpda6Sm4jpqWlYfPmzejc\nuTP09fUhEAjQqFEjjBs3DsOHD2fVcK2trREREYFLly4hMTERRBK37m8trqii8S2R0sDiH/bvHHUt\npZaOBuMJ6H+edseJSL8ca9tRPM54dfvwpMRDU1y6dEmtLaHyktLhw4dl1FtL4vz586yHV+ni5OSE\nI0eOYPTo0azVc/ToUejp6aF58+ZKPdJOnToFLy8vuRfqvHnzQESYN28esrOz8eeff8LX1xcBAQEy\nczdp0kTlfeXn58PHxwceHh5y5z2JiYmoWbMmiAijRo1i13HmzBloa2vDwcFBqdbU1q1bFVpL6mTR\n6Nu3LyZMmICmTZtCKBRCV1cXHTp0wNy5czFt2jRWWt3U1BQDBw7EyZMnZbY09+3bB11dXYWOKf8m\nfEukZEJEaaRB8CwRGRBRTSKyL3VdQETrivvEEpGeGvM3Jg5HBiLqSxJvvXwiqqbu/fCkxENTFBUV\noVu3birblSalzMxMzmBKRejcuTMngaSmpsLW1pbz5aurq8umDbp//z6GDBmC3bt3Q0dHB35+fsjO\nzlY6J8MwqFevHo4cOSJzXaqXVFKl9fPnz3KWiTrnbePHj4eenh7u3bsnc/3Zs2f4/vvvQUSIjIyU\nI8aYmBgQSbJLKLoPsVgMb29vzmczffp0zvu9fv06pkyZwuYKNDAwQL9+/bBx40YsWbKEzbSgr6+P\nH3/8Efv370deXp7Ce9PV1cWCBQtUPod/Or4ZUpKshTrT/9IMrSeieVQizRARCUq1b15cd6bU9WnF\n13OIKIq40wZ1LtXnJRG9JqKdRLSAiFYQ0ZXicQqJaIAm91JuUmIYID4euHBB8u+/3GTnIUG/fv04\n87iVhIuLiwwp3bt3D/PmzVNr/EuXLnFqLhUVFcHPz0/htt3atWtl2q9ZswYCgQB+fn5KHQVKYvfu\n3WjRogUAiaNFUlISK/jXrJRKqZREpEWVBXnixAkQyQvwxcfHs0Q7f/58hf2nTZsGIkK3bt0UOn+c\nOnWK89no6+ujqKgI+fn5OH78OMLDw2FnZwciSXaIiIgIrF69Gi1btoS/vz+0tLSgpaWFDh06YNu2\nbWrFPEnl3rkcTP5t+KZICf+zWGKJ6CMR5RLRPSIaTRwJWZWQ0mZFf1wlyuZSfSaQJGHrm+J584jo\nGUly53lpeh9lJiWGATZsAGrVAuztgYYNJf/WqiW5zpPTvxqbN29W6T7s4uICHx8f9vPBgwexe/du\ntcbv3bs3pxSBNMUOV+nXr5+MdbF9+3aIRCJ06NABwcHBamcmLyoqgpOTE44fP86ugWEYWFhYyJyV\n5OXlsfIJ0uLp6anwvC01NRU2NjYIDAyUWeeNGzfYjNrK1HSl65CebymThCi9rSgtrq6urEdf7dq1\nMX36dFy5cgX79u1Dz549oaenByJC8+bNER0dzelOrwiFhYXQ0dGBnp6e0tyE/xZ8c6T0byllIiWG\nAYYMAby9gRMn/kdAYrHks7e3pJ4npn8tkpOTMXToUKVtSpPS0qVLOZVkS+PRo0ecDgAnTpyQIwFp\ncXNzk4kx2rRpEwQCAXr16oWCggLk5ORg6NChuHPnjsr58/PzMXv2bDlhwxYtWoCI2ADTBw8eyK3j\n5MmT6Nq1K37++WeZ9UjVcm1sbGTc0C9cuAATExOIRCK1g4rz8vLQpEkTECmWyIiPj+eM3RIIBFi4\ncCGePn2K48ePIyQkhCUpHx8fLFy4EM2bN1e4PacMt27d4rQm/63gSelbIqUNGyTEo8icz8yU1Kuh\nLsrjnwtVruE1atSQIaVRo0aplQ1iyJAhcrEtSUlJnFINVHwGUlLOIDo6GkSE4OBgmUP4wsJCjBs3\nTk5WoiTS0tKQmpqK3Nxc2NjYICEhga2TBqjGxsYCkBzql1yHUChEXl4eGIbB3r170bhxY7btihUr\nIBAIcPLkSXa848ePs3Lrmgatpqenw9nZGSKRCMeOHQMgIb4HDx4gKiqK3UbjKh4eHqhcuTKICDVr\n1sSvv/4qc58TJ07ElStXNFoPAKxevRo6OjqIjIzUuO8/ETwpfSukxDCSLToVMRM4fhzw9OStpX8x\nJk2apFTbpkaNGqhXrx77uX///irdhJOSkuQ89AoLC1nLgKuUtDCWLVsGIsLgwYM5z1wYhsGsWbPk\nJMPz8vKQlJQkc+4UFRWFsLAw9vP69etlzo3mzJkjsw5HR0eZMTMyMjBixAi0a9cOurq6mDBhAlu3\nf/9+6OjowMDAQE7uXV0kJCSgUqVKMDAwQHBwMBvIam1tjcGDB2Pr1q2sBETpEhERgVu3bnH+PPbt\n24fly5drvJ6ePXuCiFTGU/1bwJPSt0JK8fGSsyMFh6wsxGKgalVJex7/Spw9e1ahyzbATUqqMH78\neNy8eVPmmjQhKlcZMmQI204atzR8+HCV5BcdHc2e36SlpXGK4n348AGWlpZsapyrV6+CiNC7d28A\nErHCkmvhCjzNycmBo6MjjI2NsWzZMojFYmzbtg0ikQgmJia4cOGCymfCNebBgwcxcOBAdutNS0sL\n4eHhiIuLg1gsxrNnzzBz5kzWIipdevXqpXD8xMRE9OvXT+N12draQigUapyM9p8KdUnpa0hX/Lfx\n4QNRlSpEQhWPWiiUtPvw4eusi8dXR8OGDSkuLk5hveTvVv7/ivDp0yd6/vw51alTh732119/0dy5\ncznb16lTh5YsWUJERDNnzqSff/6Zxo4dS8uWLZOTzCiNsLAwMjU1pfHjx5ORkRFZWlrKtTE3N6fg\n4GBatmwZERG5u7uTQCCge/fuERFRQkKCTPsaNWrIjTFu3DhKSUmhq1evUkFBAbm7u1Pfvn3J3Nyc\nTp8+TY0bN1a6Tinev39Pv//+O3Xt2pUsLS2pU6dO9PDhQ5o0aRLNnz+fioqK6MKFC3Tu3Dlq2LAh\nVa9enebNm0d+fn5UqVIlufH++OMPevPmDedcVapUoaSkJLXWJcWHDx/o7du35O7uTkZGRhr1/ddD\nHebiC28p8agYBAUFKYyZcXZ2ZhVb379/j1GjRikda86cOTJnLi9evGCF30oXExMTPH36FAzDYPLk\nySAiTJ48We0sAqmpqUhLS8OZM2cwduxYhYGlb968gZWVFev+7uzsDC0tLeTn58tZIaXdvKUpiKRp\nkqSWnJ6eHkJDQ1XGTD1//hyLFy9Gs2bNIBKJoKurC39/f6xdu5a13j58+IB169axrulCoRDdunXD\n3r172a3IDRs2cD7Dtm3bKpy7S5cuGokEHjlyBCKRSOXP+N8E4rfvvhFS4s+UeJTA2rVrcfjwY
c66\nkqR0/fp1LF26VOE4ubm5CAgIkHG3lmad5ip79+4FwzCsTIK6Gk05OTlISkqSSV109+5dDB06VCFJ\nDBgwgA0GlWoXnT9/Xm5NJc9SEhMTYWFhgaCgIIjFYkRGRoJIkhvu6dOn2LNnD3x9fXH06FG2D8Mw\nuHHjBqZOnQpPT08QSdL19O3bF7t372YzhGdlZWHHjh3o2LEjtLW1IRKJ0LZtWzRu3BhEhDFjxsis\nv6ioiA2MLV1Kb5VKERUVpdHZkFQ6ZN++fWr3+aeDJ6VvhZQA3vuOB4tXr17JZDkoiZKktHv3bs7E\no1KsXbtWRhJjxIgRCglp9OjREIvFGD58OIhI7YDclJQUhXE3L1++xMCBAznrHzx4ADs7O+Tn57Nx\nUtOnT5db16tXrwBISKBly5aoVq0aPn78iFGjRrGebiX1ij59+oRhw4ahVatWCAkJYZOa2tvbIyIi\nAn///Tcb75Ofn4+DBw8iKCiIdV7w9fXFqlWrkJqayrZp3rw5iAirVq2SuYejR49yPksfHx9O6/Lk\nyZOYNWuWWs8VAJt+SLqW/wJ4UvqWSKlknNLx4//byhOLJZ/5OKX/FErH80jh5OTEktK8efPk0upI\nUVRUBH9/f9Z9W5rSh6s0aNAAubm5GDx4MIiIU2qiNLKzs5GYmKgyoDM9PR0DBw5kyaUkOnbsiE2b\nNuHPP/8EEaFDhw4y69LX12e9/ebMmQORSITz588jNDQURJJAVekL+/Pnz/jjjz/Qu3dvmJmZgYhg\naGiIgIAA3LhxgyWJoqIinDp1CoMHD2a3MevUqYN58+ZxrhGQbJO6uLhAKBSy7uhSKMqEwWXpfvr0\nSeHPtTTEYjF0dXVhZ2enVvt/C3hS+pZICfhfRgdPT8nZUYMGkn89PfmMDv8xjBkzBk+fPpW77uTk\nhPr16wMAhg0bptAra8+ePVi9ejUAiVy6sbEx58vTwsICL168QP/+/SEQCBQGjpZEcnKyRlkJsrKy\nMGTIEDkCvXDhAlxdXZGQkAAiQvXq1WXW5unpCUCi1aSlpYXp06ez8uyNGjXCw4cPsWbNGrRv354V\nwWvRogWWLl2KFy9eoKCgAPPmzYOfnx+2b9+OUaNGwcbGBkQSkb/IyEg8fPhQrXt4+vQpLC0tYWRk\nJJNd4vbt25zBx46Ojpxnai1atFDrjO7hw4cgIvTt21et9f1bwJPSt0ZKUpTMfffwIU9G/0GcOHEC\nK1askLtekpQUuYNLMx3k5OQgJyeHPUvhKgcPHkSvXr0gFAoVaixJIbWO1MmMXRoFBQUYM2YMzp07\nJ3O9UaNG+PPPP2FoaAh9fX2ZtXXv3h2fP39G9erV4evry1pS1atXh4+PD2sNdevWDTExMXJBxA8e\nPMCUKVPYBK9GRkaIiIjA9evXyyQBcfH/2jv3sCirfY9/fzMIg2iYl/KChFpeaqOJHRQRFdywi06m\nltk5aZminp60MrqftG270naZlvsxy1uaqZkddUudvOEtr4nXXWqmlCRkoolyHZ35nj/e4T3MMAMz\nMDADrM/zrGeYd613vWv9Zpjvu971W7+1a5c+eim7geDo0aOd2tYxZiCpfWZZWVmVXmv+/PkUES5Z\nssTjdtZllCj5qygpGjxFRUUcMWJEueOdOnWqVJS2bt2qz12MHTvWpSC99NJLHDp0KI1GI5cvX15h\ne3Jycnjp0qVq9clqtfL111+3i7Swbt06xsbGsk+fPuXa98orr3DUqFFs0qSJXQTzVq1aMSUlhWlp\naeUCwmZmZnL69Om6ELdo0YITJkzg1q1buWLFCvbr16/KC2tJcuXKlfojv9JRalZWVjlBBcCWLVuW\nG8nOmTOHX375ZaXXuf/++wmAZ86cqXJb6yJKlJQoKfyYhx56qFy8tE6dOrFPnz60WCx87LHHnJ43\nfPhwXrp0Sd+F1VmKi4vTt9mu6EcyPz+/yqMjV8ydO1cfRVgsFnbr1s3pbreleyCVpm7dunHHjh3l\nokr89ttvnDNnjt2usiNHjuRXX31Vbs7rjz/+4BNPPMGRI0c6DU7rDm+++SYB8L777tPn7Epd6B3T\na6+9Znfuvn377KJQuKJNmzYMDQ2t95v6OaJESYmSwo+ZM2eO3Roj8v9FKTs72+mP2+HDh5mamspj\nx445vXsHwJtuuon9+/dnYGAg169f7/L62dnZ1R4duWLVqlV86623aLVauWjRIj2cT9lkMBj0Powf\nP94u5t7ly5e5ePFiJiYm0mAwMDAwkEOGDOHnn39e6Volkty9ezf79+/P+fPnu9yuwhVWq5WPP/44\nAfDpp58mqTlaOIv0YDKZmJ2drZ9bXFxc4VomUtsjCwCTkpI8ald9QImSEiWFH/PTTz8xNTXV7ljH\njh3Zp08f7tq1y+mWDKNHj+bx48f1bbUdk4iwR48eNJlMetBRR65evcqsrCw7EagJli9fzoSEBMbF\nxfFPAOcB3GZ7/RPARo0aEQBTU1NptVpZWFjIVatWcejQoQwKCqLBYGBiYiIXLVrk0aLUUsxmM2fM\nmMGkpCR+//33Hp1bUlLChIQEAtBj2s2dO9epzceNG2d37qBBgyq07ZYtWwigwjVo9RUlSkqUFH6O\nowtxx44dGRMTw2XLlpUTlczMTI4dO5YPP/ywy8d24eHhDAkJcRrV22q1Mjs7u0o/8O5gtVp58OBB\nTp06lT169NAftb3ZogWLRGgGSIBmgAUAJ0PbLfarr77S55YAMCYmhh988IEegaG6nD59moMHD+ar\nr77q9qaFpPYosFu3bjQYDExLS6PZbHZ6MyAi/Ne//qWf9+STT1YogqVrsFztIVWfUaKkREnh50yc\nOJFnz57V35eK0t/+9je7rRFIbRuIKVOmuBSk0NBQNm3a1GnA0qtXr/LcuXMeP8qqDLPZzM2bN3Pi\nxIm6F1z79u05adIkbt68meaMDFpMJu1nxiEVGQyMs605ioyM5PTp093eVNBTrFar7gjh+Mi0Is6c\nOcNWrVoxJCSEhw4d0sMgOabk5GT9nCVLlnDx4sUu6+zevTuDgoJqfKTqjyhRUqKk8HPS0tI4f/58\n/X2pKKWkpNg5QVy4cIFJSUkMDAx0+qPYqFEjhoaGltvTx2q18ty5c7x8+bLX2ly6kPWRRx7RF7L2\n6NGDU6dO5cGDB+0n78eNI41Gp6J0DeC+nj3tRhk1zaVLlzhhwgQ++uijbkdS2LNnD00mE9u2bcuz\nZ8+yf//+Tj+DUrE7fvw4n3jiCad1Wa1WmkwmRkVFea1PdQklSkqUFH5Ofn6+vq0DSXbo0IExMTHl\n3MFffPFFfWGoq1FSRkaG3Tl5eXnMysryyugoOzubH330UbmFrLNnz3Y6urly5Qo3btzIzPBwp4JU\nmnK6drVzFKgtvv32W/bv358LFy50ywPuiy++IKBFmdi6davTz+DOO++kxWKhxWJhQkKC03p+/vln\nAuDLL7/s7S7VCdwVJbV1hULhI0JCQmA2m3Ht2jX9
[remainder of base64-encoded PNG output omitted: the diffnet graph rendered by gph.draw_diffnet_graph in the source cell below]\n", 58 | "text/plain": [ 59 | "" 60 | ] 61 | }, 62 | "metadata": {}, 63 | "output_type":
"display_data" 64 | } 65 | ], 66 | "source": [ 67 | "%matplotlib inline\n", 68 | "import matplotlib\n", 69 | "from matplotlib import pyplot as plt\n", 70 | "matplotlib.rcParams['font.size'] = 20\n", 71 | "import networkx as nx\n", 72 | "import numpy as np\n", 73 | "from cvxopt import matrix\n", 74 | "\n", 75 | "import sys\n", 76 | "sys.path.insert( 0, '../../')\n", 77 | "\n", 78 | "import diffnet as dn\n", 79 | "import graph as gph\n", 80 | "from netbfe import *\n", 81 | "\n", 82 | "sij = matrix(np.sqrt(np.load(\"sij_sym.npy\")))\n", 83 | "\n", 84 | "#insert delta with stdev=1 for first s_ii:\n", 85 | "delta = sij.size[0] * [np.inf]\n", 86 | "fict_delta = [1] + delta[1:]\n", 87 | "\n", 88 | "#minimal graph generation:\n", 89 | "n = networkBFEalloc( sij, N=1000, delta=fict_delta)\n", 90 | "G = gph.diffnet_to_graph( n)\n", 91 | "gph.draw_diffnet_graph( G, widthscale=1e-1, nodescale=5)" 92 | ] 93 | }, 94 | { 95 | "cell_type": "code", 96 | "execution_count": null, 97 | "metadata": { 98 | "collapsed": true 99 | }, 100 | "outputs": [], 101 | "source": [] 102 | } 103 | ], 104 | "metadata": { 105 | "kernelspec": { 106 | "display_name": "Python 2", 107 | "language": "python", 108 | "name": "python2" 109 | }, 110 | "language_info": { 111 | "codemirror_mode": { 112 | "name": "ipython", 113 | "version": 2 114 | }, 115 | "file_extension": ".py", 116 | "mimetype": "text/x-python", 117 | "name": "python", 118 | "nbconvert_exporter": "python", 119 | "pygments_lexer": "ipython2", 120 | "version": "2.7.15" 121 | } 122 | }, 123 | "nbformat": 4, 124 | "nbformat_minor": 2 125 | } 126 | -------------------------------------------------------------------------------- /issues/001/sij_sym.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/forcefield/DiffNet/63da35032dbe9e9b10783499fb4fe067fe1a45b4/issues/001/sij_sym.npy -------------------------------------------------------------------------------- /netbfe.py: -------------------------------------------------------------------------------- 1 | ''' 2 | This is an example of how to allocate computational resources for computing 3 | binding free energies of a set of molecules using a combination of relative 4 | and absolute binding free energy calculations. 5 | ''' 6 | 7 | import numpy as np 8 | import cvxopt 9 | from cvxopt import matrix 10 | from diffnet import A_optimize, MLestimate, round_to_integers 11 | from diffnet import covariance, sum_upper_triangle 12 | 13 | def COX2params(): 14 | ''' 15 | Generate the variance s[i,j] and the free energy results. s[i,i] is the 16 | variance of the absolute binding free energy for molecule i, 17 | s[i,j] (i!=j) is the variance of the relative binding free energy 18 | between molecules i and j. 19 | 20 | Also generate the experimental uncertainties delta[i]. 
21 | 22 | ''' 23 | nheavy = dict(A1=7, A2=6, B1=9, B2=6, C1=10, C2=10) 24 | sCOX2 = np.diag( [nheavy['A1'] + nheavy['B1'] + nheavy['C1'], 25 | nheavy['A1'] + nheavy['B1'] + nheavy['C2'], 26 | nheavy['A1'] + nheavy['B2'] + nheavy['C1'], 27 | nheavy['A1'] + nheavy['B2'] + nheavy['C2'], 28 | nheavy['A2'] + nheavy['B1'] + nheavy['C1'], 29 | nheavy['A2'] + nheavy['B1'] + nheavy['C2'], 30 | nheavy['A2'] + nheavy['B2'] + nheavy['C1'], 31 | nheavy['A2'] + nheavy['B2'] + nheavy['C2']]) + \ 32 | np.array( [[ 0, 1, 16, 17, 1, 2, 16, 17], 33 | [ 1, 0, 17, 16, 2, 1, 17, 16], 34 | [16, 17, 0, 1, 16, 17, 1, 2], 35 | [17, 16, 1, 0, 17, 16, 2, 1], 36 | [ 1, 2, 16, 17, 0, 1, 16, 17], 37 | [ 2, 1, 17, 16, 1, 0, 17, 16], 38 | [16, 17, 1, 2, 16, 17, 0, 1], 39 | [17, 16, 2, 1, 17, 16, 1, 0]], dtype=float) 40 | sCOX2 = 10.*np.sqrt( sCOX2) 41 | sCOX2 = matrix( sCOX2) 42 | 43 | K = sCOX2.size[0] 44 | 45 | # Experimental values and error bars 46 | dG0 = np.array([ -9.9, -8.9, -9.5, -7.2, -9.4, -4.6, -9, -9.6 ]) 47 | delta = np.array([ 0.4, 0.5, 0.3, 0.4, 0.5, 0.3, 0.2, 0.5 ]) 48 | 49 | return dict( s=sCOX2, 50 | dG0=dG0, 51 | delta=delta) 52 | 53 | def mockupBFEresults( n, s, dG0): 54 | ''' 55 | Generate a mock-up of BFE results. Return dG and 1/sigma^2, where 56 | dG[i,i] is the absolute binding free energy result for molecule i, 57 | and dG[i, j] = dG[i] - dG[j] is the relative binding free energy 58 | result between molecules i and j. 59 | 60 | ''' 61 | si2 = cvxopt.div( 1., s**2) 62 | # isigma2 := \sigma_{ij}^{-2} = n_{ij}/s_{ij}^2 63 | isigma2 = cvxopt.mul( n, si2) 64 | 65 | K = n.size[0] 66 | dG = matrix( 0., n.size) 67 | for i in xrange(K): 68 | if n[i,i] > 0: 69 | # results for absolute binding free energy. 70 | dG[i,i] = dG0[i] 71 | dG[i,i] += 2*np.sqrt(1/isigma2[i,i])*(np.random.rand() - 0.5) 72 | for j in xrange(i+1, K): 73 | if n[i,j] > 0: 74 | ddG = dG0[i] - dG0[j] 75 | ddG += 2*np.sqrt(1./isigma2[i,j])*(np.random.rand() - 0.5) 76 | dG[i, j] = ddG 77 | dG[j, i] = -ddG 78 | 79 | return dG, isigma2 80 | 81 | def networkBFEalloc( s, N, delta=None): 82 | '''Use the A-optimal criterion to allocate samples among the network of 83 | binding free energy calculations. 84 | 85 | Args: 86 | 87 | s: KxK symmetric matrix. 88 | 89 | s[i,i] gives the fluctuations in the absolute binding free energy 90 | calculation for molecule i; 91 | 92 | s[i,j] gives the fluctuations in the relative binding free energy 93 | calculation between molecule i and j. s[i,j] = s[j,i]. 94 | 95 | delta: length K array. 96 | 97 | delta[i] gives the experimental uncertainty for the measured 98 | binding free energy for the reference molecule i. 99 | 100 | N: total samples. 101 | 102 | Return: 103 | 104 | n: KxK symmetric matrix. 105 | 106 | n[i,i] gives the allocation to the sampling of absolute binding 107 | free energy calculation of molecule i. 108 | 109 | n[i,j] gives the allocation of the sampling of relative binding 110 | free energy calculation between molecule i and j. 111 | ''' 112 | n = A_optimize( s, N, delta=delta) 113 | return n 114 | 115 | def networkBFEdG( ddG, isigma2, dG0, delta): 116 | '''Use maximum-likelihood to estimate the individual binding free 117 | energies given the computed absolute and relative binding free energy 118 | values, supplemented by the experimental binding free energies for 119 | some of the reference molecules. 120 | 121 | Args: 122 | 123 | ddG: KxK matrix, ddG[i,i] is the computed absolute binding free 124 | energy for molecule i. ddG[i,j] = dG[i] - dG[j] is the computed 125 | relative binding free energy between molecules i and j. 
dG[i,j] = 126 | -dG[j,i]. 127 | 128 | isigma2: KxK matrix, isigma2[i,j] = 1/sigma[i,j]^2, where 129 | sigma[i,j] is the standard deviation in the computed free energy 130 | ddG[i,j]. 131 | 132 | dG0: length K array, dG0[i] is the experimental binding free 133 | energy for molecule i. dG0[i]=None if the experimental 134 | value is unavailable for molecule i. 135 | 136 | delta: length K array, delta[i] is the standard deviation in the 137 | measured dG0[i]. 138 | 139 | Return: 140 | 141 | dG: length K array, dG[i] is the ML estimate for the individual 142 | binding free energy of molecule i. 143 | 144 | ''' 145 | dG, v = MLestimate( ddG, isigma2, dG0, (1./delta)**2) 146 | return dG 147 | 148 | def test_A_optimality_with_reference( s, n, delta, dn=1E-1, ntimes=10): 149 | ''' 150 | Return True if n is the A-optimal allocation. 151 | ''' 152 | K = n.size[0] 153 | cov = covariance( cvxopt.div( n, s**2), delta) 154 | f = np.trace( cov) 155 | df = np.zeros( ntimes) 156 | for t in xrange( ntimes): 157 | zeta = matrix( 1. + 2*dn*(np.random.rand( K, K) - 0.5)) 158 | n1 = cvxopt.mul( n, zeta) 159 | n1 = 0.5*(n1 + n1.trans()) # Symmetrize 160 | tot = sum_upper_triangle( n1) 161 | n1 *= sum_upper_triangle( n)/tot 162 | Cp = covariance( cvxopt.div(n1, s**2), delta) 163 | fp = np.trace( Cp) 164 | df[t] = fp - f 165 | 166 | success = np.all( df >= 0) 167 | 168 | if success: 169 | print 'A-optimality with references passed!' 170 | else: 171 | print 'A-optimality with references FAILED!' 172 | print 'df = ', df 173 | 174 | return success 175 | 176 | def unit_test(): 177 | references = [0] 178 | cox2 = COX2params() 179 | s = cox2['s'] 180 | dG0 = cox2['dG0'] 181 | delta = cox2['delta'] 182 | 183 | K = s.size[0] 184 | dG0p = [ x for x in dG0 ] 185 | # The experimental values of the molecules not in the references 186 | # will be unavailable. 187 | if references is not None: 188 | for i in xrange( K): 189 | if i not in references: 190 | dG0p[i] = None 191 | delta[i] = np.infty 192 | 193 | N = 1000. 194 | n = networkBFEalloc( s, N, delta) 195 | 196 | success = True 197 | success = success and test_A_optimality_with_reference( s, n, delta) 198 | 199 | nint = round_to_integers( n) 200 | n = matrix( nint[:], (K,K), tc='d') 201 | 202 | ddG, isigma2 = mockupBFEresults( n, s, dG0) 203 | 204 | dG = networkBFEdG( ddG, isigma2, dG0p, delta) 205 | 206 | cov = covariance( cvxopt.div(n, s**2), delta) 207 | err = np.sqrt(np.diag( cov)) 208 | deltaG = np.abs(dG - dG0) 209 | 210 | success = success and np.all( deltaG < err) 211 | if (success): 212 | print 'ML estimate with references passed!' 213 | else: 214 | print 'ML estimate with references FAILED!' 215 | print 'dG = ', dG 216 | print 'stderr = ', err 217 | print 'dG0 = ', dG0 218 | print 'max(|dG - dG0|) = %g' % np.max( deltaG) 219 | 220 | if __name__ == '__main__': 221 | unit_test() 222 | -------------------------------------------------------------------------------- /test_diffnet.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import cvxopt 3 | from cvxopt import matrix 4 | from diffnet import * 5 | import netbfe 6 | import A_opt 7 | 8 | def check_optimality( sij, nij, optimality='A', delta=1E-1, ntimes=10): 9 | ''' 10 | Return True if nij is the optimal allocation. 
11 | ''' 12 | K = sij.size[0] 13 | C = covariance( cvxopt.div( nij, sij**2)) 14 | fC = dict() 15 | if optimality=='A': 16 | fC['A'] = np.trace( C) 17 | if optimality=='D': 18 | fC['D'] = np.log( linalg.det( C)) 19 | if optimality=='E': 20 | fC['E'] = np.max( linalg.eig( C)[0]).real 21 | if optimality=='Etree': 22 | fC['Etree'] = np.max( linalg.eig( C)[0]).real 23 | 24 | df = np.zeros( ntimes) 25 | for t in xrange( ntimes): 26 | zeta = matrix( 1. + 2*delta*(np.random.rand( K, K) - 0.5)) 27 | nijp = cvxopt.mul( nij, zeta) 28 | nijp = 0.5*(nijp + nijp.trans()) # Symmetrize 29 | s = sum_upper_triangle( nijp) 30 | nijp /= s 31 | Cp = covariance( cvxopt.div( nijp, sij**2)) 32 | if (optimality=='A'): 33 | fCp = np.trace( Cp) 34 | elif (optimality=='D'): 35 | fCp = np.log( linalg.det( Cp)) 36 | elif (optimality=='E' or optimality=='Etree'): 37 | fCp = np.max( linalg.eig( Cp)[0]).real 38 | df[t] = fCp - fC[optimality] 39 | print df 40 | return np.all( df >= 0) 41 | 42 | def check_update_A_optimal( sij, delta=5e-1, ntimes=10, tol=1e-5): 43 | ''' 44 | ''' 45 | K = matrix(sij).size[0] 46 | 47 | ntotal = 100 48 | fopt = A_optimize( sij) 49 | nopt = ntotal*fopt 50 | # remove some random samples from the optimal 51 | nsofar = nopt - nopt*0.1*np.random.rand( K, K) 52 | nsofar = matrix( 0.5*(nsofar + nsofar.T)) 53 | nadd = ntotal - sum_upper_triangle( nsofar) 54 | nnext = A_optimize( sij, nadd, nsofar) 55 | success1 = True 56 | if np.abs(sum_upper_triangle( matrix(nnext)) - nadd) > tol: 57 | print 'Failed to allocate additional samples to preserve the sum!' 58 | print '|%f - %f| > %f' % (sum_upper_triangle( matrix(nnext)), nadd, tol) 59 | success1 = False 60 | # The new samples and the existing samples should together make up the 61 | # optimal allocation. 62 | dev = sum_upper_triangle( abs( nnext + nsofar - nopt))/ntotal 63 | dev /= (0.5*K*(K+1)) 64 | if dev > tol: 65 | print 'Failed: Updating allocation does not yield A-optimal!' 66 | print 'dev = %f > %f' % (dev, tol) 67 | success1 = False 68 | 69 | sij0 = np.random.rand( K, K) 70 | sij0 = matrix(0.5*(sij0 + sij0.T)) 71 | 72 | nsofar = 100*A_optimize( sij0) 73 | 74 | nadd = 100 75 | # nnext = update_A_optimal_sdp( sij, nadd, nsofar) 76 | nnext = A_optimize( sij, nadd, nsofar) 77 | ntotal = matrix( nsofar + nnext) 78 | 79 | C = covariance( cvxopt.div( ntotal/sum_upper_triangle(ntotal), matrix(sij)**2)) 80 | trC = np.trace( C) 81 | 82 | dtr = np.zeros( ntimes) 83 | for t in xrange( ntimes): 84 | zeta = matrix( 1. + 2*delta*(np.random.rand( K, K) - 0.5)) 85 | nnextp = cvxopt.mul( nnext, zeta) 86 | nnextp = 0.5*(nnextp + nnextp.trans()) 87 | s = sum_upper_triangle( nnextp) 88 | nnextp *= (nadd/sum_upper_triangle( nnextp)) 89 | ntotal = matrix( nsofar + nnextp) 90 | Cp = covariance( cvxopt.div( ntotal/sum_upper_triangle(ntotal), matrix(sij)**2 )) 91 | dtr[t] = np.trace( Cp) - trC 92 | 93 | success2 = np.all( dtr[np.abs(dtr/trC) > tol] >= 0) 94 | # success2 = np.all( dtr >= 0) 95 | if not success2: 96 | print 'Iterative update of A-optimal failed to minimize tr(C)=%f!' % trC 97 | print dtr 98 | 99 | nnext = round_to_integers( nnext) 100 | if sum_upper_triangle( matrix(nnext)) != nadd: 101 | print 'Failed to allocate additional samples to preserve the sum!' 
102 | print '%d != %d' % (sum_upper_triangle( matrix(nnext)), nadd) 103 | success2 = False 104 | 105 | return success1 and success2 106 | 107 | def check_sparse_A_optimal( sij, ntimes=10, delta=1e-1, tol=1e-5): 108 | ''' 109 | ''' 110 | sij = matrix( sij) 111 | K = sij.size[0] 112 | nsofar = np.zeros( (K, K)) 113 | nadd = 1. 114 | 115 | nopt = A_optimize( sij) 116 | nij = sparse_A_optimal_network( sij, nadd, nsofar, 0, K, False) 117 | 118 | success = True 119 | 120 | deltan = sum_upper_triangle( abs(nopt - nij))/(0.5*K*(K+1)) 121 | if deltan > tol: 122 | print 'FAIL: sparse optimization disagrees with dense optimization.' 123 | print '| n - nopt | = %g > %g' % (deltan, tol) 124 | success = False 125 | else: 126 | print 'SUCCESS: sparse optimization agrees with dense optimization.' 127 | print '| n - nopt | = %g <= %g' % (deltan, tol) 128 | 129 | n_measures = 8 130 | connectivity = 2 131 | nij = sparse_A_optimal_network( sij, nadd, nsofar, n_measures, connectivity, 132 | True) 133 | print nij 134 | trC = np.trace( covariance( cvxopt.div( nij, sij**2))) 135 | 136 | dtr = np.zeros( ntimes) 137 | for t in xrange( ntimes): 138 | zeta = matrix( 1. + 2*delta*(np.random.rand( K, K) - 0.5)) 139 | nijp = cvxopt.mul( nij, zeta) 140 | nijp = 0.5*(nijp + nijp.trans()) # Symmetrize 141 | s = sum_upper_triangle( nijp) 142 | nijp *= nadd/s 143 | 144 | trCp = np.trace( covariance( cvxopt.div( nijp, sij**2))) 145 | dtr[t] = trCp - trC 146 | 147 | success2 = np.all( dtr >= 0) 148 | if not success2: 149 | print 'FAIL: sparse optimization fails to minimize.' 150 | print dtr 151 | else: 152 | print 'SUCCESS: sparse optimization minimizes.' 153 | 154 | return success and success2 155 | 156 | def check_relative_only_A_optimal( sij): 157 | ''' 158 | ''' 159 | sij = matrix(sij) 160 | K = sij.size[0] 161 | for i in range(K): sij[i,i] = np.inf 162 | nij = A_optimize( sij) 163 | success = check_optimality( sij, nij) 164 | if (not success): 165 | print 'FAIL: A_optimize for relative-only measurements did not generate the optimal allocation.' 166 | else: 167 | print 'SUCCESS: A_optimize for relative-only measurements.' 168 | return success 169 | 170 | def check_hessian( dF, d2F, x0): 171 | ''' 172 | Check the Hessian for correctness. 173 | 174 | Returns: 175 | err: float - the square root of the sum of squares of the difference 176 | between finite difference approximation and the analytical results 177 | at the point x0. 178 | ''' 179 | from scipy.optimize import check_grad 180 | 181 | N = len(x0) 182 | esqr = 0. 183 | for i in xrange( N): 184 | def func( x): 185 | return dF(x)[i] 186 | def dfunc( x): 187 | return d2F(x)[i,:] 188 | e = check_grad( func, dfunc, x0) 189 | esqr += e*e 190 | return np.sqrt(esqr) 191 | 192 | def fabricate_measurements( K=10, sigma=0.1, noerror=True, disconnect=False): 193 | x0 = np.random.rand( K) 194 | xij = np.zeros( (K, K)) 195 | invsij2 = 1/(sigma*sigma)*np.random.rand( K, K) 196 | invsij2 = 0.5*(invsij2 + np.transpose( invsij2)) 197 | sij = np.sqrt( 1./invsij2) 198 | if noerror: sij *= 0. 199 | for i in xrange(K): 200 | xij[i][i] = x0[i] + sij[i,i]*np.random.randn() 201 | for j in xrange(i+1, K): 202 | xij[i][j] = x0[i] - x0[j] + sij[i][j]*np.random.randn() 203 | xij[j][i] = -xij[i][j] 204 | 205 | if (disconnect >= 1): 206 | # disconnect the origin and thus eliminate the individual measurements 207 | for i in xrange(K): invsij2[i][i] = 0 208 | if (disconnect >= 2): 209 | # disconnect the network into the given number of disconnected 210 | # components. 
211 | for i in xrange( K): 212 | c1 = i % disconnect 213 | for j in xrange( i+1, K): 214 | c2 = j % disconnect 215 | if (c1 != c2): 216 | invsij2[i][j] = invsij2[j][i] = 0 217 | 218 | return x0, xij, invsij2 219 | 220 | def check_MLest( K=10, sigma=0.1, noerr=True, disconnect=False): 221 | x0, xij, invsij2 = fabricate_measurements( K, sigma, noerr, disconnect) 222 | if (not disconnect): 223 | xML, vML = MLestimate( xij, invsij2) 224 | else: 225 | xML, vML = MLestimate( xij, invsij2, 226 | np.concatenate( [x0[:disconnect+1], 227 | [None]*(K-disconnect-1)])) 228 | # Compute the RMSE between the input quantities and the estimation by ML. 229 | return np.sqrt(np.sum(np.square(xML - x0))/K) 230 | 231 | def test_covariance( K=5, nodiag=False, T=4000, tol=0.25): 232 | sigma = 10. 233 | x0 = 100*np.random.rand( K) 234 | xij = np.zeros( (K, K)) 235 | sij = sigma*np.random.rand( K, K) 236 | sij = 0.5*(sij + sij.T) 237 | if nodiag: 238 | for i in range(K): sij[i,i] = np.inf 239 | xML = np.zeros( (K, T)) 240 | for t in range( T): 241 | for i in range(K): 242 | if not nodiag: 243 | xij[i,i] = x0[i] + sij[i,i]*np.random.randn() 244 | for j in range(i+1, K): 245 | xij[i,j] = x0[i] - x0[j] + sij[i,j]*np.random.randn() 246 | xij[j,i] = -xij[i,j] 247 | xML[:, t], vML = MLestimate( xij, 1./sij**2, x0) 248 | cov0 = np.cov( xML) 249 | cov = covariance( 1/sij**2) 250 | dx = x0 - np.mean( xML, axis=1) 251 | if np.max( np.abs( dx)) > sigma/np.sqrt(T): 252 | print 'WARNING: MLE deviates from reference by %g' % np.max(np.abs(dx)) 253 | 254 | success = True 255 | dr = np.minimum( np.abs(cov - cov0), np.abs(cov/cov0 - 1.)) 256 | if np.max( np.abs( dr)) > tol: 257 | print 'FAIL: covariance testing fails with relative deviation of %g' % np.max( np.abs( dr)) 258 | print 'covariance =' 259 | print cov 260 | print 'reference =' 261 | print cov0 262 | success = False 263 | else: 264 | print 'SUCCESS: covariance testing passed. Relative deviation < %g' % np.max( np.abs( dr)) 265 | return success 266 | 267 | def unitTest( tol=1.e-4): 268 | if (True): 269 | K = 10 270 | sij = np.random.rand( K, K) 271 | sij = matrix( 0.5*(sij + sij.T)) 272 | # nij = A_optimize( sij) 273 | nij = sparse_A_optimal_network( sij ) 274 | 275 | if (True): 276 | sij = matrix( [[ 1.5, 0.1, 0.2, 0.5], 277 | [ 0.1, 1.1, 0.3, 0.2], 278 | [ 0.2, 0.3, 1.2, 0.1], 279 | [ 0.5, 0.2, 0.1, 0.9]]) 280 | elif (False): 281 | sij = np.ones( (4, 4), dtype=float) 282 | sij += np.diag( 4.*np.ones( 4)) 283 | sij = matrix( sij) 284 | else: 285 | sij = matrix ( [[ 1., 0.1, 0.1 ], 286 | [ 0.1, 1., 0.1 ], 287 | [ 0.1, 0.1, 1.2 ]]) 288 | 289 | from scipy.optimize import check_grad 290 | 291 | def F( x): 292 | return lndetC( sij, x)[0] 293 | 294 | def dF( x): 295 | return np.array( lndetC( sij, x)[1])[0] 296 | 297 | def d2F( x): 298 | return np.array( lndetC( sij, x, True)[2]) 299 | 300 | K = sij.size[0] 301 | 302 | x0 = np.random.rand( K*(K+1)/2) 303 | err = check_grad( F, dF, x0) 304 | print 'Gradient check for ln(det(C)) error=%g:' % err, 305 | if (err < tol): 306 | print 'Passed!' 307 | else: 308 | print 'Failed!' 309 | 310 | err = check_hessian( dF, d2F, x0) 311 | print 'Hessian check for ln(det(C)) error=%g:' % err, 312 | if (err < tol): 313 | print 'Passed!' 314 | else: 315 | print 'Failed!' 
316 | 317 | print 'Testing ML estimator' 318 | for disconnect, label in [ 319 | (False, 'Full-rank'), 320 | (1, 'No individual measurement'), 321 | (2, '2-disconnected') ]: 322 | err = check_MLest( K, disconnect=disconnect) 323 | print '%s: RMSE( x0, xML) = %g' % (label, err), 324 | if (err < tol): 325 | print 'Passed!' 326 | else: 327 | print 'Failed!' 328 | 329 | results = optimize( sij) 330 | for o in [ 'D', 'A', 'E', 'Etree' ]: 331 | nij = results[o] 332 | C = covariance( cvxopt.div( nij, sij**2)) 333 | print '%s-optimality' % o 334 | print 'n (sum=%g):' % sum_upper_triangle( nij) 335 | print nij 336 | D = np.log(linalg.det( C)) 337 | A = np.trace( C) 338 | E = np.max(linalg.eig(C)[0]).real 339 | print 'C: (ln(det(C))=%.4f; tr(C)=%.4f; max(eig(C))=%.4f)' % \ 340 | ( D, A, E ) 341 | print C 342 | if (check_optimality( sij, nij, o)): 343 | print '%s-optimality check passed!' % o 344 | else: 345 | print '%s-optimality check failed!' % o 346 | 347 | # Check iteration update 348 | success = check_update_A_optimal( sij) 349 | if success: 350 | print 'Iterative update of A-optimal passed!' 351 | 352 | # Check sparse A-optimal 353 | if (check_sparse_A_optimal( sij)): 354 | print 'Sparse A-optimal passed!' 355 | 356 | # Check A-optimal when only relative measurements are included. 357 | if (check_relative_only_A_optimal( sij)): 358 | print 'Relative-only A-optimal passed!' 359 | 360 | # Test covariance computation 361 | if (test_covariance(5, T=4000)): 362 | print 'Covariance computation passed!' 363 | 364 | if (test_covariance(5, T=4000, nodiag=True)): 365 | print 'Covariance with only relative values passed!' 366 | 367 | if __name__ == '__main__': 368 | unitTest() 369 | A_opt.unit_test() 370 | netbfe.unit_test() 371 | --------------------------------------------------------------------------------
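For orientation, here is a minimal usage sketch of the workflow that netbfe.py's unit_test() exercises: allocate samples with the A-optimal criterion, round the allocation to integers, generate mock binding free energy results, and recover the per-molecule free energies by maximum likelihood. It calls only functions provided by the repository's netbfe.py and diffnet.py; the 3x3 s matrix, the uncertainties delta, the "true" values dG0, and the budget N below are made-up numbers for illustration, and, like the rest of the code base, the sketch assumes a Python 2.7 environment with numpy, scipy, and cvxopt installed and the repository root on the Python path.

    import numpy as np
    from cvxopt import matrix
    from diffnet import round_to_integers
    from netbfe import networkBFEalloc, mockupBFEresults, networkBFEdG

    # Illustrative 3-molecule problem (made-up numbers): s[i,i] is the
    # fluctuation of the absolute calculation for molecule i, s[i,j] that of
    # the relative calculation between molecules i and j.
    s = matrix([[ 2.0, 0.5, 0.8],
                [ 0.5, 2.5, 0.4],
                [ 0.8, 0.4, 1.8]])
    # An experimental value is assumed only for molecule 0 (the reference).
    delta = np.array([ 0.3, np.inf, np.inf])
    dG0 = np.array([ -9.9, -8.5, -7.1])   # hypothetical "true" free energies
    dG0p = [ dG0[0], None, None]          # only the reference value is known

    N = 500.                                  # total sampling budget
    n = networkBFEalloc( s, N, delta=delta)   # A-optimal allocation
    nint = round_to_integers( n)              # integer sample counts
    n = matrix( nint[:], (3, 3), tc='d')

    ddG, isigma2 = mockupBFEresults( n, s, dG0)    # noisy mock BFE results
    dG = networkBFEdG( ddG, isigma2, dG0p, delta)  # ML estimates per molecule
    print( n)
    print( dG)

With a concrete allocation n in hand, graph.diffnet_to_graph and graph.draw_diffnet_graph can be used to visualize the measurement network, as the notebook under issues/001 does.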