├── .gitattributes ├── .gitignore ├── Fourier_analysis └── 1d-QAOA-Fourier │ ├── .ipynb_checkpoints │ └── 1D-data-qaoa-fourier-checkpoint.ipynb │ ├── 1D-data-qaoa-fourier.ipynb │ ├── X_1d_sep.txt │ ├── Y_1d_sep.txt │ └── embeddings_circuit.py ├── README.md ├── Report ├── Readme.md └── Variational_Embedding_in_QML.pdf ├── Simulation_of_Variational_Circuits ├── data │ ├── X_1d2_sep.txt │ ├── X_1d_sep.txt │ ├── X_1d_sep_test.txt │ ├── X_2d_sep.txt │ ├── X_2d_sep_test.txt │ ├── Y_1d_sep.txt │ ├── Y_1d_sep_test.txt │ ├── Y_2d_sep.txt │ └── Y_2d_sep_test.txt ├── embedding_training.ipynb ├── featuremaps.py ├── fidelity.py ├── generate_data.py ├── plots.py └── trained_embeddings │ ├── 1d_sep-l2-300s-1l-4w.npy │ ├── 1d_sep-l2-300s-1l-4w.svg │ ├── 1d_sep-l2-300s-2l-2w.npy │ ├── 1d_sep-l2-300s-2l-2w.svg │ ├── 1d_sep-l2-300s-2l-3w.npy │ ├── 1d_sep-l2-300s-2l-3w.svg │ ├── 1d_sep-l2-300s-2l-4w.npy │ ├── 1d_sep-l2-300s-2l-4w.svg │ ├── 1d_sep-l2-300s-4l-1w.npy │ ├── 1d_sep-l2-300s-4l-1w.svg │ ├── 1d_sep-l2-300s-4l-2w.npy │ ├── 1d_sep-l2-300s-4l-2w.svg │ ├── 1d_sep-l2-300s-4l-4w.npy │ └── 1d_sep-l2-300s-4l-4w.svg ├── overlap_vs_HS_cost ├── .ipynb_checkpoints │ └── optimizing_overlap_cost-checkpoint.ipynb ├── X_1d_sep.txt ├── Y_1d_sep.txt ├── __pycache__ │ └── embeddings_circuit.cpython-38.pyc ├── embeddings_circuit.py ├── optimizing_overlap_cost.ipynb └── overleaf │ ├── data-1.png │ ├── data-2.png │ ├── hs-cost-1.png │ ├── hs-cost-2.png │ ├── overlap-1.png │ ├── overlap-2.png │ └── overlap_optimization.pdf ├── random_embedding_circuits ├── .ipynb_checkpoints │ └── two-qubit-random-embedding-checkpoint.ipynb ├── X_1d_sep.txt ├── Y_1d_sep.txt ├── two-qubit-random-embedding.ipynb └── two_wires_random_unitary_embeddings.py └── risk_function └── 2d_data ├── .ipynb_checkpoints └── risk_function-checkpoint.ipynb ├── embeddings_circuit.py └── risk_function.ipynb /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Object file 2 | *.o 3 | 4 | # Ada Library Information 5 | *.ali 6 | 7 | #directories 8 | __pycache__/ 9 | .ipynb_checkpoints/ 10 | *.DS_Store 11 | -------------------------------------------------------------------------------- /Fourier_analysis/1d-QAOA-Fourier/X_1d_sep.txt: -------------------------------------------------------------------------------- 1 | 1.428386558165781750e+00 2 | -1.313839087028072727e+00 3 | 1.596285966845742754e+00 4 | 1.367550496433965534e+00 5 | 1.468173134298068616e+00 6 | -1.454971615808443364e+00 7 | 1.161811926465180944e+00 8 | -1.714610930998586413e+00 9 | -1.327999547708873740e+00 10 | 1.382642255659186192e+00 11 | 1.074868758557305348e+00 12 | 1.459529311267870444e+00 13 | 1.382177001267317351e+00 14 | -1.131668993147058488e+00 15 | -1.248997409178646656e+00 16 | -1.322204908516744437e+00 17 | -1.464985657964124277e+00 18 | -1.454739026129264534e+00 19 | 1.557625966631448922e+00 20 | -1.401922341642383385e+00 21 | -3.593397378255333563e-01 22 | -6.950145572910067064e-03 23 | 1.138715720991759078e-01 24 | -1.704154411162837890e-01 25 | -1.840487346705927951e-01 26 | 2.641666606852424715e-01 27 | 3.363208579927841058e-01 28 | 1.640524890397370150e-01 29 | -1.572146825398952896e-01 30 | -2.091069596104288553e-01 31 | -1.205649127814735350e-01 32 | -2.119293925672509904e-01 
33 | 1.764063617893480984e-01 34 | -5.099904663363319379e-01 35 | 2.850906880823069756e-01 36 | 2.240966054048715017e-01 37 | 3.109652609661974765e-01 38 | 1.891447943386029285e-01 39 | -2.009277177789230429e-01 40 | 2.326104293168703568e-02 41 | -------------------------------------------------------------------------------- /Fourier_analysis/1d-QAOA-Fourier/Y_1d_sep.txt: -------------------------------------------------------------------------------- 1 | -1.000000000000000000e+00 2 | -1.000000000000000000e+00 3 | -1.000000000000000000e+00 4 | -1.000000000000000000e+00 5 | -1.000000000000000000e+00 6 | -1.000000000000000000e+00 7 | -1.000000000000000000e+00 8 | -1.000000000000000000e+00 9 | -1.000000000000000000e+00 10 | -1.000000000000000000e+00 11 | -1.000000000000000000e+00 12 | -1.000000000000000000e+00 13 | -1.000000000000000000e+00 14 | -1.000000000000000000e+00 15 | -1.000000000000000000e+00 16 | -1.000000000000000000e+00 17 | -1.000000000000000000e+00 18 | -1.000000000000000000e+00 19 | -1.000000000000000000e+00 20 | -1.000000000000000000e+00 21 | 1.000000000000000000e+00 22 | 1.000000000000000000e+00 23 | 1.000000000000000000e+00 24 | 1.000000000000000000e+00 25 | 1.000000000000000000e+00 26 | 1.000000000000000000e+00 27 | 1.000000000000000000e+00 28 | 1.000000000000000000e+00 29 | 1.000000000000000000e+00 30 | 1.000000000000000000e+00 31 | 1.000000000000000000e+00 32 | 1.000000000000000000e+00 33 | 1.000000000000000000e+00 34 | 1.000000000000000000e+00 35 | 1.000000000000000000e+00 36 | 1.000000000000000000e+00 37 | 1.000000000000000000e+00 38 | 1.000000000000000000e+00 39 | 1.000000000000000000e+00 40 | 1.000000000000000000e+00 41 | -------------------------------------------------------------------------------- /Fourier_analysis/1d-QAOA-Fourier/embeddings_circuit.py: -------------------------------------------------------------------------------- 1 | import pennylane as qml 2 | from pennylane import numpy as np 3 | 4 | 5 | 6 | def embedding_circuit(x,weights,wires): 7 | """QAOA-style embedding: alternating RX(x) data-encoding layers and trainable RY rotations.""" 8 | no_qubits = len(wires) 9 | 10 | for params_layer in weights: 11 | for i in range(no_qubits): 12 | qml.RX(x,wires=wires[i]) 13 | 14 | for i in range(no_qubits): 15 | qml.RY(params_layer[i],wires=wires[i]) 16 | 17 | for w in range(no_qubits): 18 | qml.RX(x,wires=wires[w]) 19 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # **Variational Embeddings in Quantum Machine Learning** 2 | ## **[QOSF mentorship program](https://qosf.org)** 3 | ### January 23rd, 2021 4 | 5 | **Mentor:** [Aroosa Ijaz](https://aroosaijaz.github.io/) 6 | 7 | **Mentees:** [Narges Alavi Samani](https://www.linkedin.com/in/narges-alavi-samani/), [Mudassir Moosa](https://www.linkedin.com/in/mudassir-moosa/), [Syed Raza](https://www.linkedin.com/in/syedraza22/) 8 | 9 | The project [report](https://github.com/mudassirmoosa/variational_embedding_circuits/tree/master/Report) is in the folder `Report`. 10 | 11 | ## **Project Description:** 12 | 13 | Machine learning is a potential application for near-term intermediate-scale quantum computers, with possible speed-ups over their classical counterparts. Quantum classifiers are quantum circuits that can be trained to classify data in two stages: 1) *Embedding*: the input data is encoded into quantum states, embedding it into a high-dimensional Hilbert space.
2) *Measurement*: a quantum measurement of the circuit is performed to discriminate between the classes. Usually, the *measurement* part of the circuit is trained, but a recent work [1] adopts an alternative approach in which the *embedding* part of the circuit is trained instead, freeing up precious resources. In this work, we benchmark various embeddings and cost functions and propose improvements; our key results are summarized below. 14 | 15 | ## **Key Results:** 16 | 1) We benchmark the performance of various variational embedding circuits for classification tasks. 17 | 2) We present an alternative to the Hilbert-Schmidt cost function, an empirical risk function, which can lead to better performance, as illustrated by some toy examples. 18 | 3) In single-wire circuits, the optimization of the Hilbert-Schmidt cost function is computationally expensive. We propose a more efficient overlap function that takes a third of the time. 19 | 4) We propose a framework based on Fourier series to quantify the expressivity of various embedding circuits for classification problems. 20 | 21 | ## **Code Repo Descriptions:** 22 | 1) Folder overlap_vs_HS_cost has code for the comparison between the optimization of the Hilbert-Schmidt cost function and the optimization of the overlap. This code is used in the analysis presented in Sec. (III) of the report. 23 | 2) Folder random_embedding_circuits has code for the random variational embedding circuits presented in Sec. (IV) of the report. This code is used to generate the plots shown in Fig. (3) and Fig. (4) of the report. 24 | 3) Folder risk_function/2d_data has code for the analysis of the risk function presented in Sec. (II) of the report. There is also code for generating the data set shown in Fig. (1) of the report. 25 | 4) Folder Fourier_analysis/1d-QAOA-Fourier has code for performing a Pauli decomposition of the output of a single-wire embedding circuit. See, e.g., Eqs. (2.3) and (2.4) of the report. 26 | 5) Folder Simulation_of_Variational_Circuits has code for implementing and comparing different variational circuit structures, varying in gate types, circuit depth, and number of qubits, as embedding circuits for data classification problems. A minimal usage sketch of the embedding-plus-overlap workflow is given below.
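## **Illustrative Usage (sketch):**

The snippet below is a minimal sketch, not taken from the notebooks in this repository, of the workflow described above: a single-wire QAOA-style embedding (mirroring `embedding_circuit` in `Fourier_analysis/1d-QAOA-Fourier/embeddings_circuit.py`) classifies a new point by comparing its state overlap with the embedded training points of each class, as done in `Simulation_of_Variational_Circuits/fidelity.py`. It assumes a PennyLane version that provides `qml.adjoint`; the data paths, `x_new`, and the untrained `weights` are placeholder choices for illustration.

```python
import pennylane as qml
from pennylane import numpy as np

n_wires = 1
dev = qml.device("default.qubit", wires=n_wires)


def embedding_circuit(x, weights, wires):
    # Mirrors Fourier_analysis/1d-QAOA-Fourier/embeddings_circuit.py:
    # alternating RX(x) data-encoding layers and trainable RY rotations.
    for params_layer in weights:
        for w in wires:
            qml.RX(x, wires=w)
        for i, w in enumerate(wires):
            qml.RY(params_layer[i], wires=w)
    for w in wires:
        qml.RX(x, wires=w)


@qml.qnode(dev)
def overlap(x1, x2, weights):
    # |<phi(x2)|phi(x1)>|^2 via the "overlap with |0...0>" trick used in fidelity.py:
    # embed x1, apply the inverse embedding of x2, measure the projector onto |0...0>.
    wires = list(range(n_wires))
    embedding_circuit(x1, weights, wires)
    qml.adjoint(embedding_circuit)(x2, weights, wires)
    proj0 = np.zeros((2 ** n_wires, 2 ** n_wires))
    proj0[0, 0] = 1.0
    return qml.expval(qml.Hermitian(proj0, wires=wires))


# Classify a new point by its mean overlap with the embedded points of each class.
X = np.loadtxt("Fourier_analysis/1d-QAOA-Fourier/X_1d_sep.txt")
Y = np.loadtxt("Fourier_analysis/1d-QAOA-Fourier/Y_1d_sep.txt")
weights = np.array([[0.1]])  # shape (n_layers, n_wires); untrained placeholder values
x_new = 0.2

score_A = np.mean([overlap(a, x_new, weights) for a in X[Y == 1]])
score_B = np.mean([overlap(b, x_new, weights) for b in X[Y == -1]])
prediction = 1 if score_A > score_B else -1
print(prediction)
```

In the actual notebooks the embedding weights are trained first (by minimizing the Hilbert-Schmidt cost, the overlap cost, or the empirical risk) before this classification step; the sketch only shows how an embedding circuit and an overlap measurement fit together.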
27 | 28 | ## **References:** 29 | 30 | [1] Seth Lloyd, Maria Schuld, Aroosa Ijaz, Josh Izaac, and Nathan Killoran, “Quantum embeddings for machine learning,” arXiv e-prints, arXiv:2001.03622 (2020) 31 | -------------------------------------------------------------------------------- /Report/Readme.md: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /Report/Variational_Embedding_in_QML.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mudassirmoosa/variational_embedding_circuits/a39d71990daa637d6d4c2c3dc6b53b3b69169818/Report/Variational_Embedding_in_QML.pdf -------------------------------------------------------------------------------- /Simulation_of_Variational_Circuits/data/X_1d2_sep.txt: -------------------------------------------------------------------------------- 1 | 1.428386558165781750e+00 1.428386558165781750e+00 2 | -1.313839087028072727e+00 -1.313839087028072727e+00 3 | 1.596285966845742754e+00 1.596285966845742754e+00 4 | 1.367550496433965534e+00 1.367550496433965534e+00 5 | 1.468173134298068616e+00 1.468173134298068616e+00 6 | -1.454971615808443364e+00 -1.454971615808443364e+00 7 | 1.161811926465180944e+00 1.161811926465180944e+00 8 | -1.714610930998586413e+00 -1.714610930998586413e+00 9 | -1.327999547708873740e+00 -1.327999547708873740e+00 10 | 1.382642255659186192e+00 1.382642255659186192e+00 11 | 1.074868758557305348e+00 1.074868758557305348e+00 12 | 1.459529311267870444e+00 1.459529311267870444e+00 13 | 1.382177001267317351e+00 1.382177001267317351e+00 14 | -1.131668993147058488e+00 -1.131668993147058488e+00 15 | -1.248997409178646656e+00 -1.248997409178646656e+00 16 | -1.322204908516744437e+00 -1.322204908516744437e+00 17 | -1.464985657964124277e+00 -1.464985657964124277e+00 18 | -1.454739026129264534e+00 -1.454739026129264534e+00 19 | 1.557625966631448922e+00 1.557625966631448922e+00 20 | -1.401922341642383385e+00 -1.401922341642383385e+00 21 | -3.593397378255333563e-01 -3.593397378255333563e-01 22 | -6.950145572910067064e-03 -6.950145572910067064e-03 23 | 1.138715720991759078e-01 1.138715720991759078e-01 24 | -1.704154411162837890e-01 -1.704154411162837890e-01 25 | -1.840487346705927951e-01 -1.840487346705927951e-01 26 | 2.641666606852424715e-01 2.641666606852424715e-01 27 | 3.363208579927841058e-01 3.363208579927841058e-01 28 | 1.640524890397370150e-01 1.640524890397370150e-01 29 | -1.572146825398952896e-01 -1.572146825398952896e-01 30 | -2.091069596104288553e-01 -2.091069596104288553e-01 31 | -1.205649127814735350e-01 -1.205649127814735350e-01 32 | -2.119293925672509904e-01 -2.119293925672509904e-01 33 | 1.764063617893480984e-01 1.764063617893480984e-01 34 | -5.099904663363319379e-01 -5.099904663363319379e-01 35 | 2.850906880823069756e-01 2.850906880823069756e-01 36 | 2.240966054048715017e-01 2.240966054048715017e-01 37 | 3.109652609661974765e-01 3.109652609661974765e-01 38 | 1.891447943386029285e-01 1.891447943386029285e-01 39 | -2.009277177789230429e-01 -2.009277177789230429e-01 40 | 2.326104293168703568e-02 2.326104293168703568e-02 41 | -------------------------------------------------------------------------------- /Simulation_of_Variational_Circuits/data/X_1d_sep.txt: -------------------------------------------------------------------------------- 1 | 1.428386558165781750e+00 2 | -1.313839087028072727e+00 3 | 1.596285966845742754e+00 4 | 1.367550496433965534e+00 5 | 
1.468173134298068616e+00 6 | -1.454971615808443364e+00 7 | 1.161811926465180944e+00 8 | -1.714610930998586413e+00 9 | -1.327999547708873740e+00 10 | 1.382642255659186192e+00 11 | 1.074868758557305348e+00 12 | 1.459529311267870444e+00 13 | 1.382177001267317351e+00 14 | -1.131668993147058488e+00 15 | -1.248997409178646656e+00 16 | -1.322204908516744437e+00 17 | -1.464985657964124277e+00 18 | -1.454739026129264534e+00 19 | 1.557625966631448922e+00 20 | -1.401922341642383385e+00 21 | -3.593397378255333563e-01 22 | -6.950145572910067064e-03 23 | 1.138715720991759078e-01 24 | -1.704154411162837890e-01 25 | -1.840487346705927951e-01 26 | 2.641666606852424715e-01 27 | 3.363208579927841058e-01 28 | 1.640524890397370150e-01 29 | -1.572146825398952896e-01 30 | -2.091069596104288553e-01 31 | -1.205649127814735350e-01 32 | -2.119293925672509904e-01 33 | 1.764063617893480984e-01 34 | -5.099904663363319379e-01 35 | 2.850906880823069756e-01 36 | 2.240966054048715017e-01 37 | 3.109652609661974765e-01 38 | 1.891447943386029285e-01 39 | -2.009277177789230429e-01 40 | 2.326104293168703568e-02 41 | -------------------------------------------------------------------------------- /Simulation_of_Variational_Circuits/data/X_1d_sep_test.txt: -------------------------------------------------------------------------------- 1 | -1.502022138572153942e+00 2 | -1.386991641751017079e+00 3 | -1.528618084786739661e+00 4 | -1.088755555917180873e+00 5 | -1.471301021290353672e+00 6 | -1.461588412590179464e+00 7 | 1.429677174205972623e+00 8 | 1.360432830293142992e+00 9 | -1.419294948993901961e+00 10 | 1.540457780535192001e+00 11 | 1.018791557505601020e+00 12 | 1.293995293793449353e+00 13 | 1.262704454682892274e+00 14 | 1.466641663729284728e+00 15 | -1.400975254756609667e+00 16 | -1.419264228719061549e+00 17 | -1.212445788712345651e+00 18 | 1.580624158715746796e+00 19 | 1.441849547221183636e+00 20 | 1.547659636215666046e+00 21 | 1.740857187057834321e-01 22 | -1.462874877528895223e-01 23 | 1.471075095574518043e-01 24 | -1.877183282493123662e-01 25 | -3.391350206112936361e-01 26 | -2.637586120222375485e-01 27 | -2.567681514566488299e-01 28 | -3.163121581697344586e-01 29 | 1.004454473497733702e-01 30 | -8.340919507643049235e-02 31 | 1.776783014515835590e-01 32 | 1.502632412122923145e-01 33 | 1.051879569695972488e-01 34 | 4.325758418301889896e-01 35 | 7.934482190713236516e-02 36 | 1.631216743049007378e-01 37 | -1.392934823066256100e-01 38 | -1.181091585569131325e-01 39 | 1.915980915510041849e-01 40 | 7.780596855378824062e-02 41 | -------------------------------------------------------------------------------- /Simulation_of_Variational_Circuits/data/X_2d_sep.txt: -------------------------------------------------------------------------------- 1 | 1.045671136079457497e+00 1.489629225136359647e+00 2 | -1.014353317677219701e+00 -1.819879464760940202e+00 3 | -1.620163581427086275e+00 -1.743044995833967992e+00 4 | -2.033001512661398724e-01 5.273969534423614824e-01 5 | -4.826002057748022267e-01 7.146205318269012174e-01 6 | 1.585109274820189373e+00 1.872635225428825434e+00 7 | 1.958595991965283778e+00 1.500819211473782389e+00 8 | 1.779890808901490207e+00 1.970806962445840416e+00 9 | -9.344408182707302224e-01 7.076329427186704990e-01 10 | -7.284736653373933279e-01 6.268006640244656591e-01 11 | 7.448260876453196566e-01 -3.632751267428104835e-01 12 | 1.110783591479933552e+00 1.075478191652501803e+00 13 | 6.960976754591372551e-01 -2.787737346344110190e-01 14 | 1.467973924218323400e+00 1.642882209981819219e+00 15 | 1.950324962704873322e+00 
1.247272424892533094e+00 16 | 3.618807928239857752e-01 1.940513291174241761e-01 17 | 1.278558294522147420e-01 4.331989587097935246e-01 18 | -1.885746510344320725e+00 -1.091229767708427723e+00 19 | 5.209228842700928119e-01 4.372237635977338499e-02 20 | 1.492267825702193740e-01 6.159393766339980036e-01 21 | 1.673464365864987480e+00 1.124420650231359842e+00 22 | -1.445074914310983116e+00 -1.410136601293266123e+00 23 | -1.009857661378553262e+00 -1.189774484277745570e+00 24 | -1.349573402784390197e+00 -1.744301524414649140e+00 25 | 1.136249061510725689e-01 -2.842676871669249650e-01 26 | -1.199764248617296580e+00 -1.237249906654789733e+00 27 | 1.530579239086239163e+00 1.875545419788785306e+00 28 | -5.874252458162845247e-01 -4.888207504219350685e-01 29 | -1.063391885365539080e+00 -1.963186176700124852e+00 30 | 1.252086703567543591e+00 1.908226698807007793e+00 31 | 2.384404205810397581e-01 4.966997067951515188e-01 32 | -1.542040514934004847e+00 -1.420780343675349222e+00 33 | 8.168175207648669112e-01 8.359092305459230321e-01 34 | -1.013304750704408619e+00 -1.799977978252482735e+00 35 | 2.302515037990058300e-01 -1.952147865980953245e-01 36 | 1.243226556939644567e+00 1.598749996977127275e+00 37 | 1.979895906054846222e+00 1.321361172328829436e+00 38 | 8.370816133349125998e-01 -2.476779249247342829e-01 39 | 1.478682247093415558e+00 1.292816676842798440e+00 40 | 2.632769816078437408e-01 5.617972994476023718e-01 41 | -------------------------------------------------------------------------------- /Simulation_of_Variational_Circuits/data/X_2d_sep_test.txt: -------------------------------------------------------------------------------- 1 | -1.502022138572153942e+00 -1.502022138572153942e+00 2 | -1.386991641751017079e+00 -1.386991641751017079e+00 3 | -1.528618084786739661e+00 -1.528618084786739661e+00 4 | -1.088755555917180873e+00 -1.088755555917180873e+00 5 | -1.471301021290353672e+00 -1.471301021290353672e+00 6 | -1.461588412590179464e+00 -1.461588412590179464e+00 7 | 1.429677174205972623e+00 1.429677174205972623e+00 8 | 1.360432830293142992e+00 1.360432830293142992e+00 9 | -1.419294948993901961e+00 -1.419294948993901961e+00 10 | 1.540457780535192001e+00 1.540457780535192001e+00 11 | 1.018791557505601020e+00 1.018791557505601020e+00 12 | 1.293995293793449353e+00 1.293995293793449353e+00 13 | 1.262704454682892274e+00 1.262704454682892274e+00 14 | 1.466641663729284728e+00 1.466641663729284728e+00 15 | -1.400975254756609667e+00 -1.400975254756609667e+00 16 | -1.419264228719061549e+00 -1.212445788712345651e+00 17 | -1.212445788712345651e+00 -1.419264228719061549e+00 18 | 1.580624158715746796e+00 1.441849547221183636e+00 19 | 1.441849547221183636e+00 1.580624158715746796e+00 20 | 1.547659636215666046e+00 1.740857187057834321e-01 21 | 1.740857187057834321e-01 1.547659636215666046e+00 22 | -1.462874877528895223e-01 -1.877183282493123662e-01 23 | 1.471075095574518043e-01 1.471075095574518043e-01 24 | -1.877183282493123662e-01 -1.462874877528895223e-01 25 | -3.391350206112936361e-01 -3.391350206112936361e-01 26 | -2.637586120222375485e-01 -2.637586120222375485e-01 27 | -2.567681514566488299e-01 -3.163121581697344586e-01 28 | -3.163121581697344586e-01 -2.567681514566488299e-01 29 | 1.004454473497733702e-01 1.004454473497733702e-01 30 | -8.340919507643049235e-02 -8.340919507643049235e-02 31 | 1.776783014515835590e-01 1.776783014515835590e-01 32 | 1.502632412122923145e-01 1.502632412122923145e-01 33 | 1.051879569695972488e-01 1.051879569695972488e-01 34 | 4.325758418301889896e-01 4.325758418301889896e-01 35 | 
7.934482190713236516e-02 7.934482190713236516e-02 36 | 1.631216743049007378e-01 1.631216743049007378e-01 37 | -1.392934823066256100e-01 -1.392934823066256100e-01 38 | -1.181091585569131325e-01 -1.181091585569131325e-01 39 | 1.915980915510041849e-01 1.915980915510041849e-01 40 | 7.780596855378824062e-02 7.780596855378824062e-02 41 | -------------------------------------------------------------------------------- /Simulation_of_Variational_Circuits/data/Y_1d_sep.txt: -------------------------------------------------------------------------------- 1 | -1.000000000000000000e+00 2 | -1.000000000000000000e+00 3 | -1.000000000000000000e+00 4 | -1.000000000000000000e+00 5 | -1.000000000000000000e+00 6 | -1.000000000000000000e+00 7 | -1.000000000000000000e+00 8 | -1.000000000000000000e+00 9 | -1.000000000000000000e+00 10 | -1.000000000000000000e+00 11 | -1.000000000000000000e+00 12 | -1.000000000000000000e+00 13 | -1.000000000000000000e+00 14 | -1.000000000000000000e+00 15 | -1.000000000000000000e+00 16 | -1.000000000000000000e+00 17 | -1.000000000000000000e+00 18 | -1.000000000000000000e+00 19 | -1.000000000000000000e+00 20 | -1.000000000000000000e+00 21 | 1.000000000000000000e+00 22 | 1.000000000000000000e+00 23 | 1.000000000000000000e+00 24 | 1.000000000000000000e+00 25 | 1.000000000000000000e+00 26 | 1.000000000000000000e+00 27 | 1.000000000000000000e+00 28 | 1.000000000000000000e+00 29 | 1.000000000000000000e+00 30 | 1.000000000000000000e+00 31 | 1.000000000000000000e+00 32 | 1.000000000000000000e+00 33 | 1.000000000000000000e+00 34 | 1.000000000000000000e+00 35 | 1.000000000000000000e+00 36 | 1.000000000000000000e+00 37 | 1.000000000000000000e+00 38 | 1.000000000000000000e+00 39 | 1.000000000000000000e+00 40 | 1.000000000000000000e+00 41 | -------------------------------------------------------------------------------- /Simulation_of_Variational_Circuits/data/Y_1d_sep_test.txt: -------------------------------------------------------------------------------- 1 | -1.000000000000000000e+00 2 | -1.000000000000000000e+00 3 | -1.000000000000000000e+00 4 | -1.000000000000000000e+00 5 | -1.000000000000000000e+00 6 | -1.000000000000000000e+00 7 | -1.000000000000000000e+00 8 | -1.000000000000000000e+00 9 | -1.000000000000000000e+00 10 | -1.000000000000000000e+00 11 | -1.000000000000000000e+00 12 | -1.000000000000000000e+00 13 | -1.000000000000000000e+00 14 | -1.000000000000000000e+00 15 | -1.000000000000000000e+00 16 | -1.000000000000000000e+00 17 | -1.000000000000000000e+00 18 | -1.000000000000000000e+00 19 | -1.000000000000000000e+00 20 | -1.000000000000000000e+00 21 | 1.000000000000000000e+00 22 | 1.000000000000000000e+00 23 | 1.000000000000000000e+00 24 | 1.000000000000000000e+00 25 | 1.000000000000000000e+00 26 | 1.000000000000000000e+00 27 | 1.000000000000000000e+00 28 | 1.000000000000000000e+00 29 | 1.000000000000000000e+00 30 | 1.000000000000000000e+00 31 | 1.000000000000000000e+00 32 | 1.000000000000000000e+00 33 | 1.000000000000000000e+00 34 | 1.000000000000000000e+00 35 | 1.000000000000000000e+00 36 | 1.000000000000000000e+00 37 | 1.000000000000000000e+00 38 | 1.000000000000000000e+00 39 | 1.000000000000000000e+00 40 | 1.000000000000000000e+00 41 | -------------------------------------------------------------------------------- /Simulation_of_Variational_Circuits/data/Y_2d_sep.txt: -------------------------------------------------------------------------------- 1 | 1.000000000000000000e+00 2 | 1.000000000000000000e+00 3 | 1.000000000000000000e+00 4 | -1.000000000000000000e+00 5 | 
-1.000000000000000000e+00 6 | 1.000000000000000000e+00 7 | 1.000000000000000000e+00 8 | 1.000000000000000000e+00 9 | -1.000000000000000000e+00 10 | -1.000000000000000000e+00 11 | -1.000000000000000000e+00 12 | 1.000000000000000000e+00 13 | -1.000000000000000000e+00 14 | 1.000000000000000000e+00 15 | 1.000000000000000000e+00 16 | -1.000000000000000000e+00 17 | -1.000000000000000000e+00 18 | 1.000000000000000000e+00 19 | -1.000000000000000000e+00 20 | -1.000000000000000000e+00 21 | 1.000000000000000000e+00 22 | 1.000000000000000000e+00 23 | 1.000000000000000000e+00 24 | 1.000000000000000000e+00 25 | -1.000000000000000000e+00 26 | 1.000000000000000000e+00 27 | 1.000000000000000000e+00 28 | -1.000000000000000000e+00 29 | 1.000000000000000000e+00 30 | 1.000000000000000000e+00 31 | -1.000000000000000000e+00 32 | 1.000000000000000000e+00 33 | -1.000000000000000000e+00 34 | 1.000000000000000000e+00 35 | -1.000000000000000000e+00 36 | 1.000000000000000000e+00 37 | 1.000000000000000000e+00 38 | -1.000000000000000000e+00 39 | 1.000000000000000000e+00 40 | -1.000000000000000000e+00 41 | -------------------------------------------------------------------------------- /Simulation_of_Variational_Circuits/data/Y_2d_sep_test.txt: -------------------------------------------------------------------------------- 1 | -1.000000000000000000e+00 2 | -1.000000000000000000e+00 3 | -1.000000000000000000e+00 4 | -1.000000000000000000e+00 5 | -1.000000000000000000e+00 6 | -1.000000000000000000e+00 7 | -1.000000000000000000e+00 8 | -1.000000000000000000e+00 9 | -1.000000000000000000e+00 10 | -1.000000000000000000e+00 11 | -1.000000000000000000e+00 12 | -1.000000000000000000e+00 13 | -1.000000000000000000e+00 14 | -1.000000000000000000e+00 15 | -1.000000000000000000e+00 16 | -1.000000000000000000e+00 17 | -1.000000000000000000e+00 18 | -1.000000000000000000e+00 19 | -1.000000000000000000e+00 20 | -1.000000000000000000e+00 21 | 1.000000000000000000e+00 22 | 1.000000000000000000e+00 23 | 1.000000000000000000e+00 24 | 1.000000000000000000e+00 25 | 1.000000000000000000e+00 26 | 1.000000000000000000e+00 27 | 1.000000000000000000e+00 28 | 1.000000000000000000e+00 29 | 1.000000000000000000e+00 30 | 1.000000000000000000e+00 31 | 1.000000000000000000e+00 32 | 1.000000000000000000e+00 33 | 1.000000000000000000e+00 34 | 1.000000000000000000e+00 35 | 1.000000000000000000e+00 36 | 1.000000000000000000e+00 37 | 1.000000000000000000e+00 38 | 1.000000000000000000e+00 39 | 1.000000000000000000e+00 40 | 1.000000000000000000e+00 41 | -------------------------------------------------------------------------------- /Simulation_of_Variational_Circuits/featuremaps.py: -------------------------------------------------------------------------------- 1 | """ 2 | Feature maps 3 | ************ 4 | 5 | This module contains feature maps. Each feature map function 6 | takes an input vector x and weights, and constructs a circuit that maps 7 | these two to a quantum state. The feature map function can be called in a qnode. 8 | 9 | A feature map has the following positional arguments: weights, x, wires. It can have optional 10 | keyword arguments. 11 | 12 | Each feature map comes with a function that generates initial parameters 13 | for that particular feature map. 
14 | """ 15 | import numpy as np 16 | import pennylane as qml 17 | 18 | 19 | def _entanglerZ(w_, w1, w2): 20 | qml.CNOT(wires=[w2, w1]) 21 | qml.RZ(2*w_, wires=w1) 22 | qml.CNOT(wires=[w2, w1]) 23 | 24 | 25 | 26 | def qaoa(weights, x, wires, n_layers=1, circuit_ID = 1): 27 | """ 28 | 1-d Ising-coupling QAOA feature map, according to arXiv1812.11075. 29 | 30 | Example one layer, 4 wires, 2 inputs: 31 | 32 | |0> - R_x(x1) - |^| -------- |_| - R_y(w7) - 33 | |0> - R_x(x2) - |_|-|^| ---------- R_y(w8) - 34 | |0> - ___H___ ------|_|-|^| ------ R_y(w9) - 35 | |0> - ___H___ ----------|_| -|^| - R_y(w10) - 36 | 37 | After the last layer, another block of R_x(x_i) rotations is applied. 38 | 39 | :param weights: trainable weights of shape 2*n_layers*n_wires 40 | :param 1d x: input, len(x) is <= len(wires) 41 | :param wires: list of wires on which the feature map acts 42 | :param n_layers: number of repetitions of the first layer 43 | """ 44 | n_wires = len(wires) 45 | 46 | if n_wires == 1: 47 | n_weights_needed = n_layers 48 | elif n_wires == 2: 49 | n_weights_needed = 3 * n_layers 50 | else: 51 | n_weights_needed = 2 * n_wires * n_layers 52 | 53 | if len(x) > n_wires: 54 | raise ValueError("Feat map can encode at most {} features (which is the " 55 | "number of wires), got {}.".format(n_wires, len(x))) 56 | 57 | if len(weights) != n_weights_needed: 58 | raise ValueError("Feat map needs {} weights, got {}." 59 | .format(n_weights_needed, len(weights))) 60 | 61 | for l in range(n_layers): 62 | 63 | # inputs 64 | for i in range(n_wires): 65 | # Either feed in feature 66 | if i < len(x): 67 | if circuit_ID == 1: 68 | qml.RX(x[i], wires=wires[i]) 69 | elif circuit_ID == 2: 70 | qml.RY(x[i], wires=wires[i]) 71 | # or a Hadamard 72 | else: 73 | qml.Hadamard(wires=wires[i]) 74 | 75 | # 1-d nearest neighbour coupling 76 | if n_wires == 1: 77 | if circuit_ID == 1: 78 | qml.RY(weights[l], wires=wires[0]) 79 | elif circuit_ID == 2: 80 | qml.RX(weights[l], wires=wires[0]) 81 | 82 | elif n_wires == 2: 83 | _entanglerZ(weights[l * 3 + 2], wires[0], wires[1]) 84 | # local fields 85 | for i in range(n_wires): 86 | if circuit_ID == 1: 87 | qml.RY(weights[l * 3 + i], wires=wires[i]) 88 | elif circuit_ID == 2: 89 | qml.RX(weights[l * 3 + i], wires=wires[i]) 90 | else: 91 | for i in range(n_wires): 92 | if i < n_wires-1: 93 | _entanglerZ(weights[l * 2 * n_wires + i], wires[i], wires[i + 1]) 94 | else: 95 | # enforce periodic boundary condition 96 | _entanglerZ(weights[l * 2 * n_wires + i], wires[i], wires[0]) 97 | # local fields 98 | for i in range(n_wires): 99 | if circuit_ID == 1: 100 | qml.RY(weights[l * 2 * n_wires + n_wires + i], wires=wires[i]) 101 | elif circuit_ID == 2: 102 | qml.RX(weights[l * 2 * n_wires + n_wires + i], wires=wires[i]) 103 | 104 | # repeat feature encoding once more at the end 105 | for i in range(n_wires): 106 | # Either feed in feature 107 | if i < len(x): 108 | if circuit_ID == 1: 109 | qml.RX(x[i], wires=wires[i]) 110 | elif circuit_ID == 2: 111 | qml.RY(x[i], wires=wires[i]) 112 | # or a Hadamard 113 | else: 114 | qml.Hadamard(wires=wires[i]) 115 | 116 | 117 | 118 | def pars_qaoa(n_wires, n_layers=1): 119 | """ 120 | Initial weight generator for 1-d qaoa feature map 121 | :param n_wires: number of wires 122 | :param n_layers: number of layers 123 | :return: array of weights 124 | """ 125 | if n_wires == 1: 126 | return 0.001*np.ones(n_layers) 127 | elif n_wires == 2: 128 | return 0.001 * np.ones(n_layers * 3) 129 | elif n_wires == 4: 130 | return 0.001 * np.ones(n_wires * n_layers * 2) 
131 | return 0.001*np.ones(n_layers * n_wires * 2) 132 | 133 | 134 | 135 | 136 | def shallow_circuit(weights, x, wires, n_layers=1,circuit_ID=1): 137 | """ 138 | Circuits are designed based on paper arXiv:1905.10876. 139 | 140 | Example one layer, 4 wires, 2 inputs: 141 | 142 | |0> - R_x(x1) - |^| -------- |_| - R_y(w5) - 143 | |0> - R_x(x2) - |_|-|^| ---------- R_y(w6) - 144 | |0> - ___H___ ------|_|-|^| ------ R_y(w7) - 145 | |0> - ___H___ ----------|_| -|^| - R_y(w8) - 146 | 147 | After the last layer, another block of R_x(x_i) rotations is applied. 148 | 149 | :param weights: trainable weights of shape 2*n_layers*n_wires 150 | :param 1d x: input, len(x) is <= len(wires) 151 | :param wires: list of wires on which the feature map acts 152 | :param n_layers: number of repetitions of the first layer 153 | :param circuit_ID: the ID of the circuit based on 154 | """ 155 | n_wires = len(wires) 156 | 157 | if n_wires == 1: 158 | n_weights_needed = n_layers 159 | elif n_wires == 2: 160 | n_weights_needed = 3 * n_layers 161 | else: 162 | n_weights_needed = 2 * n_wires * n_layers 163 | 164 | if len(x) > n_wires: 165 | raise ValueError("Feat map can encode at most {} features (which is the " 166 | "number of wires), got {}.".format(n_wires, len(x))) 167 | 168 | if len(weights) != n_weights_needed: 169 | raise ValueError("Feat map needs {} weights, got {}." 170 | .format(n_weights_needed, len(weights))) 171 | 172 | for l in range(n_layers): 173 | 174 | # inputs 175 | for i in range(n_wires): 176 | # Either feed in feature 177 | if i < len(x): 178 | if circuit_ID == 18 or circuit_ID == 19: 179 | qml.RX(x[i], wires=wires[i]) 180 | 181 | elif circuit_ID == 11 or circuit_ID == 12: 182 | qml.RY(x[i], wires=wires[i]) 183 | else: 184 | raise ValueError("Wrong circuit_ID: It should be between 1-19, got {}.".format(circuit_ID)) 185 | else: 186 | qml.Hadamard(wires=wires[i]) 187 | 188 | # 1-d nearest neighbour coupling 189 | if n_wires == 1: 190 | if circuit_ID == 18 or circuit_ID == 19: 191 | qml.RZ(weights[l], wires=wires[0]) 192 | 193 | elif n_wires == 2: 194 | # local fields 195 | for i in range(n_wires): 196 | if circuit_ID == 18 or circuit_ID == 19: 197 | qml.RZ(weights[l * 3 + i], wires=wires[i]) 198 | else: 199 | raise ValueError("Wrong circuit_ID: It should be between 1-19, got {}.".format(circuit_ID)) 200 | if circuit_ID == 18: 201 | qml.CRZ(weights[l * 3 + 2], wires=[wires[1], wires[0]]) 202 | elif circuit_ID == 19: 203 | qml.CRX(weights[l * 3 + 2], wires=[wires[1], wires[0]]) 204 | else: 205 | # local fields 206 | for i in range(n_wires): 207 | if circuit_ID == 18 or circuit_ID == 19: 208 | qml.RZ(weights[l * 2 * n_wires + i], wires=wires[i]) 209 | 210 | for i in range(n_wires): 211 | if i == 0: 212 | if circuit_ID == 18: 213 | qml.CRZ(weights[l * 2 * n_wires + n_wires + i], wires=[wires[n_wires-1], wires[0]]) 214 | elif circuit_ID == 19: 215 | qml.CRX(weights[l * 2 * n_wires + n_wires + i], wires=[wires[n_wires-1], wires[0]]) 216 | elif i < n_wires-1: 217 | if circuit_ID == 18: 218 | qml.CRZ(weights[l * 2 * n_wires + n_wires + i], wires=[wires[i], wires[i + 1]]) 219 | elif circuit_ID == 19: 220 | qml.CRX(weights[l * 2 * n_wires + n_wires + i], wires=[wires[i], wires[i + 1]]) 221 | 222 | # repeat feature encoding once more at the end 223 | for i in range(n_wires): 224 | # Either feed in feature 225 | if i < len(x): 226 | if circuit_ID == 18 or circuit_ID == 19: 227 | qml.RX(x[i], wires=wires[i]) 228 | # or a Hadamard 229 | else: 230 | qml.Hadamard(wires=wires[i]) 231 | 232 | 233 | 234 | def 
HVA_XXZ(weights, x, wires, n_layers=1): 235 | """ 236 | 1-d Ising-coupling QAOA feature map, according to arXiv1812.11075. 237 | 238 | :param weights: trainable weights of shape 2*n_layers*n_wires 239 | :param 1d x: input, len(x) is <= len(wires) 240 | :param wires: list of wires on which the feature map acts 241 | :param n_layers: number of repetitions of the first layer 242 | """ 243 | n_wires = len(wires) 244 | 245 | if n_wires == 1: 246 | n_weights_needed = n_layers 247 | elif n_wires == 2: 248 | n_weights_needed = 3 * n_layers 249 | else: 250 | n_weights_needed = 2 * n_wires * n_layers 251 | 252 | if len(x) > n_wires: 253 | raise ValueError("Feat map can encode at most {} features (which is the " 254 | "number of wires), got {}.".format(n_wires, len(x))) 255 | 256 | if len(weights) != n_weights_needed: 257 | raise ValueError("Feat map needs {} weights, got {}." 258 | .format(n_weights_needed, len(weights))) 259 | 260 | for l in range(n_layers): 261 | 262 | # inputs 263 | for i in range(n_wires): 264 | # Either feed in feature 265 | if i < len(x): 266 | qml.RX(x[i], wires=wires[i]) 267 | # or a Hadamard 268 | else: 269 | qml.Hadamard(wires=wires[i]) 270 | 271 | # 1-d nearest neighbour coupling 272 | if n_wires == 1: 273 | qml.RY(weights[l], wires=wires[0]) 274 | elif n_wires == 2: 275 | _entanglerZ(weights[l * 3 + 2], wires[0], wires[1]) 276 | # local fields 277 | for i in range(n_wires): 278 | qml.RY(weights[l * 3 + i], wires=wires[i]) 279 | else: 280 | for i in range(n_wires): 281 | if i < n_wires-1: 282 | _entanglerZ(weights[l * 2 * n_wires + i], wires[i], wires[i + 1]) 283 | else: 284 | # enforce periodic boundary condition 285 | _entanglerZ(weights[l * 2 * n_wires + i], wires[i], wires[0]) 286 | # local fields 287 | for i in range(n_wires): 288 | qml.RY(weights[l * 2 * n_wires + n_wires + i], wires=wires[i]) 289 | 290 | # repeat feature encoding once more at the end 291 | for i in range(n_wires): 292 | # Either feed in feature 293 | if i < len(x): 294 | qml.RX(x[i], wires=wires[i]) 295 | # or a Hadamard 296 | else: 297 | qml.Hadamard(wires=wires[i]) 298 | 299 | def HVA_TFIM_2D_data(weights, x, wires, n_layers=1, types = 1): 300 | """ 301 | 1-d Ising-coupling HVA_TFIM feature map, according to 2008.02941v2.11075. 302 | 303 | :param weights: trainable weights of shape 2*n_layers*n_wires 304 | :param 1d x: input, len(x) is <= len(wires) 305 | :param wires: list of wires on which the feature map acts 306 | :param n_layers: number of repetitions of the first layer 307 | """ 308 | wires = range(0, 4) 309 | n_wires = len(wires) 310 | if types == 1: 311 | n_weights_needed = 4 * n_layers 312 | elif types == 2: 313 | n_weights_needed = 2 * n_layers 314 | else: 315 | n_weights_needed = 6 * n_layers 316 | 317 | if len(x) > n_wires: 318 | raise ValueError("Feat map can encode at most {} features (which is the " 319 | "number of wires), got {}.".format(n_wires, len(x))) 320 | 321 | if len(weights) != n_weights_needed: 322 | raise ValueError("Feat map needs {} weights, got {}." 
323 | .format(n_weights_needed, len(weights))) 324 | 325 | for l in range(n_layers): 326 | 327 | # inputs 328 | for i in range(n_wires): 329 | qml.Hadamard(wires=wires[i]) 330 | 331 | if types == 1: 332 | _entanglerZ(x[0], wires[0], wires[1]) 333 | _entanglerZ(x[1], wires[2], wires[3]) 334 | _entanglerZ(weights[l * 4 ], wires[0], wires[3]) 335 | _entanglerZ(weights[l * 4 + 1], wires[1], wires[2]) 336 | elif types == 2: 337 | _entanglerZ(weights[l * 2], wires[0], wires[1]) 338 | _entanglerZ(weights[l * 2], wires[2], wires[3]) 339 | _entanglerZ(weights[l * 2], wires[0], wires[3]) 340 | _entanglerZ(weights[l * 2], wires[1], wires[2]) 341 | else: 342 | _entanglerZ(weights[l * 6 ], wires[0], wires[1]) 343 | _entanglerZ(weights[l * 6 + 1], wires[2], wires[3]) 344 | _entanglerZ(weights[l * 6 + 2], wires[0], wires[3]) 345 | _entanglerZ(weights[l * 6 + 3], wires[1], wires[2]) 346 | 347 | # repeat feature encoding once more at the end 348 | # Either feed in feature 349 | qml.RX(x[0], wires=wires[0]) 350 | qml.RX(x[1], wires=wires[2]) 351 | if types == 1: 352 | qml.RX(weights[l * 4 + 2], wires=wires[1]) 353 | qml.RX(weights[l * 4 + 3], wires=wires[3]) 354 | elif types == 2: 355 | qml.RX(weights[l * 2 + 1], wires=wires[1]) 356 | qml.RX(weights[l * 2 + 1], wires=wires[3]) 357 | 358 | else: 359 | qml.RX(weights[l * 6 + 4], wires=wires[1]) 360 | qml.RX(weights[l * 6 + 5], wires=wires[3]) 361 | 362 | def HVA_TFIM_1D_data(weights, x, wires, n_layers=1, types = 1): 363 | """ 364 | 1-d Ising-coupling HVA_TFIM feature map, according to 2008.02941v2.11075. 365 | 366 | :param weights: trainable weights of shape 2*n_layers*n_wires 367 | :param 1d x: input, len(x) is <= len(wires) 368 | :param wires: list of wires on which the feature map acts 369 | :param n_layers: number of repetitions of the first layer 370 | """ 371 | wires = range(0, 4) 372 | n_wires = len(wires) 373 | if types == 1: 374 | n_weights_needed = 6 * n_layers # Data encoded via first and last layer 375 | elif types == 2: 376 | n_weights_needed = 2 * n_layers #all zz have same params, all rx have same params 377 | else: 378 | n_weights_needed = 7 * n_layers #encode layer just in last layer 379 | 380 | if len(x) > n_wires: 381 | raise ValueError("Feat map can encode at most {} features (which is the " 382 | "number of wires), got {}.".format(n_wires, len(x))) 383 | 384 | if len(weights) != n_weights_needed: 385 | raise ValueError("Feat map needs {} weights, got {}." 
386 | .format(n_weights_needed, len(weights))) 387 | 388 | for l in range(n_layers): 389 | 390 | # inputs 391 | for i in range(n_wires): 392 | qml.Hadamard(wires=wires[i]) 393 | 394 | if types == 1: 395 | _entanglerZ(x[0], wires[0], wires[1]) 396 | _entanglerZ(weights[l * 6 ], wires[2], wires[3]) 397 | _entanglerZ(weights[l * 6 + 1], wires[0], wires[3]) 398 | _entanglerZ(weights[l * 6 + 2], wires[1], wires[2]) 399 | elif types == 2: 400 | _entanglerZ(weights[l * 2], wires[0], wires[1]) 401 | _entanglerZ(weights[l * 2], wires[2], wires[3]) 402 | _entanglerZ(weights[l * 2], wires[0], wires[3]) 403 | _entanglerZ(weights[l * 2], wires[1], wires[2]) 404 | else: 405 | _entanglerZ(weights[l * 7 ], wires[0], wires[1]) 406 | _entanglerZ(weights[l * 7 + 1], wires[2], wires[3]) 407 | _entanglerZ(weights[l * 7 + 2], wires[0], wires[3]) 408 | _entanglerZ(weights[l * 7 + 3], wires[1], wires[2]) 409 | 410 | # repeat feature encoding once more at the end 411 | # Either feed in feature 412 | qml.RX(x[0], wires=wires[0]) 413 | if types == 1: 414 | 415 | qml.RX(weights[l * 6 + 3], wires=wires[1]) 416 | qml.RX(weights[l * 6 + 4], wires=wires[2]) 417 | qml.RX(weights[l * 6 + 5], wires=wires[3]) 418 | elif types == 2: 419 | qml.RX(weights[l * 2 + 1], wires=wires[1]) 420 | qml.RX(weights[l * 2 + 1], wires=wires[2]) 421 | qml.RX(weights[l * 2 + 1], wires=wires[3]) 422 | 423 | else: 424 | qml.RX(weights[l * 7 + 4], wires=wires[1]) 425 | qml.RX(weights[l * 7 + 5], wires=wires[1]) 426 | qml.RX(weights[l * 7 + 6], wires=wires[3]) 427 | 428 | def VQC(weights, x, wires, n_layers=1, types = 1): 429 | """ Circuits ID = 5, 6 in arXiv:1905.10876 paper 430 | :param weights: trainable weights of shape 2*n_layers*n_wires 431 | :param 1d x: input, len(x) is <= len(wires) 432 | :param wires: list of wires on which the feature map acts 433 | :param n_layers: number of repetitions of the first layer 434 | """ 435 | data_size = len(x) 436 | n_wires = len(wires) 437 | weights_each_layer = (n_wires*(n_wires+3) - 2*data_size) 438 | n_weights_needed = weights_each_layer * n_layers 439 | 440 | if len(x) > n_wires: 441 | raise ValueError("Feat map can encode at most {} features (which is the " 442 | "number of wires), got {}.".format(n_wires, len(x))) 443 | 444 | if len(weights) != n_weights_needed: 445 | raise ValueError("Feat map needs {} weights, got {}." 
446 | .format(n_weights_needed, len(weights))) 447 | 448 | for l in range(n_layers): 449 | 450 | # inputs 451 | for i in range(data_size): 452 | qml.RX(x[i], wires=wires[i]) 453 | 454 | for i in range(n_wires-data_size): 455 | qml.RX(weights[weights_each_layer*l+i], wires=wires[i+data_size]) 456 | 457 | for i in range(n_wires): 458 | qml.RZ(weights[weights_each_layer*l+n_wires-data_size+i], wires=wires[i]) 459 | 460 | for i in reversed(range(n_wires)): 461 | for j in reversed(range(n_wires)): 462 | if j == i: 463 | continue 464 | if types == 1: #type 6 in Aspuru's paper 465 | qml.CRX(weights[weights_each_layer*l+2*n_wires-data_size+i*(n_wires-1)+j],wires=[wires[i],wires[j]]) 466 | if types == 2: #type 5 in Aspuru's paper 467 | qml.CRZ(weights[weights_each_layer*l+2*n_wires-data_size+i*(n_wires-1)+j],wires=[wires[i],wires[j]]) 468 | 469 | 470 | for i in range(data_size): 471 | qml.RX(x[i], wires=wires[i]) 472 | 473 | for i in range(n_wires-data_size): 474 | qml.RX(weights[weights_each_layer*l+n_wires*(n_wires+1)-data_size+i], wires=wires[i+data_size]) 475 | 476 | for i in range(n_wires): 477 | qml.RZ(weights[weights_each_layer*l+n_wires*(n_wires+2)-2*data_size+i], wires=wires[i]) 478 | 479 | 480 | def pars_HVA(n_layers=1,types=1): 481 | """ 482 | Initial weight generator for 1-d qaoa feature map 483 | :param n_wires: number of wires 484 | :param n_layers: number of layers 485 | :return: array of weights 486 | """ 487 | if types == 1: 488 | return 0.001*np.ones(n_layers * 4) 489 | elif types == 2: 490 | return 0.001*np.ones(n_layers * 2) 491 | else: 492 | return 0.001*np.ones(n_layers * 6) 493 | 494 | def pars_HVA_TFIM_1D_data(n_layers=1,types=1): 495 | """ 496 | Initial weight generator for 1-d qaoa feature map 497 | :param n_wires: number of wires 498 | :param n_layers: number of layers 499 | :return: array of weights 500 | """ 501 | if types == 1: 502 | return 0.001*np.ones(n_layers * 6) 503 | elif types == 2: 504 | return 0.001*np.ones(n_layers * 2) 505 | else: 506 | return 0.001*np.ones(n_layers * 7) 507 | 508 | def pars_VQC(x_dim, n_wires, n_layers=1, types = 1): 509 | 510 | weights_each_layer = (n_wires*(n_wires+3) - 2*x_dim) 511 | 512 | return np.random.uniform(0,2*np.pi)*np.ones(n_layers * weights_each_layer) 513 | 514 | 515 | 516 | -------------------------------------------------------------------------------- /Simulation_of_Variational_Circuits/fidelity.py: -------------------------------------------------------------------------------- 1 | """ 2 | Fidelity classifier 3 | =================== 4 | 5 | Implements the fidelity classifier. 6 | 7 | ``predict()`` returns the predicted label or continuous output for a new input 8 | ``accuracy()`` returns the accuracy on a test set 9 | 10 | The 'exact' implementation computes overlap of ket vectors numerically. 11 | The 'circuit' implementation performs a swap test on all data pairs. 12 | 13 | """ 14 | import pennylane as qml 15 | from pennylane import numpy as np 16 | import dill as pickle # to load featuremap 17 | 18 | 19 | def negate(item): 20 | if isinstance(item, list): 21 | return [-i for i in item] 22 | else: 23 | return -item 24 | 25 | 26 | def cphase_inv(k): 27 | gate = [[1, 0, 0, 0], 28 | [0, 1, 0, 0], 29 | [0, 0, 1, 0], 30 | [0, 0, 0, np.exp(np.complex(0, -2*np.pi/2**k))]] 31 | return np.array(gate) 32 | 33 | 34 | def _fast(x_new, A_samples, B_samples, featmap, pars, n_inp): 35 | """ 36 | Implements the fidelity measurement circuit using the "overlap with 0" trick. 
37 | """ 38 | # Allocate registers 39 | dev = qml.device('default.qubit', wires=n_inp) 40 | # Identify input register wires 41 | wires = list(range(n_inp)) 42 | Proj0 = np.zeros((2**n_inp, 2**n_inp)) 43 | Proj0[0, 0] = 1 44 | 45 | @qml.qnode(dev) 46 | def circuit(weights, x1=None, x2=None): 47 | # Apply embedding 48 | featmap(weights, x1, wires) 49 | # Apply inverse embedding 50 | featmap(negate(weights), negate(x2), wires) 51 | # Measure overlap with |0..0> 52 | return qml.expval(qml.Hermitian(Proj0, wires=wires)) 53 | 54 | # Compute mean overlap with A 55 | overlap_A = 0 56 | for a in A_samples: 57 | overlap_A += circuit(pars, x1=a, x2=x_new) 58 | overlap_A = overlap_A/len(A_samples) 59 | 60 | # Compute mean overlap with B 61 | overlap_B = 0 62 | for b in B_samples: 63 | overlap_B += circuit(pars, x1=b, x2=x_new) 64 | overlap_B = overlap_B/len(B_samples) 65 | 66 | return overlap_A, overlap_B 67 | 68 | 69 | def _circuit(x_new, A_samples, B_samples, featmap, pars, n_inp): 70 | """ 71 | Implements the fidelity measurement circuit using samples of class A and B. 72 | """ 73 | # Allocate registers 74 | n_qubits = 2*n_inp + 1 # Total number of qubits 75 | dev = qml.device('default.qubit', wires=n_qubits) 76 | # Identify input register wires 77 | wires_x1 = list(range(1, n_inp+1)) 78 | wires_x2 = list(range(n_inp+1, 2*n_inp+1)) 79 | 80 | @qml.qnode(dev) 81 | def circuit(weights, x1=None, x2=None): 82 | # Load the two inputs into two different registers 83 | featmap(weights, x1, wires_x1) 84 | featmap(weights, x2, wires_x2) 85 | 86 | # Do a SWAP test 87 | qml.Hadamard(wires=0) 88 | for k in range(n_inp): 89 | qml.CSWAP(wires=[0, k + 1, n_inp + k + 1]) 90 | qml.Hadamard(wires=0) 91 | 92 | # Measure overlap by checking ancilla 93 | return qml.expval(qml.PauliZ(0)) 94 | 95 | # Compute mean overlap with A 96 | overlap_A = 0 97 | for a in A_samples: 98 | overlap_A += circuit(pars, x1=a, x2=x_new) 99 | overlap_A = overlap_A/len(A_samples) 100 | 101 | # Compute mean overlap with B 102 | overlap_B = 0 103 | for b in B_samples: 104 | overlap_B += circuit(pars, x1=b, x2=x_new) 105 | overlap_B = overlap_B/len(B_samples) 106 | 107 | return overlap_A, overlap_B 108 | 109 | 110 | def _exact(x_new, A_samples, B_samples, featmap, n_inp, pars): 111 | """Calculates the analytical result of the fidelity measurement, 112 | 113 | overlap_A = \sum_i p_A |<\phi(x_new)|\phi(a_i)>|^2, 114 | overlap_B = \sum_i p_B |<\phi(x_new)|\phi(b_i)>|^2, 115 | 116 | using numpy as well as pennylane to simulate the feature map. 
117 | """ 118 | 119 | dev = qml.device('default.qubit', wires=n_inp) 120 | 121 | @qml.qnode(dev) 122 | def fm(weights, x=None): 123 | """Circuit to get the state after feature map""" 124 | featmap(weights, x, range(n_inp)) 125 | return qml.expval(qml.PauliZ(0)) 126 | 127 | # Compute feature states for A 128 | A_states = [] 129 | for a in A_samples: 130 | fm(pars, x=a) 131 | phi_a = dev._state 132 | A_states.append(phi_a) 133 | 134 | # Compute feature states for B 135 | B_states = [] 136 | for b in B_samples: 137 | fm(pars, x=b) 138 | phi_b = dev._state 139 | B_states.append(phi_b) 140 | 141 | # Get feature state for new input 142 | fm(pars, x=x_new) 143 | phi_x = dev._state 144 | 145 | # Put together 146 | overlap_A = sum([np.abs(np.vdot(phi_x, phi_a)) ** 2 for phi_a in A_states]) 147 | overlap_A = overlap_A/len(A_states) 148 | 149 | overlap_B = sum([np.abs(np.vdot(phi_x, phi_b)) ** 2 for phi_b in B_states]) 150 | overlap_B = overlap_B/len(B_states) 151 | 152 | return overlap_A, overlap_B 153 | 154 | 155 | def predict(x_new, path_to_featmap, n_samples=None, 156 | probs_A=None, probs_B=None, binary=True, implementation=None, seed=None): 157 | """ 158 | Predicts which class the new input is from, using either exact numerical simulation 159 | or a simulated quantum circuit. 160 | 161 | As a convention, the class labeled by +1 is 'A', the class labeled by -1 is 'B'. 162 | 163 | :param x_new: new input to predict label for 164 | :param path_to_featmap: Where to load featmap from. 165 | :param n_samples: How many samples to use, if None, use full class (simulating perfect measurement) 166 | :param probs_A: Probabilities with which to draw each samples from A. If None, use uniform. 167 | :param probs_B: Probabilities with which to draw each samples from B. If None, use uniform. 168 | :param binary: If True, return probability, else return value {-1, 1} 169 | :param implementation: String that chooses the background implementation. Can be 'exact', 170 | 'fast' or 'circuit' 171 | :return: probability or prediction of class for x_new 172 | """ 173 | 174 | if seed is not None: 175 | np.random.seed(seed) 176 | 177 | # Load settings from result of featmap learning function 178 | settings = np.load(path_to_featmap, allow_pickle=True).item() 179 | featmap = pickle.loads(settings['featmap']) 180 | pars = settings['pars'] 181 | n_inp = settings['n_wires'] 182 | X = settings['X'] 183 | Y = settings['Y'] 184 | A = X[Y == 1] 185 | B = X[Y == -1] 186 | 187 | if probs_A is not None and len(probs_A) != len(A): 188 | raise ValueError("Length of probs_A and A have to be the same, got {} and {}." 189 | .format(len(probs_A), len(A))) 190 | if probs_B is not None and len(probs_B) != len(B): 191 | raise ValueError("Length of probs_B and B have to be the same, got {} and {}." 
192 | .format(len(probs_B), len(B))) 193 | 194 | # Sample subsets from A and B 195 | if n_samples is None: 196 | # Consider all samples from A, B 197 | A_samples = A 198 | B_samples = B 199 | else: 200 | selectA = np.random.choice(range(len(A)), size=(n_samples,), replace=True, p=probs_A) 201 | A_samples = A[selectA] 202 | selectB = np.random.choice(range(len(B)), size=(n_samples,), replace=True, p=probs_B) 203 | B_samples = B[selectB] 204 | 205 | if implementation == "exact": 206 | overlap_A, overlap_B = _exact(x_new=x_new, A_samples=A_samples, B_samples=B_samples, 207 | featmap=featmap, n_inp=n_inp, pars=pars) 208 | elif implementation == "circuit": 209 | overlap_A, overlap_B = _circuit(x_new=x_new, A_samples=A_samples, B_samples=B_samples, 210 | featmap=featmap, pars=pars, n_inp=n_inp) 211 | elif implementation == "fast": 212 | overlap_A, overlap_B = _fast(x_new=x_new, A_samples=A_samples, B_samples=B_samples, 213 | featmap=featmap, pars=pars, n_inp=n_inp) 214 | else: 215 | raise ValueError("Implementation not recognized.") 216 | 217 | if binary: 218 | if overlap_A > overlap_B: 219 | return 1 220 | elif overlap_A < overlap_B: 221 | return -1 222 | else: 223 | return 0 224 | else: 225 | return overlap_A - overlap_B 226 | 227 | 228 | def accuracy(X, Y, path_to_featmap, n_samples=None, probs_A=None, probs_B=None, 229 | implementation=None, seed=None): 230 | """ 231 | Computes the ratio of correctly classified samples to all samples. 232 | 233 | :param X: Array of test inputs 234 | :param Y: 1-d array of test labels 235 | :param path_to_featmap: Where to load featmap from. 236 | :param n_samples: How many samples to use, if None, use full class (simulating perfect measurement) 237 | :param probs_A: Probabilities with which to draw each samples from A. If None, use uniform. 238 | :param probs_B: Probabilities with which to draw each samples from B. If None, use uniform. 239 | :param implementation: String that chooses the background implementation. 
240 | :return: accuracy of predictions on test set 241 | """ 242 | 243 | acc = [] 244 | for x_test, y_test in zip(X, Y): 245 | y_pred = predict(x_new=x_test, 246 | path_to_featmap=path_to_featmap, 247 | n_samples=n_samples, 248 | probs_A=probs_A, 249 | probs_B=probs_B, 250 | binary=True, 251 | implementation=implementation, 252 | seed=seed) 253 | 254 | if y_test == y_pred: 255 | acc.append(1) 256 | else: 257 | acc.append(0) 258 | 259 | return sum(acc)/len(acc) 260 | 261 | -------------------------------------------------------------------------------- /Simulation_of_Variational_Circuits/generate_data.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | def generate_data(finename_X, finename_Y, number_of_data): 4 | X = [] 5 | Y = [] 6 | for i in range(40): 7 | x =[] 8 | y = 0 9 | r1 = np.random.uniform(0,1) 10 | if r1 < 0.5: 11 | x.append(np.random.uniform(-1.0,1.0)) 12 | x.append(np.random.uniform(-1.0,1.0)) 13 | y = -1.0 14 | else: 15 | r2 = np.random.uniform(0,1) 16 | y = 1.0 17 | if r2 < 0.5: 18 | x.append(np.random.uniform(-2.0,-1.0)) 19 | x.append(np.random.uniform(-2.0,-1.0)) 20 | else: 21 | x.append(np.random.uniform(1.0,2.0)) 22 | x.append(np.random.uniform(1.0,2.0)) 23 | X.append(x) 24 | Y.append(y) 25 | 26 | np.savetxt('./data/{}'.format(finename_X), X) 27 | np.savetxt('./data/{}'.format(finename_Y), Y) 28 | -------------------------------------------------------------------------------- /Simulation_of_Variational_Circuits/plots.py: -------------------------------------------------------------------------------- 1 | import matplotlib.pyplot as plt 2 | #import matplotlib.axes as axes 3 | import numpy as np 4 | import pandas 5 | 6 | def plot_axes_IdVsCost(): 7 | #axes.Axis.set_axisbelow(True) 8 | x = range(8) 9 | # for L=1,Nq=1,d=1 10 | # for L=1,Nq=2,d=1 11 | # for L=1,Nq=3,d=1 12 | # for L=1,Nq=4,d=1 13 | y = np.array([0.207044,np.nan,np.nan,0.206619,np.nan,np.nan,np.nan,np.nan]) 14 | plt.scatter(x, y, marker='^',facecolors='blue',edgecolors='blue',label='L=1,Nq=4,d=1') 15 | # for l=2,Nq=1,d=1 16 | # for l=2,Nq=2,d=1 17 | # for l=2,Nq=3,d=1 18 | y = np.array([0.376935,np.nan,0.326575,0.182479,np.nan,np.nan,np.nan,np.nan]) 19 | plt.scatter(x, y, marker='o',facecolors='red',edgecolors='red',label='L=2,Nq=3,d=1') 20 | # for l=2,Nq=4,d=1 21 | y = np.array([0.400412,np.nan,np.nan,np.nan,0.593843,0.722007,np.nan,np.nan]) 22 | plt.scatter(x, y, marker='o',facecolors='blue',edgecolors='blue',label='L=2,Nq=4,d=1') 23 | # for l=3 24 | # for l=4,Nq=1,d=1 25 | y = np.array([0.116092,0.103657,0.312526,np.nan,np.nan,np.nan,np.nan,np.nan]) 26 | plt.scatter(x, y, marker='s',facecolors='purple',edgecolors='purple',label='L=4,Nq=1,d=1') 27 | # for l=4,Nq=2,d=1 28 | y = np.array([np.nan,np.nan,0.375075,0.325434,np.nan,np.nan,0.398591,0.660803]) 29 | plt.scatter(x, y, marker='s',facecolors='green',edgecolors='green',label='L=4,Nq=2,d=1') 30 | # for l=4,Nq=3,d=1 31 | # for l=4,Nq=4,d=1 32 | 33 | # for l=1,Nq=1..3,d=2 34 | # for l=1,Nq=4,d=2 35 | y = np.array([np.nan,np.nan,np.nan,np.nan,0.748411,np.nan,np.nan,np.nan]) 36 | plt.scatter(x, y,marker='^',facecolors='none',edgecolors='blue',label='L=1,Nq=4,d=2') 37 | # for l=2,Nq=1,d=2 38 | # for l=2,Nq=2,d=2 39 | y = np.array([np.nan,np.nan,np.nan,np.nan,np.nan,np.nan,0.270515,0.92881]) 40 | plt.scatter(x, y,marker='o',facecolors='none',edgecolors='green',label='L=2,Nq=2,d=2') 41 | # for l=2,Nq=3,d=2 42 | # for l=2,Nq=4,d=2 43 | y = 
np.array([np.nan,np.nan,np.nan,np.nan,0.719350,np.nan,np.nan,0.568995]) 44 | plt.scatter(x, y,marker='o',facecolors='none',edgecolors='blue',label='L=2,Nq=4,d=2') 45 | # for l=3 46 | # for l=4,Nq=2,d=2 47 | y = np.array([0.482175,np.nan,np.nan,np.nan,np.nan,np.nan,0.469099,0.398838]) 48 | plt.scatter(x, y,marker='s',facecolors='none',edgecolors='green',label='L=4,Nq=2,d=2') 49 | 50 | plt.grid(b=True, which='both', color='#666666', linestyle='--') 51 | 52 | plt.legend(bbox_to_anchor=(1.001, 1), loc='upper left') 53 | plt.xlabel("Circuit ID", fontsize=13) 54 | plt.ylabel("Cost Function After 300 Steps", fontsize=13) 55 | plt.show() 56 | 57 | 58 | 59 | #L,Nq,d,ID 60 | data = np.full((4,4,2,8), np.nan) 61 | data[0,3,0,0]=0.207044 62 | data[0,3,0,3]=0.206619 63 | data[1,2,0,0]=0.376935 64 | data[1,2,0,2]=0.326575 65 | data[1,2,0,3]=0.182479 66 | data[1,3,0,0]=0.400412 67 | data[1,3,0,4]=0.593843 68 | data[1,3,0,5]=0.722007 69 | data[3,0,0,0]=0.116092 70 | data[3,0,0,1]=0.103657 71 | data[3,0,0,2]=0.312526 72 | 73 | 74 | data[0,1,0,0]=np.nan 75 | data[0,1,0,1]=np.nan 76 | data[0,1,0,2]=np.nan 77 | data[0,1,0,3]=np.nan 78 | data[0,1,0,4]=np.nan 79 | data[0,1,0,5]=np.nan 80 | data[0,1,0,6]=np.nan 81 | data[0,1,0,7]=np.nan 82 | 83 | data[1,1,0,0]=np.nan 84 | data[1,1,0,1]=np.nan 85 | data[1,1,0,2]=np.nan 86 | data[1,1,0,3]=np.nan 87 | data[1,1,0,4]=np.nan 88 | data[1,1,0,5]=np.nan 89 | data[1,1,0,6]=np.nan 90 | data[1,1,0,7]=np.nan 91 | 92 | data[3,1,0,0]=np.nan 93 | data[3,1,0,1]=np.nan 94 | data[3,1,0,2]=0.375075 95 | data[3,1,0,3]=0.325434 96 | data[3,1,0,4]=np.nan 97 | data[3,1,0,5]=np.nan 98 | data[3,1,0,6]=0.398591 99 | data[3,1,0,7]=0.660803 100 | 101 | data[0,3,1,4]=0.748411 102 | 103 | data[1,1,1,6]=0.270515 104 | data[1,1,1,7]=0.92881 105 | 106 | data[1,3,1,4]=0.719350 107 | data[1,3,1,7]=0.568995 108 | 109 | data[3,1,1,0]=0.482175 110 | data[3,1,1,6]=0.469099 111 | data[3,1,1,7]=0.398838 112 | 113 | def define_plot_legend(x_Axis,L=None,Nq=None,data_length=None,ID=None): 114 | #ID == marker, Nq==color, d==filled/nonfilled 115 | if x_Axis == 'ID': 116 | if L==0: 117 | marker='^' 118 | elif L==1: 119 | marker='o' 120 | elif L==2: 121 | marker='+' 122 | else: 123 | marker='s' 124 | if Nq==0: 125 | edgecolors='purple' 126 | elif Nq==1: 127 | edgecolors='green' 128 | elif Nq==2: 129 | edgecolors='red' 130 | else: 131 | edgecolors='blue' 132 | if data_length==0: 133 | facecolors=edgecolors 134 | else: 135 | facecolors='none' 136 | elif x_Axis=='L': 137 | if ID==0: 138 | marker='^' 139 | elif ID==1: 140 | marker='o' 141 | elif ID==2: 142 | marker='+' 143 | elif ID==3: 144 | marker='s' 145 | elif ID==4: 146 | marker="D" 147 | elif ID==5: 148 | marker="p" 149 | elif ID==6: 150 | marker="d" 151 | elif ID==7: 152 | marker="v" 153 | if Nq==0: 154 | edgecolors='purple' 155 | elif Nq==1: 156 | edgecolors='green' 157 | elif Nq==2: 158 | edgecolors='red' 159 | else: 160 | edgecolors='blue' 161 | if data_length==0: 162 | facecolors=edgecolors 163 | else: 164 | facecolors='none' 165 | elif x_Axis=='Nq': 166 | if ID==0: 167 | marker='^' 168 | elif ID==1: 169 | marker='o' 170 | elif ID==2: 171 | marker='+' 172 | elif ID==3: 173 | marker='s' 174 | elif ID==4: 175 | marker="D" 176 | elif ID==5: 177 | marker="p" 178 | elif ID==6: 179 | marker="d" 180 | elif ID==7: 181 | marker="v" 182 | if L==0: 183 | edgecolors='purple' 184 | elif L==1: 185 | edgecolors='green' 186 | elif L==2: 187 | edgecolors='red' 188 | else: 189 | edgecolors='blue' 190 | if data_length==0: 191 | facecolors=edgecolors 192 | else: 193 | 
facecolors='none' 194 | else: 195 | print("Incorrect label for X axis") 196 | return 197 | return marker,facecolors,edgecolors 198 | 199 | 200 | 201 | 202 | 203 | 204 | 205 | def plotting(data,x_Axis,draw_line=False): 206 | L = len(data) 207 | Nq = len(data[0]) 208 | data_length = len(data[0][0]) 209 | ID = len(data[0][0][0]) 210 | print(L,Nq,data_length,ID) 211 | if x_Axis == "ID": 212 | for l in range(L): 213 | for n in range(Nq): 214 | for d in range(data_length): 215 | if np.isnan(data[l,n,d,:]).all(): 216 | continue 217 | marker,facecolors,edgecolors=define_plot_legend(x_Axis=x_Axis,L=l,Nq=n,data_length=d,ID=None) 218 | x = np.arange(1,ID+1) 219 | plt.scatter(x, data[l,n,d,:],s = 70,marker=marker,facecolors=facecolors,edgecolors=edgecolors,\ 220 | label='L={},Nq={},d={}'.format(l+1,n+1,d+1)) 221 | if draw_line: 222 | datamask = np.isfinite(data[l,n,d,:].astype(np.double)) 223 | plt.plot(x, data[l,n,d,datamask], color=edgecolors) 224 | plt.xlabel("Circuit ID", fontsize=13) 225 | elif x_Axis == "L": 226 | for id_ in range(ID): 227 | for n in range(Nq): 228 | for d in range(data_length): 229 | #n=1 230 | if np.isnan(data[:,n,d,id_]).all(): 231 | continue 232 | marker,facecolors,edgecolors=define_plot_legend(x_Axis=x_Axis,Nq=n,data_length=d,ID=id_) 233 | x = np.arange(L-1) 234 | my_xticks = ['1','2','4'] 235 | data_ = np.array([data[0,n,d,id_],data[1,n,d,id_],data[3,n,d,id_]]).astype(np.double) 236 | plt.xticks(x, my_xticks) 237 | plt.scatter(x, data_,s = 70, marker=marker,\ 238 | facecolors=facecolors,edgecolors=edgecolors,\ 239 | label='ID={},Nq={},d={}'.format(id_+1,n+1,d+1)) 240 | if draw_line: 241 | datamask = np.isfinite(data_) 242 | plt.plot(x[datamask], data_[datamask], color=edgecolors) 243 | #for i in range(L): plt.annotate(np.around(data[i,n,d,id_], 8), (x[i], data[i,n,d,id_])) 244 | plt.xlabel("Number of Layers", fontsize=13) 245 | elif x_Axis == "Nq": 246 | for id_ in range(ID): 247 | for l in range(L): 248 | for d in range(data_length): 249 | if np.isnan(data[l,:,d,id_]).all(): 250 | continue 251 | marker,facecolors,edgecolors=define_plot_legend(x_Axis=x_Axis,L=l,data_length=d,ID=id_) 252 | x = np.arange(Nq) 253 | my_xticks = ['1','2','3','4'] 254 | data_ = data[l,:,d,id_].astype(np.double) 255 | plt.xticks(x, my_xticks) 256 | plt.scatter(x, data_,marker=marker,\ 257 | facecolors=facecolors,edgecolors=edgecolors,\ 258 | label='ID={},L={},d={}'.format(id_+1,l+1,d+1)) 259 | if draw_line: 260 | datamask = np.isfinite(data_) 261 | plt.plot(x[datamask], data_[datamask], color=edgecolors) 262 | #for i in range(L): plt.annotate(np.around(data[i,n,d,id_], 8), (x[i], data[i,n,d,id_])) 263 | plt.xlabel("Number of Layers", fontsize=13) 264 | 265 | plt.legend(bbox_to_anchor=(1.001, 1), loc='upper left') 266 | plt.ylabel("Cost Function After 300 Steps", fontsize=13) 267 | plt.grid(b=True, which='both', color='#666666', linestyle='--') 268 | plt.show() 269 | 270 | #plotting(data) 271 | 272 | 273 | plotting(data,x_Axis='ID') 274 | #print(data[1,3,0,:]) 275 | 276 | 277 | 278 | 279 | 280 | 281 | 282 | 283 | 284 | 285 | -------------------------------------------------------------------------------- /Simulation_of_Variational_Circuits/trained_embeddings/1d_sep-l2-300s-1l-4w.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mudassirmoosa/variational_embedding_circuits/a39d71990daa637d6d4c2c3dc6b53b3b69169818/Simulation_of_Variational_Circuits/trained_embeddings/1d_sep-l2-300s-1l-4w.npy 
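The .npy files in trained_embeddings/ hold the optimized embedding parameters (presumably written out by embedding_training.ipynb in the same folder). As a rough, self-contained sketch, and not part of the repository itself, the snippet below shows how one of these parameter files could be reloaded and a trained QAOA-type embedding re-applied to a single data point. The reading of the file name 1d_sep-l2-300s-1l-4w as "1D separable data, l2 cost, 300 steps, 1 layer, 4 wires", the assumption that the stored array has shape (layers, wires), and the reuse of the RX/RY layer structure from embeddings_circuit.py are inferences from the naming pattern and the other circuit files, not facts stated anywhere in the repository.

import pennylane as qml
from pennylane import numpy as np

# Hypothetical reload of a trained embedding; shape assumed to be (n_layers, n_wires).
weights = np.load("trained_embeddings/1d_sep-l2-300s-1l-4w.npy", allow_pickle=True)

n_wires = 4  # assumed from the "-4w" suffix of the file name
dev = qml.device("default.qubit", wires=n_wires)

@qml.qnode(dev)
def embedded_probs(x, weights):
    # Same layer structure as embedding_circuit(): a data rotation RX(x) on every
    # wire, then one trainable RY per wire, repeated per layer, closed by a final
    # round of RX(x) rotations.
    for params_layer in weights:
        for w in range(n_wires):
            qml.RX(x, wires=w)
        for w in range(n_wires):
            qml.RY(params_layer[w], wires=w)
    for w in range(n_wires):
        qml.RX(x, wires=w)
    return qml.probs(wires=range(n_wires))

print(embedded_probs(0.5, weights))

If the wire counts match, the loaded array could equally be passed straight to embedding_circuit(x, weights, wires) from overlap_vs_HS_cost/embeddings_circuit.py instead of restating the gates by hand.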
-------------------------------------------------------------------------------- /Simulation_of_Variational_Circuits/trained_embeddings/1d_sep-l2-300s-2l-2w.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mudassirmoosa/variational_embedding_circuits/a39d71990daa637d6d4c2c3dc6b53b3b69169818/Simulation_of_Variational_Circuits/trained_embeddings/1d_sep-l2-300s-2l-2w.npy -------------------------------------------------------------------------------- /Simulation_of_Variational_Circuits/trained_embeddings/1d_sep-l2-300s-2l-3w.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mudassirmoosa/variational_embedding_circuits/a39d71990daa637d6d4c2c3dc6b53b3b69169818/Simulation_of_Variational_Circuits/trained_embeddings/1d_sep-l2-300s-2l-3w.npy -------------------------------------------------------------------------------- /Simulation_of_Variational_Circuits/trained_embeddings/1d_sep-l2-300s-2l-4w.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mudassirmoosa/variational_embedding_circuits/a39d71990daa637d6d4c2c3dc6b53b3b69169818/Simulation_of_Variational_Circuits/trained_embeddings/1d_sep-l2-300s-2l-4w.npy -------------------------------------------------------------------------------- /Simulation_of_Variational_Circuits/trained_embeddings/1d_sep-l2-300s-4l-1w.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mudassirmoosa/variational_embedding_circuits/a39d71990daa637d6d4c2c3dc6b53b3b69169818/Simulation_of_Variational_Circuits/trained_embeddings/1d_sep-l2-300s-4l-1w.npy -------------------------------------------------------------------------------- /Simulation_of_Variational_Circuits/trained_embeddings/1d_sep-l2-300s-4l-2w.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mudassirmoosa/variational_embedding_circuits/a39d71990daa637d6d4c2c3dc6b53b3b69169818/Simulation_of_Variational_Circuits/trained_embeddings/1d_sep-l2-300s-4l-2w.npy -------------------------------------------------------------------------------- /Simulation_of_Variational_Circuits/trained_embeddings/1d_sep-l2-300s-4l-4w.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mudassirmoosa/variational_embedding_circuits/a39d71990daa637d6d4c2c3dc6b53b3b69169818/Simulation_of_Variational_Circuits/trained_embeddings/1d_sep-l2-300s-4l-4w.npy -------------------------------------------------------------------------------- /overlap_vs_HS_cost/X_1d_sep.txt: -------------------------------------------------------------------------------- 1 | 1.428386558165781750e+00 2 | -1.313839087028072727e+00 3 | 1.596285966845742754e+00 4 | 1.367550496433965534e+00 5 | 1.468173134298068616e+00 6 | -1.454971615808443364e+00 7 | 1.161811926465180944e+00 8 | -1.714610930998586413e+00 9 | -1.327999547708873740e+00 10 | 1.382642255659186192e+00 11 | 1.074868758557305348e+00 12 | 1.459529311267870444e+00 13 | 1.382177001267317351e+00 14 | -1.131668993147058488e+00 15 | -1.248997409178646656e+00 16 | -1.322204908516744437e+00 17 | -1.464985657964124277e+00 18 | -1.454739026129264534e+00 19 | 1.557625966631448922e+00 20 | -1.401922341642383385e+00 21 | -3.593397378255333563e-01 22 | -6.950145572910067064e-03 23 | 1.138715720991759078e-01 24 | 
-1.704154411162837890e-01 25 | -1.840487346705927951e-01 26 | 2.641666606852424715e-01 27 | 3.363208579927841058e-01 28 | 1.640524890397370150e-01 29 | -1.572146825398952896e-01 30 | -2.091069596104288553e-01 31 | -1.205649127814735350e-01 32 | -2.119293925672509904e-01 33 | 1.764063617893480984e-01 34 | -5.099904663363319379e-01 35 | 2.850906880823069756e-01 36 | 2.240966054048715017e-01 37 | 3.109652609661974765e-01 38 | 1.891447943386029285e-01 39 | -2.009277177789230429e-01 40 | 2.326104293168703568e-02 41 | -------------------------------------------------------------------------------- /overlap_vs_HS_cost/Y_1d_sep.txt: -------------------------------------------------------------------------------- 1 | -1.000000000000000000e+00 2 | -1.000000000000000000e+00 3 | -1.000000000000000000e+00 4 | -1.000000000000000000e+00 5 | -1.000000000000000000e+00 6 | -1.000000000000000000e+00 7 | -1.000000000000000000e+00 8 | -1.000000000000000000e+00 9 | -1.000000000000000000e+00 10 | -1.000000000000000000e+00 11 | -1.000000000000000000e+00 12 | -1.000000000000000000e+00 13 | -1.000000000000000000e+00 14 | -1.000000000000000000e+00 15 | -1.000000000000000000e+00 16 | -1.000000000000000000e+00 17 | -1.000000000000000000e+00 18 | -1.000000000000000000e+00 19 | -1.000000000000000000e+00 20 | -1.000000000000000000e+00 21 | 1.000000000000000000e+00 22 | 1.000000000000000000e+00 23 | 1.000000000000000000e+00 24 | 1.000000000000000000e+00 25 | 1.000000000000000000e+00 26 | 1.000000000000000000e+00 27 | 1.000000000000000000e+00 28 | 1.000000000000000000e+00 29 | 1.000000000000000000e+00 30 | 1.000000000000000000e+00 31 | 1.000000000000000000e+00 32 | 1.000000000000000000e+00 33 | 1.000000000000000000e+00 34 | 1.000000000000000000e+00 35 | 1.000000000000000000e+00 36 | 1.000000000000000000e+00 37 | 1.000000000000000000e+00 38 | 1.000000000000000000e+00 39 | 1.000000000000000000e+00 40 | 1.000000000000000000e+00 41 | -------------------------------------------------------------------------------- /overlap_vs_HS_cost/__pycache__/embeddings_circuit.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mudassirmoosa/variational_embedding_circuits/a39d71990daa637d6d4c2c3dc6b53b3b69169818/overlap_vs_HS_cost/__pycache__/embeddings_circuit.cpython-38.pyc -------------------------------------------------------------------------------- /overlap_vs_HS_cost/embeddings_circuit.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding: utf-8 3 | 4 | import pennylane as qml 5 | from pennylane import numpy as np 6 | 7 | # This function generates a QAOA type circuit. 
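# Layer structure (as implemented below): for each row params_layer of weights,
# every wire first receives a data-encoding rotation RX(x) and then a trainable
# rotation RY(params_layer[i]); after the last layer one extra round of RX(x)
# rotations is applied. weights is therefore expected to be an iterable with one
# entry of length len(wires) per layer, which produces the alternating
# RX(x)-RY(.)-RX(x)-...-RY(.)-RX(x) pattern visible in the circuit drawing
# printed in optimizing_overlap_cost.ipynb.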
8 | 9 | def embedding_circuit(x,weights,wires): 10 | 11 | no_qubits = len(wires) 12 | 13 | for params_layer in weights: 14 | for i in range(no_qubits): 15 | qml.RX(x,wires=wires[i]) 16 | 17 | for i in range(no_qubits): 18 | qml.RY(params_layer[i],wires=wires[i]) 19 | 20 | for w in range(no_qubits): 21 | qml.RX(x,wires=wires[w]) 22 | 23 | 24 | # In[ ]: 25 | 26 | 27 | 28 | 29 | -------------------------------------------------------------------------------- /overlap_vs_HS_cost/optimizing_overlap_cost.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import pennylane as qml\n", 10 | "from pennylane import numpy as np\n", 11 | "from embeddings_circuit import embedding_circuit\n", 12 | "import matplotlib.pyplot as plt\n", 13 | "import dill as pickle\n", 14 | "import time\n", 15 | "import seaborn as sns\n", 16 | "sns.set(context='notebook', font='serif')" 17 | ] 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": 2, 22 | "metadata": {}, 23 | "outputs": [], 24 | "source": [ 25 | "CSWAP = np.array([[1, 0, 0, 0, 0, 0, 0, 0],\n", 26 | " [0, 1, 0, 0, 0, 0, 0, 0],\n", 27 | " [0, 0, 1, 0, 0, 0, 0, 0],\n", 28 | " [0, 0, 0, 1, 0, 0, 0, 0],\n", 29 | " [0, 0, 0, 0, 1, 0, 0, 0],\n", 30 | " [0, 0, 0, 0, 0, 0, 1, 0],\n", 31 | " [0, 0, 0, 0, 0, 1, 0, 0],\n", 32 | " [0, 0, 0, 0, 0, 0, 0, 1]])" 33 | ] 34 | }, 35 | { 36 | "cell_type": "code", 37 | "execution_count": 3, 38 | "metadata": {}, 39 | "outputs": [], 40 | "source": [ 41 | "def featmap(x,weights,wires):\n", 42 | " return embedding_circuit(x,weights,wires)" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": 4, 48 | "metadata": {}, 49 | "outputs": [], 50 | "source": [ 51 | "n_layers = 4 # number of layers for featuremap, if applicable\n", 52 | "n_inp = 1 # number of wires that feature map acts on\n", 53 | "n_steps = 300 # steps of GD performed\n", 54 | "log_step = 5 # how often the test error is calculated\n", 55 | "batch_size = 2 # how many pairs are sampled in each training step\n", 56 | "step_size = 0.05\n", 57 | "n_all = 2*n_inp + 1\n", 58 | "\n", 59 | "optimizer = qml.RMSPropOptimizer(stepsize=step_size)\n", 60 | "dev = qml.device('default.qubit', wires=n_all)" 61 | ] 62 | }, 63 | { 64 | "cell_type": "code", 65 | "execution_count": null, 66 | "metadata": {}, 67 | "outputs": [], 68 | "source": [ 69 | "X = np.loadtxt(\"X_1d_sep.txt\") # load features\n", 70 | "Y = np.loadtxt(\"Y_1d_sep.txt\") # load labels\n", 71 | "\n", 72 | "# Divide inputs into classes\n", 73 | "A = X[Y == -1]\n", 74 | "B = X[Y == 1]" 75 | ] 76 | }, 77 | { 78 | "cell_type": "code", 79 | "execution_count": 6, 80 | "metadata": {}, 81 | "outputs": [], 82 | "source": [ 83 | "init_pars = []\n", 84 | "for i in range(n_layers):\n", 85 | " pars = [0.001 for j in range(n_inp)]\n", 86 | " init_pars.append(pars)" 87 | ] 88 | }, 89 | { 90 | "cell_type": "code", 91 | "execution_count": 7, 92 | "metadata": {}, 93 | "outputs": [], 94 | "source": [ 95 | "@qml.qnode(dev, cache=True)\n", 96 | "def circuit(weights, x1=None, x2=None):\n", 97 | "\n", 98 | " # Load the two inputs into two different registers\n", 99 | " featmap(x1,weights, range(1, n_inp+1))\n", 100 | " featmap(x2,weights, range(n_inp+1, 2*n_inp+1))\n", 101 | "\n", 102 | " # Do a SWAP test\n", 103 | " qml.Hadamard(wires=0)\n", 104 | " for k in range(n_inp):\n", 105 | " qml.QubitUnitary(CSWAP, wires=[0, k+1, n_inp+k+1])\n", 106 | " 
qml.Hadamard(wires=0)\n", 107 | "\n", 108 | " # Measure overlap by checking ancilla\n", 109 | " return qml.expval(qml.PauliZ(0))\n", 110 | "\n", 111 | "def tr_rr(weights, A=None):\n", 112 | " # Compute intra-class overlap A\n", 113 | " tr_rr = 0\n", 114 | " for a1 in A:\n", 115 | " for a2 in A:\n", 116 | " tr_rr += circuit(weights, x1=a1, x2=a2)\n", 117 | " tr_rr = tr_rr / len(A)**2\n", 118 | " return tr_rr\n", 119 | "\n", 120 | "def tr_ss(weights, B=None):\n", 121 | " # Compute intra-class overlap B\n", 122 | " tr_ss = 0\n", 123 | " for b1 in B:\n", 124 | " for b2 in B:\n", 125 | " tr_ss += circuit(weights, x1=b1, x2=b2)\n", 126 | " tr_ss = tr_ss/len(B)**2\n", 127 | " return tr_ss\n", 128 | "\n", 129 | "def tr_rs(weights, A=None, B=None):\n", 130 | " # Compute inter-class overlap A-B\n", 131 | " tr_rs = 0\n", 132 | " for a in A:\n", 133 | " for b in B:\n", 134 | " tr_rs += circuit(weights, x1=a, x2=b)\n", 135 | " tr_rs = tr_rs/(len(A)*len(B))\n", 136 | " return tr_rs\n", 137 | "\n", 138 | "def cost(weights, A=None, B=None):\n", 139 | "\n", 140 | " # Fidelity cost,\n", 141 | " rr = tr_rr(weights, A=A)\n", 142 | " ss = tr_ss(weights, B=B)\n", 143 | " rs = tr_rs(weights, A=A, B=B)\n", 144 | " distance = - rs + 0.5 * (ss + rr)\n", 145 | " return 1 - distance # min is 0" 146 | ] 147 | }, 148 | { 149 | "cell_type": "code", 150 | "execution_count": 9, 151 | "metadata": {}, 152 | "outputs": [], 153 | "source": [] 154 | }, 155 | { 156 | "cell_type": "code", 157 | "execution_count": 10, 158 | "metadata": {}, 159 | "outputs": [ 160 | { 161 | "name": "stdout", 162 | "output_type": "stream", 163 | "text": [ 164 | "Initial parameters [[2.1979763050602665], [1.203060801566918], [5.817729631651635], [0.6017195302106214]]\n", 165 | "Initial cost 0 -- 0.9930738658535236\n", 166 | "Time taken by cost function: 25.575092315673828 seconds\n", 167 | " 0: ──H───────────────────────────────────────────────────────────────────────────────────────────────────────╭U0──H──┤ ⟨Z⟩ \n", 168 | " 1: ──RX(-0.802)──RY(2.198)──RX(-0.802)──RY(1.203)──RX(-0.802)──RY(5.818)──RX(-0.802)──RY(0.602)──RX(-0.802)──├U0─────┤ \n", 169 | " 2: ──RX(1.07)────RY(2.198)──RX(1.07)────RY(1.203)──RX(1.07)────RY(5.818)──RX(1.07)────RY(0.602)──RX(1.07)────╰U0─────┤ \n", 170 | "U0 =\n", 171 | "[[1 0 0 0 0 0 0 0]\n", 172 | " [0 1 0 0 0 0 0 0]\n", 173 | " [0 0 1 0 0 0 0 0]\n", 174 | " [0 0 0 1 0 0 0 0]\n", 175 | " [0 0 0 0 1 0 0 0]\n", 176 | " [0 0 0 0 0 0 1 0]\n", 177 | " [0 0 0 0 0 1 0 0]\n", 178 | " [0 0 0 0 0 0 0 1]]\n", 179 | "\n" 180 | ] 181 | } 182 | ], 183 | "source": [] 184 | }, 185 | { 186 | "cell_type": "code", 187 | "execution_count": 10, 188 | "metadata": {}, 189 | "outputs": [ 190 | { 191 | "name": "stdout", 192 | "output_type": "stream", 193 | "text": [ 194 | "Step 0 -- rs 0.630934-- rr 0.678287 -- ss 0.596345 -- cst 0.993618\n", 195 | "Step 5 -- rs 0.576883-- rr 0.677360 -- ss 0.636323 -- cst 0.920042\n", 196 | "Step 10 -- rs 0.498550-- rr 0.674745 -- ss 0.696585 -- cst 0.812885\n", 197 | "Step 15 -- rs 0.440135-- rr 0.680727 -- ss 0.736060 -- cst 0.731742\n", 198 | "Step 20 -- rs 0.375155-- rr 0.682218 -- ss 0.785338 -- cst 0.641377\n", 199 | "Step 25 -- rs 0.316345-- rr 0.683287 -- ss 0.830515 -- cst 0.559444\n", 200 | "Step 30 -- rs 0.273980-- rr 0.683037 -- ss 0.867099 -- cst 0.498912\n", 201 | "Step 35 -- rs 0.247009-- rr 0.691506 -- ss 0.887871 -- cst 0.457321\n", 202 | "Step 40 -- rs 0.218558-- rr 0.706667 -- ss 0.907372 -- cst 0.411538\n", 203 | "Step 45 -- rs 0.199745-- rr 0.720172 -- ss 0.919475 -- cst 0.379922\n", 204 | 
"Step 50 -- rs 0.178473-- rr 0.741357 -- ss 0.930816 -- cst 0.342387\n", 205 | "Step 55 -- rs 0.163642-- rr 0.755178 -- ss 0.946396 -- cst 0.312855\n", 206 | "Step 60 -- rs 0.151944-- rr 0.773384 -- ss 0.945180 -- cst 0.292662\n", 207 | "Step 65 -- rs 0.134910-- rr 0.799695 -- ss 0.946759 -- cst 0.261683\n", 208 | "Step 70 -- rs 0.120027-- rr 0.822317 -- ss 0.949806 -- cst 0.233965\n", 209 | "Step 75 -- rs 0.106570-- rr 0.841561 -- ss 0.956023 -- cst 0.207778\n", 210 | "Step 80 -- rs 0.099974-- rr 0.852906 -- ss 0.968170 -- cst 0.189437\n", 211 | "Step 85 -- rs 0.091747-- rr 0.866817 -- ss 0.973664 -- cst 0.171507\n", 212 | "Step 90 -- rs 0.081813-- rr 0.880690 -- ss 0.974560 -- cst 0.154187\n", 213 | "Step 95 -- rs 0.073668-- rr 0.890720 -- ss 0.974087 -- cst 0.141265\n", 214 | "Step 100 -- rs 0.072283-- rr 0.898405 -- ss 0.979211 -- cst 0.133475\n", 215 | "Step 105 -- rs 0.066833-- rr 0.907222 -- ss 0.980881 -- cst 0.122782\n", 216 | "Step 110 -- rs 0.060684-- rr 0.906681 -- ss 0.976495 -- cst 0.119096\n", 217 | "Step 115 -- rs 0.063198-- rr 0.912783 -- ss 0.983300 -- cst 0.115156\n", 218 | "Step 120 -- rs 0.065488-- rr 0.914549 -- ss 0.984841 -- cst 0.115792\n", 219 | "Step 125 -- rs 0.057948-- rr 0.909110 -- ss 0.979552 -- cst 0.113617\n", 220 | "Step 130 -- rs 0.056693-- rr 0.912088 -- ss 0.983236 -- cst 0.109030\n", 221 | "Step 135 -- rs 0.056602-- rr 0.914378 -- ss 0.985033 -- cst 0.106897\n", 222 | "Step 140 -- rs 0.062160-- rr 0.918619 -- ss 0.986600 -- cst 0.109551\n", 223 | "Step 145 -- rs 0.055660-- rr 0.914555 -- ss 0.985498 -- cst 0.105633\n", 224 | "Step 150 -- rs 0.054657-- rr 0.911696 -- ss 0.984116 -- cst 0.106751\n", 225 | "Step 155 -- rs 0.055270-- rr 0.915343 -- ss 0.986299 -- cst 0.104449\n", 226 | "Step 160 -- rs 0.057532-- rr 0.916994 -- ss 0.986672 -- cst 0.105699\n", 227 | "Step 165 -- rs 0.053713-- rr 0.913812 -- ss 0.985941 -- cst 0.103836\n", 228 | "Step 170 -- rs 0.053663-- rr 0.911169 -- ss 0.985060 -- cst 0.105549\n", 229 | "Step 175 -- rs 0.052970-- rr 0.914905 -- ss 0.987079 -- cst 0.101978\n", 230 | "Step 180 -- rs 0.052417-- rr 0.913475 -- ss 0.987021 -- cst 0.102169\n", 231 | "Step 185 -- rs 0.052161-- rr 0.912405 -- ss 0.987090 -- cst 0.102414\n", 232 | "Step 190 -- rs 0.051676-- rr 0.914962 -- ss 0.987877 -- cst 0.100257\n", 233 | "Step 195 -- rs 0.051825-- rr 0.916135 -- ss 0.987842 -- cst 0.099836\n", 234 | "Step 200 -- rs 0.054094-- rr 0.919172 -- ss 0.987489 -- cst 0.100763\n", 235 | "Step 205 -- rs 0.053454-- rr 0.919121 -- ss 0.987413 -- cst 0.100187\n", 236 | "Step 210 -- rs 0.050954-- rr 0.916720 -- ss 0.987877 -- cst 0.098656\n", 237 | "Step 215 -- rs 0.050451-- rr 0.916999 -- ss 0.987967 -- cst 0.097968\n", 238 | "Step 220 -- rs 0.050634-- rr 0.917510 -- ss 0.987862 -- cst 0.097947\n", 239 | "Step 225 -- rs 0.049989-- rr 0.915451 -- ss 0.988102 -- cst 0.098212\n", 240 | "Step 230 -- rs 0.049814-- rr 0.915428 -- ss 0.988302 -- cst 0.097949\n", 241 | "Step 235 -- rs 0.049812-- rr 0.917693 -- ss 0.988123 -- cst 0.096904\n", 242 | "Step 240 -- rs 0.049955-- rr 0.918096 -- ss 0.987887 -- cst 0.096963\n", 243 | "Step 245 -- rs 0.049728-- rr 0.917738 -- ss 0.987934 -- cst 0.096892\n", 244 | "Step 250 -- rs 0.050728-- rr 0.919744 -- ss 0.987400 -- cst 0.097156\n", 245 | "Step 255 -- rs 0.050588-- rr 0.914443 -- ss 0.988380 -- cst 0.099177\n", 246 | "Step 260 -- rs 0.050512-- rr 0.920683 -- ss 0.987178 -- cst 0.096581\n", 247 | "Step 265 -- rs 0.049706-- rr 0.920135 -- ss 0.987389 -- cst 0.095944\n", 248 | "Step 270 -- rs 0.049052-- rr 0.919626 -- ss 
0.987728 -- cst 0.095375\n", 249 | "Step 275 -- rs 0.050211-- rr 0.915334 -- ss 0.988665 -- cst 0.098212\n", 250 | "Step 280 -- rs 0.048274-- rr 0.918534 -- ss 0.988223 -- cst 0.094896\n", 251 | "Step 285 -- rs 0.049189-- rr 0.916559 -- ss 0.988750 -- cst 0.096534\n", 252 | "Step 290 -- rs 0.049032-- rr 0.921864 -- ss 0.986822 -- cst 0.094689\n", 253 | "Step 295 -- rs 0.047939-- rr 0.919468 -- ss 0.988022 -- cst 0.094194\n", 254 | "Total time taken: 39.22258322238922 minutes\n", 255 | "Time taken for optimization: 9.085919630527496 minutes\n" 256 | ] 257 | } 258 | ], 259 | "source": [ 260 | "cst_history = []\n", 261 | "rr_history = []\n", 262 | "ss_history = []\n", 263 | "rs_history = []\n", 264 | "par_history = [init_pars]\n", 265 | "pars = init_pars\n", 266 | "\n", 267 | "total_opt_time = 0\n", 268 | "for i in range(n_steps):\n", 269 | "\n", 270 | " if i % log_step == 0:\n", 271 | " cst = cost(pars, A=A, B=B)\n", 272 | " rr = tr_rr(pars, A=A)\n", 273 | " ss = tr_ss(pars, B=B)\n", 274 | " rs = tr_rs(pars, A=A, B=B)\n", 275 | " cst_history.append([i, cst])\n", 276 | " rr_history.append([i, rr])\n", 277 | " ss_history.append([i, ss])\n", 278 | " rs_history.append([i, rs])\n", 279 | " print(\"Step {} -- rs {:2f}-- rr {:2f} -- ss {:2f} -- cst {:2f}\".\n", 280 | " format(i, rs, rr, ss, cst))\n", 281 | " \n", 282 | " \n", 283 | " # Sample a batch of pairs\n", 284 | " selectA = np.random.choice(range(len(A)), size=(batch_size,), replace=True)\n", 285 | " selectB = np.random.choice(range(len(B)), size=(batch_size,), replace=True)\n", 286 | " A_batch = [A[s] for s in selectA]\n", 287 | " B_batch = [B[s] for s in selectB]\n", 288 | " \n", 289 | " \n", 290 | " # Walk one optimization step (using all training samples)\n", 291 | " \n", 292 | " opt_time_start = time.time()\n", 293 | " pars = optimizer.step(lambda w: cost(w, A=A_batch, B=B_batch), pars)\n", 294 | " #pars = optimizer.step(lambda w: tr_rs(w, A=A_batch, B=B_batch), pars)\n", 295 | " par_history.append(pars)\n", 296 | " opt_time_end = time.time()\n", 297 | " total_opt_time = total_opt_time + opt_time_end - opt_time_start\n", 298 | " \n", 299 | " \n", 300 | "print('Time taken for optimization: ', total_opt_time/60, ' minutes')" 301 | ] 302 | }, 303 | { 304 | "cell_type": "code", 305 | "execution_count": 13, 306 | "metadata": {}, 307 | "outputs": [ 308 | { 309 | "data": { 310 | "text/plain": [ 311 | "Text(0.5, 0, 'steps')" 312 | ] 313 | }, 314 | "execution_count": 13, 315 | "metadata": {}, 316 | "output_type": "execute_result" 317 | }, 318 | { 319 | "data": { 320 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAjUAAADlCAYAAACrtAaeAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOzdd3gU1frA8e/sZnfTIY0WOkpHNPQuglKkSC5VwN5QVK6ioAiEoshVkYtwFRRFkR9FA9JBUem9V5EWQhJIIAmk7mbL/P5YWbIkIW0TSPJ+nsfH7Mw5Z87Oktk3pyqqqqoIIYQQQpRwmrtdASGEEEIIV5CgRgghhBClggQ1QgghhCgVJKgRQgghRKkgQY0QQgghSgUJaoQQQghRKkhQU0QOHjzIs88+y9ChQxk8eDCjRo3i0qVLuebbv38/zz33XJ6v8+KLL7Jnz57CVJWMjAyGDx9OvXr1iIqKyjX95s2b6d69O8OHDy/UdYUQZUNBn4d3MnfuXObMmVOgvKtXr2bcuHG5psvv87gwYmJiGDhwIPXq1St0WadPn3aUNWDAAE6fPg3Y33f37t1p3Lix0/N748aNDBo0iOHDhzNkyBBGjRrFsWPHCl2Pu0IVLrd79261c+fO6vnz5x3HNm7cqLZv3169cuXKHfPabDY1KSkpz9dKTk5WbTZbgeuaWd26ddVLly7lKW14eLg6bNiwPKWdNWuWOmbMmMJUTQhRQhXmeXh7GZmZTCbVaDQWqE4Wi0VNSUnJNV1+n8f5cenSJbVu3bq5Hits+REREU7Hw8PD1Q4dOjheR0REqK1bt3Z8FlarVX3vvffUb775xiX1yIv8fPfkRlpqXMxmszFhwgReeeUVatWq5Tj+2GOP0axZMz7//PM75lcUBR8fnzxfz9vbG0VRClxfIYQoKoV9Ht6JXq/HYDAUKK9Wq8XLyyvXdPl9HpdEJ0+eJDg4mIoVKwKg0Wh45plnqF279l2uWcG43e0KlDYnT54kIiKCtm3bZjnXoUMHpk2bhs1mY/LkyaxZs4Zhw4Zx9uxZDhw4QPfu3Tlx4gRHjhxxNBcmJCQwZswYEhMTqVixIuXLl2fz5s0MGTIEd3d3vvvuOwYPHszrr79OWFiYo8zz589z+vRpunXrxltvvQXAmTNnmD59OkajEbPZTGhoKIMGDcrT+0pLS2P8+PGcO3eOSpUqZWkiXbp0KStWrECv16MoCuPHj+e+++5j3bp1rFixApPJxPDhw2nbti0jRoxgzpw5bN26FYPBgIeHB5MnT3b8UgkhSoe8Pg/feOMNtmzZwgsvvMDRo0eJi4ujTZs2jBkzhosXL/LRRx9x9epVhg8fjp+fH4MGDWLKlCkEBQWxcOFCli5dyty5c2natCk+Pj4cPHiQ+vXrM3LkSGbMmMGpU6d45plnGDp0KKdPn+bdd98lOTmZP/74gz179jBx4kSCgoIAiIiIwMfHhx9//JFXXnnF8Tw+evQo48ePJzk5mSeffJLNmzdz48YNZs2a5QjYjh07xsSJE9Hr9dSrV4+///6bxMRE3nnnHbp06eJ478nJyY7n8s1uoAULFjjOr1ixgpUrV3L58mWmTZtGSEgIAPHx8UycOJHExESsVisvvPACXbt2LdRnFBwczKlTp9i0aZOjrLp161K3bt0c86xatYoff/wRd3d3AEaMGEGbNm0wm83MmDGDQ4cOAfDQQw/x1ltvodPpOHfuHJMmTQLAYrHQv39/QkNDeeGFFwB46623MBgMfPrpp4X7LnBJe49wWLt2rVq3bl01IyMjy7lt27apdevWVa9du6aqqqoOGzZMffbZZ1WLxaKePXtWXbZsWZbmxzfeeEMdP368qqr2rqZHHnnEqStnzJgx6qxZsxyvhw0bpr744ouqzWZTY2Nj1YYNGzqaFQ8fPqwePnxYVVVVzcjIULt3765euHDBkfdOTYDTp09Xn3/+edVqtaomk0kdMmSIU/fT4sWLVZPJpKqqval4yJAhjnPZdT/98MMPjm6z8PBwdfTo0TndUiFECZWf52Hnzp3VUaNGqTabTTUajWrv3r3VJUuWqKqafffT7V3gs2bNUjt27KgmJSWpJpNJbdOmjTpu3DjVZrOpx48fVx988EHVbDZnKW/fvn3qqlWrVFVV1bi4OLVVq1bqli1bVFXN2h20e/dutVGjRuq+fftUVVXViRMnOp7PJpNJ7dixo7p69WpVVVX15MmTaoMGDdTw8PBs782dup/Wrl2rqqqqzps3T33uuecc55999ll15syZqqqqamxsrNqyZcscn9k3y+rfv786bNgwx3/dunVz6n5SVVWdMmWKWq9ePbVHjx7qF198ccduwQMHDqht27ZV4+PjVVVV1fXr1zue77Nnz1affvpp1WKxqBaLRX3uuefU2bNnq6pq/y67+b7i4uLU559/3lGmdD+VIp06dUKr1VKnTh0GDBjgdM5qtbJp0yb69OkD2LuaOnfunGuZ7du3R1EUKlSoQPny5YmOjgagRo0a/PzzzwwePJjnnnuOq1evcvLkyTzVc8OGDfTq1QuNRoNer+fRRx91On/ffffxyiuv8OSTT/LZZ59x4sSJO5ZXuXJlnnrqKYYOHcr333+fa3ohROnXs2dPFEXBYDDQrVs31q5dm6/8DzzwAD4+Puj1emrUqEG9evVQFIV69eqRlpZGfHx8ljzNmzend+/eAHzwwQd0796djh075ngNT09PmjdvDuA0ueLw4cPEx8fTo0cPABo0aECdOnXyVf+bbl6/fv36jvJjY2PZsWMH/fv3B6BChQqEhITkeo8+/fRTFi5c6PjvpZdeypLmgw8+YP369XTt2pXw8HC6devGrl27si1v+fLldOzYEX9/fwC6du3KkCFDAFi5ciVPPPEEWq0WrVZL3759Wb58OQDlypVjw4YNREVFERQUxBdffFGAO5M7CWpcrFq1agDExcVlORcbG4uvry9+fn6OY3fqr01ISMBisTilL1euXK518Pb2dvxsMBgwm80AfPzxx8THx7No0SIWLlxIgwYNMBqNub8p4OrVqznWIzk5mZdffpmBAwfyf//3f8yYMeOO5UZERDBq1CjeeecdFi1axPvvv5/negghSo78Pg99fX0dP5cvX56rV6/m63qZx8m4ubk5Xru52Uda3HwWZmfp0qVcuHCBd999947XyOn5evXqVXx9fdFqtU7voSBuXkOv1zvKv3LlCgBjxoxh+PDhDB8+nEuXLpGWllaga9yuVq1avPXWW2zatInevXsze/bsbNNduXLFEdCA/d42bdrUcS7z5+nv709sbCwA77//PvXr1+fpp59myJAhHD582CX1vp0ENS7WqFEjatSowc6dO7Oc2759O926dUOjydtt9/f3x83NjYSEBMex69evF7huR48epW3bto5fujv9gt8uKCiIxMTEbOtx4cIFUlJS6NChA2DvL72TkydP4uXlxQMPPJCn9EKIkim/z8MbN244fk5MTHSMcylqkZGRfPLJJ0yfPh1PT88ClREUFERSUpLT86wwz+vbVa
pUCYBZs2Y5Wl3Cw8OzbXnJjyNHjjhaU8A+iLpr164kJydnm75y5cpO30kWi4W//vrLcS7z90RCQoJjfExSUhKvvvoqmzZtYtCgQYwYMcJlAVlmEtS4mEajYfLkyXz11VdEREQ4jm/atIkjR44watSoPJel1Wp59NFHWblyJQApKSls27atwHWrXr06R44cAex/Od0cjJwXPXr0YPXq1dhsNjIyMti4caPjXJUqVXBzc+Po0aMAWero5eVFeno6qqry2muvUaNGDZKSkrhw4UK26YUQpUN+n4cbN25EVVVMJhMbN27k8ccfB249QwCmTJnC5cuXXVZHm83GmDFjGDp0KA899BAAEyZMyHc5Dz74IAEBAaxbtw6AU6dOOb3n291sRUpPT2fevHm5tlxUrFiR9u3bO74PACZOnFjodcqMRiPLli1zBBg2m43ffvuNFi1aZJu+X79+bN261RHYrFu3zhEU9evXj1WrVmG1WrHZbKxatYrQ0FAA3nvvPa5du4aiKLRo0QKLxeKYuevp6YnRaGTlypVs2LChUO9HGxYWFlaoEkQWVatWpVGjRkyfPp2ff/6Zn376iStXrvDZZ585otb//Oc/bNmyhVOnTmE2m3nooYdISEjg9ddfJzY2lr1799K3b19at27NL7/8wvfff8++ffuoW7cuVquVrl278s0337By5UrOnDmDp6cna9eudZTZqFEj5s6dy/79+zl+/DgtWrSgXbt2LF26lOXLl3P69GmMRiP79u2jZs2ajBs3jujoaI4cOULHjh2dmljBPop9165dfPnll2zevJmGDRuydetWYmJi6NmzJ/7+/nzyySfs3LkTRVE4cuQIhw4dom/fvvj7+7Nw4UJWrVpFu3bt6N69OxaLhU8++YTdu3ej1+s5cOAAERERWcbqCCFKtrw8DwG+//57Hn74Yb766iu+/fZb2rRpw0svvYRGo8Hf35/t27ezZMkSFEUhODiYzz//nMjISGJjY0lKSuK7777j/PnzeHh4sGXLFjZt2uR4Fn744YecP3+eI0eOUKNGDT766COio6P566+/yMjIYOHChfj6+vLrr7+yfv169u/fT+/evZ2ex02aNGH8+PFER0dz5coVAgIC+Pjjj4mMjOT69et07NiRZs2aMXPmTH755RcSEhLQaDQ0atSIBg0aZLkvHh4eREREMH/+fK5evUq/fv0YNWoUsbGxnDx5kpCQEMaOHUt0dLTj2di+fXuWLFniaKVp2rRplrGYYF98b+zYscTGxnLkyBGaNm1KYGAgq1ev5quvviI2NpZdu3YRGhqKp6cnkZGRzJ07l1WrVrF48WKCgoIYPXo0er0+S9mVK1cmMDCQjz/+mFWrVnH58mXef/999Ho9TZs25dy5c8yePZvw8HAaNWrEyJEj0Wq12Gw2PvnkE1atWsXKlSsZM2YMDRs2BOx/sM+dO5e///6b559/Pk/T7XOiqKqqFji3KHJJSUl4e3s7mmgnTZqEp6cn77zzzl2umRBCuM4jjzzCtGnTaNWq1d2uSoFdv37daRzN448/zrvvvkunTp3uYq3KFul+usfNnz+fHTt2APYAZ/PmzbRv3/4u10oIIcTt3nnnHUe3zPHjx7l69apjEK0oHtJSc4/bsWMH//3vfzEYDKSmptK3b1+efvrpu10tIYRwmZEjR7JlyxZq167Nhx9+SOPGje92lQpkwYIFrFq1Ck9PTzIyMvj3v/9NmzZt7na1yhQJaoQQQghRKkj3kxBCCCFKBQlqhBBCCFEqSFAjhBBCiFJBghohhBBClAoS1AghhBCiVJCgRgghhBClggQ1QgghhCgVJKgRQgghRKkgQY0QQgghSgUJaoQQQghRKkhQI4QQQohSQYIaIYQQQpQKbq4qKCYmhqlTpxIYGEhcXBzjxo2jWrVqWdIdOHCAsLAwOnXqxOjRox3HVVXlk08+IT4+npSUFLp06UJoaKirqieEEGWaxWLhhx9+YNasWYSHh1OnTp0saeQ5LEo6lwU1YWFhDBw4kK5du7J582bGjx/PggULnNKcO3eOw4cPU69evSz5N2zYwMWLF5kzZw4mk4kePXrQsmVLqlat6qoqCiFEmRUeHs6DDz5Ienp6jmnkOSxKOpd0PyUmJrJ9+3Y6dOgAQNu2bdm/fz+xsbFO6erUqcPzzz+Pm1vWWGrlypV07NgRAIPBQMuWLVm7dq0rqieEEGXeoEGDCAkJuWMaeQ6Lks4lQU1MTAyenp4YDAYA9Ho9vr6+REdH57mM6OhoAgICHK8DAgKIiopyRfWEEELkgTyHRUnnkqBGVVUURclyPLtj+VHY/EIIIQpHnsOiJHHJmJrg4GBSU1MxmUwYDAbMZjNJSUlUqVIlX2XEx8c7XsfHx1OzZs181SMxMRWbTQUgIMCb+PiUfOUvC+S+5OzmvdFoFPz8vO52dYQodq54DoPzsxhg4kQDa9bo8PeHX39NocTESenpBCRfI0HjgRoYaD+mqugOH8QQ/hM2fz/S3hrjSK6NvIjuj99wO3qE1HFhqJlavbzHvYthwzqsVYK5vnrjrTx/n6b8kH8BkPLue5gGDXWcc/+/hXh9Nh2btw+Jm3eS+caV7/UY2ssxGPv1J/WDMMdx3YH9+L70DABJ877D3KzFrTpMeA/D2tVYg6tyfdWGW+/TZiOgxQMApL00gvSXX3OcMqxcgffk8QAkrlyPreqtCUA+I15Av3c3Ga3bkDzn61vv6fw5yg/oC0DylGlk9OztOOcx7394zv0fNj8/Ejdtc7rdt38/FeRZ7JKWGj8/P9q1a8e2bfYK7tixg5CQECpWrMimTZtITk7OtYw+ffqwdetWAEwmE3v37uXxxx/PVz1sNtXx3+2v5T+5L/m5N0KUFZmf0a54DkPWZ0zlylYuXoRDh+DixXvrGUREBG6rV6H7aanTceXIEfzvrw4NG6L/OdM5qw3Pl5/H8NUc3Nasccqj2bsHr9H/xvDDApTz55zL+/tvuHgRm8nkdNxcOdh+Uy5exGaxOdft0iW4eBHNieOoCQnO56Kj7fkiLji/pxs3bpWXbnQ6p16Lt5+LinbOo+LIoyYlO59LSblVntbN+f2eOGE/F3PZ6bjV4G4vKzISm9ninOfoUbh4EeXo0Sznsvu3k1+KqqoueYJHRUXx4YcfEhQURFxcHO+99x41atSgV69ehIWF0bx5c2w2G1OnTmXXrl14eHjQtm1bx7RuVVWZPn06CQkJpKSk8Mgjj9C/f/981SE+PsVxE4KCfLh6NfdgqqyR+5Kzm/dGo1EICPC+29URwqUOHTrE6tWrWbRoEb1796Z79+507drV6RntiucwOD+LAfbt0/D44/a/uL/5Jp0+fSwue1+F5TvkXxh+/w1Vr+da1LVbJ8xmAhrfhyYxkbRX3yA1bKrjlPv8eXiHjcPUsxfJc79zHNfERBPwYAOs1WuSNHsultZtHOd0u3eiibiA6uNLxuO3Wi4A9BvXg0bB8sCD2CpWupVn8x+4nTqJtUZNMh5+BDw9Hee0x4+BVostIBC1QoVbhaWlob0cjb+fF1fdyzvlUZKTw
JQBGgXV/1Yr0s18KAq4uYFOd+t4ejpKSgqK1YItqAJotbfKu3oVTfw1VL0eW+1MSwTYbChpqahe3mRpljOb0VyOQXP5MpbmLZzKu/37qSDPYpcFNfcCCWpyJ/clZxLUCOEatwc16elQp443FovCq69mEBZmuou1c2b4eSm+r74IwNXIOHB3v3Vu8Y/4BpUnoWY9rPfdfytTWhpKSopzMPEPTewVp8DkbimJz3pXBDUuW6dGCCGEyI6HBzRoYOPYMS2HD9+lhewtFvS/bkBJTcE0YLDjsKlHLxLX/IatYkXQ652ymIYMgyAfrLcHB56eqJlaQDK7FwKaskyCGiGEEEXuwQetHDum5cgRLVarU6+DS2miLoGioOoNqEFBjuOen36M14z/oLq5YatWHXPrtvYTXl5YWrYqmsqIYid7PwkhhChyDz1kAyA1VeHMmaL76vEe8xYBDzXEZ/SbTsdNAwYBoHp4oj1xrMiuL+4uaakRQghR5B56yOr4+fBhDfXr24rkOpp4+2BfzZUYp+PWOvdz48elZLTrCF6yZENpJUGNEEKIIlevng0PD/ug4UOHtAweXPgZUMqN6+h27SSje0/HsdQxH6C9FInt9tk9QMZjPQp9TXFvk6BGCCFEkXNzg2bNYPt2e1BT6PIOH8T3hafRxERzfdUGLM1bAmDu3AVzoUsvwaxW3I4dAW89btfTsAVVsC+Yl59BTDYbZGSgmDNQFQ3o9ShJSVmngqekYNi4Dk1CPKqHJ6q7O6rBHdXHB0tIM1TfclnLTk3FsGkjut07sQUGYalbj4xefbNO/S4gCWqEEEIUixYt7EHNiRMaMjKyTDbKH0VBczkGxWLBfeliUv4JaoqFquJ2/Cja03+hiY4CNx2mHo/b12qxWtFeOI/27Bkyuj5mj+b+4f32m6AoWEKaYWnQEE1MDLo9u9CeOY328mWU1BSSv/jq1iBmiwW3wwfR7dqJJu4KtsAg1KAK2Pz8UX180ERH4fb3adJGvH5rxWOg/BOPQ1oqfjer6+6OrUJFsFhQzGYwGkmZMQtT31BHHo95/0P/20a0F86juRSJks1qL6nvvk/a6LH2F2Yz/m1C0MZeyf4WabVYGj9A8uy5WOvVtx+0WvFv/ZBTHmtwVRJ6P1GADyF7EtQIIYQoFk2b2v9vNitcvKjh/vsLPq7G0vQhUiZPQ7GYSX/pVRfVMHtKbCxqxYqO15rYK5R/tBOK7Vb9vSd9gKXOfWgvx6CkpQEQv/cItpq17AlMJtzDl9rP/fBtjteyZpoSromLxa9n11zrZ3oiFMvNoEarxdK4Cbq9u2/V32hEG3nROdM/dbzJ7eB+9Fv+vON1tBcjbr3Q6ch4tBseP36fbVrFakV35BC6PbtuBTVaLeZOndEuW4xqMKCYTM7r/7iABDVCCCGKxf2Zvr8uXFCcXudKVcFsdmreMT7/kusqlw3l6lX4YDQBCxaQuGkb1voNALBVqkxG18cw/LrBKb3bubNOr7XnzzqCGk38NczNW6HbuwvFaHSkUd3csNatj7VqVVQf31tBEGCrEoy1Rk20FyNQPTxQ0tOz1FHValFSnPfzS572Kf4ZydxITEETHY32zGk0CQmoOh246VDdDVmCCVulKpgfeBBr7drYatTC5u0NOj2oqr0byt0d84PNnPKkvzKSjMd6YG7eEsVkRDGmg9GE9nI0ui1/otu7234sc56XRmB6vA8Zj3RFMWfYu7VcSFYULmPkvuRMVhQWwjVuX1H4Fh9uLsI7ebKRV17J++gX9/lzcV+2mKRvf8QWXNU1Fc2BkhCPx9w5eHw9F02K/XmZ8UhXbixZ7kijPXcGzBasVauhjY7CsHwZukMHsdashaVJUyz1G2Bp0Ai8b3uWWCxoz53F7a+T2PwDMIc0v+NsLLd9e7BVrmIfF5OaiubaVTTXE1Fu3MAWVAFrnfuy7ccric96WVFYCCFEiREYCD4+KsnJCufP532tGrcjh/Ae/x6KxYLvU0O4vmmrywaWOrHZcP/uG7w+nOQIZgAyOnchZdJHTkmtdW61dFjr1SftvQl5u4abG9Z69W91yeTC0iLTwoBeXti8vLDVqJm3a5VBsvieEEKIYqEoULu2fRzKhQt5//qxNGiEcfgzqHo9Kf+ZUSQBjSbiAuWe6InPe6MdAY25WXPYsIEbS5bnOQgRd5cENUIIIYpNQYIa9HpSps8gcetuLM1auLxOmkuR+PXsgn73TgCs1apzY9Eyrq/7Hbp1K5pWIVEkJKgRQghRbGrVsgc1UVEKpnxu1m2tfV8R1AhswVXJ6NgZgPRnXyBhy24yHu0uwUwJJEGNEEKIYnMzqLHZFCIjc/4KUuLi0K9ZBZbCrzycK42G5FlfcmPB/5EyfUbWwb2ixJCgRgghRLG5GdQAnD+fc0uIx48LKPfcMPybN0GTwwJvBaaq6FetwKmpSK8no2cv115HFDsJaoQQQhSb2rVvTfXOcQaUqmJYaZ8+bfMPsK+G6yJKSjI+r71EuReexvfVF8FqzT2TKDFkSrcQQohiExCg4uurkpSk5DxYWFFIXPc77st/whYY5LKxLdrzZ/EdOtCxSJ7bnl1oYqKxVavukvLF3SctNUIIIYqNotzqgrrjWjVeXhiHP0NGj8ddc92kG04BTUbHziT+vl0CmlJGghohhBDFqkDTugvDZsPn1RcdAU3aiNe5sWyF035OonSQoEYIIUSxutO0bu2Zv8n3XO9ceM74j2OfJlO3HqROnAIa+forjWRMjRBClBExMTFMnTqVwMBA4uLiGDduHNWqVXNKExcXx4QJE6hSpQopKSkEBATw7rvvorhwzZabQY2q2nfrrlv3nxlRqkq5oQNQ4uNJf30UaaNGF/pabgf34/npxwBY6txH8px5EtCUYhLUCCFEGREWFsbAgQPp2rUrmzdvZvz48SxYsMApzbx586hUqRITJtj3MurZsyetWrXi4Ycfdlk9bnY/AZw7dyuocTt6GG3EBQBUd3eXXMvt8CFQFFSdjqRvfkD1LeeScsW9ScJVIYQoAxITE9m+fTsdOnQAoG3btuzfv5/Y2FindBUqVCA+Ph4Ao9FISkqKS1tpAOrVuxXUnDx562vIcn89bnzzPaZefTH16eeSaxmfe5Hra38j5ZOZWBs1dkmZ4t4lQY0QQpQBMTExeHp6YjAYANDr9fj6+hIdHe2U7oUXXkCv1zNixAiefvpp/vWvf9GpUyeX1sXXF6pXtwc2x45l+hry9CSjTz+Svl2IrUqwy65nCWmO8cnhLitP3Luk+0kIIcoAVVWzbXG5/djnn3+Ol5cXn332GRkZGbz88sscPXqUBx54IM/XCgjIeZuBoCAfAJo3h8hIOHVKR1CQLs9l54mqQkoK+Pi4pLibdS5pSmK9C1tnCWqEEKIMCA4OJjU1FZPJhMFgwGw2k5SURJUqVZzS/fHHH/z73/8G7K05DRs25KeffspXUBMfn4LNpmY5HhTkw9WryQDcf78eMHDhApw9m0w5X9Vli+wZfl6K98RxpIyfhGngkEINDM5c55KkJNb79jprNModA+TsSPeTEEKU
AX5+frRr145t27YBsGPHDkJCQqhYsSKbNm0iOdn+ZVKzZk3Onj3ryHfu3DkqV67s8vo0aXJre4ITxzWU79IBn5efRbd9a+EKTknBa/IENFfj8J4yESUttZA1FSWJy1pq8jJVUFVVPvnkE+Lj40lJSaFLly6EhoYCsHz5cj7++GN0OnszZKVKlQgPD3dV9YQQosybOHEiH374IVu3biUuLo4pU6YAMHPmTMLCwmjevDnvv/8+U6ZMYfLkyaSmpuLj48Ozzz7r8ro0bnxrsHDcphPojh9Fd/wolpDmmNt3LFihNhu+b76K9splAFLGT0L1LnldMKLgXBbU5GWq4IYNG7h48SJz5szBZDLRo0cPWrZsSdWqVQH44osvaNWqlauqJIQQIpOqVavy5ZdfZjm+Zs0ax8/BwcF89dVXRV6XypVV/P1tJCRo+PusG8YnQtFv+g1T99wM7loAACAASURBVIJvi+A5fSqG1b8AkNHhYXvXkyhTXNL9lNepgitXrqRjR3sEbjAYaNmyJWvXrnWc//nnn5k+fTqTJk3i9OnTrqiaEEKIe5Ci3GqtWR/VlOR5C4j/6wK2GjULVJ7799/i9fmngH2RvaT538sie2WQS1pq7jRVsGKmvTWio6MJCAhwvA4ICCAqKgqAunXrUqdOHZo2bUpkZCSDBw9mxYoVTvmFEEKUHo0b29i6FU6f1mAygcGgz38hqorn55/g9fFUAGzly5O0aBlqeT8X11aUBC4JavI6VTA7N9M0bnxrUaTq1atTv359Nm/ezKBBg/Jcj9tHSZfE6WzFQe5LzuTeCFF8bg4WtlgU/v5bQ5MmtlxyZOXxxee3Aho/P24s+glr7ftcWk9RcrgkqMnrVMHg4GDHSpUA8fHx1KxZE4ALFy5Qq1YtxzmdTofRaMxXPTJPIyyJ09mKg9yXnN28NwWZRiiEyL/GjW2MYyp/U5fT+zrRpIlnvsswPvM8biePo9u7hxtLlmOtW68IaipKCpd0OOZ1qmCfPn3YutU+Xc9kMrF3714ef9w+KGzq1KncuHEDgLS0NI4fP07Lli1dUT0hhBD3oLqGCCYzgWUM4v6FUwtUhupbjuQv55O4dbcENMJ1s5/yMlWwR48eHD16lHfffZeUlBReffVVx7TvTp06MWbMGGrUqMGlS5d45513aNCggauqJ4QQ4h5jOPcXydrylLMmskj3NE3zmjE9HWw28PKyv1YUmbotAFBUVc267GMJJd1PuZP7kjPpfhLCNfKyovBNb70KiT9vYU9gT06ezH2hPCU+nnLDB2Hz8yPp+8XgVrQL45fUZ2ZJrLcrVhSWbRKEEELcNXUa6Zj0cy+4BlevKgQF5fx3tibyIuUGPoHb+XMAeMyfS/rLrxVXVYuUzWYlLS0Zq9WKK9oajMYbGI1mF9SsaCmKglarxdPTNS1tEtQIIYS4a+rXvzXj6fRpDUFB1mzTaSIuUD60F9qoSwAYQweQ/swLxVLH4pCWloybmx4vL488zRzOjaennrS0DBfUrGipqorJlE5aWjJQvtDlycpEQgghipWSmID7wgUoyUlZgprsaP8+Tfl+jzsCmrRXRpL8v6/hn7XRSgOr1YrB4JqApiRRFAWDwQOrNftgNr8kqBFCCFGsdNu34vP2GwQ0qUu1pJP4+Kgo2Dhz3Lm7RHMpEq/338Gvc1u00faFWtPeeIvUSR+WutWCc1rvrSxQFMUlXW4g3U9CCCGKgdvhg3B4L+4aA/rfNtgPms14zfqMM8ad+HOF7zePBd515DFsWIvnN3Mdr1PfHkPau+/b91gQ97x161Zz6NABxo0LK7ZrSlAjhBCiyOl2bIdJH5B5OKhiNuMevgz3f167xV1BVTPFLBZ7l0RG+46khk3F8sCDxVllUQJJUCOEEKLIKUnXsz2u6nT8fV93wk81Zqe5Lc1iFSpVsndFmHr2wtyuPZYmTaV1ppj9/vuvbNu2hQoVKnD27FmeeupZrFYrK1cup1KlSsTGXuGll14jOLgqFy9G8PXX/6Nq1erExl6hdeu2NGzYmI0b13PtWhwzZkynXbuOtGrVpsjrLUGNEEKIIpc2djxekyYQf/YS2vPncPvrJKrBHdPjfTh2oiLjBti3SHjqVBqVKtlbaGw1apL/3aBKvoMHNcyYYSAlpeBlaDQabDZtluPe3vDWWyZCQnK+sxERF5g9eyZLl/6CXq9n587tHD58kJ9/XsrChUvx8/Nnz55dhIW9z9df/8CKFT/RoEEjhg59mpSUFH79dT3VqlWnW7ceHDp0gLfeGlPwN5JPEtQIIYQoeooCPj7Ygqtiq1oNc8eHHadunwHVubNrZsKUVPPm6fn1V1d8PWc/mNrbW+Wrr3LeW3H//j3cf39d9Hr7rult27bHZrPh5+eHn58/AE2aNOXUqZNcv36dVq3a8OGHk4iJiaZz567069ffBXUvGAlqhBBCFBu/zu2wBQZhHPYUpif+BUCFCip+fiqJiQp//VW6ZjUVxEsvZZCSorigpSZra4y3N7z8cl7Wr8na3Zfd7CxFgTZt2rNkyQo2b97E7NkzadSoMe+8835Bql1oEtQIIYQoHpcu4XbyOAAZHTo6DisK1K9vZdcuN/76K2uXSVkTEmLjxx/TC1VGYRbfa9GiNYsW/UBGRoaj+yki4jwJCQkkJibg5+fPsWNHaNCgIeXKlefbb+cxYMAQevV6gnr1GvLRR2EA6PV6bDb7CskbNqylR49ehXpPeSFBjRBCiOJhtZI+/Fl027dgbtfB6VT9+jZ27YK//tJgtYJWYpu7pkaNmrz22pt8+OFEAgMrkJqawptvjqZ+/YZ89tl0KlasyNWrVwkL+wiAoKAKfPTRJIKDqxIXF8srr7wOQNOmD7Fs2WKmTJlASEjzYqm7bGhZxsh9yZlsaCmEa2TZ0NJiwW3/PvxiLnA19En7Mae527BwoY6337ZP7t61K4U6de6Nr6biemZev36N8uUDXVZeSdkm4abr169x//21Cr2hpXReCiGEKFJeH07Cr083eO01lBv/TO2+bXxG48a3BgcfPy7NNKJgpPtJCCHKiJiYGKZOnUpgYCBxcXGMGzeOatWqZUm3du1aDhw4AMCZM2cYNWoUzZo1K/B1Mzo+jOec/4LVim7rFjJ6982Spn59GxqNis2mcPy4hr5ZkwiRKwlqhBCijAgLC2PgwIF07dqVzZs3M378eBYsWOCU5sSJExw+fJgJEyYA9kBIW8gBLuaWrVE1GhSbDcP6NdkGNR4ecP/9Nk6f1kpLjSgw6X4SQogyIDExke3bt9Ohg32Abtu2bdm/fz+xsbFO6RYuXEilSpWYMWMGkyZN4tChQ1SsWLFQ19bt3Y3yz/Ri/W8b7ONpstGokT3N8ePy1SQKRlpqhBCiDIiJicHT0xODwQDYp9v6+voSHR3tFLScO3eOy5cv8+2332K1WhkyZAgGg4GuXbvm+VpZBne62cDfHxIS0Ny4QdCVCHjggSz5WrWC5cshNlaDqvpQoUKB3qrLBQX55J6okIzGG3h66l1apqvLK0pGow4o/L2WoEYIIcoAVVVzWDzN+Vh
qaiq9e/dGq9Wi1Wp57LHHWLduXb6Cmiyznzo8imbd7wS0fgiAlJ9Xkl65VpZ8NWtqAft2CVu2pPHww3d/ZeHimv1kNJpdOluppM1+MhrNADL7SQghRO6Cg4NJTU3FZDIBYDabSUpKokqVKk7pKlWq5DSGRqfTOfIUhq12HbjvPgD0f/yWbZqb3U8gXVCiYORfjRBClAF+fn60a9eObdu2AbBjxw5CQkKoWLEimzZtIjnZ/hdyjx492LNnjyPf/v37adeunWsq0aMHALo9uyA1NcvpChVUKla8Oa5GBgvfTf379+by5Zi7XY18k6BGCCHKiIkTJxIeHs6ECRNYsmQJU6ZMAWDmzJmcPn0agNDQUKpXr86ECRP44IMPqFWrFoMHD3ZNBTp1AkCxWHA7cTzbJI0b24OaEyfk60nkn4ypEUKIMqJq1ap8+eWXWY6vWbPG8bNWq2X06NFFU4GQEMePbscOY2nZKkuSxo2t/P67G2fOaEhPt0/1FsVr2bLFJCUlMX/+XLy9fRgwYDDjx4/F3d2dunXrc+jQAapUqcK0aZ858hw7doTp0z+kRo0aeHl5c/ToEdq0aceVKzFs27aFt98eyx9//Ma5c2dZt+73Iqu7BDVCCCGKR82a2MqXR3P9Om5Hj2SbpEkTe0uNzaZw5IiW1q3v/mDhu8GwZBHuSxbdMY2lcRNSp053vNYeO4r3+LH2nzUKOlvWqfPGwUMxDR56x3IHDhzCsmX/x/PPv0zlyvYxVyNHjmLs2LeZPPljRo4cxdq1q5zyNGnSlCefHM63385j4cJlZGSY2L9/L2+++Tbt2zenQoWKfPHFXMLDl+Xp/ReUBDVCCCGKh6JgafIg+m2b0eUQ1LRqdSuI2bGj7AY12siL6Hduz1ceTdINpzzZjUoyt21f4DrVqlWbwED7/lR9+4Zmm6ZRoyZ4eHjg4eFBly6POY63aGFvlfvXvwYW+Pp5IUGNEEKIYmN5oCn6bZvRnj4FRiO4uzudr1hRpW5dK3//rWXHDi1vv32XKnqXWavXICOXAMTSuInTa5tvOUcerUbBmk1LjbV6jQLXSafT5ZpGr89+bZycjruaBDVCCCGKjeWBpsA/g4X/OonlwZAsadq2tQc1+/Zps4t7ygRTHrqJbmdt8gA3flkHFH6dGr1ej81mY9++PdSoUbPA5RQ3GV4uhBCi2NwMaoAcx9W0b2/vcjKZFA4ckKndd0PXrt2YPXsmy5cvw2az8dNPS4iKusTcuXOyTR8ZGcHGjes5efI4ixf/6Dj+9df2gekzZkwnISG+yOutqGoOm3DkU152f1VVlU8++YT4+HhSUlLo0qULoaGhuZ7Lq8yrWBbXKpAljdyXnN28NwVZxVIIcUuWFYX/ERTkw9XYG3h8NQdLkwewNH0Q1bdclnTXrik0bGj/HXz7bRNjxty9lXGL65l5/fo1ypcPdFl5JW1F4evXr3H//bUKvaKwy7qf8rL764YNG7h48SJz5szBZDLRo0cPWrZsSdWqVe94rigk7d7JteXhWBLicfMPIDD0X/i2buuUJvKz6RhPncq1LDf/ADwfeIC0o0fvWF5B7DpxheVbzhGfZCLA10Bopzq0aVQp2+Pbj8Zw6uL1LGV4e7jRon4F9p6KJdVo/wvIoNPipsXx+k683LU8+Wg92jSqVOj3c7vMn4PG2xtVVVFTU7Pcw6TdO7nywwLIyOGX1M0NLBYAFC8vKg4Z6pL7L4RwMY2G9Fdfv2OSwECVBg2snDplH1cjRF65pPspr7u/rly5ko4dOwJgMBho2bIla9euzfWcqyXt3knsDwuw/NMUZkmIJ/aHBSTt3ulIk9eA5mb+pM1/3rG8gth14grfr/+L+CT7EuXxSSa+X/8XCzf+leX416tPZhvQAKSkW/jzUIxTAGMyW/MU0IA98Pl2zUl2nbhSqPdzu9s/B1tKCuo/q4xmvodJu3dyZf7XOQc04AhoANTUVK58N7/Q918Icfe0a2d/Ph04oCUt7S5XRpQYLglq7rT7a2bR0dEEBAQ4XgcEBBAVFZXrOVe7tjwc9bYvSDUjg2vLwx2v8xrQ5OT28gpi+ZZzZFhsTscyLDa2HI7JcryoWVV7fVwpu88hs5v38NrycMhvL6nVWuj7L4QoBjn8bt8MasxmhX37pLVG5I1Lup/yuvtrdu6UJi/5M7u97y2nLcz/TkzI9rglMcGR5+98XTl7mcsriISk7DeRy6arulgkJJkKvS18Zjl9DplZ8pDmTnnzW19Xvj8hRPaU64n4vP0mbkcOkzbyTYzPPJ8lTdu2FjQaFZtNYf16Nzp1Kpvr1Yj8cUlQk3n3V4PBkOPur8HBwcTH3xr9HB8fT82aNXM9l1d5HSjs5ufv6PK4/bgrB4QVtjx/X4OjiykzjXJ3Aht/X4PL7092n8PtaYBc0+WUNz/1lYHCQhQP1bccul3b0Vy7hn7blmyDGj8/ePhhK3/84caKFTomTTLxT2eAEDlySfdTXnd/7dOnD1u3bgXAZDKxd+9eHn/88VzPuVpg6L9QblsISNHrCQz9l+O1e4MGhbrG7eUVRGinOujdnD8ivZuGTg9WyXK8qGkVe31cKbvPIbOb9zAw9F+Qz1Y7tNpC338hRBHRaDD16IWpe09MXR/LMdnAgWYAEhMVfvtNllUTudOGhYWFuaKgkJAQ5s6dy969e9mzZw8TJkygfPnyvPHGGzRp0oQqVapw3333cezYMVatWsXq1asZOHAgbdq0AbjjubxKT89wdM96eRlynM5mqFoNXUAAxogIbOnpuPkHUGHwk06zZcq1bU/qmdNYrl3L9bpu/gF4t2qNNSk5x/IKoloFbwLKuXPxShLpJisBvgaGdK3L421qZjk+7LF6pJvMXLthzFKOt4cb7ZpU4ur1NMwW+w0y6LS46zWO13fi5a7lqR4NXD776fbPQePtDTodmM1O99BQtRq6ChVIOX4MrDk0Qbu5gc0+zkjx8qLS8Kfzff9v/ptRFAVPz+JZ/VKI0ijzszizzM/ljG49MPXrj7XJAzmWU7Omjfnz9WRkKJhM0K+fJce0ReVO3yWuZDSm4e7u6bLydDotZnPBu+z69+9Nhw4P4+NTPF3yRmMaAQF+Tve6IM9il61Tcy+QdWpyJ/clZ9L9JIRr3HGdmnw+f/79bwOLFulxc1M5ejSVwMDi/coqq+vU9O/fmy++mOvY0LKo3XPr1AghhBCuNmiQhUWL9FgsCr/84sYLL5jvdpVKvWXLFpOUlMT8+XPx9vZhwIDBjB8/Fnd3d+rWrc+hQweoUqUK06Z95pTvm2++4vr1RDw8PImKusTEiVMB+M9/PsTfPwCj0Yi7uzsjR44qsrrLNglCCCHuCs2lSDzmzsH3maFgyn62Z8uWVqpXt3ct//xz7hsqlhaGJYso90RPyj3RM8s5n5eeodwTPfGYNcPpuPbYUUcezW1bUHjMmkG5J3piWLIo12sPHDgEX19fnn/+ZUaNGk1wcFVGjhzF2bNnGDbsGebPX0jr1u2c8iQl3WDZssX8+9/v8tprb9K8eUusVgt79uwkKekGI0eOYvTosZ
Qrl3UFaVeSoEYIIcRdodu/F+/x72FYtxrdgX3ZptFoIDTU3jpz8KCWyMh8ThooobSRF9Hv3I5+5/Ys53QH9qPfuR23M86Lj2iSbtzKc915MVa3M3+j37kdbeTFAtepVq3aBAYG4ubmRt++ztsYeXv70KBBQ1566RmWLVvMI490xcvLm3r1GnLxYgRjx77Fr79uYODAJwt8/byQoEYIIcRdkdG+k+Nn3dbNOabr0+fWAOHVq8vGqAlr9RpktG1PRtv2Wc6ZmzUno217LPfXdTpu8y13K0/58k7nLPfXJaNte6zVaxS4Tjpdzi1lGo2G//73S0aPHktMTBTDhg0gMvIilSpVYvHi5fTu3Y8//viVF198Coul6AZ8l41/HUIIIe45alAQlgaNcDt1At0dtjVp1MhG7do2zp/XsHq1jtdeK/3jakyDh2IaPDTbc8nzFmR73NrkAW78sg6wDxQm00Dh9DfeIv2Nt/J8fb1ej81mY9++PdSoUTPX9NeuXWPjxrUMHfo0DRo0IjY2losXL3DpUiQGg4F27TrQrl0HevbsQnp6epHNqpKgRgghxF1jbt3GHtQc3G8fV5PNCnuKAn37mvn8c4OjC6p69VIzcfee1LVrN2bPngmovPnmaH76aQlRUZeYO3cOL7/8Wpb0er2ew4cPEhcXi6Jo8PX1pXXrdvz992m+/XYeu3btICUlmWHDni7SaeIypbuMkfuSM5nSLUq7mJgYpk6dSmBgIHFxcYwbN45q1aplmzYqKoo+ffrwwQcfEBoamm2anORnSrdh+U/4vmJfUThx7W9YWrTKtswTJzR07uwFwIQJRkaOLJ7WmrI6pbu4uWpKt4ypEUKIMiIsLIzQ0FAmT57M4MGDGT9+fLbpVFXls88+y7LVTVEwZ1okU7d7V47pGja0cd999sXkVq4sO7OgRP5IUCOEEGVAYmIi27dvp0OHDgC0bduW/fv3ExsbmyXtjz/+SPfu3fHz8yvyetmqBGOtVh0A3d6cgxp7F5R9gOmRI1p275adu0VWEtQIIUQZEBMTg6enJ4Z/xqzo9Xp8fX2Jjo52ShcREcGpU6fo1q1bsdXN3LI1ALo9uxzbnWTnmWfMGAz2bq2ZM2UrE5GVDBQWQogyQFVVlGw2hs18zGaz8emnnzJlypRCXetO4yCCgrIZJProIxC+DM316wRdvQSNG+eQF158EWbPhj/+cCMiwocWLQpV1TzJts4uZjTecPmecyVpDzuj0d6lWNh7LUGNEEKUAcHBwaSmpmIymTAYDJjNZpKSkpzGzZw+fRqTycTnn38OwIULF1ixYgXnzp3jnXfeyfO18rv3k7bhQ/j/83Py+k0YK+a8lspzzynMneuF2awwYYKZH37IuomvKxXXQGGj0ezSgb0lbaCw0Wgf+C17PwkhhMiVn58f7dq1Y9u2bXTt2pUdO3YQEhJCxYoV2bRpE61ataJBgwZ8/fXXjjwXLlygX79++Z79lF/WuvWw+fmhSUxEt3snxmeezzFt1aoqgwaZ+fFHPRs26Pjrrwzq18+5y0qULTKmRgghyoiJEycSHh7OhAkTWLJkiaObaebMmZw+fdqRzmKxMHnyZCIiIli1ahU//fRT0VZMo7k1rmb3TshlpZGRIzNQFHuaBQtkJpS4RdapKWPkvuRM1qkRwjXy2/0E4PHlbLwnvg9Awu6DWGvfd8drDBniwe+/u+HtrXL0aAreRfQrW1bXqenfvzdffDGXypWLflo/yDo1QgghSpGMDpn2gdq2Ndf0Tz9t/8JOSVFYvlxaa4SdjKkRQghx11kbNsIWEICSkJCnnaQffdRKcLCN6GgNCxboGD7cTDaTu0QBLFu2mKSkJObPn4u3tw8DBgxm/PixuLu7U7dufQ4dOkCVKlWYNu0zR55jx44wffqH1KhRAy8vb44ePUKbNu3o3LkLP/+8lCpVgomKukSfPv1o+U9XY1GQoEYIIcTdp9FwY9FPWGvXQS2f+6J/Wi0MH27m448NHD+u5eBBDc2alZ4Bw4Yli3BfsqjA+dWnnoLQwVnKM95ho8ybBg4cwrJl/8fzz7/s6H4aOXIUY8e+zeTJHzNy5CjWrl3llKdJk6Y8+eRwvv12HgsXLiMjw8T+/XtZtOh7HnusJ126PEpk5EXOnz9b4PeUFxLUCCGEuCdYQprnK/3QoWY+/VSPxaLwxRd6Fiwo2undxUkbeRH9zu0Fzp/RqZPT65vlmdu2L3CZtWrVJjDQPu6nb9/sZ8Q1atQEDw8PPDw86NLlMdLS0pg161NOnjzOI488ysMPdynw9fNCghohhBAlUsWKKv37W1iyRMe6dToOH87gwQdLR2uNtXoNMgoRgKg1nNf6uVmetXrOawDlRqfLfeySXu+84F/v3k/Qrl0Hfv/9NyZNGkefPv0YNuyZAtchNxLUCCGEuKco167hduY05jbtck07erSJ8HA3zGaFadMMLF2aXgw1LHqmPHQT3Ymnpx4yzX7Kb3l6vR6bzca+fXuoUaNmgesxe/ZMRox4nQEDBlOpUiXWrVtT4LLyQmY/CSGEuGd4fjyVwIa18X1yAJjNuaavXl1l2DB7uj//dGPXLtno0hW6du3G7NkzWb58GTabjZ9+WkJU1CXmzp2TbfrIyAg2blzPyZPHWbz4R8dxb29vJk/+gDlz/svGjet55pkXirTesk5NGSP3JWeyTo0QrlGQdWpuMoQvw3eE/YvvevhqzB063TE9wJUrCi1bemE0KjRpYmXdujT+2bez0MrqOjXFTdapEUIIUepkPNqN9OdfInHdJsztO+YpT6VKKq+8Yv8CP3ZMy4QJLopoRIkjQY0QQoh7hupbjpRpn2Jp3pL8LDwzenQGzZpZAfjuOz0rVsiQ0bJIghohhBAlnl4PX3+djp+fvdvr7bfduXZNVuMraySoEUIIcW+y2dAeO5rn5FWrqvz3v/bZTykpCl9+WXK2T1AUhVI0xDVfVFVFcdFy0BLUCCGEuOfoN67Hv1lj/B7tiObK5Tzn69bNSosW9m6o+fP1xMeXjNYarVaLyZRe5gIbVVUxmdLRal0za63QnY4ZGRmEhYWh0Wi4evUqTz75JJ06ZT9afc2aNaxfvx4fHx+8vb15//330Wg0REVF0bt3bzw9PR1pf/75ZypXrlzY6gkhhCiBbEFBaKOjAPD4Zi6pH4TlKZ+i2NeuGTTIk7Q0ha++0jFu3L0/C8jT04e0tGSXBTZGow6jMfcp8XeboihotVo8PX1cUl6hg5offvjBEaAkJCTQu3dvfv31V7y8vJzSxcbG8tlnn7F+/Xrc3d154403WL16NX379gXgueee4/XXXy9sdYQQQpQCloeaYW7WHN2B/XjMnUP6U89iy+NquA8/bKVZMysHDmj55hs9I0Zk4O9fxBUuJI1Gi7d3eZeVV1aX7yh099PKlSvp2NE+7c7f35/atWvz559/Zkm3bt06QkJCcHd3B+Dhhx9m1apbG2Lt37+fadOmMWnSJDZt2lTYagkhhCjJFIWUydPsP5pMeE2ZmJ+svPOOCYDUVIX//lemeJcVhQ5qoqOjCQgIcLwOC
AggKioq23T+mULlzOl8fHwYMGAA7733Hu+++y5ffPFFtoGREEKIssPSohXG0P4AuK9cjtvuXXnO27mzlVatLADMn6/j4sWSMbZGFE6u3U9DhgwhNjY223O//fYbQJZRy9mNYlZVFY0m+xiqXLly9OrVCwAPDw969OjBmjVr6Ny5c27Vc3L7yoNBQa7poytt5L7kTO6NEPeW1A8mYVi3BsVoxDvsfa6v/yNP69coCoSFmejRw42MDIWPPjIwd27p2cVbZC/XoGbx4sV3PB8cHMy1a9ccr+Pj46lSpUqWdFWrVuX48eNO6YKDgwGIiYkhICAAwz/rWut0OozG/P/jk20Scif3JWeyTYIQ9x5b1WqkjRiJ1+efojt4AP2alWT0fiJPeZs1s9Gvn5kVK3SsWKHj5ZczCAkpHbt4i+wVuvupT58+bN26FYCEhATOnz/vaGE5fPgw586dA6Bnz54cPHjQEaxs3ryZPn36ABAeHs7u3bsdZe7evZvWrVsXtmpCCCFKgfTX3sT2z/AFrw8n5Wmjy5vef9+EXm//Y/eDD9wpYzOmyxxtWFhYWGEKaNKkCRs3buTPP/9k9erVvPnmm9StWxeA//3vf8THx9OsWTO8vb3x9/dnzpw5bNu2jfLly/Piiy+iKApWq5UFCxZw4sQJVq9eTdWqVXnppZdy7K7KSXp6huMfrJeXoURt5lVc5L7k7Oa9URQFT0/93a6OECVW3HLLjwAAFmlJREFU5mdxZgV+/hjcQadD/+fvaBITsVWqjOXBkDxlLV8eUlJg7143YmI01Kplo1GjvLfWlNRnZkms9+11LsizWHbpLmPkvuRMup9EaRcTE8PUqVMJDAwkLi6OcePGUa1aNac08+bN4+zZs/j7+3P+/HmGDx9Ohw4d8nWdwuzSnSOTCf92zdFGXsTm70/ib1uxVauep6wpKdC6tRdxcRoqVrSxa1cq3nn8FS+pz8ySWO/b6yy7dAshhMhRWFgYoaGhTJ48mcGDBzN+/PgsabZt28ZHH33E2LFjGT16NG+88QYmk+ku1PY2BgMpEyYDoElIwPfpJyEtLU9Zvb1hwgT7e4iN1fDppzLFu7SSoEYIIcqAxMREtm/f7mh1adu2Lfv3788yu/X777/Hzc0+h6RatWqkpaWRnHxv/MWf0acfaS++AoDu+FF83nyVvA6S6d/fQvPm9u0TvvxSx7p1sot3aSRBjRBClAExMTF4eno6Zpnq9Xp8fX2Jjo52Spd5LOPmzZt59NFHCQwMLNa63klq2IdkdLBvxeO+cjnu387LUz6NBj7/3IiXl4qqKrz6qjvHjslXYGkjoaoQQpQBOe2EnNPuyDExMSxbtowZM2bk+1p3GgfhkrWgVoRDSAgoCj4d2uCTxzKDgmDpUujdG9LSFJ5+2ou9eyG3bQZL6vpVJbHeha2zBDVCCFEGBAcHk5qaislkwmAwYDabSUpKynZdsejoaD766CM+/fRT/Pz88n2tIhko7ESPduEybFWqoJYrD/kos2VLmDhRR1iYO1FR0KOHlV9+SeO27QqLoM7FqyTWWwYKCyGEyBM/Pz/atWvHtm3bANixYwchISFUrFiRTZs2OcbNREZGMm3aNKZOnUpAQADr1q3j4MGDd7Pq2bI2aGgPaDKz5W2q9ogRZoYPt08dPnJEy6uvumO1urqG4m6QoEYIIcqIiRMnEh4ezoQJE1iyZAlTpkwBYObMmZw+fRqAF154gT179tCrVy/atWvH+PHjMedjsbu7xf2H7yg3qB+kpuaaVlHg449NdOpk3xtq/XodEyYYZGG+UkDWqSlj5L7kTNapEcI1ir77yZn+91/xfXIAiqpi7BtK8v+3d+fBUVX5Ase/t28vSSdkRRIIApIRtEQcHeENLkMiMAgaUB8KqIBjEHHBUcepx/KA+AQFHaEGhXpTDBYPZ0T0uUDUoDiSYtMBVORFYQggUgkxIZ2101u6+7w/Gts0ZiEk0unO71NFVd/bd/mdG3Lyq3POPWft+nM6r64Obr3VyuHDOgB5eS4efjg0gYvUOjMS45buJyGEEN2eZ8QNNGbdhLJYcDz25Dmfl5AAr73mJD090G2VlxfD66/LUNNIJkmNEEKIyGa1UvvqJmrefg/flUPbdWrfvorXXnMSHx9oWXrssViWLTOf6/Ac0cVIUiOEECLymc14h/1byC7j3n9i3PfPNk8dMsTPa685SUwMJDYrVli4555YvvlG/kRGGvmJCSGEiDrmrR+QNCmHxHvvQv+6qM3jf/1rH1u3NjBwYKCJ5h//MJKVFceUKVBR0fxcPqLrkaRGCCFE1DHYKtFcLgzV1STdcQvGr75s85zMTEVBQQNTpzai64FWm02bYORIKx98YJS3oyKAJDVCCCGijuue6dj/61kADNXVJP77BIx72+6KSk6GP//Zxe7dDeTkBN6EstkM3HdfLEOGxJGbG8O6dSaOHDFgt0N5uYbNpknC00XIMG8hhBBRyTn7UZTJTI95T2GoqyVpUg51q9fiyZnY5rkDByrWrXPx8ccmHnpIUVurcfq0gfx8A/n5pp8c36OHIjPTz403ehkzxscvf+kjJibwXWWlRn09DBigaGFVCtFJJKkRQggRtVy5s1BWKz2enIPmcpEwczrOR36Pc9ZD+NPbWPQJmDoVrryygfx8I599pvPppzqnT/+0k6O+XuPAAZ0DB3Reegk0TZGRoXC7CR7fs6efYcN8KAW1tRqJiYrMTEViosJm03A4oHdvRe/eitpaKCszkJioGDbMx+WXB8b6+P3g9YLPBz17qhaXd+iuZPK9bkaeS8tk8j0hOseFnnzvXJg+2UZC7gwMDXYAlNGI+/ZJNPxxHv4Bl7R43tkxKwXFxQb27NFpaACrFVwu+PZbAwcP6nz5pQGlLlxzTK9eftLSFGZzYKbk+npoaNCIiTFgMvlISVGkpSlSUxXx8YqYmMCK5UoFEqPGxsB5RiPExCgSEwm+BfYDTQsc7/WC3a5x8qRGWZmB3r39DB7sp3dvhdWqsFoD1/D54PBhnaNHDfTq5efaa30kJUFZmUZDg0afPn769FEYz2pW6YzJ96SlRgghRNRrvGkMNVu20uOPv8f0xedoXi8xb76O5d23cE3/HQ3/sQCV1PbinZoGgwb5GTSo+YlsKio0duwI/EE/ccKA0QiXXeYjNhY+/VTn4EGd2NgfW2dOnDDQ2KhhtSpiYxU224+tQLGxCqez9QSposJARUVL3+ptlidcNE0RGxtIgiZP9vL00+7Oua601HQv8lxaJi01QnSOrthS05Tx831Y17yEJf/d4D5frzTsy1fguSUn5NifO2avN9BaEhsb2HY6A4OPk5ICrSZVVRr79hkoKTFgMICug8kUGJtTVmbg+HEDNTUabnegayohIdAlpesmqqsbsdkMlJdrVFUFurf8/o63Ium6omdPxenTWqdcLyFB8a9/2UlPl5YaIYQQol28vxpG3boNGL/6krinF2LetQO9opzE391D7boNeHJuu2CxGI2EdMPExgYGFP8gNVVx880+oH3LiF90kYnTp10h+5QCjyf03oYz
DUM+X6AbraZGo64u8DbXD91OEPhsMoHFEhjzYzIFjj961EB1tYbTCQ6HhssFPp9GZqafwYN9lJQY+OILHY8H0tMVcXGK0lIDp05pOJ0ajY0wdqwXvZMalSSpEUII0S15r7qa2rfysbyxkfiFc1Fx8XjG3Bz8Xj9WDEv+B+OYW/EOG/5jBhChNA0slua/MxohPp4zy0WcWwdOTExgNubWJCf7ufLKs49pX4LWHpLUCCGE6L40Dffku/Fkj0Y/8S3B97AB/fgxWLWK5FWrqPvLK7hvnxQ4pb4O4769GI8Vox87ijJbcE27D9+lg5q/h9uN5nKiEpMuRIm6NUlqhBBCdHuqVy+8vXqF7mz0gq6jzGY8Y8YGd+uHDpE05Y6QQ2P/shr3hNvxDv0lKjUVZTaDUph278SSvxlDfR2e627AdV8untG/RcX3CJ5r/GI/evERNLsdV+6sn7Wc0U6SGiGEEKIZnvG3Qmkpddt3hSQhvkE/tsgoaxw4HWhKEbP5bdj8dovXM+/ZhXnPLmre3EzjyOzg/oRZ96OfPEHjtcNDkhrL5reJy/tPVFwcKtYKMTGomBiUyQRGI8psCfQn6Tr4/Wj19ejfHkM/eRLMJuLunELD0ueD19OL/g/z7h3gVzjvfyCkL8r83hY0b+OPA2n8/sC/H0YnezxotTVoDQ2gndmnaWDQUJYYXHdNJThpjs+H+aOtaLU1+DJ/EbLQqKG0BENJCegGVGIS/uQUVM+e5/cDaoYkNUIIIURL0tLwjPptyC6VlEzN5gJ8lwzEn5aOfuRfWFe+gGXr+2gOR+ixJhOeUb/F168fMW9sxFBTg0oK7YbyDRwYSGpGXB96b6cTvbTk/OJ2gOZpDNll/nQX8QvnAeC6+15Uk6Smx+OPYKirPb97Ae6Jt6N+SGoMBhLuvxfN58M5/X7sTZIay/9uIn7p08Ft34BLqNr71Xnf92yS1AghhBDt1DQB8Q2+jPr/Xke9UmgNdjSbLdDq4Vf409JQCYkANMxfjOmz3fgGZoZcy563FIxGfBf3C9nvG/gLXFPuAYcDzdGA5nKhOZ3g86I1esHjRnO7A++F6zoqJgbfJQPx9R+ANcZE49BfhQbtajIXjP+swbu+jg3ebdqShaahkpPRKivx9Qstk3bWffwpKR2679lknppuRp5Ly2SeGiE6R1efp6Y9IjFmaCFujwfN0QAGA6pHAk0XotKPFocmNroOGoEXobxeMBrxJyYFW2M0vy/YTaW5XPjT0kNupR8+hLJYUElJqOQfExfDdyfQvz2O5m1Eq61FWePwjLul2ZhlnhohhBBCNM9sDgxgbobvF5e261JNU1aV2Mz1Lru82fP8/Qfg7z+gXfdqjw4nNR6Ph7y8PAwGA6dPn+buu+9m5MiRzR5bVlbGM888Q2VlJW+88UbId++99x4FBQX06NGD+Ph45s+fjyHC5wQQQoiu5NSpUyxZsoSePXtSUVHBggULuPjii0OOUUrxwgsvYLPZsNvtjBo1ijvuuKOFKwrRtXQ4qdmwYUMwCamqqiInJ4ePPvqIuGaWDl2/fj3Dhg2joKAgZH95eTkvvvgiBQUFxMTE8Nhjj5Gfn8/EiW0vDy+EEOLc5OXlcddddzF69GgKCwtZuHAh69evDzlm69atfPfdd6xevRq32824ceMYPnw4ffv2DU/QQrRDh5tCNm/ezG9+8xsAUlJSGDhwINu3b2/22Hnz5pGY+NN2qg8++IBrrrmGmDOTHmVlZbFly5aOhiaEEOKM6upqdu3axY033gjAddddx/79+ykvLw85rmmdbrFYGD58OO+///4Fj1eI89HhlprS0lJSU1OD26mpqZSUtO8VtNLSUlKajIA+n2tAYFBRa9siQJ5LywwGTZ6PiEqnTp3CarViOfMar9lsJiEhgdLSUtLS0oLHdUad3trvUCT+fkVizBCZcTeN+XzibzOpmTp16k8y+R9s27YNAE0LvfHZ221RSnXK+Jnk5NAuL3mDpXnyXFomz0ZEK6VUs3XzudTX7a3Tz66Lm4rE37FIjBkiM+6OxtxmUrNx48ZWv8/IyKCysjK4bbPZ6NOnT7uC6Nu3L0VFRSHXyMjIaNc1hBBCtCwjI4OGhgbcbjcWi4XGxkbq6up+Ul9nZGRgs9mC2zabjQEDBlzgaIU4Px1uHpkwYQI7duwAoKqqiuPHj5OdHZj++cCBAxw7dqzNa4wfP54vvvgClyuwTHphYSETJkzoaGhCCCHOSE5O5vrrr2fnzp0A7N69m2uuuYa0tDQ+/vhj6usD84M0rdPdbjd79+7llltuCVvcQrRHhyff83g8LF68GIPBQGVlJVOnTiUrKwuARYsW0bdvX2bNCqxlsX79egoLCykuLmbs2LHMnj2bXmcWENuyZQtbt24lISGBuLg4FixYIK90CyFEJyopKWHp0qVcdNFFVFRUMG/ePPr378+tt95KXl4e1157LUopli9fTlVVFXa7nZtuuolJkyaFO3QhzklUzSgshBBCiO5LmkKEEEIIERUkqRFCCCFEVJCkRgghhBBRQZIaIYQQQkQFSWqEEEIIERU6vExCV/bss8/icDiIi4vj+PHjzJkzh6FDh4Y7rLA4l9V5u6Py8nL+9Kc/kZycjNvtpqamhsWLF4cs2yGE6JiuWv94vV42bNjAqlWreOutt8jMzARajzfcZWmtzurKcS9ZsgSn00lcXByHDx/moYceYsSIEZ0fs4piU6dODX7+5JNP1OjRo8MYTXg98MADatu2bUoppbZv365mzJgR3oC6iM8++0ytXLkyuL1s2TI1f/78MEYkRPTpqvXP66+/rj7//HM1aNAgdfTo0eD+1uINd1laq7O6ctzPP/988PP777+vxo8f32Zc5xNzVCc1Pp8v+Lm4uFhdddVVyu/3hzGi8KiqqlKXX365crlcSiml3G63uuKKK9T3338f5sjCz+/3h/yf+Nvf/qZyc3PDGJEQ0SUS6p+mSU1r8XaFsrRUZ3X1uJtavXq1evzxx3+WmKO6+6npjMSFhYVMnjy53QuzRYNzXZ23Ozr7/8OOHTuYMmVKmKIRIvpEWv3TWrwWiyXsZWmpzurqcQMUFRWxZs0a6urqWLVq1c8Sc0QnNW2tIK7rOgDffPMN+/btY9WqVRcyvC5DdWB13u7kzTff5NJLL2X06NHhDkWIqBFp9U9r8Xa1sjSts4qKirp83EOGDGHNmjVs376de++9l+eee67TY47opKatFcQBDh06xNq1a1m5cmUw4+tuznV13u7snXfeoaSkhKeeeircoQgRVSKt/mktXrPZ3GXKcnad1ZXj9vl8uFwu4uLiAMjOzubJJ59E1/VOjzmqX+k+ePAgf/3rX1m2bBlWq5V169ZRVlYW7rAuuNZW5xWwadMmSktLeeKJJ4DAKH0hROeItPqntXi7Slmaq7O6ctxlZWUsWrQouF1SUoLX66VPnz6dHnPULmjpdDq54YYbMBqNGI2BBimHw0F+fj59+/YNc3QXXkur83Z3+/fvZ9q0aSGvcMfHx/Phhx+GMSohoktXrX++/PJL8vPz+fvf/05OTg4
333wzo0ePbjXecJeltTqrq8Ztt9tZsGABsbGxJCYmcvToUaZMmcKYMWM6PeaoTWqEEEII0b1EdfeTEEIIIboPSWqEEEIIERUkqRFCCCFEVJCkRgghhBBRQZIaIYQQQkQFSWqEEEIIERUkqRFCCCE6ydy5c3nppZfCHUa3JUmNEEIIIaJCRK/9JIQQQoSD3+/n6aef5siRI+i6Tv/+/Rk0aBA7d+7EYrGwd+9eJkyYwJ133klRUVFw8UZd11m0aBGZmZm8/PLLbNy4kaysLKqrqykvLyc1NZVly5aRkpJCZWUlc+fOxe124/V6yc7OZtasWeEuepcmSY0QQgjRTjt37qS0tDS4sPIjjzzCqFGjOHToEBkZGcyZMweA+vp6Zs6cycqVKxkxYgSFhYU8/PDDFBQU8Oijj1JSUsLevXt55513iI+PZ+HChSxdupQXX3yRV155heHDhzNr1iwcDge5ubmS1LRBup+EEEKIdkpISODIkSPs3r0bv9/PihUrml1Bevv27VitVkaMGAFAVlYWlZWVfPXVV8FjRo4cSXx8PAATJ07kww8/xOfzkZSUxM6dOykuLsZqtfLKK69cmMJFMGmpEUIIIdrp6quv5plnnmHt2rXMnz+fyZMn8+CDD/7kuO+//57a2lqmTZsW3JeSkkJNTU1wOzExMfg5KSmJxsZGqquryc3NJTY2lieeeAJd15k9ezbjxo37eQsW4SSpEUIIIdqpvr6e4cOHM3LkSE6ePMnMmTNJS0v7yXG9e/cmPT2dV199NbjPbrdjNpuD27W1tcHP1dXVmEwmkpOTsdlsTJs2jWnTprFnzx4efPBBrrjiCvr16/fzFi6CSfeTEEII0U7btm1j06ZNAPTr14+0tDT8fj9xcXE4nU4cDgd/+MMfyM7OpqamhoMHDwLgcDiYPn06drs9eK1du3YFt999913Gjh2LruusWLGCQ4cOATB06FBMJhNKqQtc0siiKXlCQgghRLscP36cZcuW4XK5cDgcDB48mMWLF/P1118zb9484uPjmTFjBjk5ORQVFbF8+XKUUiilmDlzJtnZ2UBgXpvY2FhsNhulpaWkpKSwfPlyUlJSKCwsZO3atei6jt1u57bbbmP69OlhLnnXJkmNEEIIESZz584NeVtKdIx0PwkhhBAiKshAYSGEECIMXn755eBkfenp6dx5553hDiniSfeTEEIIIaKCdD8JIYQQIipIUiOEEEKIqCBJjRBCCCGigiQ1QgghhIgKktQIIYQQIipIUiOEEEKIqPD/qVy886vXk10AAAAASUVORK5CYII=\n", 321 | "text/plain": [ 322 | "
" 323 | ] 324 | }, 325 | "metadata": {}, 326 | "output_type": "display_data" 327 | } 328 | ], 329 | "source": [ 330 | "# Start figure\n", 331 | "fig = plt.figure(figsize=(9, 6))\n", 332 | "# Plotting 1: original data\n", 333 | "ax1 = fig.add_subplot(2, 3, 1)\n", 334 | "ax1.set_title(\"Original data\", pad=20)\n", 335 | "ax1.scatter(A[:], np.zeros(len(A)), c='r')\n", 336 | "ax1.scatter(B[:], np.zeros(len(B)), c='b')\n", 337 | "ax1.set_ylim((-0.1, 0.1))\n", 338 | "ax1.set_xlim((-2.1, 2.1))\n", 339 | "\n", 340 | " \n", 341 | "ax2 = fig.add_subplot(2, 3, 3)\n", 342 | "ax2.set_title(\"Optimizing the HS cost\", pad=20)\n", 343 | "cst_history = np.array(cst_history)\n", 344 | "rr_history = np.array(rr_history)\n", 345 | "ss_history = np.array(ss_history)\n", 346 | "rs_history = np.array(rs_history)\n", 347 | "ax2.plot(cst_history[:, 0], cst_history[:, 1],color='blue', marker='', linestyle='-', linewidth=2.5, label=\"cost\")\n", 348 | "ax2.plot(rr_history[:, 0], rr_history[:, 1],color='red', marker='', linestyle='--', linewidth=2.5, label=\"tr rr\")\n", 349 | "ax2.plot(ss_history[:, 0], ss_history[:, 1],color='red', marker='', linestyle=':', linewidth=2.5, label=\"tr ss\")\n", 350 | "ax2.plot(rs_history[:, 0], rs_history[:, 1],color='red', marker='', linestyle='-.', linewidth=2.5, label=\"tr rs\")\n", 351 | "plt.legend(fancybox=True, framealpha=0.5, loc='center right')\n", 352 | "ax2.set_ylim((0, 1))\n", 353 | "ax2.set_xlabel(\"steps\")" 354 | ] 355 | }, 356 | { 357 | "cell_type": "code", 358 | "execution_count": null, 359 | "metadata": {}, 360 | "outputs": [], 361 | "source": [] 362 | } 363 | ], 364 | "metadata": { 365 | "kernelspec": { 366 | "display_name": "Python 3", 367 | "language": "python", 368 | "name": "python3" 369 | }, 370 | "language_info": { 371 | "codemirror_mode": { 372 | "name": "ipython", 373 | "version": 3 374 | }, 375 | "file_extension": ".py", 376 | "mimetype": "text/x-python", 377 | "name": "python", 378 | "nbconvert_exporter": "python", 379 | "pygments_lexer": "ipython3", 380 | "version": "3.8.3" 381 | } 382 | }, 383 | "nbformat": 4, 384 | "nbformat_minor": 2 385 | } 386 | -------------------------------------------------------------------------------- /overlap_vs_HS_cost/overleaf/data-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mudassirmoosa/variational_embedding_circuits/a39d71990daa637d6d4c2c3dc6b53b3b69169818/overlap_vs_HS_cost/overleaf/data-1.png -------------------------------------------------------------------------------- /overlap_vs_HS_cost/overleaf/data-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mudassirmoosa/variational_embedding_circuits/a39d71990daa637d6d4c2c3dc6b53b3b69169818/overlap_vs_HS_cost/overleaf/data-2.png -------------------------------------------------------------------------------- /overlap_vs_HS_cost/overleaf/hs-cost-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mudassirmoosa/variational_embedding_circuits/a39d71990daa637d6d4c2c3dc6b53b3b69169818/overlap_vs_HS_cost/overleaf/hs-cost-1.png -------------------------------------------------------------------------------- /overlap_vs_HS_cost/overleaf/hs-cost-2.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mudassirmoosa/variational_embedding_circuits/a39d71990daa637d6d4c2c3dc6b53b3b69169818/overlap_vs_HS_cost/overleaf/hs-cost-2.png -------------------------------------------------------------------------------- /overlap_vs_HS_cost/overleaf/overlap-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mudassirmoosa/variational_embedding_circuits/a39d71990daa637d6d4c2c3dc6b53b3b69169818/overlap_vs_HS_cost/overleaf/overlap-1.png -------------------------------------------------------------------------------- /overlap_vs_HS_cost/overleaf/overlap-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mudassirmoosa/variational_embedding_circuits/a39d71990daa637d6d4c2c3dc6b53b3b69169818/overlap_vs_HS_cost/overleaf/overlap-2.png -------------------------------------------------------------------------------- /overlap_vs_HS_cost/overleaf/overlap_optimization.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mudassirmoosa/variational_embedding_circuits/a39d71990daa637d6d4c2c3dc6b53b3b69169818/overlap_vs_HS_cost/overleaf/overlap_optimization.pdf -------------------------------------------------------------------------------- /random_embedding_circuits/.ipynb_checkpoints/two-qubit-random-embedding-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import pennylane as qml\n", 10 | "from pennylane import numpy as np\n", 11 | "from two_wires_random_unitary_embeddings import random_gate_sequence, random_embedding_circuit\n", 12 | "import matplotlib.pyplot as plt\n", 13 | "import seaborn as sns\n", 14 | "sns.set(context='notebook', font='serif')\n", 15 | "import dill as pickle" 16 | ] 17 | }, 18 | { 19 | "cell_type": "code", 20 | "execution_count": 3, 21 | "metadata": {}, 22 | "outputs": [], 23 | "source": [ 24 | "CSWAP = np.array([[1, 0, 0, 0, 0, 0, 0, 0],\n", 25 | " [0, 1, 0, 0, 0, 0, 0, 0],\n", 26 | " [0, 0, 1, 0, 0, 0, 0, 0],\n", 27 | " [0, 0, 0, 1, 0, 0, 0, 0],\n", 28 | " [0, 0, 0, 0, 1, 0, 0, 0],\n", 29 | " [0, 0, 0, 0, 0, 0, 1, 0],\n", 30 | " [0, 0, 0, 0, 0, 1, 0, 0],\n", 31 | " [0, 0, 0, 0, 0, 0, 0, 1]])" 32 | ] 33 | }, 34 | { 35 | "cell_type": "code", 36 | "execution_count": 4, 37 | "metadata": {}, 38 | "outputs": [], 39 | "source": [ 40 | "def featmap(x,weights,wires,gate_sequence):\n", 41 | " \"\"\"Wrapper for feature map to define specific keyword arguments.\"\"\"\n", 42 | " return random_embedding_circuit(x,weights,wires,gate_sequence)" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": 5, 48 | "metadata": {}, 49 | "outputs": [], 50 | "source": [ 51 | "seed = 42 # random seed for reproducibility\n", 52 | "n_layers = 2 # number of layers for featuremap, if applicable\n", 53 | "n_inp = 2 # number of wires that feature map acts on\n", 54 | "n_steps = 200 # steps of GD performed\n", 55 | "log_step = 5 # how often the test error is calculated\n", 56 | "batch_size = 2 # how many pairs are sampled in each training step\n", 57 | "step_size = 0.02 # learning rate\n", 58 | "n_all = 2*n_inp + 1\n", 59 | "\n", 60 | "\n", 61 | "dev = qml.device('default.qubit', wires=n_all)\n", 62 | "optimizer = qml.RMSPropOptimizer(stepsize=step_size)" 63 | ] 64 | }, 65 | { 66 | 
"cell_type": "code", 67 | "execution_count": 7, 68 | "metadata": {}, 69 | "outputs": [], 70 | "source": [ 71 | "X = np.loadtxt(\"X_1d_sep.txt\") # load features\n", 72 | "Y = np.loadtxt(\"Y_1d_sep.txt\") # load labels\n", 73 | "\n", 74 | "# Divide inputs into classes\n", 75 | "\n", 76 | "A = X[Y == -1]\n", 77 | "B = X[Y == 1]" 78 | ] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "execution_count": 8, 83 | "metadata": {}, 84 | "outputs": [], 85 | "source": [ 86 | "# initial parameters are taken to be small\n", 87 | "init_pars = []\n", 88 | "for i in range(n_layers):\n", 89 | " pars = [0.001 for j in range(n_inp)]\n", 90 | " init_pars.append(pars)" 91 | ] 92 | }, 93 | { 94 | "cell_type": "code", 95 | "execution_count": null, 96 | "metadata": {}, 97 | "outputs": [], 98 | "source": [ 99 | "# Fixing seed for reproducability\n", 100 | "np.random.seed(seed)\n", 101 | "#This generates a random sequence of gate. \n", 102 | "random_gate_sequence = random_gate_sequence(n_inp,n_layers)" 103 | ] 104 | }, 105 | { 106 | "cell_type": "code", 107 | "execution_count": 11, 108 | "metadata": {}, 109 | "outputs": [], 110 | "source": [ 111 | "@qml.qnode(dev, cache=True)\n", 112 | "def circuit(weights, x1=None, x2=None,gate_sequence=None):\n", 113 | "\n", 114 | " # Load the two inputs into two different registers\n", 115 | " featmap(x1,weights, range(1, n_inp+1),gate_sequence)\n", 116 | " featmap(x2,weights, range(n_inp+1, 2*n_inp+1),gate_sequence)\n", 117 | "\n", 118 | " # Do a SWAP test\n", 119 | " qml.Hadamard(wires=0)\n", 120 | " for k in range(n_inp):\n", 121 | " qml.QubitUnitary(CSWAP, wires=[0, k+1, n_inp+k+1])\n", 122 | " qml.Hadamard(wires=0)\n", 123 | "\n", 124 | " # Measure overlap by checking ancilla\n", 125 | " return qml.expval(qml.PauliZ(0))\n", 126 | "\n", 127 | "def tr_rr(weights, A=None,gate_sequence=None):\n", 128 | " # Compute intra-class overlap A\n", 129 | " tr_rr = 0\n", 130 | " for a1 in A:\n", 131 | " for a2 in A:\n", 132 | " tr_rr += circuit(weights, x1=a1, x2=a2,gate_sequence=gate_sequence)\n", 133 | " tr_rr = tr_rr / len(A)**2\n", 134 | " return tr_rr\n", 135 | "\n", 136 | "def tr_ss(weights, B=None,gate_sequence=None):\n", 137 | " # Compute intra-class overlap B\n", 138 | " tr_ss = 0\n", 139 | " for b1 in B:\n", 140 | " for b2 in B:\n", 141 | " tr_ss += circuit(weights, x1=b1, x2=b2,gate_sequence=gate_sequence)\n", 142 | " tr_ss = tr_ss/len(B)**2\n", 143 | " return tr_ss\n", 144 | "\n", 145 | "def tr_rs(weights, A=None, B=None,gate_sequence=None):\n", 146 | " # Compute inter-class overlap A-B\n", 147 | " tr_rs = 0\n", 148 | " for a in A:\n", 149 | " for b in B:\n", 150 | " tr_rs += circuit(weights, x1=a, x2=b,gate_sequence=gate_sequence)\n", 151 | " tr_rs = tr_rs/(len(A)*len(B))\n", 152 | " return tr_rs\n", 153 | "\n", 154 | "def cost(weights, A=None, B=None,gate_sequence=None):\n", 155 | "\n", 156 | " # Fidelity cost,\n", 157 | " rr = tr_rr(weights, A=A,gate_sequence=gate_sequence)\n", 158 | " ss = tr_ss(weights, B=B,gate_sequence=gate_sequence)\n", 159 | " rs = tr_rs(weights, A=A, B=B,gate_sequence=gate_sequence)\n", 160 | " distance = - rs + 0.5 * (ss + rr)\n", 161 | " return 1 - distance # min is 0" 162 | ] 163 | }, 164 | { 165 | "cell_type": "code", 166 | "execution_count": 18, 167 | "metadata": {}, 168 | "outputs": [ 169 | { 170 | "name": "stdout", 171 | "output_type": "stream", 172 | "text": [ 173 | "Step 0 -- rs 0.190116-- rr 0.521261 -- ss 0.861625 -- cst 0.498673\n", 174 | "Step 5 -- rs 0.158762-- rr 0.540512 -- ss 0.839327 -- cst 0.468843\n", 175 | "Step 10 -- rs 
0.145454-- rr 0.558638 -- ss 0.830175 -- cst 0.451047\n", 176 | "Step 15 -- rs 0.128236-- rr 0.606654 -- ss 0.819267 -- cst 0.415276\n", 177 | "Step 20 -- rs 0.108551-- rr 0.636439 -- ss 0.808112 -- cst 0.386276\n", 178 | "Step 25 -- rs 0.098144-- rr 0.651337 -- ss 0.804406 -- cst 0.370273\n", 179 | "Step 30 -- rs 0.091919-- rr 0.687369 -- ss 0.805823 -- cst 0.345323\n", 180 | "Step 35 -- rs 0.080947-- rr 0.736391 -- ss 0.801702 -- cst 0.311900\n", 181 | "Step 40 -- rs 0.071450-- rr 0.734553 -- ss 0.794562 -- cst 0.306893\n", 182 | "Step 45 -- rs 0.072170-- rr 0.781344 -- ss 0.798373 -- cst 0.282312\n", 183 | "Step 50 -- rs 0.072222-- rr 0.795575 -- ss 0.798881 -- cst 0.274994\n", 184 | "Step 55 -- rs 0.072438-- rr 0.812306 -- ss 0.799412 -- cst 0.266579\n", 185 | "Step 60 -- rs 0.071878-- rr 0.832059 -- ss 0.799556 -- cst 0.256071\n", 186 | "Step 65 -- rs 0.070966-- rr 0.836287 -- ss 0.799574 -- cst 0.253035\n", 187 | "Step 70 -- rs 0.072995-- rr 0.856626 -- ss 0.801294 -- cst 0.244035\n", 188 | "Step 75 -- rs 0.073904-- rr 0.873162 -- ss 0.802494 -- cst 0.236076\n", 189 | "Step 80 -- rs 0.074439-- rr 0.882848 -- ss 0.804224 -- cst 0.230903\n", 190 | "Step 85 -- rs 0.070002-- rr 0.869263 -- ss 0.803925 -- cst 0.233408\n", 191 | "Step 90 -- rs 0.073089-- rr 0.895879 -- ss 0.806533 -- cst 0.221883\n", 192 | "Step 95 -- rs 0.071768-- rr 0.896136 -- ss 0.806899 -- cst 0.220251\n", 193 | "Step 100 -- rs 0.073635-- rr 0.910488 -- ss 0.809452 -- cst 0.213666\n", 194 | "Step 105 -- rs 0.072391-- rr 0.910759 -- ss 0.810601 -- cst 0.211711\n", 195 | "Step 110 -- rs 0.074761-- rr 0.922322 -- ss 0.813705 -- cst 0.206747\n", 196 | "Step 115 -- rs 0.073570-- rr 0.920451 -- ss 0.816344 -- cst 0.205173\n", 197 | "Step 120 -- rs 0.073786-- rr 0.921830 -- ss 0.817627 -- cst 0.204058\n", 198 | "Step 125 -- rs 0.073057-- rr 0.919692 -- ss 0.818564 -- cst 0.203929\n", 199 | "Step 130 -- rs 0.073423-- rr 0.921143 -- ss 0.816726 -- cst 0.204489\n", 200 | "Step 135 -- rs 0.072407-- rr 0.916846 -- ss 0.820209 -- cst 0.203879\n", 201 | "Step 140 -- rs 0.073530-- rr 0.921108 -- ss 0.820910 -- cst 0.202522\n", 202 | "Step 145 -- rs 0.071576-- rr 0.913191 -- ss 0.821791 -- cst 0.204085\n", 203 | "Step 150 -- rs 0.070383-- rr 0.905993 -- ss 0.818878 -- cst 0.207948\n", 204 | "Step 155 -- rs 0.073432-- rr 0.921585 -- ss 0.820636 -- cst 0.202322\n", 205 | "Step 160 -- rs 0.071795-- rr 0.915596 -- ss 0.818936 -- cst 0.204529\n", 206 | "Step 165 -- rs 0.073250-- rr 0.920746 -- ss 0.820711 -- cst 0.202521\n", 207 | "Step 170 -- rs 0.074033-- rr 0.922831 -- ss 0.820137 -- cst 0.202549\n", 208 | "Step 175 -- rs 0.073332-- rr 0.921243 -- ss 0.821566 -- cst 0.201928\n", 209 | "Step 180 -- rs 0.072749-- rr 0.919193 -- ss 0.822266 -- cst 0.202020\n", 210 | "Step 185 -- rs 0.070227-- rr 0.904459 -- ss 0.816923 -- cst 0.209537\n", 211 | "Step 190 -- rs 0.072940-- rr 0.919562 -- ss 0.820632 -- cst 0.202843\n", 212 | "Step 195 -- rs 0.071922-- rr 0.915178 -- ss 0.822983 -- cst 0.202841\n" 213 | ] 214 | } 215 | ], 216 | "source": [ 217 | "# Optimising the circuit\n", 218 | "\n", 219 | "cst_history = []\n", 220 | "rr_history = []\n", 221 | "ss_history = []\n", 222 | "rs_history = []\n", 223 | "par_history = [init_pars]\n", 224 | "pars = init_pars\n", 225 | "\n", 226 | "for i in range(n_steps): \n", 227 | " \n", 228 | " if i % log_step == 0:\n", 229 | " cst = cost(pars, A=A, B=B,gate_sequence=random_gate_sequence)\n", 230 | " rr = tr_rr(pars, A=A,gate_sequence=random_gate_sequence)\n", 231 | " ss = tr_ss(pars, 
B=B,gate_sequence=random_gate_sequence)\n", 232 | " rs = tr_rs(pars, A=A, B=B,gate_sequence=random_gate_sequence)\n", 233 | " cst_history.append([i, cst])\n", 234 | " rr_history.append([i, rr])\n", 235 | " ss_history.append([i, ss])\n", 236 | " rs_history.append([i, rs])\n", 237 | " print(\"Step {} -- rs {:2f}-- rr {:2f} -- ss {:2f} -- cst {:2f}\".\n", 238 | " format(i, rs, rr, ss, cst))\n", 239 | " \n", 240 | " \n", 241 | " # Sample a batch of pairs\n", 242 | " selectA = np.random.choice(range(len(A)), size=(batch_size,), replace=True)\n", 243 | " selectB = np.random.choice(range(len(B)), size=(batch_size,), replace=True)\n", 244 | " A_batch = [A[s] for s in selectA]\n", 245 | " B_batch = [B[s] for s in selectB]\n", 246 | " \n", 247 | " # Walk one optimization step (using all training samples)\n", 248 | " pars = optimizer.step(lambda w: cost(w, A=A_batch, B=B_batch,gate_sequence=random_gate_sequence), pars)\n", 249 | " par_history.append(pars)" 250 | ] 251 | }, 252 | { 253 | "cell_type": "code", 254 | "execution_count": 19, 255 | "metadata": {}, 256 | "outputs": [ 257 | { 258 | "data": { 259 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjYAAADlCAYAAABAg72dAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOzdeVxUVf/A8c+dGWaGYd9EAXfT1NSi0lzTRFNzKcs925en57Eye3qyx0cj09TU9n4tlplmmmuaS5kVbpVbpqZmuQckKCAwLLPe3x+jAyMgKMMifN+vF8Xce+6537ngnS/nnHuOoqqqihBCCCFEDaCp6gCEEEIIIbxFEhshhBBC1BiS2AghhBCixpDERgghhBA1hiQ2QgghhKgxJLERQgghRI0hiU0F+eWXX3jwwQcZNWoUw4cPZ+zYsfz111+lHrdr1y4eeuihMp/n0UcfZfv27eUJFavVyujRo2nRogWJiYmllk9ISKBPnz6MHj26XOcVQojqJikpiSeeeIJRo0Zx//33M3r0aBYuXOjVc1zpPXTfvn3ExcWxYsWKIvtUVeWjjz7iuuuu81aYVy1dVQdQE23fvp0XXniBjz/+mMaNGwOwYcMGRo4cybJly4iMjCzx2BtvvJE333yzzOd6/fXX8fPzK1e8er2eBQsW0KJFizKV7969O+np6axcubJM5d9++22SkpKYPn16ecIUQogKN378ePr06cOoUaMA1/18ypQp7tfecLn3UIBvv/2Wr7/+msDAwGL3P//889SrVw+bzeatMK9a0mLjZU6nk0mTJvGPf/zDndQA9O7dmxtvvJHXX3/9kscrikJAQECZz+fv74+iKFccrxBCiAL79++nQ4cO7tcdOnRgwIABVRiRS5s2bZg9e3aJf8iOGzeOIUOGVHJU1ZO02HjZwYMHOXHiBJ06dSqyr2vXrkybNg2n08nkyZNZs2YN9957L0eOHGH37t306dOHAwcOsHfvXg4fPgxAeno6zz//PBkZGURGRhIcHExCQgIjRozAaDTyySefMHz4cJ588kni4+PddR47dozDhw9z++23M27cOAD+/PNPZsyYQX5+PjabjcGDBzNs2LAyva/c3FwmTpzI0aNHqVu3bpHWnS+++IKVK1ei1+tRFIWJEyfSrFkz1q1bx8qVK7FYLIwePZpOnTrxxBNP8O6777J582YMBgO+vr5Mnjz5ki1ZQghRGaKiovjoo4+YNGkSJpMJgMcee8y9Py0tjRdffJGMjAwcDgePPPIIcXFxAOTk5DBlyhROnDiBqqoMGjSIESNGAKXfQ0tTt27dUveXZShBbSCJjZedOnUKoNgP6cjISLKzs8nIyCA+Pp6jR4+yb98+5syZw4kTJ/jll194+OGH6dmzp/uYl156iXr16jFnzhzMZjODBg2ia9eujBkzBoAjR464y16o8+DBg3zwwQecOXOGHj16MGrUKCIjI8nNzeXJJ5+kXbt22Gw2Bg4cSIcOHWjUqFGp7+udd94hMzOTFStWYLfbeeCBB9Bqte79qqoyf/589Ho927dvZ9KkSXz++ef069ePo0ePFumKCgwMZPHixSiKwooVK5g1axYzZ8687OsthBDeNHHiRMaOHcvGjRvp1asXd911F+3bt3fvf+6552jXrh1PP/00qampDBgwgGuvvZaYmBimTZuGw+Fg0aJF7vv1Nddcw0033VTqPfSTTz5hx44dvPfee1XxtmsU6YqqYrfeeitarZamTZsWaUZ0OBxs3LiRgQMHAq5upx49epRaZ5cuXVAUhTp16hAcHExSUhIADRs2ZNmyZQwfPpyHHnqIM2fOcPDgwTLF+fXXX9O/f380Gg16vZ5evXp57G/WrBn/+Mc/GDlyJLNnz+bAgQOXrK9evXrcd999jBo1ik8//bTU8kIIURk6duzIDz/8wPPPP09iYiL33XcfkyZNAiAlJYVt27Zxzz33AFCnTh1iY2NZu3YtTqeTVatWufdduF+vXr0aKP0eGhERQYMGDSrxndZc0mLjZfXr1wcgNTWV6Ohoj30pKSkEBgYSEhLi3nap8TTp6enY7XaP8kFBQZjN5kvG4O/v7/7eYDC4B5NNnz6drKwsFi5ciFarZfTo0eTn55fpfZ05c6ZIHBdkZ2fz+OOPM3XqVPr06UNiYqJHq9PFTpw4wdixY/n8889p27ate7C1EEJUByaTiSFDhjBkyBB27NjB/fffzyOPPEJGRgbgGqh7YWxjRkYGzZs3Jz09HavVysyZMzEajQBkZWXRsmVL4NL3UID+/fvTv3//ynh7NZ4kNl7WunVrGjZsyI8//likBWbr1q3cfvvtaDRlaygLDQ1Fp9ORnp5O06ZNATh37twVx7Zv3z5Gjhzpbv68nNHzERER7n/UF8dx/PhxzGYzXbt2BcBut1+yr
oMHD+Ln50fbtm3LVF4IISrLiy++yEsvveR+3b59e4KDgzGbze5xLm+99RahoaEAWCwW7HY7vr6+6PV6Jk6c6L632Ww29x+Pl7qHCu+Srigv02g0TJ48mffff58TJ064t2/cuJG9e/cyduzYMtel1Wrp1asXq1atAsBsNrNly5Yrjq1Bgwbs3bsXcLUoXRigXBZ9+/blq6++wul0YrVa+eabb9z7oqKi0Ol07Nu3D6BIjH5+fuTl5aGqKv/6179o2LAhWVlZHD9+vNjyQghRVX766Sf3vQxgx44dKIpCkyZNiIyMpEuXLu57MrgSoe3bt6PRaLjzzjvdXU8A7733Hl9++SVw6XsowJo1a5g2bVoFv7vaQRsfHx9f1UHUNDExMbRu3ZoZM2awbNkyli5dyunTp5k9e7Z7UPGrr77Kpk2bOHToEDabjRtuuIH09HSefPJJUlJS2LFjB4MGDeKWW27hyy+/5NNPP2Xnzp00b94ch8NBXFwcH330EatWreLPP//EZDKxdu1ad52tW7fmgw8+YNeuXfz222/cfPPNdO7cmS+++IIVK1Zw+PBh8vPz2blzJ40aNWLChAkkJSWxd+9eunXr5tGdBXDDDTfw008/8d5775GQkECrVq3YvHkzycnJ9OvXj9DQUGbOnMmPP/6Ioijs3buXPXv2MGjQIEJDQ1mwYAGrV6+mc+fO9OnTB7vdzsyZM/n555/R6/Xs3r2bEydOFOl3FkKIymQwGJg7dy4rV65k+fLl7Ny5k6lTpxITEwO4xjAuXryYBQsWsHz5ctq1a+dune/QoQM//PADc+bM4csvvyQwMJAnnngCjUZzyXto9+7d2bJlC4cOHSqxO+q3337j2Wef5dChQxw6dIiff/6Zfv36ufe/9957fPDBB+7PD5vNVmsn61NUVVWrOghRsqysLPz9/d3dVy+99BImk4nnnnuuiiMTQgghqh/piqrmPv74Y7Zt2wa4kpyEhAS6dOlSxVEJIYQQ1ZO02FRz27Zt480338RgMJCTk8OgQYO4//77qzosIYQQolqSxEYIIYQQNYZ0RQkhhBCixpDERgghhBA1hiQ2QgghhKgxJLERQgghRI0hiY0QQgghagxJbIQQQghRY0hiI4QQQogaQxIbIYQQQtQYktgIIYQQosaQxEYIIYQQNYYkNkIIIYSoMSSxEUIIIUSNofNWRcnJyUyZMoXw8HBSU1OZMGEC9evXL1Ju9+7dxMfHc+utt/Lvf//bvV1VVWbOnElaWhpms5mePXsyePBgb4UnhBC1mt1uZ/78+bz11lssX76cpk2bFikj92FRE3gtsYmPj2fo0KHExcWRkJDAxIkTmTdvnkeZo0eP8uuvv9KiRYsix3/99decPHmSd999F4vFQt++fWnfvj0xMTHeClEIIWqt5cuXc/3115OXl1diGbkPi5rAK11RGRkZbN26la5duwLQqVMndu3aRUpKike5pk2b8vDDD6PTFc2nVq1aRbdu3QAwGAy0b9+etWvXeiM8IYSo9YYNG0ZsbOwly8h9WNQEXklskpOTMZlMGAwGAPR6PYGBgSQlJZW5jqSkJMLCwtyvw8LCSExM9EZ4QgghykDuw6Im8Epio6oqiqIU2V7ctstR3uOFEEKUj9yHxdXGK2NsoqOjycnJwWKxYDAYsNlsZGVlERUVdVl1pKWluV+npaXRqFGjy4ojIyMHp1O9rGMqUliYP2lp5qoOw4PEVDqNRiEkxK+qwxCi0nnjPgwl34ur27/1qibXo6jC1+RK78VeSWxCQkLo3LkzW7ZsIS4ujm3bthEbG0tkZCQbN26kQ4cOBAQEXLKOgQMHsnbtWoYNG4bFYmHHjh3861//uqw4nE61WiU2QLWLByQmIUSBwvdob9yH4dL3Yvm37kmuR1HlvSba+Pj4eG8EEhsbywcffMCOHTvYvn07kyZNIjg4mKeeeoo2bdoQFRWF0+nk5ZdfZteuXZw+fZq//vqLTp06AdCsWTP279/P6tWr+eqrrxg6dCgdO3a8rBjy8qyo1eh3xM/PQG6utarD8CAxlU5RFEwmfVWHIYRX7dmzhzlz5rB//36ysrLQaDQ0adLE4x7tjfswlHwvrm7/1quaXI+iCl+TK70XK6panVKB8klLM1er7DciIoAzZ7KrOgwPElPpNBqFsDD/qg5DiKtWSffi6vZvvarJ9Siq8DW50nuxzDwshBBCiBpDEhshhBBC1BiS2AghhBCixpDERgghhBA1hiQ2QgghhKgxvLYIphBCCCG8R//NenS/7EQNCQWrDU12FkpWJjhVnBEROOtEYrvxZhxt2rqPUdLS0O3fi711G9SICJQzZ9Ad2I/u94M4g0Ow3t7XVV8JfL7fiM8vu7Bf1xZbx06oQcFFyuh278T0zpsoGemoJhNqUDDOkBDUkFCcEXVcX/XqYW93A2i1ngfn5qL7/SCOmAaodep47Vp5xFchtQohhBCi7HJyUJwO1IBA9yb9xg34fvrxJQ/Lu+8hzLPecL/22baZoEfuB8DpH4DG7Pk4uarTYek3gOw586DQchlKehr+/30O44plBWU1GhzNW2Dt2ZucF18uKJuZiWHt6lLfUubHC7AOGFTwftavxf8/z6BNOQ2Ao04k9nbXk7XgC9B4rwNJuqKEEEKISqRkpOP/3DMQEUFY4yjCG0YS0bgeptdneRa0WlALfeCrWi3OkBCcoQUtLkrWOY9DdL/td39/cVIDoNjtoNV4JDX67zYQ2uVmj6QGQHE60f1+CP036zy2O+tFYb+mObYOHbG1uwFHg0Y4CyVkANYu3bD2H+iq5+xZAh59gKD7R7iTGgBtagraUye9mtSAtNgIIYQQlUNVMSxZhH/8BDTn1+Qq/JHuO/dDcv/5FGp4OADmN/8P82tvo2RlouoNYDIVJCQ2G5q0s2D1nLk477F/YrulE7oDv6E9dRJHw0bYW7XG0ao12iN/YvhyBZZ+/T2O0e3bi+bsWQAsfe7A/NJUtMePod+6Gd3unTgjI8FuB50rZXC0bEXGtl1F35/ViibtLJrUFJzhEe5Ylfw89Bs3AOAMCib3ufFgtaH7bR/OmPrluqTFkZmHK1B1nFVSYiqdzDwsRPnIzMPFCx5wOz7bfyrYcNtt5F7T0jUORavFdn0s1n79i45Lqei4bu+O9tgxzK+8iuWeYR6tOd5inDsH/ZZNZE+fjRoZWWI5b8w8LC02QgghhJdpkpNw1osqMo4FXGNLcqbOIPDh+8g5W/Wre2e/8yHOiAjU4JAKO0f+g4+Q/9CjFVZ/YTLGRgghhLgSViumN2YR3LcnpmmTwexKUjQnjhMS1xX/sf8Cm81d3NalGznP/JuMH3dhGTS4QlpGroTjmuYVmtQAlfpepcVGCCGEuBI+PhhWLEX3+yF8du/EuGghuc9PwPfdN9GcPYvvos+w9u2PtU8/AMwzXqvigGsHabERQgghysJuh9zcgteKQt6DBd0r2tN/E/DMGHRH/gQgd8xYd1IjKo8kNkIIISrHqVNVHcEV05w8QfCgvgQ8N9Zje/59D3Lm9Dmy3noPZ0TBhHP5dw4m
53/xlRylAElshBBCVALj3DnQvDn6r9eVXria0W9YT8htXfDZuR3j0sXo160p2KnVgkaDZfgo0n/aTc74/5Ez7j9kv/W+1+dnEWUjY2yEEEJUKE1SIv6TXgCrFf9/P01611vBz6+qwyoT47yP8R//LIrTCUDew49h7dGz2LJqYBC54/5TmeGJYkg6KYQQokI5o2PImrcQ6tYl6/OlV0dS43Ti9/KLBPznGRSnE9XXl8xPFmKeNgt8fas6OnEJ0mIjhBCiwlnjboejR7HnOAo2Ohz4fvB/WO662zXnSzXh/++x+Gz/Ed3h3wFwhoeT+dkS7LE3VXFkoiykxUYIIUTlMJk8XuoTvsM/fgKhN7RCv/arqompGLp9e9xJjb1pMzLWfSdJzVVEWmyEEEJUCZ+EHwBQTX7YunStmiDMZhSbFTWkYGFJW6euqIHB2G68kbzH/4UaGlY1sZUkPx/dvr0oebk4WlyLs2499y5Nymm0vx9CycrEOuBOj8MC/vEwmox0LLf385gFWMlIx/DlCjTpaeQ9+g/UwCD3Pp+ff0T72z7UkFAsdw8tqCw3F7/pU1BycsgfPhL7zR3cu4wff4DvJx9hb9WanPipOKOiXefJysT3w/fQHj+GZdBdWHv39faVASSxEUKIWiM5OZkpU6YQHh5OamoqEyZMoH59z0UIU1NTmTRpElFRUZjNZsLCwvjPf/6DUgEzx+ZMfgXL4HvQnjiOGhRcsMNmI+CJR7CMGIX1tl4VNmutJjmJwHuHofr7k7l0FRgMrrjip1TI+YrlcKCYs1Gys10JQKEnqfzHPYnPT9twXNOcrPmLC+I+/Tch/XsBkPXWe1iGj3LvM736Cr4L5uEMCSHtosRGv+l7NGlpOBo28tiu2/srAc+PA8DSfxCOQomN7/+9jeHrtdja3+KR2ChmM6b33wHAfv0NHomNJisL3R+H0R75E/Nrb7u3qzofTDOnoagqjgYNJbERQghRPvHx8QwdOpS4uDgSEhKYOHEi8+bN8yjz4YcfUrduXSZNmgRAv3796NChA927d/d+QIqC/YYbsd9wo8dmw7IvMK5eiXH1SrI+/ATLnXe7djid+GzbgpKZiaNpMxwtW13xqfXfrCfg6SfQpKcD4Dt3DnlPjLm8SlTVcy2otDQ059LB4cTRvIVHUdP0KShZmdi6dsfa9w73dsOaVQQ++gAAab8c8FjtWpuUiO7oEdSgII+6VN+CLj0lL89jn6NJMwA0GRkoaWmoYQWtTfbr2qJkZeKIjvE4RvtXwfxCSkaGxz5NRvr57eke2xW7DdXkh+rnVyTxtLdsjaV3HxSrFdU/oNBFMOGs3xBsVncSWREksRFCiFogIyODrVu38vbbrr+gO3XqxJgxY0hJSSGy0GrLderUYf/+/QDk5+djNpsrpLXmUjTmbJx+/qiBgVj69vfYF3TPQBRVJWfcf8gtlNjo9v2K8dNPsPbpi7VrdzAaXTtyc9FvTkCx5GO/pgWOxk3wf+l/+M6d4z7WFnsT+fcM8ziPYelitEf+wBkVQ/79DxXEdvwYwYP6osnKIuvt97EOGOTe5zv3Q/xmTsMZFEzan56TERo/+xRtagp5drtHYqP6F6xerT1x3COxsXbqgurnj63d9R51qSEhnFu8HEx+2Bs39dhn6T8Qe5u2OJo2Qw3xXP8pc+kqimMZMAhr7z44g0OKJBxZc+ah6nxQg4M9tjujojl74u9i67P26VfijMvp23ZWaFIDktgIIUStkJycjMlkwnD+Q0Wv1xMYGEhSUpJHYvPII4/w3HPP8cQTT5Cens7dd9/NrbfeWqmx5j36BPlDhqM9esTzQ1CjQQ0IRMnKRMnK9DhGt3sXvgs+wXfBJ2RsSMB+fSzgalkIum+4q95ho/DZ+wu63w8B4DQa0eTn4/PLLjTZmTgiItz1Gb9YhH7zD1hv6eSR2KDXoz3t+kDXXNSKgcPu2p55zrUgZqGkRQ0Kwmm1gNXqcYi9+bWYJ7+C6uePo1Fjz+vw9LN4tscUxGC7rVex187ZsBHOi7qaSqMGh6CWsK/w+B2vqOCkBiSxEUKIWkFV1WJbXi7e9vrrr+Pn58fs2bOxWq08/vjj7Nu3j7Zt25b5XGFh/iXui4gIKHGfZ8EAuKZB0e1frwdfX0xRUZgK12XQuD40w8MJietW0D0SEQBRUZCcjO8XCwvKd+yIZuRIePJJAEKDTa6yF+hdMwrrNRfF7FsfHnoIgoIIuOVGAgrvu/9eiG0HRiMR9UIKWo0A/jjsOvz81wVhsa0htnXZrkktUebfkRJIYiOEELVAdHQ0OTk5WCwWDAYDNpuNrKwsoqI854/5/vvveeaZZwBXq06rVq1YunTpZSU2aWlmnM6ibQAREQGcOZNdvjfS7LqC7wvXNfIhGDgU7fFjOM6aPQ7RzfucwJFD0J49g2owkPvMc+Q+5Rosy4AhrmUR9HrP+j5b5kqOFMVzO8D0N4qPoV5j1xdAts31dQleuR41TOFrotEol0ySSyLz2AghRC0QEhJC586d2bJlCwDbtm0jNjaWyMhINm7cSHa268OkUaNGHDlyxH3c0aNHqVfPy90RFcXfH0ebogmY/fpYzDNmY70tjvTN213LHuh0ri+TydXSc3FrlkZTYU9jiYqlqKpaUtfaZSnLY4SqqjJz5kzS0tIwm8307NmTwYMHA7BixQqmT5+Oj48PAHXr1mX58uWXFUNJfyVUleqYjUtMpbvSvxKEqO4SExOZOnUqERERpKam8sILL9CwYUP69+9PfHw8N910E0lJSbz88stERUWRk5OD0+lk8uTJ+F7GMgIV2mJzpS581FWjZKW63fuqA2+02HgtsXnsscc8HiOcN29ekccI169fz5o1a3j33XexWCz07duX+fPnExMTw4oVK4iOjqZDhw7Fn6AMJLEpncRUOklshCifapHY2GyubqmLHruuTqrbva86qDZdURceI+za1TVzZKdOndi1axcpKSke5VatWkW3bt0AMBgMtG/fnrVr17r3L1u2jBkzZvDSSy9x+PBhb4QmhBCiFjIuWURI1/YEPPoAytmzVR2OqEReGTxc1scIk5KSCCs0WVBYWBiJiYkANG/enKZNm9KuXTtOnTrF8OHDWblypcfxQgghRKlsNkyvz0JRVXx2bkcNKN9TNuLq4pXEpqyPERbnQpnrrisY6d6gQQOuvfZaEhISGDZsWEmHFlEduw/K+9haRZCYhBA1jZJ5DgA1MAjDsi/QnjoB4Hr6qRLmThHVh1cSm7I+RhgdHU1aWpr7dVpaGo0aNQLg+PHjNG5cMDmRj48P+fn5lxWHjLEpncRUOhljI8TVxbDoMwKf/icAqk7nHiDsqBdF/qj7qjI0UQW8MsamrI8RDhw4kM2bNwNgsVjYsWMHd9zhmlp6ypQpZGa6ZpLMzc3lt99+o3379t4ITwghRA1mWL/G/b1it6PYXPPH5D71jLTW1EJeeyqqLI8RqqrKjBkzSE9Px2w2c9ttt3HPPfcAMH/+fH788UcaNmzIX3/9Re/evbnzzjtLOasnabEpncRUOmmxEaJ8Kv2pKFV
F/9WXGBcvxNHqOpS0szijY8h95jnX5HvVVHW791UH1epx7+pAEpvSSUylk8RGiPKpFo97XwXkehRVbR73FkIIIYSoDiSxEUIIcfVRVfz/PRafLZsKZhUWAklshBBCXIX0336N7/y5BN89AOPnC6o6HFGNSGIjhBDi6qKqmN6YDYAzIBDLgEFVHJCoTiSxEUIIcVUxLFmEz64dAOQ/9ChqYFAVRySqE0lshBBCXDWUM2fwn/QCAI46keT+66kqjkhUN5LYCCGEuGr4T3gOTUYGAObps1GDQ6o4IlHdSGIjhBDiqqD/Zj3GL1cAYLljINb+A6s4IlEdSWIjhBCi2lOyMvH/zzMAOIOCMU+fVcURiepKEhshhBDVnmowkj9yNKqPDznxU3BG1q3qkEQ15ZXVvYUQQgivy80Fk8n1vcFA7vMTyB8yHGfjJlUbl6jWpMVGCCFEtaNkpBNyW2d0u3d6bHc2aQqKUkVRiauBJDZCCCGqF6eTgH8+iu7YUQLGPQkWS1VHJK4iktgIIYSoVkyzZ2D47lsAHE2agV5fxRGJq4mMsRFCiFoiOTmZKVOmEB4eTmpqKhMmTKB+/fpFyq1du5bdu3cD8OeffzJ27FhuvPHGSolR/90GTLOmA2Bvdg3Zb/2fdD2JyyKJjRBC1BLx8fEMHTqUuLg4EhISmDhxIvPmzfMoc+DAAX799VcmTZoEuJIhrVZbKfFpkhIJeOIRFFVFNfmR9clC1IDASjm3qDmkK0oIIWqBjIwMtm7dSteuXQHo1KkTu3btIiUlxaPcggULqFu3Lq+99hovvfQSe/bsITIyslJiNL0+C825cwBkv/EOjhbXVsp5Rc0iLTZCCFELJCcnYzKZMBgMAOj1egIDA0lKSvJIXI4ePcrff//N3LlzcTgcjBgxAoPBQFxcXJnPFRbmX+K+iIiA4nckJsLiz1zf9+9P4KMPlPl8V7MSr0ctVt5rIomNEELUAqqqohQzVuXibTk5OQwYMACtVotWq6V3796sW7fushKbtDQzTqdaZHtERABnzmQXe4zfy69gsloByPjnWOwllKtJLnU9aqvC10SjUS6ZJJdEEhshhKgFoqOjycnJwWKxYDAYsNlsZGVlERUV5VGubt26HmNqfHx8sFTC49Z5j/0TxWpFk5SI/ab2FX4+UXPJGBshhKgFQkJC6Ny5M1u2bAFg27ZtxMbGEhkZycaNG8nOdv2V3LdvX7Zv3+4+bteuXXTu3LnC43M2aIj51dfJ+mxJhZ9L1GzSYiOEELXEiy++yNSpU9m8eTOpqam8/PLLALzxxhvEx8dz0003MXjwYE6ePMmkSZNwOp00btyY4cOHV16Q8mi3KCdFVdWiHaFXqZL6datKdew/lZhKd6X9ukIIlzKPsVFVfBK+x3ZLJ/D1rcQIq4fqdu+rDrwxxka6ooQQQlS+vDz8nxlD8LC7CLupDYYVS6s6IlFDSFeUEEKISqU5eYLAh0bjs38vAKqi4LimeRVHJWoKabERQghRaXy2bCKk963upMbWoSPnvtuCvU27Ko5M1BSS2AghhKgc771H0NA70WRkAJD7+D85t2INzsbOomsAACAASURBVMi6VRyYqEmkK0oIIUTFstnw/9/z8MlHKIBqMJD92ttYhlTi01ai1vBaYlOWVWNVVWXmzJmkpaVhNpvp2bMngwcPLnWfEEKIq5eSl4vPlk0AOOpEkvXp59hvvLmKoxI1ldcSm7KsGvv1119z8uRJ3n33XSwWC3379qV9+/bExMRccp+3ZP38I2dXLMeenoYuNIzwwXcTeEsnjzKnZs8g/9Ahj22KwYBazMybutAwTG3bkrtv3yXrvFI/HTjNik1HScuyEBZoYPCtTenYum6x248knuOHPcnF1hMWaKBt0zB2HEohJ98BgMFHi06L+3Vp/IxaRvZqQcfWFdNkXPhno/H3R1VV1JycYq9pcT8jABQF47XXYv3rL5xms2uTnx+RI0Z57WcihLh8amAQWZ99QeiE5zg3622c0d67rwtxMa+MsSnrqrGrVq2iW7duABgMBtq3b8/atWtL3ecNWT//SMr8edjT0wCwp6eRMn8eWT//6C5T0gdmcUnNhTqyEn64ZJ1X6qcDp/l0/e+kZbnOnZZl4dP1v7Pgm9+LbP/oq4MlJjUXyvywJ9kjibHYHGVOasCVAM1dc5CfDpy+wndUstRNmz1+Nk6zGTUnByh6TUtMagBUlfxDh9xJDYCak8PpTz72ys9ECHHlHE2awcaNktSICueVxOZSq8YWlpSURFhYmPt1WFgYiYmJpe7zhrMrlqOeX2DtAtVq5eyK5e7XJX5gXoaL67xSKzYdxWp3emyz2p1s+jW5yPbKmpLQobri8rZTCxYW+dkUVviaXtHPyOHwys9ECHFlNm/W0reviUWLqjoSURt4pSuqrKvGFudSZcpyfGGXmqHwj4z0YrfbM9LdS6T/cVlnK1nhOq90+fX0rOJbiap6YuX0LEu5l5S/2B9n00otc+GaXunPqPDPRAhRuVas0LF7t5bRo2HlSi233FL21mIhLpdXEpuyrhobHR1NWlrBh1haWhqNGjUqdV9ZXWpJBV1IqLur4+Lt3p7S+kKd5ZkuOzTQ4O5uKkyjVG1yExpo8Pr1MoSHYTlz9pJlyvtzupzjZUkFIbzrwQdtLFvmg9Wq8MgjRr77LpfIyOqz/I2oWbzSFVXWVWMHDhzI5s2bAbBYLOzYsYM77rij1H3eED74bhS93mObotcTPvhu92tjy5blPs/FdV6pwbc2Ra/z/PHodRpuvT6qyPbKWjJOq7ji8rYGo0cV+dkUVviaXtHPSKv1ys9ECHFl2rVzMm2a6w+11FQNjz5qxGar4qBEjeW1RTATExOZOnUqERERpKam8sILL9CwYUP69+/vXjVWVVVmzJhBeno6ZrOZ2267jXvuuQfgkvvKqrRFMCv7qajyLnBWW56KiogI4OhX31Sbp6KkxUaI8inuXmyxWPjmGztJSXY0Gmje3MkNN9TsLilFUVAUDUajCb3eUGS/LIJZlDcWwZTVvStQdfyllZhKJ4mNEOVz8b3YarWQn59DUFAo8+ZpSU3VAtCmjYOePR34+FRVpBVLVVUcDju5udkYjX5Fkpvqdu+rDmR1byGEENVefn4uJlMAvr56Bg1y4OfnSnr279eycKGO8yss1DiKoqDT+WAyBZCfn1vV4dQaktgIIYSoUKrqRKt1PasSHAz33WcjJsY1bcWZMxrmz/fh1KnKGi1Y+bRaHarqLL2g8ApJbIQQQlSoi6cE8feHYcPstG9vB8BqVVi7Vkd+flVFWLEURaEGjfqo9iSxEUIIUek0Grj1Vie9ermSG7NZYdMmbRVHdXnWrfuKqVPjqzoMcRFJbIQQQlSZdu2cNGzo6qbZt0/LyZM1t0tKVA6vLYIphBBCXC5Fgd697cyb54PNpvDNNzoefNBW4U9KfffdBrZs2USdOnU4cuQI9933IA6Hg1WrVlC3bl1SUk7z2GP/Ijo6hpMnTzBnzv8RE9OAlJTT3HJLJ1q1uo5vvlnP2bOpvPbaDDp37kaHDh
0rNmhRJpLYCCFELZGcnMyUKVMIDw8nNTWVCRMmUL9+/WLLJiYmMnDgQP73v/8xePBgr8bxyy8aXnvNQKH1asnOVjh3ztVa8/nnPgQHX9mYFH9/GDfOQmxsyYN1T5w4zjvvvMEXX3yJXq/nxx+38uuvv7Bs2RcsWPAFISGhbN/+E/Hx/2XOnPmsXLmUli1bM2rU/ZjNZjZsWE/9+g24/fa+7Nmzm3Hjnr+iWEXFkMRGCCFqifj4eIYOHUpcXBwJCQlMnDiRefPmFSmnqiqzZ88usiyOt3z4oZ4NGyru48ffX+X990seibxr13auuaY5+vMznnfq1AWn00lISAghIaEAtGnTjkOHDnLu3Dk6dOjI1KkvkZycRI8ecdx11+VNHisqlyQ2QghRC2RkZLB161befvttADp16sSYMWNISUkhMjLSo+xnn31Gnz59OHv20mu4XanHHrNiNiseLTYANhukpGhQVVcXVd26TnSX+Snl7w+PP24tQ8myLdysKNCxYxcWL15JQsJG3nnnDVq3vo7nnvvv5QUmKo0kNkIIUQskJydjMpkwGFyz3+r1egIDA0lKSvJIbE6cOMGhQ4cYPXo0n332WYXEEhvr5LPP8ordd/iwwurVrgE24eFORo2yc4ml5K7IzTffwsKF87Fare6uqBMnjpGenk5GRjohIaHs37+Xli1bERQUzNy5HzJkyAj697+TFi1a8cor8YDrGjqdDlRV5euv19K3b3/vBiquiCQ2QghRC1w8l8wFhbc5nU5mzZrFyy+/XK5zXTwNfn5+JiaTKzu58P+S3HADpKfD1q1w9qyGDRv0DBniajnxlpYtmzNu3LNMnz6ZOnXqYDZn89xzz9OuXVvefHMmkZF1SU1NZfr0mZhMeqKj6/Hqqy8TExPD6dMpPP30M5hMem65pT3Lli3mlVfiufnm9pd8b/n5PkREBBTZXty22q6810TWiqpA1XEdEImpdLJWlKiJMjIy6Ny5M3v27MFgMGCz2bjhhhv47rvv3C02hw4d4rXXXqNevXoAfP/99zRu3Ji2bdvy3HPPlflcF9+Lz507S3BwOCaTntzc0ruJnE5YsULH8eOuGUlatHDQq5cDX9/LecfVy4VrUFh1u/dVB95YK0pabIQQohYICQmhc+fObNmyhbi4OLZt20ZsbCyRkZFs3LiRDh060LJlS+bMmeM+5vjx49x1111efyqqNBoN9O9vZ8ECH86dUzh8WEtiooY+few0aVJ9/ngV1ZNM0CeEELXEiy++yPLly5k0aRKLFy92dzm98cYbHD582F3ObrczefJkTpw4werVq1m6dGmlx2o0wsiRNpo1cwCQk6OwfLkP33+vpeb0M4iKIF1RFag6NjNKTKWTrighyqe8XVGFqSocOKDw3Xc6rFbXQJubbnLQvbvDq+NuKpp0RZWNN7qipMVGCCFEtaUocN11Kg88YHNP2rdrl5bt2+XjSxRPfjOEEEJUe0FBMHSoDX9/V3KzZYuOX3+VjzBRlPxWCCGEuCoEBcGQITZ8fV3Jzbffajl69CrqjxKVQhIbIYQQV43wcLj7bhs+Piqg8PXXOnJzqzoqUZ1IYiOEEOKqUq8e9OplByA3V+Gbby7/Sal77hnA338nV0B0oqpJYiOEEOKq06qVSvPmrkfBjxzRcuCAdEkJF5mgTwghxFVHUaB3bwdJSRpyclyPg0dH2wgJKf3YJUsWkZWVxccff4C/fwBDhgxn4sTxGI1Gmje/lj17dhMVFcW0abPdx+zfv5cZM6bSsGFD/Pz82bdvLx07dub06WS2bNnEs8+O5/vvv+Xo0SOsW/ddBb5zURpJbIQQQlQ6w+KFGBcvvGQZ+3VtyJkyw/1au38f/hPHu18HAc/kw5kz51tr5sPOW+4lfeC9NGnipFkztdi5boYOHcGSJZ/z8MOPU69eFABjxoxl/PhnmTx5OmPGjGXt2tUex7Rp046RI0czd+6HLFiwBKvVwq5dO3j66Wfp0uUm6tSJ5O23P2D58iVXdkGE10hiI4QQotJpT51E/+PWyzpGk5VZ5Bg9EFjo9bH63di3T8u+fVo6drTTpYuzzPU3btyE8HDXJHqDBhW/jETr1m3w9fXF19eXnj17u7fffHMHAO6+e2iZzycqhiQ2QgghKp2jQUOsnbpcsoz9ujYer52BQSUek58PlnxQGjfAx0fFZlP46ScdPj52OnQoW3Lj4+NTahm9vvgVvEvaLiqfJDZCCCEqnWX4KCzDR13WMY42bcn8ct0ly7QDGmTYWLTIh5wchc2bdRgMdq6/3jO50ev1OJ1Odu7cTsOGjS4zelGdyVNRQgghapSQENdEfkZjwUR+Gzdq+esvBef5/CYu7nbeeecNVqxYgtPpZOnSxSQm/sUHH7xbbJ2nTp3gm2/Wc/Dgbyxa9Jl7+5w57wHw2mszSE9Pq9g3JspEFsGsQNVxgTOJqXSyCKYQ5ePNRTDL4++/YckSH/fimQC+virXXOOkVSsnMTHFDy6uCLIIZtl4YxHMcndFWa1W4uPj0Wg0nDlzhpEjR3LrrbcWW3bNmjWsX7+egIAA/P39+e9//4tGoyExMZEBAwZgMpncZZctW0a9evXKG54QQohaql491/pSmzbpSExUUFWFvDzFPbg4MFClVSsH7do5CQwsvT5xdSh3YjN//nx3kpKens6AAQPYsGEDfn5+HuVSUlKYPXs269evx2g08tRTT/HVV18xaNAgAB566CGefPLJ8oYjhBBCuNWrB8OH28nNhaNHFf78U8vx4wpOp0JWlsLPP+vYvl3l2mud3Hyzg8jIqo5YlFe5E5tVq1bx/PPPAxAaGkqTJk344Ycf6N+/v0e5devWERsbi9FoBKB79+6sXr3andjs2rWLadOmYbVa6dy5M3FxceUNTQghhADAZII2bVTatHElOb//ruHgQQ1//61BVRUOHdJy6JCWkBCV4GCVoCCVsDCV5s2d+EvP9FWl3IlNUlISYWFh7tdhYWEkJiYWWy40NLTYcgEBAQwZMoT+/fuTl5fH8OHD0Wq19OjRo7zhCSGEEB5MJoiNdRIb6yQlBXbu1PL7764EJyPD9XXB99+rNG6s0qaNg6ZNVbTaKgxclEmpic2IESNISUkpdt+3334LgHLR6KuLXwOoqopGU/xDWEFBQe4WHl9fX/r27cuaNWsuO7GpjgM+IyICqjqEIiQmIYRwiYyE/v0ddOvmYN8+DWfPKpw7p5CZqWC1usblHDumcOyYxj3w+NprndSvr1LCR5qoYqUmNosWLbrk/ujoaM6ePet+nZaWRlRUVJFyMTEx/Pbbbx7loqOjAUhOTiYsLAyDwQC4JknKz88v2zsoRJ6KKp3EVDp5KkqI2icwEI9ZilUVTp+GfftcrTlWq+fAY19flehoJ1FRKnXrqjgckJ2tkJ3tahFq3drJ+Y80UcnK3RU1cOBANm/eTJcuXUhPT+fYsWPulpZff/2VgIAAmjZtSr9+/Zg/fz75+fkYjUYSEhIYOHAgAMuXL6dt27bup6l+/vlnunXrVt7QhBBCiGLdc88A3n77A/daURdTFNfA4
3r1HPTo4eDPPxV+/13LiROugcd5eQpHjmg5cqT4+rduVbnpJgexsU7ODy0VlaTcic3999/Piy++yIQJEzh79ixTp07F//xIqxUrVhATE0PTpk2JjIzkmWeeYdy4cQQGBhIREeFObG688UY+/vhjtm3bxrlz52jWrBkjRowob2hCCCFEuen10Lq1SuvWdvLy4M8/NZw8qZCcrCErq/iJcCwWhW3bdOzc6RqEHBioZf16I02bOunRo+hMyMJ7ZIK+ClTdulhAYioL6YoSNVVycjJTpkwhPDyc1NRUJkyYQP369T3KfPjhhxw5coTQ0FCOHTvG6NGj6dq162Wdp7pM0FeSJUsW8dFH79OtW3f8/QMYMmQ4EyeOx2g00rz5tezZs5uoqCimTZvtcdxHH73PuXMZ+PqaSEz8ixdfnILZDK++OpXQ0DCcznz8/Y306/cM27ZpSU0tGIQTGHiGiRNj3K9DQ5307KkhIsJKeLiT8HCVJk2ctGnjxNe30i5FtVMtJugTQghxdYiPj2fo0KHExcWRkJDAxIkTmTdvnkeZLVu28Mknn6DT6fjjjz8YNmwYP//8s3sMpLcYFi/EuHghQJH1nwIeewBNairW2+LIe2qce7t2/z78J44HwPzydBxt2rr3+b71GvrvN5JfhjWohg4dwZIln/Pww4+7u6LGjBnL+PHPMnnydMaMGcvatas9jsnKymTJkkWsX/89Wq2W5cuX4HDYOXBgB05nJuPHvwzAggWf0KyZStOmdo4eVfj9dw25uQqgUr++k7/+ciU76ekali4F1/rkBXQ6lVatnLRp4yAszPXoeXAwBASo+Pmp+PuD0eiaMfnC4GW7HaxWBZvNNTZIrwe9XsVgAH9/lcBAlYAAcDo5PzAaUlI0HD6s4Y8/NBw5oiEsTOWWWxx07OigeXMnmZmQmuoaTO3rqxIe7voqNI/uJVkssGmTljVrfNi4UYuPD/ToYadnTwddu9o96vH2+qGS2AghRC2QkZHB1q1befvttwHo1KkTY8aMISUlhchCs9J9+umn7idY69evT25uLtnZ2V5PbLSnTqL/cWux+3x270L71ymc9Rt4bNdkZbqP0WRl4ii0T/fnH+h/3IqtlBXDL6Vx4yaEh7uWPRg0aLDHPn//AFq2bMVjjz3A7bf3o1ev2/Hz86dFi1a8884bjB8/jttu683QoSMB1xidZs1UmjVzRXnunJ2hQ3P4+2+FH37Q8t13On791YfUVBWLpaA7y24vGKBc2Vau9Dkfu4qqFt/FptN59or4+BQkUjqdK7FSVddAaldCV+Dzz/V8/nnROgcPtvH++5f/wFBJJLERQohaIDk5GZPJ5E5Q9Ho9gYGBJCUleSQ2haflSEhIoFevXu4Pe29yNGiItYQkxHbjTTjqN8B+TXOP7c7AIPcxzsAgj332a5pj7dQFR4OGVxyTj49Pifs0Gg1vvvkehw4d4Jtv1nHvvUN47725NGjQkEWLVrB9+0989dVKPvvsE+bOXYhOV/zHa716KiNH2hk50k5EhA+pqWbMZkhNVThwQMvu3Vr27HG1opw7p2C3V+xiViaTStOmTpKTFdLSXD/7kpIaoEg8djvk5QEUf4zRqNK9ux2HQ2HrVi15eUXLJSRocTjw2hxBktgIIUQtoKpqsXOMFbcNXInQkiVLeO211y77XBePi8jPz8Tko8CePZiuv97VnPHQg9geehCAi3s3HJ99jgPXR6XHvg43YduwEYAi7Ufjx2MbPx5tMfUVx2g0oNdr2b//Fxo1aoTR6INWq8FkKr5f5MyZM6xd+xUPPPAQN954A2lpZzh9OpHU1GQMBgO9evWkV6+e9OjRFUWxe6x96LoGPsXO11WnTgB16kCTJnDLLfDwwwX7VBXMZsjIgOxs15fZ7EokLrSMOJ0XWkwKunQsFtdXfj5kZUFmputLq4XQUNfq5+Hh0KIFNGigoNFoUVX4/XfYtAlOnoSICNdTYXXqQG4upKbCmTOu+i78yqgq2GwF57PbXfsUxRVL9+7Qt6+Cv7/P+Wvgqn/PHtyrrCsK9OunoW7dgmtT3nnNJLERQohaIDo6mpycHCwWCwaDAZvNRlZWVrHzjiUlJfHKK68wa9YsQkJCLvtcFw8etpxJx7loCZqzZ7HYHDiua3uJoyvHbbf1ZtasmYDK00//mwULFnDy5Elef/11Hn/8X0XKOxwKO3fuJCkpGUXRYDL5c8MN7fnjj8PMnfshCQmbMJuzGTXqfrRaY5FB0vn5tiIPSZT1wQlfX9dXnTrlesvFSksr+D48HO6+27v15+VdaNFxiY11fV3szBnX/2XwsBBCiDIJCQmhc+fObNmyhbi4OLZt20ZsbCyRkZFs3LiRDh06EBAQwKlTp3j11VeZMmUKISEhrFu3jrp16xJb3KdRGak6HUpuLgD6jRvIrxeFGub97q3L8dBDj3m8njZt1iXLBwYGMnPmm0W2t259HbNnv+XV2ET5yITQQghRS7z44ossX76cSZMmsXjxYl5+2fUkzxtvvMHhw4cBeOSRR9i+fTv9+/enc+fOTJw4EZvNVr4T+/piucM1b5lis2FY/aWrD0OICiDz2FSg6jY/C0hMZSHz2AhRPiXOY/PzVtiyBQBbm3bYbu9bMGCjhrtwDQqrbve+6sAbXVHSYiOEEKJydO+OI9o1SZ3P/r0YFsxDSfyrioMSNY0kNkIIISqHRoOl/yCcQa5HtbUpp/Fd9Bn61V+6HvkRwgsksRFCCFF5AgPJf+ARrB07o56f60V3+BC+n36M5lgJK0oKcRkksRFCCFG59HrsXbqR9/Bj2K9tCYCSl4dx+VJ8Er4Hh6OUCoQomSQ2QgghqkZgENYBd2K5YwCqj2t2OZ+d2zEsXgg5OVUcnLhaSWIjhBCiSjlaXUf+fQ/gqONa2kGbnITx8/muqWwryD33DODvv5MrrH5RdSSxEUIIUeXU0DAso+7Dfl0bAGw3d4AS1lsS4lLkt0YIIUT1oNNh7XMH9patcTZqXLBdVVHS07w2W/GSJYvIysri448/wN8/gCFDhjNx4niMRiPNm1/Lnj27iYqKYtq02e5j9u/fy4wZU2nYsCF+fv7s27eXjh0706NHT5Yt+4KoqGgSE/9i4MC7aN/+Fq/EKa6MJDZCCCEqnWHxQoyLF5aprGLJR5Oaimow4gwLA62W/OGjsAwfVaS+i7cXZ+jQESxZ8jkPP/w49eq51soaM2Ys48c/y+TJ0xkzZixr1672OKZNm3aMHDmauXM/ZMGCJVitFnbt2sHChZ/Su3c/evbsxalTJzkmT3ZVOUlshBBCVDrtqZPof9x6+Qeezxtsnbq4vrFaQG9w1+fefgUaN25CeLirVWjQoMHFlmndug2+vr74+vrSs2dvcnNzeeutWRw8+Bu33daL7t17XvH5hXdIYiOEEKLSORo0xFrWJMRuR5OdDQ47zvAI9/GYzfh+/AGO5i1whoZh7dTFtf0K+fj4lFpGr9d7vB4w4E46d+7Kd999y0svTWDgwLu4994HrjgGUX6S2AghhKh0ljJ0GRWhqh5rS/ls+gHFakX3235U
IO++B7FffwM4naC59LMxer0ep9PJzp3badiw0eW/gfPeeecNnnjiSYYMGU7dunVZt27NFdflpqquJ8LKkGiJoiSxEUIIcXW4aMFMe4trUTIy0P55GAXQHT2C7ugRnEFBOJo0BaeK4rC7Jvy7sN6z6vrP7TENeOfNWaDV8vTT/2bp0sUkHTvCnPHP8o9R9+Fo0859Hs2RP0nasJ5vE77nTI6ZZU89wejmLcDpJOS3fUwdeQ+RISEk2Ww83G8g2l/3oMk8h5J5DiU7G0eLa6FZE3d9xk/nYpz/CeTloKz/HjUo2PX2sjIJieuGJuU0Sl4eqo8PakAAqn8AoIDqdL0XRUE1GMBgRDXoQQXFbgenk4xNPxVcoNxcgkYPQ8kxk/fAIx6JpM+WTfhs24zqF4Dq54cmIx3NqZNoT55Ac+4czqAg1OAQ1KAgsFpQMjPRZGai5JhR8vLI+c9/sdwzzF2f7ztvYly+BGdwMJkr1xbEoKoY533sGieVlIg2KQklPQ0MBlSTH46GjciJn1Le3wwPktgIIYS4Kql162G9czBKWhq6XTvQHTqAYrOhycxEs+eXSx77REQd8h56DDUsDIBp02Zh/PgDNOnp2I8f90hsFEs+TXNy+ODmDgUV5OUB8Og1LTwrPnbE9VWI8/zCn+76zmXgs3+v6z34mgrej38AmqREFJvNVc5mQ0lPh/T0MlyNC5UUatXS69Fv2QSANe52j2I+Wzfh9/qsstd7EU2GZ0zaxFPoDuzHGRLisV3JziLg+XEl1mNvdR3enopREhshhBBXNTUsDNvtfbF1vw3twQPo9v2K5lwG6HSoWh1otQUf9gqu7xUF9aLuKtXoi+rri2o0emx3BoXgiKnvaiXR+YDP+Xp1WtBoXdWmnUWTmoLGbHbVpdGgBgahBgTgDPd8TN3RoiWWuN4YIiOg8JgdjYb8e+9HNRhRg4JQcnNRzNkoZrMrYdFqUTUaFFUFSz5KvgXFko+q1YJW55r3x+l0vV9wPT7fpRv4+BQZe6RYbaiK4qrrwvuMqIOjYSOc4eEoWVloMjJQMs+hGgyoQUGogcGo/v6oRqOrRawQe8vWWPrcgWoyeWzXJBdMgqj6+uKIjsEZHoFitaDk5OBo3ARvU1S10Lu6yqWlmXE6q8/biYgI4MyZ6rVircRUOo1GISzMv6rDEOKqdfG9+Ny5swQHh2My6cnNtVZhZJUgJ8c1PiYgwGOcz4VrUFiV3/ucTsjNRZNjxukfAH5+FXIOzZlUVJ0Pamhoke7EixW+Jld6L5YWGyGEEMJbKiI5qCgaDfj74/SvwD/kNBqckXUrrv7iTlmpZxNCCFHrKIpCDeocuGyqqqKU0lIhvEcSGyGEEBVKp/MhNzcbh8NeqxIcVVVxOBzk5maj08mj25VFuqKEEEJUKJMpAIslD7M5i9xcS61JbhRFQVEU9HojBoNvVYdTa5Q7sbFarcTHx6PRaDhz5gwjR47k1ltvLbbs33//zcsvv8zZs2dZsmSJx741a9awfv16AgIC8Pf357///S+aUiZYEkIIUXbJyclMmTKF8PBwUlNTmTBhAvXr1/coo6oqM2fOJC0tDbPZTM+ePRk8uPjlBcpKURSMRlPVD5YVtUK5E5v58+e7E5H09HQGDBjAhg0b8CtmANW8efO4+eabWb9+vcf2lJQUZs+ezfr16zEajTz11FN89dVXDBo0qLzhCSGEOC8+Pp6hQ4cSFxdHQkICEydOZN68eR5lvv76a06ePMm7776LxWKhb9++tG/fnpiYmOIrFaKaKXeTyKpVq+jWrRsAoaGhNGnShB9++KHYsi+88AJBQUFFtq9bt47Y2FiM5+cO6N69O6tXry5S0Hf6QwAACMFJREFUTgghxJXJyMhg69atdO3aFYBOnTqxa9cuUlJSPMoVvqcbDAbat2/P2rVri9QnRHVV7habpKQkws7P3AgQFhZGYmLiZdcRGhparjrA9cx7dSMxlU11iqk6xSKEtyQnJ2MymTAYDIBrraTAwECSkpKIjIx0l/PGPf1S/4bk35cnuR5FXbgmV3ptSk1sRowYUSSjv+Dbb78FKPIY2+U+1qaqqlfG04SEVL/5A6rjRG8SkxC1T0mPHJflfn259/RL3Yvl37onuR5FlfealJrYLFq06JL7o6OjOXv2rPt1WloaUVFRlxVETEwMv/32m0cd0dHRl1WHEEKIkkVHR5OTk4PFYsFgMGCz2cjKyipyv46OjiYtLc39Oi0tjUaNGlVytEJcuXI3kwwcOJDNmzcDkJ6ezrFjx+jRowcAv/76K0ePHi21jn79+vHLL7+Qn58PQEJCAgMHDixvaEIIIc4LCQmhc+fObNmyBYBt27YRGxtLZGQkGzduJDvb9bRS4Xu6xfL/7d1fSFN9GAfwr00NdZguSiPRQDJCMIIQhEAtwaKsLhITml5s/qHyQupCjbSo1QalYLtbedF/qTDyokTK0SrKikgsQcOLUNKYOnFMV7rfeyHvzPd93ZvkOtvP7+fqnHEYz+9hPDyc38553Ojs7MSePXsUi5tosX57VtT3799RV1eHFStWwG63o7CwEFlZWQCA2tpaJCQkoLS0FMDsU1FWqxV9fX3Izc1FeXk51q5dCwB4+PAhHj9+jOjoaERFReHkyZN83JuIaAkNDAzAYDBgzZo1+PbtG6qrq5GUlIS9e/fi9OnT2LZtG4QQMJlMGB0dhdPpxI4dO3Dw4EGlQyf6ZVINwSQiIqLljbdEiIiISBpsbIiIiEgabGyIiIhIGmxsiIiISBpsbIiIiEgavz1SIZCcP38eLpcLUVFR6O/vR0VFBdLS0pQOC0+ePIHBYEBJSQkKCwsVieFXpvr+adPT07h27RoaGxtx//59JCcnKxrP8PAwLl68iNjYWLjdbjgcDtTV1c0b90FEixeI9cffFqpvvnIhc5581dclz4mQSGFhoff46dOnIicnR8FoZr1+/Vrcu3dPHD58WNy6dUuxOEpKSkR7e7sQQoiOjg5RXFysWCx/u3Pnjnj37p1ISUkRnz9/Vjoc8erVK9HQ0OA9NxqNoqamRsGIiOQQiPXH3xaqb75yIXOefNXXpc6JVI3NzMyM97ivr09s2bJFeDweBSOao2RjMzo6KjZv3iympqaEEEK43W6RmpoqhoaGFInnnwKlsfF4PPN+Lzdu3BA6nU7BiIiCX6DXH3/7ub75yoXseVqovvojJ1L9x+bnNxVbrVYUFBQsenibjHxN9aU5ISEh834vz549w6FDhxSMiCj4sf7M8ZUL2fO0UH31R06C6j82/zdpXKVSAQA+ffqEN2/eoLGxMWBiUpL4jam+y9Xdu3exceNG5OTkKB0KUVBj/ZnjKxfLKU8/19fu7u4lz0lQNTb/N2kcAHp6emCxWNDQ0ODt8pSOSWm/OtWXZrW0tGBgYAAnTpxQOhSioMf6M8dXLsLDw5dFnv5ZX/2RE6m2orq6unDlyhUYjUZERkbi6tWr+Pr1q9JhKc7XVF+ar7m5GYODg6isrAQAnDt3TuGIiIIb688cX7lYDnn6r/rqj5xIMwRzcnIS27dvR2hoKEJ
DZ29EuVwutLa2IiEhQbG4RkdHYTab0d7ejg0bNiAzMxN6vf6Px7HQVF8lvX//Hq2trbh58yby8vKwa9cuRbd+3r59C61WO+/xbrVajba2NsViIpJBINYff1uovvnKhcx58lVflzon0jQ2RERERFJtRREREdHyxsaGiIiIpMHGhoiIiKTBxoaIiIikwcaGiIiIpMHGhoiIiKTBxoaIiGiJVFVV4fLly0qHsayxsSEiIiJpBNWsKCIiokDg8Xhw5swZ9Pb2QqVSISkpCSkpKbDZbFi5ciU6Ozuxb98+5Ofno7u7GxcuXEBISAhUKhVqa2uRnJwMs9mM27dvIysrC2NjYxgeHsbq1athNBqh0Whgt9tRVVUFt9uN6elpZGdno7S0VOmlBzw2NkRERItks9kwODjoHYR89OhR7Ny5Ez09PVi/fj0qKioAABMTE9Dr9WhoaEBGRgasViuOHDmCR48e4dixYxgYGEBnZydaWlqgVqtx6tQpGAwGXLp0CU1NTUhPT0dpaSlcLhd0Oh0bm1/ArSgiIqJFio6ORm9vL168eAGPx4P6+vr/nDrd0dGByMhIZGRkAACysrJgt9vx4cMH7zWZmZlQq9UAgP3796OtrQ0zMzOIiYmBzWZDX18fIiMj0dTU9GcWF+R4x4aIiGiRtm7dirNnz8JisaCmpgYFBQUoKyv713VDQ0MYHx+HVqv1fqbRaOBwOLznq1at8h7HxMTgx48fGBsbg06nQ0REBCorK6FSqVBeXo7du3f7d2ESYGNDRES0SBMTE0hPT0dmZia+fPkCvV6PuLi4f123bt06xMfH4/r1697PnE4nwsPDvefj4+Pe47GxMYSFhSE2NhYjIyPQarXQarV4+fIlysrKkJqaisTERP8uLshxK4qIiGiR2tvb0dzcDABITExEXFwcPB4PoqKiMDk5CZfLhePHjyM7OxsOhwNdXV0AAJfLhaKiIjidTu93PX/+3Hv+4MED5ObmQqVSob6+Hj09PQCAtLQ0hIWFQQjxh1cafEIEs0RERLQo/f39MBqNmJqagsvlwqZNm1BXV4ePHz+iuroaarUaxcXFyMvLQ3d3N0wmE4QQEEJAr9cjOzsbwOx7byIiIjAyMoLBwUFoNBqYTCZoNBpYrVZYLBaoVCo4nU4cOHAARUVFCq888LGxISIiUkhVVdW8p6jo93ErioiIiKTBPw8TEREpwGw2e1/oFx8fj/z8fKVDkgK3ooiIiEga3IoiIiIiabCxISIiImmwsSEiIiJpsLEhIiIiabCxISIiImmwsSEiIiJp/AVeZqZYkiFBRQAAAABJRU5ErkJggg==\n", 260 | "text/plain": [ 261 | "
" 262 | ] 263 | }, 264 | "metadata": {}, 265 | "output_type": "display_data" 266 | } 267 | ], 268 | "source": [ 269 | "# Start figure\n", 270 | "fig = plt.figure(figsize=(9, 6))\n", 271 | "# Plotting 1: original data\n", 272 | "ax1 = fig.add_subplot(2, 3, 1)\n", 273 | "ax1.set_title(\"Original data\", pad=20)\n", 274 | "ax1.scatter(A[:], np.zeros(len(A)), c='r')\n", 275 | "ax1.scatter(B[:], np.zeros(len(B)), c='b')\n", 276 | "ax1.set_ylim((-0.1, 0.1))\n", 277 | "ax1.set_xlim((-2, 2))\n", 278 | " \n", 279 | "# Plotting: cost\n", 280 | "ax2 = fig.add_subplot(2, 3, 3)\n", 281 | "title = 'Seed: '+str(11)\n", 282 | "ax2.set_title(title, pad=20)\n", 283 | "cst_history = np.array(cst_history)\n", 284 | "rr_history = np.array(rr_history)\n", 285 | "ss_history = np.array(ss_history)\n", 286 | "rs_history = np.array(rs_history)\n", 287 | "ax2.plot(cst_history[:, 0], cst_history[:, 1], color='blue', marker='', linestyle='-', linewidth=2.5, label=\"cost\")\n", 288 | "ax2.plot(rr_history[:, 0], rr_history[:, 1], color='red', marker='', linestyle='--', linewidth=2.5, label=\"tr rr\")\n", 289 | "ax2.plot(ss_history[:, 0], ss_history[:, 1], color='red', marker='', linestyle=':', linewidth=2.5, label=\"tr ss\")\n", 290 | "ax2.plot(rs_history[:, 0], rs_history[:, 1], color='red', marker='', linestyle='-.', linewidth=2.5, label=\"tr rs\")\n", 291 | "plt.legend(fancybox=True, framealpha=0.5, loc='lower left')\n", 292 | "ax2.set_ylim((0, 1))\n", 293 | "ax2.set_xlabel(\"steps\")" 294 | ] 295 | } 296 | ], 297 | "metadata": { 298 | "kernelspec": { 299 | "display_name": "Python 3", 300 | "language": "python", 301 | "name": "python3" 302 | }, 303 | "language_info": { 304 | "codemirror_mode": { 305 | "name": "ipython", 306 | "version": 3 307 | }, 308 | "file_extension": ".py", 309 | "mimetype": "text/x-python", 310 | "name": "python", 311 | "nbconvert_exporter": "python", 312 | "pygments_lexer": "ipython3", 313 | "version": "3.8.3" 314 | } 315 | }, 316 | "nbformat": 4, 317 | "nbformat_minor": 2 318 | } 319 | -------------------------------------------------------------------------------- /random_embedding_circuits/X_1d_sep.txt: -------------------------------------------------------------------------------- 1 | 1.428386558165781750e+00 2 | -1.313839087028072727e+00 3 | 1.596285966845742754e+00 4 | 1.367550496433965534e+00 5 | 1.468173134298068616e+00 6 | -1.454971615808443364e+00 7 | 1.161811926465180944e+00 8 | -1.714610930998586413e+00 9 | -1.327999547708873740e+00 10 | 1.382642255659186192e+00 11 | 1.074868758557305348e+00 12 | 1.459529311267870444e+00 13 | 1.382177001267317351e+00 14 | -1.131668993147058488e+00 15 | -1.248997409178646656e+00 16 | -1.322204908516744437e+00 17 | -1.464985657964124277e+00 18 | -1.454739026129264534e+00 19 | 1.557625966631448922e+00 20 | -1.401922341642383385e+00 21 | -3.593397378255333563e-01 22 | -6.950145572910067064e-03 23 | 1.138715720991759078e-01 24 | -1.704154411162837890e-01 25 | -1.840487346705927951e-01 26 | 2.641666606852424715e-01 27 | 3.363208579927841058e-01 28 | 1.640524890397370150e-01 29 | -1.572146825398952896e-01 30 | -2.091069596104288553e-01 31 | -1.205649127814735350e-01 32 | -2.119293925672509904e-01 33 | 1.764063617893480984e-01 34 | -5.099904663363319379e-01 35 | 2.850906880823069756e-01 36 | 2.240966054048715017e-01 37 | 3.109652609661974765e-01 38 | 1.891447943386029285e-01 39 | -2.009277177789230429e-01 40 | 2.326104293168703568e-02 41 | -------------------------------------------------------------------------------- 
/random_embedding_circuits/Y_1d_sep.txt: -------------------------------------------------------------------------------- 1 | -1.000000000000000000e+00 2 | -1.000000000000000000e+00 3 | -1.000000000000000000e+00 4 | -1.000000000000000000e+00 5 | -1.000000000000000000e+00 6 | -1.000000000000000000e+00 7 | -1.000000000000000000e+00 8 | -1.000000000000000000e+00 9 | -1.000000000000000000e+00 10 | -1.000000000000000000e+00 11 | -1.000000000000000000e+00 12 | -1.000000000000000000e+00 13 | -1.000000000000000000e+00 14 | -1.000000000000000000e+00 15 | -1.000000000000000000e+00 16 | -1.000000000000000000e+00 17 | -1.000000000000000000e+00 18 | -1.000000000000000000e+00 19 | -1.000000000000000000e+00 20 | -1.000000000000000000e+00 21 | 1.000000000000000000e+00 22 | 1.000000000000000000e+00 23 | 1.000000000000000000e+00 24 | 1.000000000000000000e+00 25 | 1.000000000000000000e+00 26 | 1.000000000000000000e+00 27 | 1.000000000000000000e+00 28 | 1.000000000000000000e+00 29 | 1.000000000000000000e+00 30 | 1.000000000000000000e+00 31 | 1.000000000000000000e+00 32 | 1.000000000000000000e+00 33 | 1.000000000000000000e+00 34 | 1.000000000000000000e+00 35 | 1.000000000000000000e+00 36 | 1.000000000000000000e+00 37 | 1.000000000000000000e+00 38 | 1.000000000000000000e+00 39 | 1.000000000000000000e+00 40 | 1.000000000000000000e+00 41 | -------------------------------------------------------------------------------- /random_embedding_circuits/two-qubit-random-embedding.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import pennylane as qml\n", 10 | "from pennylane import numpy as np\n", 11 | "from two_wires_random_unitary_embeddings import random_gate_sequence, random_embedding_circuit\n", 12 | "import matplotlib.pyplot as plt\n", 13 | "import seaborn as sns\n", 14 | "sns.set(context='notebook', font='serif')\n", 15 | "import dill as pickle" 16 | ] 17 | }, 18 | { 19 | "cell_type": "code", 20 | "execution_count": 3, 21 | "metadata": {}, 22 | "outputs": [], 23 | "source": [ 24 | "CSWAP = np.array([[1, 0, 0, 0, 0, 0, 0, 0],\n", 25 | " [0, 1, 0, 0, 0, 0, 0, 0],\n", 26 | " [0, 0, 1, 0, 0, 0, 0, 0],\n", 27 | " [0, 0, 0, 1, 0, 0, 0, 0],\n", 28 | " [0, 0, 0, 0, 1, 0, 0, 0],\n", 29 | " [0, 0, 0, 0, 0, 0, 1, 0],\n", 30 | " [0, 0, 0, 0, 0, 1, 0, 0],\n", 31 | " [0, 0, 0, 0, 0, 0, 0, 1]])" 32 | ] 33 | }, 34 | { 35 | "cell_type": "code", 36 | "execution_count": 4, 37 | "metadata": {}, 38 | "outputs": [], 39 | "source": [ 40 | "def featmap(x,weights,wires,gate_sequence):\n", 41 | " \"\"\"Wrapper for feature map to define specific keyword arguments.\"\"\"\n", 42 | " return random_embedding_circuit(x,weights,wires,gate_sequence)" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": 5, 48 | "metadata": {}, 49 | "outputs": [], 50 | "source": [ 51 | "seed = 42 # random seed for reproducibility\n", 52 | "n_layers = 2 # number of layers for featuremap, if applicable\n", 53 | "n_inp = 2 # number of wires that feature map acts on\n", 54 | "n_steps = 200 # steps of GD performed\n", 55 | "log_step = 5 # how often the test error is calculated\n", 56 | "batch_size = 2 # how many pairs are sampled in each training step\n", 57 | "step_size = 0.02 # learning rate\n", 58 | "n_all = 2*n_inp + 1\n", 59 | "\n", 60 | "\n", 61 | "dev = qml.device('default.qubit', wires=n_all)\n", 62 | "optimizer = qml.RMSPropOptimizer(stepsize=step_size)" 63 | ] 64 | }, 
65 | { 66 | "cell_type": "code", 67 | "execution_count": 7, 68 | "metadata": {}, 69 | "outputs": [], 70 | "source": [ 71 | "X = np.loadtxt(\"X_1d_sep.txt\") # load features\n", 72 | "Y = np.loadtxt(\"Y_1d_sep.txt\") # load labels\n", 73 | "\n", 74 | "# Divide inputs into classes\n", 75 | "\n", 76 | "A = X[Y == -1]\n", 77 | "B = X[Y == 1]" 78 | ] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "execution_count": 8, 83 | "metadata": {}, 84 | "outputs": [], 85 | "source": [ 86 | "# initial parameters are taken to be small\n", 87 | "init_pars = []\n", 88 | "for i in range(n_layers):\n", 89 | " pars = [0.001 for j in range(n_inp)]\n", 90 | " init_pars.append(pars)" 91 | ] 92 | }, 93 | { 94 | "cell_type": "code", 95 | "execution_count": null, 96 | "metadata": {}, 97 | "outputs": [], 98 | "source": [ 99 | "# Fixing seed for reproducability\n", 100 | "np.random.seed(seed)\n", 101 | "#This generates a random sequence of gate. \n", 102 | "random_gate_sequence = random_gate_sequence(n_inp,n_layers)" 103 | ] 104 | }, 105 | { 106 | "cell_type": "code", 107 | "execution_count": 11, 108 | "metadata": {}, 109 | "outputs": [], 110 | "source": [ 111 | "@qml.qnode(dev, cache=True)\n", 112 | "def circuit(weights, x1=None, x2=None,gate_sequence=None):\n", 113 | "\n", 114 | " # Load the two inputs into two different registers\n", 115 | " featmap(x1,weights, range(1, n_inp+1),gate_sequence)\n", 116 | " featmap(x2,weights, range(n_inp+1, 2*n_inp+1),gate_sequence)\n", 117 | "\n", 118 | " # Do a SWAP test\n", 119 | " qml.Hadamard(wires=0)\n", 120 | " for k in range(n_inp):\n", 121 | " qml.QubitUnitary(CSWAP, wires=[0, k+1, n_inp+k+1])\n", 122 | " qml.Hadamard(wires=0)\n", 123 | "\n", 124 | " # Measure overlap by checking ancilla\n", 125 | " return qml.expval(qml.PauliZ(0))\n", 126 | "\n", 127 | "def tr_rr(weights, A=None,gate_sequence=None):\n", 128 | " # Compute intra-class overlap A\n", 129 | " tr_rr = 0\n", 130 | " for a1 in A:\n", 131 | " for a2 in A:\n", 132 | " tr_rr += circuit(weights, x1=a1, x2=a2,gate_sequence=gate_sequence)\n", 133 | " tr_rr = tr_rr / len(A)**2\n", 134 | " return tr_rr\n", 135 | "\n", 136 | "def tr_ss(weights, B=None,gate_sequence=None):\n", 137 | " # Compute intra-class overlap B\n", 138 | " tr_ss = 0\n", 139 | " for b1 in B:\n", 140 | " for b2 in B:\n", 141 | " tr_ss += circuit(weights, x1=b1, x2=b2,gate_sequence=gate_sequence)\n", 142 | " tr_ss = tr_ss/len(B)**2\n", 143 | " return tr_ss\n", 144 | "\n", 145 | "def tr_rs(weights, A=None, B=None,gate_sequence=None):\n", 146 | " # Compute inter-class overlap A-B\n", 147 | " tr_rs = 0\n", 148 | " for a in A:\n", 149 | " for b in B:\n", 150 | " tr_rs += circuit(weights, x1=a, x2=b,gate_sequence=gate_sequence)\n", 151 | " tr_rs = tr_rs/(len(A)*len(B))\n", 152 | " return tr_rs\n", 153 | "\n", 154 | "def cost(weights, A=None, B=None,gate_sequence=None):\n", 155 | "\n", 156 | " # Fidelity cost,\n", 157 | " rr = tr_rr(weights, A=A,gate_sequence=gate_sequence)\n", 158 | " ss = tr_ss(weights, B=B,gate_sequence=gate_sequence)\n", 159 | " rs = tr_rs(weights, A=A, B=B,gate_sequence=gate_sequence)\n", 160 | " distance = - rs + 0.5 * (ss + rr)\n", 161 | " return 1 - distance # min is 0" 162 | ] 163 | }, 164 | { 165 | "cell_type": "code", 166 | "execution_count": 18, 167 | "metadata": {}, 168 | "outputs": [ 169 | { 170 | "name": "stdout", 171 | "output_type": "stream", 172 | "text": [ 173 | "Step 0 -- rs 0.190116-- rr 0.521261 -- ss 0.861625 -- cst 0.498673\n", 174 | "Step 5 -- rs 0.158762-- rr 0.540512 -- ss 0.839327 -- cst 0.468843\n", 175 | "Step 10 -- 
rs 0.145454-- rr 0.558638 -- ss 0.830175 -- cst 0.451047\n", 176 | "Step 15 -- rs 0.128236-- rr 0.606654 -- ss 0.819267 -- cst 0.415276\n", 177 | "Step 20 -- rs 0.108551-- rr 0.636439 -- ss 0.808112 -- cst 0.386276\n", 178 | "Step 25 -- rs 0.098144-- rr 0.651337 -- ss 0.804406 -- cst 0.370273\n", 179 | "Step 30 -- rs 0.091919-- rr 0.687369 -- ss 0.805823 -- cst 0.345323\n", 180 | "Step 35 -- rs 0.080947-- rr 0.736391 -- ss 0.801702 -- cst 0.311900\n", 181 | "Step 40 -- rs 0.071450-- rr 0.734553 -- ss 0.794562 -- cst 0.306893\n", 182 | "Step 45 -- rs 0.072170-- rr 0.781344 -- ss 0.798373 -- cst 0.282312\n", 183 | "Step 50 -- rs 0.072222-- rr 0.795575 -- ss 0.798881 -- cst 0.274994\n", 184 | "Step 55 -- rs 0.072438-- rr 0.812306 -- ss 0.799412 -- cst 0.266579\n", 185 | "Step 60 -- rs 0.071878-- rr 0.832059 -- ss 0.799556 -- cst 0.256071\n", 186 | "Step 65 -- rs 0.070966-- rr 0.836287 -- ss 0.799574 -- cst 0.253035\n", 187 | "Step 70 -- rs 0.072995-- rr 0.856626 -- ss 0.801294 -- cst 0.244035\n", 188 | "Step 75 -- rs 0.073904-- rr 0.873162 -- ss 0.802494 -- cst 0.236076\n", 189 | "Step 80 -- rs 0.074439-- rr 0.882848 -- ss 0.804224 -- cst 0.230903\n", 190 | "Step 85 -- rs 0.070002-- rr 0.869263 -- ss 0.803925 -- cst 0.233408\n", 191 | "Step 90 -- rs 0.073089-- rr 0.895879 -- ss 0.806533 -- cst 0.221883\n", 192 | "Step 95 -- rs 0.071768-- rr 0.896136 -- ss 0.806899 -- cst 0.220251\n", 193 | "Step 100 -- rs 0.073635-- rr 0.910488 -- ss 0.809452 -- cst 0.213666\n", 194 | "Step 105 -- rs 0.072391-- rr 0.910759 -- ss 0.810601 -- cst 0.211711\n", 195 | "Step 110 -- rs 0.074761-- rr 0.922322 -- ss 0.813705 -- cst 0.206747\n", 196 | "Step 115 -- rs 0.073570-- rr 0.920451 -- ss 0.816344 -- cst 0.205173\n", 197 | "Step 120 -- rs 0.073786-- rr 0.921830 -- ss 0.817627 -- cst 0.204058\n", 198 | "Step 125 -- rs 0.073057-- rr 0.919692 -- ss 0.818564 -- cst 0.203929\n", 199 | "Step 130 -- rs 0.073423-- rr 0.921143 -- ss 0.816726 -- cst 0.204489\n", 200 | "Step 135 -- rs 0.072407-- rr 0.916846 -- ss 0.820209 -- cst 0.203879\n", 201 | "Step 140 -- rs 0.073530-- rr 0.921108 -- ss 0.820910 -- cst 0.202522\n", 202 | "Step 145 -- rs 0.071576-- rr 0.913191 -- ss 0.821791 -- cst 0.204085\n", 203 | "Step 150 -- rs 0.070383-- rr 0.905993 -- ss 0.818878 -- cst 0.207948\n", 204 | "Step 155 -- rs 0.073432-- rr 0.921585 -- ss 0.820636 -- cst 0.202322\n", 205 | "Step 160 -- rs 0.071795-- rr 0.915596 -- ss 0.818936 -- cst 0.204529\n", 206 | "Step 165 -- rs 0.073250-- rr 0.920746 -- ss 0.820711 -- cst 0.202521\n", 207 | "Step 170 -- rs 0.074033-- rr 0.922831 -- ss 0.820137 -- cst 0.202549\n", 208 | "Step 175 -- rs 0.073332-- rr 0.921243 -- ss 0.821566 -- cst 0.201928\n", 209 | "Step 180 -- rs 0.072749-- rr 0.919193 -- ss 0.822266 -- cst 0.202020\n", 210 | "Step 185 -- rs 0.070227-- rr 0.904459 -- ss 0.816923 -- cst 0.209537\n", 211 | "Step 190 -- rs 0.072940-- rr 0.919562 -- ss 0.820632 -- cst 0.202843\n", 212 | "Step 195 -- rs 0.071922-- rr 0.915178 -- ss 0.822983 -- cst 0.202841\n" 213 | ] 214 | } 215 | ], 216 | "source": [ 217 | "# Optimising the circuit\n", 218 | "\n", 219 | "cst_history = []\n", 220 | "rr_history = []\n", 221 | "ss_history = []\n", 222 | "rs_history = []\n", 223 | "par_history = [init_pars]\n", 224 | "pars = init_pars\n", 225 | "\n", 226 | "for i in range(n_steps): \n", 227 | " \n", 228 | " if i % log_step == 0:\n", 229 | " cst = cost(pars, A=A, B=B,gate_sequence=random_gate_sequence)\n", 230 | " rr = tr_rr(pars, A=A,gate_sequence=random_gate_sequence)\n", 231 | " ss = tr_ss(pars, 
B=B,gate_sequence=random_gate_sequence)\n", 232 | " rs = tr_rs(pars, A=A, B=B,gate_sequence=random_gate_sequence)\n", 233 | " cst_history.append([i, cst])\n", 234 | " rr_history.append([i, rr])\n", 235 | " ss_history.append([i, ss])\n", 236 | " rs_history.append([i, rs])\n", 237 | " print(\"Step {} -- rs {:2f}-- rr {:2f} -- ss {:2f} -- cst {:2f}\".\n", 238 | " format(i, rs, rr, ss, cst))\n", 239 | " \n", 240 | " \n", 241 | " # Sample a batch of pairs\n", 242 | " selectA = np.random.choice(range(len(A)), size=(batch_size,), replace=True)\n", 243 | " selectB = np.random.choice(range(len(B)), size=(batch_size,), replace=True)\n", 244 | " A_batch = [A[s] for s in selectA]\n", 245 | " B_batch = [B[s] for s in selectB]\n", 246 | " \n", 247 | " # Walk one optimization step (using all training samples)\n", 248 | " pars = optimizer.step(lambda w: cost(w, A=A_batch, B=B_batch,gate_sequence=random_gate_sequence), pars)\n", 249 | " par_history.append(pars)" 250 | ] 251 | }, 252 | { 253 | "cell_type": "code", 254 | "execution_count": 19, 255 | "metadata": {}, 256 | "outputs": [ 257 | { 258 | "data": { 259 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjYAAADlCAYAAABAg72dAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOzdeVxUVf/A8c+dGWaGYd9EAXfT1NSi0lzTRFNzKcs925en57Eye3qyx0cj09TU9n4tlplmmmuaS5kVbpVbpqZmuQckKCAwLLPe3x+jAyMgKMMifN+vF8Xce+6537ngnS/nnHuOoqqqihBCCCFEDaCp6gCEEEIIIbxFEhshhBBC1BiS2AghhBCixpDERgghhBA1hiQ2QgghhKgxJLERQgghRI0hiU0F+eWXX3jwwQcZNWoUw4cPZ+zYsfz111+lHrdr1y4eeuihMp/n0UcfZfv27eUJFavVyujRo2nRogWJiYmllk9ISKBPnz6MHj26XOcVQojqJikpiSeeeIJRo0Zx//33M3r0aBYuXOjVc1zpPXTfvn3ExcWxYsWKIvtUVeWjjz7iuuuu81aYVy1dVQdQE23fvp0XXniBjz/+mMaNGwOwYcMGRo4cybJly4iMjCzx2BtvvJE333yzzOd6/fXX8fPzK1e8er2eBQsW0KJFizKV7969O+np6axcubJM5d9++22SkpKYPn16ecIUQogKN378ePr06cOoUaMA1/18ypQp7tfecLn3UIBvv/2Wr7/+msDAwGL3P//889SrVw+bzeatMK9a0mLjZU6nk0mTJvGPf/zDndQA9O7dmxtvvJHXX3/9kscrikJAQECZz+fv74+iKFccrxBCiAL79++nQ4cO7tcdOnRgwIABVRiRS5s2bZg9e3aJf8iOGzeOIUOGVHJU1ZO02HjZwYMHOXHiBJ06dSqyr2vXrkybNg2n08nkyZNZs2YN9957L0eOHGH37t306dOHAwcOsHfvXg4fPgxAeno6zz//PBkZGURGRhIcHExCQgIjRozAaDTyySefMHz4cJ588kni4+PddR47dozDhw9z++23M27cOAD+/PNPZsyYQX5+PjabjcGDBzNs2LAyva/c3FwmTpzI0aNHqVu3bpHWnS+++IKVK1ei1+tRFIWJEyfSrFkz1q1bx8qVK7FYLIwePZpOnTrxxBNP8O6777J582YMBgO+vr5Mnjz5ki1ZQghRGaKiovjoo4+YNGkSJpMJgMcee8y9Py0tjRdffJGMjAwcDgePPPIIcXFxAOTk5DBlyhROnDiBqqoMGjSIESNGAKXfQ0tTt27dUveXZShBbSCJjZedOnUKoNgP6cjISLKzs8nIyCA+Pp6jR4+yb98+5syZw4kTJ/jll194+OGH6dmzp/uYl156iXr16jFnzhzMZjODBg2ia9eujBkzBoAjR464y16o8+DBg3zwwQecOXOGHj16MGrUKCIjI8nNzeXJJ5+kXbt22Gw2Bg4cSIcOHWjUqFGp7+udd94hMzOTFStWYLfbeeCBB9Bqte79qqoyf/589Ho927dvZ9KkSXz++ef069ePo0ePFumKCgwMZPHixSiKwooVK5g1axYzZ8687OsthBDeNHHiRMaOHcvGjRvp1asXd911F+3bt3fvf+6552jXrh1PP/00qampDBgwgGuvvZaYmBimTZuGw+Fg0aJF7vv1Nddcw0033VTqPfSTTz5hx44dvPfee1XxtmsU6YqqYrfeeitarZamTZsWaUZ0OBxs3LiRgQMHAq5upx49epRaZ5cuXVAUhTp16hAcHExSUhIADRs2ZNmyZQwfPpyHHnqIM2fOcPDgwTLF+fXXX9O/f380Gg16vZ5evXp57G/WrBn/+Mc/GDlyJLNnz+bAgQOXrK9evXrcd999jBo1ik8//bTU8kIIURk6duzIDz/8wPPPP09iYiL33XcfkyZNAiAlJYVt27Zxzz33AFCnTh1iY2NZu3YtTqeTVatWufdduF+vXr0aKP0eGhERQYMGDSrxndZc0mLjZfXr1wcgNTWV6Ohoj30pKSkEBgYSEhLi3nap8TTp6enY7XaP8kFBQZjN5kvG4O/v7/7eYDC4B5NNnz6drKwsFi5ciFarZfTo0eTn55fpfZ05c6ZIHBdkZ2fz+OOPM3XqVPr06UNiYqJHq9PFTpw4wdixY/n8889p27ate7C1EEJUByaTiSFDhjBkyBB27NjB/fffzyOPPEJGRgbgGqh7YWxjRkYGzZs3Jz09HavVysyZMzEajQBkZWXRsmVL4NL3UID+/fvTv3//ynh7NZ4kNl7WunVrGjZsyI8//likBWbr1q3cfvvtaDRlaygLDQ1Fp9ORnp5O06ZNATh37twVx7Zv3z5Gjhzpbv68nNHzERER7n/UF8dx/PhxzGYzXbt2BcBut1+yr
oMHD+Ln50fbtm3LVF4IISrLiy++yEsvveR+3b59e4KDgzGbze5xLm+99RahoaEAWCwW7HY7vr6+6PV6Jk6c6L632Ww29x+Pl7qHCu+Srigv02g0TJ48mffff58TJ064t2/cuJG9e/cyduzYMtel1Wrp1asXq1atAsBsNrNly5Yrjq1Bgwbs3bsXcLUoXRigXBZ9+/blq6++wul0YrVa+eabb9z7oqKi0Ol07Nu3D6BIjH5+fuTl5aGqKv/6179o2LAhWVlZHD9+vNjyQghRVX766Sf3vQxgx44dKIpCkyZNiIyMpEuXLu57MrgSoe3bt6PRaLjzzjvdXU8A7733Hl9++SVw6XsowJo1a5g2bVoFv7vaQRsfHx9f1UHUNDExMbRu3ZoZM2awbNkyli5dyunTp5k9e7Z7UPGrr77Kpk2bOHToEDabjRtuuIH09HSefPJJUlJS2LFjB4MGDeKWW27hyy+/5NNPP2Xnzp00b94ch8NBXFwcH330EatWreLPP//EZDKxdu1ad52tW7fmgw8+YNeuXfz222/cfPPNdO7cmS+++IIVK1Zw+PBh8vPz2blzJ40aNWLChAkkJSWxd+9eunXr5tGdBXDDDTfw008/8d5775GQkECrVq3YvHkzycnJ9OvXj9DQUGbOnMmPP/6Ioijs3buXPXv2MGjQIEJDQ1mwYAGrV6+mc+fO9OnTB7vdzsyZM/n555/R6/Xs3r2bEydOFOl3FkKIymQwGJg7dy4rV65k+fLl7Ny5k6lTpxITEwO4xjAuXryYBQsWsHz5ctq1a+dune/QoQM//PADc+bM4csvvyQwMJAnnngCjUZzyXto9+7d2bJlC4cOHSqxO+q3337j2Wef5dChQxw6dIiff/6Zfv36ufe/9957fPDBB+7PD5vNVmsn61NUVVWrOghRsqysLPz9/d3dVy+99BImk4nnnnuuiiMTQgghqh/piqrmPv74Y7Zt2wa4kpyEhAS6dOlSxVEJIYQQ1ZO02FRz27Zt480338RgMJCTk8OgQYO4//77qzosIYQQolqSxEYIIYQQNYZ0RQkhhBCixpDERgghhBA1hiQ2QgghhKgxJLERQgghRI0hiY0QQgghagxJbIQQQghRY0hiI4QQQogaQxIbIYQQQtQYktgIIYQQosaQxEYIIYQQNYYkNkIIIYSoMSSxEUIIIUSNofNWRcnJyUyZMoXw8HBSU1OZMGEC9evXL1Ju9+7dxMfHc+utt/Lvf//bvV1VVWbOnElaWhpms5mePXsyePBgb4UnhBC1mt1uZ/78+bz11lssX76cpk2bFikj92FRE3gtsYmPj2fo0KHExcWRkJDAxIkTmTdvnkeZo0eP8uuvv9KiRYsix3/99decPHmSd999F4vFQt++fWnfvj0xMTHeClEIIWqt5cuXc/3115OXl1diGbkPi5rAK11RGRkZbN26la5duwLQqVMndu3aRUpKike5pk2b8vDDD6PTFc2nVq1aRbdu3QAwGAy0b9+etWvXeiM8IYSo9YYNG0ZsbOwly8h9WNQEXklskpOTMZlMGAwGAPR6PYGBgSQlJZW5jqSkJMLCwtyvw8LCSExM9EZ4QgghykDuw6Im8Epio6oqiqIU2V7ctstR3uOFEEKUj9yHxdXGK2NsoqOjycnJwWKxYDAYsNlsZGVlERUVdVl1pKWluV+npaXRqFGjy4ojIyMHp1O9rGMqUliYP2lp5qoOw4PEVDqNRiEkxK+qwxCi0nnjPgwl34ur27/1qibXo6jC1+RK78VeSWxCQkLo3LkzW7ZsIS4ujm3bthEbG0tkZCQbN26kQ4cOBAQEXLKOgQMHsnbtWoYNG4bFYmHHjh3861//uqw4nE61WiU2QLWLByQmIUSBwvdob9yH4dL3Yvm37kmuR1HlvSba+Pj4eG8EEhsbywcffMCOHTvYvn07kyZNIjg4mKeeeoo2bdoQFRWF0+nk5ZdfZteuXZw+fZq//vqLTp06AdCsWTP279/P6tWr+eqrrxg6dCgdO3a8rBjy8qyo1eh3xM/PQG6utarD8CAxlU5RFEwmfVWHIYRX7dmzhzlz5rB//36ysrLQaDQ0adLE4x7tjfswlHwvrm7/1quaXI+iCl+TK70XK6panVKB8klLM1er7DciIoAzZ7KrOgwPElPpNBqFsDD/qg5DiKtWSffi6vZvvarJ9Siq8DW50nuxzDwshBBCiBpDEhshhBBC1BiS2AghhBCixpDERgghhBA1hiQ2QgghhKgxvLYIphBCCCG8R//NenS/7EQNCQWrDU12FkpWJjhVnBEROOtEYrvxZhxt2rqPUdLS0O3fi711G9SICJQzZ9Ad2I/u94M4g0Ow3t7XVV8JfL7fiM8vu7Bf1xZbx06oQcFFyuh278T0zpsoGemoJhNqUDDOkBDUkFCcEXVcX/XqYW93A2i1ngfn5qL7/SCOmAaodep47Vp5xFchtQohhBCi7HJyUJwO1IBA9yb9xg34fvrxJQ/Lu+8hzLPecL/22baZoEfuB8DpH4DG7Pk4uarTYek3gOw586DQchlKehr+/30O44plBWU1GhzNW2Dt2ZucF18uKJuZiWHt6lLfUubHC7AOGFTwftavxf8/z6BNOQ2Ao04k9nbXk7XgC9B4rwNJuqKEEEKISqRkpOP/3DMQEUFY4yjCG0YS0bgeptdneRa0WlALfeCrWi3OkBCcoQUtLkrWOY9DdL/td39/cVIDoNjtoNV4JDX67zYQ2uVmj6QGQHE60f1+CP036zy2O+tFYb+mObYOHbG1uwFHg0Y4CyVkANYu3bD2H+iq5+xZAh59gKD7R7iTGgBtagraUye9mtSAtNgIIYQQlUNVMSxZhH/8BDTn1+Qq/JHuO/dDcv/5FGp4OADmN/8P82tvo2RlouoNYDIVJCQ2G5q0s2D1nLk477F/YrulE7oDv6E9dRJHw0bYW7XG0ao12iN/YvhyBZZ+/T2O0e3bi+bsWQAsfe7A/NJUtMePod+6Gd3unTgjI8FuB50rZXC0bEXGtl1F35/ViibtLJrUFJzhEe5Ylfw89Bs3AOAMCib3ufFgtaH7bR/OmPrluqTFkZmHK1B1nFVSYiqdzDwsRPnIzMPFCx5wOz7bfyrYcNtt5F7T0jUORavFdn0s1n79i45Lqei4bu+O9tgxzK+8iuWeYR6tOd5inDsH/ZZNZE+fjRoZWWI5b8w8LC02QgghhJdpkpNw1osqMo4FXGNLcqbOIPDh+8g5W/Wre2e/8yHOiAjU4JAKO0f+g4+Q/9CjFVZ/YTLGRgghhLgSViumN2YR3LcnpmmTwexKUjQnjhMS1xX/sf8Cm81d3NalGznP/JuMH3dhGTS4QlpGroTjmuYVmtQAlfpepcVGCCGEuBI+PhhWLEX3+yF8du/EuGghuc9PwPfdN9GcPYvvos+w9u2PtU8/AMwzXqvigGsHabERQgghysJuh9zcgteKQt6DBd0r2tN/E/DMGHRH/gQgd8xYd1IjKo8kNkIIISrHqVNVHcEV05w8QfCgvgQ8N9Zje/59D3Lm9Dmy3noPZ0TBhHP5dw4m
[... base64-encoded PNG output truncated; the rendered figure shows the "Original data" scatter panel and the "Seed: 11" panel plotting cost, tr rr, tr ss, and tr rs against steps ...]\n", 260 |     "text/plain": [ 261 |       "
" 262 | ] 263 | }, 264 | "metadata": {}, 265 | "output_type": "display_data" 266 | } 267 | ], 268 | "source": [ 269 | "# Start figure\n", 270 | "fig = plt.figure(figsize=(9, 6))\n", 271 | "# Plotting 1: original data\n", 272 | "ax1 = fig.add_subplot(2, 3, 1)\n", 273 | "ax1.set_title(\"Original data\", pad=20)\n", 274 | "ax1.scatter(A[:], np.zeros(len(A)), c='r')\n", 275 | "ax1.scatter(B[:], np.zeros(len(B)), c='b')\n", 276 | "ax1.set_ylim((-0.1, 0.1))\n", 277 | "ax1.set_xlim((-2, 2))\n", 278 | " \n", 279 | "# Plotting: cost\n", 280 | "ax2 = fig.add_subplot(2, 3, 3)\n", 281 | "title = 'Seed: '+str(11)\n", 282 | "ax2.set_title(title, pad=20)\n", 283 | "cst_history = np.array(cst_history)\n", 284 | "rr_history = np.array(rr_history)\n", 285 | "ss_history = np.array(ss_history)\n", 286 | "rs_history = np.array(rs_history)\n", 287 | "ax2.plot(cst_history[:, 0], cst_history[:, 1], color='blue', marker='', linestyle='-', linewidth=2.5, label=\"cost\")\n", 288 | "ax2.plot(rr_history[:, 0], rr_history[:, 1], color='red', marker='', linestyle='--', linewidth=2.5, label=\"tr rr\")\n", 289 | "ax2.plot(ss_history[:, 0], ss_history[:, 1], color='red', marker='', linestyle=':', linewidth=2.5, label=\"tr ss\")\n", 290 | "ax2.plot(rs_history[:, 0], rs_history[:, 1], color='red', marker='', linestyle='-.', linewidth=2.5, label=\"tr rs\")\n", 291 | "plt.legend(fancybox=True, framealpha=0.5, loc='lower left')\n", 292 | "ax2.set_ylim((0, 1))\n", 293 | "ax2.set_xlabel(\"steps\")" 294 | ] 295 | } 296 | ], 297 | "metadata": { 298 | "kernelspec": { 299 | "display_name": "Python 3", 300 | "language": "python", 301 | "name": "python3" 302 | }, 303 | "language_info": { 304 | "codemirror_mode": { 305 | "name": "ipython", 306 | "version": 3 307 | }, 308 | "file_extension": ".py", 309 | "mimetype": "text/x-python", 310 | "name": "python", 311 | "nbconvert_exporter": "python", 312 | "pygments_lexer": "ipython3", 313 | "version": "3.8.3" 314 | } 315 | }, 316 | "nbformat": 4, 317 | "nbformat_minor": 2 318 | } 319 | -------------------------------------------------------------------------------- /random_embedding_circuits/two_wires_random_unitary_embeddings.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import pennylane as qml 4 | from pennylane import numpy as np 5 | 6 | 7 | # The function `random_gate_sequence` generates a sequence of rotation operators by sampling uniformaly from $\{ RX,RY,RZ\}$. It takes as an input **num_wires** and **num_layers** as an `int` and returns an array of shape (num_layer, 2$*$num_wires). 8 | 9 | def random_gate_sequence(num_wires,num_layers): 10 | 11 | gate_set = [qml.RX, qml.RY, qml.RZ] 12 | 13 | gate_sequence = [] 14 | 15 | for i in range(num_layers): 16 | gate = [np.random.choice(gate_set) for i in range(2*num_wires)] 17 | gate_sequence.append(gate) 18 | 19 | return gate_sequence 20 | 21 | 22 | # The function `random_embedding_circuit` generates a variational circuit that embeds the data $x$ into a quantum state $|x>$. This circuit will act of $N$ wires and it will consist of $L$ layers. In addition to the data input $x$, this function will take the following inputs: 23 | # 24 | # 1. **weights**: an array of shape $(L,N)$. 25 | # 2. **wires**: a list of size $N$. This will determine which wires the circuit acts on. 26 | # 3. **gate_sequence**: an array of shape $(L,2N)$. This will determine what gates are part of the embedding circuit. 
27 | # 28 | # Note that each layer will consists of a random Pauli rotation by amount $x$ on each wire followed by a controlled-Z gate between wires $i$ and $i+1$ for $i = \{1,2,...,N-1\}$ followed by another round of random Pauli rotations by amount $\theta \in $ **weights**. In addition to $L$ layers of this form, the circuit will start off with a $RY(\pi/4)$ acting on each wire and it will end with a $RX(x)$ acting on each wire. This circuit is inspired by [Mclean et al; 2018](https://arxiv.org/pdf/1803.11173.pdf). 29 | 30 | 31 | 32 | def random_embedding_circuit(x,weights,wires,gate_sequence): 33 | 34 | no_qubits = len(wires) 35 | 36 | for w in wires: 37 | qml.RY(np.pi/4,wires=w) 38 | 39 | for params_layer, gates_layer in zip(weights,gate_sequence): 40 | for i in range(no_qubits): 41 | gates_layer[i](x,wires=wires[i]) 42 | 43 | for i in range(no_qubits - 1): 44 | qml.CZ(wires=wires[i:i+2]) 45 | 46 | for i in range(no_qubits): 47 | gates_layer[no_qubits+i](params_layer[i],wires=wires[i]) 48 | 49 | for i in range(no_qubits - 1): 50 | qml.CZ(wires=wires[i:i+2]) 51 | 52 | for w in wires: 53 | qml.RX(x,wires=w) 54 | 55 | 56 | 57 | -------------------------------------------------------------------------------- /risk_function/2d_data/.ipynb_checkpoints/risk_function-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import pennylane as qml\n", 10 | "from pennylane import numpy as np\n", 11 | "from embeddings_circuit import generate_data, generate_grid_data, embedding_circuit\n", 12 | "import matplotlib.pyplot as plt\n", 13 | "import seaborn as sns\n", 14 | "sns.set(context='notebook', font='serif')" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": 2, 20 | "metadata": {}, 21 | "outputs": [], 22 | "source": [ 23 | "CSWAP = np.array([[1, 0, 0, 0, 0, 0, 0, 0],\n", 24 | " [0, 1, 0, 0, 0, 0, 0, 0],\n", 25 | " [0, 0, 1, 0, 0, 0, 0, 0],\n", 26 | " [0, 0, 0, 1, 0, 0, 0, 0],\n", 27 | " [0, 0, 0, 0, 1, 0, 0, 0],\n", 28 | " [0, 0, 0, 0, 0, 0, 1, 0],\n", 29 | " [0, 0, 0, 0, 0, 1, 0, 0],\n", 30 | " [0, 0, 0, 0, 0, 0, 0, 1]])" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": 3, 36 | "metadata": {}, 37 | "outputs": [], 38 | "source": [ 39 | "def featmap(x,weights,wires):\n", 40 | " \"\"\"Wrapper for feature map to define specific keyword arguments.\"\"\"\n", 41 | " return embedding_circuit(x,weights,wires)" 42 | ] 43 | }, 44 | { 45 | "cell_type": "code", 46 | "execution_count": 4, 47 | "metadata": {}, 48 | "outputs": [], 49 | "source": [ 50 | "margin = 0.1 # choose 0.1 \n", 51 | "data_size = 40 # chose and 40 for random data\n", 52 | "n_layers = 1 # number of layers for featuremap, if applicable\n", 53 | "n_inp = 1 # number of wires that feature map acts on\n", 54 | "n_all = 2*n_inp + 1\n", 55 | "\n", 56 | "pennylane_dev = 'default.qubit'\n", 57 | "dev = qml.device(pennylane_dev, wires=n_all) # Device to ||^2 using the SWAP trick." 
58 | ] 59 | }, 60 | { 61 | "cell_type": "code", 62 | "execution_count": 5, 63 | "metadata": {}, 64 | "outputs": [], 65 | "source": [ 66 | "np.random.seed(137)\n", 67 | "X, Y = generate_data(data_size,margin)" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": 6, 73 | "metadata": {}, 74 | "outputs": [], 75 | "source": [ 76 | "# Divide inputs into classes\n", 77 | "A = []\n", 78 | "B = []\n", 79 | "for i in range(len(Y)):\n", 80 | " if Y[i] == 1:\n", 81 | " B.append(X[i])\n", 82 | " else:\n", 83 | " A.append(X[i])" 84 | ] 85 | }, 86 | { 87 | "cell_type": "code", 88 | "execution_count": 7, 89 | "metadata": {}, 90 | "outputs": [], 91 | "source": [ 92 | "@qml.qnode(dev, cache=True)\n", 93 | "def circuit(weights, x1=None, x2=None):\n", 94 | "\n", 95 | " # Load the two inputs into two different registers\n", 96 | " featmap(x1,weights, range(1, n_inp+1))\n", 97 | " featmap(x2,weights, range(n_inp+1, 2*n_inp+1))\n", 98 | "\n", 99 | " # Do a SWAP test\n", 100 | " qml.Hadamard(wires=0)\n", 101 | " for k in range(n_inp):\n", 102 | " qml.QubitUnitary(CSWAP, wires=[0, k+1, n_inp+k+1])\n", 103 | " qml.Hadamard(wires=0)\n", 104 | "\n", 105 | " # Measure overlap by checking ancilla\n", 106 | " return qml.expval(qml.PauliZ(0))\n", 107 | "\n", 108 | "def tr_rr(weights, A=None):\n", 109 | " # Compute intra-class overlap A\n", 110 | " tr_rr = 0\n", 111 | " for a1 in A:\n", 112 | " for a2 in A:\n", 113 | " tr_rr += circuit(weights, x1=a1, x2=a2)\n", 114 | " tr_rr = tr_rr / len(A)**2\n", 115 | " return tr_rr\n", 116 | "\n", 117 | "def tr_ss(weights, B=None):\n", 118 | " # Compute intra-class overlap B\n", 119 | " tr_ss = 0\n", 120 | " for b1 in B:\n", 121 | " for b2 in B:\n", 122 | " tr_ss += circuit(weights, x1=b1, x2=b2)\n", 123 | " tr_ss = tr_ss/len(B)**2\n", 124 | " return tr_ss\n", 125 | "\n", 126 | "def tr_rs(weights, A=None, B=None):\n", 127 | " # Compute inter-class overlap A-B\n", 128 | " tr_rs = 0\n", 129 | " for a in A:\n", 130 | " for b in B:\n", 131 | " tr_rs += circuit(weights, x1=a, x2=b)\n", 132 | " tr_rs = tr_rs/(len(A)*len(B))\n", 133 | " return tr_rs\n", 134 | "\n", 135 | "def cost(weights, A=None, B=None):\n", 136 | "\n", 137 | " # Fidelity cost,\n", 138 | " rr = tr_rr(weights, A=A)\n", 139 | " ss = tr_ss(weights, B=B)\n", 140 | " rs = tr_rs(weights, A=A, B=B)\n", 141 | " distance = - rs + 0.5 * (ss + rr)\n", 142 | " return 1 - distance # min is 0" 143 | ] 144 | }, 145 | { 146 | "cell_type": "code", 147 | "execution_count": 15, 148 | "metadata": {}, 149 | "outputs": [], 150 | "source": [ 151 | "def classifier(x_new,weights, A=None, B=None):\n", 152 | " \n", 153 | " overlap_A = 0\n", 154 | " for a in A:\n", 155 | " overlap_A += circuit(weights, x1=x_new, x2=a)\n", 156 | " overlap_A = overlap_A/len(A)\n", 157 | " \n", 158 | " overlap_B = 0\n", 159 | " for b in B:\n", 160 | " overlap_B += circuit(weights, x1=x_new, x2=b)\n", 161 | " overlap_B = overlap_B/len(B)\n", 162 | " \n", 163 | " #return np.tanh(len(A)*(overlap_B-overlap_A)) \n", 164 | " \n", 165 | " if overlap_A > overlap_B:\n", 166 | " return -1\n", 167 | " elif overlap_A < overlap_B:\n", 168 | " return 1\n", 169 | " else:\n", 170 | " return 0\n", 171 | "\n", 172 | "def risk(weights,A=None,B=None):\n", 173 | " \n", 174 | " dataset_size = len(A)+len(B)\n", 175 | " I = 0\n", 176 | " \n", 177 | " for a in A:\n", 178 | " I = I - classifier(a,weights,A,B)\n", 179 | " for b in B:\n", 180 | " I = I + classifier(b,weights,A,B)\n", 181 | " \n", 182 | " I = I/dataset_size\n", 183 | " \n", 184 | " return 0.5 - 0.5*I\n", 185 | 
"\n", 186 | "\n", 187 | "# We now define 'smooth' and 'differentiable' version\n", 188 | "# of risk function. We do this by replacing \n", 189 | "# sign(fidelity) with tanh(fidelity).\n", 190 | "\n", 191 | "def smooth_classifier(x_new,weights, A=None, B=None):\n", 192 | " \n", 193 | " overlap_A = 0\n", 194 | " for a in A:\n", 195 | " overlap_A += circuit(weights, x1=x_new, x2=a)\n", 196 | " overlap_A = overlap_A/len(A)\n", 197 | " \n", 198 | " overlap_B = 0\n", 199 | " for b in B:\n", 200 | " overlap_B += circuit(weights, x1=x_new, x2=b)\n", 201 | " overlap_B = overlap_B/len(B)\n", 202 | " \n", 203 | " return np.tanh(len(A)*(overlap_B-overlap_A))\n", 204 | "\n", 205 | "def smooth_risk(weights,A=None,B=None):\n", 206 | " \n", 207 | " dataset_size = len(A)+len(B)\n", 208 | " I = 0\n", 209 | " \n", 210 | " for a in A:\n", 211 | " I = I - smooth_classifier(a,weights,A,B)\n", 212 | " for b in B:\n", 213 | " I = I + smooth_classifier(b,weights,A,B)\n", 214 | " \n", 215 | " I = I/dataset_size\n", 216 | " \n", 217 | " return 0.5 - 0.5*I" 218 | ] 219 | }, 220 | { 221 | "cell_type": "code", 222 | "execution_count": 9, 223 | "metadata": {}, 224 | "outputs": [], 225 | "source": [ 226 | "th = np.linspace(0, 2.0*np.pi, 50)\n", 227 | "\n", 228 | "cost_th = np.array([cost([[th_]], A=A, B=B) for th_ in th])\n", 229 | "risk_th = np.array([risk([[th_]], A=A, B=B) for th_ in th])" 230 | ] 231 | }, 232 | { 233 | "cell_type": "code", 234 | "execution_count": null, 235 | "metadata": {}, 236 | "outputs": [], 237 | "source": [ 238 | "fig = plt.figure(figsize=(12, 6))\n", 239 | "# Plotting 1: original data\n", 240 | "ax1 = fig.add_subplot(2, 3, 1)\n", 241 | "ax1.set_title(\"Data set 1\", pad=20)\n", 242 | "A = np.array(A)\n", 243 | "B = np.array(B)\n", 244 | "ax1.scatter(A[:, 0], A[:, 1], c='r')\n", 245 | "ax1.scatter(B[:, 0], B[:, 1], c='b')\n", 246 | "ax1.set_ylim((-0.1-0.5*np.pi, 0.1+0.5*np.pi))\n", 247 | "ax1.set_xlim((-0.1-0.5*np.pi, 0.1+0.5*np.pi))\n", 248 | "\n", 249 | "# Plotting the HS cost\n", 250 | "ax2 = fig.add_subplot(2, 3, 2)\n", 251 | "ax2.set_title(\"HS cost function\", pad=20)\n", 252 | "ax2.plot(th, cost_th,color='green', marker='', linestyle='-', linewidth=2.5)\n", 253 | "ax2.set_ylim((-0.1, 1.1))\n", 254 | "ax2.set_xlim((0, 2.0*np.pi))\n", 255 | "ax2.set_xlabel(\"theta\")\n", 256 | "\n", 257 | "# Plotting the risk function\n", 258 | "ax3 = fig.add_subplot(2, 3, 3)\n", 259 | "ax3.set_title(\"Risk function\", pad=20)\n", 260 | "ax3.plot(th, risk_th,color='green', marker='', linestyle='-', linewidth=2.5)\n", 261 | "ax3.set_ylim((-0.1, 1.1))\n", 262 | "ax3.set_xlim((0, 2.0*np.pi))\n", 263 | "ax3.set_xlabel(\"theta\")" 264 | ] 265 | } 266 | ], 267 | "metadata": { 268 | "kernelspec": { 269 | "display_name": "Python 3", 270 | "language": "python", 271 | "name": "python3" 272 | }, 273 | "language_info": { 274 | "codemirror_mode": { 275 | "name": "ipython", 276 | "version": 3 277 | }, 278 | "file_extension": ".py", 279 | "mimetype": "text/x-python", 280 | "name": "python", 281 | "nbconvert_exporter": "python", 282 | "pygments_lexer": "ipython3", 283 | "version": "3.8.3" 284 | } 285 | }, 286 | "nbformat": 4, 287 | "nbformat_minor": 4 288 | } 289 | -------------------------------------------------------------------------------- /risk_function/2d_data/embeddings_circuit.py: -------------------------------------------------------------------------------- 1 | import pennylane as qml 2 | from pennylane import numpy as np 3 | 4 | 5 | # This function generates a 2-dimensional data set. 
6 | # The argument 'size' determines the number of 7 | # data points generated. 8 | # The argument 'margin' determines the gap between 9 | # the data points of different classes. 10 | # For i = 1,2, we transform the x_i to x_i + margin * sign(x_i). 11 | 12 | def generate_data(size,margin): 13 | X = [] 14 | Y = [] 15 | for i in range(size): 16 | x1 = 2.0*np.random.random() - 1.0 17 | x2 = 2.0*np.random.random() - 1.0 18 | y = x1*x2 19 | 20 | 21 | if x1 < 0: 22 | x1 = x1 - margin 23 | else: 24 | x1 = x1 + margin 25 | if x2 < 0: 26 | x2 = x2 - margin 27 | else: 28 | x2 = x2 + margin 29 | 30 | X.append([x1,x2]) 31 | if y>0: 32 | Y.append(1) 33 | else: 34 | Y.append(-1) 35 | 36 | return X, Y 37 | 38 | 39 | 40 | def embedding_circuit(x,weights,wires): 41 | 42 | no_qubits = len(wires) 43 | no_layers = len(weights) 44 | 45 | for l in range(no_layers): 46 | p = l%2 47 | xp = x[p] 48 | params = weights[l] 49 | 50 | for i in range(no_qubits): 51 | qml.RX(xp,wires=wires[i]) 52 | 53 | for i in range(no_qubits): 54 | qml.RY(params[i],wires=wires[i]) 55 | 56 | if no_layers%2 == 0: 57 | xp = x[0]*x[1] 58 | elif no_layers%2 == 1: 59 | xp = x[1] 60 | for w in range(no_qubits): 61 | qml.RX(xp,wires=wires[w]) 62 | 63 | 64 | # In[ ]: 65 | 66 | 67 | 68 | 69 | -------------------------------------------------------------------------------- /risk_function/2d_data/risk_function.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import pennylane as qml\n", 10 | "from pennylane import numpy as np\n", 11 | "from embeddings_circuit import generate_data, generate_grid_data, embedding_circuit\n", 12 | "import matplotlib.pyplot as plt\n", 13 | "import seaborn as sns\n", 14 | "sns.set(context='notebook', font='serif')" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": 2, 20 | "metadata": {}, 21 | "outputs": [], 22 | "source": [ 23 | "CSWAP = np.array([[1, 0, 0, 0, 0, 0, 0, 0],\n", 24 | " [0, 1, 0, 0, 0, 0, 0, 0],\n", 25 | " [0, 0, 1, 0, 0, 0, 0, 0],\n", 26 | " [0, 0, 0, 1, 0, 0, 0, 0],\n", 27 | " [0, 0, 0, 0, 1, 0, 0, 0],\n", 28 | " [0, 0, 0, 0, 0, 0, 1, 0],\n", 29 | " [0, 0, 0, 0, 0, 1, 0, 0],\n", 30 | " [0, 0, 0, 0, 0, 0, 0, 1]])" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": 3, 36 | "metadata": {}, 37 | "outputs": [], 38 | "source": [ 39 | "def featmap(x,weights,wires):\n", 40 | " \"\"\"Wrapper for feature map to define specific keyword arguments.\"\"\"\n", 41 | " return embedding_circuit(x,weights,wires)" 42 | ] 43 | }, 44 | { 45 | "cell_type": "code", 46 | "execution_count": 4, 47 | "metadata": {}, 48 | "outputs": [], 49 | "source": [ 50 | "margin = 0.1 # choose 0.1 \n", 51 | "data_size = 40 # chose and 40 for random data\n", 52 | "n_layers = 1 # number of layers for featuremap, if applicable\n", 53 | "n_inp = 1 # number of wires that feature map acts on\n", 54 | "n_all = 2*n_inp + 1\n", 55 | "\n", 56 | "pennylane_dev = 'default.qubit'\n", 57 | "dev = qml.device(pennylane_dev, wires=n_all) # Device to ||^2 using the SWAP trick." 
58 | ] 59 | }, 60 | { 61 | "cell_type": "code", 62 | "execution_count": 5, 63 | "metadata": {}, 64 | "outputs": [], 65 | "source": [ 66 | "np.random.seed(137)\n", 67 | "X, Y = generate_data(data_size,margin)" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": 6, 73 | "metadata": {}, 74 | "outputs": [], 75 | "source": [ 76 | "# Divide inputs into classes\n", 77 | "A = []\n", 78 | "B = []\n", 79 | "for i in range(len(Y)):\n", 80 | " if Y[i] == 1:\n", 81 | " B.append(X[i])\n", 82 | " else:\n", 83 | " A.append(X[i])" 84 | ] 85 | }, 86 | { 87 | "cell_type": "code", 88 | "execution_count": 7, 89 | "metadata": {}, 90 | "outputs": [], 91 | "source": [ 92 | "@qml.qnode(dev, cache=True)\n", 93 | "def circuit(weights, x1=None, x2=None):\n", 94 | "\n", 95 | " # Load the two inputs into two different registers\n", 96 | " featmap(x1,weights, range(1, n_inp+1))\n", 97 | " featmap(x2,weights, range(n_inp+1, 2*n_inp+1))\n", 98 | "\n", 99 | " # Do a SWAP test\n", 100 | " qml.Hadamard(wires=0)\n", 101 | " for k in range(n_inp):\n", 102 | " qml.QubitUnitary(CSWAP, wires=[0, k+1, n_inp+k+1])\n", 103 | " qml.Hadamard(wires=0)\n", 104 | "\n", 105 | " # Measure overlap by checking ancilla\n", 106 | " return qml.expval(qml.PauliZ(0))\n", 107 | "\n", 108 | "def tr_rr(weights, A=None):\n", 109 | " # Compute intra-class overlap A\n", 110 | " tr_rr = 0\n", 111 | " for a1 in A:\n", 112 | " for a2 in A:\n", 113 | " tr_rr += circuit(weights, x1=a1, x2=a2)\n", 114 | " tr_rr = tr_rr / len(A)**2\n", 115 | " return tr_rr\n", 116 | "\n", 117 | "def tr_ss(weights, B=None):\n", 118 | " # Compute intra-class overlap B\n", 119 | " tr_ss = 0\n", 120 | " for b1 in B:\n", 121 | " for b2 in B:\n", 122 | " tr_ss += circuit(weights, x1=b1, x2=b2)\n", 123 | " tr_ss = tr_ss/len(B)**2\n", 124 | " return tr_ss\n", 125 | "\n", 126 | "def tr_rs(weights, A=None, B=None):\n", 127 | " # Compute inter-class overlap A-B\n", 128 | " tr_rs = 0\n", 129 | " for a in A:\n", 130 | " for b in B:\n", 131 | " tr_rs += circuit(weights, x1=a, x2=b)\n", 132 | " tr_rs = tr_rs/(len(A)*len(B))\n", 133 | " return tr_rs\n", 134 | "\n", 135 | "def cost(weights, A=None, B=None):\n", 136 | "\n", 137 | " # Fidelity cost,\n", 138 | " rr = tr_rr(weights, A=A)\n", 139 | " ss = tr_ss(weights, B=B)\n", 140 | " rs = tr_rs(weights, A=A, B=B)\n", 141 | " distance = - rs + 0.5 * (ss + rr)\n", 142 | " return 1 - distance # min is 0" 143 | ] 144 | }, 145 | { 146 | "cell_type": "code", 147 | "execution_count": 15, 148 | "metadata": {}, 149 | "outputs": [], 150 | "source": [ 151 | "def classifier(x_new,weights, A=None, B=None):\n", 152 | " \n", 153 | " overlap_A = 0\n", 154 | " for a in A:\n", 155 | " overlap_A += circuit(weights, x1=x_new, x2=a)\n", 156 | " overlap_A = overlap_A/len(A)\n", 157 | " \n", 158 | " overlap_B = 0\n", 159 | " for b in B:\n", 160 | " overlap_B += circuit(weights, x1=x_new, x2=b)\n", 161 | " overlap_B = overlap_B/len(B)\n", 162 | " \n", 163 | " #return np.tanh(len(A)*(overlap_B-overlap_A)) \n", 164 | " \n", 165 | " if overlap_A > overlap_B:\n", 166 | " return -1\n", 167 | " elif overlap_A < overlap_B:\n", 168 | " return 1\n", 169 | " else:\n", 170 | " return 0\n", 171 | "\n", 172 | "def risk(weights,A=None,B=None):\n", 173 | " \n", 174 | " dataset_size = len(A)+len(B)\n", 175 | " I = 0\n", 176 | " \n", 177 | " for a in A:\n", 178 | " I = I - classifier(a,weights,A,B)\n", 179 | " for b in B:\n", 180 | " I = I + classifier(b,weights,A,B)\n", 181 | " \n", 182 | " I = I/dataset_size\n", 183 | " \n", 184 | " return 0.5 - 0.5*I\n", 185 | 
"\n", 186 | "\n", 187 | "# We now define 'smooth' and 'differentiable' version\n", 188 | "# of risk function. We do this by replacing \n", 189 | "# sign(fidelity) with tanh(fidelity).\n", 190 | "\n", 191 | "def smooth_classifier(x_new,weights, A=None, B=None):\n", 192 | " \n", 193 | " overlap_A = 0\n", 194 | " for a in A:\n", 195 | " overlap_A += circuit(weights, x1=x_new, x2=a)\n", 196 | " overlap_A = overlap_A/len(A)\n", 197 | " \n", 198 | " overlap_B = 0\n", 199 | " for b in B:\n", 200 | " overlap_B += circuit(weights, x1=x_new, x2=b)\n", 201 | " overlap_B = overlap_B/len(B)\n", 202 | " \n", 203 | " return np.tanh(len(A)*(overlap_B-overlap_A))\n", 204 | "\n", 205 | "def smooth_risk(weights,A=None,B=None):\n", 206 | " \n", 207 | " dataset_size = len(A)+len(B)\n", 208 | " I = 0\n", 209 | " \n", 210 | " for a in A:\n", 211 | " I = I - smooth_classifier(a,weights,A,B)\n", 212 | " for b in B:\n", 213 | " I = I + smooth_classifier(b,weights,A,B)\n", 214 | " \n", 215 | " I = I/dataset_size\n", 216 | " \n", 217 | " return 0.5 - 0.5*I" 218 | ] 219 | }, 220 | { 221 | "cell_type": "code", 222 | "execution_count": 9, 223 | "metadata": {}, 224 | "outputs": [], 225 | "source": [ 226 | "th = np.linspace(0, 2.0*np.pi, 50)\n", 227 | "\n", 228 | "cost_th = np.array([cost([[th_]], A=A, B=B) for th_ in th])\n", 229 | "risk_th = np.array([risk([[th_]], A=A, B=B) for th_ in th])" 230 | ] 231 | }, 232 | { 233 | "cell_type": "code", 234 | "execution_count": null, 235 | "metadata": {}, 236 | "outputs": [], 237 | "source": [ 238 | "fig = plt.figure(figsize=(12, 6))\n", 239 | "# Plotting 1: original data\n", 240 | "ax1 = fig.add_subplot(2, 3, 1)\n", 241 | "ax1.set_title(\"Data set 1\", pad=20)\n", 242 | "A = np.array(A)\n", 243 | "B = np.array(B)\n", 244 | "ax1.scatter(A[:, 0], A[:, 1], c='r')\n", 245 | "ax1.scatter(B[:, 0], B[:, 1], c='b')\n", 246 | "ax1.set_ylim((-0.1-0.5*np.pi, 0.1+0.5*np.pi))\n", 247 | "ax1.set_xlim((-0.1-0.5*np.pi, 0.1+0.5*np.pi))\n", 248 | "\n", 249 | "# Plotting the HS cost\n", 250 | "ax2 = fig.add_subplot(2, 3, 2)\n", 251 | "ax2.set_title(\"HS cost function\", pad=20)\n", 252 | "ax2.plot(th, cost_th,color='green', marker='', linestyle='-', linewidth=2.5)\n", 253 | "ax2.set_ylim((-0.1, 1.1))\n", 254 | "ax2.set_xlim((0, 2.0*np.pi))\n", 255 | "ax2.set_xlabel(\"theta\")\n", 256 | "\n", 257 | "# Plotting the risk function\n", 258 | "ax3 = fig.add_subplot(2, 3, 3)\n", 259 | "ax3.set_title(\"Risk function\", pad=20)\n", 260 | "ax3.plot(th, risk_th,color='green', marker='', linestyle='-', linewidth=2.5)\n", 261 | "ax3.set_ylim((-0.1, 1.1))\n", 262 | "ax3.set_xlim((0, 2.0*np.pi))\n", 263 | "ax3.set_xlabel(\"theta\")" 264 | ] 265 | } 266 | ], 267 | "metadata": { 268 | "kernelspec": { 269 | "display_name": "Python 3", 270 | "language": "python", 271 | "name": "python3" 272 | }, 273 | "language_info": { 274 | "codemirror_mode": { 275 | "name": "ipython", 276 | "version": 3 277 | }, 278 | "file_extension": ".py", 279 | "mimetype": "text/x-python", 280 | "name": "python", 281 | "nbconvert_exporter": "python", 282 | "pygments_lexer": "ipython3", 283 | "version": "3.8.3" 284 | } 285 | }, 286 | "nbformat": 4, 287 | "nbformat_minor": 4 288 | } 289 | --------------------------------------------------------------------------------