├── README.md ├── Tutorial1 ├── Installing Tensorflow.pdf ├── Tutorial1.pdf ├── Tutorial1.tex ├── Uniandes_logo.jpeg ├── ising_mc.py ├── plot_ising.py └── test_program_tensorflow.py ├── Tutorial2 ├── Tutorial2.pdf ├── Tutorial2.tex ├── Tutorial2_solutions.pdf ├── Uniandes_logo.jpeg ├── bonus_tutorial2_ffnn_2dising.pdf ├── spiral_data.pdf └── tutorial2_spirals.py ├── Tutorial3 ├── Tutorial3.pdf ├── Tutorial3.tex ├── Tutorial3_solutions.pdf ├── Uniandes_logo.jpeg ├── data_tutorial3 │ ├── spinConfigs_Ising_L20.txt │ ├── spinConfigs_Ising_L40.txt │ ├── spinConfigs_Ising_L80.txt │ ├── temperatures_Ising_L20.txt │ ├── temperatures_Ising_L40.txt │ └── temperatures_Ising_L80.txt └── tutorial3_pca_solution.py ├── Tutorial4 ├── Data_ising2d │ └── MC_results │ │ ├── MC_ising2d_L4_Observables.txt │ │ ├── ising2d_L4_T1.0_test.txt │ │ ├── ising2d_L4_T1.0_train.txt │ │ ├── ising2d_L4_T1.254_test.txt │ │ ├── ising2d_L4_T1.254_train.txt │ │ ├── ising2d_L4_T1.508_test.txt │ │ ├── ising2d_L4_T1.508_train.txt │ │ ├── ising2d_L4_T1.762_test.txt │ │ ├── ising2d_L4_T1.762_train.txt │ │ ├── ising2d_L4_T2.016_test.txt │ │ ├── ising2d_L4_T2.016_train.txt │ │ ├── ising2d_L4_T2.269_test.txt │ │ ├── ising2d_L4_T2.269_train.txt │ │ ├── ising2d_L4_T2.524_test.txt │ │ ├── ising2d_L4_T2.524_train.txt │ │ ├── ising2d_L4_T2.778_test.txt │ │ ├── ising2d_L4_T2.778_train.txt │ │ ├── ising2d_L4_T3.032_test.txt │ │ ├── ising2d_L4_T3.032_train.txt │ │ ├── ising2d_L4_T3.286_test.txt │ │ ├── ising2d_L4_T3.286_train.txt │ │ ├── ising2d_L4_T3.54_test.txt │ │ └── ising2d_L4_T3.54_train.txt ├── RBM.pdf ├── Tutorial4.pdf ├── Tutorial4.tex ├── Uniandes_logo.jpeg ├── lattice.pdf ├── rbm.py ├── tutorial4_sample_ising2d.py └── tutorial4_train_ising2d.py └── Tutorial5 ├── Tutorial5.pdf ├── Tutorial5.tex ├── tutorial5_training_vmc_ho.py └── tutorial5_vmc_ho.py /README.md: -------------------------------------------------------------------------------- 1 | # QuLAPENO 2 | Quantum at the University de los Andes; PIQuIL 
Education Programming 3 | 4 | ## Tutorials for a mini course: Machine learning for quantum matter and technology 5 | 6 | * Juan Carrasquilla (Vector Institute, University of Waterloo, Canada), 7 | * Roger Melko (University of Waterloo and Perimeter Institute, Canada), 8 | * Giacomo Torlai (Flatiron Institute, Simons Foundation, New York, EE.UU.), 9 | * Estelle Inack (Perimeter Institute, Canada), 10 | * Lauren Hayward Sierens (University of Waterloo and Perimeter Institute, Canada). 11 | 12 | Universidad de los Andes del 27 al 31 de mayo de 2019. 13 | https://matematicas.uniandes.edu.co/~cursillo_gr/escuela2019/index_en.php 14 | -------------------------------------------------------------------------------- /Tutorial1/Installing Tensorflow.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PIQuIL/QuLAPENO/f646b87512bb8d879b6de93e004d33bbff698fe9/Tutorial1/Installing Tensorflow.pdf -------------------------------------------------------------------------------- /Tutorial1/Tutorial1.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PIQuIL/QuLAPENO/f646b87512bb8d879b6de93e004d33bbff698fe9/Tutorial1/Tutorial1.pdf -------------------------------------------------------------------------------- /Tutorial1/Tutorial1.tex: -------------------------------------------------------------------------------- 1 | \documentclass[letterpaper]{scrartcl} 2 | \usepackage[top=0.88in, bottom=1in, left=1in, right=1in]{geometry} 3 | 4 | \makeatletter 5 | \DeclareOldFontCommand{\tt}{\normalfont\ttfamily}{\mathtt} 6 | \makeatother 7 | 8 | \usepackage{scalefnt} 9 | 10 | 11 | %-------------------------------------------------------------- 12 | % We need this package, part of the KOMA class, for the custom 13 | % headings. 
14 | %-------------------------------------------------------------- 15 | \usepackage{scrpage2} 16 | 17 | 18 | %-------------------------------------------------------------- 19 | % One of many packages you can use if you want to include 20 | % graphics. 21 | %-------------------------------------------------------------- 22 | \usepackage{graphicx} 23 | 24 | %-------------------------------------------------------------- 25 | % The AMS packages are useful but not required. They offer a 26 | % number of nice fonts, environments for formatting multiline 27 | % equations, etc. 28 | %-------------------------------------------------------------- 29 | \usepackage{amsmath} 30 | \usepackage{amsfonts} 31 | \usepackage{amssymb} 32 | \usepackage{amsthm} 33 | 34 | %-------------------------------------------------------------- 35 | % Basic way to set-up the page margins. 36 | %-------------------------------------------------------------- 37 | %\addtolength{\oddsidemargin}{-.2in} 38 | %\addtolength{\evensidemargin}{-.2in} 39 | %\addtolength{\textwidth}{0.45in} 40 | %\addtolength{\topmargin}{-.175in} 41 | %\addtolength{\textheight}{0.75in} 42 | 43 | %-------------------------------------------------------------- 44 | % Comment out the following to add indents and remove space between paragraphs. 45 | %-------------------------------------------------------------- 46 | \usepackage{parskip} 47 | 48 | %-------------------------------------------------------------- 49 | % This package is used to define custom colours. 50 | %-------------------------------------------------------------- 51 | \usepackage{xcolor} 52 | 53 | %-------------------------------------------------------------- 54 | % A few colours for hyperlinks. 
55 | %-------------------------------------------------------------- 56 | \definecolor{plum}{rgb}{0.36078, 0.20784, 0.4} 57 | \definecolor{chameleon}{rgb}{0.30588, 0.60392, 0.023529} 58 | \definecolor{cornflower}{rgb}{0.12549, 0.29020, 0.52941} 59 | \definecolor{scarlet}{rgb}{0.8, 0, 0} 60 | \definecolor{brick}{rgb}{0.64314, 0, 0} 61 | 62 | %-------------------------------------------------------------- 63 | % A command for typesetting and linking an email address. 64 | %-------------------------------------------------------------- 65 | \newcommand{\email}[1]{\href{mailto:#1}{\tt \textcolor{cornflower}{#1}}} 66 | \newcommand{\web}[1]{\href{#1}{\tt \textcolor{cornflower}{#1}}} 67 | 68 | %-------------------------------------------------------------- 69 | % The following declaration includes the hyperref package and 70 | % assigns metadata. If you compile with pdflatex, this data 71 | % will be automatically included in the pdf file. 72 | %-------------------------------------------------------------- 73 | %\usepackage[ 74 | % pdftitle={QFT Tutorial 1},% 75 | % pdfauthor={PSI Tutors},% 76 | % pdfsubject={QFT Tutorial 1},% 77 | % pdfkeywords={PSI}, 78 | % colorlinks=true, 79 | % linkcolor=cornflower, 80 | % citecolor=scarlet, 81 | % urlcolor=chameleon% 82 | %]{hyperref} 83 | 84 | %\setcounter{secnumdepth}{2} % section number depth 85 | %\setcounter{tocdepth}{2} % depth of TOC 86 | 87 | %-------------------------------------------------------------- 88 | % Specify the font used in captions. 89 | %-------------------------------------------------------------- 90 | \setkomafont{captionlabel}{\usekomafont{descriptionlabel}} 91 | 92 | %-------------------------------------------------------------- 93 | % This is where we define the custom title. The image that is 94 | % placed on the left-hand-side of the title, PILogo.pdf in 95 | % this case, should be in the same directory as this file. 
Note 96 | % that you can always use hyperlinks for the Title, Semester, 97 | % and Author fields, below, in case you want to link to a seminar 98 | % web page or a lecturer's email address. 99 | %-------------------------------------------------------------- 100 | 101 | \titlehead{% 102 | \vspace*{-1cm} 103 | \begin{minipage}[b]{4.0cm} 104 | \includegraphics*[height=1.3cm]{Uniandes_logo.jpeg}% 105 | \end{minipage} 106 | \hfill 107 | \begin{minipage}[b]{12cm} 108 | \begin{flushright} 109 | \usekomafont{descriptionlabel} 110 | \large Machine Learning for Quantum Matter and Technology \\ 111 | \normalsize \normalfont 112 | J. Carrasquilla, E. Inack, G. Torlai, R. Melko, L. Hayward Sierens 113 | \end{flushright} 114 | \end{minipage} 115 | \\[-3mm] 116 | \hrule 117 | \vspace{-3mm} 118 | } 119 | % ----------- 120 | 121 | %-------------------------------------------------------------- 122 | % Other useful physic-related packages 123 | %-------------------------------------------------------------- 124 | \usepackage{braket} 125 | % Use \Bra{}, \Ket{} or \Braket{x | \psi} for Dirac notation 126 | 127 | %-------------------------------------------------------------- 128 | % Nice numbering for question parts. 
129 | %-------------------------------------------------------------- 130 | \newcommand{\be}{\begin{equation}} 131 | \newcommand{\ee}{\end{equation}} 132 | \newcommand{\ba}{\begin{eqnarray}} 133 | \newcommand{\ea}{\end{eqnarray}} 134 | 135 | \newcommand{\ssk}{\smallskip} 136 | 137 | \newcommand{\msk}{\medskip} 138 | 139 | \newcommand{\nin}{\noindent} 140 | 141 | \newcommand{\beq}{\begin{equation}} 142 | 143 | \newcommand{\eeq}{\end{equation}} 144 | 145 | \renewcommand{\vec}[1]{{\mathbf{#1}}} 146 | \renewcommand{\labelenumi}{\alph{enumi})} 147 | \renewcommand{\labelenumiii}{\roman{enumiii})} 148 | 149 | %%%%%%%%%%%%% 150 | 151 | \def\be{\begin{eqnarray}} 152 | \def\ee{\end{eqnarray}} 153 | \newcommand{\nn}{\nonumber} 154 | \newcommand\para{\paragraph{}} 155 | \newcommand{\ft}[2]{{\textstyle\frac{#1}{#2}}} 156 | \newcommand{\eqn}[1]{(\ref{#1})} 157 | \newcommand{\pl}[1]{\frac{\partial {\cal L}}{\partial{#1}}} 158 | \newcommand{\ppp}[2]{\frac{\partial {#1}}{\partial {#2}}} 159 | \newcommand{\ph}[1]{\frac{\partial {\cal H}}{\partial{#1}}} 160 | \newcommand{\leftp}[3]{\left.\ppp{#1}{#2}\right|_{#3}} 161 | %\newcommand{\Vec}[2]{\left(\begin{array}{c} {#1} \\ {#2}\end{array}\right)} 162 | \newcommand\vx{\vec{x}} 163 | \newcommand\vy{\vec{y}} 164 | \newcommand\vp{\vec{p}} 165 | \newcommand\vq{\vec{q}} 166 | \newcommand\vk{\vec{k}} 167 | \newcommand\avp{a^{\ }_{\vp}} 168 | \newcommand\advp{a^\dagger_{\vp}} 169 | \newcommand\ad{a^\dagger} 170 | 171 | \newcommand\balpha{\mbox{\boldmath $\alpha$}} 172 | \newcommand\bbeta{\mbox{\boldmath $\beta$}} 173 | \newcommand\bgamma{\mbox{\boldmath $\gamma$}} 174 | \newcommand\bomega{\mbox{\boldmath $\omega$}} 175 | \newcommand\blambda{\mbox{\boldmath $\lambda$}} 176 | \newcommand\bmu{\mbox{\boldmath $\mu$}} 177 | \newcommand\bphi{\mbox{\boldmath $\phi$}} 178 | \newcommand\bzeta{\mbox{\boldmath $\zeta$}} 179 | \newcommand\bsigma{\mbox{\boldmath $\sigma$}} 180 | \newcommand\bepsilon{\mbox{\boldmath $\epsilon$}} 181 | 
\newcommand\btau{\mbox{\boldmath $\tau$}} 182 | \newcommand\beeta{\mbox{\boldmath $\eta$}} 183 | \newcommand\btheta{\mbox{\boldmath $\theta$}} 184 | 185 | \def\norm#1{:\!\!#1\!\!:} 186 | 187 | \def\part{\partial} 188 | 189 | \def\dbox{\hbox{{$\sqcup$}\llap{$\sqcap$}}} 190 | 191 | \def\sla#1{\hbox{{$#1$}\llap{$/$}}} 192 | \def\Dslash{\,\,{\raise.15ex\hbox{/}\mkern-13mu D}} 193 | \def\Dbarslash{\,\,{\raise.15ex\hbox{/}\mkern-12mu {\bar D}}} 194 | \def\delslash{\,\,{\raise.15ex\hbox{/}\mkern-10mu \partial}} 195 | \def\delbarslash{\,\,{\raise.15ex\hbox{/}\mkern-9mu {\bar\partial}}} 196 | \def\pslash{\,\,{\raise.15ex\hbox{/}\mkern-11mu p}} 197 | \def\qslash{\,\,{\raise.15ex\hbox{/}\mkern-9mu q}} 198 | \def\kslash{\,\,{\raise.15ex\hbox{/}\mkern-11mu k}} 199 | \def\eslash{\,\,{\raise.15ex\hbox{/}\mkern-9mu \epsilon}} 200 | \def\calDslash{\,\,{\rais.15ex\hbox{/}\mkern-12mu {\cal D}}} 201 | \newcommand{\slsh}[1]{\,\,{\raise.15ex\hbox{/}\mkern-12mu {#1}}} 202 | 203 | 204 | \newcommand\Bprime{B${}^\prime$} 205 | \newcommand{\sign}{{\rm sign}} 206 | 207 | \newcommand\bx{{\bf x}} 208 | \newcommand\br{{\bf r}} 209 | \newcommand\bF{{\bf F}} 210 | \newcommand\bp{{\bf p}} 211 | \newcommand\bL{{\bf L}} 212 | \newcommand\bR{{\bf R}} 213 | \newcommand\bP{{\bf P}} 214 | \newcommand\bE{{\bf E}} 215 | \newcommand\bB{{\bf B}} 216 | \newcommand\bA{{\bf A}} 217 | \newcommand\bee{{\bf e}} 218 | \newcommand\bte{\tilde{\bf e}} 219 | \def\ket#1{\left| #1 \right\rangle} 220 | \def\bra#1{\left\langle #1 \right|} 221 | \def\vev#1{\left\langle #1 \right\rangle} 222 | 223 | \newcommand\lmn[2]{\Lambda^{#1}_{\ #2}} 224 | \newcommand\mup[2]{\eta^{#1 #2}} 225 | \newcommand\mdown[2]{\eta_{#1 #2}} 226 | \newcommand\deld[2]{\delta^{#1}_{#2}} 227 | \newcommand\df{\Delta_F} 228 | \newcommand\cL{{\cal L}} 229 | %\def\theequation{\thesection.\arabic{equation} 230 | %%%%%%%%% 231 | 232 | %\renewcommand{\ttdefault}{pcr} 233 | 234 | \usepackage{enumitem} 235 | 236 | \begin{document} 237 | 238 | %\scalefont{1.35} 
239 | 240 | \title{Tutorial 1: \\ Monte Carlo Simulation of the 2D Ising Model} 241 | 242 | \date{May 27, 2019} 243 | 244 | 245 | \maketitle 246 | 247 | 248 | In this tutorial, we will study the phase transition in the classical two-dimensional Ising model, with Hamiltonian 249 | \begin{equation*} 250 | H = -J \sum_{\langle ij \rangle} \sigma_i \sigma_j, 251 | \end{equation*} 252 | where $\sigma_i = \pm 1$, $J$ is the coupling strength and $\sum_{\langle ij \rangle}$ denotes a sum over nearest neighbours. 253 | We will consider simulations on a square lattice with periodic boundaries. 254 | In the thermodynamic limit, the critical temperature is known to be ${ T_\text{c} }/{J} \approx 2.269$. 255 | 256 | We will use and modify the two Python programs \texttt{ising{\textunderscore}mc.py} and \texttt{plot{\textunderscore}ising.py} throughout this tutorial 257 | in order to implement Monte Carlo (MC) methods that estimate $T_\text{c}$ and compare with this known exact solution. 258 | %The program \texttt{plot{\textunderscore}ising.py} reads in these measurements and plots observable quantities as a function of temperature $T$. 259 | 260 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 261 | %%%%%%%%%%%%%% Q1 %%%%%%%%%%%%%% 262 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 263 | \section{Monte Carlo algorithm} 264 | Consider the Monte Carlo program \texttt{ising{\textunderscore}mc.py}, which is designed to perform a Monte Carlo simulation (using the single-spin-flip Metropolis algorithm) and record measurements of the system's energy $E$ and magnetization $M$. 265 | 266 | \begin{enumerate}[label=\alph*)] 267 | 268 | %%%%%%%%%%%%%% (a) %%%%%%%%%%%%%% 269 | \item Examine the section of the code that computes the two-dimensional \texttt{neighbours} array, which is used when calculating the system's energy. 
270 | The code is already written such that \texttt{neighbours[i,0]} and \texttt{neighbours[i,1]} 271 | store the lattice location of spin \texttt{i}'s rightward and upward neighbours, respectively. 272 | Modify the code such that it will also store spin \texttt{i}'s leftward neighbour in \texttt{neighbours[i,2]} and its downward neighbour in \texttt{neighbours[i,3]}. 273 | 274 | \textbf{Hint:} Don't forget to consider the periodic boundary conditions. 275 | 276 | %%%%%%%%%%%%%% (b) %%%%%%%%%%%%%% 277 | \item Examine the \texttt{sweep()} function, which proposes a number \texttt{N{\textunderscore}spins} single spin-flip Monte Carlo updates. 278 | Convince yourself that this code is implementing the single-spin-flip Metropolis algorithm. 279 | 280 | %%%%%%%%%%%%%% (c) %%%%%%%%%%%%%% 281 | \item Implement a more efficient way of calculating the energy difference \texttt{deltaE} within the \texttt{sweep()} function. 282 | The given implementation calculates this energy difference by using the \texttt{getEnergy()} function, 283 | which involves iterating a loop \texttt{N{\textunderscore}spins} times. 284 | However, you should be able to calculate \texttt{deltaE} by summing only four terms. 285 | 286 | %\textbf{Hint:} To appreciate the difference in time required for these two different implementations, 287 | %set \texttt{animate = False} and increase the linear size \texttt{L} when you run the two versions of the code. 288 | 289 | %%%%%%%%%%%%%%% (d) %%%%%%%%%%%%%% 290 | %\item Run the code for various values of \texttt{L}, \texttt{J}, \texttt{n{\textunderscore}eqSweeps} and \texttt{n{\textunderscore}measSweeps} 291 | %and explain how each of these parameters affects the resulting animated samples. 292 | 293 | %%%%%%%%%%%%%% (e) %%%%%%%%%%%%%% 294 | \item Run your code with % \texttt{L=4}, \texttt{J=1}, \texttt{T{\textunderscore}list = np.linspace(5.0,0.5,10)}, 295 | \texttt{n{\textunderscore}eqSweeps=1000} and \texttt{n{\textunderscore}measSweeps=10000}. 
296 | The code will generate files in a directory called \texttt{Data} that will store the energy and magnetization 297 | corresponding to each of your sampled system configurations. 298 | (In Question 2, we will analyze and plot the resulting data.) 299 | 300 | \textbf{Hint:} Set \texttt{animate = False} for this part so that the code runs faster. 301 | 302 | \end{enumerate} 303 | 304 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 305 | %%%%%%%%%%%%%% Q2 %%%%%%%%%%%%%% 306 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 307 | \section{Estimating the critical temperature} 308 | 309 | Recall from lecture that the specific heat $C_V$ and susceptibility $\chi$ can be expressed as 310 | \begin{equation*} 311 | C_V = \frac{\langle E^2 \rangle - \langle E \rangle^2}{T^2} \, , \qquad \qquad 312 | \chi = \frac{\langle M^2 \rangle - \langle M \rangle^2}{T}, 313 | \end{equation*} 314 | where $E$ is the energy and $M = \sum_i \sigma_i$ is the magnetization. 315 | For our Monte Carlo calculations on finite lattices, there is no spontaneous symmetry breaking and therefore $\langle M \rangle = 0$ at all $T$. 316 | As a result, we instead examine $\langle | M | \rangle$ and calculate the susceptibility as 317 | \begin{equation*} 318 | \chi = \frac{\langle M^2 \rangle - \langle | M | \rangle^2}{T}. 319 | \end{equation*} 320 | 321 | The quantities $C_V/N$ versus $T$ and $\chi/N$ versus $T$ both diverge at the critical temperature $T_\text{c}$ in the thermodynamic limit $N \to \infty$. 322 | On a finite lattice, these quantities do not diverge but will acquire peaks near $T_\text{c}$. 323 | 324 | \begin{enumerate}[label=\alph*)] 325 | 326 | %%%%%%%%%%%%%% (a) %%%%%%%%%%%%%% 327 | \item Use the code \texttt{plot{\textunderscore}ising.py} to read in the Monte Carlo data from Question 1e 328 | and plot the estimators for $\langle E \rangle/N$ and $\langle | M | \rangle/N$. 329 | Consider the values you find for these estimators in the limit of small $T$; do they match your theoretical expectations? 
330 | 331 | %%%%%%%%%%%%%% (b) %%%%%%%%%%%%%% 332 | \item Modify \texttt{plot{\textunderscore}ising.py} to calculate $C_V$ and $\chi$. 333 | Plot $C_V/N$ and $\chi/N$ versus $T$ and verify that there are peaks in these quantities near $T_\text{c}$. 334 | 335 | %%%%%%%%%%%%%% (c) %%%%%%%%%%%%%% 336 | \item Use \texttt{ising{\textunderscore}mc.py} to generate additional data for higher $L$ and for more temperatures close to $T_\text{c}$. 337 | Modify \texttt{plot{\textunderscore}ising.py} to plot your results for several different values of $L$ and confirm that the peaks in $C_V/N$ and $\chi/N$ 338 | get closer to $T_\text{c}$ as $L$ increases. 339 | 340 | \end{enumerate} 341 | \end{document} 342 | -------------------------------------------------------------------------------- /Tutorial1/Uniandes_logo.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PIQuIL/QuLAPENO/f646b87512bb8d879b6de93e004d33bbff698fe9/Tutorial1/Uniandes_logo.jpeg -------------------------------------------------------------------------------- /Tutorial1/ising_mc.py: -------------------------------------------------------------------------------- 1 | ########## Machine Learning for Quantum Matter and Technology ###################### 2 | ### Juan Carrasquilla, Estelle Inack, Giacomo Torlai, Roger Melko 3 | ### with code from Lauren Hayward Sierens/PSI 4 | ### Tutorial 1: Monte Carlo for the Ising model 5 | ##################################################################################### 6 | 7 | import matplotlib.pyplot as plt 8 | import matplotlib.colors as colors 9 | import numpy as np 10 | import os 11 | import random 12 | import time 13 | 14 | ### Input parameters: ### 15 | T_list = np.linspace(5.0,0.5,10) #temperature list 16 | L = 4 #linear size of the lattice 17 | N_spins = L**2 #total number of spins 18 | J = 1 #coupling parameter 19 | 20 | ### Critical temperature: ### 21 | Tc = 2.0/np.log(1.0 + np.sqrt(2))*J #T/J 
~ 2.269 22 | 23 | ### Monte Carlo parameters: ### 24 | n_eqSweeps = 0 #number of equilibration sweeps 25 | n_measSweeps = 20 #number of measurement sweeps 26 | 27 | ### Parameters needed to show animation of spin configurations: ### 28 | animate = True 29 | bw_cmap = colors.ListedColormap(['black', 'white']) 30 | 31 | ### Create a directory where measured observables will be stored: ### 32 | results_dir = 'Data' 33 | if not(os.path.isdir(results_dir)): 34 | os.mkdir(results_dir) 35 | 36 | ### Initially, the spins are in a random state (a high-T phase): ### 37 | spins = np.zeros(N_spins,dtype=np.int) 38 | for i in range(N_spins): 39 | spins[i] = 2*random.randint(0,1) - 1 #either +1 or -1 40 | 41 | ### Store each spin's four nearest neighbours in a neighbours array (using periodic boundary conditions): ### 42 | neighbours = np.zeros((N_spins,4),dtype=np.int) 43 | for i in range(N_spins): 44 | #neighbour to the right: 45 | neighbours[i,0]=i+1 46 | if i%L==(L-1): 47 | neighbours[i,0]=i+1-L 48 | 49 | #upwards neighbour: 50 | neighbours[i,1]=i+L 51 | if i >= (N_spins-L): 52 | neighbours[i,1]=i+L-N_spins 53 | 54 | # *********************************************************************** # 55 | # ********** 1a) FILL IN CODE TO CALCULATE *********** # 56 | # ********** THE NEIGHBOUR TO THE LEFT (IN neighbours[i,2]) *********** # 57 | # ********** AND THE DOWNWARDS NEIGHBOUR (IN neighbours[i,3]) *********** # 58 | # *********************************************************************** # 59 | #end of for loop 60 | 61 | ### Function to calculate the total energy ### 62 | def getEnergy(): 63 | currEnergy = 0 64 | for i in range(N_spins): 65 | currEnergy += -J*( spins[i]*spins[neighbours[i,0]] + spins[i]*spins[neighbours[i,1]] ) 66 | return currEnergy 67 | #end of getEnergy() function 68 | 69 | ### Function to calculate the total magnetization ### 70 | def getMag(): 71 | return np.sum(spins) 72 | #end of getMag() function 73 | 74 | ### Function to perform one Monte Carlo sweep 
### 75 | def sweep(): 76 | #do one sweep (N_spins single-spin flips): 77 | for i in range(N_spins): 78 | #randomly choose which spin to consider flipping: 79 | site = random.randint(0,N_spins-1) 80 | 81 | #calculate the change in energy for the proposed move: 82 | E_init = getEnergy() 83 | spins[site] = -spins[site] #flip the spin before calculating E_final 84 | E_final = getEnergy() 85 | spins[site] = -spins[site] #flip the spin back since we might not accept the move 86 | deltaE = E_final - E_init 87 | # *********************************************************************** # 88 | # ************ 1c) REPLACE THE ABOVE FIVE LINES. *********** # 89 | # ************ FILL IN CODE TO CALCULATE THE CHANGE IN ENERGY *********** # 90 | # ************ USING ONLY THE FOUR NEAREST NEIGHBOURS *********** # 91 | # *********************************************************************** # 92 | 93 | if (deltaE <= 0) or (random.random() < np.exp(-deltaE/T)): #Metropolis algorithm 94 | #flip the spin: 95 | spins[site] = -spins[site] 96 | #end loop over i 97 | #end of sweep() function 98 | 99 | ################################################################################# 100 | ########## Loop over all temperatures and perform Monte Carlo updates: ########## 101 | ################################################################################# 102 | t1 = time.clock() #for timing 103 | for T in T_list: 104 | print('\nT = %f' %T) 105 | 106 | #open a file where observables will be recorded: 107 | fileName = '%s/ising2d_L%d_T%.4f.txt' %(results_dir,L,T) 108 | file_observables = open(fileName, 'w') 109 | 110 | #equilibration sweeps: 111 | for i in range(n_eqSweeps): 112 | sweep() 113 | 114 | #start doing measurements: 115 | for i in range(n_measSweeps): 116 | sweep() 117 | 118 | #Write the observables to file: 119 | energy = getEnergy() 120 | mag = getMag() 121 | file_observables.write('%d \t %.8f \t %.8f \n' %(i, energy, mag)) 122 | 123 | if animate: 124 | #Display the current spin 
configuration: 125 | plt.clf() 126 | plt.imshow( spins.reshape((L,L)), cmap=bw_cmap, norm=colors.BoundaryNorm([-1,0,1], bw_cmap.N), interpolation='nearest' ) 127 | plt.xticks([]) 128 | plt.yticks([]) 129 | plt.title('%d x %d Ising model, T = %.3f' %(L,L,T)) 130 | plt.pause(0.01) 131 | #end if 132 | 133 | if (i+1)%1000==0: 134 | print(' %d sweeps complete' %(i+1)) 135 | #end loop over i 136 | 137 | file_observables.close() 138 | #end loop over temperature 139 | 140 | t2 = time.clock() 141 | print('Elapsed time: %f seconds' %(t2-t1)) 142 | -------------------------------------------------------------------------------- /Tutorial1/plot_ising.py: -------------------------------------------------------------------------------- 1 | ########## Machine Learning for Quantum Matter and Technology ###################### 2 | ### Juan Carrasquilla, Estelle Inack, Giacomo Torlai, Roger Melko 3 | ### with code from Lauren Hayward Sierens/PSI 4 | ### Tutorial 1: Monte Carlo for the Ising model 5 | ##################################################################################### 6 | 7 | import matplotlib.pyplot as plt 8 | import numpy as np 9 | 10 | ### Input parameters (these should be the same as in ising_mc.py): ### 11 | T_list = np.linspace(5.0,0.5,10) #temperature list 12 | L = 4 #linear size of the lattice 13 | N_spins = L**2 #total number of spins 14 | J = 1 #coupling parameter 15 | 16 | ### Critical temperature: ### 17 | Tc = 2.0/np.log(1.0 + np.sqrt(2))*J 18 | 19 | ### Observables to plot as a function of temperature: ### 20 | energy = np.zeros(len(T_list)) 21 | mag = np.zeros(len(T_list)) 22 | specHeat = np.zeros(len(T_list)) 23 | susc = np.zeros(len(T_list)) 24 | 25 | ### Loop to read in data for each temperature: ### 26 | for (iT,T) in enumerate(T_list): 27 | file = open('Data/ising2d_L%d_T%.4f.txt' %(L,T), 'r') 28 | data = np.loadtxt( file ) 29 | 30 | E = data[:,1] 31 | M = abs(data[:,2]) 32 | 33 | energy[iT] = np.mean(E) 34 | mag[iT] = np.mean(M) 35 | 36 | # 
*********************************************************************** # 37 | # *********** 2b) FILL IN CODE TO CALCULATE THE SPECIFIC HEAT *********** # 38 | # *********** AND SUSCEPTIBILITY *********** # 39 | # *********************************************************************** # 40 | specHeat[iT] = 0 41 | susc[iT] = 0 42 | #end loop over T 43 | 44 | plt.figure(figsize=(8,6)) 45 | 46 | plt.subplot(221) 47 | plt.axvline(x=Tc, color='k', linestyle='--') 48 | plt.plot(T_list, energy/(1.0*N_spins), 'o-') 49 | plt.xlabel('$T$') 50 | plt.ylabel('$/N$') 51 | 52 | plt.subplot(222) 53 | plt.axvline(x=Tc, color='k', linestyle='--') 54 | plt.plot(T_list, mag/(1.0*N_spins), 'o-') 55 | plt.xlabel('$T$') 56 | plt.ylabel('$<|M|>/N$') 57 | 58 | plt.subplot(223) 59 | plt.axvline(x=Tc, color='k', linestyle='--') 60 | plt.plot(T_list, specHeat/(1.0*N_spins), 'o-') 61 | plt.xlabel('$T$') 62 | plt.ylabel('$C_V/N$') 63 | 64 | plt.subplot(224) 65 | plt.axvline(x=Tc, color='k', linestyle='--') 66 | plt.plot(T_list, susc/(1.0*N_spins), 'o-') 67 | plt.xlabel('$T$') 68 | plt.ylabel('$\chi/N$') 69 | 70 | plt.suptitle('%d x %d Ising model' %(L,L)) 71 | 72 | plt.show() 73 | -------------------------------------------------------------------------------- /Tutorial1/test_program_tensorflow.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | mnist = tf.keras.datasets.mnist 3 | 4 | (x_train, y_train),(x_test, y_test) = mnist.load_data() 5 | x_train, x_test = x_train / 255.0, x_test / 255.0 6 | 7 | model = tf.keras.models.Sequential([ 8 | tf.keras.layers.Flatten(input_shape=(28, 28)), 9 | tf.keras.layers.Dense(512, activation=tf.nn.relu), 10 | tf.keras.layers.Dropout(0.2), 11 | tf.keras.layers.Dense(10, activation=tf.nn.softmax) 12 | ]) 13 | model.compile(optimizer='adam', 14 | loss='sparse_categorical_crossentropy', 15 | metrics=['accuracy']) 16 | 17 | model.fit(x_train, y_train, epochs=5) 18 | model.evaluate(x_test, y_test) 
-------------------------------------------------------------------------------- /Tutorial2/Tutorial2.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PIQuIL/QuLAPENO/f646b87512bb8d879b6de93e004d33bbff698fe9/Tutorial2/Tutorial2.pdf -------------------------------------------------------------------------------- /Tutorial2/Tutorial2.tex: -------------------------------------------------------------------------------- 1 | \documentclass[letterpaper]{scrartcl} 2 | \usepackage[top=0.88in, bottom=1in, left=1in, right=1in]{geometry} 3 | 4 | \makeatletter 5 | \DeclareOldFontCommand{\tt}{\normalfont\ttfamily}{\mathtt} 6 | \makeatother 7 | 8 | \usepackage{scalefnt} 9 | \usepackage{bm} 10 | \usepackage{cancel} 11 | 12 | %-------------------------------------------------------------- 13 | % We need this package, part of the KOMA class, for the custom 14 | % headings. 15 | %-------------------------------------------------------------- 16 | \usepackage{scrpage2} 17 | 18 | 19 | %-------------------------------------------------------------- 20 | % One of many packages you can use if you want to include 21 | % graphics. 22 | %-------------------------------------------------------------- 23 | \usepackage{graphicx} 24 | 25 | %-------------------------------------------------------------- 26 | % The AMS packages are useful but not required. They offer a 27 | % number of nice fonts, environments for formatting multiline 28 | % equations, etc. 29 | %-------------------------------------------------------------- 30 | \usepackage{amsmath} 31 | \usepackage{amsfonts} 32 | \usepackage{amssymb} 33 | \usepackage{amsthm} 34 | 35 | %-------------------------------------------------------------- 36 | % Basic way to set-up the page margins. 
37 | %-------------------------------------------------------------- 38 | %\addtolength{\oddsidemargin}{-.2in} 39 | %\addtolength{\evensidemargin}{-.2in} 40 | %\addtolength{\textwidth}{0.45in} 41 | %\addtolength{\topmargin}{-.175in} 42 | %\addtolength{\textheight}{0.75in} 43 | 44 | %-------------------------------------------------------------- 45 | % Comment out the following to add indents and remove space between paragraphs. 46 | %-------------------------------------------------------------- 47 | \usepackage{parskip} 48 | 49 | %-------------------------------------------------------------- 50 | % This package is used to define custom colours. 51 | %-------------------------------------------------------------- 52 | \usepackage[usenames,dvipsnames,svgnames,table]{xcolor} 53 | 54 | %-------------------------------------------------------------- 55 | % Package for adding in solutions: 56 | %-------------------------------------------------------------- 57 | \usepackage[nosoln,regf,nolf]{optional} 58 | %\usepackage[soln,regf]{optional} 59 | 60 | %\newcommand{\soln}[1]{\opt{soln}{\\[4pt] \textcolor{JungleGreen}{\textbf{Solution:}} #1}} 61 | \newcommand{\soln}[1]{\opt{soln}{\textcolor{JungleGreen}{\usekomafont{descriptionlabel}{Solution:}} #1}} 62 | 63 | \newcommand{\hint}[1]{{\usekomafont{descriptionlabel}{Hint:}} #1} 64 | \newcommand{\note}[1]{{\usekomafont{descriptionlabel}{Note:}} #1} 65 | \newcommand{\reference}[1]{{\usekomafont{descriptionlabel}{Reference:}} #1} 66 | 67 | %-------------------------------------------------------------- 68 | % A few colours for hyperlinks. 
69 | %-------------------------------------------------------------- 70 | \definecolor{plum}{rgb}{0.36078, 0.20784, 0.4} 71 | \definecolor{chameleon}{rgb}{0.30588, 0.60392, 0.023529} 72 | \definecolor{cornflower}{rgb}{0.12549, 0.29020, 0.52941} 73 | \definecolor{scarlet}{rgb}{0.8, 0, 0} 74 | \definecolor{brick}{rgb}{0.64314, 0, 0} 75 | 76 | %-------------------------------------------------------------- 77 | % A command for typesetting and linking an email address. 78 | %-------------------------------------------------------------- 79 | \newcommand{\email}[1]{\href{mailto:#1}{\tt \textcolor{cornflower}{#1}}} 80 | \newcommand{\web}[1]{\href{#1}{\tt \textcolor{cornflower}{#1}}} 81 | 82 | %-------------------------------------------------------------- 83 | % The following declaration includes the hyperref package and 84 | % assigns metadata. If you compile with pdflatex, this data 85 | % will be automatically included in the pdf file. 86 | %-------------------------------------------------------------- 87 | %\usepackage[ 88 | % pdftitle={QFT Tutorial 1},% 89 | % pdfauthor={PSI Tutors},% 90 | % pdfsubject={QFT Tutorial 1},% 91 | % pdfkeywords={PSI}, 92 | % colorlinks=true, 93 | % linkcolor=cornflower, 94 | % citecolor=scarlet, 95 | % urlcolor=chameleon% 96 | %]{hyperref} 97 | 98 | %\setcounter{secnumdepth}{2} % section number depth 99 | %\setcounter{tocdepth}{2} % depth of TOC 100 | 101 | %-------------------------------------------------------------- 102 | % Specify the font used in captions. 103 | %-------------------------------------------------------------- 104 | \setkomafont{captionlabel}{\usekomafont{descriptionlabel}} 105 | 106 | %-------------------------------------------------------------- 107 | % This is where we define the custom title. The image that is 108 | % placed on the left-hand-side of the title, PILogo.pdf in 109 | % this case, should be in the same directory as this file. 
Note 110 | % that you can always use hyperlinks for the Title, Semester, 111 | % and Author fields, below, in case you want to link to a seminar 112 | % web page or a lecturer's email address. 113 | %-------------------------------------------------------------- 114 | 115 | %\titlehead{% 116 | % \vspace*{-1cm} 117 | % \begin{minipage}[b]{4.0cm} 118 | % \includegraphics*[height=1.3cm]{PSIletterhead.eps}% 119 | % \end{minipage} 120 | % \hfill 121 | % \begin{minipage}[b]{10cm} 122 | %% \begin{flushright} 123 | % \usekomafont{descriptionlabel} 124 | % \Large Machine Learning for Many-Body Physics \\ 125 | % \normalsize \normalfont Spring 2019\\ 126 | % Lauren Hayward Sierens 127 | %\end{flushright} 128 | % \end{minipage} 129 | % \\[-3mm] 130 | % \hrule 131 | % \vspace{-3mm} 132 | %} 133 | % ----------- 134 | \titlehead{% 135 | \vspace*{-1cm} 136 | \begin{minipage}[b]{4.0cm} 137 | \includegraphics*[height=1.3cm]{Uniandes_logo.jpeg}% 138 | \end{minipage} 139 | \hfill 140 | \begin{minipage}[b]{12cm} 141 | \begin{flushright} 142 | \usekomafont{descriptionlabel} 143 | \large Machine Learning for Quantum Matter and Technology \\ 144 | \normalsize \normalfont 145 | J. Carrasquilla, E. Inack, G. Torlai, R. Melko, L. Hayward Sierens 146 | \end{flushright} 147 | \end{minipage} 148 | \\[-3mm] 149 | \hrule 150 | \vspace{-3mm} 151 | } 152 | 153 | %-------------------------------------------------------------- 154 | % Other useful physic-related packages 155 | %-------------------------------------------------------------- 156 | \usepackage{braket} 157 | % Use \Bra{}, \Ket{} or \Braket{x | \psi} for Dirac notation 158 | 159 | %-------------------------------------------------------------- 160 | % Nice numbering for question parts. 
161 | %-------------------------------------------------------------- 162 | \newcommand{\ba}{\begin{eqnarray}} 163 | \newcommand{\ea}{\end{eqnarray}} 164 | 165 | \newcommand{\ssk}{\smallskip} 166 | \newcommand{\msk}{\medskip} 167 | 168 | \newcommand{\nin}{\noindent} 169 | 170 | \newcommand{\beq}{\begin{equation}} 171 | \newcommand{\eeq}{\end{equation}} 172 | 173 | \newcommand{\beqs}{\begin{equation*}} 174 | \newcommand{\eeqs}{\end{equation*}} 175 | 176 | \renewcommand{\vec}[1]{{\mathbf{#1}}} 177 | \renewcommand{\labelenumi}{\alph{enumi})} 178 | \renewcommand{\labelenumiii}{\roman{enumiii})} 179 | 180 | %%%%%%%%%%%%% 181 | 182 | \def\be{\begin{eqnarray}} 183 | \def\ee{\end{eqnarray}} 184 | \newcommand{\nn}{\nonumber} 185 | \newcommand\para{\paragraph{}} 186 | \newcommand{\ft}[2]{{\textstyle\frac{#1}{#2}}} 187 | \newcommand{\eqn}[1]{(\ref{#1})} 188 | \newcommand{\pl}[1]{\frac{\partial {\cal L}}{\partial{#1}}} 189 | \newcommand{\ppp}[2]{\frac{\partial {#1}}{\partial {#2}}} 190 | \newcommand{\ph}[1]{\frac{\partial {\cal H}}{\partial{#1}}} 191 | \newcommand{\leftp}[3]{\left.\ppp{#1}{#2}\right|_{#3}} 192 | %\newcommand{\Vec}[2]{\left(\begin{array}{c} {#1} \\ {#2}\end{array}\right)} 193 | \newcommand\vx{\vec{x}} 194 | \newcommand\vy{\vec{y}} 195 | \newcommand\vp{\vec{p}} 196 | \newcommand\vq{\vec{q}} 197 | \newcommand\vk{\vec{k}} 198 | \newcommand\avp{a^{\ }_{\vp}} 199 | \newcommand\advp{a^\dagger_{\vp}} 200 | \newcommand\ad{a^\dagger} 201 | 202 | \newcommand\balpha{\mbox{\boldmath $\alpha$}} 203 | \newcommand\bbeta{\mbox{\boldmath $\beta$}} 204 | \newcommand\bgamma{\mbox{\boldmath $\gamma$}} 205 | \newcommand\bomega{\mbox{\boldmath $\omega$}} 206 | \newcommand\blambda{\mbox{\boldmath $\lambda$}} 207 | \newcommand\bmu{\mbox{\boldmath $\mu$}} 208 | \newcommand\bphi{\mbox{\boldmath $\phi$}} 209 | \newcommand\bzeta{\mbox{\boldmath $\zeta$}} 210 | \newcommand\bsigma{\mbox{\boldmath $\sigma$}} 211 | \newcommand\bepsilon{\mbox{\boldmath $\epsilon$}} 212 | 
\newcommand\btau{\mbox{\boldmath $\tau$}} 213 | \newcommand\beeta{\mbox{\boldmath $\eta$}} 214 | \newcommand\btheta{\mbox{\boldmath $\theta$}} 215 | 216 | \def\norm#1{:\!\!#1\!\!:} 217 | 218 | \def\part{\partial} 219 | 220 | \def\dbox{\hbox{{$\sqcup$}\llap{$\sqcap$}}} 221 | 222 | \def\sla#1{\hbox{{$#1$}\llap{$/$}}} 223 | \def\Dslash{\,\,{\raise.15ex\hbox{/}\mkern-13mu D}} 224 | \def\Dbarslash{\,\,{\raise.15ex\hbox{/}\mkern-12mu {\bar D}}} 225 | \def\delslash{\,\,{\raise.15ex\hbox{/}\mkern-10mu \partial}} 226 | \def\delbarslash{\,\,{\raise.15ex\hbox{/}\mkern-9mu {\bar\partial}}} 227 | \def\pslash{\,\,{\raise.15ex\hbox{/}\mkern-11mu p}} 228 | \def\qslash{\,\,{\raise.15ex\hbox{/}\mkern-9mu q}} 229 | \def\kslash{\,\,{\raise.15ex\hbox{/}\mkern-11mu k}} 230 | \def\eslash{\,\,{\raise.15ex\hbox{/}\mkern-9mu \epsilon}} 231 | \def\calDslash{\,\,{\raise.15ex\hbox{/}\mkern-12mu {\cal D}}} 232 | \newcommand{\slsh}[1]{\,\,{\raise.15ex\hbox{/}\mkern-12mu {#1}}} 233 | 234 | 235 | \newcommand\Bprime{B${}^\prime$} 236 | %\newcommand{\sign}{{\rm sign}} 237 | 238 | \newcommand\bx{{\bf x}} 239 | \newcommand\br{{\bf r}} 240 | \newcommand\bF{{\bf F}} 241 | \newcommand\bp{{\bf p}} 242 | \newcommand\bL{{\bf L}} 243 | \newcommand\bR{{\bf R}} 244 | \newcommand\bP{{\bf P}} 245 | \newcommand\bE{{\bf E}} 246 | \newcommand\bB{{\bf B}} 247 | \newcommand\bA{{\bf A}} 248 | \newcommand\bee{{\bf e}} 249 | \newcommand\bte{\tilde{\bf e}} 250 | \def\ket#1{\left| #1 \right\rangle} 251 | \def\bra#1{\left\langle #1 \right|} 252 | \def\vev#1{\left\langle #1 \right\rangle} 253 | 254 | \newcommand\lmn[2]{\Lambda^{#1}_{\ #2}} 255 | \newcommand\mup[2]{\eta^{#1 #2}} 256 | \newcommand\mdown[2]{\eta_{#1 #2}} 257 | \newcommand\deld[2]{\delta^{#1}_{#2}} 258 | \newcommand\df{\Delta_F} 259 | \newcommand\cL{{\cal L}} 260 | %\def\theequation{\thesection.\arabic{equation} 261 | 262 | \newcounter{solneqn} 263 | %\newcommand{\mytag}{\refstepcounter{equation}\tag{\roman{equationn}}} 264 | 
\newcommand{\mytag}{\refstepcounter{solneqn}\tag{S.\arabic{solneqn}}} 265 | %%%%%%%%% 266 | 267 | 268 | \DeclareMathOperator{\Tr}{Tr} 269 | \DeclareMathOperator{\sign}{sign} 270 | 271 | %\renewcommand{\ttdefault}{pcr} 272 | 273 | \usepackage{enumitem} 274 | 275 | \begin{document} 276 | 277 | %\scalefont{1.35} 278 | 279 | \vspace{-3cm} 280 | 281 | \opt{nosoln}{\title{Tutorial 2: \\Feedforward neural networks in TensorFlow \vspace*{-6mm}}} 282 | \opt{soln}{\title{Tutorial 2 \textcolor{JungleGreen}{Solutions}: \\Feedforward neural networks in TensorFlow \vspace*{-6mm}}} 283 | 284 | \date{May 28, 2019} 285 | 286 | \maketitle 287 | 288 | The objective of this tutorial is to become comfortable with using the software library TensorFlow to 289 | create and train a simple feedforward neural network for supervised learning. 290 | 291 | You will use and modify the Python program \texttt{tutorial2{\textunderscore}spirals.py}. 292 | This code starts by generating a random dataset of 2D points with \texttt{K} branches. 293 | For example, when \texttt{K=3} this dataset might look as follows: 294 | \vspace{-3mm} 295 | \begin{center} 296 | \includegraphics[width=8.1cm]{spiral_data.pdf} 297 | \end{center} 298 | For each datapoint $\mathbf{x} = (x_1, x_2)$, the label is the branch index such that $y = 0, 1$ or 2 for the example above. 299 | Our goal is to implement a neural network capable of classifying the branches. 300 | 301 | This network will compare its output with labels in the so-called \emph{one-hot encoding}. 302 | For a given label \texttt{y=k}, the corresponding one-hot encoding is a \texttt{K}-dimensional vector with all entries zero 303 | except for the \texttt{k}$^\text{th}$ entry (which has value 1). 
304 | So when \texttt{K=3} the one-hot encodings for the labels are 305 | \begin{equation*} 306 | 0 \rightarrow \begin{bmatrix} 1 \\ 0 \\ 0 \end{bmatrix}, \qquad 307 | 1 \rightarrow \begin{bmatrix} 0 \\ 1 \\ 0 \end{bmatrix}, \qquad 308 | 2 \rightarrow \begin{bmatrix} 0 \\ 0 \\ 1 \end{bmatrix}. 309 | \end{equation*} 310 | 311 | The code first defines the structure of the neural network 312 | and then uses the dataset to train this network. 313 | The code generates two files: \texttt{spiral{\textunderscore}data.pdf} (a plot of the dataset such as the one above) 314 | and \texttt{spiral{\textunderscore}results.pdf} (which displays three plots illustrating the results of training). 315 | 316 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 317 | %%%%%%%%%%%%%% Q1 %%%%%%%%%%%%%% 318 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 319 | \section{Neural network architecture, activation functions, cost functions and learning rate} 320 | 321 | %\vspace{-2mm}Use the first part of the tutorial to do as many parts of this problem as you can. 322 | %After 45 minutes, Lauren will email out the solution to part c), which will be needed to proceed to Problem 2. 323 | 324 | %\opt{soln}{\newpage} 325 | \begin{enumerate}[label=\alph*)] 326 | 327 | %%%%%%%%%%%%%% (a) %%%%%%%%%%%%%% 328 | \item Run the code and look at how it attempts to classify the two-dimensional space. 329 | You should find that the resulting classifier separates the two-dimensional space using lines, 330 | and thus does a poor job of representing the data. 
331 | 332 | %%% SOLUTION %%% 333 | \soln{Using the parameters provided (with the learning rate set to 1), you should find that the given network classifies the two-dimensional space as: 334 | \begin{center} 335 | \includegraphics[width=15.5cm, clip, trim=1.5cm 0 1.5cm 1.5cm]{Code/spiral_results_K3_LR1.pdf} %trim: Left, bottom, right, top 336 | \end{center} 337 | 338 | \note{If the learning rate is 0.001, you will be able to observe the training cost decreasing: 339 | \begin{center} 340 | \includegraphics[width=15.5cm, clip, trim=1.5cm 0 1.5cm 1.5cm]{Code/spiral_results_K3_LR0-001.pdf} 341 | \end{center} 342 | } 343 | } 344 | 345 | %%%%%%%%%%%%%% (b) %%%%%%%%%%%%%% 346 | \opt{soln}{\newpage} 347 | \item Look through the section of code marked \texttt{DEFINE THE NETWORK ARCHITECTURE}. 348 | On paper, draw the neural network corresponding to the one in the code for the case of \texttt{K} branches. 349 | Pay particular attention to the number of neurons in each layer. 350 | 351 | %%% SOLUTION %%% 352 | \soln{The neural network for the given code is: 353 | \begin{center} 354 | \includegraphics[height=5cm]{1b_network.pdf} 355 | \end{center} 356 | where the last layer has $K$ neurons. 357 | } 358 | 359 | %%%%%%%%%%%%%% (c) %%%%%%%%%%%%%% 360 | \item Add in a hidden layer with 4 neurons and study how this hidden layer changes the output. 361 | On paper, draw the neural network in this case. 
362 | 363 | %%% SOLUTION %%% 364 | \soln{Replace the \texttt{Layer 1} and \texttt{Network output} sections with the following code: 365 | 366 | \texttt{nH=4} \\ 367 | \texttt{\#\#\# Layer 1: \#\#\#} \\ 368 | \texttt{W1 = tf.Variable(tf.random{\textunderscore}normal([2, nH], mean=0.0, stddev=0.01, dtype=tf.float32))} \\ 369 | \texttt{b1 = tf.Variable( tf.zeros([nH]) )} \\ 370 | \texttt{z1 = tf.matmul(x, W1) + b1} \\ 371 | \texttt{a1 = tf.nn.sigmoid( z1 )} \\ 372 | 373 | \texttt{\#\#\# Layer 2: \#\#\#} \\ 374 | \texttt{W2 = tf.Variable(tf.random{\textunderscore}normal([nH, K], mean=0.0, stddev=0.01, dtype=tf.float32))} \\ 375 | \texttt{b2 = tf.Variable( tf.zeros([K]) )} \\ 376 | \texttt{z2 = tf.matmul(a1, W2) + b2} \\ 377 | \texttt{a2 = tf.nn.sigmoid( z2 )} \\ 378 | 379 | \texttt{\#\#\# Network output: \#\#\#} \\ 380 | \texttt{aL = a2} \\ 381 | 382 | \newpage 383 | After adding a hidden layer with 4 neurons, the neural network is: 384 | \begin{center} 385 | \includegraphics[height=5cm]{1c_network.pdf} 386 | \end{center} 387 | where the last layer has $K$ neurons. 388 | } 389 | 390 | %%%%%%%%%%%%%% (d) %%%%%%%%%%%%%% 391 | \item Replace the sigmoid activation function on the first layer with a rectified linear unit (ReLU), and study how the 392 | choice of activation function changes the output. 393 | 394 | %%% SOLUTION %%% 395 | \soln{Replace the line \texttt{a1 = tf.nn.sigmoid( z1 )} with: 396 | 397 | \texttt{a1 = tf.nn.relu( z1 )} 398 | 399 | When using the sigmoid activation function, you might observe that the classifier separates the two-dimensional space 400 | into regions that meet roughly at the origin. 
401 | When using the ReLU activation function on the first layer, the meeting place for these regions tends to jump around more.} 402 | 403 | %%%%%%%%%%%%%% (e) %%%%%%%%%%%%%% 404 | \item Change the cost function so that it is computed using the mean-squared error (MSE) instead of the cross-entropy, 405 | and study how the choice of cost function changes the output. 406 | 407 | %%% SOLUTION %%% 408 | \soln{Add the following beneath the line \texttt{cost{\textunderscore}func = cross{\textunderscore}entropy}: 409 | 410 | \texttt{mse = tf.reduce{\textunderscore}mean( tf.reduce{\textunderscore}sum( tf.square(y{\textunderscore}onehot - aL) ) )} \\ 411 | \texttt{cost{\textunderscore}func = mse} 412 | } 413 | 414 | %%%%%%%%%%%%%% (f) %%%%%%%%%%%%%% 415 | \item Study the effects of increasing and decreasing the \texttt{learning{\textunderscore}rate} hyperparameter. 416 | Examine these effects using both the cross-entropy and mean-squared error cost functions. 417 | 418 | %%% SOLUTION %%% 419 | \soln{When using sigmoid activation functions and the mean-squared error as the cost function, for the given data set you will likely find that the network does not make progress when 420 | \texttt{learning{\textunderscore}rate=1.0}. Decreasing the learning rate to 0.1 enables the network to make progress. 421 | For discussion about the effects of changing the learning rate, see Section IVA of \texttt{arXiv:1803.08823}. 422 | } 423 | 424 | %%%%%%%%%%%%%% (g) %%%%%%%%%%%%%% 425 | \opt{soln}{\newpage} 426 | \item Explain why the \texttt{K}-dimensional one-hot encoding is useful. What do you think would happen if you used a one-dimensional label (such that $\texttt{y}=0,1,\ldots, \texttt{K}-1$ or $\texttt{K}$) instead? 427 | 428 | %%% SOLUTION %%% 429 | \soln{As an illustrative example, consider the case where there are \texttt{K=3} branches and the network is uncertain about whether the datapoint $\mathbf{x}$ belongs to branch 0 or branch 2. 
430 | Specifically, let's say that the network is 431 | 50\% sure that $\mathbf{x}$ belongs to branch 0 and 432 | 50\% sure that it belongs to branch 2. 433 | This situation is easy to express in terms of the one-hot encoding as 434 | \begin{equation*} 435 | \mathbf{a}^{(L)} = \begin{bmatrix} 0.5 \\ 0 \\ 0.5 \end{bmatrix}. 436 | \end{equation*} 437 | Using a one-dimensional encoding, the network would only return a single number between 0 and 2. In this case, it might return the average predicted label such that 438 | \begin{equation*} 439 | \mathbf{a}^{(L)} = \begin{bmatrix} \frac{0+2}{2} \end{bmatrix} = \begin{bmatrix} 1 \end{bmatrix}, 440 | \end{equation*} 441 | which is the same as the output it would return if it was certain that $\mathbf{x}$ belongs to branch 1. 442 | Thus, we see that a one-dimensional encoding can give network outputs that are hard to interpret when the number of classes \texttt{K} is greater than 2. 443 | } 444 | 445 | \end{enumerate} 446 | 447 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 448 | %%%%%%%%%%%%%% Q2 %%%%%%%%%%%%%% 449 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 450 | %\opt{soln}{\newpage} 451 | \opt{nosoln}{\vspace{8mm}} 452 | \section{Group work} 453 | \vspace{-2mm}For this part of the tutorial, you will work in groups to explore the capabilities of a feedforward neural network with one hidden layer. 454 | Each group will plot the accuracy of the neural network as a function of some quantity $Q$ (a property of the network or the data). 455 | 456 | For all plots, the accuracy should be measured once you are convinced that the network has converged, 457 | which means that you may need to adjust the \texttt{N{\textunderscore}epochs} parameter in order to run the code longer. 458 | In some cases, you may also find it useful to adjust the learning rate as $Q$ is varied (in particular if you find that your network is getting stuck or if training is very slow for certain values of $Q$). 
459 | 460 | %At the end of the tutorial, send your most interesting results (as well as a list of your group members) to Lauren and they will be discussed during tomorrow's lecture. 461 | Each group's task is to plot the accuracy of the neural network as a function of some quantity $Q$, where 462 | \begin{itemize} 463 | \item {\usekomafont{descriptionlabel}{Groups 1 \& 2:}} $Q = $ the number of neurons in the hidden layer 464 | \item {\usekomafont{descriptionlabel}{Groups 3 \& 4:}} $Q = $ \texttt{mag{\textunderscore}noise} (the magnitude of noise in the data) 465 | \item {\usekomafont{descriptionlabel}{Groups 5 \& 6:}} $Q = $ \texttt{K} (the number of different labels) 466 | \end{itemize} 467 | 468 | %%% SOLUTION %%% 469 | \opt{soln}{\newpage} 470 | \soln{ 471 | {\usekomafont{descriptionlabel}{Results from Groups 1 \& 2:}} 472 | \begin{center} 473 | %\includegraphics[height=7cm]{GroupResults/accuracy_vs_hiddenNeurons.pdf} 474 | \end{center} 475 | 476 | %\newpage 477 | {\usekomafont{descriptionlabel}{Results from Groups 3 \& 4:}} 478 | \begin{center} 479 | %\includegraphics[height=6cm, clip, trim=9cm 10cm 3.5cm 2.5cm]{GroupResults/noise_vs_accuracy.pdf} %trim: Left, bottom, right, top 480 | %\includegraphics[height=7cm]{GroupResults/Group4/DL_acc_vs_noise.png} 481 | \end{center} 482 | 483 | \newpage 484 | {\usekomafont{descriptionlabel}{Results from Groups 5 \& 6:}} 485 | \begin{center} 486 | %\includegraphics[height=7.5cm]{GroupResults/FNNaccuracy_vs_numbranches.png} \\ 487 | %\includegraphics[height=7.5cm]{GroupResults/acc_vs_branches/Acc_K_eta1.pdf} 488 | \end{center} 489 | } 490 | 491 | \end{document} 492 | -------------------------------------------------------------------------------- /Tutorial2/Tutorial2_solutions.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PIQuIL/QuLAPENO/f646b87512bb8d879b6de93e004d33bbff698fe9/Tutorial2/Tutorial2_solutions.pdf 
-------------------------------------------------------------------------------- /Tutorial2/Uniandes_logo.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PIQuIL/QuLAPENO/f646b87512bb8d879b6de93e004d33bbff698fe9/Tutorial2/Uniandes_logo.jpeg -------------------------------------------------------------------------------- /Tutorial2/bonus_tutorial2_ffnn_2dising.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PIQuIL/QuLAPENO/f646b87512bb8d879b6de93e004d33bbff698fe9/Tutorial2/bonus_tutorial2_ffnn_2dising.pdf -------------------------------------------------------------------------------- /Tutorial2/spiral_data.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PIQuIL/QuLAPENO/f646b87512bb8d879b6de93e004d33bbff698fe9/Tutorial2/spiral_data.pdf -------------------------------------------------------------------------------- /Tutorial2/tutorial2_spirals.py: -------------------------------------------------------------------------------- 1 | ########## Machine Learning for Quantum Matter and Technology ###################### 2 | ### Juan Carrasquilla, Estelle Inack, Giacomo Torlai, Roger Melko 3 | ### with code from Lauren Hayward Sierens/PSI 4 | ### 5 | ### Tutorial 2: This code builds a simple data set of spirals with K branches and then implements 6 | ### and trains a simple feedforward neural network to classify its branches. 
7 | ##################################################################################### 8 | 9 | 10 | import numpy as np 11 | import matplotlib.pyplot as plt 12 | import tensorflow as tf 13 | 14 | #Specify font sizes for plots: 15 | plt.rcParams['axes.labelsize'] = 10 16 | plt.rcParams['legend.fontsize'] = 10 17 | plt.rcParams['xtick.labelsize'] = 8 18 | plt.rcParams['ytick.labelsize'] = 8 19 | 20 | seed=1234 21 | np.random.seed(seed) 22 | tf.set_random_seed(seed) 23 | 24 | plt.ion() # turn on interactive mode (for plotting) 25 | 26 | ############################################################################ 27 | ####################### CREATE AND PLOT THE DATA SET ####################### 28 | ############################################################################ 29 | 30 | N = 50 # number of points per branch 31 | K = 3 # number of branches 32 | 33 | N_train = N*K # total number of points in the training set 34 | x_train = np.zeros((N_train,2)) # matrix containing the 2-dimensional datapoints 35 | y_train = np.zeros(N_train, dtype='uint8') # labels (not in one-hot representation) 36 | 37 | mag_noise = 0.3 # controls how much noise gets added to the data 38 | dTheta = 4 # difference in theta in each branch 39 | 40 | ### Data generation: ### 41 | for j in range(K): 42 | ix = range(N*j,N*(j+1)) 43 | r = np.linspace(0.01,1,N) # radius 44 | t = np.linspace(j*(2*np.pi)/K,j*(2*np.pi)/K + dTheta,N) + np.random.randn(N)*mag_noise # theta 45 | x_train[ix] = np.c_[r*np.cos(t), r*np.sin(t)] 46 | y_train[ix] = j 47 | 48 | ### Plot the data set: ### 49 | fig = plt.figure(1, figsize=(5,5)) 50 | plt.scatter(x_train[:, 0], x_train[:, 1], c=y_train, s=40)#, cmap=plt.cm.Spectral) 51 | plt.xlim([-1,1]) 52 | plt.ylim([-1,1]) 53 | plt.xlabel('x1') 54 | plt.ylabel('x2') 55 | fig.savefig('spiral_data.pdf') 56 | 57 | ############################################################################ 58 | ##################### DEFINE THE NETWORK ARCHITECTURE ###################### 59 | 
############################################################################ 60 | 61 | ### Create placeholders for the input data and labels ### 62 | ### (we'll input actual values when we ask TensorFlow to run an actual computation later) ### 63 | x = tf.placeholder(tf.float32, [None, 2]) # input data 64 | y = tf.placeholder(tf.int32,[None]) # labels 65 | 66 | ### Layer 1: ### 67 | W1 = tf.Variable( tf.random_normal([2, K], mean=0.0, stddev=0.01, dtype=tf.float32) ) 68 | b1 = tf.Variable( tf.zeros([K]) ) 69 | z1 = tf.matmul(x, W1) + b1 70 | a1 = tf.nn.sigmoid( z1 ) 71 | 72 | ### Network output: ### 73 | aL = a1 74 | 75 | ### Cost function: ### 76 | ### (measures how far off our model is from the labels) ### 77 | y_onehot = tf.one_hot(y,depth=K) # labels are converted to one-hot representation 78 | eps=0.0000000001 # to prevent the logs from diverging 79 | cross_entropy = tf.reduce_mean(-tf.reduce_sum( y_onehot * tf.log(aL+eps) + (1.0-y_onehot )*tf.log(1.0-aL +eps) , reduction_indices=[1])) 80 | cost_func = cross_entropy 81 | 82 | ### Use backpropagation to minimize the cost function using the gradient descent algorithm: ### 83 | learning_rate = 1.0 # hyperparameter 84 | train_step = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost_func) 85 | 86 | N_epochs = 20000 # number of times to run gradient descent 87 | 88 | ############################################################################## 89 | ################################## TRAINING ################################## 90 | ############################################################################## 91 | sess = tf.Session() 92 | sess.run(tf.global_variables_initializer()) 93 | 94 | epoch_list = [] 95 | cost_training = [] 96 | acc_training = [] 97 | 98 | ############ Function for plotting: ############ 99 | def updatePlot(): 100 | 101 | ### Generate coordinates covering the whole plane: ### 102 | padding = 0.1 103 | spacing = 0.02 104 | x1_min, x1_max = x_train[:, 0].min() - padding, x_train[:, 
0].max() + padding 105 | x2_min, x2_max = x_train[:, 1].min() - padding, x_train[:, 1].max() + padding 106 | x1_grid, x2_grid = np.meshgrid(np.arange(x1_min, x1_max, spacing), 107 | np.arange(x2_min, x2_max, spacing)) 108 | 109 | NN_output = sess.run(aL,feed_dict={x:np.c_[x1_grid.ravel(), x2_grid.ravel()]}) 110 | predicted_class = np.argmax(NN_output, axis=1) 111 | 112 | ### Plot the classifier: ### 113 | plt.subplot(121) 114 | plt.contourf(x1_grid, x2_grid, predicted_class.reshape(x1_grid.shape), K, alpha=0.8) 115 | plt.scatter(x_train[:, 0], x_train[:, 1], c=y_train, s=40) 116 | plt.xlim(x1_grid.min(), x1_grid.max()) 117 | plt.ylim(x2_grid.min(), x2_grid.max()) 118 | plt.xlabel('x1') 119 | plt.ylabel('x2') 120 | 121 | ### Plot the cost function during training: ### 122 | plt.subplot(222) 123 | plt.plot(epoch_list,cost_training,'o-') 124 | plt.xlabel('Epoch') 125 | plt.ylabel('Training cost') 126 | 127 | ### Plot the training accuracy: ### 128 | plt.subplot(224) 129 | plt.plot(epoch_list,acc_training,'o-') 130 | plt.xlabel('Epoch') 131 | plt.ylabel('Training accuracy') 132 | ############ End of plotting function ############ 133 | 134 | ### Train for several epochs: ### 135 | for epoch in range(N_epochs): 136 | sess.run(train_step, feed_dict={x: x_train,y:y_train}) #run gradient descent 137 | 138 | ### Update the plot and print results every 500 epochs: ### 139 | if epoch % 500 == 0: 140 | cost = sess.run(cost_func,feed_dict={x:x_train, y:y_train}) 141 | NN_output = sess.run(aL,feed_dict={x:x_train, y:y_train}) 142 | predicted_class = np.argmax(NN_output, axis=1) 143 | accuracy = np.mean(predicted_class == y_train) 144 | 145 | print( "Iteration %d:\n Training cost %f\n Training accuracy %f\n" % (epoch, cost, accuracy) ) 146 | 147 | epoch_list.append(epoch) 148 | cost_training.append(cost) 149 | acc_training.append(accuracy) 150 | 151 | ### Update the plot of the resulting classifier: ### 152 | fig = plt.figure(2,figsize=(10,5)) 153 | 
fig.subplots_adjust(hspace=.3,wspace=.3) 154 | plt.clf() 155 | updatePlot() 156 | plt.pause(0.1) 157 | 158 | plt.savefig('spiral_results.pdf') # Save the figure showing the results in the current directory 159 | 160 | plt.show() 161 | -------------------------------------------------------------------------------- /Tutorial3/Tutorial3.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PIQuIL/QuLAPENO/f646b87512bb8d879b6de93e004d33bbff698fe9/Tutorial3/Tutorial3.pdf -------------------------------------------------------------------------------- /Tutorial3/Tutorial3.tex: -------------------------------------------------------------------------------- 1 | \documentclass[letterpaper]{scrartcl} 2 | \usepackage[top=0.8in, bottom=1in, left=0.9in, right=0.9in]{geometry} 3 | 4 | \makeatletter 5 | \DeclareOldFontCommand{\tt}{\normalfont\ttfamily}{\mathtt} 6 | \makeatother 7 | 8 | \usepackage{url} 9 | \usepackage{scalefnt} 10 | \usepackage{bm} 11 | \usepackage{cancel} 12 | 13 | %-------------------------------------------------------------- 14 | % We need this package, part of the KOMA class, for the custom 15 | % headings. 16 | %-------------------------------------------------------------- 17 | \usepackage{scrpage2} 18 | 19 | 20 | %-------------------------------------------------------------- 21 | % One of many packages you can use if you want to include 22 | % graphics. 23 | %-------------------------------------------------------------- 24 | \usepackage{graphicx} 25 | 26 | %-------------------------------------------------------------- 27 | % The AMS packages are useful but not required. They offer a 28 | % number of nice fonts, environments for formatting multiline 29 | % equations, etc. 
30 | %-------------------------------------------------------------- 31 | \usepackage{amsmath} 32 | \usepackage{amsfonts} 33 | \usepackage{amssymb} 34 | \usepackage{amsthm} 35 | 36 | %-------------------------------------------------------------- 37 | % Basic way to set-up the page margins. 38 | %-------------------------------------------------------------- 39 | %\addtolength{\oddsidemargin}{-.2in} 40 | %\addtolength{\evensidemargin}{-.2in} 41 | %\addtolength{\textwidth}{0.45in} 42 | %\addtolength{\topmargin}{-.175in} 43 | %\addtolength{\textheight}{0.75in} 44 | 45 | %-------------------------------------------------------------- 46 | % Comment out the following to add indents and remove space between paragraphs. 47 | %-------------------------------------------------------------- 48 | \usepackage{parskip} 49 | 50 | %-------------------------------------------------------------- 51 | % This package is used to define custom colours. 52 | %-------------------------------------------------------------- 53 | \usepackage[usenames,dvipsnames,svgnames,table]{xcolor} 54 | 55 | %-------------------------------------------------------------- 56 | % Package for adding in solutions: 57 | %-------------------------------------------------------------- 58 | \usepackage[nosoln,regf,nolf]{optional} 59 | %\usepackage[soln,regf]{optional} 60 | 61 | %\newcommand{\soln}[1]{\opt{soln}{\\[4pt] \textcolor{JungleGreen}{\textbf{Solution:}} #1}} 62 | \newcommand{\soln}[1]{\opt{soln}{\textcolor{JungleGreen}{\usekomafont{descriptionlabel}{Solution:}} #1}} 63 | 64 | \newcommand{\hint}[1]{{\usekomafont{descriptionlabel}{Hint:}} #1} 65 | \newcommand{\note}[1]{{\usekomafont{descriptionlabel}{Note:}} #1} 66 | \newcommand{\reference}[1]{{\usekomafont{descriptionlabel}{Reference:}} #1} 67 | 68 | %-------------------------------------------------------------- 69 | % A few colours for hyperlinks. 
70 | %-------------------------------------------------------------- 71 | \definecolor{plum}{rgb}{0.36078, 0.20784, 0.4} 72 | \definecolor{chameleon}{rgb}{0.30588, 0.60392, 0.023529} 73 | \definecolor{cornflower}{rgb}{0.12549, 0.29020, 0.52941} 74 | \definecolor{scarlet}{rgb}{0.8, 0, 0} 75 | \definecolor{brick}{rgb}{0.64314, 0, 0} 76 | 77 | %-------------------------------------------------------------- 78 | % A command for typesetting and linking an email address. 79 | %-------------------------------------------------------------- 80 | \newcommand{\email}[1]{\href{mailto:#1}{\tt \textcolor{cornflower}{#1}}} 81 | \newcommand{\web}[1]{\href{#1}{\tt \textcolor{cornflower}{#1}}} 82 | 83 | %-------------------------------------------------------------- 84 | % The following declaration includes the hyperref package and 85 | % assigns metadata. If you compile with pdflatex, this data 86 | % will be automatically included in the pdf file. 87 | %-------------------------------------------------------------- 88 | %\usepackage[ 89 | % pdftitle={QFT Tutorial 1},% 90 | % pdfauthor={PSI Tutors},% 91 | % pdfsubject={QFT Tutorial 1},% 92 | % pdfkeywords={PSI}, 93 | % colorlinks=true, 94 | % linkcolor=cornflower, 95 | % citecolor=scarlet, 96 | % urlcolor=chameleon% 97 | %]{hyperref} 98 | 99 | %\setcounter{secnumdepth}{2} % section number depth 100 | %\setcounter{tocdepth}{2} % depth of TOC 101 | 102 | %-------------------------------------------------------------- 103 | % Specify the font used in captions. 104 | %-------------------------------------------------------------- 105 | \setkomafont{captionlabel}{\usekomafont{descriptionlabel}} 106 | 107 | %-------------------------------------------------------------- 108 | % This is where we define the custom title. The image that is 109 | % placed on the left-hand-side of the title, PILogo.pdf in 110 | % this case, should be in the same directory as this file. 
Note 111 | % that you can always use hyperlinks for the Title, Semester, 112 | % and Author fields, below, in case you want to link to a seminar 113 | % web page or a lecturer's email address. 114 | %-------------------------------------------------------------- 115 | 116 | \titlehead{% 117 | \vspace*{-1cm} 118 | \begin{minipage}[b]{4.0cm} 119 | \includegraphics*[height=1.3cm]{Uniandes_logo.jpeg}% 120 | \end{minipage} 121 | \hfill 122 | \begin{minipage}[b]{12cm} 123 | \begin{flushright} 124 | \usekomafont{descriptionlabel} 125 | \large Machine Learning for Quantum Matter and Technology \\ 126 | \normalsize \normalfont 127 | J. Carrasquilla, E. Inack, G. Torlai, R. Melko, L. Hayward Sierens 128 | \end{flushright} 129 | \end{minipage} 130 | \\[-3mm] 131 | \hrule 132 | \vspace{-3mm} 133 | } 134 | % ----------- 135 | 136 | %-------------------------------------------------------------- 137 | % Other useful physic-related packages 138 | %-------------------------------------------------------------- 139 | \usepackage{braket} 140 | % Use \Bra{}, \Ket{} or \Braket{x | \psi} for Dirac notation 141 | 142 | %-------------------------------------------------------------- 143 | % Nice numbering for question parts. 
144 | %-------------------------------------------------------------- 145 | \newcommand{\ba}{\begin{eqnarray}} 146 | \newcommand{\ea}{\end{eqnarray}} 147 | 148 | \newcommand{\ssk}{\smallskip} 149 | \newcommand{\msk}{\medskip} 150 | 151 | \newcommand{\nin}{\noindent} 152 | 153 | \newcommand{\beq}{\begin{equation}} 154 | \newcommand{\eeq}{\end{equation}} 155 | 156 | \newcommand{\beqs}{\begin{equation*}} 157 | \newcommand{\eeqs}{\end{equation*}} 158 | 159 | \renewcommand{\vec}[1]{{\mathbf{#1}}} 160 | \renewcommand{\labelenumi}{\alph{enumi})} 161 | \renewcommand{\labelenumiii}{\roman{enumiii})} 162 | 163 | %%%%%%%%%%%%% 164 | 165 | \def\be{\begin{eqnarray}} 166 | \def\ee{\end{eqnarray}} 167 | \newcommand{\nn}{\nonumber} 168 | \newcommand\para{\paragraph{}} 169 | \newcommand{\ft}[2]{{\textstyle\frac{#1}{#2}}} 170 | \newcommand{\eqn}[1]{(\ref{#1})} 171 | \newcommand{\pl}[1]{\frac{\partial {\cal L}}{\partial{#1}}} 172 | \newcommand{\ppp}[2]{\frac{\partial {#1}}{\partial {#2}}} 173 | \newcommand{\ph}[1]{\frac{\partial {\cal H}}{\partial{#1}}} 174 | \newcommand{\leftp}[3]{\left.\ppp{#1}{#2}\right|_{#3}} 175 | %\newcommand{\Vec}[2]{\left(\begin{array}{c} {#1} \\ {#2}\end{array}\right)} 176 | \newcommand\vx{\vec{x}} 177 | \newcommand\vy{\vec{y}} 178 | \newcommand\vp{\vec{p}} 179 | \newcommand\vq{\vec{q}} 180 | \newcommand\vk{\vec{k}} 181 | \newcommand\avp{a^{\ }_{\vp}} 182 | \newcommand\advp{a^\dagger_{\vp}} 183 | \newcommand\ad{a^\dagger} 184 | 185 | \newcommand\balpha{\mbox{\boldmath $\alpha$}} 186 | \newcommand\bbeta{\mbox{\boldmath $\beta$}} 187 | \newcommand\bgamma{\mbox{\boldmath $\gamma$}} 188 | \newcommand\bomega{\mbox{\boldmath $\omega$}} 189 | \newcommand\blambda{\mbox{\boldmath $\lambda$}} 190 | \newcommand\bmu{\mbox{\boldmath $\mu$}} 191 | \newcommand\bphi{\mbox{\boldmath $\phi$}} 192 | \newcommand\bzeta{\mbox{\boldmath $\zeta$}} 193 | \newcommand\bsigma{\mbox{\boldmath $\sigma$}} 194 | \newcommand\bepsilon{\mbox{\boldmath $\epsilon$}} 195 | 
\newcommand\btau{\mbox{\boldmath $\tau$}} 196 | \newcommand\beeta{\mbox{\boldmath $\eta$}} 197 | \newcommand\btheta{\mbox{\boldmath $\theta$}} 198 | 199 | \def\norm#1{:\!\!#1\!\!:} 200 | 201 | \def\part{\partial} 202 | 203 | \def\dbox{\hbox{{$\sqcup$}\llap{$\sqcap$}}} 204 | 205 | \def\sla#1{\hbox{{$#1$}\llap{$/$}}} 206 | \def\Dslash{\,\,{\raise.15ex\hbox{/}\mkern-13mu D}} 207 | \def\Dbarslash{\,\,{\raise.15ex\hbox{/}\mkern-12mu {\bar D}}} 208 | \def\delslash{\,\,{\raise.15ex\hbox{/}\mkern-10mu \partial}} 209 | \def\delbarslash{\,\,{\raise.15ex\hbox{/}\mkern-9mu {\bar\partial}}} 210 | \def\pslash{\,\,{\raise.15ex\hbox{/}\mkern-11mu p}} 211 | \def\qslash{\,\,{\raise.15ex\hbox{/}\mkern-9mu q}} 212 | \def\kslash{\,\,{\raise.15ex\hbox{/}\mkern-11mu k}} 213 | \def\eslash{\,\,{\raise.15ex\hbox{/}\mkern-9mu \epsilon}} 214 | \def\calDslash{\,\,{\rais.15ex\hbox{/}\mkern-12mu {\cal D}}} 215 | \newcommand{\slsh}[1]{\,\,{\raise.15ex\hbox{/}\mkern-12mu {#1}}} 216 | 217 | 218 | \newcommand\Bprime{B${}^\prime$} 219 | %\newcommand{\sign}{{\rm sign}} 220 | 221 | \newcommand\bx{{\bf x}} 222 | \newcommand\br{{\bf r}} 223 | \newcommand\bF{{\bf F}} 224 | \newcommand\bp{{\bf p}} 225 | \newcommand\bL{{\bf L}} 226 | \newcommand\bR{{\bf R}} 227 | \newcommand\bP{{\bf P}} 228 | \newcommand\bE{{\bf E}} 229 | \newcommand\bB{{\bf B}} 230 | \newcommand\bA{{\bf A}} 231 | \newcommand\bee{{\bf e}} 232 | \newcommand\bte{\tilde{\bf e}} 233 | \def\ket#1{\left| #1 \right\rangle} 234 | \def\bra#1{\left\langle #1 \right|} 235 | \def\vev#1{\left\langle #1 \right\rangle} 236 | 237 | \newcommand\lmn[2]{\Lambda^{#1}_{\ #2}} 238 | \newcommand\mup[2]{\eta^{#1 #2}} 239 | \newcommand\mdown[2]{\eta_{#1 #2}} 240 | \newcommand\deld[2]{\delta^{#1}_{#2}} 241 | \newcommand\df{\Delta_F} 242 | \newcommand\cL{{\cal L}} 243 | %\def\theequation{\thesection.\arabic{equation} 244 | 245 | \newcounter{solneqn} 246 | %\newcommand{\mytag}{\refstepcounter{equation}\tag{\roman{equationn}}} 247 | 
\newcommand{\mytag}{\refstepcounter{solneqn}\tag{S.\arabic{solneqn}}} 248 | 249 | \newcommand{\appropto}{\mathrel{\vcenter{ 250 | \offinterlineskip\halign{\hfil$##$\cr 251 | \propto\cr\noalign{\kern2pt}\sim\cr\noalign{\kern-2pt}}}}} 252 | %%%%%%%%% 253 | 254 | 255 | \DeclareMathOperator{\Tr}{Tr} 256 | \DeclareMathOperator{\sign}{sign} 257 | 258 | %\renewcommand{\ttdefault}{pcr} 259 | 260 | \usepackage{enumitem} 261 | 262 | \begin{document} 263 | 264 | %\scalefont{1.35} 265 | 266 | \vspace{-3cm} 267 | 268 | \opt{nosoln}{\title{Tutorial 3: \\Identifying phase transitions using \\principal component analysis \vspace*{-6mm}}} 269 | \opt{soln}{\title{Tutorial 3 \textcolor{JungleGreen}{Solutions}: \\Identifying phase transitions using \\principal component analysis \vspace*{-6mm}}} 270 | 271 | \date{May 29, 2019} 272 | 273 | \maketitle 274 | 275 | The objective of this tutorial is to use the dimensional reduction technique known as principal component analysis (PCA) to identify phases without explicitly training with phase labels. 276 | You will reproduce the results in Figures 1 and 2 of Reference~\cite{wang}. 277 | 278 | The goal of dimensional reduction is to generate a lower-dimensional representation $\mathcal{D}' = \{ \mathbf{x}' \}$ 279 | of a high-dimensional dataset $\mathcal{D} = \{ \mathbf{x} \}$, where $\mathbf{x}' \in \mathbb{R}^{N'}$, $\mathbf{x} \in \mathbb{R}^{N}$ and $N' < N$. 280 | The lower-dimensional dataset should still encode the important features of the original higher-dimensional data. 281 | The PCA method attempts to accomplish this goal by applying a linear transformation. 282 | In this tutorial we will apply PCA to $N$-dimensional spin configurations of the two-dimensional Ising model. 283 | %In Homework 2, you will consider the \emph{non-linear} $t$-distributed stochastic neighbour embedding (t-SNE) method for visualizing the phases of such models. 
284 | 285 | Our data is stored in an $M \times N$ matrix $X$, where each of the $M$ rows stores a spin configuration for a system with $N$ spins. 286 | For the two-dimensional Ising model we have $N=L^2$. %while for the two-dimensional Ising lattice gauge theory we have $N=2L^2$. 287 | 288 | PCA can be performed on a matrix $X^c$ where each column has mean 0. 289 | One can calculate $X^{c}$ from $X$ as 290 | \begin{equation} 291 | X^c_{ij} = X_{ij} - \frac{1}{M} \sum_{k=1}^M X_{kj}. 292 | \end{equation} 293 | 294 | The principal components $x'_1, x'_2, \ldots$ are then stored in the columns of an $M \times N$ matrix 295 | \begin{equation} 296 | X' = X_c P, 297 | \end{equation} 298 | where $P$ is an $N \times N$ matrix. 299 | $P$ is determined by solving the eigenvalue problem 300 | \begin{equation} 301 | \frac{1}{M-1}X_c^T X_c = P D P^T, 302 | \end{equation} 303 | where $D$ is a diagonal matrix with non-negative entries $\lambda_1 \geq \lambda_2 \geq \cdots \geq \lambda_N \geq 0$. 304 | 305 | Another important definition is the so-called \emph{explained variance ratio} $r_\ell$, which measures how much of the variance in the dataset $X$ 306 | can be explained by the principal component $x'_\ell$. 307 | This ratio is defined in terms of the eigenvalues $\lambda_\ell$ as 308 | \begin{equation} 309 | r_\ell = \frac{\lambda_\ell}{ \sum_{i=1}^N \lambda_i}. 310 | \end{equation} 311 | 312 | For this tutorial, you have been given a dataset containing rows of spin configurations 313 | $[s_1, s_2, \ldots, s_{N}]$ 314 | for the two-dimensional Ising model on various sized lattices. 315 | Each spin `up' is stored as 1 and each spin `down' is stored as -1. 316 | 317 | You have been given data for $L=20$, 40 and 80. 318 | Each spin configuration file contains 100 spin configurations at each of the 20 temperatures $T/J = 1.0, 1.1, 1.2, \ldots, 2.9$ such that $M=2000$ for each lattice size. 
319 | For each $L$, there is a corresponding file storing the temperature at which each configuration was generated 320 | (using Monte Carlo simulation). 321 | The temperature data will not be used to determine the principal components and will only be used for data visualization purposes. 322 | If you wish, you could generate this data yourself using the code from Tutorial 1. 323 | 324 | %For the Ising gauge theory, you have again been given data for $L=20$, 40 and 80. 325 | %Each spin configuration file contains 500 spin configurations corresponding to $T=0$ (where the system becomes topologically ordered) 326 | %and 500 spin configurations corresponding to $T=\infty$. 327 | %For each lattice size, there is a corresponding file of labels indicating whether a configuration corresponds to $T=0$ (label 0) 328 | %or $T=\infty$ (label 1). 329 | %Similar to the temperatures in the Ising model, these labels will only be used for visualization purposes. 330 | %You could generate similar data files yourself using code from Tutorial 2. 331 | 332 | %\opt{soln}{\newpage} 333 | \begin{enumerate}[label=\alph*)] 334 | 335 | %%%%%%%%%%%%%% (a) %%%%%%%%%%%%%% 336 | \item Write code that reads in the spin configurations for the Ising model for a given lattice size and determines the principal components $x'_1, x'_2, \ldots$. 337 | Make a scatter plot of $x'_1$ versus $x'_2$ for each of the lattice sizes. 338 | What do you notice about the behaviour of the resulting two-dimensional cluster(s) as $L$ increases? 339 | 340 | \hint{You may find it useful to use the function 341 | \begin{center} 342 | \texttt{(lamb, P) = np.linalg.eig(np.dot(Xc.T, Xc))} 343 | \end{center} 344 | When \texttt{np.dot(Xc.T, Xc)} is an $N \times N$ matrix, this function will return the $N$ eigenvalues $\lambda_1, \lambda_2, \ldots, \lambda_N$ in the array \texttt{lamb}. 345 | The eigenvector corresponding to \texttt{lamb[i]} will be returned in \texttt{P[:,i]}. 
346 | } 347 | 348 | %%% SOLUTION %%% 349 | \soln{See \texttt{tutorial3{\textunderscore}pca{\textunderscore}solution.py} for the code needed to generate the plots. 350 | You should get results similar to the following: 351 | \begin{center} 352 | \includegraphics[width=5cm]{xPrime1_xPrime2_Ising_partA_L20.pdf} 353 | \includegraphics[width=5cm]{xPrime1_xPrime2_Ising_partA_L40.pdf} 354 | \includegraphics[width=5cm]{xPrime1_xPrime2_Ising_partA_L80.pdf} 355 | \end{center} 356 | You can see that there are three clusters for each $L$, which become more distinct as $L$ increases. 357 | } 358 | 359 | %%%%%%%%%%%%%% (b) %%%%%%%%%%%%%% 360 | \opt{soln}{\newpage} 361 | \item Label the points in your plot such that they are coloured according to their temperature and compare with Figure 2 of Reference~\cite{wang}. 362 | What does each cluster correspond to in terms of the phases of the two-dimensional Ising model? 363 | 364 | %%% SOLUTION %%% 365 | \soln{See \texttt{tutorial3{\textunderscore}pca{\textunderscore}solution.py} for the code needed to generate the plots. 366 | You should get results similar to the following: 367 | \begin{center} 368 | \includegraphics[width=5cm]{xPrime1_xPrime2_Ising_L20.pdf} 369 | \includegraphics[width=5cm]{xPrime1_xPrime2_Ising_L40.pdf} 370 | \includegraphics[width=5cm]{xPrime1_xPrime2_Ising_L80.pdf} 371 | \end{center} 372 | The red cluster in the middle corresponds to the high-temperature (paramagnetic) phase. 373 | The white/blue clusters to the right and left of the red cluster correspond to the low-temperature (ferromagnetic) phase 374 | (one for the spin-up symmetry-broken state and one for the spin-down state). 375 | } 376 | 377 | %%%%%%%%%%%%%% (c) %%%%%%%%%%%%%% 378 | \item Consider now the explained variance ratios $r_\ell$. 379 | Plot the largest 10 explained variance ratios for each lattice size and compare with Figure 1 of Reference~\cite{wang}. 
380 | How many principal components are needed to explain how the Ising spin configurations vary as a function of temperature? 381 | 382 | %%% SOLUTION %%% 383 | \soln{See \texttt{tutorial3{\textunderscore}pca{\textunderscore}solution.py} for the code needed to generate the plot below. 384 | You should get results similar to the following: 385 | \begin{center} 386 | \includegraphics[width=8cm]{ratios_Ising.pdf} 387 | \end{center} 388 | Note the logarithmic scale on the $y$-axis. 389 | The plot shows that, for each $L$, the explained variance ratio corresponding to the first principal component is more than an order of 390 | magnitude larger than for the other components. 391 | We can conclude that the first principal component explains the vast majority of the variations in the spin configurations 392 | as a function of temperature. 393 | } 394 | 395 | 396 | %%%%%%%%%%%%%% (d) %%%%%%%%%%%%%% 397 | \opt{soln}{\newpage} 398 | \item Let $p_\ell$ be the $\ell^{\text{th}}$ column of the matrix $P$ such that $x'_\ell = X p_\ell$. 399 | Plot the elements of $p_1$. 400 | What does your plot tell you about how $x'_1$ is computed from the data $X$? 401 | Relate your plot to the magnetization order parameter for the Ising model, which is given by $\frac{1}{N} \sum_i s_i$. 402 | 403 | %%% SOLUTION %%% 404 | \soln{See \texttt{tutorial3{\textunderscore}pca{\textunderscore}solution.py} for the code needed to generate the plots. 405 | You should get results similar to the following: 406 | \begin{center} 407 | \includegraphics[width=5cm]{p1_Ising_L20.pdf} 408 | \includegraphics[width=5cm]{p1_Ising_L40.pdf} 409 | \includegraphics[width=5cm]{p1_Ising_L80.pdf} 410 | \end{center} 411 | 412 | For each value of $L$, the fluctuations in the components of $p_1$ are very small (i.e. the distribution of the components of $p_1$ is nearly flat). 
413 | As a result, $x'_1 = X p_1$ will (approximately) sum the spins in each configuration such that $x'_1 \appropto m$, 414 | where $m = \frac{1}{N} \sum_i s_i$ is the magnetization of a given configuration. 415 | } 416 | 417 | %%%%%%%%%%%%%% (e) %%%%%%%%%%%%%% 418 | %\item Repeat parts (a)--(c) for the Ising gauge theory and label the points in the $x'_1$ versus $x'_2$ plot according to the label ($T=0$ or $T=\infty$). 419 | %You should find that plots of $x'_1$ versus $x'_2$ form one large cluster with no clear separation of the $T=0$ data from the $T=\infty$ data. 420 | %Consider how the explained variance ratios behave as a function of $\ell$. 421 | %What do these explained variance ratios indicate about the order parameter for the Ising gauge theory? 422 | 423 | %%% SOLUTION %%% 424 | %\soln{See \texttt{tutorial3{\textunderscore}pca{\textunderscore}solution.py} for the code needed to generate the plots. 425 | %Plots of $x'_1$ versus $x'_2$ should look similar to the following, with no clear separation of the $T=0$ data (blue) from the $T=\infty$ data (red). 426 | %\begin{center} 427 | %\includegraphics[width=5cm]{xPrime1_xPrime2_gaugeTheory_L20.pdf} 428 | %\includegraphics[width=5cm]{xPrime1_xPrime2_gaugeTheory_L40.pdf} 429 | %\includegraphics[width=5cm]{xPrime1_xPrime2_gaugeTheory_L80.pdf} 430 | %\end{center} 431 | 432 | %\opt{soln}{\newpage} 433 | %The explained variance ratios should behave similar to the following plot: 434 | %\begin{center} 435 | %\includegraphics[width=8cm]{ratios_gaugeTheory.pdf} 436 | %\end{center} 437 | %We see that many principal components are needed in this case in order to explain how the spin configurations vary when $T$ changes from 0 to $\infty$. 438 | %We know that the order parameter is a highly non-linear function of our spins for the Ising gauge theory, 439 | %so it makes sense that the linear PCA transformation is not useful for finding a low-dimensional representation of this model's spin configurations. 
440 | %} 441 | 442 | \end{enumerate} 443 | 444 | \begin{thebibliography}{} 445 | 446 | \bibitem{wang} 447 | L. Wang, Phys. Rev. B \textbf{94}, 195105 (2016), {\small\url{https://arxiv.org/abs/1606.00318}}. 448 | 449 | \end{thebibliography} 450 | 451 | \end{document} -------------------------------------------------------------------------------- /Tutorial3/Tutorial3_solutions.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PIQuIL/QuLAPENO/f646b87512bb8d879b6de93e004d33bbff698fe9/Tutorial3/Tutorial3_solutions.pdf -------------------------------------------------------------------------------- /Tutorial3/Uniandes_logo.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PIQuIL/QuLAPENO/f646b87512bb8d879b6de93e004d33bbff698fe9/Tutorial3/Uniandes_logo.jpeg -------------------------------------------------------------------------------- /Tutorial3/data_tutorial3/temperatures_Ising_L20.txt: -------------------------------------------------------------------------------- 1 | 1.00000000 2 | 1.00000000 3 | 1.00000000 4 | 1.00000000 5 | 1.00000000 6 | 1.00000000 7 | 1.00000000 8 | 1.00000000 9 | 1.00000000 10 | 1.00000000 11 | 1.00000000 12 | 1.00000000 13 | 1.00000000 14 | 1.00000000 15 | 1.00000000 16 | 1.00000000 17 | 1.00000000 18 | 1.00000000 19 | 1.00000000 20 | 1.00000000 21 | 1.00000000 22 | 1.00000000 23 | 1.00000000 24 | 1.00000000 25 | 1.00000000 26 | 1.00000000 27 | 1.00000000 28 | 1.00000000 29 | 1.00000000 30 | 1.00000000 31 | 1.00000000 32 | 1.00000000 33 | 1.00000000 34 | 1.00000000 35 | 1.00000000 36 | 1.00000000 37 | 1.00000000 38 | 1.00000000 39 | 1.00000000 40 | 1.00000000 41 | 1.00000000 42 | 1.00000000 43 | 1.00000000 44 | 1.00000000 45 | 1.00000000 46 | 1.00000000 47 | 1.00000000 48 | 1.00000000 49 | 1.00000000 50 | 1.00000000 51 | 1.00000000 52 | 1.00000000 53 | 1.00000000 54 | 1.00000000 55 | 1.00000000 56 | 
1.00000000 57 | 1.00000000 58 | 1.00000000 59 | 1.00000000 60 | 1.00000000 61 | 1.00000000 62 | 1.00000000 63 | 1.00000000 64 | 1.00000000 65 | 1.00000000 66 | 1.00000000 67 | 1.00000000 68 | 1.00000000 69 | 1.00000000 70 | 1.00000000 71 | 1.00000000 72 | 1.00000000 73 | 1.00000000 74 | 1.00000000 75 | 1.00000000 76 | 1.00000000 77 | 1.00000000 78 | 1.00000000 79 | 1.00000000 80 | 1.00000000 81 | 1.00000000 82 | 1.00000000 83 | 1.00000000 84 | 1.00000000 85 | 1.00000000 86 | 1.00000000 87 | 1.00000000 88 | 1.00000000 89 | 1.00000000 90 | 1.00000000 91 | 1.00000000 92 | 1.00000000 93 | 1.00000000 94 | 1.00000000 95 | 1.00000000 96 | 1.00000000 97 | 1.00000000 98 | 1.00000000 99 | 1.00000000 100 | 1.00000000 101 | 1.10000000 102 | 1.10000000 103 | 1.10000000 104 | 1.10000000 105 | 1.10000000 106 | 1.10000000 107 | 1.10000000 108 | 1.10000000 109 | 1.10000000 110 | 1.10000000 111 | 1.10000000 112 | 1.10000000 113 | 1.10000000 114 | 1.10000000 115 | 1.10000000 116 | 1.10000000 117 | 1.10000000 118 | 1.10000000 119 | 1.10000000 120 | 1.10000000 121 | 1.10000000 122 | 1.10000000 123 | 1.10000000 124 | 1.10000000 125 | 1.10000000 126 | 1.10000000 127 | 1.10000000 128 | 1.10000000 129 | 1.10000000 130 | 1.10000000 131 | 1.10000000 132 | 1.10000000 133 | 1.10000000 134 | 1.10000000 135 | 1.10000000 136 | 1.10000000 137 | 1.10000000 138 | 1.10000000 139 | 1.10000000 140 | 1.10000000 141 | 1.10000000 142 | 1.10000000 143 | 1.10000000 144 | 1.10000000 145 | 1.10000000 146 | 1.10000000 147 | 1.10000000 148 | 1.10000000 149 | 1.10000000 150 | 1.10000000 151 | 1.10000000 152 | 1.10000000 153 | 1.10000000 154 | 1.10000000 155 | 1.10000000 156 | 1.10000000 157 | 1.10000000 158 | 1.10000000 159 | 1.10000000 160 | 1.10000000 161 | 1.10000000 162 | 1.10000000 163 | 1.10000000 164 | 1.10000000 165 | 1.10000000 166 | 1.10000000 167 | 1.10000000 168 | 1.10000000 169 | 1.10000000 170 | 1.10000000 171 | 1.10000000 172 | 1.10000000 173 | 1.10000000 174 | 1.10000000 175 | 1.10000000 176 | 
1.10000000 177 | 1.10000000 178 | 1.10000000 179 | 1.10000000 180 | 1.10000000 181 | 1.10000000 182 | 1.10000000 183 | 1.10000000 184 | 1.10000000 185 | 1.10000000 186 | 1.10000000 187 | 1.10000000 188 | 1.10000000 189 | 1.10000000 190 | 1.10000000 191 | 1.10000000 192 | 1.10000000 193 | 1.10000000 194 | 1.10000000 195 | 1.10000000 196 | 1.10000000 197 | 1.10000000 198 | 1.10000000 199 | 1.10000000 200 | 1.10000000 201 | 1.20000000 202 | 1.20000000 203 | 1.20000000 204 | 1.20000000 205 | 1.20000000 206 | 1.20000000 207 | 1.20000000 208 | 1.20000000 209 | 1.20000000 210 | 1.20000000 211 | 1.20000000 212 | 1.20000000 213 | 1.20000000 214 | 1.20000000 215 | 1.20000000 216 | 1.20000000 217 | 1.20000000 218 | 1.20000000 219 | 1.20000000 220 | 1.20000000 221 | 1.20000000 222 | 1.20000000 223 | 1.20000000 224 | 1.20000000 225 | 1.20000000 226 | 1.20000000 227 | 1.20000000 228 | 1.20000000 229 | 1.20000000 230 | 1.20000000 231 | 1.20000000 232 | 1.20000000 233 | 1.20000000 234 | 1.20000000 235 | 1.20000000 236 | 1.20000000 237 | 1.20000000 238 | 1.20000000 239 | 1.20000000 240 | 1.20000000 241 | 1.20000000 242 | 1.20000000 243 | 1.20000000 244 | 1.20000000 245 | 1.20000000 246 | 1.20000000 247 | 1.20000000 248 | 1.20000000 249 | 1.20000000 250 | 1.20000000 251 | 1.20000000 252 | 1.20000000 253 | 1.20000000 254 | 1.20000000 255 | 1.20000000 256 | 1.20000000 257 | 1.20000000 258 | 1.20000000 259 | 1.20000000 260 | 1.20000000 261 | 1.20000000 262 | 1.20000000 263 | 1.20000000 264 | 1.20000000 265 | 1.20000000 266 | 1.20000000 267 | 1.20000000 268 | 1.20000000 269 | 1.20000000 270 | 1.20000000 271 | 1.20000000 272 | 1.20000000 273 | 1.20000000 274 | 1.20000000 275 | 1.20000000 276 | 1.20000000 277 | 1.20000000 278 | 1.20000000 279 | 1.20000000 280 | 1.20000000 281 | 1.20000000 282 | 1.20000000 283 | 1.20000000 284 | 1.20000000 285 | 1.20000000 286 | 1.20000000 287 | 1.20000000 288 | 1.20000000 289 | 1.20000000 290 | 1.20000000 291 | 1.20000000 292 | 1.20000000 293 | 1.20000000 
294 | 1.20000000 295 | 1.20000000 296 | 1.20000000 297 | 1.20000000 298 | 1.20000000 299 | 1.20000000 300 | 1.20000000 301 | 1.30000000 302 | 1.30000000 303 | 1.30000000 304 | 1.30000000 305 | 1.30000000 306 | 1.30000000 307 | 1.30000000 308 | 1.30000000 309 | 1.30000000 310 | 1.30000000 311 | 1.30000000 312 | 1.30000000 313 | 1.30000000 314 | 1.30000000 315 | 1.30000000 316 | 1.30000000 317 | 1.30000000 318 | 1.30000000 319 | 1.30000000 320 | 1.30000000 321 | 1.30000000 322 | 1.30000000 323 | 1.30000000 324 | 1.30000000 325 | 1.30000000 326 | 1.30000000 327 | 1.30000000 328 | 1.30000000 329 | 1.30000000 330 | 1.30000000 331 | 1.30000000 332 | 1.30000000 333 | 1.30000000 334 | 1.30000000 335 | 1.30000000 336 | 1.30000000 337 | 1.30000000 338 | 1.30000000 339 | 1.30000000 340 | 1.30000000 341 | 1.30000000 342 | 1.30000000 343 | 1.30000000 344 | 1.30000000 345 | 1.30000000 346 | 1.30000000 347 | 1.30000000 348 | 1.30000000 349 | 1.30000000 350 | 1.30000000 351 | 1.30000000 352 | 1.30000000 353 | 1.30000000 354 | 1.30000000 355 | 1.30000000 356 | 1.30000000 357 | 1.30000000 358 | 1.30000000 359 | 1.30000000 360 | 1.30000000 361 | 1.30000000 362 | 1.30000000 363 | 1.30000000 364 | 1.30000000 365 | 1.30000000 366 | 1.30000000 367 | 1.30000000 368 | 1.30000000 369 | 1.30000000 370 | 1.30000000 371 | 1.30000000 372 | 1.30000000 373 | 1.30000000 374 | 1.30000000 375 | 1.30000000 376 | 1.30000000 377 | 1.30000000 378 | 1.30000000 379 | 1.30000000 380 | 1.30000000 381 | 1.30000000 382 | 1.30000000 383 | 1.30000000 384 | 1.30000000 385 | 1.30000000 386 | 1.30000000 387 | 1.30000000 388 | 1.30000000 389 | 1.30000000 390 | 1.30000000 391 | 1.30000000 392 | 1.30000000 393 | 1.30000000 394 | 1.30000000 395 | 1.30000000 396 | 1.30000000 397 | 1.30000000 398 | 1.30000000 399 | 1.30000000 400 | 1.30000000 401 | 1.40000000 402 | 1.40000000 403 | 1.40000000 404 | 1.40000000 405 | 1.40000000 406 | 1.40000000 407 | 1.40000000 408 | 1.40000000 409 | 1.40000000 410 | 1.40000000 411 | 
1.40000000 412 | 1.40000000 413 | 1.40000000 414 | 1.40000000 415 | 1.40000000 416 | 1.40000000 417 | 1.40000000 418 | 1.40000000 419 | 1.40000000 420 | 1.40000000 421 | 1.40000000 422 | 1.40000000 423 | 1.40000000 424 | 1.40000000 425 | 1.40000000 426 | 1.40000000 427 | 1.40000000 428 | 1.40000000 429 | 1.40000000 430 | 1.40000000 431 | 1.40000000 432 | 1.40000000 433 | 1.40000000 434 | 1.40000000 435 | 1.40000000 436 | 1.40000000 437 | 1.40000000 438 | 1.40000000 439 | 1.40000000 440 | 1.40000000 441 | 1.40000000 442 | 1.40000000 443 | 1.40000000 444 | 1.40000000 445 | 1.40000000 446 | 1.40000000 447 | 1.40000000 448 | 1.40000000 449 | 1.40000000 450 | 1.40000000 451 | 1.40000000 452 | 1.40000000 453 | 1.40000000 454 | 1.40000000 455 | 1.40000000 456 | 1.40000000 457 | 1.40000000 458 | 1.40000000 459 | 1.40000000 460 | 1.40000000 461 | 1.40000000 462 | 1.40000000 463 | 1.40000000 464 | 1.40000000 465 | 1.40000000 466 | 1.40000000 467 | 1.40000000 468 | 1.40000000 469 | 1.40000000 470 | 1.40000000 471 | 1.40000000 472 | 1.40000000 473 | 1.40000000 474 | 1.40000000 475 | 1.40000000 476 | 1.40000000 477 | 1.40000000 478 | 1.40000000 479 | 1.40000000 480 | 1.40000000 481 | 1.40000000 482 | 1.40000000 483 | 1.40000000 484 | 1.40000000 485 | 1.40000000 486 | 1.40000000 487 | 1.40000000 488 | 1.40000000 489 | 1.40000000 490 | 1.40000000 491 | 1.40000000 492 | 1.40000000 493 | 1.40000000 494 | 1.40000000 495 | 1.40000000 496 | 1.40000000 497 | 1.40000000 498 | 1.40000000 499 | 1.40000000 500 | 1.40000000 501 | 1.50000000 502 | 1.50000000 503 | 1.50000000 504 | 1.50000000 505 | 1.50000000 506 | 1.50000000 507 | 1.50000000 508 | 1.50000000 509 | 1.50000000 510 | 1.50000000 511 | 1.50000000 512 | 1.50000000 513 | 1.50000000 514 | 1.50000000 515 | 1.50000000 516 | 1.50000000 517 | 1.50000000 518 | 1.50000000 519 | 1.50000000 520 | 1.50000000 521 | 1.50000000 522 | 1.50000000 523 | 1.50000000 524 | 1.50000000 525 | 1.50000000 526 | 1.50000000 527 | 1.50000000 528 | 1.50000000 
529 | 1.50000000 530 | 1.50000000 531 | 1.50000000 532 | 1.50000000 533 | 1.50000000 534 | 1.50000000 535 | 1.50000000 536 | 1.50000000 537 | 1.50000000 538 | 1.50000000 539 | 1.50000000 540 | 1.50000000 541 | 1.50000000 542 | 1.50000000 543 | 1.50000000 544 | 1.50000000 545 | 1.50000000 546 | 1.50000000 547 | 1.50000000 548 | 1.50000000 549 | 1.50000000 550 | 1.50000000 551 | 1.50000000 552 | 1.50000000 553 | 1.50000000 554 | 1.50000000 555 | 1.50000000 556 | 1.50000000 557 | 1.50000000 558 | 1.50000000 559 | 1.50000000 560 | 1.50000000 561 | 1.50000000 562 | 1.50000000 563 | 1.50000000 564 | 1.50000000 565 | 1.50000000 566 | 1.50000000 567 | 1.50000000 568 | 1.50000000 569 | 1.50000000 570 | 1.50000000 571 | 1.50000000 572 | 1.50000000 573 | 1.50000000 574 | 1.50000000 575 | 1.50000000 576 | 1.50000000 577 | 1.50000000 578 | 1.50000000 579 | 1.50000000 580 | 1.50000000 581 | 1.50000000 582 | 1.50000000 583 | 1.50000000 584 | 1.50000000 585 | 1.50000000 586 | 1.50000000 587 | 1.50000000 588 | 1.50000000 589 | 1.50000000 590 | 1.50000000 591 | 1.50000000 592 | 1.50000000 593 | 1.50000000 594 | 1.50000000 595 | 1.50000000 596 | 1.50000000 597 | 1.50000000 598 | 1.50000000 599 | 1.50000000 600 | 1.50000000 601 | 1.60000000 602 | 1.60000000 603 | 1.60000000 604 | 1.60000000 605 | 1.60000000 606 | 1.60000000 607 | 1.60000000 608 | 1.60000000 609 | 1.60000000 610 | 1.60000000 611 | 1.60000000 612 | 1.60000000 613 | 1.60000000 614 | 1.60000000 615 | 1.60000000 616 | 1.60000000 617 | 1.60000000 618 | 1.60000000 619 | 1.60000000 620 | 1.60000000 621 | 1.60000000 622 | 1.60000000 623 | 1.60000000 624 | 1.60000000 625 | 1.60000000 626 | 1.60000000 627 | 1.60000000 628 | 1.60000000 629 | 1.60000000 630 | 1.60000000 631 | 1.60000000 632 | 1.60000000 633 | 1.60000000 634 | 1.60000000 635 | 1.60000000 636 | 1.60000000 637 | 1.60000000 638 | 1.60000000 639 | 1.60000000 640 | 1.60000000 641 | 1.60000000 642 | 1.60000000 643 | 1.60000000 644 | 1.60000000 645 | 1.60000000 646 | 
1.60000000 647 | 1.60000000 648 | 1.60000000 649 | 1.60000000 650 | 1.60000000 651 | 1.60000000 652 | 1.60000000 653 | 1.60000000 654 | 1.60000000 655 | 1.60000000 656 | 1.60000000 657 | 1.60000000 658 | 1.60000000 659 | 1.60000000 660 | 1.60000000 661 | 1.60000000 662 | 1.60000000 663 | 1.60000000 664 | 1.60000000 665 | 1.60000000 666 | 1.60000000 667 | 1.60000000 668 | 1.60000000 669 | 1.60000000 670 | 1.60000000 671 | 1.60000000 672 | 1.60000000 673 | 1.60000000 674 | 1.60000000 675 | 1.60000000 676 | 1.60000000 677 | 1.60000000 678 | 1.60000000 679 | 1.60000000 680 | 1.60000000 681 | 1.60000000 682 | 1.60000000 683 | 1.60000000 684 | 1.60000000 685 | 1.60000000 686 | 1.60000000 687 | 1.60000000 688 | 1.60000000 689 | 1.60000000 690 | 1.60000000 691 | 1.60000000 692 | 1.60000000 693 | 1.60000000 694 | 1.60000000 695 | 1.60000000 696 | 1.60000000 697 | 1.60000000 698 | 1.60000000 699 | 1.60000000 700 | 1.60000000 701 | 1.70000000 702 | 1.70000000 703 | 1.70000000 704 | 1.70000000 705 | 1.70000000 706 | 1.70000000 707 | 1.70000000 708 | 1.70000000 709 | 1.70000000 710 | 1.70000000 711 | 1.70000000 712 | 1.70000000 713 | 1.70000000 714 | 1.70000000 715 | 1.70000000 716 | 1.70000000 717 | 1.70000000 718 | 1.70000000 719 | 1.70000000 720 | 1.70000000 721 | 1.70000000 722 | 1.70000000 723 | 1.70000000 724 | 1.70000000 725 | 1.70000000 726 | 1.70000000 727 | 1.70000000 728 | 1.70000000 729 | 1.70000000 730 | 1.70000000 731 | 1.70000000 732 | 1.70000000 733 | 1.70000000 734 | 1.70000000 735 | 1.70000000 736 | 1.70000000 737 | 1.70000000 738 | 1.70000000 739 | 1.70000000 740 | 1.70000000 741 | 1.70000000 742 | 1.70000000 743 | 1.70000000 744 | 1.70000000 745 | 1.70000000 746 | 1.70000000 747 | 1.70000000 748 | 1.70000000 749 | 1.70000000 750 | 1.70000000 751 | 1.70000000 752 | 1.70000000 753 | 1.70000000 754 | 1.70000000 755 | 1.70000000 756 | 1.70000000 757 | 1.70000000 758 | 1.70000000 759 | 1.70000000 760 | 1.70000000 761 | 1.70000000 762 | 1.70000000 763 | 1.70000000 
764 | 1.70000000 765 | 1.70000000 766 | 1.70000000 767 | 1.70000000 768 | 1.70000000 769 | 1.70000000 770 | 1.70000000 771 | 1.70000000 772 | 1.70000000 773 | 1.70000000 774 | 1.70000000 775 | 1.70000000 776 | 1.70000000 777 | 1.70000000 778 | 1.70000000 779 | 1.70000000 780 | 1.70000000 781 | 1.70000000 782 | 1.70000000 783 | 1.70000000 784 | 1.70000000 785 | 1.70000000 786 | 1.70000000 787 | 1.70000000 788 | 1.70000000 789 | 1.70000000 790 | 1.70000000 791 | 1.70000000 792 | 1.70000000 793 | 1.70000000 794 | 1.70000000 795 | 1.70000000 796 | 1.70000000 797 | 1.70000000 798 | 1.70000000 799 | 1.70000000 800 | 1.70000000 801 | 1.80000000 802 | 1.80000000 803 | 1.80000000 804 | 1.80000000 805 | 1.80000000 806 | 1.80000000 807 | 1.80000000 808 | 1.80000000 809 | 1.80000000 810 | 1.80000000 811 | 1.80000000 812 | 1.80000000 813 | 1.80000000 814 | 1.80000000 815 | 1.80000000 816 | 1.80000000 817 | 1.80000000 818 | 1.80000000 819 | 1.80000000 820 | 1.80000000 821 | 1.80000000 822 | 1.80000000 823 | 1.80000000 824 | 1.80000000 825 | 1.80000000 826 | 1.80000000 827 | 1.80000000 828 | 1.80000000 829 | 1.80000000 830 | 1.80000000 831 | 1.80000000 832 | 1.80000000 833 | 1.80000000 834 | 1.80000000 835 | 1.80000000 836 | 1.80000000 837 | 1.80000000 838 | 1.80000000 839 | 1.80000000 840 | 1.80000000 841 | 1.80000000 842 | 1.80000000 843 | 1.80000000 844 | 1.80000000 845 | 1.80000000 846 | 1.80000000 847 | 1.80000000 848 | 1.80000000 849 | 1.80000000 850 | 1.80000000 851 | 1.80000000 852 | 1.80000000 853 | 1.80000000 854 | 1.80000000 855 | 1.80000000 856 | 1.80000000 857 | 1.80000000 858 | 1.80000000 859 | 1.80000000 860 | 1.80000000 861 | 1.80000000 862 | 1.80000000 863 | 1.80000000 864 | 1.80000000 865 | 1.80000000 866 | 1.80000000 867 | 1.80000000 868 | 1.80000000 869 | 1.80000000 870 | 1.80000000 871 | 1.80000000 872 | 1.80000000 873 | 1.80000000 874 | 1.80000000 875 | 1.80000000 876 | 1.80000000 877 | 1.80000000 878 | 1.80000000 879 | 1.80000000 880 | 1.80000000 881 | 
1.80000000 882 | 1.80000000 883 | 1.80000000 884 | 1.80000000 885 | 1.80000000 886 | 1.80000000 887 | 1.80000000 888 | 1.80000000 889 | 1.80000000 890 | 1.80000000 891 | 1.80000000 892 | 1.80000000 893 | 1.80000000 894 | 1.80000000 895 | 1.80000000 896 | 1.80000000 897 | 1.80000000 898 | 1.80000000 899 | 1.80000000 900 | 1.80000000 901 | 1.90000000 902 | 1.90000000 903 | 1.90000000 904 | 1.90000000 905 | 1.90000000 906 | 1.90000000 907 | 1.90000000 908 | 1.90000000 909 | 1.90000000 910 | 1.90000000 911 | 1.90000000 912 | 1.90000000 913 | 1.90000000 914 | 1.90000000 915 | 1.90000000 916 | 1.90000000 917 | 1.90000000 918 | 1.90000000 919 | 1.90000000 920 | 1.90000000 921 | 1.90000000 922 | 1.90000000 923 | 1.90000000 924 | 1.90000000 925 | 1.90000000 926 | 1.90000000 927 | 1.90000000 928 | 1.90000000 929 | 1.90000000 930 | 1.90000000 931 | 1.90000000 932 | 1.90000000 933 | 1.90000000 934 | 1.90000000 935 | 1.90000000 936 | 1.90000000 937 | 1.90000000 938 | 1.90000000 939 | 1.90000000 940 | 1.90000000 941 | 1.90000000 942 | 1.90000000 943 | 1.90000000 944 | 1.90000000 945 | 1.90000000 946 | 1.90000000 947 | 1.90000000 948 | 1.90000000 949 | 1.90000000 950 | 1.90000000 951 | 1.90000000 952 | 1.90000000 953 | 1.90000000 954 | 1.90000000 955 | 1.90000000 956 | 1.90000000 957 | 1.90000000 958 | 1.90000000 959 | 1.90000000 960 | 1.90000000 961 | 1.90000000 962 | 1.90000000 963 | 1.90000000 964 | 1.90000000 965 | 1.90000000 966 | 1.90000000 967 | 1.90000000 968 | 1.90000000 969 | 1.90000000 970 | 1.90000000 971 | 1.90000000 972 | 1.90000000 973 | 1.90000000 974 | 1.90000000 975 | 1.90000000 976 | 1.90000000 977 | 1.90000000 978 | 1.90000000 979 | 1.90000000 980 | 1.90000000 981 | 1.90000000 982 | 1.90000000 983 | 1.90000000 984 | 1.90000000 985 | 1.90000000 986 | 1.90000000 987 | 1.90000000 988 | 1.90000000 989 | 1.90000000 990 | 1.90000000 991 | 1.90000000 992 | 1.90000000 993 | 1.90000000 994 | 1.90000000 995 | 1.90000000 996 | 1.90000000 997 | 1.90000000 998 | 1.90000000 
999 | 1.90000000 1000 | 1.90000000 1001 | 2.00000000 1002 | 2.00000000 1003 | 2.00000000 1004 | 2.00000000 1005 | 2.00000000 1006 | 2.00000000 1007 | 2.00000000 1008 | 2.00000000 1009 | 2.00000000 1010 | 2.00000000 1011 | 2.00000000 1012 | 2.00000000 1013 | 2.00000000 1014 | 2.00000000 1015 | 2.00000000 1016 | 2.00000000 1017 | 2.00000000 1018 | 2.00000000 1019 | 2.00000000 1020 | 2.00000000 1021 | 2.00000000 1022 | 2.00000000 1023 | 2.00000000 1024 | 2.00000000 1025 | 2.00000000 1026 | 2.00000000 1027 | 2.00000000 1028 | 2.00000000 1029 | 2.00000000 1030 | 2.00000000 1031 | 2.00000000 1032 | 2.00000000 1033 | 2.00000000 1034 | 2.00000000 1035 | 2.00000000 1036 | 2.00000000 1037 | 2.00000000 1038 | 2.00000000 1039 | 2.00000000 1040 | 2.00000000 1041 | 2.00000000 1042 | 2.00000000 1043 | 2.00000000 1044 | 2.00000000 1045 | 2.00000000 1046 | 2.00000000 1047 | 2.00000000 1048 | 2.00000000 1049 | 2.00000000 1050 | 2.00000000 1051 | 2.00000000 1052 | 2.00000000 1053 | 2.00000000 1054 | 2.00000000 1055 | 2.00000000 1056 | 2.00000000 1057 | 2.00000000 1058 | 2.00000000 1059 | 2.00000000 1060 | 2.00000000 1061 | 2.00000000 1062 | 2.00000000 1063 | 2.00000000 1064 | 2.00000000 1065 | 2.00000000 1066 | 2.00000000 1067 | 2.00000000 1068 | 2.00000000 1069 | 2.00000000 1070 | 2.00000000 1071 | 2.00000000 1072 | 2.00000000 1073 | 2.00000000 1074 | 2.00000000 1075 | 2.00000000 1076 | 2.00000000 1077 | 2.00000000 1078 | 2.00000000 1079 | 2.00000000 1080 | 2.00000000 1081 | 2.00000000 1082 | 2.00000000 1083 | 2.00000000 1084 | 2.00000000 1085 | 2.00000000 1086 | 2.00000000 1087 | 2.00000000 1088 | 2.00000000 1089 | 2.00000000 1090 | 2.00000000 1091 | 2.00000000 1092 | 2.00000000 1093 | 2.00000000 1094 | 2.00000000 1095 | 2.00000000 1096 | 2.00000000 1097 | 2.00000000 1098 | 2.00000000 1099 | 2.00000000 1100 | 2.00000000 1101 | 2.10000000 1102 | 2.10000000 1103 | 2.10000000 1104 | 2.10000000 1105 | 2.10000000 1106 | 2.10000000 1107 | 2.10000000 1108 | 2.10000000 1109 | 2.10000000 
1110 | 2.10000000 1111 | 2.10000000 1112 | 2.10000000 1113 | 2.10000000 1114 | 2.10000000 1115 | 2.10000000 1116 | 2.10000000 1117 | 2.10000000 1118 | 2.10000000 1119 | 2.10000000 1120 | 2.10000000 1121 | 2.10000000 1122 | 2.10000000 1123 | 2.10000000 1124 | 2.10000000 1125 | 2.10000000 1126 | 2.10000000 1127 | 2.10000000 1128 | 2.10000000 1129 | 2.10000000 1130 | 2.10000000 1131 | 2.10000000 1132 | 2.10000000 1133 | 2.10000000 1134 | 2.10000000 1135 | 2.10000000 1136 | 2.10000000 1137 | 2.10000000 1138 | 2.10000000 1139 | 2.10000000 1140 | 2.10000000 1141 | 2.10000000 1142 | 2.10000000 1143 | 2.10000000 1144 | 2.10000000 1145 | 2.10000000 1146 | 2.10000000 1147 | 2.10000000 1148 | 2.10000000 1149 | 2.10000000 1150 | 2.10000000 1151 | 2.10000000 1152 | 2.10000000 1153 | 2.10000000 1154 | 2.10000000 1155 | 2.10000000 1156 | 2.10000000 1157 | 2.10000000 1158 | 2.10000000 1159 | 2.10000000 1160 | 2.10000000 1161 | 2.10000000 1162 | 2.10000000 1163 | 2.10000000 1164 | 2.10000000 1165 | 2.10000000 1166 | 2.10000000 1167 | 2.10000000 1168 | 2.10000000 1169 | 2.10000000 1170 | 2.10000000 1171 | 2.10000000 1172 | 2.10000000 1173 | 2.10000000 1174 | 2.10000000 1175 | 2.10000000 1176 | 2.10000000 1177 | 2.10000000 1178 | 2.10000000 1179 | 2.10000000 1180 | 2.10000000 1181 | 2.10000000 1182 | 2.10000000 1183 | 2.10000000 1184 | 2.10000000 1185 | 2.10000000 1186 | 2.10000000 1187 | 2.10000000 1188 | 2.10000000 1189 | 2.10000000 1190 | 2.10000000 1191 | 2.10000000 1192 | 2.10000000 1193 | 2.10000000 1194 | 2.10000000 1195 | 2.10000000 1196 | 2.10000000 1197 | 2.10000000 1198 | 2.10000000 1199 | 2.10000000 1200 | 2.10000000 1201 | 2.20000000 1202 | 2.20000000 1203 | 2.20000000 1204 | 2.20000000 1205 | 2.20000000 1206 | 2.20000000 1207 | 2.20000000 1208 | 2.20000000 1209 | 2.20000000 1210 | 2.20000000 1211 | 2.20000000 1212 | 2.20000000 1213 | 2.20000000 1214 | 2.20000000 1215 | 2.20000000 1216 | 2.20000000 1217 | 2.20000000 1218 | 2.20000000 1219 | 2.20000000 1220 | 2.20000000 
1221 | 2.20000000 1222 | 2.20000000 1223 | 2.20000000 1224 | 2.20000000 1225 | 2.20000000 1226 | 2.20000000 1227 | 2.20000000 1228 | 2.20000000 1229 | 2.20000000 1230 | 2.20000000 1231 | 2.20000000 1232 | 2.20000000 1233 | 2.20000000 1234 | 2.20000000 1235 | 2.20000000 1236 | 2.20000000 1237 | 2.20000000 1238 | 2.20000000 1239 | 2.20000000 1240 | 2.20000000 1241 | 2.20000000 1242 | 2.20000000 1243 | 2.20000000 1244 | 2.20000000 1245 | 2.20000000 1246 | 2.20000000 1247 | 2.20000000 1248 | 2.20000000 1249 | 2.20000000 1250 | 2.20000000 1251 | 2.20000000 1252 | 2.20000000 1253 | 2.20000000 1254 | 2.20000000 1255 | 2.20000000 1256 | 2.20000000 1257 | 2.20000000 1258 | 2.20000000 1259 | 2.20000000 1260 | 2.20000000 1261 | 2.20000000 1262 | 2.20000000 1263 | 2.20000000 1264 | 2.20000000 1265 | 2.20000000 1266 | 2.20000000 1267 | 2.20000000 1268 | 2.20000000 1269 | 2.20000000 1270 | 2.20000000 1271 | 2.20000000 1272 | 2.20000000 1273 | 2.20000000 1274 | 2.20000000 1275 | 2.20000000 1276 | 2.20000000 1277 | 2.20000000 1278 | 2.20000000 1279 | 2.20000000 1280 | 2.20000000 1281 | 2.20000000 1282 | 2.20000000 1283 | 2.20000000 1284 | 2.20000000 1285 | 2.20000000 1286 | 2.20000000 1287 | 2.20000000 1288 | 2.20000000 1289 | 2.20000000 1290 | 2.20000000 1291 | 2.20000000 1292 | 2.20000000 1293 | 2.20000000 1294 | 2.20000000 1295 | 2.20000000 1296 | 2.20000000 1297 | 2.20000000 1298 | 2.20000000 1299 | 2.20000000 1300 | 2.20000000 1301 | 2.30000000 1302 | 2.30000000 1303 | 2.30000000 1304 | 2.30000000 1305 | 2.30000000 1306 | 2.30000000 1307 | 2.30000000 1308 | 2.30000000 1309 | 2.30000000 1310 | 2.30000000 1311 | 2.30000000 1312 | 2.30000000 1313 | 2.30000000 1314 | 2.30000000 1315 | 2.30000000 1316 | 2.30000000 1317 | 2.30000000 1318 | 2.30000000 1319 | 2.30000000 1320 | 2.30000000 1321 | 2.30000000 1322 | 2.30000000 1323 | 2.30000000 1324 | 2.30000000 1325 | 2.30000000 1326 | 2.30000000 1327 | 2.30000000 1328 | 2.30000000 1329 | 2.30000000 1330 | 2.30000000 1331 | 2.30000000 
1332 | 2.30000000 1333 | 2.30000000 1334 | 2.30000000 1335 | 2.30000000 1336 | 2.30000000 1337 | 2.30000000 1338 | 2.30000000 1339 | 2.30000000 1340 | 2.30000000 1341 | 2.30000000 1342 | 2.30000000 1343 | 2.30000000 1344 | 2.30000000 1345 | 2.30000000 1346 | 2.30000000 1347 | 2.30000000 1348 | 2.30000000 1349 | 2.30000000 1350 | 2.30000000 1351 | 2.30000000 1352 | 2.30000000 1353 | 2.30000000 1354 | 2.30000000 1355 | 2.30000000 1356 | 2.30000000 1357 | 2.30000000 1358 | 2.30000000 1359 | 2.30000000 1360 | 2.30000000 1361 | 2.30000000 1362 | 2.30000000 1363 | 2.30000000 1364 | 2.30000000 1365 | 2.30000000 1366 | 2.30000000 1367 | 2.30000000 1368 | 2.30000000 1369 | 2.30000000 1370 | 2.30000000 1371 | 2.30000000 1372 | 2.30000000 1373 | 2.30000000 1374 | 2.30000000 1375 | 2.30000000 1376 | 2.30000000 1377 | 2.30000000 1378 | 2.30000000 1379 | 2.30000000 1380 | 2.30000000 1381 | 2.30000000 1382 | 2.30000000 1383 | 2.30000000 1384 | 2.30000000 1385 | 2.30000000 1386 | 2.30000000 1387 | 2.30000000 1388 | 2.30000000 1389 | 2.30000000 1390 | 2.30000000 1391 | 2.30000000 1392 | 2.30000000 1393 | 2.30000000 1394 | 2.30000000 1395 | 2.30000000 1396 | 2.30000000 1397 | 2.30000000 1398 | 2.30000000 1399 | 2.30000000 1400 | 2.30000000 1401 | 2.40000000 1402 | 2.40000000 1403 | 2.40000000 1404 | 2.40000000 1405 | 2.40000000 1406 | 2.40000000 1407 | 2.40000000 1408 | 2.40000000 1409 | 2.40000000 1410 | 2.40000000 1411 | 2.40000000 1412 | 2.40000000 1413 | 2.40000000 1414 | 2.40000000 1415 | 2.40000000 1416 | 2.40000000 1417 | 2.40000000 1418 | 2.40000000 1419 | 2.40000000 1420 | 2.40000000 1421 | 2.40000000 1422 | 2.40000000 1423 | 2.40000000 1424 | 2.40000000 1425 | 2.40000000 1426 | 2.40000000 1427 | 2.40000000 1428 | 2.40000000 1429 | 2.40000000 1430 | 2.40000000 1431 | 2.40000000 1432 | 2.40000000 1433 | 2.40000000 1434 | 2.40000000 1435 | 2.40000000 1436 | 2.40000000 1437 | 2.40000000 1438 | 2.40000000 1439 | 2.40000000 1440 | 2.40000000 1441 | 2.40000000 1442 | 2.40000000 
1443 | 2.40000000 1444 | 2.40000000 1445 | 2.40000000 1446 | 2.40000000 1447 | 2.40000000 1448 | 2.40000000 1449 | 2.40000000 1450 | 2.40000000 1451 | 2.40000000 1452 | 2.40000000 1453 | 2.40000000 1454 | 2.40000000 1455 | 2.40000000 1456 | 2.40000000 1457 | 2.40000000 1458 | 2.40000000 1459 | 2.40000000 1460 | 2.40000000 1461 | 2.40000000 1462 | 2.40000000 1463 | 2.40000000 1464 | 2.40000000 1465 | 2.40000000 1466 | 2.40000000 1467 | 2.40000000 1468 | 2.40000000 1469 | 2.40000000 1470 | 2.40000000 1471 | 2.40000000 1472 | 2.40000000 1473 | 2.40000000 1474 | 2.40000000 1475 | 2.40000000 1476 | 2.40000000 1477 | 2.40000000 1478 | 2.40000000 1479 | 2.40000000 1480 | 2.40000000 1481 | 2.40000000 1482 | 2.40000000 1483 | 2.40000000 1484 | 2.40000000 1485 | 2.40000000 1486 | 2.40000000 1487 | 2.40000000 1488 | 2.40000000 1489 | 2.40000000 1490 | 2.40000000 1491 | 2.40000000 1492 | 2.40000000 1493 | 2.40000000 1494 | 2.40000000 1495 | 2.40000000 1496 | 2.40000000 1497 | 2.40000000 1498 | 2.40000000 1499 | 2.40000000 1500 | 2.40000000 1501 | 2.50000000 1502 | 2.50000000 1503 | 2.50000000 1504 | 2.50000000 1505 | 2.50000000 1506 | 2.50000000 1507 | 2.50000000 1508 | 2.50000000 1509 | 2.50000000 1510 | 2.50000000 1511 | 2.50000000 1512 | 2.50000000 1513 | 2.50000000 1514 | 2.50000000 1515 | 2.50000000 1516 | 2.50000000 1517 | 2.50000000 1518 | 2.50000000 1519 | 2.50000000 1520 | 2.50000000 1521 | 2.50000000 1522 | 2.50000000 1523 | 2.50000000 1524 | 2.50000000 1525 | 2.50000000 1526 | 2.50000000 1527 | 2.50000000 1528 | 2.50000000 1529 | 2.50000000 1530 | 2.50000000 1531 | 2.50000000 1532 | 2.50000000 1533 | 2.50000000 1534 | 2.50000000 1535 | 2.50000000 1536 | 2.50000000 1537 | 2.50000000 1538 | 2.50000000 1539 | 2.50000000 1540 | 2.50000000 1541 | 2.50000000 1542 | 2.50000000 1543 | 2.50000000 1544 | 2.50000000 1545 | 2.50000000 1546 | 2.50000000 1547 | 2.50000000 1548 | 2.50000000 1549 | 2.50000000 1550 | 2.50000000 1551 | 2.50000000 1552 | 2.50000000 1553 | 2.50000000 
1554 | 2.50000000 1555 | 2.50000000 1556 | 2.50000000 1557 | 2.50000000 1558 | 2.50000000 1559 | 2.50000000 1560 | 2.50000000 1561 | 2.50000000 1562 | 2.50000000 1563 | 2.50000000 1564 | 2.50000000 1565 | 2.50000000 1566 | 2.50000000 1567 | 2.50000000 1568 | 2.50000000 1569 | 2.50000000 1570 | 2.50000000 1571 | 2.50000000 1572 | 2.50000000 1573 | 2.50000000 1574 | 2.50000000 1575 | 2.50000000 1576 | 2.50000000 1577 | 2.50000000 1578 | 2.50000000 1579 | 2.50000000 1580 | 2.50000000 1581 | 2.50000000 1582 | 2.50000000 1583 | 2.50000000 1584 | 2.50000000 1585 | 2.50000000 1586 | 2.50000000 1587 | 2.50000000 1588 | 2.50000000 1589 | 2.50000000 1590 | 2.50000000 1591 | 2.50000000 1592 | 2.50000000 1593 | 2.50000000 1594 | 2.50000000 1595 | 2.50000000 1596 | 2.50000000 1597 | 2.50000000 1598 | 2.50000000 1599 | 2.50000000 1600 | 2.50000000 1601 | 2.60000000 1602 | 2.60000000 1603 | 2.60000000 1604 | 2.60000000 1605 | 2.60000000 1606 | 2.60000000 1607 | 2.60000000 1608 | 2.60000000 1609 | 2.60000000 1610 | 2.60000000 1611 | 2.60000000 1612 | 2.60000000 1613 | 2.60000000 1614 | 2.60000000 1615 | 2.60000000 1616 | 2.60000000 1617 | 2.60000000 1618 | 2.60000000 1619 | 2.60000000 1620 | 2.60000000 1621 | 2.60000000 1622 | 2.60000000 1623 | 2.60000000 1624 | 2.60000000 1625 | 2.60000000 1626 | 2.60000000 1627 | 2.60000000 1628 | 2.60000000 1629 | 2.60000000 1630 | 2.60000000 1631 | 2.60000000 1632 | 2.60000000 1633 | 2.60000000 1634 | 2.60000000 1635 | 2.60000000 1636 | 2.60000000 1637 | 2.60000000 1638 | 2.60000000 1639 | 2.60000000 1640 | 2.60000000 1641 | 2.60000000 1642 | 2.60000000 1643 | 2.60000000 1644 | 2.60000000 1645 | 2.60000000 1646 | 2.60000000 1647 | 2.60000000 1648 | 2.60000000 1649 | 2.60000000 1650 | 2.60000000 1651 | 2.60000000 1652 | 2.60000000 1653 | 2.60000000 1654 | 2.60000000 1655 | 2.60000000 1656 | 2.60000000 1657 | 2.60000000 1658 | 2.60000000 1659 | 2.60000000 1660 | 2.60000000 1661 | 2.60000000 1662 | 2.60000000 1663 | 2.60000000 1664 | 2.60000000 
1665 | 2.60000000 1666 | 2.60000000 1667 | 2.60000000 1668 | 2.60000000 1669 | 2.60000000 1670 | 2.60000000 1671 | 2.60000000 1672 | 2.60000000 1673 | 2.60000000 1674 | 2.60000000 1675 | 2.60000000 1676 | 2.60000000 1677 | 2.60000000 1678 | 2.60000000 1679 | 2.60000000 1680 | 2.60000000 1681 | 2.60000000 1682 | 2.60000000 1683 | 2.60000000 1684 | 2.60000000 1685 | 2.60000000 1686 | 2.60000000 1687 | 2.60000000 1688 | 2.60000000 1689 | 2.60000000 1690 | 2.60000000 1691 | 2.60000000 1692 | 2.60000000 1693 | 2.60000000 1694 | 2.60000000 1695 | 2.60000000 1696 | 2.60000000 1697 | 2.60000000 1698 | 2.60000000 1699 | 2.60000000 1700 | 2.60000000 1701 | 2.70000000 1702 | 2.70000000 1703 | 2.70000000 1704 | 2.70000000 1705 | 2.70000000 1706 | 2.70000000 1707 | 2.70000000 1708 | 2.70000000 1709 | 2.70000000 1710 | 2.70000000 1711 | 2.70000000 1712 | 2.70000000 1713 | 2.70000000 1714 | 2.70000000 1715 | 2.70000000 1716 | 2.70000000 1717 | 2.70000000 1718 | 2.70000000 1719 | 2.70000000 1720 | 2.70000000 1721 | 2.70000000 1722 | 2.70000000 1723 | 2.70000000 1724 | 2.70000000 1725 | 2.70000000 1726 | 2.70000000 1727 | 2.70000000 1728 | 2.70000000 1729 | 2.70000000 1730 | 2.70000000 1731 | 2.70000000 1732 | 2.70000000 1733 | 2.70000000 1734 | 2.70000000 1735 | 2.70000000 1736 | 2.70000000 1737 | 2.70000000 1738 | 2.70000000 1739 | 2.70000000 1740 | 2.70000000 1741 | 2.70000000 1742 | 2.70000000 1743 | 2.70000000 1744 | 2.70000000 1745 | 2.70000000 1746 | 2.70000000 1747 | 2.70000000 1748 | 2.70000000 1749 | 2.70000000 1750 | 2.70000000 1751 | 2.70000000 1752 | 2.70000000 1753 | 2.70000000 1754 | 2.70000000 1755 | 2.70000000 1756 | 2.70000000 1757 | 2.70000000 1758 | 2.70000000 1759 | 2.70000000 1760 | 2.70000000 1761 | 2.70000000 1762 | 2.70000000 1763 | 2.70000000 1764 | 2.70000000 1765 | 2.70000000 1766 | 2.70000000 1767 | 2.70000000 1768 | 2.70000000 1769 | 2.70000000 1770 | 2.70000000 1771 | 2.70000000 1772 | 2.70000000 1773 | 2.70000000 1774 | 2.70000000 1775 | 2.70000000 
1776 | 2.70000000 1777 | 2.70000000 1778 | 2.70000000 1779 | 2.70000000 1780 | 2.70000000 1781 | 2.70000000 1782 | 2.70000000 1783 | 2.70000000 1784 | 2.70000000 1785 | 2.70000000 1786 | 2.70000000 1787 | 2.70000000 1788 | 2.70000000 1789 | 2.70000000 1790 | 2.70000000 1791 | 2.70000000 1792 | 2.70000000 1793 | 2.70000000 1794 | 2.70000000 1795 | 2.70000000 1796 | 2.70000000 1797 | 2.70000000 1798 | 2.70000000 1799 | 2.70000000 1800 | 2.70000000 1801 | 2.80000000 1802 | 2.80000000 1803 | 2.80000000 1804 | 2.80000000 1805 | 2.80000000 1806 | 2.80000000 1807 | 2.80000000 1808 | 2.80000000 1809 | 2.80000000 1810 | 2.80000000 1811 | 2.80000000 1812 | 2.80000000 1813 | 2.80000000 1814 | 2.80000000 1815 | 2.80000000 1816 | 2.80000000 1817 | 2.80000000 1818 | 2.80000000 1819 | 2.80000000 1820 | 2.80000000 1821 | 2.80000000 1822 | 2.80000000 1823 | 2.80000000 1824 | 2.80000000 1825 | 2.80000000 1826 | 2.80000000 1827 | 2.80000000 1828 | 2.80000000 1829 | 2.80000000 1830 | 2.80000000 1831 | 2.80000000 1832 | 2.80000000 1833 | 2.80000000 1834 | 2.80000000 1835 | 2.80000000 1836 | 2.80000000 1837 | 2.80000000 1838 | 2.80000000 1839 | 2.80000000 1840 | 2.80000000 1841 | 2.80000000 1842 | 2.80000000 1843 | 2.80000000 1844 | 2.80000000 1845 | 2.80000000 1846 | 2.80000000 1847 | 2.80000000 1848 | 2.80000000 1849 | 2.80000000 1850 | 2.80000000 1851 | 2.80000000 1852 | 2.80000000 1853 | 2.80000000 1854 | 2.80000000 1855 | 2.80000000 1856 | 2.80000000 1857 | 2.80000000 1858 | 2.80000000 1859 | 2.80000000 1860 | 2.80000000 1861 | 2.80000000 1862 | 2.80000000 1863 | 2.80000000 1864 | 2.80000000 1865 | 2.80000000 1866 | 2.80000000 1867 | 2.80000000 1868 | 2.80000000 1869 | 2.80000000 1870 | 2.80000000 1871 | 2.80000000 1872 | 2.80000000 1873 | 2.80000000 1874 | 2.80000000 1875 | 2.80000000 1876 | 2.80000000 1877 | 2.80000000 1878 | 2.80000000 1879 | 2.80000000 1880 | 2.80000000 1881 | 2.80000000 1882 | 2.80000000 1883 | 2.80000000 1884 | 2.80000000 1885 | 2.80000000 1886 | 2.80000000 
1887 | 2.80000000 1888 | 2.80000000 1889 | 2.80000000 1890 | 2.80000000 1891 | 2.80000000 1892 | 2.80000000 1893 | 2.80000000 1894 | 2.80000000 1895 | 2.80000000 1896 | 2.80000000 1897 | 2.80000000 1898 | 2.80000000 1899 | 2.80000000 1900 | 2.80000000 1901 | 2.90000000 1902 | 2.90000000 1903 | 2.90000000 1904 | 2.90000000 1905 | 2.90000000 1906 | 2.90000000 1907 | 2.90000000 1908 | 2.90000000 1909 | 2.90000000 1910 | 2.90000000 1911 | 2.90000000 1912 | 2.90000000 1913 | 2.90000000 1914 | 2.90000000 1915 | 2.90000000 1916 | 2.90000000 1917 | 2.90000000 1918 | 2.90000000 1919 | 2.90000000 1920 | 2.90000000 1921 | 2.90000000 1922 | 2.90000000 1923 | 2.90000000 1924 | 2.90000000 1925 | 2.90000000 1926 | 2.90000000 1927 | 2.90000000 1928 | 2.90000000 1929 | 2.90000000 1930 | 2.90000000 1931 | 2.90000000 1932 | 2.90000000 1933 | 2.90000000 1934 | 2.90000000 1935 | 2.90000000 1936 | 2.90000000 1937 | 2.90000000 1938 | 2.90000000 1939 | 2.90000000 1940 | 2.90000000 1941 | 2.90000000 1942 | 2.90000000 1943 | 2.90000000 1944 | 2.90000000 1945 | 2.90000000 1946 | 2.90000000 1947 | 2.90000000 1948 | 2.90000000 1949 | 2.90000000 1950 | 2.90000000 1951 | 2.90000000 1952 | 2.90000000 1953 | 2.90000000 1954 | 2.90000000 1955 | 2.90000000 1956 | 2.90000000 1957 | 2.90000000 1958 | 2.90000000 1959 | 2.90000000 1960 | 2.90000000 1961 | 2.90000000 1962 | 2.90000000 1963 | 2.90000000 1964 | 2.90000000 1965 | 2.90000000 1966 | 2.90000000 1967 | 2.90000000 1968 | 2.90000000 1969 | 2.90000000 1970 | 2.90000000 1971 | 2.90000000 1972 | 2.90000000 1973 | 2.90000000 1974 | 2.90000000 1975 | 2.90000000 1976 | 2.90000000 1977 | 2.90000000 1978 | 2.90000000 1979 | 2.90000000 1980 | 2.90000000 1981 | 2.90000000 1982 | 2.90000000 1983 | 2.90000000 1984 | 2.90000000 1985 | 2.90000000 1986 | 2.90000000 1987 | 2.90000000 1988 | 2.90000000 1989 | 2.90000000 1990 | 2.90000000 1991 | 2.90000000 1992 | 2.90000000 1993 | 2.90000000 1994 | 2.90000000 1995 | 2.90000000 1996 | 2.90000000 1997 | 2.90000000 
1998 | 2.90000000 1999 | 2.90000000 2000 | 2.90000000 2001 | -------------------------------------------------------------------------------- /Tutorial3/data_tutorial3/temperatures_Ising_L40.txt: -------------------------------------------------------------------------------- 1 | 1.00000000 2 | 1.00000000 3 | 1.00000000 4 | 1.00000000 5 | 1.00000000 6 | 1.00000000 7 | 1.00000000 8 | 1.00000000 9 | 1.00000000 10 | 1.00000000 11 | 1.00000000 12 | 1.00000000 13 | 1.00000000 14 | 1.00000000 15 | 1.00000000 16 | 1.00000000 17 | 1.00000000 18 | 1.00000000 19 | 1.00000000 20 | 1.00000000 21 | 1.00000000 22 | 1.00000000 23 | 1.00000000 24 | 1.00000000 25 | 1.00000000 26 | 1.00000000 27 | 1.00000000 28 | 1.00000000 29 | 1.00000000 30 | 1.00000000 31 | 1.00000000 32 | 1.00000000 33 | 1.00000000 34 | 1.00000000 35 | 1.00000000 36 | 1.00000000 37 | 1.00000000 38 | 1.00000000 39 | 1.00000000 40 | 1.00000000 41 | 1.00000000 42 | 1.00000000 43 | 1.00000000 44 | 1.00000000 45 | 1.00000000 46 | 1.00000000 47 | 1.00000000 48 | 1.00000000 49 | 1.00000000 50 | 1.00000000 51 | 1.00000000 52 | 1.00000000 53 | 1.00000000 54 | 1.00000000 55 | 1.00000000 56 | 1.00000000 57 | 1.00000000 58 | 1.00000000 59 | 1.00000000 60 | 1.00000000 61 | 1.00000000 62 | 1.00000000 63 | 1.00000000 64 | 1.00000000 65 | 1.00000000 66 | 1.00000000 67 | 1.00000000 68 | 1.00000000 69 | 1.00000000 70 | 1.00000000 71 | 1.00000000 72 | 1.00000000 73 | 1.00000000 74 | 1.00000000 75 | 1.00000000 76 | 1.00000000 77 | 1.00000000 78 | 1.00000000 79 | 1.00000000 80 | 1.00000000 81 | 1.00000000 82 | 1.00000000 83 | 1.00000000 84 | 1.00000000 85 | 1.00000000 86 | 1.00000000 87 | 1.00000000 88 | 1.00000000 89 | 1.00000000 90 | 1.00000000 91 | 1.00000000 92 | 1.00000000 93 | 1.00000000 94 | 1.00000000 95 | 1.00000000 96 | 1.00000000 97 | 1.00000000 98 | 1.00000000 99 | 1.00000000 100 | 1.00000000 101 | 1.10000000 102 | 1.10000000 103 | 1.10000000 104 | 1.10000000 105 | 1.10000000 106 | 1.10000000 107 | 1.10000000 108 | 
1.10000000 109 | 1.10000000 110 | 1.10000000 111 | 1.10000000 112 | 1.10000000 113 | 1.10000000 114 | 1.10000000 115 | 1.10000000 116 | 1.10000000 117 | 1.10000000 118 | 1.10000000 119 | 1.10000000 120 | 1.10000000 121 | 1.10000000 122 | 1.10000000 123 | 1.10000000 124 | 1.10000000 125 | 1.10000000 126 | 1.10000000 127 | 1.10000000 128 | 1.10000000 129 | 1.10000000 130 | 1.10000000 131 | 1.10000000 132 | 1.10000000 133 | 1.10000000 134 | 1.10000000 135 | 1.10000000 136 | 1.10000000 137 | 1.10000000 138 | 1.10000000 139 | 1.10000000 140 | 1.10000000 141 | 1.10000000 142 | 1.10000000 143 | 1.10000000 144 | 1.10000000 145 | 1.10000000 146 | 1.10000000 147 | 1.10000000 148 | 1.10000000 149 | 1.10000000 150 | 1.10000000 151 | 1.10000000 152 | 1.10000000 153 | 1.10000000 154 | 1.10000000 155 | 1.10000000 156 | 1.10000000 157 | 1.10000000 158 | 1.10000000 159 | 1.10000000 160 | 1.10000000 161 | 1.10000000 162 | 1.10000000 163 | 1.10000000 164 | 1.10000000 165 | 1.10000000 166 | 1.10000000 167 | 1.10000000 168 | 1.10000000 169 | 1.10000000 170 | 1.10000000 171 | 1.10000000 172 | 1.10000000 173 | 1.10000000 174 | 1.10000000 175 | 1.10000000 176 | 1.10000000 177 | 1.10000000 178 | 1.10000000 179 | 1.10000000 180 | 1.10000000 181 | 1.10000000 182 | 1.10000000 183 | 1.10000000 184 | 1.10000000 185 | 1.10000000 186 | 1.10000000 187 | 1.10000000 188 | 1.10000000 189 | 1.10000000 190 | 1.10000000 191 | 1.10000000 192 | 1.10000000 193 | 1.10000000 194 | 1.10000000 195 | 1.10000000 196 | 1.10000000 197 | 1.10000000 198 | 1.10000000 199 | 1.10000000 200 | 1.10000000 201 | 1.20000000 202 | 1.20000000 203 | 1.20000000 204 | 1.20000000 205 | 1.20000000 206 | 1.20000000 207 | 1.20000000 208 | 1.20000000 209 | 1.20000000 210 | 1.20000000 211 | 1.20000000 212 | 1.20000000 213 | 1.20000000 214 | 1.20000000 215 | 1.20000000 216 | 1.20000000 217 | 1.20000000 218 | 1.20000000 219 | 1.20000000 220 | 1.20000000 221 | 1.20000000 222 | 1.20000000 223 | 1.20000000 224 | 1.20000000 225 | 1.20000000 
226 | 1.20000000 227 | 1.20000000 228 | 1.20000000 229 | 1.20000000 230 | 1.20000000 231 | 1.20000000 232 | 1.20000000 233 | 1.20000000 234 | 1.20000000 235 | 1.20000000 236 | 1.20000000 237 | 1.20000000 238 | 1.20000000 239 | 1.20000000 240 | 1.20000000 241 | 1.20000000 242 | 1.20000000 243 | 1.20000000 244 | 1.20000000 245 | 1.20000000 246 | 1.20000000 247 | 1.20000000 248 | 1.20000000 249 | 1.20000000 250 | 1.20000000 251 | 1.20000000 252 | 1.20000000 253 | 1.20000000 254 | 1.20000000 255 | 1.20000000 256 | 1.20000000 257 | 1.20000000 258 | 1.20000000 259 | 1.20000000 260 | 1.20000000 261 | 1.20000000 262 | 1.20000000 263 | 1.20000000 264 | 1.20000000 265 | 1.20000000 266 | 1.20000000 267 | 1.20000000 268 | 1.20000000 269 | 1.20000000 270 | 1.20000000 271 | 1.20000000 272 | 1.20000000 273 | 1.20000000 274 | 1.20000000 275 | 1.20000000 276 | 1.20000000 277 | 1.20000000 278 | 1.20000000 279 | 1.20000000 280 | 1.20000000 281 | 1.20000000 282 | 1.20000000 283 | 1.20000000 284 | 1.20000000 285 | 1.20000000 286 | 1.20000000 287 | 1.20000000 288 | 1.20000000 289 | 1.20000000 290 | 1.20000000 291 | 1.20000000 292 | 1.20000000 293 | 1.20000000 294 | 1.20000000 295 | 1.20000000 296 | 1.20000000 297 | 1.20000000 298 | 1.20000000 299 | 1.20000000 300 | 1.20000000 301 | 1.30000000 302 | 1.30000000 303 | 1.30000000 304 | 1.30000000 305 | 1.30000000 306 | 1.30000000 307 | 1.30000000 308 | 1.30000000 309 | 1.30000000 310 | 1.30000000 311 | 1.30000000 312 | 1.30000000 313 | 1.30000000 314 | 1.30000000 315 | 1.30000000 316 | 1.30000000 317 | 1.30000000 318 | 1.30000000 319 | 1.30000000 320 | 1.30000000 321 | 1.30000000 322 | 1.30000000 323 | 1.30000000 324 | 1.30000000 325 | 1.30000000 326 | 1.30000000 327 | 1.30000000 328 | 1.30000000 329 | 1.30000000 330 | 1.30000000 331 | 1.30000000 332 | 1.30000000 333 | 1.30000000 334 | 1.30000000 335 | 1.30000000 336 | 1.30000000 337 | 1.30000000 338 | 1.30000000 339 | 1.30000000 340 | 1.30000000 341 | 1.30000000 342 | 1.30000000 343 | 
1.30000000 344 | 1.30000000 345 | 1.30000000 346 | 1.30000000 347 | 1.30000000 348 | 1.30000000 349 | 1.30000000 350 | 1.30000000 351 | 1.30000000 352 | 1.30000000 353 | 1.30000000 354 | 1.30000000 355 | 1.30000000 356 | 1.30000000 357 | 1.30000000 358 | 1.30000000 359 | 1.30000000 360 | 1.30000000 361 | 1.30000000 362 | 1.30000000 363 | 1.30000000 364 | 1.30000000 365 | 1.30000000 366 | 1.30000000 367 | 1.30000000 368 | 1.30000000 369 | 1.30000000 370 | 1.30000000 371 | 1.30000000 372 | 1.30000000 373 | 1.30000000 374 | 1.30000000 375 | 1.30000000 376 | 1.30000000 377 | 1.30000000 378 | 1.30000000 379 | 1.30000000 380 | 1.30000000 381 | 1.30000000 382 | 1.30000000 383 | 1.30000000 384 | 1.30000000 385 | 1.30000000 386 | 1.30000000 387 | 1.30000000 388 | 1.30000000 389 | 1.30000000 390 | 1.30000000 391 | 1.30000000 392 | 1.30000000 393 | 1.30000000 394 | 1.30000000 395 | 1.30000000 396 | 1.30000000 397 | 1.30000000 398 | 1.30000000 399 | 1.30000000 400 | 1.30000000 401 | 1.40000000 402 | 1.40000000 403 | 1.40000000 404 | 1.40000000 405 | 1.40000000 406 | 1.40000000 407 | 1.40000000 408 | 1.40000000 409 | 1.40000000 410 | 1.40000000 411 | 1.40000000 412 | 1.40000000 413 | 1.40000000 414 | 1.40000000 415 | 1.40000000 416 | 1.40000000 417 | 1.40000000 418 | 1.40000000 419 | 1.40000000 420 | 1.40000000 421 | 1.40000000 422 | 1.40000000 423 | 1.40000000 424 | 1.40000000 425 | 1.40000000 426 | 1.40000000 427 | 1.40000000 428 | 1.40000000 429 | 1.40000000 430 | 1.40000000 431 | 1.40000000 432 | 1.40000000 433 | 1.40000000 434 | 1.40000000 435 | 1.40000000 436 | 1.40000000 437 | 1.40000000 438 | 1.40000000 439 | 1.40000000 440 | 1.40000000 441 | 1.40000000 442 | 1.40000000 443 | 1.40000000 444 | 1.40000000 445 | 1.40000000 446 | 1.40000000 447 | 1.40000000 448 | 1.40000000 449 | 1.40000000 450 | 1.40000000 451 | 1.40000000 452 | 1.40000000 453 | 1.40000000 454 | 1.40000000 455 | 1.40000000 456 | 1.40000000 457 | 1.40000000 458 | 1.40000000 459 | 1.40000000 460 | 1.40000000 
461 | 1.40000000 462 | 1.40000000 463 | 1.40000000 464 | 1.40000000 465 | 1.40000000 466 | 1.40000000 467 | 1.40000000 468 | 1.40000000 469 | 1.40000000 470 | 1.40000000 471 | 1.40000000 472 | 1.40000000 473 | 1.40000000 474 | 1.40000000 475 | 1.40000000 476 | 1.40000000 477 | 1.40000000 478 | 1.40000000 479 | 1.40000000 480 | 1.40000000 481 | 1.40000000 482 | 1.40000000 483 | 1.40000000 484 | 1.40000000 485 | 1.40000000 486 | 1.40000000 487 | 1.40000000 488 | 1.40000000 489 | 1.40000000 490 | 1.40000000 491 | 1.40000000 492 | 1.40000000 493 | 1.40000000 494 | 1.40000000 495 | 1.40000000 496 | 1.40000000 497 | 1.40000000 498 | 1.40000000 499 | 1.40000000 500 | 1.40000000 501 | 1.50000000 502 | 1.50000000 503 | 1.50000000 504 | 1.50000000 505 | 1.50000000 506 | 1.50000000 507 | 1.50000000 508 | 1.50000000 509 | 1.50000000 510 | 1.50000000 511 | 1.50000000 512 | 1.50000000 513 | 1.50000000 514 | 1.50000000 515 | 1.50000000 516 | 1.50000000 517 | 1.50000000 518 | 1.50000000 519 | 1.50000000 520 | 1.50000000 521 | 1.50000000 522 | 1.50000000 523 | 1.50000000 524 | 1.50000000 525 | 1.50000000 526 | 1.50000000 527 | 1.50000000 528 | 1.50000000 529 | 1.50000000 530 | 1.50000000 531 | 1.50000000 532 | 1.50000000 533 | 1.50000000 534 | 1.50000000 535 | 1.50000000 536 | 1.50000000 537 | 1.50000000 538 | 1.50000000 539 | 1.50000000 540 | 1.50000000 541 | 1.50000000 542 | 1.50000000 543 | 1.50000000 544 | 1.50000000 545 | 1.50000000 546 | 1.50000000 547 | 1.50000000 548 | 1.50000000 549 | 1.50000000 550 | 1.50000000 551 | 1.50000000 552 | 1.50000000 553 | 1.50000000 554 | 1.50000000 555 | 1.50000000 556 | 1.50000000 557 | 1.50000000 558 | 1.50000000 559 | 1.50000000 560 | 1.50000000 561 | 1.50000000 562 | 1.50000000 563 | 1.50000000 564 | 1.50000000 565 | 1.50000000 566 | 1.50000000 567 | 1.50000000 568 | 1.50000000 569 | 1.50000000 570 | 1.50000000 571 | 1.50000000 572 | 1.50000000 573 | 1.50000000 574 | 1.50000000 575 | 1.50000000 576 | 1.50000000 577 | 1.50000000 578 | 
1.50000000 579 | 1.50000000 580 | 1.50000000 581 | 1.50000000 582 | 1.50000000 583 | 1.50000000 584 | 1.50000000 585 | 1.50000000 586 | 1.50000000 587 | 1.50000000 588 | 1.50000000 589 | 1.50000000 590 | 1.50000000 591 | 1.50000000 592 | 1.50000000 593 | 1.50000000 594 | 1.50000000 595 | 1.50000000 596 | 1.50000000 597 | 1.50000000 598 | 1.50000000 599 | 1.50000000 600 | 1.50000000 601 | 1.60000000 602 | 1.60000000 603 | 1.60000000 604 | 1.60000000 605 | 1.60000000 606 | 1.60000000 607 | 1.60000000 608 | 1.60000000 609 | 1.60000000 610 | 1.60000000 611 | 1.60000000 612 | 1.60000000 613 | 1.60000000 614 | 1.60000000 615 | 1.60000000 616 | 1.60000000 617 | 1.60000000 618 | 1.60000000 619 | 1.60000000 620 | 1.60000000 621 | 1.60000000 622 | 1.60000000 623 | 1.60000000 624 | 1.60000000 625 | 1.60000000 626 | 1.60000000 627 | 1.60000000 628 | 1.60000000 629 | 1.60000000 630 | 1.60000000 631 | 1.60000000 632 | 1.60000000 633 | 1.60000000 634 | 1.60000000 635 | 1.60000000 636 | 1.60000000 637 | 1.60000000 638 | 1.60000000 639 | 1.60000000 640 | 1.60000000 641 | 1.60000000 642 | 1.60000000 643 | 1.60000000 644 | 1.60000000 645 | 1.60000000 646 | 1.60000000 647 | 1.60000000 648 | 1.60000000 649 | 1.60000000 650 | 1.60000000 651 | 1.60000000 652 | 1.60000000 653 | 1.60000000 654 | 1.60000000 655 | 1.60000000 656 | 1.60000000 657 | 1.60000000 658 | 1.60000000 659 | 1.60000000 660 | 1.60000000 661 | 1.60000000 662 | 1.60000000 663 | 1.60000000 664 | 1.60000000 665 | 1.60000000 666 | 1.60000000 667 | 1.60000000 668 | 1.60000000 669 | 1.60000000 670 | 1.60000000 671 | 1.60000000 672 | 1.60000000 673 | 1.60000000 674 | 1.60000000 675 | 1.60000000 676 | 1.60000000 677 | 1.60000000 678 | 1.60000000 679 | 1.60000000 680 | 1.60000000 681 | 1.60000000 682 | 1.60000000 683 | 1.60000000 684 | 1.60000000 685 | 1.60000000 686 | 1.60000000 687 | 1.60000000 688 | 1.60000000 689 | 1.60000000 690 | 1.60000000 691 | 1.60000000 692 | 1.60000000 693 | 1.60000000 694 | 1.60000000 695 | 1.60000000 
696 | 1.60000000 697 | 1.60000000 698 | 1.60000000 699 | 1.60000000 700 | 1.60000000 701 | 1.70000000 702 | 1.70000000 703 | 1.70000000 704 | 1.70000000 705 | 1.70000000 706 | 1.70000000 707 | 1.70000000 708 | 1.70000000 709 | 1.70000000 710 | 1.70000000 711 | 1.70000000 712 | 1.70000000 713 | 1.70000000 714 | 1.70000000 715 | 1.70000000 716 | 1.70000000 717 | 1.70000000 718 | 1.70000000 719 | 1.70000000 720 | 1.70000000 721 | 1.70000000 722 | 1.70000000 723 | 1.70000000 724 | 1.70000000 725 | 1.70000000 726 | 1.70000000 727 | 1.70000000 728 | 1.70000000 729 | 1.70000000 730 | 1.70000000 731 | 1.70000000 732 | 1.70000000 733 | 1.70000000 734 | 1.70000000 735 | 1.70000000 736 | 1.70000000 737 | 1.70000000 738 | 1.70000000 739 | 1.70000000 740 | 1.70000000 741 | 1.70000000 742 | 1.70000000 743 | 1.70000000 744 | 1.70000000 745 | 1.70000000 746 | 1.70000000 747 | 1.70000000 748 | 1.70000000 749 | 1.70000000 750 | 1.70000000 751 | 1.70000000 752 | 1.70000000 753 | 1.70000000 754 | 1.70000000 755 | 1.70000000 756 | 1.70000000 757 | 1.70000000 758 | 1.70000000 759 | 1.70000000 760 | 1.70000000 761 | 1.70000000 762 | 1.70000000 763 | 1.70000000 764 | 1.70000000 765 | 1.70000000 766 | 1.70000000 767 | 1.70000000 768 | 1.70000000 769 | 1.70000000 770 | 1.70000000 771 | 1.70000000 772 | 1.70000000 773 | 1.70000000 774 | 1.70000000 775 | 1.70000000 776 | 1.70000000 777 | 1.70000000 778 | 1.70000000 779 | 1.70000000 780 | 1.70000000 781 | 1.70000000 782 | 1.70000000 783 | 1.70000000 784 | 1.70000000 785 | 1.70000000 786 | 1.70000000 787 | 1.70000000 788 | 1.70000000 789 | 1.70000000 790 | 1.70000000 791 | 1.70000000 792 | 1.70000000 793 | 1.70000000 794 | 1.70000000 795 | 1.70000000 796 | 1.70000000 797 | 1.70000000 798 | 1.70000000 799 | 1.70000000 800 | 1.70000000 801 | 1.80000000 802 | 1.80000000 803 | 1.80000000 804 | 1.80000000 805 | 1.80000000 806 | 1.80000000 807 | 1.80000000 808 | 1.80000000 809 | 1.80000000 810 | 1.80000000 811 | 1.80000000 812 | 1.80000000 813 | 
1.80000000 814 | 1.80000000 815 | 1.80000000 816 | 1.80000000 817 | 1.80000000 818 | 1.80000000 819 | 1.80000000 820 | 1.80000000 821 | 1.80000000 822 | 1.80000000 823 | 1.80000000 824 | 1.80000000 825 | 1.80000000 826 | 1.80000000 827 | 1.80000000 828 | 1.80000000 829 | 1.80000000 830 | 1.80000000 831 | 1.80000000 832 | 1.80000000 833 | 1.80000000 834 | 1.80000000 835 | 1.80000000 836 | 1.80000000 837 | 1.80000000 838 | 1.80000000 839 | 1.80000000 840 | 1.80000000 841 | 1.80000000 842 | 1.80000000 843 | 1.80000000 844 | 1.80000000 845 | 1.80000000 846 | 1.80000000 847 | 1.80000000 848 | 1.80000000 849 | 1.80000000 850 | 1.80000000 851 | 1.80000000 852 | 1.80000000 853 | 1.80000000 854 | 1.80000000 855 | 1.80000000 856 | 1.80000000 857 | 1.80000000 858 | 1.80000000 859 | 1.80000000 860 | 1.80000000 861 | 1.80000000 862 | 1.80000000 863 | 1.80000000 864 | 1.80000000 865 | 1.80000000 866 | 1.80000000 867 | 1.80000000 868 | 1.80000000 869 | 1.80000000 870 | 1.80000000 871 | 1.80000000 872 | 1.80000000 873 | 1.80000000 874 | 1.80000000 875 | 1.80000000 876 | 1.80000000 877 | 1.80000000 878 | 1.80000000 879 | 1.80000000 880 | 1.80000000 881 | 1.80000000 882 | 1.80000000 883 | 1.80000000 884 | 1.80000000 885 | 1.80000000 886 | 1.80000000 887 | 1.80000000 888 | 1.80000000 889 | 1.80000000 890 | 1.80000000 891 | 1.80000000 892 | 1.80000000 893 | 1.80000000 894 | 1.80000000 895 | 1.80000000 896 | 1.80000000 897 | 1.80000000 898 | 1.80000000 899 | 1.80000000 900 | 1.80000000 901 | 1.90000000 902 | 1.90000000 903 | 1.90000000 904 | 1.90000000 905 | 1.90000000 906 | 1.90000000 907 | 1.90000000 908 | 1.90000000 909 | 1.90000000 910 | 1.90000000 911 | 1.90000000 912 | 1.90000000 913 | 1.90000000 914 | 1.90000000 915 | 1.90000000 916 | 1.90000000 917 | 1.90000000 918 | 1.90000000 919 | 1.90000000 920 | 1.90000000 921 | 1.90000000 922 | 1.90000000 923 | 1.90000000 924 | 1.90000000 925 | 1.90000000 926 | 1.90000000 927 | 1.90000000 928 | 1.90000000 929 | 1.90000000 930 | 1.90000000 
931 | 1.90000000 932 | 1.90000000 933 | 1.90000000 934 | 1.90000000 935 | 1.90000000 936 | 1.90000000 937 | 1.90000000 938 | 1.90000000 939 | 1.90000000 940 | 1.90000000 941 | 1.90000000 942 | 1.90000000 943 | 1.90000000 944 | 1.90000000 945 | 1.90000000 946 | 1.90000000 947 | 1.90000000 948 | 1.90000000 949 | 1.90000000 950 | 1.90000000 951 | 1.90000000 952 | 1.90000000 953 | 1.90000000 954 | 1.90000000 955 | 1.90000000 956 | 1.90000000 957 | 1.90000000 958 | 1.90000000 959 | 1.90000000 960 | 1.90000000 961 | 1.90000000 962 | 1.90000000 963 | 1.90000000 964 | 1.90000000 965 | 1.90000000 966 | 1.90000000 967 | 1.90000000 968 | 1.90000000 969 | 1.90000000 970 | 1.90000000 971 | 1.90000000 972 | 1.90000000 973 | 1.90000000 974 | 1.90000000 975 | 1.90000000 976 | 1.90000000 977 | 1.90000000 978 | 1.90000000 979 | 1.90000000 980 | 1.90000000 981 | 1.90000000 982 | 1.90000000 983 | 1.90000000 984 | 1.90000000 985 | 1.90000000 986 | 1.90000000 987 | 1.90000000 988 | 1.90000000 989 | 1.90000000 990 | 1.90000000 991 | 1.90000000 992 | 1.90000000 993 | 1.90000000 994 | 1.90000000 995 | 1.90000000 996 | 1.90000000 997 | 1.90000000 998 | 1.90000000 999 | 1.90000000 1000 | 1.90000000 1001 | 2.00000000 1002 | 2.00000000 1003 | 2.00000000 1004 | 2.00000000 1005 | 2.00000000 1006 | 2.00000000 1007 | 2.00000000 1008 | 2.00000000 1009 | 2.00000000 1010 | 2.00000000 1011 | 2.00000000 1012 | 2.00000000 1013 | 2.00000000 1014 | 2.00000000 1015 | 2.00000000 1016 | 2.00000000 1017 | 2.00000000 1018 | 2.00000000 1019 | 2.00000000 1020 | 2.00000000 1021 | 2.00000000 1022 | 2.00000000 1023 | 2.00000000 1024 | 2.00000000 1025 | 2.00000000 1026 | 2.00000000 1027 | 2.00000000 1028 | 2.00000000 1029 | 2.00000000 1030 | 2.00000000 1031 | 2.00000000 1032 | 2.00000000 1033 | 2.00000000 1034 | 2.00000000 1035 | 2.00000000 1036 | 2.00000000 1037 | 2.00000000 1038 | 2.00000000 1039 | 2.00000000 1040 | 2.00000000 1041 | 2.00000000 1042 | 2.00000000 1043 | 2.00000000 1044 | 2.00000000 1045 | 
2.00000000 1046 | 2.00000000 1047 | 2.00000000 1048 | 2.00000000 1049 | 2.00000000 1050 | 2.00000000 1051 | 2.00000000 1052 | 2.00000000 1053 | 2.00000000 1054 | 2.00000000 1055 | 2.00000000 1056 | 2.00000000 1057 | 2.00000000 1058 | 2.00000000 1059 | 2.00000000 1060 | 2.00000000 1061 | 2.00000000 1062 | 2.00000000 1063 | 2.00000000 1064 | 2.00000000 1065 | 2.00000000 1066 | 2.00000000 1067 | 2.00000000 1068 | 2.00000000 1069 | 2.00000000 1070 | 2.00000000 1071 | 2.00000000 1072 | 2.00000000 1073 | 2.00000000 1074 | 2.00000000 1075 | 2.00000000 1076 | 2.00000000 1077 | 2.00000000 1078 | 2.00000000 1079 | 2.00000000 1080 | 2.00000000 1081 | 2.00000000 1082 | 2.00000000 1083 | 2.00000000 1084 | 2.00000000 1085 | 2.00000000 1086 | 2.00000000 1087 | 2.00000000 1088 | 2.00000000 1089 | 2.00000000 1090 | 2.00000000 1091 | 2.00000000 1092 | 2.00000000 1093 | 2.00000000 1094 | 2.00000000 1095 | 2.00000000 1096 | 2.00000000 1097 | 2.00000000 1098 | 2.00000000 1099 | 2.00000000 1100 | 2.00000000 1101 | 2.10000000 1102 | 2.10000000 1103 | 2.10000000 1104 | 2.10000000 1105 | 2.10000000 1106 | 2.10000000 1107 | 2.10000000 1108 | 2.10000000 1109 | 2.10000000 1110 | 2.10000000 1111 | 2.10000000 1112 | 2.10000000 1113 | 2.10000000 1114 | 2.10000000 1115 | 2.10000000 1116 | 2.10000000 1117 | 2.10000000 1118 | 2.10000000 1119 | 2.10000000 1120 | 2.10000000 1121 | 2.10000000 1122 | 2.10000000 1123 | 2.10000000 1124 | 2.10000000 1125 | 2.10000000 1126 | 2.10000000 1127 | 2.10000000 1128 | 2.10000000 1129 | 2.10000000 1130 | 2.10000000 1131 | 2.10000000 1132 | 2.10000000 1133 | 2.10000000 1134 | 2.10000000 1135 | 2.10000000 1136 | 2.10000000 1137 | 2.10000000 1138 | 2.10000000 1139 | 2.10000000 1140 | 2.10000000 1141 | 2.10000000 1142 | 2.10000000 1143 | 2.10000000 1144 | 2.10000000 1145 | 2.10000000 1146 | 2.10000000 1147 | 2.10000000 1148 | 2.10000000 1149 | 2.10000000 1150 | 2.10000000 1151 | 2.10000000 1152 | 2.10000000 1153 | 2.10000000 1154 | 2.10000000 1155 | 2.10000000 1156 | 
2.10000000 1157 | 2.10000000 1158 | 2.10000000 1159 | 2.10000000 1160 | 2.10000000 1161 | 2.10000000 1162 | 2.10000000 1163 | 2.10000000 1164 | 2.10000000 1165 | 2.10000000 1166 | 2.10000000 1167 | 2.10000000 1168 | 2.10000000 1169 | 2.10000000 1170 | 2.10000000 1171 | 2.10000000 1172 | 2.10000000 1173 | 2.10000000 1174 | 2.10000000 1175 | 2.10000000 1176 | 2.10000000 1177 | 2.10000000 1178 | 2.10000000 1179 | 2.10000000 1180 | 2.10000000 1181 | 2.10000000 1182 | 2.10000000 1183 | 2.10000000 1184 | 2.10000000 1185 | 2.10000000 1186 | 2.10000000 1187 | 2.10000000 1188 | 2.10000000 1189 | 2.10000000 1190 | 2.10000000 1191 | 2.10000000 1192 | 2.10000000 1193 | 2.10000000 1194 | 2.10000000 1195 | 2.10000000 1196 | 2.10000000 1197 | 2.10000000 1198 | 2.10000000 1199 | 2.10000000 1200 | 2.10000000 1201 | 2.20000000 1202 | 2.20000000 1203 | 2.20000000 1204 | 2.20000000 1205 | 2.20000000 1206 | 2.20000000 1207 | 2.20000000 1208 | 2.20000000 1209 | 2.20000000 1210 | 2.20000000 1211 | 2.20000000 1212 | 2.20000000 1213 | 2.20000000 1214 | 2.20000000 1215 | 2.20000000 1216 | 2.20000000 1217 | 2.20000000 1218 | 2.20000000 1219 | 2.20000000 1220 | 2.20000000 1221 | 2.20000000 1222 | 2.20000000 1223 | 2.20000000 1224 | 2.20000000 1225 | 2.20000000 1226 | 2.20000000 1227 | 2.20000000 1228 | 2.20000000 1229 | 2.20000000 1230 | 2.20000000 1231 | 2.20000000 1232 | 2.20000000 1233 | 2.20000000 1234 | 2.20000000 1235 | 2.20000000 1236 | 2.20000000 1237 | 2.20000000 1238 | 2.20000000 1239 | 2.20000000 1240 | 2.20000000 1241 | 2.20000000 1242 | 2.20000000 1243 | 2.20000000 1244 | 2.20000000 1245 | 2.20000000 1246 | 2.20000000 1247 | 2.20000000 1248 | 2.20000000 1249 | 2.20000000 1250 | 2.20000000 1251 | 2.20000000 1252 | 2.20000000 1253 | 2.20000000 1254 | 2.20000000 1255 | 2.20000000 1256 | 2.20000000 1257 | 2.20000000 1258 | 2.20000000 1259 | 2.20000000 1260 | 2.20000000 1261 | 2.20000000 1262 | 2.20000000 1263 | 2.20000000 1264 | 2.20000000 1265 | 2.20000000 1266 | 2.20000000 1267 | 
2.20000000 1268 | 2.20000000 1269 | 2.20000000 1270 | 2.20000000 1271 | 2.20000000 1272 | 2.20000000 1273 | 2.20000000 1274 | 2.20000000 1275 | 2.20000000 1276 | 2.20000000 1277 | 2.20000000 1278 | 2.20000000 1279 | 2.20000000 1280 | 2.20000000 1281 | 2.20000000 1282 | 2.20000000 1283 | 2.20000000 1284 | 2.20000000 1285 | 2.20000000 1286 | 2.20000000 1287 | 2.20000000 1288 | 2.20000000 1289 | 2.20000000 1290 | 2.20000000 1291 | 2.20000000 1292 | 2.20000000 1293 | 2.20000000 1294 | 2.20000000 1295 | 2.20000000 1296 | 2.20000000 1297 | 2.20000000 1298 | 2.20000000 1299 | 2.20000000 1300 | 2.20000000 1301 | 2.30000000 1302 | 2.30000000 1303 | 2.30000000 1304 | 2.30000000 1305 | 2.30000000 1306 | 2.30000000 1307 | 2.30000000 1308 | 2.30000000 1309 | 2.30000000 1310 | 2.30000000 1311 | 2.30000000 1312 | 2.30000000 1313 | 2.30000000 1314 | 2.30000000 1315 | 2.30000000 1316 | 2.30000000 1317 | 2.30000000 1318 | 2.30000000 1319 | 2.30000000 1320 | 2.30000000 1321 | 2.30000000 1322 | 2.30000000 1323 | 2.30000000 1324 | 2.30000000 1325 | 2.30000000 1326 | 2.30000000 1327 | 2.30000000 1328 | 2.30000000 1329 | 2.30000000 1330 | 2.30000000 1331 | 2.30000000 1332 | 2.30000000 1333 | 2.30000000 1334 | 2.30000000 1335 | 2.30000000 1336 | 2.30000000 1337 | 2.30000000 1338 | 2.30000000 1339 | 2.30000000 1340 | 2.30000000 1341 | 2.30000000 1342 | 2.30000000 1343 | 2.30000000 1344 | 2.30000000 1345 | 2.30000000 1346 | 2.30000000 1347 | 2.30000000 1348 | 2.30000000 1349 | 2.30000000 1350 | 2.30000000 1351 | 2.30000000 1352 | 2.30000000 1353 | 2.30000000 1354 | 2.30000000 1355 | 2.30000000 1356 | 2.30000000 1357 | 2.30000000 1358 | 2.30000000 1359 | 2.30000000 1360 | 2.30000000 1361 | 2.30000000 1362 | 2.30000000 1363 | 2.30000000 1364 | 2.30000000 1365 | 2.30000000 1366 | 2.30000000 1367 | 2.30000000 1368 | 2.30000000 1369 | 2.30000000 1370 | 2.30000000 1371 | 2.30000000 1372 | 2.30000000 1373 | 2.30000000 1374 | 2.30000000 1375 | 2.30000000 1376 | 2.30000000 1377 | 2.30000000 1378 | 
2.30000000 1379 | 2.30000000 1380 | 2.30000000 1381 | 2.30000000 1382 | 2.30000000 1383 | 2.30000000 1384 | 2.30000000 1385 | 2.30000000 1386 | 2.30000000 1387 | 2.30000000 1388 | 2.30000000 1389 | 2.30000000 1390 | 2.30000000 1391 | 2.30000000 1392 | 2.30000000 1393 | 2.30000000 1394 | 2.30000000 1395 | 2.30000000 1396 | 2.30000000 1397 | 2.30000000 1398 | 2.30000000 1399 | 2.30000000 1400 | 2.30000000 1401 | 2.40000000 1402 | 2.40000000 1403 | 2.40000000 1404 | 2.40000000 1405 | 2.40000000 1406 | 2.40000000 1407 | 2.40000000 1408 | 2.40000000 1409 | 2.40000000 1410 | 2.40000000 1411 | 2.40000000 1412 | 2.40000000 1413 | 2.40000000 1414 | 2.40000000 1415 | 2.40000000 1416 | 2.40000000 1417 | 2.40000000 1418 | 2.40000000 1419 | 2.40000000 1420 | 2.40000000 1421 | 2.40000000 1422 | 2.40000000 1423 | 2.40000000 1424 | 2.40000000 1425 | 2.40000000 1426 | 2.40000000 1427 | 2.40000000 1428 | 2.40000000 1429 | 2.40000000 1430 | 2.40000000 1431 | 2.40000000 1432 | 2.40000000 1433 | 2.40000000 1434 | 2.40000000 1435 | 2.40000000 1436 | 2.40000000 1437 | 2.40000000 1438 | 2.40000000 1439 | 2.40000000 1440 | 2.40000000 1441 | 2.40000000 1442 | 2.40000000 1443 | 2.40000000 1444 | 2.40000000 1445 | 2.40000000 1446 | 2.40000000 1447 | 2.40000000 1448 | 2.40000000 1449 | 2.40000000 1450 | 2.40000000 1451 | 2.40000000 1452 | 2.40000000 1453 | 2.40000000 1454 | 2.40000000 1455 | 2.40000000 1456 | 2.40000000 1457 | 2.40000000 1458 | 2.40000000 1459 | 2.40000000 1460 | 2.40000000 1461 | 2.40000000 1462 | 2.40000000 1463 | 2.40000000 1464 | 2.40000000 1465 | 2.40000000 1466 | 2.40000000 1467 | 2.40000000 1468 | 2.40000000 1469 | 2.40000000 1470 | 2.40000000 1471 | 2.40000000 1472 | 2.40000000 1473 | 2.40000000 1474 | 2.40000000 1475 | 2.40000000 1476 | 2.40000000 1477 | 2.40000000 1478 | 2.40000000 1479 | 2.40000000 1480 | 2.40000000 1481 | 2.40000000 1482 | 2.40000000 1483 | 2.40000000 1484 | 2.40000000 1485 | 2.40000000 1486 | 2.40000000 1487 | 2.40000000 1488 | 2.40000000 1489 | 
2.40000000 1490 | 2.40000000 1491 | 2.40000000 1492 | 2.40000000 1493 | 2.40000000 1494 | 2.40000000 1495 | 2.40000000 1496 | 2.40000000 1497 | 2.40000000 1498 | 2.40000000 1499 | 2.40000000 1500 | 2.40000000 1501 | 2.50000000 1502 | 2.50000000 1503 | 2.50000000 1504 | 2.50000000 1505 | 2.50000000 1506 | 2.50000000 1507 | 2.50000000 1508 | 2.50000000 1509 | 2.50000000 1510 | 2.50000000 1511 | 2.50000000 1512 | 2.50000000 1513 | 2.50000000 1514 | 2.50000000 1515 | 2.50000000 1516 | 2.50000000 1517 | 2.50000000 1518 | 2.50000000 1519 | 2.50000000 1520 | 2.50000000 1521 | 2.50000000 1522 | 2.50000000 1523 | 2.50000000 1524 | 2.50000000 1525 | 2.50000000 1526 | 2.50000000 1527 | 2.50000000 1528 | 2.50000000 1529 | 2.50000000 1530 | 2.50000000 1531 | 2.50000000 1532 | 2.50000000 1533 | 2.50000000 1534 | 2.50000000 1535 | 2.50000000 1536 | 2.50000000 1537 | 2.50000000 1538 | 2.50000000 1539 | 2.50000000 1540 | 2.50000000 1541 | 2.50000000 1542 | 2.50000000 1543 | 2.50000000 1544 | 2.50000000 1545 | 2.50000000 1546 | 2.50000000 1547 | 2.50000000 1548 | 2.50000000 1549 | 2.50000000 1550 | 2.50000000 1551 | 2.50000000 1552 | 2.50000000 1553 | 2.50000000 1554 | 2.50000000 1555 | 2.50000000 1556 | 2.50000000 1557 | 2.50000000 1558 | 2.50000000 1559 | 2.50000000 1560 | 2.50000000 1561 | 2.50000000 1562 | 2.50000000 1563 | 2.50000000 1564 | 2.50000000 1565 | 2.50000000 1566 | 2.50000000 1567 | 2.50000000 1568 | 2.50000000 1569 | 2.50000000 1570 | 2.50000000 1571 | 2.50000000 1572 | 2.50000000 1573 | 2.50000000 1574 | 2.50000000 1575 | 2.50000000 1576 | 2.50000000 1577 | 2.50000000 1578 | 2.50000000 1579 | 2.50000000 1580 | 2.50000000 1581 | 2.50000000 1582 | 2.50000000 1583 | 2.50000000 1584 | 2.50000000 1585 | 2.50000000 1586 | 2.50000000 1587 | 2.50000000 1588 | 2.50000000 1589 | 2.50000000 1590 | 2.50000000 1591 | 2.50000000 1592 | 2.50000000 1593 | 2.50000000 1594 | 2.50000000 1595 | 2.50000000 1596 | 2.50000000 1597 | 2.50000000 1598 | 2.50000000 1599 | 2.50000000 1600 | 
2.50000000 1601 | 2.60000000 1602 | 2.60000000 1603 | 2.60000000 1604 | 2.60000000 1605 | 2.60000000 1606 | 2.60000000 1607 | 2.60000000 1608 | 2.60000000 1609 | 2.60000000 1610 | 2.60000000 1611 | 2.60000000 1612 | 2.60000000 1613 | 2.60000000 1614 | 2.60000000 1615 | 2.60000000 1616 | 2.60000000 1617 | 2.60000000 1618 | 2.60000000 1619 | 2.60000000 1620 | 2.60000000 1621 | 2.60000000 1622 | 2.60000000 1623 | 2.60000000 1624 | 2.60000000 1625 | 2.60000000 1626 | 2.60000000 1627 | 2.60000000 1628 | 2.60000000 1629 | 2.60000000 1630 | 2.60000000 1631 | 2.60000000 1632 | 2.60000000 1633 | 2.60000000 1634 | 2.60000000 1635 | 2.60000000 1636 | 2.60000000 1637 | 2.60000000 1638 | 2.60000000 1639 | 2.60000000 1640 | 2.60000000 1641 | 2.60000000 1642 | 2.60000000 1643 | 2.60000000 1644 | 2.60000000 1645 | 2.60000000 1646 | 2.60000000 1647 | 2.60000000 1648 | 2.60000000 1649 | 2.60000000 1650 | 2.60000000 1651 | 2.60000000 1652 | 2.60000000 1653 | 2.60000000 1654 | 2.60000000 1655 | 2.60000000 1656 | 2.60000000 1657 | 2.60000000 1658 | 2.60000000 1659 | 2.60000000 1660 | 2.60000000 1661 | 2.60000000 1662 | 2.60000000 1663 | 2.60000000 1664 | 2.60000000 1665 | 2.60000000 1666 | 2.60000000 1667 | 2.60000000 1668 | 2.60000000 1669 | 2.60000000 1670 | 2.60000000 1671 | 2.60000000 1672 | 2.60000000 1673 | 2.60000000 1674 | 2.60000000 1675 | 2.60000000 1676 | 2.60000000 1677 | 2.60000000 1678 | 2.60000000 1679 | 2.60000000 1680 | 2.60000000 1681 | 2.60000000 1682 | 2.60000000 1683 | 2.60000000 1684 | 2.60000000 1685 | 2.60000000 1686 | 2.60000000 1687 | 2.60000000 1688 | 2.60000000 1689 | 2.60000000 1690 | 2.60000000 1691 | 2.60000000 1692 | 2.60000000 1693 | 2.60000000 1694 | 2.60000000 1695 | 2.60000000 1696 | 2.60000000 1697 | 2.60000000 1698 | 2.60000000 1699 | 2.60000000 1700 | 2.60000000 1701 | 2.70000000 1702 | 2.70000000 1703 | 2.70000000 1704 | 2.70000000 1705 | 2.70000000 1706 | 2.70000000 1707 | 2.70000000 1708 | 2.70000000 1709 | 2.70000000 1710 | 2.70000000 1711 | 
2.70000000 1712 | 2.70000000 1713 | 2.70000000 1714 | 2.70000000 1715 | 2.70000000 1716 | 2.70000000 1717 | 2.70000000 1718 | 2.70000000 1719 | 2.70000000 1720 | 2.70000000 1721 | 2.70000000 1722 | 2.70000000 1723 | 2.70000000 1724 | 2.70000000 1725 | 2.70000000 1726 | 2.70000000 1727 | 2.70000000 1728 | 2.70000000 1729 | 2.70000000 1730 | 2.70000000 1731 | 2.70000000 1732 | 2.70000000 1733 | 2.70000000 1734 | 2.70000000 1735 | 2.70000000 1736 | 2.70000000 1737 | 2.70000000 1738 | 2.70000000 1739 | 2.70000000 1740 | 2.70000000 1741 | 2.70000000 1742 | 2.70000000 1743 | 2.70000000 1744 | 2.70000000 1745 | 2.70000000 1746 | 2.70000000 1747 | 2.70000000 1748 | 2.70000000 1749 | 2.70000000 1750 | 2.70000000 1751 | 2.70000000 1752 | 2.70000000 1753 | 2.70000000 1754 | 2.70000000 1755 | 2.70000000 1756 | 2.70000000 1757 | 2.70000000 1758 | 2.70000000 1759 | 2.70000000 1760 | 2.70000000 1761 | 2.70000000 1762 | 2.70000000 1763 | 2.70000000 1764 | 2.70000000 1765 | 2.70000000 1766 | 2.70000000 1767 | 2.70000000 1768 | 2.70000000 1769 | 2.70000000 1770 | 2.70000000 1771 | 2.70000000 1772 | 2.70000000 1773 | 2.70000000 1774 | 2.70000000 1775 | 2.70000000 1776 | 2.70000000 1777 | 2.70000000 1778 | 2.70000000 1779 | 2.70000000 1780 | 2.70000000 1781 | 2.70000000 1782 | 2.70000000 1783 | 2.70000000 1784 | 2.70000000 1785 | 2.70000000 1786 | 2.70000000 1787 | 2.70000000 1788 | 2.70000000 1789 | 2.70000000 1790 | 2.70000000 1791 | 2.70000000 1792 | 2.70000000 1793 | 2.70000000 1794 | 2.70000000 1795 | 2.70000000 1796 | 2.70000000 1797 | 2.70000000 1798 | 2.70000000 1799 | 2.70000000 1800 | 2.70000000 1801 | 2.80000000 1802 | 2.80000000 1803 | 2.80000000 1804 | 2.80000000 1805 | 2.80000000 1806 | 2.80000000 1807 | 2.80000000 1808 | 2.80000000 1809 | 2.80000000 1810 | 2.80000000 1811 | 2.80000000 1812 | 2.80000000 1813 | 2.80000000 1814 | 2.80000000 1815 | 2.80000000 1816 | 2.80000000 1817 | 2.80000000 1818 | 2.80000000 1819 | 2.80000000 1820 | 2.80000000 1821 | 2.80000000 1822 | 
2.80000000 1823 | 2.80000000 1824 | 2.80000000 1825 | 2.80000000 1826 | 2.80000000 1827 | 2.80000000 1828 | 2.80000000 1829 | 2.80000000 1830 | 2.80000000 1831 | 2.80000000 1832 | 2.80000000 1833 | 2.80000000 1834 | 2.80000000 1835 | 2.80000000 1836 | 2.80000000 1837 | 2.80000000 1838 | 2.80000000 1839 | 2.80000000 1840 | 2.80000000 1841 | 2.80000000 1842 | 2.80000000 1843 | 2.80000000 1844 | 2.80000000 1845 | 2.80000000 1846 | 2.80000000 1847 | 2.80000000 1848 | 2.80000000 1849 | 2.80000000 1850 | 2.80000000 1851 | 2.80000000 1852 | 2.80000000 1853 | 2.80000000 1854 | 2.80000000 1855 | 2.80000000 1856 | 2.80000000 1857 | 2.80000000 1858 | 2.80000000 1859 | 2.80000000 1860 | 2.80000000 1861 | 2.80000000 1862 | 2.80000000 1863 | 2.80000000 1864 | 2.80000000 1865 | 2.80000000 1866 | 2.80000000 1867 | 2.80000000 1868 | 2.80000000 1869 | 2.80000000 1870 | 2.80000000 1871 | 2.80000000 1872 | 2.80000000 1873 | 2.80000000 1874 | 2.80000000 1875 | 2.80000000 1876 | 2.80000000 1877 | 2.80000000 1878 | 2.80000000 1879 | 2.80000000 1880 | 2.80000000 1881 | 2.80000000 1882 | 2.80000000 1883 | 2.80000000 1884 | 2.80000000 1885 | 2.80000000 1886 | 2.80000000 1887 | 2.80000000 1888 | 2.80000000 1889 | 2.80000000 1890 | 2.80000000 1891 | 2.80000000 1892 | 2.80000000 1893 | 2.80000000 1894 | 2.80000000 1895 | 2.80000000 1896 | 2.80000000 1897 | 2.80000000 1898 | 2.80000000 1899 | 2.80000000 1900 | 2.80000000 1901 | 2.90000000 1902 | 2.90000000 1903 | 2.90000000 1904 | 2.90000000 1905 | 2.90000000 1906 | 2.90000000 1907 | 2.90000000 1908 | 2.90000000 1909 | 2.90000000 1910 | 2.90000000 1911 | 2.90000000 1912 | 2.90000000 1913 | 2.90000000 1914 | 2.90000000 1915 | 2.90000000 1916 | 2.90000000 1917 | 2.90000000 1918 | 2.90000000 1919 | 2.90000000 1920 | 2.90000000 1921 | 2.90000000 1922 | 2.90000000 1923 | 2.90000000 1924 | 2.90000000 1925 | 2.90000000 1926 | 2.90000000 1927 | 2.90000000 1928 | 2.90000000 1929 | 2.90000000 1930 | 2.90000000 1931 | 2.90000000 1932 | 2.90000000 1933 | 
2.90000000 1934 | 2.90000000 1935 | 2.90000000 1936 | 2.90000000 1937 | 2.90000000 1938 | 2.90000000 1939 | 2.90000000 1940 | 2.90000000 1941 | 2.90000000 1942 | 2.90000000 1943 | 2.90000000 1944 | 2.90000000 1945 | 2.90000000 1946 | 2.90000000 1947 | 2.90000000 1948 | 2.90000000 1949 | 2.90000000 1950 | 2.90000000 1951 | 2.90000000 1952 | 2.90000000 1953 | 2.90000000 1954 | 2.90000000 1955 | 2.90000000 1956 | 2.90000000 1957 | 2.90000000 1958 | 2.90000000 1959 | 2.90000000 1960 | 2.90000000 1961 | 2.90000000 1962 | 2.90000000 1963 | 2.90000000 1964 | 2.90000000 1965 | 2.90000000 1966 | 2.90000000 1967 | 2.90000000 1968 | 2.90000000 1969 | 2.90000000 1970 | 2.90000000 1971 | 2.90000000 1972 | 2.90000000 1973 | 2.90000000 1974 | 2.90000000 1975 | 2.90000000 1976 | 2.90000000 1977 | 2.90000000 1978 | 2.90000000 1979 | 2.90000000 1980 | 2.90000000 1981 | 2.90000000 1982 | 2.90000000 1983 | 2.90000000 1984 | 2.90000000 1985 | 2.90000000 1986 | 2.90000000 1987 | 2.90000000 1988 | 2.90000000 1989 | 2.90000000 1990 | 2.90000000 1991 | 2.90000000 1992 | 2.90000000 1993 | 2.90000000 1994 | 2.90000000 1995 | 2.90000000 1996 | 2.90000000 1997 | 2.90000000 1998 | 2.90000000 1999 | 2.90000000 2000 | 2.90000000 2001 | -------------------------------------------------------------------------------- /Tutorial3/tutorial3_pca_solution.py: -------------------------------------------------------------------------------- 1 | ########## Machine Learning for Quantum Matter and Technology ###################### 2 | ### Juan Carrasquilla, Estelle Inack, Giacomo Torlai, Roger Melko 3 | ### with code from Lauren Hayward Sierens and Juan Carrasquilla, with the lines performing 4 | ### PCA taken from Laurens van der Maaten's implementation of t-SNE in Python 5 | ### 6 | ### Tutorial 3 (solutions): This code performs principal component analysis (PCA) on spin configurations 7 | ### corresponding to the Ising model. 
8 | ##################################################################################### 9 | 10 | 11 | 12 | import numpy as np 13 | import matplotlib.pyplot as plt 14 | 15 | #Specify font sizes for plots: 16 | plt.rcParams['axes.labelsize'] = 16 17 | plt.rcParams['legend.fontsize'] = 16 18 | plt.rcParams['xtick.labelsize'] = 10 19 | plt.rcParams['ytick.labelsize'] = 10 20 | plt.rcParams['font.size'] = 18 21 | 22 | modelName = "Ising" 23 | 24 | #Parameters: 25 | num_components = 2 26 | 27 | ### Loop over all lattice sizes: ### 28 | for L in [20,40,80]: #Note: L=80 requires several minutes 29 | print("L=%d"%L) 30 | 31 | ### Read in the data from the files: ### 32 | X = np.loadtxt("data_tutorial3/spinConfigs_%s_L%d.txt" %(modelName,L), dtype='int8') 33 | 34 | labels = np.loadtxt("data_tutorial3/temperatures_%s_L%d.txt" %(modelName,L), dtype='float') 35 | 36 | 37 | (N_configs, N_spins) = X.shape 38 | 39 | ### Perform the PCA: ### 40 | X_cent = X - np.tile(np.mean(X, 0), (N_configs, 1)) 41 | (lamb, P) = np.linalg.eig(np.dot(X_cent.T, X_cent)/(N_configs-1.0)) 42 | 43 | ### Sort according to decreasing order of the eigenvalues: ### 44 | indices_sorted = lamb.argsort()[::-1] #The [::-1] is to get the reverse order (largest eigenvalues first) 45 | lamb = lamb[indices_sorted] 46 | P = P[:,indices_sorted] 47 | 48 | ### Get the principal components (columns of the matrix X_prime): ### 49 | X_prime = np.dot(X_cent, P[:,0:num_components]) 50 | 51 | ### PLOT FIGURE FOR PART C (explained variance ratios): ### 52 | plt.figure(1) 53 | ratios = lamb/np.sum(lamb) 54 | plt.semilogy(np.arange(N_spins), ratios, 'o-', label="L=%d"%L) 55 | 56 | ### PLOT FIGURE FOR PARTS A and B (first two principal components): ### 57 | plt.figure() 58 | plt.axes([0.17, 0.13, 0.81, 0.78]) #specify axes (for figure margins) in the format [xmin, ymin, xwidth, ywidth] 59 | #plt.scatter(X_prime[:, 0], X_prime[:, 1], s=40) #PART A 60 | sc = plt.scatter(X_prime[:, 0], X_prime[:, 1], c=labels, s=40, 
cmap=plt.cm.coolwarm) #PART B 61 | cb = plt.colorbar(sc, cmap=plt.cm.coolwarm) #PART B 62 | plt.title("L=%d"%L) 63 | plt.xlabel("x'_1") 64 | plt.ylabel("x'_2") 65 | plt.savefig("xPrime1_xPrime2_%s_L%d.pdf" %(modelName,L)) 66 | 67 | ### PLOT FIGURE FOR PART D (elements of p1): ### 68 | plt.figure() 69 | plt.axes([0.19, 0.13, 0.79, 0.78]) #specify axes (for figure margins) in the format [xmin, ymin, xwidth, ywidth] 70 | plt.plot(np.arange(N_spins),np.abs(P[:,0])) 71 | plt.title("L=%d"%L) 72 | plt.xlabel("Component index") 73 | plt.ylabel("Absolute value of components of p1") 74 | plt.savefig("p1_%s_L%d.pdf" %(modelName,L)) 75 | 76 | plt.figure(1) 77 | plt.xlim([0,10]) 78 | plt.ylim([10**(-3),1]) 79 | plt.xlabel("Component index") 80 | plt.ylabel("Explained variance ratio") 81 | plt.legend() 82 | plt.savefig("ratios_%s.pdf" %modelName) 83 | 84 | plt.show() 85 | -------------------------------------------------------------------------------- /Tutorial4/Data_ising2d/MC_results/MC_ising2d_L4_Observables.txt: -------------------------------------------------------------------------------- 1 | # T E M C S 2 | 1.000 -1.99724 0.99930 0.02259 0.00151 3 | 1.254 -1.98406 0.99578 0.08724 0.00858 4 | 1.508 -1.94974 0.98586 0.20247 0.03069 5 | 1.762 -1.87467 0.96178 0.39236 0.08921 6 | 2.016 -1.74354 0.91456 0.62078 0.20477 7 | 2.269 -1.57072 0.84603 0.77786 0.34252 8 | 2.524 -1.35896 0.75573 0.80419 0.45468 9 | 2.778 -1.16626 0.67001 0.71673 0.49039 10 | 3.032 -0.99826 0.59273 0.58913 0.47348 11 | 3.286 -0.86512 0.53082 0.46013 0.42525 12 | 3.540 -0.76156 0.48218 0.36194 0.37434 13 | -------------------------------------------------------------------------------- /Tutorial4/RBM.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PIQuIL/QuLAPENO/f646b87512bb8d879b6de93e004d33bbff698fe9/Tutorial4/RBM.pdf -------------------------------------------------------------------------------- /Tutorial4/Tutorial4.pdf: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/PIQuIL/QuLAPENO/f646b87512bb8d879b6de93e004d33bbff698fe9/Tutorial4/Tutorial4.pdf -------------------------------------------------------------------------------- /Tutorial4/Tutorial4.tex: -------------------------------------------------------------------------------- 1 | \documentclass[letterpaper]{scrartcl} 2 | \usepackage[top=0.8in, bottom=1in, left=0.9in, right=0.9in]{geometry} 3 | 4 | \makeatletter 5 | \DeclareOldFontCommand{\tt}{\normalfont\ttfamily}{\mathtt} 6 | \makeatother 7 | 8 | \usepackage{url} 9 | \usepackage{scalefnt} 10 | \usepackage{bm} 11 | \usepackage{cancel} 12 | 13 | %-------------------------------------------------------------- 14 | % We need this package, part of the KOMA class, for the custom 15 | % headings. 16 | %-------------------------------------------------------------- 17 | \usepackage{scrpage2} 18 | 19 | 20 | %-------------------------------------------------------------- 21 | % One of many packages you can use if you want to include 22 | % graphics. 23 | %-------------------------------------------------------------- 24 | \usepackage{graphicx} 25 | 26 | %-------------------------------------------------------------- 27 | % The AMS packages are useful but not required. They offer a 28 | % number of nice fonts, environments for formatting multiline 29 | % equations, etc. 30 | %-------------------------------------------------------------- 31 | \usepackage{amsmath} 32 | \usepackage{amsfonts} 33 | \usepackage{amssymb} 34 | \usepackage{amsthm} 35 | 36 | %-------------------------------------------------------------- 37 | % Basic way to set-up the page margins. 
38 | %-------------------------------------------------------------- 39 | %\addtolength{\oddsidemargin}{-.2in} 40 | %\addtolength{\evensidemargin}{-.2in} 41 | %\addtolength{\textwidth}{0.45in} 42 | %\addtolength{\topmargin}{-.175in} 43 | %\addtolength{\textheight}{0.75in} 44 | 45 | %-------------------------------------------------------------- 46 | % Comment out the following to add indents and remove space between paragraphs. 47 | %-------------------------------------------------------------- 48 | \usepackage{parskip} 49 | 50 | %-------------------------------------------------------------- 51 | % This package is used to define custom colours. 52 | %-------------------------------------------------------------- 53 | \usepackage[usenames,dvipsnames,svgnames,table]{xcolor} 54 | 55 | %-------------------------------------------------------------- 56 | % Package for adding in solutions: 57 | %-------------------------------------------------------------- 58 | \usepackage[nosoln,regf,nolf]{optional} 59 | %\usepackage[soln,regf]{optional} 60 | 61 | %\newcommand{\soln}[1]{\opt{soln}{\\[4pt] \textcolor{JungleGreen}{\textbf{Solution:}} #1}} 62 | \newcommand{\soln}[1]{\opt{soln}{\textcolor{JungleGreen}{\usekomafont{descriptionlabel}{Solution:}} #1}} 63 | 64 | \newcommand{\hint}[1]{{\usekomafont{descriptionlabel}{Hint:}} #1} 65 | \newcommand{\note}[1]{{\usekomafont{descriptionlabel}{Note:}} #1} 66 | \newcommand{\reference}[1]{{\usekomafont{descriptionlabel}{Reference:}} #1} 67 | 68 | %-------------------------------------------------------------- 69 | % A few colours for hyperlinks. 
70 | %-------------------------------------------------------------- 71 | \definecolor{plum}{rgb}{0.36078, 0.20784, 0.4} 72 | \definecolor{chameleon}{rgb}{0.30588, 0.60392, 0.023529} 73 | \definecolor{cornflower}{rgb}{0.12549, 0.29020, 0.52941} 74 | \definecolor{scarlet}{rgb}{0.8, 0, 0} 75 | \definecolor{brick}{rgb}{0.64314, 0, 0} 76 | 77 | %-------------------------------------------------------------- 78 | % A command for typesetting and linking an email address. 79 | %-------------------------------------------------------------- 80 | \newcommand{\email}[1]{\href{mailto:#1}{\tt \textcolor{cornflower}{#1}}} 81 | \newcommand{\web}[1]{\href{#1}{\tt \textcolor{cornflower}{#1}}} 82 | 83 | %-------------------------------------------------------------- 84 | % The following declaration includes the hyperref package and 85 | % assigns metadata. If you compile with pdflatex, this data 86 | % will be automatically included in the pdf file. 87 | %-------------------------------------------------------------- 88 | %\usepackage[ 89 | % pdftitle={QFT Tutorial 1},% 90 | % pdfauthor={PSI Tutors},% 91 | % pdfsubject={QFT Tutorial 1},% 92 | % pdfkeywords={PSI}, 93 | % colorlinks=true, 94 | % linkcolor=cornflower, 95 | % citecolor=scarlet, 96 | % urlcolor=chameleon% 97 | %]{hyperref} 98 | 99 | %\setcounter{secnumdepth}{2} % section number depth 100 | %\setcounter{tocdepth}{2} % depth of TOC 101 | 102 | %-------------------------------------------------------------- 103 | % Specify the font used in captions. 104 | %-------------------------------------------------------------- 105 | \setkomafont{captionlabel}{\usekomafont{descriptionlabel}} 106 | 107 | %-------------------------------------------------------------- 108 | % This is where we define the custom title. The image that is 109 | % placed on the left-hand-side of the title, PILogo.pdf in 110 | % this case, should be in the same directory as this file. 
Note 111 | % that you can always use hyperlinks for the Title, Semester, 112 | % and Author fields, below, in case you want to link to a seminar 113 | % web page or a lecturer's email address. 114 | %-------------------------------------------------------------- 115 | 116 | \titlehead{% 117 | \vspace*{-1cm} 118 | \begin{minipage}[b]{4.0cm} 119 | \includegraphics*[height=1.3cm]{Uniandes_logo.jpeg}% 120 | \end{minipage} 121 | \hfill 122 | \begin{minipage}[b]{12cm} 123 | \begin{flushright} 124 | \usekomafont{descriptionlabel} 125 | \large Machine Learning for Quantum Matter and Technology \\ 126 | \normalsize \normalfont 127 | J. Carrasquilla, E. Inack, G. Torlai, R. Melko, L. Hayward Sierens 128 | \end{flushright} 129 | \end{minipage} 130 | \\[-3mm] 131 | \hrule 132 | \vspace{-3mm} 133 | } 134 | % ----------- 135 | 136 | %-------------------------------------------------------------- 137 | % Other useful physic-related packages 138 | %-------------------------------------------------------------- 139 | \usepackage{braket} 140 | % Use \Bra{}, \Ket{} or \Braket{x | \psi} for Dirac notation 141 | 142 | %-------------------------------------------------------------- 143 | % Nice numbering for question parts. 
144 | %-------------------------------------------------------------- 145 | \newcommand{\ba}{\begin{eqnarray}} 146 | \newcommand{\ea}{\end{eqnarray}} 147 | 148 | \newcommand{\ssk}{\smallskip} 149 | \newcommand{\msk}{\medskip} 150 | 151 | \newcommand{\nin}{\noindent} 152 | 153 | \newcommand{\beq}{\begin{equation}} 154 | \newcommand{\eeq}{\end{equation}} 155 | 156 | \newcommand{\beqs}{\begin{equation*}} 157 | \newcommand{\eeqs}{\end{equation*}} 158 | 159 | \renewcommand{\vec}[1]{{\mathbf{#1}}} 160 | \renewcommand{\labelenumi}{\alph{enumi})} 161 | \renewcommand{\labelenumiii}{\roman{enumiii})} 162 | 163 | %%%%%%%%%%%%% 164 | 165 | \def\be{\begin{eqnarray}} 166 | \def\ee{\end{eqnarray}} 167 | \newcommand{\nn}{\nonumber} 168 | \newcommand\para{\paragraph{}} 169 | \newcommand{\ft}[2]{{\textstyle\frac{#1}{#2}}} 170 | \newcommand{\eqn}[1]{(\ref{#1})} 171 | \newcommand{\pl}[1]{\frac{\partial {\cal L}}{\partial{#1}}} 172 | \newcommand{\ppp}[2]{\frac{\partial {#1}}{\partial {#2}}} 173 | \newcommand{\ph}[1]{\frac{\partial {\cal H}}{\partial{#1}}} 174 | \newcommand{\leftp}[3]{\left.\ppp{#1}{#2}\right|_{#3}} 175 | %\newcommand{\Vec}[2]{\left(\begin{array}{c} {#1} \\ {#2}\end{array}\right)} 176 | \newcommand\vx{\vec{x}} 177 | \newcommand\vy{\vec{y}} 178 | \newcommand\vp{\vec{p}} 179 | \newcommand\vq{\vec{q}} 180 | \newcommand\vk{\vec{k}} 181 | \newcommand\avp{a^{\ }_{\vp}} 182 | \newcommand\advp{a^\dagger_{\vp}} 183 | \newcommand\ad{a^\dagger} 184 | 185 | \newcommand\balpha{\mbox{\boldmath $\alpha$}} 186 | \newcommand\bbeta{\mbox{\boldmath $\beta$}} 187 | \newcommand\bgamma{\mbox{\boldmath $\gamma$}} 188 | \newcommand\bomega{\mbox{\boldmath $\omega$}} 189 | \newcommand\blambda{\mbox{\boldmath $\lambda$}} 190 | \newcommand\bmu{\mbox{\boldmath $\mu$}} 191 | \newcommand\bphi{\mbox{\boldmath $\phi$}} 192 | \newcommand\bzeta{\mbox{\boldmath $\zeta$}} 193 | \newcommand\bsigma{\mbox{\boldmath $\sigma$}} 194 | \newcommand\bepsilon{\mbox{\boldmath $\epsilon$}} 195 | 
\newcommand\btau{\mbox{\boldmath $\tau$}} 196 | \newcommand\beeta{\mbox{\boldmath $\eta$}} 197 | \newcommand\btheta{\mbox{\boldmath $\theta$}} 198 | 199 | \def\norm#1{:\!\!#1\!\!:} 200 | 201 | \def\part{\partial} 202 | 203 | \def\dbox{\hbox{{$\sqcup$}\llap{$\sqcap$}}} 204 | 205 | \def\sla#1{\hbox{{$#1$}\llap{$/$}}} 206 | \def\Dslash{\,\,{\raise.15ex\hbox{/}\mkern-13mu D}} 207 | \def\Dbarslash{\,\,{\raise.15ex\hbox{/}\mkern-12mu {\bar D}}} 208 | \def\delslash{\,\,{\raise.15ex\hbox{/}\mkern-10mu \partial}} 209 | \def\delbarslash{\,\,{\raise.15ex\hbox{/}\mkern-9mu {\bar\partial}}} 210 | \def\pslash{\,\,{\raise.15ex\hbox{/}\mkern-11mu p}} 211 | \def\qslash{\,\,{\raise.15ex\hbox{/}\mkern-9mu q}} 212 | \def\kslash{\,\,{\raise.15ex\hbox{/}\mkern-11mu k}} 213 | \def\eslash{\,\,{\raise.15ex\hbox{/}\mkern-9mu \epsilon}} 214 | \def\calDslash{\,\,{\rais.15ex\hbox{/}\mkern-12mu {\cal D}}} 215 | \newcommand{\slsh}[1]{\,\,{\raise.15ex\hbox{/}\mkern-12mu {#1}}} 216 | 217 | 218 | \newcommand\Bprime{B${}^\prime$} 219 | %\newcommand{\sign}{{\rm sign}} 220 | 221 | \newcommand\bx{{\bf x}} 222 | \newcommand\br{{\bf r}} 223 | \newcommand\bF{{\bf F}} 224 | \newcommand\bp{{\bf p}} 225 | \newcommand\bL{{\bf L}} 226 | \newcommand\bR{{\bf R}} 227 | \newcommand\bP{{\bf P}} 228 | \newcommand\bE{{\bf E}} 229 | \newcommand\bB{{\bf B}} 230 | \newcommand\bA{{\bf A}} 231 | \newcommand\bee{{\bf e}} 232 | \newcommand\bte{\tilde{\bf e}} 233 | \def\ket#1{\left| #1 \right\rangle} 234 | \def\bra#1{\left\langle #1 \right|} 235 | \def\vev#1{\left\langle #1 \right\rangle} 236 | 237 | \newcommand\lmn[2]{\Lambda^{#1}_{\ #2}} 238 | \newcommand\mup[2]{\eta^{#1 #2}} 239 | \newcommand\mdown[2]{\eta_{#1 #2}} 240 | \newcommand\deld[2]{\delta^{#1}_{#2}} 241 | \newcommand\df{\Delta_F} 242 | \newcommand\cL{{\cal L}} 243 | %\def\theequation{\thesection.\arabic{equation} 244 | 245 | \newcounter{solneqn} 246 | %\newcommand{\mytag}{\refstepcounter{equation}\tag{\roman{equationn}}} 247 | 
\newcommand{\mytag}{\refstepcounter{solneqn}\tag{S.\arabic{solneqn}}} 248 | %%%%%%%%% 249 | 250 | 251 | \DeclareMathOperator{\Tr}{Tr} 252 | \DeclareMathOperator{\sign}{sign} 253 | 254 | %\renewcommand{\ttdefault}{pcr} 255 | 256 | \usepackage{enumitem} 257 | 258 | \begin{document} 259 | 260 | %\scalefont{1.35} 261 | 262 | \vspace{-3cm} 263 | 264 | \opt{nosoln}{\title{Tutorial 4: \\Learning classical thermodynamic observables with \\restricted Boltzmann machines \vspace*{-6mm}}} 265 | \opt{soln}{\title{Tutorial 4 \textcolor{JungleGreen}{Solutions}: \\Learning classical thermodynamic observables with \\restricted Boltzmann machines \vspace*{-6mm}}} 266 | 267 | \date{May 30, 2019} 268 | 269 | \maketitle 270 | 271 | The objective of this tutorial is to train a restricted Boltzmann machine (RBM) on spin configurations 272 | and then sample new spin configurations from the trained RBM. 273 | You will compute thermodynamic observables from these samples and compare to known results (from Monte Carlo simulation). 274 | You will use and modify the Python programs 275 | \texttt{rbm.py}, \texttt{tutorial4{\textunderscore}train{\textunderscore}ising2d.py} and 276 | \texttt{tutorial4{\textunderscore}sample{\textunderscore}ising2d.py} %and \texttt{tutorial5{\textunderscore}plot{\textunderscore}results.py}. 277 | 278 | As seen in the lectures, the architecture of a RBM can be represented as 279 | \begin{center} 280 | \includegraphics[width=4cm]{RBM.pdf} 281 | \end{center} 282 | where $\mathbf{v} = \left(v_1, v_2, \ldots v_m\right)^T$ consists of $m$ visible units with $v_i \in \{0,1\}$ and $\mathbf{h} = \left(h_1, h_2, \ldots h_n\right)^T$ consists of $n$ hidden units with $h_j \in \{0,1\}$. 283 | The weights of the RBM are denoted as $W_{ij}$. 284 | The biases are denoted as $b_i$ for the visible units and $c_j$ for the hidden units. 285 | We use $\lambda$ to denote all model parameters such that $\lambda = \{ W, b, c \}$. 
286 | Within the given python programs, the number of visible units $m$ is stored in the variable \texttt{num{\textunderscore}visible} 287 | and the number of hidden units $n$ is stored in the variable \texttt{num{\textunderscore}hidden}. 288 | 289 | The probability distribution associated with the RBM is given by 290 | \begin{equation*} 291 | p_\lambda(\mathbf{v}, \mathbf{h}) = \frac{1}{Z_\lambda} e^{-E_\lambda (\mathbf{v}, \mathbf{h})}, 292 | \end{equation*} 293 | where 294 | \begin{equation*} 295 | E_\lambda (\mathbf{v}, \mathbf{h}) = -\sum_{i=1}^m b_i \, v_i -\sum_{j=1}^n c_j\, h_j - \sum_{ij} W_{ij} \, v_i \, h_j , 296 | \end{equation*} 297 | and 298 | \begin{equation*} 299 | Z_\lambda = \sum_{\mathbf{v}, \mathbf{h}} e^{-E_\lambda (\mathbf{v}, \mathbf{h})}. 300 | \end{equation*} 301 | 302 | You will train an RBM to learn the distribution corresponding to spin configurations of 303 | the two-dimensional classical Ising model on an $L=4$ lattice at a given temperature. 304 | The number of visible units will be equal to the number of spins $N$ such that $m = N = L^2 = 16$. 305 | You have been given data corresponding to 11 different temperatures (1.0, 1.254, 1.508, 1.762, 2.016, 2.269, 2.524, 2.778, 3.032, 3.286 and 3.54) in the folder \texttt{MC{\textunderscore}results}. 306 | We know that these configurations are generated (using Monte Carlo simulation) according to the Boltzmann distribution 307 | \beq 308 | q(\mathbf{v},T) = \frac{1}{Z} e^{-H(\mathbf{v})/T}, 309 | \eeq 310 | where $Z = \sum_{\{ \mathbf{v} \}}e^{-H(\mathbf{v})/T}$ is the partition function and $H(\mathbf{v}) = -J \sum_{\langle i j \rangle} v_i v_j$ is the Ising model Hamiltonian with critical temperature $T_\text{c} \approx 2.269 J$. 311 | We wish to adjust the RBM parameters $\lambda$ such that the RBM distribution $p_\lambda(\mathbf{v})$ is a good approximation of $q(\mathbf{v},T)$. 
312 | This training is done by minimizing the negative log-likelihood (NLL), which is equivalent to minimizing the Kullback-Leibler (KL) divergence. 313 | The RBM never has explicit knowledge of $q(\mathbf{v},T)$ or $H(\mathbf{v})$. 314 | 315 | After training the RBM, we can then sample from it new spin configurations. 316 | Based on our theoretical knowledge of the Ising model, we can compute observables such as 317 | the energy $E(\mathbf{v})$, magnetization $M(\mathbf{v})$, specific heat $C_v(\mathbf{v})$ and susceptibility $\chi(\mathbf{v})$ for each sample. 318 | Assuming units where $J = k_\text{B} = 1$, we have 319 | \begin{align*} 320 | E(\mathbf{v}) &= H(\mathbf{v}) = -\sum_{\langle i j \rangle} v_i v_j, \\ 321 | M(\mathbf{v}) &= \sum_i v_i , \\ 322 | C_v(\mathbf{v}) &= \frac{\langle E^2 \rangle - \langle E \rangle^2}{T^2}, \\ 323 | \chi(\mathbf{v}) &= \frac{\langle M^2 \rangle - \langle M \rangle^2}{T}. 324 | \end{align*} 325 | When calculating these observables for a given spin configuration $\mathbf{v} = (v_1, v_2, \ldots, v_N)$, we note that the underlying lattice has periodic boundary conditions and uses a labelling for the sites such that, for example, on a $3\times 3$ lattice: 326 | \begin{center} 327 | \includegraphics[width=5cm]{lattice.pdf} 328 | \end{center} 329 | We will compare the averages of our RBM-sampled observables to the known values we expect to find in Monte Carlo simulation. 330 | We will also explore how the number of hidden units $n$ affects this comparison. 331 | 332 | %In this tutorial, you will work in groups of 3--4 to answer the following questions. 333 | 334 | %\opt{soln}{\newpage} 335 | \begin{enumerate}[label=\alph*)] 336 | 337 | %%%%%%%%%%%%%% (a) %%%%%%%%%%%%%% 338 | \item Start by examining Figure 4 of Reference~\cite{giac}. How do the thermodynamic observables generated from the RBM depend on the number of hidden units $n = n_h$?
How does the accuracy of each observable vary with the distance from the critical temperature $T_\text{c}$? 339 | 340 | %%% SOLUTION %%% 341 | \soln{ 342 | } 343 | 344 | %%%%%%%%%%%%%% (b) %%%%%%%%%%%%%% 345 | \item Let us train our first RBM at the critical temperature $T_\text{c}$ with $n=4$ hidden units. 346 | Run the code \texttt{tutorial4{\textunderscore}train{\textunderscore}ising2d.py} with \texttt{T = 2.269} and \texttt{num{\textunderscore}hidden = 4}. 347 | This code will train the parameters $\lambda$ of an RBM based on the given Monte Carlo samples such that 348 | $p_\lambda(\mathbf{v})$ is a good approximation of $q(\mathbf{v},T)$. 349 | The resulting parameters will be saved to a file within the folder \texttt{RBM{\textunderscore}parameters}. 350 | Increase the parameter \texttt{nsteps} until you are convinced that the NLL has roughly converged. 351 | %\item For each of the 11 temperatures, run the code \texttt{tutorial5{\textunderscore}train{\textunderscore}ising2d.py}. 352 | %For each temperature $T$, this code will train the parameters $\lambda$ of an RBM based on the Monte Carlo samples such that 353 | %$p_\lambda(\mathbf{v})$ is a good approximation of $q(\mathbf{v},T)$. 354 | 355 | %\hint{Since some temperatures might take longer to train, you may wish to have each group member study different temperatures.} 356 | 357 | %%% SOLUTION %%% 358 | \soln{ 359 | } 360 | 361 | %%%%%%%%%%%%%% (c) %%%%%%%%%%%%%% 362 | \item Examine the parameters \texttt{learning{\textunderscore}rate{\textunderscore}start}, \texttt{bsize}, \texttt{num{\textunderscore}gibbs} and \texttt{num{\textunderscore}samples} and explain how each is used to train the RBM. 363 | Experiment with adjusting each of these parameters to see how it affects the training behaviour.
364 | 365 | %%% SOLUTION %%% 366 | \soln{ 367 | } 368 | 369 | %%%%%%%%%%%%%% (d) %%%%%%%%%%%%%% 370 | \item Run \texttt{tutorial4{\textunderscore}sample{\textunderscore}ising2d.py} (again with \texttt{T = 2.269} and \texttt{num{\textunderscore}hidden = 4}) to generate new spin configuration samples. 371 | This program will also save the sample configurations to a file within the folder \texttt{RBM{\textunderscore}samples}. 372 | %This program will also save the corresponding average energy, magnetization, specific heat and susceptibility for each temperature 373 | %in a file within the folder \texttt{RBM{\textunderscore}observables}. 374 | 375 | %%% SOLUTION %%% 376 | \soln{ 377 | } 378 | 379 | \begin{table*}[t] 380 | \begin{center} 381 | \begin{tabular}{ | c | c | c | c | c | } 382 | \hline 383 | $T$ & $\langle E \rangle /N $ & $\langle M \rangle /N $ & $C_v /N $ & $\chi /N $ \\ 384 | \hline%{|=|==|====|} 385 | 1.508 & -1.9451(9) & 0.9856(2) & 0.25(2) & 0.030(4) \\ 386 | 2.269 & -1.514(2) & 0.849(1) & 1.06(2) & 0.316(9) \\ 387 | 3.286 & -0.744(2) & 0.532(1) & 0.574(6) & 0.408(6) \\ 388 | %\hhline{|=|==|====|} 389 | \hline 390 | 391 | \hline 392 | \end{tabular} 393 | \end{center} 394 | \caption{ 395 | Thermodynamic observables measured on the Monte Carlo configurations used to train an RBM at various temperatures. Errors are indicated in parentheses. } 396 | \label{tab} 397 | \end{table*} 398 | 399 | %%%%%%%%%%%%%% (e) %%%%%%%%%%%%%% 400 | %\item Run \texttt{tutorial5{\textunderscore}plot{\textunderscore}results.py} plot your samples' expectation values 401 | %and compare with known results from Monte Carlo. 402 | \item Write code that reads in the sampled configurations generated in part d) and calculates the corresponding observables $\langle E \rangle /N $, $\langle M \rangle /N $, $C_v /N $ and $\chi /N $. 403 | Compare with the results for the training samples, which are provided in Table~\ref{tab}.
404 | Are your discrepancies similar to those found in Figure 4 of Reference~\cite{giac}? 405 | 406 | \hint{In order to check if your code is working, you can try calculating the observable quantities for the training configurations in the folder \texttt{MC{\textunderscore}results} and verifying that you get the same results as in Table~\ref{tab}.} 407 | 408 | %%%%%%%%%%%%%% (f) %%%%%%%%%%%%%% 409 | \item Repeat parts b), d) and e) for other values of the number of hidden units $n$. 410 | Once again consider how your results compare with Figure 4 of Reference~\cite{giac}. 411 | 412 | %%%%%%%%%%%%%% (g) %%%%%%%%%%%%%% 413 | \item Repeat parts b), d), e) and f) for temperatures above and below the critical temperature, such as the ones provided in Table~\ref{tab}. 414 | How does the difference between your sampled and trained observables depend on temperature? 415 | Once again examine how your results compare with Figure 4 of Reference~\cite{giac}. 416 | 417 | %%% SOLUTION %%% 418 | \soln{ 419 | } 420 | 421 | 422 | \end{enumerate} 423 | 424 | \begin{thebibliography}{} 425 | 426 | \bibitem{giac} 427 | G. Torlai and R. Melko, Phys. Rev. B \textbf{94}, 165134 (2016), {\small\url{https://arxiv.org/abs/1606.02718}}. 
428 | 429 | \end{thebibliography} 430 | 431 | \end{document} -------------------------------------------------------------------------------- /Tutorial4/Uniandes_logo.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PIQuIL/QuLAPENO/f646b87512bb8d879b6de93e004d33bbff698fe9/Tutorial4/Uniandes_logo.jpeg -------------------------------------------------------------------------------- /Tutorial4/lattice.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PIQuIL/QuLAPENO/f646b87512bb8d879b6de93e004d33bbff698fe9/Tutorial4/lattice.pdf -------------------------------------------------------------------------------- /Tutorial4/rbm.py: -------------------------------------------------------------------------------- 1 | ########## Machine Learning for Quantum Matter and Technology ###################### 2 | ### Juan Carrasquilla, Estelle Inack, Giacomo Torlai, Roger Melko 3 | ### with dataset and code by Giacomo Torlai, Juan Carrasquilla and Lauren Hayward Sierens 4 | ### 5 | ### Tutorial 4: This code defines a Restricted Boltzmann Machine (RBM) object. 
#####################################################################################


import tensorflow as tf
import itertools as it
import numpy as np

class RBM(object):
    # Restricted Boltzmann machine with binary (0/1) visible and hidden units.
    #
    # Energy: E(v,h) = - b^T v - c^T h - v^T W h, with p(v,h) = exp(-E(v,h)) / Z.
    # Written for TensorFlow 1.x graph mode: methods build ops on the default
    # graph and return tensors that must be evaluated inside a tf.Session.

    ### Constructor ###
    def __init__(self, num_hidden, num_visible, num_samples=128, weights=None, visible_bias=None, hidden_bias=None):
        self.num_hidden = num_hidden #number of hidden units
        self.num_visible = num_visible #number of visible units

        #visible bias (column vector of shape (num_visible, 1)):
        default = tf.zeros(shape=(self.num_visible, 1))
        self.visible_bias = self._create_parameter_variable(visible_bias, default)

        #hidden bias (column vector of shape (num_hidden, 1)):
        default = tf.zeros(shape=(self.num_hidden, 1))
        self.hidden_bias = self._create_parameter_variable(hidden_bias, default)

        #pairwise weights of shape (num_visible, num_hidden):
        default = tf.random_normal(shape=(self.num_visible, self.num_hidden), mean=0, stddev=0.05)
        self.weights = self._create_parameter_variable(weights, default)

        #variables for sampling (num_samples is the number of samples to return):
        #initialized with fair coin flips; persists between calls, so these act
        #as the persistent chains for PCD (see stochastic_maximum_likelihood):
        self.hidden_samples = tf.Variable(
            self.sample_binary_tensor(tf.constant(0.5), num_samples, self.num_hidden),
            trainable=False, name='hidden_samples'
        )

        self._all_hidden_states = None #lazily-built table of all 2^num_hidden hidden configurations
        self.max_feasible_for_log_pf = 24 #refuse exact enumeration beyond 2^24 hidden states
    #end of constructor

    ### Build array with all possible configurations of the hidden layer ###
    ### (used only by exact_log_partition_function; infeasible for large num_hidden): ###
    @property
    def all_hidden_states(self):
        if self._all_hidden_states is None:
            assert self.num_hidden <= self.max_feasible_for_log_pf, \
                'cannot generate all hidden states for num_hidden > {}'.format(self.max_feasible_for_log_pf)
            self._all_hidden_states = np.array(list(it.product([0, 1], repeat=self.num_hidden)), dtype=np.float32)
        return self._all_hidden_states

    ### Method to initialize variables (uses `default` when no initial value is given): ###
    @staticmethod
    def _create_parameter_variable(initial_value=None, default=None):
        if initial_value is None:
            initial_value = default
        return tf.Variable(initial_value)

    ### Method to calculate the conditional probability of the hidden layer given a visible state: ###
    ### p(h_j = 1 | v) = sigmoid(c_j + (v W)_j), returned for each sample in the batch. ###
    def p_of_h_given(self, v):
        # type: (tf.Tensor) -> tf.Tensor
        return tf.nn.sigmoid(tf.matmul(v, self.weights) + tf.transpose(self.hidden_bias))

    ### Method to calculate the conditional probability of the visible layer given a hidden state: ###
    ### p(v_i = 1 | h) = sigmoid(b_i + (h W^T)_i), returned for each sample in the batch. ###
    def p_of_v_given(self, h):
        # type: (tf.Tensor) -> tf.Tensor
        return tf.nn.sigmoid(tf.matmul(h, self.weights, transpose_b=True) + tf.transpose(self.visible_bias))

    ### Method to sample the hidden nodes given a visible state: ###
    def sample_h_given(self, v):
        # type: (tf.Tensor) -> tf.Tensor
        b = tf.shape(v)[0] # number of samples
        m = self.num_hidden
        prob_h = self.p_of_h_given(v)
        samples = self.sample_binary_tensor(prob_h, b, m)
        return samples

    ### Method to sample the visible nodes given a hidden state: ###
    def sample_v_given(self, h):
        # type: (tf.Tensor) -> tf.Tensor
        b = tf.shape(h)[0] # number of samples
        n = self.num_visible
        prob_v = self.p_of_v_given(h)
        samples = self.sample_binary_tensor(prob_v, b, n)
        return samples

    ###
    # Method for persistent contrastive divergence (CD_k):
    # Runs `num_iterations` steps of alternating Gibbs sampling starting from the
    # persistent hidden chains, stores the final hidden state back into
    # self.hidden_samples, and returns the (hidden, visible) chain tensors.
    #
    # :param int num_iterations: The 'k' in CD_k.
    ###
    def stochastic_maximum_likelihood(self, num_iterations):
        # type: (int) -> (tf.Tensor, tf.Tensor)
        h_samples = self.hidden_samples
        v_samples = None
        for i in range(num_iterations):
            v_samples = self.sample_v_given(h_samples)
            h_samples = self.sample_h_given(v_samples)

        # Reassigning the attribute to the assign op makes the update part of the
        # graph that callers evaluate, persisting the chains between sess.run calls.
        self.hidden_samples = self.hidden_samples.assign(h_samples)
        return self.hidden_samples, v_samples

    ###
    # Method to compute the energy E = - aT*v - bT*h - vT*W*h
    # Note that since we want to support larger batch sizes, we do element-wise multiplication between
    # vT*W and h, and sum along the columns to get a Tensor of shape batch_size by 1
    #
    # NOTE(review): the two matmul terms have shape (batch, 1) while the reduce_sum term
    # has shape (batch,), so the subtraction broadcasts to a (batch, batch) tensor.
    # Both callers only take tf.reduce_mean of the result, and the mean of that matrix
    # equals the intended mean energy -- but confirm before using the output element-wise.
    #
    # :param hidden_samples: Tensor of shape batch_size by num_hidden
    # :param visible_samples: Tensor of shape batch_size by num_visible
    ###
    def energy(self, hidden_samples, visible_samples):
        # type: (tf.Tensor, tf.Tensor) -> tf.Tensor
        return (-tf.matmul(hidden_samples, self.hidden_bias) # b x m * m x 1
                - tf.matmul(visible_samples, self.visible_bias) # b x n * n x 1
                - tf.reduce_sum(tf.matmul(visible_samples, self.weights) * hidden_samples, 1))

    ### Method used to calculate the gradient of the negative log-likelihood ###
    ### (the usual CD estimator: <E>_data - <E>_model; its gradient w.r.t. the ###
    ### parameters approximates the NLL gradient, its value is not the NLL itself): ###
    def neg_log_likelihood_forGrad(self, visible_samples, num_gibbs=2):
        # type: (tf.Tensor, int) -> tf.Tensor
        hidden_samples = self.sample_h_given(visible_samples)
        expectation_from_data = tf.reduce_mean(self.energy(hidden_samples, visible_samples))

        model_hidden, model_visible = self.stochastic_maximum_likelihood(num_gibbs)
        expectation_from_model = tf.reduce_mean(self.energy(model_hidden, model_visible))

        return expectation_from_data - expectation_from_model

    ###
    # Method to compute the average negative log likelihood over a batch of visible samples:
    # NLL = - <log p(v)> = - <F(v)> + log(Z),
    # where F(v) = b^T v + sum_j softplus(c_j + (W^T v)_j) is the (negative) free energy
    # obtained by summing out the hidden units analytically.
    ###
    def neg_log_likelihood(self, visible_samples, log_Z):
        free_energy = (tf.matmul(visible_samples, self.visible_bias)
                       + tf.reduce_sum(tf.nn.softplus(tf.matmul(visible_samples, self.weights)
                                                      + tf.transpose(self.hidden_bias)), 1))
        return -tf.reduce_mean(free_energy - log_Z)

    ### Method to evaluate the partition function by exact enumerations: ###
    ### (Intractable for large sizes) ###
    def exact_log_partition_function(self):
        with tf.name_scope('exact_log_Z'):
            # Define the exponent: H*b + sum(softplus(1 + exp(a + w*H.T)))
            first_term = tf.matmul(self.all_hidden_states, self.hidden_bias, name='first_term')
            with tf.name_scope('second_term'):
                second_term = tf.matmul(self.weights, self.all_hidden_states, transpose_b=True)
                second_term = tf.nn.softplus(tf.add(self.visible_bias, second_term))
                second_term = tf.transpose(tf.reduce_sum(second_term, reduction_indices=[0], keep_dims=True))
            exponent = tf.cast(first_term + second_term, dtype=tf.float64, name='exponent')
            #exponent_mean = tf.reduce_mean(exponent)
            # Subtract the max before exponentiating (log-sum-exp trick) for numerical stability:
            exponent_mean = tf.reduce_max(exponent)

            return tf.log(tf.reduce_sum(tf.exp(exponent - exponent_mean))) + exponent_mean

    ###
    # Convenience method for generating a binary Tensor using a given probability
    # (entry is 1 with the given probability, 0 otherwise)
    #
    # :param prob: Tensor of shape (m, n)
    # :param m: number of rows in result.
    # :param n: number of columns in result.
    ###
    @staticmethod
    def sample_binary_tensor(prob, m, n):
        # type: (tf.Tensor, int, int) -> tf.Tensor
        return tf.where(
            tf.less(tf.random_uniform(shape=(m, n)), prob),
            tf.ones(shape=(m, n)),
            tf.zeros(shape=(m, n))
        )

#end of RBM class
9 | ##################################################################################### 10 | 11 | 12 | from __future__ import print_function 13 | import tensorflow as tf 14 | from rbm import RBM 15 | import numpy as np 16 | import os 17 | 18 | #Input parameters: 19 | L = 4 #linear size of the system 20 | num_visible = L*L #number of visible nodes 21 | num_hidden = 4 #number of hidden nodes 22 | 23 | #Temperature list for which there are trained RBM parameters stored in Data_ising2d/RBM_parameters 24 | #T_list = [1.0,1.254,1.508,1.762,2.016,2.269,2.524,2.778,3.032,3.286,3.540] 25 | T_list = [2.269] 26 | 27 | #Sampling parameters: 28 | num_samples = 500 # how many independent chains will be sampled 29 | gibb_updates = 2 # how many gibbs updates per call to the gibbs sampler 30 | nbins = 100 # number of calls to the RBM sampler 31 | 32 | #Specify where the sampled configurations will be stored: 33 | samples_dir = 'Data_ising2d/RBM_samples' 34 | if not(os.path.isdir(samples_dir)): 35 | os.mkdir(samples_dir) 36 | samples_filePaths = [] #file paths where samples for each T will be stored 37 | 38 | #Initialize the RBM for each temperature in T_list: 39 | rbms = [] 40 | rbm_samples = [] 41 | for i in range(len(T_list)): 42 | T = T_list[i] 43 | 44 | samples_filePath = '%s/samples_nH%d_L%d' %(samples_dir,num_hidden,L) 45 | samples_filePath += '_T' + str(T) + '.txt' 46 | samples_filePaths.append(samples_filePath) 47 | fout = open(samples_filePath,'w') 48 | fout.close() 49 | 50 | #Read in the trained RBM parameters: 51 | path_to_params = 'Data_ising2d/RBM_parameters/parameters_nH%d_L%d' %(num_hidden,L) 52 | path_to_params += '_T'+str(T)+'.npz' 53 | params = np.load(path_to_params) 54 | weights = params['weights'] 55 | visible_bias = params['visible_bias'] 56 | hidden_bias = params['hidden_bias'] 57 | hidden_bias = np.reshape(hidden_bias,(hidden_bias.shape[0],1)) 58 | visible_bias = np.reshape(visible_bias,(visible_bias.shape[0],1)) 59 | 60 | # Initialize RBM class 61 | 
rbms.append(RBM( 62 | num_hidden=num_hidden, num_visible=num_visible, 63 | weights=weights, visible_bias=visible_bias,hidden_bias=hidden_bias, 64 | num_samples=num_samples 65 | )) 66 | rbm_samples.append(rbms[i].stochastic_maximum_likelihood(gibb_updates)) 67 | #end of loop over temperatures 68 | 69 | # Initialize tensorflow 70 | init = tf.group(tf.initialize_all_variables(), tf.initialize_local_variables()) 71 | 72 | # Sample thermodynamic observables: 73 | N = num_visible 74 | with tf.Session() as sess: 75 | sess.run(init) 76 | 77 | for i in range(nbins): 78 | print ('bin %d' %i) 79 | 80 | for t in range(len(T_list)): 81 | fout = open(samples_filePaths[t],'a') 82 | 83 | _,samples=sess.run(rbm_samples[t]) 84 | spins = np.asarray((2*samples-1)) #convert from 0,1 variables to -1,+1 variables 85 | for k in range(num_samples): 86 | for i in range(N): 87 | fout.write('%d ' %int(spins[k,i])) 88 | fout.write('\n') 89 | fout.close() 90 | -------------------------------------------------------------------------------- /Tutorial4/tutorial4_train_ising2d.py: -------------------------------------------------------------------------------- 1 | ########## Machine Learning for Quantum Matter and Technology ###################### 2 | ### Juan Carrasquilla, Estelle Inack, Giacomo Torlai, Roger Melko 3 | ### with dataset and code by Giacomo Torlai, Juan Carrasquilla and Lauren Hayward Sierens 4 | ### 5 | ### Tutorial 4: This code will train a Restricted Boltzmann Machine (RBM) to learn the 6 | ### distribution of spin configurations of the two-dimensional Ising model at a 7 | ### given temperature. 
8 | ##################################################################################### 9 | 10 | 11 | from __future__ import print_function 12 | import tensorflow as tf 13 | import itertools as it 14 | from rbm import RBM 15 | import matplotlib.pyplot as plt 16 | import numpy as np 17 | import math 18 | import os 19 | 20 | #Specify font sizes for plots: 21 | plt.rcParams['axes.labelsize'] = 10 22 | plt.rcParams['legend.fontsize'] = 10 23 | plt.rcParams['xtick.labelsize'] = 8 24 | plt.rcParams['ytick.labelsize'] = 8 25 | 26 | plt.ion() # turn on interactive mode (for plotting) 27 | 28 | # Input parameters: 29 | L = 4 #linear size of the system 30 | T = 2.269 #a temperature for which there are MC configurations stored in Data_ising2d/MC_results 31 | num_visible = L*L #number of visible nodes 32 | num_hidden = 4 #number of hidden nodes 33 | nsteps = 20000 #number of training steps (iterations over the mini-batches) 34 | learning_rate_start = 1e-3 #the learning rate will start at this value and decay exponentially 35 | bsize = 100 #batch size 36 | num_gibbs = 10 #number of Gibbs iterations (steps of contrastive divergence) 37 | num_samples = 10 #number of chains in PCD 38 | 39 | ### Function to save weights and biases to a parameter file ### 40 | def save_parameters(sess, rbm): 41 | weights, visible_bias, hidden_bias = sess.run([rbm.weights, rbm.visible_bias, rbm.hidden_bias]) 42 | 43 | parameter_dir = 'Data_ising2d/RBM_parameters' 44 | if not(os.path.isdir(parameter_dir)): 45 | os.mkdir(parameter_dir) 46 | parameter_file_path = '%s/parameters_nH%d_L%d' %(parameter_dir,num_hidden,L) 47 | parameter_file_path += '_T' + str(T) 48 | np.savez_compressed(parameter_file_path, weights=weights, visible_bias=visible_bias, hidden_bias=hidden_bias) 49 | 50 | class Placeholders(object): 51 | pass 52 | 53 | class Ops(object): 54 | pass 55 | 56 | weights = None #weights 57 | visible_bias = None #visible bias 58 | hidden_bias = None #hidden bias 59 | 60 | # Load the MC configuration 
training data: 61 | trainFileName = 'Data_ising2d/MC_results/ising2d_L'+str(L)+'_T'+str(T)+'_train.txt' 62 | xtrain = np.loadtxt(trainFileName) 63 | testFileName = 'Data_ising2d/MC_results/ising2d_L'+str(L)+'_T'+str(T)+'_test.txt' 64 | xtest = np.loadtxt(testFileName) 65 | 66 | xtrain_randomized = np.random.permutation(xtrain) # random permutation of training data 67 | xtest_randomized = np.random.permutation(xtest) # random permutation of test data 68 | iterations_per_epoch = xtrain.shape[0] / bsize 69 | 70 | # Initialize the RBM class 71 | rbm = RBM(num_hidden=num_hidden, num_visible=num_visible, weights=weights, visible_bias=visible_bias,hidden_bias=hidden_bias, num_samples=num_samples) 72 | 73 | # Initialize operations and placeholders classes 74 | ops = Ops() 75 | placeholders = Placeholders() 76 | placeholders.visible_samples = tf.placeholder(tf.float32, shape=(None, num_visible), name='v') # placeholder for training data 77 | 78 | total_iterations = 0 # starts at zero 79 | ops.global_step = tf.Variable(total_iterations, name='global_step_count', trainable=False) 80 | learning_rate = tf.train.exponential_decay( 81 | learning_rate_start, 82 | ops.global_step, 83 | 100 * xtrain.shape[0]/bsize, 84 | 1.0 # decay rate = 1 means no decay 85 | ) 86 | 87 | cost = rbm.neg_log_likelihood_forGrad(placeholders.visible_samples, num_gibbs=num_gibbs) 88 | optimizer = tf.train.AdamOptimizer(learning_rate, epsilon=1e-2) 89 | ops.lr = learning_rate 90 | ops.train = optimizer.minimize(cost, global_step=ops.global_step) 91 | ops.init = tf.group(tf.initialize_all_variables(), tf.initialize_local_variables()) 92 | 93 | # Define the negative log-likelihood 94 | # We can use this to plot the RBM's training progress. 
95 | # This calculation is intractable for large networks so let's only do it for small num_hidden 96 | logZ = rbm.exact_log_partition_function() 97 | placeholders.logZ = tf.placeholder(tf.float32) 98 | NLL = rbm.neg_log_likelihood(placeholders.visible_samples,placeholders.logZ) 99 | 100 | sess = tf.Session() 101 | sess.run(ops.init) 102 | 103 | bcount = 0 #counter 104 | epochs_done = 0 #epochs counter 105 | nll_test_list = [] #negative log-likelihood for each epoch 106 | nll_train_list = [] #negative log-likelihood for each epoch 107 | for ii in range(nsteps): 108 | if bcount*bsize+ bsize>=xtrain.shape[0]: 109 | bcount = 0 110 | xtrain_randomized = np.random.permutation(xtrain) 111 | 112 | batch = xtrain_randomized[ bcount*bsize: bcount*bsize+ bsize,:] 113 | bcount += 1 114 | feed_dict = {placeholders.visible_samples: batch} 115 | 116 | _, num_steps = sess.run([ops.train, ops.global_step], feed_dict=feed_dict) 117 | 118 | if num_steps % iterations_per_epoch == 0: 119 | lz = sess.run(logZ) 120 | nll_test = sess.run(NLL,feed_dict={placeholders.visible_samples: xtest_randomized, placeholders.logZ: lz}) 121 | nll_test_list.append(nll_test) 122 | 123 | print ('Epoch = %d, Nsteps = %d, NLL on test data = %.6f' %(epochs_done,num_steps,nll_test)) 124 | save_parameters(sess, rbm) 125 | epochs_done += 1 126 | 127 | # Update the plot: 128 | plt.figure(1) 129 | plt.clf() 130 | plt.plot( np.arange(epochs_done), nll_test_list, 'o-') 131 | plt.xlabel('Epoch') 132 | plt.ylabel('NLL') 133 | plt.pause(0.1) 134 | 135 | plt.savefig('NLL_vs_epoch_T%s.pdf' %(str(T))) 136 | -------------------------------------------------------------------------------- /Tutorial5/Tutorial5.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PIQuIL/QuLAPENO/f646b87512bb8d879b6de93e004d33bbff698fe9/Tutorial5/Tutorial5.pdf -------------------------------------------------------------------------------- /Tutorial5/Tutorial5.tex: 
-------------------------------------------------------------------------------- 1 | \documentclass[letterpaper]{scrartcl} 2 | \usepackage[top=0.8in, bottom=1in, left=0.9in, right=0.9in]{geometry} 3 | 4 | \makeatletter 5 | \DeclareOldFontCommand{\tt}{\normalfont\ttfamily}{\mathtt} 6 | \makeatother 7 | 8 | \usepackage{url} 9 | \usepackage{scalefnt} 10 | \usepackage{bm} 11 | \usepackage{cancel} 12 | 13 | %-------------------------------------------------------------- 14 | % We need this package, part of the KOMA class, for the custom 15 | % headings. 16 | %-------------------------------------------------------------- 17 | \usepackage{scrpage2} 18 | 19 | 20 | %-------------------------------------------------------------- 21 | % One of many packages you can use if you want to include 22 | % graphics. 23 | %-------------------------------------------------------------- 24 | \usepackage{graphicx} 25 | 26 | %-------------------------------------------------------------- 27 | % The AMS packages are useful but not required. They offer a 28 | % number of nice fonts, environments for formatting multiline 29 | % equations, etc. 30 | %-------------------------------------------------------------- 31 | \usepackage{amsmath} 32 | \usepackage{amsfonts} 33 | \usepackage{amssymb} 34 | \usepackage{amsthm} 35 | 36 | %-------------------------------------------------------------- 37 | % Basic way to set-up the page margins. 38 | %-------------------------------------------------------------- 39 | %\addtolength{\oddsidemargin}{-.2in} 40 | %\addtolength{\evensidemargin}{-.2in} 41 | %\addtolength{\textwidth}{0.45in} 42 | %\addtolength{\topmargin}{-.175in} 43 | %\addtolength{\textheight}{0.75in} 44 | 45 | %-------------------------------------------------------------- 46 | % Comment out the following to add indents and remove space between paragraphs. 
47 | %-------------------------------------------------------------- 48 | \usepackage{parskip} 49 | 50 | %-------------------------------------------------------------- 51 | % This package is used to define custom colours. 52 | %-------------------------------------------------------------- 53 | \usepackage[usenames,dvipsnames,svgnames,table]{xcolor} 54 | 55 | %-------------------------------------------------------------- 56 | % Package for adding in solutions: 57 | %-------------------------------------------------------------- 58 | \usepackage[nosoln,regf,nolf]{optional} 59 | %\usepackage[soln,regf]{optional} 60 | 61 | %\newcommand{\soln}[1]{\opt{soln}{\\[4pt] \textcolor{JungleGreen}{\textbf{Solution:}} #1}} 62 | \newcommand{\soln}[1]{\opt{soln}{\textcolor{JungleGreen}{\usekomafont{descriptionlabel}{Solution:}} #1}} 63 | 64 | \newcommand{\hint}[1]{{\usekomafont{descriptionlabel}{Hint:}} #1} 65 | \newcommand{\note}[1]{{\usekomafont{descriptionlabel}{Note:}} #1} 66 | \newcommand{\reference}[1]{{\usekomafont{descriptionlabel}{Reference:}} #1} 67 | 68 | %-------------------------------------------------------------- 69 | % A few colours for hyperlinks. 70 | %-------------------------------------------------------------- 71 | \definecolor{plum}{rgb}{0.36078, 0.20784, 0.4} 72 | \definecolor{chameleon}{rgb}{0.30588, 0.60392, 0.023529} 73 | \definecolor{cornflower}{rgb}{0.12549, 0.29020, 0.52941} 74 | \definecolor{scarlet}{rgb}{0.8, 0, 0} 75 | \definecolor{brick}{rgb}{0.64314, 0, 0} 76 | 77 | %-------------------------------------------------------------- 78 | % A command for typesetting and linking an email address. 
79 | %-------------------------------------------------------------- 80 | \newcommand{\email}[1]{\href{mailto:#1}{\tt \textcolor{cornflower}{#1}}} 81 | \newcommand{\web}[1]{\href{#1}{\tt \textcolor{cornflower}{#1}}} 82 | 83 | %-------------------------------------------------------------- 84 | % The following declaration includes the hyperref package and 85 | % assigns metadata. If you compile with pdflatex, this data 86 | % will be automatically included in the pdf file. 87 | %-------------------------------------------------------------- 88 | %\usepackage[ 89 | % pdftitle={QFT Tutorial 1},% 90 | % pdfauthor={PSI Tutors},% 91 | % pdfsubject={QFT Tutorial 1},% 92 | % pdfkeywords={PSI}, 93 | % colorlinks=true, 94 | % linkcolor=cornflower, 95 | % citecolor=scarlet, 96 | % urlcolor=chameleon% 97 | %]{hyperref} 98 | 99 | %\setcounter{secnumdepth}{2} % section number depth 100 | %\setcounter{tocdepth}{2} % depth of TOC 101 | 102 | %-------------------------------------------------------------- 103 | % Specify the font used in captions. 104 | %-------------------------------------------------------------- 105 | \setkomafont{captionlabel}{\usekomafont{descriptionlabel}} 106 | 107 | %-------------------------------------------------------------- 108 | % This is where we define the custom title. The image that is 109 | % placed on the left-hand-side of the title, PILogo.pdf in 110 | % this case, should be in the same directory as this file. Note 111 | % that you can always use hyperlinks for the Title, Semester, 112 | % and Author fields, below, in case you want to link to a seminar 113 | % web page or a lecturer's email address. 
114 | %-------------------------------------------------------------- 115 | 116 | \titlehead{% 117 | \vspace*{-1cm} 118 | \begin{minipage}[b]{4.0cm} 119 | \includegraphics*[height=1.3cm]{Uniandes_logo.jpeg}% 120 | \end{minipage} 121 | \hfill 122 | \begin{minipage}[b]{12cm} 123 | \begin{flushright} 124 | \usekomafont{descriptionlabel} 125 | \large Machine Learning for Quantum Matter and Technology \\ 126 | \normalsize \normalfont 127 | J. Carrasquilla, E. Inack, G. Torlai, R. Melko, L. Hayward Sierens 128 | \end{flushright} 129 | \end{minipage} 130 | \\[-3mm] 131 | \hrule 132 | \vspace{-3mm} 133 | } 134 | % ----------- 135 | 136 | %-------------------------------------------------------------- 137 | % Other useful physic-related packages 138 | %-------------------------------------------------------------- 139 | \usepackage{braket} 140 | % Use \Bra{}, \Ket{} or \Braket{x | \psi} for Dirac notation 141 | 142 | %-------------------------------------------------------------- 143 | % Nice numbering for question parts. 
144 | %-------------------------------------------------------------- 145 | \newcommand{\ba}{\begin{eqnarray}} 146 | \newcommand{\ea}{\end{eqnarray}} 147 | 148 | \newcommand{\ssk}{\smallskip} 149 | \newcommand{\msk}{\medskip} 150 | 151 | \newcommand{\nin}{\noindent} 152 | 153 | \newcommand{\beq}{\begin{equation}} 154 | \newcommand{\eeq}{\end{equation}} 155 | 156 | \newcommand{\beqs}{\begin{equation*}} 157 | \newcommand{\eeqs}{\end{equation*}} 158 | 159 | \renewcommand{\vec}[1]{{\mathbf{#1}}} 160 | \renewcommand{\labelenumi}{\alph{enumi})} 161 | \renewcommand{\labelenumiii}{\roman{enumiii})} 162 | 163 | %%%%%%%%%%%%% 164 | 165 | \def\be{\begin{eqnarray}} 166 | \def\ee{\end{eqnarray}} 167 | \newcommand{\nn}{\nonumber} 168 | \newcommand\para{\paragraph{}} 169 | \newcommand{\ft}[2]{{\textstyle\frac{#1}{#2}}} 170 | \newcommand{\eqn}[1]{(\ref{#1})} 171 | \newcommand{\pl}[1]{\frac{\partial {\cal L}}{\partial{#1}}} 172 | \newcommand{\ppp}[2]{\frac{\partial {#1}}{\partial {#2}}} 173 | \newcommand{\ph}[1]{\frac{\partial {\cal H}}{\partial{#1}}} 174 | \newcommand{\leftp}[3]{\left.\ppp{#1}{#2}\right|_{#3}} 175 | %\newcommand{\Vec}[2]{\left(\begin{array}{c} {#1} \\ {#2}\end{array}\right)} 176 | \newcommand\vx{\vec{x}} 177 | \newcommand\vy{\vec{y}} 178 | \newcommand\vp{\vec{p}} 179 | \newcommand\vq{\vec{q}} 180 | \newcommand\vk{\vec{k}} 181 | \newcommand\avp{a^{\ }_{\vp}} 182 | \newcommand\advp{a^\dagger_{\vp}} 183 | \newcommand\ad{a^\dagger} 184 | 185 | \newcommand\balpha{\mbox{\boldmath $\alpha$}} 186 | \newcommand\bbeta{\mbox{\boldmath $\beta$}} 187 | \newcommand\bgamma{\mbox{\boldmath $\gamma$}} 188 | \newcommand\bomega{\mbox{\boldmath $\omega$}} 189 | \newcommand\blambda{\mbox{\boldmath $\lambda$}} 190 | \newcommand\bmu{\mbox{\boldmath $\mu$}} 191 | \newcommand\bphi{\mbox{\boldmath $\phi$}} 192 | \newcommand\bzeta{\mbox{\boldmath $\zeta$}} 193 | \newcommand\bsigma{\mbox{\boldmath $\sigma$}} 194 | \newcommand\bepsilon{\mbox{\boldmath $\epsilon$}} 195 | 
\newcommand\btau{\mbox{\boldmath $\tau$}} 196 | \newcommand\beeta{\mbox{\boldmath $\eta$}} 197 | \newcommand\btheta{\mbox{\boldmath $\theta$}} 198 | 199 | \def\norm#1{:\!\!#1\!\!:} 200 | 201 | \def\part{\partial} 202 | 203 | \def\dbox{\hbox{{$\sqcup$}\llap{$\sqcap$}}} 204 | 205 | \def\sla#1{\hbox{{$#1$}\llap{$/$}}} 206 | \def\Dslash{\,\,{\raise.15ex\hbox{/}\mkern-13mu D}} 207 | \def\Dbarslash{\,\,{\raise.15ex\hbox{/}\mkern-12mu {\bar D}}} 208 | \def\delslash{\,\,{\raise.15ex\hbox{/}\mkern-10mu \partial}} 209 | \def\delbarslash{\,\,{\raise.15ex\hbox{/}\mkern-9mu {\bar\partial}}} 210 | \def\pslash{\,\,{\raise.15ex\hbox{/}\mkern-11mu p}} 211 | \def\qslash{\,\,{\raise.15ex\hbox{/}\mkern-9mu q}} 212 | \def\kslash{\,\,{\raise.15ex\hbox{/}\mkern-11mu k}} 213 | \def\eslash{\,\,{\raise.15ex\hbox{/}\mkern-9mu \epsilon}} 214 | \def\calDslash{\,\,{\rais.15ex\hbox{/}\mkern-12mu {\cal D}}} 215 | \newcommand{\slsh}[1]{\,\,{\raise.15ex\hbox{/}\mkern-12mu {#1}}} 216 | 217 | 218 | \newcommand\Bprime{B${}^\prime$} 219 | %\newcommand{\sign}{{\rm sign}} 220 | 221 | \newcommand\bx{{\bf x}} 222 | \newcommand\br{{\bf r}} 223 | \newcommand\bF{{\bf F}} 224 | \newcommand\bp{{\bf p}} 225 | \newcommand\bL{{\bf L}} 226 | \newcommand\bR{{\bf R}} 227 | \newcommand\bP{{\bf P}} 228 | \newcommand\bE{{\bf E}} 229 | \newcommand\bB{{\bf B}} 230 | \newcommand\bA{{\bf A}} 231 | \newcommand\bee{{\bf e}} 232 | \newcommand\bte{\tilde{\bf e}} 233 | \def\ket#1{\left| #1 \right\rangle} 234 | \def\bra#1{\left\langle #1 \right|} 235 | \def\vev#1{\left\langle #1 \right\rangle} 236 | 237 | \newcommand\lmn[2]{\Lambda^{#1}_{\ #2}} 238 | \newcommand\mup[2]{\eta^{#1 #2}} 239 | \newcommand\mdown[2]{\eta_{#1 #2}} 240 | \newcommand\deld[2]{\delta^{#1}_{#2}} 241 | \newcommand\df{\Delta_F} 242 | \newcommand\cL{{\cal L}} 243 | %\def\theequation{\thesection.\arabic{equation} 244 | 245 | \newcounter{solneqn} 246 | %\newcommand{\mytag}{\refstepcounter{equation}\tag{\roman{equationn}}} 247 | 
\newcommand{\mytag}{\refstepcounter{solneqn}\tag{S.\arabic{solneqn}}} 248 | 249 | \newcommand{\appropto}{\mathrel{\vcenter{ 250 | \offinterlineskip\halign{\hfil$##$\cr 251 | \propto\cr\noalign{\kern2pt}\sim\cr\noalign{\kern-2pt}}}}} 252 | %%%%%%%%% 253 | 254 | 255 | \DeclareMathOperator{\Tr}{Tr} 256 | \DeclareMathOperator{\sign}{sign} 257 | 258 | %\renewcommand{\ttdefault}{pcr} 259 | 260 | \usepackage{enumitem} 261 | 262 | \begin{document} 263 | 264 | %\scalefont{1.35} 265 | 266 | \vspace{-3cm} 267 | 268 | \opt{nosoln}{\title{Tutorial 5: \\Variational Monte Carlo on the \\Harmonic oscillator \vspace*{-6mm}}} 269 | \opt{soln}{\title{Tutorial 3 \textcolor{JungleGreen}{Solutions}: \\Identifying phase transitions using \\principal component analysis \vspace*{-6mm}}} 270 | 271 | \date{May 31, 2019} 272 | 273 | \maketitle 274 | 275 | The objective of this tutorial is to find the ground state properties of the Harmonic oscillator using the Variational Monte Carlo (VMC) method. 276 | 277 | VMC is usually implemented to estimate the ground state energy $E_0$ of an Hamiltonian $\hat{H}$ by using a were crafted state $|\Psi_{\balpha} \rangle$ which has to be representative enough of the ground state $|\phi_0\rangle$. $\balpha$ stands for a set of variational parameters that needs to be optimized. 278 | 279 | The ground state energy is then approximated through the variational energy which is given by: 280 | \beq 281 | E_{var} =\frac{\langle {\Psi}_{\boldsymbol{\alpha}} | \hat{H} |{\Psi}_{\boldsymbol{\alpha}}\rangle}{\langle {\Psi}_{\boldsymbol{\alpha}}|{\Psi}_{\boldsymbol{\alpha}}\rangle}, 282 | \label{eqn:varE2} 283 | \eeq 284 | 285 | The \textit{variational principle} guarantees that $E_{var}$ acts as an upper bound to the ground state energy of the Hamiltonian $\hat{H}$. The essence of VMC would therefore be to find the optimal set of variational parameters ${\boldsymbol{\alpha}}_{opt}$ for which the variational energy is minimum. 
This is usually achieved through some minimization procedure. 286 | 287 | Recall that the Hamiltonian of the harmonic oscillator in one dimension is given by: 288 | \beq 289 | \hat{H}= \frac{\hat{p}^2}{2} + \frac{x^2}{2}. 290 | \eeq 291 | where the energy is in units of $\hbar \omega$. The ground state wave-function can be computed exactly and is given by: 292 | \beq 293 | \phi_0(x) ={(\frac{1}{\pi})}^{1/4} e^{-x^2/2}. 294 | \eeq 295 | 296 | In this tutorial, we will consider the variational wave-function $\Psi_\alpha (x) = e^{-\alpha x^2/2}$ where $\alpha$ is the variational parameter to determine. 297 | 298 | \begin{enumerate}[label=\alph*)] 299 | \item Run the code \texttt{tutorial5{\textunderscore}vmc{\textunderscore}ho.py} with \texttt{num{\textunderscore}walkers = 400}, \texttt{num{\textunderscore}MC{\textunderscore}steps = 30000}, \\ \texttt{num{\textunderscore}equil{\textunderscore}steps = 3000}, and convince yourself that the variational energy is indeed an upper bound to the exact ground state energy. 300 | 301 | \item Plot the standard deviation of $E_{var}$ with respect to different values of $\alpha$. What can you interpret from the result? How does the standard deviation varies with respect to the number of walkers? 302 | 303 | \item Use the code \texttt{tutorial5{\textunderscore}training{\textunderscore}vmc{\textunderscore}ho.py} to optimize the parameter $\alpha$ using stochastic gradient descend. Check how the hyper-parameters such as the learning rate and the number of samples affect the training. 304 | 305 | \item Compute the exact derivative of variational energy with respect to $\alpha$ and compare it with its stochastic estimate given by: 306 | \beq 307 | \partial_{{\alpha}} E_{var} = 2 \big[ \langle E_{loc}({\mathit{x}})F_\alpha({\mathit{x}}) \rangle -\langle E_{loc}({\mathit{x}}) \rangle \langle F_\alpha({\mathit{x}}) \rangle \big]. 
308 | \eeq 309 | 310 | \item Check that the optimization still work when implemented with the exact derivative of the variational energy. Comment on whether or not it is advisable to use it during the training. 311 | 312 | \item Comment on the difference between in the SGD method implemented here and the one routinely implement in the training of neural networks. 313 | 314 | \item Change the initialization of the walkers to be distributed between $[-4.5:5.5]$. Run the stochastic gradient descent algorithm. What do you notice? Could the problem be fixed by any form of smart proposal move? 315 | 316 | 317 | \end{enumerate} 318 | 319 | 320 | 321 | For further information, check the modern machine learning software for energy minimization of variational wave functions~\cite{netket}. 322 | 323 | \begin{thebibliography}{} 324 | 325 | 326 | \bibitem{netket} 327 | NetKet, {\small\url{https://www.netket.org/getstarted/home/}}. 328 | 329 | %\begin{thebibliography}{} 330 | 331 | %\bibitem{wang} 332 | %L. Wang, Phys. Rev. B \textbf{94}, 195105 (2016), {\small\url{https://arxiv.org/abs/1606.00318}}. 
333 | 334 | \end{thebibliography} 335 | 336 | \end{document} -------------------------------------------------------------------------------- /Tutorial5/tutorial5_training_vmc_ho.py: -------------------------------------------------------------------------------- 1 | ########## Machine Learning for Quantum Matter and Technology ###################### 2 | ### Juan Carrasquilla, Estelle Inack, Giacomo Torlai, Roger Melko 3 | ### with code by Estelle Inack inspired from https://github.com/agdelma/qmc_ho 4 | ### 5 | ### Tutorial 5: Variational Monte Carlo for the harmonic oscillator 6 | ###################################################################################### 7 | import numpy as np 8 | import matplotlib.pyplot as plt 9 | 10 | red,blue,green = '#e85c47','#4173b2','#7dcca4' 11 | ############# PARAMETERS DEFINITION ####################################### 12 | α = 0.1 13 | num_walkers = 500 14 | num_MC_steps = 100 15 | 16 | num_sgd_steps =100 # number of times to perform SGD 17 | num_equil_steps = 10 18 | 19 | learning_rate = 0.1 20 | 21 | ########### DEFINITION OF THE LOCAL ENERGY ############################ 22 | def EL(x,α): 23 | return 0.5*α + 0.5*x**2*(1-α**2) 24 | 25 | ########### DEFINITION OF THE TRANSITION PROBABILITY ############################ 26 | def transition_probability(x,x̄,α): 27 | return np.exp(-α*(x̄**2-x**2)) 28 | 29 | ########### DEFINITION OF THE FORCE ############################ 30 | def Force(x,α): 31 | return -0.5*x**2 32 | 33 | ########### DEFINITION OF THE EXACT DERIVATIVE OF THE VARIATIONAL ENERGY ############ 34 | def true_der_α(x,α): 35 | pass 36 | 37 | ########### DEFINITION OF THE METROPOLIS ALGORITHM ############################ 38 | def metropolis(num_walkers,walk,α,δ=1.0): 39 | num_accepted = 0 40 | 41 | avg_e = 0.0 42 | avg_f = 0.0 43 | avg_ef = 0.0 44 | 45 | # generate new walker positions with box move 46 | new_walkers = -walk + np.random.rand(num_walkers) 47 | 48 | # generate new walker positions with gaussian moves 49 
| #new_walkers = np.random.normal(loc=walk, scale=δ, size=num_walkers) 50 | 51 | # generate new walker positions with lorentzian move 52 | #new_walkers = np.random.standard_cauchy(size=num_walkers) 53 | 54 | # test new walkers 55 | for i in range(num_walkers): 56 | if np.random.random() < transition_probability(walk[i],new_walkers[i],α): 57 | num_accepted += 1 58 | walk[i] = new_walkers[i] 59 | avg_e += EL(walk[i],α) 60 | avg_f += Force(walk[i],α) 61 | avg_ef += EL(walk[i],α)*Force(walk[i],α) 62 | 63 | # Stochatic estimate of the derivative of the variational energy 64 | der = 2*(avg_ef/num_walkers - (avg_e*avg_f)/num_walkers**2) 65 | 66 | return walk, avg_e/num_walkers, der 67 | 68 | ######## VMC ALGORITHM ###################################### 69 | def vmc(walk,num_walkers,num_MC_steps,num_relax_steps,α): 70 | 71 | eng = [] 72 | der_α = [] 73 | 74 | for step in range(num_MC_steps): 75 | walk, energy, der = metropolis(num_walkers,walk,α) 76 | if step>= num_relax_steps and step%num_relax_steps==0: 77 | eng= np.append(eng,[energy]) 78 | der_α = np.append(der_α,[der]) 79 | return walk, eng, der_α 80 | 81 | 82 | ############## Stochatic Gradient Descent ###################### 83 | def sgd(walk,num_walkers,num_sgd_steps,num_equil_steps,learning_rate,α): 84 | walk, energy, der_α = vmc(walk,num_walkers,num_sgd_steps,num_equil_steps,α) 85 | 86 | der = np.average(der_α) 87 | #Update the learning rate 88 | α -= learning_rate*der 89 | 90 | return walk, α, np.average(energy) 91 | 92 | 93 | ############################################################################# 94 | ################################## TRAINING ################################## 95 | ############################################################################## 96 | 97 | # initialize walkers uniformily in space between [-0.5:0.5] 98 | walkers = -0.5 + np.random.rand(num_walkers) 99 | 100 | ############ Function for plotting: ############ 101 | def updatePlot(walkers): 102 | 103 | ### Plot the density 
distribution of walkers: ### 104 | plt.subplot(121) 105 | x = np.linspace(-4,4,1000) 106 | plt.plot(x,np.exp(-x**2)/np.sqrt(np.pi), '-',linestyle='solid', linewidth=5, color=green, zorder=-10, 107 | label=r'${|\phi_0(x)|}^2$') 108 | plt.hist(walkers, density='true',bins='auto', color=red, alpha=0.7) 109 | plt.legend(loc='best', frameon=False) 110 | plt.ylim(0, 1) 111 | plt.title("Density distribution of the walkers") 112 | plt.ylabel('Probability') 113 | plt.xlabel('x') 114 | 115 | ### Plot the variational energy during training: ### 116 | plt.subplot(222) 117 | plt.plot(sgd_list,var_eng_training,'o-') 118 | plt.xlabel('SGD steps') 119 | plt.ylabel('Variational energy') 120 | 121 | ### Plot the variational parameter: ### 122 | plt.subplot(224) 123 | plt.plot(sgd_list,α_training,'o-') 124 | plt.xlabel('SGD steps') 125 | plt.ylabel('α') 126 | ############ End of plotting function ############ 127 | 128 | ### Train for several SGD steps: ### 129 | 130 | sgd_list = [] 131 | α_training = [] 132 | var_eng_training = [] 133 | k = 10 134 | 135 | for i in range(num_sgd_steps): 136 | # Run SGD 137 | walkers, α, var_eng = sgd(walkers,num_walkers,num_MC_steps,num_equil_steps,learning_rate,α) 138 | 139 | ### Update the plot and print results every k SGD steps: ### 140 | if i % k == 0 or i==1: 141 | 142 | print( "Iteration %d:\n α %f\n E_var(α) %f\n" % (i, α, var_eng) ) 143 | 144 | sgd_list.append(i) 145 | α_training.append(α) 146 | var_eng_training.append(var_eng) 147 | 148 | ### Update the plot of the resulting classifier: ### 149 | fig = plt.figure(2,figsize=(10,5)) 150 | fig.subplots_adjust(hspace=.3,wspace=.3) 151 | plt.clf() 152 | updatePlot(walkers) 153 | plt.pause(0.1) 154 | 155 | plt.savefig('harmonic_oscillator_results.pdf') # Save the figure showing the results in the current directory 156 | 157 | plt.show() 158 | -------------------------------------------------------------------------------- /Tutorial5/tutorial5_vmc_ho.py: 
-------------------------------------------------------------------------------- 1 | ###################################################################################### 2 | ### Code taken from https://github.com/agdelma/qmc_ho 3 | ### modified by Estelle Inack 4 | ### 5 | ### Variational Monte Carlo for the harmonic oscillator 6 | ###################################################################################### 7 | 8 | import numpy as np 9 | import matplotlib.pyplot as plt 10 | 11 | red,blue,green = '#e85c47','#4173b2','#7dcca4' 12 | 13 | def EL(x,α): 14 | return α + x**2*(0.5-2*α**2) 15 | 16 | def transition_probability(x,x̄,α): 17 | return np.exp(-2*α*(x̄**2-x**2)) 18 | 19 | def vmc(num_walkers,num_MC_steps,num_equil_steps,α,δ=1.0): 20 | 21 | # initilaize walkers 22 | walkers = -0.5 + np.random.rand(num_walkers) 23 | 24 | # initialize energy and number of accepted updates 25 | estimator = {'E':np.zeros(num_MC_steps-num_equil_steps)} 26 | num_accepted = 0 27 | 28 | for step in range(num_MC_steps): 29 | 30 | # generate new walker positions 31 | new_walkers = np.random.normal(loc=walkers, scale=δ, size=num_walkers) 32 | 33 | # test new walkers 34 | for i in range(num_walkers): 35 | if np.random.random() < transition_probability(walkers[i],new_walkers[i],α): 36 | num_accepted += 1 37 | walkers[i] = new_walkers[i] 38 | 39 | # measure energy 40 | if step >= num_equil_steps: 41 | measure = step-num_equil_steps 42 | estimator['E'][measure] = EL(walkers[i],α) 43 | 44 | # output the acceptance ratio 45 | print('accept: %4.2f' % (num_accepted/(num_MC_steps*num_walkers))) 46 | 47 | return estimator 48 | 49 | α = 0.4 50 | num_walkers = 400 51 | num_MC_steps = 30000 52 | num_equil_steps = 3000 53 | 54 | np.random.seed(1173) 55 | 56 | estimator = vmc(num_walkers,num_MC_steps,num_equil_steps,α) 57 | 58 | #from scipy.stats import sem 59 | Ē,ΔĒ = np.average(estimator['E']),np.std(estimator['E'])/np.sqrt(estimator['E'].size-1) 60 | 61 | print('Ē = %f ± %f' % (Ē,ΔĒ)) 62 | 63 
| Ēmin = [] 64 | ΔĒmin = [] 65 | α = np.array([0.45, 0.475, 0.5, 0.525, 0.55]) 66 | for cα in α: 67 | estimator = vmc(num_walkers,num_MC_steps,num_equil_steps,cα) 68 | Ē,ΔĒ = np.average(estimator['E']),np.std(estimator['E'])/np.sqrt(estimator['E'].size-1) 69 | Ēmin.append(Ē) 70 | ΔĒmin.append(ΔĒ) 71 | print('%5.3f \t %7.5f ± %f' % (cα,Ē,ΔĒ)) 72 | 73 | 74 | cα = np.linspace(α[0],α[-1],1000) 75 | plt.plot(cα,0.5*cα + 1/(8*cα), '-', linewidth=1, color=green, zorder=-10, 76 | label=r'$\frac{\alpha}{2} + \frac{1}{8\alpha}$') 77 | plt.errorbar(α,Ēmin,yerr=ΔĒmin, linestyle='None', marker='o', elinewidth=1.0, 78 | markersize=6, markerfacecolor=blue, markeredgecolor=blue, ecolor=blue, label='VMC') 79 | plt.xlabel(r'$\alpha$') 80 | plt.ylabel('E') 81 | plt.xlim(0.44,0.56) 82 | plt.legend(loc='upper center') 83 | plt.show() 84 | --------------------------------------------------------------------------------