├── .github └── workflows │ └── build_course_page.yml ├── .gitignore ├── LICENSE ├── README.md ├── admin ├── compile.sh ├── icon.pdf ├── icon.png ├── readme_robot.pdf ├── readme_robot.png ├── syllabus.md └── syllabus.pdf ├── content ├── README.md ├── _config.yml ├── _toc.yml ├── admin │ ├── readme_robot.png │ └── syllabus.md ├── assignments │ ├── Assignment_1:Hopfield_Networks │ │ ├── README.md │ │ └── hopfield_assignment_template.ipynb │ ├── Assignment_2:Search_of_Associative_Memory_Model │ │ ├── Murd62 data │ │ │ ├── fr10-2.txt │ │ │ ├── fr15-2.txt │ │ │ ├── fr20-1.txt │ │ │ ├── fr20-2.txt │ │ │ ├── fr30-1.txt │ │ │ └── fr40-1.txt │ │ ├── README.md │ │ └── sam_assignment_template.ipynb │ ├── Assignment_3:Context_Maintenance_and_Retrieval_Model │ │ ├── PolyEtal09 data │ │ │ ├── README.txt │ │ │ ├── behavior.mat │ │ │ └── stimuli.mat │ │ ├── README.md │ │ └── cmr_assignment_template.ipynb │ ├── Assignment_4:_Laplace_Temporal_Context_Model │ │ └── README.md │ ├── Final_Project │ │ └── README.md │ └── README.md ├── instructions.md ├── outline.md ├── slides │ ├── how_to_read.html │ ├── how_to_read.md │ ├── intro_to_models.html │ ├── intro_to_models.md │ ├── week_1.md │ ├── welcome.html │ └── welcome.md └── themes │ ├── biologically_inspired_networks.md │ ├── data_science_primer.md │ ├── deep_networks.md │ ├── final_projects │ ├── README.md │ └── placeholder.md │ ├── free_recall.md │ ├── hopfield_networks.md │ ├── intro.md │ ├── laplace_tcm.md │ ├── laplace_transform.md │ ├── memory_timescales_i.md │ ├── memory_timescales_ii.md │ ├── sam_model.md │ └── temporal_context_model.md ├── environment.yml └── requirements.txt /.github/workflows/build_course_page.yml: -------------------------------------------------------------------------------- 1 | name: build-course-page 2 | 3 | # Only run this when the master branch changes 4 | on: 5 | push: 6 | branches: 7 | - main 8 | # If your git repository has the Jupyter Book within some-subfolder next to 9 | # unrelated files, you 
can make this run only if a file within that specific 10 | # folder has been modified. 11 | # 12 | # paths: 13 | # - content/** 14 | 15 | # This job installs dependencies, build the book, and pushes it to `gh-pages` 16 | jobs: 17 | deploy-book: 18 | runs-on: ubuntu-latest 19 | steps: 20 | - uses: actions/checkout@v2 21 | 22 | # Install dependencies 23 | - name: Set up Python 3.11.12 24 | uses: actions/setup-python@v1 25 | with: 26 | python-version: 3.11.12 27 | 28 | - name: Install dependencies 29 | run: | 30 | pip install -r requirements.txt 31 | # Build the page 32 | - name: Build the course page 33 | run: | 34 | jupyter-book build content/ 35 | # Push the book's HTML to github-pages 36 | - name: GitHub Pages action 37 | uses: peaceiris/actions-gh-pages@v3.6.1 38 | with: 39 | github_token: ${{ secrets.GITHUB_TOKEN }} 40 | publish_dir: ./content/_build/html 41 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # JB builds 2 | cmm_course/_build/* 3 | 4 | # Byte-compiled / optimized / DLL files 5 | __pycache__/ 6 | *.py[cod] 7 | *$py.class 8 | 9 | # C extensions 10 | *.so 11 | 12 | # Distribution / packaging 13 | .Python 14 | build/ 15 | develop-eggs/ 16 | dist/ 17 | downloads/ 18 | eggs/ 19 | .eggs/ 20 | lib/ 21 | lib64/ 22 | parts/ 23 | sdist/ 24 | var/ 25 | wheels/ 26 | share/python-wheels/ 27 | *.egg-info/ 28 | .installed.cfg 29 | *.egg 30 | MANIFEST 31 | 32 | # PyInstaller 33 | # Usually these files are written by a python script from a template 34 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
35 | *.manifest 36 | *.spec 37 | 38 | # Installer logs 39 | pip-log.txt 40 | pip-delete-this-directory.txt 41 | 42 | # Unit test / coverage reports 43 | htmlcov/ 44 | .tox/ 45 | .nox/ 46 | .coverage 47 | .coverage.* 48 | .cache 49 | nosetests.xml 50 | coverage.xml 51 | *.cover 52 | *.py,cover 53 | .hypothesis/ 54 | .pytest_cache/ 55 | cover/ 56 | 57 | # Translations 58 | *.mo 59 | *.pot 60 | 61 | # Django stuff: 62 | *.log 63 | local_settings.py 64 | db.sqlite3 65 | db.sqlite3-journal 66 | 67 | # Flask stuff: 68 | instance/ 69 | .webassets-cache 70 | 71 | # Scrapy stuff: 72 | .scrapy 73 | 74 | # Sphinx documentation 75 | docs/_build/ 76 | 77 | # PyBuilder 78 | .pybuilder/ 79 | target/ 80 | 81 | # Jupyter Notebook 82 | .ipynb_checkpoints 83 | 84 | # IPython 85 | profile_default/ 86 | ipython_config.py 87 | 88 | # pyenv 89 | # For a library or package, you might want to ignore these files since the code is 90 | # intended to run in multiple environments; otherwise, check them in: 91 | # .python-version 92 | 93 | # pipenv 94 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 95 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 96 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 97 | # install all needed dependencies. 98 | #Pipfile.lock 99 | 100 | # UV 101 | # Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. 102 | # This is especially recommended for binary packages to ensure reproducibility, and is more 103 | # commonly ignored for libraries. 104 | #uv.lock 105 | 106 | # poetry 107 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 108 | # This is especially recommended for binary packages to ensure reproducibility, and is more 109 | # commonly ignored for libraries. 
110 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 111 | #poetry.lock 112 | 113 | # pdm 114 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 115 | #pdm.lock 116 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 117 | # in version control. 118 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control 119 | .pdm.toml 120 | .pdm-python 121 | .pdm-build/ 122 | 123 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 124 | __pypackages__/ 125 | 126 | # Celery stuff 127 | celerybeat-schedule 128 | celerybeat.pid 129 | 130 | # SageMath parsed files 131 | *.sage.py 132 | 133 | # Environments 134 | .env 135 | .venv 136 | env/ 137 | venv/ 138 | ENV/ 139 | env.bak/ 140 | venv.bak/ 141 | 142 | # Spyder project settings 143 | .spyderproject 144 | .spyproject 145 | 146 | # Rope project settings 147 | .ropeproject 148 | 149 | # mkdocs documentation 150 | /site 151 | 152 | # mypy 153 | .mypy_cache/ 154 | .dmypy.json 155 | dmypy.json 156 | 157 | # Pyre type checker 158 | .pyre/ 159 | 160 | # pytype static type analyzer 161 | .pytype/ 162 | 163 | # Cython debug symbols 164 | cython_debug/ 165 | 166 | # PyCharm 167 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 168 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 169 | # and can be added to the global gitignore or merged into this file. For a more nuclear 170 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
171 | #.idea/ 172 | 173 | # PyPI configuration file 174 | .pypirc 175 | admin/.DS_Store 176 | assignments/.DS_Store 177 | .DS_Store 178 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2025 Contextual Dynamics Laboratory 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Models of Memory 2 | 3 | Welcome! This repository contains course materials for the Dartmouth graduate course on computational models of learning and memory (PSYC 133). The syllabus may be found [here](https://github.com/ContextLab/memory-models-course/blob/main/admin/syllabus.pdf). 
Feel free to follow along with the course materials (whether you are officially enrolled in the course or just visiting!), submit comments and suggestions, etc. 4 | 5 | Go [here](https://contextlab.github.io/memory-models-course/README.html) to view a JupyterBook containing all of the course materials, or feel free to browse this repository directly. 6 | 7 |

8 | robot 9 |

-------------------------------------------------------------------------------- /admin/compile.sh: -------------------------------------------------------------------------------- 1 | pandoc -s -o syllabus.pdf syllabus.md --pdf-engine=xelatex -------------------------------------------------------------------------------- /admin/icon.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ContextLab/memory-models-course/d42f510330c799588d7bb22b88a07bc98a7d1513/admin/icon.pdf -------------------------------------------------------------------------------- /admin/icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ContextLab/memory-models-course/d42f510330c799588d7bb22b88a07bc98a7d1513/admin/icon.png -------------------------------------------------------------------------------- /admin/readme_robot.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ContextLab/memory-models-course/d42f510330c799588d7bb22b88a07bc98a7d1513/admin/readme_robot.pdf -------------------------------------------------------------------------------- /admin/readme_robot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ContextLab/memory-models-course/d42f510330c799588d7bb22b88a07bc98a7d1513/admin/readme_robot.png -------------------------------------------------------------------------------- /admin/syllabus.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "PSYC 133: Models of Memory" 3 | geometry: margin=1in 4 | header-includes: 5 | - \usepackage{fontspec} 6 | - \usepackage{booktabs} 7 | - \setmainfont{Berkeley Mono} 8 | output: pdf 9 | --- 10 | 11 | \setlength{\arrayrulewidth}{0.5mm} 12 | \vspace{-0.75in} 13 | \begin{center} 14 | \begin{tabular}{|p{1.25in}|p{2in}|} 
15 | \hline 16 | \textbf{Meeting times} & Th 2--3:30; F 3--4:30 \\ 17 | \hline 18 | \textbf{Classroom} & Moore Library \\ 19 | \hline 20 | \textbf{Instructor} & Dr. Jeremy R. Manning \\ 21 | \hline 22 | \textbf{Email} & \href{mailto:jeremy@dartmouth.edu}{jeremy@dartmouth.edu} \\ 23 | \hline 24 | \textbf{Office location} & 349 Moore Hall \\ 25 | \hline 26 | \textbf{Office hours} & \href{https://context-lab.youcanbook.me}{By appointment} \\ 27 | \hline 28 | \end{tabular} 29 | \end{center} 30 | 31 | ## Course Description 32 | Knowing how our brains organize and spontaneously retrieve memories is at the heart of understanding the basis of the ongoing internal dialog of our conscious thoughts. Put simply, our memories make us _who we are_. In this course, we will use readings, discussions, and hands-on demonstrations to explore historical approaches, current approaches, and hints about the "next generation" of computational (mathematical) models of learning and memory. 33 | 34 | ## Course Goals 35 | This course is intended to train students to: 36 | 37 | - Explore a variety of approaches to building memory models, including neural network models, cognitive process models, and biologically inspired models. Students will implement and train these models, largely from scratch, to gain a deep understanding of how they work. 38 | - Understand different classic and modern approaches to characterizing memory (with a focus on human memory). Students will read primary sources and explore real and synthetic datasets. 39 | - Critically evaluate computational models, construct appropriate tests, and choose appropriate datasets for testing 40 | 41 | ## Pre-Requisites 42 | Students _must_ have prior experience with Python programming in order to do well in this course. Prior coursework on statistics or probability is also highly recommended. 
Additional prior coursework and/or experience with linguistics, linear algebra, statistics, machine learning, artificial intelligence, data science, philosophy of mind, and/or cognitive models will all be useful, but are not required. 43 | 44 | ## Course Materials 45 | **We will use a variety of _freely available_ online materials and research papers, which will be provided throughout the course.** You will also need an internet-enabled computer or tablet capable of displaying and outputting graphics and running a standard web browser (e.g., Chrome or Firefox). 46 | 47 | ## Format and Overview 48 | This course follows an **experiential learning** model. Students will engage with lecture materials through hands-on programming exercises, experiments with models and tools, and group discussions. Problem sets will deepen your understanding of the course material, and small projects will allow you to apply concepts to real-world research problems in the relevant domains. 49 | 50 | Classes will include: 51 | 52 | - **Discussions**: Discussions on foundational topics, understanding and implementing models, experiments, and datasets. Class discussions will be recorded, transcribed, anonymized, summarized, and shared with the group. (Note: if you want to bring something up in class that you do not want included in the summary, please let me know either in the moment so that I can pause the recording, or after class so that I can manually scrub it from the final summary.) 53 | - **Hands-on Labs**: Experimenting with data, designing simple models, and conducting research using these models. All demos will run in Google Colaboratory notebooks. 54 | - **Problem Sets and Projects**: Bi-weekly problem sets to apply what you've learned. These will typically take the form of small-scale "research" projects where you implement a computational memory model and analyze its behavior using real or synthetic data. 
55 | 56 | ## Platforms and Tools 57 | - [**Google Colaboratory**](https://colab.research.google.com/): For developing and running Python code. You may use other programming languages if you prefer, but all in-class coding will be done in Python using Colaboratory. 58 | - [**GitHub**](https://github.com/): Used for managing and sharing code, data, and project work. 59 | - [**Discord**](https://discord.gg/R6kM9bjpFj): To facilitate discussions outside of class, share ideas, and collaborate on projects. 60 | 61 | ## Grading 62 | Grades will be based on the following components: 63 | 64 | - **Problem Sets** (60%): A total of 4 problem sets designed to reinforce key concepts (each is worth 15% of the final course grade). 65 | - **Final Project** (40%): You will carry out a larger scale (relative to the problem sets) "research" project on a topic of your choosing. This will include: 66 | - Python code, organized as a Colaboratory notebook. (Or equivalent if you use another language.) 67 | - A "presentation" to the class, along with an in-class discussion of your project 68 | - A brief (2--5 page) writeup of the main approach and key findings or takeaways 69 | 70 | Students may work together on any of the assignments, unless otherwise noted in class or in the assignment instructions. However, **each student must submit their own problem set and indicate who they worked with**. Final projects will (typically) be completed in groups of 2--3 students, with the entire group turning in the same project (and receiving the same grade for it). 71 | 72 | *Graduate Grading*: If you are a graduate student enrolled in the course, you will very likely receive a "P" (Pass), assuming you come to (and participate in) class (with rare exceptions for emergencies or planned absences) and complete all of the assignments. 
In rare cases, and at the discretion of the instructor, you will earn an "HP" (High Pass) if your work (or participation) goes substantially "above and beyond" what is expected. Conversely, you will earn a "LP" (Low Pass) or "NC" (No Credit) if you do not come to most classes and/or do not complete all of the assignments. My approach to graduate grading is that the onus is on **you** as the student to decide how much you want to get out of the course. If you put in a lot of effort and work hard, you will learn more. If you seek specific feedback, I will provide it. However, it is certainly possible to pass this course as a graduate student with only a modest effort. 73 | 74 | *Undergraduate Grading*: If you are an undergraduate student enrolled in the course, my grading policy is to simply assign you a letter grade based on the numerical scores you receive on your assignments, weighted as described above. The numbers in parentheses reflect percentages of the total number of possible points: A (93–100), A- (90–92), B+ (87–89), B (83–86), B- (80–82), C+ (77–79), C (73–76), C- (70–72), D (60–69), E (0–59). All grades will be rounded to the nearest integer (e.g., a 92.5 average will result in a final grade of "A", whereas a 92.4999 average will result in a final grade of "A-"). Out of fairness to all students in the course, there will be no "negotiations" about grading-- i.e., your grade will be determined solely by your numerical score. Grading on individual assignments will be assigned as follows (for undergraduate students only): 75 | 76 | - A: well above and beyond what is expected for the assignment, all components done correctly, well-organized code, clear documentation and explanations, easy-to-read figures, and so on. 77 | - B: default grade, denoting "solid" work-- demonstrates clear understanding of the material and meets all expectations for the assignment, with perhaps a few minor errors or other issues. 
78 | - C: mostly correct but with a few substantive conceptual or implementation errors. 79 | - D: assignment contains major errors and/or missing components. 80 | - E: either did not turn in the assignment, or did not complete any substantive part of the assignment. 81 | 82 | Detailed feedback will not typically be provided on individual submissions, but you will have the opportunity to raise any issues or questions in class or during my office hours. 83 | 84 | ### Late Policy 85 | For graduate students, problem sets must be submitted prior to the end of the term. To receive specific feedback on the assignment, you must submit it on time (otherwise I will simply mark it as "complete" or "incomplete"). 86 | 87 | For undergraduates, problem sets will receive a 10% deduction for each week late, rounded **up** to the nearest whole week (e.g., from a grading standpoint submitting an assignment 1 minute late is the same as submitting it 1 day late, is the same as submitting it 6 days late). 88 | 89 | For both graduate and undergraduate students, your final project **must be submitted on time** (by 11:59PM on the last day of class) in order to receive credit for it. This will ensure that I have time to compile your grades before I need to send them to the registrar. 90 | 91 | I strongly encourage you to submit your assignments _before_ the last possible moment to avoid grading penalties due to unexpected circumstances (e.g., illness, emergencies, etc.). 92 | 93 | ## Academic Honor Principle 94 | Students are expected to adhere to Dartmouth’s Academic Honor Principle. You are encouraged to collaborate and discuss ideas with classmates, but all submitted work must be your own (aside from the final projects, which will be completed in small groups). If you're unsure about what constitutes a violation, please ask for clarification. 
95 | 96 | ## Use of Generative AI 97 | The point of this course is for **you** to learn the material, not to show that you can prompt an LLM to complete your assignments on your behalf. Therefore my strong _recommendation_ (if you actually want to learn this stuff!) is to **not** use LLMs for your assignments. However, I won't police your work or even attempt to guess whether or not you secretly used Generative AI. Therefore there are just two official "rules" about using Generative AI: 98 | 99 | - Most importantly, **you** are responsible for the content of your assignments, whether written by you "from scratch" or with the help of an LLM (or similar). 100 | - Second, you are bound by the Academic Honor Principle to acknowledge any use of AI in your work. This can be done by either using a brief comment in your code, by explicitly citing the tools you use, or by adding a note to the relevant section(s) of your assignment. Each situation is unique, but you need to make it clear exactly what work is your own vs. produced by AI. You must also include a chat history (including any prompts you used) as an addendum to your assignment(s). 101 | 102 | ## Scheduling Conflicts 103 | Attendance is expected for all classes unless previously arranged. A critical part of the course is the in-class discussions and demos, and those will only work if you are physically present in class. If you anticipate any conflicts due to religious observances or other commitments, please inform the instructor by Week 2 to make appropriate arrangements. 104 | 105 | ## Student Needs 106 | We strive to create an inclusive learning environment where all students feel supported and engaged. If you require any accommodations, please contact the Student Accessibility Services office, or discuss your needs with the instructor privately. 
-------------------------------------------------------------------------------- /admin/syllabus.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ContextLab/memory-models-course/d42f510330c799588d7bb22b88a07bc98a7d1513/admin/syllabus.pdf -------------------------------------------------------------------------------- /content/README.md: -------------------------------------------------------------------------------- 1 | # Models of Memory 2 | 3 | Welcome! This repository contains course materials for the Dartmouth graduate course on computational models of learning and memory. The syllabus may be found [here](https://github.com/ContextLab/memory-models-course/blob/main/admin/syllabus.pdf). Feel free to follow along with the course materials (whether you are officially enrolled in the course or just visiting!), submit comments and suggestions, etc. 4 | 5 |

6 | robot 7 |

8 | 9 | ## A note about this Open Course 10 | This course is taught as an *Open Course*, meaning that the course is designed from the ground up to be shareable and freely accessible to anyone. All code for this course is written in [Python](https://www.python.org/) and most of the material is organized as either [Jupyter notebooks](http://jupyter.org/) or Markdown files. 11 | 12 | Feel free to follow along with this course, do the assignments, post questions and/or issues to this repository or Discord, suggest changes, etc. However, I won't formally evaluate your submitted work unless you are a Dartmouth student who is currently enrolled. 13 | 14 | If you are a course instructor teaching overlapping material, feel free to borrow any materials used in this course! If you directly copy (or "draw heavy inspiration from") the materials, I would appreciate a citation (e.g., a pointer to this repository). I'd also love to hear from you about how you're using this resource! 15 | 16 | This course is a continually evolving work in progress. I plan to update the material to keep the syllabus fresh and relevant. By the same token, although my goal is 100% accuracy and currency, it's unlikely that I'll achieve that goal. You should participate with the understanding that this material will likely have occasional mistakes, omissions, errors, etc. Given this fact, one way to approach the course is to maintain an open yet critical view of the material. If you think there's a mistake, I encourage you to bring it to my attention! 17 | 18 | -------------------------------------------------------------------------------- /content/_config.yml: -------------------------------------------------------------------------------- 1 | # Book settings 2 | # Learn more at https://jupyterbook.org/customize/config.html 3 | 4 | title: Computational memory Models 5 | author: Jeremy R. 
Manning 6 | copyright: 2025 7 | logo: https://raw.githubusercontent.com/ContextLab/memory-models-course/main/admin/readme_robot.png 8 | 9 | # Force re-execution of notebooks on each build. 10 | # See https://jupyterbook.org/content/execute.html 11 | execute: 12 | execute_notebooks: false 13 | 14 | # Define the name of the latex output file for PDF builds 15 | latex: 16 | latex_documents: 17 | targetname: book.tex 18 | 19 | # Add a bibtex file so that we can create citations 20 | bibtex_bibfiles: 21 | - references.bib 22 | 23 | sphinx: 24 | config: 25 | bibtex_reference_style: author_year 26 | 27 | # Information about where the book exists on the web 28 | repository: 29 | url: https://github.com/ContextLab/memory-models-course # Online location of your book 30 | path_to_book: content # Optional path to your book, relative to the repository root 31 | branch: main # Which branch of the repository should be used when creating links (optional) 32 | 33 | # Add GitHub buttons to your book 34 | # See https://jupyterbook.org/customize/config.html#add-a-link-to-your-repository 35 | html: 36 | use_issues_button: true 37 | use_repository_button: true 38 | 39 | # correct latex parsing 40 | parse: 41 | myst_enable_extensions: 42 | - amsmath 43 | - dollarmath -------------------------------------------------------------------------------- /content/_toc.yml: -------------------------------------------------------------------------------- 1 | format: jb-book 2 | root: README 3 | parts: 4 | - caption: Main Content 5 | chapters: 6 | - file: instructions 7 | - file: admin/syllabus 8 | - file: outline 9 | - caption: Assignments 10 | chapters: 11 | - file: assignments/README 12 | - file: assignments/Assignment_1:Hopfield_Networks/README 13 | sections: 14 | - file: assignments/Assignment_1:Hopfield_Networks/hopfield_assignment_template 15 | - file: assignments/Assignment_2:Search_of_Associative_Memory_Model/README 16 | sections: 17 | - file: 
assignments/Assignment_2:Search_of_Associative_Memory_Model/sam_assignment_template 18 | - file: assignments/Assignment_3:Context_Maintenance_and_Retrieval_Model/README 19 | sections: 20 | - file: assignments/Assignment_3:Context_Maintenance_and_Retrieval_Model/cmr_assignment_template 21 | - file: assignments/Assignment_4:_Laplace_Temporal_Context_Model/README 22 | - file: assignments/Final_Project/README -------------------------------------------------------------------------------- /content/admin/readme_robot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ContextLab/memory-models-course/d42f510330c799588d7bb22b88a07bc98a7d1513/content/admin/readme_robot.png -------------------------------------------------------------------------------- /content/admin/syllabus.md: -------------------------------------------------------------------------------- 1 | # Course Description 2 | Knowing how our brains organize and spontaneously retrieve memories is at the heart of understanding the basis of the ongoing internal dialog of our conscious thoughts. Put simply, our memories make us _who we are_. In this course, we will use readings, discussions, and hands-on demonstrations to explore historical approaches, current approaches, and hints about the "next generation" of computational (mathematical) models of learning and memory. 3 | 4 | | | | 5 | |----------------------|----------------------------------------------------------------------------| 6 | | **Meeting times** | Thursdays from 2 -- 3:30, Fridays from 3 -- 4:30 | 7 | | **Classroom** | 4th Floor Library, [Moore Hall](https://pbs.dartmouth.edu/department/department-psychological-and-brain-sciences) | 8 | | **Instructor** | [Dr. Jeremy R. 
Manning](https://context-lab.com) | 9 | | **Email** | [jeremy@dartmouth.edu](mailto:jeremy@dartmouth.edu) | 10 | | **Office location** | 349 [Moore Hall](https://pbs.dartmouth.edu/department/department-psychological-and-brain-sciences) | 11 | | **Office hours** | [By appointment](https://context-lab.youcanbook.me) | 12 | 13 | 14 | 15 | 16 | ## Course Goals 17 | This course is intended to train students to: 18 | 19 | - Explore a variety of approaches to building memory models, including neural network models, cognitive process models, and biologically inspired models. Students will implement and train these models, largely from scratch, to gain a deep understanding of how they work. 20 | - Understand different classic and modern approaches to characterizing memory (with a focus on human memory). Students will read primary sources and explore real and synthetic datasets. 21 | - Critically evaluate computational models, construct appropriate tests, and choose appropriate datasets for testing 22 | 23 | ## Pre-Requisites 24 | Students _must_ have prior experience with Python programming in order to do well in this course. Prior coursework on statistics or probability is also highly recommended. Additional prior coursework and/or experience with linguistics, linear algebra, statistics, machine learning, artificial intelligence, data science, philosophy of mind, and/or cognitive models will all be useful, but are not required. 25 | 26 | ## Course Materials 27 | **We will use a variety of _freely available_ online materials and research papers, which will be provided throughout the course.** You will also need an internet-enabled computer or tablet capable of displaying and outputting graphics and running a standard web browser (e.g., Chrome or Firefox). 28 | 29 | ## Format and Overview 30 | This course follows an **experiential learning** model. 
Students will engage with lecture materials through hands-on programming exercises, experiments with models and tools, and group discussions. Problem sets will deepen your understanding of the course material, and small projects will allow you to apply concepts to real-world research problems in the relevant domains. 31 | 32 | Classes will include: 33 | 34 | - **Discussions**: Discussions on foundational topics, understanding and implementing models, experiments, and datasets. Class discussions will be recorded, transcribed, anonymized, summarized, and shared with the group. (Note: if you want to bring something up in class that you do not want included in the summary, please let me know either in the moment so that I can pause the recording, or after class so that I can manually scrub it from the final summary.) 35 | - **Hands-on Labs**: Experimenting with data, designing simple models, and conducting research using these models. All demos will run in Google Colaboratory notebooks. 36 | - **Problem Sets and Projects**: Bi-weekly problem sets to apply what you've learned. These will typically take the form of small-scale "research" projects where you implement a computational memory model and analyze its behavior using real or synthetic data. 37 | 38 | ## Platforms and Tools 39 | - [**Google Colaboratory**](https://colab.research.google.com/): For developing and running Python code. You may use other programming languages if you prefer, but all in-class coding will be done in Python using Colaboratory. 40 | - [**GitHub**](https://github.com/): Used for managing and sharing code, data, and project work. 41 | - [**Discord**](https://discord.gg/R6kM9bjpFj): To facilitate discussions outside of class, share ideas, and collaborate on projects. 42 | 43 | ## Grading 44 | Grades will be based on the following components: 45 | 46 | - **Problem Sets** (60%): A total of 4 problem sets designed to reinforce key concepts (each is worth 15% of the final course grade). 
47 | - **Final Project** (40%): You will carry out a larger scale (relative to the problem sets) "research" project on a topic of your choosing. This will include: 48 | - Python code, organized as a Colaboratory notebook. (Or equivalent if you use another language.) 49 | - A "presentation" to the class, along with an in-class discussion of your project 50 | - A brief (2--5 page) writeup of the main approach and key findings or takeaways 51 | 52 | Students may work together on any of the assignments, unless otherwise noted in class or in the assignment instructions. However, **each student must submit their own problem set and indicate who they worked with**. Final projects will (typically) be completed in groups of 2--3 students, with the entire group turning in the same project (and receiving the same grade for it). 53 | 54 | *Graduate Grading*: If you are a graduate student enrolled in the course, you will very likely receive a "P" (Pass), assuming you come to (and participate in) class (with rare exceptions for emergencies or planned absences) and complete all of the assignments. In rare cases, and at the discretion of the instructor, you will earn an "HP" (High Pass) if your work (or participation) goes substantially "above and beyond" what is expected. Conversely, you will earn a "LP" (Low Pass) or "NC" (No Credit) if you do not come to most classes and/or do not complete all of the assignments. My approach to graduate grading is that the onus is on **you** as the student to decide how much you want to get out of the course. If you put in a lot of effort and work hard, you will learn more. If you seek specific feedback, I will provide it. However, it is certainly possible to pass this course as a graduate student with only a modest effort. 
55 | 56 | *Undergraduate Grading*: If you are an undergraduate student enrolled in the course, my grading policy is to simply assign you a letter grade based on the numerical scores you receive on your assignments, weighted as described above. The numbers in parentheses reflect percentages of the total number of possible points: A (93–100), A- (90–92), B+ (87–89), B (83–86), B- (80–82), C+ (77–79), C (73–76), C- (70–72), D (60–69), E (0–59). All grades will be rounded to the nearest integer (e.g., a 92.5 average will result in a final grade of "A", whereas a 92.4999 average will result in a final grade of "A-"). Out of fairness to all students in the course, there will be no "negotiations" about grading-- i.e., your grade will be determined solely by your numerical score. Grading on individual assignments will be assigned as follows (for undergraduate students only): 57 | 58 | - A: well above and beyond what is expected for the assignment, all components done correctly, well-organized code, clear documentation and explanations, easy-to-read figures, and so on. 59 | - B: default grade, denoting "solid" work-- demonstrates clear understanding of the material and meets all expectations for the assignment, with perhaps a few minor errors or other issues. 60 | - C: mostly correct but with a few substantive conceptual or implementation errors. 61 | - D: assignment contains major errors and/or missing components. 62 | - E: either did not turn in the assignment, or did not complete any substantive part of the assignment. 63 | 64 | Detailed feedback will not typically be provided on individual submissions, but you will have the opportunity to raise any issues or questions in class or during my office hours. 65 | 66 | ### Late Policy 67 | For graduate students, problem sets must be submitted prior to the end of the term. To receive specific feedback on the assignment, you must submit it on time (otherwise I will simply mark it as "complete" or "incomplete").
68 | 69 | For undergraduates, problem sets will receive a 10% deduction for each week late, rounded **up** to the nearest whole week (e.g., from a grading standpoint submitting an assignment 1 minute late is the same as submitting it 1 day late, is the same as submitting it 6 days late). 70 | 71 | For both graduate and undergraduate students, your final project **must be submitted on time** (by 11:59PM on the last day of class) in order to receive credit for it. This will ensure that I have time to compile your grades before I need to send them to the registrar. 72 | 73 | I strongly encourage you to submit your assignments _before_ the last possible moment, both to avoid grading penalties and to protect against unexpected circumstances (e.g., illness, emergencies, etc.). 74 | 75 | ## Academic Honor Principle 76 | Students are expected to adhere to Dartmouth’s Academic Honor Principle. You are encouraged to collaborate and discuss ideas with classmates, but all submitted work must be your own (aside from the final projects, which will be completed in small groups). If you're unsure about what constitutes a violation, please ask for clarification. 77 | 78 | ## Use of Generative AI 79 | The point of this course is for **you** to learn the material, not to show that you can prompt an LLM to complete your assignments on your behalf. Therefore my strong _recommendation_ (if you actually want to learn this stuff!) is to **not** use LLMs for your assignments. However, I won't police your work or even attempt to guess whether or not you secretly used Generative AI. Therefore there are just two official "rules" about using Generative AI: 80 | 81 | - Most importantly, **you** are responsible for the content of your assignments, whether written by you "from scratch" or with the help of an LLM (or similar). 82 | - Second, you are bound by the Academic Honor Principle to acknowledge any use of AI in your work.
This can be done by either using a brief comment in your code, by explicitly citing the tools you use, or by adding a note to the relevant section(s) of your assignment. Each situation is unique, but you need to make it clear exactly what work is your own vs. produced by AI. You must also include a chat history (including any prompts you used) as an addendum to your assignment(s). 83 | 84 | ## Scheduling Conflicts 85 | Attendance is expected for all classes unless previously arranged. A critical part of the course is the in-class discussions and demos, and those will only work if you are physically present in class. If you anticipate any conflicts due to religious observances or other commitments, please inform the instructor by Week 2 to make appropriate arrangements. 86 | 87 | ## Student Needs 88 | We strive to create an inclusive learning environment where all students feel supported and engaged. If you require any accommodations, please contact the Student Accessibility Services office, or discuss your needs with the instructor privately. -------------------------------------------------------------------------------- /content/assignments/Assignment_1:Hopfield_Networks/README.md: -------------------------------------------------------------------------------- 1 | # Assignment 1: Hopfield Networks 2 | 3 | ## Overview 4 | 5 | In this assignment, you will explore computational memory models by implementing a Hopfield network. [Here](https://github.com/ContextLab/memory-models-course/blob/main/content/assignments/Assignment_1%3AHopfield_Networks/hopfield_assignment_template.ipynb) is a suggested template to help get you started. 6 | 7 | In the original article ([Hopfield, 1982](https://www.dropbox.com/scl/fi/iw9wtr3xjvrbqtk38obid/Hopf82.pdf?rlkey=x3my329oj9952er68sr28c7xc&dl=1)), neuronal activations were set to either 0 ("not firing") or 1 ("firing"). Modern Hopfield networks nearly always follow an updated implementation, first proposed by [Amit et al. 
(1985)](https://www.dropbox.com/scl/fi/3a3adwqf70afb9kmieezn/AmitEtal85.pdf?rlkey=78fckvuuvk9t3o9fbpjrmn6de&dl=1). In their framing, neurons take on activation values of either –1 ("down state") or +1 ("up state"). This has three important benefits: 8 | 9 | - It provides a cleaner way to implement the Hebbian learning rule (i.e., without subtracting means or shifting values). 10 | - It avoids a bias toward 0 (i.e., +1 and –1 are equally "attractive," whereas 0-valued neurons have a stronger "pull"). 11 | - The energy function (which describes the attractor dynamics of the network) can be directly mapped onto the [Ising model](https://en.wikipedia.org/wiki/Ising_model) from statistical physics. 12 | 13 | You should start by reading [Amit et al. (1985)](https://www.dropbox.com/scl/fi/3a3adwqf70afb9kmieezn/AmitEtal85.pdf?rlkey=78fckvuuvk9t3o9fbpjrmn6de&dl=1) closely. Then implement the model in a Google Colaboratory notebook. Unless otherwise noted, all references to "the paper" refer to Amit et al. (1985). 14 | 15 | --- 16 | 17 | ## Tasks 18 | 19 | ### 1. Implement Memory Storage and Retrieval 20 | 21 | #### Objective 22 | 23 | Write functions that implement the core operations of a Hopfield network. 24 | 25 | #### Memory Storage 26 | 27 | Implement the Hebbian learning rule to compute the weight matrix, given a set of network configurations (memories). This is described in *Equation 1.5* of the paper: 28 | 29 | Let $p$ be the number of patterns and $\xi_i^\mu \in \{-1, +1\}$ the value of neuron $i$ in pattern $\mu$. The synaptic coupling between neurons $i$ and $j$ is: 30 | 31 | $$ 32 | J_{ij} = \sum_{\mu=1}^p \xi_i^\mu \xi_j^\mu 33 | $$ 34 | 35 | Note that the matrix is symmetric ($J_{ij} = J_{ji}$), and there are no self-connections by definition ($J_{ii} = 0$). 36 | 37 | #### Memory Retrieval 38 | 39 | Implement the retrieval rule using *Equation 1.3* and surrounding discussion. 
At each time step, each neuron updates according to its local field: 40 | 41 | $$ 42 | h_i = \sum_{j=1}^N J_{ij} S_j 43 | $$ 44 | 45 | Each neuron updates its state by aligning with the sign of the field: 46 | 47 | $$ 48 | S_i(t+1) = \text{sign}(h_i(t)) = \text{sign} \left( \sum_{j} J_{ij} S_j(t) \right) 49 | $$ 50 | 51 | Here, $S_i \in \{-1, +1\}$ is the current state of neuron $i$. To "retrieve" a memory: 52 | - Start by setting all of the neural activations to the **cue**. 53 | - Loop through all neurons (in a random order), updating one at a time according to the above equation, given the weight matrix ($J$) and the current activities of each of the other neurons. 54 | - Continue looping until either (a) you have "updated" every neuron in the latest loop, but no activities have changed, or (b) a maximum number of iterations is reached. 55 | - Return the current state of the network as the retrieved memory. 56 | 57 | Optional: instead of always following the update rule, add "noise" to the network by (in a very small proportion of updates) randomly flipping the neuron's activity. This can sometimes improve a network's performance by "kicking" the network out of local minima! It can be instructive to explore a network's performance with vs. without noise, and/or as a function of how much noise is being added. 58 | 59 | --- 60 | 61 | ### 2. Test with a Small Network 62 | 63 | Encode the following test memories in a Hopfield network with $N = 5$ neurons: 64 | 65 | $$ 66 | \xi^1 = [+1, -1, +1, -1, +1] \\ 67 | \xi^2 = [-1, +1, -1, +1, -1] 68 | $$ 69 | 70 | - Store these memories using the Hebbian rule. 71 | - Test retrieval by presenting the network with noisy versions (e.g., flipping a sign, or setting some entries to 0). 72 | - Briefly discuss your observations. 73 | 74 | Questions to consider: 75 | 76 | - Can you tell how and why the network stores memories? 77 | - Why do some memories interfere while others don’t? 
78 | - Can you construct memory sets that do or don’t work in a small network? 79 | - What factors do you think affect the **capacity** of the network? 80 | 81 | --- 82 | 83 | ### 3. Evaluate Storage Capacity 84 | 85 | #### Objective 86 | 87 | Determine how memory recovery degrades as you vary: 88 | 89 | - **Network size** (number of neurons) 90 | - **Number of stored memories** 91 | 92 | To generate $m$ memories $\xi_1, \dots, \xi_m$ for a network of size $N$, use: 93 | 94 | ```python 95 | import numpy as np 96 | xi = 2 * (np.random.rand(m, N) > 0.5) - 1 97 | ``` 98 | 99 | #### Method 100 | 101 | - For each configuration, run multiple trials. 102 | - For each trial, measure whether **at least 99%** of the memory is recovered. 103 | 104 | #### Visualization 1 105 | 106 | Create a heatmap: 107 | 108 | - $x$-axis: network size 109 | - $y$-axis: number of stored memories 110 | - Color: proportion of memories retrieved with ≥99% accuracy 111 | 112 | #### Visualization 2 113 | 114 | Plot the estimated number of accurately retrieved memories as a function of network size. 115 | You can use the heatmap you made above to estimate the number of accurately retrieved memories: 116 | - Choose a target proportion (e.g., $p = 0.8$ or similar) 117 | - For each network size you tested to make your heatmap, compute the maximum number of memories for which at least proportion $p$ were successfully retrieved 118 | 119 | #### Follow-Up 120 | 121 | - What relationship (if any) emerges between network size and capacity? 122 | - Can you develop rules or intuitions that help predict a network’s capacity? 123 | - Hint: see page 3 of [Amit et al. (1985)](https://www.dropbox.com/scl/fi/3a3adwqf70afb9kmieezn/AmitEtal85.pdf?rlkey=78fckvuuvk9t3o9fbpjrmn6de) 124 | 125 | --- 126 | 127 | ### 4. Simulate Cued Recall 128 | 129 | #### Objective 130 | 131 | Evaluate how the network performs associative recall when only a **cue** is presented. 
132 | 133 | #### Setup: A–B Pair Structure 134 | 135 | - Each memory consists of two parts: 136 | - First half: **Cue** ($A$) 137 | - Second half: **Response** ($B$) 138 | 139 | If $N$ is odd: 140 | - Let cue length = $\lfloor N/2 \rfloor$ 141 | - Let response length = $\lceil N/2 \rceil$ 142 | 143 | Each full memory: 144 | 145 | $$ 146 | \xi^\mu = \begin{bmatrix} A^\mu \\ B^\mu \end{bmatrix} 147 | $$ 148 | 149 | #### Simulation Procedure 150 | 151 | 1. **Choose a memory** $\xi^\mu$ 152 | 2. **Construct initial state** $x$: 153 | - Cue half: set to $A^\mu$ 154 | - Response half: set to 0 155 | 3. **Evolve the network** using the update rule: 156 | 157 | $$ 158 | x_i \leftarrow \text{sign} \left( \sum_j J_{ij} x_j \right) 159 | $$ 160 | 161 | - Optionally: **clamp** the cue (i.e., hold cue values fixed) 162 | 4. **Evaluate success**: 163 | - Compare recovered response to $B^\mu$ 164 | - Mark as successful if ≥99% of bits match: 165 | 166 | $$ 167 | \frac{1}{|B|} \sum_{i \in \text{response}} \mathbb{1}[x^*_i = B^\mu_i] \geq 0.99 168 | $$ 169 | 170 | #### Analysis 171 | 172 | - Repeat across many $A$–$B$ pairs 173 | - For each network size $N$, compute the expected number of correctly retrieved responses 174 | - Plot this value as a function of $N$ 175 | 176 | #### Optional Extensions 177 | 178 | - Compare performance with and without clamping the cue 179 | - Try cueing with noisy or partial versions of $A$ 180 | 181 | --- 182 | 183 | ### 5. Simulate Contextual Drift 184 | 185 | #### Objective 186 | 187 | Investigate how gradual changes in **context** influence which memories are recalled. 188 | 189 | #### Setup: Item–Context Representation 190 | 191 | - Use a Hopfield network with 100 neurons. 
192 | - Each memory: 193 | - First 50 neurons: **Item** 194 | - Last 50 neurons: **Context** 195 | 196 | Create a sequence of 10 memories: 197 | 198 | $$ 199 | \xi^t = \begin{bmatrix} \text{item}^t \\ \text{context}^t \end{bmatrix} 200 | $$ 201 | 202 | Context drift: 203 | 204 | - Set $\text{context}^1$ randomly 205 | - For each subsequent $\text{context}^{t+1}$, copy $\text{context}^t$ and flip ~10% of the bits 206 | 207 | #### Simulation Procedure 208 | 209 | 1. Store all 10 memories in the network. 210 | 2. For each memory $i = 1, \dots, 10$: 211 | - Cue the network with $\text{context}^i$ 212 | - Set item neurons to 0 213 | - Run until convergence 214 | - For each stored memory $j$, compare recovered item to $\text{item}^j$ 215 | - If ≥75% of bits match, record $j$ as retrieved 216 | - Record $\Delta = j - i$ (relative offset) 217 | 218 | #### Analysis 219 | 220 | - Repeat the procedure (e.g., 100 trials). Note that you will need to "reset" the network (i.e., start with an empty weight matrix and re-encode the 10 memories) each time you repeat the simulation. 221 | - For each $\Delta \in [-9, +9]$, compute: 222 | - Probability of retrieval 223 | - 95% confidence interval 224 | 225 | #### Visualization 226 | 227 | Create a line plot: 228 | 229 | - $x$-axis: Relative position $\Delta$ 230 | - $y$-axis: Retrieval probability 231 | - Error bars: 95% confidence intervals 232 | 233 | Write a brief interpretation of the observed pattern. 
234 | 235 | #### Optional Extensions 236 | 237 | - Vary the drift rate and observe the effect 238 | - Try random (non-gradual) context changes 239 | - Explore links to recency effects or memory generalization 240 | 241 | --- 242 | 243 | ## Submission Instructions 244 | 245 | - [Submit](https://canvas.dartmouth.edu/courses/71051/assignments/517353) a single standalone Google Colaboratory notebook (or similar) that includes: 246 | - Your full model implementation 247 | - Markdown cells explaining your methods, assumptions, and findings 248 | - Plots and results for each section 249 | - Your notebook should run **without errors** in Google Colaboratory. 250 | -------------------------------------------------------------------------------- /content/assignments/Assignment_1:Hopfield_Networks/hopfield_assignment_template.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "d2c467d4", 6 | "metadata": { 7 | "colab_type": "text", 8 | "id": "view-in-github" 9 | }, 10 | "source": [ 11 | "\"Open" 12 | ] 13 | }, 14 | { 15 | "cell_type": "markdown", 16 | "id": "4112dba2", 17 | "metadata": { 18 | "id": "4112dba2" 19 | }, 20 | "source": [ 21 | "# Submission Template\n", 22 | "\n", 23 | "This notebook provides a suggested starter template for completing the [Hopfield network assignment](https://contextlab.github.io/memory-models-course/assignments/Assignment_1%3AHopfield_Networks/README.html).\n", 24 | "\n", 25 | "You should submit your assignment by uploading your completed notebook to [Canvas](https://canvas.dartmouth.edu/courses/71051/assignments/517353). Please ensure that your notebook runs without errors in [Google Colaboratory](https://colab.research.google.com/)." 
26 | ] 27 | }, 28 | { 29 | "cell_type": "markdown", 30 | "id": "13a772fc", 31 | "metadata": { 32 | "id": "13a772fc" 33 | }, 34 | "source": [ 35 | "## Setup\n", 36 | "\n", 37 | "Import necessary libraries and define helper functions." 38 | ] 39 | }, 40 | { 41 | "cell_type": "code", 42 | "execution_count": null, 43 | "id": "3487ab4e", 44 | "metadata": { 45 | "id": "3487ab4e" 46 | }, 47 | "outputs": [], 48 | "source": [ 49 | "import numpy as np\n", 50 | "import matplotlib.pyplot as plt\n", 51 | "\n", 52 | "# Define any helper functions here" 53 | ] 54 | }, 55 | { 56 | "cell_type": "markdown", 57 | "id": "a38cd155", 58 | "metadata": { 59 | "id": "a38cd155" 60 | }, 61 | "source": [ 62 | "## Task 1: Memory Storage and Retrieval\n", 63 | "\n", 64 | "Implement the Hebbian learning rule and update dynamics." 65 | ] 66 | }, 67 | { 68 | "cell_type": "code", 69 | "execution_count": null, 70 | "id": "49999427", 71 | "metadata": { 72 | "id": "49999427" 73 | }, 74 | "outputs": [], 75 | "source": [ 76 | "# TODO: Implement memory storage (Hebbian rule) and retrieval update rule here" 77 | ] 78 | }, 79 | { 80 | "cell_type": "markdown", 81 | "id": "ea862e41", 82 | "metadata": { 83 | "id": "ea862e41" 84 | }, 85 | "source": [ 86 | "## Task 2: Small Network Test\n", 87 | "\n", 88 | "Use a small network with $N = 5$ to test memory storage and retrieval." 89 | ] 90 | }, 91 | { 92 | "cell_type": "code", 93 | "execution_count": null, 94 | "id": "3e5fb216", 95 | "metadata": { 96 | "id": "3e5fb216" 97 | }, 98 | "outputs": [], 99 | "source": [ 100 | "# TODO: Encode and test small network with noisy inputs" 101 | ] 102 | }, 103 | { 104 | "cell_type": "markdown", 105 | "id": "5c170918", 106 | "metadata": { 107 | "id": "5c170918" 108 | }, 109 | "source": [ 110 | "## Task 3: Evaluate Storage Capacity\n", 111 | "\n", 112 | "Test how performance varies with network size and number of memories." 
113 | ] 114 | }, 115 | { 116 | "cell_type": "code", 117 | "execution_count": null, 118 | "id": "97192fac", 119 | "metadata": { 120 | "id": "97192fac" 121 | }, 122 | "outputs": [], 123 | "source": [ 124 | "# TODO: Run multiple trials and generate heatmap + expectation plot" 125 | ] 126 | }, 127 | { 128 | "cell_type": "markdown", 129 | "id": "84937332", 130 | "metadata": { 131 | "id": "84937332" 132 | }, 133 | "source": [ 134 | "## Task 4: Cued Recall\n", 135 | "\n", 136 | "Simulate associative memory with cue-response pairs." 137 | ] 138 | }, 139 | { 140 | "cell_type": "code", 141 | "execution_count": null, 142 | "id": "51395b97", 143 | "metadata": { 144 | "id": "51395b97" 145 | }, 146 | "outputs": [], 147 | "source": [ 148 | "# TODO: Implement A-B memory pairs and test cued retrieval" 149 | ] 150 | }, 151 | { 152 | "cell_type": "markdown", 153 | "id": "76339e63", 154 | "metadata": { 155 | "id": "76339e63" 156 | }, 157 | "source": [ 158 | "## Task 5: Contextual Drift\n", 159 | "\n", 160 | "Simulate sequential memories with gradually shifting contexts." 161 | ] 162 | }, 163 | { 164 | "cell_type": "code", 165 | "execution_count": null, 166 | "id": "be69a9df", 167 | "metadata": { 168 | "id": "be69a9df" 169 | }, 170 | "outputs": [], 171 | "source": [ 172 | "# TODO: Implement drifting context memory and analyze retrieval bias" 173 | ] 174 | }, 175 | { 176 | "cell_type": "markdown", 177 | "id": "43278d97", 178 | "metadata": { 179 | "id": "43278d97" 180 | }, 181 | "source": [ 182 | "## Discussion\n", 183 | "\n", 184 | "Summarize your findings, observations, and any interesting results." 
185 | ] 186 | } 187 | ], 188 | "metadata": { 189 | "colab": { 190 | "include_colab_link": true, 191 | "provenance": [] 192 | }, 193 | "language_info": { 194 | "name": "python" 195 | } 196 | }, 197 | "nbformat": 4, 198 | "nbformat_minor": 5 199 | } 200 | -------------------------------------------------------------------------------- /content/assignments/Assignment_2:Search_of_Associative_Memory_Model/Murd62 data/fr10-2.txt: -------------------------------------------------------------------------------- 1 | 6 1 4 7 10 2 8 2 | 10 9 6 8 2 1 88 3 | 10 7 9 8 1 4 | 10 88 7 2 1 5 5 | 10 3 7 2 1 9 6 | 6 8 10 5 3 88 7 | 8 9 10 7 88 4 8 | 9 10 4 6 88 8 1 9 | 8 9 10 7 2 10 | 10 8 9 2 3 11 | 10 9 6 8 7 12 | 8 9 10 7 6 1 2 13 | 10 9 7 6 1 14 | 10 9 4 1 3 5 2 15 | 8 9 10 6 1 4 5 16 | 10 9 8 7 2 5 17 | 9 10 7 5 4 8 2 1 18 | 10 7 8 4 3 19 | 10 8 7 5 1 9 20 | 10 7 8 2 3 6 4 21 | 10 8 9 4 88 2 5 3 6 22 | 10 1 8 6 2 4 3 23 | 10 1 7 6 3 8 9 24 | 10 9 7 8 2 3 1 25 | 9 10 4 1 7 8 6 26 | 10 8 9 2 3 6 4 27 | 7 8 9 10 5 6 1 28 | 10 8 9 7 1 29 | 10 9 8 7 6 5 1 30 | 8 9 10 4 5 6 1 2 31 | 10 9 5 7 1 2 32 | 10 88 88 1 2 3 33 | 10 8 88 5 6 1 2 88 34 | 10 9 6 7 3 4 1 2 35 | 9 8 10 6 2 4 3 1 7 36 | 9 10 88 7 3 2 1 37 | 8 9 10 6 5 7 38 | 88 9 10 6 5 1 2 39 | 7 8 88 10 3 1 2 40 | 10 9 5 4 7 41 | 7 8 9 10 4 1 88 3 42 | 8 10 4 2 1 43 | 10 9 8 4 7 3 1 2 44 | 7 8 10 6 3 1 45 | 10 8 9 2 1 4 46 | 10 9 7 4 88 1 47 | 9 8 10 4 6 88 7 48 | 9 10 8 7 5 6 1 2 49 | 10 9 4 6 7 1 2 50 | 10 9 88 7 8 6 51 | 8 10 9 7 2 52 | 8 9 10 4 88 1 2 6 5 53 | 7 8 9 10 2 3 1 6 54 | 7 88 9 10 3 4 1 2 55 | 7 8 10 6 2 3 5 56 | 7 8 9 10 3 5 4 57 | 7 8 9 10 2 3 5 6 1 58 | 7 8 88 10 2 1 5 59 | 8 9 10 7 88 60 | 9 8 10 88 4 6 2 7 1 61 | 7 8 10 9 3 1 2 4 62 | 7 8 9 10 6 1 4 63 | 8 9 10 7 3 2 1 5 6 64 | 6 8 7 9 10 2 1 65 | 6 88 9 10 4 3 5 2 66 | 10 88 9 5 88 2 1 3 4 67 | 6 7 8 10 1 4 68 | 8 9 10 7 2 1 88 69 | 10 8 7 6 2 3 1 4 5 70 | 8 9 10 5 6 7 71 | 6 7 9 8 10 4 72 | 7 8 9 10 5 73 | 6 8 9 10 1 2 3 88 5 74 | 5 7 8 9 10 3 4 75 | 10 8 9 3 
4 1 2 76 | 8 9 10 7 6 77 | 7 9 8 10 5 1 2 78 | 10 9 7 5 6 79 | 7 8 9 10 4 5 6 1 80 | 7 8 9 10 1 2 5 3 88 6 81 | 7 8 10 9 6 1 3 82 | 10 1 7 6 4 9 5 2 83 | 10 9 7 3 88 1 4 6 84 | 10 1 9 6 4 2 7 8 85 | 10 9 8 4 1 3 86 | 6 2 1 88 9 10 8 87 | 10 9 4 3 1 7 88 | 1 4 7 6 5 3 2 10 8 89 | 1 2 6 5 7 4 9 10 90 | 2 1 88 7 8 9 10 6 4 91 | 9 10 4 6 2 92 | 1 2 3 7 10 9 93 | 10 88 9 6 1 94 | 1 2 4 5 9 10 6 7 3 95 | 10 1 9 6 7 3 4 96 | 6 1 9 8 10 5 97 | 1 2 88 10 7 98 | 3 4 5 9 10 1 2 99 | 10 1 2 9 8 5 3 100 | 9 10 5 1 6 8 101 | 1 2 5 3 10 9 8 102 | 1 7 88 88 5 10 2 103 | 9 10 8 1 2 3 4 6 104 | 9 10 8 7 1 2 3 4 105 | 10 1 88 88 9 88 106 | 6 1 10 9 7 2 107 | 10 9 8 7 6 3 108 | 9 3 1 2 7 4 10 5 109 | 9 10 7 8 1 2 4 88 5 110 | 10 9 7 1 3 5 6 2 111 | 9 10 8 6 1 2 3 4 112 | 1 2 88 4 8 7 10 113 | 8 9 7 10 3 4 1 88 114 | 1 3 2 8 6 9 10 115 | 1 2 4 9 10 8 6 116 | 8 88 10 1 2 3 117 | 10 9 7 8 3 2 118 | 2 1 4 5 10 88 6 9 7 119 | 5 4 10 88 120 | 10 9 8 2 3 88 1 121 | 1 3 2 8 9 10 4 122 | 7 10 6 3 1 4 88 123 | 3 4 7 8 10 9 2 1 124 | 10 5 7 8 6 1 2 4 88 125 | 9 3 10 2 4 1 8 6 126 | 10 88 4 8 9 127 | 1 2 6 7 10 9 88 128 | 8 9 7 10 2 1 5 88 6 4 129 | 9 6 8 7 10 1 2 5 130 | 10 7 8 6 9 1 3 4 5 131 | 8 7 10 9 1 3 88 4 5 132 | 4 1 2 3 9 8 10 5 6 133 | 10 7 9 3 1 4 2 8 134 | 9 10 5 3 4 2 1 6 88 135 | 9 10 4 5 6 7 8 136 | 8 9 10 6 2 1 3 4 5 137 | 8 9 10 7 5 6 1 3 2 138 | 10 9 7 5 3 2 4 1 139 | 10 9 5 8 4 7 140 | 9 10 3 5 2 1 4 141 | 3 1 2 4 5 10 6 142 | 6 7 10 88 2 9 143 | 8 9 10 5 6 3 4 1 2 144 | 10 6 9 5 8 1 2 4 145 | 10 5 4 88 3 146 | 8 9 10 7 1 4 5 2 3 147 | 10 4 9 88 1 148 | 9 10 4 2 3 1 6 149 | 6 7 5 10 1 150 | 1 2 3 4 9 10 5 6 8 151 | 3 5 7 9 10 2 1 6 152 | 10 8 9 2 3 7 153 | 1 9 8 10 6 7 3 5 2 4 154 | 1 3 7 4 6 10 9 155 | 9 10 7 6 3 4 1 2 156 | 8 7 9 5 1 2 157 | 10 3 7 6 9 158 | 10 9 8 2 1 5 6 159 | 9 3 7 5 6 10 8 88 160 | 2 3 1 6 4 5 8 10 161 | 1 3 6 7 8 10 2 9 162 | 1 2 3 9 6 10 7 163 | 1 2 3 4 9 10 8 164 | 1 4 3 9 10 2 165 | 1 2 3 8 7 9 166 | 1 2 3 4 10 6 167 | 1 88 9 10 3 168 | 1 2 3 4 5 9 
10 7 169 | 88 1 3 9 10 6 7 4 170 | 1 2 4 3 8 9 10 7 171 | 1 3 2 4 8 10 9 6 172 | 1 2 5 8 10 6 7 173 | 1 2 9 10 5 88 174 | 1 2 3 5 9 10 4 175 | 1 3 5 10 9 176 | 1 2 5 8 7 10 9 4 177 | 1 2 10 7 9 178 | 1 2 3 4 10 8 5 7 179 | 1 2 3 7 8 10 9 180 | 2 5 6 9 10 7 3 181 | 1 2 3 4 7 10 182 | 1 2 5 7 8 10 183 | 1 3 9 10 8 184 | 1 2 4 6 8 10 185 | 1 8 6 10 9 186 | 1 2 3 6 8 10 187 | 1 2 3 4 5 8 9 10 188 | 1 3 2 4 5 7 9 10 189 | 1 2 4 8 10 9 190 | 1 2 5 6 8 10 88 191 | 1 2 3 4 7 8 10 6 9 192 | 1 2 3 88 5 88 88 9 10 193 | 1 2 3 6 10 9 8 194 | 1 2 4 8 9 10 7 195 | 1 2 3 4 9 10 7 8 196 | 1 2 3 4 6 88 10 5 197 | 1 2 3 9 88 10 7 198 | 1 2 3 4 5 9 10 8 199 | 1 2 3 5 10 7 88 200 | 1 2 88 6 4 10 88 201 | 6 7 10 2 3 1 202 | 6 7 88 10 2 203 | 1 7 3 8 10 204 | 1 2 3 6 9 88 8 7 10 205 | 1 2 3 4 10 6 8 206 | 1 2 4 10 8 207 | 1 9 10 6 8 5 2 208 | 8 9 10 7 6 5 209 | 1 2 6 9 10 210 | 7 88 10 4 5 6 1 3 9 211 | 7 8 9 10 1 88 212 | 1 2 3 4 8 10 9 5 6 213 | 1 2 3 4 5 7 10 9 8 214 | 1 2 3 88 5 8 7 10 215 | 1 2 3 4 7 10 8 6 216 | 1 2 3 4 5 9 6 10 217 | 1 2 3 7 9 10 218 | 1 2 3 4 5 8 10 219 | 1 3 2 5 9 10 8 220 | 1 2 5 3 9 10 6 4 221 | 1 2 3 4 88 9 8 10 222 | 1 2 3 4 10 88 7 223 | 1 2 3 4 5 10 88 7 224 | 1 3 2 4 9 7 10 225 | 1 2 3 4 5 6 10 226 | 1 2 4 3 6 9 10 7 227 | 1 2 3 4 7 8 6 10 228 | 1 2 8 5 9 10 88 229 | 1 2 3 4 6 9 10 5 230 | 1 2 3 4 5 7 9 8 10 231 | 1 2 3 7 6 10 8 4 232 | 1 2 3 4 5 6 10 233 | 1 2 3 4 8 9 10 7 234 | 88 2 3 5 6 7 10 9 235 | 1 2 3 4 5 6 10 8 236 | 1 2 3 6 4 10 8 7 237 | 1 2 3 4 5 10 9 6 238 | 1 88 7 9 10 8 239 | 1 2 3 5 8 9 10 6 7 240 | 6 1 2 3 4 5 10 88 241 | 3 9 10 2 88 8 5 7 6 242 | 8 9 10 6 2 5 243 | 7 10 8 88 3 88 244 | 9 10 8 1 4 88 245 | 10 9 8 3 1 4 246 | 10 8 9 88 247 | 9 10 8 7 248 | 9 10 4 3 2 5 1 249 | 10 8 5 7 2 3 250 | 10 8 9 7 1 2 251 | 9 10 8 6 252 | 8 9 10 2 3 1 5 253 | 9 4 10 1 6 3 254 | 9 10 8 4 5 7 255 | 10 2 7 6 9 256 | 8 9 10 2 1 257 | 2 9 10 5 6 258 | 4 2 9 10 3 259 | 7 8 9 10 3 1 260 | 2 8 88 261 | 1 2 3 10 7 262 | 1 5 6 9 263 | 1 9 10 5 264 | 7 8 9 10 
4 265 | 8 9 6 10 5 4 1 266 | 6 7 8 88 9 2 267 | 6 8 9 10 3 4 268 | 7 8 9 10 269 | 7 8 9 10 270 | 6 7 9 8 10 271 | 88 6 8 10 272 | 9 88 10 273 | 8 10 9 7 274 | 8 9 10 6 1 275 | 9 10 88 276 | 8 9 10 2 1 277 | 6 9 10 278 | 9 10 88 88 279 | 9 88 4 280 | 7 8 88 10 281 | 1 9 10 5 88 88 282 | 7 9 10 3 2 1 283 | 4 3 8 9 10 284 | 5 6 7 10 1 2 285 | 7 9 8 286 | 8 7 9 10 4 88 287 | 7 6 9 10 288 | 6 7 88 9 289 | 6 9 10 8 4 5 290 | 7 8 9 10 6 5 3 291 | 6 8 9 292 | 8 88 6 10 1 88 4 293 | 9 7 8 10 1 294 | 7 8 9 10 88 1 88 295 | 7 10 1 296 | 6 9 10 7 4 2 297 | 5 9 8 88 10 1 298 | 88 6 8 10 1 299 | 8 9 10 7 2 88 300 | 88 8 10 9 4 301 | 7 8 9 10 6 4 302 | 7 9 88 6 10 8 303 | 8 9 10 6 5 304 | 7 8 9 10 1 2 305 | 7 88 9 10 4 1 3 306 | 8 9 10 4 1 7 307 | 7 9 10 6 1 3 308 | 6 7 8 10 9 5 2 309 | 7 9 8 10 3 2 4 5 310 | 7 9 10 8 1 3 311 | 5 7 8 6 10 312 | 7 8 9 10 5 2 313 | 7 6 8 9 10 5 3 314 | 7 8 9 10 6 315 | 7 8 88 10 6 316 | 6 7 10 9 1 2 317 | 8 10 9 7 2 318 | 7 5 8 10 9 319 | 5 6 8 7 9 10 320 | 5 6 9 10 8 2 321 | 1 2 3 4 10 7 8 5 6 322 | 1 2 4 9 10 8 6 7 5 323 | 1 2 3 8 10 9 7 324 | 1 4 5 6 10 3 88 325 | 1 2 9 8 10 88 326 | 1 2 6 9 10 4 7 327 | 1 2 5 9 10 6 7 328 | 1 2 4 5 6 9 10 329 | 1 8 9 10 2 7 330 | 4 8 9 10 7 88 331 | 1 4 2 9 6 10 3 332 | 1 2 3 6 5 8 9 10 4 333 | 1 3 7 9 8 2 10 334 | 1 2 5 8 88 10 88 335 | 1 2 6 9 336 | 1 4 8 6 9 10 7 337 | 1 2 4 7 9 10 338 | 1 4 3 9 10 88 339 | 1 2 9 8 10 5 6 7 340 | 5 2 3 7 10 6 9 341 | 1 2 3 7 8 9 5 342 | 1 2 88 88 9 10 5 88 343 | 1 8 9 7 10 4 3 2 344 | 6 8 9 10 3 1 4 345 | 9 10 4 5 1 88 7 2 346 | 1 7 6 9 10 347 | 1 88 9 3 8 88 10 88 348 | 1 6 7 8 10 5 4 2 349 | 1 7 8 9 10 350 | 1 2 3 4 5 6 7 8 351 | 3 4 5 6 7 8 9 10 1 2 352 | 1 6 5 4 9 88 10 8 353 | 1 2 3 8 7 10 354 | 1 88 8 9 10 3 355 | 1 2 7 8 10 88 356 | 1 2 3 4 8 6 9 10 88 357 | 88 88 9 7 10 3 358 | 1 7 4 3 2 10 9 359 | 88 9 8 10 360 | 1 8 9 10 4 7 2 6 361 | 1 2 3 6 9 10 7 8 362 | 1 2 3 8 7 10 6 5 363 | 6 8 9 10 1 3 4 364 | 88 9 8 5 2 1 365 | 1 2 9 10 3 8 6 366 | 1 3 5 4 9 10 7 2 367 | 4 
7 8 9 6 10 368 | 3 5 7 8 10 4 369 | 1 5 6 7 9 10 8 3 4 370 | 3 2 8 5 88 10 88 6 4 371 | 1 8 88 372 | 1 2 4 5 6 8 9 10 88 373 | 2 4 1 3 7 8 10 374 | 1 2 3 4 6 8 7 9 375 | 1 88 6 8 7 5 3 2 376 | 1 2 3 4 88 9 10 377 | 1 2 3 8 9 10 7 88 378 | 10 8 4 3 6 9 379 | 1 2 88 9 10 5 6 380 | 1 2 9 10 6 5 4 381 | 1 2 3 5 9 10 88 382 | 3 2 9 10 8 7 383 | 5 2 8 9 10 1 88 384 | 1 2 5 9 7 8 10 3 385 | 4 1 7 6 8 9 10 88 2 386 | 1 2 4 5 10 8 7 3 387 | 4 9 1 10 3 8 388 | 6 10 9 8 1 2 4 7 88 389 | 1 3 2 4 88 6 8 7 10 390 | 1 2 3 4 9 10 5 8 391 | 1 2 3 8 9 10 7 6 392 | 7 8 6 4 10 3 2 1 393 | 5 6 7 8 9 10 2 1 3 4 394 | 5 7 8 9 10 1 3 2 4 395 | 5 6 7 8 88 88 1 2 3 4 396 | 5 9 8 6 10 7 1 2 88 88 397 | 5 6 8 7 9 10 1 2 3 4 398 | 5 6 9 10 7 1 3 399 | 5 6 7 8 9 10 2 3 1 4 400 | 5 6 9 10 8 1 2 3 4 401 | 10 8 7 6 1 2 9 88 402 | 10 9 6 7 8 3 88 403 | 9 4 7 1 6 10 404 | 4 88 1 9 10 88 88 405 | 10 3 8 1 9 406 | 10 9 8 6 3 5 7 1 407 | 10 6 7 8 1 3 408 | 10 9 4 8 1 6 409 | 10 9 8 5 2 410 | 10 9 8 4 2 1 3 411 | 10 9 1 3 6 2 8 412 | 10 9 6 4 1 3 2 5 413 | 10 9 5 6 1 88 4 414 | 10 9 4 7 1 3 415 | 10 7 8 3 9 4 416 | 10 8 9 5 6 417 | 10 9 8 4 2 3 88 418 | 88 4 9 3 419 | 10 8 7 9 88 1 420 | 10 7 8 1 2 6 421 | 10 9 7 1 5 4 2 422 | 9 7 6 2 1 88 3 423 | 10 1 2 9 88 7 4 424 | 10 9 8 3 2 1 425 | 10 9 8 3 5 1 6 426 | 10 2 1 6 5 9 3 4 427 | 10 9 8 2 3 1 7 428 | 10 1 6 9 5 7 4 3 429 | 10 7 8 2 1 3 9 430 | 10 9 6 3 5 8 1 2 431 | 10 9 8 2 5 1 7 3 432 | 10 9 88 88 1 88 5 2 433 | 10 9 8 7 5 2 3 1 434 | 9 88 88 2 10 4 435 | 9 6 2 1 10 7 88 436 | 9 7 1 10 88 437 | 10 9 6 2 7 438 | 10 9 6 88 2 3 88 88 439 | 10 8 9 2 4 5 1 440 | 10 88 4 1 6 2 9 441 | 10 9 8 1 2 4 442 | 10 9 8 6 88 7 443 | 9 8 7 10 1 444 | 10 5 4 2 9 8 6 3 445 | 10 2 8 9 5 88 446 | 10 9 8 4 88 5 447 | 10 9 6 7 88 448 | 10 6 9 7 88 449 | 10 9 7 8 3 6 5 450 | 10 9 7 6 3 1 2 451 | 10 9 8 7 2 4 6 452 | 10 9 4 8 1 5 6 3 453 | 10 8 7 4 3 1 9 454 | 10 8 9 6 3 88 88 455 | 10 9 7 3 6 5 1 456 | 10 7 6 9 1 3 4 457 | 10 9 5 8 2 1 3 7 458 | 10 8 6 5 2 3 7 1 459 | 9 8 
88 10 88 88 460 | 10 9 6 88 88 1 5 461 | 10 9 8 6 88 7 4 462 | 88 8 9 88 1 88 463 | 10 9 1 88 2 5 464 | 2 10 9 8 7 1 465 | 10 88 88 4 6 88 466 | 10 8 4 3 7 5 9 467 | 1 10 6 4 9 468 | 1 10 9 8 2 4 469 | 10 9 1 88 88 2 6 8 470 | 10 7 9 8 1 6 471 | 10 9 8 6 5 2 472 | 10 9 5 6 3 4 473 | 1 10 9 8 6 5 3 474 | 10 88 9 88 88 6 475 | 1 10 6 5 9 4 8 476 | 1 10 7 8 9 6 2 477 | 88 10 8 88 2 3 7 478 | 1 10 9 7 4 3 88 479 | 1 10 9 7 3 4 5 6 88 480 | 1 10 88 88 6 481 | 10 7 6 8 1 3 482 | 9 6 1 2 10 8 483 | 7 10 9 1 88 484 | 9 10 5 6 4 1 485 | 9 7 8 2 10 1 486 | 5 6 8 10 1 487 | 8 9 10 6 3 1 488 | 10 2 1 6 7 4 489 | 1 8 3 7 88 490 | 10 9 8 6 88 4 491 | 7 8 9 10 3 1 88 492 | 1 4 2 9 10 88 8 88 493 | 5 6 7 8 10 9 494 | 5 9 4 10 6 495 | 7 8 1 6 4 496 | 88 9 10 8 88 7 497 | 8 2 9 10 498 | 1 9 10 3 5 7 6 499 | 10 9 5 6 1 88 2 7 500 | 6 7 9 5 1 10 501 | 1 2 9 8 7 5 502 | 8 9 5 6 2 1 4 503 | 9 10 4 88 1 6 2 504 | 9 10 8 4 2 1 6 7 3 5 505 | 8 9 10 7 5 1 6 4 506 | 8 9 10 6 7 1 507 | 7 8 9 6 4 1 508 | 7 8 10 9 5 3 1 509 | 6 8 7 10 5 9 1 2 510 | 9 1 10 6 511 | 7 8 9 1 5 2 4 512 | 9 6 5 10 1 3 513 | 9 10 7 8 6 3 88 88 514 | 9 8 3 10 88 5 1 515 | 9 8 10 7 1 88 4 88 516 | 8 9 10 7 1 2 517 | 8 9 10 7 6 518 | 7 88 10 5 6 1 3 4 519 | 9 10 7 8 1 2 5 6 520 | 1 9 10 2 7 4 521 | 8 9 10 7 88 1 5 3 6 522 | 8 10 2 88 523 | 7 9 10 6 8 3 4 2 524 | 9 10 7 2 8 525 | 8 1 2 10 6 88 526 | 9 7 10 2 88 3 4 527 | 88 3 88 7 8 9 88 5 528 | 7 9 8 10 4 529 | 8 9 10 7 5 1 2 4 530 | 4 9 10 6 2 8 531 | 7 9 8 1 2 532 | 7 8 9 10 4 5 6 1 533 | 7 8 10 9 5 6 88 1 2 534 | 6 1 7 8 9 2 3 535 | 7 8 10 2 5 6 536 | 10 4 9 8 5 2 537 | 8 6 9 10 88 7 3 1 2 538 | 9 10 5 6 3 4 539 | 7 8 9 10 1 540 | 3 4 5 10 6 9 88 7 1 8 541 | 8 9 10 5 2 6 4 542 | 7 8 10 9 3 543 | 7 8 9 4 5 544 | 10 7 9 8 1 2 545 | 10 8 9 3 4 88 546 | 9 10 8 7 2 1 3 4 547 | 9 10 6 4 3 548 | 9 88 8 5 6 1 2 4 549 | 7 8 10 5 6 550 | 9 10 88 3 4 5 6 551 | 9 4 3 10 7 552 | 88 10 5 6 7 8 2 553 | 7 8 9 10 1 2 5 3 554 | 7 8 9 10 88 4 3 555 | 7 88 10 5 6 3 4 556 | 7 8 10 9 1 2 6 
557 | 2 7 8 9 10 558 | 4 3 7 8 9 10 559 | 9 10 4 88 5 6 560 | 9 10 6 5 1 88 3 4 561 | 10 1 3 7 8 6 4 5 562 | 10 8 5 6 1 2 563 | 10 9 8 1 564 | 2 10 7 88 1 88 565 | 10 8 9 7 3 6 566 | 10 9 1 88 88 567 | 9 10 2 88 7 6 568 | 10 4 9 1 569 | 5 6 7 88 10 8 4 570 | 10 9 6 7 4 571 | 9 8 10 88 6 572 | 10 8 9 88 3 7 1 573 | 8 9 10 7 4 574 | 10 88 6 4 9 575 | 10 1 88 5 9 6 576 | 10 8 1 9 577 | 4 8 9 5 2 10 1 578 | 9 10 5 4 88 579 | 9 10 7 5 6 3 580 | 10 6 7 4 3 5 581 | 8 9 10 88 7 4 5 582 | 6 4 2 1 9 583 | 9 10 7 8 1 584 | 10 9 7 8 585 | 10 8 7 1 9 3 6 586 | 10 9 88 5 7 587 | 10 4 3 2 88 88 588 | 10 88 9 1 4 589 | 10 9 6 88 1 2 8 7 590 | 9 10 5 1 591 | 7 9 8 10 2 592 | 10 7 9 6 5 593 | 10 6 8 9 594 | 8 9 7 1 2 10 595 | 10 6 9 4 2 1 88 596 | 9 6 1 10 88 597 | 10 88 88 6 8 598 | 10 88 9 7 599 | 10 9 8 4 600 | 10 5 9 4 7 6 1 601 | 1 3 7 9 10 5 602 | 88 8 2 7 88 603 | 10 9 8 1 7 2 4 604 | 10 8 9 7 605 | 10 9 8 1 7 2 606 | 10 5 6 9 7 4 607 | 10 8 9 1 88 3 608 | 10 7 8 9 609 | 10 6 9 8 4 5 610 | 9 10 1 8 7 5 611 | 9 6 10 8 7 612 | 1 10 9 6 8 7 613 | 10 88 8 1 3 2 614 | 10 8 9 7 88 3 2 88 615 | 10 9 6 4 616 | 10 7 9 8 4 5 617 | 10 7 8 9 5 6 618 | 88 4 9 10 5 619 | 88 9 10 6 5 7 620 | 10 9 8 4 1 621 | 8 9 10 6 5 3 4 1 622 | 10 1 9 8 7 623 | 5 10 8 2 9 3 624 | 7 10 9 2 1 625 | 9 8 10 4 2 626 | 10 8 4 2 88 7 627 | 10 6 7 9 628 | 6 10 8 2 9 7 629 | 88 6 88 4 10 2 5 1 630 | 8 7 9 6 3 4 631 | 8 9 10 3 6 632 | 10 9 8 2 7 4 5 633 | 8 9 10 88 7 2 3 634 | 9 10 8 88 6 88 3 635 | 10 6 88 3 88 8 636 | 10 8 9 6 5 637 | 9 5 10 3 88 638 | 10 3 9 88 639 | 10 9 7 6 5 1 640 | 10 88 88 88 6 641 | 9 10 3 5 6 8 88 642 | 10 9 7 8 6 5 4 2 1 643 | 10 8 9 1 2 7 3 5 88 644 | 8 9 10 88 2 1 3 645 | 8 7 9 10 3 6 5 1 2 646 | 7 8 9 10 6 4 3 1 2 5 647 | 8 9 10 1 7 6 3 5 648 | 10 8 9 6 7 4 5 1 2 3 649 | 7 8 9 10 5 6 2 1 3 4 650 | 10 8 9 6 7 3 5 2 1 4 651 | 8 9 10 6 7 4 1 3 88 5 652 | 8 9 10 7 2 1 3 4 5 6 653 | 8 9 10 4 5 6 1 3 2 654 | 7 8 9 10 5 3 4 655 | 10 9 5 6 8 1 3 88 656 | 10 8 9 6 5 7 4 1 657 | 6 7 8 9 10 5 4 
658 | 9 10 7 8 2 1 3 4 6 659 | 8 9 10 88 7 1 2 3 660 | 7 9 10 5 2 3 4 6 661 | 8 9 10 6 5 7 1 2 3 4 662 | 7 8 9 10 5 2 1 4 3 6 663 | 7 8 9 10 6 5 3 4 1 664 | 8 9 10 5 6 7 4 2 1 665 | 9 10 2 1 6 5 8 7 3 4 666 | 9 10 7 8 5 6 4 88 667 | 7 8 9 10 3 5 6 4 668 | 9 10 7 8 5 6 2 1 4 3 669 | 9 10 7 8 4 88 3 5 1 2 670 | 7 8 9 10 5 6 3 4 1 2 671 | 7 8 9 10 3 4 5 6 672 | 9 10 7 8 5 6 3 4 1 2 673 | 9 10 7 8 6 4 1 2 3 674 | 9 10 5 6 7 8 1 2 4 3 675 | 9 10 8 7 4 3 1 2 6 676 | 9 10 7 8 4 3 6 1 2 677 | 7 8 9 10 5 6 2 88 678 | 7 8 9 10 5 6 3 4 1 2 679 | 7 8 9 10 5 6 680 | 7 8 9 10 5 6 88 88 1 681 | 9 10 5 6 7 8 3 4 1 2 682 | 8 9 10 6 7 1 2 3 88 5 683 | 10 8 9 7 4 5 6 684 | 7 8 9 10 5 6 1 2 3 4 685 | 7 8 9 10 1 2 5 686 | 8 9 10 88 88 2 88 687 | 9 10 7 6 8 5 3 4 688 | 9 10 7 8 5 6 3 4 2 1 689 | 7 8 9 10 6 690 | 9 88 7 8 5 6 4 2 1 3 691 | 9 10 7 8 6 5 4 3 692 | 7 8 9 10 1 2 5 6 693 | 7 8 9 10 1 3 2 4 88 694 | 7 8 9 10 1 2 3 4 5 6 695 | 9 10 5 6 7 8 4 3 2 1 696 | 8 9 10 5 6 88 3 7 697 | 9 10 3 4 7 8 5 1 2 698 | 7 8 9 10 5 6 699 | 7 8 9 10 5 88 2 700 | 6 7 8 9 10 4 88 5 1 2 701 | 6 7 8 9 88 2 3 4 5 702 | 7 8 9 10 6 1 2 5 703 | 7 8 9 10 2 3 4 6 5 1 704 | 9 10 8 7 4 3 5 6 2 1 705 | 7 88 9 10 4 5 3 1 2 706 | 6 7 8 9 10 2 1 5 707 | 7 8 9 10 4 3 6 5 1 88 708 | 7 8 9 10 2 1 88 709 | 7 8 9 10 2 88 5 6 710 | 7 8 9 10 1 3 4 5 6 2 711 | 7 8 9 10 1 2 3 6 5 712 | 7 8 9 10 5 6 1 2 713 | 7 8 9 10 3 5 6 4 714 | 9 10 5 6 7 8 3 4 715 | 9 10 8 6 7 5 1 2 716 | 5 6 7 8 9 10 3 4 717 | 5 6 7 8 9 10 718 | 9 10 7 8 5 3 4 719 | 9 10 8 7 5 6 4 1 3 2 720 | 5 6 7 8 9 10 1 2 721 | 10 5 7 3 2 722 | 1 2 3 6 9 10 5 723 | 10 1 9 7 724 | 9 10 6 1 4 725 | 8 9 1 2 3 10 726 | 10 9 7 5 6 1 727 | 10 9 88 1 2 4 728 | 8 5 1 2 10 4 3 729 | 7 8 10 1 730 | 7 8 9 10 2 4 1 731 | 8 9 10 6 88 732 | 6 7 9 8 1 3 4 5 733 | 6 7 9 10 1 2 3 734 | 6 7 8 9 10 5 1 2 735 | 10 6 7 9 88 736 | 9 6 8 10 1 7 737 | 10 6 88 2 1 738 | 6 9 10 88 3 5 739 | 8 9 10 7 6 740 | 6 7 8 9 10 2 1 741 | 5 9 7 1 10 88 3 2 742 | 88 7 9 10 1 743 | 5 7 8 10 1 2 4 3 744 
| 5 6 7 8 9 10 1 2 3 745 | 5 6 7 8 10 1 3 2 746 | 6 7 9 10 5 2 88 747 | 88 8 9 10 1 4 748 | 10 8 7 5 3 749 | 6 7 8 10 5 750 | 6 7 8 10 1 2 4 751 | 5 6 7 9 10 3 2 1 752 | 10 88 88 5 88 1 2 3 753 | 5 7 9 8 10 2 1 754 | 6 7 8 9 10 1 755 | 5 6 9 10 2 4 88 756 | 5 8 10 1 2 757 | 6 7 8 9 10 5 3 2 758 | 6 88 88 1 88 2 4 759 | 88 8 88 10 1 2 760 | 8 7 10 4 3 1 761 | 6 1 7 9 10 3 762 | 6 7 10 1 2 3 88 763 | 9 10 5 88 764 | 5 6 7 8 10 2 4 765 | 7 9 8 10 4 2 1 6 766 | 6 7 9 10 4 3 1 767 | 7 8 9 10 6 768 | 6 8 10 1 769 | 5 7 6 8 10 88 770 | 5 6 7 9 10 771 | 9 88 8 2 1 772 | 5 6 8 9 10 1 773 | 7 8 4 10 774 | 3 88 88 9 10 775 | 88 9 10 7 5 776 | 9 6 10 4 5 777 | 9 10 5 2 1 88 4 778 | 88 10 5 88 779 | 6 9 8 10 2 780 | 7 8 9 781 | 88 8 6 10 4 88 782 | 5 88 7 10 88 783 | 6 5 10 2 1 4 784 | 6 8 7 9 10 1 2 4 3 785 | 6 9 88 10 4 1 2 786 | 5 8 7 10 9 3 2 787 | 6 7 8 10 9 4 5 1 788 | 5 6 7 9 88 4 2 1 3 789 | 5 6 8 10 2 3 4 790 | 1 2 6 10 791 | 7 8 9 10 1 3 792 | 88 7 10 4 1 793 | 5 7 9 10 794 | 5 6 8 10 88 795 | 5 6 9 10 1 2 88 796 | 7 8 9 10 6 88 797 | 6 7 88 10 1 798 | 9 88 10 5 88 1 799 | 5 6 8 10 7 1 2 800 | 5 8 7 10 88 3 1 801 | 1 3 6 10 7 8 2 802 | 9 1 2 8 10 6 5 3 803 | 7 10 9 1 8 2 804 | 1 2 3 10 7 805 | 10 3 9 7 8 1 2 806 | 9 10 2 6 1 807 | 9 10 1 7 6 3 2 808 | 9 10 5 1 8 3 809 | 10 9 1 2 6 5 3 810 | 8 9 10 7 88 1 811 | 8 9 10 7 2 812 | 6 7 9 8 10 1 2 3 813 | 8 9 6 4 7 10 814 | 7 8 9 10 1 4 815 | 7 88 9 10 4 88 816 | 6 8 7 9 10 4 1 817 | 7 8 9 10 2 6 818 | 7 9 10 2 3 4 819 | 7 8 10 2 5 820 | 6 7 8 9 10 5 821 | 6 7 9 10 3 1 5 2 822 | 6 7 9 2 1 5 823 | 8 9 7 10 1 2 4 3 824 | 7 5 8 9 10 825 | 6 8 10 9 4 1 5 826 | 7 9 3 2 1 10 88 827 | 9 10 8 2 3 4 6 5 828 | 10 7 9 1 88 829 | 9 10 8 1 4 3 2 830 | 5 8 10 1 2 3 7 6 831 | 6 9 8 1 2 10 832 | 5 88 10 3 1 2 4 833 | 9 10 8 1 88 88 2 834 | 7 8 9 10 2 3 1 6 835 | 9 10 3 2 1 6 88 4 836 | 8 9 10 3 1 2 837 | 9 10 6 2 4 8 838 | 10 7 88 9 3 2 88 4 839 | 7 8 9 10 1 840 | 8 10 9 4 88 1 2 841 | 7 8 9 10 1 2 842 | 7 88 10 3 2 1 843 | 8 7 9 10 4 844 
| 6 7 8 10 3 2 1 845 | 8 9 10 4 1 2 846 | 6 88 9 10 5 847 | 9 6 7 8 10 3 1 88 4 848 | 7 8 9 10 1 3 2 88 849 | 7 8 9 4 1 3 2 850 | 5 6 9 10 1 3 2 4 851 | 9 10 7 8 852 | 8 9 10 1 3 4 5 6 853 | 8 9 4 3 1 5 10 854 | 88 6 8 10 9 88 3 855 | 5 10 2 1 8 856 | 8 9 10 7 88 2 1 4 5 6 857 | 6 8 7 10 2 1 3 4 858 | 5 8 9 10 4 859 | 7 3 10 88 2 860 | 7 88 10 1 2 88 4 861 | 8 9 10 3 4 1 7 862 | 7 9 88 1 2 4 863 | 8 2 3 10 4 864 | 8 9 10 1 2 7 865 | 8 9 88 5 88 1 4 866 | 8 9 10 1 2 4 867 | 7 8 9 10 1 4 868 | 5 7 9 88 1 2 3 869 | 6 8 9 3 10 4 5 1 2 870 | 5 1 88 10 8 7 871 | 6 7 9 10 4 3 872 | 4 88 7 88 3 2 1 5 873 | 8 9 10 1 2 6 4 3 874 | 7 8 9 10 88 5 88 2 875 | 7 8 9 10 88 5 6 876 | 6 8 10 9 1 88 877 | 7 8 9 10 2 878 | 6 8 10 9 1 5 879 | 1 2 6 5 88 10 7 880 | 8 9 10 1 6 5 4 2 3 881 | 1 5 6 10 7 9 88 882 | 1 2 8 3 10 6 5 883 | 1 5 9 10 88 884 | 10 8 9 3 4 2 885 | 7 1 8 9 10 2 88 88 886 | 1 2 4 10 6 5 8 9 887 | 1 8 9 88 10 7 888 | 1 2 4 9 3 10 889 | 2 1 3 4 10 9 88 890 | 10 8 9 3 4 7 891 | 10 9 8 88 4 892 | 1 2 88 3 9 6 10 893 | 1 6 9 10 4 5 894 | 1 88 2 88 9 10 895 | 1 6 7 4 10 896 | 9 10 8 6 897 | 6 7 8 9 10 1 5 898 | 6 7 8 10 2 3 899 | 2 8 10 1 9 900 | 1 3 2 4 5 6 7 10 901 | 1 2 3 4 5 8 10 7 902 | 1 2 3 4 5 7 10 903 | 1 2 3 4 10 88 9 904 | 7 8 9 10 1 2 3 4 905 | 6 7 8 9 10 1 2 906 | 6 7 8 9 10 1 907 | 1 88 4 10 88 6 9 908 | 7 8 10 9 1 2 3 4 5 909 | 1 88 7 3 9 8 10 4 910 | 1 2 3 5 88 10 9 8 911 | 5 6 7 9 8 10 1 2 3 4 912 | 5 6 4 10 1 2 3 913 | 1 2 4 9 10 6 8 3 914 | 88 2 7 8 9 10 915 | 1 2 5 3 88 9 10 916 | 1 2 3 9 10 7 917 | 6 8 9 10 1 2 3 4 7 918 | 6 7 1 5 2 4 10 8 3 919 | 1 88 3 4 6 7 9 10 920 | 7 8 10 9 88 4 1 6 2 921 | 1 3 88 6 88 9 7 922 | 1 2 3 4 88 10 8 88 923 | 1 88 9 8 6 10 924 | 1 2 88 4 6 9 10 5 7 925 | 3 2 1 4 9 10 926 | 1 2 6 10 8 4 9 927 | 1 7 9 10 88 928 | 6 8 9 10 1 2 4 5 88 929 | 6 7 8 9 10 5 1 3 4 930 | 6 5 7 88 10 1 2 3 4 931 | 10 8 7 2 5 4 1 3 9 932 | 7 6 8 9 10 1 2 4 3 5 933 | 6 7 8 9 10 1 2 3 4 5 934 | 6 7 8 9 10 88 3 2 5 935 | 6 7 8 10 1 88 2 936 | 8 9 10 6 
1 2 5 4 937 | 6 7 8 5 10 2 1 88 938 | 6 88 8 9 10 939 | 88 2 9 88 88 10 940 | 7 8 10 9 1 2 88 4 941 | 9 10 6 3 1 4 2 5 942 | 7 8 9 10 88 2 3 4 5 6 943 | 6 7 8 9 10 3 4 1 2 5 944 | 9 1 10 2 5 8 6 7 945 | 7 9 10 1 2 3 5 4 946 | 6 7 9 8 10 1 2 3 5 947 | 1 88 4 6 10 9 948 | 3 2 1 88 6 88 9 88 88 8 949 | 6 7 8 9 10 1 4 88 2 3 950 | 6 7 8 9 10 1 5 3 4 2 951 | 4 5 6 7 88 10 1 3 2 952 | 6 7 8 9 10 88 3 5 1 953 | 9 10 1 2 4 3 88 8 88 954 | 7 8 9 10 88 2 3 5 6 955 | 5 6 7 8 9 10 88 88 3 4 956 | 6 8 9 10 1 2 957 | 5 6 7 88 9 10 1 2 3 958 | 9 10 88 8 1 2 959 | 5 7 8 9 10 6 960 | 8 9 10 1 6 2 4 961 | 10 88 9 8 1 7 962 | 10 7 9 6 1 2 963 | 10 9 8 1 6 2 88 964 | 10 9 3 1 88 8 7 965 | 10 9 3 7 1 2 966 | 10 9 4 2 6 88 5 967 | 10 9 8 6 7 1 2 3 968 | 10 9 6 2 4 3 1 5 969 | 10 8 9 6 2 3 970 | 10 9 7 88 2 971 | 8 9 10 7 1 5 972 | 10 9 8 7 6 1 2 4 973 | 10 9 6 7 1 88 974 | 10 9 8 5 6 1 2 4 975 | 9 10 1 3 976 | 10 8 9 1 7 3 977 | 8 9 10 6 5 2 4 3 978 | 9 10 6 5 8 979 | 9 10 7 8 3 1 88 980 | 10 8 9 1 2 981 | 10 9 6 7 1 2 3 5 4 982 | 10 8 7 1 2 4 983 | 10 9 8 1 2 3 984 | 9 10 8 7 6 1 2 3 4 985 | 10 9 8 6 1 88 986 | 10 9 3 1 6 4 88 987 | 10 9 8 88 7 6 1 5 988 | 10 8 7 9 1 3 4 6 989 | 10 9 8 7 1 2 990 | 10 9 6 5 8 1 3 991 | 10 8 9 7 2 1 4 5 3 992 | 10 9 7 5 88 1 2 3 8 993 | 9 10 7 4 1 3 2 8 88 994 | 10 9 8 5 6 1 2 7 995 | 10 9 88 6 2 1 4 996 | 8 9 10 6 3 7 2 1 5 997 | 9 10 6 8 5 998 | 10 9 88 4 2 1 3 999 | 10 8 9 1 4 3 7 88 1000 | 9 10 8 7 6 1 2 3 4 1001 | 10 9 6 7 1 3 4 1002 | 10 9 8 1 88 6 1003 | 10 9 4 88 3 1 1004 | 9 10 1 2 5 6 7 88 1005 | 10 8 9 7 1 2 88 1006 | 10 9 7 1 2 1007 | 9 10 7 1 5 6 1008 | 8 9 10 1 1009 | 10 9 2 8 1 4 7 88 1010 | 10 9 7 6 3 1011 | 8 9 10 7 6 88 2 4 1 1012 | 7 8 9 1 5 6 2 1013 | 10 9 7 8 4 1 3 2 6 88 1014 | 9 10 8 6 1 2 3 88 1015 | 9 10 1 2 7 6 5 1016 | 8 9 10 3 4 5 7 1017 | 8 9 10 7 5 6 3 1 2 88 1018 | 9 10 7 8 5 6 2 1 1019 | 8 9 10 1 2 5 88 88 88 1020 | 9 10 8 3 5 4 1021 | 7 8 9 10 5 1 4 3 1022 | 9 10 7 1 2 8 6 1023 | 7 9 8 2 5 1 88 1024 | 7 8 9 10 5 3 1 2 
1025 | 9 10 88 88 1 88 1026 | 9 10 2 4 5 1 8 3 1027 | 7 8 9 10 4 6 1 3 1028 | 8 9 10 5 6 1 3 2 1029 | 8 7 10 88 6 5 1030 | 4 9 10 6 88 1 88 1031 | 6 7 9 10 3 4 88 1 88 1032 | 8 9 88 3 5 1033 | 8 9 10 4 1 2 7 3 5 1034 | 8 9 10 7 6 88 1035 | 10 1 3 4 88 8 5 6 2 1036 | 6 7 8 9 1 5 88 1037 | 6 8 88 10 5 1 2 88 1038 | 7 9 10 6 5 88 88 1039 | 7 8 9 10 5 6 4 3 2 1040 | 7 8 10 1 2 4 5 88 1041 | 10 7 8 88 2 4 1042 | 9 3 10 2 8 4 1 1043 | 10 8 9 1 7 1044 | 10 9 6 1 2 8 1045 | 10 9 2 1 8 1046 | 8 9 10 6 1 1047 | 1 9 10 7 8 1048 | 10 6 8 1 2 9 1049 | 9 8 10 1 7 1050 | 8 9 10 3 2 1 1051 | 9 10 5 1 3 8 6 1052 | 9 10 7 1 3 1053 | 7 8 9 2 1 6 10 88 1054 | 9 10 1 2 8 1055 | 9 10 1 88 88 1056 | 7 8 9 10 1 88 1057 | 8 9 10 7 4 6 1058 | 88 4 10 3 1 1059 | 8 9 10 5 6 3 7 1060 | 8 9 10 1 5 6 7 88 1061 | 7 8 9 10 1 3 1062 | 10 6 1 7 9 1063 | 8 9 10 4 6 1064 | 7 8 9 10 1 1065 | 8 9 10 1 4 88 1066 | 9 10 5 1 7 2 4 1067 | 6 9 10 1 2 1068 | 6 10 7 8 9 2 4 1069 | 7 8 9 10 2 1070 | 6 10 9 1 3 1071 | 7 8 9 10 2 1 5 1072 | 1 7 9 10 88 1073 | 8 9 10 7 2 3 1 1074 | 7 8 9 10 88 4 1075 | 7 8 9 10 1 6 1076 | 7 8 9 10 1 3 2 1077 | 7 8 9 10 6 1078 | 7 88 9 10 4 2 3 1079 | 6 7 8 9 10 3 1 2 1080 | 6 7 9 1 10 4 3 1081 | 7 8 9 10 1 4 1082 | 7 8 88 10 6 1083 | 7 8 9 10 6 4 1084 | 7 9 10 6 1 1085 | 9 10 8 2 4 5 1086 | 7 8 9 10 2 4 1 1087 | 9 10 8 7 6 88 3 1088 | 7 8 9 10 6 88 1089 | 7 8 9 10 4 1090 | 6 7 8 88 10 5 2 3 4 1 1091 | 7 8 9 10 88 1092 | 7 8 9 10 5 6 88 2 88 1093 | 7 8 9 10 3 1 2 88 1094 | 7 8 9 10 6 88 1095 | 7 9 8 10 6 2 5 1 1096 | 7 8 9 10 4 5 1 88 1097 | 7 8 9 10 1 2 3 1098 | 7 8 9 10 1 5 6 1099 | 7 8 9 10 3 88 88 1100 | 7 8 10 9 88 1101 | 7 8 9 10 6 4 5 3 2 1102 | 7 88 9 10 1 1103 | 7 8 9 10 3 5 1104 | 7 9 10 8 2 5 1 1105 | 7 8 9 10 4 1 3 88 1106 | 7 8 9 10 2 3 5 1107 | 7 8 9 10 4 6 5 1108 | 6 7 8 9 10 2 4 88 1109 | 7 8 88 10 4 5 1 1110 | 7 8 9 10 1 6 88 1111 | 7 88 9 10 4 88 3 88 1112 | 6 7 88 10 9 1113 | 7 8 9 10 4 5 3 1114 | 7 8 9 10 6 1 2 4 1115 | 7 8 9 10 5 2 1 1116 | 7 8 9 10 6 4 5 1 
1117 | 7 8 9 10 6 88 4 1118 | 5 8 9 10 4 3 88 1119 | 7 8 9 10 4 5 6 1120 | 8 9 10 3 1 6 88 1121 | 3 9 10 7 8 6 5 1 1122 | 10 9 8 7 6 2 1123 | 10 9 7 8 2 1124 | 10 9 8 1 5 2 88 7 1125 | 10 9 6 8 7 1 4 1126 | 10 8 9 5 1 2 1127 | 7 8 9 10 88 1 3 2 1128 | 10 9 8 4 5 7 6 1 3 2 1129 | 7 8 9 10 5 2 1130 | 10 8 9 7 6 4 88 2 1 1131 | 10 7 9 8 6 5 1 2 1132 | 10 9 6 7 1 3 88 1133 | 10 9 8 7 6 4 1 2 3 1134 | 10 9 7 4 5 1 2 1135 | 10 7 8 3 4 1 2 5 88 1136 | 10 9 8 6 1 7 4 5 1137 | 9 10 8 6 7 1 5 2 1138 | 10 7 8 4 3 1 6 1139 | 10 8 9 7 6 1 1140 | 10 7 9 8 5 6 2 1 3 1141 | 10 7 8 9 6 2 3 1 5 1142 | 10 9 7 6 5 4 2 1 1143 | 10 9 8 6 3 4 1144 | 10 9 8 7 6 1 2 3 5 1145 | 10 9 8 7 6 1 5 1146 | 10 9 88 8 1 6 7 5 1147 | 10 6 8 5 7 88 1 9 1148 | 10 8 6 88 7 1149 | 10 7 8 5 6 1 88 1150 | 10 8 9 7 3 5 4 1 1151 | 10 7 6 9 4 3 88 1 1152 | 10 9 5 88 88 1 2 1153 | 10 9 7 8 1 4 2 3 1154 | 10 9 8 7 1 88 5 3 1155 | 10 9 8 7 6 2 1 1156 | 10 9 88 7 1 2 1157 | 9 10 8 4 1 2 1158 | 10 9 88 6 7 5 3 4 1 2 1159 | 10 7 8 9 5 4 3 88 1160 | 10 9 7 6 88 4 1 2 1161 | 10 9 6 7 3 1 2 5 4 1162 | 10 9 6 7 3 2 4 5 1 1163 | 10 7 8 9 1 1164 | 10 6 7 9 1 2 3 1165 | 10 7 8 9 4 1 2 1166 | 10 9 6 7 88 1 2 88 4 5 1167 | 10 9 7 8 1 2 1168 | 10 8 9 7 6 1 1169 | 10 9 6 8 7 1 2 5 1170 | 10 9 7 6 5 1 4 1171 | 10 9 8 7 3 1172 | 10 9 8 6 5 1 1173 | 10 9 7 5 4 2 3 1 1174 | 10 8 9 6 7 3 5 4 1 1175 | 10 8 6 7 1 2 3 4 1176 | 9 10 6 3 4 7 1177 | 10 9 5 8 7 1 3 4 2 1178 | 9 10 7 8 6 5 88 2 3 1179 | 10 9 7 8 6 1 1180 | 10 9 8 7 88 1 88 3 1181 | 10 6 8 9 1 3 2 1182 | 10 9 7 1 6 3 1183 | 10 8 7 5 1 2 3 1184 | 10 9 5 7 2 1 4 3 1185 | 10 8 9 6 7 3 1 1186 | 10 9 88 6 7 5 3 2 1187 | 10 6 9 7 1 2 1188 | 10 9 7 8 2 3 1189 | 10 6 7 8 3 88 1190 | 10 9 8 7 5 1191 | 10 8 9 1192 | 10 9 7 6 5 4 1 1193 | 10 9 8 7 6 1194 | 9 10 8 7 1195 | 10 9 8 6 5 3 4 1196 | 10 9 8 7 6 4 5 1 2 1197 | 10 8 5 7 9 2 1 4 1198 | 10 7 9 6 1 88 5 4 1199 | 10 8 9 7 5 6 2 88 4 1200 | 10 8 7 9 6 2 5 4 3 1201 | 0 
-------------------------------------------------------------------------------- /content/assignments/Assignment_2:Search_of_Associative_Memory_Model/Murd62 data/fr20-1.txt: -------------------------------------------------------------------------------- 1 | 20 19 13 18 1 9 2 17 16 88 2 | 20 18 16 17 3 88 14 3 | 17 18 8 20 15 14 1 2 19 4 | 18 19 20 15 16 8 9 1 2 5 | 18 19 20 17 12 11 13 88 6 | 18 19 20 17 1 10 7 7 | 19 20 18 11 17 1 2 8 | 20 19 17 11 15 10 1 9 | 19 20 88 5 17 1 10 | 20 18 12 19 11 | 20 19 18 1 2 88 9 10 12 | 20 18 15 16 6 5 14 13 2 1 19 13 | 20 19 6 17 5 88 1 2 88 14 | 20 18 14 13 16 19 15 88 15 | 20 18 19 17 1 16 88 16 | 18 19 20 13 16 17 1 3 2 5 17 | 18 19 17 16 20 1 3 8 9 18 | 19 20 18 16 11 7 8 9 19 | 18 17 16 15 19 20 12 1 20 | 20 19 18 17 88 10 9 21 | 20 18 19 2 1 9 12 13 22 | 20 16 19 18 4 2 1 8 3 23 | 17 18 19 20 1 9 3 4 24 | 20 17 16 18 19 1 88 6 25 | 20 17 19 18 88 11 12 10 4 6 26 | 20 19 15 16 17 18 1 11 2 13 9 10 27 | 17 18 19 20 1 88 16 7 28 | 20 16 15 19 18 17 88 14 10 2 29 | 20 16 17 18 13 14 5 30 | 20 18 19 15 16 31 | 17 18 19 20 11 32 | 20 19 18 14 8 16 1 2 6 7 33 | 16 17 18 19 20 10 15 1 2 6 88 34 | 16 18 88 20 88 35 | 17 18 88 20 9 12 10 88 36 | 18 20 16 17 37 | 19 18 20 13 2 3 4 9 1 11 12 38 | 17 18 19 20 4 6 9 5 8 39 | 88 16 18 14 19 20 10 12 40 | 20 19 18 16 14 1 41 | 16 19 18 20 13 14 2 1 11 88 4 3 42 | 19 20 16 17 18 12 15 2 1 43 | 16 15 17 20 8 88 18 1 5 3 7 44 | 20 16 11 10 18 19 9 6 45 | 19 20 16 88 1 9 88 46 | 19 18 20 2 3 17 16 4 47 | 19 20 17 18 16 12 13 11 48 | 19 20 18 15 14 16 88 49 | 18 19 20 17 1 50 | 18 19 20 88 17 51 | 20 15 16 18 19 1 5 6 4 9 10 52 | 15 19 16 18 17 20 11 1 2 53 | 19 20 18 17 4 3 5 2 1 12 54 | 18 19 20 12 17 9 8 55 | 18 17 19 20 1 2 16 88 3 4 56 | 19 18 20 16 2 88 1 14 57 | 17 18 19 20 12 10 9 58 | 19 20 15 16 17 1 2 3 4 59 | 18 16 19 20 14 2 1 17 4 5 3 60 | 17 18 19 20 88 1 9 10 15 61 | 18 19 20 17 4 10 11 13 1 9 2 14 62 | 18 19 20 17 3 1 2 7 8 63 | 17 19 20 14 12 64 | 18 19 20 17 88 1 65 | 16 
18 19 20 1 3 12 11 5 6 66 | 17 20 16 19 18 1 4 12 88 5 67 | 18 19 20 17 1 2 68 | 15 17 20 19 18 13 69 | 19 20 18 88 15 7 88 8 4 70 | 17 18 19 20 11 1 71 | 15 16 17 18 20 19 5 72 | 19 20 15 9 2 6 73 | 15 17 18 19 16 9 6 7 74 | 16 17 88 8 7 5 9 10 19 18 1 75 | 17 18 19 20 1 76 | 18 19 20 17 16 3 2 14 1 77 | 17 18 19 88 20 15 78 | 14 15 18 19 20 12 16 7 79 | 18 19 20 17 9 8 5 2 80 | 15 18 16 20 14 1 88 17 81 | 1 20 18 13 4 6 7 16 82 | 1 20 16 88 17 8 9 83 | 20 19 8 15 18 84 | 20 8 7 16 88 2 85 | 20 19 17 12 1 88 9 88 86 | 1 14 20 17 10 19 87 | 20 1 19 3 2 88 88 | 20 14 88 88 13 17 5 11 89 | 20 18 19 17 2 3 88 88 90 | 20 18 14 17 6 91 | 1 2 20 19 8 9 10 18 7 92 | 18 19 20 1 2 17 14 13 11 88 5 93 | 17 18 19 20 16 1 2 94 | 20 19 15 5 13 16 12 3 95 | 20 16 17 19 96 | 20 1 13 19 3 16 6 88 97 | 1 18 20 19 8 11 12 17 2 98 | 3 20 18 88 13 2 16 99 | 20 19 17 18 5 7 6 100 | 20 1 16 18 2 3 101 | 1 20 18 88 10 8 12 88 5 6 102 | 1 20 18 2 5 19 3 103 | 1 9 10 5 4 9 14 3 88 104 | 18 19 20 17 5 9 88 105 | 17 19 20 1 2 10 106 | 20 88 17 18 15 16 10 13 14 2 107 | 1 5 20 14 15 10 19 108 | 19 16 17 18 2 3 109 | 16 17 18 19 20 5 1 2 3 110 | 20 1 2 19 5 111 | 20 19 15 11 13 4 112 | 18 20 17 1 113 | 20 18 19 14 1 13 114 | 1 20 19 88 3 6 115 | 20 19 88 17 18 5 12 116 | 16 15 18 20 17 7 117 | 20 13 1 88 88 2 18 118 | 20 19 18 1 17 2 119 | 20 19 14 17 15 2 120 | 18 19 20 1 88 2 121 | 1 20 13 18 3 2 8 9 122 | 1 20 19 88 2 12 88 123 | 19 20 17 3 10 13 1 124 | 19 20 13 14 2 88 125 | 17 18 19 20 15 6 1 5 126 | 19 20 1 14 18 127 | 20 18 17 19 2 11 1 15 10 88 128 | 19 20 15 18 17 4 2 14 129 | 17 18 19 20 10 3 13 2 88 5 130 | 18 19 20 1 5 16 88 12 13 131 | 17 18 88 20 8 6 1 2 3 132 | 18 19 20 17 1 11 15 133 | 19 20 88 2 1 13 9 11 134 | 18 19 20 12 17 14 1 88 8 9 135 | 18 19 20 12 1 11 2 88 136 | 19 20 16 7 14 13 137 | 17 18 15 16 19 20 12 8 88 138 | 18 19 20 13 88 88 11 139 | 16 19 20 8 11 88 140 | 16 20 18 7 3 141 | 19 20 7 1 5 4 8 17 142 | 17 18 20 1 3 9 7 88 88 143 | 14 16 17 19 20 9 5 7 3 2 144 | 
17 18 19 20 10 9 14 8 88 145 | 17 18 19 20 11 9 12 1 4 2 146 | 19 20 18 16 10 2 13 11 1 147 | 18 19 20 17 1 13 12 11 5 6 2 4 148 | 19 20 15 16 12 149 | 88 19 20 1 2 4 10 3 150 | 17 18 19 20 1 11 151 | 15 16 17 19 20 3 9 152 | 20 19 12 8 9 1 153 | 17 18 19 9 6 154 | 18 19 20 17 88 10 88 9 3 155 | 17 18 19 20 11 8 156 | 18 19 20 88 13 1 2 16 157 | 19 18 15 20 14 158 | 18 19 20 1 159 | 19 20 88 16 3 2 4 10 88 88 160 | 18 19 20 16 15 14 1 9 7 161 | 1 19 20 88 6 4 162 | 88 20 9 10 11 19 163 | 11 18 20 19 12 164 | 15 17 20 16 5 165 | 15 16 17 20 19 166 | 16 17 12 10 19 18 20 3 167 | 16 15 17 18 20 1 2 19 168 | 9 11 12 14 20 18 169 | 20 2 17 18 15 16 10 170 | 19 20 12 16 17 171 | 18 88 20 19 10 16 11 172 | 16 20 12 14 18 19 17 173 | 7 14 19 20 18 174 | 18 19 20 16 17 9 8 6 4 175 | 16 17 19 20 88 12 176 | 15 16 88 20 13 19 17 177 | 16 17 19 20 18 8 178 | 9 18 19 20 6 16 7 3 13 179 | 13 5 15 19 20 17 12 180 | 14 15 16 20 18 181 | 17 20 18 19 15 88 182 | 17 18 19 20 88 1 15 183 | 8 9 20 10 9 8 6 184 | 16 17 19 20 15 11 9 10 185 | 15 17 2 19 20 18 186 | 19 20 16 88 187 | 11 17 19 20 13 14 7 188 | 17 10 9 20 189 | 19 20 18 2 14 190 | 18 19 8 20 15 14 16 17 191 | 20 17 19 16 3 192 | 13 88 15 18 19 20 193 | 17 18 19 20 14 194 | 18 19 20 6 195 | 19 20 18 17 196 | 18 19 20 88 17 197 | 17 18 19 20 13 88 11 12 2 198 | 16 17 18 88 20 3 12 10 7 199 | 14 18 20 19 16 200 | 16 17 18 19 20 11 14 201 | 14 12 13 1 20 18 2 202 | 16 15 17 1 20 18 203 | 18 19 20 16 15 14 204 | 19 20 12 14 11 88 205 | 10 19 20 17 14 206 | 13 15 19 18 20 207 | 17 18 19 20 7 6 8 9 208 | 19 18 20 15 16 11 4 209 | 17 19 20 16 5 4 88 10 18 210 | 17 18 19 10 8 12 211 | 14 16 17 20 3 13 212 | 16 17 19 18 20 213 | 13 16 20 19 17 214 | 15 16 18 17 20 19 215 | 18 20 19 7 88 216 | 19 20 14 217 | 17 19 20 18 9 10 218 | 17 20 19 15 9 219 | 19 20 88 16 220 | 17 18 20 15 16 10 221 | 20 19 13 16 11 1 222 | 15 18 17 20 19 223 | 19 20 8 12 9 224 | 19 20 15 16 1 3 225 | 15 18 20 19 1 2 226 | 14 16 18 19 20 1 10 227 | 16 19 20 13 
14 10 12 11 228 | 18 19 20 16 13 9 229 | 17 19 20 18 230 | 17 19 20 18 10 231 | 17 19 20 18 3 232 | 14 15 16 17 18 19 20 233 | 13 15 14 18 20 16 234 | 13 15 19 20 9 10 1 3 235 | 18 19 20 11 1 236 | 18 19 20 9 6 237 | 14 15 17 18 19 20 6 11 238 | 12 15 18 19 20 239 | 18 19 20 11 1 4 240 | 16 18 19 20 88 241 | 17 88 8 12 15 20 18 19 242 | 19 20 17 16 6 14 3 1 88 243 | 19 20 15 12 8 17 7 244 | 20 18 19 16 8 14 7 245 | 18 19 20 17 4 246 | 18 19 20 12 13 3 5 1 247 | 20 18 19 17 11 6 7 15 14 248 | 88 18 19 11 13 15 88 249 | 20 19 18 2 88 17 12 13 14 1 4 250 | 20 17 18 12 9 7 251 | 18 19 88 88 12 1 2 252 | 20 19 15 4 88 14 6 253 | 20 16 18 19 88 254 | 19 20 18 13 16 10 88 255 | 20 19 2 3 1 18 17 5 10 256 | 19 20 18 1 13 3 16 88 257 | 20 19 18 88 88 11 258 | 20 18 19 14 16 9 259 | 20 19 17 16 88 6 15 14 260 | 20 18 17 19 16 14 13 2 261 | 1 3 18 19 20 6 12 4 9 262 | 88 17 18 19 8 12 3 263 | 9 14 19 17 20 264 | 6 10 1 13 19 6 18 20 265 | 11 18 19 20 88 6 17 2 10 266 | 2 13 88 12 20 19 88 267 | 10 18 17 19 20 13 7 16 268 | 16 17 18 20 14 13 1 269 | 16 17 18 19 20 5 7 15 270 | 18 14 19 16 271 | 17 18 19 20 3 5 8 272 | 18 1 17 19 20 88 273 | 1 17 18 16 20 7 88 88 11 274 | 20 6 19 16 88 18 275 | 15 16 17 19 9 11 20 1 276 | 10 15 11 18 17 277 | 1 2 3 88 18 19 20 16 278 | 14 17 15 18 19 20 4 5 279 | 14 18 20 19 12 88 280 | 1 9 19 18 20 281 | 1 2 88 17 18 20 19 282 | 15 16 17 18 19 20 6 283 | 11 5 7 16 18 17 19 20 284 | 16 17 18 19 20 88 88 285 | 1 5 17 9 10 19 20 18 4 286 | 1 15 18 19 20 14 4 7 287 | 18 11 19 20 16 2 288 | 15 7 16 19 17 20 18 289 | 10 11 17 19 20 8 5 4 290 | 19 13 11 88 15 16 88 12 291 | 1 88 20 13 18 292 | 17 1 18 16 20 14 293 | 11 12 88 7 8 10 19 20 18 88 294 | 1 15 18 16 17 14 20 9 295 | 16 17 1 2 3 20 19 88 296 | 1 7 14 5 9 19 20 88 297 | 88 14 15 17 20 12 298 | 1 2 16 17 20 19 15 299 | 16 19 14 20 12 300 | 15 14 17 18 20 16 1 88 301 | 4 2 14 16 17 20 302 | 16 17 8 1 3 19 18 20 88 303 | 3 20 18 19 2 13 12 14 304 | 15 12 17 88 16 19 20 305 | 88 12 13 19 88 20 
306 | 1 11 20 18 88 19 88 307 | 17 2 18 19 20 13 15 308 | 1 17 19 20 18 16 14 309 | 88 16 19 18 17 20 12 8 10 88 310 | 11 20 17 18 19 311 | 14 15 16 17 19 20 6 5 4 312 | 6 4 5 9 20 19 16 18 313 | 88 17 8 88 14 88 19 20 9 314 | 14 15 16 17 20 18 88 7 8 315 | 11 12 15 18 19 88 20 88 316 | 1 2 11 20 19 88 88 317 | 12 13 14 20 18 6 4 318 | 1 12 18 19 20 7 3 319 | 20 2 88 88 15 1 9 320 | 19 10 20 18 14 321 | 20 19 1 6 10 88 15 322 | 20 18 13 15 88 12 1 3 88 323 | 20 19 88 88 15 12 7 18 1 88 324 | 20 19 17 8 16 18 15 10 325 | 20 16 19 17 9 88 12 88 8 88 326 | 19 20 17 1 10 18 88 327 | 20 19 18 17 8 9 11 328 | 20 19 18 3 9 11 12 13 1 329 | 20 18 19 2 88 17 330 | 20 19 18 15 11 88 9 331 | 20 19 16 88 8 1 2 332 | 20 18 19 17 12 9 5 14 7 333 | 20 18 19 15 13 9 88 334 | 16 17 19 18 88 20 10 1 3 335 | 16 17 18 19 20 88 336 | 17 18 19 20 13 15 16 9 337 | 18 16 88 19 20 88 338 | 19 20 18 13 14 16 339 | 17 18 19 20 4 5 88 340 | 20 19 17 18 1 2 14 341 | 18 19 20 17 12 7 10 9 342 | 16 19 18 20 1 2 7 12 8 14 343 | 17 18 19 20 9 1 2 5 11 344 | 17 16 18 19 20 16 8 9 345 | 17 18 19 20 6 2 5 346 | 17 19 18 20 13 347 | 17 18 19 20 16 14 15 10 348 | 17 18 19 20 2 14 16 3 9 88 349 | 16 19 18 20 5 9 14 350 | 14 15 19 16 20 1 351 | 17 18 19 20 16 11 3 10 88 352 | 17 19 18 20 88 10 353 | 18 19 17 20 7 354 | 17 18 88 20 16 6 4 5 88 355 | 17 18 88 20 19 88 356 | 18 20 16 10 11 9 7 357 | 18 17 19 20 13 14 11 12 358 | 17 18 19 20 15 88 1 359 | 17 18 19 20 14 16 7 360 | 14 88 18 19 20 9 361 | 17 18 19 20 14 13 4 11 2 362 | 17 18 19 20 12 15 6 88 8 363 | 17 18 19 20 7 8 364 | 88 19 16 20 7 8 88 365 | 18 19 20 10 11 15 88 7 4 5 366 | 17 18 19 20 7 4 1 88 14 10 367 | 17 18 19 20 1 2 88 368 | 18 19 20 15 17 16 7 369 | 18 19 20 88 5 17 88 3 370 | 18 19 20 16 17 4 8 12 1 88 371 | 17 18 19 20 88 6 372 | 88 19 20 15 14 7 11 5 373 | 17 16 20 15 10 9 11 1 5 374 | 16 18 19 20 88 88 375 | 18 17 19 20 1 3 376 | 17 18 16 20 19 7 88 1 3 377 | 19 20 18 17 88 378 | 18 19 20 17 1 2 9 379 | 19 18 14 20 11 14 380 | 
17 18 19 20 14 15 7 10 88 381 | 17 18 19 20 7 1 10 11 88 2 4 14 382 | 17 18 19 20 10 8 7 9 383 | 17 18 19 20 14 7 5 88 384 | 17 18 19 1 3 13 20 88 16 385 | 18 19 20 88 11 12 9 386 | 17 18 19 20 88 8 9 6 387 | 18 19 20 17 1 2 3 16 13 15 388 | 20 19 15 18 88 389 | 18 19 20 16 17 88 390 | 17 18 19 20 11 88 391 | 15 16 17 19 18 20 6 392 | 19 20 88 17 18 15 9 393 | 17 18 19 20 16 2 4 394 | 17 18 19 20 10 11 13 12 395 | 17 18 19 20 11 12 14 15 396 | 17 19 20 1 88 397 | 17 18 19 20 15 14 398 | 18 19 20 17 88 12 88 399 | 18 19 20 17 88 400 | 19 20 3 2 88 88 401 | 20 18 19 88 1 88 2 13 8 12 15 402 | 20 16 17 18 88 1 6 5 11 403 | 20 19 88 17 15 18 14 12 11 404 | 18 19 20 1 9 405 | 19 20 88 17 15 1 13 88 406 | 17 18 20 13 16 407 | 17 18 19 88 12 408 | 18 19 20 15 12 14 88 409 | 16 19 20 1 2 88 3 10 410 | 13 20 1 14 15 18 19 16 411 | 20 18 19 14 1 2 8 10 15 412 | 18 19 20 88 13 5 6 15 413 | 18 19 20 6 13 14 1 5 414 | 16 17 18 20 13 8 6 1 19 415 | 16 17 18 19 20 4 11 416 | 17 18 19 13 10 2 3 417 | 19 20 18 5 88 7 8 418 | 88 18 88 11 7 1 2 13 9 4 419 | 14 19 20 15 17 9 5 420 | 17 18 19 20 11 14 13 88 421 | 20 18 19 9 2 3 7 12 8 422 | 15 16 19 17 20 1 3 423 | 19 20 12 13 14 3 11 9 6 424 | 18 19 20 17 6 10 15 11 3 88 425 | 19 20 17 18 88 2 426 | 15 16 20 88 88 13 10 1 17 427 | 15 2 18 19 20 10 13 4 16 1 3 428 | 17 18 20 10 14 9 16 429 | 18 19 20 14 2 15 16 17 1 430 | 18 19 14 20 13 8 5 1 3 431 | 15 16 17 18 19 11 1 3 2 20 88 5 432 | 19 20 16 18 7 12 2 433 | 16 17 18 19 20 14 88 434 | 16 17 18 20 6 1 13 15 435 | 15 14 17 18 20 12 10 436 | 15 16 17 18 20 8 9 437 | 18 17 19 20 15 88 88 438 | 18 6 19 20 14 15 16 17 10 4 11 439 | 18 16 19 20 14 10 88 88 440 | 20 19 18 2 16 441 | 20 19 11 18 10 5 1 2 14 442 | 16 17 18 19 20 12 15 3 443 | 19 20 18 3 11 7 10 9 1 444 | 17 19 20 16 15 14 12 10 1 445 | 16 17 18 19 20 7 88 446 | 17 18 19 20 14 4 7 12 447 | 17 18 19 20 10 2 8 7 1 448 | 15 16 17 19 20 10 7 14 13 449 | 18 19 20 1 16 4 11 5 450 | 16 17 88 18 15 11 12 1 19 5 451 | 17 18 20 16 3 6 
88 9 10 1 452 | 17 18 19 20 1 14 12 11 453 | 18 19 20 16 17 13 15 7 8 10 4 3 454 | 17 18 19 20 9 5 13 16 455 | 19 88 20 88 11 10 13 456 | 17 18 19 20 13 88 1 88 457 | 88 20 18 11 12 7 15 14 4 1 458 | 17 18 19 20 9 1 2 459 | 14 15 18 16 19 20 4 6 8 460 | 19 20 15 18 16 1 14 461 | 17 18 19 20 11 13 88 5 4 14 2 1 462 | 18 17 20 12 8 9 11 463 | 18 19 20 17 88 2 1 3 88 88 7 464 | 17 19 20 13 8 18 1 465 | 18 19 20 11 1 2 6 466 | 14 16 17 18 19 20 6 9 88 467 | 18 20 17 14 1 4 13 15 19 6 468 | 15 16 18 19 20 469 | 18 19 20 17 12 1 15 470 | 18 17 88 20 11 1 88 471 | 18 19 20 16 17 13 472 | 15 17 16 19 20 9 10 1 7 473 | 14 15 16 18 19 2 7 8 474 | 13 19 17 18 20 10 1 12 1 2 475 | 19 88 17 14 2 1 11 13 7 88 476 | 18 19 20 15 14 11 9 5 6 88 477 | 14 88 16 17 18 20 88 478 | 16 18 17 19 20 9 479 | 88 14 17 20 88 1 5 88 480 | 17 19 20 10 14 9 8 88 11 481 | 20 19 1 3 10 15 482 | 18 19 20 17 14 3 88 483 | 18 19 20 4 8 9 1 2 13 484 | 20 19 17 15 7 6 11 12 10 485 | 20 19 17 15 12 9 10 1 486 | 19 20 17 18 88 1 88 487 | 20 19 17 88 11 1 488 | 88 19 88 18 10 88 12 11 489 | 88 19 20 10 2 5 490 | 20 19 15 14 88 88 88 491 | 20 19 1 2 17 10 8 492 | 20 18 19 15 17 9 2 6 12 493 | 20 19 18 17 1 16 5 4 494 | 20 19 18 15 1 495 | 20 19 88 17 88 11 496 | 20 19 13 1 88 88 88 497 | 19 20 17 18 88 88 17 88 498 | 20 19 18 10 9 13 16 9 8 3 499 | 20 19 17 13 9 88 88 500 | 20 18 19 17 88 501 | 88 18 88 20 9 2 7 11 12 13 3 88 502 | 18 20 17 19 88 88 88 503 | 20 17 18 19 1 2 504 | 88 11 7 88 17 19 20 3 12 505 | 88 2 88 15 17 6 20 88 506 | 17 18 19 20 88 6 507 | 1 19 20 10 17 88 88 88 508 | 1 16 20 88 17 88 509 | 88 17 18 19 88 88 1 510 | 1 11 14 88 20 19 88 9 10 511 | 11 88 88 20 1 88 10 9 512 | 10 11 88 20 18 88 88 513 | 1 6 9 13 19 20 88 88 88 514 | 88 88 18 19 20 6 88 515 | 1 5 88 19 88 88 516 | 1 3 88 88 18 20 10 88 517 | 1 88 10 11 12 19 20 88 518 | 1 15 16 88 20 17 88 88 88 88 519 | 17 18 19 20 88 88 520 | 1 9 18 20 88 88 88 88 88 521 | 1 5 19 20 4 88 16 88 522 | 11 88 12 88 88 18 17 1 88 523 | 1 4 17 
3 88 18 20 524 | 14 19 20 88 11 18 7 6 1 525 | 20 88 19 17 1 10 88 526 | 7 17 88 18 20 14 88 88 2 88 527 | 88 14 18 20 1 15 528 | 1 17 19 16 20 88 529 | 4 5 19 20 18 17 88 530 | 16 18 19 20 1 88 88 11 531 | 1 2 18 88 20 88 532 | 15 16 17 20 18 1 88 88 533 | 3 88 20 19 88 15 88 88 88 534 | 1 2 3 88 20 17 88 535 | 1 2 4 3 19 20 88 88 88 536 | 14 88 19 16 11 20 88 537 | 88 20 18 88 16 88 538 | 1 2 88 19 20 17 88 88 539 | 1 19 17 18 88 14 20 88 540 | 1 88 88 5 15 19 20 88 541 | 1 2 13 20 17 14 15 88 542 | 1 88 18 20 88 88 88 88 88 543 | 18 19 20 88 88 88 3 1 2 544 | 1 88 15 88 88 19 20 88 88 545 | 88 88 18 20 6 88 1 2 88 546 | 1 88 20 12 88 19 18 88 88 547 | 2 88 13 19 20 17 88 88 88 548 | 1 20 19 15 88 88 88 549 | 15 88 20 4 88 12 88 88 88 550 | 17 18 20 19 11 88 88 88 88 88 551 | 88 88 88 18 20 88 88 88 88 552 | 88 88 20 14 15 88 9 19 88 553 | 88 9 88 88 88 20 88 88 88 88 88 554 | 1 88 20 10 88 88 17 88 88 88 555 | 1 2 11 20 19 88 88 88 88 556 | 9 17 88 88 20 88 88 88 88 557 | 1 17 19 18 20 88 88 88 88 558 | 88 88 88 3 4 88 20 19 88 559 | 1 88 88 20 2 88 88 88 560 | 14 15 16 88 20 88 88 88 88 88 561 | 20 19 4 13 15 16 562 | 20 19 14 3 10 88 88 563 | 20 19 3 11 16 12 1 564 | 19 18 20 13 14 1 2 8 9 565 | 20 19 18 17 3 6 2 566 | 20 19 18 17 16 567 | 20 19 15 17 18 5 9 568 | 88 19 12 11 5 7 569 | 17 88 19 20 88 2 570 | 20 88 12 9 14 88 571 | 19 20 88 18 2 1 9 10 572 | 20 19 14 6 2 573 | 17 20 19 16 6 5 10 88 574 | 16 17 19 18 20 575 | 19 20 15 17 88 576 | 19 20 17 18 15 16 1 13 577 | 19 20 18 17 16 88 88 8 88 88 88 578 | 18 88 19 20 13 5 579 | 17 18 19 20 88 580 | 11 18 19 20 12 581 | 20 18 19 12 13 7 88 1 582 | 20 19 18 88 17 583 | 19 20 88 9 88 584 | 88 88 19 10 11 1 585 | 20 16 17 6 2 15 5 586 | 13 16 15 18 14 20 587 | 20 88 19 10 11 12 588 | 14 17 19 20 10 9 18 589 | 17 16 18 19 20 5 3 1 590 | 17 18 19 20 1 16 88 591 | 18 19 20 17 11 7 9 2 592 | 17 88 88 19 20 8 10 88 593 | 18 19 20 17 7 2 1 88 14 594 | 16 88 18 17 20 6 1 595 | 20 17 16 19 2 1 3 596 | 20 88 17 15 88 
597 | 17 18 19 20 13 88 11 12 10 598 | 15 16 18 19 20 17 2 88 599 | 19 20 18 16 88 10 14 600 | 8 9 16 19 20 18 601 | 17 88 19 20 11 88 5 88 88 602 | 17 18 16 88 20 15 1 12 603 | 16 17 18 19 20 88 11 8 88 604 | 16 17 19 20 8 7 11 12 1 605 | 16 17 88 19 20 1 606 | 17 18 19 20 4 1 607 | 17 18 19 20 8 88 88 2 608 | 88 88 88 88 20 88 609 | 18 19 20 17 10 4 88 88 610 | 18 19 20 8 88 1 611 | 17 18 88 20 6 16 612 | 17 18 19 20 613 | 17 88 19 20 3 88 88 614 | 18 19 20 15 88 88 615 | 19 20 15 16 1 12 616 | 18 19 20 88 88 13 617 | 18 19 20 14 12 88 9 618 | 17 18 19 88 88 2 88 619 | 15 88 18 14 19 20 620 | 17 18 19 20 14 88 88 621 | 17 18 19 20 4 11 88 14 622 | 19 20 88 18 7 8 12 623 | 18 19 20 14 13 12 624 | 17 19 18 20 7 8 5 1 625 | 16 18 17 20 11 3 5 88 626 | 17 18 19 20 1 88 88 6 11 4 627 | 17 18 19 20 1 628 | 19 18 20 13 88 629 | 15 17 19 20 630 | 17 18 88 20 15 11 1 4 631 | 20 19 12 5 6 632 | 17 18 19 20 9 5 3 633 | 15 16 19 20 8 88 634 | 19 20 14 17 8 10 9 635 | 15 16 17 19 20 1 636 | 15 16 17 19 88 20 637 | 17 18 19 20 14 13 9 638 | 17 18 19 20 12 639 | 17 18 19 20 9 2 13 640 | 18 19 20 14 88 641 | 19 20 12 15 14 13 3 7 1 17 642 | 20 18 19 10 14 17 16 11 643 | 17 18 19 12 1 2 3 9 8 644 | 15 16 88 18 20 1 4 8 13 645 | 20 18 19 6 9 10 8 4 5 1 646 | 19 20 17 18 12 13 5 4 647 | 18 19 20 17 3 14 15 13 11 88 648 | 18 19 88 17 11 13 10 5 6 9 88 649 | 17 18 19 20 10 88 1 2 650 | 19 20 18 11 12 15 5 1 9 651 | 16 17 18 19 20 1 88 15 14 6 9 8 652 | 17 18 19 20 13 16 10 2 1 7 653 | 88 19 20 16 88 6 7 1 15 4 5 654 | 19 20 16 13 14 18 88 88 655 | 18 19 20 15 16 17 1 2 656 | 15 16 17 12 20 13 14 1 2 3 657 | 19 20 18 17 88 12 11 1 5 9 88 658 | 18 19 20 13 12 17 1 4 3 9 659 | 17 18 19 20 9 13 4 5 660 | 16 17 19 18 20 11 12 7 2 1 661 | 16 17 18 19 20 12 13 6 5 8 9 10 3 88 662 | 15 16 17 18 20 8 9 7 14 2 1 663 | 17 18 19 20 16 9 4 3 664 | 17 18 19 20 88 7 3 11 12 9 10 665 | 17 18 19 20 88 5 14 2 15 11 10 666 | 18 19 20 13 14 4 6 8 10 11 667 | 17 18 19 20 11 10 1 2 7 668 | 17 18 88 20 2 
10 1 88 9 669 | 18 16 20 15 13 14 5 670 | 18 19 20 14 16 2 3 9 10 11 671 | 18 19 20 11 10 2 3 672 | 17 18 19 20 11 12 7 6 8 9 673 | 17 18 19 20 13 6 1 16 15 7 674 | 15 16 17 19 20 6 10 675 | 14 15 16 19 10 9 12 11 8 88 676 | 16 17 88 19 8 9 10 88 1 677 | 15 16 17 18 19 20 11 12 1 3 88 88 678 | 17 18 19 20 16 14 3 15 11 10 679 | 13 14 16 18 19 20 680 | 18 19 20 14 4 6 1 681 | 17 18 19 20 1 3 4 7 8 2 6 5 9 16 682 | 16 17 18 19 20 12 10 6 8 2 88 5 683 | 19 20 16 17 18 7 2 8 15 1 6 11 12 13 684 | 18 17 15 14 20 11 88 7 8 3 4 685 | 18 19 20 16 4 10 1 15 88 9 686 | 15 88 18 20 14 13 11 3 2 19 687 | 17 18 19 20 8 7 11 88 14 16 3 688 | 19 20 17 15 16 10 11 1 13 12 7 689 | 16 17 19 20 5 4 1 13 2 6 8 7 690 | 16 17 18 19 8 9 11 4 5 1 691 | 17 18 19 20 16 1 9 692 | 17 88 19 20 12 15 1 2 11 6 13 693 | 19 20 88 15 17 3 2 13 694 | 18 16 17 15 4 5 3 13 88 695 | 18 19 20 5 1 16 696 | 18 19 20 11 12 1 5 88 697 | 15 16 17 88 88 7 10 9 12 698 | 17 19 20 14 1 2 8 9 699 | 17 16 19 20 10 12 14 15 88 7 8 88 5 4 700 | 17 18 19 20 10 11 9 2 1 5 4 701 | 17 18 19 20 88 9 10 11 1 2 13 14 702 | 16 17 18 19 11 12 15 13 1 2 3 8 7 9 10 703 | 18 19 20 1 2 3 4 5 16 14 9 12 704 | 17 18 20 15 12 13 1 2 11 3 16 5 705 | 15 16 88 20 3 13 1 2 706 | 16 17 18 19 20 1 2 3 4 15 11 7 10 707 | 18 19 17 20 1 13 5 6 88 11 15 708 | 17 18 19 20 9 10 1 15 16 709 | 17 18 19 20 1 2 15 11 88 88 13 710 | 17 18 19 20 11 14 16 3 4 15 1 711 | 16 17 18 19 20 14 1 88 9 8 3 88 712 | 17 18 19 20 14 10 9 13 1 2 5 3 11 713 | 15 16 20 88 714 | 15 16 17 18 20 8 10 11 1 13 3 12 715 | 17 18 19 20 14 11 1 716 | 16 17 18 19 20 9 1 11 10 2 717 | 19 20 16 17 18 11 10 1 3 2 718 | 16 17 18 19 20 88 88 719 | 18 19 20 17 16 88 13 3 11 9 10 720 | 17 16 18 19 20 14 88 1 2 3 8 9 10 721 | 1 12 88 19 20 10 722 | 17 18 19 20 14 16 723 | 16 17 19 20 15 1 13 14 3 4 724 | 18 19 20 16 15 1 17 8 725 | 88 19 20 13 12 17 4 726 | 18 19 20 17 1 10 15 2 16 13 727 | 15 17 16 18 7 20 11 2 14 1 728 | 18 19 88 88 11 13 12 3 1 729 | 18 19 20 12 88 5 2 88 10 730 
| 17 19 18 20 15 12 9 11 1 6 5 731 | 11 20 19 7 10 1 2 3 9 732 | 16 17 18 19 20 1 2 5 15 88 733 | 14 18 16 19 20 6 5 1 2 88 88 8 734 | 16 88 19 20 2 17 15 11 735 | 16 17 19 20 88 736 | 14 19 20 16 13 11 6 4 88 3 737 | 18 19 20 11 88 13 1 5 88 738 | 17 18 19 20 9 10 13 16 739 | 19 20 18 17 5 4 3 740 | 16 17 18 20 13 15 10 88 741 | 18 19 20 12 16 8 1 9 14 4 88 742 | 16 17 18 19 20 1 2 3 8 14 743 | 17 18 19 20 9 13 744 | 15 17 18 19 20 11 6 7 745 | 17 18 19 20 6 88 2 10 12 746 | 18 19 20 88 88 13 88 88 2 10 747 | 17 18 19 20 10 2 88 5 3 1 748 | 17 18 19 20 9 10 2 16 15 14 749 | 18 19 20 9 16 17 15 13 750 | 17 18 19 20 3 11 9 15 5 751 | 17 18 19 20 11 3 14 1 752 | 18 19 20 10 8 9 15 88 753 | 16 17 18 19 20 15 1 6 754 | 17 18 19 20 6 1 2 12 88 755 | 17 19 18 20 10 9 13 2 14 756 | 17 18 19 20 15 10 88 14 757 | 17 18 19 20 13 11 12 14 88 10 758 | 17 18 19 20 14 16 759 | 18 19 20 15 12 14 760 | 16 17 19 20 12 11 88 761 | 18 19 20 10 12 17 13 2 1 3 11 14 7 762 | 16 17 18 19 20 12 15 1 2 9 10 6 763 | 16 17 19 18 20 13 9 3 11 764 | 16 18 19 20 14 1 88 7 8 9 765 | 17 18 19 20 16 14 15 1 4 766 | 14 15 18 19 20 4 9 7 767 | 14 15 17 18 20 1 2 11 12 4 5 13 768 | 17 15 19 20 16 10 9 8 5 2 769 | 18 19 20 17 4 5 6 9 10 770 | 18 19 20 8 13 16 11 12 771 | 17 18 19 20 7 88 3 13 772 | 15 16 17 18 19 20 1 11 88 773 | 18 19 20 15 13 10 9 11 12 7 5 2 4 3 774 | 16 17 18 19 20 4 5 6 1 10 12 775 | 16 17 18 19 20 776 | 17 18 19 20 16 14 1 777 | 17 18 19 20 1 14 3 2 778 | 17 18 19 20 3 7 1 2 16 11 13 779 | 17 19 20 16 14 15 5 4 2 13 88 780 | 17 18 19 20 14 16 15 2 1 4 88 781 | 16 17 18 19 20 88 14 8 4 5 11 2 782 | 16 18 17 88 20 12 13 3 1 2 783 | 14 17 18 19 20 6 12 7 15 1 3 784 | 15 16 17 19 20 11 14 785 | 16 18 17 19 20 5 6 4 3 11 12 7 786 | 17 18 19 20 5 7 8 9 11 12 1 4 2 787 | 20 17 18 19 15 10 13 788 | 15 19 18 20 1 3 88 789 | 15 18 17 20 4 5 16 790 | 17 18 88 88 11 1 88 791 | 15 16 17 19 20 6 10 5 88 2 3 88 792 | 17 18 19 20 9 8 15 1 2 88 88 793 | 17 18 19 20 8 9 7 794 | 18 19 20 17 16 13 
5 1 795 | 17 18 20 15 11 14 796 | 18 19 20 16 15 17 9 797 | 17 18 19 20 7 6 10 11 1 798 | 18 19 20 14 13 12 799 | 18 19 20 9 88 10 16 2 5 17 14 800 | 88 16 18 14 19 88 7 1 2 5 6 8 88 801 | 18 10 13 15 19 6 20 802 | 20 18 17 3 803 | 19 20 11 13 804 | 19 8 20 17 805 | 20 19 17 18 11 806 | 19 20 17 16 1 7 807 | 18 20 19 17 88 5 1 808 | 20 18 88 88 809 | 20 19 88 17 10 88 88 810 | 20 15 88 14 88 1 7 88 811 | 20 19 9 10 1 2 18 17 812 | 19 20 18 14 13 5 6 17 813 | 20 18 19 13 12 4 1 814 | 20 19 18 16 17 13 1 815 | 20 19 88 15 88 7 9 816 | 20 17 88 16 2 1 3 13 12 817 | 20 18 19 16 3 88 818 | 20 19 16 18 13 819 | 20 18 19 17 5 820 | 19 20 18 16 821 | 18 19 20 16 5 9 4 8 822 | 20 19 17 18 12 823 | 19 18 17 20 14 11 824 | 18 20 16 17 88 825 | 20 18 88 88 1 2 10 826 | 20 16 18 10 11 13 827 | 20 19 18 17 10 1 2 4 828 | 20 18 19 14 829 | 19 20 17 16 15 1 830 | 19 20 16 17 831 | 20 19 18 3 88 832 | 19 20 15 18 13 14 833 | 19 20 17 14 834 | 20 16 18 88 88 17 835 | 20 18 836 | 20 15 88 17 10 837 | 20 17 13 12 11 838 | 19 20 17 16 15 4 5 10 839 | 20 19 18 14 840 | 19 18 20 841 | 20 88 18 12 13 11 842 | 20 16 19 17 12 88 88 88 843 | 20 19 17 11 5 3 15 844 | 19 20 88 14 4 7 3 15 845 | 20 19 88 15 17 10 9 88 4 5 846 | 19 20 18 14 15 1 4 847 | 18 20 19 2 1 848 | 19 20 17 18 16 7 8 849 | 17 19 20 11 13 1 5 4 8 850 | 20 19 17 16 12 13 1 6 851 | 19 17 20 88 10 852 | 19 20 17 18 15 853 | 20 18 19 2 1 5 13 14 854 | 17 20 19 18 15 1 855 | 20 18 19 10 12 856 | 88 20 18 16 14 88 857 | 20 19 11 7 8 88 4 858 | 19 20 15 16 14 4 6 859 | 20 18 17 19 88 88 860 | 20 19 18 17 16 14 13 2 10 11 1 861 | 20 19 17 18 4 5 14 1 2 862 | 17 18 20 15 88 863 | 20 19 18 11 88 2 1 3 864 | 19 20 18 15 5 9 88 8 865 | 20 19 18 1 2 866 | 19 20 88 15 1 88 11 867 | 20 19 17 11 1 2 3 7 12 868 | 20 19 18 12 1 6 869 | 19 20 18 4 3 870 | 18 17 88 20 14 11 6 7 1 3 871 | 20 19 17 14 13 5 6 872 | 17 19 18 20 88 2 3 1 873 | 20 18 11 9 2 4 874 | 20 18 17 10 88 8 1 88 875 | 20 18 19 7 876 | 88 20 17 18 16 1 2 88 877 | 19 18 17 20 
6 12 13 878 | 18 19 20 12 16 11 8 879 | 88 20 8 14 15 9 880 | 20 19 18 17 14 15 881 | 20 18 12 13 88 88 15 88 882 | 19 20 16 14 17 5 1 883 | 20 15 7 8 88 13 884 | 15 17 16 20 18 885 | 19 10 13 20 3 886 | 16 17 20 18 10 887 | 20 17 18 19 11 88 1 888 | 18 88 10 11 12 13 1 889 | 18 19 20 1 2 88 890 | 18 20 19 12 15 1 8 891 | 20 19 17 18 1 9 10 892 | 18 19 20 14 13 17 16 893 | 20 19 1 88 88 894 | 16 17 19 18 20 88 10 895 | 20 19 17 1 9 896 | 20 19 88 3 2 13 1 12 897 | 18 19 20 12 13 1 88 5 898 | 18 19 20 16 9 12 1 899 | 88 1 14 19 18 20 17 1 900 | 18 19 20 6 1 901 | 18 88 20 12 9 1 88 8 902 | 17 18 19 20 1 3 88 88 903 | 18 19 20 14 1 9 904 | 18 19 20 1 16 3 905 | 17 19 20 18 88 2 906 | 18 88 20 13 88 10 88 11 907 | 17 19 18 20 10 12 908 | 16 19 20 1 2 10 88 909 | 1 19 20 910 | 19 20 9 1 911 | 17 18 19 20 11 88 7 912 | 19 20 18 11 6 7 88 913 | 19 17 20 1 2 914 | 18 17 19 20 88 1 915 | 19 14 17 20 88 3 2 916 | 88 19 20 10 15 4 2 917 | 17 18 19 20 13 11 12 1 918 | 17 19 20 10 1 6 9 18 919 | 14 15 16 18 20 88 11 88 920 | 19 12 20 1 2 88 921 | 16 17 18 20 11 3 6 88 2 922 | 17 88 19 20 15 1 6 88 923 | 16 88 88 20 11 7 924 | 19 20 18 11 12 4 1 925 | 19 20 9 7 88 13 1 926 | 18 19 20 14 3 1 927 | 18 19 20 15 11 88 13 7 8 928 | 18 19 20 17 2 16 13 12 929 | 15 16 19 20 17 4 5 8 1 930 | 15 16 19 18 88 12 8 1 5 931 | 17 19 20 13 1 88 6 3 932 | 15 16 18 20 1 11 17 933 | 88 15 16 20 88 3 934 | 14 15 17 18 20 88 1 935 | 16 17 19 15 20 1 2 4 88 936 | 19 20 14 15 1 9 88 88 937 | 18 19 20 14 1 2 12 8 938 | 19 88 11 12 16 88 939 | 17 18 19 20 1 12 14 940 | 17 19 18 15 20 1 16 941 | 17 18 20 6 4 14 1 942 | 17 18 19 20 3 12 88 943 | 17 18 19 20 14 1 3 12 9 13 944 | 88 19 20 6 8 5 13 1 945 | 88 17 19 20 11 12 1 946 | 17 16 19 20 1 88 5 88 947 | 16 17 19 20 88 6 948 | 20 15 18 9 1 17 949 | 17 18 20 19 1 10 950 | 19 20 18 11 12 88 1 2 3 951 | 16 17 19 20 1 952 | 16 17 20 19 9 12 1 953 | 19 20 11 8 7 88 2 954 | 17 18 20 1 8 10 955 | 15 20 17 19 88 1 11 956 | 88 19 20 1 16 6 88 957 | 17 18 19 20 
6 1 88 958 | 4 17 16 19 20 1 12 959 | 18 88 20 1 2 960 | 15 16 20 14 19 18 88 88 961 | 1 3 18 19 20 88 11 13 962 | 19 20 17 3 8 6 10 88 5 14 963 | 4 8 19 12 88 18 964 | 18 19 20 2 1 88 9 16 965 | 20 19 16 12 17 6 3 966 | 19 20 17 18 1 10 12 13 88 88 967 | 19 18 20 17 3 88 1 11 968 | 18 19 88 11 12 13 17 969 | 18 19 20 2 10 88 1 970 | 19 20 18 12 15 9 971 | 20 19 18 10 88 88 2 13 972 | 18 19 20 13 14 11 15 88 6 973 | 18 19 20 1 14 16 15 88 974 | 16 18 19 20 10 8 6 9 975 | 17 18 19 20 1 3 976 | 17 18 19 20 1 2 3 16 977 | 18 19 17 20 16 9 8 88 978 | 19 20 18 9 16 10 8 1 979 | 1 19 20 16 17 5 88 980 | 18 19 20 16 17 1 11 981 | 18 19 20 16 13 12 3 2 9 10 5 982 | 18 19 20 17 3 15 1 2 9 983 | 17 18 19 20 11 9 984 | 18 19 20 17 9 8 10 3 985 | 18 17 19 20 2 3 6 14 986 | 18 19 20 17 10 11 13 987 | 88 18 19 20 10 4 5 1 2 13 988 | 17 18 88 20 9 1 2 3 10 989 | 18 19 20 16 17 5 3 2 88 990 | 16 17 18 19 20 12 11 1 3 991 | 16 18 17 19 20 7 2 1 3 992 | 9 16 18 19 20 88 7 8 993 | 17 18 19 20 6 7 16 15 994 | 18 10 19 20 17 16 7 6 1 995 | 17 18 19 20 9 10 7 996 | 18 16 15 20 4 10 88 997 | 15 19 20 18 9 10 11 12 13 998 | 17 18 19 20 3 4 1 2 16 999 | 16 18 19 20 9 14 15 1 2 1000 | 18 20 19 88 1 4 1001 | 17 16 19 18 20 10 88 12 1 2 9 8 3 1002 | 16 17 18 19 20 12 15 4 2 88 6 88 1003 | 16 17 19 20 18 1 7 3 88 1004 | 19 20 17 18 11 12 4 7 1005 | 16 17 20 19 10 11 2 5 4 1006 | 17 18 19 20 14 15 4 7 1007 | 17 18 19 20 2 3 10 9 1008 | 18 19 20 15 16 18 7 1009 | 17 18 19 20 2 1 1010 | 88 19 20 1 5 11 16 12 6 1011 | 17 19 20 18 88 10 1012 | 16 17 19 18 20 88 2 1 11 1013 | 19 18 20 5 15 1 2 3 13 1014 | 16 15 17 18 19 20 4 5 1 3 1015 | 17 18 19 20 10 6 5 1016 | 17 18 19 20 16 1 14 13 1017 | 17 18 19 20 13 14 1018 | 16 15 20 17 9 1 2 3 1019 | 88 18 19 15 20 10 7 8 14 1020 | 17 18 19 20 16 10 14 5 1021 | 18 19 20 17 5 4 1 2 14 1022 | 18 19 16 20 1 2 1023 | 17 18 20 19 7 9 12 1 2 3 14 1024 | 18 17 19 20 15 8 9 10 1025 | 17 18 19 20 3 2 1 4 1026 | 16 17 18 88 19 1 4 11 12 3 8 1027 | 18 19 20 17 14 13 
15 1 3 4 2 1028 | 18 19 20 15 16 9 11 1029 | 17 18 19 20 9 10 1 4 1030 | 17 18 19 20 6 8 11 1031 | 17 19 20 88 12 14 5 9 10 1 1032 | 17 18 19 20 88 9 2 3 5 1 1033 | 18 19 20 11 13 14 12 10 1034 | 17 18 19 20 10 9 1 2 11 1035 | 17 18 19 20 11 10 14 1036 | 16 17 18 88 20 1 2 9 88 6 3 1037 | 17 18 19 20 14 13 15 6 1038 | 18 17 19 20 12 13 14 1039 | 18 19 20 17 9 10 14 13 5 2 1040 | 18 17 19 20 13 14 1041 | 20 1 3 6 15 88 11 1042 | 20 1 4 5 3 17 13 1043 | 14 20 15 17 19 16 5 2 88 88 1044 | 18 19 20 1 8 88 1045 | 1 4 20 19 2 15 12 88 1046 | 1 20 88 17 88 10 1047 | 14 15 18 20 17 19 1048 | 18 17 11 20 12 13 16 19 1049 | 1 2 3 6 5 20 19 88 1050 | 18 19 20 15 17 1 2 1051 | 19 20 2 8 1 11 18 13 88 1052 | 6 9 14 20 4 2 1 16 17 3 1053 | 2 1 20 5 16 1054 | 5 19 20 1 2 6 11 13 15 1055 | 19 20 16 17 88 5 12 4 1056 | 2 17 18 19 1 3 7 12 13 1057 | 3 88 13 16 17 88 8 6 5 4 1058 | 9 18 13 19 20 8 88 17 1059 | 1 88 19 20 17 18 88 1060 | 10 9 88 20 6 8 1061 | 1 2 3 9 20 88 1062 | 2 18 19 20 1 3 15 88 1063 | 1 9 20 19 88 1064 | 11 1 2 18 19 88 1065 | 16 15 1 5 20 1066 | 1 2 88 20 18 88 11 1067 | 2 5 3 20 1 14 1068 | 88 2 4 5 88 16 3 88 88 9 10 6 20 1069 | 2 5 16 15 20 18 88 1070 | 14 11 12 13 88 20 9 10 17 18 1071 | 1 3 2 5 4 7 17 16 20 1072 | 8 7 9 10 20 88 6 1073 | 1 3 10 12 11 13 19 20 16 2 4 88 1074 | 6 7 13 20 18 2 1 3 1075 | 10 12 13 88 88 20 17 1076 | 12 10 16 8 9 18 15 13 1077 | 9 10 88 13 19 17 20 1078 | 1 3 2 15 4 20 1079 | 14 11 19 20 18 1080 | 1 2 88 20 19 16 17 1081 | 1 3 2 11 20 19 10 88 18 1082 | 1 6 20 2 7 13 10 1083 | 1 17 18 20 7 2 5 1084 | 14 16 19 20 1085 | 1 7 4 2 17 20 1086 | 2 3 1 15 7 6 20 1087 | 1 2 3 14 15 16 18 88 19 1088 | 16 18 19 20 12 13 1089 | 1 4 5 8 88 17 18 19 20 15 16 1090 | 19 13 17 12 16 1 5 20 88 11 2 1091 | 6 10 17 18 16 1 4 2 88 1092 | 1 2 5 10 12 13 20 4 17 3 1093 | 5 7 11 12 10 9 16 17 15 20 1094 | 9 3 7 13 18 19 88 2 1 1095 | 5 2 3 4 6 17 16 20 88 1096 | 1 12 13 11 2 14 19 88 1097 | 1 2 17 12 15 13 18 14 1098 | 1 2 88 10 8 19 7 6 20 16 15 
1099 | 7 8 14 19 18 20 5 4 6 88 1100 | 2 1 19 14 20 10 88 1101 | 17 18 19 20 1 11 13 3 1102 | 2 15 16 17 7 8 88 1 88 20 1103 | 9 12 13 15 17 18 88 8 3 1 5 11 1104 | 5 17 19 15 12 20 13 8 7 9 1105 | 17 18 20 12 17 88 8 6 1106 | 1 2 4 88 19 20 18 1107 | 1 17 20 18 13 15 11 12 5 16 2 1108 | 9 15 18 16 20 5 8 88 1109 | 1 4 2 17 15 19 88 1110 | 11 8 9 19 14 20 6 3 1111 | 1 2 5 6 14 13 17 20 3 1112 | 9 19 5 20 88 12 1 10 2 1113 | 3 6 8 14 18 19 1 2 1114 | 1 2 17 19 18 88 1115 | 1 2 3 11 12 18 19 20 15 1116 | 1 2 3 88 6 12 88 17 13 16 20 88 1117 | 88 1 88 20 6 15 12 14 3 8 4 5 1118 | 12 1 3 20 88 18 11 1119 | 1 2 9 10 18 17 16 20 1120 | 1 2 3 5 6 7 14 88 8 9 20 1121 | 1 88 19 18 20 88 1122 | 10 13 17 14 18 20 88 1123 | 18 19 20 15 8 12 1124 | 15 20 19 17 1 2 16 7 1125 | 18 19 20 17 6 12 1126 | 1 16 17 20 88 88 1127 | 1 2 17 19 88 88 11 18 13 1128 | 11 15 18 17 88 9 88 1129 | 18 19 20 17 2 1130 | 17 19 20 12 15 9 1131 | 15 88 19 20 10 1132 | 14 15 19 18 88 6 1133 | 14 13 19 18 20 16 1134 | 10 19 20 18 15 1135 | 16 19 20 18 88 88 88 1136 | 13 20 17 19 1 1137 | 8 19 20 18 17 88 1 88 88 1138 | 18 19 88 20 88 16 9 10 1139 | 19 20 16 4 88 88 1140 | 18 19 20 13 88 1 1141 | 18 19 20 12 8 1142 | 18 19 20 88 15 1143 | 19 20 88 16 1144 | 17 19 20 11 13 88 1145 | 17 19 20 5 88 1146 | 18 88 20 13 8 9 10 11 1147 | 17 18 19 20 10 88 13 1148 | 18 20 19 1 10 1149 | 16 17 15 19 20 88 1150 | 18 15 19 20 1151 | 16 88 19 20 11 1 7 1152 | 18 19 20 1 1153 | 1 6 7 19 20 1154 | 18 17 19 20 88 1155 | 10 17 18 88 88 1156 | 17 18 20 88 4 5 19 10 1157 | 19 20 11 12 13 18 1158 | 17 18 19 20 16 15 10 1159 | 15 18 16 19 20 1160 | 19 20 18 9 88 1161 | 18 19 20 12 2 5 1 4 1162 | 16 17 88 20 14 12 15 1 1163 | 19 20 18 16 15 11 13 1164 | 19 20 14 11 4 7 1165 | 19 18 20 15 88 1166 | 19 20 1 14 88 1167 | 17 18 19 20 2 9 1168 | 88 13 19 20 88 1169 | 16 17 19 20 12 13 88 6 1170 | 19 20 18 8 5 4 88 1171 | 15 17 20 88 88 1172 | 14 17 19 20 1 11 3 5 1173 | 88 3 17 19 20 13 12 1174 | 18 19 20 17 5 88 4 1175 | 88 16 
19 20 1176 | 18 20 10 16 88 88 88 1177 | 88 20 19 14 9 4 12 5 1178 | 17 18 19 20 11 88 1179 | 18 19 20 10 11 1 1180 | 9 18 10 11 20 14 1181 | 17 88 19 20 15 9 13 11 4 1182 | 12 17 18 20 13 1183 | 18 19 20 88 17 16 88 88 1184 | 19 20 18 17 8 1185 | 88 18 19 20 5 1186 | 18 19 20 11 88 88 1187 | 13 18 17 20 19 2 1188 | 20 18 17 15 88 88 1189 | 18 19 20 17 88 1190 | 17 18 19 20 11 6 1 1191 | 19 20 14 17 6 3 88 1192 | 5 19 20 88 88 10 1193 | 17 88 19 20 88 88 10 7 1194 | 17 18 19 20 13 4 88 1195 | 17 19 18 20 11 10 1196 | 18 19 20 6 88 1 1197 | 18 19 20 14 6 1198 | 18 19 20 12 5 13 88 88 1199 | 18 19 20 17 88 1200 | 88 16 19 88 14 2 1 11 1201 | -------------------------------------------------------------------------------- /content/assignments/Assignment_2:Search_of_Associative_Memory_Model/README.md: -------------------------------------------------------------------------------- 1 | # Assignment 2: Search of Associative Memory (SAM) Model 2 | 3 | ## Overview 4 | In this assignment, you will implement the **Search of Associative Memory (SAM) model** as described in [Kahana (2012), Chapter 7](https://www.dropbox.com/scl/fi/ujl8yvxqzcb1gf32to4zb/Kaha12_SAM_model_excerpt.pdf?rlkey=254wtw4fm7xnpzelno2ykrxzu) (which is, in turn, adapted from [Atkinson and Shiffrin, 1968](https://www.dropbox.com/scl/fi/rpllozjcv704okckjdy5k/AtkiShif68.pdf?rlkey=i0azhj9mqxws7bxocbl65j88d)). The SAM model is a probabilistic model of free recall that assumes items are encoded into a **short-term store (STS)** and **long-term store (LTS)**, with retrieval governed by associative processes. You will fit your implementation to [Murdock (1962)](https://www.dropbox.com/scl/fi/k7jc1b6uua4m915maglpl/Murd62.pdf?rlkey=i5nc7lzb2pw8dxc6xc72r5r5i&dl=1) free recall data and evaluate how well the model explains the observed recall patterns. 5 | 6 | ## Data Format and Preprocessing 7 | The dataset consists of sequences of recalled items from multiple trials of a free recall experiment. 
Each row represents a participant’s recall sequence from a single list presentation. 8 | 9 | - **Numbers (1, 2, ..., N)**: Serial position of recalled items. 10 | - **88**: Denotes errors (recalls of items that were never presented) 11 | - **Example:** 12 | 13 | ``` 14 | 6 1 4 7 10 2 8 15 | 10 9 6 8 2 1 88 16 | 10 7 9 8 1 17 | ``` 18 | 19 | This represents three recall trials. Each row contains the order in which items were recalled for one list. 20 | 21 | The file names (e.g., `fr10-2.txt`) denote the list length (first number) and presentation rate, in seconds (second number), respectively. 22 | 23 | ## Model Description 24 | 25 | The SAM model defines how memories are **encoded** and **retrieved**, as follows: 26 | 27 | ### 1. **Encoding Stage: Memory Storage** 28 | When an item is presented, it is stored in **both STS and LTS**: 29 | - **STS (Short-Term Store)**: Holds a **limited** number of items (size $r$), with older items **displaced** as new ones enter. The probability of being displaced is governed by a displacement parameter, $q$, along with the item's age in short-term memory (i.e., the number of timesteps elapsed since it entered the STS; $t$): 30 | 31 | $$ 32 | p(\text{displacement of item with age }t) = \frac{q (1 - q)^{t - 1}}{1 - (1 - q)^r} 33 | $$ 34 | 35 | - **LTS (Long-Term Store)**: Holds an (effectively) **unlimited number of associations** between (a) **pairs of items** and (b) **items and context**. In this model, "context" acts like an "item" that is ever-present in the STS (and can never be recalled). The association strength between items $i$ and $j$ is denoted $S(i, j)$ and the association strength between item $i$ and $context$ is denoted $S(i, context)$. 
With each new item presentation, $S(i, j)$ is incremented by $a$ for every pair of items $i$ and $j$ that occupy STS at the same time: 36 | 37 | $$ 38 | a = 39 | \begin{cases} 40 | a_{\text{forward}}, & \text{if } t(i) \geq t(j) \\ 41 | a_{\text{backward}}, & \text{if } t(i) < t(j) 42 | \end{cases}, 43 | $$ 44 | 45 | where $t(i)$ and $t(j)$ denote the ages of items $i$ and $j$ in the STS, respectively. In addition, $S(i, context)$ is also incremented by $a_{\text{forward}}$ for each item in STS at a given timestep. In other words, after studying a given list, the associations $S(i, context)$ for each item $i$ will be proportional to the number of timesteps that each item remained in STS. 46 | 47 | ### 2. **Retrieval Stage: Memory Search** 48 | The retrieval process happens in several stages, first involving **STS** and then involving **LTS**: 49 | - **Recall from STS**: while items remain in STS, they are selected at random, recalled, and then removed from STS. Item selection may either be uniform, or may be set to be inversely proportional to each item's age, $t_{\text{rel}}$, relative to the youngest item still in STS (and assuming that $k$ items remain in STS): 50 | 51 | $$ 52 | p(\text{recall of item with relative age }t_{\text{rel}}) \propto \frac{1 - (1 - q)^k}{q (1 - q)^{t_{\text{rel}} - 1}} 53 | $$ 54 | - After STS has been emptied, we begin to retrieve items from LTS through two pathways: either associations with **context** or associations between *both* **context and other items**. In addition, all retrievals happen in a two-part process (until either the process is halted as described below, or until all studied items have been recalled): 55 | - First, an item is *sampled*. This is how an item is "chosen" for consideration as a recall candidate. 56 | - Second, the sampled item is *potentially recalled*. Recall is a probabilistic process (i.e., not guaranteed to happen for every sampled item). 
57 | 58 | #### Sampling 59 | 60 | The probability of sampling item $i$ through its associations with context is given by 61 | 62 | $$ 63 | p(\text{sampling item }i | context) = \frac{S(i, context)^{W_c}}{\sum_{n=1}^{N} S(n, context)^{W_c}}, 64 | $$ 65 | 66 | where $W_c$ is a scalar parameter that governs the "contextual" cueing process. Similarly, the probability of sampling item $i$ through its associations with *both* item $j$ *and* context is given by: 67 | 68 | $$ 69 | p(\text{sampling item }i | j, context) = \frac{S(i, j)^{W_e}S(i, context)^{W_c}}{\sum_{n=1}^{N} S(n, j)^{W_e}S(n, context)^{W_c}}, 70 | $$ 71 | 72 | where $W_e$ is an analogous scalar parameter that governs the "episodic" cueing process. 73 | 74 | We also place an additional constraint on the sampling procedure to prevent repeated recalls: if the sampled item has already been recalled, we re-run the sampling procedure. This occurs as many times as needed to sample a not-yet-recalled item. 75 | 76 | #### Recall 77 | 78 | If an item $i$ is sampled through its associations with *context*, then it is *recalled* with probability given by: 79 | 80 | $$ 81 | p(\text{recall item }i | context) = 1 - \exp\left( -W_c S(i, context)\right). 82 | $$ 83 | 84 | Alternatively, if an item $i$ is sampled through its associations with both item $j$ and context, then: 85 | 86 | $$ 87 | p(\text{recall item }i | j, context) = 1 - \exp\left( -W_e S(i, j) - W_c S(i, context)\right). 88 | $$ 89 | 90 | To decide whether a given recall (of item $i$) occurs, draw a random number $\theta$ uniformly from the interval $[0, 1]$. If $\theta < p(\text{recall item }i)$ then: 91 | - the two "counter" parameters, $m_1$ and $m_2$, are both reset to 0 92 | - $S(i, context)$ is incremented by $a_\text{forward}$ 93 | - item $i$ is recalled 94 | 95 | Otherwise the recall failure procedure is called next. 
96 | 97 | #### Recall failures 98 | 99 | In the event that an item is sampled but *not* recalled, the model edges closer to a stop condition by incrementing a counter. If the item was sampled via *context alone*, then we increment $m_1$. If $m_1 > m_{1_{\text{max}}}$, the recall procedure is halted. Alternatively, if the item was sampled via *context and its associations with another item*, then we instead increment $m_2$. If $m_2 > m_{2_{\text{max}}}$, the recall procedure is halted. In either scenario, if the relevant threshold has not yet been reached, the next candidate item is sampled using *context alone* as a retrieval cue. 100 | 101 | 102 | ### 3. **Fitting the Model** 103 | You will fit **eight parameters** to optimize the match to human recall data: 104 | 1. $r$: number of items that can fit in STS 105 | 2. $q$: STS displacement parameter 106 | 3. $a_{\text{forward}}$: LTS memory strength increment in the *forward* direction 107 | 4. $a_{\text{backward}}$: LTS memory strength increment in the *backward* direction 108 | 5. $W_c$: contextual association parameter 109 | 6. $W_e$: episodic association parameter 110 | 7. $m_{1_{\text{max}}}$: maximum number of *contextual* association cueing failures 111 | 8. $m_{2_{\text{max}}}$: maximum number of *episodic* association cueing failures 112 | 113 | You can choose any approach you wish to fit these parameters. My "recommended" approach is to use [skopt.optimizer.gp_minimize](https://scikit-optimize.github.io/stable/modules/generated/skopt.optimizer.gp_minimize.html#skopt.optimizer.gp_minimize) to minimize the mean squared error between the point-by-point observed vs. 
model-predicted values for the following behavioral curves: 114 | - $p(\text{first recall})$: probability of recalling each item **first** as a function of its *presentation position* 115 | - $p(\textit{recall})$: probability of recalling each item at *any* output position as a function of its presentation position 116 | - lag-CRP: probability of recalling item $i$ given that item $j$ was the previous recall, as a function of $lag = i - j$. 117 | 118 | The "right" way to do this is to use a subset of the data to estimate the parameters (i.e., training data), and then plot the observed and predicted results for the remaining (held-out) test data. You'll need to do some experimenting to determine an appropriate proportion of the data to assign to the training vs. test datasets. (Randomly assigning half of the data to each group is a good place to start.) 119 | 120 | ## Implementation Tasks 121 | You can use the [example notebook](https://contextlab.github.io/memory-models-course/assignments/Assignment_2%3ASearch_of_Associative_Memory_Model/sam_assignment_template.html) to help get you started with implementing the SAM model. 
122 | 123 | ### **Step 1: Implement Memory Encoding** 124 | - Fill in the missing code for the `present` method in the `STS` class 125 | - Fill in the missing code for the `update` method in the `LTS` class 126 | 127 | ### **Step 2: Implement Retrieval Process** 128 | - Fill in the missing code for the `retrieve` method in the `SAM` class 129 | 130 | ### **Step 3: Load and Process Data** 131 | - Read in the recall dataset 132 | - Write functions for assigning trials to the training and test datasets 133 | 134 | ### **Step 4: Generate Behavioral Curves** 135 | - Generate the following averaged curves, for each list length and presentation rate: 136 | - $p(\text{first recall})$ 137 | - $p(\textit{recall})$ 138 | - lag-CRP 139 | - To help with computing mean squared error, it will be useful to have a function that takes in a dataset as input and returns a vector comprising each of these curves, for each list length and presentation rate, concatenated together into a single vector. 140 | 141 | ### **Step 5: Fit Model Parameters** 142 | - To compute mean squared error for a given set of model parameters, use the function you wrote above to compute the concatenated behavioral curves for the *observed recalls* and the *model-predicted recalls*. The average squared point-by-point difference between the vectors is the mean squared error. You'll want to set up [skopt.optimizer.gp_minimize](https://scikit-optimize.github.io/stable/modules/generated/skopt.optimizer.gp_minimize.html#skopt.optimizer.gp_minimize) to find the set of model parameters that minimizes the mean squared error between the observed and predicted curves, using only the training dataset. 143 | - Importantly, you should use the same parameters across all trials and experimental conditions. You're fitting the *average* performance, not data from individual trials or participants. 
144 | 145 | ### **Step 6: Generate Key Plots** 146 | For each combination of **list length** and **presentation rate**, plot the *observed* and *model-predicted* behavioral curves for the *test* data: 147 | - $p(\text{first recall})$ 148 | - $p(\textit{recall})$ 149 | - lag-CRP 150 | 151 | Use dotted lines for the observed curves and solid lines for the model-predicted curves. Each metric should get its own figure (or sub-figure). You can either plot the curves for different list lengths and presentation rates on the same axes (e.g., using different colors for each experimental condition) or on different axes (one per experimental condition). 152 | 153 | ### **Step 7: Evaluate the Model** 154 | - Write a **brief discussion** (3-5 sentences) addressing: 155 | - **Does the model explain the data well?** 156 | - **Which patterns are well captured?** 157 | - **Where does the model fail, and why?** 158 | - **Potential improvements or limitations of SAM.** 159 | 160 | ## Optional extensions 161 | - Pick a behavioral curve and a parameter. Holding all of the other parameters fixed to their best-fitting values, vary the given parameter. How does the predicted behavioral curve change? Which predictions are sensitive to which parameters? 162 | - Can you figure out a way to generate *distributions* of parameter estimates? For example, what happens if you fit the model to individual trials (or small subsets of trials)? Extra challenge: explore the covariance structure between different combinations of parameters. Describe any interesting relationships between parameters that you observe. 163 | 164 | 165 | ## Submission Instructions 166 | - [Submit](https://canvas.dartmouth.edu/courses/71051/assignments/517354) a **Google Colaboratory notebook** (or similar) that includes: 167 | - Your **full implementation** of the SAM model. 168 | - **Markdown cells** explaining your code, methodology, and results. 169 | - **All required plots** comparing model predictions to observed data. 
170 | - **A short written interpretation** of model performance. 171 | 172 | Good luck, and happy modeling! -------------------------------------------------------------------------------- /content/assignments/Assignment_2:Search_of_Associative_Memory_Model/sam_assignment_template.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": { 6 | "colab_type": "text", 7 | "id": "view-in-github" 8 | }, 9 | "source": [ 10 | "\"Open" 11 | ] 12 | }, 13 | { 14 | "cell_type": "markdown", 15 | "metadata": {}, 16 | "source": [ 17 | "# Submission Template\n", 18 | "\n", 19 | "This notebook provides a suggested starter template for completing the [SAM model assignment](https://contextlab.github.io/memory-models-course/assignments/Assignment_2%3ASearch_of_Associative_Memory_Model/README.html).\n", 20 | "\n", 21 | "You should submit your assignment by uploading your completed notebook to [Canvas](https://canvas.dartmouth.edu/courses/71051/assignments/517354). Please ensure that your notebook runs without errors in [Google Colaboratory](https://colab.research.google.com/)." 22 | ] 23 | }, 24 | { 25 | "cell_type": "markdown", 26 | "metadata": { 27 | "id": "ZNp6i_sXKA8r" 28 | }, 29 | "source": [ 30 | "Imports" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": 1, 36 | "metadata": { 37 | "id": "rh59uGAx-N5O" 38 | }, 39 | "outputs": [], 40 | "source": [ 41 | "import pandas as pd\n", 42 | "import numpy as np\n", 43 | "import matplotlib.pyplot as plt\n", 44 | "import seaborn as sns\n", 45 | "\n", 46 | "import requests\n", 47 | "import os\n", 48 | "from tempfile import NamedTemporaryFile" 49 | ] 50 | }, 51 | { 52 | "cell_type": "markdown", 53 | "metadata": {}, 54 | "source": [ 55 | "Download dataset and store as `sequence` objects containing the `presented` and `recalled` items for each trial. 
The sequences are stored in nested dictionaries in the form\n", 56 | "```\n", 57 | "dict[list length][presentation rate]\n", 58 | "```" 59 | ] 60 | }, 61 | { 62 | "cell_type": "code", 63 | "execution_count": 3, 64 | "metadata": {}, 65 | "outputs": [], 66 | "source": [ 67 | "class item(object):\n", 68 | " idx = 1\n", 69 | "\n", 70 | " def __init__(self, val=None):\n", 71 | " if val is None:\n", 72 | " self.id = item.idx\n", 73 | " item.idx += 1\n", 74 | " else:\n", 75 | " self.id = val\n", 76 | "\n", 77 | "\n", 78 | "class sequence(object):\n", 79 | " def __init__(self, items):\n", 80 | " self.items = items\n", 81 | "\n", 82 | "\n", 83 | "def load_recall_data():\n", 84 | " base_url = \"https://raw.githubusercontent.com/ContextLab/memory-models-course/refs/heads/main/content/assignments/Assignment_2%3ASearch_of_Associative_Memory_Model/Murd62%20data/\"\n", 85 | " filenames = [\"fr10-2.txt\", \"fr15-2.txt\", \"fr20-1.txt\", \"fr20-2.txt\", \"fr30-1.txt\", \"fr40-1.txt\"]\n", 86 | "\n", 87 | " presented = {}\n", 88 | " recalled = {}\n", 89 | "\n", 90 | " for filename in filenames:\n", 91 | " list_len, pres_rate = map(int, filename.replace(\".txt\", \"\").replace(\"fr\", \"\").split(\"-\"))\n", 92 | " if list_len not in presented:\n", 93 | " presented[list_len] = {}\n", 94 | " recalled[list_len] = {}\n", 95 | " if pres_rate not in presented[list_len]:\n", 96 | " presented[list_len][pres_rate] = []\n", 97 | " recalled[list_len][pres_rate] = []\n", 98 | "\n", 99 | " # Download the file\n", 100 | " url = base_url + filename\n", 101 | " response = requests.get(url)\n", 102 | " response.raise_for_status()\n", 103 | " lines = response.text.strip().split(\"\\n\")\n", 104 | "\n", 105 | " for line in lines:\n", 106 | " recall_ids = [int(x) for x in line.strip().split() if int(x) != 88]\n", 107 | " recall_seq = sequence([item(val) for val in recall_ids])\n", 108 | " presented_seq = sequence([item(val) for val in range(1, list_len + 1)])\n", 109 | "\n", 110 | " 
presented[list_len][pres_rate].append(presented_seq)\n", 111 | " recalled[list_len][pres_rate].append(recall_seq)\n", 112 | "\n", 113 | " return presented, recalled\n", 114 | "\n", 115 | "presented, recalled = load_recall_data()" 116 | ] 117 | }, 118 | { 119 | "cell_type": "markdown", 120 | "metadata": { 121 | "id": "atpXuzOFKDSb" 122 | }, 123 | "source": [ 124 | "Basic skeleton for the SAM model" 125 | ] 126 | }, 127 | { 128 | "cell_type": "code", 129 | "execution_count": 5, 130 | "metadata": { 131 | "id": "PUXuEy_3-q3E" 132 | }, 133 | "outputs": [], 134 | "source": [ 135 | "class STS(object):\n", 136 | " def __init__(self, r, q, s_f, s_b, max_items=None, lts=None):\n", 137 | " self.r = r\n", 138 | " self.q = q\n", 139 | " if lts is None:\n", 140 | " self.LTS = LTS(max_items, s_f, s_b)\n", 141 | " else:\n", 142 | " self.LTS = lts\n", 143 | " self.items = []\n", 144 | " self.entry_times = np.zeros(1, r, dtype=np.int32)\n", 145 | "\n", 146 | " def present(self, x):\n", 147 | " # p(displacement) = q(q - q)^(i - 1) / (1 - (1 - q))^r\n", 148 | " # i: relative age of item\n", 149 | " # q, r: model params\n", 150 | " #\n", 151 | " # check current capacity; if available capacity, add item to STS. 
else displace items.\n", 152 | " pass\n", 153 | "\n", 154 | "\n", 155 | "class LTS(object):\n", 156 | " def __init__(self, max_items, s_f, s_b):\n", 157 | " self.max_items = max_items\n", 158 | " self.s_f = s_f\n", 159 | " self.s_b = s_b\n", 160 | " self.S = np.zeros((max_items, max_items), dtype=np.float32)\n", 161 | " self.context = np.zeros(max_items, dtype=np.float32)\n", 162 | " self.previous_recall = None\n", 163 | "\n", 164 | " def update(self, items):\n", 165 | " # update self.S and self.context\n", 166 | " pass\n", 167 | "\n", 168 | "class SAM(object):\n", 169 | " def __init__(self, W_c, W_e, M_1, M_2, r, q, max_items=100):\n", 170 | " self.W_c = W_c\n", 171 | " self.W_e = W_e\n", 172 | " self.M_1 = M_1\n", 173 | " self.M_2 = M_2\n", 174 | " self.m1_count = 0\n", 175 | " self.m2_count = 0\n", 176 | " self.r = r\n", 177 | " self.q = q\n", 178 | "\n", 179 | " self.STS = STS(r, q, max_items)\n", 180 | " self.LTS = LTS(max_items)\n", 181 | "\n", 182 | " def present(self, x):\n", 183 | " self.STS.present(x)\n", 184 | " self.LTS.update(self.STS.items)\n", 185 | "\n", 186 | " def retrieve(self): # retrieve a *single item*\n", 187 | " # if there's anything in STS, retrieve and remove it\n", 188 | " # else:\n", 189 | " # - sample (from context and/or context + prev item) until we get something other than the previous_recall.\n", 190 | " # (if previous_recall is the only item left, return None)\n", 191 | " # - recall (given cue strength):\n", 192 | " # - if successful, reset m1_count and m2_count, set previous_recall to item, return sampled item\n", 193 | " # - otherwise increment m1_count or m2_count. 
if either exceed M_1/M2, return None\n", 194 | " pass" 195 | ] 196 | }, 197 | { 198 | "cell_type": "markdown", 199 | "metadata": { 200 | "id": "ipssh_MdXWF8" 201 | }, 202 | "source": [ 203 | "Other tasks:\n", 204 | " - Fit params to [Murdock (1962) dataset](https://github.com/ContextLab/memory-models-course/tree/main/content/assignments/Assignment_2%3ASearch_of_Associative_Memory_Model/Murd62%20data) that you downloaded with the `load_data` function.\n", 205 | " - You'll need to define a \"loss\" function. I suggest computing MSE for one or more behavioral curves, computed for a subset of the Murdock (1962) participants/lists\n", 206 | " - I suggest using [skopt.optimizer.gp_minimize](https://scikit-optimize.github.io/stable/modules/generated/skopt.optimizer.gp_minimize.html#skopt.optimizer.gp_minimize) to estimate the model parameters.\n", 207 | " - Create observed/predicted plots for held-out data:\n", 208 | " - p(first recall)\n", 209 | " - p(recall)\n", 210 | " - lag-CRP" 211 | ] 212 | } 213 | ], 214 | "metadata": { 215 | "colab": { 216 | "include_colab_link": true, 217 | "provenance": [] 218 | }, 219 | "kernelspec": { 220 | "display_name": "memory-course", 221 | "language": "python", 222 | "name": "memory-course" 223 | }, 224 | "language_info": { 225 | "codemirror_mode": { 226 | "name": "ipython", 227 | "version": 3 228 | }, 229 | "file_extension": ".py", 230 | "mimetype": "text/x-python", 231 | "name": "python", 232 | "nbconvert_exporter": "python", 233 | "pygments_lexer": "ipython3", 234 | "version": "3.11.0" 235 | } 236 | }, 237 | "nbformat": 4, 238 | "nbformat_minor": 0 239 | } 240 | -------------------------------------------------------------------------------- /content/assignments/Assignment_3:Context_Maintenance_and_Retrieval_Model/PolyEtal09 data/README.txt: -------------------------------------------------------------------------------- 1 | This archive contains a MATLAB-based data structure containing the behavioral data 2 | from the experiment 
described in the article: 3 | 4 | A Context Maintenance and Retrieval Model of Organizational Processes in Free Recall 5 | 6 | Sean M. Polyn, Kenneth A. Norman, and Michael J. Kahana 7 | 8 | Psychological Review, Vol. 116 (1), 129-156. 9 | 10 | Refer to this manuscript for the methods of the experiment, and description of the 11 | analyses that we carried out on these data. 12 | 13 | %%%%%%%%%%%%%%%%%%% 14 | 15 | This particular file written by Sean Polyn 16 | sean.polyn@vanderbilt.edu 17 | on September 25th, 2013 18 | 19 | Send word if you find anything weird or out of sorts with the data, or the explanation of the organization of the data! 20 | 21 | If you are interested in the Context Maintenance and Retrieval model of human memory, go to this webpage: 22 | http://memory.psy.vanderbilt.edu/groups/vcml/wiki/618f3/CMR_Documentation.html 23 | 24 | Behavioral Toolbox (Release 1) analysis code available from: 25 | http://memory.psych.upenn.edu/behavioral_toolbox 26 | 27 | %%%%%%%%%%%%%%%%%%% 28 | 29 | A quick tour of the data structure. 30 | 31 | %%%%%%%%%%%%%%%%%%% 32 | 33 | If you load the file PolyEtal09_data.mat in MATLAB, you will find a structure with three fields: 34 | 35 | data.full % Data from all of the trials from the experiment, from all conditions, including practice trials 36 | data.co % Just the control trials, in which all items were studied using the same encoding task 37 | data.sh % Just the task-shift trials, in which participants shifted back and forth between the two encoding tasks 38 | 39 | The organization of the sub-fields: 40 | 41 | There are a number of sub-fields on the data structure. Each row corresponds to a particular trial. If there is more than one column, then there are two possible organizations, refer below to see which one applies. (1) Yoked to the presentation order, each column corresponds to a study event. (2) Yoked to the recall order, each column corresponds to a recall event. 
42 | 43 | The most critical sub-fields: 44 | 45 | data.subject % Each row has a numerical index, a unique subject identifier. There are 45 unique subject identifiers. The careful 46 | % observer will note that index 19 is skipped, this participant did not complete the study. 47 | data.listType % 0 = all items studied using the SIZE task, 1 = all items studied using ANIMACY task, 2 = task-shift list 48 | data.recalls % A numerical identifier for each response made by the participant during the free recall period. Integers 1-24 49 | % correspond to the serial position of the recalled item. Yoked to the recall order. -1 corresponds to an intrusion. 50 | % -2 corresponds to a repetition. 51 | data.pres_task % Which task was associated with each studied item, columns yoked to presentation events. 52 | % Task 0 is SIZE 53 | % Task 1 is ANIMACY 54 | data.listLength % There were 24 items on each study list 55 | 56 | The other sub-fields: 57 | 58 | data.session % A session label for each trial, either 1 or 2 59 | data.pres_itemnos % Each studied item has an index for the wordpool. Yoked to presentation order. 60 | data.react_time % Yoked to the study period. Time to make the task response in milliseconds. 61 | data.intrusions % -1 for extra-experimental intrusion, positive numbers correspond to how many lists back a prior-list intrusion 62 | % came from. 63 | data.times % For each recall response, how many milliseconds after the onset of the recall period was this response made. 64 | 65 | Convenience fields (technically these are redundant with information in the other fields): 66 | 67 | data.task % The task label of each recalled item (can be constructed with pres_task and recalls) 68 | data.rec_itemnos % The wordpool index for each recalled item (can be constructed with pres_itemnos and recalls) 69 | data.pres_subrec % Yoked to presentation order. 1 if the item will be recalled. 70 | data.pres_trainno % Yoked to presentation order. 
Labeling each item as to whether it is in the first train, second train, etc. 71 | data.pres_trainlen % Yoked to presentation order. How long is the train that the item resides in. 72 | data.pres_sertrain % Yoked to presentation order. Serial position of the item within a given train. 73 | data.train % Yoked to recall order, as above. 74 | data.trainlen % Yoked to recall order, as above. 75 | data.sertrain % Yoked to recall order, as above. 76 | 77 | %%%%%%%%%%%%%%%%%%% 78 | 79 | Other files that are included. 80 | 81 | %%%%%%%%%%%%%%%%%%% 82 | 83 | tfr_wp % This is the wordpool for the experiment. The index values in pres_itemnos and rec_itemnos can be used to 84 | % figure out which words are presented on each trial. 85 | 86 | sem_mat % These are the LSA values used for the semantic analyses described 87 | -------------------------------------------------------------------------------- /content/assignments/Assignment_3:Context_Maintenance_and_Retrieval_Model/PolyEtal09 data/behavior.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ContextLab/memory-models-course/d42f510330c799588d7bb22b88a07bc98a7d1513/content/assignments/Assignment_3:Context_Maintenance_and_Retrieval_Model/PolyEtal09 data/behavior.mat -------------------------------------------------------------------------------- /content/assignments/Assignment_3:Context_Maintenance_and_Retrieval_Model/PolyEtal09 data/stimuli.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ContextLab/memory-models-course/d42f510330c799588d7bb22b88a07bc98a7d1513/content/assignments/Assignment_3:Context_Maintenance_and_Retrieval_Model/PolyEtal09 data/stimuli.mat -------------------------------------------------------------------------------- /content/assignments/Assignment_3:Context_Maintenance_and_Retrieval_Model/README.md: 
-------------------------------------------------------------------------------- 1 | # Assignment 3: Context Maintenance and Retrieval (CMR) 2 | 3 | ## Overview 4 | In this assignment, you will implement the **Context Maintenance and Retrieval (CMR) model** as described in [Polyn, Norman, & Kahana (2009)](https://www.dropbox.com/scl/fi/98pui63j3o62xu96ciwhy/PolyEtal09.pdf?rlkey=42sc17ll573sm83g4q8q9x9nq). CMR is a **context-based model of memory search**, extending the **Temporal Context Model (TCM)** to explain how **temporal, semantic, and source context** jointly influence recall. You will fit your implementation to Polyn et al. (2009)'s task-switching free recall data and evaluate how well the model explains the observed recall patterns. 5 | 6 | ## Data Format and Preprocessing 7 | The dataset comprises sequences of presented and recalled words (concrete nouns) from multiple trials of a free recall experiment. As they were studying each word, participants were either asked to judge the referent's *size* (would it fit in a shoebox?) or *animacy* (does it refer to a living thing?). The dataset also includes information about the similarities in meaning between all of the stimuli (semantic similarities). 8 | 9 | Code for downloading and loading the dataset into Python, along with a more detailed description of its contents, may be found in the [template notebook for this assignment](https://github.com/ContextLab/memory-models-course/blob/main/content/assignments/Assignment_3%3AContext_Maintenance_and_Retrieval_Model/cmr_assignment_template.ipynb). 10 | 11 | ## High-level Model Description 12 | 13 | The Context Maintenance and Retrieval (CMR) model comprises three main components: 14 | 15 | ### 1. **Feature layer ($F$)** 16 | 17 | The feature layer represents the experience of the *current moment*. 
It comprises a representation of the item being studied (an indicator vector of length number-of-items + 1) concatenated with a representation of the current "source context" (also an indicator vector, of length number-of-sources). 18 | 19 | ### 2. **Context layer ($C$)** 20 | 21 | The context layer represents a *recency-weighted average* of experiences up to now. Analogous to the feature layer, the context layer comprises a representation of temporal context (a vector of length number-of-items + 1, representing a transformed version of the item history) concatenated with a representation of the source context (a vector of length number-of-sources, representing a transformed version of the history of sources). 22 | 23 | ### 3. **Association matrices** 24 | 25 | The feature and context layers of the model interact through a pair of association matrices: 26 | 27 | - $M^{FC}$ controls how activations in $F$ affect activity in $C$ 28 | - $M^{CF}$ controls how activations in $C$ affect activity in $F$ 29 | 30 | ## Model dynamics 31 | 32 | ### Encoding 33 | 34 | Items are presented one at a time in succession; all of the steps in this section are run for each new item. As described below, following a task shift an "extra" (non-recallable) item is "presented," causing $c_i$, $M^{FC}$, and $M^{CF}$ to update. 35 | 36 | 1. As each new item (indexed by $i$) is presented, the feature layer $F$ is set to $f_i = f_{item} \oplus f_{source}$, where: 37 | - $f_{item}$ is an indicator vector of length number-of-items + 1. Each item is assigned a unique position, along with an additional "dummy" item that is used to represent non-recallable items. 38 | - $f_{source}$ is an indicator vector of length number-of-sources. Each possible "source" (i.e., unique situation or task experienced alongside each item) gets one index. 39 | - $\oplus$ is the concatenation operator. 40 | 41 | 2. 
Next, the feature activations project onto the context layer: 42 |    - We compute $c^{IN} = M^{FC} f_i$ 43 |    - Then we evolve context using $c_i = \rho_i c_{i - 1} + \beta c^{IN}$, where 44 |      - $\rho_i = \sqrt{1 + \beta^2\left[ \left( c_{i - 1} \cdot c^{IN}\right)^2 - 1 \right]} - \beta \left( c_{i - 1} \cdot c^{IN}\right)$. 45 |      - In setting $\rho_i$, the computations are performed separately for the "item" and "source" parts of context, where $\beta_{enc}^{temp}$ is used to evolve context for the item features and $\beta_{source}$ is used to evolve context for the source features. 46 |      - After a task shift, a "placeholder" item is "presented", and a fourth drift rate parameter ($d$) is used in place of $\beta$. 47 | 48 | 3. Next, we update $M^{FC}$ (initialized to all zeros): 49 |    - Let $\Delta M^{FC}_{exp} = c_i f_i^T$ 50 |    - $M^{FC} = (1 - \gamma^{FC}) M^{FC}_{pre} + \gamma^{FC} \Delta M^{FC}_{exp}$ 51 | 52 | 4. Also update $M^{CF}$: 53 | 54 |    - $M^{CF}_{pre}$ is fixed at the matrix of LSA $\cos \theta$ across words, multiplied by $s$ 55 |    - $M^{CF}_{exp}$ is initialized to all zeros 56 |    - Let $\Delta M^{CF}_{exp} = \phi_i L^{CF} f_i c_i^T$, where 57 |      - $L^{\text{CF}} = 58 | \left[ 59 | \begin{array}{cc} 60 | L_{tw}^{\text{CF}} & L_{ts}^{\text{CF}} \\ 61 | L_{sw}^{\text{CF}} & L_{ss}^{\text{CF}} 62 | \end{array} 63 | \right]$ 64 |        - $t$ represents temporal context 65 |        - $s$ represents source *context* if listed first, or source *features* if listed second 66 |        - $w$ represents item features 67 |      - $L^{CF}_{sw}$ is a parameter of the model (all set to the same value-- size is number-of-sources by (number-of-items + 1)) 68 |      - $L^{CF}_{ts}$ is set to all zeros; size: (number-of-items + 1) by number-of-sources 69 |      - $L^{CF}_{ss}$ is set to all zeros; size: number-of-sources by number-of-sources 70 |      - $L^{CF}_{tw}$ is set to all ones; size: (number-of-items + 1) by (number-of-items + 1) 71 |      - $\phi_i = \phi_s \exp\{-\phi_d (i - 1)\} + 1$, where $i$ is the serial
position of the current item 72 | - $M^{CF} = M^{CF}_{pre} + M^{CF}_{exp}$ 73 | 74 | ### Retrieval 75 | 76 | Recall is guided by *context* using a *leaky accumulator*. Given the current context, the leaky accumulator process runs until either (a) any item crosses a threshold value of 1 (at which point the item is recalled, its features are reinstated in $F$, context is updated as described below, and the retrieval process restarts), **or** (b) more than 9000 time steps elapse without any item crossing the threshold (1 timestep is roughly equivalent to 1 ms). 77 | 78 | 1. First compute $f^{IN} = M^{CF} c_i$, where $c_i$ is the current context 79 | 80 | 2. Next, use $f^{IN}$ to guide the leaky accumulator: 81 | - Initialize $x_s$ to a vector of number-of-items zeros ($s$ indexes the number of steps in the accumulation process) 82 | - While no not-yet-recalled element (also ignoring the last "unrecallable" item) of $x_s$ is greater than or equal to 1: 83 | - Set $x_s = x_{s - 1} + \left( f^{IN} - \kappa x_{s - 1} - \lambda N x_{s - 1} \right) d \tau + \epsilon \sqrt{d \tau}$, where 84 | - $dt = 100$ 85 | - $d \tau = \frac{dt}{\tau}$ 86 | - $N_{ij} = 0$ if $i = j$ and $1$ otherwise. 87 | - $\epsilon \sim \mathcal{N}\left(0, \eta \right)$ 88 | - If any *already recalled* item crosses the threshold, reset its value to 0.95 (this simulates "[inhibition of return](https://en.wikipedia.org/wiki/Inhibition_of_return)"). 89 | - If any elements of $x_s$ drops below 0, reset those values to 0. 90 | - When an item "wins" the recall competition: 91 | - Reinstate its features in $F$ (as though we were presenting that item as the next $f_i$) 92 | - Update context from $f_i$ using the same equation for $c_i$ as during presentation. 93 | - Don't update $M^{CF}$ or $M^{FC}$. 94 | - Recall the item. 95 | 96 | ## **Fitting the Model** 97 | 98 | In total, there are 13 to-be-learned parameters of CMR (each is a scalar): 99 | 1. 
$\beta_{enc}^{temp}$: drift rate of temporal context during encoding 100 | 2. $\beta_{rec}^{temp}$: drift rate of temporal context during recall 101 | 3. $\beta^{source}$: drift rate of source context (during encoding) 102 | 4. $d$: temporary contextual drift rate during "placeholder item" presentations after source changes 103 | 5. $L_{sw}^{CF}$: scale of associative connections between source context and item features 104 | 6. $\gamma^{FC}$: relative contribution of $\Delta M_{exp}^{FC}$ vs. $M_{pre}^{FC}$ 105 | 7. $s$: scale factor applied to semantic similarities when computing $M_{pre}^{CF}$ 106 | 8. $\phi_s$: primacy effect scaling parameter 107 | 9. $\phi_d$: primacy effect decay parameter 108 | 10. $\kappa$: decay rate of leaky accumulator 109 | 11. $\lambda$: lateral inhibition parameter of leaky accumulator 110 | 12. $\eta$: noise standard deviation in leaky accumulator 111 | 13. $\tau$: time constant for leaky accumulator 112 | 113 | Fit the model to the following curves and measures from the Polyn et al. (2009) dataset (provided in the template notebook): 114 | - Probability of first recall 115 | - Serial position curve 116 | - Lag-CRP 117 | - Temporal clustering factor 118 | - Source clustering factor 119 | 120 | There are several possible ways to accomplish this. My recommended approach is: 121 | 1. Split the dataset into a training set and a test set 122 | 2. Compute the above curves/measures for the training set and concatenate them into a single vector 123 | 3. Use [skopt.optimizer.gp_minimize](https://scikit-optimize.github.io/stable/modules/generated/skopt.optimizer.gp_minimize.html#skopt.optimizer.gp_minimize) to find the set of model parameters that minimizes the mean squared error between the observed curves and the CMR-estimated curves (using the given parameters). 124 | 4. Compare the observed performance vs.
CMR-estimated performance (using the best-fitting parameters) for the test data 125 | 126 | 127 | ## Summary of Implementation Tasks 128 | 1. Use the descriptions above to implement CMR in Python 129 | 2. Write code for constructing the behavioral curves/measures listed above 130 | 3. Fit CMR's parameters to the dataset provided in the template notebook (compare with Table 1 in Polyn et al., 2009) 131 | 4. Plot the observed vs. CMR-estimated curves/measures 132 | 5. Write a **brief discussion** (3-5 sentences) addressing: 133 | - **Does the model explain the data well?** 134 | - **Which patterns are well captured?** 135 | - **Where does the model fail, and why?** 136 | - **Potential improvements or limitations of CMR.** 137 | 138 | ## Submission Instructions 139 | - Submit (on [canvas](https://canvas.dartmouth.edu/courses/71051/assignments/517355)) a **Google Colaboratory notebook** (or similar) that includes: 140 | - Your **full implementation** of the CMR model. 141 | - **Markdown cells** explaining your code, methodology, and results. 142 | - **All required plots** comparing model predictions to observed data. 143 | - **A short written interpretation** of model performance. 144 | 145 | Good luck, and happy modeling! 
-------------------------------------------------------------------------------- /content/assignments/Assignment_3:Context_Maintenance_and_Retrieval_Model/cmr_assignment_template.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": { 6 | "colab_type": "text", 7 | "id": "view-in-github" 8 | }, 9 | "source": [ 10 | "\"Open" 11 | ] 12 | }, 13 | { 14 | "cell_type": "markdown", 15 | "id": "1a163c99", 16 | "metadata": { 17 | "id": "1a163c99" 18 | }, 19 | "source": [ 20 | "# Submission Template\n", 21 | "\n", 22 | "This notebook provides a suggested starter template for completing the [CMR model assignment](https://contextlab.github.io/memory-models-course/assignments/Assignment_3%3AContext_Maintenance_and_Retrieval_Model/README.html).\n", 23 | "\n", 24 | "You should submit your assignment by uploading your completed notebook to [Canvas](https://canvas.dartmouth.edu/courses/71051/assignments/517355). Please ensure that your notebook runs without errors in [Google Colaboratory](https://colab.research.google.com/)." 
25 | ] 26 | }, 27 | { 28 | "cell_type": "code", 29 | "execution_count": 1, 30 | "id": "137eb31f", 31 | "metadata": { 32 | "id": "137eb31f" 33 | }, 34 | "outputs": [], 35 | "source": [ 36 | "import pandas as pd\n", 37 | "import numpy as np\n", 38 | "import matplotlib.pyplot as plt\n", 39 | "import seaborn as sns\n", 40 | "\n", 41 | "import requests\n", 42 | "import os\n", 43 | "\n", 44 | "from scipy.io import loadmat\n", 45 | "from tempfile import NamedTemporaryFile" 46 | ] 47 | }, 48 | { 49 | "cell_type": "markdown", 50 | "id": "1539bcff", 51 | "metadata": { 52 | "id": "1539bcff" 53 | }, 54 | "source": [ 55 | "Download the stimuli and behavioral data, returned as a dictionary with the following fields:\n", 56 | " - 'words': a list of 1297 strings (one per word in the stimulus pool)\n", 57 | " - 'sem_mat': a 1297 x 1297 NumPy array of semantic similarities (range: -1 to 1) between every pair of words in the stimulus pool\n", 58 | " - 'presented_items': a number-of-trials by list-length array of items for each presented list (represented using 0-indexed indices in the word pool)\n", 59 | " - 'recalled_items': a number-of-trials by max-number-of-recalled-items array of recalled items from each list (represented using 0-indexed indices in the word pool). -1s correspond to extra-list intrusions. 
Trials are right-padded with nans as needed.\n", 60 | " - 'task': a number-of-trials by list-length array of task labels for each presented item (0: size task; 1: animacy task)\n", 61 | " - 'session': session labels for each trial (a NumPy array of length number-of-trials)\n", 62 | " - 'subject': subject labels for each trial (a NumPy array of length number-of-trials)\n", 63 | " - 'list_type': list type labels for each trial (a NumPy array of length number-of-trials; 0: all items studied using the size task; 1: all items studied using the animacy task; 2: task-shift list)\n", 64 | " - 'list_length': a scalar value containing the list length (an integer)" 65 | ] 66 | }, 67 | { 68 | "cell_type": "code", 69 | "execution_count": 2, 70 | "id": "b32a7915", 71 | "metadata": { 72 | "id": "b32a7915" 73 | }, 74 | "outputs": [], 75 | "source": [ 76 | "def load_data():\n", 77 | " # Download the files\n", 78 | " base_url = \"https://raw.githubusercontent.com/ContextLab/memory-models-course/refs/heads/main/content/assignments/Assignment_3%3AContext_Maintenance_and_Retrieval_Model/PolyEtal09%20data/\"\n", 79 | "\n", 80 | " # download the stimuli\n", 81 | " response = requests.get(base_url + \"stimuli.mat\")\n", 82 | " response.raise_for_status()\n", 83 | " with NamedTemporaryFile(delete=False) as temp_file:\n", 84 | " temp_file.write(response.content)\n", 85 | " stimuli_data = loadmat(temp_file.name)\n", 86 | " words = [str(x[0][0]) for x in stimuli_data['tfr_wp']]\n", 87 | " sem_mat = stimuli_data['sem_mat']\n", 88 | " os.remove(temp_file.name)\n", 89 | "\n", 90 | " # download the behavioral data\n", 91 | " response = requests.get(base_url + \"behavior.mat\")\n", 92 | " response.raise_for_status()\n", 93 | " with NamedTemporaryFile(delete=False) as temp_file:\n", 94 | " temp_file.write(response.content)\n", 95 | " behavioral_data = loadmat(temp_file.name)\n", 96 | " presented_items = behavioral_data['data'][0][0][0][0][0]['pres_itemnos']\n", 97 | " recalled_items = 
behavioral_data['data'][0][0][0][0][0]['rec_itemnos']\n", 98 | " task = behavioral_data['data'][0][0][0][0][0]['pres_task']\n", 99 | " session = behavioral_data['data'][0][0][0][0][0]['session'].flatten()\n", 100 | " subject = behavioral_data['data'][0][0][0][0][0]['subject'].flatten()\n", 101 | " list_type = behavioral_data['data'][0][0][0][0][0]['listType'].flatten()\n", 102 | " os.remove(temp_file.name)\n", 103 | "\n", 104 | " return {'words': words,\n", 105 | " 'sem_mat': sem_mat,\n", 106 | " 'presented_items': presented_items - 1,\n", 107 | " 'recalled_items': recalled_items - 1,\n", 108 | " 'task': task,\n", 109 | " 'session': session,\n", 110 | " 'subject': subject,\n", 111 | " 'list_type': list_type,\n", 112 | " 'list_length': int(behavioral_data['data'][0][0][0][0][0]['listLength'].flatten()[0])}\n", 113 | "\n", 114 | "data = load_data()" 115 | ] 116 | } 117 | ], 118 | "metadata": { 119 | "colab": { 120 | "include_colab_link": true, 121 | "provenance": [] 122 | }, 123 | "kernelspec": { 124 | "display_name": "memory-course", 125 | "language": "python", 126 | "name": "memory-course" 127 | }, 128 | "language_info": { 129 | "codemirror_mode": { 130 | "name": "ipython", 131 | "version": 3 132 | }, 133 | "file_extension": ".py", 134 | "mimetype": "text/x-python", 135 | "name": "python", 136 | "nbconvert_exporter": "python", 137 | "pygments_lexer": "ipython3", 138 | "version": "3.11.0" 139 | } 140 | }, 141 | "nbformat": 4, 142 | "nbformat_minor": 5 143 | } 144 | -------------------------------------------------------------------------------- /content/assignments/Assignment_4:_Laplace_Temporal_Context_Model/README.md: -------------------------------------------------------------------------------- 1 | # Assignment 4: Laplace Temporal Context Model (Laplace TCM) 2 | 3 | ## Overview 4 | In this assignment, you will implement the **Laplace Temporal Context Model (Laplace TCM)** as described in **Shankar & Howard (2012)**. 
The Laplace TCM provides a **scale-invariant representation of time** by maintaining a **Laplace-transformed representation of temporal history**, which can be used to predict future stimuli based on past experience. 5 | 6 | Unlike previous assignments, instead of fitting the model to an experimental dataset, you will use **synthetic examples** to explore its behavior. Specifically, you will **reproduce Figures 2, 3, and 4 from the paper**, analyze how the quality of reconstructions changes with different numbers of nodes, and reflect on the model’s strengths and limitations. 7 | 8 | ## Model Description 9 | 10 | The Laplace TCM constructs a **compressed representation of time** based on two key steps: 11 | 1. **Encoding Past Events as a Laplace Transform** 12 | - A stimulus function **$f(\tau)$** (past stimulus history) is encoded into a **set of leaky integrators** with different decay rates **$s$**. 13 | - This encoding follows: 14 | 15 | $$ 16 | t(\tau, s) = \int_{-\infty}^{\tau} f(\tau') e^{s (\tau' - \tau)} d\tau' 17 | $$ 18 | 19 | where **$t(\tau, s)$** represents the Laplace transform of the stimulus history up to time **$\tau$**. 20 | 21 | 2. **Reconstructing Time from the Laplace Representation** 22 | - The **inverse Laplace transform** is approximated using a set of neurons called **time cells** with different peak latencies **$\tau^*$**: 23 | 24 | $$ 25 | T(\tau, \tau^*) = (-1)^k \frac{k!}{(s^{k+1})} \frac{d^k}{ds^k} t(\tau, s) \Bigg|_{s=-\frac{k}{\tau^*}} 26 | $$ 27 | 28 | - The function **$T(\tau, \tau^*)$** reconstructs past stimulus history, allowing **temporal predictions**. 29 | 30 | ## Implementation Tasks 31 | 32 | ### **Step 1: Implement the Model** 33 | - Implement the **Laplace encoding** function to generate **$t(\tau, s)$** from a stimulus function. 34 | - Implement the **approximate inverse Laplace transform** using the **Post (1930) inversion formula** to reconstruct **$T(\tau, \tau^*)$**. 
35 | - Implement a simple **Hebbian learning rule** to associate time cells with past stimuli. 36 | 37 | ### **Step 2: Reproduce Key Figures from the Paper** 38 | You will **generate synthetic stimuli** and reproduce the following figures: 39 | 40 | #### **Figure 2: Leaky Integrators Encoding Temporal History** 41 | - **Stimulus:** Two pulses at different times. 42 | - **Plot:** The responses of **leaky integrators** with different decay rates **$s$**. 43 | - **Insight:** Larger **$s$** values decay quickly, while smaller **$s$** retain long-term information. 44 | 45 | #### **Figure 3: Time Cell Representation of Stimulus History** 46 | - **Stimulus:** Same as Figure 2. 47 | - **Plot:** Activity of **time cells** with different peak latencies **$\tau^*$**. 48 | - **Insight:** Each time cell **stores a different temporal segment** of the past. 49 | 50 | #### **Figure 4: Time Cells Over Time** 51 | - **Stimulus:** Two pulses. 52 | - **Plot:** Evolution of **two time cells** over time. 53 | - **Insight:** Cells peak at fixed latencies, but **older events are reconstructed with increasing uncertainty**. 54 | 55 | ### **Step 3: Explore Model Performance** 56 | - **Vary the number of time cells** (length of **$T$** nodes) and evaluate how well the past is reconstructed. 57 | - **Hypothesis:** With more nodes, reconstructions become more precise. 58 | 59 | ### **Step 4: Write Reflections** 60 | - Discuss the strengths of the **Laplace TCM**. 61 | - Consider its limitations (e.g., loss of precision for older memories). 62 | - Suggest potential improvements. 63 | 64 | ## Grading Criteria 65 | - **Model Implementation (50%)** 66 | - Correct encoding and reconstruction of time. 67 | - Accurate implementation of **leaky integrators** and **time cells**. 68 | 69 | - **Figure Reproduction (30%)** 70 | - **10% each** for Figures **2, 3, and 4**. 71 | 72 | - **Exploration of Node Count Effects (10%)** 73 | - Vary **number of nodes** and analyze reconstruction quality. 
74 | 75 | - **Reflections (10%)** 76 | - Thoughtful discussion of **model strengths, weaknesses, and improvements**. 77 | 78 | ## Submission Instructions 79 | - Submit a **Google Colaboratory notebook** (or similar) that includes: 80 | - Your **full implementation** of the Laplace TCM. 81 | - **Markdown explanations** of your approach. 82 | - **Reproduced figures (2, 3, and 4)**. 83 | - **Analysis of node count effects**. 84 | - **A short written reflection** on the model. 85 | 86 | Good luck, and happy modeling! 87 | -------------------------------------------------------------------------------- /content/assignments/Final_Project/README.md: -------------------------------------------------------------------------------- 1 | # Final Project: Open-Ended Exploration of Memory Models 2 | 3 | ## Overview 4 | For your final project, you will **design and conduct an original investigation** into a memory model of your choice. Your goal is to **do something substantive and interesting** with the model and dataset. This project is open-ended: you are encouraged to explore novel ideas, extensions, and applications. You may work with **up to 2 other students** on this assignment. If you choose to work in a group, all group members will receive the same grade for the assignment, regardless of individual member contributions, group dynamics, etc. 5 | 6 | ## Choosing Your Model and Dataset 7 | You may select: 8 | - **A model from a previous assignment** (e.g., Hopfield Networks, SAM, CMR, Laplace TCM). 9 | - **A different model from the literature** (e.g., a model from a published paper of your choice). 10 | - **A new model that you design yourself**. 11 | 12 | Similarly, you may select: 13 | - **A dataset from a previous assignment** (e.g., Murdock, 1962 free recall data). 14 | - **A different experimental dataset** from sources such as the [Penn Memory Lab Data Archive](https://memory.psych.upenn.edu/Data_Archive). 
15 | - **A synthetic dataset** that you generate to test specific properties of a model. 16 | 17 | ## Project Scope and Expectations 18 | Your project should explore the model and dataset in a **meaningful, non-trivial** way. Some possible directions include: 19 | 20 | - **Examining the model’s strengths, limitations, or failure modes** 21 | (e.g., testing whether a model’s predictions break down in edge cases). 22 | - **Extending or modifying the model** 23 | (e.g., adding new parameters, changing the retrieval dynamics, incorporating neural constraints). 24 | - **Applying the model to a new type of dataset** 25 | (e.g., fitting a free recall model to recognition memory data). 26 | - **Comparing multiple models** 27 | (e.g., testing whether SAM or CMR better predicts a given dataset). 28 | 29 | ## Deliverables 30 | Your submission should include: 31 | 32 | 1. **Code Implementation** 33 | - Submit a **Google Colaboratory notebook (or similar format)** with well-commented code. 34 | - Ensure that all figures are generated within the notebook. 35 | - The notebook should **run without errors**. 36 | 37 | 2. **Project Report (2–5 pages, PDF format)** 38 | - **Introduction:** Describe the **model**, **dataset**, and the research question you are addressing. 39 | - **Methods:** Explain how you implemented the model, the experiments you ran, and any modifications you made. 40 | - **Results:** Present key findings with clear figures and tables. 41 | - **Discussion:** Interpret the results. What insights did you gain? Were there unexpected outcomes? What could be improved? 42 | - **References:** Cite any papers, datasets, or external sources you used. 43 | 44 | ## Evaluation Criteria 45 | Your project will be graded based on the following criteria: 46 | 47 | - **Creativity & Interestingness (30%)** 48 | - Is the project novel, insightful, or thought-provoking? 49 | - Does it go beyond trivial or obvious analyses? 
50 | 51 | - **Correctness of Implementation (30%)** 52 | - Is the model correctly implemented and well-documented? 53 | - Do the results make sense given the model and data? 54 | 55 | - **Logic & Clarity (20%)** 56 | - Is the report well-organized and easy to understand? 57 | - Are the arguments and conclusions well-reasoned? 58 | 59 | - **Depth & Detail (20%)** 60 | - Does the project explore the model in sufficient depth? 61 | - Are analyses thorough and well-supported by results? 62 | 63 | ## Submission Instructions 64 | - Submit a **Google Colaboratory notebook (or equivalent) containing your implementation**. 65 | - Submit a **PDF report (2–5 pages)** with figures and results. 66 | - Ensure that all files are well-organized and clearly named. 67 | 68 | Good luck, and enjoy the exploration! 69 | -------------------------------------------------------------------------------- /content/assignments/README.md: -------------------------------------------------------------------------------- 1 | # Recommended approach for implementing models directly from original sources 2 | 3 | 1. Read the original paper. In your first pass, skip over any sections that you find confusing or difficult to parse; your goal is to get a high-level understanding. You can also use tools like [NotebookLM](https://notebooklm.google.com/) or [ChatGPT](https://chatgpt.com/) to help you summarize the material or present it in an easily digestible format. If there are additional "background" papers, read those (for a high-level understanding) too. Focus on the Abstract, Introduction, and Discussion sections. 4 | 2. Read the primary paper again. Focus on the Results section and identify the most important results. Decide what it would mean to "replicate" the core findings in the paper. Usually this means reproducing one or more of the figures. Or sometimes it can mean generating new figures that evaluate, test, or examine the content in a new way. 
Sometimes this could entail running one or more statistical tests. Sketch out (in a notebook, or wherever you like) what your target figures or analyses are. In assignments for this course, the "core results" will generally be given to you in the assignment instructions. But in the general case (i.e., in "real life") you'll need to figure this out yourself. 5 | 3. Do a *third* pass through the paper. This is the big one. Read the Results and Methods sections very carefully, sentence by sentence. Highlight any sentence or phrase that is directly relevant to reproducing the paper's core findings. These could include things like equations, diagrams (graphical models, flow charts), descriptions of implementation details, code or algorithms (snippets or pseudocode), key terms, and other stuff along those lines. Make sure you understand *every single one of the things you highlighted*. Use any resource at your disposal to gain a deep understanding: 6 | 7 | - Ask an AI tool to help you get started. Important caveat: LLMs are very likely to make up information that "seems right." So you need to be very careful about relying too much on LLMs if you want to build up an *accurate* understanding. 8 | - Do a web search to find other articles, blog posts, tutorials, videos, or other relevant resources. 9 | - If the authors have shared their code and/or data, try running it yourself. Go through the relevant parts of the code line-by-line to make sure you understand what it's doing. 10 | - Ask another person for help (in this course, our [Discord workspace](https://discord.gg/R6kM9bjpFj) can be a good forum for asking questions!) 11 | - Take plenty of notes to help you remember the important ideas later, and to help organize your thinking. 12 | 13 | 4. Use the highlighted text and your notes to start a todo list. List every single step you think will be needed to reproduce the paper's core findings. 
I suggest that you start with larger tasks (e.g., write a Python function that implements Equation 3) and then, as you start to work on them, break them down into smaller sub-tasks as needed. I like to organize my todo list digitally so that I can easily shift things around. If you are using a GitHub repository to organize your code, [GitHub Projects](https://docs.github.com/en/issues/planning-and-tracking-with-projects/learning-about-projects/about-projects) can be a nice way of tracking and managing your task list. Something simpler like a text file or Google Doc with a numbered list can also work well. I also sometimes find it useful to write out my todo lists on a digital tablet (I use a [reMarkable](https://remarkable.com)), since I often "think better" when I write things out manually instead of typing them. 14 | 15 | As you're going through your task list, if your experience is anything like mine, you may realize that some detail isn't actually as clear as you had initially thought it was. If so, you'll want to make sure you understand it before completing the relevant task. It's important not to take shortcuts; implementing a model incorrectly doesn't help anyone! 16 | 17 | You may also find that there's some detail that is *missing* from the paper. Unfortunately this is actually fairly common. When I think some detail is missing, the steps I like to take are: 18 | - Carefully re-read the relevant text to make sure I didn't just miss it in my previous readings. Sometimes there's important detail hidden in footnotes, parenthetical statements, or even seemingly minor phrasing choices. 19 | - Take a look at the paper's supplemental materials, if there are any. That's another standard hiding place for low-level details. 20 | - If the authors have shared their code, you may be able to piece together the missing information from that. 21 | - If all else fails, try emailing the authors directly! 
The corresponding author's email address is usually listed in the paper's Author Information section. Or if you know of other people working on the same model (ideally direct comparisons or follow-ups that would have required them to use a correct implementation), sometimes asking them will work. Importantly: 22 | - Organize your thoughts and ask all of your questions up front (it's fine to have follow-up questions, but it can be off-putting if you send a flurry of partially thought-through emails in rapid succession). 23 | - Keep your email as brief as possible. If you absolutely must write out a detailed explanation, then also include an initial paragraph to orient the recipient (e.g., tell them that you have some questions about their paper, and say that details are included below). 24 | 25 | ## Reminder: be kind to yourself (and your brain)! 26 | 27 | Implementing models from scratch, and piecing research together directly from primary sources, is hard! It's helpful to give yourself plenty of time, maintain a positive attitude, ask for help when you need it, and just keep pushing it forward one tiny step at a time. If you find yourself completely stuck, try doing some sort of mindless exercise (walks are great!), meditate, take a nap (or go to sleep), have a snack, or anything else that will give your mind a break. Then, when you're feeling fresher, re-review your notes and see if you can push forward a tiny bit more. 28 | 29 | When I am *really* stuck, my other often-used trick is to explain (usually to my wife, or even just talking out loud to myself!) what is so confusing, or why I'm stuck. The act of forcing yourself to talk through the nitty gritty details of how something "should" work vs. how it "does" work can sometimes be enough to uncover where the blocks are. 
-------------------------------------------------------------------------------- /content/instructions.md: -------------------------------------------------------------------------------- 1 | # Getting started 2 | 3 | ## Overview 4 | We will use the following tools in this course: 5 | - [GitHub](https://www.github.com): used to download code and data, collaborate with other students, and submit course assignments 6 | - [Google Colaboratory](https://colab.research.google.com/): a Google resource we will use to write code, download data, and run analyses 7 | - [Discord](https://discord.com/): used to coordinate all course communication. Use this link to join our class server: 8 | 9 | [![](https://dcbadge.vercel.app/api/server/R6kM9bjpFj)](https://discord.gg/R6kM9bjpFj) 10 | 11 | ## Setup 12 | 1. Start by creating a free [GitHub account](https://www.github.com) if you don't already have one. (If you already have an account, you may use it for this course.) 13 | 2. Next, sign into the course's [Discord workspace](https://discord.gg/R6kM9bjpFj). You can ask questions and get help with all aspects of the course via our Discord community. You'll need to create a (free) Discord account. 14 | 3. If you don't already have one, create a [Google account](http://google.com/). (If you already have an account, you may use it for this course.) Make sure you can sign into Colaboratory using [this link](https://colab.research.google.com/). -------------------------------------------------------------------------------- /content/outline.md: -------------------------------------------------------------------------------- 1 | # Annotated Course Schedule and Outline 2 | 3 | Note: papers that report on models we will be implementing in the assignments are denoted by asterisks. 4 | 5 | ## Week 1: Introduction, Hopfield Networks 6 | - Discussions: 7 | - What is memory? 8 | - What does it mean to build a "model" of memory? 9 | - Are neural networks like biological brains? 
10 | - Hebbian learning and Hopfield networks 11 | - Readings: 12 | - [Hopfield (1982)](https://www.dropbox.com/scl/fi/iw9wtr3xjvrbqtk38obid/Hopf82.pdf?rlkey=x3my329oj9952er68sr28c7xc) 13 | - [Hopfield (1984)](https://www.dropbox.com/scl/fi/7wktieqztt60b8wyhg2au/Hopf84.pdf?rlkey=yi3baegby8x6olxznsvm8lyxz) 14 | - [Amit et al. (1985)](https://www.dropbox.com/scl/fi/3a3adwqf70afb9kmieezn/AmitEtal85.pdf?rlkey=78fckvuuvk9t3o9fbpjrmn6de)* 15 | 16 | - Discussion: Hopfield network simulations (storage capacity, cued recall, contextual drift) 17 | - **Assignment 1**: [Explore Hopfield Networks](https://contextlab.github.io/memory-models-course/assignments/Assignment_1%3AHopfield_Networks/README.html) 18 | 19 | 20 | ## Weeks 2--3: Free recall, Short Term and Long Term Memory 21 | - Discussions: 22 | - free recall and memory search 23 | - naturalistic memory tasks 24 | - Readings: 25 | - [Atkinson and Shiffrin (1968)](https://www.dropbox.com/scl/fi/rpllozjcv704okckjdy5k/AtkiShif68.pdf?rlkey=i0azhj9mqxws7bxocbl65j88d) 26 | - [Kahana (2012)](https://www.dropbox.com/scl/fi/ujl8yvxqzcb1gf32to4zb/Kaha12_SAM_model_excerpt.pdf?rlkey=254wtw4fm7xnpzelno2ykrxzu) (excerpt from Chapter 7)* 27 | - [Chen et al. (2016)](https://www.dropbox.com/scl/fi/wg6fledn7g88ig5mk3kob/ChenEtal16.pdf?rlkey=9jqu7y2apqv2hrj8qepn4alwa) 28 | - [Heusser et al. 
(2021)](https://www.dropbox.com/scl/fi/w7z2yvdfzmhowh5hvg53e/HeusEtal21.pdf?rlkey=omad9klqeiu2kc71w7guc5xxq) 29 | - Data science primer: 30 | - Where to find behavioral datasets: [Penn Behavioral Data Archive](https://memory.psych.upenn.edu/Data_Archive), [OpenCogData](https://nimh-dsst.github.io/OpenCogData/), [OpenNeuro](https://openneuro.org/), [UCLA Psychological Dataset Archive](https://guides.library.ucla.edu/psychology/data), [Context Lab](https://www.context-lab.com/publications) 31 | - Web scraping with [requests](https://pypi.org/project/requests/) and [Beautiful Soup](https://beautiful-soup-4.readthedocs.io/en/latest/) 32 | - Data manipulation with [Pandas](https://pandas.pydata.org/) 33 | - Text analyses with [Scikit-learn](https://scikit-learn.org), [NLTK](https://www.nltk.org/), and [HuggingFace Transformers](https://huggingface.co/docs/transformers/en/index) 34 | - **Assignment 2**: [Build the Search of Associative Memory Model](https://contextlab.github.io/memory-models-course/assignments/Assignment_2%3ASearch_of_Associative_Memory_Model/README.html) 35 | 36 | 37 | ## Weeks 4--5: Temporal Context and Multi-Timescale Models 38 | - Discussion: the temporal scales of memory, event boundaries, and situation models 39 | - Readings: 40 | - [Howard and Kahana (2002)](https://www.dropbox.com/scl/fi/yjnusbmoixbf4aen1mkx8/HowaKaha02.pdf?rlkey=ktt245cw09szubjnoe4cco1tz) 41 | - [Polyn et al. (2009)](https://www.dropbox.com/scl/fi/98pui63j3o62xu96ciwhy/PolyEtal09.pdf?rlkey=42sc17ll573sm83g4q8q9x9nq)* 42 | - [Baldassano et al. (2017)](https://www.dropbox.com/scl/fi/wgn96xni9fevoo6h1yngn/BaldEtal17.pdf?rlkey=wg9qugm1szfw50xao6k9047j6) 43 | - [Honey et al. (2012)](https://www.dropbox.com/scl/fi/l3vzzc56jjhq9tc4cheev/HoneEtal12.pdf?rlkey=56wf835omj2i6gkdh0b8n38cx) 44 | - [Manning et al. 
(2014)](https://www.dropbox.com/scl/fi/a1zltxk43dn8qmm7puaql/MannEtal14d.pdf?rlkey=wg2ikym1svvl68hbuw4f5cpax) 45 | - [Ranganath and Ritchey (2012)](https://www.dropbox.com/scl/fi/asec4p68900eekp6vtdgb/RangRitc12.pdf?rlkey=hqixac8eij65hmn62stzvo4mp) 46 | - [DuBrow and Davachi (2016)](https://www.dropbox.com/scl/fi/86gkrz0a9k57556tz4d2z/DuBrDava16.pdf?rlkey=v6hxkbzz80m48pz4a2425q6bn) 47 | - [Zacks and Tversky (2001)](https://www.dropbox.com/scl/fi/28104fmu9kzk55znyxntd/ZackTver01.pdf?rlkey=2ytdz0e9agny4hmllcw7hvi8g) 48 | - [Zwaan and Radvansky (1998)](https://www.dropbox.com/scl/fi/iqp70crdmpd5m97zzv45c/ZwaaRadv98.pdf?rlkey=habx93aplwkkw829vj9vkv52a) 49 | - [Brunec et al. (2018)](https://www.dropbox.com/scl/fi/1eu28rpwyp8eg2sn4fgau/BrunEtal18b.pdf?rlkey=64dnn3onc90o59fuv33peil6g) 50 | - **Assignment 3**: [Build the Context Maintenance and Retrieval Model](https://contextlab.github.io/memory-models-course/assignments/Assignment_3%3AContext_Maintenance_and_Retrieval_Model/README.html) 51 | 52 | ## Weeks 6--7: Laplace Transforms 53 | - Discussion: is TCM *really* multi-timescale? 54 | - Discussion: Introduction to the Laplace Transform (and its inverse) and its relevance to memory 55 | - Readings: 56 | - [Shankar and Howard (2012)](https://www.dropbox.com/scl/fi/cqh37rsdn11f6egdiskvf/ShanHowa12.pdf?rlkey=45qhdi5u2fmlxd4azq8is3j89)* 57 | - [Manning (2024)](https://www.dropbox.com/scl/fi/9amk5mlgeop0srtpwqesg/Mann23.pdf?rlkey=lc785xhq1pcjqdtarn692e21k) 58 | - **Assignment 4**: [Implement the Laplace Temporal Context Model](https://contextlab.github.io/memory-models-course/assignments/Assignment_4%3A_Laplace_Temporal_Context_Model/README.html) 59 | 60 | ## Week 8: Biologically Inspired Network Models 61 | - Discussion: what does "biologically inspired" mean in practice? 62 | - Readings: 63 | - [McClelland et al. 
(1995)](https://imss-www.upmf-grenoble.fr/prevert/MasterICA/SpecialiteSC/FichiersPDF/Why%20there%20are%20complementary%20learning%20systems%20in%20the%20hippocampus%20and%20neocortex%20insights%20from%20th.pdf) 64 | - [Rumelhart et al. (1986)](http://www.cs.toronto.edu/~fritz/absps/pdp2.pdf) 65 | - [O'Reilly and Norman (2002)](http://www.princeton.edu/~compmem/normorei02.pdf) 66 | - [Schapiro et al. (2017)](https://www.dropbox.com/scl/fi/no2647c2witr2knb76gs2/SchaEtal17.pdf?rlkey=bpon63fy8g2rl3y9csabq748o) 67 | 68 | ## Week 9: Recurrent networks, LSTM networks, Transformers 69 | - Readings: 70 | - [Schuster and Paliwal (1997)](https://www.dropbox.com/scl/fi/0guahq2kcbria108xyb9j/SchuPali97.pdf?rlkey=yp1a8272qhljeob68amdpxjki) 71 | - [Hochreiter and Schmidhuber (1997)](https://deeplearning.cs.cmu.edu/S23/document/readings/LSTM.pdf) 72 | - [Radford et al. (2019)](https://insightcivic.s3.us-east-1.amazonaws.com/language-models.pdf) 73 | - Tutorial video: [Let's build GPT: from scratch, in code, spelled out](https://www.youtube.com/watch?v=kCc8FmEb1nY) 74 | - **Assignment 5**: [Final Project](https://contextlab.github.io/memory-models-course/assignments/Final_Project/README.html) 75 | 76 | ## Week 10: Final project presentations 77 | - Discussion: ad-hoc discussions and demos of final projects 78 | - **Final projects are due on the last day of class at 11:59PM Eastern Time** -------------------------------------------------------------------------------- /content/slides/how_to_read.md: -------------------------------------------------------------------------------- 1 | --- 2 | marp: true 3 | theme: default 4 | class: invert 5 | math: katex 6 | author: Jeremy R. Manning 7 | --- 8 | 9 | # 📚 How to Read a Paper 10 | ## (When You're Building a Model) 11 | 12 | --- 13 | 14 | ## What’s the goal? 
15 | 16 | - Extract what’s **important** 17 | - Figure out how to **replicate core results** 18 | - Build something that **actually works** 19 | 20 | --- 21 | 22 | ## Step 1: Skim first 🔍 23 | 24 | - Read the paper **once**, high-level only 25 | - Focus on: 26 | - **Abstract** 27 | - **Intro** 28 | - **Discussion** 29 | 30 | 🧠 Skip hard parts — it's okay! 31 | 32 | --- 33 | 34 | ## Step 2: Zoom in 🔬 35 | 36 | Re-read the paper: 37 | 38 | - Focus on the **Results** 39 | - Find the **core result(s)** you’ll replicate 40 | 41 | 🧪 Reproducing a figure is often enough! 42 | 43 | --- 44 | 45 | ## Step 3: Deep dive 🧠 46 | 47 | Go **sentence by sentence** through Results + Methods 48 | Highlight *anything* needed to reproduce results: 49 | 50 | - Equations 🧮 51 | - Algorithms ⚙️ 52 | - Diagrams 🧭 53 | - Code snippets 💻 54 | - Implementation notes 🧱 55 | 56 | --- 57 | 58 | ## Understand everything 59 | 60 | For each highlighted thing: 61 | 62 | - Use AI (carefully!) 🧑‍💻 63 | - Watch a video / read a blog 📺 64 | - Try running shared code 🔁 65 | - Ask someone 🗣️ 66 | - Take **lots of notes** 📝 67 | 68 | --- 69 | 70 | ## Step 4: Make a task list ✅ 71 | 72 | - From your notes, write out a **to-do list** 73 | - Start high-level → break into sub-tasks 74 | - Track your progress (Google Doc, GitHub Projects, etc.) 75 | 76 | 💡 Tip: Write it out by hand if it helps you think 77 | 78 | --- 79 | 80 | ## Missing a detail? 81 | 82 | Try this: 83 | 84 | - Re-read carefully (look for footnotes!) 85 | - Check **supplemental materials** 86 | - Look at **shared code** 87 | - Email the authors! 📬 88 | 89 | --- 90 | 91 | ## How to email an author 💌 92 | 93 | - Be **clear and concise** 94 | - Ask **all your questions up front** 95 | - Be respectful of their time 96 | - Say thank you 🙏 97 | 98 | --- 99 | 100 | ## Be kind to yourself ❤️ 101 | 102 | This is hard! 103 | 104 | - Take breaks 🧘 105 | - Talk it out (even to yourself!) 
🗯️ 106 | - Go for a walk 🚶 107 | - Sleep on it 😴 108 | - Keep chipping away 🔨 109 | 110 | --- 111 | 112 | ## Final tips 🧠 113 | 114 | - Building from primary sources is *real science* 115 | - Confusion is part of the process 116 | - Ask questions, keep notes, keep going! 117 | 118 | You got this 💪 119 | -------------------------------------------------------------------------------- /content/slides/intro_to_models.md: -------------------------------------------------------------------------------- 1 | --- 2 | marp: true 3 | theme: default 4 | class: invert 5 | math: katex 6 | author: Jeremy R. Manning 7 | --- 8 | 9 | # 🧠 What is a Memory Model? 10 | 11 | --- 12 | 13 | ## Models of memory = 🤖 for the mind? 14 | 15 | A **memory model** is like a *little machine* that: 16 | 17 | - Takes in **sequences** of inputs 18 | - Stores **representations** 19 | - Produces **memory behaviors** 20 | 21 | 🎯 It lets us *simulate* what minds (and brains) do! 22 | 23 | --- 24 | 25 | ## What's the *input* to a model? 🧩 26 | 27 | Usually: 28 | 29 | - A **list or sequence** of experiences 30 | - Could be: 31 | - 📝 Words 32 | - 🧠 Concepts 33 | - 🌍 Sensory events 34 | - 🕰️ Time-varying contexts 35 | 36 | --- 37 | 38 | ## What's the *output* from a model? 🎬 39 | 40 | Behaviors we can measure: 41 | 42 | - 🗣️ **Free recall**, **recognition**, etc. 43 | - ⌛ **Timing**, **errors**, **response curves** 44 | - 🔄 How memory *changes* with new input 45 | 46 | --- 47 | 48 | ## 🤔 What kinds of models will we see? 49 | 50 | From the [course outline](https://contextlab.github.io/memory-models-course/outline.html): 51 | 52 | - 🧠 **Hopfield nets** (attractor memory) 53 | - 🎯 **Search & recall** processes 54 | - 🧩 **Contextual encoding** (e.g. retrieved context model) 55 | - 🧪 **Laplace-transform–based** systems 56 | - 🧬 **Biological circuits** for memory 57 | - 🤖 **Modern deep nets** (LSTMs, Transformers) 58 | 59 | 📚 We’ll build, analyze, and compare them! 
60 | 61 | --- 62 | 63 | ## 🔍 How do we evaluate models? 64 | 65 | - ❓ **Does it fit the data?** (qualitatively or quantitatively?) 66 | - 🧪 **Does it predict new behaviors?** 67 | - 🧠 **Does it teach us something about cognition or the brain?** 68 | 69 | --- 70 | 71 | ## 🧱 All models are approximations 72 | 73 | > “All models are wrong, but some are useful.” 74 | > — George E. P. Box (1976) 75 | 76 | We’re not trying to recreate a brain; we’re building **simplified systems** to *understand memory better*. 77 | 78 | --- 79 | 80 | ## 🎯 Goals when using models 81 | 82 | - Break down **complex behavior** into understandable pieces 83 | - Generate **testable predictions** 84 | - Build bridges between **psychology**, **neuroscience**, and **machine learning** 85 | 86 | 🛠️ Memory models are tools — let’s learn how to use them. 87 | -------------------------------------------------------------------------------- /content/slides/week_1.md: -------------------------------------------------------------------------------- 1 | # Week 1 2 | 3 | ### April 3, 2025: 4 | - [Welcome and Overview](welcome.html) 5 | - [Introduction to Models](intro_to_models.html) 6 | - [How to Read](how_to_read.html) 7 | -------------------------------------------------------------------------------- /content/slides/welcome.md: -------------------------------------------------------------------------------- 1 | --- 2 | marp: true 3 | theme: default 4 | class: invert 5 | math: katex 6 | author: Jeremy R. Manning 7 | --- 8 | 9 | # 🧠 Models of Memory 10 | ### PSYC 133 @ Dartmouth 11 | 12 | **Instructor**: Dr. Jeremy R. Manning 13 | **Time**: Thu 2–3:30 & Fri 3–4:30 14 | **Location**: Moore Library 15 | 16 | --- 17 | 18 | # What’s This Course About? 🤔 19 | 20 | We explore **how memory works** by building **computational models** of: 21 | 22 | - Human memory 23 | - Neural networks 24 | - Biological systems 25 | 26 | 🧪 Expect **hands-on coding**, **discussions**, and **projects**! 
27 | 28 | --- 29 | 30 | # Goals of the Course 🎯 31 | 32 | By the end, you'll be able to: 33 | 34 | - Build **memory models** from scratch 💻 35 | - Understand how **humans store and recall** info 🧍‍♂️ 36 | - Critically evaluate **computational models** 🧠 37 | 38 | --- 39 | 40 | # Do I Need Experience? 🛠️ 41 | 42 | **Required**: 43 | - Python 🐍 44 | 45 | **Recommended**: 46 | - Stats / Probability 📊 47 | - Bonus: AI, ML, Linear Algebra, Philosophy of Mind ✨ 48 | 49 | --- 50 | 51 | # Learning Style 📚 52 | 53 | This class is **experiential**! 54 | 55 | - 🗣️ In-class **discussions** 56 | - 🧪 **Labs** in Google Colab 57 | - 📈 **Problem sets** as mini research projects 58 | - 🤝 **Group final project** 59 | 60 | --- 61 | 62 | # What We’ll Cover 🗓️ 63 | 64 | Week-by-week highlights: 65 | 66 | 1. **Hopfield Networks** 67 | 2. **Memory Search & Recall** 68 | 3. **Context Models** 69 | 4. **Multi-timescale Memory** 70 | 5. **Laplace Transform Models** 71 | 6. **Biological Memory Networks** 72 | 7. **LSTMs & Transformers** 73 | 8. **Final Presentations** 🎉 74 | 75 | --- 76 | 77 | # Tools We’ll Use 🧰 78 | 79 | - **Google Colab** — run and share code 80 | - **GitHub** — manage your models & projects 81 | - **Discord** — chat & collaborate 💬 82 | 83 | --- 84 | 85 | # Grading Breakdown 📊 86 | 87 | - 🧪 **4 Problem Sets** — 60% 88 | - 🧠 **Final Project** — 40% 89 | 90 | 💡 You can **collaborate**, but submit your own work 91 | Final projects are done in **pairs or small groups** 92 | 93 | --- 94 | 95 | # Late & Honor Policies ⏰ 96 | 97 | - 🔄 **10% per week late** (undergrads) 98 | - 🤝 Follow the **Honor Code** 99 | - ✅ AI use allowed — just **cite it** 100 | 101 | --- 102 | 103 | # Let’s Build Together! 💡 104 | 105 | Come curious, come ready to code, and... 106 | 107 | ✨ **Make memory models awesome!** ✨ 108 | 109 | Reach out anytime: **jeremy@dartmouth.edu** 110 | 111 | --- 112 | 113 | # Questions? 
🧾 114 | 115 | Check: 116 | - 📎 Syllabus (PDF) 117 | - 🧭 Annotated Outline 118 | - 📌 Course site + Discord 119 | 120 | 👋 Can’t wait to see what you create! 121 | -------------------------------------------------------------------------------- /content/themes/biologically_inspired_networks.md: -------------------------------------------------------------------------------- 1 | # Biologically Inspired Network Models 2 | 3 | - Discussion: what does "biologically inspired" mean in practice? 4 | - Readings: 5 | - [McClelland et al. (1995)](https://imss-www.upmf-grenoble.fr/prevert/MasterICA/SpecialiteSC/FichiersPDF/Why%20there%20are%20complementary%20learning%20systems%20in%20the%20hippocampus%20and%20neocortex%20insights%20from%20th.pdf) 6 | - [Rumelhart et al. (1986)](http://www.cs.toronto.edu/~fritz/absps/pdp2.pdf) 7 | - [O'Reilly and Norman (2002)](http://www.princeton.edu/~compmem/normorei02.pdf) 8 | - [Schapiro et al. (2017)](https://www.dropbox.com/scl/fi/no2647c2witr2knb76gs2/SchaEtal17.pdf?rlkey=bpon63fy8g2rl3y9csabq748o&dl=1) -------------------------------------------------------------------------------- /content/themes/data_science_primer.md: -------------------------------------------------------------------------------- 1 | # Data Science Primer 2 | 3 | - Where to find behavioral datasets: [Penn Behavioral Data Archive](https://memory.psych.upenn.edu/Data_Archive), [OpenCogData](https://nimh-dsst.github.io/OpenCogData/), [OpenNeuro](https://openneuro.org/), [UCLA Psychological Dataset Archive](https://guides.library.ucla.edu/psychology/data), [Context Lab](https://www.context-lab.com/publications) 4 | - Web scraping with [requests](https://pypi.org/project/requests/) and [Beautiful Soup](https://beautiful-soup-4.readthedocs.io/en/latest/) 5 | - Data manipulation with [Pandas](https://pandas.pydata.org/) 6 | - Text analyses with [Scikit-learn](https://scikit-learn.org), [NLTK](https://www.nltk.org/), and [HuggingFace 
Transformers](https://huggingface.co/docs/transformers/en/index) -------------------------------------------------------------------------------- /content/themes/deep_networks.md: -------------------------------------------------------------------------------- 1 | # Deep Networks 2 | - Readings: 3 | - [Schuster and Paliwal (1997)](https://www.dropbox.com/scl/fi/0guahq2kcbria108xyb9j/SchuPali97.pdf?rlkey=yp1a8272qhljeob68amdpxjki&dl=1) 4 | - [Hochreiter and Schmidhuber (1997)](https://deeplearning.cs.cmu.edu/S23/document/readings/LSTM.pdf) 5 | - [Radford et al. (2019)](https://insightcivic.s3.us-east-1.amazonaws.com/language-models.pdf) 6 | - Tutorial video: [Let's build GPT: from scratch, in code, spelled out](https://www.youtube.com/watch?v=kCc8FmEb1nY) 7 | 8 | ## Recurrent networks 9 | 10 | ## LSTM networks 11 | 12 | ## Transformers -------------------------------------------------------------------------------- /content/themes/final_projects/README.md: -------------------------------------------------------------------------------- 1 | # Final Project Demo Reel 2 | 3 | (placeholder for final project submissions) -------------------------------------------------------------------------------- /content/themes/final_projects/placeholder.md: -------------------------------------------------------------------------------- 1 | # Example Project 1 2 | Authors: Person 1, Person 2, Person 3 3 | 4 | (placeholder text...) -------------------------------------------------------------------------------- /content/themes/free_recall.md: -------------------------------------------------------------------------------- 1 | # Free Recall, Short Term and Long Term Memory 2 | 3 | - Discussions: 4 | - free recall and memory search 5 | - naturalistic memory tasks 6 | - Readings: 7 | - [Atkinson and Shiffrin (1968)](https://www.dropbox.com/scl/fi/rpllozjcv704okckjdy5k/AtkiShif68.pdf?rlkey=i0azhj9mqxws7bxocbl65j88d&dl=1)* 8 | - [Chen et al. 
(2016)](https://www.dropbox.com/scl/fi/wg6fledn7g88ig5mk3kob/ChenEtal16.pdf?rlkey=9jqu7y2apqv2hrj8qepn4alwa&dl=1) 9 | - [Heusser et al. (2021)](https://www.dropbox.com/scl/fi/w7z2yvdfzmhowh5hvg53e/HeusEtal21.pdf?rlkey=omad9klqeiu2kc71w7guc5xxq&dl=1) -------------------------------------------------------------------------------- /content/themes/hopfield_networks.md: -------------------------------------------------------------------------------- 1 | # Hebbian learning and Hopfield networks 2 | 3 | - Reading: [Hopfield (1982)](https://www.pnas.org/doi/abs/10.1073/pnas.79.8.2554)* 4 | - Discussion: Hopfield network simulations (storage capacity, cued recall, contextual drift) 5 | -------------------------------------------------------------------------------- /content/themes/intro.md: -------------------------------------------------------------------------------- 1 | # Introduction 2 | 3 | Discussions: 4 | - What is memory? 5 | - What does it mean to build a "model" of memory? 6 | - Are neural networks like biological brains? 
-------------------------------------------------------------------------------- /content/themes/laplace_tcm.md: -------------------------------------------------------------------------------- 1 | # The Laplace Temporal Context Model (Laplace-TCM) 2 | 3 | - Readings: 4 | - [Shankar and Howard (2012)](https://www.dropbox.com/scl/fi/cqh37rsdn11f6egdiskvf/ShanHowa12.pdf?rlkey=45qhdi5u2fmlxd4azq8is3j89&dl=1)* 5 | - [Manning (2024)](https://www.dropbox.com/scl/fi/9amk5mlgeop0srtpwqesg/Mann23.pdf?rlkey=lc785xhq1pcjqdtarn692e21k&dl=1) -------------------------------------------------------------------------------- /content/themes/laplace_transform.md: -------------------------------------------------------------------------------- 1 | # The Laplace Transform (and its Inverse) 2 | 3 | - Discussion: Introduction to the Laplace Transform (and its inverse) and its relevance to memory -------------------------------------------------------------------------------- /content/themes/memory_timescales_i.md: -------------------------------------------------------------------------------- 1 | # Memory Timescales (Part I) 2 | 3 | - Discussion: the temporal scales of memory, event boundaries, and situation models 4 | - Readings: 5 | - [Howard and Kahana (2002)](https://www.dropbox.com/scl/fi/yjnusbmoixbf4aen1mkx8/HowaKaha02.pdf?rlkey=ktt245cw09szubjnoe4cco1tz&dl=1) 6 | - [Polyn et al. (2009)](https://www.dropbox.com/scl/fi/98pui63j3o62xu96ciwhy/PolyEtal09.pdf?rlkey=42sc17ll573sm83g4q8q9x9nq&dl=1)* 7 | - [Baldassano et al. (2017)](https://www.dropbox.com/scl/fi/wgn96xni9fevoo6h1yngn/BaldEtal17.pdf?rlkey=wg9qugm1szfw50xao6k9047j6&dl=1) 8 | - [Honey et al. (2012)](https://www.dropbox.com/scl/fi/l3vzzc56jjhq9tc4cheev/HoneEtal12.pdf?rlkey=56wf835omj2i6gkdh0b8n38cx&dl=1) 9 | - [Manning et al. 
(2014)](https://www.dropbox.com/scl/fi/a1zltxk43dn8qmm7puaql/MannEtal14d.pdf?rlkey=wg2ikym1svvl68hbuw4f5cpax&dl=1) 10 | - [Ranganath and Ritchey (2012)](https://www.dropbox.com/scl/fi/asec4p68900eekp6vtdgb/RangRitc12.pdf?rlkey=hqixac8eij65hmn62stzvo4mp&dl=1) 11 | - [DuBrow and Davachi (2016)](https://www.dropbox.com/scl/fi/86gkrz0a9k57556tz4d2z/DuBrDava16.pdf?rlkey=v6hxkbzz80m48pz4a2425q6bn&dl=1) 12 | - [Zacks and Tversky (2001)](https://www.dropbox.com/scl/fi/28104fmu9kzk55znyxntd/ZackTver01.pdf?rlkey=2ytdz0e9agny4hmllcw7hvi8g&dl=1) 13 | - [Zwaan and Radvansky (1998)](https://www.dropbox.com/scl/fi/iqp70crdmpd5m97zzv45c/ZwaaRadv98.pdf?rlkey=habx93aplwkkw829vj9vkv52a&dl=1) 14 | - [Brunec et al. (2018)](https://www.dropbox.com/scl/fi/1eu28rpwyp8eg2sn4fgau/BrunEtal18b.pdf?rlkey=64dnn3onc90o59fuv33peil6g&dl=1) -------------------------------------------------------------------------------- /content/themes/memory_timescales_ii.md: -------------------------------------------------------------------------------- 1 | # Memory Timescales (Part II) 2 | 3 | - Discussion: is TCM *really* multi-timescale? -------------------------------------------------------------------------------- /content/themes/sam_model.md: -------------------------------------------------------------------------------- 1 | # The Search of Associative Memory (SAM) Model 2 | 3 | Placeholder text... -------------------------------------------------------------------------------- /content/themes/temporal_context_model.md: -------------------------------------------------------------------------------- 1 | # The Temporal Context Model (TCM) 2 | 3 | placeholder text... 4 | 5 | ## The Context, Maintenance, and Retrieval (CMR) Model 6 | 7 | placeholder text... 
-------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: ccm_course 2 | channels: 3 | - conda-forge 4 | - defaults 5 | dependencies: 6 | - bzip2=1.0.8 7 | - ca-certificates=2022.9.24 8 | - libffi=3.4.2 9 | - libsqlite=3.40.0 10 | - libzlib=1.2.13 11 | - ncurses=6.3 12 | - openssl=3.0.7 13 | - pip=22.3.1 14 | - python=3.11.0 15 | - readline=8.1.2 16 | - setuptools=65.5.1 17 | - tk=8.6.12 18 | - tzdata=2022f 19 | - wheel=0.38.4 20 | - xz=5.2.6 21 | - pip: 22 | - accessible-pygments==0.0.5 23 | - alabaster==0.7.16 24 | - appnope==0.1.4 25 | - asttokens==3.0.0 26 | - attrs==25.3.0 27 | - babel==2.17.0 28 | - beautifulsoup4==4.13.3 29 | - certifi==2025.1.31 30 | - charset-normalizer==3.4.1 31 | - click==8.1.8 32 | - comm==0.2.2 33 | - debugpy==1.8.13 34 | - decorator==5.2.1 35 | - docutils==0.21.2 36 | - executing==2.2.0 37 | - fastjsonschema==2.21.1 38 | - greenlet==3.1.1 39 | - idna==3.10 40 | - imagesize==1.4.1 41 | - importlib-metadata==8.6.1 42 | - ipykernel==6.29.5 43 | - ipython==9.0.2 44 | - ipython-pygments-lexers==1.1.1 45 | - jedi==0.19.2 46 | - jinja2==3.1.6 47 | - jsonschema==4.23.0 48 | - jsonschema-specifications==2024.10.1 49 | - jupyter-book==1.0.4.post1 50 | - jupyter-cache==1.0.1 51 | - jupyter-client==8.6.3 52 | - jupyter-core==5.7.2 53 | - latexcodec==3.0.0 54 | - linkify-it-py==2.0.3 55 | - markdown-it-py==3.0.0 56 | - markupsafe==3.0.2 57 | - matplotlib-inline==0.1.7 58 | - mdit-py-plugins==0.4.2 59 | - mdurl==0.1.2 60 | - myst-nb==1.2.0 61 | - myst-parser==3.0.1 62 | - nbclient==0.10.2 63 | - nbformat==5.10.4 64 | - nest-asyncio==1.6.0 65 | - packaging==24.2 66 | - parso==0.8.4 67 | - pexpect==4.9.0 68 | - platformdirs==4.3.7 69 | - prompt-toolkit==3.0.50 70 | - psutil==7.0.0 71 | - ptyprocess==0.7.0 72 | - pure-eval==0.2.3 73 | - pybtex==0.24.0 74 | - pybtex-docutils==1.0.3 75 | - pydata-sphinx-theme==0.15.4 76 | - 
pygments==2.19.1 77 | - python-dateutil==2.9.0.post0 78 | - pyyaml==6.0.2 79 | - pyzmq==26.3.0 80 | - referencing==0.36.2 81 | - requests==2.32.3 82 | - rpds-py==0.23.1 83 | - six==1.17.0 84 | - snowballstemmer==2.2.0 85 | - soupsieve==2.6 86 | - sphinx==7.4.7 87 | - sphinx-book-theme==1.1.4 88 | - sphinx-comments==0.0.3 89 | - sphinx-copybutton==0.5.2 90 | - sphinx-design==0.6.1 91 | - sphinx-external-toc==1.0.1 92 | - sphinx-jupyterbook-latex==1.0.0 93 | - sphinx-multitoc-numbering==0.1.3 94 | - sphinx-thebe==0.3.1 95 | - sphinx-togglebutton==0.3.2 96 | - sphinxcontrib-applehelp==2.0.0 97 | - sphinxcontrib-bibtex==2.6.3 98 | - sphinxcontrib-devhelp==2.0.0 99 | - sphinxcontrib-htmlhelp==2.1.0 100 | - sphinxcontrib-jsmath==1.0.1 101 | - sphinxcontrib-qthelp==2.0.0 102 | - sphinxcontrib-serializinghtml==2.0.0 103 | - sqlalchemy==2.0.39 104 | - stack-data==0.6.3 105 | - tabulate==0.9.0 106 | - tornado==6.4.2 107 | - traitlets==5.14.3 108 | - typing-extensions==4.12.2 109 | - uc-micro-py==1.0.3 110 | - urllib3==2.3.0 111 | - wcwidth==0.2.13 112 | - zipp==3.21.0 113 | prefix: /Users/peerherholz/anaconda3/envs/ccm_course 114 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | accessible-pygments==0.0.5 2 | alabaster==0.7.16 3 | appnope==0.1.4 4 | asttokens==3.0.0 5 | attrs==25.3.0 6 | babel==2.17.0 7 | beautifulsoup4==4.13.3 8 | certifi==2025.1.31 9 | charset-normalizer==3.4.1 10 | click==8.1.8 11 | comm==0.2.2 12 | debugpy==1.8.13 13 | decorator==5.2.1 14 | docutils==0.21.2 15 | executing==2.2.0 16 | fastjsonschema==2.21.1 17 | greenlet==3.1.1 18 | idna==3.10 19 | imagesize==1.4.1 20 | importlib_metadata==8.6.1 21 | ipykernel==6.29.5 22 | ipython==9.0.2 23 | ipython_pygments_lexers==1.1.1 24 | jedi==0.19.2 25 | Jinja2==3.1.6 26 | jsonschema==4.23.0 27 | jsonschema-specifications==2024.10.1 28 | jupyter-book==1.0.4.post1 29 | 
jupyter-cache==1.0.1 30 | jupyter_client==8.6.3 31 | jupyter_core==5.7.2 32 | latexcodec==3.0.0 33 | linkify-it-py==2.0.3 34 | markdown-it-py==3.0.0 35 | MarkupSafe==3.0.2 36 | matplotlib-inline==0.1.7 37 | mdit-py-plugins==0.4.2 38 | mdurl==0.1.2 39 | myst-nb==1.2.0 40 | myst-parser==3.0.1 41 | nbclient==0.10.2 42 | nbformat==5.10.4 43 | nest-asyncio==1.6.0 44 | packaging==24.2 45 | parso==0.8.4 46 | pexpect==4.9.0 47 | platformdirs==4.3.7 48 | prompt_toolkit==3.0.50 49 | psutil==7.0.0 50 | ptyprocess==0.7.0 51 | pure_eval==0.2.3 52 | pybtex==0.24.0 53 | pybtex-docutils==1.0.3 54 | pydata-sphinx-theme==0.15.4 55 | Pygments==2.19.1 56 | python-dateutil==2.9.0.post0 57 | PyYAML==6.0.2 58 | pyzmq==26.3.0 59 | referencing==0.36.2 60 | requests==2.32.3 61 | rpds-py==0.23.1 62 | six==1.17.0 63 | snowballstemmer==2.2.0 64 | soupsieve==2.6 65 | Sphinx==7.4.7 66 | sphinx-book-theme==1.1.4 67 | sphinx-comments==0.0.3 68 | sphinx-copybutton==0.5.2 69 | sphinx-jupyterbook-latex==1.0.0 70 | sphinx-multitoc-numbering==0.1.3 71 | sphinx-thebe==0.3.1 72 | sphinx-togglebutton==0.3.2 73 | sphinx_design==0.6.1 74 | sphinx_external_toc==1.0.1 75 | sphinxcontrib-applehelp==2.0.0 76 | sphinxcontrib-bibtex==2.6.3 77 | sphinxcontrib-devhelp==2.0.0 78 | sphinxcontrib-htmlhelp==2.1.0 79 | sphinxcontrib-jsmath==1.0.1 80 | sphinxcontrib-qthelp==2.0.0 81 | sphinxcontrib-serializinghtml==2.0.0 82 | SQLAlchemy==2.0.39 83 | stack-data==0.6.3 84 | tabulate==0.9.0 85 | tornado==6.4.2 86 | traitlets==5.14.3 87 | typing_extensions==4.12.2 88 | uc-micro-py==1.0.3 89 | urllib3==2.3.0 90 | wcwidth==0.2.13 91 | zipp==3.21.0 92 | --------------------------------------------------------------------------------