├── .gitignore
├── README.md
├── agent
│   ├── generative_agent.py
│   └── memories
│       └── john.txt
├── main.py
├── poetry.lock
├── pyproject.toml
├── simulating-people.pdf
└── world-subtree.png
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 | 
6 | # C extensions
7 | *.so
8 | 
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 | 
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 | 
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 | 
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 | 
54 | # Translations
55 | *.mo
56 | *.pot
57 | 
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 | 
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 | 
68 | # Scrapy stuff:
69 | .scrapy
70 | 
71 | # Sphinx documentation
72 | docs/_build/
73 | 
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 | 
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 | 
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 | 
85 | # pyenv
86 | # For a library or package, you might want to ignore these files since the code is
87 | # intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 | 
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 | 
97 | # poetry
98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
99 | # This is especially recommended for binary packages to ensure reproducibility, and is more
100 | # commonly ignored for libraries.
101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
102 | #poetry.lock
103 | 
104 | # pdm
105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
106 | #pdm.lock
107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
108 | # in version control.
109 | # https://pdm.fming.dev/#use-with-ide
110 | .pdm.toml
111 | 
112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
113 | __pypackages__/
114 | 
115 | # Celery stuff
116 | celerybeat-schedule
117 | celerybeat.pid
118 | 
119 | # SageMath parsed files
120 | *.sage.py
121 | 
122 | # Environments
123 | .env
124 | .venv
125 | env/
126 | venv/
127 | ENV/
128 | env.bak/
129 | venv.bak/
130 | 
131 | # Spyder project settings
132 | .spyderproject
133 | .spyproject
134 | 
135 | # Rope project settings
136 | .ropeproject
137 | 
138 | # mkdocs documentation
139 | /site
140 | 
141 | # mypy
142 | .mypy_cache/
143 | .dmypy.json
144 | dmypy.json
145 | 
146 | # Pyre type checker
147 | .pyre/
148 | 
149 | # pytype static type analyzer
150 | .pytype/
151 | 
152 | # Cython debug symbols
153 | cython_debug/
154 | 
155 | # PyCharm
156 | .idea/
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # LLM Ant Farm
2 | ## Overview
3 | I will create my own version of the Stanford experiment "Generative Agents: Interactive Simulacra of Human Behavior" in Python and run it with a local LLM such as Alpaca 7B or Dolly 2.0. I will then perform experiments on it to attempt to reproduce the emergent behaviors observed in the Stanford experiment, such as information diffusion and collaborative planning & coordination, as well as others (collective problem-solving, evolution of agent attitudes).
4 | 
5 | ## Goals
6 | - Replicate the Stanford experiment: obtain similar outcomes in terms of information diffusion, agent autonomy, and agents' ability to reflect & provide accurate information based on knowledge when interviewed.
7 | - Go beyond the Stanford experiment:
8 |   - Introduce adversarial relationships between agents, e.g. competition for scarce resources, & observe behavior
9 |   - Create a unique LLM instance for each agent, trained on that agent's memories, & prompt it for agent actions & dialog. (This solves the problem of only using some memories for each action/dialog prompt.) Retrain each LLM with new memories each night.
10 |   - Give agents goals / problems to solve & resources they can use to solve them.
11 |   - Perform social science experiments and game theory experiments, e.g. the prisoner's dilemma.
12 |   - Give agents a "story" or set of personal beliefs and the desire to change them; see if they can through experience.
13 | 
14 | ## Requirements
15 | This program is composed of two top-level component types:
16 | 
17 | ### Agent
18 | - There are as many agents as I decide to create for each experiment.
19 | - Agents have a summary description which comprises the initial entries in their memory stream.
20 | - Agents have a memory stream: a log of their actions, utterances, and observations.
21 | - Agents have the ability to query their memory stream for memories that are related to any other memory (the "query memory").
22 | - Memory relatedness is based on 3 factors (a scoring sketch follows this section):
23 |   - Recency: Time delta between the memory's last-access datetime and the current datetime
24 |   - Importance: A score indicating the importance of the memory to the agent
25 |     - In the Stanford experiment this score was calculated one time by the LLM - is there a better way, e.g. not using the LLM & updating the score over time?
26 |     - Is there a way to calculate importance with respect to the individual agent? E.g. prompt the LLM with the agent's most important memories and the new memory to get a score
27 |   - Relevance: Cosine similarity between the embedding vector of the memory and the embedding vector of the query memory
28 |     - In the Stanford experiment the embedding vectors were created by the LLM - is there a better way?
29 |       - [Yes!](https://www.sbert.net/index.html)
30 | - Agents have a loop routine:
31 |   - (Potentially) Update Importance Scores: Update the importance scores of all memories (TBD: I am not sure how importance scores are calculated yet)
32 |   - Reflect: If the sum of the importance scores of the agent's recent memories exceeds a threshold, create a new record in the agent's memory stream by inputting the important memories to the LLM and asking for a higher-level observation.
33 |   - Plan: If the agent has no plan (the most recent plan has completed), create a new plan by prompting the LLM with the agent's summary description and a summary of their previous day (user-generated for day 0). Add the plan to the agent's memory stream.
34 |     - The plan is composed recursively: first generate a high-level plan (e.g. 5-8 tasks), then recursively break down each high-level task into steps and add them to the plan.
35 |   - React/Update plan: Prompt the LLM with the agent's most recent observations and plan to decide if the plan should be changed; add the new plan to the agent's memory stream.
36 |   - Generate dialog line (if talking to another agent)
37 |     - Generated by prompting the LLM with the agent's memories of the other agent and the conversation history.
38 |   - Action: Execute the next step in the agent's plan or speak the next dialog line.
39 | 
40 | 
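Here is a minimal sketch of how these three factors might combine into a single retrieval score. It assumes the choices reported in the Stanford paper (an exponential recency decay of 0.995 per hour since last access, equal weights on the three factors) and an importance score normalized to 0-1; the `Memory` class and helper names are illustrative, not part of this repo yet:

```python
from dataclasses import dataclass, field
from datetime import datetime

import numpy as np


@dataclass
class Memory:
    text: str
    embedding: np.ndarray  # from any sentence-embedding model
    importance: float      # normalized to 0-1, scored once at creation time
    last_accessed: datetime = field(default_factory=datetime.now)


def relatedness(memory: Memory, query_embedding: np.ndarray, now: datetime, decay: float = 0.995) -> float:
    """Combine recency, importance, and relevance into one retrieval score."""
    hours_since_access = (now - memory.last_accessed).total_seconds() / 3600
    recency = decay ** hours_since_access  # exponential decay since last access
    relevance = float(  # cosine similarity to the query memory
        np.dot(memory.embedding, query_embedding)
        / (np.linalg.norm(memory.embedding) * np.linalg.norm(query_embedding))
    )
    return recency + memory.importance + relevance  # equal weights, as in the paper


def top_k(memories: list[Memory], query_embedding: np.ndarray, k: int = 3) -> list[Memory]:
    """Return the k memories most related to the query memory."""
    now = datetime.now()
    return sorted(memories, key=lambda m: relatedness(m, query_embedding, now), reverse=True)[:k]
```

In practice each factor could also be min-max normalized over the candidate set before summing, as the paper does.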
41 | ### World
42 | - There is 1 world.
43 | - The world contains a tree structure which is composed of all the elements of the world, including agents. Parent elements physically contain child elements. Elements may have any number of states and must be in exactly 1 state at any time. If they have no states, they are considered to be in a default state at all times. (A data-structure sketch follows this section.)
44 |   - Example Subtree: ![world subtree](world-subtree.png)
45 | - The world has a loop routine:
46 |   - Each iteration performs the following tasks:
47 |     - Run each agent's loop 1 time (ideally run all agents in parallel, but agents engaged in conversation will have to take turns)
48 |     - Update the world state based on each agent's output behavior for that loop iteration
49 |       - E.g. "Graham goes to the bodega" -> move Graham along the path towards the bodega at standard agent speed
50 |       - E.g. "Graham reads A Brief History of Time" -> Add ABHoT text (summarized? amount determined by agent reading speed * time step duration?) to Graham's memory
51 |       - E.g. "Graham says 'I am currently reading A Brief History of Time' to Jose" -> Add this phrase to Graham's and Jose's memories
52 |       - E.g. "Graham opens the cupboard" -> Update the cupboard, set state = open, and update the agent's memories with the cupboard subtree converted to syntax ("the cupboard contains a dish and a spoon")
53 |     - Update each agent's memories with their current perceptions if they have changed
54 |       - E.g. the agent has moved to a new location -> convert the agent's environment tree to syntax and add it to the agent's memory
55 |       - E.g. environment objects have changed state -> convert the object trees & states to syntax and add them to the agent's memory
56 |       - E.g. another agent has entered an agent's environment -> parse that part of the environment's tree to syntax and add it to the agent's memory
57 |     - Update all agents' memories with the current date and time (incremented from the previous world loop)
58 | 
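Here is a minimal sketch of the world's element tree as plain Python dataclasses, including the "convert subtree to syntax" step used in the examples above (class and method names are illustrative, not final):

```python
from __future__ import annotations

from dataclasses import dataclass, field


@dataclass
class WorldElement:
    """A node in the world tree; parent elements physically contain their children."""

    name: str
    states: list[str] = field(default_factory=list)  # e.g. ["open", "closed"]; empty = default state only
    state: str = "default"                           # the single current state
    children: list[WorldElement] = field(default_factory=list)

    def to_syntax(self) -> str:
        """Render this subtree as natural language for an agent's memory stream."""
        desc = f"the {self.name}"
        if self.states:
            desc += f" is {self.state}"
        if self.children:
            contents = " and ".join(f"a {child.name}" for child in self.children)
            desc += f"; the {self.name} contains {contents}"
        return desc


cupboard = WorldElement(
    name="cupboard",
    states=["open", "closed"],
    state="open",
    children=[WorldElement("dish"), WorldElement("spoon")],
)
print(cupboard.to_syntax())
```

Running the example prints `the cupboard is open; the cupboard contains a dish and a spoon`, matching the syntax conversion described above.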
59 | ## Notes
60 | ### Planned development stack
61 | - LLaMA for LLM
62 | - Python & LangChain for agent & world logic
63 | - Support for embedding-vector sorting & similarity scoring
64 | - Store world state as JSON
65 | - Eventually, PyGame for visual display of world & agent state
66 | 
67 | ### Planned development schedule
68 | 1. Write and test Python-based agent and world logic
69 | 2. Add visual display of world & agent state.
70 | 
71 | ### Progress:
72 | - 5/12/23: Using LangChain, added support for generative agents which can produce in-character dialog with another character and in-character actions.
73 |   - Memories are currently selected using only vector similarity
74 | 
75 | ### Next steps:
76 | - Experiment with & refine agent logic
77 | - Write world state and event loop logic
--------------------------------------------------------------------------------
/agent/generative_agent.py:
--------------------------------------------------------------------------------
1 | import os
2 | 
3 | from langchain.embeddings.openai import OpenAIEmbeddings
4 | from langchain.llms import OpenAI
5 | from langchain.memory import VectorStoreRetrieverMemory
6 | from langchain.chains import ConversationChain
7 | from langchain.prompts import PromptTemplate
8 | import faiss
9 | from langchain.docstore import InMemoryDocstore
10 | from langchain.vectorstores import FAISS
11 | from os import environ
12 | 
13 | EMBEDDING_SIZE = 1536  # dimensionality of OpenAI's text-embedding-ada-002 vectors
14 | LLM = OpenAI(temperature=float(environ.get("AGENT_LLM_TEMPERATURE", 0.5)))
15 | 
16 | _DEFAULT_TEMPLATE = """You are a character in a small community. Here are some memories from your recent past:
17 | {history}
18 | """
19 | 
20 | _ACTION_PROMPT = """Using these memories if they are relevant, answer the following question as your character. Provide your response in the third person present tense and include your name in your response.
21 | 
22 | Question: {input}
23 | Your response:"""
24 | 
25 | _DIALOG_PROMPT = """Using these memories if they are relevant, provide the next line of dialog as your character.
26 | 
27 | {name} says to you, """
28 | 
29 | _DIALOG_PROMPT_END = """"{input}". What is your response?"""
30 | 
31 | 
32 | class GenerativeAgent:
33 |     """An LLM agent which generates new plans, actions, and dialog based on current and past experiences"""
34 | 
35 |     def __init__(self, name: str, initial_memories_filepath: str = None, debug=os.environ.get("DEBUG", "").lower() in ("1", "true")):
36 |         """
37 |         Set up the agent's vector-store-based memory and insert any initial memories.
38 |         """
39 |         self.name = name
40 |         self.debug = debug
41 |         self.memory = VectorStoreRetrieverMemory(
42 |             retriever=FAISS(
43 |                 OpenAIEmbeddings().embed_query, faiss.IndexFlatL2(EMBEDDING_SIZE), InMemoryDocstore({}), {}
44 |             ).as_retriever(search_kwargs=dict(k=int(environ.get("AGENT_MEMORIES_PER_PROMPT", 3))))
45 |         )
46 |         # Load initial memories
47 |         if initial_memories_filepath:
48 |             self._load_memories(initial_memories_filepath)
49 | 
50 |     def _load_memories(self, memories_filepath):
51 |         """
52 |         Load semicolon-separated memories from the given filepath into memory.
53 |         """
54 |         for memory in open(memories_filepath, "r").read().split(";"):
55 |             self.memory.save_context({"input": memory}, {"output": ""})
56 | 
57 |     def get_action(self, question: str):
58 |         """
59 |         Generate an action in character, drawing upon relevant memories.
60 | """ 61 | return ConversationChain( 62 | llm=LLM, 63 | prompt=PromptTemplate( 64 | input_variables=["history", "input"], 65 | template=f"Your name is {self.name}. {_DEFAULT_TEMPLATE} {_ACTION_PROMPT}", 66 | ), 67 | memory=self.memory, 68 | verbose=self.debug, 69 | ).predict(input=question) 70 | 71 | def get_dialog(self, agent_name, prompt: str): 72 | """ 73 | In character, generate a line of dialog in response to the given prompt from the given character, drawing upon relevant memories. 74 | """ 75 | return ConversationChain( 76 | llm=LLM, 77 | prompt=PromptTemplate( 78 | input_variables=["history", "input"], 79 | template=f"Your name is {self.name}. {_DEFAULT_TEMPLATE} {_DIALOG_PROMPT.format(name=agent_name)} {_DIALOG_PROMPT_END}", 80 | ), 81 | memory=self.memory, 82 | verbose=self.debug, 83 | ).predict(input=prompt) 84 | -------------------------------------------------------------------------------- /agent/memories/john.txt: -------------------------------------------------------------------------------- 1 | John Lin is a pharmacy shopkeeper at the Willow 2 | Market and Pharmacy who loves to help people. He 3 | is always looking for ways to make the process 4 | of getting medication easier for his customers; 5 | John Lin is living with his wife, Mei Lin, who 6 | is a college professor, and son, Eddy Lin, who is 7 | a student studying music theory; John Lin loves 8 | his family very much; John Lin has known the old 9 | couple next-door, Sam Moore and Jennifer Moore, 10 | for a few years; John Lin thinks Sam Moore is a 11 | kind and nice man; John Lin knows his neighbor, 12 | Yuriko Yamamoto, well; John Lin knows of his 13 | neighbors, Tamara Taylor and Carmen Ortiz, but 14 | has not met them before; John Lin and Tom Moreno 15 | are colleagues at The Willows Market and Pharmacy; 16 | John Lin and Tom Moreno are friends and like to 17 | discuss local politics together; John Lin knows 18 | the Moreno family somewhat well — the husband Tom 19 | Moreno and the wife Jane Moreno. 
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
1 | from dotenv import load_dotenv
2 | 
3 | load_dotenv()
4 | from agent.generative_agent import GenerativeAgent
5 | 
6 | 
7 | def test_character():
8 |     john = GenerativeAgent(name="John Lin", initial_memories_filepath="agent/memories/john.txt")
9 |     print("Hi, I'm John Lin!")
10 |     while True:
11 |         print(john.get_action(question=input("action prompt > ")))
12 |         print(john.get_dialog(prompt=input("dialog prompt from Tom Moreno > "), agent_name="Tom Moreno"))
13 | 
14 | 
15 | if __name__ == "__main__":
16 |     test_character()
--------------------------------------------------------------------------------
/poetry.lock:
--------------------------------------------------------------------------------
1 | [[package]] 2 | name = "aiohttp" 3 | version = "3.8.4" 4 | description = "Async http client/server framework (asyncio)" 5 | category = "main" 6 | optional = false 7 | python-versions = ">=3.6" 8 | 9 | [package.dependencies] 10 | aiosignal = ">=1.1.2" 11 | async-timeout = ">=4.0.0a3,<5.0" 12 | attrs = ">=17.3.0" 13 | charset-normalizer = ">=2.0,<4.0" 14 | frozenlist = ">=1.1.1" 15 | multidict = ">=4.5,<7.0" 16 | yarl = ">=1.0,<2.0" 17 | 18 | [package.extras] 19 | speedups = ["aiodns", "brotli", "cchardet"] 20 | 21 | [[package]] 22 | name = "aiosignal" 23 | version = "1.3.1" 24 | description = "aiosignal: a list of registered asynchronous callbacks" 25 | category = "main" 26 | optional = false 27 | python-versions = ">=3.7" 28 | 29 | [package.dependencies] 30 | frozenlist = ">=1.1.0" 31 | 32 | [[package]] 33 | name = "async-timeout" 34 | version = "4.0.2" 35 | description = "Timeout context manager for asyncio programs" 36 | category = "main" 37 | optional = false 38 | python-versions = ">=3.6" 39 | 40 | [[package]] 41 | name = "attrs" 42 | version = "23.1.0" 43 | description = "Classes Without Boilerplate" 44 | category = "main" 45 | optional = false 46 | python-versions = ">=3.7" 47 | 48 | [package.extras] 49 | cov = ["attrs", "coverage[toml] (>=5.3)"] 50 | dev = ["attrs", "pre-commit"] 51 | docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] 52 | tests = ["attrs", "zope-interface"] 53 | tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest-mypy-plugins", "pytest-xdist", "pytest (>=4.3.0)"] 54 | 55 | [[package]] 56 | name = "black" 57 | version = "23.3.0" 58 | description = "The uncompromising code formatter." 59 | category = "main" 60 | optional = false 61 | python-versions = ">=3.7" 62 | 63 | [package.dependencies] 64 | click = ">=8.0.0" 65 | mypy-extensions = ">=0.4.3" 66 | packaging = ">=22.0" 67 | pathspec = ">=0.9.0" 68 | platformdirs = ">=2" 69 | tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} 70 | typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} 71 | 72 | [package.extras] 73 | colorama = ["colorama (>=0.4.3)"] 74 | d = ["aiohttp (>=3.7.4)"] 75 | jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] 76 | uvloop = ["uvloop (>=0.15.2)"] 77 | 78 | [[package]] 79 | name = "certifi" 80 | version = "2023.5.7" 81 | description = "Python package for providing Mozilla's CA Bundle."
82 | category = "main" 83 | optional = false 84 | python-versions = ">=3.6" 85 | 86 | [[package]] 87 | name = "charset-normalizer" 88 | version = "3.1.0" 89 | description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 90 | category = "main" 91 | optional = false 92 | python-versions = ">=3.7.0" 93 | 94 | [[package]] 95 | name = "click" 96 | version = "8.1.3" 97 | description = "Composable command line interface toolkit" 98 | category = "main" 99 | optional = false 100 | python-versions = ">=3.7" 101 | 102 | [package.dependencies] 103 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 104 | 105 | [[package]] 106 | name = "colorama" 107 | version = "0.4.6" 108 | description = "Cross-platform colored terminal text." 109 | category = "main" 110 | optional = false 111 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" 112 | 113 | [[package]] 114 | name = "dataclasses-json" 115 | version = "0.5.7" 116 | description = "Easily serialize dataclasses to and from JSON" 117 | category = "main" 118 | optional = false 119 | python-versions = ">=3.6" 120 | 121 | [package.dependencies] 122 | marshmallow = ">=3.3.0,<4.0.0" 123 | marshmallow-enum = ">=1.5.1,<2.0.0" 124 | typing-inspect = ">=0.4.0" 125 | 126 | [package.extras] 127 | dev = ["pytest (>=6.2.3)", "ipython", "mypy (>=0.710)", "hypothesis", "portray", "flake8", "simplejson", "types-dataclasses"] 128 | 129 | [[package]] 130 | name = "faiss-cpu" 131 | version = "1.7.4" 132 | description = "A library for efficient similarity search and clustering of dense vectors." 133 | category = "main" 134 | optional = false 135 | python-versions = "*" 136 | 137 | [[package]] 138 | name = "frozenlist" 139 | version = "1.3.3" 140 | description = "A list-like structure which implements collections.abc.MutableSequence" 141 | category = "main" 142 | optional = false 143 | python-versions = ">=3.7" 144 | 145 | [[package]] 146 | name = "greenlet" 147 | version = "2.0.2" 148 | description = "Lightweight in-process concurrent programming" 149 | category = "main" 150 | optional = false 151 | python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" 152 | 153 | [package.extras] 154 | docs = ["sphinx", "docutils (<0.18)"] 155 | test = ["objgraph", "psutil"] 156 | 157 | [[package]] 158 | name = "idna" 159 | version = "3.4" 160 | description = "Internationalized Domain Names in Applications (IDNA)" 161 | category = "main" 162 | optional = false 163 | python-versions = ">=3.5" 164 | 165 | [[package]] 166 | name = "isort" 167 | version = "5.12.0" 168 | description = "A Python utility / library to sort Python imports." 
169 | category = "dev" 170 | optional = false 171 | python-versions = ">=3.8.0" 172 | 173 | [package.extras] 174 | colors = ["colorama (>=0.4.3)"] 175 | requirements-deprecated-finder = ["pip-api", "pipreqs"] 176 | pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] 177 | plugins = ["setuptools"] 178 | 179 | [[package]] 180 | name = "langchain" 181 | version = "0.0.166" 182 | description = "Building applications with LLMs through composability" 183 | category = "main" 184 | optional = false 185 | python-versions = ">=3.8.1,<4.0" 186 | 187 | [package.dependencies] 188 | aiohttp = ">=3.8.3,<4.0.0" 189 | async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} 190 | dataclasses-json = ">=0.5.7,<0.6.0" 191 | numexpr = ">=2.8.4,<3.0.0" 192 | numpy = ">=1,<2" 193 | openapi-schema-pydantic = ">=1.2,<2.0" 194 | pydantic = ">=1,<2" 195 | PyYAML = ">=5.4.1" 196 | requests = ">=2,<3" 197 | SQLAlchemy = ">=1.4,<3" 198 | tenacity = ">=8.1.0,<9.0.0" 199 | tqdm = ">=4.48.0" 200 | 201 | [package.extras] 202 | azure = ["azure-core (>=1.26.4,<2.0.0)", "openai (>=0,<1)", "azure-identity (>=1.12.0,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)"] 203 | all = ["faiss-cpu (>=1,<2)", "wikipedia (>=1,<2)", "elasticsearch (>=8,<9)", "opensearch-py (>=2.0.0,<3.0.0)", "redis (>=4,<5)", "manifest-ml (>=0.0.1,<0.0.2)", "spacy (>=3,<4)", "nltk (>=3,<4)", "transformers (>=4,<5)", "beautifulsoup4 (>=4,<5)", "torch (>=1,<3)", "jinja2 (>=3,<4)", "tiktoken (>=0.3.2,<0.4.0)", "pinecone-client (>=2,<3)", "pinecone-text (>=0.4.2,<0.5.0)", "clickhouse-connect (>=0.5.14,<0.6.0)", "weaviate-client (>=3,<4)", "google-api-python-client (==2.70.0)", "wolframalpha (==5.0.0)", "anthropic (>=0.2.6,<0.3.0)", "qdrant-client (>=1.1.2,<2.0.0)", "tensorflow-text (>=2.11.0,<3.0.0)", "cohere (>=3,<4)", "openai (>=0,<1)", "nlpcloud (>=1,<2)", "nomic (>=1.0.43,<2.0.0)", "huggingface_hub (>=0,<1)", "jina (>=3.14,<4.0)", "google-search-results (>=2,<3)", "sentence-transformers (>=2,<3)", "arxiv (>=1.4,<2.0)", "pypdf (>=3.4.0,<4.0.0)", "networkx (>=2.6.3,<3.0.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "deeplake (>=3.3.0,<4.0.0)", "pgvector (>=0.1.6,<0.2.0)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pyowm (>=3.3.0,<4.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "gptcache (>=0.1.7)", "atlassian-python-api (>=3.36.0,<4.0.0)", "pytesseract (>=0.3.10,<0.4.0)", "html2text (>=2020.1.16,<2021.0.0)", "duckduckgo-search (>=2.8.6,<3.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "lark (>=1.1.5,<2.0.0)", "lancedb (>=0.1,<0.2)", "pexpect (>=4.8.0,<5.0.0)", "pyvespa (>=0.33.0,<0.34.0)", "O365 (>=2.0.26,<3.0.0)", "jq (>=1.4.1,<2.0.0)", "docarray (>=0.31.0,<0.32.0)", "protobuf (==3.19)", "hnswlib (>=0.7.0,<0.8.0)"] 204 | llms = ["manifest-ml (>=0.0.1,<0.0.2)", "transformers (>=4,<5)", "torch (>=1,<3)", "anthropic (>=0.2.6,<0.3.0)", "cohere (>=3,<4)", "openai (>=0,<1)", "nlpcloud (>=1,<2)", "huggingface_hub (>=0,<1)"] 205 | qdrant = ["qdrant-client (>=1.1.2,<2.0.0)"] 206 | cohere = ["cohere (>=3,<4)"] 207 | openai = ["openai (>=0,<1)"] 208 | embeddings = ["sentence-transformers (>=2,<3)"] 209 | extended-testing = ["pypdf (>=3.4.0,<4.0.0)", "pdfminer-six (>=20221105,<20221106)"] 210 | in-memory-store = ["docarray (>=0.31.0,<0.32.0)"] 211 | hnswlib = ["docarray (>=0.31.0,<0.32.0)", "protobuf (==3.19)", "hnswlib (>=0.7.0,<0.8.0)"] 212 | 213 | [[package]] 214 | name = "marshmallow" 215 | version = "3.19.0" 216 | description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
217 | category = "main" 218 | optional = false 219 | python-versions = ">=3.7" 220 | 221 | [package.dependencies] 222 | packaging = ">=17.0" 223 | 224 | [package.extras] 225 | dev = ["pytest", "pytz", "simplejson", "mypy (==0.990)", "flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "pre-commit (>=2.4,<3.0)", "tox"] 226 | docs = ["sphinx (==5.3.0)", "sphinx-issues (==3.0.1)", "alabaster (==0.7.12)", "sphinx-version-warning (==1.1.2)", "autodocsumm (==0.2.9)"] 227 | lint = ["mypy (==0.990)", "flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "pre-commit (>=2.4,<3.0)"] 228 | tests = ["pytest", "pytz", "simplejson"] 229 | 230 | [[package]] 231 | name = "marshmallow-enum" 232 | version = "1.5.1" 233 | description = "Enum field for Marshmallow" 234 | category = "main" 235 | optional = false 236 | python-versions = "*" 237 | 238 | [package.dependencies] 239 | marshmallow = ">=2.0.0" 240 | 241 | [[package]] 242 | name = "multidict" 243 | version = "6.0.4" 244 | description = "multidict implementation" 245 | category = "main" 246 | optional = false 247 | python-versions = ">=3.7" 248 | 249 | [[package]] 250 | name = "mypy-extensions" 251 | version = "1.0.0" 252 | description = "Type system extensions for programs checked with the mypy type checker." 253 | category = "main" 254 | optional = false 255 | python-versions = ">=3.5" 256 | 257 | [[package]] 258 | name = "numexpr" 259 | version = "2.8.4" 260 | description = "Fast numerical expression evaluator for NumPy" 261 | category = "main" 262 | optional = false 263 | python-versions = ">=3.7" 264 | 265 | [package.dependencies] 266 | numpy = ">=1.13.3" 267 | 268 | [[package]] 269 | name = "numpy" 270 | version = "1.24.3" 271 | description = "Fundamental package for array computing in Python" 272 | category = "main" 273 | optional = false 274 | python-versions = ">=3.8" 275 | 276 | [[package]] 277 | name = "openai" 278 | version = "0.27.6" 279 | description = "Python client library for the OpenAI API" 280 | category = "main" 281 | optional = false 282 | python-versions = ">=3.7.1" 283 | 284 | [package.dependencies] 285 | aiohttp = "*" 286 | requests = ">=2.20" 287 | tqdm = "*" 288 | 289 | [package.extras] 290 | datalib = ["numpy", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "openpyxl (>=3.0.7)"] 291 | dev = ["black (>=21.6b0,<22.0.0)", "pytest (>=6.0.0,<7.0.0)", "pytest-asyncio", "pytest-mock"] 292 | embeddings = ["scikit-learn (>=1.0.2)", "tenacity (>=8.0.1)", "matplotlib", "plotly", "numpy", "scipy", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "openpyxl (>=3.0.7)"] 293 | wandb = ["wandb", "numpy", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "openpyxl (>=3.0.7)"] 294 | 295 | [[package]] 296 | name = "openapi-schema-pydantic" 297 | version = "1.2.4" 298 | description = "OpenAPI (v3) specification schema as pydantic class" 299 | category = "main" 300 | optional = false 301 | python-versions = ">=3.6.1" 302 | 303 | [package.dependencies] 304 | pydantic = ">=1.8.2" 305 | 306 | [[package]] 307 | name = "packaging" 308 | version = "23.1" 309 | description = "Core utilities for Python packages" 310 | category = "main" 311 | optional = false 312 | python-versions = ">=3.7" 313 | 314 | [[package]] 315 | name = "pathspec" 316 | version = "0.11.1" 317 | description = "Utility library for gitignore style pattern matching of file paths." 
318 | category = "main" 319 | optional = false 320 | python-versions = ">=3.7" 321 | 322 | [[package]] 323 | name = "platformdirs" 324 | version = "3.5.1" 325 | description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 326 | category = "main" 327 | optional = false 328 | python-versions = ">=3.7" 329 | 330 | [package.extras] 331 | docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)", "sphinx (>=6.2.1)"] 332 | test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest (>=7.3.1)"] 333 | 334 | [[package]] 335 | name = "pydantic" 336 | version = "1.10.7" 337 | description = "Data validation and settings management using python type hints" 338 | category = "main" 339 | optional = false 340 | python-versions = ">=3.7" 341 | 342 | [package.dependencies] 343 | typing-extensions = ">=4.2.0" 344 | 345 | [package.extras] 346 | dotenv = ["python-dotenv (>=0.10.4)"] 347 | email = ["email-validator (>=1.0.3)"] 348 | 349 | [[package]] 350 | name = "python-dotenv" 351 | version = "1.0.0" 352 | description = "Read key-value pairs from a .env file and set them as environment variables" 353 | category = "main" 354 | optional = false 355 | python-versions = ">=3.8" 356 | 357 | [package.extras] 358 | cli = ["click (>=5.0)"] 359 | 360 | [[package]] 361 | name = "pyyaml" 362 | version = "6.0" 363 | description = "YAML parser and emitter for Python" 364 | category = "main" 365 | optional = false 366 | python-versions = ">=3.6" 367 | 368 | [[package]] 369 | name = "requests" 370 | version = "2.30.0" 371 | description = "Python HTTP for Humans." 372 | category = "main" 373 | optional = false 374 | python-versions = ">=3.7" 375 | 376 | [package.dependencies] 377 | certifi = ">=2017.4.17" 378 | charset-normalizer = ">=2,<4" 379 | idna = ">=2.5,<4" 380 | urllib3 = ">=1.21.1,<3" 381 | 382 | [package.extras] 383 | socks = ["PySocks (>=1.5.6,!=1.5.7)"] 384 | use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] 385 | 386 | [[package]] 387 | name = "sqlalchemy" 388 | version = "2.0.13" 389 | description = "Database Abstraction Library" 390 | category = "main" 391 | optional = false 392 | python-versions = ">=3.7" 393 | 394 | [package.dependencies] 395 | greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} 396 | typing-extensions = ">=4.2.0" 397 | 398 | [package.extras] 399 | aiomysql = ["greenlet (!=0.4.17)", "aiomysql"] 400 | aiosqlite = ["greenlet (!=0.4.17)", "aiosqlite", "typing-extensions (!=3.10.0.1)"] 401 | asyncio = ["greenlet (!=0.4.17)"] 402 | asyncmy = ["greenlet (!=0.4.17)", "asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)"] 403 | mariadb_connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] 404 | mssql = ["pyodbc"] 405 | mssql_pymssql = ["pymssql"] 406 | mssql_pyodbc = ["pyodbc"] 407 | mypy = ["mypy (>=0.910)"] 408 | mysql = ["mysqlclient (>=1.4.0)"] 409 | mysql_connector = ["mysql-connector-python"] 410 | oracle = ["cx-oracle (>=7)"] 411 | oracle_oracledb = ["oracledb (>=1.0.1)"] 412 | postgresql = ["psycopg2 (>=2.7)"] 413 | postgresql_asyncpg = ["greenlet (!=0.4.17)", "asyncpg"] 414 | postgresql_pg8000 = ["pg8000 (>=1.29.1)"] 415 | postgresql_psycopg = ["psycopg (>=3.0.7)"] 416 | postgresql_psycopg2binary = ["psycopg2-binary"] 417 | postgresql_psycopg2cffi = 
["psycopg2cffi"] 418 | pymysql = ["pymysql"] 419 | sqlcipher = ["sqlcipher3-binary"] 420 | 421 | [[package]] 422 | name = "tenacity" 423 | version = "8.2.2" 424 | description = "Retry code until it succeeds" 425 | category = "main" 426 | optional = false 427 | python-versions = ">=3.6" 428 | 429 | [package.extras] 430 | doc = ["reno", "sphinx", "tornado (>=4.5)"] 431 | 432 | [[package]] 433 | name = "tomli" 434 | version = "2.0.1" 435 | description = "A lil' TOML parser" 436 | category = "main" 437 | optional = false 438 | python-versions = ">=3.7" 439 | 440 | [[package]] 441 | name = "tqdm" 442 | version = "4.65.0" 443 | description = "Fast, Extensible Progress Meter" 444 | category = "main" 445 | optional = false 446 | python-versions = ">=3.7" 447 | 448 | [package.dependencies] 449 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 450 | 451 | [package.extras] 452 | dev = ["py-make (>=0.1.0)", "twine", "wheel"] 453 | notebook = ["ipywidgets (>=6)"] 454 | slack = ["slack-sdk"] 455 | telegram = ["requests"] 456 | 457 | [[package]] 458 | name = "typing-extensions" 459 | version = "4.5.0" 460 | description = "Backported and Experimental Type Hints for Python 3.7+" 461 | category = "main" 462 | optional = false 463 | python-versions = ">=3.7" 464 | 465 | [[package]] 466 | name = "typing-inspect" 467 | version = "0.8.0" 468 | description = "Runtime inspection utilities for typing module." 469 | category = "main" 470 | optional = false 471 | python-versions = "*" 472 | 473 | [package.dependencies] 474 | mypy-extensions = ">=0.3.0" 475 | typing-extensions = ">=3.7.4" 476 | 477 | [[package]] 478 | name = "urllib3" 479 | version = "2.0.2" 480 | description = "HTTP library with thread-safe connection pooling, file post, and more." 481 | category = "main" 482 | optional = false 483 | python-versions = ">=3.7" 484 | 485 | [package.extras] 486 | brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] 487 | secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] 488 | socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] 489 | zstd = ["zstandard (>=0.18.0)"] 490 | 491 | [[package]] 492 | name = "yarl" 493 | version = "1.9.2" 494 | description = "Yet another URL library" 495 | category = "main" 496 | optional = false 497 | python-versions = ">=3.7" 498 | 499 | [package.dependencies] 500 | idna = ">=2.0" 501 | multidict = ">=4.0" 502 | 503 | [metadata] 504 | lock-version = "1.1" 505 | python-versions = "^3.9" 506 | content-hash = "6b805a210243a73c9752d93cfefa4dd229d29ae54894895a0449561222fd574d" 507 | 508 | [metadata.files] 509 | aiohttp = [] 510 | aiosignal = [] 511 | async-timeout = [] 512 | attrs = [] 513 | black = [] 514 | certifi = [] 515 | charset-normalizer = [] 516 | click = [] 517 | colorama = [] 518 | dataclasses-json = [] 519 | faiss-cpu = [] 520 | frozenlist = [] 521 | greenlet = [] 522 | idna = [] 523 | isort = [] 524 | langchain = [] 525 | marshmallow = [] 526 | marshmallow-enum = [] 527 | multidict = [] 528 | mypy-extensions = [] 529 | numexpr = [] 530 | numpy = [] 531 | openai = [] 532 | openapi-schema-pydantic = [] 533 | packaging = [] 534 | pathspec = [] 535 | platformdirs = [] 536 | pydantic = [] 537 | python-dotenv = [] 538 | pyyaml = [] 539 | requests = [] 540 | sqlalchemy = [] 541 | tenacity = [] 542 | tomli = [] 543 | tqdm = [] 544 | typing-extensions = [] 545 | typing-inspect = [] 546 | urllib3 = [] 547 | yarl = [] 548 | -------------------------------------------------------------------------------- /pyproject.toml: 
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "ant-farm"
3 | version = "0.1.0"
4 | description = "My implementation of the Stanford experiment \"Generative Agents: Interactive Simulacra of Human Behavior\""
5 | authors = ["Graham Home "]
6 | license = "MIT"
7 | 
8 | [tool.poetry.dependencies]
9 | python = "^3.9"
10 | langchain = "^0.0.166"
11 | openai = "^0.27.6"
12 | python-dotenv = "^1.0.0"
13 | faiss-cpu = "^1.7.4"
14 | black = "^23.3.0"
15 | 
16 | [tool.poetry.dev-dependencies]
17 | black = "^23.3.0"
18 | isort = "^5.12.0"
19 | 
20 | [build-system]
21 | requires = ["poetry-core>=1.0.0"]
22 | build-backend = "poetry.core.masonry.api"
23 | 
--------------------------------------------------------------------------------
/simulating-people.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/grahamhome/llm-ant-farm/ed01a19e8fcb8032c9e95adf6fbe85b566dcd1fe/simulating-people.pdf
--------------------------------------------------------------------------------
/world-subtree.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/grahamhome/llm-ant-farm/ed01a19e8fcb8032c9e95adf6fbe85b566dcd1fe/world-subtree.png
--------------------------------------------------------------------------------