├── .devcontainer
│   └── devcontainer.json
├── .github
│   ├── CODEOWNERS
│   ├── ISSUE_TEMPLATE.md
│   ├── PULL_REQUEST_TEMPLATE.md
│   └── workflows
│       └── main.yml
├── .gitignore
├── .vscode
│   └── settings.json
├── AeroFlow_Specification_Document.pdf
├── CONTRIBUTING.md
├── Customer Service.pdf
├── EcoSprint_Specification_Document.pdf
├── LICENSE
├── NOTICE
├── README.md
├── code_03_XX A basic Agent - Router application.ipynb
├── code_06_XX Implementing Agentic AI for Customer Service.ipynb
├── notebooks
│   └── testfile.txt
└── requirements.txt

/.devcontainer/devcontainer.json:
--------------------------------------------------------------------------------
1 | {
2 |   "extensions": [
3 |     "GitHub.github-vscode-theme",
4 |     "ms-toolsai.jupyter",
5 |     "ms-python.python"
6 |     // Additional Extensions Here
7 |   ],
8 |   "onCreateCommand" : "[ -f requirements.txt ] && pip install -r requirements.txt; echo PS1='\"$ \"' >> ~/.bashrc", //Set Terminal Prompt to $
9 | }
10 |
11 | // DevContainer Reference: https://code.visualstudio.com/docs/remote/devcontainerjson-reference
12 |
--------------------------------------------------------------------------------
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | # Codeowners for these exercise files:
2 | # * (asterisk) denotes "all files and folders"
3 | # Example: * @producer @instructor
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE.md:
--------------------------------------------------------------------------------
1 |
7 |
8 | ## Issue Overview
9 |
10 |
11 | ## Describe your environment
12 |
13 |
14 | ## Steps to Reproduce
15 |
16 | 1.
17 | 2.
18 | 3.
19 | 4.
20 |
21 | ## Expected Behavior
22 |
23 |
24 | ## Current Behavior
25 |
26 |
27 | ## Possible Solution
28 |
29 |
30 | ## Screenshots / Video
31 |
32 |
33 | ## Related Issues
34 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/.github/workflows/main.yml:
--------------------------------------------------------------------------------
1 | name: Copy To Branches
2 | on:
3 |   workflow_dispatch:
4 | jobs:
5 |   copy-to-branches:
6 |     runs-on: ubuntu-latest
7 |     steps:
8 |       - uses: actions/checkout@v2
9 |         with:
10 |           fetch-depth: 0
11 |       - name: Copy To Branches Action
12 |         uses: planetoftheweb/copy-to-branches@v1.2
13 |         env:
14 |           key: main
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | node_modules
3 | .tmp
4 | npm-debug.log
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 |   "editor.bracketPairColorization.enabled": true,
3 |   "editor.cursorBlinking": "solid",
4 |   "editor.fontFamily": "ui-monospace, Menlo, Monaco, 'Cascadia Mono', 'Segoe UI Mono', 'Roboto Mono', 'Oxygen Mono', 'Ubuntu Monospace', 'Source Code Pro', 'Fira Mono', 'Droid Sans Mono', 'Courier New', monospace",
5 |   "editor.fontLigatures": false,
6 |   "editor.fontSize": 22,
7 |   "editor.formatOnPaste": true,
8 |   "editor.formatOnSave": true,
9 |   "editor.lineNumbers": "on",
10 |   "editor.matchBrackets": "always",
11 |   "editor.minimap.enabled": false,
12 |   "editor.smoothScrolling": true,
13 |   "editor.tabSize": 2,
14 |   "editor.useTabStops": true,
15 |   "emmet.triggerExpansionOnTab": true,
16 |   "explorer.openEditors.visible": 0,
17 |   "files.autoSave": "afterDelay",
18 |   "screencastMode.onlyKeyboardShortcuts": true,
19 |   "terminal.integrated.fontSize": 18,
20 |   "workbench.colorTheme": "Visual Studio Dark",
21 |   "workbench.fontAliasing": "antialiased",
22 |   "workbench.statusBar.visible": true,
23 |   "notebook.output.wordWrap": true
24 | }
25 |
--------------------------------------------------------------------------------
/AeroFlow_Specification_Document.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LinkedInLearning/agentic-ai-for-developers-concepts-and-applications-for-enterprises-3913172/ab9de465981ab0707fcd1d3104a2036ac1a754c5/AeroFlow_Specification_Document.pdf
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 |
2 | Contribution Agreement
3 | ======================
4 |
5 | This repository does not accept pull requests (PRs). All pull requests will be closed.
6 |
7 | However, if any contributions (through pull requests, issues, feedback or otherwise) are provided, as a contributor, you represent that the code you submit is your original work or that of your employer (in which case you represent you have the right to bind your employer). By submitting code (or otherwise providing feedback), you (and, if applicable, your employer) are licensing the submitted code (and/or feedback) to LinkedIn and the open source community subject to the BSD 2-Clause license.
8 |
--------------------------------------------------------------------------------
/Customer Service.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LinkedInLearning/agentic-ai-for-developers-concepts-and-applications-for-enterprises-3913172/ab9de465981ab0707fcd1d3104a2036ac1a754c5/Customer Service.pdf
--------------------------------------------------------------------------------
/EcoSprint_Specification_Document.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LinkedInLearning/agentic-ai-for-developers-concepts-and-applications-for-enterprises-3913172/ab9de465981ab0707fcd1d3104a2036ac1a754c5/EcoSprint_Specification_Document.pdf
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | LinkedIn Learning Exercise Files License Agreement
2 | ==================================================
3 |
4 | This License Agreement (the "Agreement") is a binding legal agreement
5 | between you (as an individual or entity, as applicable) and LinkedIn
6 | Corporation (“LinkedIn”). By downloading or using the LinkedIn Learning
7 | exercise files in this repository (“Licensed Materials”), you agree to
8 | be bound by the terms of this Agreement. If you do not agree to these
9 | terms, do not download or use the Licensed Materials.
10 |
11 | 1. License.
12 | - a. Subject to the terms of this Agreement, LinkedIn hereby grants LinkedIn
13 | members during their LinkedIn Learning subscription a non-exclusive,
14 | non-transferable copyright license, for internal use only, to 1) make a
15 | reasonable number of copies of the Licensed Materials, and 2) make
16 | derivative works of the Licensed Materials for the sole purpose of
17 | practicing skills taught in LinkedIn Learning courses.
18 | - b. Distribution. Unless otherwise noted in the Licensed Materials, subject
19 | to the terms of this Agreement, LinkedIn hereby grants LinkedIn members
20 | with a LinkedIn Learning subscription a non-exclusive, non-transferable
21 | copyright license to distribute the Licensed Materials, except the
22 | Licensed Materials may not be included in any product or service (or
23 | otherwise used) to instruct or educate others.
24 |
25 | 2. Restrictions and Intellectual Property.
26 | - a. You may not to use, modify, copy, make derivative works of, publish,
27 | distribute, rent, lease, sell, sublicense, assign or otherwise transfer the
28 | Licensed Materials, except as expressly set forth above in Section 1.
29 | - b. Linkedin (and its licensors) retains its intellectual property rights
30 | in the Licensed Materials. Except as expressly set forth in Section 1,
31 | LinkedIn grants no licenses.
32 | - c. You indemnify LinkedIn and its licensors and affiliates for i) any
33 | alleged infringement or misappropriation of any intellectual property rights
34 | of any third party based on modifications you make to the Licensed Materials,
35 | ii) any claims arising from your use or distribution of all or part of the
36 | Licensed Materials and iii) a breach of this Agreement. You will defend, hold
37 | harmless, and indemnify LinkedIn and its affiliates (and our and their
38 | respective employees, shareholders, and directors) from any claim or action
39 | brought by a third party, including all damages, liabilities, costs and
40 | expenses, including reasonable attorneys’ fees, to the extent resulting from,
41 | alleged to have resulted from, or in connection with: (a) your breach of your
42 | obligations herein; or (b) your use or distribution of any Licensed Materials.
43 |
44 | 3. Open source. This code may include open source software, which may be
45 | subject to other license terms as provided in the files.
46 |
47 | 4. Warranty Disclaimer. LINKEDIN PROVIDES THE LICENSED MATERIALS ON AN “AS IS”
48 | AND “AS AVAILABLE” BASIS. LINKEDIN MAKES NO REPRESENTATION OR WARRANTY,
49 | WHETHER EXPRESS OR IMPLIED, ABOUT THE LICENSED MATERIALS, INCLUDING ANY
50 | REPRESENTATION THAT THE LICENSED MATERIALS WILL BE FREE OF ERRORS, BUGS OR
51 | INTERRUPTIONS, OR THAT THE LICENSED MATERIALS ARE ACCURATE, COMPLETE OR
52 | OTHERWISE VALID. TO THE FULLEST EXTENT PERMITTED BY LAW, LINKEDIN AND ITS
53 | AFFILIATES DISCLAIM ANY IMPLIED OR STATUTORY WARRANTY OR CONDITION, INCLUDING
54 | ANY IMPLIED WARRANTY OR CONDITION OF MERCHANTABILITY OR FITNESS FOR A
55 | PARTICULAR PURPOSE, AVAILABILITY, SECURITY, TITLE AND/OR NON-INFRINGEMENT.
56 | YOUR USE OF THE LICENSED MATERIALS IS AT YOUR OWN DISCRETION AND RISK, AND
57 | YOU WILL BE SOLELY RESPONSIBLE FOR ANY DAMAGE THAT RESULTS FROM USE OF THE
58 | LICENSED MATERIALS TO YOUR COMPUTER SYSTEM OR LOSS OF DATA. NO ADVICE OR
59 | INFORMATION, WHETHER ORAL OR WRITTEN, OBTAINED BY YOU FROM US OR THROUGH OR
60 | FROM THE LICENSED MATERIALS WILL CREATE ANY WARRANTY OR CONDITION NOT
61 | EXPRESSLY STATED IN THESE TERMS.
62 |
63 | 5. Limitation of Liability. LINKEDIN SHALL NOT BE LIABLE FOR ANY INDIRECT,
64 | INCIDENTAL, SPECIAL, PUNITIVE, CONSEQUENTIAL OR EXEMPLARY DAMAGES, INCLUDING
65 | BUT NOT LIMITED TO, DAMAGES FOR LOSS OF PROFITS, GOODWILL, USE, DATA OR OTHER
66 | INTANGIBLE LOSSES . IN NO EVENT WILL LINKEDIN'S AGGREGATE LIABILITY TO YOU
67 | EXCEED $100. THIS LIMITATION OF LIABILITY SHALL:
68 | - i. APPLY REGARDLESS OF WHETHER (A) YOU BASE YOUR CLAIM ON CONTRACT, TORT,
69 | STATUTE, OR ANY OTHER LEGAL THEORY, (B) WE KNEW OR SHOULD HAVE KNOWN ABOUT
70 | THE POSSIBILITY OF SUCH DAMAGES, OR (C) THE LIMITED REMEDIES PROVIDED IN THIS
71 | SECTION FAIL OF THEIR ESSENTIAL PURPOSE; AND
72 | - ii. NOT APPLY TO ANY DAMAGE THAT LINKEDIN MAY CAUSE YOU INTENTIONALLY OR
73 | KNOWINGLY IN VIOLATION OF THESE TERMS OR APPLICABLE LAW, OR AS OTHERWISE
74 | MANDATED BY APPLICABLE LAW THAT CANNOT BE DISCLAIMED IN THESE TERMS.
75 |
76 | 6. Termination. This Agreement automatically terminates upon your breach of
77 | this Agreement or termination of your LinkedIn Learning subscription. On
78 | termination, all licenses granted under this Agreement will terminate
79 | immediately and you will delete the Licensed Materials. Sections 2-7 of this
80 | Agreement survive any termination of this Agreement. LinkedIn may discontinue
81 | the availability of some or all of the Licensed Materials at any time for any
82 | reason.
83 |
84 | 7. Miscellaneous. This Agreement will be governed by and construed in
85 | accordance with the laws of the State of California without regard to conflict
86 | of laws principles. The exclusive forum for any disputes arising out of or
87 | relating to this Agreement shall be an appropriate federal or state court
88 | sitting in the County of Santa Clara, State of California. If LinkedIn does
89 | not act to enforce a breach of this Agreement, that does not mean that
90 | LinkedIn has waived its right to enforce this Agreement. The Agreement does
91 | not create a partnership, agency relationship, or joint venture between the
92 | parties. Neither party has the power or authority to bind the other or to
93 | create any obligation or responsibility on behalf of the other. You may not,
94 | without LinkedIn’s prior written consent, assign or delegate any rights or
95 | obligations under these terms, including in connection with a change of
96 | control. Any purported assignment and delegation shall be ineffective. The
97 | Agreement shall bind and inure to the benefit of the parties, their respective
98 | successors and permitted assigns. If any provision of the Agreement is
99 | unenforceable, that provision will be modified to render it enforceable to the
100 | extent possible to give effect to the parties’ intentions and the remaining
101 | provisions will not be affected. This Agreement is the only agreement between
102 | you and LinkedIn regarding the Licensed Materials, and supersedes all prior
103 | agreements relating to the Licensed Materials.
104 |
105 | Last Updated: March 2019
106 |
--------------------------------------------------------------------------------
/NOTICE:
--------------------------------------------------------------------------------
1 | Copyright 2024 LinkedIn Corporation
2 | All Rights Reserved.
3 |
4 | Licensed under the LinkedIn Learning Exercise File License (the "License").
5 | See LICENSE in the project root for license information.
6 |
7 | Please note, this project may automatically load third party code from external
8 | repositories (for example, NPM modules, Composer packages, or other dependencies).
9 | If so, such third party code may be subject to other license terms than as set
10 | forth above. In addition, such third party code may also depend on and load
11 | multiple tiers of dependencies. Please review the applicable licenses of the
12 | additional dependencies.
13 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Agentic AI for Developers: Concepts and Applications for Enterprises
2 | This is the repository for the LinkedIn Learning course `Agentic AI for Developers: Concepts and Applications for Enterprises`. The full course is available from [LinkedIn Learning][lil-course-url].
3 |
4 | ![lil-thumbnail-url]
5 |
6 | Agentic AI is at the forefront of the next wave of technological advancements in the generative AI world, transforming the way business processes will be executed. As agentic AI becomes more capable of performing tasks traditionally requiring human intelligence and oversight, it is crucial for data scientists and engineers to understand how these systems work and the best practices for building them.
7 |
8 | In this course, Kumaran Ponnambalam discusses the concepts and building blocks for agentic AI. He uses a few examples to explore how enterprise use cases can be built, and reviews responsible AI considerations for agentic AI.
9 |
10 | ### Instructor
11 |
12 | Kumaran Ponnambalam
13 |
14 | Working with data for 20+ years
15 |
16 |
17 | Check out my other courses on [LinkedIn Learning](https://www.linkedin.com/learning/instructors/kumaran-ponnambalam?u=104).
18 |
19 | [0]: # (Replace these placeholder URLs with actual course URLs)
20 |
21 | [lil-course-url]: https://www.linkedin.com/learning/agentic-ai-for-developers-concepts-and-applications-for-enterprises
22 | [lil-thumbnail-url]: https://media.licdn.com/dms/image/v2/D560DAQHzCdkrNwuB5w/learning-public-crop_675_1200/learning-public-crop_675_1200/0/1726507433613?e=2147483647&v=beta&t=4ZNKsRbW5OIA5pSeAPoJj_XODyVfzCJQPvJmzCRSay4
23 |
24 |
--------------------------------------------------------------------------------
/code_03_XX A basic Agent - Router application.ipynb:
--------------------------------------------------------------------------------
1 | {
2 |  "cells": [
3 |   {
4 |    "cell_type": "markdown",
5 |    "id": "ee5c0c82-a3ff-4fe1-9238-69bba6c2bed7",
6 |    "metadata": {},
7 |    "source": [
8 |     "### 03.03. 
Setting up Indexes" 9 | ] 10 | }, 11 | { 12 | "cell_type": "code", 13 | "execution_count": 1, 14 | "id": "a8fa44f3-cd5c-4fcd-8a40-fe8046005824", 15 | "metadata": {}, 16 | "outputs": [ 17 | { 18 | "name": "stdout", 19 | "output_type": "stream", 20 | "text": [ 21 | "Collecting python-dotenv==1.0.0\n", 22 | " Downloading python_dotenv-1.0.0-py3-none-any.whl.metadata (21 kB)\n", 23 | "Downloading python_dotenv-1.0.0-py3-none-any.whl (19 kB)\n", 24 | "Installing collected packages: python-dotenv\n", 25 | "Successfully installed python-dotenv-1.0.0\n", 26 | "\n", 27 | "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m24.1.2\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m24.2\u001b[0m\n", 28 | "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpython3 -m pip install --upgrade pip\u001b[0m\n", 29 | "Collecting llama-index==0.10.59\n", 30 | " Downloading llama_index-0.10.59-py3-none-any.whl.metadata (11 kB)\n", 31 | "Collecting llama-index-agent-openai<0.3.0,>=0.1.4 (from llama-index==0.10.59)\n", 32 | " Downloading llama_index_agent_openai-0.2.9-py3-none-any.whl.metadata (729 bytes)\n", 33 | "Collecting llama-index-cli<0.2.0,>=0.1.2 (from llama-index==0.10.59)\n", 34 | " Downloading llama_index_cli-0.1.13-py3-none-any.whl.metadata (1.5 kB)\n", 35 | "Collecting llama-index-core==0.10.59 (from llama-index==0.10.59)\n", 36 | " Downloading llama_index_core-0.10.59-py3-none-any.whl.metadata (2.4 kB)\n", 37 | "Collecting llama-index-embeddings-openai<0.2.0,>=0.1.5 (from llama-index==0.10.59)\n", 38 | " Downloading llama_index_embeddings_openai-0.1.11-py3-none-any.whl.metadata (655 bytes)\n", 39 | "Collecting llama-index-indices-managed-llama-cloud>=0.2.0 (from llama-index==0.10.59)\n", 40 | " Downloading llama_index_indices_managed_llama_cloud-0.2.7-py3-none-any.whl.metadata (3.8 kB)\n", 41 | "Collecting llama-index-legacy<0.10.0,>=0.9.48 (from llama-index==0.10.59)\n", 42 | " Downloading llama_index_legacy-0.9.48.post1-py3-none-any.whl.metadata (8.5 kB)\n", 43 | "Collecting llama-index-llms-openai<0.2.0,>=0.1.27 (from llama-index==0.10.59)\n", 44 | " Downloading llama_index_llms_openai-0.1.29-py3-none-any.whl.metadata (650 bytes)\n", 45 | "Collecting llama-index-multi-modal-llms-openai<0.2.0,>=0.1.3 (from llama-index==0.10.59)\n", 46 | " Downloading llama_index_multi_modal_llms_openai-0.1.9-py3-none-any.whl.metadata (728 bytes)\n", 47 | "Collecting llama-index-program-openai<0.2.0,>=0.1.3 (from llama-index==0.10.59)\n", 48 | " Downloading llama_index_program_openai-0.1.7-py3-none-any.whl.metadata (760 bytes)\n", 49 | "Collecting llama-index-question-gen-openai<0.2.0,>=0.1.2 (from llama-index==0.10.59)\n", 50 | " Downloading llama_index_question_gen_openai-0.1.3-py3-none-any.whl.metadata (785 bytes)\n", 51 | "Collecting llama-index-readers-file<0.2.0,>=0.1.4 (from llama-index==0.10.59)\n", 52 | " Downloading llama_index_readers_file-0.1.33-py3-none-any.whl.metadata (5.4 kB)\n", 53 | "Collecting llama-index-readers-llama-parse>=0.1.2 (from llama-index==0.10.59)\n", 54 | " Downloading llama_index_readers_llama_parse-0.1.6-py3-none-any.whl.metadata (3.6 kB)\n", 55 | "Requirement already satisfied: PyYAML>=6.0.1 in /home/codespace/.local/lib/python3.10/site-packages (from llama-index-core==0.10.59->llama-index==0.10.59) (6.0.1)\n", 56 | "Collecting SQLAlchemy>=1.4.49 (from 
SQLAlchemy[asyncio]>=1.4.49->llama-index-core==0.10.59->llama-index==0.10.59)\n", 57 | " Downloading SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (9.6 kB)\n", 58 | "Collecting aiohttp<4.0.0,>=3.8.6 (from llama-index-core==0.10.59->llama-index==0.10.59)\n", 59 | " Downloading aiohttp-3.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (7.5 kB)\n", 60 | "Collecting dataclasses-json (from llama-index-core==0.10.59->llama-index==0.10.59)\n", 61 | " Downloading dataclasses_json-0.6.7-py3-none-any.whl.metadata (25 kB)\n", 62 | "Collecting deprecated>=1.2.9.3 (from llama-index-core==0.10.59->llama-index==0.10.59)\n", 63 | " Downloading Deprecated-1.2.14-py2.py3-none-any.whl.metadata (5.4 kB)\n", 64 | "Collecting dirtyjson<2.0.0,>=1.0.8 (from llama-index-core==0.10.59->llama-index==0.10.59)\n", 65 | " Downloading dirtyjson-1.0.8-py3-none-any.whl.metadata (11 kB)\n", 66 | "Requirement already satisfied: fsspec>=2023.5.0 in /home/codespace/.local/lib/python3.10/site-packages (from llama-index-core==0.10.59->llama-index==0.10.59) (2024.6.1)\n", 67 | "Requirement already satisfied: httpx in /home/codespace/.local/lib/python3.10/site-packages (from llama-index-core==0.10.59->llama-index==0.10.59) (0.27.0)\n", 68 | "Requirement already satisfied: nest-asyncio<2.0.0,>=1.5.8 in /home/codespace/.local/lib/python3.10/site-packages (from llama-index-core==0.10.59->llama-index==0.10.59) (1.6.0)\n", 69 | "Requirement already satisfied: networkx>=3.0 in /home/codespace/.local/lib/python3.10/site-packages (from llama-index-core==0.10.59->llama-index==0.10.59) (3.3)\n", 70 | "Collecting nltk<4.0.0,>=3.8.1 (from llama-index-core==0.10.59->llama-index==0.10.59)\n", 71 | " Downloading nltk-3.8.2-py3-none-any.whl.metadata (2.9 kB)\n", 72 | "Collecting numpy<2.0.0 (from llama-index-core==0.10.59->llama-index==0.10.59)\n", 73 | " Downloading numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (61 kB)\n", 74 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m61.0/61.0 kB\u001b[0m \u001b[31m1.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 75 | "\u001b[?25hCollecting openai>=1.1.0 (from llama-index-core==0.10.59->llama-index==0.10.59)\n", 76 | " Downloading openai-1.40.6-py3-none-any.whl.metadata (22 kB)\n", 77 | "Requirement already satisfied: pandas in /home/codespace/.local/lib/python3.10/site-packages (from llama-index-core==0.10.59->llama-index==0.10.59) (2.2.2)\n", 78 | "Requirement already satisfied: pillow>=9.0.0 in /home/codespace/.local/lib/python3.10/site-packages (from llama-index-core==0.10.59->llama-index==0.10.59) (10.4.0)\n", 79 | "Requirement already satisfied: requests>=2.31.0 in /home/codespace/.local/lib/python3.10/site-packages (from llama-index-core==0.10.59->llama-index==0.10.59) (2.32.3)\n", 80 | "Requirement already satisfied: tenacity!=8.4.0,<9.0.0,>=8.2.0 in /home/codespace/.local/lib/python3.10/site-packages (from llama-index-core==0.10.59->llama-index==0.10.59) (8.5.0)\n", 81 | "Collecting tiktoken>=0.3.3 (from llama-index-core==0.10.59->llama-index==0.10.59)\n", 82 | " Downloading tiktoken-0.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (6.6 kB)\n", 83 | "Collecting tqdm<5.0.0,>=4.66.1 (from llama-index-core==0.10.59->llama-index==0.10.59)\n", 84 | " Downloading tqdm-4.66.5-py3-none-any.whl.metadata (57 kB)\n", 85 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m57.6/57.6 kB\u001b[0m \u001b[31m1.8 
MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 86 | "\u001b[?25hRequirement already satisfied: typing-extensions>=4.5.0 in /home/codespace/.local/lib/python3.10/site-packages (from llama-index-core==0.10.59->llama-index==0.10.59) (4.12.2)\n", 87 | "Collecting typing-inspect>=0.8.0 (from llama-index-core==0.10.59->llama-index==0.10.59)\n", 88 | " Downloading typing_inspect-0.9.0-py3-none-any.whl.metadata (1.5 kB)\n", 89 | "Collecting wrapt (from llama-index-core==0.10.59->llama-index==0.10.59)\n", 90 | " Downloading wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (6.6 kB)\n", 91 | "Collecting llama-cloud>=0.0.11 (from llama-index-indices-managed-llama-cloud>=0.2.0->llama-index==0.10.59)\n", 92 | " Downloading llama_cloud-0.0.13-py3-none-any.whl.metadata (751 bytes)\n", 93 | "Requirement already satisfied: beautifulsoup4<5.0.0,>=4.12.3 in /home/codespace/.local/lib/python3.10/site-packages (from llama-index-readers-file<0.2.0,>=0.1.4->llama-index==0.10.59) (4.12.3)\n", 94 | "Collecting pypdf<5.0.0,>=4.0.1 (from llama-index-readers-file<0.2.0,>=0.1.4->llama-index==0.10.59)\n", 95 | " Downloading pypdf-4.3.1-py3-none-any.whl.metadata (7.4 kB)\n", 96 | "Collecting striprtf<0.0.27,>=0.0.26 (from llama-index-readers-file<0.2.0,>=0.1.4->llama-index==0.10.59)\n", 97 | " Downloading striprtf-0.0.26-py3-none-any.whl.metadata (2.1 kB)\n", 98 | "Collecting llama-parse>=0.4.0 (from llama-index-readers-llama-parse>=0.1.2->llama-index==0.10.59)\n", 99 | " Downloading llama_parse-0.4.9-py3-none-any.whl.metadata (4.4 kB)\n", 100 | "Collecting aiohappyeyeballs>=2.3.0 (from aiohttp<4.0.0,>=3.8.6->llama-index-core==0.10.59->llama-index==0.10.59)\n", 101 | " Downloading aiohappyeyeballs-2.3.5-py3-none-any.whl.metadata (5.8 kB)\n", 102 | "Collecting aiosignal>=1.1.2 (from aiohttp<4.0.0,>=3.8.6->llama-index-core==0.10.59->llama-index==0.10.59)\n", 103 | " Downloading aiosignal-1.3.1-py3-none-any.whl.metadata (4.0 kB)\n", 104 | "Requirement already satisfied: attrs>=17.3.0 in /home/codespace/.local/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core==0.10.59->llama-index==0.10.59) (23.2.0)\n", 105 | "Collecting frozenlist>=1.1.1 (from aiohttp<4.0.0,>=3.8.6->llama-index-core==0.10.59->llama-index==0.10.59)\n", 106 | " Downloading frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (12 kB)\n", 107 | "Collecting multidict<7.0,>=4.5 (from aiohttp<4.0.0,>=3.8.6->llama-index-core==0.10.59->llama-index==0.10.59)\n", 108 | " Downloading multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (4.2 kB)\n", 109 | "Collecting yarl<2.0,>=1.0 (from aiohttp<4.0.0,>=3.8.6->llama-index-core==0.10.59->llama-index==0.10.59)\n", 110 | " Downloading yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (31 kB)\n", 111 | "Collecting async-timeout<5.0,>=4.0 (from aiohttp<4.0.0,>=3.8.6->llama-index-core==0.10.59->llama-index==0.10.59)\n", 112 | " Downloading async_timeout-4.0.3-py3-none-any.whl.metadata (4.2 kB)\n", 113 | "Requirement already satisfied: soupsieve>1.2 in /home/codespace/.local/lib/python3.10/site-packages (from beautifulsoup4<5.0.0,>=4.12.3->llama-index-readers-file<0.2.0,>=0.1.4->llama-index==0.10.59) (2.5)\n", 114 | "Collecting pydantic>=1.10 (from llama-cloud>=0.0.11->llama-index-indices-managed-llama-cloud>=0.2.0->llama-index==0.10.59)\n", 115 | " Downloading pydantic-2.8.2-py3-none-any.whl.metadata 
(125 kB)\n", 116 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m125.2/125.2 kB\u001b[0m \u001b[31m4.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 117 | "\u001b[?25hRequirement already satisfied: anyio in /home/codespace/.local/lib/python3.10/site-packages (from httpx->llama-index-core==0.10.59->llama-index==0.10.59) (4.4.0)\n", 118 | "Requirement already satisfied: certifi in /home/codespace/.local/lib/python3.10/site-packages (from httpx->llama-index-core==0.10.59->llama-index==0.10.59) (2024.7.4)\n", 119 | "Requirement already satisfied: httpcore==1.* in /home/codespace/.local/lib/python3.10/site-packages (from httpx->llama-index-core==0.10.59->llama-index==0.10.59) (1.0.5)\n", 120 | "Requirement already satisfied: idna in /home/codespace/.local/lib/python3.10/site-packages (from httpx->llama-index-core==0.10.59->llama-index==0.10.59) (3.7)\n", 121 | "Requirement already satisfied: sniffio in /home/codespace/.local/lib/python3.10/site-packages (from httpx->llama-index-core==0.10.59->llama-index==0.10.59) (1.3.1)\n", 122 | "Requirement already satisfied: h11<0.15,>=0.13 in /home/codespace/.local/lib/python3.10/site-packages (from httpcore==1.*->httpx->llama-index-core==0.10.59->llama-index==0.10.59) (0.14.0)\n", 123 | "Collecting click (from nltk<4.0.0,>=3.8.1->llama-index-core==0.10.59->llama-index==0.10.59)\n", 124 | " Downloading click-8.1.7-py3-none-any.whl.metadata (3.0 kB)\n", 125 | "Requirement already satisfied: joblib in /home/codespace/.local/lib/python3.10/site-packages (from nltk<4.0.0,>=3.8.1->llama-index-core==0.10.59->llama-index==0.10.59) (1.4.2)\n", 126 | "Collecting regex>=2021.8.3 (from nltk<4.0.0,>=3.8.1->llama-index-core==0.10.59->llama-index==0.10.59)\n", 127 | " Downloading regex-2024.7.24-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (40 kB)\n", 128 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m40.5/40.5 kB\u001b[0m \u001b[31m1.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 129 | "\u001b[?25hCollecting distro<2,>=1.7.0 (from openai>=1.1.0->llama-index-core==0.10.59->llama-index==0.10.59)\n", 130 | " Downloading distro-1.9.0-py3-none-any.whl.metadata (6.8 kB)\n", 131 | "Collecting jiter<1,>=0.4.0 (from openai>=1.1.0->llama-index-core==0.10.59->llama-index==0.10.59)\n", 132 | " Downloading jiter-0.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (3.6 kB)\n", 133 | "Requirement already satisfied: charset-normalizer<4,>=2 in /home/codespace/.local/lib/python3.10/site-packages (from requests>=2.31.0->llama-index-core==0.10.59->llama-index==0.10.59) (3.3.2)\n", 134 | "Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from requests>=2.31.0->llama-index-core==0.10.59->llama-index==0.10.59) (2.0.7)\n", 135 | "Collecting greenlet!=0.4.17 (from SQLAlchemy>=1.4.49->SQLAlchemy[asyncio]>=1.4.49->llama-index-core==0.10.59->llama-index==0.10.59)\n", 136 | " Downloading greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl.metadata (3.8 kB)\n", 137 | "Collecting mypy-extensions>=0.3.0 (from typing-inspect>=0.8.0->llama-index-core==0.10.59->llama-index==0.10.59)\n", 138 | " Downloading mypy_extensions-1.0.0-py3-none-any.whl.metadata (1.1 kB)\n", 139 | "Collecting marshmallow<4.0.0,>=3.18.0 (from dataclasses-json->llama-index-core==0.10.59->llama-index==0.10.59)\n", 140 | " Downloading marshmallow-3.21.3-py3-none-any.whl.metadata (7.1 kB)\n", 141 | "Requirement already satisfied: 
python-dateutil>=2.8.2 in /home/codespace/.local/lib/python3.10/site-packages (from pandas->llama-index-core==0.10.59->llama-index==0.10.59) (2.9.0.post0)\n", 142 | "Requirement already satisfied: pytz>=2020.1 in /home/codespace/.local/lib/python3.10/site-packages (from pandas->llama-index-core==0.10.59->llama-index==0.10.59) (2024.1)\n", 143 | "Requirement already satisfied: tzdata>=2022.7 in /home/codespace/.local/lib/python3.10/site-packages (from pandas->llama-index-core==0.10.59->llama-index==0.10.59) (2024.1)\n", 144 | "Requirement already satisfied: exceptiongroup>=1.0.2 in /home/codespace/.local/lib/python3.10/site-packages (from anyio->httpx->llama-index-core==0.10.59->llama-index==0.10.59) (1.2.1)\n", 145 | "Requirement already satisfied: packaging>=17.0 in /home/codespace/.local/lib/python3.10/site-packages (from marshmallow<4.0.0,>=3.18.0->dataclasses-json->llama-index-core==0.10.59->llama-index==0.10.59) (24.1)\n", 146 | "Collecting annotated-types>=0.4.0 (from pydantic>=1.10->llama-cloud>=0.0.11->llama-index-indices-managed-llama-cloud>=0.2.0->llama-index==0.10.59)\n", 147 | " Downloading annotated_types-0.7.0-py3-none-any.whl.metadata (15 kB)\n", 148 | "Collecting pydantic-core==2.20.1 (from pydantic>=1.10->llama-cloud>=0.0.11->llama-index-indices-managed-llama-cloud>=0.2.0->llama-index==0.10.59)\n", 149 | " Downloading pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (6.6 kB)\n", 150 | "Requirement already satisfied: six>=1.5 in /home/codespace/.local/lib/python3.10/site-packages (from python-dateutil>=2.8.2->pandas->llama-index-core==0.10.59->llama-index==0.10.59) (1.16.0)\n", 151 | "Downloading llama_index-0.10.59-py3-none-any.whl (6.8 kB)\n", 152 | "Downloading llama_index_core-0.10.59-py3-none-any.whl (15.5 MB)\n", 153 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m15.5/15.5 MB\u001b[0m \u001b[31m44.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m:00:01\u001b[0m00:01\u001b[0m\n", 154 | "\u001b[?25hDownloading llama_index_agent_openai-0.2.9-py3-none-any.whl (13 kB)\n", 155 | "Downloading llama_index_cli-0.1.13-py3-none-any.whl (27 kB)\n", 156 | "Downloading llama_index_embeddings_openai-0.1.11-py3-none-any.whl (6.3 kB)\n", 157 | "Downloading llama_index_indices_managed_llama_cloud-0.2.7-py3-none-any.whl (9.5 kB)\n", 158 | "Downloading llama_index_legacy-0.9.48.post1-py3-none-any.whl (1.2 MB)\n", 159 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.2/1.2 MB\u001b[0m \u001b[31m23.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m:00:01\u001b[0m\n", 160 | "\u001b[?25hDownloading llama_index_llms_openai-0.1.29-py3-none-any.whl (11 kB)\n", 161 | "Downloading llama_index_multi_modal_llms_openai-0.1.9-py3-none-any.whl (5.9 kB)\n", 162 | "Downloading llama_index_program_openai-0.1.7-py3-none-any.whl (5.3 kB)\n", 163 | "Downloading llama_index_question_gen_openai-0.1.3-py3-none-any.whl (2.9 kB)\n", 164 | "Downloading llama_index_readers_file-0.1.33-py3-none-any.whl (38 kB)\n", 165 | "Downloading llama_index_readers_llama_parse-0.1.6-py3-none-any.whl (2.5 kB)\n", 166 | "Downloading aiohttp-3.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.2 MB)\n", 167 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.2/1.2 MB\u001b[0m \u001b[31m24.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m:00:01\u001b[0m\n", 168 | "\u001b[?25hDownloading Deprecated-1.2.14-py2.py3-none-any.whl (9.6 kB)\n", 169 | "Downloading 
dirtyjson-1.0.8-py3-none-any.whl (25 kB)\n", 170 | "Downloading llama_cloud-0.0.13-py3-none-any.whl (169 kB)\n", 171 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m169.4/169.4 kB\u001b[0m \u001b[31m5.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 172 | "\u001b[?25hDownloading llama_parse-0.4.9-py3-none-any.whl (9.4 kB)\n", 173 | "Downloading nltk-3.8.2-py3-none-any.whl (1.5 MB)\n", 174 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.5/1.5 MB\u001b[0m \u001b[31m27.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m:00:01\u001b[0m\n", 175 | "\u001b[?25hDownloading numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (18.2 MB)\n", 176 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m18.2/18.2 MB\u001b[0m \u001b[31m49.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m:00:01\u001b[0m00:01\u001b[0m\n", 177 | "\u001b[?25hDownloading openai-1.40.6-py3-none-any.whl (361 kB)\n", 178 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m361.3/361.3 kB\u001b[0m \u001b[31m10.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 179 | "\u001b[?25hDownloading pypdf-4.3.1-py3-none-any.whl (295 kB)\n", 180 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m295.8/295.8 kB\u001b[0m \u001b[31m7.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0mta \u001b[36m0:00:01\u001b[0m\n", 181 | "\u001b[?25hDownloading SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (3.1 MB)\n", 182 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m3.1/3.1 MB\u001b[0m \u001b[31m41.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m:00:01\u001b[0m\n", 183 | "\u001b[?25hDownloading striprtf-0.0.26-py3-none-any.whl (6.9 kB)\n", 184 | "Downloading tiktoken-0.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.1 MB)\n", 185 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.1/1.1 MB\u001b[0m \u001b[31m17.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m:00:01\u001b[0m\n", 186 | "\u001b[?25hDownloading tqdm-4.66.5-py3-none-any.whl (78 kB)\n", 187 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m78.4/78.4 kB\u001b[0m \u001b[31m2.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 188 | "\u001b[?25hDownloading typing_inspect-0.9.0-py3-none-any.whl (8.8 kB)\n", 189 | "Downloading wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (80 kB)\n", 190 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m80.3/80.3 kB\u001b[0m \u001b[31m2.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 191 | "\u001b[?25hDownloading dataclasses_json-0.6.7-py3-none-any.whl (28 kB)\n", 192 | "Downloading aiohappyeyeballs-2.3.5-py3-none-any.whl (12 kB)\n", 193 | "Downloading aiosignal-1.3.1-py3-none-any.whl (7.6 kB)\n", 194 | "Downloading async_timeout-4.0.3-py3-none-any.whl (5.7 kB)\n", 195 | "Downloading distro-1.9.0-py3-none-any.whl (20 kB)\n", 196 | "Downloading frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (239 kB)\n", 197 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m239.5/239.5 kB\u001b[0m \u001b[31m7.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 198 | "\u001b[?25hDownloading greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl (616 kB)\n", 
199 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m616.0/616.0 kB\u001b[0m \u001b[31m15.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m\n", 200 | "\u001b[?25hDownloading jiter-0.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (318 kB)\n", 201 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m318.9/318.9 kB\u001b[0m \u001b[31m8.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m:00:01\u001b[0m\n", 202 | "\u001b[?25hDownloading marshmallow-3.21.3-py3-none-any.whl (49 kB)\n", 203 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m49.2/49.2 kB\u001b[0m \u001b[31m1.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 204 | "\u001b[?25hDownloading multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (124 kB)\n", 205 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m124.3/124.3 kB\u001b[0m \u001b[31m4.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 206 | "\u001b[?25hDownloading mypy_extensions-1.0.0-py3-none-any.whl (4.7 kB)\n", 207 | "Downloading pydantic-2.8.2-py3-none-any.whl (423 kB)\n", 208 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m423.9/423.9 kB\u001b[0m \u001b[31m11.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 209 | "\u001b[?25hDownloading pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (2.1 MB)\n", 210 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m2.1/2.1 MB\u001b[0m \u001b[31m30.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m:00:01\u001b[0m\n", 211 | "\u001b[?25hDownloading regex-2024.7.24-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (776 kB)\n", 212 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m776.5/776.5 kB\u001b[0m \u001b[31m18.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m\n", 213 | "\u001b[?25hDownloading yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (301 kB)\n", 214 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m301.6/301.6 kB\u001b[0m \u001b[31m7.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m:00:01\u001b[0m\n", 215 | "\u001b[?25hDownloading click-8.1.7-py3-none-any.whl (97 kB)\n", 216 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m97.9/97.9 kB\u001b[0m \u001b[31m3.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 217 | "\u001b[?25hDownloading annotated_types-0.7.0-py3-none-any.whl (13 kB)\n", 218 | "Installing collected packages: striprtf, dirtyjson, wrapt, tqdm, regex, pypdf, pydantic-core, numpy, mypy-extensions, multidict, marshmallow, jiter, greenlet, frozenlist, distro, click, async-timeout, annotated-types, aiohappyeyeballs, yarl, typing-inspect, tiktoken, SQLAlchemy, pydantic, nltk, deprecated, aiosignal, openai, llama-cloud, dataclasses-json, aiohttp, llama-index-legacy, llama-index-core, llama-parse, llama-index-readers-file, llama-index-llms-openai, llama-index-indices-managed-llama-cloud, llama-index-embeddings-openai, llama-index-readers-llama-parse, llama-index-multi-modal-llms-openai, llama-index-cli, llama-index-agent-openai, llama-index-program-openai, llama-index-question-gen-openai, llama-index\n", 219 | " Attempting uninstall: numpy\n", 220 | " Found existing installation: numpy 2.0.0\n", 221 | " Uninstalling numpy-2.0.0:\n", 222 | " Successfully uninstalled numpy-2.0.0\n", 223 | "Successfully installed 
SQLAlchemy-2.0.32 aiohappyeyeballs-2.3.5 aiohttp-3.10.3 aiosignal-1.3.1 annotated-types-0.7.0 async-timeout-4.0.3 click-8.1.7 dataclasses-json-0.6.7 deprecated-1.2.14 dirtyjson-1.0.8 distro-1.9.0 frozenlist-1.4.1 greenlet-3.0.3 jiter-0.5.0 llama-cloud-0.0.13 llama-index-0.10.59 llama-index-agent-openai-0.2.9 llama-index-cli-0.1.13 llama-index-core-0.10.59 llama-index-embeddings-openai-0.1.11 llama-index-indices-managed-llama-cloud-0.2.7 llama-index-legacy-0.9.48.post1 llama-index-llms-openai-0.1.29 llama-index-multi-modal-llms-openai-0.1.9 llama-index-program-openai-0.1.7 llama-index-question-gen-openai-0.1.3 llama-index-readers-file-0.1.33 llama-index-readers-llama-parse-0.1.6 llama-parse-0.4.9 marshmallow-3.21.3 multidict-6.0.5 mypy-extensions-1.0.0 nltk-3.8.2 numpy-1.26.4 openai-1.40.6 pydantic-2.8.2 pydantic-core-2.20.1 pypdf-4.3.1 regex-2024.7.24 striprtf-0.0.26 tiktoken-0.7.0 tqdm-4.66.5 typing-inspect-0.9.0 wrapt-1.16.0 yarl-1.9.4\n", 224 | "\n", 225 | "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m24.1.2\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m24.2\u001b[0m\n", 226 | "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpython3 -m pip install --upgrade pip\u001b[0m\n", 227 | "Collecting llama-index-llms-openai==0.1.27\n", 228 | " Downloading llama_index_llms_openai-0.1.27-py3-none-any.whl.metadata (610 bytes)\n", 229 | "Requirement already satisfied: llama-index-core<0.11.0,>=0.10.57 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from llama-index-llms-openai==0.1.27) (0.10.59)\n", 230 | "Requirement already satisfied: PyYAML>=6.0.1 in /home/codespace/.local/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (6.0.1)\n", 231 | "Requirement already satisfied: SQLAlchemy>=1.4.49 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from SQLAlchemy[asyncio]>=1.4.49->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (2.0.32)\n", 232 | "Requirement already satisfied: aiohttp<4.0.0,>=3.8.6 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (3.10.3)\n", 233 | "Requirement already satisfied: dataclasses-json in /usr/local/python/3.10.13/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (0.6.7)\n", 234 | "Requirement already satisfied: deprecated>=1.2.9.3 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (1.2.14)\n", 235 | "Requirement already satisfied: dirtyjson<2.0.0,>=1.0.8 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (1.0.8)\n", 236 | "Requirement already satisfied: fsspec>=2023.5.0 in /home/codespace/.local/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (2024.6.1)\n", 237 | "Requirement already satisfied: httpx in /home/codespace/.local/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (0.27.0)\n", 238 | "Requirement already satisfied: nest-asyncio<2.0.0,>=1.5.8 in /home/codespace/.local/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (1.6.0)\n", 239 | "Requirement 
already satisfied: networkx>=3.0 in /home/codespace/.local/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (3.3)\n", 240 | "Requirement already satisfied: nltk<4.0.0,>=3.8.1 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (3.8.2)\n", 241 | "Requirement already satisfied: numpy<2.0.0 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (1.26.4)\n", 242 | "Requirement already satisfied: openai>=1.1.0 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (1.40.6)\n", 243 | "Requirement already satisfied: pandas in /home/codespace/.local/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (2.2.2)\n", 244 | "Requirement already satisfied: pillow>=9.0.0 in /home/codespace/.local/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (10.4.0)\n", 245 | "Requirement already satisfied: requests>=2.31.0 in /home/codespace/.local/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (2.32.3)\n", 246 | "Requirement already satisfied: tenacity!=8.4.0,<9.0.0,>=8.2.0 in /home/codespace/.local/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (8.5.0)\n", 247 | "Requirement already satisfied: tiktoken>=0.3.3 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (0.7.0)\n", 248 | "Requirement already satisfied: tqdm<5.0.0,>=4.66.1 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (4.66.5)\n", 249 | "Requirement already satisfied: typing-extensions>=4.5.0 in /home/codespace/.local/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (4.12.2)\n", 250 | "Requirement already satisfied: typing-inspect>=0.8.0 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (0.9.0)\n", 251 | "Requirement already satisfied: wrapt in /usr/local/python/3.10.13/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (1.16.0)\n", 252 | "Requirement already satisfied: aiohappyeyeballs>=2.3.0 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (2.3.5)\n", 253 | "Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (1.3.1)\n", 254 | "Requirement already satisfied: attrs>=17.3.0 in /home/codespace/.local/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (23.2.0)\n", 255 | "Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (1.4.1)\n", 256 | "Requirement already satisfied: multidict<7.0,>=4.5 in 
/usr/local/python/3.10.13/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (6.0.5)\n", 257 | "Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (1.9.4)\n", 258 | "Requirement already satisfied: async-timeout<5.0,>=4.0 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (4.0.3)\n", 259 | "Requirement already satisfied: click in /usr/local/python/3.10.13/lib/python3.10/site-packages (from nltk<4.0.0,>=3.8.1->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (8.1.7)\n", 260 | "Requirement already satisfied: joblib in /home/codespace/.local/lib/python3.10/site-packages (from nltk<4.0.0,>=3.8.1->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (1.4.2)\n", 261 | "Requirement already satisfied: regex>=2021.8.3 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from nltk<4.0.0,>=3.8.1->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (2024.7.24)\n", 262 | "Requirement already satisfied: anyio<5,>=3.5.0 in /home/codespace/.local/lib/python3.10/site-packages (from openai>=1.1.0->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (4.4.0)\n", 263 | "Requirement already satisfied: distro<2,>=1.7.0 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from openai>=1.1.0->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (1.9.0)\n", 264 | "Requirement already satisfied: jiter<1,>=0.4.0 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from openai>=1.1.0->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (0.5.0)\n", 265 | "Requirement already satisfied: pydantic<3,>=1.9.0 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from openai>=1.1.0->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (2.8.2)\n", 266 | "Requirement already satisfied: sniffio in /home/codespace/.local/lib/python3.10/site-packages (from openai>=1.1.0->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (1.3.1)\n", 267 | "Requirement already satisfied: certifi in /home/codespace/.local/lib/python3.10/site-packages (from httpx->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (2024.7.4)\n", 268 | "Requirement already satisfied: httpcore==1.* in /home/codespace/.local/lib/python3.10/site-packages (from httpx->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (1.0.5)\n", 269 | "Requirement already satisfied: idna in /home/codespace/.local/lib/python3.10/site-packages (from httpx->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (3.7)\n", 270 | "Requirement already satisfied: h11<0.15,>=0.13 in /home/codespace/.local/lib/python3.10/site-packages (from httpcore==1.*->httpx->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (0.14.0)\n", 271 | "Requirement already satisfied: charset-normalizer<4,>=2 in /home/codespace/.local/lib/python3.10/site-packages (from requests>=2.31.0->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (3.3.2)\n", 272 | "Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from requests>=2.31.0->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (2.0.7)\n", 273 | 
"Requirement already satisfied: greenlet!=0.4.17 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from SQLAlchemy>=1.4.49->SQLAlchemy[asyncio]>=1.4.49->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (3.0.3)\n", 274 | "Requirement already satisfied: mypy-extensions>=0.3.0 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from typing-inspect>=0.8.0->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (1.0.0)\n", 275 | "Requirement already satisfied: marshmallow<4.0.0,>=3.18.0 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from dataclasses-json->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (3.21.3)\n", 276 | "Requirement already satisfied: python-dateutil>=2.8.2 in /home/codespace/.local/lib/python3.10/site-packages (from pandas->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (2.9.0.post0)\n", 277 | "Requirement already satisfied: pytz>=2020.1 in /home/codespace/.local/lib/python3.10/site-packages (from pandas->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (2024.1)\n", 278 | "Requirement already satisfied: tzdata>=2022.7 in /home/codespace/.local/lib/python3.10/site-packages (from pandas->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (2024.1)\n", 279 | "Requirement already satisfied: exceptiongroup>=1.0.2 in /home/codespace/.local/lib/python3.10/site-packages (from anyio<5,>=3.5.0->openai>=1.1.0->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (1.2.1)\n", 280 | "Requirement already satisfied: packaging>=17.0 in /home/codespace/.local/lib/python3.10/site-packages (from marshmallow<4.0.0,>=3.18.0->dataclasses-json->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (24.1)\n", 281 | "Requirement already satisfied: annotated-types>=0.4.0 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from pydantic<3,>=1.9.0->openai>=1.1.0->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (0.7.0)\n", 282 | "Requirement already satisfied: pydantic-core==2.20.1 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from pydantic<3,>=1.9.0->openai>=1.1.0->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (2.20.1)\n", 283 | "Requirement already satisfied: six>=1.5 in /home/codespace/.local/lib/python3.10/site-packages (from python-dateutil>=2.8.2->pandas->llama-index-core<0.11.0,>=0.10.57->llama-index-llms-openai==0.1.27) (1.16.0)\n", 284 | "Downloading llama_index_llms_openai-0.1.27-py3-none-any.whl (11 kB)\n", 285 | "Installing collected packages: llama-index-llms-openai\n", 286 | " Attempting uninstall: llama-index-llms-openai\n", 287 | " Found existing installation: llama-index-llms-openai 0.1.29\n", 288 | " Uninstalling llama-index-llms-openai-0.1.29:\n", 289 | " Successfully uninstalled llama-index-llms-openai-0.1.29\n", 290 | "Successfully installed llama-index-llms-openai-0.1.27\n", 291 | "\n", 292 | "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m24.1.2\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m24.2\u001b[0m\n", 293 | "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpython3 -m pip install --upgrade pip\u001b[0m\n", 294 | "Requirement already satisfied: llama-index-embeddings-openai==0.1.11 in /usr/local/python/3.10.13/lib/python3.10/site-packages (0.1.11)\n", 295 | "Requirement already 
satisfied: llama-index-core<0.11.0,>=0.10.1 in /usr/local/python/3.10.13/lib/python3.10/site-packages (from llama-index-embeddings-openai==0.1.11) (0.10.59)\n", 353 | "Collecting llama-index-llms-azure-openai==0.1.10\n", 354 | "  Downloading llama_index_llms_azure_openai-0.1.10-py3-none-any.whl.metadata (787 bytes)\n", 440 | "Installing collected packages: PyJWT, portalocker, cryptography, azure-core, msal, msal-extensions, azure-identity, llama-index-llms-azure-openai\n", 441 | "Successfully installed PyJWT-2.9.0 azure-core-1.30.2 azure-identity-1.17.1 cryptography-43.0.0 llama-index-llms-azure-openai-0.1.10 msal-1.30.0 msal-extensions-1.2.0 portalocker-2.10.1\n", 445 | "Collecting llama-index-embeddings-azure-openai==0.1.11\n", 446 | "  Downloading llama_index_embeddings_azure_openai-0.1.11-py3-none-any.whl.metadata (804 bytes)\n", 515 | "Installing collected packages: llama-index-embeddings-azure-openai\n", 516 | "Successfully installed llama-index-embeddings-azure-openai-0.1.11\n" 520 | ] 521 | } 522 | ], 523 | "source": [ 524 | "#Install prerequisite packages\n", 525 | "!pip install python-dotenv==1.0.0\n", 526 | "\n", 527 | "!pip install llama-index==0.10.59\n", 528 | "!pip install llama-index-llms-openai==0.1.27\n", 529 | "!pip install llama-index-embeddings-openai==0.1.11\n", 530 | "!pip install llama-index-llms-azure-openai==0.1.10\n", 531 | "!pip install llama-index-embeddings-azure-openai==0.1.11\n" 532 | ] 533 | }, 534 | { 535 | "cell_type": "code", 536 | "execution_count": 2, 537 | "id": "d3dd014f-d175-4723-ae20-9292dc9cb633", 538 | "metadata": {}, 539 | "outputs": [], 540 | "source": [ 541 | "#Setup Azure Open AI connection\n", 542 | "from llama_index.llms.azure_openai import AzureOpenAI\n", 543 | "from llama_index.embeddings.azure_openai import AzureOpenAIEmbedding\n", 544 | "\n", 545 | "from llama_index.core 
import Settings\n", 546 | "import os\n", 547 | "import nest_asyncio\n", 548 | "\n", 549 | "nest_asyncio.apply()\n", 550 | "\n", 551 | "#API info. Replace with your own keys and end points\n", 552 | "api_key = \"e638bb3ebcb84b79aa5b6f93d6e6503a\"\n", 553 | "azure_endpoint = \"https://agentic-ai-course-account.openai.azure.com/\"\n", 554 | "api_version = \"2024-05-01-preview\"\n", 555 | "\n", 556 | "#Setup the LLM\n", 557 | "Settings.llm=AzureOpenAI(\n", 558 | " model=\"gpt-35-turbo\",\n", 559 | " deployment_name=\"agentai-gpt35\",\n", 560 | " api_key=api_key,\n", 561 | " azure_endpoint=azure_endpoint,\n", 562 | " api_version=api_version,\n", 563 | ")\n", 564 | "\n", 565 | "#Setup the embedding model RAG\n", 566 | "Settings.embed_model= AzureOpenAIEmbedding(\n", 567 | " model=\"text-embedding-ada-002\",\n", 568 | " deployment_name=\"agentai-embedding\",\n", 569 | " api_key=api_key,\n", 570 | " azure_endpoint=azure_endpoint,\n", 571 | " api_version=api_version,\n", 572 | ")\n" 573 | ] 574 | }, 575 | { 576 | "cell_type": "code", 577 | "execution_count": 3, 578 | "id": "07ad3b12-10f1-4447-b63c-10f903730878", 579 | "metadata": {}, 580 | "outputs": [], 581 | "source": [ 582 | "#Create indexes for vector search\n", 583 | "from llama_index.core import SimpleDirectoryReader\n", 584 | "from llama_index.core.node_parser import SentenceSplitter\n", 585 | "from llama_index.core import VectorStoreIndex\n", 586 | "\n", 587 | "splitter=SentenceSplitter(chunk_size=1024)\n", 588 | "\n", 589 | "#-------------------------------------------------------------------\n", 590 | "#Setup Aeroflow document index\n", 591 | "#-------------------------------------------------------------------\n", 592 | "aeroflow_documents=SimpleDirectoryReader(\n", 593 | " input_files=[\"AeroFlow_Specification_Document.pdf\"])\\\n", 594 | " .load_data()\n", 595 | "\n", 596 | "#Read documents into nodes\n", 597 | "aeroflow_nodes=splitter.get_nodes_from_documents(aeroflow_documents)\n", 598 | "#Create a vector Store\n", 599 | "aeroflow_index=VectorStoreIndex(aeroflow_nodes)\n", 600 | "#Create a query engine\n", 601 | "aeroflow_query_engine = aeroflow_index.as_query_engine()\n", 602 | "\n", 603 | "#-------------------------------------------------------------------\n", 604 | "#Setup EchoSprint document index\n", 605 | "#-------------------------------------------------------------------\n", 606 | "ecosprint_documents=SimpleDirectoryReader(\n", 607 | " input_files=[\"EcoSprint_Specification_Document.pdf\"])\\\n", 608 | " .load_data()\n", 609 | "#Read documents into nodes\n", 610 | "ecosprint_nodes=splitter.get_nodes_from_documents(ecosprint_documents)\n", 611 | "#Create a vector Store\n", 612 | "ecosprint_index=VectorStoreIndex(ecosprint_nodes)\n", 613 | "#Create a query engine\n", 614 | "ecosprint_query_engine = ecosprint_index.as_query_engine()\n" 615 | ] 616 | }, 617 | { 618 | "cell_type": "markdown", 619 | "id": "f75954cc-cf76-4a92-ac92-580b4393b885", 620 | "metadata": {}, 621 | "source": [ 622 | "### 03.04. 
Setup the Agentic Router" 623 | ] 624 | }, 625 | { 626 | "cell_type": "code", 627 | "execution_count": 7, 628 | "id": "767a0751-5238-4df8-816f-8436a34f96bf", 629 | "metadata": {}, 630 | "outputs": [], 631 | "source": [ 632 | "from llama_index.core.tools import QueryEngineTool\n", 633 | "from llama_index.core.query_engine.router_query_engine import RouterQueryEngine\n", 634 | "from llama_index.core.selectors import LLMSingleSelector\n", 635 | "\n", 636 | "#Create a query engine Tool for NoSQL\n", 637 | "aeroflow_tool = QueryEngineTool.from_defaults(\n", 638 | " query_engine=aeroflow_query_engine,\n", 639 | " name=\"Aeroflow specifications\",\n", 640 | " description=(\n", 641 | " \"Contains information about Aeroflow : Design, features, technology, maintenance, warranty\"\n", 642 | " ),\n", 643 | ")\n", 644 | "\n", 645 | "#Create a query engine Tool for NLP\n", 646 | "ecosprint_tool = QueryEngineTool.from_defaults(\n", 647 | " query_engine=ecosprint_query_engine,\n", 648 | " name=\"EcoSprint specifications\",\n", 649 | " description=(\n", 650 | " \"Contains information about EcoSprint : Design, features, technology, maintenance, warranty\"\n", 651 | " ),\n", 652 | ")\n", 653 | "\n", 654 | "#Create a Router Agent. Provide the Tools to the Agent\n", 655 | "router_agent=RouterQueryEngine(\n", 656 | " selector=LLMSingleSelector.from_defaults(),\n", 657 | " query_engine_tools=[\n", 658 | " aeroflow_tool,\n", 659 | " ecosprint_tool,\n", 660 | " ],\n", 661 | " verbose=True\n", 662 | ")" 663 | ] 664 | }, 665 | { 666 | "cell_type": "markdown", 667 | "id": "02d4e35b-73ad-4692-9218-d751f6baa9c5", 668 | "metadata": {}, 669 | "source": [ 670 | "### 03.05. Route with Agentic AI" 671 | ] 672 | }, 673 | { 674 | "cell_type": "code", 675 | "execution_count": 8, 676 | "id": "429aea8c-f8ae-4a52-8a50-0da4c082fb68", 677 | "metadata": {}, 678 | "outputs": [ 679 | { 680 | "name": "stdout", 681 | "output_type": "stream", 682 | "text": [ 683 | "\u001b[1;3;38;5;200mSelecting query engine 0: Choice 1 contains information about Aeroflow, which is the product that is relevant to the question..\n", 684 | "\u001b[0m\n", 685 | "Response: The AeroFlow is available in colors such as Coastal Blue, Sunset Orange, and Pearl White.\n" 686 | ] 687 | } 688 | ], 689 | "source": [ 690 | "#Ask a question about NoSQL\n", 691 | "response = router_agent.query(\"What colors are available for AeroFlow?\")\n", 692 | "print(\"\\nResponse: \",str(response))" 693 | ] 694 | }, 695 | { 696 | "cell_type": "code", 697 | "execution_count": 9, 698 | "id": "a0cd1078-a27f-4e3e-b3f6-b98a0d66027e", 699 | "metadata": {}, 700 | "outputs": [ 701 | { 702 | "name": "stdout", 703 | "output_type": "stream", 704 | "text": [ 705 | "\u001b[1;3;38;5;200mSelecting query engine 1: The question specifically asks about EcoSprint, and choice 2 contains information about EcoSprint..\n", 706 | "\u001b[0m\n", 707 | "Response: The EcoSprint is available in colors like Midnight Black, Ocean Blue, and Pearl White.\n" 708 | ] 709 | } 710 | ], 711 | "source": [ 712 | "response = router_agent.query(\"What colors are available for EcoSprint?\")\n", 713 | "print(\"\\nResponse: \",str(response))" 714 | ] 715 | }, 716 | { 717 | "cell_type": "code", 718 | "execution_count": null, 719 | "id": "1c2f3f20-8738-4a7d-90d3-5db595298dac", 720 | "metadata": {}, 721 | "outputs": [], 722 | "source": [] 723 | } 724 | ], 725 | "metadata": { 726 | "kernelspec": { 727 | "display_name": "Python 3 (ipykernel)", 728 | "language": "python", 729 | "name": "python3" 730 | }, 731 | "language_info": { 732 
| "codemirror_mode": { 733 | "name": "ipython", 734 | "version": 3 735 | }, 736 | "file_extension": ".py", 737 | "mimetype": "text/x-python", 738 | "name": "python", 739 | "nbconvert_exporter": "python", 740 | "pygments_lexer": "ipython3", 741 | "version": "3.10.13" 742 | } 743 | }, 744 | "nbformat": 4, 745 | "nbformat_minor": 5 746 | } 747 | -------------------------------------------------------------------------------- /code_06_XX Implementing Agentic AI for Customer Service.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "daf5512f-c31c-48a1-8149-b061b34e5e9b", 6 | "metadata": {}, 7 | "source": [ 8 | "### 06.03. Setup functions and indexes" 9 | ] 10 | }, 11 | { 12 | "cell_type": "code", 13 | "execution_count": 1, 14 | "id": "50a56bd8-0e28-4f07-8981-ed1a31ad0d63", 15 | "metadata": {}, 16 | "outputs": [], 17 | "source": [ 18 | "#Setup Azure Open AI connection\n", 19 | "from llama_index.llms.azure_openai import AzureOpenAI\n", 20 | "from llama_index.embeddings.azure_openai import AzureOpenAIEmbedding\n", 21 | "\n", 22 | "from llama_index.core import Settings\n", 23 | "import os\n", 24 | "import nest_asyncio\n", 25 | "\n", 26 | "nest_asyncio.apply()\n", 27 | "\n", 28 | "api_key = \"e638bb3ebcb84b79aa5b6f93d6e6503a\"\n", 29 | "azure_endpoint = \"https://agentic-ai-course-account.openai.azure.com/\"\n", 30 | "api_version = \"2024-05-01-preview\"\n", 31 | "\n", 32 | "#Function calling support only available in GPT-4\n", 33 | "Settings.llm=AzureOpenAI(\n", 34 | " model=\"gpt-4\",\n", 35 | " deployment_name=\"agentai-gpt4\",\n", 36 | " api_key=api_key,\n", 37 | " azure_endpoint=azure_endpoint,\n", 38 | " api_version=api_version,\n", 39 | ")\n", 40 | "\n", 41 | "Settings.embed_model= AzureOpenAIEmbedding(\n", 42 | " model=\"text-embedding-ada-002\",\n", 43 | " deployment_name=\"agentai-embedding\",\n", 44 | " api_key=api_key,\n", 45 | " azure_endpoint=azure_endpoint,\n", 46 | " api_version=api_version,\n", 47 | ")\n" 48 | ] 49 | }, 50 | { 51 | "cell_type": "code", 52 | "execution_count": 2, 53 | "id": "e3c92609-3d56-46c5-8724-3ce96ad205b0", 54 | "metadata": {}, 55 | "outputs": [], 56 | "source": [ 57 | "from typing import List\n", 58 | "from llama_index.core import SimpleDirectoryReader\n", 59 | "from llama_index.core.node_parser import SentenceSplitter\n", 60 | "from llama_index.core import VectorStoreIndex\n", 61 | "from llama_index.core.tools import QueryEngineTool\n", 62 | "\n", 63 | "#-------------------------------------------------------------\n", 64 | "# Tool 1 : Function that returns the list of items in an order\n", 65 | "#-------------------------------------------------------------\n", 66 | "def get_order_items(order_id: int) -> List[str] :\n", 67 | " \"\"\"Given an order Id, this function returns the \n", 68 | " list of items purchased for that order\"\"\"\n", 69 | " \n", 70 | " order_items = {\n", 71 | " 1001: [\"Laptop\",\"Mouse\"],\n", 72 | " 1002: [\"Keyboard\",\"HDMI Cable\"],\n", 73 | " 1003: [\"Laptop\",\"Keyboard\"]\n", 74 | " }\n", 75 | " if order_id in order_items.keys():\n", 76 | " return order_items[order_id]\n", 77 | " else:\n", 78 | " return []\n", 79 | "\n", 80 | "#-------------------------------------------------------------\n", 81 | "# Tool 2 : Function that returns the delivery date for an order\n", 82 | "#-------------------------------------------------------------\n", 83 | "def get_delivery_date(order_id: int) -> str:\n", 84 | " \"\"\"Given an order Id, this 
function returns the \n", 85 | "    delivery date for that order\"\"\"\n", 86 | "\n", 87 | "    delivery_dates = {\n", 88 | "        1001: \"10-Jun\",\n", 89 | "        1002: \"12-Jun\",\n", 90 | "        1003: \"08-Jun\" \n", 91 | "    }\n", 92 | "    if order_id in delivery_dates.keys():\n", 93 | "        return delivery_dates[order_id]\n", 94 | "    else:\n", 95 | "        return \"\"\n", 96 | "\n", 97 | "#----------------------------------------------------------------\n", 98 | "# Tool 3 : Function that returns maximum return days for an item\n", 99 | "#----------------------------------------------------------------\n", 100 | "def get_item_return_days(item: str) -> int:\n", 101 | "    \"\"\"Given an item, this function returns the return support\n", 102 | "    for that item. The return support is in number of days\"\"\"\n", 103 | "    \n", 104 | "    item_returns = {\n", 105 | "        \"Laptop\" : 30,\n", 106 | "        \"Mouse\" : 15,\n", 107 | "        \"Keyboard\" : 15,\n", 108 | "        \"HDMI Cable\" : 5\n", 109 | "    }\n", 110 | "    if item in item_returns.keys():\n", 111 | "        return item_returns[item]\n", 112 | "    else:\n", 113 | "        #Default\n", 114 | "        return 45\n", 115 | "\n", 116 | "#-------------------------------------------------------------\n", 117 | "# Tool 4 : Vector DB that contains customer support contacts\n", 118 | "#-------------------------------------------------------------\n", 119 | "#Setup vector index for customer support policies and contacts\n", 120 | "support_docs=SimpleDirectoryReader(input_files=[\"Customer Service.pdf\"]).load_data()\n", 121 | "\n", 122 | "splitter=SentenceSplitter(chunk_size=1024)\n", 123 | "support_nodes=splitter.get_nodes_from_documents(support_docs)\n", 124 | "support_index=VectorStoreIndex(support_nodes)\n", 125 | "support_query_engine = support_index.as_query_engine()\n" 126 | ] 127 | }, 128 | { 129 | "cell_type": "markdown", 130 | "id": "9042d5a7-5d9c-4542-a9cb-ebc51ac665a4", 131 | "metadata": {}, 132 | "source": [ 133 | "### 06.04. 
Setup the Customer Service AI Agent" 134 | ] 135 | }, 136 | { 137 | "cell_type": "code", 138 | "execution_count": 3, 139 | "id": "b8bb1ca3-1d86-4dd2-9692-bcb8a7224a51", 140 | "metadata": {}, 141 | "outputs": [], 142 | "source": [ 143 | "from llama_index.core.tools import FunctionTool\n", 144 | "\n", 145 | "#Create tools for the 3 functions and 1 index\n", 146 | "order_item_tool = FunctionTool.from_defaults(fn=get_order_items)\n", 147 | "delivery_date_tool = FunctionTool.from_defaults(fn=get_delivery_date)\n", 148 | "return_policy_tool = FunctionTool.from_defaults(fn=get_item_return_days)\n", 149 | "\n", 150 | "support_tool = QueryEngineTool.from_defaults(\n", 151 | " query_engine=support_query_engine,\n", 152 | " description=(\n", 153 | " \"Customer support policies and contact information\"\n", 154 | " ),\n", 155 | ")" 156 | ] 157 | }, 158 | { 159 | "cell_type": "code", 160 | "execution_count": 4, 161 | "id": "e34e041e-a34a-4201-836e-fe2a18475b9e", 162 | "metadata": {}, 163 | "outputs": [], 164 | "source": [ 165 | "from llama_index.core.agent import FunctionCallingAgentWorker\n", 166 | "from llama_index.core.agent import AgentRunner\n", 167 | "from llama_index.llms.openai import OpenAI\n", 168 | "\n", 169 | "#Setup the Agent worker in LlamaIndex with all the Tools\n", 170 | "#This is the tool executor process\n", 171 | "agent_worker = FunctionCallingAgentWorker.from_tools(\n", 172 | " [order_item_tool, \n", 173 | " delivery_date_tool,\n", 174 | " return_policy_tool,\n", 175 | " support_tool\n", 176 | " ], \n", 177 | " llm=Settings.llm, \n", 178 | " verbose=True\n", 179 | ")\n", 180 | "#Create an Agent Orchestrator with LlamaIndex\n", 181 | "agent = AgentRunner(agent_worker)\n" 182 | ] 183 | }, 184 | { 185 | "cell_type": "markdown", 186 | "id": "e3a85eb9-4b48-43c0-8d41-243b63a029aa", 187 | "metadata": {}, 188 | "source": [ 189 | "### 06.05. 
Using the customer service Agent" 190 | ] 191 | }, 192 | { 193 | "cell_type": "code", 194 | "execution_count": 5, 195 | "id": "10ebeacf-30bd-4157-98da-52ef5599358f", 196 | "metadata": {}, 197 | "outputs": [ 198 | { 199 | "name": "stdout", 200 | "output_type": "stream", 201 | "text": [ 202 | "Added user message to memory: What is the return policy for order number 1001\n", 203 | "=== Calling Function ===\n", 204 | "Calling function: get_order_items with args: {\"order_id\": 1001}\n", 205 | "=== Function Output ===\n", 206 | "['Laptop', 'Mouse']\n", 207 | "=== Calling Function ===\n", 208 | "Calling function: get_item_return_days with args: {\"item\": \"Laptop\"}\n", 209 | "=== Function Output ===\n", 210 | "30\n", 211 | "=== Calling Function ===\n", 212 | "Calling function: get_item_return_days with args: {\"item\": \"Mouse\"}\n", 213 | "=== Function Output ===\n", 214 | "15\n", 215 | "=== LLM Response ===\n", 216 | "The return policy for order number 1001 is as follows:\n", 217 | "- Laptop: 30 days return policy\n", 218 | "- Mouse: 15 days return policy\n", 219 | "\n", 220 | " Final output : \n", 221 | " The return policy for order number 1001 is as follows:\n", 222 | "- Laptop: 30 days return policy\n", 223 | "- Mouse: 15 days return policy\n" 224 | ] 225 | } 226 | ], 227 | "source": [ 228 | "#Get return policy for an order\n", 229 | "response = agent.query(\n", 230 | " \"What is the return policy for order number 1001\"\n", 231 | ")\n", 232 | "\n", 233 | "print(\"\\n Final output : \\n\", response)" 234 | ] 235 | }, 236 | { 237 | "cell_type": "code", 238 | "execution_count": 6, 239 | "id": "bd65349a-9aa7-414f-88dd-b6ac2ba61fe3", 240 | "metadata": {}, 241 | "outputs": [ 242 | { 243 | "name": "stdout", 244 | "output_type": "stream", 245 | "text": [ 246 | "Added user message to memory: When is the delivery date and items shipped for order 1003 and how can I contact customer support?\n", 247 | "=== Calling Function ===\n", 248 | "Calling function: get_order_items with args: {\"order_id\": 1003}\n", 249 | "=== Function Output ===\n", 250 | "['Laptop', 'Keyboard']\n", 251 | "=== Calling Function ===\n", 252 | "Calling function: get_delivery_date with args: {\"order_id\": 1003}\n", 253 | "=== Function Output ===\n", 254 | "08-Jun\n", 255 | "=== Calling Function ===\n", 256 | "Calling function: query_engine_tool with args: {\"input\": \"customer support contact\"}\n", 257 | "=== Function Output ===\n", 258 | "Customers can contact customer service by calling 1-987-654-3210 or by emailing support@company.com.\n", 259 | "=== LLM Response ===\n", 260 | "For order 1003, the items shipped are a Laptop and a Keyboard. The delivery date is scheduled for June 8th.\n", 261 | "\n", 262 | "If you need to contact customer support, you can call them at 1-987-654-3210 or email them at support@company.com.\n", 263 | "\n", 264 | " Final output : \n", 265 | " For order 1003, the items shipped are a Laptop and a Keyboard. 
The delivery date is scheduled for June 8th.\n", 266 | "\n", 267 | "If you need to contact customer support, you can call them at 1-987-654-3210 or email them at support@company.com.\n" 268 | ] 269 | } 270 | ], 271 | "source": [ 272 | "# Three part question\n", 273 | "response = agent.query(\n", 274 | " \"When is the delivery date and items shipped for order 1003 and how can I contact customer support?\"\n", 275 | ")\n", 276 | "\n", 277 | "print(\"\\n Final output : \\n\", response)" 278 | ] 279 | }, 280 | { 281 | "cell_type": "code", 282 | "execution_count": 7, 283 | "id": "d7aea13e-12a0-464e-94ff-6cbefa9141b4", 284 | "metadata": {}, 285 | "outputs": [ 286 | { 287 | "name": "stdout", 288 | "output_type": "stream", 289 | "text": [ 290 | "Added user message to memory: What is the return policy for order number 1004\n", 291 | "=== Calling Function ===\n", 292 | "Calling function: get_order_items with args: {\"order_id\": 1004}\n", 293 | "=== Function Output ===\n", 294 | "[]\n", 295 | "=== LLM Response ===\n", 296 | "It seems that there are no items associated with order number 1004. Therefore, there is no return policy applicable for this order. If you believe this is an error or have any other inquiries, please let me know how I can assist you further!\n", 297 | "\n", 298 | " Final output : \n", 299 | " It seems that there are no items associated with order number 1004. Therefore, there is no return policy applicable for this order. If you believe this is an error or have any other inquiries, please let me know how I can assist you further!\n" 300 | ] 301 | } 302 | ], 303 | "source": [ 304 | "#Question about an invalid order number\n", 305 | "response = agent.query(\n", 306 | " \"What is the return policy for order number 1004\"\n", 307 | ")\n", 308 | "\n", 309 | "print(\"\\n Final output : \\n\", response)" 310 | ] 311 | }, 312 | { 313 | "cell_type": "code", 314 | "execution_count": null, 315 | "id": "b1423b83-69d1-4598-b2b5-447d01dca87a", 316 | "metadata": {}, 317 | "outputs": [], 318 | "source": [] 319 | } 320 | ], 321 | "metadata": { 322 | "kernelspec": { 323 | "display_name": "Python 3 (ipykernel)", 324 | "language": "python", 325 | "name": "python3" 326 | }, 327 | "language_info": { 328 | "codemirror_mode": { 329 | "name": "ipython", 330 | "version": 3 331 | }, 332 | "file_extension": ".py", 333 | "mimetype": "text/x-python", 334 | "name": "python", 335 | "nbconvert_exporter": "python", 336 | "pygments_lexer": "ipython3", 337 | "version": "3.10.13" 338 | } 339 | }, 340 | "nbformat": 4, 341 | "nbformat_minor": 5 342 | } 343 | -------------------------------------------------------------------------------- /notebooks/testfile.txt: -------------------------------------------------------------------------------- 1 | test -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | # Specify Python package requirements for your project here (e.g., Mako==1.1.1). If your project doesn't require these, you can leave this file unchanged or delete it. 2 | --------------------------------------------------------------------------------