├── .gitignore
├── LICENSE
├── README.md
├── example.env
├── no-parser.py
├── parser-ollama.py
├── parser-openai.py
├── parser-qdrant-groq.py
└── requirements.txt
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Data
10 | data/
11 |
12 |
13 | # Distribution / packaging
14 | .Python
15 | build/
16 | develop-eggs/
17 | dist/
18 | downloads/
19 | eggs/
20 | .eggs/
21 | lib/
22 | lib64/
23 | parts/
24 | sdist/
25 | var/
26 | wheels/
27 | share/python-wheels/
28 | *.egg-info/
29 | .installed.cfg
30 | *.egg
31 | MANIFEST
32 |
33 | # PyInstaller
34 | # Usually these files are written by a python script from a template
35 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
36 | *.manifest
37 | *.spec
38 |
39 | # Installer logs
40 | pip-log.txt
41 | pip-delete-this-directory.txt
42 |
43 | # Unit test / coverage reports
44 | htmlcov/
45 | .tox/
46 | .nox/
47 | .coverage
48 | .coverage.*
49 | .cache
50 | nosetests.xml
51 | coverage.xml
52 | *.cover
53 | *.py,cover
54 | .hypothesis/
55 | .pytest_cache/
56 | cover/
57 |
58 | # Translations
59 | *.mo
60 | *.pot
61 |
62 | # Django stuff:
63 | *.log
64 | local_settings.py
65 | db.sqlite3
66 | db.sqlite3-journal
67 |
68 | # Flask stuff:
69 | instance/
70 | .webassets-cache
71 |
72 | # Scrapy stuff:
73 | .scrapy
74 |
75 | # Sphinx documentation
76 | docs/_build/
77 |
78 | # PyBuilder
79 | .pybuilder/
80 | target/
81 |
82 | # Jupyter Notebook
83 | .ipynb_checkpoints
84 |
85 | # IPython
86 | profile_default/
87 | ipython_config.py
88 |
89 | # pyenv
90 | # For a library or package, you might want to ignore these files since the code is
91 | # intended to run in multiple environments; otherwise, check them in:
92 | # .python-version
93 |
94 | # pipenv
95 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
96 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
97 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
98 | # install all needed dependencies.
99 | #Pipfile.lock
100 |
101 | # poetry
102 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
103 | # This is especially recommended for binary packages to ensure reproducibility, and is more
104 | # commonly ignored for libraries.
105 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
106 | #poetry.lock
107 |
108 | # pdm
109 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
110 | #pdm.lock
111 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
112 | # in version control.
113 | # https://pdm.fming.dev/#use-with-ide
114 | .pdm.toml
115 |
116 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
117 | __pypackages__/
118 |
119 | # Celery stuff
120 | celerybeat-schedule
121 | celerybeat.pid
122 |
123 | # SageMath parsed files
124 | *.sage.py
125 |
126 | # Environments
127 | .env
128 | .venv
129 | env/
130 | venv/
131 | ENV/
132 | env.bak/
133 | venv.bak/
134 |
135 | # Spyder project settings
136 | .spyderproject
137 | .spyproject
138 |
139 | # Rope project settings
140 | .ropeproject
141 |
142 | # mkdocs documentation
143 | /site
144 |
145 | # mypy
146 | .mypy_cache/
147 | .dmypy.json
148 | dmypy.json
149 |
150 | # Pyre type checker
151 | .pyre/
152 |
153 | # pytype static type analyzer
154 | .pytype/
155 |
156 | # Cython debug symbols
157 | cython_debug/
158 |
159 | # PyCharm
160 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
161 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
162 | # and can be added to the global gitignore or merged into this file. For a more nuclear
163 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
164 | #.idea/
165 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # llamaparser-example
2 | Simple example to showcase how to use llamaparser to parse PDF files
3 |
4 | ## Videos covering these topics
5 | ### [Llamaparse LlamaIndex](https://youtu.be/wRMnHbiz5ck?si=iQZV7N6-trcuBm8M)
6 | ### [Llamaparse Qdrant Groq](https://youtu.be/w7Ap6gZFXl0?si=05AUGmRp1quTdeZl)
7 | ### [RAG with LlamaParse from LlamaIndex and LangChain](https://youtu.be/f9hvrqVvZl0?si=ezGdVXwzmcjZEtKj)
8 |
9 | ### Links shown in video
10 | - [LlamaCloud](https://cloud.llamaindex.ai/)
11 | - [Qdrant Cloud](https://cloud.qdrant.io/)
12 | - [Groq Cloud](https://console.groq.com/)
13 |
14 | ### create virtualenv
15 | ```
16 | python3 -m venv .venv && source .venv/bin/activate
17 | ```
18 |
19 | ### Install packages
20 | ```
21 | pip install -r requirements.txt
22 | ```
23 |
24 | ### Environment variables
25 | All environment variables go in `.env` (cp `example.env` to `.env` and fill in the required values)
26 |
27 | ### Run the python files (following the video to run step by step is recommended)
28 | ```
29 | python3 parser-qdrant-groq.py
30 | ```
31 |
32 | ## Additional helper documents
33 | - [LlamaIndex blogpost about Llamaparse](https://www.llamaindex.ai/blog/launching-the-first-genai-native-document-parsing-platform)
34 | - [Advanced demo with Reranker](https://github.com/run-llama/llama_parse/blob/main/examples/demo_advanced.ipynb)
35 | - [Parsing instructions Llamaparse](https://colab.research.google.com/drive/1dO2cwDCXjj9pS9yQDZ2vjg-0b5sRXQYo#scrollTo=dEX7Mv9V0UvM)
36 | - [LlamaParse Documentation](https://docs.cloud.llamaindex.ai/llamaparse/getting_started)
37 |
--------------------------------------------------------------------------------
/example.env:
--------------------------------------------------------------------------------
1 | LLAMA_CLOUD_API_KEY="llx-****"
2 | QDRANT_API_KEY="OLb-***"
3 | QDRANT_URL="qdrant_url"
4 | GROQ_API_KEY="groq_api_key"
5 | OPENAI_API_KEY="sk-****"
--------------------------------------------------------------------------------
/no-parser.py:
--------------------------------------------------------------------------------
"""Query the local ./data directory with LlamaIndex using the default OpenAI models.

Loads every file under ./data with SimpleDirectoryReader, builds an in-memory
vector index (embeddings + LLM default to OpenAI), and answers one hard-coded
question. Requires OPENAI_API_KEY in .env.
"""
import os
from IPython.display import Markdown, display

from llama_index.core import VectorStoreIndex, SimpleDirectoryReader

# bring in our OPENAI_API_KEY
from dotenv import load_dotenv
load_dotenv()

# llama-index reads the key from the environment itself; we fetch it here only
# to fail fast with a clear message instead of a deep stack trace later on
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
if not OPENAI_API_KEY:
    raise RuntimeError("OPENAI_API_KEY is not set; add it to your .env file")

# use SimpleDirectoryReader to load our file(s)
documents = SimpleDirectoryReader("data").load_data()

# create an index from the documents
index = VectorStoreIndex.from_documents(documents)

# create a query engine for the index
query_engine = index.as_query_engine()

# query the engine and render the answer
query = "Where was the collected loaded on?"
response = query_engine.query(query)
display(Markdown(f"{response}"))
--------------------------------------------------------------------------------
/parser-ollama.py:
--------------------------------------------------------------------------------
"""Parse data/gpt4all.pdf with LlamaParse and query it with local Ollama models.

Requires LLAMA_CLOUD_API_KEY in .env and an Ollama server on localhost:11434
with the llama2 model pulled.
"""
# bring in our LLAMA_CLOUD_API_KEY
import os
from dotenv import load_dotenv
load_dotenv()

# LlamaParse drives asyncio internally; nest_asyncio lets that work inside an
# already-running event loop (e.g. a notebook)
import nest_asyncio  # noqa: E402
nest_asyncio.apply()

# bring in deps
from llama_parse import LlamaParse  # noqa: E402
from llama_index.core import VectorStoreIndex, SimpleDirectoryReader  # noqa: E402

# set up parser
llamaparse_api_key = os.getenv("LLAMA_CLOUD_API_KEY")
parser = LlamaParse(
    api_key=llamaparse_api_key,
    result_type="markdown"  # "markdown" and "text" are available
)

# use SimpleDirectoryReader to parse our file, routing .pdf through LlamaParse
file_extractor = {".pdf": parser}
documents = SimpleDirectoryReader(input_files=['data/gpt4all.pdf'], file_extractor=file_extractor).load_data()

########### Ollama Models ###############

# by default llamaindex uses OpenAI models; switch embeddings to Ollama
from llama_index.embeddings.ollama import OllamaEmbedding  # noqa: E402

embed_model = OllamaEmbedding(
    #model_name="nomic-embed-text",
    model_name="llama2",
    base_url="http://localhost:11434",
    ollama_additional_kwargs={"mirostat": 0},
)

from llama_index.llms.ollama import Ollama  # noqa: E402
llm = Ollama(model="llama2", request_timeout=30.0)

from llama_index.core import Settings  # noqa: E402

# make Ollama the default LLM and embedder for all llama-index components
Settings.llm = llm
Settings.embed_model = embed_model

# create an index from the parsed markdown
index = VectorStoreIndex.from_documents(documents)

# create a query engine for the index
query_engine = index.as_query_engine()

from IPython.display import Markdown, display  # noqa: E402

# query the engine and render the answer
query = "what is the BoolQ value of GPT4All-J 6B v1.0* model ?"
response = query_engine.query(query)
display(Markdown(f"{response}"))
--------------------------------------------------------------------------------
/parser-openai.py:
--------------------------------------------------------------------------------
"""Parse data/gpt4all.pdf with LlamaParse, then query it via LlamaIndex using
the default OpenAI embedding and LLM models.

Requires LLAMA_CLOUD_API_KEY and OPENAI_API_KEY in .env.
"""
import os

# allow LlamaParse's internal asyncio to run inside an already-running loop
import nest_asyncio  # noqa: E402
nest_asyncio.apply()

from IPython.display import Markdown, display

# load LLAMA_CLOUD_API_KEY / OPENAI_API_KEY from .env
from dotenv import load_dotenv
load_dotenv()

from llama_parse import LlamaParse
from llama_index.core import VectorStoreIndex, SimpleDirectoryReader

llamaparse_api_key = os.getenv("LLAMA_CLOUD_API_KEY")
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")

# LlamaParse client; result_type may be "markdown" or "text"
pdf_parser = LlamaParse(api_key=llamaparse_api_key, result_type="markdown")

# read the input file, routing every .pdf through LlamaParse
documents = SimpleDirectoryReader(
    input_files=['data/gpt4all.pdf'],
    file_extractor={".pdf": pdf_parser},
).load_data()

# build an in-memory vector index over the parsed markdown
index = VectorStoreIndex.from_documents(documents)

# ask a single question and render the answer
query_engine = index.as_query_engine()
query = "Where was the collected loaded on?"
response = query_engine.query(query)
display(Markdown(f"{response}"))
42 |
--------------------------------------------------------------------------------
/parser-qdrant-groq.py:
--------------------------------------------------------------------------------
# Sample inputs — run these once to fetch the documents into ./data:
# wget 'https://raw.githubusercontent.com/run-llama/llama_index/main/docs/examples/data/10q/uber_10q_march_2022.pdf' -O './data/uber_10q_march_2022.pdf'
# wget "https://meetings.wmo.int/Cg-19/PublishingImages/SitePages/FINAC-43/7%20-%20EC-77-Doc%205%20Financial%20Statements%20for%202022%20(FINAC).pptx" -O './data/presentation.pptx'
import os
# LlamaParse drives asyncio internally; nest_asyncio lets it run inside an
# already-running event loop (e.g. a notebook)
import nest_asyncio  # noqa: E402
nest_asyncio.apply()

# bring in our LLAMA_CLOUD_API_KEY
from dotenv import load_dotenv
load_dotenv()

##### LLAMAPARSE #####
from llama_parse import LlamaParse

llamaparse_api_key = os.getenv("LLAMA_CLOUD_API_KEY")


# Alternative single-file parses kept for reference:
#llama_parse_documents = LlamaParse(api_key=llamaparse_api_key, result_type="markdown").load_data("./data/presentation.pptx")
#llama_parse_documents = LlamaParse(api_key=llamaparse_api_key, result_type="markdown").load_data("./data/uber_10q_march_2022.pdf")
#llama_parse_documents = LlamaParse(api_key=llamaparse_api_key, result_type="markdown").load_data("./data/state_of_union.txt")
20 |
import pickle

# Define a function to load parsed data if available, or parse if not
def load_or_parse_data():
    """Return the parsed documents, using ./data/parsed_data.pkl as a cache.

    On a cache hit the pickle is loaded and returned; otherwise the two sample
    files are parsed through the LlamaParse cloud API, pickled for next time,
    and returned. Delete the pickle file to force a re-parse.

    Returns:
        The list of parsed Document objects produced by LlamaParse.
    """
    data_file = "./data/parsed_data.pkl"

    if os.path.exists(data_file):
        # Load the previously parsed data from the cache file
        with open(data_file, "rb") as f:
            parsed_data = pickle.load(f)
    else:
        # Perform the parsing step (one cloud call for both files)
        llama_parse_documents = LlamaParse(api_key=llamaparse_api_key, result_type="markdown").load_data(["./data/presentation.pptx", "./data/uber_10q_march_2022.pdf"])

        # Ensure the cache directory exists before writing the pickle,
        # otherwise open(..., "wb") raises FileNotFoundError
        os.makedirs(os.path.dirname(data_file), exist_ok=True)
        with open(data_file, "wb") as f:
            pickle.dump(llama_parse_documents, f)

        # Set the parsed data to the variable
        parsed_data = llama_parse_documents

    return parsed_data
44 |
# Call the function to either load or parse the data
llama_parse_documents = load_or_parse_data()

######## QDRANT ###########

from llama_index.vector_stores.qdrant import QdrantVectorStore
from llama_index.core import VectorStoreIndex, StorageContext

import qdrant_client

# Qdrant Cloud connection settings come from .env
qdrant_url = os.getenv("QDRANT_URL")
qdrant_api_key = os.getenv("QDRANT_API_KEY")

######### FastEmbedEmbeddings #############

# by default llamaindex uses OpenAI models; use a local FastEmbed model instead
from llama_index.embeddings.fastembed import FastEmbedEmbedding
embed_model = FastEmbedEmbedding(model_name="BAAI/bge-base-en-v1.5")

#### Setting embed_model other than openAI ( by default used openAI's model)
from llama_index.core import Settings

Settings.embed_model = embed_model

######### Groq API ###########

from llama_index.llms.groq import Groq
groq_api_key = os.getenv("GROQ_API_KEY")

llm = Groq(model="mixtral-8x7b-32768", api_key=groq_api_key)
#llm = Groq(model="gemma-7b-it", api_key=groq_api_key)

#### Setting llm other than openAI ( by default used openAI's model)
Settings.llm = llm

# connect to the Qdrant Cloud cluster
client = qdrant_client.QdrantClient(api_key=qdrant_api_key, url=qdrant_url,)

# embed the parsed documents and store the vectors in the 'qdrant_rag' collection
vector_store = QdrantVectorStore(client=client, collection_name='qdrant_rag')
storage_context = StorageContext.from_defaults(vector_store=vector_store)
index = VectorStoreIndex.from_documents(documents=llama_parse_documents, storage_context=storage_context, show_progress=True)

#### PERSIST INDEX #####
#index.storage_context.persist()

#storage_context = StorageContext.from_defaults(persist_dir="./storage")
#index = load_index_from_storage(storage_context)

# create a query engine for the index
query_engine = index.as_query_engine()

# query the engine
#query = "what is the common stock balance as of Balance as of March 31, 2022?"
query = "what is the letter of credit As of December 31, 2021 "
response = query_engine.query(query)
print(response)
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | llama-parse
2 | llama-index-embeddings-fastembed
3 | llama-index
4 | fastembed
5 | llama-index-vector-stores-qdrant
6 | qdrant_client
7 | llama-index-embeddings-ollama
8 | llama-index-llms-ollama
9 | python-dotenv
10 | llama-index-llms-groq
--------------------------------------------------------------------------------