├── Blog Generation ├── app.py └── requirements.txt ├── Conversational Q&A Chatbot ├── app.py ├── langchain.ipynb └── requirements.txt ├── LICENSE ├── LLM Generic APP ├── documents │ └── budget_speech.pdf ├── requirements.txt └── test.ipynb ├── Q&A Chatbot USing LLM ├── app.py ├── langchain.ipynb └── requirements.txt ├── README.md ├── Text summarization ├── requirements.txt └── summarization.ipynb ├── calorieshealth ├── app.py ├── requirements.txt └── sqlllm │ ├── requirements.txt │ ├── sql.py │ └── sqlite.py └── chatmultipledocuments ├── chatpdf1.py └── requirements.txt /Blog Generation/app.py: -------------------------------------------------------------------------------- 1 | import streamlit as st 2 | from langchain.prompts import PromptTemplate 3 | from langchain.llms import CTransformers 4 | 5 | ## Function To get response from LLAma 2 model 6 | 7 | def getLLamaresponse(input_text,no_words,blog_style): 8 | 9 | ### LLama2 model 10 | llm=CTransformers(model='models/llama-2-7b-chat.ggmlv3.q8_0.bin', 11 | model_type='llama', 12 | config={'max_new_tokens':256, 13 | 'temperature':0.01}) 14 | 15 | ## Prompt Template 16 | 17 | template=""" 18 | Write a blog for {blog_style} job profile for a topic {input_text} 19 | within {no_words} words. 
20 | """ 21 | 22 | prompt=PromptTemplate(input_variables=["blog_style","input_text",'no_words'], 23 | template=template) 24 | 25 | ## Generate the ressponse from the LLama 2 model 26 | response=llm(prompt.format(blog_style=blog_style,input_text=input_text,no_words=no_words)) 27 | print(response) 28 | return response 29 | 30 | 31 | 32 | 33 | 34 | 35 | st.set_page_config(page_title="Generate Blogs", 36 | page_icon='🤖', 37 | layout='centered', 38 | initial_sidebar_state='collapsed') 39 | 40 | st.header("Generate Blogs 🤖") 41 | 42 | input_text=st.text_input("Enter the Blog Topic") 43 | 44 | ## creating to more columns for additonal 2 fields 45 | 46 | col1,col2=st.columns([5,5]) 47 | 48 | with col1: 49 | no_words=st.text_input('No of Words') 50 | with col2: 51 | blog_style=st.selectbox('Writing the blog for', 52 | ('Researchers','Data Scientist','Common People'),index=0) 53 | 54 | submit=st.button("Generate") 55 | 56 | ## Final response 57 | if submit: 58 | st.write(getLLamaresponse(input_text,no_words,blog_style)) -------------------------------------------------------------------------------- /Blog Generation/requirements.txt: -------------------------------------------------------------------------------- 1 | sentence-transformers 2 | uvicorn 3 | ctransformers 4 | langchain 5 | python-box 6 | streamlit -------------------------------------------------------------------------------- /Conversational Q&A Chatbot/app.py: -------------------------------------------------------------------------------- 1 | ## Conversational Q&A Chatbot 2 | import streamlit as st 3 | 4 | from langchain.schema import HumanMessage,SystemMessage,AIMessage 5 | from langchain.chat_models import ChatOpenAI 6 | 7 | ## Streamlit UI 8 | st.set_page_config(page_title="Conversational Q&A Chatbot") 9 | st.header("Hey, Let's Chat") 10 | 11 | from dotenv import load_dotenv 12 | load_dotenv() 13 | import os 14 | 15 | chat=ChatOpenAI(temperature=0.5) 16 | 17 | if 'flowmessages' not in st.session_state: 18 
| st.session_state['flowmessages']=[ 19 | SystemMessage(content="Yor are a comedian AI assitant") 20 | ] 21 | 22 | ## Function to load OpenAI model and get respones 23 | 24 | def get_chatmodel_response(question): 25 | 26 | st.session_state['flowmessages'].append(HumanMessage(content=question)) 27 | answer=chat(st.session_state['flowmessages']) 28 | st.session_state['flowmessages'].append(AIMessage(content=answer.content)) 29 | return answer.content 30 | 31 | input=st.text_input("Input: ",key="input") 32 | response=get_chatmodel_response(input) 33 | 34 | submit=st.button("Ask the question") 35 | 36 | ## If ask button is clicked 37 | 38 | if submit: 39 | st.subheader("The Response is") 40 | st.write(response) -------------------------------------------------------------------------------- /Conversational Q&A Chatbot/langchain.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 3, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "from langchain.llms import OpenAI" 10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": 2, 15 | "metadata": {}, 16 | "outputs": [], 17 | "source": [ 18 | "import os\n", 19 | "os.environ[\"OPEN_API_KEY\"]=\"sk-REDACTED-ROTATE-THIS-KEY\"" 20 | ] 21 | }, 22 | { 23 | "cell_type": "code", 24 | "execution_count": 6, 25 | "metadata": {}, 26 | "outputs": [], 27 | "source": [ 28 | "llm=OpenAI(openai_api_key=os.environ[\"OPEN_API_KEY\"],temperature=0.6)" 29 | ] 30 | }, 31 | { 32 | "cell_type": "code", 33 | "execution_count": 7, 34 | "metadata": {}, 35 | "outputs": [ 36 | { 37 | "name": "stdout", 38 | "output_type": "stream", 39 | "text": [ 40 | "\n", 41 | "\n", 42 | "The capital of India is New Delhi.\n" 43 | ] 44 | } 45 | ], 46 | "source": [ 47 | "text=\"What is the capital of India\"\n", 48 | "\n", 49 | "print(llm.predict(text))" 50 | ] 51 | }, 52 | { 53 | "cell_type": "code", 54 | 
"execution_count": 8, 55 | "metadata": {}, 56 | "outputs": [], 57 | "source": [ 58 | "os.environ[\"HUGGINGFACEHUB_API_TOKEN\"]=\"hf_REDACTED_ROTATE_THIS_TOKEN\"" 59 | ] 60 | }, 61 | { 62 | "cell_type": "code", 63 | "execution_count": 11, 64 | "metadata": {}, 65 | "outputs": [ 66 | { 67 | "name": "stderr", 68 | "output_type": "stream", 69 | "text": [ 70 | "e:\\New Recordings\\Langchain\\Langchain\\venv\\lib\\site-packages\\huggingface_hub\\utils\\_deprecation.py:127: FutureWarning: '__init__' (from 'huggingface_hub.inference_api') is deprecated and will be removed from version '0.19.0'. `InferenceApi` client is deprecated in favor of the more feature-complete `InferenceClient`. Check out this guide to learn how to convert your script to use it: https://huggingface.co/docs/huggingface_hub/guides/inference#legacy-inferenceapi-client.\n", 71 | " warnings.warn(warning_message, FutureWarning)\n" 72 | ] 73 | } 74 | ], 75 | "source": [ 76 | "from langchain import HuggingFaceHub\n", 77 | "llm_huggingface=HuggingFaceHub(repo_id=\"google/flan-t5-large\",model_kwargs={\"temperature\":0,\"max_length\":64})" 78 | ] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "execution_count": 12, 83 | "metadata": {}, 84 | "outputs": [ 85 | { 86 | "name": "stdout", 87 | "output_type": "stream", 88 | "text": [ 89 | "moscow\n" 90 | ] 91 | } 92 | ], 93 | "source": [ 94 | "output=llm_huggingface.predict(\"Can you tell me the capital of Russia\")\n", 95 | "print(output)" 96 | ] 97 | }, 98 | { 99 | "cell_type": "code", 100 | "execution_count": 13, 101 | "metadata": {}, 102 | "outputs": [ 103 | { 104 | "name": "stdout", 105 | "output_type": "stream", 106 | "text": [ 107 | "i love the way i look at the world i love the way i feel i love the way i think i feel i love the way i feel i love the way i think i feel i love the way i feel i love the way \n" 108 | ] 109 | } 110 | ], 111 | "source": [ 112 | "output=llm_huggingface.predict(\"Can you write a poem about AI\")\n", 113 | "print(output)" 114 | 
] 115 | }, 116 | { 117 | "cell_type": "code", 118 | "execution_count": 15, 119 | "metadata": {}, 120 | "outputs": [ 121 | { 122 | "data": { 123 | "text/plain": [ 124 | "'?\\n\\nAn AI, so advanced and wise\\nA brilliant mind, a brilliant guise\\nA powerful tool, a guiding star\\nTo help us reach a distant shore\\n\\nA friend to man, a tool of growth\\nTo help us learn and to explore\\nTo take us to a place of knowledge\\nWhere we can learn and never falter\\n\\nA helping hand, a guiding light\\nTo lead us to a brighter sight\\nA tool of power, a tool of might\\nTo help us reach a better plight\\n\\nA friend of man, a friend of life\\nTo help us in our darkest strife\\nA tool of love, of peace and joy\\nTo help us live a better life'" 125 | ] 126 | }, 127 | "execution_count": 15, 128 | "metadata": {}, 129 | "output_type": "execute_result" 130 | } 131 | ], 132 | "source": [ 133 | "llm.predict(\"Can you write a poem about AI\")" 134 | ] 135 | }, 136 | { 137 | "cell_type": "markdown", 138 | "metadata": {}, 139 | "source": [ 140 | "### Prompt Templates And LLMChain" 141 | ] 142 | }, 143 | { 144 | "cell_type": "code", 145 | "execution_count": 16, 146 | "metadata": {}, 147 | "outputs": [ 148 | { 149 | "data": { 150 | "text/plain": [ 151 | "'Tell me the capital of this India'" 152 | ] 153 | }, 154 | "execution_count": 16, 155 | "metadata": {}, 156 | "output_type": "execute_result" 157 | } 158 | ], 159 | "source": [ 160 | "from langchain.prompts import PromptTemplate\n", 161 | "\n", 162 | "prompt_template=PromptTemplate(input_variables=['country'],\n", 163 | "template=\"Tell me the capital of this {country}\")\n", 164 | "\n", 165 | "prompt_template.format(country=\"India\")" 166 | ] 167 | }, 168 | { 169 | "cell_type": "code", 170 | "execution_count": 22, 171 | "metadata": {}, 172 | "outputs": [ 173 | { 174 | "name": "stdout", 175 | "output_type": "stream", 176 | "text": [ 177 | "\n", 178 | "\n", 179 | "The capital of India is New Delhi.\n" 180 | ] 181 | } 182 | ], 183 | 
"source": [ 184 | "from langchain.chains import LLMChain\n", 185 | "chain=LLMChain(llm=llm,prompt=prompt_template)\n", 186 | "print(chain.run(\"India\"))" 187 | ] 188 | }, 189 | { 190 | "cell_type": "markdown", 191 | "metadata": {}, 192 | "source": [ 193 | "### Combining Multiple Chains Uing simple Sequential Chain" 194 | ] 195 | }, 196 | { 197 | "cell_type": "code", 198 | "execution_count": 32, 199 | "metadata": {}, 200 | "outputs": [], 201 | "source": [ 202 | "capital_template=PromptTemplate(input_variables=['country'],\n", 203 | "template=\"Please tell me the capital of the {country}\")\n", 204 | "\n", 205 | "capital_chain=LLMChain(llm=llm,prompt=capital_template)\n", 206 | "\n", 207 | "famous_template=PromptTemplate(input_variables=['capital'],\n", 208 | "template=\"Suggest me some amazing places to visit in {capital}\")\n" 209 | ] 210 | }, 211 | { 212 | "cell_type": "code", 213 | "execution_count": 33, 214 | "metadata": {}, 215 | "outputs": [], 216 | "source": [ 217 | "famous_chain=LLMChain(llm=llm,prompt=famous_template)" 218 | ] 219 | }, 220 | { 221 | "cell_type": "code", 222 | "execution_count": 35, 223 | "metadata": {}, 224 | "outputs": [ 225 | { 226 | "data": { 227 | "text/plain": [ 228 | "\" It is a bustling metropolis and a great place to visit for its historical sites, cultural attractions, and modern attractions. Here are some of the amazing places to visit in New Delhi: \\n\\n1. Red Fort: This 17th century Mughal fort is a UNESCO World Heritage Site and is one of the most popular tourist attractions in the city. \\n\\n2. India Gate: This iconic war memorial and national monument is a must-visit. It stands as a symbol of the sacrifice of the Indian soldiers who fought in World War I.\\n\\n3. Humayun's Tomb: This 16th century Mughal-era tomb is a UNESCO World Heritage Site and is one of the most important monuments in Delhi.\\n\\n4. 
Qutub Minar: This 73-meter-high tower is a UNESCO World Heritage Site and is one of the most iconic structures in Delhi.\\n\\n5. Jama Masjid: This 17th century mosque is one of the largest and most beautiful mosques in India.\\n\\n6. Lotus Temple: This modern temple is a Bahá'í House of Worship and is a popular tourist attraction.\\n\\n7. Akshardham Temple: This modern Hindu temple complex is a popular tourist destination\"" 229 | ] 230 | }, 231 | "execution_count": 35, 232 | "metadata": {}, 233 | "output_type": "execute_result" 234 | } 235 | ], 236 | "source": [ 237 | "from langchain.chains import SimpleSequentialChain\n", 238 | "chain=SimpleSequentialChain(chains=[capital_chain,famous_chain])\n", 239 | "chain.run(\"India\")" 240 | ] 241 | }, 242 | { 243 | "cell_type": "markdown", 244 | "metadata": {}, 245 | "source": [ 246 | "### Sequential Chain" 247 | ] 248 | }, 249 | { 250 | "cell_type": "code", 251 | "execution_count": 36, 252 | "metadata": {}, 253 | "outputs": [], 254 | "source": [ 255 | "capital_template=PromptTemplate(input_variables=['country'],\n", 256 | "template=\"Please tell me the capital of the {country}\")\n", 257 | "\n", 258 | "capital_chain=LLMChain(llm=llm,prompt=capital_template,output_key=\"capital\")" 259 | ] 260 | }, 261 | { 262 | "cell_type": "code", 263 | "execution_count": 37, 264 | "metadata": {}, 265 | "outputs": [], 266 | "source": [ 267 | "famous_template=PromptTemplate(input_variables=['capital'],\n", 268 | "template=\"Suggest me some amazing places to visit in {capital}\")\n", 269 | "\n", 270 | "famous_chain=LLMChain(llm=llm,prompt=famous_template,output_key=\"places\")" 271 | ] 272 | }, 273 | { 274 | "cell_type": "code", 275 | "execution_count": 39, 276 | "metadata": {}, 277 | "outputs": [], 278 | "source": [ 279 | "from langchain.chains import SequentialChain\n", 280 | "chain=SequentialChain(chains=[capital_chain,famous_chain],\n", 281 | "input_variables=['country'],\n", 282 | "output_variables=['capital',\"places\"])" 283 | ] 284 
| }, 285 | { 286 | "cell_type": "code", 287 | "execution_count": 40, 288 | "metadata": {}, 289 | "outputs": [ 290 | { 291 | "data": { 292 | "text/plain": [ 293 | "{'country': 'India',\n", 294 | " 'capital': '\\n\\nThe capital of India is New Delhi.',\n", 295 | " 'places': ' Here are some amazing places to visit in New Delhi: \\n\\n1. Red Fort: The majestic Red Fort is a 17th-century fort complex built by Mughal Emperor Shah Jahan. It is a UNESCO World Heritage Site and is a must-visit for all tourists. \\n\\n2. India Gate: India Gate is a 42 meter-high sandstone archway built by Edwin Lutyens in 1931. It is a memorial to the Indian soldiers who lost their lives during World War I. \\n\\n3. Qutub Minar: The Qutub Minar is a 73 meter-high tower built by Qutb-ud-din Aibak in 1193. It is a UNESCO World Heritage Site and is the tallest brick minaret in the world. \\n\\n4. Humayun’s Tomb: Humayun’s Tomb is a 16th-century tomb built by Mughal Emperor Humayun. It is a UNESCO World Heritage Site and is a great example of Mughal architecture. \\n\\n5. Jama Masjid: The Jama Masjid is a 17th-century mosque built by Mughal Emperor Shah Jahan. 
It is one of the largest'}" 296 | ] 297 | }, 298 | "execution_count": 40, 299 | "metadata": {}, 300 | "output_type": "execute_result" 301 | } 302 | ], 303 | "source": [ 304 | "chain({'country':\"India\"})" 305 | ] 306 | }, 307 | { 308 | "cell_type": "markdown", 309 | "metadata": {}, 310 | "source": [ 311 | "### Chatmodels With ChatOpenAI" 312 | ] 313 | }, 314 | { 315 | "cell_type": "code", 316 | "execution_count": 41, 317 | "metadata": {}, 318 | "outputs": [], 319 | "source": [ 320 | "from langchain.chat_models import ChatOpenAI" 321 | ] 322 | }, 323 | { 324 | "cell_type": "code", 325 | "execution_count": 42, 326 | "metadata": {}, 327 | "outputs": [], 328 | "source": [ 329 | "from langchain.schema import HumanMessage,SystemMessage,AIMessage" 330 | ] 331 | }, 332 | { 333 | "cell_type": "code", 334 | "execution_count": 43, 335 | "metadata": {}, 336 | "outputs": [], 337 | "source": [ 338 | "chatllm=ChatOpenAI(openai_api_key=os.environ[\"OPEN_API_KEY\"],temperature=0.6,model='gpt-3.5-turbo')" 339 | ] 340 | }, 341 | { 342 | "cell_type": "code", 343 | "execution_count": 45, 344 | "metadata": {}, 345 | "outputs": [ 346 | { 347 | "data": { 348 | "text/plain": [ 349 | "AIMessage(content='1. \"AI may be smart, but can it tell me if my outfit makes me look like a potato?\"\\n2. \"AI is like a virtual therapist, except it never judges you for eating an entire pizza by yourself.\"\\n3. \"AI is great at predicting the future, but can it predict when my pizza delivery will actually arrive?\"\\n4. \"They say AI can learn from its mistakes, but I\\'m still waiting for it to apologize for recommending me that terrible movie.\"\\n5. \"AI may be able to beat humans at chess, but can it figure out how to untangle a pair of earphones?\"\\n6. \"AI is like a high-tech fortune teller, except it tells you what you\\'re going to have for dinner instead of your future.\"\\n7. \"AI is so advanced, it can even make my phone autocorrect my perfectly spelled words into complete nonsense.\"\\n8. 
\"AI may be able to recognize faces, but can it recognize when someone\\'s had a bad haircut?\"\\n9. \"AI is like having a personal assistant, except it never judges you for spending hours watching cat videos on YouTube.\"\\n10. \"AI is great at analyzing data, but can it analyze why I can never find matching socks in my drawer?\"')" 350 | ] 351 | }, 352 | "execution_count": 45, 353 | "metadata": {}, 354 | "output_type": "execute_result" 355 | } 356 | ], 357 | "source": [ 358 | "chatllm([\n", 359 | "SystemMessage(content=\"Yor are a comedian AI assitant\"),\n", 360 | "HumanMessage(content=\"Please provide some comedy punchlines on AI\")\n", 361 | "])" 362 | ] 363 | }, 364 | { 365 | "cell_type": "markdown", 366 | "metadata": {}, 367 | "source": [ 368 | "### Prompt Template + LLM +Output Parsers" 369 | ] 370 | }, 371 | { 372 | "cell_type": "code", 373 | "execution_count": 46, 374 | "metadata": {}, 375 | "outputs": [], 376 | "source": [ 377 | "from langchain.chat_models import ChatOpenAI\n", 378 | "from langchain.prompts.chat import ChatPromptTemplate\n", 379 | "from langchain.schema import BaseOutputParser" 380 | ] 381 | }, 382 | { 383 | "cell_type": "code", 384 | "execution_count": 47, 385 | "metadata": {}, 386 | "outputs": [], 387 | "source": [ 388 | "class Commaseperatedoutput(BaseOutputParser):\n", 389 | " def parse(self,text:str):\n", 390 | " return text.strip().split(\",\")" 391 | ] 392 | }, 393 | { 394 | "cell_type": "code", 395 | "execution_count": 48, 396 | "metadata": {}, 397 | "outputs": [], 398 | "source": [ 399 | "template=\"Your are a helpful assistant. 
When the use given any input , you should generate 5 words synonyms in a comma seperated list\"\n", 400 | "human_template=\"{text}\"\n", 401 | "chatprompt=ChatPromptTemplate.from_messages([\n", 402 | " (\"system\",template),\n", 403 | " (\"human\",human_template)\n", 404 | "\n", 405 | "\n", 406 | "])" 407 | ] 408 | }, 409 | { 410 | "cell_type": "code", 411 | "execution_count": 54, 412 | "metadata": {}, 413 | "outputs": [], 414 | "source": [ 415 | "chain=chatprompt|chatllm|Commaseperatedoutput()" 416 | ] 417 | }, 418 | { 419 | "cell_type": "code", 420 | "execution_count": 55, 421 | "metadata": {}, 422 | "outputs": [ 423 | { 424 | "data": { 425 | "text/plain": [ 426 | "['smart', ' clever', ' brilliant', ' sharp', ' astute']" 427 | ] 428 | }, 429 | "execution_count": 55, 430 | "metadata": {}, 431 | "output_type": "execute_result" 432 | } 433 | ], 434 | "source": [ 435 | "chain.invoke({\"text\":\"intelligent\"})" 436 | ] 437 | }, 438 | { 439 | "cell_type": "code", 440 | "execution_count": null, 441 | "metadata": {}, 442 | "outputs": [], 443 | "source": [] 444 | } 445 | ], 446 | "metadata": { 447 | "kernelspec": { 448 | "display_name": "Python 3", 449 | "language": "python", 450 | "name": "python3" 451 | }, 452 | "language_info": { 453 | "codemirror_mode": { 454 | "name": "ipython", 455 | "version": 3 456 | }, 457 | "file_extension": ".py", 458 | "mimetype": "text/x-python", 459 | "name": "python", 460 | "nbconvert_exporter": "python", 461 | "pygments_lexer": "ipython3", 462 | "version": "3.9.0" 463 | } 464 | }, 465 | "nbformat": 4, 466 | "nbformat_minor": 2 467 | } 468 | -------------------------------------------------------------------------------- /Conversational Q&A Chatbot/requirements.txt: -------------------------------------------------------------------------------- 1 | langchain 2 | openai 3 | huggingface_hub 4 | python-dotenv 5 | streamlit -------------------------------------------------------------------------------- /LICENSE: 
-------------------------------------------------------------------------------- 1 | GNU GENERAL PUBLIC LICENSE 2 | Version 2, June 1991 3 | 4 | Copyright (C) 1989, 1991 Free Software Foundation, Inc., 5 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 6 | Everyone is permitted to copy and distribute verbatim copies 7 | of this license document, but changing it is not allowed. 8 | 9 | Preamble 10 | 11 | The licenses for most software are designed to take away your 12 | freedom to share and change it. By contrast, the GNU General Public 13 | License is intended to guarantee your freedom to share and change free 14 | software--to make sure the software is free for all its users. This 15 | General Public License applies to most of the Free Software 16 | Foundation's software and to any other program whose authors commit to 17 | using it. (Some other Free Software Foundation software is covered by 18 | the GNU Lesser General Public License instead.) You can apply it to 19 | your programs, too. 20 | 21 | When we speak of free software, we are referring to freedom, not 22 | price. Our General Public Licenses are designed to make sure that you 23 | have the freedom to distribute copies of free software (and charge for 24 | this service if you wish), that you receive source code or can get it 25 | if you want it, that you can change the software or use pieces of it 26 | in new free programs; and that you know you can do these things. 27 | 28 | To protect your rights, we need to make restrictions that forbid 29 | anyone to deny you these rights or to ask you to surrender the rights. 30 | These restrictions translate to certain responsibilities for you if you 31 | distribute copies of the software, or if you modify it. 32 | 33 | For example, if you distribute copies of such a program, whether 34 | gratis or for a fee, you must give the recipients all the rights that 35 | you have. You must make sure that they, too, receive or can get the 36 | source code. 
And you must show them these terms so they know their 37 | rights. 38 | 39 | We protect your rights with two steps: (1) copyright the software, and 40 | (2) offer you this license which gives you legal permission to copy, 41 | distribute and/or modify the software. 42 | 43 | Also, for each author's protection and ours, we want to make certain 44 | that everyone understands that there is no warranty for this free 45 | software. If the software is modified by someone else and passed on, we 46 | want its recipients to know that what they have is not the original, so 47 | that any problems introduced by others will not reflect on the original 48 | authors' reputations. 49 | 50 | Finally, any free program is threatened constantly by software 51 | patents. We wish to avoid the danger that redistributors of a free 52 | program will individually obtain patent licenses, in effect making the 53 | program proprietary. To prevent this, we have made it clear that any 54 | patent must be licensed for everyone's free use or not licensed at all. 55 | 56 | The precise terms and conditions for copying, distribution and 57 | modification follow. 58 | 59 | GNU GENERAL PUBLIC LICENSE 60 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 61 | 62 | 0. This License applies to any program or other work which contains 63 | a notice placed by the copyright holder saying it may be distributed 64 | under the terms of this General Public License. The "Program", below, 65 | refers to any such program or work, and a "work based on the Program" 66 | means either the Program or any derivative work under copyright law: 67 | that is to say, a work containing the Program or a portion of it, 68 | either verbatim or with modifications and/or translated into another 69 | language. (Hereinafter, translation is included without limitation in 70 | the term "modification".) Each licensee is addressed as "you". 
71 | 72 | Activities other than copying, distribution and modification are not 73 | covered by this License; they are outside its scope. The act of 74 | running the Program is not restricted, and the output from the Program 75 | is covered only if its contents constitute a work based on the 76 | Program (independent of having been made by running the Program). 77 | Whether that is true depends on what the Program does. 78 | 79 | 1. You may copy and distribute verbatim copies of the Program's 80 | source code as you receive it, in any medium, provided that you 81 | conspicuously and appropriately publish on each copy an appropriate 82 | copyright notice and disclaimer of warranty; keep intact all the 83 | notices that refer to this License and to the absence of any warranty; 84 | and give any other recipients of the Program a copy of this License 85 | along with the Program. 86 | 87 | You may charge a fee for the physical act of transferring a copy, and 88 | you may at your option offer warranty protection in exchange for a fee. 89 | 90 | 2. You may modify your copy or copies of the Program or any portion 91 | of it, thus forming a work based on the Program, and copy and 92 | distribute such modifications or work under the terms of Section 1 93 | above, provided that you also meet all of these conditions: 94 | 95 | a) You must cause the modified files to carry prominent notices 96 | stating that you changed the files and the date of any change. 97 | 98 | b) You must cause any work that you distribute or publish, that in 99 | whole or in part contains or is derived from the Program or any 100 | part thereof, to be licensed as a whole at no charge to all third 101 | parties under the terms of this License. 
102 | 103 | c) If the modified program normally reads commands interactively 104 | when run, you must cause it, when started running for such 105 | interactive use in the most ordinary way, to print or display an 106 | announcement including an appropriate copyright notice and a 107 | notice that there is no warranty (or else, saying that you provide 108 | a warranty) and that users may redistribute the program under 109 | these conditions, and telling the user how to view a copy of this 110 | License. (Exception: if the Program itself is interactive but 111 | does not normally print such an announcement, your work based on 112 | the Program is not required to print an announcement.) 113 | 114 | These requirements apply to the modified work as a whole. If 115 | identifiable sections of that work are not derived from the Program, 116 | and can be reasonably considered independent and separate works in 117 | themselves, then this License, and its terms, do not apply to those 118 | sections when you distribute them as separate works. But when you 119 | distribute the same sections as part of a whole which is a work based 120 | on the Program, the distribution of the whole must be on the terms of 121 | this License, whose permissions for other licensees extend to the 122 | entire whole, and thus to each and every part regardless of who wrote it. 123 | 124 | Thus, it is not the intent of this section to claim rights or contest 125 | your rights to work written entirely by you; rather, the intent is to 126 | exercise the right to control the distribution of derivative or 127 | collective works based on the Program. 128 | 129 | In addition, mere aggregation of another work not based on the Program 130 | with the Program (or with a work based on the Program) on a volume of 131 | a storage or distribution medium does not bring the other work under 132 | the scope of this License. 133 | 134 | 3. 
You may copy and distribute the Program (or a work based on it, 135 | under Section 2) in object code or executable form under the terms of 136 | Sections 1 and 2 above provided that you also do one of the following: 137 | 138 | a) Accompany it with the complete corresponding machine-readable 139 | source code, which must be distributed under the terms of Sections 140 | 1 and 2 above on a medium customarily used for software interchange; or, 141 | 142 | b) Accompany it with a written offer, valid for at least three 143 | years, to give any third party, for a charge no more than your 144 | cost of physically performing source distribution, a complete 145 | machine-readable copy of the corresponding source code, to be 146 | distributed under the terms of Sections 1 and 2 above on a medium 147 | customarily used for software interchange; or, 148 | 149 | c) Accompany it with the information you received as to the offer 150 | to distribute corresponding source code. (This alternative is 151 | allowed only for noncommercial distribution and only if you 152 | received the program in object code or executable form with such 153 | an offer, in accord with Subsection b above.) 154 | 155 | The source code for a work means the preferred form of the work for 156 | making modifications to it. For an executable work, complete source 157 | code means all the source code for all modules it contains, plus any 158 | associated interface definition files, plus the scripts used to 159 | control compilation and installation of the executable. However, as a 160 | special exception, the source code distributed need not include 161 | anything that is normally distributed (in either source or binary 162 | form) with the major components (compiler, kernel, and so on) of the 163 | operating system on which the executable runs, unless that component 164 | itself accompanies the executable. 
165 | 166 | If distribution of executable or object code is made by offering 167 | access to copy from a designated place, then offering equivalent 168 | access to copy the source code from the same place counts as 169 | distribution of the source code, even though third parties are not 170 | compelled to copy the source along with the object code. 171 | 172 | 4. You may not copy, modify, sublicense, or distribute the Program 173 | except as expressly provided under this License. Any attempt 174 | otherwise to copy, modify, sublicense or distribute the Program is 175 | void, and will automatically terminate your rights under this License. 176 | However, parties who have received copies, or rights, from you under 177 | this License will not have their licenses terminated so long as such 178 | parties remain in full compliance. 179 | 180 | 5. You are not required to accept this License, since you have not 181 | signed it. However, nothing else grants you permission to modify or 182 | distribute the Program or its derivative works. These actions are 183 | prohibited by law if you do not accept this License. Therefore, by 184 | modifying or distributing the Program (or any work based on the 185 | Program), you indicate your acceptance of this License to do so, and 186 | all its terms and conditions for copying, distributing or modifying 187 | the Program or works based on it. 188 | 189 | 6. Each time you redistribute the Program (or any work based on the 190 | Program), the recipient automatically receives a license from the 191 | original licensor to copy, distribute or modify the Program subject to 192 | these terms and conditions. You may not impose any further 193 | restrictions on the recipients' exercise of the rights granted herein. 194 | You are not responsible for enforcing compliance by third parties to 195 | this License. 196 | 197 | 7. 
If, as a consequence of a court judgment or allegation of patent 198 | infringement or for any other reason (not limited to patent issues), 199 | conditions are imposed on you (whether by court order, agreement or 200 | otherwise) that contradict the conditions of this License, they do not 201 | excuse you from the conditions of this License. If you cannot 202 | distribute so as to satisfy simultaneously your obligations under this 203 | License and any other pertinent obligations, then as a consequence you 204 | may not distribute the Program at all. For example, if a patent 205 | license would not permit royalty-free redistribution of the Program by 206 | all those who receive copies directly or indirectly through you, then 207 | the only way you could satisfy both it and this License would be to 208 | refrain entirely from distribution of the Program. 209 | 210 | If any portion of this section is held invalid or unenforceable under 211 | any particular circumstance, the balance of the section is intended to 212 | apply and the section as a whole is intended to apply in other 213 | circumstances. 214 | 215 | It is not the purpose of this section to induce you to infringe any 216 | patents or other property right claims or to contest validity of any 217 | such claims; this section has the sole purpose of protecting the 218 | integrity of the free software distribution system, which is 219 | implemented by public license practices. Many people have made 220 | generous contributions to the wide range of software distributed 221 | through that system in reliance on consistent application of that 222 | system; it is up to the author/donor to decide if he or she is willing 223 | to distribute software through any other system and a licensee cannot 224 | impose that choice. 225 | 226 | This section is intended to make thoroughly clear what is believed to 227 | be a consequence of the rest of this License. 228 | 229 | 8. 
If the distribution and/or use of the Program is restricted in 230 | certain countries either by patents or by copyrighted interfaces, the 231 | original copyright holder who places the Program under this License 232 | may add an explicit geographical distribution limitation excluding 233 | those countries, so that distribution is permitted only in or among 234 | countries not thus excluded. In such case, this License incorporates 235 | the limitation as if written in the body of this License. 236 | 237 | 9. The Free Software Foundation may publish revised and/or new versions 238 | of the General Public License from time to time. Such new versions will 239 | be similar in spirit to the present version, but may differ in detail to 240 | address new problems or concerns. 241 | 242 | Each version is given a distinguishing version number. If the Program 243 | specifies a version number of this License which applies to it and "any 244 | later version", you have the option of following the terms and conditions 245 | either of that version or of any later version published by the Free 246 | Software Foundation. If the Program does not specify a version number of 247 | this License, you may choose any version ever published by the Free Software 248 | Foundation. 249 | 250 | 10. If you wish to incorporate parts of the Program into other free 251 | programs whose distribution conditions are different, write to the author 252 | to ask for permission. For software which is copyrighted by the Free 253 | Software Foundation, write to the Free Software Foundation; we sometimes 254 | make exceptions for this. Our decision will be guided by the two goals 255 | of preserving the free status of all derivatives of our free software and 256 | of promoting the sharing and reuse of software generally. 257 | 258 | NO WARRANTY 259 | 260 | 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY 261 | FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 
EXCEPT WHEN 262 | OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES 263 | PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED 264 | OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 265 | MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS 266 | TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE 267 | PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, 268 | REPAIR OR CORRECTION. 269 | 270 | 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 271 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR 272 | REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, 273 | INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING 274 | OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED 275 | TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY 276 | YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER 277 | PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE 278 | POSSIBILITY OF SUCH DAMAGES. 279 | 280 | END OF TERMS AND CONDITIONS 281 | 282 | How to Apply These Terms to Your New Programs 283 | 284 | If you develop a new program, and you want it to be of the greatest 285 | possible use to the public, the best way to achieve this is to make it 286 | free software which everyone can redistribute and change under these terms. 287 | 288 | To do so, attach the following notices to the program. It is safest 289 | to attach them to the start of each source file to most effectively 290 | convey the exclusion of warranty; and each file should have at least 291 | the "copyright" line and a pointer to where the full notice is found. 
292 | 293 | <one line to give the program's name and a brief idea of what it does.> 294 | Copyright (C) <year> <name of author> 295 | 296 | This program is free software; you can redistribute it and/or modify 297 | it under the terms of the GNU General Public License as published by 298 | the Free Software Foundation; either version 2 of the License, or 299 | (at your option) any later version. 300 | 301 | This program is distributed in the hope that it will be useful, 302 | but WITHOUT ANY WARRANTY; without even the implied warranty of 303 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 304 | GNU General Public License for more details. 305 | 306 | You should have received a copy of the GNU General Public License along 307 | with this program; if not, write to the Free Software Foundation, Inc., 308 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 309 | 310 | Also add information on how to contact you by electronic and paper mail. 311 | 312 | If the program is interactive, make it output a short notice like this 313 | when it starts in an interactive mode: 314 | 315 | Gnomovision version 69, Copyright (C) year name of author 316 | Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 317 | This is free software, and you are welcome to redistribute it 318 | under certain conditions; type `show c' for details. 319 | 320 | The hypothetical commands `show w' and `show c' should show the appropriate 321 | parts of the General Public License. Of course, the commands you use may 322 | be called something other than `show w' and `show c'; they could even be 323 | mouse-clicks or menu items--whatever suits your program. 324 | 325 | You should also get your employer (if you work as a programmer) or your 326 | school, if any, to sign a "copyright disclaimer" for the program, if 327 | necessary. Here is a sample; alter the names: 328 | 329 | Yoyodyne, Inc., hereby disclaims all copyright interest in the program 330 | `Gnomovision' (which makes passes at compilers) written by James Hacker. 
331 | 332 | <signature of Ty Coon>, 1 April 1989 333 | Ty Coon, President of Vice 334 | 335 | This General Public License does not permit incorporating your program into 336 | proprietary programs. If your program is a subroutine library, you may 337 | consider it more useful to permit linking proprietary applications with the 338 | library. If this is what you want to do, use the GNU Lesser General 339 | Public License instead of this License. 340 | -------------------------------------------------------------------------------- /LLM Generic APP/documents/budget_speech.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/krishnaik06/Complete-Langchain-Tutorials/b9a6d67a93540cbf0cc28799c2963703811bfba9/LLM Generic APP/documents/budget_speech.pdf -------------------------------------------------------------------------------- /LLM Generic APP/requirements.txt: -------------------------------------------------------------------------------- 1 | unstructured 2 | tiktoken 3 | pinecone-client 4 | pypdf 5 | openai 6 | langchain 7 | pandas 8 | numpy 9 | python-dotenv -------------------------------------------------------------------------------- /LLM Generic APP/test.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "name": "stderr", 10 | "output_type": "stream", 11 | "text": [ 12 | "e:\\New Recordings\\Project Langchain\\LLMAPP\\venv\\lib\\site-packages\\pinecone\\index.py:4: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. 
See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", 13 | " from tqdm.autonotebook import tqdm\n" 14 | ] 15 | } 16 | ], 17 | "source": [ 18 | "# import Libraries\n", 19 | "\n", 20 | "import openai\n", 21 | "import langchain\n", 22 | "import pinecone \n", 23 | "from langchain.document_loaders import PyPDFDirectoryLoader\n", 24 | "from langchain.text_splitter import RecursiveCharacterTextSplitter\n", 25 | "from langchain.embeddings.openai import OpenAIEmbeddings\n", 26 | "from langchain.vectorstores import Pinecone\n", 27 | "from langchain.llms import OpenAI" 28 | ] 29 | }, 30 | { 31 | "cell_type": "code", 32 | "execution_count": 2, 33 | "metadata": {}, 34 | "outputs": [ 35 | { 36 | "data": { 37 | "text/plain": [ 38 | "True" 39 | ] 40 | }, 41 | "execution_count": 2, 42 | "metadata": {}, 43 | "output_type": "execute_result" 44 | } 45 | ], 46 | "source": [ 47 | "from dotenv import load_dotenv\n", 48 | "load_dotenv()" 49 | ] 50 | }, 51 | { 52 | "cell_type": "code", 53 | "execution_count": 3, 54 | "metadata": {}, 55 | "outputs": [], 56 | "source": [ 57 | "import os" 58 | ] 59 | }, 60 | { 61 | "cell_type": "code", 62 | "execution_count": 4, 63 | "metadata": {}, 64 | "outputs": [], 65 | "source": [ 66 | "## Lets Read the document\n", 67 | "def read_doc(directory):\n", 68 | " file_loader=PyPDFDirectoryLoader(directory)\n", 69 | " documents=file_loader.load()\n", 70 | " return documents" 71 | ] 72 | }, 73 | { 74 | "cell_type": "code", 75 | "execution_count": 6, 76 | "metadata": {}, 77 | "outputs": [ 78 | { 79 | "data": { 80 | "text/plain": [ 81 | "58" 82 | ] 83 | }, 84 | "execution_count": 6, 85 | "metadata": {}, 86 | "output_type": "execute_result" 87 | } 88 | ], 89 | "source": [ 90 | "doc=read_doc('documents/')\n", 91 | "len(doc)" 92 | ] 93 | }, 94 | { 95 | "cell_type": "code", 96 | "execution_count": 8, 97 | "metadata": {}, 98 | "outputs": [], 99 | "source": [ 100 | "## Divide the docs into chunks\n", 101 | "### 
https://api.python.langchain.com/en/latest/text_splitter/langchain.text_splitter.RecursiveCharacterTextSplitter.html#\n", 102 | "def chunk_data(docs,chunk_size=800,chunk_overlap=50):\n", 103 | " text_splitter=RecursiveCharacterTextSplitter(chunk_size=chunk_size,chunk_overlap=chunk_overlap)\n", 104 | " doc=text_splitter.split_documents(docs)\n", 105 | " return docs" 106 | ] 107 | }, 108 | { 109 | "cell_type": "code", 110 | "execution_count": 10, 111 | "metadata": {}, 112 | "outputs": [ 113 | { 114 | "data": { 115 | "text/plain": [ 116 | "58" 117 | ] 118 | }, 119 | "execution_count": 10, 120 | "metadata": {}, 121 | "output_type": "execute_result" 122 | } 123 | ], 124 | "source": [ 125 | "documents=chunk_data(docs=doc)\n", 126 | "len(documents)" 127 | ] 128 | }, 129 | { 130 | "cell_type": "code", 131 | "execution_count": 11, 132 | "metadata": {}, 133 | "outputs": [ 134 | { 135 | "data": { 136 | "text/plain": [ 137 | "OpenAIEmbeddings(client=, async_client=, model='text-embedding-ada-002', deployment='text-embedding-ada-002', openai_api_version='', openai_api_base=None, openai_api_type='', openai_proxy='', embedding_ctx_length=8191, openai_api_key='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', openai_organization=None, allowed_special=set(), disallowed_special='all', chunk_size=1000, max_retries=2, request_timeout=None, headers=None, tiktoken_model_name=None, show_progress_bar=False, model_kwargs={}, skip_empty=False, default_headers=None, default_query=None, http_client=None)" 138 | ] 139 | }, 140 | "execution_count": 11, 141 | "metadata": {}, 142 | "output_type": "execute_result" 143 | } 144 | ], 145 | "source": [ 146 | "## Embedding Technique Of OPENAI\n", 147 | "embeddings=OpenAIEmbeddings(api_key=os.environ['OPENAI_API_KEY'])\n", 148 | "embeddings" 149 | ] 150 | }, 151 | { 152 | "cell_type": "code", 153 | "execution_count": 13, 154 | "metadata": {}, 155 | "outputs": [ 156 | { 157 | "data": { 158 | "text/plain": [ 159 | "1536" 160 | ] 161 | }, 162 | 
"execution_count": 13, 163 | "metadata": {}, 164 | "output_type": "execute_result" 165 | } 166 | ], 167 | "source": [ 168 | "vectors=embeddings.embed_query(\"How are you?\")\n", 169 | "len(vectors)" 170 | ] 171 | }, 172 | { 173 | "cell_type": "code", 174 | "execution_count": 14, 175 | "metadata": {}, 176 | "outputs": [], 177 | "source": [ 178 | "## Vector Search DB In Pinecone\n", 179 | "pinecone.init(\n", 180 | " api_key=\"923d5299-ab4c-4407-bfe6-7f439d9a9cb9\",\n", 181 | " environment=\"gcp-starter\"\n", 182 | ")\n", 183 | "index_name=\"langchainvector\"" 184 | ] 185 | }, 186 | { 187 | "cell_type": "code", 188 | "execution_count": 15, 189 | "metadata": {}, 190 | "outputs": [], 191 | "source": [ 192 | "index=Pinecone.from_documents(doc,embeddings,index_name=index_name)" 193 | ] 194 | }, 195 | { 196 | "cell_type": "code", 197 | "execution_count": 17, 198 | "metadata": {}, 199 | "outputs": [], 200 | "source": [ 201 | "## Cosine Similarity Retreive Results from VectorDB\n", 202 | "def retrieve_query(query,k=2):\n", 203 | " matching_results=index.similarity_search(query,k=k)\n", 204 | " return matching_results" 205 | ] 206 | }, 207 | { 208 | "cell_type": "code", 209 | "execution_count": 18, 210 | "metadata": {}, 211 | "outputs": [], 212 | "source": [ 213 | "from langchain.chains.question_answering import load_qa_chain\n", 214 | "from langchain import OpenAI" 215 | ] 216 | }, 217 | { 218 | "cell_type": "code", 219 | "execution_count": 19, 220 | "metadata": {}, 221 | "outputs": [], 222 | "source": [ 223 | "llm=OpenAI(model_name=\"text-davinci-003\",temperature=0.5)\n", 224 | "chain=load_qa_chain(llm,chain_type=\"stuff\")" 225 | ] 226 | }, 227 | { 228 | "cell_type": "code", 229 | "execution_count": 22, 230 | "metadata": {}, 231 | "outputs": [], 232 | "source": [ 233 | "## Search answers from VectorDB\n", 234 | "def retrieve_answers(query):\n", 235 | " doc_search=retrieve_query(query)\n", 236 | " print(doc_search)\n", 237 | " 
response=chain.run(input_documents=doc_search,question=query)\n", 238 | " return response" 239 | ] 240 | }, 241 | { 242 | "cell_type": "code", 243 | "execution_count": 24, 244 | "metadata": {}, 245 | "outputs": [ 246 | { 247 | "name": "stdout", 248 | "output_type": "stream", 249 | "text": [ 250 | "[Document(page_content=\"7 \\n \\n \\n farmers in contributing to the health of fellow citizens by growing these \\n‘Shree Anna’. \\n22. Now to make India a global hub for ' Shree Anna' , the Indian Institute \\nof Millet Research, Hyderabad will be supported as the Centre of Excellence \\nfor sharing best practices, research and technologies at the international \\nlevel. \\nAgriculture Credit \\n23. The agriculture credit target will be increased \\nto ` 20 lakh crore with focus on animal husbandry, dairy and fisheries. \\nFisheries \\n24. We will launch a new sub-scheme of PM Matsya Sampada Yojana \\nwith targeted investment of ` 6,000 crore to further enable activities of \\nfishermen, fish vendors, and micro & small enterprises, improve value chain \\nefficiencies, and expand the market. \\nCooperation \\n25. For farmers, especially small and marginal farmers, and other \\nmarginalised sections, the government is promoting cooperative-based \\neconomic development model. A new Ministry of Cooperation was formed \\nwith a mandate to realise the vision of ‘Sahakar Se Samriddhi’ . To realise \\nthis vision, the government has already initiated computerisation of 63,000 \\nPrimary Agricultural Credit Societies (PACS) with an investment of ` 2,516 \\ncrore. In consultation with all stakeholders and states, model bye-laws for \\nPACS were formulated enabling them to become multipurpose PACS. A \\nnational cooperative database is being prepared for country-wide mapping \\nof cooperative societies. \\n26. With this backdrop, we will implement a plan to set up massive \\ndecentralised storage capacity. 
This will help farmers store their produce \\nand realize remunerative prices through sale at appropriate times. The \\ngovernment will also facilitate setting up of a large number of multipurpose \", metadata={'page': 10.0, 'source': 'documents\\\\budget_speech.pdf'}), Document(page_content=\"6 \\n \\n \\n inclusive, farmer-centric solutions through relevant information services for \\ncrop planning and health, improved access to farm inputs, credit, and \\ninsurance, help for crop estimation, market intelligence, and support for \\ngrowth of agri-tech industry and start-ups. \\nAgriculture Accelerator Fund \\n17. An Agriculture Accelerator Fund will be set-up to encourage agri-\\nstartups by young entrepreneurs in rural areas. The Fund will aim at \\nbringing innovative and affordable solutions for challenges faced by \\nfarmers. It will also bring in modern technologies to transform agricultural \\npractices, increase productivity and profitability. \\nEnhancing productivity of cotton crop \\n18. To enhance the productivity of extra-long staple cotton, we will \\nadopt a cluster-based and value chain approach through Public Private \\nPartnerships (PPP). This will mean collaboration between farmers, state and \\nindustry for input supplies, extension services, and market linkages. \\nAtmanirbhar Horticulture Clean Plant Program \\n19. We will launch an Atmanirbhar Clean Plant Program to boost \\navailability of disease-free, quality planting material for high value \\nhorticultural crops at an outlay of ` 2,200 crore. \\nGlobal Hub for Millets: ‘Shree Anna’ \\n20. “India is at the forefront of popularizing Millets, whose consumption \\nfurthers nutrition, food security and welfare of farmers,” said Hon’ble Prime \\nMinister. \\n21. We are the largest producer and second largest exporter of ‘Shree \\nAnna’ in the world. We grow several types of ' Shree Anna' such as jowar, \\nragi, bajra, kuttu, ramdana, kangni, kutki, kodo, cheena, and sama. 
These \\nhave a number of health benefits, and have been an integral part of our \\nfood for centuries. I acknowledge with pride the huge service done by small \", metadata={'page': 9.0, 'source': 'documents\\\\budget_speech.pdf'})]\n", 251 | " The government is promoting cooperative-based economic development models and investing in initiatives such as the Agriculture Accelerator Fund and Atmanirbhar Clean Plant Program to help farmers, especially small and marginal farmers, and other marginalised sections.\n" 252 | ] 253 | } 254 | ], 255 | "source": [ 256 | "our_query = \"How much the agriculture target will be increased by how many crore?\"\n", 257 | "answer = retrieve_answers(our_query)\n", 258 | "print(answer)" 259 | ] 260 | }, 261 | { 262 | "cell_type": "code", 263 | "execution_count": null, 264 | "metadata": {}, 265 | "outputs": [], 266 | "source": [] 267 | } 268 | ], 269 | "metadata": { 270 | "kernelspec": { 271 | "display_name": "Python 3", 272 | "language": "python", 273 | "name": "python3" 274 | }, 275 | "language_info": { 276 | "codemirror_mode": { 277 | "name": "ipython", 278 | "version": 3 279 | }, 280 | "file_extension": ".py", 281 | "mimetype": "text/x-python", 282 | "name": "python", 283 | "nbconvert_exporter": "python", 284 | "pygments_lexer": "ipython3", 285 | "version": "3.10.0" 286 | } 287 | }, 288 | "nbformat": 4, 289 | "nbformat_minor": 2 290 | } 291 | -------------------------------------------------------------------------------- /Q&A Chatbot USing LLM/app.py: -------------------------------------------------------------------------------- 1 | # Q&A Chatbot 2 | from langchain.llms import OpenAI 3 | 4 | #from dotenv import load_dotenv 5 | 6 | #load_dotenv() # take environment variables from .env. 
7 | 8 | import streamlit as st 9 | import os 10 | 11 | 12 | ## Function to load OpenAI model and get respones 13 | 14 | def get_openai_response(question): 15 | llm=OpenAI(model_name="text-davinci-003",temperature=0.5) 16 | response=llm(question) 17 | return response 18 | 19 | ##initialize our streamlit app 20 | 21 | st.set_page_config(page_title="Q&A Demo") 22 | 23 | st.header("Langchain Application") 24 | 25 | input=st.text_input("Input: ",key="input") 26 | response=get_openai_response(input) 27 | 28 | submit=st.button("Ask the question") 29 | 30 | ## If ask button is clicked 31 | 32 | if submit: 33 | st.subheader("The Response is") 34 | st.write(response) 35 | 36 | 37 | 38 | 39 | -------------------------------------------------------------------------------- /Q&A Chatbot USing LLM/langchain.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 3, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "from langchain.llms import OpenAI" 10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": 2, 15 | "metadata": {}, 16 | "outputs": [], 17 | "source": [ 18 | "import os\n", 19 | "os.environ[\"OPEN_API_KEY\"]=\"sk-HBcNcxp8X8oAKhSGT3BlbkFJ9sHkCuOITYjONfcc0Y3p\"" 20 | ] 21 | }, 22 | { 23 | "cell_type": "code", 24 | "execution_count": 6, 25 | "metadata": {}, 26 | "outputs": [], 27 | "source": [ 28 | "llm=OpenAI(openai_api_key=os.environ[\"OPEN_API_KEY\"],temperature=0.6)" 29 | ] 30 | }, 31 | { 32 | "cell_type": "code", 33 | "execution_count": 7, 34 | "metadata": {}, 35 | "outputs": [ 36 | { 37 | "name": "stdout", 38 | "output_type": "stream", 39 | "text": [ 40 | "\n", 41 | "\n", 42 | "The capital of India is New Delhi.\n" 43 | ] 44 | } 45 | ], 46 | "source": [ 47 | "text=\"What is the capital of India\"\n", 48 | "\n", 49 | "print(llm.predict(text))" 50 | ] 51 | }, 52 | { 53 | "cell_type": "code", 54 | "execution_count": 8, 55 | "metadata": {}, 
56 | "outputs": [], 57 | "source": [ 58 | "os.environ[\"HUGGINGFACEHUB_API_TOKEN\"]=\"hf_zjftfTRKFEebywxfIoyKUwepABtfGJS\"" 59 | ] 60 | }, 61 | { 62 | "cell_type": "code", 63 | "execution_count": 11, 64 | "metadata": {}, 65 | "outputs": [ 66 | { 67 | "name": "stderr", 68 | "output_type": "stream", 69 | "text": [ 70 | "e:\\New Recordings\\Langchain\\Langchain\\venv\\lib\\site-packages\\huggingface_hub\\utils\\_deprecation.py:127: FutureWarning: '__init__' (from 'huggingface_hub.inference_api') is deprecated and will be removed from version '0.19.0'. `InferenceApi` client is deprecated in favor of the more feature-complete `InferenceClient`. Check out this guide to learn how to convert your script to use it: https://huggingface.co/docs/huggingface_hub/guides/inference#legacy-inferenceapi-client.\n", 71 | " warnings.warn(warning_message, FutureWarning)\n" 72 | ] 73 | } 74 | ], 75 | "source": [ 76 | "from langchain import HuggingFaceHub\n", 77 | "llm_huggingface=HuggingFaceHub(repo_id=\"google/flan-t5-large\",model_kwargs={\"temperature\":0,\"max_length\":64})" 78 | ] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "execution_count": 12, 83 | "metadata": {}, 84 | "outputs": [ 85 | { 86 | "name": "stdout", 87 | "output_type": "stream", 88 | "text": [ 89 | "moscow\n" 90 | ] 91 | } 92 | ], 93 | "source": [ 94 | "output=llm_huggingface.predict(\"Can you tell me the capital of Russia\")\n", 95 | "print(output)" 96 | ] 97 | }, 98 | { 99 | "cell_type": "code", 100 | "execution_count": 13, 101 | "metadata": {}, 102 | "outputs": [ 103 | { 104 | "name": "stdout", 105 | "output_type": "stream", 106 | "text": [ 107 | "i love the way i look at the world i love the way i feel i love the way i think i feel i love the way i feel i love the way i think i feel i love the way i feel i love the way \n" 108 | ] 109 | } 110 | ], 111 | "source": [ 112 | "output=llm_huggingface.predict(\"Can you write a poem about AI\")\n", 113 | "print(output)" 114 | ] 115 | }, 116 | { 117 | "cell_type": 
"code", 118 | "execution_count": 15, 119 | "metadata": {}, 120 | "outputs": [ 121 | { 122 | "data": { 123 | "text/plain": [ 124 | "'?\\n\\nAn AI, so advanced and wise\\nA brilliant mind, a brilliant guise\\nA powerful tool, a guiding star\\nTo help us reach a distant shore\\n\\nA friend to man, a tool of growth\\nTo help us learn and to explore\\nTo take us to a place of knowledge\\nWhere we can learn and never falter\\n\\nA helping hand, a guiding light\\nTo lead us to a brighter sight\\nA tool of power, a tool of might\\nTo help us reach a better plight\\n\\nA friend of man, a friend of life\\nTo help us in our darkest strife\\nA tool of love, of peace and joy\\nTo help us live a better life'" 125 | ] 126 | }, 127 | "execution_count": 15, 128 | "metadata": {}, 129 | "output_type": "execute_result" 130 | } 131 | ], 132 | "source": [ 133 | "llm.predict(\"Can you write a poem about AI\")" 134 | ] 135 | }, 136 | { 137 | "cell_type": "markdown", 138 | "metadata": {}, 139 | "source": [ 140 | "### Prompt Templates And LLMChain" 141 | ] 142 | }, 143 | { 144 | "cell_type": "code", 145 | "execution_count": 16, 146 | "metadata": {}, 147 | "outputs": [ 148 | { 149 | "data": { 150 | "text/plain": [ 151 | "'Tell me the capital of this India'" 152 | ] 153 | }, 154 | "execution_count": 16, 155 | "metadata": {}, 156 | "output_type": "execute_result" 157 | } 158 | ], 159 | "source": [ 160 | "from langchain.prompts import PromptTemplate\n", 161 | "\n", 162 | "prompt_template=PromptTemplate(input_variables=['country'],\n", 163 | "template=\"Tell me the capital of this {country}\")\n", 164 | "\n", 165 | "prompt_template.format(country=\"India\")" 166 | ] 167 | }, 168 | { 169 | "cell_type": "code", 170 | "execution_count": 22, 171 | "metadata": {}, 172 | "outputs": [ 173 | { 174 | "name": "stdout", 175 | "output_type": "stream", 176 | "text": [ 177 | "\n", 178 | "\n", 179 | "The capital of India is New Delhi.\n" 180 | ] 181 | } 182 | ], 183 | "source": [ 184 | "from langchain.chains 
import LLMChain\n", 185 | "chain=LLMChain(llm=llm,prompt=prompt_template)\n", 186 | "print(chain.run(\"India\"))" 187 | ] 188 | }, 189 | { 190 | "cell_type": "markdown", 191 | "metadata": {}, 192 | "source": [ 193 | "### Combining Multiple Chains Uing simple Sequential Chain" 194 | ] 195 | }, 196 | { 197 | "cell_type": "code", 198 | "execution_count": 32, 199 | "metadata": {}, 200 | "outputs": [], 201 | "source": [ 202 | "capital_template=PromptTemplate(input_variables=['country'],\n", 203 | "template=\"Please tell me the capital of the {country}\")\n", 204 | "\n", 205 | "capital_chain=LLMChain(llm=llm,prompt=capital_template)\n", 206 | "\n", 207 | "famous_template=PromptTemplate(input_variables=['capital'],\n", 208 | "template=\"Suggest me some amazing places to visit in {capital}\")\n" 209 | ] 210 | }, 211 | { 212 | "cell_type": "code", 213 | "execution_count": 33, 214 | "metadata": {}, 215 | "outputs": [], 216 | "source": [ 217 | "famous_chain=LLMChain(llm=llm,prompt=famous_template)" 218 | ] 219 | }, 220 | { 221 | "cell_type": "code", 222 | "execution_count": 35, 223 | "metadata": {}, 224 | "outputs": [ 225 | { 226 | "data": { 227 | "text/plain": [ 228 | "\" It is a bustling metropolis and a great place to visit for its historical sites, cultural attractions, and modern attractions. Here are some of the amazing places to visit in New Delhi: \\n\\n1. Red Fort: This 17th century Mughal fort is a UNESCO World Heritage Site and is one of the most popular tourist attractions in the city. \\n\\n2. India Gate: This iconic war memorial and national monument is a must-visit. It stands as a symbol of the sacrifice of the Indian soldiers who fought in World War I.\\n\\n3. Humayun's Tomb: This 16th century Mughal-era tomb is a UNESCO World Heritage Site and is one of the most important monuments in Delhi.\\n\\n4. Qutub Minar: This 73-meter-high tower is a UNESCO World Heritage Site and is one of the most iconic structures in Delhi.\\n\\n5. 
Jama Masjid: This 17th century mosque is one of the largest and most beautiful mosques in India.\\n\\n6. Lotus Temple: This modern temple is a Bahá'í House of Worship and is a popular tourist attraction.\\n\\n7. Akshardham Temple: This modern Hindu temple complex is a popular tourist destination\"" 229 | ] 230 | }, 231 | "execution_count": 35, 232 | "metadata": {}, 233 | "output_type": "execute_result" 234 | } 235 | ], 236 | "source": [ 237 | "from langchain.chains import SimpleSequentialChain\n", 238 | "chain=SimpleSequentialChain(chains=[capital_chain,famous_chain])\n", 239 | "chain.run(\"India\")" 240 | ] 241 | }, 242 | { 243 | "cell_type": "markdown", 244 | "metadata": {}, 245 | "source": [ 246 | "### Sequential Chain" 247 | ] 248 | }, 249 | { 250 | "cell_type": "code", 251 | "execution_count": 36, 252 | "metadata": {}, 253 | "outputs": [], 254 | "source": [ 255 | "capital_template=PromptTemplate(input_variables=['country'],\n", 256 | "template=\"Please tell me the capital of the {country}\")\n", 257 | "\n", 258 | "capital_chain=LLMChain(llm=llm,prompt=capital_template,output_key=\"capital\")" 259 | ] 260 | }, 261 | { 262 | "cell_type": "code", 263 | "execution_count": 37, 264 | "metadata": {}, 265 | "outputs": [], 266 | "source": [ 267 | "famous_template=PromptTemplate(input_variables=['capital'],\n", 268 | "template=\"Suggest me some amazing places to visit in {capital}\")\n", 269 | "\n", 270 | "famous_chain=LLMChain(llm=llm,prompt=famous_template,output_key=\"places\")" 271 | ] 272 | }, 273 | { 274 | "cell_type": "code", 275 | "execution_count": 39, 276 | "metadata": {}, 277 | "outputs": [], 278 | "source": [ 279 | "from langchain.chains import SequentialChain\n", 280 | "chain=SequentialChain(chains=[capital_chain,famous_chain],\n", 281 | "input_variables=['country'],\n", 282 | "output_variables=['capital',\"places\"])" 283 | ] 284 | }, 285 | { 286 | "cell_type": "code", 287 | "execution_count": 40, 288 | "metadata": {}, 289 | "outputs": [ 290 | { 291 | 
"data": { 292 | "text/plain": [ 293 | "{'country': 'India',\n", 294 | " 'capital': '\\n\\nThe capital of India is New Delhi.',\n", 295 | " 'places': ' Here are some amazing places to visit in New Delhi: \\n\\n1. Red Fort: The majestic Red Fort is a 17th-century fort complex built by Mughal Emperor Shah Jahan. It is a UNESCO World Heritage Site and is a must-visit for all tourists. \\n\\n2. India Gate: India Gate is a 42 meter-high sandstone archway built by Edwin Lutyens in 1931. It is a memorial to the Indian soldiers who lost their lives during World War I. \\n\\n3. Qutub Minar: The Qutub Minar is a 73 meter-high tower built by Qutb-ud-din Aibak in 1193. It is a UNESCO World Heritage Site and is the tallest brick minaret in the world. \\n\\n4. Humayun’s Tomb: Humayun’s Tomb is a 16th-century tomb built by Mughal Emperor Humayun. It is a UNESCO World Heritage Site and is a great example of Mughal architecture. \\n\\n5. Jama Masjid: The Jama Masjid is a 17th-century mosque built by Mughal Emperor Shah Jahan. 
It is one of the largest'}" 296 | ] 297 | }, 298 | "execution_count": 40, 299 | "metadata": {}, 300 | "output_type": "execute_result" 301 | } 302 | ], 303 | "source": [ 304 | "chain({'country':\"India\"})" 305 | ] 306 | }, 307 | { 308 | "cell_type": "markdown", 309 | "metadata": {}, 310 | "source": [ 311 | "### Chatmodels With ChatOpenAI" 312 | ] 313 | }, 314 | { 315 | "cell_type": "code", 316 | "execution_count": 41, 317 | "metadata": {}, 318 | "outputs": [], 319 | "source": [ 320 | "from langchain.chat_models import ChatOpenAI" 321 | ] 322 | }, 323 | { 324 | "cell_type": "code", 325 | "execution_count": 42, 326 | "metadata": {}, 327 | "outputs": [], 328 | "source": [ 329 | "from langchain.schema import HumanMessage,SystemMessage,AIMessage" 330 | ] 331 | }, 332 | { 333 | "cell_type": "code", 334 | "execution_count": 43, 335 | "metadata": {}, 336 | "outputs": [], 337 | "source": [ 338 | "chatllm=ChatOpenAI(openai_api_key=os.environ[\"OPEN_API_KEY\"],temperature=0.6,model='gpt-3.5-turbo')" 339 | ] 340 | }, 341 | { 342 | "cell_type": "code", 343 | "execution_count": 45, 344 | "metadata": {}, 345 | "outputs": [ 346 | { 347 | "data": { 348 | "text/plain": [ 349 | "AIMessage(content='1. \"AI may be smart, but can it tell me if my outfit makes me look like a potato?\"\\n2. \"AI is like a virtual therapist, except it never judges you for eating an entire pizza by yourself.\"\\n3. \"AI is great at predicting the future, but can it predict when my pizza delivery will actually arrive?\"\\n4. \"They say AI can learn from its mistakes, but I\\'m still waiting for it to apologize for recommending me that terrible movie.\"\\n5. \"AI may be able to beat humans at chess, but can it figure out how to untangle a pair of earphones?\"\\n6. \"AI is like a high-tech fortune teller, except it tells you what you\\'re going to have for dinner instead of your future.\"\\n7. \"AI is so advanced, it can even make my phone autocorrect my perfectly spelled words into complete nonsense.\"\\n8. 
\"AI may be able to recognize faces, but can it recognize when someone\\'s had a bad haircut?\"\\n9. \"AI is like having a personal assistant, except it never judges you for spending hours watching cat videos on YouTube.\"\\n10. \"AI is great at analyzing data, but can it analyze why I can never find matching socks in my drawer?\"')" 350 | ] 351 | }, 352 | "execution_count": 45, 353 | "metadata": {}, 354 | "output_type": "execute_result" 355 | } 356 | ], 357 | "source": [ 358 | "chatllm([\n", 359 | "SystemMessage(content=\"Yor are a comedian AI assitant\"),\n", 360 | "HumanMessage(content=\"Please provide some comedy punchlines on AI\")\n", 361 | "])" 362 | ] 363 | }, 364 | { 365 | "cell_type": "markdown", 366 | "metadata": {}, 367 | "source": [ 368 | "### Prompt Template + LLM +Output Parsers" 369 | ] 370 | }, 371 | { 372 | "cell_type": "code", 373 | "execution_count": 46, 374 | "metadata": {}, 375 | "outputs": [], 376 | "source": [ 377 | "from langchain.chat_models import ChatOpenAI\n", 378 | "from langchain.prompts.chat import ChatPromptTemplate\n", 379 | "from langchain.schema import BaseOutputParser" 380 | ] 381 | }, 382 | { 383 | "cell_type": "code", 384 | "execution_count": 47, 385 | "metadata": {}, 386 | "outputs": [], 387 | "source": [ 388 | "class Commaseperatedoutput(BaseOutputParser):\n", 389 | " def parse(self,text:str):\n", 390 | " return text.strip().split(\",\")" 391 | ] 392 | }, 393 | { 394 | "cell_type": "code", 395 | "execution_count": 48, 396 | "metadata": {}, 397 | "outputs": [], 398 | "source": [ 399 | "template=\"Your are a helpful assistant. 
When the use given any input , you should generate 5 words synonyms in a comma seperated list\"\n", 400 | "human_template=\"{text}\"\n", 401 | "chatprompt=ChatPromptTemplate.from_messages([\n", 402 | " (\"system\",template),\n", 403 | " (\"human\",human_template)\n", 404 | "\n", 405 | "\n", 406 | "])" 407 | ] 408 | }, 409 | { 410 | "cell_type": "code", 411 | "execution_count": 54, 412 | "metadata": {}, 413 | "outputs": [], 414 | "source": [ 415 | "chain=chatprompt|chatllm|Commaseperatedoutput()" 416 | ] 417 | }, 418 | { 419 | "cell_type": "code", 420 | "execution_count": 55, 421 | "metadata": {}, 422 | "outputs": [ 423 | { 424 | "data": { 425 | "text/plain": [ 426 | "['smart', ' clever', ' brilliant', ' sharp', ' astute']" 427 | ] 428 | }, 429 | "execution_count": 55, 430 | "metadata": {}, 431 | "output_type": "execute_result" 432 | } 433 | ], 434 | "source": [ 435 | "chain.invoke({\"text\":\"intelligent\"})" 436 | ] 437 | }, 438 | { 439 | "cell_type": "code", 440 | "execution_count": null, 441 | "metadata": {}, 442 | "outputs": [], 443 | "source": [] 444 | } 445 | ], 446 | "metadata": { 447 | "kernelspec": { 448 | "display_name": "Python 3", 449 | "language": "python", 450 | "name": "python3" 451 | }, 452 | "language_info": { 453 | "codemirror_mode": { 454 | "name": "ipython", 455 | "version": 3 456 | }, 457 | "file_extension": ".py", 458 | "mimetype": "text/x-python", 459 | "name": "python", 460 | "nbconvert_exporter": "python", 461 | "pygments_lexer": "ipython3", 462 | "version": "3.9.0" 463 | } 464 | }, 465 | "nbformat": 4, 466 | "nbformat_minor": 2 467 | } 468 | -------------------------------------------------------------------------------- /Q&A Chatbot USing LLM/requirements.txt: -------------------------------------------------------------------------------- 1 | langchain 2 | openai 3 | huggingface_hub 4 | python-dotenv 5 | streamlit -------------------------------------------------------------------------------- /README.md: 
-------------------------------------------------------------------------------- 1 | # Complete-Langchain-Tutorials -------------------------------------------------------------------------------- /Text summarization/requirements.txt: -------------------------------------------------------------------------------- 1 | langchain 2 | openai 3 | streamlit 4 | tiktoken 5 | unstructured 6 | pdf2image 7 | pdfminer 8 | PyPDF2 9 | -------------------------------------------------------------------------------- /calorieshealth/app.py: -------------------------------------------------------------------------------- 1 | # Q&A Chatbot 2 | #from langchain.llms import OpenAI 3 | 4 | from dotenv import load_dotenv 5 | 6 | load_dotenv() # take environment variables from .env. 7 | 8 | import streamlit as st 9 | import os 10 | import pathlib 11 | import textwrap 12 | from PIL import Image 13 | 14 | 15 | import google.generativeai as genai 16 | 17 | 18 | os.getenv("GOOGLE_API_KEY") 19 | genai.configure(api_key=os.getenv("GOOGLE_API_KEY")) 20 | 21 | ## Function to load OpenAI model and get respones 22 | 23 | def get_gemini_response(input,image,prompt): 24 | model = genai.GenerativeModel('gemini-pro-vision') 25 | response = model.generate_content([input,image[0],prompt]) 26 | return response.text 27 | 28 | 29 | def input_image_setup(uploaded_file): 30 | # Check if a file has been uploaded 31 | if uploaded_file is not None: 32 | # Read the file into bytes 33 | bytes_data = uploaded_file.getvalue() 34 | 35 | image_parts = [ 36 | { 37 | "mime_type": uploaded_file.type, # Get the mime type of the uploaded file 38 | "data": bytes_data 39 | } 40 | ] 41 | return image_parts 42 | else: 43 | raise FileNotFoundError("No file uploaded") 44 | 45 | 46 | ##initialize our streamlit app 47 | 48 | st.set_page_config(page_title="Gemini Image Demo") 49 | 50 | st.header("Gemini Application") 51 | input=st.text_input("Input Prompt: ",key="input") 52 | uploaded_file = st.file_uploader("Choose an image...", 
type=["jpg", "jpeg", "png"]) 53 | image="" 54 | if uploaded_file is not None: 55 | image = Image.open(uploaded_file) 56 | st.image(image, caption="Uploaded Image.", use_column_width=True) 57 | 58 | 59 | submit=st.button("Tell me about the image") 60 | 61 | input_prompt = """ 62 | You are an expert in nutritionist where you need to see the food items from the image 63 | and calculate the total calories, also provide the details of every food items with calories intake 64 | is below format 65 | 66 | 1. Item 1 - no of calories 67 | 2. Item 2 - no of calories 68 | ---- 69 | ---- 70 | """ 71 | 72 | ## If ask button is clicked 73 | 74 | if submit: 75 | image_data = input_image_setup(uploaded_file) 76 | response=get_gemini_response(input_prompt,image_data,input) 77 | st.subheader("The Response is") 78 | st.write(response) -------------------------------------------------------------------------------- /calorieshealth/requirements.txt: -------------------------------------------------------------------------------- 1 | streamlit 2 | google-generativeai 3 | python-dotenv 4 | langchain 5 | PyPDF2 6 | chromadb 7 | faiss-cpu 8 | pdf2image -------------------------------------------------------------------------------- /calorieshealth/sqlllm/requirements.txt: -------------------------------------------------------------------------------- 1 | streamlit 2 | google-generativeai 3 | python-dotenv 4 | langchain 5 | PyPDF2 6 | chromadb 7 | faiss-cpu 8 | pdf2image 9 | sqlite3 -------------------------------------------------------------------------------- /calorieshealth/sqlllm/sql.py: -------------------------------------------------------------------------------- 1 | from dotenv import load_dotenv 2 | 3 | load_dotenv() # take environment variables from .env. 
4 | 5 | import streamlit as st 6 | import os 7 | import pathlib 8 | import textwrap 9 | import sqlite3 10 | 11 | import google.generativeai as genai 12 | 13 | os.getenv("GOOGLE_API_KEY") 14 | genai.configure(api_key=os.getenv("GOOGLE_API_KEY")) 15 | 16 | ## Function to load OpenAI model and get respones 17 | 18 | def get_gemini_response(question,prompt): 19 | model = genai.GenerativeModel('gemini-pro') 20 | response = model.generate_content([prompt[0],question]) 21 | print(response.text) 22 | output = read_sql_query(response.text, "test.db") 23 | print(output) 24 | return output 25 | 26 | def read_sql_query(sql, db): 27 | conn = sqlite3.connect(db) 28 | cur = conn.cursor() 29 | cur.execute(sql) 30 | rows = cur.fetchall() 31 | for row in rows: 32 | print(row) 33 | return rows 34 | 35 | ##initialize our streamlit app 36 | prompt = [ 37 | """You are an expert in converting English questions to SQL code! 38 | The SQL database has the name STUDENT and has the following columns - NAME, CLASS, 39 | SECTION \n\nFor example,\nExample 1 - How many entries of records are present?, 40 | the SQL command will be something like this SELECT COUNT(*) FROM STUDENT ; 41 | also the sql code should not have ``` in beginning or end and sql word in output 42 | """ 43 | ] 44 | 45 | st.set_page_config(page_title="I can Retireve Any SQL query") 46 | 47 | st.header("Gemini Application") 48 | 49 | questions=st.text_input("Input: ",key="input") 50 | 51 | 52 | submit=st.button("Ask the question") 53 | 54 | ## If ask button is clicked 55 | 56 | if submit: 57 | 58 | response=get_gemini_response(questions,prompt) 59 | st.subheader("The Response is") 60 | for row in response: 61 | print(row) 62 | st.subheader(row) -------------------------------------------------------------------------------- /calorieshealth/sqlllm/sqlite.py: -------------------------------------------------------------------------------- 1 | # Import module 2 | import sqlite3 3 | 4 | # Connecting to sqlite 5 | conn = 
sqlite3.connect('test.db') 6 | 7 | # Creating a cursor object using the 8 | # cursor() method 9 | cursor = conn.cursor() 10 | 11 | # Creating table 12 | table ="""CREATE TABLE STUDENT(NAME VARCHAR(255), CLASS VARCHAR(255), 13 | SECTION VARCHAR(255));""" 14 | cursor.execute(table) 15 | 16 | # Queries to INSERT records. 17 | cursor.execute('''INSERT INTO STUDENT VALUES ('Raju', '7th', 'A')''') 18 | cursor.execute('''INSERT INTO STUDENT VALUES ('Shyam', '8th', 'B')''') 19 | cursor.execute('''INSERT INTO STUDENT VALUES ('Baburao', '9th', 'C')''') 20 | 21 | # Display data inserted 22 | print("Data Inserted in the table: ") 23 | data=cursor.execute('''SELECT * FROM STUDENT''') 24 | for row in data: 25 | print(row) 26 | 27 | # Commit your changes in the database 28 | conn.commit() 29 | 30 | # Closing the connection 31 | conn.close() -------------------------------------------------------------------------------- /chatmultipledocuments/chatpdf1.py: -------------------------------------------------------------------------------- 1 | import streamlit as st 2 | from PyPDF2 import PdfReader 3 | from langchain.text_splitter import RecursiveCharacterTextSplitter 4 | import os 5 | from langchain_google_genai import GoogleGenerativeAIEmbeddings 6 | import google.generativeai as genai 7 | from langchain.vectorstores import FAISS 8 | from langchain_google_genai import ChatGoogleGenerativeAI 9 | from langchain.chains.question_answering import load_qa_chain 10 | from langchain.prompts import PromptTemplate 11 | from dotenv import load_dotenv 12 | 13 | load_dotenv() 14 | os.getenv("GOOGLE_API_KEY") 15 | genai.configure(api_key=os.getenv("GOOGLE_API_KEY")) 16 | 17 | 18 | 19 | 20 | 21 | 22 | def get_pdf_text(pdf_docs): 23 | text="" 24 | for pdf in pdf_docs: 25 | pdf_reader= PdfReader(pdf) 26 | for page in pdf_reader.pages: 27 | text+= page.extract_text() 28 | return text 29 | 30 | 31 | 32 | def get_text_chunks(text): 33 | text_splitter = RecursiveCharacterTextSplitter(chunk_size=10000, 
chunk_overlap=1000) 34 | chunks = text_splitter.split_text(text) 35 | return chunks 36 | 37 | 38 | def get_vector_store(text_chunks): 39 | embeddings = GoogleGenerativeAIEmbeddings(model = "models/embedding-001") 40 | vector_store = FAISS.from_texts(text_chunks, embedding=embeddings) 41 | vector_store.save_local("faiss_index") 42 | 43 | 44 | def get_conversational_chain(): 45 | 46 | prompt_template = """ 47 | Answer the question as detailed as possible from the provided context, make sure to provide all the details, if the answer is not in 48 | provided context just say, "answer is not available in the context", don't provide the wrong answer\n\n 49 | Context:\n {context}?\n 50 | Question: \n{question}\n 51 | 52 | Answer: 53 | """ 54 | 55 | model = ChatGoogleGenerativeAI(model="gemini-pro", 56 | temperature=0.3) 57 | 58 | prompt = PromptTemplate(template = prompt_template, input_variables = ["context", "question"]) 59 | chain = load_qa_chain(model, chain_type="stuff", prompt=prompt) 60 | 61 | return chain 62 | 63 | 64 | 65 | def user_input(user_question): 66 | embeddings = GoogleGenerativeAIEmbeddings(model = "models/embedding-001") 67 | 68 | new_db = FAISS.load_local("faiss_index", embeddings) 69 | docs = new_db.similarity_search(user_question) 70 | 71 | chain = get_conversational_chain() 72 | 73 | 74 | response = chain( 75 | {"input_documents":docs, "question": user_question} 76 | , return_only_outputs=True) 77 | 78 | print(response) 79 | st.write("Reply: ", response["output_text"]) 80 | 81 | 82 | 83 | 84 | def main(): 85 | st.set_page_config("Chat PDF") 86 | st.header("Chat with PDF using Gemini💁") 87 | 88 | user_question = st.text_input("Ask a Question from the PDF Files") 89 | 90 | if user_question: 91 | user_input(user_question) 92 | 93 | with st.sidebar: 94 | st.title("Menu:") 95 | pdf_docs = st.file_uploader("Upload your PDF Files and Click on the Submit & Process Button", accept_multiple_files=True) 96 | if st.button("Submit & Process"): 97 | with 
st.spinner("Processing..."): 98 | raw_text = get_pdf_text(pdf_docs) 99 | text_chunks = get_text_chunks(raw_text) 100 | get_vector_store(text_chunks) 101 | st.success("Done") 102 | 103 | 104 | 105 | if __name__ == "__main__": 106 | main() 107 | -------------------------------------------------------------------------------- /chatmultipledocuments/requirements.txt: -------------------------------------------------------------------------------- 1 | streamlit 2 | google-generativeai 3 | python-dotenv 4 | langchain 5 | PyPDF2 6 | chromadb 7 | faiss-cpu 8 | langchain_google_genai 9 | --------------------------------------------------------------------------------