├── vector-database ├── readme.md └── Vector_Databse.ipynb ├── finetuned-gpt ├── readme.md └── document-based-chatGPT.ipynb ├── resource ├── readme.md ├── apg2.png ├── apg4.png ├── git4.png ├── ss4.png ├── 10lines3.gif └── 10typing.gif ├── voice-assistant └── readme.md ├── requirements.txt ├── chatbot ├── pass.yml ├── fantastic-chatbot-gradio.py ├── readme.md └── custom_chatbot.ipynb ├── web-scraping-summarizer ├── pass.yml ├── web-scraping-summarizer.py └── readme.md ├── email-automation ├── pass.yml ├── readme.md └── cool-odd.py ├── whisper-speech-text ├── readme.md └── whisper-api-basic.ipynb └── README.md /vector-database/readme.md: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /finetuned-gpt/readme.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /resource/readme.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | -------------------------------------------------------------------------------- /voice-assistant/readme.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | openai 2 | gradio 3 | -------------------------------------------------------------------------------- /chatbot/pass.yml: -------------------------------------------------------------------------------- 1 | # config.yaml file 2 | api: "sk-************************************************" 3 | -------------------------------------------------------------------------------- /resource/apg2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xiaowuc2/ChatGPT-Python-Applications/HEAD/resource/apg2.png -------------------------------------------------------------------------------- /resource/apg4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xiaowuc2/ChatGPT-Python-Applications/HEAD/resource/apg4.png -------------------------------------------------------------------------------- /resource/git4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xiaowuc2/ChatGPT-Python-Applications/HEAD/resource/git4.png -------------------------------------------------------------------------------- /resource/ss4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xiaowuc2/ChatGPT-Python-Applications/HEAD/resource/ss4.png -------------------------------------------------------------------------------- /resource/10lines3.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xiaowuc2/ChatGPT-Python-Applications/HEAD/resource/10lines3.gif -------------------------------------------------------------------------------- /resource/10typing.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xiaowuc2/ChatGPT-Python-Applications/HEAD/resource/10typing.gif 
-------------------------------------------------------------------------------- /web-scraping-summarizer/pass.yml: -------------------------------------------------------------------------------- 1 | # config.yaml file 2 | api: "sk-************************************************" 3 | -------------------------------------------------------------------------------- /email-automation/pass.yml: -------------------------------------------------------------------------------- 1 | # config.yaml file 2 | 3 | user: "qxresearch20@gmail.com" 4 | password: "****************" 5 | api: "sk-************************************************" 6 | -------------------------------------------------------------------------------- /chatbot/fantastic-chatbot-gradio.py: -------------------------------------------------------------------------------- 1 | import openai 2 | import gradio as gr 3 | 4 | # Reading private yml file 5 | with open("pass.yml") as f: 6 | content = f.read() 7 | 8 | import yaml  # needed to parse the pass.yml credentials file 9 | my_credentials = yaml.load(content, Loader=yaml.FullLoader) 10 | 11 | openai.api_key = my_credentials["api"] 12 | 13 | messages = [ 14 | {"role": "system", "content": "You are a helpful and kind AI Assistant."}, 15 | ] 16 | 17 | def chatbot(input): 18 | if input: 19 | messages.append({"role": "user", "content": input}) 20 | chat = openai.ChatCompletion.create( 21 | model="gpt-3.5-turbo", messages=messages 22 | ) 23 | reply = chat.choices[0].message.content 24 | messages.append({"role": "assistant", "content": reply}) 25 | return reply 26 | 27 | inputs = gr.inputs.Textbox(lines=7, label="Chat with AI") 28 | outputs = gr.outputs.Textbox(label="Reply") 29 | 30 | gr.Interface(fn=chatbot, inputs=inputs, outputs=outputs, title="AI Chatbot", 31 | description="Ask anything you want", 32 | theme="compact").launch(share=True) 33 | -------------------------------------------------------------------------------- /web-scraping-summarizer/web-scraping-summarizer.py: -------------------------------------------------------------------------------- 1 | # Importing required libraries 2 | import argparse 3 | import requests 4 | from bs4 import BeautifulSoup 5 | import openai 6 | import yaml 7 | 8 | # Reading private yml file 9 | with open("pass.yml") as f: 10 | content = f.read() 11 | 12 | # from pass.yml importing api key 13 | my_credentials = yaml.load(content, Loader=yaml.FullLoader) 14 | # setting the OpenAI API key 15 | openai.api_key = my_credentials["api"] 16 | 17 | # Creating the parser 18 | parser = argparse.ArgumentParser(description='web scraping summarizer') 19 | 20 | # Adding arguments 21 | parser.add_argument('--web', type=str, help='website link (default : https://github.com/xiaowuc2/ChatGPT-Python-Applications)', default="https://github.com/xiaowuc2/ChatGPT-Python-Applications") 22 | parser.add_argument('--limit', type=int, help='summarized text limit (default : 100)', default=100) 23 | 24 | # Parsing arguments 25 | args = parser.parse_args() 26 | 27 | response = requests.get(args.web) 28 | soup = BeautifulSoup(response.content, 'html.parser') 29 | 30 | # Extracting the text 31 | text = '' 32 | for p in soup.find_all('p'): 33 | text += p.text 34 | 35 | 36 | # trimming the text. OpenAI can only take 4097 tokens.
37 | 38 | mine = (int(len(text)/4.2))  # rough token estimate : ~4.2 characters per token 39 | #print(f"my text has chars : {len(text)} tokens : {mine}") 40 | 41 | allowed = 16132 42 | #print(f"number of chars allowed is : {allowed}") 43 | 44 | h = len(text) - allowed 45 | #print(f"we have to drop this many characters : {h}") 46 | 47 | # `ntext` is the trimmed 'text' 48 | ntext = text[:len(text)-h] 49 | #print(f"new text has chars : {len(ntext)} . tokens : {len(ntext)/4}") 50 | 51 | 52 | def summarize_text(text): 53 | model_engine = "text-davinci-002" # Replace with your preferred GPT-3 model engine 54 | prompt = (f"Please summarize the following text:\n{text}\n\nSummary:") 55 | 56 | response = openai.Completion.create( 57 | engine=model_engine, 58 | prompt=prompt, 59 | max_tokens=args.limit, 60 | n=1, 61 | stop=None, 62 | temperature=0.5, 63 | ) 64 | 65 | summary = response.choices[0].text.strip() 66 | return summary 67 | 68 | print(f"Summary : {summarize_text(ntext)}") 69 | -------------------------------------------------------------------------------- /whisper-speech-text/readme.md: -------------------------------------------------------------------------------- 1 | 

2 | 3 |

4 | 5 | 6 |

 7 | Watch video demo + explanation :  ​YouTube​

8 | 9 | ### What is it? 10 | 11 | This application uses OpenAI's Whisper API with Python to convert speech into text. You pass it an audio file, the Whisper model transcribes it, and the resulting text can then be used on its own or fed into the other ChatGPT applications in this repository. The basic call is demonstrated in `whisper-api-basic.ipynb`. 12 | 13 | 
14 | 15 | ### Setup 16 | 17 | 18 | Refer to this [video]() to install the dependencies, generate your OpenAI keys and incorporate them with our applications. Make sure you've changed the API key in the `pass.yml` file, otherwise it'll raise the error `openai.error.AuthenticationError: Incorrect API key provided`. I've articulated the steps in text format here : 19 | 20 | - Star this repository 21 | - Install the dependencies of this repository 22 | - Paste your OpenAI API keys in `pass.yml` file 23 | 24 | 
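Below is a minimal sketch of a basic Whisper API transcription call (not taken verbatim from the notebook), assuming the 0.27-style `openai` Python library, a `pass.yml` file as described above, and a local audio file named `sample.mp3` — both file names are illustrative:

```python
import yaml
import openai

# Read the API key from pass.yml, following the pattern used by the other scripts in this repo
with open("pass.yml") as f:
    my_credentials = yaml.load(f.read(), Loader=yaml.FullLoader)
openai.api_key = my_credentials["api"]

# Send the audio file to the Whisper API and print the transcription
with open("sample.mp3", "rb") as audio_file:
    transcript = openai.Audio.transcribe("whisper-1", audio_file)

print(transcript["text"])
```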
25 | 26 | ### Features 27 | 28 | - ✒️ automating blogs/news writing as soon as you get an mail 29 | - ⭐ ask chatbot to do custom work on the bases of the mail 30 | - 📟 complete access to your mail and automate (title, body, time, sender, number of sender) 31 | - ⚙️ fully customized attributes depending on the requirements 32 | 33 |
34 | 35 | ### Contributing 36 | 37 | If you would like to contribute more Applications, please follow these guidelines: 38 | 39 | 40 | 1. Fork the repository. 41 | 2. Create a new branch with your feature or bug fix. 42 | 3. Commit your changes and push to your fork. 43 | 4. Create a pull request. 44 | 5. Please create a separate folder with `readme.md` and update the `main/readme.md` 45 | 46 | 47 | 
48 | 49 | ### Further Readings 50 | 51 | - [@qxresearch](https://www.youtube.com/@qxresearch/) : Videos all about mathematics combined with coding and machine learning. 52 | - [@xiaowuc2-blog](https://xiaowuc2.vercel.app/posts) : Articulates the way I used technologies to thrive. 53 | - [computational-thinking : python]() : My Python course to crack coding interviews (LeetCode). (Recorded / 1:1) 54 | - [Let's Have an intellectual conversation](https://www.linkedin.com/in/xiaowuc2/) : `rohitmandal814566@gmail.com` 55 | -------------------------------------------------------------------------------- /web-scraping-summarizer/readme.md: -------------------------------------------------------------------------------- 1 | 

2 | 3 |

4 | 5 | 6 |

 7 | Watch video demo + explanation :  ​YouTube​

8 | 9 | ### What is it? 10 | 11 | This tool allows you to summarize any website by simply providing a link. The tool uses the Beautiful Soup and OpenAI libraries to scrape the website and generate a summary. 12 | 13 | 14 | The tool works by first scraping the website using Beautiful Soup to extract the main content. The extracted content is then passed to the OpenAI GPT-3 language model, which generates a summary of the website. The summary is then returned to the user. 15 | 16 |
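As a compact illustration of that flow — scrape the paragraphs, then ask the model for a summary — here is a sketch that condenses the full script shipped in this folder (a placeholder URL and an inline key are used here; the real script reads the key from `pass.yml` and adds argument parsing and text trimming):

```python
import requests
from bs4 import BeautifulSoup
import openai

openai.api_key = "sk-..."  # replace with your own key (normally loaded from pass.yml)

# Step 1: scrape the page and keep only the paragraph text
html = requests.get("https://example.com").content
text = " ".join(p.text for p in BeautifulSoup(html, "html.parser").find_all("p"))

# Step 2: ask the model to summarize the scraped text
completion = openai.Completion.create(
    engine="text-davinci-002",
    prompt=f"Please summarize the following text:\n{text}\n\nSummary:",
    max_tokens=100,
)
print(completion.choices[0].text.strip())
```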
17 | 18 | ### Setup 19 | 20 | 21 | Refer to this [setup video]() to install the dependencies, generate your OpenAI keys and incorporate them with our applications. Make sure you've changed the API key in the `pass.yml` file, otherwise it'll raise the error `openai.error.AuthenticationError: Incorrect API key provided`. I've articulated the steps in text format here : 22 | 23 | - Star this repository 24 | - Install the dependencies of this repository 25 | - Paste your OpenAI API keys in `pass.yml` file 26 | - Pass the attributes from the command line (`--web`, `--limit`), as shown below 27 | 28 | 
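For example, assuming you run the script from inside the `web-scraping-summarizer` folder, a typical invocation looks like this (both arguments are optional and fall back to the script's built-in defaults):

```
python web-scraping-summarizer.py --web "https://example.com" --limit 150
```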
29 | 30 | ### Features 31 | 32 | Subscribe to the YouTube channel [@qxresearch](https://www.youtube.com/@qxresearch/) to receive updates on new projects! 33 | Subscribing also lets you join a community of like-minded Python enthusiasts and stay connected with a passionate group of learners and experts. 34 | 35 | - ✒️ 36 | - ⭐ 37 | - ✨ 38 | - ⚙️ 39 | 40 | 
41 | 42 | ### Contributing 43 | 44 | If you would like to contribute more Applications, please follow these guidelines: 45 | 46 | 47 | 1. Fork the repository. 48 | 2. Create a new branch with your feature or bug fix. 49 | 3. Commit your changes and push to your fork. 50 | 4. Create a pull request. 51 | 5. Please create a separate folder with `readme.md` and update the `main/readme.md` 52 | 53 | 54 | 
55 | 56 | ### Further Readings 57 | 58 | - [@qxresearch](https://www.youtube.com/@qxresearch/) : Videos all about mathematics combined with coding and machine learning. 59 | - [@xiaowuc2-blog](https://xiaowuc2.vercel.app/posts) : Articulates the way I used technologies to thrive. 60 | - [computational-thinking : python]() : My Python course to crack coding interviews (LeetCode). (Recorded / 1:1) 61 | - [Let's Have an intellectual conversation](https://www.linkedin.com/in/xiaowuc2/) : `rohitmandal814566@gmail.com` 62 | -------------------------------------------------------------------------------- /email-automation/readme.md: -------------------------------------------------------------------------------- 1 | 

2 | 3 |

4 | 5 |

 6 | Watch a video demo :  ​YouTube​

7 | 8 | ### What is it? 9 | 10 | **email-automation** is a Python implementation of email automation integrated with ChatGPT, using IMAP (Internet Message Access Protocol) and the `email` module. You select a few particular mail addresses, read their unseen mails, and ChatGPT writes a blog/news/post from the mail body as per your requirements. 11 | 12 | ### Requirements 13 | 14 | - [x] 2-step verification in your Google account (target email) - [Link](https://myaccount.google.com/signinoptions/two-step-verification/enroll-welcome) 15 | - [x] create and use an `app password` (for Google's security reasons) 16 | - [x] turn on 'status : IMAP' in mail settings under `Forwarding and POP/IMAP` - [Link](https://mail.google.com/mail/u/0/#settings/fwdandpop) 17 | 18 | ### Features 19 | 20 | - ✒️ automating blog/news writing as soon as you get an email 21 | - ⭐ ask the chatbot to do custom work on the basis of the mail 22 | - 📟 complete access to your mail for automation (title, body, time, sender, number of senders) 23 | - ⚙️ fully customized attributes depending on the requirements 24 | 25 | ### Input Attributes 26 | 27 | - **how_many :** how many unseen mails you want to check (default : 10) 28 | - **maxtoken :** the maximum number of tokens you want in your blog (default : 200 tokens) 29 | - **what_to_ask :** what you want to ask ChatGPT to do (default : write a blog about) 30 | - **num_target :** number of targeted emails (default : None) 31 | (See the sketch after the Contributing section below for how these attributes map onto `cool-odd.py`.) 32 | 33 | ### Contributing 34 | 35 | If you would like to contribute more Applications, please follow these guidelines: 36 | 37 | 38 | 1. Fork the repository. 39 | 2. Create a new branch with your feature or bug fix. 40 | 3. Commit your changes and push to your fork. 41 | 4. Create a pull request. 42 | 5. Please create a separate folder with `readme.md` and update the `main/readme.md` 43 | 44 | 45 | 
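A minimal sketch of collecting the input attributes interactively — the shipped `cool-odd.py` keeps them as commented-out lines near the top and falls back to fixed defaults; the prompts below are purely illustrative:

```python
# Illustrative wiring of the input attributes described above
how_many = int(input("How many unseen mails to check? ") or 10)          # default : 10
maxtoken = int(input("Maximum tokens for the generated blog? ") or 200)  # default : 200
what_to_ask = input("What should ChatGPT do? ") or "Generate a blog on:"
num_target = int(input("Number of targeted email addresses? ") or 0)
list_mail = [input(f"Target address #{i + 1}: ") for i in range(num_target)]
```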
46 | 47 | ### Further Readings 48 | 49 | - [@qxresearch](https://www.youtube.com/@qxresearch/) : Videos all about mathematics combined with coding and machine learning. 50 | - [@xiaowuc2-blog](https://xiaowuc2.vercel.app/posts) : Articulates the way I used technologies to thrive. 51 | - [computational-thinking : python]() : My Python course to crack coding interviews (LeetCode). (Recorded / 1:1) 52 | - [Let's Have an intellectual conversation](https://www.linkedin.com/in/xiaowuc2/) : `rohitmandal814566@gmail.com` 53 | -------------------------------------------------------------------------------- /chatbot/readme.md: -------------------------------------------------------------------------------- 1 | 

2 | 3 |

4 | 5 | 6 |

 7 | Watch video demo + explanation :  ​YouTube​

8 | 9 | ### What is it? 10 | 11 | This chatbot, integrated with the OpenAI ChatGPT API using Python, is a conversational agent that can interact with users in natural language. It uses the power of machine learning and natural language processing to understand user inputs and generate appropriate responses, providing users with a personalized and natural conversational experience. 12 | 13 | 
14 | 15 | ### Setup 16 | 17 | 18 | Refer to this [video]() to install the dependencies, generate your OpenAI keys and incorporate them with our applications. Make sure you've changed the API key in the `pass.yml` file, otherwise it'll raise the error `openai.error.AuthenticationError: Incorrect API key provided`. I've articulated the steps in text format here : 19 | 20 | - Star this repository 21 | - Install the dependencies of this repository 22 | - Paste your OpenAI API keys in `pass.yml` file 23 | 24 | 
25 | 26 | ### Features 27 | 28 | Subscribe to the YouTube channel [@qxresearch](https://www.youtube.com/@qxresearch/) to receive updates on new projects! 29 | Subscribing also lets you join a community of like-minded Python enthusiasts and stay connected with a passionate group of learners and experts. 30 | 31 | - ✒️ give internet access to ChatGPT 32 | - ⭐ Google Colab Link : [chatbot-colab](https://colab.research.google.com/drive/1-irHRZEUCbfon_gO8MIfnvZpwZ9CnJqR?usp=sharing) 33 | - ✨ Google Colab keeps the service running for 72 hours (for free) 34 | - ⚙️ fully customized attributes depending on the requirements 35 | 36 | 
37 | 38 | ### Contributing 39 | 40 | If you would like to contribute more Applications, please follow these guidelines: 41 | 42 | 43 | 1. Fork the repository. 44 | 2. Create a new branch with your feature or bug fix. 45 | 3. Commit your changes and push to your fork. 46 | 4. Create a pull request. 47 | 5. Please create a separate folder with `readme.md` and update the `main/readme.md` 48 | 49 | 50 | 
51 | 52 | ### Further Readings 53 | 54 | - [@qxresearch](https://www.youtube.com/@qxresearch/) : Videos all about mathematics combined with coding and machine learning. 55 | - [@xiaowuc2-blog](https://xiaowuc2.vercel.app/posts) : Articulates the way I used technologies to thrive. 56 | - [computational-thinking : python]() : My Python course to crack coding interviews (LeetCode). (Recorded / 1:1) 57 | - [Let's Have an intellectual conversation](https://www.linkedin.com/in/xiaowuc2/) : `rohitmandal814566@gmail.com` 58 | -------------------------------------------------------------------------------- /email-automation/cool-odd.py: -------------------------------------------------------------------------------- 1 | # Importing libraries 2 | import imaplib 3 | import email 4 | import yaml 5 | import openai 6 | 7 | 8 | # user defined : variables 9 | # how_many = int(input()) # how many unseen mails you want to check 10 | # maxtoken = int(input()) # what is the maximum number of tokens you want in your blog 11 | # what_to_ask = input() # what do you want to ask chatgpt to do 12 | # num_target = int(input()) # number of targeted emails 13 | # list_mail = [] 14 | # for i in range(num_target): 15 | # i = input() 16 | # list_mail.append(i) 17 | 18 | 19 | # how many mails you want to see (default values) 20 | how_many = 2 21 | maxtoken = 200 22 | #what_to_ask = "Generate a blog on:" 23 | 24 | # Reading private yml file 25 | with open("pass.yml") as f: 26 | content = f.read() 27 | 28 | # from pass.yml import the user name, password and API key 29 | my_credentials = yaml.load(content, Loader=yaml.FullLoader) 30 | user, password = my_credentials["user"], my_credentials["password"] 31 | openai.api_key = my_credentials["api"] 32 | 33 | # Login to the email server 34 | server = "imap.gmail.com" 35 | my_mail = imaplib.IMAP4_SSL(server) 36 | my_mail.login(user, password) 37 | my_mail.select('inbox') 38 | 39 | 40 | # search : unread emails 41 | status, data = my_mail.search(None, 'FROM', 'rohitmandal814566@gmail.com') 42 | 43 | 44 | mail_id_list = data[0].split() #IDs of all emails that we want to fetch 45 | 46 | msgs = [] # empty list to capture all messages 47 | #Iterate through messages and extract data into the msgs list 48 | for num in mail_id_list: 49 | typ, data = my_mail.fetch(num, '(RFC822)') #RFC822 returns whole message (BODY fetches just body) 50 | msgs.append(data) 51 | 52 | count = 0 53 | for msg in msgs[::-1]: 54 | if count == how_many : 55 | break 56 | count += 1 57 | for response_part in msg: 58 | if type(response_part) is tuple: 59 | my_msg=email.message_from_bytes((response_part[1])) 60 | print("_________________________________________") 61 | #print ("subj:", my_msg['subject']) 62 | #print ("from:", my_msg['from']) 63 | #print ("body:") 64 | 65 | for part in my_msg.walk(): 66 | #print(part.get_content_type()) 67 | if part.get_content_type() == 'text/plain': 68 | print (part.get_payload()) 69 | 70 | snippet = part.get_payload() 71 | # prompt = f"{what_to_ask} {str(snippet)}" 72 | prompt = f"Generate a blog on: {str(snippet)}" 73 | 74 | # calling api 75 | response = openai.Completion.create( 76 | engine="davinci-instruct-beta-v3", 77 | prompt=prompt, 78 | max_tokens=maxtoken, 79 | temperature = 0.7, 80 | top_p=1, 81 | frequency_penalty=0, 82 | presence_penalty=0 83 | ) 84 | 85 | #printing the response 86 | generated_text = response['choices'][0]['text'] 87 | print(generated_text) 88 | 
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |

2 | 3 |

4 | 5 | 6 |

  7 | Watch demo :  ​Meow!​

8 | 9 | ### What is it? 10 | 11 | The GitHub repository titled "ChatGPT Python Applications" is a comprehensive collection of Python applications built around the ChatGPT language model. This repository contains multiple standalone applications, each demonstrating different use cases and applications of the ChatGPT model. 12 | 13 | The applications in this repository are designed to showcase the versatility and power of ChatGPT in various contexts, from simple chatbots to more advanced natural language processing applications. Each application is well documented, providing a clear understanding of its functionality, how it works, and how it can be integrated into other projects. All applications are written in Python, with a clear structure and code that is easy to understand and modify. The repository is also open-source, so users can contribute to the project, submit bug reports, and suggest improvements. 14 | 15 | Overall, the "ChatGPT Python Applications" repository is a valuable resource for developers looking to integrate ChatGPT into their projects or explore the potential of natural language processing with Python. 16 | 17 |
18 | 19 | 20 |
21 | 22 |

23 | Author : 24 | 25 | @xiaowuc2 26 | 27 | 28 |

29 | 30 | 31 | I am a Machine Learning Researcher currently working at IBM, with expertise in solving real-world problems through coding. I have taught 20,000+ students & working professionals to crack coding interviews and land their dream jobs at TCS, Walmart, Accenture and Cognizant. My research has been recognized through publications with esteemed publishers such as O'Reilly[^1] and Springer[^2]. 32 | 33 | - [computational-thinking : python](https://xiaowuc2.vercel.app/posts/computational-thinking-python) : My Python course to crack coding interviews (LeetCode). (Recorded / 1:1) 34 | - [@qxresearch](https://www.youtube.com/@qxresearch/) : Videos all about mathematics combined with coding and machine learning. 35 | - [Let's Have an intellectual conversation](https://www.linkedin.com/in/xiaowuc2/) : `rohitmandal814566@gmail.com` 36 | - `Follow` me on GitHub for new projects : Python, machine learning and DSA (coding interviews) 37 | 38 | 39 | 
40 | 41 | ### Setup 42 | 43 | Refer to this [setup video](https://youtu.be/beEBeQw5tpc) to install the dependencies, generate your OpenAI keys and incorporate them with our applications. I've articulated the steps in text format here : 44 | 45 | - Star this repository (top-right corner) 46 | - Install the dependencies following these steps : 47 | 48 | - <>Code > Download ZIP > Open cmd/terminal in that location 49 | - Run this command : `pip install -r requirements.txt` 50 | 51 | 52 | - Paste your OpenAI API keys in `pass.yml` file 53 | 54 | - To clone the repository instead, use this command : 55 | ``` 56 | git clone https://github.com/xiaowuc2/ChatGPT-Python-Applications 57 | ``` 58 | 59 | 
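The Python scripts in this repository load that key from `pass.yml` with PyYAML rather than hard-coding it; the shared pattern (used, for example, in `fantastic-chatbot-gradio.py` and `web-scraping-summarizer.py`) looks like this:

```python
import yaml
import openai

# Load the OpenAI API key from pass.yml
with open("pass.yml") as f:
    my_credentials = yaml.load(f.read(), Loader=yaml.FullLoader)

openai.api_key = my_credentials["api"]
```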
60 | 61 | ### Applications 62 | 63 | Subscribe to the YouTube channel [@qxresearch](https://www.youtube.com/@qxresearch/) to receive updates on new projects! 64 | Subscribing also lets you join a community of like-minded Python enthusiasts and stay connected with a passionate group of learners and experts. 65 | 66 | - ✒️ [email-automation](https://github.com/xiaowuc2/ChatGPT-Python-Applications/tree/main/email-automation) : Tool to automate news briefing and blogging from custom senders (mail) 67 | - ⭐ [custom-chatbot](https://github.com/xiaowuc2/ChatGPT-Python-Applications/tree/main/chatbot) : ask the chatbot to do custom work on the basis of the task (e.g. script writer) 68 | - 📟 [whisper-speech-text](https://github.com/xiaowuc2/ChatGPT-Python-Applications/tree/main/whisper-speech-text) : another OpenAI API, used to convert audio into text 69 | - ⚙️ [finetuned-gpt](https://github.com/xiaowuc2/ChatGPT-Python-Applications/tree/main/finetuned-gpt) : Train ChatGPT on your custom data & ask queries from that data 70 | - 💠 [voice-assistant](https://github.com/xiaowuc2/ChatGPT-Python-Applications/tree/main/voice-assistant) : Voice assistant based on ChatGPT and the Whisper API (audio input & output) 71 | - 🐻 [web-scraping-summarizer](https://github.com/xiaowuc2/ChatGPT-Python-Applications/tree/main/web-scraping-summarizer) : This tool scrapes a given website and summarizes the main content. 72 | - ⌚ [your-perspective](https://raw.githubusercontent.com/xiaowuc2/ChatGPT-Python-Applications/main/resource/git4.png) : You can train ChatGPT to perceive things the way you do, and it will imitate you! 73 | - 📖 [bhagavad-gita-gpt](https://raw.githubusercontent.com/xiaowuc2/ChatGPT-Python-Applications/main/resource/git4.png) : A religious book which contains all the answers to find our purpose and to live it fully. 74 | - 🏜 [vector-database](https://github.com/xiaowuc2/ChatGPT-Python-Applications/blob/main/vector-database/Vector_Databse.ipynb) : This is how you can send big text files to ChatGPT and avoid the token limits 75 | 76 | 
77 | 78 | ⚠️ A few applications are still under development; the code will be uploaded as soon as possible! You can `⭐` this repo / subscribe to this [channel](https://www.youtube.com/@qxresearch/) to get the updates. 79 | 80 | 81 | 82 | 
83 | 84 | ### Contributing 85 | 86 | If you would like to contribute more Applications, please follow these guidelines: 87 | 88 | 89 | 1. Fork the repository. 90 | 2. Create a new branch with your feature or bug fix. 91 | 3. Commit your changes and push to your fork. 92 | 4. Create a pull request. 93 | 5. Please create a separate folder with `readme.md` and update the `main/readme.md` 94 | 95 | 96 | 
97 | 98 | 99 | [^1]: 10Water Content Prediction in Smart Agriculture of Rural India Using CNN and Transfer Learning Approach. [Link](https://www.oreilly.com/library/view/intelligent-decision-support/9781119896432/c10.xhtml) 100 | [^2]: Variational Autoencoder-Based Imbalanced Alzheimer Detection Using Brain MRI Images. [Link](https://link.springer.com/chapter/10.1007/978-981-19-1657-1_14) 101 | -------------------------------------------------------------------------------- /finetuned-gpt/document-based-chatGPT.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": { 7 | "colab": { 8 | "base_uri": "https://localhost:8080/" 9 | }, 10 | "id": "BxSxIMjKoIue", 11 | "outputId": "ef1a70ca-bbcb-4aea-ebcf-ff49345038aa" 12 | }, 13 | "outputs": [ 14 | { 15 | "name": "stdout", 16 | "output_type": "stream", 17 | "text": [ 18 | "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n", 19 | "Requirement already satisfied: llama-index in /usr/local/lib/python3.9/dist-packages (0.4.28)\n", 20 | "Requirement already satisfied: numpy in /usr/local/lib/python3.9/dist-packages (from llama-index) (1.22.4)\n", 21 | "Requirement already satisfied: pandas in /usr/local/lib/python3.9/dist-packages (from llama-index) (1.4.4)\n", 22 | "Requirement already satisfied: tenacity<9.0.0,>=8.2.0 in /usr/local/lib/python3.9/dist-packages (from llama-index) (8.2.2)\n", 23 | "Requirement already satisfied: openai>=0.26.4 in /usr/local/lib/python3.9/dist-packages (from llama-index) (0.27.2)\n", 24 | "Requirement already satisfied: tiktoken in /usr/local/lib/python3.9/dist-packages (from llama-index) (0.3.1)\n", 25 | "Requirement already satisfied: langchain in /usr/local/lib/python3.9/dist-packages (from llama-index) (0.0.109)\n", 26 | "Requirement already satisfied: dataclasses-json in /usr/local/lib/python3.9/dist-packages (from llama-index) (0.5.7)\n", 27 | "Requirement already satisfied: aiohttp in /usr/local/lib/python3.9/dist-packages (from openai>=0.26.4->llama-index) (3.8.4)\n", 28 | "Requirement already satisfied: tqdm in /usr/local/lib/python3.9/dist-packages (from openai>=0.26.4->llama-index) (4.65.0)\n", 29 | "Requirement already satisfied: requests>=2.20 in /usr/local/lib/python3.9/dist-packages (from openai>=0.26.4->llama-index) (2.28.2)\n", 30 | "Requirement already satisfied: marshmallow<4.0.0,>=3.3.0 in /usr/local/lib/python3.9/dist-packages (from dataclasses-json->llama-index) (3.19.0)\n", 31 | "Requirement already satisfied: typing-inspect>=0.4.0 in /usr/local/lib/python3.9/dist-packages (from dataclasses-json->llama-index) (0.8.0)\n", 32 | "Requirement already satisfied: marshmallow-enum<2.0.0,>=1.5.1 in /usr/local/lib/python3.9/dist-packages (from dataclasses-json->llama-index) (1.5.1)\n", 33 | "Requirement already satisfied: SQLAlchemy<2,>=1 in /usr/local/lib/python3.9/dist-packages (from langchain->llama-index) (1.4.46)\n", 34 | "Requirement already satisfied: pydantic<2,>=1 in /usr/local/lib/python3.9/dist-packages (from langchain->llama-index) (1.10.6)\n", 35 | "Requirement already satisfied: PyYAML<7,>=6 in /usr/local/lib/python3.9/dist-packages (from langchain->llama-index) (6.0)\n", 36 | "Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.9/dist-packages (from pandas->llama-index) (2022.7.1)\n", 37 | "Requirement already satisfied: python-dateutil>=2.8.1 in /usr/local/lib/python3.9/dist-packages (from 
pandas->llama-index) (2.8.2)\n", 38 | "Requirement already satisfied: regex>=2022.1.18 in /usr/local/lib/python3.9/dist-packages (from tiktoken->llama-index) (2022.6.2)\n", 39 | "Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.9/dist-packages (from aiohttp->openai>=0.26.4->llama-index) (1.8.2)\n", 40 | "Requirement already satisfied: async-timeout<5.0,>=4.0.0a3 in /usr/local/lib/python3.9/dist-packages (from aiohttp->openai>=0.26.4->llama-index) (4.0.2)\n", 41 | "Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.9/dist-packages (from aiohttp->openai>=0.26.4->llama-index) (22.2.0)\n", 42 | "Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.9/dist-packages (from aiohttp->openai>=0.26.4->llama-index) (6.0.4)\n", 43 | "Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.9/dist-packages (from aiohttp->openai>=0.26.4->llama-index) (1.3.1)\n", 44 | "Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.9/dist-packages (from aiohttp->openai>=0.26.4->llama-index) (1.3.3)\n", 45 | "Requirement already satisfied: charset-normalizer<4.0,>=2.0 in /usr/local/lib/python3.9/dist-packages (from aiohttp->openai>=0.26.4->llama-index) (3.1.0)\n", 46 | "Requirement already satisfied: packaging>=17.0 in /usr/local/lib/python3.9/dist-packages (from marshmallow<4.0.0,>=3.3.0->dataclasses-json->llama-index) (23.0)\n", 47 | "Requirement already satisfied: typing-extensions>=4.2.0 in /usr/local/lib/python3.9/dist-packages (from pydantic<2,>=1->langchain->llama-index) (4.5.0)\n", 48 | "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.9/dist-packages (from python-dateutil>=2.8.1->pandas->llama-index) (1.15.0)\n", 49 | "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.9/dist-packages (from requests>=2.20->openai>=0.26.4->llama-index) (2.10)\n", 50 | "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.9/dist-packages (from requests>=2.20->openai>=0.26.4->llama-index) (2022.12.7)\n", 51 | "Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.9/dist-packages (from requests>=2.20->openai>=0.26.4->llama-index) (1.26.14)\n", 52 | "Requirement already satisfied: greenlet!=0.4.17 in /usr/local/lib/python3.9/dist-packages (from SQLAlchemy<2,>=1->langchain->llama-index) (2.0.2)\n", 53 | "Requirement already satisfied: mypy-extensions>=0.3.0 in /usr/local/lib/python3.9/dist-packages (from typing-inspect>=0.4.0->dataclasses-json->llama-index) (1.0.0)\n", 54 | "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n", 55 | "Requirement already satisfied: openai in /usr/local/lib/python3.9/dist-packages (0.27.2)\n", 56 | "Requirement already satisfied: requests>=2.20 in /usr/local/lib/python3.9/dist-packages (from openai) (2.28.2)\n", 57 | "Requirement already satisfied: aiohttp in /usr/local/lib/python3.9/dist-packages (from openai) (3.8.4)\n", 58 | "Requirement already satisfied: tqdm in /usr/local/lib/python3.9/dist-packages (from openai) (4.65.0)\n", 59 | "Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.9/dist-packages (from requests>=2.20->openai) (3.1.0)\n", 60 | "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.9/dist-packages (from requests>=2.20->openai) (2.10)\n", 61 | "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.9/dist-packages (from requests>=2.20->openai) (2022.12.7)\n", 62 | "Requirement 
already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.9/dist-packages (from requests>=2.20->openai) (1.26.14)\n", 63 | "Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.9/dist-packages (from aiohttp->openai) (1.3.3)\n", 64 | "Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.9/dist-packages (from aiohttp->openai) (22.2.0)\n", 65 | "Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.9/dist-packages (from aiohttp->openai) (1.3.1)\n", 66 | "Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.9/dist-packages (from aiohttp->openai) (6.0.4)\n", 67 | "Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.9/dist-packages (from aiohttp->openai) (1.8.2)\n", 68 | "Requirement already satisfied: async-timeout<5.0,>=4.0.0a3 in /usr/local/lib/python3.9/dist-packages (from aiohttp->openai) (4.0.2)\n", 69 | "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n", 70 | "Requirement already satisfied: docx2txt in /usr/local/lib/python3.9/dist-packages (0.8)\n" 71 | ] 72 | } 73 | ], 74 | "source": [ 75 | "!pip install llama-index\n", 76 | "!pip install openai\n", 77 | "!pip install docx2txt" 78 | ] 79 | }, 80 | { 81 | "cell_type": "markdown", 82 | "metadata": { 83 | "id": "Q6jAcbsu3W5w" 84 | }, 85 | "source": [ 86 | "made a simple vector store index and here I have imported california housing data" 87 | ] 88 | }, 89 | { 90 | "cell_type": "code", 91 | "execution_count": null, 92 | "metadata": { 93 | "id": "qnaK91TFogX_" 94 | }, 95 | "outputs": [], 96 | "source": [ 97 | "import os\n", 98 | "os.environ[\"OPENAI_API_KEY\"] = 'sk-************************************************'\n", 99 | "\n", 100 | "from llama_index import GPTSimpleVectorIndex, SimpleDirectoryReader\n", 101 | "documents = SimpleDirectoryReader('sample_data').load_data()\n", 102 | "index = GPTSimpleVectorIndex(documents)" 103 | ] 104 | }, 105 | { 106 | "cell_type": "markdown", 107 | "metadata": { 108 | "id": "lwvJPwU13IsV" 109 | }, 110 | "source": [ 111 | "saving and loading from the disk" 112 | ] 113 | }, 114 | { 115 | "cell_type": "code", 116 | "execution_count": null, 117 | "metadata": { 118 | "id": "p5taEUUy2h2S" 119 | }, 120 | "outputs": [], 121 | "source": [ 122 | "# save to disk\n", 123 | "index.save_to_disk('index.json')\n", 124 | "# load from disk\n", 125 | "index = GPTSimpleVectorIndex.load_from_disk('index.json')" 126 | ] 127 | }, 128 | { 129 | "cell_type": "markdown", 130 | "metadata": { 131 | "id": "u-L6cv1d3PI-" 132 | }, 133 | "source": [ 134 | "querying the index" 135 | ] 136 | }, 137 | { 138 | "cell_type": "code", 139 | "execution_count": null, 140 | "metadata": { 141 | "colab": { 142 | "base_uri": "https://localhost:8080/" 143 | }, 144 | "id": "FJmSuHar2pEc", 145 | "outputId": "055d2086-23af-4c09-8ffc-b9d845625237" 146 | }, 147 | "outputs": [ 148 | { 149 | "output_type": "stream", 150 | "name": "stdout", 151 | "text": [ 152 | "\n", 153 | "HyperBlock is the software \"brain\" behind Anaplan - it holds the entire planning model in-memory and automatically updates the model in near real-time by only updating affected cells. 
It is the patented engine that Anaplan runs on and enables Connected Planning solutions, allowing companies to make quicker decisions made on more up-to-date and accurate information.\n", 154 | " " 155 | ] 156 | } 157 | ], 158 | "source": [ 159 | "reply=index.query(\" what is hyperblock?\")\n", 160 | "print(reply , end='\\n ' )" 161 | ] 162 | } 163 | ], 164 | "metadata": { 165 | "colab": { 166 | "provenance": [] 167 | }, 168 | "kernelspec": { 169 | "display_name": "Python 3", 170 | "name": "python3" 171 | }, 172 | "language_info": { 173 | "name": "python" 174 | } 175 | }, 176 | "nbformat": 4, 177 | "nbformat_minor": 0 178 | } 179 | -------------------------------------------------------------------------------- /chatbot/custom_chatbot.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "nbformat": 4, 3 | "nbformat_minor": 0, 4 | "metadata": { 5 | "colab": { 6 | "provenance": [] 7 | }, 8 | "kernelspec": { 9 | "name": "python3", 10 | "display_name": "Python 3" 11 | }, 12 | "language_info": { 13 | "name": "python" 14 | } 15 | }, 16 | "cells": [ 17 | { 18 | "cell_type": "code", 19 | "execution_count": 1, 20 | "metadata": { 21 | "colab": { 22 | "base_uri": "https://localhost:8080/" 23 | }, 24 | "id": "0_0h4RiNRJrg", 25 | "outputId": "93ed69ff-1f8a-4b2c-cc56-de2a71c926ae" 26 | }, 27 | "outputs": [ 28 | { 29 | "output_type": "stream", 30 | "name": "stdout", 31 | "text": [ 32 | "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n", 33 | "Collecting gradio\n", 34 | " Downloading gradio-3.21.0-py3-none-any.whl (15.8 MB)\n", 35 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m15.8/15.8 MB\u001b[0m \u001b[31m23.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 36 | "\u001b[?25hRequirement already satisfied: markupsafe in /usr/local/lib/python3.9/dist-packages (from gradio) (2.1.2)\n", 37 | "Collecting pydub\n", 38 | " Downloading pydub-0.25.1-py2.py3-none-any.whl (32 kB)\n", 39 | "Collecting ffmpy\n", 40 | " Downloading ffmpy-0.3.0.tar.gz (4.8 kB)\n", 41 | " Preparing metadata (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n", 42 | "Collecting mdit-py-plugins<=0.3.3\n", 43 | " Downloading mdit_py_plugins-0.3.3-py3-none-any.whl (50 kB)\n", 44 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m50.5/50.5 KB\u001b[0m \u001b[31m3.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 45 | "\u001b[?25hRequirement already satisfied: numpy in /usr/local/lib/python3.9/dist-packages (from gradio) (1.22.4)\n", 46 | "Collecting orjson\n", 47 | " Downloading orjson-3.8.7-cp39-cp39-manylinux_2_28_x86_64.whl (140 kB)\n", 48 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m140.9/140.9 KB\u001b[0m \u001b[31m7.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 49 | "\u001b[?25hRequirement already satisfied: pillow in /usr/local/lib/python3.9/dist-packages (from gradio) (8.4.0)\n", 50 | "Requirement already satisfied: jinja2 in /usr/local/lib/python3.9/dist-packages (from gradio) (3.1.2)\n", 51 | "Collecting uvicorn\n", 52 | " Downloading uvicorn-0.21.0-py3-none-any.whl (57 kB)\n", 53 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m57.8/57.8 KB\u001b[0m \u001b[31m4.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 54 | "\u001b[?25hCollecting aiohttp\n", 55 | " Downloading aiohttp-3.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.0 MB)\n", 56 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.0/1.0 MB\u001b[0m \u001b[31m31.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 57 | "\u001b[?25hCollecting huggingface-hub>=0.13.0\n", 58 | " Downloading huggingface_hub-0.13.2-py3-none-any.whl (199 kB)\n", 59 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m199.2/199.2 KB\u001b[0m \u001b[31m16.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 60 | "\u001b[?25hCollecting httpx\n", 61 | " Downloading httpx-0.23.3-py3-none-any.whl (71 kB)\n", 62 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m71.5/71.5 KB\u001b[0m \u001b[31m4.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 63 | "\u001b[?25hRequirement already satisfied: typing-extensions in /usr/local/lib/python3.9/dist-packages (from gradio) (4.5.0)\n", 64 | "Requirement already satisfied: requests in /usr/local/lib/python3.9/dist-packages (from gradio) (2.25.1)\n", 65 | "Collecting python-multipart\n", 66 | " Downloading python_multipart-0.0.6-py3-none-any.whl (45 kB)\n", 67 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m45.7/45.7 KB\u001b[0m \u001b[31m3.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 68 | "\u001b[?25hCollecting aiofiles\n", 69 | " Downloading aiofiles-23.1.0-py3-none-any.whl (14 kB)\n", 70 | "Requirement already satisfied: altair>=4.2.0 in /usr/local/lib/python3.9/dist-packages (from gradio) (4.2.2)\n", 71 | "Requirement already satisfied: pydantic in /usr/local/lib/python3.9/dist-packages (from gradio) (1.10.6)\n", 72 | "Collecting markdown-it-py[linkify]>=2.0.0\n", 73 | " Downloading markdown_it_py-2.2.0-py3-none-any.whl (84 kB)\n", 74 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m84.5/84.5 KB\u001b[0m \u001b[31m6.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 75 | "\u001b[?25hRequirement already satisfied: pandas in /usr/local/lib/python3.9/dist-packages (from gradio) (1.4.4)\n", 76 | "Requirement already satisfied: fsspec in /usr/local/lib/python3.9/dist-packages (from gradio) (2023.3.0)\n", 77 | "Requirement already satisfied: matplotlib in 
/usr/local/lib/python3.9/dist-packages (from gradio) (3.5.3)\n", 78 | "Requirement already satisfied: pyyaml in /usr/local/lib/python3.9/dist-packages (from gradio) (6.0)\n", 79 | "Collecting fastapi\n", 80 | " Downloading fastapi-0.94.1-py3-none-any.whl (56 kB)\n", 81 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m56.4/56.4 KB\u001b[0m \u001b[31m3.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 82 | "\u001b[?25hCollecting websockets>=10.0\n", 83 | " Downloading websockets-10.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (106 kB)\n", 84 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m106.5/106.5 KB\u001b[0m \u001b[31m7.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 85 | "\u001b[?25hRequirement already satisfied: entrypoints in /usr/local/lib/python3.9/dist-packages (from altair>=4.2.0->gradio) (0.4)\n", 86 | "Requirement already satisfied: toolz in /usr/local/lib/python3.9/dist-packages (from altair>=4.2.0->gradio) (0.12.0)\n", 87 | "Requirement already satisfied: jsonschema>=3.0 in /usr/local/lib/python3.9/dist-packages (from altair>=4.2.0->gradio) (4.3.3)\n", 88 | "Requirement already satisfied: filelock in /usr/local/lib/python3.9/dist-packages (from huggingface-hub>=0.13.0->gradio) (3.9.0)\n", 89 | "Requirement already satisfied: tqdm>=4.42.1 in /usr/local/lib/python3.9/dist-packages (from huggingface-hub>=0.13.0->gradio) (4.65.0)\n", 90 | "Requirement already satisfied: packaging>=20.9 in /usr/local/lib/python3.9/dist-packages (from huggingface-hub>=0.13.0->gradio) (23.0)\n", 91 | "Collecting mdurl~=0.1\n", 92 | " Downloading mdurl-0.1.2-py3-none-any.whl (10.0 kB)\n", 93 | "Collecting linkify-it-py<3,>=1\n", 94 | " Downloading linkify_it_py-2.0.0-py3-none-any.whl (19 kB)\n", 95 | "Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.9/dist-packages (from pandas->gradio) (2022.7.1)\n", 96 | "Requirement already satisfied: python-dateutil>=2.8.1 in /usr/local/lib/python3.9/dist-packages (from pandas->gradio) (2.8.2)\n", 97 | "Collecting charset-normalizer<4.0,>=2.0\n", 98 | " Downloading charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (199 kB)\n", 99 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m199.2/199.2 KB\u001b[0m \u001b[31m12.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 100 | "\u001b[?25hRequirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.9/dist-packages (from aiohttp->gradio) (22.2.0)\n", 101 | "Collecting aiosignal>=1.1.2\n", 102 | " Downloading aiosignal-1.3.1-py3-none-any.whl (7.6 kB)\n", 103 | "Collecting async-timeout<5.0,>=4.0.0a3\n", 104 | " Downloading async_timeout-4.0.2-py3-none-any.whl (5.8 kB)\n", 105 | "Collecting yarl<2.0,>=1.0\n", 106 | " Downloading yarl-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (264 kB)\n", 107 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m264.6/264.6 KB\u001b[0m \u001b[31m11.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 108 | "\u001b[?25hCollecting multidict<7.0,>=4.5\n", 109 | " Downloading multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (114 kB)\n", 110 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m114.2/114.2 KB\u001b[0m \u001b[31m6.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 111 | "\u001b[?25hCollecting frozenlist>=1.1.1\n", 112 | " Downloading 
frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (158 kB)\n", 113 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m158.8/158.8 KB\u001b[0m \u001b[31m1.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 114 | "\u001b[?25hCollecting starlette<0.27.0,>=0.26.1\n", 115 | " Downloading starlette-0.26.1-py3-none-any.whl (66 kB)\n", 116 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m66.9/66.9 KB\u001b[0m \u001b[31m4.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 117 | "\u001b[?25hRequirement already satisfied: certifi in /usr/local/lib/python3.9/dist-packages (from httpx->gradio) (2022.12.7)\n", 118 | "Collecting httpcore<0.17.0,>=0.15.0\n", 119 | " Downloading httpcore-0.16.3-py3-none-any.whl (69 kB)\n", 120 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m69.6/69.6 KB\u001b[0m \u001b[31m4.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 121 | "\u001b[?25hCollecting sniffio\n", 122 | " Downloading sniffio-1.3.0-py3-none-any.whl (10 kB)\n", 123 | "Collecting rfc3986[idna2008]<2,>=1.3\n", 124 | " Downloading rfc3986-1.5.0-py2.py3-none-any.whl (31 kB)\n", 125 | "Requirement already satisfied: pyparsing>=2.2.1 in /usr/local/lib/python3.9/dist-packages (from matplotlib->gradio) (3.0.9)\n", 126 | "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.9/dist-packages (from matplotlib->gradio) (0.11.0)\n", 127 | "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.9/dist-packages (from matplotlib->gradio) (1.4.4)\n", 128 | "Requirement already satisfied: fonttools>=4.22.0 in /usr/local/lib/python3.9/dist-packages (from matplotlib->gradio) (4.39.0)\n", 129 | "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.9/dist-packages (from requests->gradio) (2.10)\n", 130 | "Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.9/dist-packages (from requests->gradio) (1.26.15)\n", 131 | "Requirement already satisfied: chardet<5,>=3.0.2 in /usr/local/lib/python3.9/dist-packages (from requests->gradio) (4.0.0)\n", 132 | "Collecting h11>=0.8\n", 133 | " Downloading h11-0.14.0-py3-none-any.whl (58 kB)\n", 134 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m58.3/58.3 KB\u001b[0m \u001b[31m4.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 135 | "\u001b[?25hRequirement already satisfied: click>=7.0 in /usr/local/lib/python3.9/dist-packages (from uvicorn->gradio) (8.1.3)\n", 136 | "Collecting anyio<5.0,>=3.0\n", 137 | " Downloading anyio-3.6.2-py3-none-any.whl (80 kB)\n", 138 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m80.6/80.6 KB\u001b[0m \u001b[31m5.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 139 | "\u001b[?25hRequirement already satisfied: pyrsistent!=0.17.0,!=0.17.1,!=0.17.2,>=0.14.0 in /usr/local/lib/python3.9/dist-packages (from jsonschema>=3.0->altair>=4.2.0->gradio) (0.19.3)\n", 140 | "Collecting uc-micro-py\n", 141 | " Downloading uc_micro_py-1.0.1-py3-none-any.whl (6.2 kB)\n", 142 | "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.9/dist-packages (from python-dateutil>=2.8.1->pandas->gradio) (1.15.0)\n", 143 | "Building wheels for collected packages: ffmpy\n", 144 | " Building wheel for ffmpy (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n", 145 | " Created wheel for ffmpy: filename=ffmpy-0.3.0-py3-none-any.whl size=4707 sha256=90f5e273b49f2020b901ac465f6ac81e2e5ef0d7ed45c0650853f7431b3f4384\n", 146 | " Stored in directory: /root/.cache/pip/wheels/91/e2/96/f676aa08bfd789328c6576cd0f1fde4a3d686703bb0c247697\n", 147 | "Successfully built ffmpy\n", 148 | "Installing collected packages: rfc3986, pydub, ffmpy, websockets, uc-micro-py, sniffio, python-multipart, orjson, multidict, mdurl, h11, frozenlist, charset-normalizer, async-timeout, aiofiles, yarl, uvicorn, markdown-it-py, linkify-it-py, huggingface-hub, anyio, aiosignal, starlette, mdit-py-plugins, httpcore, aiohttp, httpx, fastapi, gradio\n", 149 | "Successfully installed aiofiles-23.1.0 aiohttp-3.8.4 aiosignal-1.3.1 anyio-3.6.2 async-timeout-4.0.2 charset-normalizer-3.1.0 fastapi-0.94.1 ffmpy-0.3.0 frozenlist-1.3.3 gradio-3.21.0 h11-0.14.0 httpcore-0.16.3 httpx-0.23.3 huggingface-hub-0.13.2 linkify-it-py-2.0.0 markdown-it-py-2.2.0 mdit-py-plugins-0.3.3 mdurl-0.1.2 multidict-6.0.4 orjson-3.8.7 pydub-0.25.1 python-multipart-0.0.6 rfc3986-1.5.0 sniffio-1.3.0 starlette-0.26.1 uc-micro-py-1.0.1 uvicorn-0.21.0 websockets-10.4 yarl-1.8.2\n", 150 | "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n", 151 | "Collecting openai\n", 152 | " Downloading openai-0.27.2-py3-none-any.whl (70 kB)\n", 153 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m70.1/70.1 KB\u001b[0m \u001b[31m8.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 154 | "\u001b[?25hRequirement already satisfied: aiohttp in /usr/local/lib/python3.9/dist-packages (from openai) (3.8.4)\n", 155 | "Requirement already satisfied: tqdm in /usr/local/lib/python3.9/dist-packages (from openai) (4.65.0)\n", 156 | "Requirement already satisfied: requests>=2.20 in /usr/local/lib/python3.9/dist-packages (from openai) (2.25.1)\n", 157 | "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.9/dist-packages (from requests>=2.20->openai) (2022.12.7)\n", 158 | "Requirement already satisfied: chardet<5,>=3.0.2 in /usr/local/lib/python3.9/dist-packages (from requests>=2.20->openai) (4.0.0)\n", 159 | "Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.9/dist-packages (from requests>=2.20->openai) (1.26.15)\n", 160 | "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.9/dist-packages (from requests>=2.20->openai) (2.10)\n", 161 | "Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.9/dist-packages (from aiohttp->openai) (6.0.4)\n", 162 | "Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.9/dist-packages (from aiohttp->openai) (1.3.1)\n", 163 | "Requirement already satisfied: charset-normalizer<4.0,>=2.0 in /usr/local/lib/python3.9/dist-packages (from aiohttp->openai) (3.1.0)\n", 164 | "Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.9/dist-packages (from aiohttp->openai) (1.3.3)\n", 165 | "Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.9/dist-packages (from aiohttp->openai) (1.8.2)\n", 166 | "Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.9/dist-packages (from aiohttp->openai) (22.2.0)\n", 167 | "Requirement already satisfied: async-timeout<5.0,>=4.0.0a3 in /usr/local/lib/python3.9/dist-packages (from aiohttp->openai) (4.0.2)\n", 168 | "Installing collected packages: openai\n", 169 | "Successfully installed openai-0.27.2\n" 170 | ] 171 | 
} 172 | ], 173 | "source": [ 174 | "!pip install gradio \n", 175 | "!pip install openai" 176 | ] 177 | }, 178 | { 179 | "cell_type": "code", 180 | "source": [ 181 | "import openai\n", 182 | "import gradio as gr\n", 183 | "\n", 184 | "openai.api_key = \"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\"\n", 185 | "\n", 186 | "messages = [\n", 187 | " {\"role\": \"system\", \"content\": \"You are a helpful and kind AI Assistant.\"},\n", 188 | "]\n", 189 | "\n", 190 | "def chatbot(input):\n", 191 | " if input:\n", 192 | " messages.append({\"role\": \"user\", \"content\": input})\n", 193 | " chat = openai.ChatCompletion.create(\n", 194 | " model=\"gpt-3.5-turbo\", messages=messages\n", 195 | " )\n", 196 | " reply = chat.choices[0].message.content\n", 197 | " messages.append({\"role\": \"assistant\", \"content\": reply})\n", 198 | " return reply\n", 199 | "\n", 200 | "inputs = gr.inputs.Textbox(lines=7, label=\"Chat with AI\")\n", 201 | "outputs = gr.outputs.Textbox(label=\"Reply\")\n", 202 | "\n", 203 | "gr.Interface(fn=chatbot, inputs=inputs, outputs=outputs, title=\"ChatGPT : Meow!\",\n", 204 | " description=\"Ask anything you want\",\n", 205 | " theme=\"compact\").launch(share=True)" 206 | ], 207 | "metadata": { 208 | "colab": { 209 | "base_uri": "https://localhost:8080/", 210 | "height": 784 211 | }, 212 | "id": "N-2_3d7yRPUG", 213 | "outputId": "0476915f-132a-434d-ef0b-d7bff7cbe35d" 214 | }, 215 | "execution_count": 3, 216 | "outputs": [ 217 | { 218 | "output_type": "stream", 219 | "name": "stderr", 220 | "text": [ 221 | "/usr/local/lib/python3.9/dist-packages/gradio/inputs.py:27: UserWarning: Usage of gradio.inputs is deprecated, and will not be supported in the future, please import your component from gradio.components\n", 222 | " warnings.warn(\n", 223 | "/usr/local/lib/python3.9/dist-packages/gradio/deprecation.py:40: UserWarning: `optional` parameter is deprecated, and it has no effect\n", 224 | " warnings.warn(value)\n", 225 | "/usr/local/lib/python3.9/dist-packages/gradio/deprecation.py:40: UserWarning: `numeric` parameter is deprecated, and it has no effect\n", 226 | " warnings.warn(value)\n", 227 | "/usr/local/lib/python3.9/dist-packages/gradio/outputs.py:22: UserWarning: Usage of gradio.outputs is deprecated, and will not be supported in the future, please import your components from gradio.components\n", 228 | " warnings.warn(\n", 229 | "/usr/local/lib/python3.9/dist-packages/gradio/blocks.py:491: UserWarning: Theme should be a class loaded from gradio.themes\n", 230 | " warnings.warn(\"Theme should be a class loaded from gradio.themes\")\n" 231 | ] 232 | }, 233 | { 234 | "output_type": "stream", 235 | "name": "stdout", 236 | "text": [ 237 | "Colab notebook detected. To show errors in colab notebook, set debug=True in launch()\n", 238 | "Running on public URL: https://f1db95da9dc280303b.gradio.live\n", 239 | "\n", 240 | "This share link expires in 72 hours. For free permanent hosting and GPU upgrades (NEW!), check out Spaces: https://huggingface.co/spaces\n" 241 | ] 242 | }, 243 | { 244 | "output_type": "display_data", 245 | "data": { 246 | "text/plain": [ 247 | "" 248 | ], 249 | "text/html": [ 250 | "
" 251 | ] 252 | }, 253 | "metadata": {} 254 | }, 255 | { 256 | "output_type": "execute_result", 257 | "data": { 258 | "text/plain": [] 259 | }, 260 | "metadata": {}, 261 | "execution_count": 3 262 | } 263 | ] 264 | } 265 | ] 266 | } -------------------------------------------------------------------------------- /whisper-speech-text/whisper-api-basic.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "nbformat": 4, 3 | "nbformat_minor": 0, 4 | "metadata": { 5 | "colab": { 6 | "provenance": [] 7 | }, 8 | "kernelspec": { 9 | "name": "python3", 10 | "display_name": "Python 3" 11 | }, 12 | "language_info": { 13 | "name": "python" 14 | } 15 | }, 16 | "cells": [ 17 | { 18 | "cell_type": "markdown", 19 | "source": [ 20 | "**Setup**" 21 | ], 22 | "metadata": { 23 | "id": "alc2PUi540YB" 24 | } 25 | }, 26 | { 27 | "cell_type": "code", 28 | "source": [ 29 | "!pip install git+https://github.com/openai/whisper.git \n", 30 | "!pip install pydub\n", 31 | "!pip install SpeechRecognition\n", 32 | "!apt install ffmpeg\n", 33 | "!pip install gradio\n", 34 | "!pip install openai" 35 | ], 36 | "metadata": { 37 | "colab": { 38 | "base_uri": "https://localhost:8080/" 39 | }, 40 | "id": "-HqlBlYF5G4F", 41 | "outputId": "33d13e42-56e4-47e2-a180-f36c9fb17360" 42 | }, 43 | "execution_count": null, 44 | "outputs": [ 45 | { 46 | "output_type": "stream", 47 | "name": "stdout", 48 | "text": [ 49 | "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n", 50 | "Collecting git+https://github.com/openai/whisper.git\n", 51 | " Cloning https://github.com/openai/whisper.git to /tmp/pip-req-build-b7uee669\n", 52 | " Running command git clone --filter=blob:none --quiet https://github.com/openai/whisper.git /tmp/pip-req-build-b7uee669\n", 53 | " Resolved https://github.com/openai/whisper.git to commit 3e1780fd37686666f568be9c99f5b5e3e4f2eb92\n", 54 | " Preparing metadata (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n", 55 | "Requirement already satisfied: numpy in /usr/local/lib/python3.8/dist-packages (from openai-whisper==20230124) (1.22.4)\n", 56 | "Requirement already satisfied: torch in /usr/local/lib/python3.8/dist-packages (from openai-whisper==20230124) (1.13.1+cu116)\n", 57 | "Requirement already satisfied: tqdm in /usr/local/lib/python3.8/dist-packages (from openai-whisper==20230124) (4.64.1)\n", 58 | "Requirement already satisfied: more-itertools in /usr/local/lib/python3.8/dist-packages (from openai-whisper==20230124) (9.1.0)\n", 59 | "Requirement already satisfied: transformers>=4.19.0 in /usr/local/lib/python3.8/dist-packages (from openai-whisper==20230124) (4.26.1)\n", 60 | "Requirement already satisfied: ffmpeg-python==0.2.0 in /usr/local/lib/python3.8/dist-packages (from openai-whisper==20230124) (0.2.0)\n", 61 | "Requirement already satisfied: future in /usr/local/lib/python3.8/dist-packages (from ffmpeg-python==0.2.0->openai-whisper==20230124) (0.16.0)\n", 62 | "Requirement already satisfied: requests in /usr/local/lib/python3.8/dist-packages (from transformers>=4.19.0->openai-whisper==20230124) (2.28.2)\n", 63 | "Requirement already satisfied: filelock in /usr/local/lib/python3.8/dist-packages (from transformers>=4.19.0->openai-whisper==20230124) (3.9.0)\n", 64 | "Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.8/dist-packages (from transformers>=4.19.0->openai-whisper==20230124) (23.0)\n", 65 | "Requirement already satisfied: regex!=2019.12.17 in /usr/local/lib/python3.8/dist-packages (from transformers>=4.19.0->openai-whisper==20230124) (2022.6.2)\n", 66 | "Requirement already satisfied: tokenizers!=0.11.3,<0.14,>=0.11.1 in /usr/local/lib/python3.8/dist-packages (from transformers>=4.19.0->openai-whisper==20230124) (0.13.2)\n", 67 | "Requirement already satisfied: pyyaml>=5.1 in /usr/local/lib/python3.8/dist-packages (from transformers>=4.19.0->openai-whisper==20230124) (6.0)\n", 68 | "Requirement already satisfied: huggingface-hub<1.0,>=0.11.0 in /usr/local/lib/python3.8/dist-packages (from transformers>=4.19.0->openai-whisper==20230124) (0.12.1)\n", 69 | "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.8/dist-packages (from torch->openai-whisper==20230124) (4.5.0)\n", 70 | "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.8/dist-packages (from requests->transformers>=4.19.0->openai-whisper==20230124) (2.10)\n", 71 | "Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.8/dist-packages (from requests->transformers>=4.19.0->openai-whisper==20230124) (1.26.14)\n", 72 | "Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.8/dist-packages (from requests->transformers>=4.19.0->openai-whisper==20230124) (3.0.1)\n", 73 | "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.8/dist-packages (from requests->transformers>=4.19.0->openai-whisper==20230124) (2022.12.7)\n", 74 | "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n", 75 | "Requirement already satisfied: pydub in /usr/local/lib/python3.8/dist-packages (0.25.1)\n", 76 | "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n", 77 | "Requirement already satisfied: SpeechRecognition in /usr/local/lib/python3.8/dist-packages (3.9.0)\n", 78 | "Requirement already satisfied: requests>=2.26.0 in /usr/local/lib/python3.8/dist-packages (from SpeechRecognition) 
(2.28.2)\n", 79 | "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.8/dist-packages (from requests>=2.26.0->SpeechRecognition) (2.10)\n", 80 | "Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.8/dist-packages (from requests>=2.26.0->SpeechRecognition) (1.26.14)\n", 81 | "Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.8/dist-packages (from requests>=2.26.0->SpeechRecognition) (3.0.1)\n", 82 | "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.8/dist-packages (from requests>=2.26.0->SpeechRecognition) (2022.12.7)\n", 83 | "Reading package lists... Done\n", 84 | "Building dependency tree \n", 85 | "Reading state information... Done\n", 86 | "ffmpeg is already the newest version (7:4.2.7-0ubuntu0.1).\n", 87 | "0 upgraded, 0 newly installed, 0 to remove and 22 not upgraded.\n", 88 | "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n", 89 | "Requirement already satisfied: gradio in /usr/local/lib/python3.8/dist-packages (3.20.0)\n", 90 | "Requirement already satisfied: pycryptodome in /usr/local/lib/python3.8/dist-packages (from gradio) (3.17)\n", 91 | "Requirement already satisfied: pillow in /usr/local/lib/python3.8/dist-packages (from gradio) (8.4.0)\n", 92 | "Requirement already satisfied: uvicorn in /usr/local/lib/python3.8/dist-packages (from gradio) (0.20.0)\n", 93 | "Requirement already satisfied: pyyaml in /usr/local/lib/python3.8/dist-packages (from gradio) (6.0)\n", 94 | "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.8/dist-packages (from gradio) (4.5.0)\n", 95 | "Requirement already satisfied: pydantic in /usr/local/lib/python3.8/dist-packages (from gradio) (1.10.5)\n", 96 | "Requirement already satisfied: fastapi in /usr/local/lib/python3.8/dist-packages (from gradio) (0.92.0)\n", 97 | "Requirement already satisfied: markupsafe in /usr/local/lib/python3.8/dist-packages (from gradio) (2.1.2)\n", 98 | "Requirement already satisfied: python-multipart in /usr/local/lib/python3.8/dist-packages (from gradio) (0.0.6)\n", 99 | "Requirement already satisfied: mdit-py-plugins<=0.3.3 in /usr/local/lib/python3.8/dist-packages (from gradio) (0.3.3)\n", 100 | "Requirement already satisfied: aiofiles in /usr/local/lib/python3.8/dist-packages (from gradio) (23.1.0)\n", 101 | "Requirement already satisfied: orjson in /usr/local/lib/python3.8/dist-packages (from gradio) (3.8.7)\n", 102 | "Requirement already satisfied: fsspec in /usr/local/lib/python3.8/dist-packages (from gradio) (2023.1.0)\n", 103 | "Requirement already satisfied: altair>=4.2.0 in /usr/local/lib/python3.8/dist-packages (from gradio) (4.2.2)\n", 104 | "Requirement already satisfied: markdown-it-py[linkify]>=2.0.0 in /usr/local/lib/python3.8/dist-packages (from gradio) (2.2.0)\n", 105 | "Requirement already satisfied: jinja2 in /usr/local/lib/python3.8/dist-packages (from gradio) (3.1.2)\n", 106 | "Requirement already satisfied: ffmpy in /usr/local/lib/python3.8/dist-packages (from gradio) (0.3.0)\n", 107 | "Requirement already satisfied: numpy in /usr/local/lib/python3.8/dist-packages (from gradio) (1.22.4)\n", 108 | "Requirement already satisfied: matplotlib in /usr/local/lib/python3.8/dist-packages (from gradio) (3.5.3)\n", 109 | "Requirement already satisfied: aiohttp in /usr/local/lib/python3.8/dist-packages (from gradio) (3.8.4)\n", 110 | "Requirement already satisfied: websockets>=10.0 in /usr/local/lib/python3.8/dist-packages (from 
gradio) (10.4)\n", 111 | "Requirement already satisfied: requests in /usr/local/lib/python3.8/dist-packages (from gradio) (2.28.2)\n", 112 | "Requirement already satisfied: httpx in /usr/local/lib/python3.8/dist-packages (from gradio) (0.23.3)\n", 113 | "Requirement already satisfied: pydub in /usr/local/lib/python3.8/dist-packages (from gradio) (0.25.1)\n", 114 | "Requirement already satisfied: pandas in /usr/local/lib/python3.8/dist-packages (from gradio) (1.3.5)\n", 115 | "Requirement already satisfied: toolz in /usr/local/lib/python3.8/dist-packages (from altair>=4.2.0->gradio) (0.12.0)\n", 116 | "Requirement already satisfied: entrypoints in /usr/local/lib/python3.8/dist-packages (from altair>=4.2.0->gradio) (0.4)\n", 117 | "Requirement already satisfied: jsonschema>=3.0 in /usr/local/lib/python3.8/dist-packages (from altair>=4.2.0->gradio) (4.3.3)\n", 118 | "Requirement already satisfied: mdurl~=0.1 in /usr/local/lib/python3.8/dist-packages (from markdown-it-py[linkify]>=2.0.0->gradio) (0.1.2)\n", 119 | "Requirement already satisfied: linkify-it-py<3,>=1 in /usr/local/lib/python3.8/dist-packages (from markdown-it-py[linkify]>=2.0.0->gradio) (2.0.0)\n", 120 | "Requirement already satisfied: python-dateutil>=2.7.3 in /usr/local/lib/python3.8/dist-packages (from pandas->gradio) (2.8.2)\n", 121 | "Requirement already satisfied: pytz>=2017.3 in /usr/local/lib/python3.8/dist-packages (from pandas->gradio) (2022.7.1)\n", 122 | "Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.8/dist-packages (from aiohttp->gradio) (1.3.3)\n", 123 | "Requirement already satisfied: async-timeout<5.0,>=4.0.0a3 in /usr/local/lib/python3.8/dist-packages (from aiohttp->gradio) (4.0.2)\n", 124 | "Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.8/dist-packages (from aiohttp->gradio) (1.3.1)\n", 125 | "Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.8/dist-packages (from aiohttp->gradio) (1.8.2)\n", 126 | "Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.8/dist-packages (from aiohttp->gradio) (22.2.0)\n", 127 | "Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.8/dist-packages (from aiohttp->gradio) (6.0.4)\n", 128 | "Requirement already satisfied: charset-normalizer<4.0,>=2.0 in /usr/local/lib/python3.8/dist-packages (from aiohttp->gradio) (3.0.1)\n", 129 | "Requirement already satisfied: starlette<0.26.0,>=0.25.0 in /usr/local/lib/python3.8/dist-packages (from fastapi->gradio) (0.25.0)\n", 130 | "Requirement already satisfied: httpcore<0.17.0,>=0.15.0 in /usr/local/lib/python3.8/dist-packages (from httpx->gradio) (0.16.3)\n", 131 | "Requirement already satisfied: sniffio in /usr/local/lib/python3.8/dist-packages (from httpx->gradio) (1.3.0)\n", 132 | "Requirement already satisfied: certifi in /usr/local/lib/python3.8/dist-packages (from httpx->gradio) (2022.12.7)\n", 133 | "Requirement already satisfied: rfc3986[idna2008]<2,>=1.3 in /usr/local/lib/python3.8/dist-packages (from httpx->gradio) (1.5.0)\n", 134 | "Requirement already satisfied: fonttools>=4.22.0 in /usr/local/lib/python3.8/dist-packages (from matplotlib->gradio) (4.38.0)\n", 135 | "Requirement already satisfied: pyparsing>=2.2.1 in /usr/local/lib/python3.8/dist-packages (from matplotlib->gradio) (3.0.9)\n", 136 | "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.8/dist-packages (from matplotlib->gradio) (0.11.0)\n", 137 | "Requirement already satisfied: packaging>=20.0 in 
/usr/local/lib/python3.8/dist-packages (from matplotlib->gradio) (23.0)\n", 138 | "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.8/dist-packages (from matplotlib->gradio) (1.4.4)\n", 139 | "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.8/dist-packages (from requests->gradio) (2.10)\n", 140 | "Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.8/dist-packages (from requests->gradio) (1.26.14)\n", 141 | "Requirement already satisfied: h11>=0.8 in /usr/local/lib/python3.8/dist-packages (from uvicorn->gradio) (0.14.0)\n", 142 | "Requirement already satisfied: click>=7.0 in /usr/local/lib/python3.8/dist-packages (from uvicorn->gradio) (8.1.3)\n", 143 | "Requirement already satisfied: anyio<5.0,>=3.0 in /usr/local/lib/python3.8/dist-packages (from httpcore<0.17.0,>=0.15.0->httpx->gradio) (3.6.2)\n", 144 | "Requirement already satisfied: pyrsistent!=0.17.0,!=0.17.1,!=0.17.2,>=0.14.0 in /usr/local/lib/python3.8/dist-packages (from jsonschema>=3.0->altair>=4.2.0->gradio) (0.19.3)\n", 145 | "Requirement already satisfied: importlib-resources>=1.4.0 in /usr/local/lib/python3.8/dist-packages (from jsonschema>=3.0->altair>=4.2.0->gradio) (5.12.0)\n", 146 | "Requirement already satisfied: uc-micro-py in /usr/local/lib/python3.8/dist-packages (from linkify-it-py<3,>=1->markdown-it-py[linkify]>=2.0.0->gradio) (1.0.1)\n", 147 | "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.8/dist-packages (from python-dateutil>=2.7.3->pandas->gradio) (1.15.0)\n", 148 | "Requirement already satisfied: zipp>=3.1.0 in /usr/local/lib/python3.8/dist-packages (from importlib-resources>=1.4.0->jsonschema>=3.0->altair>=4.2.0->gradio) (3.15.0)\n", 149 | "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n", 150 | "Collecting openai\n", 151 | " Downloading openai-0.27.0-py3-none-any.whl (70 kB)\n", 152 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m70.1/70.1 KB\u001b[0m \u001b[31m2.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 153 | "\u001b[?25hRequirement already satisfied: aiohttp in /usr/local/lib/python3.8/dist-packages (from openai) (3.8.4)\n", 154 | "Requirement already satisfied: tqdm in /usr/local/lib/python3.8/dist-packages (from openai) (4.64.1)\n", 155 | "Requirement already satisfied: requests>=2.20 in /usr/local/lib/python3.8/dist-packages (from openai) (2.28.2)\n", 156 | "Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.8/dist-packages (from requests>=2.20->openai) (3.0.1)\n", 157 | "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.8/dist-packages (from requests>=2.20->openai) (2022.12.7)\n", 158 | "Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.8/dist-packages (from requests>=2.20->openai) (1.26.14)\n", 159 | "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.8/dist-packages (from requests>=2.20->openai) (2.10)\n", 160 | "Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.8/dist-packages (from aiohttp->openai) (1.8.2)\n", 161 | "Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.8/dist-packages (from aiohttp->openai) (6.0.4)\n", 162 | "Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.8/dist-packages (from aiohttp->openai) (1.3.1)\n", 163 | "Requirement already satisfied: async-timeout<5.0,>=4.0.0a3 in /usr/local/lib/python3.8/dist-packages (from 
aiohttp->openai) (4.0.2)\n", 164 | "Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.8/dist-packages (from aiohttp->openai) (22.2.0)\n", 165 | "Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.8/dist-packages (from aiohttp->openai) (1.3.3)\n", 166 | "Installing collected packages: openai\n", 167 | "Successfully installed openai-0.27.0\n" 168 | ] 169 | } 170 | ] 171 | }, 172 | { 173 | "cell_type": "markdown", 174 | "source": [ 175 | "**Method 1 : Simple (Error Prone)**" 176 | ], 177 | "metadata": { 178 | "id": "kd6CL_K93UGu" 179 | } 180 | }, 181 | { 182 | "cell_type": "code", 183 | "source": [ 184 | "import whisper\n", 185 | "\n", 186 | "model = whisper.load_model('base')\n", 187 | "result = model.transcribe('what.mp3', fp16=False)" 188 | ], 189 | "metadata": { 190 | "colab": { 191 | "base_uri": "https://localhost:8080/" 192 | }, 193 | "id": "1_4gZG129xKc", 194 | "outputId": "9b728e89-43f6-42fc-823f-2d8d15334c85" 195 | }, 196 | "execution_count": null, 197 | "outputs": [ 198 | { 199 | "output_type": "stream", 200 | "name": "stderr", 201 | "text": [ 202 | "100%|████████████████████████████████████████| 139M/139M [00:01<00:00, 109MiB/s]\n" 203 | ] 204 | } 205 | ] 206 | }, 207 | { 208 | "cell_type": "code", 209 | "source": [ 210 | "print(result.keys())\n", 211 | "result['text']" 212 | ], 213 | "metadata": { 214 | "colab": { 215 | "base_uri": "https://localhost:8080/", 216 | "height": 53 217 | }, 218 | "id": "9AD05eca_-xL", 219 | "outputId": "c399fd09-4b58-4b2e-b8d6-30604ea926ae" 220 | }, 221 | "execution_count": null, 222 | "outputs": [ 223 | { 224 | "output_type": "stream", 225 | "name": "stdout", 226 | "text": [ 227 | "dict_keys(['text', 'segments', 'language'])\n" 228 | ] 229 | }, 230 | { 231 | "output_type": "execute_result", 232 | "data": { 233 | "text/plain": [ 234 | "' What are you doing?'" 235 | ], 236 | "application/vnd.google.colaboratory.intrinsic+json": { 237 | "type": "string" 238 | } 239 | }, 240 | "metadata": {}, 241 | "execution_count": 7 242 | } 243 | ] 244 | }, 245 | { 246 | "cell_type": "markdown", 247 | "source": [ 248 | "**Method 2 : Low level access**" 249 | ], 250 | "metadata": { 251 | "id": "Qf6_56MQRfoh" 252 | } 253 | }, 254 | { 255 | "cell_type": "code", 256 | "source": [ 257 | "import whisper\n", 258 | "\n", 259 | "model = whisper.load_model(\"base\")\n", 260 | "\n", 261 | "# load audio and pad/trim it to fit 30 seconds\n", 262 | "audio = whisper.load_audio(\"what.mp3\")\n", 263 | "audio = whisper.pad_or_trim(audio)\n", 264 | "\n", 265 | "# make log-Mel spectrogram and move to the same device as the model\n", 266 | "mel = whisper.log_mel_spectrogram(audio).to(model.device)\n", 267 | "\n", 268 | "# detect the spoken language\n", 269 | "_, probs = model.detect_language(mel)\n", 270 | "print(f\"Detected language: {max(probs, key=probs.get)}\")\n", 271 | "\n", 272 | "# decode the audio\n", 273 | "options = whisper.DecodingOptions(fp16 = False)\n", 274 | "result = whisper.decode(model, mel, options)\n", 275 | "\n", 276 | "# print the recognized text\n", 277 | "print(result.text)" 278 | ], 279 | "metadata": { 280 | "colab": { 281 | "base_uri": "https://localhost:8080/" 282 | }, 283 | "id": "uki41p_7RbFn", 284 | "outputId": "afd08a03-dbd8-4afa-f9f4-53900c0f6977" 285 | }, 286 | "execution_count": null, 287 | "outputs": [ 288 | { 289 | "output_type": "stream", 290 | "name": "stdout", 291 | "text": [ 292 | "Detected language: en\n", 293 | "What are you doing?\n" 294 | ] 295 | } 296 | ] 297 | }, 298 | { 299 | "cell_type": 
"markdown", 300 | "source": [ 301 | "**`Whisper` vs `Google speech_recognition`**" 302 | ], 303 | "metadata": { 304 | "id": "qTjZOwufV9Xr" 305 | } 306 | }, 307 | { 308 | "cell_type": "code", 309 | "source": [ 310 | "import speech_recognition as sr \n", 311 | "\n", 312 | "r = sr.Recognizer() \n", 313 | "with sr.AudioFile('what.wav') as source:\n", 314 | " audio_text = r.listen(source)\n", 315 | "\n", 316 | "g = r.recognize_google(audio_text, show_all=True)\n", 317 | "print(g.keys())\n", 318 | "\n", 319 | "print(g['alternative'][0]['transcript'])" 320 | ], 321 | "metadata": { 322 | "colab": { 323 | "base_uri": "https://localhost:8080/" 324 | }, 325 | "id": "dm66mUgeV9GY", 326 | "outputId": "1f4680cf-3602-4c04-835c-9f09005e94e7" 327 | }, 328 | "execution_count": null, 329 | "outputs": [ 330 | { 331 | "output_type": "stream", 332 | "name": "stdout", 333 | "text": [ 334 | "dict_keys(['alternative', 'final'])\n", 335 | "what are you doing\n" 336 | ] 337 | } 338 | ] 339 | }, 340 | { 341 | "cell_type": "markdown", 342 | "source": [ 343 | "--------" 344 | ], 345 | "metadata": { 346 | "id": "Kv1TySuPV7-E" 347 | } 348 | }, 349 | { 350 | "cell_type": "markdown", 351 | "source": [ 352 | "**For Longer Inputs** : Whisper can't work on file that is larger than 25mb" 353 | ], 354 | "metadata": { 355 | "id": "iOvk7yNm5uWm" 356 | } 357 | }, 358 | { 359 | "cell_type": "code", 360 | "execution_count": null, 361 | "metadata": { 362 | "id": "4XMgZPIbxlrJ" 363 | }, 364 | "outputs": [], 365 | "source": [ 366 | "from pydub import AudioSegment\n", 367 | "\n", 368 | "song = AudioSegment.from_mp3(\"good_morning.mp3\")\n", 369 | "\n", 370 | "# PyDub handles time in milliseconds\n", 371 | "ten_minutes = 10 * 60 * 1000\n", 372 | "\n", 373 | "first_10_minutes = song[:ten_minutes]\n", 374 | "\n", 375 | "first_10_minutes.export(\"good_morning_10.mp3\", format=\"mp3\")" 376 | ] 377 | }, 378 | { 379 | "cell_type": "code", 380 | "source": [], 381 | "metadata": { 382 | "id": "9uEzhu_93TmJ" 383 | }, 384 | "execution_count": null, 385 | "outputs": [] 386 | }, 387 | { 388 | "cell_type": "code", 389 | "source": [], 390 | "metadata": { 391 | "id": "fAa4eC9a3S-L" 392 | }, 393 | "execution_count": null, 394 | "outputs": [] 395 | } 396 | ] 397 | } -------------------------------------------------------------------------------- /vector-database/Vector_Databse.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "nbformat": 4, 3 | "nbformat_minor": 0, 4 | "metadata": { 5 | "colab": { 6 | "provenance": [] 7 | }, 8 | "kernelspec": { 9 | "name": "python3", 10 | "display_name": "Python 3" 11 | }, 12 | "language_info": { 13 | "name": "python" 14 | } 15 | }, 16 | "cells": [ 17 | { 18 | "cell_type": "code", 19 | "execution_count": null, 20 | "metadata": { 21 | "colab": { 22 | "base_uri": "https://localhost:8080/" 23 | }, 24 | "id": "jhG5LrAEXOqE", 25 | "outputId": "386757d5-8320-45be-fad8-46c4ca3989fa" 26 | }, 27 | "outputs": [ 28 | { 29 | "output_type": "stream", 30 | "name": "stdout", 31 | "text": [ 32 | "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n", 33 | "Requirement already satisfied: openai in /usr/local/lib/python3.9/dist-packages (0.27.4)\n", 34 | "Requirement already satisfied: chromadb in /usr/local/lib/python3.9/dist-packages (0.3.21)\n", 35 | "Requirement already satisfied: langchain in /usr/local/lib/python3.9/dist-packages (0.0.137)\n", 36 | "Requirement already satisfied: tiktoken in /usr/local/lib/python3.9/dist-packages 
(0.3.3)\n", 37 | "Requirement already satisfied: requests>=2.20 in /usr/local/lib/python3.9/dist-packages (from openai) (2.28.2)\n", 38 | "Requirement already satisfied: tqdm in /usr/local/lib/python3.9/dist-packages (from openai) (4.65.0)\n", 39 | "Requirement already satisfied: aiohttp in /usr/local/lib/python3.9/dist-packages (from openai) (3.8.4)\n", 40 | "Requirement already satisfied: sentence-transformers>=2.2.2 in /usr/local/lib/python3.9/dist-packages (from chromadb) (2.2.2)\n", 41 | "Requirement already satisfied: hnswlib>=0.7 in /usr/local/lib/python3.9/dist-packages (from chromadb) (0.7.0)\n", 42 | "Requirement already satisfied: pandas>=1.3 in /usr/local/lib/python3.9/dist-packages (from chromadb) (1.5.3)\n", 43 | "Requirement already satisfied: uvicorn[standard]>=0.18.3 in /usr/local/lib/python3.9/dist-packages (from chromadb) (0.21.1)\n", 44 | "Requirement already satisfied: numpy>=1.21.6 in /usr/local/lib/python3.9/dist-packages (from chromadb) (1.22.4)\n", 45 | "Requirement already satisfied: clickhouse-connect>=0.5.7 in /usr/local/lib/python3.9/dist-packages (from chromadb) (0.5.20)\n", 46 | "Requirement already satisfied: pydantic>=1.9 in /usr/local/lib/python3.9/dist-packages (from chromadb) (1.10.7)\n", 47 | "Requirement already satisfied: fastapi>=0.85.1 in /usr/local/lib/python3.9/dist-packages (from chromadb) (0.95.0)\n", 48 | "Requirement already satisfied: posthog>=2.4.0 in /usr/local/lib/python3.9/dist-packages (from chromadb) (2.5.0)\n", 49 | "Requirement already satisfied: duckdb>=0.7.1 in /usr/local/lib/python3.9/dist-packages (from chromadb) (0.7.1)\n", 50 | "Requirement already satisfied: async-timeout<5.0.0,>=4.0.0 in /usr/local/lib/python3.9/dist-packages (from langchain) (4.0.2)\n", 51 | "Requirement already satisfied: tenacity<9.0.0,>=8.1.0 in /usr/local/lib/python3.9/dist-packages (from langchain) (8.2.2)\n", 52 | "Requirement already satisfied: SQLAlchemy<2,>=1 in /usr/local/lib/python3.9/dist-packages (from langchain) (1.4.47)\n", 53 | "Requirement already satisfied: openapi-schema-pydantic<2.0,>=1.2 in /usr/local/lib/python3.9/dist-packages (from langchain) (1.2.4)\n", 54 | "Requirement already satisfied: PyYAML>=5.4.1 in /usr/local/lib/python3.9/dist-packages (from langchain) (6.0)\n", 55 | "Requirement already satisfied: dataclasses-json<0.6.0,>=0.5.7 in /usr/local/lib/python3.9/dist-packages (from langchain) (0.5.7)\n", 56 | "Requirement already satisfied: regex>=2022.1.18 in /usr/local/lib/python3.9/dist-packages (from tiktoken) (2022.10.31)\n", 57 | "Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.9/dist-packages (from aiohttp->openai) (6.0.4)\n", 58 | "Requirement already satisfied: charset-normalizer<4.0,>=2.0 in /usr/local/lib/python3.9/dist-packages (from aiohttp->openai) (2.0.12)\n", 59 | "Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.9/dist-packages (from aiohttp->openai) (1.3.1)\n", 60 | "Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.9/dist-packages (from aiohttp->openai) (1.8.2)\n", 61 | "Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.9/dist-packages (from aiohttp->openai) (1.3.3)\n", 62 | "Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.9/dist-packages (from aiohttp->openai) (22.2.0)\n", 63 | "Requirement already satisfied: urllib3>=1.26 in /usr/local/lib/python3.9/dist-packages (from clickhouse-connect>=0.5.7->chromadb) (1.26.15)\n", 64 | "Requirement already satisfied: zstandard in 
/usr/local/lib/python3.9/dist-packages (from clickhouse-connect>=0.5.7->chromadb) (0.20.0)\n", 65 | "Requirement already satisfied: certifi in /usr/local/lib/python3.9/dist-packages (from clickhouse-connect>=0.5.7->chromadb) (2022.12.7)\n", 66 | "Requirement already satisfied: lz4 in /usr/local/lib/python3.9/dist-packages (from clickhouse-connect>=0.5.7->chromadb) (4.3.2)\n", 67 | "Requirement already satisfied: pytz in /usr/local/lib/python3.9/dist-packages (from clickhouse-connect>=0.5.7->chromadb) (2022.7.1)\n", 68 | "Requirement already satisfied: marshmallow<4.0.0,>=3.3.0 in /usr/local/lib/python3.9/dist-packages (from dataclasses-json<0.6.0,>=0.5.7->langchain) (3.19.0)\n", 69 | "Requirement already satisfied: typing-inspect>=0.4.0 in /usr/local/lib/python3.9/dist-packages (from dataclasses-json<0.6.0,>=0.5.7->langchain) (0.8.0)\n", 70 | "Requirement already satisfied: marshmallow-enum<2.0.0,>=1.5.1 in /usr/local/lib/python3.9/dist-packages (from dataclasses-json<0.6.0,>=0.5.7->langchain) (1.5.1)\n", 71 | "Requirement already satisfied: starlette<0.27.0,>=0.26.1 in /usr/local/lib/python3.9/dist-packages (from fastapi>=0.85.1->chromadb) (0.26.1)\n", 72 | "Requirement already satisfied: python-dateutil>=2.8.1 in /usr/local/lib/python3.9/dist-packages (from pandas>=1.3->chromadb) (2.8.2)\n", 73 | "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.9/dist-packages (from posthog>=2.4.0->chromadb) (1.16.0)\n", 74 | "Requirement already satisfied: backoff>=1.10.0 in /usr/local/lib/python3.9/dist-packages (from posthog>=2.4.0->chromadb) (2.2.1)\n", 75 | "Requirement already satisfied: monotonic>=1.5 in /usr/local/lib/python3.9/dist-packages (from posthog>=2.4.0->chromadb) (1.6)\n", 76 | "Requirement already satisfied: typing-extensions>=4.2.0 in /usr/local/lib/python3.9/dist-packages (from pydantic>=1.9->chromadb) (4.5.0)\n", 77 | "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.9/dist-packages (from requests>=2.20->openai) (3.4)\n", 78 | "Requirement already satisfied: huggingface-hub>=0.4.0 in /usr/local/lib/python3.9/dist-packages (from sentence-transformers>=2.2.2->chromadb) (0.13.4)\n", 79 | "Requirement already satisfied: scipy in /usr/local/lib/python3.9/dist-packages (from sentence-transformers>=2.2.2->chromadb) (1.10.1)\n", 80 | "Requirement already satisfied: nltk in /usr/local/lib/python3.9/dist-packages (from sentence-transformers>=2.2.2->chromadb) (3.8.1)\n", 81 | "Requirement already satisfied: torch>=1.6.0 in /usr/local/lib/python3.9/dist-packages (from sentence-transformers>=2.2.2->chromadb) (2.0.0+cu118)\n", 82 | "Requirement already satisfied: transformers<5.0.0,>=4.6.0 in /usr/local/lib/python3.9/dist-packages (from sentence-transformers>=2.2.2->chromadb) (4.27.4)\n", 83 | "Requirement already satisfied: sentencepiece in /usr/local/lib/python3.9/dist-packages (from sentence-transformers>=2.2.2->chromadb) (0.1.98)\n", 84 | "Requirement already satisfied: torchvision in /usr/local/lib/python3.9/dist-packages (from sentence-transformers>=2.2.2->chromadb) (0.15.1+cu118)\n", 85 | "Requirement already satisfied: scikit-learn in /usr/local/lib/python3.9/dist-packages (from sentence-transformers>=2.2.2->chromadb) (1.2.2)\n", 86 | "Requirement already satisfied: greenlet!=0.4.17 in /usr/local/lib/python3.9/dist-packages (from SQLAlchemy<2,>=1->langchain) (2.0.2)\n", 87 | "Requirement already satisfied: click>=7.0 in /usr/local/lib/python3.9/dist-packages (from uvicorn[standard]>=0.18.3->chromadb) (8.1.3)\n", 88 | "Requirement already satisfied: 
h11>=0.8 in /usr/local/lib/python3.9/dist-packages (from uvicorn[standard]>=0.18.3->chromadb) (0.14.0)\n", 89 | "Requirement already satisfied: websockets>=10.4 in /usr/local/lib/python3.9/dist-packages (from uvicorn[standard]>=0.18.3->chromadb) (11.0.1)\n", 90 | "Requirement already satisfied: python-dotenv>=0.13 in /usr/local/lib/python3.9/dist-packages (from uvicorn[standard]>=0.18.3->chromadb) (1.0.0)\n", 91 | "Requirement already satisfied: httptools>=0.5.0 in /usr/local/lib/python3.9/dist-packages (from uvicorn[standard]>=0.18.3->chromadb) (0.5.0)\n", 92 | "Requirement already satisfied: uvloop!=0.15.0,!=0.15.1,>=0.14.0 in /usr/local/lib/python3.9/dist-packages (from uvicorn[standard]>=0.18.3->chromadb) (0.17.0)\n", 93 | "Requirement already satisfied: watchfiles>=0.13 in /usr/local/lib/python3.9/dist-packages (from uvicorn[standard]>=0.18.3->chromadb) (0.19.0)\n", 94 | "Requirement already satisfied: filelock in /usr/local/lib/python3.9/dist-packages (from huggingface-hub>=0.4.0->sentence-transformers>=2.2.2->chromadb) (3.11.0)\n", 95 | "Requirement already satisfied: packaging>=20.9 in /usr/local/lib/python3.9/dist-packages (from huggingface-hub>=0.4.0->sentence-transformers>=2.2.2->chromadb) (23.0)\n", 96 | "Requirement already satisfied: anyio<5,>=3.4.0 in /usr/local/lib/python3.9/dist-packages (from starlette<0.27.0,>=0.26.1->fastapi>=0.85.1->chromadb) (3.6.2)\n", 97 | "Requirement already satisfied: sympy in /usr/local/lib/python3.9/dist-packages (from torch>=1.6.0->sentence-transformers>=2.2.2->chromadb) (1.11.1)\n", 98 | "Requirement already satisfied: networkx in /usr/local/lib/python3.9/dist-packages (from torch>=1.6.0->sentence-transformers>=2.2.2->chromadb) (3.1)\n", 99 | "Requirement already satisfied: triton==2.0.0 in /usr/local/lib/python3.9/dist-packages (from torch>=1.6.0->sentence-transformers>=2.2.2->chromadb) (2.0.0)\n", 100 | "Requirement already satisfied: jinja2 in /usr/local/lib/python3.9/dist-packages (from torch>=1.6.0->sentence-transformers>=2.2.2->chromadb) (3.1.2)\n", 101 | "Requirement already satisfied: cmake in /usr/local/lib/python3.9/dist-packages (from triton==2.0.0->torch>=1.6.0->sentence-transformers>=2.2.2->chromadb) (3.25.2)\n", 102 | "Requirement already satisfied: lit in /usr/local/lib/python3.9/dist-packages (from triton==2.0.0->torch>=1.6.0->sentence-transformers>=2.2.2->chromadb) (16.0.0)\n", 103 | "Requirement already satisfied: tokenizers!=0.11.3,<0.14,>=0.11.1 in /usr/local/lib/python3.9/dist-packages (from transformers<5.0.0,>=4.6.0->sentence-transformers>=2.2.2->chromadb) (0.13.3)\n", 104 | "Requirement already satisfied: mypy-extensions>=0.3.0 in /usr/local/lib/python3.9/dist-packages (from typing-inspect>=0.4.0->dataclasses-json<0.6.0,>=0.5.7->langchain) (1.0.0)\n", 105 | "Requirement already satisfied: joblib in /usr/local/lib/python3.9/dist-packages (from nltk->sentence-transformers>=2.2.2->chromadb) (1.2.0)\n", 106 | "Requirement already satisfied: threadpoolctl>=2.0.0 in /usr/local/lib/python3.9/dist-packages (from scikit-learn->sentence-transformers>=2.2.2->chromadb) (3.1.0)\n", 107 | "Requirement already satisfied: pillow!=8.3.*,>=5.3.0 in /usr/local/lib/python3.9/dist-packages (from torchvision->sentence-transformers>=2.2.2->chromadb) (8.4.0)\n", 108 | "Requirement already satisfied: sniffio>=1.1 in /usr/local/lib/python3.9/dist-packages (from anyio<5,>=3.4.0->starlette<0.27.0,>=0.26.1->fastapi>=0.85.1->chromadb) (1.3.0)\n", 109 | "Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.9/dist-packages (from 
jinja2->torch>=1.6.0->sentence-transformers>=2.2.2->chromadb) (2.1.2)\n", 110 | "Requirement already satisfied: mpmath>=0.19 in /usr/local/lib/python3.9/dist-packages (from sympy->torch>=1.6.0->sentence-transformers>=2.2.2->chromadb) (1.3.0)\n" 111 | ] 112 | } 113 | ], 114 | "source": [ 115 | "!pip install openai chromadb langchain tiktoken" 116 | ] 117 | }, 118 | { 119 | "cell_type": "code", 120 | "source": [ 121 | "import os\n", 122 | "import platform\n", 123 | "\n", 124 | "import openai\n", 125 | "import chromadb\n", 126 | "import langchain\n", 127 | "\n", 128 | "from langchain.embeddings.openai import OpenAIEmbeddings\n", 129 | "from langchain.vectorstores import Chroma\n", 130 | "from langchain.text_splitter import TokenTextSplitter\n", 131 | "from langchain.llms import OpenAI\n", 132 | "from langchain.chains import ChatVectorDBChain\n", 133 | "from langchain.document_loaders import GutenbergLoader\n", 134 | "\n", 135 | "print('Python: ', platform.python_version())" 136 | ], 137 | "metadata": { 138 | "colab": { 139 | "base_uri": "https://localhost:8080/" 140 | }, 141 | "id": "6Ax08qxkXUn5", 142 | "outputId": "a6f642a2-4071-4c6c-c9e1-ec612e43f4fc" 143 | }, 144 | "execution_count": null, 145 | "outputs": [ 146 | { 147 | "output_type": "stream", 148 | "name": "stdout", 149 | "text": [ 150 | "Python: 3.9.16\n" 151 | ] 152 | } 153 | ] 154 | }, 155 | { 156 | "cell_type": "code", 157 | "source": [ 158 | "from langchain.chains import ConversationalRetrievalChain\n", 159 | "from langchain.chat_models import ChatOpenAI" 160 | ], 161 | "metadata": { 162 | "id": "WYLaUNDO0nh7" 163 | }, 164 | "execution_count": null, 165 | "outputs": [] 166 | }, 167 | { 168 | "cell_type": "code", 169 | "source": [ 170 | "from google.colab import drive\n", 171 | "drive.mount('/content/drive')" 172 | ], 173 | "metadata": { 174 | "colab": { 175 | "base_uri": "https://localhost:8080/" 176 | }, 177 | "id": "KSU0MbXAX_Rq", 178 | "outputId": "bee0cbb1-935d-4ec0-9e18-17d94c1cd81c" 179 | }, 180 | "execution_count": null, 181 | "outputs": [ 182 | { 183 | "output_type": "stream", 184 | "name": "stdout", 185 | "text": [ 186 | "Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount(\"/content/drive\", force_remount=True).\n" 187 | ] 188 | } 189 | ] 190 | }, 191 | { 192 | "cell_type": "code", 193 | "source": [ 194 | "os.environ[\"OPENAI_API_KEY\"] = 'sk-**********************************'" 195 | ], 196 | "metadata": { 197 | "id": "ptWdfLWeYGjv" 198 | }, 199 | "execution_count": null, 200 | "outputs": [] 201 | }, 202 | { 203 | "cell_type": "code", 204 | "source": [ 205 | "persist_directory=\"/content/drive/My Drive/Colab Notebooks/chroma/romeo\"" 206 | ], 207 | "metadata": { 208 | "id": "3e0rIjpcYdmK" 209 | }, 210 | "execution_count": null, 211 | "outputs": [] 212 | }, 213 | { 214 | "cell_type": "code", 215 | "source": [ 216 | "def get_gutenberg(url):\n", 217 | " loader = GutenbergLoader(url)\n", 218 | " data = loader.load()\n", 219 | " return data" 220 | ], 221 | "metadata": { 222 | "id": "PGGl3FVdYeNv" 223 | }, 224 | "execution_count": null, 225 | "outputs": [] 226 | }, 227 | { 228 | "cell_type": "code", 229 | "source": [ 230 | "romeoandjuliet_data = get_gutenberg('https://www.gutenberg.org/cache/epub/1513/pg1513.txt')\n", 231 | "\n", 232 | "text_splitter = TokenTextSplitter(chunk_size=1000, chunk_overlap=0)\n", 233 | "romeoandjuliet_doc = text_splitter.split_documents(romeoandjuliet_data)\n", 234 | "\n", 235 | "embeddings = OpenAIEmbeddings()\n", 236 | "vectordb = 
Chroma.from_documents(romeoandjuliet_doc, embeddings, persist_directory=persist_directory)\n", 237 | "vectordb.persist()" 238 | ], 239 | "metadata": { 240 | "colab": { 241 | "base_uri": "https://localhost:8080/" 242 | }, 243 | "id": "oMrFzKc4aM-r", 244 | "outputId": "e7559e12-c756-4b6a-e636-3caac5954656" 245 | }, 246 | "execution_count": null, 247 | "outputs": [ 248 | { 249 | "output_type": "stream", 250 | "name": "stderr", 251 | "text": [ 252 | "WARNING:chromadb:Using embedded DuckDB with persistence: data will be stored in: /content/drive/My Drive/Colab Notebooks/chroma/romeo\n" 253 | ] 254 | } 255 | ] 256 | }, 257 | { 258 | "cell_type": "code", 259 | "source": [ 260 | "romeoandjuliet_qa = ChatVectorDBChain.from_llm(OpenAI(temperature=0, model_name=\"gpt-3.5-turbo\"), vectordb, return_source_documents=True)" 261 | ], 262 | "metadata": { 263 | "colab": { 264 | "base_uri": "https://localhost:8080/" 265 | }, 266 | "id": "xmQB0e-KaRcf", 267 | "outputId": "49692a55-d635-437e-ea25-35f8b326dc44" 268 | }, 269 | "execution_count": null, 270 | "outputs": [ 271 | { 272 | "output_type": "stream", 273 | "name": "stderr", 274 | "text": [ 275 | "/usr/local/lib/python3.9/dist-packages/langchain/llms/openai.py:170: UserWarning: You are trying to use a chat model. This way of initializing it is no longer supported. Instead, please use: `from langchain.chat_models import ChatOpenAI`\n", 276 | " warnings.warn(\n", 277 | "/usr/local/lib/python3.9/dist-packages/langchain/llms/openai.py:624: UserWarning: You are trying to use a chat model. This way of initializing it is no longer supported. Instead, please use: `from langchain.chat_models import ChatOpenAI`\n", 278 | " warnings.warn(\n", 279 | "/usr/local/lib/python3.9/dist-packages/langchain/chains/conversational_retrieval/base.py:191: UserWarning: `ChatVectorDBChain` is deprecated - please use `from langchain.chains import ConversationalRetrievalChain`\n", 280 | " warnings.warn(\n" 281 | ] 282 | } 283 | ] 284 | }, 285 | { 286 | "cell_type": "code", 287 | "source": [ 288 | "chat_history = [(\"Romeo and Juliet\",\"Romeo and Juliet\")]" 289 | ], 290 | "metadata": { 291 | "id": "8mu7rUqOwqNc" 292 | }, 293 | "execution_count": null, 294 | "outputs": [] 295 | }, 296 | { 297 | "cell_type": "code", 298 | "source": [ 299 | "query = \"Have Romeo and Juliet spent the night together? Provide a verbose answer, referencing passages from the book.\"\n", 300 | "result = romeoandjuliet_qa({\"question\": query, \"chat_history\": chat_history})" 301 | ], 302 | "metadata": { 303 | "id": "iQTxM20oaj_Y" 304 | }, 305 | "execution_count": null, 306 | "outputs": [] 307 | }, 308 | { 309 | "cell_type": "code", 310 | "source": [ 311 | "result[\"source_documents\"]" 312 | ], 313 | "metadata": { 314 | "id": "5H8vuQpAatQe", 315 | "colab": { 316 | "base_uri": "https://localhost:8080/" 317 | }, 318 | "outputId": "a4151650-05c2-45ca-f498-ad4ee9f2a724" 319 | }, 320 | "execution_count": null, 321 | "outputs": [ 322 | { 323 | "output_type": "execute_result", 324 | "data": { 325 | "text/plain": [ 326 | "[Document(page_content='\\n\\n\\n\\r\\n\\n\\nROMEO.\\r\\n\\n\\nYet banished? 
Hang up philosophy.\\r\\n\\n\\nUnless philosophy can make a Juliet,\\r\\n\\n\\nDisplant a town, reverse a Prince’s doom,\\r\\n\\n\\nIt helps not, it prevails not, talk no more.\\r\\n\\n\\n\\r\\n\\n\\nFRIAR LAWRENCE.\\r\\n\\n\\nO, then I see that mad men have no ears.\\r\\n\\n\\n\\r\\n\\n\\nROMEO.\\r\\n\\n\\nHow should they, when that wise men have no eyes?\\r\\n\\n\\n\\r\\n\\n\\nFRIAR LAWRENCE.\\r\\n\\n\\nLet me dispute with thee of thy estate.\\r\\n\\n\\n\\r\\n\\n\\nROMEO.\\r\\n\\n\\nThou canst not speak of that thou dost not feel.\\r\\n\\n\\nWert thou as young as I, Juliet thy love,\\r\\n\\n\\nAn hour but married, Tybalt murdered,\\r\\n\\n\\nDoting like me, and like me banished,\\r\\n\\n\\nThen mightst thou speak, then mightst thou tear thy hair,\\r\\n\\n\\nAnd fall upon the ground as I do now,\\r\\n\\n\\nTaking the measure of an unmade grave.\\r\\n\\n\\n\\r\\n\\n\\n [_Knocking within._]\\r\\n\\n\\n\\r\\n\\n\\nFRIAR LAWRENCE.\\r\\n\\n\\nArise; one knocks. Good Romeo, hide thyself.\\r\\n\\n\\n\\r\\n\\n\\nROMEO.\\r\\n\\n\\nNot I, unless the breath of heartsick groans\\r\\n\\n\\nMist-like infold me from the search of eyes.\\r\\n\\n\\n\\r\\n\\n\\n [_Knocking._]\\r\\n\\n\\n\\r\\n\\n\\nFRIAR LAWRENCE.\\r\\n\\n\\nHark, how they knock!—Who’s there?—Romeo, arise,\\r\\n\\n\\nThou wilt be taken.—Stay awhile.—Stand up.\\r\\n\\n\\n\\r\\n\\n\\n [_Knocking._]\\r\\n\\n\\n\\r\\n\\n\\nRun to my study.—By-and-by.—God’s will,\\r\\n\\n\\nWhat simpleness is this.—I come, I come.\\r\\n\\n\\n\\r\\n\\n\\n [_Knocking._]\\r\\n\\n\\n\\r\\n\\n\\nWho knocks so hard? Whence come you, what’s your will?\\r\\n\\n\\n\\r\\n\\n\\nNURSE.\\r\\n\\n\\n[_Within._] Let me come in, and you shall know my errand.\\r\\n\\n\\nI come from Lady Juliet.\\r\\n\\n\\n\\r\\n\\n\\nFRIAR LAWRENCE.\\r\\n\\n\\nWelcome then.\\r\\n\\n\\n\\r\\n\\n\\n Enter Nurse.\\r\\n\\n\\n\\r\\n\\n\\nNURSE.\\r\\n\\n\\nO holy Friar, O, tell me, holy Friar,\\r\\n\\n\\nWhere is my lady’s lord, where’s Romeo?\\r\\n\\n\\n\\r\\n\\n\\nFRIAR LAWRENCE.\\r\\n\\n\\nThere on the ground, with his own tears made drunk.\\r\\n\\n\\n\\r\\n\\n\\nNURSE.\\r\\n\\n\\nO, he is even in my mistress’ case.\\r\\n\\n\\nJust in her case! O woeful sympathy!\\r\\n\\n\\nPiteous predicament. Even so lies she,\\r\\n\\n\\nBlubbering and weeping, weeping and blubbering.\\r\\n\\n\\nStand up, stand up; stand, and you be a man.\\r\\n\\n\\nFor Juliet’s sake, for her sake, rise and stand.\\r\\n\\n\\nWhy should you fall into so deep an O?\\r\\n\\n\\n\\r\\n\\n\\nROMEO.\\r\\n\\n\\nNurse.\\r\\n\\n\\n\\r\\n\\n\\nNURSE.\\r\\n\\n\\nAh sir, ah sir, death’s the end of all.\\r\\n\\n\\n\\r\\n\\n\\nROMEO.\\r\\n\\n\\nSpakest thou of Juliet? How is it with her?\\r\\n\\n\\nDoth not she think me an old murderer,\\r\\n\\n\\nNow I have stain’d the childhood of our joy\\r\\n\\n\\nWith blood remov’d but little from her own?\\r\\n\\n\\nWhere is she? And how doth she? And what says\\r\\n\\n\\nMy conceal’d lady to our cancell’d love?\\r\\n\\n\\n\\r\\n\\n\\nNURSE.\\r\\n\\n\\nO, she says nothing, sir, but weeps and weeps;\\r\\n\\n\\nAnd now falls on her bed, and then starts up,\\r\\n\\n\\nAnd Tybalt calls, and then on Romeo cries,\\r\\n\\n\\nAnd then down falls again.\\r\\n\\n\\n\\r\\n\\n\\nROMEO.\\r\\n\\n\\nAs if that name,\\r\\n\\n\\nShot from the deadly level of a gun,\\r\\n\\n\\nDid murder her, as that name’s cursed hand\\r\\n\\n\\nMurder’d her kinsman.', metadata={'source': 'https://www.gutenberg.org/cache/epub/1513/pg1513.txt'}),\n", 327 | " Document(page_content='\\n\\n\\n\\r\\n\\n\\nROMEO.\\r\\n\\n\\nYet banished? 
Hang up philosophy.\\r\\n\\n\\nUnless philosophy can make a Juliet,\\r\\n\\n\\nDisplant a town, reverse a Prince’s doom,\\r\\n\\n\\nIt helps not, it prevails not, talk no more.\\r\\n\\n\\n\\r\\n\\n\\nFRIAR LAWRENCE.\\r\\n\\n\\nO, then I see that mad men have no ears.\\r\\n\\n\\n\\r\\n\\n\\nROMEO.\\r\\n\\n\\nHow should they, when that wise men have no eyes?\\r\\n\\n\\n\\r\\n\\n\\nFRIAR LAWRENCE.\\r\\n\\n\\nLet me dispute with thee of thy estate.\\r\\n\\n\\n\\r\\n\\n\\nROMEO.\\r\\n\\n\\nThou canst not speak of that thou dost not feel.\\r\\n\\n\\nWert thou as young as I, Juliet thy love,\\r\\n\\n\\nAn hour but married, Tybalt murdered,\\r\\n\\n\\nDoting like me, and like me banished,\\r\\n\\n\\nThen mightst thou speak, then mightst thou tear thy hair,\\r\\n\\n\\nAnd fall upon the ground as I do now,\\r\\n\\n\\nTaking the measure of an unmade grave.\\r\\n\\n\\n\\r\\n\\n\\n [_Knocking within._]\\r\\n\\n\\n\\r\\n\\n\\nFRIAR LAWRENCE.\\r\\n\\n\\nArise; one knocks. Good Romeo, hide thyself.\\r\\n\\n\\n\\r\\n\\n\\nROMEO.\\r\\n\\n\\nNot I, unless the breath of heartsick groans\\r\\n\\n\\nMist-like infold me from the search of eyes.\\r\\n\\n\\n\\r\\n\\n\\n [_Knocking._]\\r\\n\\n\\n\\r\\n\\n\\nFRIAR LAWRENCE.\\r\\n\\n\\nHark, how they knock!—Who’s there?—Romeo, arise,\\r\\n\\n\\nThou wilt be taken.—Stay awhile.—Stand up.\\r\\n\\n\\n\\r\\n\\n\\n [_Knocking._]\\r\\n\\n\\n\\r\\n\\n\\nRun to my study.—By-and-by.—God’s will,\\r\\n\\n\\nWhat simpleness is this.—I come, I come.\\r\\n\\n\\n\\r\\n\\n\\n [_Knocking._]\\r\\n\\n\\n\\r\\n\\n\\nWho knocks so hard? Whence come you, what’s your will?\\r\\n\\n\\n\\r\\n\\n\\nNURSE.\\r\\n\\n\\n[_Within._] Let me come in, and you shall know my errand.\\r\\n\\n\\nI come from Lady Juliet.\\r\\n\\n\\n\\r\\n\\n\\nFRIAR LAWRENCE.\\r\\n\\n\\nWelcome then.\\r\\n\\n\\n\\r\\n\\n\\n Enter Nurse.\\r\\n\\n\\n\\r\\n\\n\\nNURSE.\\r\\n\\n\\nO holy Friar, O, tell me, holy Friar,\\r\\n\\n\\nWhere is my lady’s lord, where’s Romeo?\\r\\n\\n\\n\\r\\n\\n\\nFRIAR LAWRENCE.\\r\\n\\n\\nThere on the ground, with his own tears made drunk.\\r\\n\\n\\n\\r\\n\\n\\nNURSE.\\r\\n\\n\\nO, he is even in my mistress’ case.\\r\\n\\n\\nJust in her case! O woeful sympathy!\\r\\n\\n\\nPiteous predicament. Even so lies she,\\r\\n\\n\\nBlubbering and weeping, weeping and blubbering.\\r\\n\\n\\nStand up, stand up; stand, and you be a man.\\r\\n\\n\\nFor Juliet’s sake, for her sake, rise and stand.\\r\\n\\n\\nWhy should you fall into so deep an O?\\r\\n\\n\\n\\r\\n\\n\\nROMEO.\\r\\n\\n\\nNurse.\\r\\n\\n\\n\\r\\n\\n\\nNURSE.\\r\\n\\n\\nAh sir, ah sir, death’s the end of all.\\r\\n\\n\\n\\r\\n\\n\\nROMEO.\\r\\n\\n\\nSpakest thou of Juliet? How is it with her?\\r\\n\\n\\nDoth not she think me an old murderer,\\r\\n\\n\\nNow I have stain’d the childhood of our joy\\r\\n\\n\\nWith blood remov’d but little from her own?\\r\\n\\n\\nWhere is she? And how doth she? 
And what says\\r\\n\\n\\nMy conceal’d lady to our cancell’d love?\\r\\n\\n\\n\\r\\n\\n\\nNURSE.\\r\\n\\n\\nO, she says nothing, sir, but weeps and weeps;\\r\\n\\n\\nAnd now falls on her bed, and then starts up,\\r\\n\\n\\nAnd Tybalt calls, and then on Romeo cries,\\r\\n\\n\\nAnd then down falls again.\\r\\n\\n\\n\\r\\n\\n\\nROMEO.\\r\\n\\n\\nAs if that name,\\r\\n\\n\\nShot from the deadly level of a gun,\\r\\n\\n\\nDid murder her, as that name’s cursed hand\\r\\n\\n\\nMurder’d her kinsman.', metadata={'source': 'https://www.gutenberg.org/cache/epub/1513/pg1513.txt'}),\n", 328 | " Document(page_content='all have the chinks.\\r\\n\\n\\n\\r\\n\\n\\nROMEO.\\r\\n\\n\\nIs she a Capulet?\\r\\n\\n\\nO dear account! My life is my foe’s debt.\\r\\n\\n\\n\\r\\n\\n\\nBENVOLIO.\\r\\n\\n\\nAway, be gone; the sport is at the best.\\r\\n\\n\\n\\r\\n\\n\\nROMEO.\\r\\n\\n\\nAy, so I fear; the more is my unrest.\\r\\n\\n\\n\\r\\n\\n\\nCAPULET.\\r\\n\\n\\nNay, gentlemen, prepare not to be gone,\\r\\n\\n\\nWe have a trifling foolish banquet towards.\\r\\n\\n\\nIs it e’en so? Why then, I thank you all;\\r\\n\\n\\nI thank you, honest gentlemen; good night.\\r\\n\\n\\nMore torches here! Come on then, let’s to bed.\\r\\n\\n\\nAh, sirrah, by my fay, it waxes late,\\r\\n\\n\\nI’ll to my rest.\\r\\n\\n\\n\\r\\n\\n\\n [_Exeunt all but Juliet and Nurse._]\\r\\n\\n\\n\\r\\n\\n\\nJULIET.\\r\\n\\n\\nCome hither, Nurse. What is yond gentleman?\\r\\n\\n\\n\\r\\n\\n\\nNURSE.\\r\\n\\n\\nThe son and heir of old Tiberio.\\r\\n\\n\\n\\r\\n\\n\\nJULIET.\\r\\n\\n\\nWhat’s he that now is going out of door?\\r\\n\\n\\n\\r\\n\\n\\nNURSE.\\r\\n\\n\\nMarry, that I think be young Petruchio.\\r\\n\\n\\n\\r\\n\\n\\nJULIET.\\r\\n\\n\\nWhat’s he that follows here, that would not dance?\\r\\n\\n\\n\\r\\n\\n\\nNURSE.\\r\\n\\n\\nI know not.\\r\\n\\n\\n\\r\\n\\n\\nJULIET.\\r\\n\\n\\nGo ask his name. If he be married,\\r\\n\\n\\nMy grave is like to be my wedding bed.\\r\\n\\n\\n\\r\\n\\n\\nNURSE.\\r\\n\\n\\nHis name is Romeo, and a Montague,\\r\\n\\n\\nThe only son of your great enemy.\\r\\n\\n\\n\\r\\n\\n\\nJULIET.\\r\\n\\n\\nMy only love sprung from my only hate!\\r\\n\\n\\nToo early seen unknown, and known too late!\\r\\n\\n\\nProdigious birth of love it is to me,\\r\\n\\n\\nThat I must love a loathed enemy.\\r\\n\\n\\n\\r\\n\\n\\nNURSE.\\r\\n\\n\\nWhat’s this? 
What’s this?\\r\\n\\n\\n\\r\\n\\n\\nJULIET.\\r\\n\\n\\nA rhyme I learn’d even now\\r\\n\\n\\nOf one I danc’d withal.\\r\\n\\n\\n\\r\\n\\n\\n [_One calls within, ‘Juliet’._]\\r\\n\\n\\n\\r\\n\\n\\nNURSE.\\r\\n\\n\\nAnon, anon!\\r\\n\\n\\nCome let’s away, the strangers all are gone.\\r\\n\\n\\n\\r\\n\\n\\n [_Exeunt._]\\r\\n\\n\\n\\r\\n\\n\\n\\r\\n\\n\\n\\r\\n\\n\\nACT II\\r\\n\\n\\n\\r\\n\\n\\n Enter Chorus.\\r\\n\\n\\n\\r\\n\\n\\nCHORUS.\\r\\n\\n\\nNow old desire doth in his deathbed lie,\\r\\n\\n\\nAnd young affection gapes to be his heir;\\r\\n\\n\\nThat fair for which love groan’d for and would die,\\r\\n\\n\\nWith tender Juliet match’d, is now not fair.\\r\\n\\n\\nNow Romeo is belov’d, and loves again,\\r\\n\\n\\nAlike bewitched by the charm of looks;\\r\\n\\n\\nBut to his foe suppos’d he must complain,\\r\\n\\n\\nAnd she steal love’s sweet bait from fearful hooks:\\r\\n\\n\\nBeing held a foe, he may not have access\\r\\n\\n\\nTo breathe such vows as lovers use to swear;\\r\\n\\n\\nAnd she as much in love, her means much less\\r\\n\\n\\nTo meet her new beloved anywhere.\\r\\n\\n\\nBut passion lends them power, time means, to meet,\\r\\n\\n\\nTempering extremities with extreme sweet.\\r\\n\\n\\n\\r\\n\\n\\n [_Exit._]\\r\\n\\n\\n\\r\\n\\n\\nSCENE I. An open place adjoining Capulet’s Garden.\\r\\n\\n\\n\\r\\n\\n\\n Enter Romeo.\\r\\n\\n\\n\\r\\n\\n\\nROMEO.\\r\\n\\n\\nCan I go forward when my heart is here?\\r\\n\\n\\nTurn back, dull earth, and find thy centre out.\\r\\n\\n\\n\\r\\n\\n\\n [_He climbs the wall and leaps down within it._]\\r\\n\\n\\n\\r\\n\\n\\n Enter Benvolio and Mercutio.\\r\\n\\n\\n\\r\\n\\n\\nBENVOLIO.\\r\\n\\n\\nRomeo! My cousin Romeo', metadata={'source': 'https://www.gutenberg.org/cache/epub/1513/pg1513.txt'}),\n", 329 | " Document(page_content='all have the chinks.\\r\\n\\n\\n\\r\\n\\n\\nROMEO.\\r\\n\\n\\nIs she a Capulet?\\r\\n\\n\\nO dear account! My life is my foe’s debt.\\r\\n\\n\\n\\r\\n\\n\\nBENVOLIO.\\r\\n\\n\\nAway, be gone; the sport is at the best.\\r\\n\\n\\n\\r\\n\\n\\nROMEO.\\r\\n\\n\\nAy, so I fear; the more is my unrest.\\r\\n\\n\\n\\r\\n\\n\\nCAPULET.\\r\\n\\n\\nNay, gentlemen, prepare not to be gone,\\r\\n\\n\\nWe have a trifling foolish banquet towards.\\r\\n\\n\\nIs it e’en so? Why then, I thank you all;\\r\\n\\n\\nI thank you, honest gentlemen; good night.\\r\\n\\n\\nMore torches here! Come on then, let’s to bed.\\r\\n\\n\\nAh, sirrah, by my fay, it waxes late,\\r\\n\\n\\nI’ll to my rest.\\r\\n\\n\\n\\r\\n\\n\\n [_Exeunt all but Juliet and Nurse._]\\r\\n\\n\\n\\r\\n\\n\\nJULIET.\\r\\n\\n\\nCome hither, Nurse. What is yond gentleman?\\r\\n\\n\\n\\r\\n\\n\\nNURSE.\\r\\n\\n\\nThe son and heir of old Tiberio.\\r\\n\\n\\n\\r\\n\\n\\nJULIET.\\r\\n\\n\\nWhat’s he that now is going out of door?\\r\\n\\n\\n\\r\\n\\n\\nNURSE.\\r\\n\\n\\nMarry, that I think be young Petruchio.\\r\\n\\n\\n\\r\\n\\n\\nJULIET.\\r\\n\\n\\nWhat’s he that follows here, that would not dance?\\r\\n\\n\\n\\r\\n\\n\\nNURSE.\\r\\n\\n\\nI know not.\\r\\n\\n\\n\\r\\n\\n\\nJULIET.\\r\\n\\n\\nGo ask his name. If he be married,\\r\\n\\n\\nMy grave is like to be my wedding bed.\\r\\n\\n\\n\\r\\n\\n\\nNURSE.\\r\\n\\n\\nHis name is Romeo, and a Montague,\\r\\n\\n\\nThe only son of your great enemy.\\r\\n\\n\\n\\r\\n\\n\\nJULIET.\\r\\n\\n\\nMy only love sprung from my only hate!\\r\\n\\n\\nToo early seen unknown, and known too late!\\r\\n\\n\\nProdigious birth of love it is to me,\\r\\n\\n\\nThat I must love a loathed enemy.\\r\\n\\n\\n\\r\\n\\n\\nNURSE.\\r\\n\\n\\nWhat’s this? 
What’s this?\\r\\n\\n\\n\\r\\n\\n\\nJULIET.\\r\\n\\n\\nA rhyme I learn’d even now\\r\\n\\n\\nOf one I danc’d withal.\\r\\n\\n\\n\\r\\n\\n\\n [_One calls within, ‘Juliet’._]\\r\\n\\n\\n\\r\\n\\n\\nNURSE.\\r\\n\\n\\nAnon, anon!\\r\\n\\n\\nCome let’s away, the strangers all are gone.\\r\\n\\n\\n\\r\\n\\n\\n [_Exeunt._]\\r\\n\\n\\n\\r\\n\\n\\n\\r\\n\\n\\n\\r\\n\\n\\nACT II\\r\\n\\n\\n\\r\\n\\n\\n Enter Chorus.\\r\\n\\n\\n\\r\\n\\n\\nCHORUS.\\r\\n\\n\\nNow old desire doth in his deathbed lie,\\r\\n\\n\\nAnd young affection gapes to be his heir;\\r\\n\\n\\nThat fair for which love groan’d for and would die,\\r\\n\\n\\nWith tender Juliet match’d, is now not fair.\\r\\n\\n\\nNow Romeo is belov’d, and loves again,\\r\\n\\n\\nAlike bewitched by the charm of looks;\\r\\n\\n\\nBut to his foe suppos’d he must complain,\\r\\n\\n\\nAnd she steal love’s sweet bait from fearful hooks:\\r\\n\\n\\nBeing held a foe, he may not have access\\r\\n\\n\\nTo breathe such vows as lovers use to swear;\\r\\n\\n\\nAnd she as much in love, her means much less\\r\\n\\n\\nTo meet her new beloved anywhere.\\r\\n\\n\\nBut passion lends them power, time means, to meet,\\r\\n\\n\\nTempering extremities with extreme sweet.\\r\\n\\n\\n\\r\\n\\n\\n [_Exit._]\\r\\n\\n\\n\\r\\n\\n\\nSCENE I. An open place adjoining Capulet’s Garden.\\r\\n\\n\\n\\r\\n\\n\\n Enter Romeo.\\r\\n\\n\\n\\r\\n\\n\\nROMEO.\\r\\n\\n\\nCan I go forward when my heart is here?\\r\\n\\n\\nTurn back, dull earth, and find thy centre out.\\r\\n\\n\\n\\r\\n\\n\\n [_He climbs the wall and leaps down within it._]\\r\\n\\n\\n\\r\\n\\n\\n Enter Benvolio and Mercutio.\\r\\n\\n\\n\\r\\n\\n\\nBENVOLIO.\\r\\n\\n\\nRomeo! My cousin Romeo', metadata={'source': 'https://www.gutenberg.org/cache/epub/1513/pg1513.txt'})]" 330 | ] 331 | }, 332 | "metadata": {}, 333 | "execution_count": 182 334 | } 335 | ] 336 | }, 337 | { 338 | "cell_type": "code", 339 | "source": [ 340 | "result[\"answer\"]" 341 | ], 342 | "metadata": { 343 | "colab": { 344 | "base_uri": "https://localhost:8080/", 345 | "height": 88 346 | }, 347 | "id": "DuCV56Nuv3lG", 348 | "outputId": "2f686592-4955-4c20-cde4-9020eea31a04" 349 | }, 350 | "execution_count": null, 351 | "outputs": [ 352 | { 353 | "output_type": "execute_result", 354 | "data": { 355 | "text/plain": [ 356 | "\"There is no clear indication in the text that Romeo and Juliet spent the night together. In Act II, Scene 2, Romeo climbs over the Capulet's garden wall and enters the garden where he sees Juliet on her balcony. They exchange vows of love and plan to marry, but there is no mention of them spending the night together. Later in Act III, Scene 5, Juliet's mother urges her to marry Paris, and Juliet refuses, stating that she is already married to Romeo. This suggests that they did indeed marry, but again, there is no mention of them spending the night together. Overall, the text is ambiguous on this point and leaves it up to interpretation.\"" 357 | ], 358 | "application/vnd.google.colaboratory.intrinsic+json": { 359 | "type": "string" 360 | } 361 | }, 362 | "metadata": {}, 363 | "execution_count": 183 364 | } 365 | ] 366 | }, 367 | { 368 | "cell_type": "code", 369 | "source": [], 370 | "metadata": { 371 | "id": "AJMUgnY5hq3V" 372 | }, 373 | "execution_count": null, 374 | "outputs": [] 375 | } 376 | ] 377 | } --------------------------------------------------------------------------------
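
A note on the vector-database notebook above: its `ChatVectorDBChain` call produces the LangChain deprecation warnings captured in the cell output, and the notebook already imports `ConversationalRetrievalChain` and `ChatOpenAI` without using them. The sketch below shows roughly what the non-deprecated equivalent would look like; it assumes the notebook's `persist_directory`, `embeddings`, `query`, and `chat_history` objects are in scope and is an untested illustration, not a drop-in cell.

# Minimal sketch of the non-deprecated retrieval chain (assumes `embeddings`,
# `persist_directory`, `chat_history`, and `query` from the notebook above).
from langchain.chat_models import ChatOpenAI
from langchain.chains import ConversationalRetrievalChain
from langchain.vectorstores import Chroma

# Reopen the persisted Chroma collection instead of rebuilding it from the text.
vectordb = Chroma(persist_directory=persist_directory, embedding_function=embeddings)

# ConversationalRetrievalChain replaces ChatVectorDBChain, and ChatOpenAI replaces
# OpenAI(model_name="gpt-3.5-turbo"), which avoids the warnings shown above.
romeoandjuliet_qa = ConversationalRetrievalChain.from_llm(
    ChatOpenAI(temperature=0, model_name="gpt-3.5-turbo"),
    retriever=vectordb.as_retriever(),
    return_source_documents=True,
)

result = romeoandjuliet_qa({"question": query, "chat_history": chat_history})
print(result["answer"])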
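
Similarly, the whisper notebook above notes that Whisper can't take a file larger than 25 MB, then exports only the first ten minutes of `good_morning.mp3` without transcribing it. The sketch below is one assumed way to finish that step using the same local `whisper` and `pydub` calls the notebook already uses; the file name follows the notebook's example and the ten-minute chunk size is an arbitrary choice.

# Sketch: transcribe a long recording chunk by chunk with pydub and the local
# Whisper model (the input file name is taken from the notebook's example).
import whisper
from pydub import AudioSegment

model = whisper.load_model("base")
song = AudioSegment.from_mp3("good_morning.mp3")

chunk_ms = 10 * 60 * 1000  # pydub measures time in milliseconds
texts = []
for i, start in enumerate(range(0, len(song), chunk_ms)):
    chunk_path = f"good_morning_part{i}.mp3"
    song[start:start + chunk_ms].export(chunk_path, format="mp3")
    texts.append(model.transcribe(chunk_path, fp16=False)["text"])

print(" ".join(texts))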