[num_results]")
1055 | sys.exit(1)
1056 |
1057 | query = sys.argv[1]
1058 | num_results = int(sys.argv[2]) if len(sys.argv) > 2 else 10
1059 |
1060 | results = WebSearch_Tool(query, num_results)
1061 |
1062 | # Convert the results to JSON and print
1063 | print(json.dumps(results, indent=2))
1064 | ```
1065 |
1066 |
--------------------------------------------------------------------------------
/Groqqle.py:
--------------------------------------------------------------------------------
1 | import streamlit as st
2 | import json
3 | from PIL import Image
4 | import base64
5 | from agents.Web_Agent import Web_Agent
6 | import os
7 | from dotenv import load_dotenv
8 | import sys
9 | from flask import Flask, request, jsonify
10 |
11 | load_dotenv()
12 |
def get_groq_api_key():
    """Resolve the Groq API key from the environment or the Streamlit session.

    Checks the GROQ_API_KEY environment variable first; if unset, falls back
    to a value previously entered in this session, and otherwise prompts the
    user with a password input, caching whatever they type.

    Returns:
        str | None: The API key, or a falsy value if none is available yet.
    """
    api_key = os.getenv('GROQ_API_KEY')

    if api_key:
        # Environment wins; mirror it into the session for later reruns.
        st.session_state.groq_api_key = api_key
        return api_key

    if 'groq_api_key' in st.session_state:
        # Previously entered by the user on an earlier rerun.
        return st.session_state.groq_api_key

    # No key anywhere yet: ask the user for one.
    st.warning("Groq API Key not found. Please enter your API key below:")
    entered = st.text_input("Groq API Key", type="password")
    if entered:
        st.session_state.groq_api_key = entered
    return entered
28 |
def main():
    """Render the Groqqle Streamlit UI: page config, logo, search bar, results."""
    st.set_page_config(page_title="Groqqle", layout="wide")

    # NOTE(review): the CSS/HTML payload of this st.markdown call (original
    # lines 34-86) appears to have been stripped by text extraction — restore
    # from version control before relying on this block.
    st.markdown("""

    """, unsafe_allow_html=True)

    api_key = get_groq_api_key()

    if not api_key:
        st.error("Please provide a valid Groq API Key to use the application.")
        return

    # NOTE(review): 'agent' is unused below; perform_search constructs its own
    # Web_Agent from session state.
    agent = Web_Agent(api_key)

    # NOTE(review): HTML argument looks stripped by extraction.
    st.markdown('', unsafe_allow_html=True)

    st.image("images/logo.png", width=272)

    # Pressing Enter in the box triggers perform_search via on_change.
    query = st.text_input("Search query", key="search_bar", on_change=perform_search, label_visibility="collapsed")

    col1, col2, col3 = st.columns([2,1,2])
    with col1:
        if st.button("Groqqle Search", key="search_button"):
            perform_search()
    with col3:
        json_results = st.checkbox("JSON Results", value=False, key="json_results")

    # NOTE(review): this literal spans a line break and is not valid Python as
    # shown; the markup content was likely lost in extraction — restore from VCS.
    st.markdown('
', unsafe_allow_html=True)

    if st.session_state.get('search_results'):
        display_results(st.session_state.search_results, json_results)
114 |
def perform_search():
    """Run a web search for the current search-bar text and cache the results.

    No-op unless the search bar is non-empty and a Groq API key is already
    stored in the session. Results land in st.session_state.search_results.
    """
    search_text = st.session_state.search_bar
    if not search_text or 'groq_api_key' not in st.session_state:
        return
    with st.spinner('Searching...'):
        agent = Web_Agent(st.session_state.groq_api_key)
        st.session_state.search_results = agent.process_request(search_text)
121 |
def display_results(results, json_format=False):
    """Render search results either as raw JSON or as formatted markdown.

    Args:
        results: List of result dicts produced by Web_Agent.process_request.
        json_format: When True, dump the raw results via st.json instead.
    """
    if results:
        st.markdown("---")
        # NOTE(review): HTML argument looks stripped by extraction.
        st.markdown('', unsafe_allow_html=True)
        st.markdown("### Search Results")
        # NOTE(review): literal spans a line break — not valid Python as shown;
        # restore the original markup from version control.
        st.markdown('
', unsafe_allow_html=True)

        if json_format:
            st.json(results)
        else:
            st.markdown('', unsafe_allow_html=True)
            for result in results:
                # NOTE(review): the per-result HTML template (original lines
                # 136-139) is missing from this view.
                st.markdown(f"""

""", unsafe_allow_html=True)
            st.markdown('
', unsafe_allow_html=True)
    else:
        st.markdown("No results found.")
144 |
def create_api_app():
    """Build the Flask app exposing POST /search.

    The endpoint expects a JSON body {"query": "..."} and returns the
    Web_Agent results as JSON. The Groq API key is read from the
    GROQ_API_KEY environment variable, never from the request.

    Returns:
        Flask: The configured application (not yet running).
    """
    app = Flask(__name__)

    @app.route('/search', methods=['POST'])
    def api_search():
        # silent=True: a missing/invalid JSON body yields None instead of a
        # framework-raised 400/415, so we always return our documented error
        # shape. (request.json would raise before we could respond.)
        data = request.get_json(silent=True) or {}
        query = data.get('query')
        if not query:
            return jsonify({"error": "No query provided"}), 400

        api_key = os.getenv('GROQ_API_KEY')
        if not api_key:
            return jsonify({"error": "Groq API Key not set"}), 500

        agent = Web_Agent(api_key)
        results = agent.process_request(query)
        return jsonify(results)

    return app
164 |
if __name__ == "__main__":
    # "python Groqqle.py api" starts the Flask API; anything else runs the UI.
    if sys.argv[1:2] == ['api']:
        create_api_app().run(debug=True, port=5000)
    else:
        main()
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Groqqle: Your AI-Powered Search Engine
2 |
3 | 
4 |
5 |
6 | Groqqle is an innovative, AI-powered search engine that combines the power of large language models with web search capabilities. It offers both a user-friendly web interface and a robust API for seamless integration into your projects.
7 |
8 | [](https://opensource.org/licenses/MIT)
9 | [](https://www.python.org/downloads/)
10 |
11 | ## 🌟 Features
12 |
13 | - 🔍 Advanced search capabilities powered by AI
14 | - 🖥️ Intuitive web interface for easy searching
15 | - 🚀 Fast and efficient results using Groq's high-speed inference
16 | - 🔌 RESTful API for programmatic access
17 | - 🔒 Secure handling of API keys
18 | - 📊 Option to view results in JSON format
19 | - 🔄 Extensible architecture for multiple AI providers
20 |
21 | 
22 |
23 | ## 🛠️ Installation
24 |
25 | 1. Clone the repository:
26 | ```
27 | git clone https://github.com/jgravelle/Groqqle.git
28 | cd Groqqle
29 | ```
30 |
31 | 2. Set up a virtual environment:
32 | ```
33 | python -m venv venv
34 | source venv/bin/activate # On Windows, use `venv\Scripts\activate`
35 | ```
36 |
37 | 3. Install the required packages:
38 | ```
39 | pip install -r requirements.txt
40 | ```
41 |
42 | 4. Set up your environment variables:
43 | Create a `.env` file in the project root and add your Groq API key:
44 | ```
45 | GROQ_API_KEY=your_api_key_here
46 | ```
47 |
48 | ## 🚀 Usage
49 |
50 | ### Web Interface
51 |
52 | 1. Start the Groqqle application:
53 | ```
54 | streamlit run Groqqle.py
55 | ```
56 |
57 | 2. Open your web browser and navigate to `http://localhost:8501`.
58 |
59 | 3. Enter your search query in the search bar and click "Groqqle Search" or press Enter.
60 |
61 | 4. View your results! Toggle the "JSON Results" checkbox to see the raw JSON data.
62 |
63 | 
64 |
65 | ### API
66 |
67 | The Groqqle API allows you to programmatically access search results. Here's how to use it:
68 |
69 | 1. Start the Groqqle application in API mode:
70 | ```
71 | python Groqqle.py api
72 | ```
73 |
74 | 2. The API server will start running on `http://127.0.0.1:5000`.
75 |
76 | 3. Send a POST request to `http://127.0.0.1:5000/search` with the following JSON body:
77 | ```json
78 | {
79 | "query": "your search query"
80 | }
81 | ```
82 |
83 | Note: The API key is now managed through environment variables, so you don't need to include it in the request.
84 |
85 | 4. The API will return a JSON response with your search results.
86 |
87 | Example using Python's `requests` library:
88 |
89 | ```python
90 | import requests
91 |
92 | url = "http://127.0.0.1:5000/search"
93 | data = {
94 | "query": "Groq"
95 | }
96 | response = requests.post(url, json=data)
97 | results = response.json()
print(results)
```

Make sure you have set the `GROQ_API_KEY` in your environment variables or `.env` file before starting the API server.
102 |
103 | 
104 |
105 | ## 🔄 AI Providers
106 |
107 | While Groqqle is optimized for use with Groq's lightning-fast inference capabilities, we've also included stubbed-out provider code for Anthropic. This demonstrates how easily other AI providers can be integrated into the system.
108 |
109 | Please note that while other providers can be added, they may not match the exceptional speed offered by Groq. Groq's high-speed inference is a key feature that sets Groqqle apart in terms of performance.
110 |
111 | ## 🤝 Contributing
112 |
113 | We welcome contributions to Groqqle! Here's how you can help:
114 |
115 | 1. Fork the repository
116 | 2. Create your feature branch (`git checkout -b feature/AmazingFeature`)
117 | 3. Commit your changes (`git commit -m 'Add some AmazingFeature'`)
118 | 4. Push to the branch (`git push origin feature/AmazingFeature`)
119 | 5. Open a Pull Request
120 |
121 | Please make sure to update tests as appropriate and adhere to the [Code of Conduct](CODE_OF_CONDUCT.md).
122 |
123 | ## 📄 License
124 |
125 | Distributed under the MIT License. See `LICENSE` file for more information. Mention J. Gravelle in your docs (README, etc.) and/or code. He's kind of full of himself.
126 |
127 | ## 📞 Contact
128 |
129 | J. Gravelle - j@gravelle.us - https://j.gravelle.us
130 |
131 | Project Link: [https://github.com/jgravelle/Groqqle](https://github.com/jgravelle/Groqqle)
132 |
133 | ## 🙏 Acknowledgements
134 |
135 | - [Groq](https://groq.com/) for their powerful and incredibly fast language models
136 | - [Streamlit](https://streamlit.io/) for the amazing web app framework
- [Flask](https://flask.palletsprojects.com/) for the lightweight API framework
138 | - [Beautiful Soup](https://www.crummy.com/software/BeautifulSoup/) for web scraping capabilities
139 |
140 | 
141 |
--------------------------------------------------------------------------------
/agents/Base_Agent.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 | from typing import Any
3 | from providers.provider_factory import ProviderFactory
4 |
class Base_Agent(ABC):
    """Abstract base for agents backed by an LLM provider.

    Concrete agents implement process_request(); this base supplies the
    provider/model wiring and a generic summarization helper.
    """

    def __init__(self, api_key):
        # Provider and model come from the factory so agents stay
        # provider-agnostic.
        self.provider = ProviderFactory.get_provider(api_key)
        self.model = ProviderFactory.get_model()

    @abstractmethod
    def process_request(self, request: str) -> Any:
        """Process the user's request and return a response.

        Args:
            request: The user's request to be processed.

        Returns:
            The processed response.
        """
        ...

    def _create_summary_prompt(self, content: str, user_request: str) -> str:
        """Build the LLM prompt asking for a summary of *content* that
        directly addresses *user_request*.
        """
        return f"""
Given the following content:
{content}

Respond to the user's request: "{user_request}"

Provide a concise and relevant summary that directly addresses the user's request.
"""

    def _summarize_content(self, content: str, user_request: str) -> str:
        """Ask the provider to summarize *content* in light of *user_request*."""
        prompt = self._create_summary_prompt(content, user_request)
        return self.provider.generate(prompt)
--------------------------------------------------------------------------------
/agents/Web_Agent.py:
--------------------------------------------------------------------------------
1 | import os
2 | from tools.web_tools.WebSearch_Tool import WebSearch_Tool
3 | from tools.web_tools.WebGetContents_Tool import WebGetContents_Tool
4 | from tools.web_tools.WebGetLinks_Tool import WebGetLinks_Tool
5 | from agents.Base_Agent import Base_Agent
6 | from providers.provider_factory import ProviderFactory
7 |
class Web_Agent(Base_Agent):
    """Agent that answers a request by web-searching, filtering skip-listed
    domains, deduplicating by URL, and returning the top results."""

    # Results whose URL contains any of these domains are dropped.
    SKIP_DOMAINS = [
        'reddit.com',
        # Add more domains to skip here
    ]

    def __init__(self, api_key):
        super().__init__(api_key)
        self.tools = self._initialize_tools()

    def process_request(self, user_request: str) -> list:
        """Top-level entry point.

        Returns:
            list: Result dicts with 'title', 'url', 'description' keys.
                  Never raises; failures come back as a one-element error list.
        """
        try:
            return self._process_web_search(user_request)
        except Exception as e:
            if os.environ.get('DEBUG') == 'True':
                print(f"Error in Web_Agent: {str(e)}")
            return [{"title": "Error", "url": "", "description": f"An error occurred while processing your request: {str(e)}"}]

    def _process_web_search(self, user_request: str) -> list:
        """Search, filter skip-listed domains, dedupe, and cap at 10 results."""
        search_results = self._perform_web_search(user_request)
        if not search_results:
            return [{"title": "No Results", "url": "", "description": "I'm sorry, but I couldn't find any relevant information for your request."}]

        filtered_results = self._filter_search_results(search_results)
        if not filtered_results:
            return [{"title": "No Results", "url": "", "description": "I found some results, but they were all from domains I've been instructed to skip. Could you try rephrasing your request?"}]

        deduplicated_results = self._remove_duplicates(filtered_results)
        return deduplicated_results[:10]  # Return top 10 unique results

    def _initialize_tools(self):
        """Name -> callable map of the web tools this agent may use."""
        return {
            "WebSearch_Tool": WebSearch_Tool,
            "WebGetContents_Tool": WebGetContents_Tool,
            "WebGetLinks_Tool": WebGetLinks_Tool
        }

    def _perform_web_search(self, query: str):
        return self.tools["WebSearch_Tool"](query, 20)  # Request 20 results to account for filtering

    def _filter_search_results(self, results):
        """Drop results whose URL matches a skip-listed domain.

        Uses .get so a result missing 'url' cannot raise KeyError here.
        """
        return [result for result in results
                if not any(domain in result.get('url', '') for domain in self.SKIP_DOMAINS)]

    def _remove_duplicates(self, results):
        """Keep the first occurrence of each URL, preserving order."""
        seen_urls = set()
        unique_results = []
        for result in results:
            url = result.get('url')
            if url not in seen_urls:
                seen_urls.add(url)
                unique_results.append(result)
        return unique_results

    def _get_web_content(self, url: str) -> str:
        """Fetch the text content of *url* via WebGetContents_Tool."""
        return self.tools["WebGetContents_Tool"](url)

    def _summarize_web_content(self, content: str, user_request: str, url: str, description: str) -> str:
        """Summarize one fetched page with the LLM provider."""
        summary_prompt = self._create_summary_prompt(content, user_request, url, description)
        return self.provider.generate(summary_prompt)

    def _create_summary_prompt(self, content: str, user_request: str, url: str, description: str) -> str:
        """Build the per-page summary prompt.

        Content is truncated to its first 2000 characters to avoid exceeding
        token limits. (The truncation note previously lived *inside* the
        f-string, so the literal text "# Limit content..." leaked into the
        prompt sent to the model — it has been moved out here.)
        """
        return f"""
Given the following web content from {url}:
Description: {description}
Content: {content[:2000]}

Respond to the user's request: "{user_request}"

Provide a concise and relevant summary that directly addresses the user's request.
Use simple, direct language and focus only on the most pertinent information.
"""

    def _combine_summaries(self, summaries: list, user_request: str) -> str:
        """Merge per-page summaries into one coherent answer via the provider."""
        combined_prompt = f"""
Given the following summaries from multiple sources:
{' '.join(summaries)}

Respond to the user's request: "{user_request}"

Provide a concise, coherent response that addresses the user's request using the information from the summaries.
Focus on the most relevant and important points, and present the information in a clear and organized manner.
"""
        return self.provider.generate(combined_prompt)
--------------------------------------------------------------------------------
/agents/__pycache__/Base_Agent.cpython-310.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tonyoconnell/Groqqle/5a127a70dde4b8d869eb2b90e0df7c8af2b2ec8c/agents/__pycache__/Base_Agent.cpython-310.pyc
--------------------------------------------------------------------------------
/agents/__pycache__/Web_Agent.cpython-310.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tonyoconnell/Groqqle/5a127a70dde4b8d869eb2b90e0df7c8af2b2ec8c/agents/__pycache__/Web_Agent.cpython-310.pyc
--------------------------------------------------------------------------------
/image-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tonyoconnell/Groqqle/5a127a70dde4b8d869eb2b90e0df7c8af2b2ec8c/image-1.png
--------------------------------------------------------------------------------
/image-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tonyoconnell/Groqqle/5a127a70dde4b8d869eb2b90e0df7c8af2b2ec8c/image-2.png
--------------------------------------------------------------------------------
/image-3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tonyoconnell/Groqqle/5a127a70dde4b8d869eb2b90e0df7c8af2b2ec8c/image-3.png
--------------------------------------------------------------------------------
/image-4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tonyoconnell/Groqqle/5a127a70dde4b8d869eb2b90e0df7c8af2b2ec8c/image-4.png
--------------------------------------------------------------------------------
/image-5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tonyoconnell/Groqqle/5a127a70dde4b8d869eb2b90e0df7c8af2b2ec8c/image-5.png
--------------------------------------------------------------------------------
/image.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tonyoconnell/Groqqle/5a127a70dde4b8d869eb2b90e0df7c8af2b2ec8c/image.png
--------------------------------------------------------------------------------
/images/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tonyoconnell/Groqqle/5a127a70dde4b8d869eb2b90e0df7c8af2b2ec8c/images/logo.png
--------------------------------------------------------------------------------
/providers/__pycache__/anthropic_provider.cpython-310.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tonyoconnell/Groqqle/5a127a70dde4b8d869eb2b90e0df7c8af2b2ec8c/providers/__pycache__/anthropic_provider.cpython-310.pyc
--------------------------------------------------------------------------------
/providers/__pycache__/base_provider.cpython-310.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tonyoconnell/Groqqle/5a127a70dde4b8d869eb2b90e0df7c8af2b2ec8c/providers/__pycache__/base_provider.cpython-310.pyc
--------------------------------------------------------------------------------
/providers/__pycache__/groq_provider.cpython-310.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tonyoconnell/Groqqle/5a127a70dde4b8d869eb2b90e0df7c8af2b2ec8c/providers/__pycache__/groq_provider.cpython-310.pyc
--------------------------------------------------------------------------------
/providers/__pycache__/provider_factory.cpython-310.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tonyoconnell/Groqqle/5a127a70dde4b8d869eb2b90e0df7c8af2b2ec8c/providers/__pycache__/provider_factory.cpython-310.pyc
--------------------------------------------------------------------------------
/providers/anthropic_provider.py:
--------------------------------------------------------------------------------
1 | import anthropic
2 | import os
3 | from typing import Dict, Any
4 |
5 | from providers.base_provider import BaseLLMProvider
6 |
class AnthropicProvider(BaseLLMProvider):
    """LLM provider backed by the Anthropic Messages API."""

    def __init__(self):
        # The key is environment-only; fail fast when it is absent.
        self.api_key = os.environ.get('ANTHROPIC_API_KEY')
        if not self.api_key:
            raise ValueError("Anthropic API key is not set in environment variables")
        self.api_url = "https://api.anthropic.com/v1/messages"
        self.client = anthropic.Anthropic(api_key=self.api_key)

    def generate(self, prompt: str) -> str:
        """Send *prompt* as a single user message and return the reply text."""
        payload = {
            "model": os.environ.get('ANTHROPIC_MODEL', 'claude-3-5-sonnet-20240620'),
            "messages": [{"role": "user", "content": prompt}]
        }
        shaped = self.process_response(self.send_request(payload))
        return shaped['choices'][0]['message']['content']

    def get_available_models(self) -> Dict[str, int]:
        """Supported model ids mapped to their max_tokens ceilings."""
        return {
            "claude-3-5-sonnet-20240620": 4096,
            "claude-3-opus-20240229": 4096,
            "claude-3-sonnet-20240229": 4096,
            "claude-3-haiku-20240307": 4096,
            "claude-2.1": 100000,
            "claude-2.0": 100000,
            "claude-instant-1.2": 100000,
        }

    def process_response(self, response: Any) -> Dict[str, Any]:
        """Normalize the SDK response into an OpenAI-style 'choices' dict.

        Returns None when the raw response is None.
        """
        if response is None:
            return None
        reply_text = response.content[0].text
        return {"choices": [{"message": {"content": reply_text}}]}

    def send_request(self, data: Dict[str, Any]) -> Any:
        """Call the Messages API, capping max_tokens at the model's limit.

        Raises:
            Exception: Wraps any anthropic.APIError with a descriptive message.
        """
        try:
            model = data['model']
            ceiling = self.get_available_models()[model]
            max_tokens = min(data.get('max_tokens', 4096), ceiling)
            messages = [
                {"role": "user", "content": message["content"]}
                for message in data['messages']
            ]
            return self.client.messages.create(
                model=model,
                max_tokens=max_tokens,
                temperature=data.get('temperature', 0.1),
                messages=messages,
            )
        except anthropic.APIError as e:
            if os.environ.get('DEBUG') == 'True':
                print(f"Anthropic API error: {e}")
            raise Exception(f"Anthropic API error: {str(e)}")
--------------------------------------------------------------------------------
/providers/base_provider.py:
--------------------------------------------------------------------------------
1 |
2 | from abc import ABC, abstractmethod
3 |
class BaseLLMProvider(ABC):
    """Abstract interface every LLM provider must implement.

    Agents (see Base_Agent._summarize_content) call generate() on a provider,
    so it is declared abstract here alongside the transport halves —
    send_request() sends a raw payload, process_response() normalizes the
    raw reply. Both existing providers already implement all three.
    """

    @abstractmethod
    def generate(self, prompt):
        """Return the model's text completion for *prompt*."""
        pass

    @abstractmethod
    def send_request(self, data):
        """Send a raw request payload to the provider; return the raw response."""
        pass

    @abstractmethod
    def process_response(self, response):
        """Convert a raw provider response into the normalized dict shape."""
        pass
12 |
--------------------------------------------------------------------------------
/providers/groq_provider.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import requests
4 |
5 | from providers.base_provider import BaseLLMProvider
6 |
7 | DEBUG = os.environ.get('DEBUG') == 'True'
8 |
class Groq_Provider(BaseLLMProvider):
    """LLM provider for Groq's OpenAI-compatible chat-completions endpoint."""

    def __init__(self, api_key, api_url=None):
        self.api_key = api_key
        if not self.api_key:
            raise ValueError("Groq API key is not provided")
        # Default to the public endpoint unless an override is supplied.
        self.api_url = api_url or "https://api.groq.com/openai/v1/chat/completions"

    def generate(self, prompt):
        """Return the model's reply to a single-user-message chat of *prompt*."""
        payload = {
            "model": os.environ.get('GROQ_MODEL', 'mixtral-8x7b-32768'),
            "messages": [{"role": "user", "content": prompt}]
        }
        parsed = self.process_response(self.send_request(payload))
        return parsed['choices'][0]['message']['content']

    def get_available_models(self):
        """List the model ids available to this API key.

        Raises:
            Exception: On any non-200 response from the models endpoint.
        """
        if DEBUG:
            print("GROQ: get_available_models")
        response = requests.get("https://api.groq.com/openai/v1/models", headers={
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json",
        })
        if response.status_code != 200:
            raise Exception(f"Failed to retrieve models: {response.status_code}")
        return [model["id"] for model in response.json().get("data", [])]

    def process_response(self, response):
        """Decode a successful HTTP response; raise on any non-200 status."""
        if response.status_code != 200:
            raise Exception(f"Request failed with status code {response.status_code}")
        return response.json()

    def send_request(self, data):
        """POST *data* (dict or pre-serialized JSON string) to the chat endpoint."""
        headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json",
        }
        body = json.dumps(data) if isinstance(data, dict) else data
        return requests.post(self.api_url, data=body, headers=headers)
--------------------------------------------------------------------------------
/providers/provider_factory.py:
--------------------------------------------------------------------------------
1 | import os
2 | from providers.groq_provider import Groq_Provider
3 |
class ProviderFactory:
    """Central place for choosing the LLM provider and the default model."""

    @staticmethod
    def get_provider(api_key):
        """Return the configured provider instance (currently always Groq)."""
        return Groq_Provider(api_key)

    @staticmethod
    def get_model():
        """Default model name, overridable via the DEFAULT_MODEL env var."""
        return os.getenv('DEFAULT_MODEL', 'mixtral-8x7b-32768')
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | streamlit
2 | Pillow
3 | python-dotenv
4 | requests
5 | beautifulsoup4
6 | anthropic
7 | fastapi
8 | uvicorn
9 | Flask
--------------------------------------------------------------------------------
/tools/Base_Tool.py:
--------------------------------------------------------------------------------
1 | # tools/Base_Tool.py
2 |
3 | from abc import ABC, abstractmethod
4 | from typing import Any, Dict, Optional
5 |
class Base_Tool(ABC):
    """Common scaffolding for tools: validation, formatting, error handling."""

    @abstractmethod
    def execute(self, *args, **kwargs) -> Any:
        """Run the tool's main functionality and return its result.

        Args:
            *args: Variable length argument list.
            **kwargs: Arbitrary keyword arguments.

        Returns:
            The result of the tool's execution.
        """
        ...

    def _validate_input(self, data: Dict[str, Any]) -> Optional[str]:
        """Return an error message for bad input, or None when valid.

        The base implementation accepts everything; subclasses override to
        impose their own requirements.
        """
        return None

    def _format_output(self, result: Any) -> Dict[str, Any]:
        """Wrap a raw result in the standard {"result": ...} envelope."""
        return {"result": result}

    def _handle_error(self, error: Exception) -> str:
        """Render *error* as the standard user-facing message string."""
        return f"An error occurred: {str(error)}"
--------------------------------------------------------------------------------
/tools/web_tools/Weather_US_Tool.py:
--------------------------------------------------------------------------------
1 | # tools/web_tools/Weather_US_Tool.py
2 |
3 | # Fetches weather information for a given US city or city/state combination.
4 |
5 | import os
6 | import sys
7 | import requests
8 | from typing import Dict, Any, Optional
9 | from urllib.parse import quote_plus
10 |
11 | sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
12 | from tools.Base_Tool import Base_Tool
13 | DEBUG = os.environ.get('DEBUG') == 'True'
14 |
class Weather_US_Tool(Base_Tool):
    """Fetches weather for a US city (or city/state) via weathermateplus.com."""

    BASE_URL = "https://weathermateplus.com/api/location/"

    def execute(self, address: str) -> Dict[str, Any]:
        """Look up current conditions and the first forecast day for *address*.

        Args:
            address: The city or city/state combination to fetch weather for.

        Returns:
            A dict with 'location', 'current', and 'forecast' keys, or an
            error dict ({"error": ..., "status": "error"}) on failure.
        """
        try:
            request_url = f"{self.BASE_URL}?address={quote_plus(address)}"

            response = requests.get(request_url)
            response.raise_for_status()
            payload = response.json()

            return self._format_output(self._extract_relevant_data(payload))
        except requests.RequestException as e:
            return self._handle_error(f"Error fetching weather data: {str(e)}")
        except KeyError as e:
            return self._handle_error(f"Error parsing weather data: {str(e)}")

    def _validate_input(self, data: Dict[str, str]) -> Optional[str]:
        """Require a string 'address' field; return an error message otherwise."""
        if 'address' not in data:
            return "Address is required."
        if not isinstance(data['address'], str):
            return "Address must be a string."
        return None

    def _extract_relevant_data(self, data: Dict[str, Any]) -> Dict[str, Any]:
        """Pull location, current observation, and the first forecast day.

        Raises KeyError (caught by execute) when the API shape is unexpected.
        """
        days = data["days"]
        return {
            "location": data["location"],
            "currentObservation": data["currentObservation"],
            "day1": days[0] if days else None,
        }

    def _format_output(self, result: Dict[str, Any]) -> Dict[str, Any]:
        """Flatten the extracted data into the tool's response shape."""
        observation = result["currentObservation"]
        formatted = {
            "location": result["location"]["areaDescription"],
            "current": {
                "temperature": observation["temperature"],
                "weather": observation["weather"],
                "windSpeed": observation["windSpeed"],
                "windDirection": observation["windDirection"],
            },
            "forecast": {},
        }

        day1 = result["day1"]
        if day1:
            formatted["forecast"] = {
                "temperature": day1["temperature"],
                "shortForecast": day1["shortForecast"],
                "windSpeed": day1["windSpeed"],
                "windDirection": day1["windDirection"],
                "precipitationProbability": day1["probabilityOfPrecipitation"],
            }

        return formatted

    def _handle_error(self, error_message: str) -> Dict[str, str]:
        """Package an error message in the standard error-dict shape."""
        if DEBUG:
            print(f"Weather_US_Tool error: {error_message}")
        return {"error": error_message, "status": "error"}
83 |
if __name__ == "__main__":
    # Simple CLI smoke test: prompt for a location and pretty-print the result.
    tool = Weather_US_Tool()
    location_query = input("Enter a US city or city, state: ")
    report = tool.execute(location_query)
    if "error" in report:
        print(f"Error: {report['error']}")
    else:
        current = report['current']
        forecast = report['forecast']
        print(f"Weather for {report['location']}:")
        print(f"Current: {current['temperature']}°F, {current['weather']}")
        print(f"Wind: {current['windSpeed']} mph {current['windDirection']}")
        print("\nForecast:")
        print(f"Temperature: {forecast['temperature']}°F")
        print(f"Conditions: {forecast['shortForecast']}")
        print(f"Wind: {forecast['windSpeed']} {forecast['windDirection']}")
        print(f"Precipitation Probability: {forecast['precipitationProbability']}")
--------------------------------------------------------------------------------
/tools/web_tools/WebGetContents_Tool.py:
--------------------------------------------------------------------------------
1 | # tools/web_tools/WebGetContents_Tool.py
2 |
3 | # Returns the text content of a web page given its URL
4 | # No API key required
5 |
6 | import os
7 | import requests
8 | import sys
9 | from bs4 import BeautifulSoup
10 |
11 | sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
12 | DEBUG = os.environ.get('DEBUG') == 'True'
13 |
def WebGetContents_Tool(URL):
    """Return the visible text content of the page at *URL*, or None on error.

    Sends browser-like headers, strips <script>/<style> elements, collapses
    whitespace, and joins the remaining non-blank lines with newlines.
    No API key required.

    Args:
        URL (str): Absolute URL of the page to fetch.

    Returns:
        str | None: Cleaned page text, or None if the request failed.
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
        # '*/*' restores a well-formed catch-all media range; the previous
        # value 'image/webp,/;q=0.8' was not a valid Accept member.
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
        'Accept-Language': 'en-US,en;q=0.5',
        'Referer': 'https://www.google.com/',
        'DNT': '1',
        'Connection': 'keep-alive',
        'Upgrade-Insecure-Requests': '1',
    }

    try:
        response = requests.get(URL, headers=headers, timeout=10)
        response.raise_for_status()

        soup = BeautifulSoup(response.text, 'html.parser')

        # Remove script and style elements so only rendered text remains.
        for script in soup(["script", "style"]):
            script.decompose()

        text = soup.get_text()

        # Break into lines and remove leading and trailing space on each.
        lines = (line.strip() for line in text.splitlines())
        # Split on double spaces so side-by-side headlines become separate
        # lines; splitting on a single space would put every word on its own
        # line, defeating the join below.
        chunks = (phrase.strip() for line in lines for phrase in line.split("  "))
        # Drop blank lines.
        text = '\n'.join(chunk for chunk in chunks if chunk)

        if DEBUG:
            print(f"Successfully retrieved content from {URL}")
            print(f"Content preview: {text[:4000]}...")

        return text

    except requests.RequestException as e:
        error_message = f"Error retrieving content from {URL}: {str(e)}"
        if DEBUG:
            print(error_message)
        return None
55 |
if __name__ == "__main__":
    # CLI entry point: fetch the URL given as the sole argument and print its text.
    # NOTE(review): the usage string looks truncated ("<URL>" placeholder
    # likely lost in extraction) — confirm against version control.
    import sys
    if len(sys.argv) != 2:
        print("Usage: WebGetContents_Tool.py ")
        sys.exit(1)

    url = sys.argv[1]
    content = WebGetContents_Tool(url)
    if content:
        print(content)  # Print the full extracted text
    else:
        print("Failed to retrieve content")
--------------------------------------------------------------------------------
/tools/web_tools/WebGetLinks_Tool.py:
--------------------------------------------------------------------------------
1 | # tools/web_tools/WebGetLinks_Tool.py
2 |
3 | # Extracts links from a web page using BeautifulSoup
4 | # No API key required
5 |
6 | import os
7 | import requests
8 | import sys
9 |
10 | from bs4 import BeautifulSoup
11 |
12 | sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
13 | DEBUG = os.environ.get('DEBUG') == 'True'
14 |
def WebGetLinks_Tool(URL):
    """Extract all hyperlinks from a web page.

    Args:
        URL (str): Address of the page to scan.

    Returns:
        list[tuple[str, str]] | str: A list of (link text, href) pairs on
        success, or an error-message string if the request failed.
    """
    try:
        # Mimic a web browser. The header key must be 'User-Agent';
        # the previous 'User-Tool' key was a typo and ignored by servers.
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
        }

        # Send a GET request; time out rather than hang on a dead server.
        response = requests.get(URL, headers=headers, timeout=10)

        # Raise an exception for bad status codes.
        response.raise_for_status()

        # Parse the HTML content.
        soup = BeautifulSoup(response.text, 'html.parser')

        # Collect (text, href) for every anchor that carries an href.
        links = []
        for a in soup.find_all('a', href=True):
            text = a.text.strip()
            target = a['href']
            links.append((text, target))

        if DEBUG:
            print(f"Found {len(links)} links on the page")
            for text, target in links:
                print(f"Text: {text}")
                print(f"Target: {target}")

        return links

    except requests.RequestException as e:
        # Return the error as a string; callers distinguish via isinstance.
        return f"An error occurred: {str(e)}"
49 |
if __name__ == "__main__":
    # CLI: list every link found on the page at the given URL.
    if len(sys.argv) != 2:
        # The <URL> placeholder had been lost from the usage message.
        print("Usage: WebGetLinks_Tool.py <URL>")
        sys.exit(1)

    url = sys.argv[1]
    links = WebGetLinks_Tool(url)

    if isinstance(links, str):
        print(links)  # A string return means the request failed.
    else:
        for text, target in links:
            print(f"Text: {text}")
            print(f"Target: {target}")
            print("---")
--------------------------------------------------------------------------------
/tools/web_tools/WebGetStocks_Tool.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import os
3 | import sys
4 | from bs4 import BeautifulSoup
5 | from typing import Dict, Optional
6 | import random
7 | import time
8 | import unittest
9 | from unittest.mock import patch, Mock
10 |
11 | sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
12 | from tools.Base_Tool import Base_Tool
13 |
class WebGetStocks_Tool(Base_Tool):
    """Scrapes a current stock quote for a symbol from MarketWatch."""

    def execute(self, symbol: str) -> Optional[Dict[str, str]]:
        """
        Retrieves stock information for a given symbol from MarketWatch.

        Args:
            symbol (str): The stock symbol to look up.

        Returns:
            Optional[Dict[str, str]]: A dictionary containing the stock
            information, or an {'error': ...} dictionary if validation or
            the request fails.
        """
        # Validate up front; _validate_input was defined but never invoked,
        # so malformed symbols previously reached the network layer.
        validation_error = self._validate_input({'symbol': symbol})
        if validation_error:
            return self._handle_error(validation_error)

        session = requests.Session()

        # Rotate browser user-agent strings to look less like a bot.
        user_agents = [
            "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
            "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:89.0) Gecko/20100101 Firefox/89.0",
            "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Safari/605.1.15",
            "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36 Edg/91.0.864.59",
        ]

        headers = {
            "User-Agent": random.choice(user_agents),
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
            "Accept-Language": "en-US,en;q=0.5",
            "Accept-Encoding": "gzip, deflate, br",
            "DNT": "1",
            "Connection": "keep-alive",
            "Upgrade-Insecure-Requests": "1",
        }

        # Visit the home page first so the session acquires cookies.
        try:
            session.get("https://www.marketwatch.com/", headers=headers, timeout=10)
        except requests.RequestException as e:
            return self._handle_error(f"Error accessing MarketWatch home page: {str(e)}")

        # Short random pause to mimic human browsing.
        time.sleep(random.uniform(1, 3))

        url = f"https://www.marketwatch.com/investing/stock/{symbol.lower()}"

        try:
            response = session.get(url, headers=headers, timeout=10)
            response.raise_for_status()
            soup = BeautifulSoup(response.text, 'html.parser')

            # Every quote field lives inside the intraday container.
            intraday_element = soup.find('div', class_='element element--intraday')

            if not intraday_element:
                return self._handle_error(f"Could not find intraday element for {symbol}")

            stock_info = {
                'symbol': symbol.upper(),
                'last_price': self._safe_find(intraday_element, 'bg-quote', class_='value'),
                'change': self._safe_find(intraday_element, 'span', class_='change--point--q'),
                'change_percent': self._safe_find(intraday_element, 'span', class_='change--percent--q'),
                'volume': self._safe_find(intraday_element, 'span', class_='volume__value'),
                'last_updated': self._safe_find(intraday_element, 'span', class_='timestamp__time'),
                'close_price': self._safe_find(intraday_element, 'td', class_='table__cell u-semi'),
                'close_change': self._safe_find_nth(intraday_element, 'td', class_='table__cell', n=1),
                'close_change_percent': self._safe_find_nth(intraday_element, 'td', class_='table__cell', n=2)
            }

            # Remove any None values so callers only see populated fields.
            stock_info = {k: v for k, v in stock_info.items() if v is not None}

            return self._format_output(stock_info)

        except requests.RequestException as e:
            return self._handle_error(f"Error retrieving stock information for {symbol}: {str(e)}")

    def _safe_find(self, element, tag, class_=None, default='N/A'):
        """Return the stripped text of the first matching element, or *default*."""
        found = element.find(tag, class_=class_)
        return found.text.strip() if found else default

    def _safe_find_nth(self, element, tag, class_=None, n=0, default='N/A'):
        """Return the stripped text of the nth matching element, or *default*."""
        found = element.find_all(tag, class_=class_)
        return found[n].text.strip() if len(found) > n else default

    def _validate_input(self, data: Dict[str, str]) -> Optional[str]:
        """Return an error message for a missing/malformed 'symbol', else None."""
        if 'symbol' not in data:
            return "Stock symbol is required."
        if not isinstance(data['symbol'], str) or len(data['symbol']) > 5:
            return "Invalid stock symbol format."
        return None

    def _format_output(self, result: Dict[str, str]) -> Dict[str, str]:
        """Pass-through hook: the scraped dict is already in output form."""
        return result

    def _handle_error(self, error_message: str) -> Dict[str, str]:
        """Wrap an error message in the standard error-dict shape."""
        return {"error": error_message}
105 |
class TestWebGetStocksTool(unittest.TestCase):
    """Unit tests for WebGetStocks_Tool using mocked HTTP responses."""

    def setUp(self):
        self.tool = WebGetStocks_Tool()
        self.test_symbol = "AAPL"
        # Minimal MarketWatch-style markup containing every element the
        # scraper reads. The element tags had been stripped from an earlier
        # copy of this fixture, which made the mocked tests fail.
        self.mock_html = """
        <div class="element element--intraday">
            <bg-quote class="value">150.00</bg-quote>
            <span class="change--point--q">+2.50</span>
            <span class="change--percent--q">+1.69%</span>
            <span class="volume__value">50,000,000</span>
            <span class="timestamp__time">4:00PM EDT</span>
            <table>
                <tr>
                    <td class="table__cell u-semi">147.50</td>
                    <td class="table__cell">+2.50</td>
                    <td class="table__cell">+1.69%</td>
                </tr>
            </table>
        </div>
        """

    # time.sleep is patched so tests skip the tool's 1-3 s anti-bot delay.
    @patch('time.sleep', return_value=None)
    @patch('requests.Session')
    def test_successful_stock_retrieval(self, mock_session, _mock_sleep):
        mock_response = Mock()
        mock_response.text = self.mock_html
        mock_response.raise_for_status.return_value = None
        mock_session.return_value.get.return_value = mock_response

        result = self.tool.execute(self.test_symbol)

        self.assertEqual(result['symbol'], 'AAPL')
        self.assertEqual(result['last_price'], '150.00')
        self.assertEqual(result['change'], '+2.50')
        self.assertEqual(result['change_percent'], '+1.69%')
        self.assertEqual(result['volume'], '50,000,000')

    @patch('requests.Session')
    def test_request_exception(self, mock_session):
        mock_session.return_value.get.side_effect = requests.RequestException("Connection error")

        result = self.tool.execute(self.test_symbol)

        self.assertIn('error', result)
        self.assertIn('Connection error', result['error'])

    @patch('time.sleep', return_value=None)
    @patch('requests.Session')
    def test_missing_intraday_element(self, mock_session, _mock_sleep):
        mock_response = Mock()
        mock_response.text = ""
        mock_response.raise_for_status.return_value = None
        mock_session.return_value.get.return_value = mock_response

        result = self.tool.execute(self.test_symbol)

        self.assertIn('error', result)
        self.assertIn('Could not find intraday element', result['error'])

    def test_validate_input_valid(self):
        data = {"symbol": "AAPL"}
        result = self.tool._validate_input(data)
        self.assertIsNone(result)

    def test_validate_input_missing_symbol(self):
        data = {}
        result = self.tool._validate_input(data)
        self.assertEqual(result, "Stock symbol is required.")

    def test_validate_input_invalid_symbol(self):
        data = {"symbol": "TOOLONG"}
        result = self.tool._validate_input(data)
        self.assertEqual(result, "Invalid stock symbol format.")

    def test_safe_find(self):
        soup = BeautifulSoup(self.mock_html, 'html.parser')
        element = soup.find('div', class_='element element--intraday')
        result = self.tool._safe_find(element, 'bg-quote', class_='value')
        self.assertEqual(result, '150.00')

    def test_safe_find_nth(self):
        soup = BeautifulSoup(self.mock_html, 'html.parser')
        element = soup.find('div', class_='element element--intraday')
        result = self.tool._safe_find_nth(element, 'td', class_='table__cell', n=1)
        self.assertEqual(result, '+2.50')

    def test_format_output(self):
        input_data = {'symbol': 'AAPL', 'last_price': '150.00'}
        result = self.tool._format_output(input_data)
        self.assertEqual(result, input_data)

    def test_handle_error(self):
        error_message = "Test error"
        result = self.tool._handle_error(error_message)
        self.assertEqual(result, {"error": "Test error"})
195 |
def print_example_command():
    """Show a sample CLI invocation, then note that the tests will run."""
    sample = "python tools/web_tools/WebGetStocks_Tool.py AAPL"
    print("\nExample command line to run the WebGetStocks_Tool:")
    print(sample)
    print("\n\r Running tests on this tool...")
200 |
if __name__ == "__main__":
    # A symbol argument means "run the tool once"; no argument means
    # "run the unit test suite".
    cli_args = sys.argv[1:]
    if cli_args:
        print(WebGetStocks_Tool().execute(cli_args[0]))
    else:
        print_example_command()
        unittest.main()
212 |
213 |
--------------------------------------------------------------------------------
/tools/web_tools/WebSearch_Tool.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from bs4 import BeautifulSoup
3 | import sys
4 | import json
5 |
def WebSearch_Tool(query, num_results=10):
    """Scrape Google web-search results for a query.

    Args:
        query (str): Search terms (raw, unencoded).
        num_results (int): Maximum number of results to return (default 10).

    Returns:
        list[dict] | dict: A list of {'title', 'url', 'description'} dicts,
        or {'error': message} if the request failed.
    """
    from urllib.parse import quote_plus  # stdlib; local import keeps module imports unchanged

    # URL-encode the query; interpolating it raw broke the request URL for
    # queries containing spaces or special characters.
    url = f"https://www.google.com/search?q={quote_plus(query)}&num={num_results}"
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
    }

    try:
        # Time out rather than hang on an unresponsive server.
        response = requests.get(url, headers=headers, timeout=10)
        response.raise_for_status()

        soup = BeautifulSoup(response.text, 'html.parser')
        search_results = soup.find_all('div', class_='g')

        results = []
        for result in search_results:
            item = {}

            # Extract the title; skip this result if there's no title.
            title_element = result.find('h3', class_='LC20lb')
            if title_element:
                item['title'] = title_element.get_text(strip=True)
            else:
                continue

            # Extract the URL; skip this result if there's no URL.
            link_element = result.find('a')
            if link_element:
                item['url'] = link_element['href']
            else:
                continue

            # Extract the description, if present.
            desc_element = result.find('div', class_='VwiC3b')
            if desc_element:
                item['description'] = desc_element.get_text(strip=True)
            else:
                item['description'] = "No description available"

            results.append(item)

        return results[:num_results]  # Ensure we don't return more than requested.

    except requests.RequestException as e:
        return {"error": str(e)}
50 |
if __name__ == "__main__":
    # CLI: search for the given query and print the results as JSON.
    if len(sys.argv) < 2:
        # The <query> placeholder had been lost from the usage message.
        print("Usage: WebSearch_Tool.py <query> [num_results]")
        sys.exit(1)

    query = sys.argv[1]
    num_results = int(sys.argv[2]) if len(sys.argv) > 2 else 10

    results = WebSearch_Tool(query, num_results)

    # Convert the results to JSON and print.
    print(json.dumps(results, indent=2))
--------------------------------------------------------------------------------
/tools/web_tools/__pycache__/Weather_US_Tool.cpython-311.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tonyoconnell/Groqqle/5a127a70dde4b8d869eb2b90e0df7c8af2b2ec8c/tools/web_tools/__pycache__/Weather_US_Tool.cpython-311.pyc
--------------------------------------------------------------------------------
/tools/web_tools/__pycache__/Weather_US_Tool.cpython-312.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tonyoconnell/Groqqle/5a127a70dde4b8d869eb2b90e0df7c8af2b2ec8c/tools/web_tools/__pycache__/Weather_US_Tool.cpython-312.pyc
--------------------------------------------------------------------------------
/tools/web_tools/__pycache__/WebGetContents_Tool.cpython-310.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tonyoconnell/Groqqle/5a127a70dde4b8d869eb2b90e0df7c8af2b2ec8c/tools/web_tools/__pycache__/WebGetContents_Tool.cpython-310.pyc
--------------------------------------------------------------------------------
/tools/web_tools/__pycache__/WebGetContents_Tool.cpython-311.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tonyoconnell/Groqqle/5a127a70dde4b8d869eb2b90e0df7c8af2b2ec8c/tools/web_tools/__pycache__/WebGetContents_Tool.cpython-311.pyc
--------------------------------------------------------------------------------
/tools/web_tools/__pycache__/WebGetContents_Tool.cpython-312.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tonyoconnell/Groqqle/5a127a70dde4b8d869eb2b90e0df7c8af2b2ec8c/tools/web_tools/__pycache__/WebGetContents_Tool.cpython-312.pyc
--------------------------------------------------------------------------------
/tools/web_tools/__pycache__/WebGetLinks_Tool.cpython-310.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tonyoconnell/Groqqle/5a127a70dde4b8d869eb2b90e0df7c8af2b2ec8c/tools/web_tools/__pycache__/WebGetLinks_Tool.cpython-310.pyc
--------------------------------------------------------------------------------
/tools/web_tools/__pycache__/WebGetLinks_Tool.cpython-311.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tonyoconnell/Groqqle/5a127a70dde4b8d869eb2b90e0df7c8af2b2ec8c/tools/web_tools/__pycache__/WebGetLinks_Tool.cpython-311.pyc
--------------------------------------------------------------------------------
/tools/web_tools/__pycache__/WebGetLinks_Tool.cpython-312.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tonyoconnell/Groqqle/5a127a70dde4b8d869eb2b90e0df7c8af2b2ec8c/tools/web_tools/__pycache__/WebGetLinks_Tool.cpython-312.pyc
--------------------------------------------------------------------------------
/tools/web_tools/__pycache__/WebGetStocks_Tool.cpython-311.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tonyoconnell/Groqqle/5a127a70dde4b8d869eb2b90e0df7c8af2b2ec8c/tools/web_tools/__pycache__/WebGetStocks_Tool.cpython-311.pyc
--------------------------------------------------------------------------------
/tools/web_tools/__pycache__/WebGetStocks_Tool.cpython-312.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tonyoconnell/Groqqle/5a127a70dde4b8d869eb2b90e0df7c8af2b2ec8c/tools/web_tools/__pycache__/WebGetStocks_Tool.cpython-312.pyc
--------------------------------------------------------------------------------
/tools/web_tools/__pycache__/WebSearch_Tool.cpython-310.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tonyoconnell/Groqqle/5a127a70dde4b8d869eb2b90e0df7c8af2b2ec8c/tools/web_tools/__pycache__/WebSearch_Tool.cpython-310.pyc
--------------------------------------------------------------------------------
/tools/web_tools/__pycache__/WebSearch_Tool.cpython-311.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tonyoconnell/Groqqle/5a127a70dde4b8d869eb2b90e0df7c8af2b2ec8c/tools/web_tools/__pycache__/WebSearch_Tool.cpython-311.pyc
--------------------------------------------------------------------------------
/tools/web_tools/__pycache__/WebSearch_Tool.cpython-312.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tonyoconnell/Groqqle/5a127a70dde4b8d869eb2b90e0df7c8af2b2ec8c/tools/web_tools/__pycache__/WebSearch_Tool.cpython-312.pyc
--------------------------------------------------------------------------------