├── swarms_tools
    ├── utils
    │   ├── __init__.py
    │   ├── format_dict_into_str.py
    │   └── formatted_string.py
    ├── communication
    │   ├── __init__.py
    │   └── agent_sdk.py
    ├── devs
    │   ├── __init__.py
    │   └── code_executor.py
    ├── healthcare
    │   └── __init__.py
    ├── social_media
    │   ├── __init__.py
    │   ├── telegram_api.py
    │   └── discord.py
    ├── structs
    │   ├── __init__.py
    │   └── tool_chainer.py
    ├── character
    │   ├── __init__.py
    │   └── synthesia_tool.py
    ├── __init__.py
    ├── search
    │   ├── __init__.py
    │   ├── msg_notify_user.py
    │   ├── self_evolve.py
    │   ├── browser_use_tool.py
    │   ├── bing.py
    │   ├── searp_search.py
    │   ├── exa_search.py
    │   └── tavily_search.py
    └── finance
    │   ├── eodh_api.py
    │   ├── defillama_mcp_tools.py
    │   ├── __init__.py
    │   ├── macro_tool.py
    │   ├── cookies_fun.py
    │   ├── jupiter.py
    │   ├── defillama_mcp_clients.py
    │   ├── geckoterminal.py
    │   ├── coin_market_cap.py
    │   ├── okx_tool.py
    │   ├── coingecko_tool.py
    │   ├── check_solana_address.py
    │   ├── yahoo_finance.py
    │   ├── htx_tool.py
    │   ├── helius_api.py
    │   ├── jupiter_tools.py
    │   └── sector_analysis.py
├── requirements.txt
├── examples
    ├── finance
    │   ├── stocks
    │   │   ├── eodh_example.py
    │   │   └── yahoo_finance_api.py
    │   ├── crypto
    │   │   ├── htx_tool_example.py
    │   │   └── coin_market_cap.py
    │   └── solana
    │   │   ├── dex_screener.py
    │   │   ├── coingecko_api.py
    │   │   ├── check_solana_address.py
    │   │   └── dex_screener_example.py
    ├── web_scraping
    │   ├── firecrawl_example.py
    │   ├── basic
    │   │   ├── web_scraper_example.py
    │   │   ├── markdown_scraper_example.py
    │   │   ├── news_scraper_example.py
    │   │   └── README.md
    │   ├── advanced
    │   │   ├── multiple_urls_scraper_example.py
    │   │   ├── minimal_scraper_example.py
    │   │   ├── README.md
    │   │   └── custom_config_scraper_example.py
    │   ├── use_cases
    │   │   ├── ecommerce_scraper_example.py
    │   │   ├── README.md
    │   │   └── content_analysis_scraper_example.py
    │   └── README.md
    ├── misc
    │   ├── example_solana_tool.py
    │   ├── tool_chainer_example.py
    │   ├── defillama_mcp_api.py
    │   ├── agent_sdk_example.py
    │   ├── spfs_storage_example.py
    │   ├── mcs_auto_reply.py
    │   └── attps_example.py
    ├── search
    │   ├── example_communicate.py
    │   ├── bing_search_example.py
    │   └── exa_search_example.py
    └── devs
    │   └── code_executor_example.py
├── .github
    ├── workflows
    │   ├── ruff.yml
    │   ├── pull-request-links.yml
    │   ├── docs.yml
    │   ├── lints.yml
    │   ├── testing.yml
    │   ├── quality.yml
    │   ├── pr_request_checks.yml
    │   ├── label.yml
    │   ├── run_test.yml
    │   ├── welcome.yml
    │   ├── pylint.yml
    │   ├── docs_test.yml
    │   ├── unit-test.yml
    │   ├── python-publish.yml
    │   ├── code_quality_control.yml
    │   ├── stale.yml
    │   ├── cos_integration.yml
    │   └── test.yml
    ├── dependabot.yml
    ├── ISSUE_TEMPLATE
    │   ├── feature_request.md
    │   └── bug_report.md
    ├── FUNDING.yml
    ├── PULL_REQUEST_TEMPLATE.yml
    └── labeler.yml
├── exa_search_api.py
├── news_scraper_example.py
├── LICENSE
├── .env.example
├── pyproject.toml
├── .gitignore
└── tests
    └── test_github.py
/swarms_tools/utils/__init__.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/swarms_tools/communication/__init__.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/swarms_tools/devs/__init__.py:
--------------------------------------------------------------------------------
1 | 
2 | 
--------------------------------------------------------------------------------
/swarms_tools/healthcare/__init__.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/swarms_tools/social_media/__init__.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | httpx
2 | loguru
3 | rich
4 | serpapi
5 | tavily
6 | orjson
7 | mcp
8 | lxml
9 | swarms
10 | pydantic
--------------------------------------------------------------------------------
/swarms_tools/structs/__init__.py:
--------------------------------------------------------------------------------
1 | from swarms_tools.structs.tool_chainer import tool_chainer
2 | 
3 | __all__ = ["tool_chainer"]
4 | 
--------------------------------------------------------------------------------
/examples/finance/stocks/eodh_example.py:
--------------------------------------------------------------------------------
1 | from swarms_tools.finance.eodh_api import fetch_stock_news
2 | 
3 | print(fetch_stock_news("ETH"))
4 | 
--------------------------------------------------------------------------------
/swarms_tools/character/__init__.py:
--------------------------------------------------------------------------------
1 | from swarms_tools.character.synthesia_tool import synthesia_api
2 | 
3 | __all__ = ["synthesia_api"]
4 | 
--------------------------------------------------------------------------------
/examples/finance/crypto/htx_tool_example.py:
--------------------------------------------------------------------------------
1 | from swarms_tools.finance.htx_tool import fetch_htx_data
2 | 
3 | print(fetch_htx_data("swarms"))
4 | 
--------------------------------------------------------------------------------
/swarms_tools/__init__.py:
--------------------------------------------------------------------------------
1 | from dotenv import load_dotenv
2 | 
3 | load_dotenv()
4 | 
5 | from swarms_tools.search import *
6 | from swarms_tools.structs import *
7 | 
--------------------------------------------------------------------------------
/.github/workflows/ruff.yml:
--------------------------------------------------------------------------------
1 | name: Ruff
2 | on: [ push, pull_request ]
3 | jobs:
4 |   ruff:
5 |     runs-on: ubuntu-latest
6 |     steps:
7 |       - uses: actions/checkout@v5
8 |       - uses: chartboost/ruff-action@v1
--------------------------------------------------------------------------------
/examples/finance/solana/dex_screener.py:
--------------------------------------------------------------------------------
1 | from swarms_tools.finance.dex_screener import (
2 |     fetch_latest_token_boosts,
3 |     fetch_dex_screener_profiles,
4 | )
5 | 
6 | fetch_dex_screener_profiles()
7 | fetch_latest_token_boosts()
8 | 
--------------------------------------------------------------------------------
/examples/finance/solana/coingecko_api.py:
--------------------------------------------------------------------------------
1 | from swarms_tools.finance.coingecko_tool import (
2 |     coin_gecko_coin_api,
3 | )
4 | 
5 | if __name__ == "__main__":
6 |     # Example: Fetch data for Bitcoin
7 |     print(coin_gecko_coin_api("bitcoin"))
8 | 
--------------------------------------------------------------------------------
/exa_search_api.py:
--------------------------------------------------------------------------------
1 | from swarms_tools.search.exa_search import exa_search
2 | 
3 | print(
4 |     exa_search(
5 |         "What are the best performing semiconductor stocks?",
6 |         characters=100,
7 |         sources=2,
8 |     )
9 | )
10 | 
--------------------------------------------------------------------------------
/news_scraper_example.py:
--------------------------------------------------------------------------------
1 | from swarms_tools.search.web_scraper import scrape_and_format_sync
2 | 
3 | # Scrape a news article with detailed information
4 | url = "https://swarms.ai"
5 | content = scrape_and_format_sync(
6 |     url,
7 | )
8 | 
9 | print(content)
10 | 
--------------------------------------------------------------------------------
/swarms_tools/utils/format_dict_into_str.py:
--------------------------------------------------------------------------------
1 | def format_dict_into_str(d: dict) -> str:
2 |     """
3 |     Formats every key and value in the dictionary as 'key: value' per line, with a blank line between each pair.
4 |     """
5 |     return "\n\n".join(f"{k}: {v}" for k, v in d.items())
6 | 
--------------------------------------------------------------------------------
/examples/web_scraping/firecrawl_example.py:
--------------------------------------------------------------------------------
1 | from swarms_tools.search.firecrawl import crawl_entire_site_firecrawl
2 | 
3 | content = crawl_entire_site_firecrawl(
4 |     "https://swarms.ai",
5 |     limit=1,
6 |     formats=["markdown"],
7 |     max_wait_time=600,
8 | )
9 | 
10 | print(content)
11 | 
--------------------------------------------------------------------------------
/.github/workflows/pull-request-links.yml:
--------------------------------------------------------------------------------
1 | name: readthedocs/actions
2 | on:
3 |   pull_request_target:
4 |     types:
5 |       - opened
6 |     paths:
7 |       - "docs/**"
8 | 
9 | permissions:
10 |   pull-requests: write
11 | 
12 | jobs:
13 |   pull-request-links:
14 |     runs-on: ubuntu-latest
15 |     steps:
16 |       - uses: readthedocs/actions/preview@v1
17 |         with:
18 |           project-slug: swarms_torch
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/configuration-options-for-dependency-updates
2 | 
3 | version: 2
4 | updates:
5 |   - package-ecosystem: "github-actions"
6 |     directory: "/"
7 |     schedule:
8 |       interval: "weekly"
9 | 
10 |   - package-ecosystem: "pip"
11 |     directory: "/"
12 |     schedule:
13 |       interval: "weekly"
14 | 
15 | 
--------------------------------------------------------------------------------
/examples/misc/example_solana_tool.py:
--------------------------------------------------------------------------------
1 | import orjson
2 | 
3 | from swarms_tools.finance.unified_solana_coin_api import (
4 |     fetch_solana_coin_info,
5 | )
6 | 
7 | if __name__ == "__main__":
8 | 
9 |     result = fetch_solana_coin_info(
10 |         ids="74SBV4zDXxTRgv1pEMoECskKBkZHc2yGPnc7GYVepump", # Example token address
11 |         show_extra_info=True,
12 |     )
13 | 
14 |     print(orjson.dumps(result, option=orjson.OPT_INDENT_2).decode())
15 | 
--------------------------------------------------------------------------------
/examples/web_scraping/basic/web_scraper_example.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | 
3 | from swarms_tools.search.web_scraper import (
4 |     scrape_single_url,
5 |     format_scraped_content,
6 | )
7 | 
8 | 
9 | async def _run():
10 |     url = "https://httpbin.org/html"
11 |     content = await scrape_single_url(url)
12 |     formatted = format_scraped_content(content, "summary")
13 |     print(formatted)
14 | 
15 | 
16 | if __name__ == "__main__":
17 |     asyncio.run(_run())
18 | 
--------------------------------------------------------------------------------
/.github/workflows/docs.yml:
--------------------------------------------------------------------------------
1 | name: Docs WorkFlow
2 | 
3 | on:
4 |   push:
5 |     branches:
6 |       - master
7 |       - main
8 |       - develop
9 | jobs:
10 |   deploy:
11 |     runs-on: ubuntu-latest
12 |     steps:
13 |       - uses: actions/checkout@v5
14 |       - uses: actions/setup-python@v6
15 |         with:
16 |           python-version: '3.10'
17 |       - run: pip install mkdocs-material
18 |       - run: pip install "mkdocstrings[python]"
19 |       - run: mkdocs gh-deploy --force
--------------------------------------------------------------------------------
/examples/finance/solana/check_solana_address.py:
--------------------------------------------------------------------------------
1 | from swarms_tools.finance.check_solana_address import (
2 |     check_solana_balance,
3 |     check_multiple_wallets,
4 | )
5 | 
6 | print(
7 |     check_solana_balance(
8 |         "7MaX4muAn8ZQREJxnupm8sgokwFHujgrGfH9Qn81BuEV"
9 |     )
10 | )
11 | print(
12 |     check_multiple_wallets(
13 |         [
14 |             "7MaX4muAn8ZQREJxnupm8sgokwFHujgrGfH9Qn81BuEV",
15 |             "7MaX4muAn8ZQREJxnupm8sgokwFHujgrGfH9Qn81BuEV",
16 |         ]
17 |     )
18 | )
19 | 
--------------------------------------------------------------------------------
/.github/workflows/lints.yml:
--------------------------------------------------------------------------------
1 | name: Linting
2 | 
3 | on:
4 |   push:
5 |     branches:
6 |       - master
7 | 
8 | jobs:
9 |   lint:
10 |     runs-on: ubuntu-latest
11 | 
12 |     steps:
13 |       - name: Checkout code
14 |         uses: actions/checkout@v5
15 | 
16 |       - name: Set up Python
17 |         uses: actions/setup-python@v6
18 |         with:
19 |           python-version: '3.10'
20 | 
21 |       - name: Install dependencies
22 |         run: pip install --no-cache-dir -r requirements.txt
23 | 
24 |       - name: Run linters
25 |         run: pylint swarms_tools
--------------------------------------------------------------------------------
/.github/workflows/testing.yml:
--------------------------------------------------------------------------------
1 | name: Unit Tests
2 | 
3 | on:
4 |   push:
5 |     branches:
6 |       - master
7 | 
8 | jobs:
9 |   test:
10 |     runs-on: ubuntu-latest
11 | 
12 |     steps:
13 |       - name: Checkout code
14 |         uses: actions/checkout@v5
15 | 
16 |       - name: Set up Python
17 |         uses: actions/setup-python@v6
18 |         with:
19 |           python-version: '3.10'
20 | 
21 |       - name: Install dependencies
22 |         run: pip install --no-cache-dir -r requirements.txt
23 | 
24 |       - name: Run unit tests
25 |         run: pytest tests/
--------------------------------------------------------------------------------
/.github/workflows/quality.yml:
--------------------------------------------------------------------------------
1 | name: Quality
2 | 
3 | on:
4 |   push:
5 |     branches: [ "main" ]
6 |   pull_request:
7 |     branches: [ "main" ]
8 | 
9 | jobs:
10 |   lint:
11 |     runs-on: ubuntu-latest
12 |     strategy:
13 |       fail-fast: false
14 |     steps:
15 |       - name: Checkout actions
16 |         uses: actions/checkout@v5
17 |         with:
18 |           fetch-depth: 0
19 |       - name: Init environment
20 |         uses: ./.github/actions/init-environment
21 |       - name: Run linter
22 |         run: |
23 |           pylint `git diff --name-only --diff-filter=d origin/main HEAD | grep -E '\.py$' | tr '\n' ' '`
--------------------------------------------------------------------------------
/swarms_tools/search/__init__.py:
--------------------------------------------------------------------------------
1 | from swarms_tools.search.exa_search import exa_search
2 | from swarms_tools.search.web_scraper import (
3 |     scrape_and_format_sync,
4 |     scrape_multiple_urls_sync,
5 | )
6 | from swarms_tools.search.firecrawl import crawl_entire_site_firecrawl
7 | from swarms_tools.search.self_evolve import modify_file_content
8 | from swarms_tools.search.msg_notify_user import notify_user
9 | 
10 | __all__ = [
11 |     "exa_search",
12 |     "scrape_and_format_sync",
13 |     "scrape_multiple_urls_sync",
14 |     "crawl_entire_site_firecrawl",
15 |     "modify_file_content",
16 |     "notify_user",
17 | ]
18 | 
--------------------------------------------------------------------------------
/.github/workflows/pr_request_checks.yml:
--------------------------------------------------------------------------------
1 | name: Pull Request Checks
2 | 
3 | on:
4 |   pull_request:
5 |     branches:
6 |       - master
7 | 
8 | jobs:
9 |   test:
10 |     runs-on: ubuntu-latest
11 | 
12 |     steps:
13 |       - name: Checkout code
14 |         uses: actions/checkout@v5
15 | 
16 |       - name: Set up Python
17 |         uses: actions/setup-python@v6
18 |         with:
19 |           python-version: '3.10'
20 | 
21 |       - name: Install dependencies
22 |         run: pip install --no-cache-dir -r requirements.txt
23 | 
24 |       - name: Run tests and checks
25 |         run: |
26 |           pytest tests/
27 |           pylint swarms_tools
--------------------------------------------------------------------------------
/.github/workflows/label.yml:
--------------------------------------------------------------------------------
1 | # This workflow will triage pull requests and apply a label based on the
2 | # paths that are modified in the pull request.
3 | #
4 | # To use this workflow, you will need to set up a .github/labeler.yml
5 | # file with configuration. For more information, see:
6 | # https://github.com/actions/labeler
7 | 
8 | name: Labeler
9 | on: [pull_request_target]
10 | 
11 | jobs:
12 |   label:
13 | 
14 |     runs-on: ubuntu-latest
15 |     permissions:
16 |       contents: read
17 |       pull-requests: write
18 | 
19 |     steps:
20 |       - uses: actions/labeler@v6.0.1
21 |         with:
22 |           repo-token: "${{ secrets.GITHUB_TOKEN }}"
23 | 
--------------------------------------------------------------------------------
/.github/workflows/run_test.yml:
--------------------------------------------------------------------------------
1 | name: Python application test
2 | 
3 | on: [push]
4 | 
5 | jobs:
6 |   build:
7 | 
8 |     runs-on: ubuntu-latest
9 | 
10 |     steps:
11 |       - uses: actions/checkout@v5
12 |       - name: Set up Python 3.10
13 |         uses: actions/setup-python@v6
14 |         with:
15 |           python-version: '3.10'
16 |       - name: Install dependencies
17 |         run: |
18 |           python -m pip install --no-cache-dir --upgrade pip
19 |           pip install pytest
20 |           if [ -f requirements.txt ]; then pip install --no-cache-dir -r requirements.txt; fi
21 |       - name: Run tests with pytest
22 |         run: |
23 |           pytest tests/
24 | 
--------------------------------------------------------------------------------
/.github/workflows/welcome.yml:
--------------------------------------------------------------------------------
1 | name: Welcome WorkFlow
2 | 
3 | on:
4 |   issues:
5 |     types: [opened]
6 |   pull_request_target:
7 |     types: [opened]
8 | 
9 | jobs:
10 |   build:
11 |     name: 👋 Welcome
12 |     permissions: write-all
13 |     runs-on: ubuntu-latest
14 |     steps:
15 |       - uses: actions/first-interaction@v3.0.0
16 |         with:
17 |           repo-token: ${{ secrets.GITHUB_TOKEN }}
18 |           issue-message: "Hello there, thank you for opening an Issue! 🙏🏻 The team was notified and they will get back to you asap."
19 |           pr-message: "Hello there, thank you for opening a PR! 🙏🏻 The team was notified and they will get back to you asap."
--------------------------------------------------------------------------------
/.github/workflows/pylint.yml:
--------------------------------------------------------------------------------
1 | name: Pylint
2 | 
3 | on: [push]
4 | 
5 | jobs:
6 |   build:
7 |     runs-on: ubuntu-latest
8 |     strategy:
9 |       matrix:
10 |         python-version: ["3.9", "3.10"]
11 |     steps:
12 |       - uses: actions/checkout@v5
13 |       - name: Set up Python ${{ matrix.python-version }}
14 |         uses: actions/setup-python@v6
15 |         with:
16 |           python-version: ${{ matrix.python-version }}
17 |       - name: Install dependencies
18 |         run: |
19 |           python -m pip install --no-cache-dir --upgrade pip
20 |           pip install pylint
21 |       - name: Analysing the code with pylint
22 |         run: |
23 |           pylint $(git ls-files '*.py')
24 | 
--------------------------------------------------------------------------------
/.github/workflows/docs_test.yml:
--------------------------------------------------------------------------------
1 | name: Documentation Tests
2 | 
3 | on:
4 |   push:
5 |     branches:
6 |       - master
7 | 
8 | jobs:
9 |   test:
10 |     runs-on: ubuntu-latest
11 | 
12 |     steps:
13 |       - name: Checkout code
14 |         uses: actions/checkout@v5
15 | 
16 |       - name: Set up Python
17 |         uses: actions/setup-python@v6
18 |         with:
19 |           python-version: '3.10'
20 | 
21 |       - name: Install dependencies
22 |         run: pip install --no-cache-dir -r requirements.txt
23 | 
24 |       - name: Build documentation
25 |         run: make docs
26 | 
27 |       - name: Validate documentation
28 |         run: sphinx-build -b linkcheck docs build/docs
--------------------------------------------------------------------------------
/examples/finance/crypto/coin_market_cap.py:
--------------------------------------------------------------------------------
1 | from loguru import logger
2 | from swarms_tools.finance.coin_market_cap import (
3 |     coinmarketcap_api,
4 | )
5 | 
6 | if __name__ == "__main__":
7 |     # Set up logging
8 |     logger.add(
9 |         "coinmarketcap_api.log", rotation="500 MB", level="INFO"
10 |     )
11 | 
12 |     # Example usage
13 |     single_coin = coinmarketcap_api(["Bitcoin"])
14 |     print("Single Coin Data:", single_coin)
15 | 
16 |     multiple_coins = coinmarketcap_api(
17 |         ["Bitcoin", "Ethereum", "Tether"]
18 |     )
19 |     print("Multiple Coins Data:", multiple_coins)
20 | 
21 |     all_coins = coinmarketcap_api()
22 |     print("All Coins Data:", all_coins)
23 | 
--------------------------------------------------------------------------------
/examples/finance/stocks/yahoo_finance_api.py:
--------------------------------------------------------------------------------
1 | from loguru import logger
2 | from swarms_tools.finance.yahoo_finance import (
3 |     yahoo_finance_api,
4 | )
5 | 
6 | if __name__ == "__main__":
7 |     # Set up logging
8 |     logger.add(
9 |         "yahoo_finance_api.log", rotation="500 MB", level="INFO"
10 |     )
11 | 
12 |     # Example usage
13 |     single_stock = yahoo_finance_api(
14 |         ["AAPL"]
15 |     ) # Fetch data for a single stock
16 |     print("Single Stock Data:", single_stock)
17 | 
18 |     # multiple_stocks = yahoo_finance_api(
19 |     #     ["AAPL", "GOOG", "MSFT"]
20 |     # ) # Fetch data for multiple stocks
21 |     # print("Multiple Stocks Data:", multiple_stocks)
22 | 
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: ''
6 | assignees: 'kyegomez'
7 | 
8 | ---
9 | 
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 | 
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 | 
16 | **Describe alternatives you've considered**
17 | A clear and concise description of any alternative solutions or features you've considered.
18 | 
19 | **Additional context**
20 | Add any other context or screenshots about the feature request here.
21 | 
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 | 
3 | github: [kyegomez]
4 | patreon: # Replace with a single Patreon username
5 | open_collective: # Replace with a single Open Collective username
6 | ko_fi: # Replace with a single Ko-fi username
7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
9 | liberapay: # Replace with a single Liberapay username
10 | issuehunt: # Replace with a single IssueHunt username
11 | otechie: # Replace with a single Otechie username
12 | lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
13 | custom: #Nothing
14 | 
--------------------------------------------------------------------------------
/.github/workflows/unit-test.yml:
--------------------------------------------------------------------------------
1 | name: build
2 | 
3 | on:
4 |   push:
5 |     branches: [ main ]
6 |   pull_request:
7 |     branches: [ main ]
8 | 
9 | jobs:
10 | 
11 |   build:
12 | 
13 |     runs-on: ubuntu-latest
14 | 
15 |     steps:
16 |       - uses: actions/checkout@v5
17 | 
18 |       - name: Setup Python
19 |         uses: actions/setup-python@v6
20 |         with:
21 |           python-version: '3.10'
22 | 
23 |       - name: Install dependencies
24 |         run: pip install --no-cache-dir -r requirements.txt
25 | 
26 |       - name: Run Python unit tests
27 |         run: python3 -m unittest discover tests/
28 | 
29 |       - name: Verify that the Docker image for the action builds
30 |         run: docker build . --file Dockerfile
31 | 
32 |       - name: Verify integration test results
33 |         run: python3 -m unittest discover tests/
34 | 
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a detailed report on the bug and its root cause. Conduct root cause error analysis
4 | title: "[BUG] "
5 | labels: bug
6 | assignees: kyegomez
7 | 
8 | ---
9 | 
10 | **Describe the bug**
11 | A clear and concise description of what the bug is and what the main root cause error is. Test very thoroughly before submitting.
12 | 
13 | **To Reproduce**
14 | Steps to reproduce the behavior:
15 | 1. Go to '...'
16 | 2. Click on '....'
17 | 3. Scroll down to '....'
18 | 4. See error
19 | 
20 | **Expected behavior**
21 | A clear and concise description of what you expected to happen.
22 | 
23 | **Screenshots**
24 | If applicable, add screenshots to help explain your problem.
25 | 
26 | **Additional context**
27 | Add any other context about the problem here.
28 | 
--------------------------------------------------------------------------------
/examples/misc/tool_chainer_example.py:
--------------------------------------------------------------------------------
1 | # Example usage
2 | from loguru import logger
3 | 
4 | from swarms_tools.structs import tool_chainer
5 | 
6 | if __name__ == "__main__":
7 |     logger.add("tool_chainer.log", rotation="500 MB", level="INFO")
8 | 
9 |     # Example tools
10 |     def tool1():
11 |         return "Tool1 Result"
12 | 
13 |     def tool2():
14 |         return "Tool2 Result"
15 | 
16 |     # def tool3():
17 |     #     raise ValueError("Simulated error in Tool3")
18 | 
19 |     tools = [tool1, tool2]
20 | 
21 |     # Parallel execution
22 |     parallel_results = tool_chainer(tools, parallel=True)
23 |     print("Parallel Results:", parallel_results)
24 | 
25 |     # Sequential execution
26 |     # sequential_results = tool_chainer(tools, parallel=False)
27 |     # print("Sequential Results:", sequential_results)
28 | 
--------------------------------------------------------------------------------
/.github/workflows/python-publish.yml:
--------------------------------------------------------------------------------
1 | 
2 | name: Upload Python Package
3 | 
4 | on:
5 |   release:
6 |     types: [published]
7 | 
8 | permissions:
9 |   contents: read
10 | 
11 | jobs:
12 |   deploy:
13 | 
14 |     runs-on: ubuntu-latest
15 | 
16 |     steps:
17 |       - uses: actions/checkout@v5
18 |       - name: Set up Python
19 |         uses: actions/setup-python@v6
20 |         with:
21 |           python-version: '3.10'
22 |       - name: Install dependencies
23 |         run: |
24 |           python -m pip install --no-cache-dir --upgrade pip
25 |           pip install build
26 |       - name: Build package
27 |         run: python -m build
28 |       - name: Publish package
29 |         uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e
30 |         with:
31 |           user: __token__
32 |           password: ${{ secrets.PYPI_API_TOKEN }}
--------------------------------------------------------------------------------
/examples/misc/defillama_mcp_api.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | from swarms_tools.finance.defillama_mcp_tools import (
3 |     get_protocols,
4 |     get_protocol_tvl,
5 |     get_chain_tvl,
6 |     get_token_prices,
7 | )
8 | 
9 | 
10 | async def main():
11 |     print("Fetching protocols...")
12 |     protocols = await get_protocols()
13 | 
14 |     print("\nFetching protocol TVL for uniswap-v3...")
15 |     protocol_tvl = await get_protocol_tvl("uniswap-v3")
16 |     print(protocol_tvl)
17 | 
18 |     print("\nFetching chain TVL for Ethereum...")
19 |     chain_tvl = await get_chain_tvl("Ethereum")
20 |     print(chain_tvl)
21 | 
22 |     print("\nFetching token prices for Bitcoin...")
23 |     token_prices = await get_token_prices("coingecko:bitcoin")
24 |     print(token_prices)
25 | 
26 | 
27 | if __name__ == "__main__":
28 |     asyncio.run(main())
29 | 
--------------------------------------------------------------------------------
/.github/workflows/code_quality_control.yml:
--------------------------------------------------------------------------------
1 | name: Linting and Formatting
2 | 
3 | on:
4 |   push:
5 |     branches:
6 |       - main
7 | 
8 | jobs:
9 |   lint_and_format:
10 |     runs-on: ubuntu-latest
11 | 
12 |     steps:
13 |       - name: Checkout code
14 |         uses: actions/checkout@v5
15 | 
16 |       - name: Set up Python
17 |         uses: actions/setup-python@v6
18 |         with:
19 |           python-version: '3.10'
20 | 
21 |       - name: Install dependencies
22 |         run: pip install --no-cache-dir -r requirements.txt
23 | 
24 |       - name: Find Python files
25 |         run: find swarms_tools -name "*.py" -type f -exec autopep8 --in-place --aggressive --aggressive {} +
26 | 
27 |       - name: Push changes
28 |         uses: ad-m/github-push-action@master
29 |         with:
30 |           github_token: ${{ secrets.GITHUB_TOKEN }}
--------------------------------------------------------------------------------
/.github/workflows/stale.yml:
--------------------------------------------------------------------------------
1 | # This workflow warns and then closes issues and PRs that have had no activity for a specified amount of time.
2 | #
3 | # You can adjust the behavior by modifying this file.
4 | # For more information, see:
5 | # https://github.com/actions/stale
6 | name: Mark stale issues and pull requests
7 | 
8 | on:
9 |   schedule:
10 |     - cron: '26 12 * * *'
11 | 
12 | jobs:
13 |   stale:
14 | 
15 |     runs-on: ubuntu-latest
16 |     permissions:
17 |       issues: write
18 |       pull-requests: write
19 | 
20 |     steps:
21 |       - uses: actions/stale@v10
22 |         with:
23 |           repo-token: ${{ secrets.GITHUB_TOKEN }}
24 |           stale-issue-message: 'Stale issue message'
25 |           stale-pr-message: 'Stale pull request message'
26 |           stale-issue-label: 'no-issue-activity'
27 |           stale-pr-label: 'no-pr-activity'
--------------------------------------------------------------------------------
/.github/workflows/cos_integration.yml:
--------------------------------------------------------------------------------
1 | name: Continuous Integration
2 | 
3 | on:
4 |   push:
5 |     branches:
6 |       - main
7 | 
8 | jobs:
9 |   test:
10 |     runs-on: ubuntu-latest
11 |     steps:
12 |       - name: Checkout code
13 |         uses: actions/checkout@v5
14 | 
15 |       - name: Set up Python
16 |         uses: actions/setup-python@v6
17 |         with:
18 |           python-version: '3.10'
19 | 
20 |       - name: Install dependencies
21 |         run: pip install --no-cache-dir -r requirements.txt
22 | 
23 |       - name: Run unit tests
24 |         run: pytest tests/unit
25 | 
26 |       - name: Run integration tests
27 |         run: pytest tests/integration
28 | 
29 |       - name: Run code coverage
30 |         run: pytest --cov=swarms tests/
31 | 
32 |       - name: Run linters
33 |         run: pylint swarms
34 | 
35 |       - name: Build documentation
36 |         run: make docs
37 | 
38 |       - name: Validate documentation
39 |         run: sphinx-build -b linkcheck docs build/docs
40 | 
41 |       - name: Run performance tests
42 |         run: pytest tests/performance
--------------------------------------------------------------------------------
/examples/web_scraping/basic/markdown_scraper_example.py:
--------------------------------------------------------------------------------
1 | """
2 | Markdown Format Scraping Example
3 | 
4 | This example demonstrates scraping content and formatting it
5 | as markdown for documentation or note-taking purposes.
6 | """
7 | 
8 | from swarms_tools.search.web_scraper import scrape_and_format_sync
9 | 
10 | # Scrape a documentation site and format as markdown
11 | documentation_url = (
12 |     "https://docs.python.org/3/tutorial/introduction.html"
13 | )
14 | markdown_content = scrape_and_format_sync(
15 |     url=documentation_url,
16 |     format_type="markdown",
17 |     truncate=False, # Keep full content for documentation
18 |     remove_scripts=True,
19 |     remove_styles=True,
20 |     remove_comments=True,
21 | )
22 | 
23 | # The markdown_content contains:
24 | # # Page Title
25 | #
26 | # Full content text...
27 | #
28 | # **Links:** X found
29 | # **Images:** Y found
30 | # **Word Count:** Z
31 | 
32 | formatted_doc = markdown_content
33 | 
34 | # Perfect for saving to .md files or including in documentation
35 | # The markdown format makes it easy to read and process further
36 | 
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.yml:
--------------------------------------------------------------------------------
1 | 