├── .gitignore
├── .gitattributes
├── tsconfig.json
├── package.json
├── README.md
├── src
│   └── index.ts
├── public
│   └── llms.txt
└── pnpm-lock.yaml

/.gitignore:
--------------------------------------------------------------------------------
node_modules/
dist/
.DS_Store
*.log
.env

--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
# Auto detect text files and perform LF normalization
* text=auto

--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
{
  "compilerOptions": {
    "target": "ES2020",
    "module": "ES2020",
    "moduleResolution": "node",
    "outDir": "./dist",
    "rootDir": "./src",
    "strict": true,
    "esModuleInterop": true,
    "skipLibCheck": true,
    "forceConsistentCasingInFileNames": true
  },
  "include": ["src/**/*"],
  "exclude": ["node_modules", "dist"]
}

--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
{
  "name": "generate-llmstxt",
  "version": "1.0.0",
  "description": "NPX package to generate LLMs.txt files using Firecrawl API",
  "main": "dist/index.js",
  "bin": {
    "generate-llmstxt": "./dist/index.js"
  },
  "type": "module",
  "scripts": {
    "build": "tsc",
    "prepublishOnly": "npm run build",
    "publish": "npm run build && npm publish"
  },
  "keywords": [
    "llms.txt",
    "llm",
    "firecrawl",
    "text-generation",
    "ai"
  ],
  "author": "",
  "license": "MIT",
  "dependencies": {
    "@mendable/firecrawl-js": "^1.19.0",
    "chalk": "^5.3.0",
    "commander": "^11.1.0",
    "dotenv": "^16.4.5",
    "firecrawl": "^1.0.0"
  },
  "devDependencies": {
    "@types/node": "^20.11.24",
    "typescript": "^5.3.3"
  }
}

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# generate-llmstxt

A simple NPX package that generates LLMs.txt files using the Firecrawl API. Point it at a URL and it creates two files in the output directory of your choice (defaults to the 'public' folder):
- `llms.txt`: an LLM-friendly summary of the site's content
- `llms-full.txt`: the full text content of the crawled pages

## Usage

You can run this package using NPX without installing it. There are two ways to provide your Firecrawl API key:

### 1. Using Command Line Arguments

```bash
npx generate-llmstxt --api-key YOUR_FIRECRAWL_API_KEY
```

### 2. Using Environment Variables

Create a `.env` file in your project root and add your API key:

```env
FIRECRAWL_API_KEY=your_api_key_here
```

Then run the command without the `--api-key` option:

```bash
npx generate-llmstxt
```
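
Either way the key ends up in the `FIRECRAWL_API_KEY` environment variable, so for a one-off run you can also pass it inline (POSIX shells):

```bash
FIRECRAWL_API_KEY=your_api_key_here npx generate-llmstxt -u https://your-website.com
```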
### Options

- `-k, --api-key <key>` (optional if set in .env): Your Firecrawl API key
- `-u, --url <url>` (optional): URL to analyze (default: https://example.com)
- `-m, --max-urls <number>` (optional): Maximum number of URLs to analyze (default: 50)
- `-o, --output-dir <path>` (optional): Output directory path (default: 'public')

### Examples

```bash
# Using command line argument with default output directory
npx generate-llmstxt -k your_api_key -u https://your-website.com -m 20

# Using .env file with default output directory
npx generate-llmstxt -u https://your-website.com -m 20

# Specifying a custom output directory
npx generate-llmstxt -k your_api_key -u https://your-website.com -o custom/path/to/output

# Using .env file and custom output directory
npx generate-llmstxt -u https://your-website.com -o content/llms
```

## Requirements

- Node.js 16 or higher (the pinned `commander@11` requires Node 16+)
- A valid Firecrawl API key (via command line or .env file)

## Output

The package will create two files in your specified output directory (defaults to 'public'):

1. `llms.txt`: an LLM-friendly summary of the site's content
2. `llms-full.txt`: the full text content of the crawled pages
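
## Programmatic Usage

The CLI is a thin wrapper around Firecrawl's `generateLLMsText` (see `src/index.ts`). If you would rather call it from your own script, here is a minimal sketch using the same SDK call; it assumes an ESM context with top-level `await` and `FIRECRAWL_API_KEY` set in the environment:

```typescript
import FirecrawlApp from "@mendable/firecrawl-js";

const firecrawl = new FirecrawlApp({ apiKey: process.env.FIRECRAWL_API_KEY });

// Same call the CLI makes; showFullText: true also returns the llms-full.txt content.
const results = await firecrawl.generateLLMsText("https://your-website.com", {
  maxUrls: 20,
  showFullText: true,
});

if (results.success) {
  console.log(results.data.llmstxt);
}
```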

## Future Improvements

- [ ] Local version

## License

MIT

--------------------------------------------------------------------------------
/src/index.ts:
--------------------------------------------------------------------------------
#!/usr/bin/env node

import { Command } from "commander";
import chalk from "chalk";
import fs from "fs/promises";
import path from "path";
import { config } from "dotenv";
import FirecrawlApp from "@mendable/firecrawl-js";

// Load environment variables from .env file
config();

const program = new Command();

program
  .name("generate-llmstxt")
  .description("Generate LLMs.txt files using Firecrawl API")
  .option(
    "-k, --api-key <key>",
    "Firecrawl API key (can also be set via FIRECRAWL_API_KEY in .env)"
  )
  .option("-u, --url <url>", "URL to analyze", "https://example.com")
  .option("-m, --max-urls <number>", "Maximum URLs to analyze", "50")
  .option("-o, --output-dir <path>", "Output directory path", "public")
  .version("1.0.0");

async function generateLLMsText(
  apiKey: string | undefined,
  url: string,
  maxUrls: number,
  outputDir: string
) {
  try {
    // Check for API key in environment variables if not provided via command line
    const finalApiKey = apiKey || process.env.FIRECRAWL_API_KEY;

    if (!finalApiKey) {
      throw new Error(
        "API key is required. Provide it via the --api-key option or FIRECRAWL_API_KEY in a .env file"
      );
    }

    const firecrawl = new FirecrawlApp({ apiKey: finalApiKey });

    console.log(chalk.blue("Generating LLMs text files..."));

    const params = {
      maxUrls,
      showFullText: true,
    };

    const results = await firecrawl.generateLLMsText(url, params);
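
    // The response carries a success flag, an optional error message, and the
    // generated texts under data.llmstxt / data.llmsfulltxt (consumed below).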
    if (!results.success) {
      throw new Error(results.error || "Unknown error occurred");
    }

    // Create output directory if it doesn't exist
    await fs.mkdir(outputDir, { recursive: true });

    // Write the summary file
    await fs.writeFile(
      path.join(outputDir, "llms.txt"),
      results.data.llmstxt || "",
      "utf-8"
    );

    // Write the full text file
    await fs.writeFile(
      path.join(outputDir, "llms-full.txt"),
      results.data.llmsfulltxt || "",
      "utf-8"
    );

    console.log(chalk.green("✓ Successfully generated LLMs text files"));
    console.log(chalk.gray("Files created:"));
    console.log(chalk.gray(`- ${path.join(outputDir, "llms.txt")}`));
    console.log(chalk.gray(`- ${path.join(outputDir, "llms-full.txt")}`));
  } catch (error) {
    console.error(
      chalk.red("Error:"),
      error instanceof Error ? error.message : "Unknown error occurred"
    );
    process.exit(1);
  }
}

async function main() {
  program.parse();
  const options = program.opts();

  await generateLLMsText(
    options.apiKey,
    options.url,
    parseInt(options.maxUrls, 10),
    options.outputDir
  );
}

main();

--------------------------------------------------------------------------------
/public/llms.txt:
--------------------------------------------------------------------------------
# http://docs.firecrawl.dev llms.txt

- [Firecrawl API Documentation](https://docs.firecrawl.dev/introduction): Firecrawl API service for crawling and converting URLs to markdown.
- [Web Data Integrations](https://docs.firecrawl.dev/integrations): Explore various integrations for web data extraction tools.
- [Firecrawl API Documentation](https://docs.firecrawl.dev/introduction): Firecrawl API service for crawling and converting URLs to markdown.
- [Data Extraction Tool](https://docs.firecrawl.dev/features/extract): Easily extract structured data from multiple URLs or domains.
- [Firecrawl and CrewAI Integration](https://docs.firecrawl.dev/integrations/crewai): Integrate Firecrawl tools with CrewAI for AI agents.
- [Firecrawl API Documentation](https://docs.firecrawl.dev/introduction): Firecrawl API service for crawling and converting URLs to markdown.
- [Firecrawl Langchain Integration](https://docs.firecrawl.dev/integrations/langchain): Integrate Firecrawl with Langchain for web scraping.
- [Firecrawl Go SDK](https://docs.firecrawl.dev/sdks/go): Comprehensive guide for using Firecrawl Go SDK effectively.
- [Firecrawl Contribution Guide](https://docs.firecrawl.dev/contributing/guide): Instructions for contributing and running Firecrawl locally.
- [Dify Integration Guide](https://docs.firecrawl.dev/integrations/dify): Learn to scrape and import data into Dify workflows.
- [Search and Scrape](https://docs.firecrawl.dev/features/search): Effortlessly search and scrape web data with Firecrawl.
- [Firecrawl Python SDK](https://docs.firecrawl.dev/sdks/python): Comprehensive guide for using Firecrawl Python SDK effectively.
- [Firecrawl Launch Week](https://docs.firecrawl.dev/launch-week): Explore Firecrawl's Launch Week features and enhancements.
- [Firecrawl Langflow Integration](https://docs.firecrawl.dev/integrations/langflow): Learn to integrate Firecrawl with Langflow for web scraping.
- [Firecrawl API Service](https://docs.firecrawl.dev/v0/introduction): API service for crawling URLs and generating markdown.
- [Firecrawl Rust SDK](https://docs.firecrawl.dev/sdks/rust): Comprehensive guide for using Firecrawl Rust SDK effectively.
- [Website Crawling Features](https://docs.firecrawl.dev/features/crawl): Comprehensive website crawling and data extraction features.
- [Firecrawl Node SDK Guide](https://docs.firecrawl.dev/sdks/node): Comprehensive guide for using Firecrawl Node SDK effectively.
- [Website Mapping Tool](https://docs.firecrawl.dev/features/map): Easily map website URLs for scraping and exploration.
- [Firecrawl Flowise Integration](https://docs.firecrawl.dev/integrations/flowise): Integrate Firecrawl with Flowise for web data workflows.
- [Web Scraping Features](https://docs.firecrawl.dev/features/scrape): Firecrawl enables web scraping and data extraction efficiently.
- [Firecrawl V1 API](https://docs.firecrawl.dev/v1-welcome): Explore Firecrawl V1's enhanced API features and usage.
- [Camel AI Integration](https://docs.firecrawl.dev/integrations/camelai): Integrate Camel AI with Firecrawl for web data gathering.
- [Firecrawl LlamaIndex Integration](https://docs.firecrawl.dev/integrations/llamaindex): Integrate Firecrawl with LlamaIndex for web scraping.
- [Firecrawl Rate Limits](https://docs.firecrawl.dev/rate-limits): Overview of Firecrawl API rate limits and plans.
- [Firecrawl SDK Overview](https://docs.firecrawl.dev/sdks/overview): Explore official and community SDKs for Firecrawl.
- [LLM Data Extraction](https://docs.firecrawl.dev/features/llm-extract): Efficiently extract structured data using Firecrawl's LLM features.
- [Firecrawl Rust SDK](https://docs.firecrawl.dev/v0/sdks/rust): Comprehensive guide for using Firecrawl Rust SDK features.
- [Self-Hosting Firecrawl](https://docs.firecrawl.dev/contributing/self-host): Instructions for self-hosting Firecrawl with security and customization.
- [Website Crawling Features](https://docs.firecrawl.dev/v0/features/crawl): Comprehensive website crawling and data extraction features explained.
- [Data Extraction Features](https://docs.firecrawl.dev/features/extract-beta): Extract structured data from websites using LLMs effectively.
- [Firecrawl Data Extraction](https://docs.firecrawl.dev/v0/features/extract): Efficiently scrape and extract structured data using Firecrawl.
- [Firecrawl Go SDK](https://docs.firecrawl.dev/v0/sdks/go): Comprehensive guide for using Firecrawl Go SDK effectively.
- [Advanced Scraping Guide](https://docs.firecrawl.dev/advanced-scraping-guide): Comprehensive guide for advanced scraping techniques with Firecrawl.
- [Firecrawl Python SDK](https://docs.firecrawl.dev/v0/sdks/python): Comprehensive guide for using Firecrawl Python SDK effectively.
- [Firecrawl Node SDK](https://docs.firecrawl.dev/v0/sdks/node): Comprehensive guide for using Firecrawl Node SDK effectively.
- [Advanced Scraping Guide](https://docs.firecrawl.dev/v0/advanced-scraping-guide): Comprehensive guide on advanced scraping techniques with Firecrawl.
- [Open Source vs Cloud](https://docs.firecrawl.dev/contributing/open-source-or-cloud): Explore Firecrawl's open source and cloud offerings.
- [Batch Scraping](https://docs.firecrawl.dev/features/batch-scrape): Batch scrape multiple URLs with Firecrawl's SDK methods.
- [Firecrawl API Overview](https://docs.firecrawl.dev/api-reference/introduction): Comprehensive guide to Firecrawl API features and usage.
- [Search Endpoint](https://docs.firecrawl.dev/api-reference/endpoint/search): Search endpoint for querying and scraping web content.
- [Data Extraction API](https://docs.firecrawl.dev/api-reference/endpoint/extract): API endpoint for extracting data from specified URLs.
- [Web Scraping API](https://docs.firecrawl.dev/api-reference/endpoint/scrape): API endpoint for scraping web content efficiently.
- [Map Endpoint](https://docs.firecrawl.dev/api-reference/endpoint/map): API endpoint for mapping URLs with various options.
- [Firecrawl API Introduction](https://docs.firecrawl.dev/v0/api-reference/introduction): Comprehensive guide to Firecrawl API usage and authentication.
- [Crawl API Endpoint](https://docs.firecrawl.dev/v0/api-reference/endpoint/crawl): API endpoint for initiating web crawls with options.
- [Web Scraping API](https://docs.firecrawl.dev/v0/api-reference/endpoint/scrape): API endpoint for scraping web pages with options.
- [Crawl Job Status](https://docs.firecrawl.dev/api-reference/endpoint/crawl-get): Retrieve the status of a specific crawl job.
- [Search API](https://docs.firecrawl.dev/v0/api-reference/endpoint/search): Powerful search API for querying and retrieving content.
- [Batch Scrape API](https://docs.firecrawl.dev/api-reference/endpoint/batch-scrape): Batch scrape multiple URLs with customizable options and webhooks.

--------------------------------------------------------------------------------
/pnpm-lock.yaml:
--------------------------------------------------------------------------------
lockfileVersion: '9.0'

settings:
  autoInstallPeers: true
  excludeLinksFromLockfile: false

importers:

  .:
    dependencies:
      '@mendable/firecrawl-js':
        specifier: ^1.19.0
        version: 1.19.0(ws@8.18.1)
      chalk:
        specifier: ^5.3.0
        version: 5.4.1
      commander:
        specifier: ^11.1.0
        version: 11.1.0
      dotenv:
        specifier: ^16.4.5
        version: 16.4.7
      firecrawl:
        specifier: ^1.0.0
        version: 1.19.0(ws@8.18.1)
    devDependencies:
      '@types/node':
        specifier: ^20.11.24
        version: 20.17.23
      typescript:
        specifier: ^5.3.3
        version: 5.8.2

packages:

  '@mendable/firecrawl-js@1.19.0':
    resolution: {integrity: sha512-T0mEBVFyOMQkxLjq7QdXxxtlPJl2tpcl+SpusLSo4rngn/Nv/drJv3krjlN+d1isrCz/PZ6xqU4Sf5LLvuIT2g==}

  '@types/node@20.17.23':
    resolution: {integrity: sha512-8PCGZ1ZJbEZuYNTMqywO+Sj4vSKjSjT6Ua+6RFOYlEvIvKQABPtrNkoVSLSKDb4obYcMhspVKmsw8Cm10NFRUg==}

  asynckit@0.4.0:
    resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}

  axios@1.8.1:
    resolution: {integrity: sha512-NN+fvwH/kV01dYUQ3PTOZns4LWtWhOFCAhQ/pHb88WQ1hNe5V/dvFwc4VJcDL11LT9xSX0QtsR8sWUuyOuOq7g==}

  call-bind-apply-helpers@1.0.2:
    resolution: {integrity: sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==}
    engines: {node: '>= 0.4'}

  chalk@5.4.1:
    resolution: {integrity: sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w==}
    engines: {node: ^12.17.0 || ^14.13 || >=16.0.0}

  combined-stream@1.0.8:
    resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==}
    engines: {node: '>= 0.8'}

  commander@11.1.0:
    resolution: {integrity: sha512-yPVavfyCcRhmorC7rWlkHn15b4wDVgVmBA7kV4QVBsF7kv/9TKJAbAXVTxvTnwP8HHKjRCJDClKbciiYS7p0DQ==}
    engines: {node: '>=16'}

  delayed-stream@1.0.0:
    resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==}
    engines: {node: '>=0.4.0'}

  dotenv@16.4.7:
    resolution: {integrity: sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ==}
    engines: {node: '>=12'}

  dunder-proto@1.0.1:
    resolution: {integrity: sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==}
    engines: {node: '>= 0.4'}

  es-define-property@1.0.1:
    resolution: {integrity: sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==}
    engines: {node: '>= 0.4'}

  es-errors@1.3.0:
    resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==}
    engines: {node: '>= 0.4'}

  es-object-atoms@1.1.1:
    resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==}
    engines: {node: '>= 0.4'}

  es-set-tostringtag@2.1.0:
    resolution: {integrity: sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==}
    engines: {node: '>= 0.4'}

  firecrawl@1.19.0:
    resolution: {integrity: sha512-Ki67Qt+qNYyCWVIOvAUfCPXSwYx0DaBQY6kTwTXI0Cy8xTdXNRRReL8HrTg2OCXd3PMx5aXjEWaq9cqVuj38GQ==}

  follow-redirects@1.15.9:
    resolution: {integrity: sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==}
    engines: {node: '>=4.0'}
    peerDependencies:
      debug: '*'
    peerDependenciesMeta:
      debug:
        optional: true

  form-data@4.0.2:
    resolution: {integrity: sha512-hGfm/slu0ZabnNt4oaRZ6uREyfCj6P4fT/n6A1rGV+Z0VdGXjfOhVUpkn6qVQONHGIFwmveGXyDs75+nr6FM8w==}
    engines: {node: '>= 6'}

  function-bind@1.1.2:
    resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==}

  get-intrinsic@1.3.0:
    resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==}
    engines: {node: '>= 0.4'}

  get-proto@1.0.1:
    resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==}
    engines: {node: '>= 0.4'}

  gopd@1.2.0:
    resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==}
    engines: {node: '>= 0.4'}

  has-symbols@1.1.0:
    resolution: {integrity: sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==}
    engines: {node: '>= 0.4'}

  has-tostringtag@1.0.2:
    resolution: {integrity: sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==}
    engines: {node: '>= 0.4'}

  hasown@2.0.2:
    resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==}
    engines: {node: '>= 0.4'}

  isows@1.0.6:
    resolution: {integrity: sha512-lPHCayd40oW98/I0uvgaHKWCSvkzY27LjWLbtzOm64yQ+G3Q5npjjbdppU65iZXkK1Zt+kH9pfegli0AYfwYYw==}
    peerDependencies:
      ws: '*'

  math-intrinsics@1.1.0:
    resolution: {integrity: sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==}
    engines: {node: '>= 0.4'}

  mime-db@1.52.0:
    resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==}
    engines: {node: '>= 0.6'}

  mime-types@2.1.35:
    resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==}
    engines: {node: '>= 0.6'}

  proxy-from-env@1.1.0:
    resolution: {integrity: sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==}

  typescript-event-target@1.1.1:
    resolution: {integrity: sha512-dFSOFBKV6uwaloBCCUhxlD3Pr/P1a/tJdcmPrTXCHlEFD3faj0mztjcGn6VBAhQ0/Bdy8K3VWrrqwbt/ffsYsg==}

  typescript@5.8.2:
    resolution: {integrity: sha512-aJn6wq13/afZp/jT9QZmwEjDqqvSGp1VT5GVg+f/t6/oVyrgXM6BY1h9BRh/O5p3PlUPAe+WuiEZOmb/49RqoQ==}
    engines: {node: '>=14.17'}
    hasBin: true

  undici-types@6.19.8:
    resolution: {integrity: sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==}

  ws@8.18.1:
    resolution: {integrity: sha512-RKW2aJZMXeMxVpnZ6bck+RswznaxmzdULiBr6KY7XkTnW8uvt0iT9H5DkHUChXrc+uurzwa0rVI16n/Xzjdz1w==}
    engines: {node: '>=10.0.0'}
    peerDependencies:
      bufferutil: ^4.0.1
      utf-8-validate: '>=5.0.2'
    peerDependenciesMeta:
      bufferutil:
        optional: true
      utf-8-validate:
        optional: true

  zod-to-json-schema@3.24.3:
    resolution: {integrity: sha512-HIAfWdYIt1sssHfYZFCXp4rU1w2r8hVVXYIlmoa0r0gABLs5di3RCqPU5DDROogVz1pAdYBaz7HK5n9pSUNs3A==}
    peerDependencies:
      zod: ^3.24.1

  zod@3.24.2:
    resolution: {integrity: sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ==}

snapshots:

  '@mendable/firecrawl-js@1.19.0(ws@8.18.1)':
    dependencies:
      axios: 1.8.1
      isows: 1.0.6(ws@8.18.1)
      typescript-event-target: 1.1.1
      zod: 3.24.2
      zod-to-json-schema: 3.24.3(zod@3.24.2)
    transitivePeerDependencies:
      - debug
      - ws

  '@types/node@20.17.23':
    dependencies:
      undici-types: 6.19.8

  asynckit@0.4.0: {}

  axios@1.8.1:
    dependencies:
      follow-redirects: 1.15.9
      form-data: 4.0.2
      proxy-from-env: 1.1.0
    transitivePeerDependencies:
      - debug

  call-bind-apply-helpers@1.0.2:
    dependencies:
      es-errors: 1.3.0
      function-bind: 1.1.2

  chalk@5.4.1: {}

  combined-stream@1.0.8:
    dependencies:
      delayed-stream: 1.0.0

  commander@11.1.0: {}

  delayed-stream@1.0.0: {}

  dotenv@16.4.7: {}

  dunder-proto@1.0.1:
    dependencies:
      call-bind-apply-helpers: 1.0.2
      es-errors: 1.3.0
      gopd: 1.2.0

  es-define-property@1.0.1: {}

  es-errors@1.3.0: {}

  es-object-atoms@1.1.1:
    dependencies:
      es-errors: 1.3.0

  es-set-tostringtag@2.1.0:
    dependencies:
      es-errors: 1.3.0
      get-intrinsic: 1.3.0
      has-tostringtag: 1.0.2
      hasown: 2.0.2

  firecrawl@1.19.0(ws@8.18.1):
    dependencies:
      axios: 1.8.1
      isows: 1.0.6(ws@8.18.1)
      typescript-event-target: 1.1.1
      zod: 3.24.2
      zod-to-json-schema: 3.24.3(zod@3.24.2)
    transitivePeerDependencies:
      - debug
      - ws

  follow-redirects@1.15.9: {}

  form-data@4.0.2:
    dependencies:
      asynckit: 0.4.0
      combined-stream: 1.0.8
      es-set-tostringtag: 2.1.0
      mime-types: 2.1.35

  function-bind@1.1.2: {}

  get-intrinsic@1.3.0:
    dependencies:
      call-bind-apply-helpers: 1.0.2
      es-define-property: 1.0.1
      es-errors: 1.3.0
      es-object-atoms: 1.1.1
      function-bind: 1.1.2
      get-proto: 1.0.1
      gopd: 1.2.0
      has-symbols: 1.1.0
      hasown: 2.0.2
      math-intrinsics: 1.1.0

  get-proto@1.0.1:
    dependencies:
      dunder-proto: 1.0.1
      es-object-atoms: 1.1.1

  gopd@1.2.0: {}

  has-symbols@1.1.0: {}

  has-tostringtag@1.0.2:
    dependencies:
      has-symbols: 1.1.0

  hasown@2.0.2:
    dependencies:
      function-bind: 1.1.2

  isows@1.0.6(ws@8.18.1):
    dependencies:
      ws: 8.18.1

  math-intrinsics@1.1.0: {}

  mime-db@1.52.0: {}

  mime-types@2.1.35:
    dependencies:
      mime-db: 1.52.0

  proxy-from-env@1.1.0: {}

  typescript-event-target@1.1.1: {}

  typescript@5.8.2: {}

  undici-types@6.19.8: {}

  ws@8.18.1: {}

  zod-to-json-schema@3.24.3(zod@3.24.2):
    dependencies:
      zod: 3.24.2

  zod@3.24.2: {}

--------------------------------------------------------------------------------