├── LICENSE
├── README.md
└── opensearch.js

/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2025 Hp Metainsan

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# OpenSearch-AI
OpenSearch AI is a tool for searching the internet: it launches a query, explores the web, and retrieves the content of the resulting sites.

OpenSearch AI is an open source project built in Node.js as a web API. It lets you explore the web, scraping the contents of sites and extracting only the relevant parts. This kind of tool is especially useful in the context of generative AI: systems such as ChatGPT, DeepSeek, Claude, EdithAI, and PalmAI, along with many other AI chatbots, use tools like this to surface information in real time. Misuse of this tool may be illegal in some countries, and training AI models on data obtained through it may also be illegal; make sure to use this tool only for learning and experimentation.

Before you start, make sure you have Node.js installed locally; if not, you can download it from https://nodejs.org/en. Once Node.js is installed, verify the installation:

```bash
$ node -v
```

Check the Node Package Manager (npm):

```bash
$ npm -v
```

Both commands should print a version number; if they do not, check your installation.

Once Node.js is installed, clone or download the project:

```bash
$ git clone https://github.com
```

Once the project is downloaded, install the packages it depends on (locally, not with `-g`, so that `require()` can find them):

```bash
$ npm install express cheerio axios
```

Once installed, launch the server:
```bash
$ node opensearch.js
```

Once it is running, you can use the API locally from your browser.

The API has 3 parameters:

```[key]```: *the search query to launch on the web (required)*

```[url]```: *an optional URL passed as an extra parameter so that it is scraped alongside the search results*

```[proxy]```: *```true``` or ```false```; when ```true```, requests are routed through a random proxy from the list in ```opensearch.js```*
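For example, with the server running you can launch a search from the command line. The query and extra URL below are placeholders; substitute your own values:

```bash
$ curl "http://localhost:3000/search?key=nodejs%20web%20scraping&url=https://example.com&proxy=false"
```

The server replies with a JSON object containing one `SEARCH<n>` entry per scraped page. The field names below match what `opensearch.js` returns; the values are illustrative:

```json
{
  "query": "nodejs web scraping",
  "results": {
    "SEARCH1": {
      "Name": "Example Site",
      "Icon": "https://example.com/favicon.ico",
      "Link": "https://example.com/",
      "Title": "Example Domain",
      "Content": "Concatenated text of the page's <p> elements...",
      "Size": "1024 bytes"
    }
  }
}
```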
# License & Thanks
This tool was developed by me, [@Onestepcom00](https://github.com/onestepcom00), and is released under the MIT License.
--------------------------------------------------------------------------------
/opensearch.js:
--------------------------------------------------------------------------------
const express = require("express");
const axios = require("axios");
const cheerio = require("cheerio");

const app = express();
const PORT = 3000;

// Proxy list (replace these placeholders with valid proxies)
const proxyList = [
  "http://proxy1.com:port",
  "http://proxy2.com:port",
  "http://proxy3.com:port",
  "http://proxy4.com:port",
];
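// Note on the proxy format: axios's `proxy` option takes an object with
// separate protocol/host/port fields, not a URL string, e.g.
//   { protocol: "http", host: "203.0.113.10", port: 8080 }
// (the IP above is only an illustrative placeholder). The functions below
// parse each proxyList entry with new URL(), so every entry must be a
// well-formed URL with a numeric port, such as "http://203.0.113.10:8080";
// the "proxy1.com:port" placeholders will not parse until replaced.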
// Pick a random User-Agent for each request
function getRandomUserAgent() {
  const userAgents = [
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
  ];
  return userAgents[Math.floor(Math.random() * userAgents.length)];
}

// Scrape Bing for result links
async function scrapeBing(query, useProxy, extraUrl) {
  const bingURL = `https://www.bing.com/search?q=${encodeURIComponent(query)}`;
  const userAgent = getRandomUserAgent();

  const axiosOptions = {
    headers: { "User-Agent": userAgent },
  };

  if (useProxy) {
    // Pick a random proxy and split it into the fields axios expects
    const proxyUrl = new URL(proxyList[Math.floor(Math.random() * proxyList.length)]);
    axiosOptions.proxy = {
      protocol: proxyUrl.protocol.replace(":", ""),
      host: proxyUrl.hostname,
      port: Number(proxyUrl.port),
    };
  }

  try {
    const response = await axios.get(bingURL, axiosOptions);
    const $ = cheerio.load(response.data);

    let results = [];
    $("li.b_algo").each((index, element) => {
      const link = $(element).find("a").attr("href");
      if (link) results.push(link);
    });

    if (extraUrl) {
      return [...results.slice(1, 3), extraUrl]; // 2 Bing links (skipping the first result) + 1 custom URL
    } else {
      return results.slice(1, 4); // 3 Bing links (skipping the first result)
    }
  } catch (error) {
    console.error("Error while scraping Bing:", error.message);
    return [];
  }
}

// Scrape the content of a result page
async function scrapePageContent(url, useProxy) {
  try {
    const userAgent = getRandomUserAgent();
    const axiosOptions = {
      headers: { "User-Agent": userAgent },
    };

    if (useProxy) {
      const proxyUrl = new URL(proxyList[Math.floor(Math.random() * proxyList.length)]);
      axiosOptions.proxy = {
        protocol: proxyUrl.protocol.replace(":", ""),
        host: proxyUrl.hostname,
        port: Number(proxyUrl.port),
      };
    }

    const response = await axios.get(url, axiosOptions);
    const $ = cheerio.load(response.data);

    let title = $("title").text().trim();
    let paragraphs = [];
    $("p").each((index, element) => {
      paragraphs.push($(element).text().trim());
    });

    let content = paragraphs.join(" ");
    let contentSize = Buffer.byteLength(content, "utf-8"); // Size in bytes

    // Recover the site name and favicon
    let siteName = $("meta[property='og:site_name']").attr("content") || new URL(url).hostname;
    let favicon = $("link[rel='icon']").attr("href") || `https://${new URL(url).hostname}/favicon.ico`;
    favicon = new URL(favicon, url).href; // resolve relative favicon paths against the page URL

    return { title, content, contentSize, siteName, favicon, url };
  } catch (error) {
    console.error(`Error while scraping page ${url}:`, error.message);
    return { title: "", content: "", contentSize: 0, siteName: "", favicon: "", url };
  }
}

// API route
app.get("/search", async (req, res) => {
  const { key, proxy, url } = req.query;

  if (!key) {
    return res.status(400).json({ error: "Parameter 'key' is required" });
  }

  const useProxy = proxy === "true";
  const links = await scrapeBing(key, useProxy, url);

  if (links.length === 0) {
    return res.status(500).json({ error: "No links found" });
  }

  let results = {};
  let count = 1;
  for (const link of links) {
    const pageData = await scrapePageContent(link, useProxy);
    results[`SEARCH${count}`] = {
      Name: pageData.siteName,
      Icon: pageData.favicon,
      Link: pageData.url,
      Title: pageData.title,
      Content: pageData.content,
      Size: `${pageData.contentSize} bytes`,
    };
    count++;
  }

  res.json({ query: key, results });
});

// Start the server
app.listen(PORT, () => {
  console.log(`Server running on http://localhost:${PORT}`);
});
--------------------------------------------------------------------------------