112 | {{_('Web Access')}}
--------------------------------------------------------------------------------
/client/img/android-chrome-192x192.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChatTeach/FreeGPT/0df15b67d11251e7fbcbc311dad1b208a13884b3/client/img/android-chrome-192x192.png
--------------------------------------------------------------------------------
/client/img/android-chrome-512x512.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChatTeach/FreeGPT/0df15b67d11251e7fbcbc311dad1b208a13884b3/client/img/android-chrome-512x512.png
--------------------------------------------------------------------------------
/client/img/apple-touch-icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChatTeach/FreeGPT/0df15b67d11251e7fbcbc311dad1b208a13884b3/client/img/apple-touch-icon.png
--------------------------------------------------------------------------------
/client/img/favicon-16x16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChatTeach/FreeGPT/0df15b67d11251e7fbcbc311dad1b208a13884b3/client/img/favicon-16x16.png
--------------------------------------------------------------------------------
/client/img/favicon-32x32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChatTeach/FreeGPT/0df15b67d11251e7fbcbc311dad1b208a13884b3/client/img/favicon-32x32.png
--------------------------------------------------------------------------------
/client/img/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChatTeach/FreeGPT/0df15b67d11251e7fbcbc311dad1b208a13884b3/client/img/favicon.ico
--------------------------------------------------------------------------------
/client/img/gpt.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChatTeach/FreeGPT/0df15b67d11251e7fbcbc311dad1b208a13884b3/client/img/gpt.png
--------------------------------------------------------------------------------
/client/img/site.webmanifest:
--------------------------------------------------------------------------------
1 | {
2 | "name": "",
3 | "short_name": "",
4 | "icons": [
5 | {
6 | "src": "/assets/img/android-chrome-192x192.png",
7 | "sizes": "192x192",
8 | "type": "image/png"
9 | },
10 | {
11 | "src": "/assets/img/android-chrome-512x512.png",
12 | "sizes": "512x512",
13 | "type": "image/png"
14 | }
15 | ],
16 | "theme_color": "#ffffff",
17 | "background_color": "#ffffff",
18 | "display": "standalone"
19 | }
--------------------------------------------------------------------------------
/client/img/user.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChatTeach/FreeGPT/0df15b67d11251e7fbcbc311dad1b208a13884b3/client/img/user.png
--------------------------------------------------------------------------------
/client/js/change-language.js:
--------------------------------------------------------------------------------
1 | document.addEventListener('DOMContentLoaded', fetchLanguages);
2 |
3 | async function fetchLanguages() {
4 | try {
5 | const [languagesResponse, currentLanguageResponse] = await Promise.all([
6 | fetch(`${url_prefix}/get-languages`),
7 | fetch(`${url_prefix}/get-locale`)
8 | ]);
9 |
10 | const languages = await languagesResponse.json();
11 | const currentLanguage = await currentLanguageResponse.text();
12 |
13 | const languageSelect = document.getElementById('language');
14 | languages.forEach(lang => {
15 | const option = document.createElement('option');
16 | option.value = lang;
17 | option.textContent = lang;
18 | languageSelect.appendChild(option);
19 | });
20 |
21 | const savedLanguage = localStorage.getItem("language") || currentLanguage;
22 | setLanguageOnPageLoad(savedLanguage);
23 | } catch (error) {
24 |         console.error("Failed to fetch languages or current language:", error);
25 | }
26 | }
27 |
28 | function setLanguageOnPageLoad(language) {
29 | document.getElementById("language").value = language;
30 | }
31 |
32 | function changeLanguage(lang) {
33 | fetch(`${url_prefix}/change-language`, {
34 | method: "POST",
35 | headers: {
36 | "Content-Type": "application/json",
37 | },
38 | body: JSON.stringify({ language: lang }),
39 | }).then((response) => {
40 | if (response.ok) {
41 | localStorage.setItem("language", lang);
42 | location.reload();
43 | } else {
44 | console.error("Failed to change language");
45 | }
46 | });
47 | }
48 |
--------------------------------------------------------------------------------
/client/js/highlightjs-copy.min.js:
--------------------------------------------------------------------------------
1 | class CopyButtonPlugin{constructor(options={}){self.hook=options.hook;self.callback=options.callback}"after:highlightElement"({el,text}){let button=Object.assign(document.createElement("button"),{innerHTML:"Copy",className:"hljs-copy-button"});button.dataset.copied=false;el.parentElement.classList.add("hljs-copy-wrapper");el.parentElement.appendChild(button);el.parentElement.style.setProperty("--hljs-theme-background",window.getComputedStyle(el).backgroundColor);button.onclick=function(){if(!navigator.clipboard)return;let newText=text;if(hook&&typeof hook==="function"){newText=hook(text,el)||text}navigator.clipboard.writeText(newText).then(function(){button.innerHTML="Copied!";button.dataset.copied=true;let alert=Object.assign(document.createElement("div"),{role:"status",className:"hljs-copy-alert",innerHTML:"Copied to clipboard"});el.parentElement.appendChild(alert);setTimeout(()=>{button.innerHTML="Copy";button.dataset.copied=false;el.parentElement.removeChild(alert);alert=null},2e3)}).then(function(){if(typeof callback==="function")return callback(newText,el)})}}}
--------------------------------------------------------------------------------
/client/js/sidebar-toggler.js:
--------------------------------------------------------------------------------
1 | const sidebar = document.querySelector(".sidebar");
2 | const menuButton = document.querySelector(".menu-button");
3 |
4 | function toggleSidebar(event) {
5 | if (sidebar.classList.contains("shown")) {
6 | hideSidebar(event.target);
7 | } else {
8 | showSidebar(event.target);
9 | }
10 | window.scrollTo(0, 0);
11 | }
12 |
13 | function showSidebar(target) {
14 | sidebar.classList.add("shown");
15 | target.classList.add("rotated");
16 | document.body.style.overflow = "hidden";
17 | }
18 |
19 | function hideSidebar(target) {
20 | sidebar.classList.remove("shown");
21 | target.classList.remove("rotated");
22 | document.body.style.overflow = "auto";
23 | }
24 |
25 | menuButton.addEventListener("click", toggleSidebar);
26 |
27 | document.body.addEventListener('click', function(event) {
28 | if (event.target.matches('.conversation-title')) {
29 | const menuButtonStyle = window.getComputedStyle(menuButton);
30 | if (menuButtonStyle.display !== 'none') {
31 | hideSidebar(menuButton);
32 | }
33 | }
34 | });
35 |
--------------------------------------------------------------------------------
/client/js/theme-toggler.js:
--------------------------------------------------------------------------------
1 | var switch_theme_toggler = document.getElementById("theme-toggler");
2 |
3 | switch_theme_toggler.addEventListener("change", toggleTheme);
4 |
5 | function setTheme(themeName) {
6 | localStorage.setItem("theme", themeName);
7 | document.documentElement.className = themeName;
8 | }
9 |
10 | function toggleTheme() {
11 | var currentTheme = localStorage.getItem("theme");
12 | var newTheme = currentTheme === "theme-dark" ? "theme-light" : "theme-dark";
13 |
14 | setTheme(newTheme);
15 | switch_theme_toggler.checked = newTheme === "theme-dark";
16 | }
17 |
18 | (function () {
19 | var currentTheme = localStorage.getItem("theme") || "theme-dark";
20 | setTheme(currentTheme);
21 | switch_theme_toggler.checked = currentTheme === "theme-dark";
22 | })();
23 |
--------------------------------------------------------------------------------
/config.json:
--------------------------------------------------------------------------------
1 | {
2 | "site_config": {
3 | "host": "0.0.0.0",
4 | "port": 1338,
5 | "debug": false
6 | },
7 | "url_prefix": ""
8 | }
9 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.5'
2 |
3 | services:
4 | freegpt-webui:
5 | image: freegpt-webui
6 | container_name: freegpt-webui
7 | build:
8 | context: .
9 | dockerfile: Dockerfile
10 | ports:
11 | - "1338:1338"
12 |
--------------------------------------------------------------------------------
/g4f/Provider/Provider.py:
--------------------------------------------------------------------------------
1 | import os
2 | from ..typing import sha256, Dict, get_type_hints
3 |
4 | url = None
5 | model = None
6 | supports_stream = False
7 | needs_auth = False
8 |
9 |
10 | def _create_completion(model: str, messages: list, stream: bool, **kwargs):
11 | return
12 |
13 |
14 | params = f'g4f.Providers.{os.path.basename(__file__)[:-3]} supports: ' + \
15 | '(%s)' % ', '.join(
16 | [f"{name}: {get_type_hints(_create_completion)[name].__name__}" for name in _create_completion.__code__.co_varnames[:_create_completion.__code__.co_argcount]])
17 |
--------------------------------------------------------------------------------
/g4f/Provider/Providers/AiService.py:
--------------------------------------------------------------------------------
1 | import os
2 | import requests
3 | from ...typing import get_type_hints
4 |
5 | url = "https://aiservice.vercel.app/api/chat/answer"
6 | model = ['gpt-3.5-turbo']
7 | supports_stream = False
8 | needs_auth = False
9 |
10 |
11 | def _create_completion(model: str, messages: list, stream: bool, **kwargs):
12 | base = ''
13 | for message in messages:
14 | base += '%s: %s\n' % (message['role'], message['content'])
15 | base += 'assistant:'
16 |
17 | headers = {
18 | "accept": "*/*",
19 | "content-type": "text/plain;charset=UTF-8",
20 | "sec-fetch-dest": "empty",
21 | "sec-fetch-mode": "cors",
22 | "sec-fetch-site": "same-origin",
23 | "Referer": "https://aiservice.vercel.app/chat",
24 | }
25 | data = {
26 | "input": base
27 | }
28 | response = requests.post(url, headers=headers, json=data)
29 | if response.status_code == 200:
30 | _json = response.json()
31 | yield _json['data']
32 | else:
33 |         print(f"Error occurred: {response.status_code}")
34 | return None
35 |
36 |
37 |
38 | params = f'g4f.Providers.{os.path.basename(__file__)[:-3]} supports: ' + \
39 | '(%s)' % ', '.join(
40 | [f"{name}: {get_type_hints(_create_completion)[name].__name__}" for name in _create_completion.__code__.co_varnames[:_create_completion.__code__.co_argcount]])
--------------------------------------------------------------------------------
/g4f/Provider/Providers/Aichat.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import os
3 | import json
4 | from ...typing import sha256, Dict, get_type_hints
5 |
6 | url = 'https://hteyun.com'
7 | model = ['gpt-3.5-turbo', 'gpt-3.5-turbo-16k', 'gpt-3.5-turbo-16k-0613', 'gpt-3.5-turbo-0613']
8 | supports_stream = True
9 | needs_auth = False
10 |
11 | def _create_completion(model: str, messages: list, stream: bool, temperature: float = 0.7, **kwargs):
12 | headers = {
13 | 'Content-Type': 'application/json',
14 | }
15 | data = {
16 | 'model': model,
17 |         'temperature': temperature,
18 | 'presence_penalty': 0,
19 | 'messages': messages,
20 | }
21 | response = requests.post(url + '/api/chat-stream',
22 | json=data, stream=True)
23 |
24 | if stream:
25 | for chunk in response.iter_content(chunk_size=None):
26 | chunk = chunk.decode('utf-8')
27 | if chunk.strip():
28 | message = json.loads(chunk)['choices'][0]['message']['content']
29 | yield message
30 | else:
31 | message = response.json()['choices'][0]['message']['content']
32 | yield message
33 |
34 | params = f'g4f.Providers.{os.path.basename(__file__)[:-3]} supports: ' + \
35 | '(%s)' % ', '.join([f"{name}: {get_type_hints(_create_completion)[name].__name__}" for name in _create_completion.__code__.co_varnames[:_create_completion.__code__.co_argcount]])
--------------------------------------------------------------------------------
/g4f/Provider/Providers/Ails.py:
--------------------------------------------------------------------------------
1 | import os
2 | import time
3 | import json
4 | import uuid
5 | import hashlib
6 | import requests
7 |
8 | from ...typing import sha256, Dict, get_type_hints
9 | from datetime import datetime
10 |
11 | url: str = 'https://ai.ls'
12 | model: str = 'gpt-3.5-turbo'
13 | supports_stream = True
14 | needs_auth = False
15 | working = True
16 |
17 |
18 | class Utils:
19 | def hash(json_data: Dict[str, str]) -> sha256:
20 |
21 | base_string: str = '%s:%s:%s:%s' % (
22 | json_data['t'],
23 | json_data['m'],
24 | 'WI,2rU#_r:r~aF4aJ36[.Z(/8Rv93Rf',
25 | len(json_data['m'])
26 | )
27 |
28 | return hashlib.sha256(base_string.encode()).hexdigest()
29 |
30 | def format_timestamp(timestamp: int) -> str:
31 |
32 | e = timestamp
33 | n = e % 10
34 | r = n + 1 if n % 2 == 0 else n
35 | return str(e - n + r)
36 |
37 |
38 | def _create_completion(model: str, messages: list, temperature: float = 0.6, stream: bool = False, **kwargs):
39 |
40 | headers = {
41 | 'authority': 'api.caipacity.com',
42 | 'accept': '*/*',
43 | 'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
44 | 'authorization': 'Bearer free',
45 | 'client-id': str(uuid.uuid4()),
46 | 'client-v': '0.1.249',
47 | 'content-type': 'application/json',
48 | 'origin': 'https://ai.ls',
49 | 'referer': 'https://ai.ls/',
50 | 'sec-ch-ua': '"Not.A/Brand";v="8", "Chromium";v="114", "Google Chrome";v="114"',
51 | 'sec-ch-ua-mobile': '?0',
52 | 'sec-ch-ua-platform': '"Windows"',
53 | 'sec-fetch-dest': 'empty',
54 | 'sec-fetch-mode': 'cors',
55 | 'sec-fetch-site': 'cross-site',
56 | 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36',
57 | }
58 |
59 | timestamp = Utils.format_timestamp(int(time.time() * 1000))
60 |
61 | sig = {
62 | 'd': datetime.now().strftime('%Y-%m-%d'),
63 | 't': timestamp,
64 | 's': Utils.hash({
65 | 't': timestamp,
66 | 'm': messages[-1]['content']})}
67 |
68 | json_data = json.dumps(separators=(',', ':'), obj={
69 | 'model': 'gpt-3.5-turbo',
70 | 'temperature': 0.6,
71 | 'stream': True,
72 | 'messages': messages} | sig)
73 |
74 | response = requests.post('https://api.caipacity.com/v1/chat/completions',
75 | headers=headers, data=json_data, stream=True)
76 |
77 | for token in response.iter_lines():
78 | if b'content' in token:
79 | completion_chunk = json.loads(token.decode().replace('data: ', ''))
80 | token = completion_chunk['choices'][0]['delta'].get('content')
81 |             if token is not None:
82 | yield token
83 |
84 |
85 | params = f'g4f.Providers.{os.path.basename(__file__)[:-3]} supports: ' + \
86 | '(%s)' % ', '.join(
87 | [f"{name}: {get_type_hints(_create_completion)[name].__name__}" for name in _create_completion.__code__.co_varnames[:_create_completion.__code__.co_argcount]])
88 |
--------------------------------------------------------------------------------
/g4f/Provider/Providers/Bard.py:
--------------------------------------------------------------------------------
1 | import os, requests, json, browser_cookie3, re, random
2 | from ...typing import sha256, Dict, get_type_hints
3 |
4 | url = 'https://bard.google.com'
5 | model = ['Palm2']
6 | supports_stream = False
7 | needs_auth = True
8 |
9 | def _create_completion(model: str, messages: list, stream: bool, **kwargs):
10 | psid = {cookie.name: cookie.value for cookie in browser_cookie3.chrome(
11 | domain_name='.google.com')}['__Secure-1PSID']
12 |
13 | formatted = '\n'.join([
14 | '%s: %s' % (message['role'], message['content']) for message in messages
15 | ])
16 | prompt = f'{formatted}\nAssistant:'
17 |
18 | proxy = kwargs.get('proxy', False)
19 |     if not proxy:
20 |         print('Warning: no proxy was provided. Google Bard is unavailable in many countries, so this request may fail.')
21 |
22 | snlm0e = None
23 | conversation_id = None
24 | response_id = None
25 | choice_id = None
26 |
27 | client = requests.Session()
28 | client.proxies = {
29 | 'http': f'http://{proxy}',
30 | 'https': f'http://{proxy}'} if proxy else None
31 |
32 | client.headers = {
33 | 'authority': 'bard.google.com',
34 | 'content-type': 'application/x-www-form-urlencoded;charset=UTF-8',
35 | 'origin': 'https://bard.google.com',
36 | 'referer': 'https://bard.google.com/',
37 | 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36',
38 | 'x-same-domain': '1',
39 | 'cookie': f'__Secure-1PSID={psid}'
40 | }
41 |
42 | snlm0e = re.search(r'SNlM0e\":\"(.*?)\"',
43 | client.get('https://bard.google.com/').text).group(1) if not snlm0e else snlm0e
44 |
45 | params = {
46 | 'bl': 'boq_assistant-bard-web-server_20230326.21_p0',
47 | '_reqid': random.randint(1111, 9999),
48 | 'rt': 'c'
49 | }
50 |
51 | data = {
52 | 'at': snlm0e,
53 | 'f.req': json.dumps([None, json.dumps([[prompt], None, [conversation_id, response_id, choice_id]])])}
54 |
55 | intents = '.'.join([
56 | 'assistant',
57 | 'lamda',
58 | 'BardFrontendService'
59 | ])
60 |
61 | response = client.post(f'https://bard.google.com/_/BardChatUi/data/{intents}/StreamGenerate',
62 | data=data, params=params)
63 |
64 | chat_data = json.loads(response.content.splitlines()[3])[0][2]
65 | if chat_data:
66 | json_chat_data = json.loads(chat_data)
67 |
68 | yield json_chat_data[0][0]
69 |
70 | else:
71 | yield 'error'
72 |
73 | params = f'g4f.Providers.{os.path.basename(__file__)[:-3]} supports: ' + \
74 | '(%s)' % ', '.join([f"{name}: {get_type_hints(_create_completion)[name].__name__}" for name in _create_completion.__code__.co_varnames[:_create_completion.__code__.co_argcount]])
--------------------------------------------------------------------------------
/g4f/Provider/Providers/Better.py:
--------------------------------------------------------------------------------
1 | import os
2 | import json
3 | import requests
4 | from typing import Dict, get_type_hints
5 |
6 | url = 'https://openai-proxy-api.vercel.app/v1/'
7 | model = [
8 | 'gpt-3.5-turbo',
9 | 'gpt-3.5-turbo-0613',
10 | 'gpt-3.5-turbo-16k',
11 | 'gpt-3.5-turbo-16k-0613',
12 | 'gpt-4',
13 | ]
14 |
15 | supports_stream = True
16 | needs_auth = False
17 |
18 |
19 | def _create_completion(model: str, messages: list, stream: bool, **kwargs):
20 | headers = {
21 | 'Content-Type': 'application/json',
22 | 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36 Edg/114.0.1823.58',
23 | 'Referer': 'https://chat.ylokh.xyz/',
24 | 'Origin': 'https://chat.ylokh.xyz',
25 | 'Connection': 'keep-alive',
26 | }
27 |
28 | json_data = {
29 | 'messages': messages,
30 | 'temperature': 1.0,
31 | 'model': model,
32 | 'stream': stream,
33 | }
34 |
35 | response = requests.post(
36 | 'https://openai-proxy-api.vercel.app/v1/chat/completions', headers=headers, json=json_data, stream=True
37 | )
38 |
39 | for token in response.iter_lines():
40 | decoded = token.decode('utf-8')
41 | if decoded.startswith('data: '):
42 | data_str = decoded.replace('data: ', '')
43 | data = json.loads(data_str)
44 | if 'choices' in data and 'delta' in data['choices'][0]:
45 | delta = data['choices'][0]['delta']
46 | content = delta.get('content', '')
47 | finish_reason = delta.get('finish_reason', '')
48 |
49 | if finish_reason == 'stop':
50 | break
51 | if content:
52 | yield content
53 |
54 |
55 | params = f'g4f.Providers.{os.path.basename(__file__)[:-3]} supports: ' + '(%s)' % ', '.join(
56 | [f"{name}: {get_type_hints(_create_completion)[name].__name__}" for name in _create_completion.__code__.co_varnames[:_create_completion.__code__.co_argcount]])
57 |
--------------------------------------------------------------------------------
/g4f/Provider/Providers/ChatFree.py:
--------------------------------------------------------------------------------
1 | import os, requests
2 | from ...typing import sha256, Dict, get_type_hints
3 | import json
4 |
5 | url = "https://v.chatfree.cc"
6 | model = ['gpt-3.5-turbo', 'gpt-3.5-turbo-16k']
7 | supports_stream = False
8 | needs_auth = False
9 |
10 |
11 | def _create_completion(model: str, messages: list, stream: bool, **kwargs):
12 | headers = {
13 | 'authority': 'chat.dfehub.com',
14 | 'accept': '*/*',
15 | 'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
16 | 'content-type': 'application/json',
17 | 'origin': 'https://v.chatfree.cc',
18 | 'referer': 'https://v.chatfree.cc/',
19 | 'sec-ch-ua': '"Not.A/Brand";v="8", "Chromium";v="114", "Google Chrome";v="114"',
20 | 'sec-ch-ua-mobile': '?0',
21 | 'sec-ch-ua-platform': '"macOS"',
22 | 'sec-fetch-dest': 'empty',
23 | 'sec-fetch-mode': 'cors',
24 | 'sec-fetch-site': 'same-origin',
25 | 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36',
26 | 'x-requested-with': 'XMLHttpRequest',
27 | }
28 |
29 | json_data = {
30 | 'messages': messages,
31 | 'stream': True,
32 | 'model': model,
33 | 'temperature': 0.5,
34 | 'presence_penalty': 0,
35 | 'frequency_penalty': 0,
36 | 'top_p': 1,
37 | }
38 |
39 | response = requests.post('https://v.chatfree.cc/api/openai/v1/chat/completions',
40 | headers=headers, json=json_data)
41 |
42 | for chunk in response.iter_lines():
43 | if b'content' in chunk:
44 | data = json.loads(chunk.decode().split('data: ')[1])
45 | yield (data['choices'][0]['delta']['content'])
46 |
47 | params = f'g4f.Providers.{os.path.basename(__file__)[:-3]} supports: ' + \
48 | '(%s)' % ', '.join([f"{name}: {get_type_hints(_create_completion)[name].__name__}" for name in _create_completion.__code__.co_varnames[:_create_completion.__code__.co_argcount]])
--------------------------------------------------------------------------------
/g4f/Provider/Providers/ChatgptAi.py:
--------------------------------------------------------------------------------
1 | import os
2 | import requests, re
3 | from ...typing import sha256, Dict, get_type_hints
4 |
5 | url = 'https://chatgpt.ai/gpt-4/'
6 | model = ['gpt-4']
7 | supports_stream = True
8 | needs_auth = False
9 |
10 |
11 | def _create_completion(model: str, messages: list, stream: bool, **kwargs):
12 | chat = ''
13 | for message in messages:
14 | chat += '%s: %s\n' % (message['role'], message['content'])
15 | chat += 'assistant: '
16 |
17 | response = requests.get('https://chatgpt.ai/')
18 | nonce, post_id, _, bot_id = re.findall(r'data-nonce="(.*)"\n data-post-id="(.*)"\n data-url="(.*)"\n data-bot-id="(.*)"\n data-width', response.text)[0]
19 |
20 | headers = {
21 | 'authority': 'chatgpt.ai',
22 | 'accept': '*/*',
23 | 'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
24 | 'cache-control': 'no-cache',
25 | 'origin': 'https://chatgpt.ai',
26 | 'pragma': 'no-cache',
27 | 'referer': 'https://chatgpt.ai/gpt-4/',
28 | 'sec-ch-ua': '"Not.A/Brand";v="8", "Chromium";v="114", "Google Chrome";v="114"',
29 | 'sec-ch-ua-mobile': '?0',
30 | 'sec-ch-ua-platform': '"Windows"',
31 | 'sec-fetch-dest': 'empty',
32 | 'sec-fetch-mode': 'cors',
33 | 'sec-fetch-site': 'same-origin',
34 | 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36',
35 | }
36 | data = {
37 | '_wpnonce': nonce,
38 | 'post_id': post_id,
39 | 'url': 'https://chatgpt.ai/gpt-4',
40 | 'action': 'wpaicg_chat_shortcode_message',
41 | 'message': chat,
42 | 'bot_id': bot_id
43 | }
44 |
45 | response = requests.post('https://chatgpt.ai/wp-admin/admin-ajax.php',
46 | headers=headers, data=data)
47 |
48 | yield (response.json()['data'])
49 |
50 | params = f'g4f.Providers.{os.path.basename(__file__)[:-3]} supports: ' + \
51 | '(%s)' % ', '.join([f"{name}: {get_type_hints(_create_completion)[name].__name__}" for name in _create_completion.__code__.co_varnames[:_create_completion.__code__.co_argcount]])
--------------------------------------------------------------------------------
/g4f/Provider/Providers/ChatgptLogin.py:
--------------------------------------------------------------------------------
1 | import os
2 | from ...typing import sha256, Dict, get_type_hints
3 | import requests
4 | import re
5 | import base64
6 |
7 | url = 'https://chatgptlogin.ac'
8 | model = ['gpt-3.5-turbo']
9 | supports_stream = False
10 | needs_auth = False
11 |
12 |
13 | def _create_completion(model: str, messages: list, stream: bool, **kwargs):
14 | def get_nonce():
15 | res = requests.get('https://chatgptlogin.ac/use-chatgpt-free/', headers={
16 | "Referer": "https://chatgptlogin.ac/use-chatgpt-free/",
17 | "User-Agent": 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36'
18 | })
19 |
20 | src = re.search(r'class="mwai-chat mwai-chatgpt">.*