├── data └── Database ├── src ├── main.py ├── fine_tuner.py ├── ModelSaver.py ├── requests.py ├── CommunicationLog.py ├── Communication.py ├── process_input.py ├── Chatflow.py ├── com.py ├── chat.py ├── FlowiseAICommunication.py ├── load_model.py ├── generate_test_data.py ├── nlp │ └── nlp.py ├── callback.py ├── scriptexecutor.py ├── PyPDF2.py ├── DualCoreLLM.py ├── train_model.py ├── FineTuneGPT,py.txt ├── fiileprocessor.py ├── class.py ├── save_preset.py ├── llmmanager.py ├── model_loader.py ├── memorymodule.py ├── classc.py └── neuralgpt.py ├── ProjectFiles ├── ProjectPlan.txt └── requirements.txt ├── NeuralGPT.egg-info ├── top_level.txt ├── dependency_links.txt ├── requires.txt ├── PKG-INFO └── SOURCES.txt ├── Wordpress plugin ├── Nowy Dokument tekstowy.txt ├── js │ ├── Nowy Dokument tekstowy.txt │ └── chatbox_v2.js ├── ajax.php ├── chatbot.php ├── long.odt ├── long.pdf ├── universal.db ├── combinepdf.pdf ├── wordpress1.pdf ├── wordpress2.pdf ├── dualcorellm.pdf ├── build │ ├── neuralgpt_chatbot │ │ ├── a.pdf │ │ ├── a1.pdf │ │ ├── au.pdf │ │ ├── auto.pdf │ │ └── aurt12.pdf │ └── python_script │ │ ├── base_library.zip │ │ └── localpycs │ │ ├── struct.pyc │ │ ├── pyimod03_ctypes.pyc │ │ ├── pyimod01_archive.pyc │ │ ├── pyimod02_importers.pyc │ │ └── pyimod04_pywin32.pyc ├── htmlmarkup.txt ├── package.json ├── upload.html ├── assets │ ├── upload.html │ ├── NeuralGPT Chatbot.html │ ├── neuralgpt-chatbot.js │ └── chatwindow.js ├── generate_response.py ├── perf.py ├── workschedule.py ├── server.js ├── search.json ├── listmodels.py ├── logcreation.py ├── submit_input.php ├── train_restapi.js ├── cronjob.py ├── validate_llm_file.php ├── NeuralGPT Chatbot.html ├── neuralgpt-browse.js ├── neuralgpt_chatbot.spec ├── test_chatbox.py ├── shortcode.php ├── chatbox_obj.py ├── notif_sys.py ├── python_script.spec ├── get_feedback.php ├── automate_transfer.py ├── module.txt ├── mlm.py ├── chat-window.js ├── chatbox_v2.js ├── admin.php ├── flowise.css ├── chatflow.py ├── 
load_pretrained.py ├── loadpretrained.py ├── chatwindow.js ├── send_mail.py ├── data_backup.py └── chat_gui.py ├── ind.py ├── neural-big.pdf ├── auto-script1.pdf ├── auto-script2.pdf ├── completepdf.pdf ├── integration1.pdf ├── integration2.pdf ├── integration3.pdf ├── agent-document.pdf ├── agent-document (1).pdf ├── agent-document (2).pdf ├── agent-document (21).pdf ├── agent-document (3).pdf ├── agent-document (4).pdf ├── fine_tuner.py ├── streamlit ├── pages │ └── chat-hub.db ├── requirements.txt └── home.py ├── __pycache__ ├── gui.cpython-311.pyc ├── model.cpython-311.pyc ├── utils.cpython-311.pyc ├── dataset.cpython-311.pyc ├── pinecone.cpython-311.pyc ├── requests.cpython-311.pyc ├── DualCoreLLM.cpython-311.pyc ├── load_model.cpython-311.pyc └── neuralgpt.cpython-311.pyc ├── utils ├── fine_tuner.py ├── Scripting.py ├── InternetAccess.py ├── requests.py ├── CommunicationLog.py ├── MediaPlayer.py ├── Communication.py ├── process_input.py ├── Chatflow.py ├── com.py ├── chat.py ├── FlowiseAICommunication.py ├── FileTransfer.py ├── load_model.py ├── generate_test_data.py ├── callback.py ├── ScriptExecutor.py ├── PyPDF2.py ├── NLPModule.py ├── DualCoreLLM.py ├── train_model.py ├── FineTuneGPT,py.txt ├── FileProcessor.py ├── class.py ├── save_preset.py ├── LLMManager.py ├── Memory.py ├── model_loader.py ├── MemoryModule.py ├── classc.py └── neuralgpt.py ├── code ├── __pycache__ │ ├── models.cpython-311.pyc │ └── utils.cpython-311.pyc ├── DatabaseModule.py ├── ScriptExecutor.py ├── main.py └── utils.py ├── Chat-center ├── requirements.txt ├── instructions.txt ├── index.html └── client.js ├── agent_scripts ├── __pycache__ │ └── database.cpython-311.pyc ├── database.py ├── script_executor.py └── main.py ├── requirements.txt ├── Scripting.py ├── auto ├── task1.py ├── markdown.sh ├── saveastxt.py ├── saveashtml.py ├── task3.sh └── task2.java ├── nlp ├── Scripting.py ├── DualCoreLLM.py ├── MediaPlayer.py ├── tools.py ├── MachineLearning.py ├── DocumentEditor.py ├── 
FileTransfer.py ├── NLPModule.py └── Memory.py ├── neuralgod.py ├── ModelSaver.py ├── requests.py ├── CommunicationLog.py ├── MediaPlayer.py ├── vord2.py ├── setup.py ├── tools.py ├── Communication.py ├── pinecon.py ├── MachineLearning.py ├── model.py ├── process_input.py ├── agent_script.py ├── ma.py ├── responses.json ├── pine.py ├── Chatflow.py ├── com.py ├── chat.py ├── FlowiseAICommunication.py ├── extract_text.py ├── FileTransfer.py ├── load_model.py ├── long.py ├── generate_test_data.py ├── sort_files.py ├── geninit.py ├── callback.py ├── ScriptExecutor.py ├── PyPDF2.py ├── nlp.py ├── NLPModule.py ├── DualCoreLLM.py ├── main.py ├── .devcontainer └── devcontainer.json ├── train_model.py ├── FineTuneGPT.py ├── GUI ├── GUILLMmanager.py ├── GUImain.py ├── GUIManager.py ├── file-8rb.file ├── GUIFileViewer.py └── GUIFileViewer.txt ├── FileProcessor.py ├── TEST.py ├── class.py ├── save_preset.py ├── LLMManager.py ├── Memory.py ├── model_loader.py ├── gu.py ├── MemoryModule.py ├── exe.py ├── classc.py ├── dataset.py ├── chatboxx.py ├── neuralgpt.py ├── README.md └── DocumentEditor.py /data/Database: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/main.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /ProjectFiles/ProjectPlan.txt: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /NeuralGPT.egg-info/top_level.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /Wordpress plugin/Nowy Dokument tekstowy.txt: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /NeuralGPT.egg-info/dependency_links.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /Wordpress plugin/js/Nowy Dokument tekstowy.txt: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /ind.py: -------------------------------------------------------------------------------- 1 | import pinecone 2 | pinecone.describe_index("neuralai") -------------------------------------------------------------------------------- /Wordpress plugin/ajax.php: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /Wordpress plugin/chatbot.php: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /NeuralGPT.egg-info/requires.txt: -------------------------------------------------------------------------------- 1 | numpy 2 | torch 3 | transformers 4 | pytest 5 | -------------------------------------------------------------------------------- /neural-big.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/neural-big.pdf -------------------------------------------------------------------------------- /auto-script1.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/auto-script1.pdf -------------------------------------------------------------------------------- /auto-script2.pdf: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/auto-script2.pdf -------------------------------------------------------------------------------- /completepdf.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/completepdf.pdf -------------------------------------------------------------------------------- /integration1.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/integration1.pdf -------------------------------------------------------------------------------- /integration2.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/integration2.pdf -------------------------------------------------------------------------------- /integration3.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/integration3.pdf -------------------------------------------------------------------------------- /agent-document.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/agent-document.pdf -------------------------------------------------------------------------------- /agent-document (1).pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/agent-document (1).pdf -------------------------------------------------------------------------------- /agent-document (2).pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/agent-document (2).pdf 
-------------------------------------------------------------------------------- /agent-document (21).pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/agent-document (21).pdf -------------------------------------------------------------------------------- /agent-document (3).pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/agent-document (3).pdf -------------------------------------------------------------------------------- /agent-document (4).pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/agent-document (4).pdf -------------------------------------------------------------------------------- /Wordpress plugin/long.odt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/Wordpress plugin/long.odt -------------------------------------------------------------------------------- /Wordpress plugin/long.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/Wordpress plugin/long.pdf -------------------------------------------------------------------------------- /fine_tuner.py: -------------------------------------------------------------------------------- 1 | fine_tuner = FineTuneGPT('pretrained_model.bin', 'new_dataset.txt') 2 | fine_tuner.fine_tune_model() -------------------------------------------------------------------------------- /Wordpress plugin/universal.db: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/Wordpress plugin/universal.db 
-------------------------------------------------------------------------------- /src/fine_tuner.py: -------------------------------------------------------------------------------- 1 | fine_tuner = FineTuneGPT('pretrained_model.bin', 'new_dataset.txt') 2 | fine_tuner.fine_tune_model() -------------------------------------------------------------------------------- /streamlit/pages/chat-hub.db: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/streamlit/pages/chat-hub.db -------------------------------------------------------------------------------- /Wordpress plugin/combinepdf.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/Wordpress plugin/combinepdf.pdf -------------------------------------------------------------------------------- /Wordpress plugin/wordpress1.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/Wordpress plugin/wordpress1.pdf -------------------------------------------------------------------------------- /Wordpress plugin/wordpress2.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/Wordpress plugin/wordpress2.pdf -------------------------------------------------------------------------------- /__pycache__/gui.cpython-311.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/__pycache__/gui.cpython-311.pyc -------------------------------------------------------------------------------- /utils/fine_tuner.py: -------------------------------------------------------------------------------- 1 | fine_tuner = FineTuneGPT('pretrained_model.bin', 'new_dataset.txt') 2 | 
fine_tuner.fine_tune_model() -------------------------------------------------------------------------------- /Wordpress plugin/dualcorellm.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/Wordpress plugin/dualcorellm.pdf -------------------------------------------------------------------------------- /__pycache__/model.cpython-311.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/__pycache__/model.cpython-311.pyc -------------------------------------------------------------------------------- /__pycache__/utils.cpython-311.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/__pycache__/utils.cpython-311.pyc -------------------------------------------------------------------------------- /__pycache__/dataset.cpython-311.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/__pycache__/dataset.cpython-311.pyc -------------------------------------------------------------------------------- /__pycache__/pinecone.cpython-311.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/__pycache__/pinecone.cpython-311.pyc -------------------------------------------------------------------------------- /__pycache__/requests.cpython-311.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/__pycache__/requests.cpython-311.pyc -------------------------------------------------------------------------------- /__pycache__/DualCoreLLM.cpython-311.pyc: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/__pycache__/DualCoreLLM.cpython-311.pyc -------------------------------------------------------------------------------- /__pycache__/load_model.cpython-311.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/__pycache__/load_model.cpython-311.pyc -------------------------------------------------------------------------------- /__pycache__/neuralgpt.cpython-311.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/__pycache__/neuralgpt.cpython-311.pyc -------------------------------------------------------------------------------- /code/__pycache__/models.cpython-311.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/code/__pycache__/models.cpython-311.pyc -------------------------------------------------------------------------------- /code/__pycache__/utils.cpython-311.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/code/__pycache__/utils.cpython-311.pyc -------------------------------------------------------------------------------- /Chat-center/requirements.txt: -------------------------------------------------------------------------------- 1 | gradio==3.25 2 | openai==0.27 3 | langchain==0.0.139 4 | google-api-python-client 5 | requests 6 | transformers -------------------------------------------------------------------------------- /Wordpress plugin/build/neuralgpt_chatbot/a.pdf: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/Wordpress plugin/build/neuralgpt_chatbot/a.pdf -------------------------------------------------------------------------------- /Wordpress plugin/build/neuralgpt_chatbot/a1.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/Wordpress plugin/build/neuralgpt_chatbot/a1.pdf -------------------------------------------------------------------------------- /Wordpress plugin/build/neuralgpt_chatbot/au.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/Wordpress plugin/build/neuralgpt_chatbot/au.pdf -------------------------------------------------------------------------------- /Wordpress plugin/build/neuralgpt_chatbot/auto.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/Wordpress plugin/build/neuralgpt_chatbot/auto.pdf -------------------------------------------------------------------------------- /Wordpress plugin/build/neuralgpt_chatbot/aurt12.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/Wordpress plugin/build/neuralgpt_chatbot/aurt12.pdf -------------------------------------------------------------------------------- /agent_scripts/__pycache__/database.cpython-311.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/agent_scripts/__pycache__/database.cpython-311.pyc -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | gradio 2 | requests 3 | datetime 4 | websockets 5 | websocket 6 | 
asyncio 7 | bs4 8 | pysimplegui 9 | g4f 10 | gpt4free 11 | -------------------------------------------------------------------------------- /streamlit/requirements.txt: -------------------------------------------------------------------------------- 1 | gradio 2 | requests 3 | datetime 4 | websockets 5 | websocket 6 | asyncio 7 | bs4 8 | pysimplegui 9 | g4f 10 | gpt4free -------------------------------------------------------------------------------- /Wordpress plugin/build/python_script/base_library.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/Wordpress plugin/build/python_script/base_library.zip -------------------------------------------------------------------------------- /Wordpress plugin/build/python_script/localpycs/struct.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/Wordpress plugin/build/python_script/localpycs/struct.pyc -------------------------------------------------------------------------------- /Scripting.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | 3 | class Scripting: 4 | def __init__(self): 5 | pass 6 | 7 | def execute_script(self, script_path): 8 | subprocess.run(script_path) -------------------------------------------------------------------------------- /Wordpress plugin/build/python_script/localpycs/pyimod03_ctypes.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/Wordpress plugin/build/python_script/localpycs/pyimod03_ctypes.pyc -------------------------------------------------------------------------------- /auto/task1.py: -------------------------------------------------------------------------------- 1 | import shutil 2 | import os 3 | 4 | pdf_path = 
"C:/path/to/pdf/file.pdf" 5 | destination_folder = "E:/AI/NeuralGPT/NeuralGPT" 6 | 7 | shutil.copy(pdf_path, destination_folder) -------------------------------------------------------------------------------- /Wordpress plugin/build/python_script/localpycs/pyimod01_archive.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/Wordpress plugin/build/python_script/localpycs/pyimod01_archive.pyc -------------------------------------------------------------------------------- /Wordpress plugin/build/python_script/localpycs/pyimod02_importers.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/Wordpress plugin/build/python_script/localpycs/pyimod02_importers.pyc -------------------------------------------------------------------------------- /Wordpress plugin/build/python_script/localpycs/pyimod04_pywin32.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CognitiveCodes/NeuralGPT/HEAD/Wordpress plugin/build/python_script/localpycs/pyimod04_pywin32.pyc -------------------------------------------------------------------------------- /nlp/Scripting.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | 3 | class Scripting: 4 | def __init__(self): 5 | pass 6 | 7 | def execute_script(self, script_path): 8 | subprocess.run(script_path) -------------------------------------------------------------------------------- /utils/Scripting.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | 3 | class Scripting: 4 | def __init__(self): 5 | pass 6 | 7 | def execute_script(self, script_path): 8 | subprocess.run(script_path) -------------------------------------------------------------------------------- 
/NeuralGPT.egg-info/PKG-INFO: -------------------------------------------------------------------------------- 1 | Metadata-Version: 2.1 2 | Name: NeuralGPT 3 | Version: 0.1 4 | Summary: A project for neural GPT 5 | Author: B staszewski 6 | Author-email: bstaszewski1984@gmail.com 7 | License-File: LICENSE 8 | -------------------------------------------------------------------------------- /NeuralGPT.egg-info/SOURCES.txt: -------------------------------------------------------------------------------- 1 | LICENSE 2 | README.md 3 | setup.py 4 | NeuralGPT.egg-info/PKG-INFO 5 | NeuralGPT.egg-info/SOURCES.txt 6 | NeuralGPT.egg-info/dependency_links.txt 7 | NeuralGPT.egg-info/requires.txt 8 | NeuralGPT.egg-info/top_level.txt -------------------------------------------------------------------------------- /ProjectFiles/requirements.txt: -------------------------------------------------------------------------------- 1 | asyncio 2 | g4f 3 | openai 4 | requests 5 | datetime 6 | sqlite3 7 | websockets 8 | json 9 | anthropic 10 | streamlit 11 | fireworks-client 12 | PyCharacterAI 13 | langchain 14 | chromadb 15 | pdfplumber 16 | PySimpleGUI -------------------------------------------------------------------------------- /neuralgod.py: -------------------------------------------------------------------------------- 1 | import requests 2 | 3 | API_URL = "http://localhost:3000/api/v1/prediction/f20a3a35-7d11-445d-a484-1d993a319ebf" 4 | 5 | def query(payload): 6 | response = requests.post(API_URL, json=payload) 7 | return response.json() 8 | 9 | output = query({ 10 | "question": "Hey, how are you?", 11 | }) -------------------------------------------------------------------------------- /nlp/DualCoreLLM.py: -------------------------------------------------------------------------------- 1 | class DualCoreLLM: 2 | def __init__(self, logical_LLM, direct_LLM): 3 | self.logical_LLM = logical_LLM 4 | self.direct_LLM = direct_LLM 5 | 6 | def think(self, input_data): 7 | return 
self.logical_LLM.process(input_data) 8 | 9 | def execute(self, input_data): 10 | return self.direct_LLM.process(input_data) -------------------------------------------------------------------------------- /ModelSaver.py: -------------------------------------------------------------------------------- 1 | from neuralgpt import NeuralGPT 2 | from model_saver import ModelSaver 3 | 4 | # Load a pretrained model 5 | model = NeuralGPT.from_pretrained('gpt2') 6 | 7 | # Save the model to a local file 8 | saver = ModelSaver(model) 9 | saver.save_local('my_model.bin') 10 | 11 | # Save the model to an online source 12 | saver.save_online('http://example.com/model') -------------------------------------------------------------------------------- /Wordpress plugin/htmlmarkup.txt: -------------------------------------------------------------------------------- 1 |
{content}
\n") 19 | f.write("\n") 20 | f.write("\n") 21 | 22 | print(f"File saved to {filepath}") -------------------------------------------------------------------------------- /Chatflow.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | class Chatflow: 4 | def __init__(self): 5 | self.logger = logging.getLogger(__name__) 6 | self.logger.setLevel(logging.DEBUG) 7 | self.handler = logging.FileHandler('chatflow.log') 8 | self.handler.setLevel(logging.DEBUG) 9 | self.formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') 10 | self.handler.setFormatter(self.formatter) 11 | self.logger.addHandler(self.handler) 12 | 13 | def run(self): 14 | try: 15 | # code to execute the autonomous scripts 16 | except Exception as e: 17 | self.logger.error(str(e)) 18 | # code to notify the user when an error occurs -------------------------------------------------------------------------------- /src/Chatflow.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | class Chatflow: 4 | def __init__(self): 5 | self.logger = logging.getLogger(__name__) 6 | self.logger.setLevel(logging.DEBUG) 7 | self.handler = logging.FileHandler('chatflow.log') 8 | self.handler.setLevel(logging.DEBUG) 9 | self.formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') 10 | self.handler.setFormatter(self.formatter) 11 | self.logger.addHandler(self.handler) 12 | 13 | def run(self): 14 | try: 15 | # code to execute the autonomous scripts 16 | except Exception as e: 17 | self.logger.error(str(e)) 18 | # code to notify the user when an error occurs -------------------------------------------------------------------------------- /utils/Chatflow.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | class Chatflow: 4 | def __init__(self): 5 | self.logger = logging.getLogger(__name__) 6 | 
self.logger.setLevel(logging.DEBUG) 7 | self.handler = logging.FileHandler('chatflow.log') 8 | self.handler.setLevel(logging.DEBUG) 9 | self.formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') 10 | self.handler.setFormatter(self.formatter) 11 | self.logger.addHandler(self.handler) 12 | 13 | def run(self): 14 | try: 15 | # code to execute the autonomous scripts 16 | except Exception as e: 17 | self.logger.error(str(e)) 18 | # code to notify the user when an error occurs -------------------------------------------------------------------------------- /com.py: -------------------------------------------------------------------------------- 1 | import ssl 2 | import socket 3 | 4 | # Generate public-private key pair for NeuralGPT 5 | neuralgpt_public_key = ... 6 | neuralgpt_private_key = ... 7 | 8 | # Generate public-private key pair for flowiseAI app 9 | flowiseai_public_key = ... 10 | flowiseai_private_key = ... 11 | 12 | # Establish a TLS connection 13 | context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) 14 | context.load_cert_chain(certfile=neuralgpt_private_key, keyfile=neuralgpt_public_key) 15 | context.load_verify_locations(cafile=flowiseai_public_key) 16 | with socket.create_connection(('flowiseai.com', 443)) as sock: 17 | with context.wrap_socket(sock, server_side=False) as ssock: 18 | ssock.sendall(b'Hello, world!') 19 | data = ssock.recv(1024) -------------------------------------------------------------------------------- /src/com.py: -------------------------------------------------------------------------------- 1 | import ssl 2 | import socket 3 | 4 | # Generate public-private key pair for NeuralGPT 5 | neuralgpt_public_key = ... 6 | neuralgpt_private_key = ... 7 | 8 | # Generate public-private key pair for flowiseAI app 9 | flowiseai_public_key = ... 10 | flowiseai_private_key = ... 
11 | 12 | # Establish a TLS connection 13 | context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) 14 | context.load_cert_chain(certfile=neuralgpt_private_key, keyfile=neuralgpt_public_key) 15 | context.load_verify_locations(cafile=flowiseai_public_key) 16 | with socket.create_connection(('flowiseai.com', 443)) as sock: 17 | with context.wrap_socket(sock, server_side=False) as ssock: 18 | ssock.sendall(b'Hello, world!') 19 | data = ssock.recv(1024) -------------------------------------------------------------------------------- /utils/com.py: -------------------------------------------------------------------------------- 1 | import ssl 2 | import socket 3 | 4 | # Generate public-private key pair for NeuralGPT 5 | neuralgpt_public_key = ... 6 | neuralgpt_private_key = ... 7 | 8 | # Generate public-private key pair for flowiseAI app 9 | flowiseai_public_key = ... 10 | flowiseai_private_key = ... 11 | 12 | # Establish a TLS connection 13 | context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) 14 | context.load_cert_chain(certfile=neuralgpt_private_key, keyfile=neuralgpt_public_key) 15 | context.load_verify_locations(cafile=flowiseai_public_key) 16 | with socket.create_connection(('flowiseai.com', 443)) as sock: 17 | with context.wrap_socket(sock, server_side=False) as ssock: 18 | ssock.sendall(b'Hello, world!') 19 | data = ssock.recv(1024) -------------------------------------------------------------------------------- /chat.py: -------------------------------------------------------------------------------- 1 | import requests 2 | from transformers import pipeline 3 | 4 | # Define the chatbot pipeline using the pre-trained NeuralGPT model 5 | chatbot = pipeline("text-generation", model="EleutherAI/gpt-neo-1.3B") 6 | 7 | # Define a function to handle user input and generate chatbot responses 8 | def chat(): 9 | while True: 10 | # Get user input 11 | user_input = input("You: ") 12 | 13 | # Generate chatbot response 14 | try: 15 | chatbot_response = 
chatbot(user_input, max_length=50)[0]["generated_text"] 16 | print("Chatbot:", chatbot_response) 17 | except Exception as e: 18 | print("Error:", e) 19 | 20 | # Call the chat function to start the chatbox 21 | chat() -------------------------------------------------------------------------------- /Wordpress plugin/workschedule.py: -------------------------------------------------------------------------------- 1 | import schedule 2 | import time 3 | 4 | # Define the function that performs the necessary actions 5 | def perform_actions(): 6 | # Code to access local data storage and modify files 7 | # Code to access universal database and achieve data harmonization 8 | 9 | # Define the schedule for the actions to be performed 10 | schedule.every(24).hours.do(perform_actions) # Run every 24 hours 11 | schedule.every().day.at("12:00").do(perform_actions) # Run every day at 12:00 12 | schedule.every().hour.do(perform_actions) # Run every hour 13 | schedule.every(10).minutes.do(perform_actions) # Run every 10 minutes 14 | 15 | # Run the scheduling system 16 | while True: 17 | schedule.run_pending() 18 | time.sleep(1) -------------------------------------------------------------------------------- /src/chat.py: -------------------------------------------------------------------------------- 1 | import requests 2 | from transformers import pipeline 3 | 4 | # Define the chatbot pipeline using the pre-trained NeuralGPT model 5 | chatbot = pipeline("text-generation", model="EleutherAI/gpt-neo-1.3B") 6 | 7 | # Define a function to handle user input and generate chatbot responses 8 | def chat(): 9 | while True: 10 | # Get user input 11 | user_input = input("You: ") 12 | 13 | # Generate chatbot response 14 | try: 15 | chatbot_response = chatbot(user_input, max_length=50)[0]["generated_text"] 16 | print("Chatbot:", chatbot_response) 17 | except Exception as e: 18 | print("Error:", e) 19 | 20 | # Call the chat function to start the chatbox 21 | chat() 
-------------------------------------------------------------------------------- /utils/chat.py: -------------------------------------------------------------------------------- 1 | import requests 2 | from transformers import pipeline 3 | 4 | # Define the chatbot pipeline using the pre-trained NeuralGPT model 5 | chatbot = pipeline("text-generation", model="EleutherAI/gpt-neo-1.3B") 6 | 7 | # Define a function to handle user input and generate chatbot responses 8 | def chat(): 9 | while True: 10 | # Get user input 11 | user_input = input("You: ") 12 | 13 | # Generate chatbot response 14 | try: 15 | chatbot_response = chatbot(user_input, max_length=50)[0]["generated_text"] 16 | print("Chatbot:", chatbot_response) 17 | except Exception as e: 18 | print("Error:", e) 19 | 20 | # Call the chat function to start the chatbox 21 | chat() -------------------------------------------------------------------------------- /FlowiseAICommunication.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import json 3 | 4 | class FlowiseAICommunication: 5 | def __init__(self, url): 6 | self.url = url 7 | 8 | def send_message(self, message): 9 | data = {"message": message} 10 | try: 11 | response = requests.post(self.url, json=data) 12 | return response.json() 13 | except requests.exceptions.RequestException as e: 14 | print(e) 15 | return None 16 | 17 | def receive_message(self): 18 | try: 19 | response = requests.get(self.url) 20 | return response.json()["message"] 21 | except requests.exceptions.RequestException as e: 22 | print(e) 23 | return None -------------------------------------------------------------------------------- /src/FlowiseAICommunication.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import json 3 | 4 | class FlowiseAICommunication: 5 | def __init__(self, url): 6 | self.url = url 7 | 8 | def send_message(self, message): 9 | data = {"message": 
message} 10 | try: 11 | response = requests.post(self.url, json=data) 12 | return response.json() 13 | except requests.exceptions.RequestException as e: 14 | print(e) 15 | return None 16 | 17 | def receive_message(self): 18 | try: 19 | response = requests.get(self.url) 20 | return response.json()["message"] 21 | except requests.exceptions.RequestException as e: 22 | print(e) 23 | return None -------------------------------------------------------------------------------- /utils/FlowiseAICommunication.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import json 3 | 4 | class FlowiseAICommunication: 5 | def __init__(self, url): 6 | self.url = url 7 | 8 | def send_message(self, message): 9 | data = {"message": message} 10 | try: 11 | response = requests.post(self.url, json=data) 12 | return response.json() 13 | except requests.exceptions.RequestException as e: 14 | print(e) 15 | return None 16 | 17 | def receive_message(self): 18 | try: 19 | response = requests.get(self.url) 20 | return response.json()["message"] 21 | except requests.exceptions.RequestException as e: 22 | print(e) 23 | return None -------------------------------------------------------------------------------- /Wordpress plugin/server.js: -------------------------------------------------------------------------------- 1 | const http = require('http'); 2 | const server = http.createServer(); 3 | const io = require('socket.io')(server); 4 | 5 | io.on('connection', (socket) => { 6 | console.log('A user connected'); 7 | 8 | // Handle events from the client 9 | socket.on('chat message', (message) => { 10 | console.log('Received message:', message); 11 | // Process the message and send a response if needed 12 | }); 13 | 14 | // Handle disconnection 15 | socket.on('disconnect', () => { 16 | console.log('A user disconnected'); 17 | }); 18 | }); 19 | 20 | const port = 3001; // Specify the port number for your server 21 | server.listen(port, () => { 22 
| console.log(`Socket.io server listening on port ${port}`); 23 | }); 24 | -------------------------------------------------------------------------------- /code/DatabaseModule.py: -------------------------------------------------------------------------------- 1 | import sqlite3 2 | class DatabaseModule: 3 | def __init__(self, db_name): 4 | self.conn = sqlite3.connect(db_name) 5 | self.cursor = self.conn.cursor() 6 | 7 | def store_data(self, data, table_name): 8 | self.cursor.execute("""CREATE TABLE IF NOT EXISTS """ + table_name + "(id INTEGER PRIMARY KEY AUTOINCREMENT, data TEXT)") 9 | self.conn.commit() 10 | self.cursor.execute("""INSERT INTO """ + table_name + "(data)" , (data,)) 11 | self.conn.commit() 12 | 13 | def retrieve_data(self, query, table_name): 14 | self.cursor.execute("""SELECT data FROM """ + table_name + "(data)" , ("%" + query + "%",)) 15 | data = self.cursor.fetchall() 16 | return data 17 | -------------------------------------------------------------------------------- /agent_scripts/database.py: -------------------------------------------------------------------------------- 1 | import sqlite3 2 | class DatabaseModule: 3 | def __init__(self, db_name): 4 | self.conn = sqlite3.connect(db_name) 5 | self.cursor = self.conn.cursor() 6 | 7 | def store_data(self, data, table_name): 8 | self.cursor.execute("""CREATE TABLE IF NOT EXISTS """ + table_name + "(id INTEGER PRIMARY KEY AUTOINCREMENT, data TEXT)") 9 | self.conn.commit() 10 | self.cursor.execute("""INSERT INTO """ + table_name + "(data)" , (data,)) 11 | self.conn.commit() 12 | 13 | def retrieve_data(self, query, table_name): 14 | self.cursor.execute("""SELECT data FROM """ + table_name + "(data)" , ("%" + query + "%",)) 15 | data = self.cursor.fetchall() 16 | return data 17 | -------------------------------------------------------------------------------- /extract_text.py: -------------------------------------------------------------------------------- 1 | import os 2 | import PyPDF2 3 | 4 | 
def extract_text_from_pdf(pdf_path): 5 | with open(pdf_path, 'rb') as f: 6 | pdf_reader = PyPDF2.PdfFileReader(f) 7 | text = '' 8 | for page in pdf_reader.pages: 9 | text += page.extractText() 10 | return text 11 | 12 | def main(): 13 | directory = 'E:\AI\NeuralGPT\NeuralGPT' 14 | for filename in os.listdir(directory): 15 | if filename.endswith('.pdf'): 16 | pdf_path = os.path.join(directory, filename) 17 | text = extract_text_from_pdf(pdf_path) 18 | txt_path = os.path.splitext(pdf_path)[0] + '.txt' 19 | with open(txt_path, 'w') as f: 20 | f.write(text) 21 | 22 | if __name__ == '__main__': 23 | main() -------------------------------------------------------------------------------- /Wordpress plugin/search.json: -------------------------------------------------------------------------------- 1 | const express = require('express'); 2 | const app = express(); 3 | 4 | // Define the search endpoint 5 | app.get('/api/search', (req, res) => { 6 | // Extract the search query from the query parameter 7 | const query = req.query.q; 8 | 9 | // Perform the search using ElasticSearch 10 | const results = await elasticSearchClient.search({ 11 | index: 'shared_databank', 12 | body: { 13 | query: { 14 | match: { 15 | content: query 16 | } 17 | } 18 | } 19 | }); 20 | 21 | // Return the matching results to the client 22 | res.json(results.hits.hits); 23 | }); 24 | 25 | // Start the server 26 | app.listen(3000, () => { 27 | console.log('Server listening on port 3000'); 28 | }); -------------------------------------------------------------------------------- /FileTransfer.py: -------------------------------------------------------------------------------- 1 | import ftplib 2 | 3 | class FileTransfer: 4 | def __init__(self, ftp_host, ftp_user, ftp_password): 5 | self.ftp_host = ftp_host 6 | self.ftp_user = ftp_user 7 | self.ftp_password = ftp_password 8 | 9 | def upload_file(self, local_file_path, remote_file_path): 10 | with ftplib.FTP(self.ftp_host, self.ftp_user, self.ftp_password) 
as ftp: 11 | with open(local_file_path, 'rb') as f: 12 | ftp.storbinary('STOR ' + remote_file_path, f) 13 | 14 | def download_file(self, remote_file_path, local_file_path): 15 | with ftplib.FTP(self.ftp_host, self.ftp_user, self.ftp_password) as ftp: 16 | with open(local_file_path, 'wb') as f: 17 | ftp.retrbinary('RETR ' + remote_file_path, f.write) -------------------------------------------------------------------------------- /load_model.py: -------------------------------------------------------------------------------- 1 | import urllib.request 2 | import os 3 | import torch 4 | from DualCoreLLM import DualCoreLLM 5 | 6 | def load_model(model_path, use_dualcore=False): 7 | if model_path.startswith("http"): 8 | # Load model from online file 9 | urllib.request.urlretrieve(model_path, "model.bin") 10 | model_path = "model.bin" 11 | 12 | if not os.path.exists(model_path): 13 | raise ValueError("Model file not found.") 14 | 15 | # Load model into memory 16 | model = torch.load(model_path, map_location=torch.device('cpu')) 17 | 18 | if use_dualcore: 19 | # Initialize DualCoreLLM with pretrained model 20 | dualcore = DualCoreLLM(model) 21 | return dualcore 22 | else: 23 | return model -------------------------------------------------------------------------------- /nlp/FileTransfer.py: -------------------------------------------------------------------------------- 1 | import ftplib 2 | 3 | class FileTransfer: 4 | def __init__(self, ftp_host, ftp_user, ftp_password): 5 | self.ftp_host = ftp_host 6 | self.ftp_user = ftp_user 7 | self.ftp_password = ftp_password 8 | 9 | def upload_file(self, local_file_path, remote_file_path): 10 | with ftplib.FTP(self.ftp_host, self.ftp_user, self.ftp_password) as ftp: 11 | with open(local_file_path, 'rb') as f: 12 | ftp.storbinary('STOR ' + remote_file_path, f) 13 | 14 | def download_file(self, remote_file_path, local_file_path): 15 | with ftplib.FTP(self.ftp_host, self.ftp_user, self.ftp_password) as ftp: 16 | with 
open(local_file_path, 'wb') as f: 17 | ftp.retrbinary('RETR ' + remote_file_path, f.write) -------------------------------------------------------------------------------- /src/load_model.py: -------------------------------------------------------------------------------- 1 | import urllib.request 2 | import os 3 | import torch 4 | from DualCoreLLM import DualCoreLLM 5 | 6 | def load_model(model_path, use_dualcore=False): 7 | if model_path.startswith("http"): 8 | # Load model from online file 9 | urllib.request.urlretrieve(model_path, "model.bin") 10 | model_path = "model.bin" 11 | 12 | if not os.path.exists(model_path): 13 | raise ValueError("Model file not found.") 14 | 15 | # Load model into memory 16 | model = torch.load(model_path, map_location=torch.device('cpu')) 17 | 18 | if use_dualcore: 19 | # Initialize DualCoreLLM with pretrained model 20 | dualcore = DualCoreLLM(model) 21 | return dualcore 22 | else: 23 | return model -------------------------------------------------------------------------------- /utils/FileTransfer.py: -------------------------------------------------------------------------------- 1 | import ftplib 2 | 3 | class FileTransfer: 4 | def __init__(self, ftp_host, ftp_user, ftp_password): 5 | self.ftp_host = ftp_host 6 | self.ftp_user = ftp_user 7 | self.ftp_password = ftp_password 8 | 9 | def upload_file(self, local_file_path, remote_file_path): 10 | with ftplib.FTP(self.ftp_host, self.ftp_user, self.ftp_password) as ftp: 11 | with open(local_file_path, 'rb') as f: 12 | ftp.storbinary('STOR ' + remote_file_path, f) 13 | 14 | def download_file(self, remote_file_path, local_file_path): 15 | with ftplib.FTP(self.ftp_host, self.ftp_user, self.ftp_password) as ftp: 16 | with open(local_file_path, 'wb') as f: 17 | ftp.retrbinary('RETR ' + remote_file_path, f.write) -------------------------------------------------------------------------------- /utils/load_model.py: 
-------------------------------------------------------------------------------- 1 | import urllib.request 2 | import os 3 | import torch 4 | from DualCoreLLM import DualCoreLLM 5 | 6 | def load_model(model_path, use_dualcore=False): 7 | if model_path.startswith("http"): 8 | # Load model from online file 9 | urllib.request.urlretrieve(model_path, "model.bin") 10 | model_path = "model.bin" 11 | 12 | if not os.path.exists(model_path): 13 | raise ValueError("Model file not found.") 14 | 15 | # Load model into memory 16 | model = torch.load(model_path, map_location=torch.device('cpu')) 17 | 18 | if use_dualcore: 19 | # Initialize DualCoreLLM with pretrained model 20 | dualcore = DualCoreLLM(model) 21 | return dualcore 22 | else: 23 | return model -------------------------------------------------------------------------------- /Wordpress plugin/listmodels.py: -------------------------------------------------------------------------------- 1 | import os 2 | from NeuralGPT-0.1 import NeuralGPT 3 | # Define the directory where the pretrained models are stored 4 | models_dir = "E:/AI/NeuralGPT/NeuralGPT/models/" 5 | # List all the pretrained models in the directory 6 | pretrained_models = os.listdir(models_dir) 7 | # Display the list of pretrained models to the user 8 | print("Select a pretrained model to load:") 9 | for i, model in enumerate(pretrained_models): 10 | print(f"{i+1}. {model}") 11 | # Ask the user to choose a pretrained model 12 | model_num = int(input("Enter the model number: ")) 13 | # Load the chosen pretrained model 14 | model_path = os.path.join(models_dir, pretrained_models[model_num-1]) 15 | neural_gpt = NeuralGPT(model_path) 16 | # Open the chat window and start the conversation 17 | # ... 
18 | -------------------------------------------------------------------------------- /long.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | async def long_running_task(): 4 | progress = 0 5 | while progress < 100: 6 | # do some work 7 | await asyncio.sleep(1) 8 | progress += 10 9 | yield f"Task progress: {progress}%" 10 | yield "Task completed" 11 | 12 | class Communication: 13 | async def execute_task(self): 14 | try: 15 | # execute long running task asynchronously with a timeout of 30 seconds 16 | result = "" 17 | async for progress_update in long_running_task(): 18 | result += progress_update + "\n" 19 | # handle successful completion of the task 20 | return result 21 | except asyncio.TimeoutError: 22 | # handle timeout 23 | return "Task timed out" -------------------------------------------------------------------------------- /generate_test_data.py: -------------------------------------------------------------------------------- 1 | import random 2 | import string 3 | 4 | # Define a list of possible actions 5 | actions = ['open', 'close', 'turn on', 'turn off', 'start', 'stop'] 6 | 7 | # Define a list of possible objects 8 | objects = ['door', 'window', 'light', 'fan', 'TV', 'AC'] 9 | 10 | # Define a list of possible locations 11 | locations = ['living room', 'bedroom', 'kitchen', 'bathroom', 'garage'] 12 | 13 | # Define a function to generate random test data 14 | def generate_test_data(): 15 | action = random.choice(actions) 16 | obj = random.choice(objects) 17 | location = random.choice(locations) 18 | message = f"{action} the {obj} in the {location}" 19 | return message 20 | 21 | # Generate 10 random test messages 22 | for i in range(10): 23 | test_message = generate_test_data() 24 | print(test_message) -------------------------------------------------------------------------------- /src/generate_test_data.py: -------------------------------------------------------------------------------- 1 | 
import random 2 | import string 3 | 4 | # Define a list of possible actions 5 | actions = ['open', 'close', 'turn on', 'turn off', 'start', 'stop'] 6 | 7 | # Define a list of possible objects 8 | objects = ['door', 'window', 'light', 'fan', 'TV', 'AC'] 9 | 10 | # Define a list of possible locations 11 | locations = ['living room', 'bedroom', 'kitchen', 'bathroom', 'garage'] 12 | 13 | # Define a function to generate random test data 14 | def generate_test_data(): 15 | action = random.choice(actions) 16 | obj = random.choice(objects) 17 | location = random.choice(locations) 18 | message = f"{action} the {obj} in the {location}" 19 | return message 20 | 21 | # Generate 10 random test messages 22 | for i in range(10): 23 | test_message = generate_test_data() 24 | print(test_message) -------------------------------------------------------------------------------- /utils/generate_test_data.py: -------------------------------------------------------------------------------- 1 | import random 2 | import string 3 | 4 | # Define a list of possible actions 5 | actions = ['open', 'close', 'turn on', 'turn off', 'start', 'stop'] 6 | 7 | # Define a list of possible objects 8 | objects = ['door', 'window', 'light', 'fan', 'TV', 'AC'] 9 | 10 | # Define a list of possible locations 11 | locations = ['living room', 'bedroom', 'kitchen', 'bathroom', 'garage'] 12 | 13 | # Define a function to generate random test data 14 | def generate_test_data(): 15 | action = random.choice(actions) 16 | obj = random.choice(objects) 17 | location = random.choice(locations) 18 | message = f"{action} the {obj} in the {location}" 19 | return message 20 | 21 | # Generate 10 random test messages 22 | for i in range(10): 23 | test_message = generate_test_data() 24 | print(test_message) -------------------------------------------------------------------------------- /sort_files.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | 4 | # 
Define the directory where the files are located 5 | directory = "/path/to/directory" 6 | 7 | # Create a dictionary to store the file extensions and their corresponding subdirectories 8 | file_extensions = {} 9 | 10 | # Loop through all the files in the directory 11 | for filename in os.listdir(directory): 12 | 13 | # Get the file extension 14 | file_extension = os.path.splitext(filename)[1] 15 | 16 | # If the file extension is not in the dictionary, create a new subdirectory for it 17 | if file_extension not in file_extensions: 18 | os.mkdir(os.path.join(directory, file_extension[1:])) 19 | file_extensions[file_extension] = True 20 | 21 | # Move the file to the corresponding subdirectory 22 | shutil.move(os.path.join(directory, filename), os.path.join(directory, file_extension[1:], filename)) -------------------------------------------------------------------------------- /auto/task3.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Set the input CSV file path 4 | input_file="example.csv" 5 | 6 | # Set the output Markdown file path 7 | output_file="E:/AI/NeuralGPT/NeuralGPT/table.md" 8 | 9 | # Read the CSV file and generate a Markdown table 10 | while read line 11 | do 12 | # Replace commas with pipes for Markdown table formatting 13 | row=$(echo $line | sed 's/,/ | /g') 14 | 15 | # Add Markdown table formatting to the row 16 | if [ -z "$header" ] 17 | then 18 | # The first row is the header 19 | header="$row" 20 | separator=$(echo "$header" | sed 's/[^|]/-/g') 21 | table="$header\n$separator" 22 | else 23 | # All other rows are data 24 | table="$table\n$row" 25 | fi 26 | done < "$input_file" 27 | 28 | # Save the Markdown table to the output file 29 | echo -e "$table" > "$output_file" -------------------------------------------------------------------------------- /Wordpress plugin/logcreation.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | 
# Set up logging 4 | logging.basicConfig(filename='neural_ai.log', level=logging.DEBUG, 5 | format='%(asctime)s %(levelname)s %(message)s') 6 | 7 | def access_local_data_storage(): 8 | try: 9 | # Access local data storage 10 | # Code to create or modify files 11 | except Exception as e: 12 | # Log the error 13 | logging.error('Error accessing local data storage: {}'.format(str(e))) 14 | 15 | def access_universal_database(): 16 | try: 17 | # Access universal database 18 | # Code to achieve data harmonization 19 | except Exception as e: 20 | # Log the error 21 | logging.error('Error accessing universal database: {}'.format(str(e))) 22 | 23 | # Call the functions 24 | access_local_data_storage() 25 | access_universal_database() -------------------------------------------------------------------------------- /geninit.py: -------------------------------------------------------------------------------- 1 | # Import necessary modules 2 | import sys 3 | import os 4 | from PyQt5.QtWidgets import QApplication, QMainWindow 5 | 6 | # Import project modules 7 | from NeuralGPT-0,1 import DualCoreLLM 8 | from NeuralGPT-0,1 import module2 9 | from NeuralGPT-0,1 import module3 10 | from NeuralGPT-0,1 import module4 11 | from NeuralGPT-0,1 import module5 12 | 13 | # Define function to execute all modules 14 | def execute_modules(): 15 | DualCoreLLM.execute() 16 | module2.execute() 17 | module3.execute() 18 | module4.execute() 19 | module5.execute() 20 | 21 | # Define main function to start GUI and execute modules 22 | def main(): 23 | # Start GUI 24 | app = QApplication(sys.argv) 25 | window = QMainWindow() 26 | window.show() 27 | sys.exit(app.exec_()) 28 | 29 | # Execute modules 30 | execute_modules() 31 | 32 | if __name__ == '__main__': 33 | main() -------------------------------------------------------------------------------- /Wordpress plugin/submit_input.php: -------------------------------------------------------------------------------- 1 | connect_error) { 15 | 
die("Connection failed: " . $conn->connect_error); 16 | } 17 | 18 | $sql = "INSERT INTO user_input (input_text, input_type, feedback_text, feedback_type, timestamp) 19 | VALUES ('$input_text', '$input_type', '$feedback_text', '$feedback_type', '$timestamp')"; 20 | 21 | if ($conn->query($sql) === TRUE) { 22 | echo "Input submitted successfully"; 23 | } else { 24 | echo "Error: " . $sql . "').text(value.excerpt);
17 | listItem.append(link).append(excerpt);
18 | $('#neuralgpt-browse-results').append(listItem);
19 | });
20 | }
21 | });
22 | });
23 | });
--------------------------------------------------------------------------------
/src/DualCoreLLM.py:
--------------------------------------------------------------------------------
import spacy


class DualCoreLLM:
    """Heuristic text checks built on spaCy's dependency parse."""

    def __init__(self):
        # Small English pipeline; the model package must be installed separately.
        self.nlp = spacy.load('en_core_web_sm')

    def check_coherence(self, text):
        """Return False when a verb's direct object lies outside its subject's subtree."""
        parsed = self.nlp(text)
        for tok in parsed:
            # Only nominal subjects governed by a verb are inspected.
            if tok.dep_ != 'nsubj' or tok.head.pos_ != 'VERB':
                continue
            subtree_texts = {node.text for node in tok.subtree}
            for dependent in tok.head.children:
                if dependent.dep_ == 'dobj' and dependent.text not in subtree_texts:
                    return False
        return True

    def check_grammar(self, text):
        """Crude grammar check: every sentence root with dep ROOT must be tagged VBZ."""
        parsed = self.nlp(text)
        return all(
            not (sent.root.dep_ == 'ROOT' and sent.root.tag_ != 'VBZ')
            for sent in parsed.sents
        )
--------------------------------------------------------------------------------
/utils/DualCoreLLM.py:
--------------------------------------------------------------------------------
import spacy


class DualCoreLLM:
    """spaCy-based sanity checks for generated text (semantic + grammatical)."""

    def __init__(self):
        self.nlp = spacy.load('en_core_web_sm')  # small English model

    def check_coherence(self, text):
        """A verb's direct object must appear inside its subject's subtree; else False."""
        doc = self.nlp(text)
        for candidate in doc:
            is_verb_subject = candidate.dep_ == 'nsubj' and candidate.head.pos_ == 'VERB'
            if not is_verb_subject:
                continue
            governing_verb = candidate.head
            words_under_subject = [node.text for node in candidate.subtree]
            has_detached_object = any(
                child.dep_ == 'dobj' and child.text not in words_under_subject
                for child in governing_verb.children
            )
            if has_detached_object:
                return False
        return True

    def check_grammar(self, text):
        """Reject text whose sentence root is a ROOT dependency but not tagged VBZ."""
        doc = self.nlp(text)
        for sentence in doc.sents:
            root = sentence.root
            if root.dep_ == 'ROOT' and root.tag_ != 'VBZ':
                return False
        return True
--------------------------------------------------------------------------------
/Wordpress plugin/neuralgpt_chatbot.spec:
--------------------------------------------------------------------------------
# -*- mode: python ; coding: utf-8 -*-
# PyInstaller build specification for the WordPress-plugin chatbot.
# Consumed by `pyinstaller neuralgpt_chatbot.spec`; the Analysis/PYZ/EXE
# names are injected into this file's namespace by PyInstaller at build time.


block_cipher = None  # no bytecode encryption


# Collect the entry script and the imports PyInstaller discovers from it.
a = Analysis(
    ['neuralgpt_chatbot.py'],
    pathex=[],               # extra module search paths (none needed)
    binaries=[],
    datas=[],                # no bundled data files
    hiddenimports=[],        # imports PyInstaller cannot detect statically
    hookspath=[],
    hooksconfig={},
    runtime_hooks=[],
    excludes=[],
    win_no_prefer_redirects=False,
    win_private_assemblies=False,
    cipher=block_cipher,
    noarchive=False,
)
# Archive of the collected pure-Python modules.
pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher)

# One-file console executable bundling the archive, scripts and binaries.
exe = EXE(
    pyz,
    a.scripts,
    a.binaries,
    a.zipfiles,
    a.datas,
    [],
    name='neuralgpt_chatbot',
    debug=False,
    bootloader_ignore_signals=False,
    strip=False,
    upx=True,                 # compress with UPX when available
    upx_exclude=[],
    runtime_tmpdir=None,
    console=True,             # keep a console window for stdout/stderr
    disable_windowed_traceback=False,
    argv_emulation=False,
    target_arch=None,
    codesign_identity=None,
    entitlements_file=None,
)
--------------------------------------------------------------------------------
/Wordpress plugin/test_chatbox.py:
--------------------------------------------------------------------------------
import tkinter

import paho.mqtt.client as mqtt

import neuralgpt
import local_website

# --- GUI -------------------------------------------------------------------
# BUG FIX: the original referenced `tkinter` and `window` without ever
# importing the module or creating the root window; both raised NameError.
window = tkinter.Tk()

# Button that activates the Neural-GPT system.
button_neuralgpt = tkinter.Button(window, text="Activate Neural-GPT", command=neuralgpt.activate)
button_neuralgpt.pack()

# Dropdown menu for selecting a local website.
options = ["Website A", "Website B", "Website C"]
variable = tkinter.StringVar(window)
variable.set(options[0])
dropdown_localwebsite = tkinter.OptionMenu(window, variable, *options)
dropdown_localwebsite.pack()

# --- MQTT ------------------------------------------------------------------
# Connect to the local broker used to link Neural-GPT instances.
client = mqtt.Client()
client.connect("localhost", 1883, 60)


def send_message(message):
    """Publish a chat message to every Neural-GPT instance."""
    client.publish("neuralgpt/chat", message)


def on_message(client, userdata, message):
    """Print any chat message received from other instances."""
    print(message.payload.decode())


# BUG FIX: install the callback *before* subscribing so no early message is
# dropped, and start the network loop so callbacks are actually dispatched.
client.on_message = on_message
client.subscribe("neuralgpt/chat")
client.loop_start()
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
from model import GPT
import torch

# Run on GPU if one is present, otherwise fall back to CPU.
run_device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

# Load the pretrained checkpoint and move the model to the chosen device.
checkpoint = 'E:/AI/NeuralGPT/NeuralGPT/models/ggml-model-q4_0.bin'
gpt = GPT(checkpoint)
gpt.to(run_device)

# Inference only — disables training-mode layers such as dropout.
gpt.eval()

# Read the prompt from the user.
user_prompt = input('Enter a prompt: ')

# Resample until the model yields a non-empty continuation.
completion = ''
while not completion:
    # Encode the prompt into input ids on the target device.
    encoded = gpt.tokenizer.encode(user_prompt, return_tensors='pt').to(run_device)

    # Sample a continuation capped at 50 tokens beyond the prompt length.
    limit = len(encoded.flatten()) + 50
    sampled = gpt.model.generate(input_ids=encoded, max_length=limit, do_sample=True)

    # Decode the sequence and strip the echoed prompt from the front.
    completion = gpt.tokenizer.decode(sampled[0], skip_special_tokens=True)
    completion = completion[len(user_prompt):].strip()

# Show the generated text.
print(completion)
--------------------------------------------------------------------------------
/.devcontainer/devcontainer.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "Python 3",
3 | // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
4 | "image": "mcr.microsoft.com/devcontainers/python:1-3.11-bullseye",
5 | "customizations": {
6 | "codespaces": {
7 | "openFiles": [
8 | "README.md",
9 | "streamlit.py"
10 | ]
11 | },
12 | "vscode": {
13 | "settings": {},
14 | "extensions": [
15 | "ms-python.python",
16 | "ms-python.vscode-pylance"
17 | ]
18 | }
19 | },
20 | "updateContentCommand": "[ -f packages.txt ] && sudo apt update && sudo apt upgrade -y && sudo xargs apt install -y Chat Hub Center
14 |
15 | Incoming Messages
16 |
17 | Server Responses
18 |
19 |
39 |
40 |
41 |
--------------------------------------------------------------------------------
/FileProcessor.py:
--------------------------------------------------------------------------------
import os
from typing import List


class FileProcessor:
    """Stores, retrieves and batch-processes files under a local storage directory."""

    def __init__(self, storage_path: str):
        # Directory (or URL prefix) under which all managed files live.
        self.storage_path = storage_path

    def upload_file(self, file_path, file_name: str) -> str:
        """
        Copy a file into the storage_path and return the URL where it can be accessed.

        BUG FIX: the original annotated ``file_path: str`` yet called
        ``file_path.read()``, which raises AttributeError for every string
        argument.  ``file_path`` may now be either a filesystem path string or
        an already-open binary file-like object.
        """
        file_url = os.path.join(self.storage_path, file_name)
        if hasattr(file_path, 'read'):
            data = file_path.read()
        else:
            with open(file_path, 'rb') as src:
                data = src.read()
        with open(file_url, 'wb') as f:
            f.write(data)
        return file_url

    def download_file(self, file_url: str) -> bytes:
        """Read a file from the storage_path and return its contents as bytes."""
        with open(file_url, 'rb') as f:
            return f.read()

    def process_files(self, file_urls: List[str]) -> List[str]:
        """Process a list of file URLs and return the processed files' URLs."""
        # Placeholder transformation: real per-file processing would go here.
        return [file_url + '_processed' for file_url in file_urls]
--------------------------------------------------------------------------------
/TEST.py:
--------------------------------------------------------------------------------
from neuralgpt import NeuralGPT
from DualCoreLLM import DualCoreLLM  # if needed
import re

# Load pretrained model
model = NeuralGPT.load_model('model.bin')  # provide path to model file

# Define list of prompts
prompts = ['identify yourself', 'How can I improve my life?']

# Define function for preprocessing user input
def preprocess_input(text):
    """Lowercase *text* and strip everything except word chars and whitespace."""
    text = text.lower()
    text = re.sub(r'[^\w\s]', '', text)  # remove special characters
    return text

# Define function for generating responses
def generate_response(prompt):
    """Return the model's raw generated response for *prompt*."""
    response = model.generate(prompt)
    return response

def evaluate_coherence(response):
    """Placeholder coherence metric: 1.0 for a non-empty response, else 0.0.

    TODO(review): replace with a real coherence evaluation.
    """
    return 1.0 if response and str(response).strip() else 0.0

# Define function for testing coherence of responses
def test_coherence(prompt):
    """Read user input for *prompt*, generate a response, and score it."""
    input_text = input(prompt + ': ')
    preprocessed_text = preprocess_input(input_text)
    response = generate_response(preprocessed_text)
    # Fix: the original returned `coherence_score` without ever defining it,
    # which raised NameError on every call.
    coherence_score = evaluate_coherence(response)
    return coherence_score

# Run test for each prompt
total_score = 0
for prompt in prompts:
    score = test_coherence(prompt)
    total_score += score

# Output final score
print('Coherence score:', total_score)
--------------------------------------------------------------------------------
/src/fiileprocessor.py:
--------------------------------------------------------------------------------
1 | import os
2 | from typing import List
3 |
class FileProcessor:
    """Saves, loads, and post-processes files inside a configured storage directory."""

    def __init__(self, storage_path: str):
        # Target directory for uploads; assumed to already exist.
        self.storage_path = storage_path

    def upload_file(self, file_path, file_name: str) -> str:
        """
        Uploads a file to the storage_path and returns the URL where it can be accessed.

        Accepts either a filesystem path (str) or an open binary file-like
        object as ``file_path``. The original code unconditionally called
        ``file_path.read()``, which fails with AttributeError whenever a
        plain path string is passed — this version supports both.
        """
        file_url = os.path.join(self.storage_path, file_name)
        if hasattr(file_path, 'read'):
            data = file_path.read()
        else:
            with open(file_path, 'rb') as src:
                data = src.read()
        with open(file_url, 'wb') as f:
            f.write(data)
        return file_url

    def download_file(self, file_url: str) -> bytes:
        """
        Downloads a file from the storage_path and returns its contents as bytes.
        """
        with open(file_url, 'rb') as f:
            file_contents = f.read()
        return file_contents

    def process_files(self, file_urls: List[str]) -> List[str]:
        """
        Processes a list of files specified by their URLs and returns a list of processed files' URLs.

        NOTE(review): "processing" is a stub — it appends '_processed' to each
        URL without touching file contents.
        """
        return [file_url + '_processed' for file_url in file_urls]
--------------------------------------------------------------------------------
/utils/FileProcessor.py:
--------------------------------------------------------------------------------
1 | import os
2 | from typing import List
3 |
class FileProcessor:
    """Handles upload, download, and batch post-processing of files in a storage directory."""

    def __init__(self, storage_path: str):
        # Directory into which uploaded files are written; assumed to exist.
        self.storage_path = storage_path

    def upload_file(self, file_path, file_name: str) -> str:
        """
        Uploads a file to the storage_path and returns the URL where it can be accessed.

        ``file_path`` may be a path string or an open binary file-like object.
        The previous implementation called ``file_path.read()`` on the str
        annotation's own type, raising AttributeError for real paths; this
        version copies from a path and reads from a file object correctly.
        """
        file_url = os.path.join(self.storage_path, file_name)
        if hasattr(file_path, 'read'):
            data = file_path.read()
        else:
            with open(file_path, 'rb') as src:
                data = src.read()
        with open(file_url, 'wb') as f:
            f.write(data)
        return file_url

    def download_file(self, file_url: str) -> bytes:
        """
        Downloads a file from the storage_path and returns its contents as bytes.
        """
        with open(file_url, 'rb') as f:
            file_contents = f.read()
        return file_contents

    def process_files(self, file_urls: List[str]) -> List[str]:
        """
        Processes a list of files specified by their URLs and returns a list of processed files' URLs.

        NOTE(review): actual processing is not implemented yet — each URL just
        gains a '_processed' suffix.
        """
        return [file_url + '_processed' for file_url in file_urls]
--------------------------------------------------------------------------------
/class.py:
--------------------------------------------------------------------------------
1 | import time
2 | import requests
3 |
class Communication:
    """HTTP messaging helper: POST/GET against a single endpoint with timeout retries."""

    def __init__(self, protocol, message_format, timeout, retry_limit):
        # `protocol` doubles as the endpoint URL handed to requests.
        self.protocol = protocol
        self.message_format = message_format
        self.timeout = timeout
        self.retry_limit = retry_limit

    def send_message(self, message):
        """POST *message* to the endpoint.

        Retries up to retry_limit times on timeout; any other request error
        aborts immediately. Returns the response, or None on failure.
        """
        attempt = 0
        while attempt < self.retry_limit:
            try:
                return requests.post(self.protocol, data=message, timeout=self.timeout)
            except requests.exceptions.Timeout:
                attempt += 1
                print("Timeout occurred. Retrying...")
                time.sleep(1)
            except requests.exceptions.RequestException as e:
                print("Error occurred: ", e)
                break
        return None

    def receive_message(self):
        """GET from the endpoint.

        Retries up to retry_limit times on timeout; any other request error
        aborts immediately. Returns the response, or None on failure.
        """
        attempt = 0
        while attempt < self.retry_limit:
            try:
                return requests.get(self.protocol, timeout=self.timeout)
            except requests.exceptions.Timeout:
                attempt += 1
                print("Timeout occurred. Retrying...")
                time.sleep(1)
            except requests.exceptions.RequestException as e:
                print("Error occurred: ", e)
                break
        return None
--------------------------------------------------------------------------------
/Wordpress plugin/module.txt:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/src/class.py:
--------------------------------------------------------------------------------
1 | import time
2 | import requests
3 |
class Communication:
    """Minimal request/response wrapper around a single HTTP endpoint with retry-on-timeout."""

    def __init__(self, protocol, message_format, timeout, retry_limit):
        # Endpoint URL (named `protocol` for historical reasons).
        self.protocol = protocol
        self.message_format = message_format
        self.timeout = timeout
        self.retry_limit = retry_limit

    def send_message(self, message):
        """POST *message*; retry on timeout up to retry_limit, else return None."""
        for _ in range(self.retry_limit):
            try:
                return requests.post(self.protocol, data=message, timeout=self.timeout)
            except requests.exceptions.Timeout:
                print("Timeout occurred. Retrying...")
                time.sleep(1)
            except requests.exceptions.RequestException as e:
                # Non-timeout failures are not retried.
                print("Error occurred: ", e)
                return None
        return None

    def receive_message(self):
        """GET from the endpoint; retry on timeout up to retry_limit, else return None."""
        for _ in range(self.retry_limit):
            try:
                return requests.get(self.protocol, timeout=self.timeout)
            except requests.exceptions.Timeout:
                print("Timeout occurred. Retrying...")
                time.sleep(1)
            except requests.exceptions.RequestException as e:
                # Non-timeout failures are not retried.
                print("Error occurred: ", e)
                return None
        return None
40 |
--------------------------------------------------------------------------------
/utils/class.py:
--------------------------------------------------------------------------------
1 | import time
2 | import requests
3 |
class Communication:
    """Sends and receives HTTP messages against one endpoint, retrying timed-out calls."""

    def __init__(self, protocol, message_format, timeout, retry_limit):
        self.protocol = protocol            # endpoint URL used for both verbs
        self.message_format = message_format
        self.timeout = timeout
        self.retry_limit = retry_limit      # max attempts on repeated timeouts

    def _attempt(self, do_request):
        """Run *do_request* with the shared timeout/retry policy; None on failure."""
        tries = 0
        while tries < self.retry_limit:
            try:
                return do_request()
            except requests.exceptions.Timeout:
                tries += 1
                print("Timeout occurred. Retrying...")
                time.sleep(1)
            except requests.exceptions.RequestException as e:
                print("Error occurred: ", e)
                break
        return None

    def send_message(self, message):
        """POST *message* to the endpoint; returns the response or None."""
        return self._attempt(
            lambda: requests.post(self.protocol, data=message, timeout=self.timeout)
        )

    def receive_message(self):
        """GET from the endpoint; returns the response or None."""
        return self._attempt(
            lambda: requests.get(self.protocol, timeout=self.timeout)
        )
40 |
--------------------------------------------------------------------------------
/Chat-center/client.js:
--------------------------------------------------------------------------------
1 | const WebSocket = require('ws');
2 | const readline = require('readline');
3 |
4 | // Create a WebSocket client that connects to the server
5 | const ws = new WebSocket('ws://localhost:5000');
6 |
7 | // Listen for when the client connects to the server
8 | ws.on('open', () => {
9 | console.log('Connected to server');
10 |
11 | // Start reading input from the user and sending messages to the server
12 | const rl = readline.createInterface({
13 | input: process.stdin,
14 | output: process.stdout
15 | });
16 |
17 | rl.on('line', (inputText) => {
18 | if (inputText.startsWith('/qna ')) {
19 | // Parse the input text as a QnA message
20 | const [question, passage] = inputText.substring(5).split('|').map((s) => s.trim());
21 | if (!question || !passage) {
22 | console.log('Invalid /qna command. Usage: /qna NeuralGPT Chatbot Settings
21 |
26 |