├── AppV1
│   ├── .env.example
│   └── main.py
├── AppV2
│   ├── .env
│   ├── htmlTemplates.py
│   └── main.py
├── AppV3
│   ├── .env
│   ├── htmlTemplates.py
│   ├── main.py
│   ├── prompts.py
│   └── trippyPattern.png
├── AppV4
│   ├── .env
│   ├── htmlTemplates.py
│   ├── main.py
│   ├── prompts.py
│   └── trippyPattern.png
├── AppV5
│   ├── .env
│   ├── htmlTemplates.py
│   ├── main.py
│   ├── prompts.py
│   ├── requirements.txt
│   └── trippyPattern.png
├── AppV6
│   ├── .env
│   ├── htmlTemplates.py
│   ├── main.py
│   ├── prompts.py
│   ├── requirements.txt
│   └── trippyPattern.png
├── AppV7
│   ├── .env
│   ├── db_functions.py
│   ├── htmlTemplates.py
│   ├── main.py
│   ├── prompts.py
│   ├── requirements.txt
│   └── trippyPattern.png
└── README.md

/AppV1/.env.example:
--------------------------------------------------------------------------------
OPENAI_API_KEY=

--------------------------------------------------------------------------------
/AppV1/main.py:
--------------------------------------------------------------------------------
import os
from dotenv import load_dotenv
import streamlit as st
import openai
from langchain.llms import OpenAI
from langchain.memory import ConversationBufferMemory
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate


def init_ses_states():
    # Ensure the chat history list exists in Streamlit's session state.
    if 'chatHistory' not in st.session_state:
        st.session_state['chatHistory'] = []


def sidebar():
    # Sidebar settings: target language, assistant mode, and sampling temperature.
    global language, scenario, temperature
    languages = ['Python',
                 'JavaScript',
                 'GoLang',
                 'C',
                 'C++',
                 'C#'
                 ]
    scenarios = ['Code Correction',
                 'Snippet Completion',
                 ]
    with st.sidebar:
        with st.expander(label="Settings", expanded=True):
            language = st.selectbox(label="Language", options=languages)
            scenario = st.selectbox(label="Scenario", options=scenarios)
            temperature = st.slider(label="Temperature", min_value=0.0, max_value=1.0, value=0.5)


def main():
    st.set_page_config(page_title="GPT-4 Coding Assistant", page_icon="computer")
    init_ses_states()
    sidebar()
    st.title("GPT-4 Coding Assistant")
    st.caption("Powered by OpenAI, LangChain, Streamlit")
    # Prompt template filled with the user's code plus the sidebar selections.
    template = PromptTemplate(
        input_variables=['input', 'language', 'scenario'],
        template='''
        You are an AI Coding Assistant specializing in the "{language}" programming language.
        \nThe user has specified the mode to "{scenario}"
        \nUSER {language} CODE INPUT:
        \n"{input}"
        '''
    )
    memory = ConversationBufferMemory(input_key="input", memory_key="chat_history")
    llm = OpenAI(temperature=temperature, model_name="gpt-4")
    llm_chain = LLMChain(llm=llm, prompt=template, memory=memory)
    user_input = st.text_area(label=f"Input {language} Code", height=400)
    if st.button('Submit') and user_input:
        with st.spinner('Generating Response...'):
            response = llm_chain.run(input=user_input, language=language, scenario=scenario)
            st.write(response)


if __name__ == '__main__':
    load_dotenv()
    main()

--------------------------------------------------------------------------------
/AppV2/.env:
--------------------------------------------------------------------------------
OPENAI_API_KEY=

--------------------------------------------------------------------------------
/AppV2/htmlTemplates.py:
--------------------------------------------------------------------------------
css = '''