├── README.md
├── .gitignore
└── app.py


/README.md:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
vectorstore
.streamlit
--------------------------------------------------------------------------------
/app.py:
--------------------------------------------------------------------------------
import streamlit as st
from streamlit_chat import message
from langchain_openai import ChatOpenAI
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser
from langchain_openai import OpenAIEmbeddings
from langchain_community.vectorstores import FAISS
from langchain.chains.combine_documents import create_stuff_documents_chain
from langchain_core.prompts import MessagesPlaceholder
from langchain.chains import ConversationalRetrievalChain
from streamlit_oauth import OAuth2Component
from langchain_community.document_loaders import WebBaseLoader
from langchain_core.documents import Document
from descope import DescopeClient


DB_FAISS_PATH = './vectorstore/db_faiss'

st.title("Query Descope Docs")

AUTHORIZE_URL = st.secrets.get('AUTHORIZE_URL')
TOKEN_URL = st.secrets.get('TOKEN_URL')
REFRESH_TOKEN_URL = st.secrets.get('REFRESH_TOKEN_URL')
REVOKE_TOKEN_URL = st.secrets.get('REVOKE_TOKEN_URL')
CLIENT_ID = st.secrets.get('CLIENT_ID')
CLIENT_SECRET = st.secrets.get('CLIENT_SECRET')
REDIRECT_URI = st.secrets.get('REDIRECT_URI')
SCOPE = st.secrets.get('SCOPE')
PROJECT_ID = st.secrets.get('PROJECT_ID')

with st.spinner("Loading documents and creating embeddings..."):
    oauth2 = OAuth2Component(CLIENT_ID, CLIENT_SECRET, AUTHORIZE_URL, TOKEN_URL)
    descope_client = DescopeClient(project_id=PROJECT_ID)
    loader = WebBaseLoader(["https://docs.descope.com/manage/idpapplications/oidc/", "https://docs.descope.com/manage/testusers/"])
    data = loader.load()

    # Tag each loaded page with a role so retrieval can later be filtered per user role.
    list_of_documents = [
        Document(page_content=data[0].page_content, metadata=dict(role="Dev")),
        Document(page_content=data[1].page_content, metadata=dict(role="QA"))]

    embeddings = OpenAIEmbeddings(
        model="text-embedding-ada-002",
        openai_api_key=st.secrets.openai_key
    )

    vector = FAISS.from_documents(list_of_documents, embeddings)
    vector.save_local(DB_FAISS_PATH)
    llm = ChatOpenAI(api_key=st.secrets.openai_key, temperature=0, model="gpt-4-turbo")


if 'token' not in st.session_state:
    # No token yet: show the authorize button
    result = oauth2.authorize_button(
        name="Continue with Descope",
        icon="https://images.ctfassets.net/xqb1f63q68s1/7D1PYGYvVgRNOBeiA6USQM/68b572056b5d38a769c71b0fba63b4e5/Descope_RGB_Icon-ForDarkBackground.svg",
        redirect_uri=REDIRECT_URI,
        scope=SCOPE,
        key="descope",
        use_container_width=False,
        pkce='S256',
    )
    if result and 'token' in result:
        # If authorization succeeded, save the token in session state
        st.session_state.token = result.get('token')
        st.experimental_rerun()
else:
    # A token exists in session state: validate it and read the user's roles
    token = st.session_state['token']
    jwt_response = descope_client.validate_session(session_token=token.get("access_token"), audience=PROJECT_ID)
    roles = jwt_response.get("roles")
    if not roles or not roles[0]:
        st.info('Logged in user has no role assigned so the chat is disabled. Refresh the page to log in with another user.', icon="ℹ️")
    else:
        st.info('Logged in user with role: ' + roles[0], icon="ℹ️")
        retriever = vector.as_retriever()

        def conversational_chat(query):
            # Prompt for the answer-generation step: {context} is filled with the
            # retrieved documents and {question} with the user's query.
            prompt = ChatPromptTemplate.from_messages([
                ("system", "You are an engineer. Answer the question based on the following context: {context}"),
                ("user", "{question}"),
            ])

            # Restrict retrieval to documents whose role metadata matches the logged-in user's role.
            chain = ConversationalRetrievalChain.from_llm(
                llm=llm,
                retriever=vector.as_retriever(search_kwargs={'filter': {'role': roles[0]}}),
                combine_docs_chain_kwargs={"prompt": prompt},
                verbose=True,
            )
            response = chain.invoke({"question": query, "chat_history": []})
            return response["answer"]

        if 'history' not in st.session_state:
            st.session_state['history'] = []

        if 'generated' not in st.session_state:
            st.session_state['generated'] = ["Hello! Ask me anything about Descope docs 🤗"]

        if 'past' not in st.session_state:
            st.session_state['past'] = ["Hey! 👋"]

        # container for the chat history
        response_container = st.container()
        # container for the user's text input
        container = st.container()

        with container:
            with st.form(key='my_form', clear_on_submit=True):
                user_input = st.text_input("Query:", placeholder="Enter your query:", key='input')
                submit_button = st.form_submit_button(label='Send')

            if submit_button and user_input:
                output = conversational_chat(user_input)

                st.session_state['past'].append(user_input)
                st.session_state['generated'].append(output)

        if st.session_state['generated']:
            with response_container:
                for i in range(len(st.session_state['generated'])):
                    message(st.session_state["past"][i], is_user=True, key=str(i) + '_user', avatar_style="big-smile")
                    message(st.session_state["generated"][i], key=str(i), avatar_style="thumbs")

--------------------------------------------------------------------------------
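
Note: app.py reads all of its configuration from Streamlit secrets, and .gitignore excludes the .streamlit directory from the repository, so the expected keys are not visible above. A minimal sketch of .streamlit/secrets.toml, using the key names from app.py; every value below is an illustrative placeholder, not a real endpoint or credential:

# .streamlit/secrets.toml (placeholder values only)
AUTHORIZE_URL = "<your OAuth authorize URL>"
TOKEN_URL = "<your OAuth token URL>"
REFRESH_TOKEN_URL = "<your OAuth refresh-token URL>"
REVOKE_TOKEN_URL = "<your OAuth revoke-token URL>"
CLIENT_ID = "<your OAuth client ID>"
CLIENT_SECRET = "<your OAuth client secret>"
REDIRECT_URI = "<your app redirect URI>"
SCOPE = "<your requested OAuth scopes>"
PROJECT_ID = "<your Descope project ID>"
openai_key = "<your OpenAI API key>"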
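
The access-control idea in app.py is that each loaded page is stored in FAISS with a role entry in its metadata, and retrieval is filtered by the logged-in user's role. A minimal standalone sketch of that filtering is below; it is not part of the repository, the document contents and query are made up for illustration, and OPENAI_API_KEY is assumed to be set in the environment:

from langchain_openai import OpenAIEmbeddings
from langchain_community.vectorstores import FAISS
from langchain_core.documents import Document

# Assumes OPENAI_API_KEY is set in the environment.
embeddings = OpenAIEmbeddings(model="text-embedding-ada-002")

# Two documents tagged with different roles, mirroring the Dev/QA split in app.py.
docs = [
    Document(page_content="Steps for configuring an OIDC application.", metadata={"role": "Dev"}),
    Document(page_content="How to create and sign in with test users.", metadata={"role": "QA"}),
]
vector = FAISS.from_documents(docs, embeddings)

# A retriever scoped to the "Dev" role only returns documents tagged role="Dev".
dev_retriever = vector.as_retriever(search_kwargs={"filter": {"role": "Dev"}})
print(dev_retriever.invoke("How do I set up an OIDC application?"))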