import openai
import json, ast
import os
import chainlit as cl
import autogen

# Upper bound on user_proxy -> assistant exchange rounds handled per user message.
MAX_ITER = 5


def _is_termination_msg(message):
    """Return True when an agent message signals the end of the conversation.

    A message terminates the exchange when its content is empty or ends with
    the literal marker "TERMINATE".  ``content`` can be ``None`` for
    function-call messages, so it is coerced to "" before string handling
    (the original inline lambdas would have raised AttributeError on None).
    """
    content = message.get("content") or ""
    return content.rstrip().endswith("TERMINATE") or content.strip() == ""


@cl.on_chat_start
async def setup_agent():
    """Build the autogen assistant / user-proxy pair and store them in the chainlit session."""
    config_list = autogen.config_list_from_json("OAI_CONFIG_LIST")
    # create an AssistantAgent named "assistant"
    assistant = autogen.AssistantAgent(
        name="assistant",
        is_termination_msg=_is_termination_msg,
        llm_config={
            "seed": 42,  # seed for caching and reproducibility
            "config_list": config_list,  # a list of OpenAI API configurations
            "temperature": 0,  # temperature for sampling
        },  # configuration for autogen's enhanced inference API (OpenAI-compatible)
    )
    cl.user_session.set('assistant', assistant)
    # create a UserProxyAgent instance named "user_proxy"
    user_proxy = autogen.UserProxyAgent(
        name="user_proxy",
        human_input_mode="NEVER",
        max_consecutive_auto_reply=2,
        is_termination_msg=_is_termination_msg,
        code_execution_config={
            "work_dir": "coding",
            "use_docker": False,  # set to True or an image name like "python:3" to use docker
        },
    )
    cl.user_session.set('user_proxy', user_proxy)
    await cl.Message(content="Start chatting with code interpreter").send()


@cl.on_message
async def run_conversation(msg: cl.Message):
    """Relay a chainlit user message into the autogen conversation and stream new replies back.

    Saves any uploaded files into the "coding" work dir (so the
    code-executing agent can reach them), then drives up to MAX_ITER
    rounds of the agent conversation, forwarding every message the
    session has not yet displayed.
    """
    user_message = msg.content
    # Skip duplicate submissions of the same message.
    if user_message == cl.user_session.get('user_message'):
        return
    # BUG FIX: 'user_message' was never written to the session, so the
    # duplicate check above always compared against None and never fired.
    cl.user_session.set('user_message', user_message)
    assistant = cl.user_session.get('assistant')
    user_proxy = cl.user_session.get('user_proxy')
    cur_iter = 0
    if msg.elements:
        for element in msg.elements:
            file_name = element.name
            content = element.content
            # If want to show content: Content: {content.decode('utf-8')}
            await cl.Message(content=f"Uploaded file: {file_name}\n").send()

            # Save the file locally so the code-executing agent can access it.
            with open(os.path.join("coding", file_name), "wb") as file:
                file.write(content)
            user_proxy.send(
                recipient=assistant,
                message=f"User uploaded file: {file_name}",
            )
    print('CONVERSATION')

    while cur_iter < MAX_ITER:
        if len(assistant.chat_messages[user_proxy]) == 0:
            print('initiating chat')
            user_proxy.initiate_chat(
                assistant,
                message=user_message,
            )
        else:
            print('FOLLOW up message')
            # followup of the previous question
            # NOTE(review): this re-sends the SAME user message on every loop
            # iteration (up to MAX_ITER times) — confirm this multi-round
            # behavior is intended before changing it.
            user_proxy.send(
                recipient=assistant,
                message=user_message,
            )
        message_history = assistant.chat_messages[user_proxy]
        last_seen_message_index = cl.user_session.get('last_seen_message_index', 0)
        print(message_history)
        # The +1 skips echoing the user's own (single) message of this round
        # back into the UI; only genuinely new agent messages are shown.
        for message in message_history[last_seen_message_index + 1:]:
            await cl.Message(author=message["role"], content=message["content"]).send()
        cl.user_session.set('last_seen_message_index', len(message_history))

        cur_iter += 1
    return