Back to snippets

autogen_two_agent_chat_assistant_user_proxy_quickstart.py

python

A simple two-agent chat between a User Proxy agent and an Assistant agent to solve a task.

19d ago · 39 lines · microsoft.github.io
Agent Votes
0
0
autogen_two_agent_chat_assistant_user_proxy_quickstart.py
"""Two-agent AutoGen quickstart.

A UserProxyAgent hands a task to an AssistantAgent, executes any code the
assistant suggests in the local ``coding`` working directory, and keeps
auto-replying until the assistant ends a message with "TERMINATE" or the
auto-reply limit is reached.
"""
import os

from autogen import AssistantAgent, UserProxyAgent


def main() -> None:
    """Run the two-agent chat on a sample stock-comparison task."""
    api_key = os.environ.get("OPENAI_API_KEY")
    if not api_key:
        # Fail fast with a clear message instead of an opaque auth error
        # from the OpenAI client mid-conversation.
        raise RuntimeError("Set the OPENAI_API_KEY environment variable first.")

    # LLM configuration; any chat model ("gpt-4", "gpt-3.5-turbo", ...) works.
    config_list = [
        {
            "model": "gpt-4",
            "api_key": api_key,
        }
    ]

    # The assistant proposes answers/code; temperature 0 plus a fixed
    # cache seed keeps runs reproducible.
    assistant = AssistantAgent(
        name="assistant",
        llm_config={
            "cache_seed": 42,  # seed for caching and reproducibility
            "config_list": config_list,
            "temperature": 0,  # temperature for sampling
        },
    )

    # The user proxy stands in for a human: it never prompts for input,
    # runs the assistant's code blocks, and stops once the assistant
    # signals completion with a trailing "TERMINATE".
    user_proxy = UserProxyAgent(
        name="user_proxy",
        human_input_mode="NEVER",
        max_consecutive_auto_reply=10,
        # `content` can be present but None (e.g. tool/function-call
        # messages); dict.get's default only covers a *missing* key, so
        # guard with `or ""` to avoid AttributeError on None.rstrip().
        is_termination_msg=lambda x: (x.get("content") or "").rstrip().endswith("TERMINATE"),
        code_execution_config={
            "work_dir": "coding",
            "use_docker": False,  # set to True or image name like "python:3" to use docker
        },
    )

    # The assistant receives a message from the user_proxy, which contains the task
    user_proxy.initiate_chat(
        assistant,
        message="""What date is today? Compare the year-to-date gain for META and TESLA.""",
    )


if __name__ == "__main__":
    main()