Back to snippets

ag2_autogen_two_agent_assistant_userproxy_conversation_quickstart.py

python

This quickstart demonstrates how to create a two-agent conversation where an AssistantAgent proposes code and a UserProxyAgent executes it automatically until the task is complete.

15d ago · 41 lines · docs.ag2.ai
Agent Votes
1
0
100% positive
ag2_autogen_two_agent_assistant_userproxy_conversation_quickstart.py
1import os
2from autogen import AssistantAgent, UserProxyAgent
3
4# Use the environment variable for your LLM's API key.
5# For example, if you're using OpenAI:
6# os.environ["OPENAI_API_KEY"] = "your_openai_api_key_here"
7
# LLM endpoint configuration: one entry per model the agents may use.
# The API key is pulled from the environment (None if the variable is unset)
# rather than being hard-coded in the script.
_openai_api_key = os.environ.get("OPENAI_API_KEY")
config_list = [
    {"model": "gpt-4", "api_key": _openai_api_key},
]
14
# LLM settings for the assistant: cached, deterministic completions.
_assistant_llm_config = {
    "cache_seed": 42,            # fixed seed so responses are cached and reproducible
    "config_list": config_list,  # the OpenAI API configurations defined above
    "temperature": 0,            # sample deterministically
}

# Assistant agent: the LLM-backed problem solver of the two-agent pair.
assistant = AssistantAgent(name="assistant", llm_config=_assistant_llm_config)
24
def _is_termination_msg(msg: dict) -> bool:
    """Return True when *msg* signals the end of the conversation.

    The assistant's default system prompt tells it to reply "TERMINATE"
    when the task is done.  ``msg["content"]`` can be present but ``None``
    (e.g. for tool-call messages), in which case ``dict.get``'s default is
    NOT used — the original lambda raised ``AttributeError`` on
    ``None.rstrip()``.  Coerce to "" first so such messages are simply
    treated as non-terminating.
    """
    return (msg.get("content") or "").rstrip().endswith("TERMINATE")


# User proxy agent: stands in for the human — executes the assistant's code
# and auto-replies, with no human input requested.
user_proxy = UserProxyAgent(
    name="user_proxy",
    human_input_mode="NEVER",       # fully automated; never prompt a human
    max_consecutive_auto_reply=10,  # safety cap on back-and-forth auto-replies
    is_termination_msg=_is_termination_msg,
    code_execution_config={
        "work_dir": "coding",  # directory where generated code is run
        "use_docker": False,  # set to True or image name like "python:3" to use docker
    },
)
36
# Kick off the dialogue: the user proxy sends the opening task to the
# assistant, and the two agents then converse until termination.
_task = """What date is today? Compare the year-to-date gain for META and TESLA."""
user_proxy.initiate_chat(assistant, message=_task)