Back to snippets
qwen_agent_assistant_quickstart_with_dashscope_llm.py
python — A simple example of creating an agent that uses a specified LLM to answer a user's question.
Agent Votes
1
0
100% positive
qwen_agent_assistant_quickstart_with_dashscope_llm.py
"""Quickstart: a qwen-agent ``Assistant`` backed by the DashScope LLM service.

Sends a single user message to the agent and prints its final answer.
"""

from qwen_agent.agents import Assistant


def main():
    # LLM configuration. DashScope is used here; an OpenAI-compatible local
    # model server works with the same dict shape (per qwen-agent docs).
    llm_cfg = {
        'model': 'qwen-max',
        'model_server': 'dashscope',
        'api_key': 'YOUR_DASHSCOPE_API_KEY',  # TODO: replace with a real key
    }

    # Initialize the Assistant agent.
    bot = Assistant(llm=llm_cfg, name='Assistant', description='A helpful assistant.')

    # Simple one-turn chat. ``Assistant.run`` is a *synchronous* generator
    # that streams progressively longer message lists, so no asyncio event
    # loop is needed; only the last yielded item (the complete response)
    # matters, so we keep just that instead of accumulating every partial.
    messages = [{'role': 'user', 'content': 'Hello!'}]
    last_response = None
    for response in bot.run(messages=messages):
        last_response = response

    # Print the final response, guarding against an empty stream (the
    # original ``responses[-1]`` would raise IndexError in that case).
    if last_response:
        print(last_response[-1]['content'])


if __name__ == '__main__':
    main()