Ollama

Run the qwen2.5 model locally with Ollama (the model is pulled on first use):

```bash
ollama run qwen2.5
```
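Before wiring up Swarm, it can help to confirm that Ollama's OpenAI-compatible endpoint answers. A minimal sketch, assuming Ollama is listening on its default port 11434 and qwen2.5 has already been pulled; the `openai` package it uses is installed as a dependency of Swarm in the next step:

```python
# Minimal sketch: confirm Ollama's OpenAI-compatible endpoint responds before involving Swarm.
# Assumes Ollama is running on its default port 11434 and the qwen2.5 model has been pulled.
from openai import OpenAI

client = OpenAI(
    base_url="http://localhost:11434/v1/",
    api_key="ollama",  # Ollama ignores the key, but the client requires a non-empty value
)

resp = client.chat.completions.create(
    model="qwen2.5",
    messages=[{"role": "user", "content": "Say hello in one short sentence."}],
)
print(resp.choices[0].message.content)
```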
Install

Swarm requires Python 3.10+. Install it directly from GitHub:

```bash
pip install git+https://github.com/openai/swarm.git
```
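A quick import is enough to confirm the package installed correctly:

```python
# Sanity check: the package should import without errors after installation.
from swarm import Swarm, Agent
print(Swarm.__name__, Agent.__name__)
```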
Code V1
```python
# Import the Swarm and Agent classes
from swarm import Swarm, Agent
from openai import OpenAI

# Instantiate the Swarm client, pointing the OpenAI client at the local Ollama endpoint
openai_client = OpenAI(base_url='http://192.168.1.100:11434/v1/', api_key='xxx')
client = Swarm(openai_client)

# Define a function that hands the conversation over to Agent B
def transfer_to_agent_b():
    return agent_b

# Define Agent A
agent_a = Agent(
    name="Agent A",
    instructions="You are a helpful agent.",
    functions=[transfer_to_agent_b],
)

# Define Agent B
agent_b = Agent(
    name="Agent B",
    # model_override="qwen2.5",
    instructions="Only speak in Haikus.",
)

# Run Swarm with the user message
response = client.run(
    agent=agent_a,
    model_override="qwen2.5",
    messages=[{"role": "user", "content": "I want to talk to agent B."}],
)

# Print Agent B's reply
print(response.messages[-1]["content"])
```
Sample output:

```
Invisible thread connects,
Voice echoes, B responds now,
Silence brief then words.
```
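In the snippet above the model is swapped per call via `model_override`. Swarm's `Agent` also exposes a `model` field, so each agent can be pinned to the local model when it is constructed instead; a minimal sketch of that variant (the field name follows the upstream `swarm` package, so verify it against your installed version):

```python
# Sketch: pin the local model on each Agent instead of passing model_override to run().
# Assumes the same Ollama endpoint as in the example above.
from swarm import Swarm, Agent
from openai import OpenAI

openai_client = OpenAI(base_url='http://192.168.1.100:11434/v1/', api_key='xxx')
client = Swarm(openai_client)

def transfer_to_agent_b():
    # Returning an Agent from a tool call is how Swarm hands the conversation over.
    return agent_b

agent_b = Agent(
    name="Agent B",
    model="qwen2.5",  # per-agent model instead of model_override
    instructions="Only speak in Haikus.",
)

agent_a = Agent(
    name="Agent A",
    model="qwen2.5",
    instructions="You are a helpful agent.",
    functions=[transfer_to_agent_b],
)

response = client.run(
    agent=agent_a,
    messages=[{"role": "user", "content": "I want to talk to agent B."}],
)
print(response.messages[-1]["content"])
```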
Code V2
```python
# First, install the Swarm framework (assuming you have already run this on the command line)
# pip install git+ssh://git@github.com/openai/swarm.git

# Import the Swarm and Agent classes
from swarm import Swarm, Agent
from openai import OpenAI

# Instantiate the Swarm client, pointing the OpenAI client at the local Ollama endpoint
openai_client = OpenAI(base_url='http://20.168.1.122:11434/v1/', api_key='x')
client = Swarm(openai_client)

# Define a function that hands the conversation over to Agent B
def transfer_to_agent_b():
    return agent_b

# Define Agent A
agent_a = Agent(
    name="Agent A",
    instructions="你是一个乐于助人的智能体。",  # "You are a helpful agent."
    functions=[transfer_to_agent_b],
)

# Define Agent B
agent_b = Agent(
    name="Agent B",
    # model_override="qwen2.5",
    instructions="只用中文歇后语说话。",  # "Only speak in Chinese xiehouyu (two-part allegorical sayings)."
)

# Run Swarm with the user message
response = client.run(
    agent=agent_a,
    model_override="qwen2.5",
    messages=[{"role": "user", "content": "我想和智能体B对话。"}],  # "I want to talk to Agent B."
)

# Print Agent B's reply
print(response.messages[-1]["content"])
```
Response:

```
好的,现在你将与智能体B对话。它是你的助手了,请对其说些什么吧!
智能体B:你好呀!准备好了可以开始我们的交流了呢。有什么问题或者想知道的内容尽管问哦。
```

(In English: "OK, you will now be talking to Agent B. It is your assistant now, please say something to it!" / "Agent B: Hello! We can start our conversation whenever you're ready. Feel free to ask anything you'd like to know.")
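The same handoff pattern extends to shared state: `client.run()` also accepts a `context_variables` dict, and an agent's `instructions` may be a callable that receives it, so prompts can be built dynamically per turn. A minimal sketch against the same local endpoint (the `context_variables` parameter and callable instructions follow the upstream Swarm README; verify against your installed version):

```python
# Sketch: pass shared context_variables into the run and use them in dynamic instructions.
from swarm import Swarm, Agent
from openai import OpenAI

openai_client = OpenAI(base_url='http://20.168.1.122:11434/v1/', api_key='x')
client = Swarm(openai_client)

def instructions(context_variables):
    # Callable instructions receive the shared context on every turn.
    user_name = context_variables.get("user_name", "user")
    return f"You are a helpful agent. Address the user as {user_name}."

agent = Agent(name="Context Agent", instructions=instructions)

response = client.run(
    agent=agent,
    model_override="qwen2.5",
    messages=[{"role": "user", "content": "Hi!"}],
    context_variables={"user_name": "Alice"},
)
print(response.messages[-1]["content"])
```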