docs/integrate-ag2.md
Use Nexa SDK's OpenAI-compatible server with AG2 (formerly AutoGen).
nexa pull NexaAI/Qwen3-4B-GGUF
nexa serve
pip install "ag2[openai]"
python cookbook/ag2/example.py
from autogen import ConversableAgent, LLMConfig

# Route AG2 traffic to the local Nexa server, which speaks the OpenAI
# wire protocol; the api_key is required by the client but ignored here.
llm_config = LLMConfig({
    "model": "NexaAI/Qwen3-4B-GGUF",
    "base_url": "http://localhost:18181/v1",
    "api_type": "openai",
    "api_key": "not-needed",
})

# One conversational agent backed by the locally served model.
assistant = ConversableAgent(
    name="helpful_agent",
    system_message="You are a helpful AI assistant.",
    llm_config=llm_config,
)

# Run a bounded exchange, then drain and display the transcript.
chat_result = assistant.run(
    message="Write a Python function to calculate the sum of a list.",
    max_turns=3,
)
chat_result.process()
print(chat_result.messages)
from autogen import ConversableAgent, LLMConfig

# Shared endpoint config: both agents talk to the same local Nexa server
# over its OpenAI-compatible API (the key is required but unused).
nexa_llm = LLMConfig({
    "model": "NexaAI/Qwen3-4B-GGUF",
    "base_url": "http://localhost:18181/v1",
    "api_type": "openai",
    "api_key": "not-needed",
})

# Two-agent workflow: a coder writes code, a reviewer critiques it.
# Each agent signals completion by replying TERMINATE.
coder = ConversableAgent(
    name="coder",
    system_message="You are a Python developer. Reply TERMINATE when done.",
    llm_config=nexa_llm,
)
reviewer = ConversableAgent(
    name="reviewer",
    system_message="You are a code reviewer. Reply TERMINATE when done.",
    llm_config=nexa_llm,
)

# The coder opens the conversation; turns alternate up to the cap.
chat = coder.initiate_chat(
    recipient=reviewer,
    message="Write a function to check if a string is a palindrome.",
    max_turns=3,
)
print(chat.summary)
Notes:
Download the model first with `nexa pull NexaAI/Qwen3-4B-GGUF` and start the server with `nexa serve` before running either example.