Back to Graphrag

Structured Response

packages/graphrag-llm/notebooks/03_structured_responses.ipynb

3.0.9 · 2.4 KB
Original Source

Structured Response

LLMCompletion.completion accepts a response_format parameter — a Pydantic model class that is used to parse the completion and return a structured response.

python
# Copyright (c) 2024 Microsoft Corporation.
# Licensed under the MIT License

import os

from dotenv import load_dotenv
from graphrag_llm.completion import LLMCompletion, create_completion
from graphrag_llm.config import AuthMethod, ModelConfig
from graphrag_llm.types import LLMCompletionResponse
from pydantic import BaseModel, Field

load_dotenv()


class LocalWeather(BaseModel):
    """Weather observation for a single city.

    The ``Field`` descriptions below are part of the JSON schema sent to the
    model, so they guide how the LLM fills in each attribute.
    """

    city: str = Field(description="The name of the city")
    temperature: float = Field(description="The temperature in Celsius")
    condition: str = Field(description="The weather condition description")


class WeatherReports(BaseModel):
    """Container for weather reports covering one or more cities.

    Used as the ``response_format`` target so a single completion can return
    a list of ``LocalWeather`` entries parsed from free-form text.
    """

    reports: list[LocalWeather] = Field(
        description="The weather reports for multiple cities"
    )


# Build the model configuration from environment variables.
# When no API key is present, fall back to Azure managed-identity auth.
api_key = os.getenv("GRAPHRAG_API_KEY")
# The same env var drives both the model name and the Azure deployment name.
deployment = os.getenv("GRAPHRAG_MODEL", "gpt-4o")
model_config = ModelConfig(
    model_provider="azure",
    model=deployment,
    azure_deployment_name=deployment,
    api_base=os.getenv("GRAPHRAG_API_BASE"),
    api_version=os.getenv("GRAPHRAG_API_VERSION", "2025-04-01-preview"),
    api_key=api_key,
    auth_method=AuthMethod.ApiKey if api_key else AuthMethod.AzureManagedIdentity,
)
llm_completion: LLMCompletion = create_completion(model_config)

# Ask for a structured parse of a free-form sentence: response_format makes
# the client validate the completion into a WeatherReports instance.
response: LLMCompletionResponse[WeatherReports] = llm_completion.completion(
    messages="It is sunny and 52 degrees fahrenheit in Seattle. It is cloudy and 75 degrees fahrenheit in San Francisco.",
    response_format=WeatherReports,
)  # type: ignore

# formatted_response holds the parsed pydantic object rather than raw text.
local_weather_reports: WeatherReports = response.formatted_response  # type: ignore
for weather in local_weather_reports.reports:
    print(
        f"City: {weather.city}\n"
        f"  Temperature: {weather.temperature} °C\n"
        f"  Condition: {weather.condition}"
    )

Streaming

Streaming is not supported when using response_format.

python
# Demonstrate that stream=True is rejected when response_format is set:
# the call is expected to raise, and we surface the error message.
try:
    response = llm_completion.completion(
        messages="It is sunny and 52 degrees fahrenheit in Seattle. It is cloudy and 75 degrees fahrenheit in San Francisco.",
        response_format=WeatherReports,
        stream=True,
    )
except Exception as err:  # noqa: BLE001
    print(f"Error during streaming completion: {err}")