Back to Llama Index

Azure Code Interpreter Tool Spec

docs/examples/tools/azure_code_interpreter.ipynb

Version 0.14.2 · 13.2 KB
Original Source

Azure Code Interpreter Tool Spec

This example walks through configuring and using the Azure Code Interpreter tool spec (powered by Azure Dynamic Sessions).

python
%pip install llama-index
%pip install llama-index-llms-azure
%pip install llama-index-tools-azure-code-interpreter
python
# Configure the Azure OpenAI credentials used to build the LLM below.
from llama_index.llms.azure_openai import AzureOpenAI

# Replace each placeholder with the values from your Azure OpenAI resource.
api_key = "your-azure-openai-api-key"
azure_endpoint = "your-azure-openai-endpoint"
api_version = "azure-api-version"
python
# Bring in the Azure Code Interpreter tool spec and the ReAct agent.
from llama_index.tools.azure_code_interpreter import (
    AzureCodeInterpreterToolSpec,
)
from llama_index.core.agent import ReActAgent

# Point the tool spec at your session (pool) management endpoint.
# local_save_path is optional, but recommended: when set, any intermediate
# data produced by the executed Python code is saved there automatically.
azure_code_interpreter_spec = AzureCodeInterpreterToolSpec(
    pool_management_endpoint="your-pool-management-endpoint",
    local_save_path="local-file-path-to-save-intermediate-data",
)

# The Azure OpenAI LLM that will drive the agent.
llm = AzureOpenAI(
    model="gpt-35-turbo",
    deployment_name="gpt-35-deploy",
    api_key=api_key,
    azure_endpoint=azure_endpoint,
    api_version=api_version,
)

# Build the ReAct agent, injecting the code-interpreter tools.
tools = azure_code_interpreter_spec.to_tool_list()
agent = ReActAgent.from_tools(tools, llm=llm, verbose=True)
python
# The code interpreter can also be invoked directly, bypassing the agent.
result = azure_code_interpreter_spec.code_interpreter("1+1")
print(result)
python
# Ask the agent a simple question that benefits from running Python code.
response = agent.chat("Tell me the current time in Seattle.")
print(response)
python
# Upload a sample temperature file (a day in Redmond, Washington) to the
# session pool, then ask a question that requires reading it.
res = azure_code_interpreter_spec.upload_file(
    local_file_path="./TemperatureData.csv"
)
# upload_file returns metadata for the uploaded file; an empty result means
# nothing was uploaded, so only query the agent on success.
# (Pythonic truthiness check instead of `len(res) != 0`.)
if res:
    print(
        agent.chat("Find the highest temperature in the file that I uploaded.")
    )
python
# Ask the agent to plot the uploaded data. Because local_save_path was set on
# the tool spec, the generated diagram data is saved there automatically.
answer = agent.chat(
    "Use the temperature data that I uploaded, create a temperature curve."
)
print(answer)
python
# Ask the agent to modify the uploaded file in place on the session pool.
reply = agent.chat(
    "Rearrange the temperature data in a descending order and save it back to the original csv file."
)
print(reply)
python
# Pull the modified file back from the session pool onto the local machine.
# (Adjust local_file_path to a real destination on your system.)
azure_code_interpreter_spec.download_file_to_local(
    remote_file_path="TemperatureData.csv",
    local_file_path="/.../SortedTemperatureData.csv",
)
python
# For comparison, show the first 10 lines of the original local file.
with open("/.../TemperatureData.csv", "r") as f:
    head = [f.readline().strip() for _ in range(10)]
print("\n".join(head))
python
# For comparison, show the first 10 lines of the sorted file that was
# downloaded from the session pool.
with open("/.../SortedTemperatureData.csv", "r") as f:
    preview = [f.readline().strip() for _ in range(10)]
print("\n".join(preview))