cookbook/liteLLM_function_calling.ipynb
Use OpenAI function calling via litellm.completion() — supported by the gpt-4-0613 and gpt-3.5-turbo-0613 models.

## Install liteLLM
!pip install litellm
import os
from litellm import completion
# Set your OpenAI API key before running — an empty string will fail auth.
os.environ['OPENAI_API_KEY'] = "" #@param
We define a get_current_weather() function and pass its JSON schema to GPT-3.5 so the model can decide to call it.
See OpenAI docs for this: https://openai.com/blog/function-calling-and-other-api-updates
# Seed conversation: a single user turn asking about Boston's weather.
messages = [dict(role="user", content="What is the weather like in Boston?")]
def get_current_weather(location, unit="fahrenheit"):
    """Mock weather lookup used as the function-calling target.

    The schema advertised to the model declares an optional ``unit``
    argument, and the notebook calls ``get_current_weather(**function_args)``
    — so this signature must accept ``unit`` or the call raises TypeError
    whenever the model chooses to include it.

    Args:
        location: City and state, e.g. "Boston, MA".
        unit: "celsius" or "fahrenheit"; accepted for schema compatibility
            but ignored by this mock.

    Returns:
        A short weather string for known locations, or a fallback message
        for unknown ones — always a string, so it is valid as message
        ``content`` when fed back to the model.
    """
    if location == "Boston, MA":
        return "The weather is 12F"
    # Original returned None here, which breaks the follow-up
    # {"role": "function", "content": result} message.
    return f"No weather data available for {location}"
# JSON-schema description of get_current_weather, sent to the model so it
# knows when and how to call the function.
_weather_parameters = {
    "type": "object",
    "properties": {
        "location": {
            "type": "string",
            "description": "The city and state, e.g. San Francisco, CA",
        },
        "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
    },
    "required": ["location"],
}

functions = [
    {
        "name": "get_current_weather",
        "description": "Get the current weather in a given location",
        "parameters": _weather_parameters,
    }
]
# First model call: send the conversation plus the function schema so the
# model can respond with a function_call instead of plain text.
response = completion(model="gpt-3.5-turbo-0613", messages=messages, functions=functions)
print(response)
## Read which function the model chose to call, and with what arguments
# Pull the model's chosen function call (name + JSON-string arguments)
# from the first choice's message.
function_call_data = response["choices"][0]["message"]["function_call"]
# Bare expression: displays the value in the notebook output cell.
function_call_data
import json

# Decode the model's choice: the function name and its JSON-encoded
# arguments (which arrive as a string and must be parsed).
function_name = function_call_data["name"]
function_args = json.loads(function_call_data["arguments"])
print(function_name, function_args)

# Dispatch to the matching local implementation.
if function_name == "get_current_weather":
    result = get_current_weather(**function_args)
    print(result)
# Replay the conversation with two extra turns appended: the assistant's
# function_call and a "function" turn carrying our locally computed result,
# so the model can produce a final natural-language answer.
messages = [
    {"role": "user", "content": "What is the weather like in Boston?"},
    {
        "role": "assistant",
        "content": None,
        "function_call": {
            "name": "get_current_weather",
            "arguments": "{ \"location\": \"Boston, MA\"}",
        },
    },
    {"role": "function", "name": "get_current_weather", "content": result},
]

# Second model call: the model now sees the function output and answers.
response = completion(model="gpt-3.5-turbo-0613", messages=messages, functions=functions)
print(response)