VLLM Model Testing

Set up Environment

python
# Install LiteLLM (the client library) and vLLM (the local inference backend)
!pip install --upgrade litellm
!pip install vllm
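
A quick sanity check confirms both packages are importable before moving on (the version strings will vary with your environment):

python
from importlib.metadata import version

# Verify both packages are installed; raises PackageNotFoundError if not
print("litellm:", version("litellm"))
print("vllm:", version("vllm"))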

Load the Logs

python
import pandas as pd
python
# Path to the CSV of logged prompts
file_path = 'Model-prompts-example.csv'

# Load the CSV into a pandas DataFrame
data = pd.read_csv(file_path)

data.head()
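
The only structural assumption the rest of the notebook makes is an Input column of prompt strings. If you don't have a log export handy, a stand-in DataFrame of the same shape works; the prompts below are purely illustrative:

python
# Hypothetical stand-in for Model-prompts-example.csv: one prompt
# per row in an 'Input' column
data = pd.DataFrame({
    "Input": [
        "What is the capital of France?",
        "Summarize the plot of Hamlet in one sentence.",
        "Explain what a vector database is.",
    ]
})
data.head()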
python
input_texts = data['Input'].values
python
messages = [[{"role": "user", "content": input_text}] for input_text in input_texts]
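
Each prompt is wrapped as a single-turn chat in the OpenAI messages format that LiteLLM expects, so inspecting one entry shows a one-element list containing a user message:

python
# One conversation per prompt, each a single user turn
messages[0]
# e.g. [{'role': 'user', 'content': 'What is the capital of France?'}]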

Running Inference

python
from litellm import batch_completion

model_name = "facebook/opt-125m"
provider = "vllm"

response_list = batch_completion(
    model=model_name,
    custom_llm_provider=provider,  # can easily switch to huggingface, replicate, together ai, sagemaker, etc.
    messages=messages,
    temperature=0.2,
    max_tokens=80,
)
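
As the comment notes, the same batch call can target a hosted provider instead of local vLLM. A minimal sketch, assuming a Hugging Face Inference deployment of the same model and an API key in your environment (both illustrative):

python
import os

# Illustrative only: model availability and credentials depend on
# your Hugging Face account and deployment
os.environ["HUGGINGFACE_API_KEY"] = "hf_..."  # your key here

hf_responses = batch_completion(
    model="huggingface/facebook/opt-125m",  # provider prefix selects the backend
    messages=messages,
    temperature=0.2,
    max_tokens=80,
)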
python
response_list
python
response_values = [response['choices'][0]['message']['content'] for response in response_list]
python
response_values
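
batch_completion returns one response per conversation, in order, so the list lines up with the DataFrame rows. If individual requests can fail in your setup, a guarded variant of the extraction preserves that alignment (a sketch; adapt the except clause to however your configuration reports failures):

python
# Defensive extraction: emit None instead of raising on a malformed
# or failed entry, keeping one output per input row
def extract_content(response):
    try:
        return response['choices'][0]['message']['content']
    except (TypeError, KeyError, IndexError):
        return None

response_values = [extract_content(r) for r in response_list]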
python
data[f"{model_name}_output"] = response_values
python
data.to_csv('model_responses.csv', index=False)
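
Because the output column is keyed by model name, the same pattern scales to a side-by-side comparison: loop over several models and collect one column per model (the second model name below is illustrative):

python
# Sketch: run the same prompts through several models and keep one
# output column per model (model names are illustrative)
models_to_test = ["facebook/opt-125m", "facebook/opt-350m"]

for name in models_to_test:
    responses = batch_completion(
        model=name,
        custom_llm_provider="vllm",
        messages=messages,
        temperature=0.2,
        max_tokens=80,
    )
    data[f"{name}_output"] = [
        r['choices'][0]['message']['content'] for r in responses
    ]

data.to_csv('model_responses.csv', index=False)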