Back to Baml

Chat

fern/01-guide/06-prompt-engineering/chat-history.mdx

0.222.0 · 3.7 KB
Original Source

In this guide we'll build a small chatbot that takes in user messages and generates responses.

```baml
// One turn of the conversation: who spoke and what was said.
class MyUserMessage {
  // Literal union — only "user" or "assistant" are valid role values.
  role "user" | "assistant"
  content string
}

// Generates the assistant's next reply from the full chat history.
// The {% for %} loop renders every prior message, and _.role(...) tags each
// one with its chat role so the provider receives a proper multi-turn
// conversation instead of a single flattened user message.
function ChatWithLLM(messages: MyUserMessage[]) -> string {
  client "openai/gpt-5"
  prompt #"
    Answer the user's questions based on the chat history:
    {% for message in messages %}
      {{ _.role(message.role) }} 
      {{ message.content }}
    {% endfor %}

    Answer:
  "#
}

// Sample invocation of ChatWithLLM with a two-message history
// (one user turn followed by one assistant turn).
test TestName {
  functions [ChatWithLLM]
  args {
    messages [
      {
        role "user"
        content "Hello!"
      }
      {
        role "assistant"
        content "Hi!"
      }
    ]
  }
}

```

## Code

<CodeGroup>

```python Python
from baml_client import b
from baml_client.types import MyUserMessage

def main() -> None:
    """Run an interactive console chat against the ChatWithLLM BAML function.

    The full conversation is resent on every call, so the model always sees
    the complete history.
    """
    # Conversation so far; grows by two entries (user + assistant) per turn.
    messages: list[MyUserMessage] = []

    while True:
        content = input("Enter your message (or 'quit' to exit): ")
        if content.lower() == 'quit':
            break

        # Add the user's message to the chat history
        messages.append(MyUserMessage(role="user", content=content))

        agent_response = b.ChatWithLLM(messages=messages)
        print(f"AI: {agent_response}")
        print()

        # Add the agent's response to the chat history
        messages.append(MyUserMessage(role="assistant", content=agent_response))


if __name__ == "__main__":
    main()

```

```typescript Typescript
import { b, MyUserMessage } from 'baml_client';
import * as readline from 'readline';

// Interactive console chat: each turn appends the user's input and the
// model's reply to a shared history, which is resent in full on every call.
const terminal = readline.createInterface({
  input: process.stdin,
  output: process.stdout
});

// Conversation so far — two entries are appended per turn.
const history: MyUserMessage[] = [];

// Promise-based wrapper over readline's callback-style question().
const ask = (query: string): Promise<string> =>
  new Promise<string>((resolve) => terminal.question(query, resolve));

async function main() {
  for (;;) {
    const text = await ask("Enter your message (or 'quit' to exit): ");
    if (text.toLowerCase() === 'quit') {
      break;
    }

    history.push({ role: "user", content: text });

    const reply = await b.ChatWithLLM({ messages: history });
    console.log(`AI: ${reply}`);
    console.log();

    // Keep the model's reply so the next turn has full context.
    history.push({ role: "assistant", content: reply });
  }

  terminal.close();
}

main();
```

```go Go
package main

import (
    "bufio"
    "context"
    "fmt"
    "os"
    "strings"
    
    b "example.com/myproject/baml_client"
    "example.com/myproject/baml_client/types"
)

// main runs an interactive console chat against the ChatWithLLM BAML
// function, resending the complete message history on every turn.
func main() {
    ctx := context.Background()

    // Conversation so far; two entries are appended per successful turn.
    var history []types.MyUserMessage

    stdin := bufio.NewScanner(os.Stdin)

    for {
        fmt.Print("Enter your message (or 'quit' to exit): ")
        // Scan reports false on EOF or a read error — treat both as "done".
        if !stdin.Scan() {
            break
        }

        line := stdin.Text()
        if strings.ToLower(line) == "quit" {
            break
        }

        // Record the user's message in the history.
        history = append(history, types.MyUserMessage{
            // Go generates constructor functions for literal unions like
            // "user" | "assistant"; the naming pattern is
            // Union{Number}K{variant1}OrK{variant2}__NewK{variant}().
            Role:    types.Union2KuserOrKassistant__NewKuser(),
            Content: line,
        })

        // Ask the model for the next reply.
        reply, err := b.ChatWithLLM(ctx, history)
        if err != nil {
            // Report the failure and keep the loop alive so the user can retry.
            fmt.Printf("Error: %v\n", err)
            continue
        }

        fmt.Printf("AI: %s\n\n", reply)

        // Record the assistant's reply so the next turn has full context.
        history = append(history, types.MyUserMessage{
            // Constructor for the "assistant" variant of the same union.
            Role:    types.Union2KuserOrKassistant__NewKassistant(),
            Content: reply,
        })
    }
}
```

</CodeGroup>