Back to Opik

Dynamic Tracing Cookbook

apps/opik-documentation/documentation/fern/docs/cookbook/dynamic_tracing_cookbook.mdx

2.0.22-6605-merge-20655.4 KB
Original Source
python
import random
import time
from contextlib import contextmanager
from typing import Any, Dict, List, Optional

import opik

Dynamic Tracing Control Cookbook

This cookbook demonstrates how to use Opik's dynamic tracing control features

to optimize performance and implement flexible tracing strategies in production.

What You'll Learn:

  • Enable/disable tracing at runtime without code changes

  • Implement conditional tracing based on user attributes

  • Create sampling strategies for high-throughput systems

  • Measure and optimize tracing performance impact

  • Control integration tracking dynamically

Prerequisites:

bash

pip install opik

Setup and Imports

First, let's import the necessary libraries and set up our environment.

python
# Confirm the installed Opik version and the current global tracing state.
print(f"Opik version: {opik.__version__}")
print(f"Initial tracing state: {opik.is_tracing_active()}") # True when tracing is on, i.e. the inverse of the track-disable setting

1. Basic Runtime Control

The simplest use case is toggling tracing on and off during runtime.

python
# Toggle the global tracing flag at runtime; the change takes effect immediately.
print(f"Current tracing state: {opik.is_tracing_active()}")

# While disabled, @opik.track-decorated functions will not create spans.
opik.set_tracing_active(False)
print(f"After disabling: {opik.is_tracing_active()}")

# Re-enable tracing for subsequent calls.
opik.set_tracing_active(True)
print(f"After enabling: {opik.is_tracing_active()}")

2. Context-Aware Tracing

Create a context manager for temporary tracing control.

python
@contextmanager
def tracing_enabled(enabled: bool):
    """Temporarily force tracing on or off, restoring the prior state on exit.

    Args:
        enabled: Desired tracing state while the ``with`` block is active.
    """
    previous = opik.is_tracing_active()
    try:
        opik.set_tracing_active(enabled)
        yield
    finally:
        # Always restore whatever state was in effect before entering,
        # even if the body raised.
        opik.set_tracing_active(previous)

print(f"Before context: {opik.is_tracing_active()}")

# Tracing is suspended only for the duration of the with-block.
with tracing_enabled(False):
    print(f"Inside context (disabled): {opik.is_tracing_active()}")
    # Any traced functions here won't create spans

print(f"After context: {opik.is_tracing_active()}")

3. Conditional Tracing Strategies

Implement different strategies that decide when tracing should be enabled; these strategies are wired into a request handler in part 4.

python
class TracingStrategy:
    """Abstract base for pluggable per-request tracing decisions."""

    def should_trace(self, **kwargs) -> bool:
        """Return True when the current request should be traced.

        Subclasses must override this; the base implementation always raises.
        """
        raise NotImplementedError


class UserTierStrategy(TracingStrategy):
    """Trace only requests from users in configured premium tiers."""

    def __init__(self, premium_tiers: Optional[List[str]] = None):
        """
        Args:
            premium_tiers: Tier names eligible for tracing. Defaults to
                ["premium", "enterprise"].
        """
        # `is None` check (not truthiness) so an explicit empty list means
        # "trace no one" instead of silently falling back to the defaults.
        self.premium_tiers = (
            ["premium", "enterprise"] if premium_tiers is None else premium_tiers
        )

    def should_trace(self, user_tier: Optional[str] = None, **kwargs) -> bool:
        """Return True when ``user_tier`` is one of the premium tiers."""
        return user_tier in self.premium_tiers


class SamplingStrategy(TracingStrategy):
    """Probabilistically trace a fixed fraction of requests."""

    def __init__(self, sample_rate: float = 0.1):
        """
        Args:
            sample_rate: Probability that any given request is traced.
                0.0 never traces; 1.0 always does.
        """
        self.sample_rate = sample_rate

    def should_trace(self, **kwargs) -> bool:
        """Flip a biased coin: trace with probability ``sample_rate``."""
        # random.random() is uniform on [0.0, 1.0), so the comparison holds
        # with exactly the configured probability.
        return random.random() < self.sample_rate


class DebugModeStrategy(TracingStrategy):
    """Trace when debug mode is on, or for an allow-list of debug users."""

    def __init__(self, debug_users: Optional[List[str]] = None):
        """
        Args:
            debug_users: User ids that are always traced. Defaults to an
                empty list (no per-user overrides).
        """
        self.debug_users = debug_users or []
        # Global toggle; starts off so only allow-listed users are traced.
        self.debug_mode = False

    def should_trace(self, user_id: Optional[str] = None, **kwargs) -> bool:
        """Return True in debug mode or when ``user_id`` is allow-listed."""
        return self.debug_mode or (user_id in self.debug_users)

    def enable_debug(self):
        """Force tracing on for every request until disabled."""
        self.debug_mode = True

    def disable_debug(self):
        """Stop forcing tracing; the per-user allow-list still applies."""
        self.debug_mode = False

# Registry of ready-to-use strategies, keyed by a human-readable name.
strategies = {
    "premium_only": UserTierStrategy(),
    "10_percent_sample": SamplingStrategy(0.1),
    "debug_mode": DebugModeStrategy(["debug_user_1", "debug_user_2"]),
}

4. Smart Request Handler

Create a request handler that uses tracing strategies.

python
@opik.track(name="user_query")
def handle_user_query(query: str, user_id: str, user_tier: str) -> Dict[str, Any]:
    """Simulate handling a user query with LLM processing.

    Args:
        query: The user's question.
        user_id: Identifier of the requesting user.
        user_tier: Subscription tier; premium tiers get a richer response.

    Returns:
        A dict with the query, simulated response, caller metadata, and
        simulated token/latency figures.
    """
    # Simulate a small amount of model latency.
    time.sleep(0.01)

    is_premium = user_tier in ["premium", "enterprise"]
    if is_premium:
        response = f"Premium response to: {query}"
        tokens_used = random.randint(150, 300)
    else:
        response = f"Basic response to: {query}"
        tokens_used = random.randint(50, 100)

    result: Dict[str, Any] = {
        "query": query,
        "response": response,
        "user_id": user_id,
        "user_tier": user_tier,
        "tokens_used": tokens_used,
        "processing_time_ms": random.randint(100, 500),
    }
    return result



class SmartRequestHandler:
    """Request handler whose tracing decision is delegated to a strategy."""

    def __init__(self, strategy: TracingStrategy):
        """Store the strategy that decides per-request tracing."""
        self.strategy = strategy

    def handle_request(
        self, query: str, user_id: str, user_tier: str
    ) -> Dict[str, Any]:
        """Handle request with conditional tracing.

        Returns:
            A dict with ``traced`` (whether tracing was enabled for this
            request) and ``result`` (the query handler's output).
        """
        # Ask the strategy whether this particular request merits a trace.
        trace_this = self.strategy.should_trace(
            user_id=user_id, user_tier=user_tier, query=query
        )

        # Flip the global flag before invoking the traced function, so the
        # @opik.track decorator honors the per-request decision.
        opik.set_tracing_active(trace_this)

        payload = handle_user_query(query, user_id, user_tier)

        return {
            "traced": trace_this,
            "result": payload,
        }


# Demonstrate the premium-only strategy: only premium/enterprise requests
# should report Traced = True.
print("=== Testing Premium-Only Strategy ===")
handler = SmartRequestHandler(UserTierStrategy())

# (query, user_id, user_tier) tuples covering both free and premium tiers.
requests = [
    ("What is AI?", "user1", "free"),
    ("Explain ML", "user2", "premium"),
    ("How does it work?", "user3", "free"),
    ("Advanced question", "user4", "enterprise"),
]

for query, user_id, tier in requests:
    result = handler.handle_request(query, user_id, tier)
    print(f"User {user_id} ({tier}): Traced = {result['traced']}")