sdk/python/notebooks/sdk_walkthrough.ipynb
This walkthrough exercises only the public SDK surface (the root exports of codex_app_server).
# Cell 1: bootstrap local SDK imports + pinned runtime package
import os
import sys
from pathlib import Path

# Fail fast on old interpreters: the SDK's published floor is Python 3.10.
if sys.version_info < (3, 10):
    raise RuntimeError(
        f'Notebook requires Python 3.10+; current interpreter is {sys.version.split()[0]}.'
    )
# A kernel can outlive its launch directory (e.g. the folder was deleted or
# unmounted); os.getcwd() then raises FileNotFoundError, so reset the kernel's
# working directory to home before any Path.cwd()-based discovery below.
try:
    _ = os.getcwd()
except FileNotFoundError:
    os.chdir(str(Path.home()))
def _is_sdk_python_dir(path: Path) -> bool:
return (path / 'pyproject.toml').exists() and (path / 'src' / 'codex_app_server').exists()
def _iter_home_fallback_candidates(home: Path):
# bounded depth scan under home to support launching notebooks from unrelated cwd values
patterns = ('sdk/python', '*/sdk/python', '*/*/sdk/python', '*/*/*/sdk/python')
for pattern in patterns:
yield from home.glob(pattern)
def _find_sdk_python_dir(start: Path) -> Path | None:
    """Locate the sdk/python project directory.

    Search order:
      1. ``CODEX_PYTHON_SDK_DIR`` — explicit override, checked first so it
         always wins over discovery (the bootstrap error message tells users
         to set it; previously it was consulted only after cwd scanning, so
         a wrong auto-discovered directory could shadow the override).
      2. *start* and each of its ancestors.
      3. an ``sdk/python`` subdirectory of *start* and each ancestor.
      4. every ``sys.path`` entry, plus its ``sdk/python`` subdirectory.
      5. a bounded-depth scan under the home directory.

    Returns the resolved directory, or None when nothing matches.
    """
    checked: set[Path] = set()

    def _consider(candidate: Path) -> Path | None:
        # Resolve once so symlinked/relative duplicates are probed a single time.
        resolved = candidate.resolve()
        if resolved in checked:
            return None
        checked.add(resolved)
        return resolved if _is_sdk_python_dir(resolved) else None

    def _first_match(candidates) -> Path | None:
        # Return the first candidate that passes _consider, or None.
        for candidate in candidates:
            found = _consider(candidate)
            if found is not None:
                return found
        return None

    env_dir = os.environ.get('CODEX_PYTHON_SDK_DIR')
    if env_dir:
        found = _consider(Path(env_dir).expanduser())
        if found is not None:
            return found

    ancestors = [start, *start.parents]
    found = _first_match(ancestors)
    if found is not None:
        return found
    found = _first_match(parent / 'sdk' / 'python' for parent in ancestors)
    if found is not None:
        return found

    for entry in sys.path:
        if not entry:
            continue
        entry_path = Path(entry).expanduser()
        found = _first_match((entry_path, entry_path / 'sdk' / 'python'))
        if found is not None:
            return found

    return _first_match(_iter_home_fallback_candidates(Path.home()))
# Resolve the sdk/python checkout starting from the current working directory.
repo_python_dir = _find_sdk_python_dir(Path.cwd())
if repo_python_dir is None:
    raise RuntimeError('Could not locate sdk/python. Set CODEX_PYTHON_SDK_DIR to your sdk/python path.')
repo_python_str = str(repo_python_dir)
# Prepend (not append) so the local checkout shadows any installed copy;
# this also makes the _runtime_setup helper below importable.
if repo_python_str not in sys.path:
    sys.path.insert(0, repo_python_str)
from _runtime_setup import ensure_runtime_package_installed
# Install/verify the pinned runtime package into this kernel's interpreter.
runtime_version = ensure_runtime_package_installed(
    sys.executable,
    repo_python_dir,
)
src_dir = repo_python_dir / 'src'
examples_dir = repo_python_dir / 'examples'
src_str = str(src_dir)
examples_str = str(examples_dir)
if src_str not in sys.path:
    sys.path.insert(0, src_str)
if examples_str not in sys.path:
    sys.path.insert(0, examples_str)
# Force fresh imports after SDK upgrades in the same notebook kernel.
for module_name in list(sys.modules):
    if module_name == 'codex_app_server' or module_name.startswith('codex_app_server.'):
        sys.modules.pop(module_name, None)
print('Kernel:', sys.executable)
print('SDK source:', src_dir)
print('Runtime package:', runtime_version)
# Cell 2: imports (public only)
from _bootstrap import assistant_text_from_turn, find_turn_by_id, server_label
from codex_app_server import (
AsyncCodex,
Codex,
ImageInput,
LocalImageInput,
TextInput,
retry_on_overload,
)
# Cell 3: simple sync conversation
with Codex() as codex:
    thread = codex.thread_start(model='gpt-5.4', config={'model_reasoning_effort': 'high'})
    turn = thread.turn(TextInput('Explain gradient descent in 3 bullets.'))
    result = turn.run()
    # Re-read the thread from the server so the printed text reflects
    # persisted state rather than only the in-memory turn result.
    persisted = thread.read(include_turns=True)
    persisted_turn = find_turn_by_id(persisted.thread.turns, result.id)
    print('server:', server_label(codex.metadata))
    print('status:', result.status)
    print(assistant_text_from_turn(persisted_turn))
# Cell 4: multi-turn continuity in same thread
with Codex() as codex:
    thread = codex.thread_start(model='gpt-5.4', config={'model_reasoning_effort': 'high'})
    # Two sequential turns on one thread: the second prompt's "that" relies
    # on the server carrying the first turn's context forward.
    first = thread.turn(TextInput('Give a short summary of transformers.')).run()
    second = thread.turn(TextInput('Now explain that to a high-school student.')).run()
    persisted = thread.read(include_turns=True)
    second_turn = find_turn_by_id(persisted.thread.turns, second.id)
    print('first status:', first.status)
    print('second status:', second.status)
    print('second text:', assistant_text_from_turn(second_turn))
# Cell 5: full thread lifecycle and branching (sync)
with Codex() as codex:
    thread = codex.thread_start(model='gpt-5.4', config={'model_reasoning_effort': 'high'})
    first = thread.turn(TextInput('One sentence about structured planning.')).run()
    second = thread.turn(TextInput('Now restate it for a junior engineer.')).run()
    # Exercise the lifecycle surface: resume, list, read, rename,
    # archive/unarchive on the same underlying thread id.
    reopened = codex.thread_resume(thread.id)
    listing_active = codex.thread_list(limit=20, archived=False)
    reading = reopened.read(include_turns=True)
    _ = reopened.set_name('sdk-lifecycle-demo')
    _ = codex.thread_archive(reopened.id)
    listing_archived = codex.thread_list(limit=20, archived=True)
    unarchived = codex.thread_unarchive(reopened.id)
    # resume/fork/compact are treated as best-effort: a server that does not
    # support one of them prints skipped(<ExceptionName>) instead of failing
    # the walkthrough. The broad `except Exception` here is deliberate.
    resumed_info = 'n/a'
    try:
        resumed = codex.thread_resume(
            unarchived.id,
            model='gpt-5.4',
            config={'model_reasoning_effort': 'high'},
        )
        resumed_result = resumed.turn(TextInput('Continue in one short sentence.')).run()
        resumed_info = f'{resumed_result.id} {resumed_result.status}'
    except Exception as e:
        resumed_info = f'skipped({type(e).__name__})'
    forked_info = 'n/a'
    try:
        forked = codex.thread_fork(unarchived.id, model='gpt-5.4')
        forked_result = forked.turn(TextInput('Take a different angle in one short sentence.')).run()
        forked_info = f'{forked_result.id} {forked_result.status}'
    except Exception as e:
        forked_info = f'skipped({type(e).__name__})'
    compact_info = 'sent'
    try:
        _ = unarchived.compact()
    except Exception as e:
        compact_info = f'skipped({type(e).__name__})'
    print('Lifecycle OK:', thread.id)
    print('first:', first.id, first.status)
    print('second:', second.id, second.status)
    print('read.turns:', len(reading.thread.turns or []))
    print('list.active:', len(listing_active.data))
    print('list.archived:', len(listing_archived.data))
    print('resumed:', resumed_info)
    print('forked:', forked_info)
    print('compact:', compact_info)
# Cell 5b: one turn with most optional turn params
from pathlib import Path
from codex_app_server import (
    AskForApproval,
    Personality,
    ReasoningEffort,
    ReasoningSummary,
    SandboxPolicy,
)

# JSON Schema the model's reply must conform to (strict: no extra keys).
output_schema = {
    'type': 'object',
    'properties': {
        'summary': {'type': 'string'},
        'actions': {'type': 'array', 'items': {'type': 'string'}},
    },
    'required': ['summary', 'actions'],
    'additionalProperties': False,
}
# Policies are built from wire-format dicts/strings via pydantic's
# model_validate rather than typed constructors.
sandbox_policy = SandboxPolicy.model_validate({'type': 'readOnly', 'access': {'type': 'fullAccess'}})
summary = ReasoningSummary.model_validate('concise')
with Codex() as codex:
    thread = codex.thread_start(model='gpt-5.4', config={'model_reasoning_effort': 'high'})
    # Per-turn options (model, effort, sandbox, schema, ...) override the
    # thread-level defaults set in thread_start.
    turn = thread.turn(
        TextInput('Propose a safe production feature-flag rollout. Return JSON matching the schema.'),
        approval_policy=AskForApproval.model_validate('never'),
        cwd=str(Path.cwd()),
        effort=ReasoningEffort.medium,
        model='gpt-5.4',
        output_schema=output_schema,
        personality=Personality.pragmatic,
        sandbox_policy=sandbox_policy,
        summary=summary,
    )
    result = turn.run()
    persisted = thread.read(include_turns=True)
    persisted_turn = find_turn_by_id(persisted.thread.turns, result.id)
    print('status:', result.status)
    print(assistant_text_from_turn(persisted_turn))
# Cell 5c: choose highest model + highest supported reasoning, then run turns
from pathlib import Path
from codex_app_server import (
    AskForApproval,
    Personality,
    ReasoningEffort,
    ReasoningSummary,
    SandboxPolicy,
)

# Reasoning-effort names ordered weakest (0) to strongest (5); used to rank a
# model's supported efforts. Names absent from this map rank below all of these.
reasoning_rank = {
    'none': 0,
    'minimal': 1,
    'low': 2,
    'medium': 3,
    'high': 4,
    'xhigh': 5,
}
def pick_highest_model(models):
    """Pick the 'highest' model from a listing.

    Prefers visible entries over hidden ones, drops entries superseded by an
    upgrade present in the same listing, and breaks ties by the
    lexicographically greatest (model, id) pair.
    """
    visible = [entry for entry in models if not entry.hidden] or models
    # Every name (id or model) present in the visible pool; an `upgrade`
    # pointing at one of these marks its owner as superseded.
    names = {entry.id for entry in visible}
    names |= {entry.model for entry in visible}
    current = [entry for entry in visible if not (entry.upgrade and entry.upgrade in names)]
    candidates = current if current else visible
    return max(candidates, key=lambda entry: (entry.model, entry.id))
def pick_highest_turn_effort(model) -> ReasoningEffort:
    """Return the strongest reasoning effort the model advertises.

    Falls back to medium when the model lists no supported efforts.
    """
    options = model.supported_reasoning_efforts
    if not options:
        return ReasoningEffort.medium

    def rank(option):
        # Efforts missing from reasoning_rank sort below every known name.
        return reasoning_rank.get(option.reasoning_effort.value, -1)

    strongest = max(options, key=rank)
    return ReasoningEffort(strongest.reasoning_effort.value)
# Strict JSON Schema for the structured second turn (no extra keys allowed).
output_schema = {
    'type': 'object',
    'properties': {
        'summary': {'type': 'string'},
        'actions': {'type': 'array', 'items': {'type': 'string'}},
    },
    'required': ['summary', 'actions'],
    'additionalProperties': False,
}
sandbox_policy = SandboxPolicy.model_validate({'type': 'readOnly', 'access': {'type': 'fullAccess'}})
with Codex() as codex:
    # Discover all models (including hidden ones) and pick the top candidate
    # plus the strongest reasoning effort it supports.
    models = codex.models(include_hidden=True)
    selected_model = pick_highest_model(models.data)
    selected_effort = pick_highest_turn_effort(selected_model)
    print('selected.model:', selected_model.model)
    print('selected.effort:', selected_effort.value)
    thread = codex.thread_start(model=selected_model.model, config={'model_reasoning_effort': selected_effort.value})
    first = thread.turn(
        TextInput('Give one short sentence about reliable production releases.'),
        model=selected_model.model,
        effort=selected_effort,
    ).run()
    persisted = thread.read(include_turns=True)
    first_turn = find_turn_by_id(persisted.thread.turns, first.id)
    print('agent.message:', assistant_text_from_turn(first_turn))
    print('items:', 0 if first_turn is None else len(first_turn.items or []))
    # Second turn exercises the full set of optional per-turn parameters
    # against the dynamically selected model.
    second = thread.turn(
        TextInput('Return JSON for a safe feature-flag rollout plan.'),
        approval_policy=AskForApproval.model_validate('never'),
        cwd=str(Path.cwd()),
        effort=selected_effort,
        model=selected_model.model,
        output_schema=output_schema,
        personality=Personality.pragmatic,
        sandbox_policy=sandbox_policy,
        summary=ReasoningSummary.model_validate('concise'),
    ).run()
    persisted = thread.read(include_turns=True)
    second_turn = find_turn_by_id(persisted.thread.turns, second.id)
    print('agent.message.params:', assistant_text_from_turn(second_turn))
    print('items.params:', 0 if second_turn is None else len(second_turn.items or []))
# Cell 6: multimodal with remote image
remote_image_url = 'https://raw.githubusercontent.com/github/explore/main/topics/python/python.png'
with Codex() as codex:
    thread = codex.thread_start(model='gpt-5.4', config={'model_reasoning_effort': 'high'})
    # A single turn may mix text and image inputs; ImageInput is given a URL
    # (presumably fetched server-side — confirm against SDK docs).
    result = thread.turn([
        TextInput('What do you see in this image? 3 bullets.'),
        ImageInput(remote_image_url),
    ]).run()
    persisted = thread.read(include_turns=True)
    persisted_turn = find_turn_by_id(persisted.thread.turns, result.id)
    print('status:', result.status)
    print(assistant_text_from_turn(persisted_turn))
# Cell 7: multimodal with local image (generated temporary file)
# FIX: temporary_sample_image_path was never imported anywhere above (Cell 2
# pulls only three helpers from _bootstrap), so this cell raised NameError.
# Import it from the notebook helper module that provides the other helpers.
# NOTE(review): assumes the helper lives in _bootstrap — confirm.
from _bootstrap import temporary_sample_image_path

with temporary_sample_image_path() as local_image_path:
    with Codex() as codex:
        thread = codex.thread_start(model='gpt-5.4', config={'model_reasoning_effort': 'high'})
        # LocalImageInput takes an absolute filesystem path; resolve() makes
        # the temporary path absolute before handing it to the SDK.
        result = thread.turn([
            TextInput('Describe the colors and layout in this generated local image in 2 bullets.'),
            LocalImageInput(str(local_image_path.resolve())),
        ]).run()
        persisted = thread.read(include_turns=True)
        persisted_turn = find_turn_by_id(persisted.thread.turns, result.id)
        print('status:', result.status)
        print(assistant_text_from_turn(persisted_turn))
# Cell 8: retry-on-overload pattern
with Codex() as codex:
    thread = codex.thread_start(model='gpt-5.4', config={'model_reasoning_effort': 'high'})
    # retry_on_overload re-invokes the callable on overload errors, up to 3
    # attempts, with retry delays bounded between 0.25s and 2.0s.
    result = retry_on_overload(
        lambda: thread.turn(TextInput('List 5 failure modes in distributed systems.')).run(),
        max_attempts=3,
        initial_delay_s=0.25,
        max_delay_s=2.0,
    )
    persisted = thread.read(include_turns=True)
    persisted_turn = find_turn_by_id(persisted.thread.turns, result.id)
    print('status:', result.status)
    print(assistant_text_from_turn(persisted_turn))
# Cell 9: full thread lifecycle and branching (async)
import asyncio  # NOTE(review): unused in this cell — top-level await drives the coroutine

async def async_lifecycle_demo():
    """Async mirror of the Cell 5 lifecycle walkthrough.

    Note the async turn shape: ``thread.turn(...)`` is awaited to create the
    turn object, then its ``.run()`` is awaited separately.
    """
    async with AsyncCodex() as codex:
        thread = await codex.thread_start(model='gpt-5.4', config={'model_reasoning_effort': 'high'})
        first = await (await thread.turn(TextInput('One sentence about structured planning.'))).run()
        second = await (await thread.turn(TextInput('Now restate it for a junior engineer.'))).run()
        # Lifecycle surface: resume, list, read, rename, archive/unarchive.
        reopened = await codex.thread_resume(thread.id)
        listing_active = await codex.thread_list(limit=20, archived=False)
        reading = await reopened.read(include_turns=True)
        _ = await reopened.set_name('sdk-lifecycle-demo')
        _ = await codex.thread_archive(reopened.id)
        listing_archived = await codex.thread_list(limit=20, archived=True)
        unarchived = await codex.thread_unarchive(reopened.id)
        # resume/fork/compact are best-effort: an unsupported operation prints
        # skipped(<ExceptionName>) instead of failing the demo; the broad
        # `except Exception` here is deliberate.
        resumed_info = 'n/a'
        try:
            resumed = await codex.thread_resume(
                unarchived.id,
                model='gpt-5.4',
                config={'model_reasoning_effort': 'high'},
            )
            resumed_result = await (await resumed.turn(TextInput('Continue in one short sentence.'))).run()
            resumed_info = f'{resumed_result.id} {resumed_result.status}'
        except Exception as e:
            resumed_info = f'skipped({type(e).__name__})'
        forked_info = 'n/a'
        try:
            forked = await codex.thread_fork(unarchived.id, model='gpt-5.4')
            forked_result = await (await forked.turn(TextInput('Take a different angle in one short sentence.'))).run()
            forked_info = f'{forked_result.id} {forked_result.status}'
        except Exception as e:
            forked_info = f'skipped({type(e).__name__})'
        compact_info = 'sent'
        try:
            _ = await unarchived.compact()
        except Exception as e:
            compact_info = f'skipped({type(e).__name__})'
        print('Lifecycle OK:', thread.id)
        print('first:', first.id, first.status)
        print('second:', second.id, second.status)
        print('read.turns:', len(reading.thread.turns or []))
        print('list.active:', len(listing_active.data))
        print('list.archived:', len(listing_archived.data))
        print('resumed:', resumed_info)
        print('forked:', forked_info)
        print('compact:', compact_info)

# Notebook kernels support top-level await.
await async_lifecycle_demo()
# Cell 10: async turn controls (best effort steer + interrupt)
import asyncio  # NOTE(review): unused in this cell — top-level await drives the coroutine

async def async_stream_demo():
    """Demonstrate steering and interrupting in-flight turns while streaming events."""
    async with AsyncCodex() as codex:
        thread = await codex.thread_start(model='gpt-5.4', config={'model_reasoning_effort': 'high'})
        # --- steer: inject extra input into a running turn (best effort) ---
        steer_turn = await thread.turn(TextInput('Count from 1 to 40 with commas, then one summary sentence.'))
        steer_result = 'sent'
        try:
            _ = await steer_turn.steer(TextInput('Keep it brief and stop after 10 numbers.'))
        except Exception as e:
            steer_result = f'skipped {type(e).__name__}'
        steer_event_count = 0
        steer_completed_status = 'unknown'
        steer_completed_turn = None
        # Drain the event stream; a 'turn/completed' event carries the final
        # turn payload. getattr handles status being an enum or a plain value.
        async for event in steer_turn.stream():
            steer_event_count += 1
            if event.method == 'turn/completed':
                steer_completed_turn = event.payload.turn
                steer_completed_status = getattr(event.payload.turn.status, 'value', str(event.payload.turn.status))
        # assumes assistant_text_from_turn tolerates None (no completed event seen) — TODO confirm
        steer_preview = assistant_text_from_turn(steer_completed_turn).strip() or '[no assistant text]'
        # --- interrupt: cancel a running turn (best effort) ---
        interrupt_turn = await thread.turn(TextInput('Count from 1 to 200 with commas, then one summary sentence.'))
        interrupt_result = 'sent'
        try:
            _ = await interrupt_turn.interrupt()
        except Exception as e:
            interrupt_result = f'skipped {type(e).__name__}'
        interrupt_event_count = 0
        interrupt_completed_status = 'unknown'
        interrupt_completed_turn = None
        async for event in interrupt_turn.stream():
            interrupt_event_count += 1
            if event.method == 'turn/completed':
                interrupt_completed_turn = event.payload.turn
                interrupt_completed_status = getattr(event.payload.turn.status, 'value', str(event.payload.turn.status))
        interrupt_preview = assistant_text_from_turn(interrupt_completed_turn).strip() or '[no assistant text]'
        print('steer.result:', steer_result)
        print('steer.final.status:', steer_completed_status)
        print('steer.events.count:', steer_event_count)
        print('steer.assistant.preview:', steer_preview)
        print('interrupt.result:', interrupt_result)
        print('interrupt.final.status:', interrupt_completed_status)
        print('interrupt.events.count:', interrupt_event_count)
        print('interrupt.assistant.preview:', interrupt_preview)

# Notebook kernels support top-level await.
await async_stream_demo()