Files
internalai-agent/workflows/lib/llm.py
Mathieu Virbel d04aa26f31 Update marimo notebook docs with lessons from workflow debugging
- Add rules: all imports in setup cell, cell output at top level,
  async cells need async def, return classes from model cells,
  use python-dotenv for .env loading
- Add marimo check validation step to AGENTS.md and notebook-patterns.md
- Add "always create new workflow" rule to AGENTS.md
- Add new doc sections: Cell Output Must Be at the Top Level,
  Async Cells, Cells That Define Classes, Fixing _unparsable_cell,
  Checking Notebooks Before Running
- Update all code examples to follow new import/output rules
- Update workflows/lib/llm.py for mirascope v2 API
2026-02-10 19:25:53 -06:00

51 lines
1.3 KiB
Python

"""Simple LLM helper for workbooks using Mirascope v2."""
import os
from typing import TypeVar
from mirascope import llm
from pydantic import BaseModel
T = TypeVar("T", bound=BaseModel)
# Configure from environment (defaults match .env.example)
_api_key = os.getenv("LLM_API_KEY", "")
_base_url = os.getenv("LLM_API_URL", "https://litellm-notrack.app.monadical.io")
_model = os.getenv("LLM_MODEL", "GLM-4.5-Air-FP8-dev")
# Register our LiteLLM endpoint as an OpenAI-compatible provider
_base = (_base_url or "").rstrip("/")
llm.register_provider(
"openai",
scope="litellm/",
base_url=_base if _base.endswith("/v1") else f"{_base}/v1",
api_key=_api_key,
)
async def llm_call(
    prompt: str,
    response_model: type[T],
    system_prompt: str = "You are a helpful assistant.",
    model: str | None = None,
) -> T:
    """Run one structured-output request against the registered provider.

    Args:
        prompt: The user prompt.
        response_model: Pydantic model describing the structured output.
        system_prompt: System instructions prepended to the prompt.
        model: Optional override for the default model from the environment.

    Returns:
        An instance of ``response_model`` parsed from the LLM response.
    """
    # Fall back to the environment-configured default when no override given.
    chosen_model = model or _model

    @llm.call(f"litellm/{chosen_model}", format=response_model)
    async def _invoke() -> str:
        # Mirascope uses the returned string as the message content.
        return f"{system_prompt}\n\n{prompt}"

    result = await _invoke()
    return result.parse()