Update marimo notebook docs with lessons from workflow debugging

- Add rules: all imports in setup cell, cell output at top level, async cells need async def, return classes from model cells, use python-dotenv for .env loading
- Add marimo check validation step to AGENTS.md and notebook-patterns.md
- Add "always create new workflow" rule to AGENTS.md
- Add new doc sections: Cell Output Must Be at the Top Level, Async Cells, Cells That Define Classes, Fixing _unparsable_cell, Checking Notebooks Before Running
- Update all code examples to follow new import/output rules
- Update workflows/lib/llm.py for mirascope v2 API
This commit is contained in:
@@ -1,9 +1,9 @@
-"""Simple LLM helper for workbooks using Mirascope."""
+"""Simple LLM helper for workbooks using Mirascope v2."""
 
 import os
 from typing import TypeVar
 
-from mirascope.core import Messages, openai
+from mirascope import llm
 from pydantic import BaseModel
 
 T = TypeVar("T", bound=BaseModel)
@@ -13,11 +13,14 @@ _api_key = os.getenv("LLM_API_KEY", "")
 _base_url = os.getenv("LLM_API_URL", "https://litellm-notrack.app.monadical.io")
 _model = os.getenv("LLM_MODEL", "GLM-4.5-Air-FP8-dev")
 
-if _api_key:
-    os.environ["OPENAI_API_KEY"] = _api_key
-if _base_url:
-    base = _base_url.rstrip("/")
-    os.environ["OPENAI_BASE_URL"] = base if base.endswith("/v1") else f"{base}/v1"
+# Register our LiteLLM endpoint as an OpenAI-compatible provider
+_base = (_base_url or "").rstrip("/")
+llm.register_provider(
+    "openai",
+    scope="litellm/",
+    base_url=_base if _base.endswith("/v1") else f"{_base}/v1",
+    api_key=_api_key,
+)
 
 
 async def llm_call(
@@ -39,13 +42,9 @@ async def llm_call(
     """
     use_model = model or _model
 
-    @openai.call(model=use_model, response_model=response_model)
-    async def _call(sys: str, usr: str) -> openai.OpenAIDynamicConfig:
-        return {
-            "messages": [
-                Messages.System(sys),
-                Messages.User(usr),
-            ]
-        }
-
-    return await _call(system_prompt, prompt)
+    @llm.call(f"litellm/{use_model}", format=response_model)
+    async def _call() -> str:
+        return f"{system_prompt}\n\n{prompt}"
+
+    response = await _call()
+    return response.parse()
Reference in New Issue
Block a user