From d04aa26f31a75b754c8f2f049b24a27584ae80a9 Mon Sep 17 00:00:00 2001 From: Mathieu Virbel Date: Tue, 10 Feb 2026 19:25:53 -0600 Subject: [PATCH] Update marimo notebook docs with lessons from workflow debugging - Add rules: all imports in setup cell, cell output at top level, async cells need async def, return classes from model cells, use python-dotenv for .env loading - Add marimo check validation step to AGENTS.md and notebook-patterns.md - Add "always create new workflow" rule to AGENTS.md - Add new doc sections: Cell Output Must Be at the Top Level, Async Cells, Cells That Define Classes, Fixing _unparsable_cell, Checking Notebooks Before Running - Update all code examples to follow new import/output rules - Update workflows/lib/llm.py for mirascope v2 API --- AGENTS.md | 14 ++++ docs/notebook-patterns.md | 167 ++++++++++++++++++++++++++++++++++---- workflows/lib/llm.py | 31 ++++--- 3 files changed, 182 insertions(+), 30 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index ecf6304..d9c2677 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -53,6 +53,10 @@ Also create a notebook when the user asks to "create a workflow", "write a workf If you're unsure whether a question is simple enough to answer directly or needs a notebook, **ask the user**. +### Always create a new workflow + +When the user requests a workflow, **always create a new notebook file**. Do **not** modify or re-run an existing workflow unless the user explicitly asks you to (e.g., "update workflow 001", "fix the sentiment notebook", "re-run the existing analysis"). Each new request gets its own sequentially numbered file — even if it covers a similar topic to an earlier workflow. + ### File naming and location All notebooks go in the **`workflows/`** directory. Use a sequential number prefix so workflows stay ordered by creation: @@ -87,6 +91,16 @@ Before writing any notebook, **always propose a plan first** and get the user's Only proceed to implementation after the user confirms the plan. 
+### Validate before delivering + +After writing or editing a notebook, **always run `uvx marimo check`** to verify it has no structural errors (duplicate variables, undefined names, branch expressions, etc.): + +```bash +uvx marimo check workflows/NNN_topic_scope.py +``` + +A clean check (no output, exit code 0) means the notebook is valid. Fix any errors before delivering the notebook to the user. + ### Steps 1. **Identify people** — Use ContactDB to resolve names/emails to `contact_id` values. For "me"/"my" questions, always start with `GET /api/contacts/me`. diff --git a/docs/notebook-patterns.md b/docs/notebook-patterns.md index e5611c1..4b0bb54 100644 --- a/docs/notebook-patterns.md +++ b/docs/notebook-patterns.md @@ -25,11 +25,11 @@ def cell_two(x): **Key rules:** - Cells declare dependencies via function parameters - Cells return values as tuples: `return (var1, var2,)` -- The **last expression** in a cell is displayed as rich output in the marimo UI (dataframes render as tables, dicts as collapsible trees) +- The **last expression at the top level** of a cell is displayed as rich output in the marimo UI (dataframes render as tables, dicts as collapsible trees). Expressions inside `if`/`else`/`for` blocks do **not** count — see [Cell Output Must Be at the Top Level](#cell-output-must-be-at-the-top-level) below - Use `mo.md("# heading")` for formatted markdown output (import `mo` once in setup — see below) - No manual execution order; the DAG determines it - **Variable names must be unique across cells.** Every variable assigned at the top level of a cell is tracked by marimo's DAG. If two cells both define `resp`, marimo raises `MultipleDefinitionError` and refuses to run. Prefix cell-local variables with `_` (e.g., `_resp`, `_rows`, `_data`) to make them **private** to that cell — marimo ignores `_`-prefixed names. -- **Import shared modules once** in a single setup cell and pass them as cell parameters. 
Do NOT `import marimo as mo` in multiple cells — that defines `mo` twice. Instead, import it once in `setup` and receive it via `def my_cell(mo):`. +- **All imports must go in the `setup` cell.** Every `import` statement creates a top-level variable (e.g., `import asyncio` defines `asyncio`). If two cells both `import asyncio`, marimo raises `MultipleDefinitionError`. Place **all** imports in a single setup cell and pass them as cell parameters. Do NOT `import marimo as mo` or `import asyncio` in multiple cells — import once in `setup`, then receive via `def my_cell(mo, asyncio):`. ### Cell Variable Scoping — Example @@ -79,6 +79,112 @@ def fetch_details(client, DATAINDEX, results): > **Note:** Variables inside nested `def` functions are naturally local and don't need `_` prefixes — e.g., `resp` inside a `def fetch_all(...)` helper is fine because it's scoped to the function, not the cell. +### Cell Output Must Be at the Top Level + +Marimo only renders the **last expression at the top level** of a cell as rich output. An expression buried inside an `if`/`else`, `for`, `try`, or any other block is **not** displayed — it's silently discarded. 
+
+**BROKEN** — `_df` inside the `if` branch is never rendered:
+
+```python
+@app.cell
+def show_results(results, mo):
+    if results:
+        _df = pl.DataFrame(results)
+        mo.md(f"**Found {len(results)} results**")
+        _df  # Inside an if block — marimo does NOT display this
+    else:
+        mo.md("**No results found**")
+    return
+```
+
+**FIXED** — assign inside the branches, display at the top level:
+
+```python
+@app.cell
+def show_results(results, mo):
+    _output = None
+    if results:
+        _output = pl.DataFrame(results)
+        # (a bare mo.md() call here would be discarded too — it isn't the last top-level expression)
+    else:
+        _output = mo.md("**No results found**")
+    _output  # Top-level last expression — marimo renders this
+    return
+```
+
+**Rule of thumb:** initialize a `_output = None` variable before any conditional, assign the displayable value inside the branches, then put `_output` as the last top-level expression. This applies to `mo.md()` feedback as well — a bare `mo.md()` call inside a branch is discarded like any other non-top-level expression, so assign it to `_output` (as in the `else` path above) rather than calling it bare.
+
+### Async Cells
+
+When a cell uses `await` (e.g., for `llm_call` or `asyncio.gather`), you **must** declare it as `async def`:
+
+```python
+@app.cell
+async def analyze(meetings, llm_call, ResponseModel, asyncio):
+    async def _score(meeting):
+        return await llm_call(prompt=..., response_model=ResponseModel)
+
+    results = await asyncio.gather(*[_score(_m) for _m in meetings])
+    return (results,)
+```
+
+Note that `asyncio` is imported in the `setup` cell and received here as a parameter — never `import asyncio` inside individual cells.
+
+If you write `await` in a non-async cell, marimo cannot parse the cell and saves it as an `_unparsable_cell` string literal — the cell won't run, and you'll see `SyntaxError: 'return' outside function` or similar errors. See [Fixing `_unparsable_cell`](#fixing-_unparsable_cell) below.
+
+### Cells That Define Classes Must Return Them
+
+If a cell defines Pydantic models (or any class) that other cells need, it **must** return them. Note that `BaseModel` is imported once in the `setup` cell (per the all-imports-in-setup rule) and received as a parameter:
+
+```python
+@app.cell
+def models(BaseModel):
+    # BaseModel comes from `setup` — never `from pydantic import BaseModel` here
+
+    class MeetingSentiment(BaseModel):
+        overall_sentiment: str
+        sentiment_score: int
+
+    class FrustrationExtraction(BaseModel):
+        has_frustrations: bool
+        frustrations: list[dict]
+
+    return (MeetingSentiment, FrustrationExtraction,)  # Other cells receive these as parameters
+```
+
+A bare `return` (or no return) means those classes are invisible to the rest of the notebook.
+
+### Fixing `_unparsable_cell`
+
+When marimo can't parse a cell into a proper `@app.cell` function, it saves the raw code as `app._unparsable_cell("...", name="cell_name")`. These cells **won't run** and show errors like `SyntaxError: 'return' outside function`.
+
+**Common causes:**
+1. Using `await` without making the cell `async def`
+2. Using `return` in code that marimo failed to wrap into a function (usually a side effect of cause 1)
+
+**How to fix:** Convert the `_unparsable_cell` string back into a proper `@app.cell` decorated function:
+
+```python
+# BROKEN — saved as _unparsable_cell because of top-level await
+app._unparsable_cell("""
+results = await asyncio.gather(...)
+return results
+""", name="my_cell")
+
+# FIXED — proper async cell function (asyncio imported in setup, received as parameter)
+@app.cell
+async def my_cell(some_dependency, asyncio):
+    results = await asyncio.gather(...)
+ return (results,) +``` + +**Key differences to note when converting:** +- Wrap the code in an `async def` function (if it uses `await`) +- Add cell dependencies as function parameters (including imports like `asyncio`) +- Return values as tuples: `return (var,)` not `return var` +- Prefix cell-local variables with `_` +- Never add `import` statements inside the cell — all imports belong in `setup` + ### Inline Dependencies with PEP 723 Use PEP 723 `/// script` metadata so `uv run` auto-installs dependencies: @@ -90,10 +196,25 @@ Use PEP 723 `/// script` metadata so `uv run` auto-installs dependencies: # "marimo", # "httpx", # "polars", +# "mirascope[openai]", +# "pydantic", +# "python-dotenv", # ] # /// ``` +### Checking Notebooks Before Running + +Always run `marimo check` before opening or running a notebook. It catches common issues — duplicate variable definitions, `_unparsable_cell` blocks, branch expressions that won't display, and more — without needing to start the full editor: + +```bash +uvx marimo check notebook.py # Check a single notebook +uvx marimo check workflows/ # Check all notebooks in a directory +uvx marimo check --fix notebook.py # Auto-fix fixable issues +``` + +**Run this after every edit.** A clean `marimo check` (no output, exit code 0) means the notebook is structurally valid. Any errors must be fixed before running. 
+ ### Running Notebooks ```bash @@ -142,6 +263,9 @@ Every notebook against InternalAI follows this structure: # "marimo", # "httpx", # "polars", +# "mirascope[openai]", +# "pydantic", +# "python-dotenv", # ] # /// @@ -166,11 +290,15 @@ def config(): @app.cell def setup(): + from dotenv import load_dotenv + load_dotenv() # Load .env from the project root + + import asyncio # All imports go here — never import inside other cells import httpx import marimo as mo import polars as pl client = httpx.Client(timeout=30) - return (client, mo, pl,) + return (asyncio, client, mo, pl,) # --- your IN / ETL / OUT cells here --- @@ -178,6 +306,8 @@ if __name__ == "__main__": app.run() ``` +> **`load_dotenv()`** reads the `.env` file from the project root (walks up from the notebook's directory). This makes `LLM_API_KEY` and other env vars available to `os.getenv()` calls in `lib/llm.py` without requiring the shell to have them pre-set. Always include `python-dotenv` in PEP 723 dependencies and call `load_dotenv()` early in the setup cell. + **The `params` cell must always be the first cell** after `app = marimo.App()`. It contains all user-configurable constants (search terms, date ranges, target names, etc.) as plain Python values. This way the user can tweak the workflow by editing a single cell at the top — no need to hunt through the code for hardcoded values. ## Pagination Helper @@ -429,7 +559,7 @@ def display_timeline(timeline_df): When you need to classify, score, or extract structured information from each entity (e.g. "is this meeting about project X?", "rate the relevance of this email"), use the `llm_call` helper from `workflows/lib`. It sends each item to an LLM and parses the response into a typed Pydantic model. -**Prerequisites:** Copy `.env.example` to `.env` and fill in your `LLM_API_KEY`. Add `mirascope` and `pydantic` to the notebook's PEP 723 dependencies. +**Prerequisites:** Copy `.env.example` to `.env` and fill in your `LLM_API_KEY`. 
Add `mirascope`, `pydantic`, and `python-dotenv` to the notebook's PEP 723 dependencies. ```python # /// script @@ -438,23 +568,28 @@ When you need to classify, score, or extract structured information from each en # "marimo", # "httpx", # "polars", -# "mirascope", +# "mirascope[openai]", # "pydantic", +# "python-dotenv", # ] # /// ``` -### Setup cell — import `llm_call` +### Setup cell — load `.env` and import `llm_call` ```python @app.cell def setup(): + from dotenv import load_dotenv + load_dotenv() # Makes LLM_API_KEY available to lib/llm.py + + import asyncio import httpx import marimo as mo import polars as pl from lib.llm import llm_call client = httpx.Client(timeout=30) - return (client, llm_call, mo, pl,) + return (asyncio, client, llm_call, mo, pl,) ``` ### Define a response model @@ -480,9 +615,7 @@ Iterate over fetched entities and call `llm_call` for each one. Since `llm_call` ```python @app.cell -async def llm_filter(meetings, llm_call, RelevanceScore, pl, mo): - import asyncio - +async def llm_filter(meetings, llm_call, RelevanceScore, pl, mo, asyncio): _topic = "Greyhaven" async def _score(meeting): @@ -515,20 +648,26 @@ When generating marimo notebooks, follow these rules strictly. Violations cause ### Do - **Prefix cell-local variables with `_`** — `_resp`, `_rows`, `_m`, `_data`, `_chunk`. Marimo ignores `_`-prefixed names so they won't clash across cells. -- **Import shared modules once in `setup`** and pass them as cell parameters: `def my_cell(client, mo, pl):`. +- **Put all imports in the `setup` cell** and pass them as cell parameters: `def my_cell(client, mo, pl, asyncio):`. Never `import` inside other cells — even `import asyncio` in two async cells causes `MultipleDefinitionError`. - **Give returned DataFrames unique names** — `email_df`, `meeting_df`, `timeline_df`. Never use a bare `df` that might collide with another cell. - **Return only values other cells need** — everything else should be `_`-prefixed and stays private to the cell. 
-- **Use `from datetime import datetime` inside the cell** that needs it (stdlib imports are fine inline since they're `_`-safe inside functions, but avoid assigning them to non-`_` names if another cell does the same). +- **Import stdlib modules in `setup` too** — even `from datetime import datetime` creates a top-level name. If two cells both import `datetime`, marimo errors. Import it once in `setup` and receive it as a parameter, or use it inside a `_`-prefixed helper function where it's naturally scoped. - **Every non-utility cell must show a preview** — see the "Cell Output Previews" section below. +- **Keep cell output expressions at the top level** — if a cell conditionally displays a DataFrame, initialize `_output = None` before the `if`/`else`, assign inside the branches, then put `_output` as the last top-level expression. Expressions inside `if`/`else`/`for` blocks are silently ignored by marimo. - **Put all user parameters in a `params` cell as the first cell** — date ranges, search terms, target names, limits. Never hardcode these values deeper in the notebook. +- **Declare cells as `async def` when using `await`** — `@app.cell` followed by `async def cell_name(...)`. This includes cells using `asyncio.gather`, `await llm_call(...)`, or any async API. +- **Return classes/models from cells that define them** — if a cell defines `class MyModel(BaseModel)`, return it so other cells can use it as a parameter: `return (MyModel,)`. +- **Use `python-dotenv` to load `.env`** — add `python-dotenv` to PEP 723 dependencies and call `load_dotenv()` early in the setup cell (before importing `lib.llm`). This ensures `LLM_API_KEY` and other env vars are available without requiring them to be pre-set in the shell. ### Don't - **Don't define the same variable name in two cells** — even `resp = ...` in cell A and `resp = ...` in cell B is a fatal error. -- **Don't `import marimo as mo` in multiple cells** — this defines `mo` twice. 
Import it once in `setup`, then receive it via `def my_cell(mo):`. +- **Don't `import` inside non-setup cells** — every `import X` defines a top-level variable `X`. If two cells both `import asyncio`, marimo raises `MultipleDefinitionError` and refuses to run. Put all imports in the `setup` cell and receive them as function parameters. - **Don't use generic top-level names** like `df`, `rows`, `resp`, `data`, `result` — either prefix with `_` or give them a unique descriptive name. - **Don't return temporary variables** — if `_rows` is only used to build a DataFrame, keep it `_`-prefixed and only return the DataFrame. -- **Don't use `import X` at the top level of multiple cells** for the same module — the module variable name would be duplicated. Import once in `setup` or use `_`-prefixed local imports (`_json = __import__("json")`). +- **Don't use `await` in a non-async cell** — this causes marimo to save the cell as `_unparsable_cell` (a string literal that won't execute). Always use `async def` for cells that call async functions. +- **Don't define classes in a cell without returning them** — a bare `return` or no return makes classes invisible to the DAG. Other cells can't receive them as parameters. +- **Don't put display expressions inside `if`/`else`/`for` blocks** — marimo only renders the last top-level expression. A DataFrame inside an `if` branch is silently discarded. Use the `_output = None` pattern instead (see [Cell Output Must Be at the Top Level](#cell-output-must-be-at-the-top-level)). 
## Cell Output Previews diff --git a/workflows/lib/llm.py b/workflows/lib/llm.py index 3cfe91d..c2e4786 100644 --- a/workflows/lib/llm.py +++ b/workflows/lib/llm.py @@ -1,9 +1,9 @@ -"""Simple LLM helper for workbooks using Mirascope.""" +"""Simple LLM helper for workbooks using Mirascope v2.""" import os from typing import TypeVar -from mirascope.core import Messages, openai +from mirascope import llm from pydantic import BaseModel T = TypeVar("T", bound=BaseModel) @@ -13,11 +13,14 @@ _api_key = os.getenv("LLM_API_KEY", "") _base_url = os.getenv("LLM_API_URL", "https://litellm-notrack.app.monadical.io") _model = os.getenv("LLM_MODEL", "GLM-4.5-Air-FP8-dev") -if _api_key: - os.environ["OPENAI_API_KEY"] = _api_key -if _base_url: - base = _base_url.rstrip("/") - os.environ["OPENAI_BASE_URL"] = base if base.endswith("/v1") else f"{base}/v1" +# Register our LiteLLM endpoint as an OpenAI-compatible provider +_base = (_base_url or "").rstrip("/") +llm.register_provider( + "openai", + scope="litellm/", + base_url=_base if _base.endswith("/v1") else f"{_base}/v1", + api_key=_api_key, +) async def llm_call( @@ -39,13 +42,9 @@ async def llm_call( """ use_model = model or _model - @openai.call(model=use_model, response_model=response_model) - async def _call(sys: str, usr: str) -> openai.OpenAIDynamicConfig: - return { - "messages": [ - Messages.System(sys), - Messages.User(usr), - ] - } + @llm.call(f"litellm/{use_model}", format=response_model) + async def _call() -> str: + return f"{system_prompt}\n\n{prompt}" - return await _call(system_prompt, prompt) + response = await _call() + return response.parse()