Add LLM filtering pattern, .env.example, and workflows/lib
- Add .env.example with LLM_API_URL, LLM_MODEL, LLM_API_KEY
- Add .gitignore to exclude .env
- Add Pattern 5 (LLM filtering) to notebook-patterns.md
- Track workflows/lib with llm_call helper using mirascope
- Update README with LLM setup step and updated project structure
This commit is contained in:
5
workflows/lib/__init__.py
Normal file
5
workflows/lib/__init__.py
Normal file
@@ -0,0 +1,5 @@
|
||||
"""Library modules for contact analysis workbooks."""

# Explicit relative import (PEP 328) so the package resolves whether it is
# imported as ``lib`` (with workflows/ on sys.path, the notebook case) or as
# ``workflows.lib`` from the repository root. The previous absolute form
# ``from lib.llm import ...`` only worked in the first layout.
from .llm import llm_call

# Public API of the package.
__all__ = ["llm_call"]
51
workflows/lib/llm.py
Normal file
51
workflows/lib/llm.py
Normal file
@@ -0,0 +1,51 @@
|
||||
"""Simple LLM helper for workbooks using Mirascope."""
|
||||
|
||||
import os
|
||||
from typing import TypeVar
|
||||
|
||||
from mirascope.core import Messages, openai
|
||||
from pydantic import BaseModel
|
||||
|
||||
# Generic type variable bound to Pydantic models: the schema class a caller
# passes as ``response_model`` is also the type of the parsed return value.
T = TypeVar("T", bound=BaseModel)
|
||||
# Configure from environment (defaults match .env.example)
_api_key = os.getenv("LLM_API_KEY", "")
_base_url = os.getenv("LLM_API_URL", "https://litellm-notrack.app.monadical.io")
_model = os.getenv("LLM_MODEL", "GLM-4.5-Air-FP8-dev")


def _normalize_base_url(url: str) -> str:
    """Return *url* with trailing slashes stripped and a guaranteed ``/v1`` suffix.

    The OpenAI-compatible client expects the versioned API root; normalizing
    here lets LLM_API_URL be configured as either the bare host or the full
    ``.../v1`` endpoint.

    Args:
        url: Base URL from configuration (must be non-empty).

    Returns:
        The URL ending in ``/v1`` with no trailing slash before it.
    """
    base = url.rstrip("/")
    return base if base.endswith("/v1") else f"{base}/v1"


# Bridge our LLM_* settings to the OPENAI_* variables the OpenAI provider
# reads. NOTE(review): this overwrites any OPENAI_API_KEY / OPENAI_BASE_URL
# already present in the process environment — confirm that is intended.
if _api_key:
    os.environ["OPENAI_API_KEY"] = _api_key
if _base_url:
    os.environ["OPENAI_BASE_URL"] = _normalize_base_url(_base_url)
|
||||
|
||||
async def llm_call(
    prompt: str,
    response_model: type[T],
    system_prompt: str = "You are a helpful assistant.",
    model: str | None = None,
) -> T:
    """Run one structured LLM request and return the parsed result.

    Args:
        prompt: Text sent as the user message.
        response_model: Pydantic model class describing the expected output.
        system_prompt: Instructions sent as the system message.
        model: Optional model name; falls back to the module default when
            not provided.

    Returns:
        An instance of ``response_model`` parsed from the LLM reply.
    """
    # Empty string also falls back to the configured default model.
    chosen_model = model if model else _model

    # A per-call inner function is used so the decorator can capture the
    # requested model and response schema for this invocation.
    @openai.call(model=chosen_model, response_model=response_model)
    async def _invoke(system_text: str, user_text: str) -> openai.OpenAIDynamicConfig:
        conversation = [
            Messages.System(system_text),
            Messages.User(user_text),
        ]
        return {"messages": conversation}

    return await _invoke(system_prompt, prompt)
Reference in New Issue
Block a user